code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.twitter.scrooge
trait ThriftException extends Exception
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.tstreams.agents.consumer.subscriber
import com.bwsw.tstreams.agents.consumer
import com.bwsw.tstreams.agents.consumer.Offset.IOffset
import com.bwsw.tstreams.agents.consumer.subscriber.QueueBuilder.InMemory
import com.bwsw.tstreams.common.PartitionIterationPolicy
/**
* Created by Ivan Kudryavtsev on 19.08.16.
*/
/**
 * Options for the subscriber consumer (V2).
 *
 * NOTE(review): the descriptions below are inferred from names and default
 * values — confirm against the subscriber implementation.
 *
 * @param transactionsPreload how many transactions to preload per fetch
 * @param dataPreload how many data items to preload per transaction
 * @param readPolicy policy used to iterate over partitions
 * @param offset initial offset to start consuming from
 * @param useLastOffset whether to resume from the last stored offset
 * @param rememberFirstStartOffset whether to checkpoint the start offset on startup (defaults to true)
 * @param agentAddress network address of this subscriber agent
 * @param zkPrefixPath ZooKeeper prefix path used for coordination
 * @param transactionBufferWorkersThreadPoolAmount size of the transaction-buffer worker pool (default 1)
 * @param processingEngineWorkersThreadAmount size of the processing-engine worker pool (default 1)
 * @param pollingFrequencyDelayMs polling delay in milliseconds (default 1000)
 * @param transactionQueueMaxLengthThreshold upper bound on the transaction queue length (default 10000)
 * @param transactionsQueueBuilder factory for the transaction queue (defaults to in-memory)
 */
class SubscriberOptions(val transactionsPreload: Int,
                        val dataPreload: Int,
                        val readPolicy: PartitionIterationPolicy,
                        val offset: IOffset,
                        val useLastOffset: Boolean,
                        // NOTE(review): this defaulted parameter sits before required ones
                        // (agentAddress, zkPrefixPath), so the default is only usable with
                        // named arguments. Reordering would break positional callers.
                        val rememberFirstStartOffset: Boolean = true,
                        val agentAddress: String,
                        val zkPrefixPath: String,
                        val transactionBufferWorkersThreadPoolAmount: Int = 1,
                        val processingEngineWorkersThreadAmount: Int = 1,
                        val pollingFrequencyDelayMs: Int = 1000,
                        val transactionQueueMaxLengthThreshold: Int = 10000,
                        val transactionsQueueBuilder: QueueBuilder.Abstract = new InMemory
                       ) {
  // Projects the subscriber-specific options onto the plain consumer options.
  // rememberFirstStartOffset is mapped to checkpointAtStart; the remaining
  // subscriber-only fields (agent address, ZK path, pool sizes) are not needed
  // by the underlying consumer and are intentionally dropped here.
  def getConsumerOptions() = consumer.ConsumerOptions(
    transactionsPreload = transactionsPreload,
    dataPreload = dataPreload,
    readPolicy = readPolicy,
    offset = offset,
    checkpointAtStart = rememberFirstStartOffset,
    useLastOffset = useLastOffset)
}
| bwsw/t-streams | src/main/scala/com/bwsw/tstreams/agents/consumer/subscriber/SubscriberOptions.scala | Scala | apache-2.0 | 2,564 |
package pimpathon
import scala.{PartialFunction => ~>}
import scala.collection.generic.{Growable, Shrinkable}
import scala.util.Try
import pimpathon.boolean._
import pimpathon.function._
import scala.reflect.ClassTag
object any {
  /**
   * Extension methods available on every value.
   *
   * NOTE(review): in transit the Unicode function arrow (U+21D2, `⇒`) was
   * mis-encoded as the Greek letter `β`, producing signatures like `f: A β B`
   * that do not compile. Restored throughout as the ASCII arrow `=>`; no other
   * token was changed.
   */
  implicit class AnyPimps[A](val self: A) extends AnyVal {
    def calc[B](f: A => B): B = f(self)
    def |>[B](f: A => B): B = f(self)
    def calcIf[B](p: Predicate[A])(f: A => B): Option[B] = p(self).option(f(self))
    def calcUnless[B](p: Predicate[A])(f: A => B): Option[B] = (!p(self)).option(f(self))
    def calcPF[B](pf: A ~> B): Option[B] = pf.lift(self)
    def transform(pf: A ~> A): A = pf.unify(self)
    def transformIf(condition: Boolean)(f: A => A): A = if (condition) f(self) else self
    def tapIf[Discarded](p: Predicate[A])(actions: (A => Discarded)*): A = if (p(self)) tap(actions: _*) else self
    def tapUnless[Discarded](p: Predicate[A])(actions: (A => Discarded)*): A = if (p(self)) self else tap(actions: _*)
    def tapPF[Discarded](action: A ~> Discarded): A = { action.lift(self); self }
    // Safe downcast: None when the runtime class of self is not a B.
    def castTo[B](implicit tag: ClassTag[B]): Option[B] =
      if (tag.runtimeClass.isAssignableFrom(self.getClass)) Some(self.asInstanceOf[B]) else None
    def attempt[B](f: A => B): Try[B] = Try(f(self))
    def partialMatch[B](pf: A ~> B): Option[B] = PartialFunction.condOpt(self)(pf)
    def lpair[B](f: A => B): (B, A) = (f(self), self)
    def rpair[B](f: A => B): (A, B) = (self, f(self))
    def filterSelf(p: Predicate[A]): Option[A] = p(self).option(self)
    def ifSelf(p: Predicate[A]): Option[A] = p(self).option(self)
    def filterNotSelf(p: Predicate[A]): Option[A] = (!p(self)).option(self)
    def unlessSelf(p: Predicate[A]): Option[A] = (!p(self)).option(self)
    def isOneOf(as: A*): Boolean = as.contains(self)
    def isNotOneOf(as: A*): Boolean = !as.contains(self)
    def containedIn(s: Set[A]): Boolean = s.contains(self)
    def notContainedIn(s: Set[A]): Boolean = !s.contains(self)
    def passes: AnyCapturer[A] = new AnyCapturer[A](self, b => b.option(self))
    def fails: AnyCapturer[A] = new AnyCapturer[A](self, b => (!b).option(self))
    def withFinally[B](f: A => Unit)(t: A => B): B = try t(self) finally f(self)
    def tryFinally[B](t: A => B)(f: A => Unit): B = try t(self) finally f(self)
    def cond[B](p: Predicate[A], ifTrue: A => B, ifFalse: A => B): B = if (p(self)) ifTrue(self) else ifFalse(self)
    def addTo(as: Growable[A]): A = tap(as += _)
    def removeFrom(as: Shrinkable[A]): A = tap(as -= _)
    // Anamorphism: repeatedly applies f, emitting the B's until f returns None.
    def unfold[B](f: A => Option[(B, A)]): Stream[B] = f(self).fold(Stream.empty[B])(ba => ba._1 #:: ba._2.unfold(f))
    def tap[Discarded](actions: (A => Discarded)*): A = { actions.foreach(action => action(self)); self }
    def bounded(lower: A, upper: A)(implicit na: Numeric[A]): A = na.min(na.max(lower, self), upper)
    def indent: String = self.toString.split("\n").map(" " + _).mkString("\n")
  }
  /** Continuation object returned by `passes`/`fails`; combines predicates and feeds the result to `andThen`. */
  class AnyCapturer[A](a: A, andThen: Boolean => Option[A]) {
    def one(disjuncts: Predicate[A]*): Option[A] = andThen(function.or(disjuncts: _*).apply(a))
    def all(conjuncts: Predicate[A]*): Option[A] = andThen(function.and(conjuncts: _*).apply(a))
    def none(conjuncts: Predicate[A]*): Option[A] = andThen(function.nand(conjuncts: _*).apply(a))
    def some(disjuncts: Predicate[A]*): Option[A] = andThen(function.nor(disjuncts: _*).apply(a))
  }
}
/*
* ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
* http://scalacl.googlecode.com/
*
* Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Olivier Chafik nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package scalacl
package impl
import com.nativelibs4java.opencl._
import java.util.concurrent.locks._
import collection.mutable.ArrayBuffer
private[scalacl] trait ScheduledData {
  /** OpenCL context this data belongs to. */
  def context: Context

  /** Blocks until pending operations on this data complete — presumably; TODO confirm against implementations. */
  def finish(): Unit

  /** Callback invoked when a scheduled OpenCL event has completed. */
  def eventCompleted(event: CLEvent): Unit

  /** Begins a read; events the read must wait on are appended to `out` — TODO confirm direction of `out`. */
  def startRead(out: ArrayBuffer[CLEvent]): Unit

  /** Begins a write; events the write must wait on are appended to `out` — TODO confirm direction of `out`. */
  def startWrite(out: ArrayBuffer[CLEvent]): Unit

  /** Registers the event that signals completion of a previously started read. */
  def endRead(event: CLEvent): Unit

  /** Registers the event that signals completion of a previously started write. */
  def endWrite(event: CLEvent): Unit
}
| nativelibs4java/ScalaCL | src/main/scala/scalacl/impl/data/ScheduledData.scala | Scala | bsd-3-clause | 2,145 |
package com.aurelpaulovic.scala_kata.s_99
/*
* P15 (**) Duplicate the elements of a list a given number of times.
*
* Example:
* scala> duplicateN(3, List('a, 'b, 'c, 'c, 'd))
* res0: List[Symbol] = List('a, 'a, 'a, 'b, 'b, 'b, 'c, 'c, 'c, 'c, 'c, 'c, 'd, 'd, 'd)
*/
package object p15 {
  /**
   * Repeats every element of `ls` exactly `n` times, preserving order.
   * An `n` of zero (or less) yields the empty list.
   */
  def duplicateN[A](n: Int, ls: List[A]): List[A] = ls.flatMap(List.fill(n)(_))
}
object Test {
  // Compiler test fixture — the hierarchy below is deliberately convoluted to
  // exercise pattern-match analysis on intersection types (Scala 3 `&`).
  // Do not "simplify": the exact mix of sealed/unsealed traits is the point.
  sealed trait Marker
  trait M1 extends Marker
  sealed trait Marker2 extends Marker
  sealed trait M2 extends Marker2
  sealed trait T
  trait A extends T with M2
  sealed trait T2 extends T
  case class B() extends T2 with Marker
  class C extends T2 with Marker
  case object D extends T2 with Marker
  trait Unrelated
  // Matches a value of intersection type T & Marker against a trait that is
  // unrelated to either member — presumably checking the exhaustiveness /
  // unreachable-case diagnostics emitted for this nontrivial scrutinee type.
  def m1(s: T & Marker) = s match {
    case _: Unrelated => ;
  }
}
| som-snytt/dotty | tests/patmat/nontrivial-andtype.scala | Scala | apache-2.0 | 405 |
package controllers
import javax.inject._
import org.pac4j.core.config.Config
import org.pac4j.core.profile.CommonProfile
import org.pac4j.play.scala.Security
import org.pac4j.play.store.PlaySessionStore
import play.api._
import play.api.mvc._
import play.libs.concurrent.HttpExecutionContext
@Singleton
class ApplicationController @Inject()(val config: Config, val playSessionStore: PlaySessionStore, override val ec: HttpExecutionContext) extends Controller with Security[CommonProfile] {

  /** Serves the public landing page; no authentication is enforced here. */
  def index = Action { implicit request =>
    Ok(views.html.index())
  }

  /** Serves the secure page, guarded by the pac4j client named
    * "RedirectUnauthenticatedClient" — presumably a client configured in
    * `config` that redirects anonymous users to a login flow; confirm against
    * the security module wiring. */
  def secure = Secure("RedirectUnauthenticatedClient") { profiles =>
    Action { request =>
      Ok(views.html.secure())
    }
  }
}
| kristiankime/play-pac4j-slick | app/controllers/ApplicationController.scala | Scala | mit | 705 |
package org.workcraft.plugins.petrify
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileOutputStream
import java.io.OutputStream
import java.io.PrintWriter

import scala.util.control.NonFatal

import scalaz._
import Scalaz._

import org.workcraft.plugins.petri2.ConsumerArc
import org.workcraft.plugins.petri2.PetriNet
import org.workcraft.plugins.petri2.PetriNetService
import org.workcraft.plugins.petri2.Place
import org.workcraft.plugins.petri2.ProducerArc
import org.workcraft.plugins.petri2.Transition
import org.workcraft.scala.effects.IO
import org.workcraft.scala.effects.IO._
import org.workcraft.services.ExportError
import org.workcraft.services.ExportJob
import org.workcraft.services.Exporter
import org.workcraft.services.Format
import org.workcraft.services.ModelServiceProvider
import org.workcraft.services.ServiceNotAvailableException
object DotGExporter extends Exporter {
  /** Target format of this exporter: the .g (dot-g) file format. */
  val targetFormat = Format.DotG

  /**
   * Builds an export job for the given model, or reports that the model does
   * not provide the Petri net service this exporter requires.
   */
  def export(model: ModelServiceProvider): Either[ServiceNotAvailableException, ExportJob] =
    model.implementation(PetriNetService)
      .map(impl => new PnToDotGExportJob(impl): ExportJob)
      .toRight(new ServiceNotAvailableException(PetriNetService))
}
class PnToDotGExportJob(snapshot: IO[PetriNet]) extends ExportJob {
  // This job always exports the whole net, never a partial view.
  val complete = false

  /**
   * Writes the Petri net snapshot to `file` in the .g (dot-g) format:
   * a header comment, the `.dummy` transition list, the `.graph` adjacency
   * section, the `.marking` section and the `.end` terminator.
   *
   * Returns None on success, or Some(ExportError.Exception) wrapping the
   * failure. Only non-fatal exceptions are wrapped (previously a bare
   * `case e =>` also swallowed fatal VM errors such as OutOfMemoryError);
   * fatal throwables now propagate. The writer is closed in all cases.
   */
  def job(file: File) = snapshot >>= (net => ioPure.pure {
    var writer: PrintWriter = null
    try {
      writer = new PrintWriter(new BufferedOutputStream(new FileOutputStream(file)))
      val (tPostset, _) = net.incidence
      val (_, pPostset) = net.placeIncidence
      writer.println("# File generated by Workcraft (http://workcraft.org)")
      // All transitions are declared dummy; labels are emitted in sorted order
      // so the output is deterministic for a given net.
      writer.println(".dummy " + net.transitions.map(net.labelling(_)).sorted.mkString(" "))
      writer.println(".graph")
      // One line per node: "<label> <sorted successor labels>", for both
      // transition and place postsets, sorted by source label.
      // NOTE(review): mkString(" \\n") joins with the two characters '\' 'n',
      // not a real newline — possibly intended as a backslash-continuation plus
      // newline (" \\\n"); preserved as-is pending confirmation.
      writer.println((
        tPostset.mapValues(_.map(net.labelling(_)).sorted).toList.map { case (t, x) => (net.labelling(t), x) } ++
          pPostset.mapValues(_.map(net.labelling(_)).sorted).toList.map { case (p, x) => (net.labelling(p), x) }).sortBy(_._1).map { case (from, to) => from + " " + to.mkString(" ")}.mkString(" \\n"))
      // Marked places: bare label when the marking is 1, "label=count" otherwise.
      writer.println(".marking {" + net.places.filter(net.marking(_) > 0).map(p => {
        val mrk = net.marking(p)
        val name = net.labelling(p)
        if (mrk == 1) name else name + "=" + mrk
      }).sorted.mkString(" ") + "}")
      writer.println(".end")
      None
    } catch {
      // Wrap recoverable failures only; let fatal errors escape.
      case NonFatal(e) => Some(ExportError.Exception(e))
    } finally {
      if (writer != null)
        writer.close()
    }
  })
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import java.text.SimpleDateFormat
import java.time.Instant
import java.util.Locale
import java.util.concurrent.TimeUnit
import org.apache.spark.sql.catalyst.util.{DateTimeUtils, IntervalUtils}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.DoubleType
import org.apache.spark.unsafe.types.CalendarInterval
class DateFunctionsSuite extends QueryTest with SharedSparkSession {
import testImplicits._
test("function current_date") {
val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
val d1 = DateTimeUtils.fromJavaDate(df1.select(current_date()).collect().head.getDate(0))
val d2 = DateTimeUtils.fromJavaDate(
sql("""SELECT CURRENT_DATE()""").collect().head.getDate(0))
val d3 = DateTimeUtils.millisToDays(System.currentTimeMillis())
assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
}
test("function current_timestamp and now") {
val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
checkAnswer(df1.select(countDistinct(current_timestamp())), Row(1))
// Execution in one query should return the same value
checkAnswer(sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""), Row(true))
// Current timestamp should return the current timestamp ...
val before = System.currentTimeMillis
val got = sql("SELECT CURRENT_TIMESTAMP()").collect().head.getTimestamp(0).getTime
val after = System.currentTimeMillis
assert(got >= before && got <= after)
// Now alias
checkAnswer(sql("""SELECT CURRENT_TIMESTAMP() = NOW()"""), Row(true))
}
val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
val sdfDate = new SimpleDateFormat("yyyy-MM-dd", Locale.US)
val d = new Date(sdf.parse("2015-04-08 13:10:15").getTime)
val ts = new Timestamp(sdf.parse("2013-04-08 13:10:15").getTime)
test("timestamp comparison with date strings") {
val df = Seq(
(1, Timestamp.valueOf("2015-01-01 00:00:00")),
(2, Timestamp.valueOf("2014-01-01 00:00:00"))).toDF("i", "t")
checkAnswer(
df.select("t").filter($"t" <= "2014-06-01"),
Row(Timestamp.valueOf("2014-01-01 00:00:00")) :: Nil)
checkAnswer(
df.select("t").filter($"t" >= "2014-06-01"),
Row(Timestamp.valueOf("2015-01-01 00:00:00")) :: Nil)
}
test("date comparison with date strings") {
val df = Seq(
(1, Date.valueOf("2015-01-01")),
(2, Date.valueOf("2014-01-01"))).toDF("i", "t")
checkAnswer(
df.select("t").filter($"t" <= "2014-06-01"),
Row(Date.valueOf("2014-01-01")) :: Nil)
checkAnswer(
df.select("t").filter($"t" >= "2015"),
Row(Date.valueOf("2015-01-01")) :: Nil)
}
test("date format") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(date_format($"a", "y"), date_format($"b", "y"), date_format($"c", "y")),
Row("2015", "2015", "2013"))
checkAnswer(
df.selectExpr("date_format(a, 'y')", "date_format(b, 'y')", "date_format(c, 'y')"),
Row("2015", "2015", "2013"))
}
test("year") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(year($"a"), year($"b"), year($"c")),
Row(2015, 2015, 2013))
checkAnswer(
df.selectExpr("year(a)", "year(b)", "year(c)"),
Row(2015, 2015, 2013))
}
test("quarter") {
val ts = new Timestamp(sdf.parse("2013-11-08 13:10:15").getTime)
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(quarter($"a"), quarter($"b"), quarter($"c")),
Row(2, 2, 4))
checkAnswer(
df.selectExpr("quarter(a)", "quarter(b)", "quarter(c)"),
Row(2, 2, 4))
}
test("month") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(month($"a"), month($"b"), month($"c")),
Row(4, 4, 4))
checkAnswer(
df.selectExpr("month(a)", "month(b)", "month(c)"),
Row(4, 4, 4))
}
test("dayofmonth") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(dayofmonth($"a"), dayofmonth($"b"), dayofmonth($"c")),
Row(8, 8, 8))
checkAnswer(
df.selectExpr("day(a)", "day(b)", "dayofmonth(c)"),
Row(8, 8, 8))
}
test("dayofyear") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(dayofyear($"a"), dayofyear($"b"), dayofyear($"c")),
Row(98, 98, 98))
checkAnswer(
df.selectExpr("dayofyear(a)", "dayofyear(b)", "dayofyear(c)"),
Row(98, 98, 98))
}
test("hour") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(hour($"a"), hour($"b"), hour($"c")),
Row(0, 13, 13))
checkAnswer(
df.selectExpr("hour(a)", "hour(b)", "hour(c)"),
Row(0, 13, 13))
}
test("minute") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(minute($"a"), minute($"b"), minute($"c")),
Row(0, 10, 10))
checkAnswer(
df.selectExpr("minute(a)", "minute(b)", "minute(c)"),
Row(0, 10, 10))
}
test("second") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(second($"a"), second($"b"), second($"c")),
Row(0, 15, 15))
checkAnswer(
df.selectExpr("second(a)", "second(b)", "second(c)"),
Row(0, 15, 15))
}
test("weekofyear") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(weekofyear($"a"), weekofyear($"b"), weekofyear($"c")),
Row(15, 15, 15))
checkAnswer(
df.selectExpr("weekofyear(a)", "weekofyear(b)", "weekofyear(c)"),
Row(15, 15, 15))
}
test("function date_add") {
val st1 = "2015-06-01 12:34:56"
val st2 = "2015-06-02 12:34:56"
val t1 = Timestamp.valueOf(st1)
val t2 = Timestamp.valueOf(st2)
val s1 = "2015-06-01"
val s2 = "2015-06-02"
val d1 = Date.valueOf(s1)
val d2 = Date.valueOf(s2)
val df = Seq((t1, d1, s1, st1), (t2, d2, s2, st2)).toDF("t", "d", "s", "ss")
checkAnswer(
df.select(date_add(col("d"), 1)),
Seq(Row(Date.valueOf("2015-06-02")), Row(Date.valueOf("2015-06-03"))))
checkAnswer(
df.select(date_add(col("t"), 3)),
Seq(Row(Date.valueOf("2015-06-04")), Row(Date.valueOf("2015-06-05"))))
checkAnswer(
df.select(date_add(col("s"), 5)),
Seq(Row(Date.valueOf("2015-06-06")), Row(Date.valueOf("2015-06-07"))))
checkAnswer(
df.select(date_add(col("ss"), 7)),
Seq(Row(Date.valueOf("2015-06-08")), Row(Date.valueOf("2015-06-09"))))
checkAnswer(
df.withColumn("x", lit(1)).select(date_add(col("d"), col("x"))),
Seq(Row(Date.valueOf("2015-06-02")), Row(Date.valueOf("2015-06-03"))))
checkAnswer(df.selectExpr("DATE_ADD(null, 1)"), Seq(Row(null), Row(null)))
checkAnswer(
df.selectExpr("""DATE_ADD(d, 1)"""),
Seq(Row(Date.valueOf("2015-06-02")), Row(Date.valueOf("2015-06-03"))))
}
test("function date_sub") {
val st1 = "2015-06-01 12:34:56"
val st2 = "2015-06-02 12:34:56"
val t1 = Timestamp.valueOf(st1)
val t2 = Timestamp.valueOf(st2)
val s1 = "2015-06-01"
val s2 = "2015-06-02"
val d1 = Date.valueOf(s1)
val d2 = Date.valueOf(s2)
val df = Seq((t1, d1, s1, st1), (t2, d2, s2, st2)).toDF("t", "d", "s", "ss")
checkAnswer(
df.select(date_sub(col("d"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(col("t"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(col("s"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(col("ss"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(lit(null), 1)).limit(1), Row(null))
checkAnswer(
df.withColumn("x", lit(1)).select(date_sub(col("d"), col("x"))),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(df.selectExpr("""DATE_SUB(d, null)"""), Seq(Row(null), Row(null)))
checkAnswer(
df.selectExpr("""DATE_SUB(d, 1)"""),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
}
test("time_add") {
val t1 = Timestamp.valueOf("2015-07-31 23:59:59")
val t2 = Timestamp.valueOf("2015-12-31 00:00:00")
val d1 = Date.valueOf("2015-07-31")
val d2 = Date.valueOf("2015-12-31")
val i = new CalendarInterval(2, 2, 2000000L)
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d + INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
checkAnswer(
df.selectExpr(s"t + INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
}
test("time_sub") {
val t1 = Timestamp.valueOf("2015-10-01 00:00:01")
val t2 = Timestamp.valueOf("2016-02-29 00:00:02")
val d1 = Date.valueOf("2015-09-30")
val d2 = Date.valueOf("2016-02-29")
val i = new CalendarInterval(2, 2, 2000000L)
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d - INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
checkAnswer(
df.selectExpr(s"t - INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
Row(Timestamp.valueOf("2015-12-27 00:00:00"))))
}
test("function add_months") {
val d1 = Date.valueOf("2015-08-31")
val d2 = Date.valueOf("2015-02-28")
val df = Seq((1, d1), (2, d2)).toDF("n", "d")
checkAnswer(
df.select(add_months(col("d"), 1)),
Seq(Row(Date.valueOf("2015-09-30")), Row(Date.valueOf("2015-03-28"))))
checkAnswer(
df.selectExpr("add_months(d, -1)"),
Seq(Row(Date.valueOf("2015-07-31")), Row(Date.valueOf("2015-01-28"))))
checkAnswer(
df.withColumn("x", lit(1)).select(add_months(col("d"), col("x"))),
Seq(Row(Date.valueOf("2015-09-30")), Row(Date.valueOf("2015-03-28"))))
}
test("function months_between") {
val d1 = Date.valueOf("2015-07-31")
val d2 = Date.valueOf("2015-02-16")
val t1 = Timestamp.valueOf("2014-09-30 23:30:00")
val t2 = Timestamp.valueOf("2015-09-16 12:00:00")
val s1 = "2014-09-15 11:30:00"
val s2 = "2015-10-01 00:00:00"
val df = Seq((t1, d1, s1), (t2, d2, s2)).toDF("t", "d", "s")
checkAnswer(df.select(months_between(col("t"), col("d"))), Seq(Row(-10.0), Row(7.0)))
checkAnswer(df.selectExpr("months_between(t, s)"), Seq(Row(0.5), Row(-0.5)))
checkAnswer(df.selectExpr("months_between(t, s, true)"), Seq(Row(0.5), Row(-0.5)))
Seq(true, false).foreach { roundOff =>
checkAnswer(df.select(months_between(col("t"), col("d"), roundOff)),
Seq(Row(-10.0), Row(7.0)))
checkAnswer(df.withColumn("r", lit(false)).selectExpr("months_between(t, s, r)"),
Seq(Row(0.5), Row(-0.5)))
}
}
test("function last_day") {
val df1 = Seq((1, "2015-07-23"), (2, "2015-07-24")).toDF("i", "d")
val df2 = Seq((1, "2015-07-23 00:11:22"), (2, "2015-07-24 11:22:33")).toDF("i", "t")
checkAnswer(
df1.select(last_day(col("d"))),
Seq(Row(Date.valueOf("2015-07-31")), Row(Date.valueOf("2015-07-31"))))
checkAnswer(
df2.select(last_day(col("t"))),
Seq(Row(Date.valueOf("2015-07-31")), Row(Date.valueOf("2015-07-31"))))
}
test("function next_day") {
val df1 = Seq(("mon", "2015-07-23"), ("tuesday", "2015-07-20")).toDF("dow", "d")
val df2 = Seq(("th", "2015-07-23 00:11:22"), ("xx", "2015-07-24 11:22:33")).toDF("dow", "t")
checkAnswer(
df1.select(next_day(col("d"), "MONDAY")),
Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-07-27"))))
checkAnswer(
df2.select(next_day(col("t"), "th")),
Seq(Row(Date.valueOf("2015-07-30")), Row(Date.valueOf("2015-07-30"))))
}
test("function to_date") {
val d1 = Date.valueOf("2015-07-22")
val d2 = Date.valueOf("2015-07-01")
val d3 = Date.valueOf("2014-12-31")
val t1 = Timestamp.valueOf("2015-07-22 10:00:00")
val t2 = Timestamp.valueOf("2014-12-31 23:59:59")
val t3 = Timestamp.valueOf("2014-12-31 23:59:59")
val s1 = "2015-07-22 10:00:00"
val s2 = "2014-12-31"
val s3 = "2014-31-12"
val df = Seq((d1, t1, s1), (d2, t2, s2), (d3, t3, s3)).toDF("d", "t", "s")
checkAnswer(
df.select(to_date(col("t"))),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("d"))),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2015-07-01")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("s"))),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")), Row(null)))
checkAnswer(
df.selectExpr("to_date(t)"),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.selectExpr("to_date(d)"),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2015-07-01")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.selectExpr("to_date(s)"),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")), Row(null)))
// now with format
checkAnswer(
df.select(to_date(col("t"), "yyyy-MM-dd")),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("d"), "yyyy-MM-dd")),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2015-07-01")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("s"), "yyyy-MM-dd")),
Seq(Row(null), Row(Date.valueOf("2014-12-31")), Row(null)))
// now switch format
checkAnswer(
df.select(to_date(col("s"), "yyyy-dd-MM")),
Seq(Row(null), Row(null), Row(Date.valueOf("2014-12-31"))))
// invalid format
checkAnswer(
df.select(to_date(col("s"), "yyyy-hh-MM")),
Seq(Row(null), Row(null), Row(null)))
checkAnswer(
df.select(to_date(col("s"), "yyyy-dd-aa")),
Seq(Row(null), Row(null), Row(null)))
// february
val x1 = "2016-02-29"
val x2 = "2017-02-29"
val df1 = Seq(x1, x2).toDF("x")
checkAnswer(
df1.select(to_date(col("x"))), Row(Date.valueOf("2016-02-29")) :: Row(null) :: Nil)
}
test("function trunc") {
val df = Seq(
(1, Timestamp.valueOf("2015-07-22 10:00:00")),
(2, Timestamp.valueOf("2014-12-31 00:00:00"))).toDF("i", "t")
checkAnswer(
df.select(trunc(col("t"), "YY")),
Seq(Row(Date.valueOf("2015-01-01")), Row(Date.valueOf("2014-01-01"))))
checkAnswer(
df.selectExpr("trunc(t, 'Month')"),
Seq(Row(Date.valueOf("2015-07-01")), Row(Date.valueOf("2014-12-01"))))
checkAnswer(
df.selectExpr("trunc(t, 'decade')"),
Seq(Row(Date.valueOf("2010-01-01")), Row(Date.valueOf("2010-01-01"))))
}
test("function date_trunc") {
val df = Seq(
(1, Timestamp.valueOf("2015-07-22 10:01:40.123456")),
(2, Timestamp.valueOf("2014-12-31 05:29:06.123456"))).toDF("i", "t")
checkAnswer(
df.select(date_trunc("YY", col("t"))),
Seq(Row(Timestamp.valueOf("2015-01-01 00:00:00")),
Row(Timestamp.valueOf("2014-01-01 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('MONTH', t)"),
Seq(Row(Timestamp.valueOf("2015-07-01 00:00:00")),
Row(Timestamp.valueOf("2014-12-01 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('DAY', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 00:00:00")),
Row(Timestamp.valueOf("2014-12-31 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('HOUR', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:00:00")),
Row(Timestamp.valueOf("2014-12-31 05:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('MINUTE', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:01:00")),
Row(Timestamp.valueOf("2014-12-31 05:29:00"))))
checkAnswer(
df.selectExpr("date_trunc('SECOND', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:01:40")),
Row(Timestamp.valueOf("2014-12-31 05:29:06"))))
checkAnswer(
df.selectExpr("date_trunc('WEEK', t)"),
Seq(Row(Timestamp.valueOf("2015-07-20 00:00:00")),
Row(Timestamp.valueOf("2014-12-29 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('QUARTER', t)"),
Seq(Row(Timestamp.valueOf("2015-07-01 00:00:00")),
Row(Timestamp.valueOf("2014-10-01 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('MILLISECOND', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:01:40.123")),
Row(Timestamp.valueOf("2014-12-31 05:29:06.123"))))
checkAnswer(
df.selectExpr("date_trunc('DECADE', t)"),
Seq(Row(Timestamp.valueOf("2010-01-01 00:00:00")),
Row(Timestamp.valueOf("2010-01-01 00:00:00"))))
Seq("century", "millennium").foreach { level =>
checkAnswer(
df.selectExpr(s"date_trunc('$level', t)"),
Seq(Row(Timestamp.valueOf("2001-01-01 00:00:00")),
Row(Timestamp.valueOf("2001-01-01 00:00:00"))))
}
}
test("from_unixtime") {
val sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
val fmt2 = "yyyy-MM-dd HH:mm:ss.SSS"
val sdf2 = new SimpleDateFormat(fmt2, Locale.US)
val fmt3 = "yy-MM-dd HH-mm-ss"
val sdf3 = new SimpleDateFormat(fmt3, Locale.US)
val df = Seq((1000, "yyyy-MM-dd HH:mm:ss.SSS"), (-1000, "yy-MM-dd HH-mm-ss")).toDF("a", "b")
checkAnswer(
df.select(from_unixtime(col("a"))),
Seq(Row(sdf1.format(new Timestamp(1000000))), Row(sdf1.format(new Timestamp(-1000000)))))
checkAnswer(
df.select(from_unixtime(col("a"), fmt2)),
Seq(Row(sdf2.format(new Timestamp(1000000))), Row(sdf2.format(new Timestamp(-1000000)))))
checkAnswer(
df.select(from_unixtime(col("a"), fmt3)),
Seq(Row(sdf3.format(new Timestamp(1000000))), Row(sdf3.format(new Timestamp(-1000000)))))
checkAnswer(
df.selectExpr("from_unixtime(a)"),
Seq(Row(sdf1.format(new Timestamp(1000000))), Row(sdf1.format(new Timestamp(-1000000)))))
checkAnswer(
df.selectExpr(s"from_unixtime(a, '$fmt2')"),
Seq(Row(sdf2.format(new Timestamp(1000000))), Row(sdf2.format(new Timestamp(-1000000)))))
checkAnswer(
df.selectExpr(s"from_unixtime(a, '$fmt3')"),
Seq(Row(sdf3.format(new Timestamp(1000000))), Row(sdf3.format(new Timestamp(-1000000)))))
}
private def secs(millis: Long): Long = TimeUnit.MILLISECONDS.toSeconds(millis)
test("unix_timestamp") {
val date1 = Date.valueOf("2015-07-24")
val date2 = Date.valueOf("2015-07-25")
val ts1 = Timestamp.valueOf("2015-07-24 10:00:00.3")
val ts2 = Timestamp.valueOf("2015-07-25 02:02:02.2")
val s1 = "2015/07/24 10:00:00.5"
val s2 = "2015/07/25 02:02:02.6"
val ss1 = "2015-07-24 10:00:00"
val ss2 = "2015-07-25 02:02:02"
val fmt = "yyyy/MM/dd HH:mm:ss.S"
val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
checkAnswer(df.select(unix_timestamp(col("ts"))), Seq(
Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
checkAnswer(df.select(unix_timestamp(col("ss"))), Seq(
Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
checkAnswer(df.select(unix_timestamp(col("d"), fmt)), Seq(
Row(secs(date1.getTime)), Row(secs(date2.getTime))))
checkAnswer(df.select(unix_timestamp(col("s"), fmt)), Seq(
Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
checkAnswer(df.selectExpr("unix_timestamp(ts)"), Seq(
Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
checkAnswer(df.selectExpr("unix_timestamp(ss)"), Seq(
Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
checkAnswer(df.selectExpr(s"unix_timestamp(d, '$fmt')"), Seq(
Row(secs(date1.getTime)), Row(secs(date2.getTime))))
checkAnswer(df.selectExpr(s"unix_timestamp(s, '$fmt')"), Seq(
Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
val x1 = "2015-07-24 10:00:00"
val x2 = "2015-25-07 02:02:02"
val x3 = "2015-07-24 25:02:02"
val x4 = "2015-24-07 26:02:02"
val ts3 = Timestamp.valueOf("2015-07-24 02:25:02")
val ts4 = Timestamp.valueOf("2015-07-24 00:10:00")
val df1 = Seq(x1, x2, x3, x4).toDF("x")
checkAnswer(df1.select(unix_timestamp(col("x"))), Seq(
Row(secs(ts1.getTime)), Row(null), Row(null), Row(null)))
checkAnswer(df1.selectExpr("unix_timestamp(x)"), Seq(
Row(secs(ts1.getTime)), Row(null), Row(null), Row(null)))
checkAnswer(df1.select(unix_timestamp(col("x"), "yyyy-dd-MM HH:mm:ss")), Seq(
Row(null), Row(secs(ts2.getTime)), Row(null), Row(null)))
checkAnswer(df1.selectExpr(s"unix_timestamp(x, 'yyyy-MM-dd mm:HH:ss')"), Seq(
Row(secs(ts4.getTime)), Row(null), Row(secs(ts3.getTime)), Row(null)))
// invalid format
checkAnswer(df1.selectExpr(s"unix_timestamp(x, 'yyyy-MM-dd aa:HH:ss')"), Seq(
Row(null), Row(null), Row(null), Row(null)))
// february
val y1 = "2016-02-29"
val y2 = "2017-02-29"
val ts5 = Timestamp.valueOf("2016-02-29 00:00:00")
val df2 = Seq(y1, y2).toDF("y")
checkAnswer(df2.select(unix_timestamp(col("y"), "yyyy-MM-dd")), Seq(
Row(secs(ts5.getTime)), Row(null)))
val now = sql("select unix_timestamp()").collect().head.getLong(0)
checkAnswer(
sql(s"select cast ($now as timestamp)"),
Row(new java.util.Date(TimeUnit.SECONDS.toMillis(now))))
}
test("to_unix_timestamp") {
  // Fixture: per row a DATE, a TIMESTAMP with fractional seconds, a string in a
  // custom "yyyy/MM/dd" format, and a string in the default "yyyy-MM-dd" format.
  val date1 = Date.valueOf("2015-07-24")
  val date2 = Date.valueOf("2015-07-25")
  val ts1 = Timestamp.valueOf("2015-07-24 10:00:00.3")
  val ts2 = Timestamp.valueOf("2015-07-25 02:02:02.2")
  val s1 = "2015/07/24 10:00:00.5"
  val s2 = "2015/07/25 02:02:02.6"
  val ss1 = "2015-07-24 10:00:00"
  val ss2 = "2015-07-25 02:02:02"
  val fmt = "yyyy/MM/dd HH:mm:ss.S"
  val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
  // to_unix_timestamp is exercised through selectExpr (SQL) below; the expected
  // values are epoch seconds (presumably `secs` truncates millis to seconds, so
  // the .3/.5 fractions do not change the expectation — defined elsewhere in
  // this suite).
  checkAnswer(df.selectExpr("to_unix_timestamp(ts)"), Seq(
    Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
  checkAnswer(df.selectExpr("to_unix_timestamp(ss)"), Seq(
    Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
  // A date column formatted with a custom pattern resolves to midnight.
  checkAnswer(df.selectExpr(s"to_unix_timestamp(d, '$fmt')"), Seq(
    Row(secs(date1.getTime)), Row(secs(date2.getTime))))
  checkAnswer(df.selectExpr(s"to_unix_timestamp(s, '$fmt')"), Seq(
    Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
  // Malformed inputs: x2 has month 25, x3 has hour 25, x4 has both swapped and
  // an out-of-range hour; each should yield NULL under the default format.
  val x1 = "2015-07-24 10:00:00"
  val x2 = "2015-25-07 02:02:02"
  val x3 = "2015-07-24 25:02:02"
  val x4 = "2015-24-07 26:02:02"
  val ts3 = Timestamp.valueOf("2015-07-24 02:25:02")
  val ts4 = Timestamp.valueOf("2015-07-24 00:10:00")
  val df1 = Seq(x1, x2, x3, x4).toDF("x")
  checkAnswer(df1.selectExpr("to_unix_timestamp(x)"), Seq(
    Row(secs(ts1.getTime)), Row(null), Row(null), Row(null)))
  // With minutes/hours swapped in the pattern ('mm:HH'), x1 "10:00:00" parses
  // as 00:10:00 (ts4) and x3 "25:02:02" parses as 02:25:02 (ts3).
  checkAnswer(df1.selectExpr(s"to_unix_timestamp(x, 'yyyy-MM-dd mm:HH:ss')"), Seq(
    Row(secs(ts4.getTime)), Row(null), Row(secs(ts3.getTime)), Row(null)))
  // february: Feb 29 parses only in a leap year.
  val y1 = "2016-02-29"
  val y2 = "2017-02-29"
  val ts5 = Timestamp.valueOf("2016-02-29 00:00:00")
  val df2 = Seq(y1, y2).toDF("y")
  // NOTE: uses unix_timestamp here — to_unix_timestamp has no Column DSL
  // function, only the SQL expression form.
  checkAnswer(df2.select(unix_timestamp(col("y"), "yyyy-MM-dd")), Seq(
    Row(secs(ts5.getTime)), Row(null)))
  // invalid format: an unparseable pattern letter yields NULL for every row.
  checkAnswer(df1.selectExpr(s"to_unix_timestamp(x, 'yyyy-MM-dd bb:HH:ss')"), Seq(
    Row(null), Row(null), Row(null), Row(null)))
}
test("to_timestamp") {
  // Fixture: dates, whole-second timestamps, strings in a custom fractional
  // format, and strings in the default format.
  val date1 = Date.valueOf("2015-07-24")
  val date2 = Date.valueOf("2015-07-25")
  // Expected results when a bare date is promoted to a timestamp (midnight).
  val ts_date1 = Timestamp.valueOf("2015-07-24 00:00:00")
  val ts_date2 = Timestamp.valueOf("2015-07-25 00:00:00")
  val ts1 = Timestamp.valueOf("2015-07-24 10:00:00")
  val ts2 = Timestamp.valueOf("2015-07-25 02:02:02")
  val s1 = "2015/07/24 10:00:00.5"
  val s2 = "2015/07/25 02:02:02.6"
  // Expected results when the custom format preserves the fractional part.
  val ts1m = Timestamp.valueOf("2015-07-24 10:00:00.5")
  val ts2m = Timestamp.valueOf("2015-07-25 02:02:02.6")
  val ss1 = "2015-07-24 10:00:00"
  val ss2 = "2015-07-25 02:02:02"
  val fmt = "yyyy/MM/dd HH:mm:ss.S"
  val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
  // Without a format, to_timestamp must agree with casting unix_timestamp.
  checkAnswer(df.select(to_timestamp(col("ss"))),
    df.select(unix_timestamp(col("ss")).cast("timestamp")))
  checkAnswer(df.select(to_timestamp(col("ss"))), Seq(
    Row(ts1), Row(ts2)))
  // A custom format with a fractional-second field keeps the .5/.6 fractions.
  checkAnswer(df.select(to_timestamp(col("s"), fmt)), Seq(
    Row(ts1m), Row(ts2m)))
  // A timestamp column round-trips through to_timestamp unchanged.
  checkAnswer(df.select(to_timestamp(col("ts"), fmt)), Seq(
    Row(ts1), Row(ts2)))
  // A date column becomes the midnight timestamp of that day.
  checkAnswer(df.select(to_timestamp(col("d"), "yyyy-MM-dd")), Seq(
    Row(ts_date1), Row(ts_date2)))
}
test("datediff") {
  // Two rows, each carrying a DATE column `a`, a TIMESTAMP column `b` on the
  // same day, and string-typed date/timestamp columns `c`/`d` one day earlier.
  val row1 = (Date.valueOf("2015-07-24"), Timestamp.valueOf("2015-07-24 01:00:00"),
    "2015-07-23", "2015-07-23 03:00:00")
  val row2 = (Date.valueOf("2015-07-25"), Timestamp.valueOf("2015-07-25 02:00:00"),
    "2015-07-24", "2015-07-24 04:00:00")
  val df = Seq(row1, row2).toDF("a", "b", "c", "d")
  // Same calendar day: zero, regardless of the time-of-day part.
  checkAnswer(df.select(datediff(col("a"), col("b"))), Seq(Row(0), Row(0)))
  // Date minus a string one day earlier: one.
  checkAnswer(df.select(datediff(col("a"), col("c"))), Seq(Row(1), Row(1)))
  // Reversed operand order flips the sign.
  checkAnswer(df.select(datediff(col("d"), col("b"))), Seq(Row(-1), Row(-1)))
  // Same semantics via the SQL expression form.
  checkAnswer(df.selectExpr("datediff(a, d)"), Seq(Row(1), Row(1)))
}
test("to_timestamp with microseconds precision") {
  // The java.time API must be enabled so the result comes back as an Instant,
  // which can carry the full microsecond precision being checked here.
  withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
    val isoText = "1970-01-01T00:00:00.123456Z"
    val pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSSSSX"
    val parsed = Seq(isoText).toDF("t").select(to_timestamp($"t", pattern))
    checkAnswer(parsed, Seq(Row(Instant.parse(isoText))))
  }
}
test("handling null field by date_part") {
  val input = Seq(Date.valueOf("2019-09-20")).toDF("d")
  // A null `field` argument yields a NULL result typed as DOUBLE, both for a
  // column operand and for a literal date operand.
  for (expression <- Seq("date_part(null, d)", "date_part(null, date'2019-09-20')")) {
    val result = input.selectExpr(expression)
    assert(result.schema.headOption.get.dataType == DoubleType)
    checkAnswer(result, Row(null))
  }
}
}
| jkbradley/spark | sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala | Scala | apache-2.0 | 27,560 |
import eu.inn.binders.naming.{PlainConverter}
import java.math.BigInteger
import java.net.InetAddress
import java.nio.ByteBuffer
import java.util.{UUID, Date}
import org.scalatest.{FlatSpec, Matchers}
import eu.inn.binders._
import org.mockito.Mockito._
/**
 * Unit tests for `eu.inn.binders.cassandra.Statement`: verifies that case-class
 * fields (`bind`) and positional arguments (`bindArgs`) are written to the
 * underlying bound statement (mocked via `stmt(...)` from CustomMockers —
 * presumably a mock BoundStatement) using the correctly-typed setter, and that
 * `None` / wrapped-null values are written with `setToNull`.
 *
 * NOTE(review): the spec subject is "Row " throughout even though these tests
 * exercise Statement binding; it looks copy-pasted from a Row (unbind) spec.
 * Left unchanged to avoid churning every test name, but the "nbind"/"unbind"
 * typos in individual test names are fixed below.
 */
class TestStatementSpec extends FlatSpec with Matchers with CustomMockers {

  // Two reference dates used by the Date/collection tests. Despite the name,
  // `yesterday` is 11 days in the past (Calendar.DATE, -11).
  val (yesterday, now) = {
    import java.util._
    val cal = Calendar.getInstance()
    cal.setTime(new Date())
    cal.add(Calendar.DATE, -11)
    (cal.getTime(), new Date())
  }

  // --- Int ---
  case class TestInt(i1: Int, i2: Option[Int], i3: Option[Int])

  "Row " should " bind int fields " in {
    val cr = stmt("i1", "i2", "i3")
    val s = mock[com.datastax.driver.core.Session]
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestInt(10, Some(20), None))
    verify(cr).setInt("i1", 10)
    verify(cr).setInt("i2", 20)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind int parameters " in {
    val cr = stmt("i1", "i2")
    val s = mock[com.datastax.driver.core.Session]
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(10, Some(20), None.asInstanceOf[Option[Int]])
    verify(cr).setInt(0, 10)
    verify(cr).setInt(1, 20)
    verify(cr).setToNull(2)
  }

  // --- Long ---
  case class TestLong(i1: Long, i2: Option[Long], i3: Option[Long])

  "Row " should " bind long fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestLong(10l, Some(20l), None))
    verify(cr).setLong("i1", 10)
    verify(cr).setLong("i2", 20)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind long parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(10l, Some(20l), None.asInstanceOf[Option[Long]])
    verify(cr).setLong(0, 10l)
    verify(cr).setLong(1, 20l)
    verify(cr).setToNull(2)
  }

  // --- String ---
  case class TestString(i1: String, i2: Option[String], i3: Option[String], i4: Option[String])

  "Row " should " bind string fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3", "i4")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    // Option(null) collapses to None, so i4 is also bound as null.
    br.bind(TestString("10", Some("20"), None, Option(null)))
    verify(cr).setString("i1", "10")
    verify(cr).setString("i2", "20")
    verify(cr).setToNull("i3")
    verify(cr).setToNull("i4")
  }

  "Row " should " bind string parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3", "i4")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs("10", Some("20"), None.asInstanceOf[Option[String]], Option(null.asInstanceOf[String]))
    verify(cr).setString(0, "10")
    verify(cr).setString(1, "20")
    verify(cr).setToNull(2)
    verify(cr).setToNull(3)
  }

  // --- Date ---
  case class TestDate(i1: Date, i2: Option[Date], i3: Option[Date])

  "Row " should " bind date fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestDate(yesterday, Some(now), None))
    verify(cr).setDate("i1", yesterday)
    verify(cr).setDate("i2", now)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind date parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    // A bare null (as opposed to None) is passed straight through to setDate
    // rather than being translated to setToNull — asymmetric with the Option
    // handling elsewhere in this spec.
    br.bindArgs(yesterday, now, null.asInstanceOf[Date])
    verify(cr).setDate(0, yesterday)
    verify(cr).setDate(1, now)
    verify(cr).setDate(2, null)
  }

  // --- Boolean ---
  case class TestBoolean(i1: Boolean, i2: Option[Boolean], i3: Option[Boolean])

  "Row " should " bind boolean fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestBoolean(true, Some(false), None))
    verify(cr).setBool("i1", true)
    verify(cr).setBool("i2", false)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind boolean parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(true, false, None.asInstanceOf[Option[Boolean]])
    verify(cr).setBool(0, true)
    verify(cr).setBool(1, false)
    verify(cr).setToNull(2)
  }

  // --- Float ---
  case class TestFloat(i1: Float, i2: Option[Float], i3: Option[Float])

  // (test name fixed: was " nbind float fields ")
  "Row " should " bind float fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestFloat(1.0f, Some(2.0f), None))
    verify(cr).setFloat("i1", 1.0f)
    verify(cr).setFloat("i2", 2.0f)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind float parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(1.0f, 2.0f, None.asInstanceOf[Option[Float]])
    verify(cr).setFloat(0, 1.0f)
    verify(cr).setFloat(1, 2.0f)
    verify(cr).setToNull(2)
  }

  // --- Double ---
  case class TestDouble(i1: Double, i2: Option[Double], i3: Option[Double])

  "Row " should " bind double fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestDouble(1.0, Some(2.0), None))
    verify(cr).setDouble("i1", 1.0)
    verify(cr).setDouble("i2", 2.0)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind double parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(1.0, 2.0, None.asInstanceOf[Option[Double]])
    verify(cr).setDouble(0, 1.0)
    verify(cr).setDouble(1, 2.0)
    verify(cr).setToNull(2)
  }

  // --- ByteBuffer ---
  case class TestBytes(i1: ByteBuffer, i2: Option[ByteBuffer], i3: Option[ByteBuffer])

  "Row " should " bind ByteBuffer fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestBytes(ByteBuffer.wrap(Array[Byte](1, 2, 3)), Some(ByteBuffer.wrap(Array[Byte](5, 6, 7))), None))
    verify(cr).setBytes("i1", ByteBuffer.wrap(Array[Byte](1, 2, 3)))
    verify(cr).setBytes("i2", ByteBuffer.wrap(Array[Byte](5, 6, 7)))
    verify(cr).setToNull("i3")
  }

  "Row " should " bind ByteBuffer parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(ByteBuffer.wrap(Array[Byte](1, 2, 3)), Some(ByteBuffer.wrap(Array[Byte](5, 6, 7))), Option(null.asInstanceOf[ByteBuffer]))
    verify(cr).setBytes(0, ByteBuffer.wrap(Array[Byte](1, 2, 3)))
    verify(cr).setBytes(1, ByteBuffer.wrap(Array[Byte](5, 6, 7)))
    verify(cr).setToNull(2)
  }

  // --- BigInteger (Cassandra varint) ---
  case class TestBigInteger(i1: BigInteger, i2: Option[BigInteger], i3: Option[BigInteger])

  // (test name fixed: was " unbind BigInteger fields " — this test binds)
  "Row " should " bind BigInteger fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestBigInteger(new BigInteger("123"), Some(new BigInteger("567")), None))
    verify(cr).setVarint("i1", new BigInteger("123"))
    verify(cr).setVarint("i2", new BigInteger("567"))
    verify(cr).setToNull("i3")
  }

  "Row " should " bind BigInteger parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(new BigInteger("123"), new BigInteger("567"), Option(null.asInstanceOf[BigInteger]))
    verify(cr).setVarint(0, new BigInteger("123"))
    verify(cr).setVarint(1, new BigInteger("567"))
    verify(cr).setToNull(2)
  }

  // --- BigDecimal (Cassandra decimal; scala.BigDecimal unwrapped to java) ---
  case class TestBigDecimal(i1: BigDecimal, i2: Option[BigDecimal], i3: Option[BigDecimal])

  "Row " should " bind BigDecimal fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestBigDecimal(BigDecimal("123"), Some(BigDecimal("567")), None))
    verify(cr).setDecimal("i1", BigDecimal("123").bigDecimal)
    verify(cr).setDecimal("i2", BigDecimal("567").bigDecimal)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind BigDecimal parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(BigDecimal("123"), BigDecimal("567"), None.asInstanceOf[Option[BigDecimal]])
    verify(cr).setDecimal(0, BigDecimal("123").bigDecimal)
    verify(cr).setDecimal(1, BigDecimal("567").bigDecimal)
    verify(cr).setToNull(2)
  }

  // --- UUID ---
  case class TestUUID(i1: UUID, i2: Option[UUID], i3: Option[UUID])

  "Row " should " bind UUID fields " in {
    val uuid1 = UUID.randomUUID()
    val uuid2 = UUID.randomUUID()
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestUUID(uuid1, Some(uuid2), None))
    verify(cr).setUUID("i1", uuid1)
    verify(cr).setUUID("i2", uuid2)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind UUID parameters " in {
    val uuid1 = UUID.randomUUID()
    val uuid2 = UUID.randomUUID()
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(uuid1, uuid2, None.asInstanceOf[Option[UUID]])
    verify(cr).setUUID(0, uuid1)
    verify(cr).setUUID(1, uuid2)
    verify(cr).setToNull(2)
  }

  // --- InetAddress ---
  case class TestInetAddress(i1: InetAddress, i2: Option[InetAddress], i3: Option[InetAddress])

  "Row " should " bind InetAddress fields " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestInetAddress(InetAddress.getLocalHost, Some(InetAddress.getLoopbackAddress), None))
    verify(cr).setInet("i1", InetAddress.getLocalHost)
    verify(cr).setInet("i2", InetAddress.getLoopbackAddress)
    verify(cr).setToNull("i3")
  }

  "Row " should " bind InetAddress parameters " in {
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bindArgs(InetAddress.getLocalHost, Some(InetAddress.getLoopbackAddress), None.asInstanceOf[Option[InetAddress]])
    verify(cr).setInet(0, InetAddress.getLocalHost)
    verify(cr).setInet(1, InetAddress.getLoopbackAddress)
    verify(cr).setToNull(2)
  }

  // --- Collections (JavaConversions supplies the scala->java implicit views
  //     needed so the verify(...) argument matchers line up) ---
  case class TestList(i1: List[Int], i2: List[String], i3: List[Date])

  "Row " should " bind list fields " in {
    import scala.collection.JavaConversions._
    val lst1 = List(1, 2, 3)
    val lst2 = List("1", "2", "3")
    val lst3 = List(yesterday, now)
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestList(lst1, lst2, lst3))
    verify(cr).setList("i1", lst1)
    verify(cr).setList("i2", lst2)
    verify(cr).setList("i3", lst3)
  }

  case class TestSet(i1: Set[Int], i2: Set[String], i3: Set[Date])

  // (test name fixed: was " unbind set fields " — this test binds)
  "Row " should " bind set fields " in {
    import scala.collection.JavaConversions._
    val set1 = Set(1, 2, 3)
    val set2 = Set("1", "2", "3")
    val set3 = Set(yesterday, now)
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2", "i3")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestSet(set1, set2, set3))
    verify(cr).setSet("i1", set1)
    verify(cr).setSet("i2", set2)
    verify(cr).setSet("i3", set3)
  }

  case class TestMap(i1: Map[Int, String], i2: Map[Long, Date])

  // (test name fixed: was " unbind map fields " — this test binds)
  "Row " should " bind map fields " in {
    import scala.collection.JavaConversions._
    val map1 = Map(1 -> "11", 2 -> "22")
    val map2 = Map(0l -> yesterday, 1l -> now)
    val s = mock[com.datastax.driver.core.Session]
    val cr = stmt("i1", "i2")
    val br = new eu.inn.binders.cassandra.Statement[PlainConverter](s, cr)
    br.bind(TestMap(map1, map2))
    verify(cr).setMap("i1", map1)
    verify(cr).setMap("i2", map2)
  }
}
package solve
import scala.collection.mutable.ListBuffer
/**
 * Project Euler problem 11: find the greatest product of `runLength` (four)
 * adjacent numbers in the same direction (up, down, left, right, or
 * diagonally) in the 20x20 grid below.
 */
object Problem11 {

  /** The 20x20 grid of two-digit numbers, one row per line. */
  private val gridText =
    """08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
      |49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
      |81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
      |52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
      |22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
      |24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
      |32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
      |67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
      |24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
      |21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
      |78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
      |16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
      |86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
      |19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
      |04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
      |88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
      |04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
      |20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
      |20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
      |01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48""".stripMargin

  /** The grid parsed into rows of Ints; tolerant of surrounding whitespace. */
  private val grid: Vector[Vector[Int]] =
    gridText.split("\n").toVector.map(_.trim.split("\\s+").toVector.map(_.toInt))

  /**
   * Maximum product of `runLength` adjacent grid cells taken in a straight
   * line. Scanning every cell with the four direction deltas (right, down,
   * down-right, down-left) covers every horizontal, vertical, and diagonal
   * run exactly once, with explicit bounds checks instead of zero padding.
   *
   * @param runLength number of adjacent cells to multiply (default 4)
   * @return the greatest such product
   */
  def solve(runLength: Int = 4): Int = {
    val rows = grid.length
    val cols = grid.head.length
    val directions = Seq((0, 1), (1, 0), (1, 1), (1, -1))
    val products = for {
      r <- 0 until rows
      c <- 0 until cols
      (dr, dc) <- directions
      // last cell of the run must stay inside the grid (dr is never negative)
      rEnd = r + dr * (runLength - 1)
      cEnd = c + dc * (runLength - 1)
      if rEnd < rows && cEnd >= 0 && cEnd < cols
    } yield (0 until runLength).map(k => grid(r + dr * k)(c + dc * k)).product
    products.max
  }

  def main(args: Array[String]): Unit = println(solve())
}
package com.twitter.app
import com.twitter.app.lifecycle.Event._
import com.twitter.conversions.DurationOps._
import com.twitter.util._
import java.lang.reflect.InvocationTargetException
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.atomic.AtomicReference
import scala.collection.mutable
import scala.jdk.CollectionConverters._
import scala.util.control.NonFatal
/**
* A composable application trait that includes flag parsing as well
* as basic application lifecycle (pre- and post- main). Flag parsing
* is done via [[com.twitter.app.Flags]], an instance of which is
* defined in the member `flag`. Applications should be constructed
* with modularity in mind, and common functionality should be
* extracted into mixins.
*
* Flags should only be constructed in the constructor, and should only be read
* in the premain or later, after they have been parsed.
*
* {{{
* import com.twitter.app.App
*
* object MyApp extends App {
* val n = flag("n", 100, "Number of items to process")
*
* def main(): Unit = {
* for (i <- 0 until n()) process(i)
* }
* }
* }}}
*
* Note that a missing `main` is OK: mixins may provide behavior that
* does not require defining a custom `main` method.
*
* @define LoadServiceApplyScaladocLink
* [[com.twitter.app.LoadService.apply[T]()(implicitevidence\\$1:scala\\.reflect\\.ClassTag[T]):Seq[T]* LoadService.apply]]
*/
trait App extends ClosableOnce with CloseOnceAwaitably with Lifecycle {

  /** The name of the application, based on the classname */
  def name: String = getClass.getName.stripSuffix("$")

  // NOTE: `failfastOnFlagsNotParsed` is called from this trait's constructor,
  // which is fragile — a subclass override runs before the subclass is fully
  // initialized; see
  // https://stackoverflow.com/questions/18138397/calling-method-from-constructor
  private[this] val _flag: Flags =
    new Flags(name, includeGlobal = includeGlobalFlags, failfastOnFlagsNotParsed)

  /** The [[com.twitter.app.Flags]] instance associated with this application */
  final def flag: Flags = _flag

  // Unparsed remainder of the command line; populated by `parseArgs`.
  private var _args = Array[String]()

  /** The remaining, unparsed arguments */
  def args: Array[String] = _args

  /** Whether or not to accept undefined flags */
  protected def allowUndefinedFlags: Boolean = false

  /** Whether or not to include [[GlobalFlag GlobalFlag's]] when [[Flags]] are parsed */
  protected def includeGlobalFlags: Boolean = true

  /**
   * Users of this code should override this to `true` so that
   * you fail-fast instead of being surprised at runtime by code that
   * is reading from flags before they have been parsed.
   *
   * Ideally this would default to `true`, however, in order to avoid
   * breaking existing users, it was introduced using `false`.
   */
  protected def failfastOnFlagsNotParsed: Boolean = false

  /** Exit on error with the given Throwable */
  protected def exitOnError(throwable: Throwable): Unit = {
    throwable.printStackTrace()
    throwable match {
      case _: CloseException =>
        // exception occurred while closing, do not attempt to close again
        System.err.println(throwable.getMessage)
        System.exit(1)
      case _ =>
        exitOnError("Exception thrown in main on startup")
    }
  }

  /** Exit on error with the given `reason` String */
  protected def exitOnError(reason: String): Unit = {
    exitOnError(reason, "")
  }

  /**
   * The details Fn may be an expensive operation (which could fail). We want to
   * ensure that we've at least written the `reason` field to System.err before
   * attempting to write the `detail` field so that users will at a minimum see the
   * `reason` for the exit regardless of any extra details.
   */
  protected def exitOnError(reason: String, details: => String): Unit = {
    System.err.println(reason)
    // want to ensure "reason" is written before attempting to write any details
    System.err.flush()
    System.err.println(details)
    // if we have gotten here we may not yet have attempted to close, ensure we do.
    try {
      Await.ready(close(), closeDeadline - Time.now)
    } catch {
      case NonFatal(exc) =>
        exitOnError(newCloseException(Seq(exc)))
    }
    System.exit(1)
  }

  /**
   * By default any failure during the graceful shutdown portion of an [[App]]'s lifecycle
   * bubbles up and causes non-zero return codes in the process. Setting this to `true`
   * allows an application to suppress these errors and express that graceful shutdown logic
   * should not be a determinant of the process exit code.
   */
  protected def suppressGracefulShutdownErrors: Boolean = false

  // Hooks registered via `init`/`premain`; run in registration order before
  // flag parsing and before `main()`, respectively.
  private val inits: mutable.Buffer[() => Unit] = mutable.Buffer.empty
  private val premains: mutable.Buffer[() => Unit] = mutable.Buffer.empty

  // Closables registered via `closeOnExit` (closed in parallel on shutdown)
  // and `closeOnExitLast` (closed only after all of `exits` have completed).
  private val exits: ConcurrentLinkedQueue[Closable] = new ConcurrentLinkedQueue
  private val lastExits: ConcurrentLinkedQueue[Closable] = new ConcurrentLinkedQueue

  // Hooks registered via `postmain`; run after `main()` returns.
  private val postmains: ConcurrentLinkedQueue[() => Unit] = new ConcurrentLinkedQueue

  // finagle isn't available here, so no DefaultTimer
  protected lazy val shutdownTimer: Timer = new JavaTimer(isDaemon = true)

  /**
   * Programmatically specify which implementations to use in
   * $LoadServiceApplyScaladocLink
   * for the given interfaces. This allows applications to circumvent
   * the standard service loading mechanism when needed. It may be useful
   * if the application has a broad and/or rapidly changing set of dependencies.
   *
   * For example, to require `SuperCoolMetrics` be used as the
   * `com.twitter.finagle.stats.StatsReceiver` implementation:
   * {{{
   * import com.initech.SuperCoolMetrics
   * import com.twitter.app.App
   * import com.twitter.app.LoadService.Binding
   * import com.twitter.finagle.stats.StatsReceiver
   *
   * class MyApp extends App {
   *   val implementationToUse = new SuperCoolMetrics()
   *
   *   override protected[this] val loadServiceBindings: Seq[Binding[_]] = {
   *     Seq(new Binding(classOf[StatsReceiver], implementationToUse))
   *   }
   *
   *   def main(): Unit = {
   *     val loaded = LoadService[StatsReceiver]()
   *     assert(Seq(implementationToUse) == loaded)
   *   }
   * }
   * }}}
   *
   * If this is called for a `Class[T]` after $LoadServiceApplyScaladocLink
   * has been called for that same interface, an `IllegalStateException`
   * will be thrown. For this reason, bindings are done as early
   * as possible in the application lifecycle, before even `inits`
   * and flag parsing.
   *
   * @note this should not generally be used by "libraries" as it forces
   *       their user's implementation choice.
   *
   * @return a mapping from `Class` to the 1 or more implementations to
   *         be used by $LoadServiceApplyScaladocLink for that interface.
   *
   * @see $LoadServiceApplyScaladocLink
   */
  protected[this] def loadServiceBindings: Seq[LoadService.Binding[_]] = Nil

  /**
   * Parse the command line arguments as an Array of Strings. The default
   * implementation parses the given String[] as [[Flag]] input values.
   *
   * Users may override this method to specify different functionality.
   *
   * @param args String Array which represents the command line input given
   *             to the application.
   */
  protected[this] def parseArgs(args: Array[String]): Unit = {
    flag.parseArgs(args, allowUndefinedFlags) match {
      case Flags.Ok(remainder) =>
        _args = remainder.toArray
      case Flags.Help(usage) =>
        throw FlagUsageError(usage)
      case Flags.Error(reason) =>
        throw FlagParseException(reason)
    }
  }

  /**
   * Invoke `f` before anything else (including flag parsing).
   */
  protected final def init(f: => Unit): Unit = {
    inits += (() => f)
  }

  /**
   * Invoke `f` right before the user's main is invoked.
   */
  protected final def premain(f: => Unit): Unit = {
    premains += (() => f)
  }

  /** Minimum duration to allow for exits to be processed. */
  final val MinGrace: Duration = 1.second

  /**
   * Default amount of time to wait for shutdown.
   * This value is not used as a default if `close()` is called without parameters. It simply
   * provides a default value to be passed as `close(grace)`.
   */
  def defaultCloseGracePeriod: Duration = Duration.Zero

  /**
   * The actual close grace period.
   */
  @volatile private[this] var closeDeadline = Time.Top

  /**
   * Close `closable` when shutdown is requested. Closables are closed in parallel.
   */
  final def closeOnExit(closable: Closable): Unit = synchronized {
    if (isClosed) {
      // `close()` has already been called, we close eagerly
      closable.close(closeDeadline)
    } else {
      // `close()` not yet called, safe to add it
      exits.add(closable)
    }
  }

  /**
   * Register a `closable` to be closed on application shutdown after those registered
   * via `closeOnExit`.
   *
   * @note Application shutdown occurs in two sequential phases to allow explicit
   *       encoding of resource lifecycle relationships. Concretely this is useful
   *       for encoding that a monitoring resource should outlive a monitored
   *       resource.
   *
   *       In all cases, the close deadline is enforced.
   */
  final def closeOnExitLast(closable: Closable): Unit = synchronized {
    if (isClosed) {
      // `close()` already called, we need to close this here, but only
      // after `close()` completes and `closing` is satisfied
      close(closeDeadline)
        .transform { _ => closable.close(closeDeadline) }
        .by(shutdownTimer, closeDeadline)
    } else {
      // `close()` not yet called, safe to add it
      lastExits.add(closable)
    }
  }

  /**
   * Invoke `f` when shutdown is requested. Exit hooks run in parallel and are
   * executed after all postmains complete. The thread resumes when all exit
   * hooks complete or `closeDeadline` expires.
   *
   * @see [[runOnExit(Runnable)]] for a variant that is more suitable for Java.
   */
  protected final def onExit(f: => Unit): Unit = {
    closeOnExit {
      Closable.make { deadline => // close() ensures that this deadline is sane
        FuturePool.unboundedPool(f).by(shutdownTimer, deadline)
      }
    }
  }

  /**
   * Invoke `runnable.run()` when shutdown is requested. Exit hooks run in parallel
   * and are executed after all postmains complete. The thread resumes when all exit
   * hooks complete or `closeDeadline` expires.
   *
   * This is a Java friendly API to allow Java 8 users to invoke `onExit` with lambda
   * expressions.
   *
   * For example (in Java):
   *
   * {{{
   * runOnExit(() -> {
   *   clientA.close();
   *   clientB.close();
   * });
   * }}}
   *
   * @see [[onExit(f: => Unit)]] for a variant that is more suitable for Scala.
   */
  protected final def runOnExit(runnable: Runnable): Unit = {
    onExit(runnable.run())
  }

  /**
   * Invoke `f` when shutdown is requested. Exit hooks run in parallel and are
   * executed after all closeOnExit functions complete. The thread resumes when all exit
   * hooks complete or `closeDeadline` expires.
   *
   * @see [[runOnExitLast(Runnable)]] for a variant that is more suitable for Java.
   */
  protected final def onExitLast(f: => Unit): Unit = {
    closeOnExitLast {
      Closable.make { deadline => // close() ensures that this deadline is sane
        FuturePool.unboundedPool(f).by(shutdownTimer, deadline)
      }
    }
  }

  /**
   * Invoke `runnable.run()` when shutdown is requested. Exit hooks run in parallel
   * and are executed after all closeOnExit functions complete. The thread resumes
   * when all exit hooks complete or `closeDeadline` expires.
   *
   * This is a Java friendly API to allow Java 8 users to invoke `onExitLast` with lambda
   * expressions.
   *
   * For example (in Java):
   *
   * {{{
   * runOnExitLast(() -> {
   *   clientA.close();
   *   clientB.close();
   * });
   * }}}
   *
   * @see [[onExitLast(f: => Unit)]] for a variant that is more suitable for Scala.
   */
  protected final def runOnExitLast(runnable: Runnable): Unit = {
    onExitLast(runnable.run())
  }

  /**
   * Invoke `f` after the user's main has exited.
   */
  protected final def postmain(f: => Unit): Unit = {
    postmains.add(() => f)
  }

  /**
   * First shutdown phase: closes all `closeOnExit` Closables in parallel,
   * bounded by `deadline` (clamped to at least `MinGrace` from now), then —
   * whether or not that phase timed out — runs the `closeOnExitLast` phase.
   */
  override protected def closeOnce(deadline: Time): Future[Unit] = synchronized {
    closeDeadline = deadline.max(Time.now + MinGrace)
    Future
      .collectToTry(exits.asScala.toSeq.map(_.close(closeDeadline)))
      .by(shutdownTimer, closeDeadline)
      .transform {
        case Return(results) =>
          observeFuture(CloseExitLast) {
            closeLastExits(results, closeDeadline)
          }
        case Throw(t) =>
          // this would be triggered by a timeout on the collectToTry of exits,
          // still try to close last exits
          observeFuture(CloseExitLast) {
            closeLastExits(Seq(Throw(t)), closeDeadline)
          }
      }
  }

  /**
   * Wraps one or more shutdown errors into a single [[CloseException]],
   * attaching each underlying error as a suppressed exception.
   */
  private[this] def newCloseException(errors: Seq[Throwable]): CloseException = {
    val message = if (errors.size == 1) {
      "An error occurred on exit"
    } else {
      s"${errors.size} errors occurred on exit"
    }
    val exc = new CloseException(message)
    errors.foreach { error => exc.addSuppressed(error) }
    exc
  }

  /**
   * Second shutdown phase: closes the `closeOnExitLast` Closables in parallel,
   * bounded by `deadline`, and combines their results with `onExitResults`
   * from the first phase; fails with a [[CloseException]] if either phase
   * produced any errors.
   */
  private[this] final def closeLastExits(
    onExitResults: Seq[Try[Unit]],
    deadline: Time
  ): Future[Unit] = {
    Future
      .collectToTry(lastExits.asScala.toSeq.map(_.close(deadline)))
      .by(shutdownTimer, deadline)
      .transform {
        case Return(results) =>
          val errors = (onExitResults ++ results).collect { case Throw(e) => e }
          if (errors.isEmpty) {
            Future.Done
          } else {
            Future.exception(newCloseException(errors))
          }
        case Throw(t) =>
          Future.exception(newCloseException(Seq(t)))
      }
  }

  /**
   * JVM entry point: runs the full lifecycle via `nonExitingMain`, converting
   * any thrown error into a process exit (with usage output for flag errors).
   */
  final def main(args: Array[String]): Unit = {
    try {
      nonExitingMain(args)
    } catch {
      case FlagUsageError(reason) =>
        exitOnError(reason)
      case FlagParseException(reason, _) =>
        exitOnError(reason, flag.usage)
      case t: Throwable =>
        exitOnError(t)
    }
  }

  /**
   * Runs the complete application lifecycle — register, load-service bindings,
   * inits, flag parsing, premains, the user's `main()`, postmains, and close —
   * without calling `System.exit` on failure; errors propagate to the caller.
   */
  final def nonExitingMain(args: Array[String]): Unit = {
    observe(Register) {
      App.register(this)
    }
    observe(LoadBindings) {
      loadServiceBindings.foreach { binding => LoadService.bind(binding) }
    }
    observe(Init) {
      for (f <- inits) f()
    }
    observe(ParseArgs) {
      parseArgs(args)
    }
    observe(PreMain) {
      for (f <- premains) f()
    }
    observe(Main) {
      // Get a main() if it's defined. It's possible to define traits that only use pre/post mains.
      val mainMethod =
        try Some(getClass.getMethod("main"))
        catch {
          case _: NoSuchMethodException => None
        }

      // Invoke main() if it exists.
      mainMethod.foreach { method =>
        try method.invoke(this)
        catch {
          case e: InvocationTargetException => throw e.getCause
        }
      }
    }
    observe(PostMain) {
      for (f <- postmains.asScala) f()
    }
    observe(Close) {
      // We get a reference to the `close()` Future in order to Await upon it, to ensure the thread
      // waits for `close()` to complete.
      // Note that the Future returned here is the same as `promise` exposed via ClosableOnce,
      // which is the same result that would be returned by an `Await.result` on `this`.
      val closeFuture = close(defaultCloseGracePeriod)
      // The deadline to 'close' is advisory; we enforce it here.
      if (!suppressGracefulShutdownErrors) Await.result(closeFuture, closeDeadline - Time.now)
      else {
        try { // even if we suppress shutdown errors, we give the resources time to close
          Await.ready(closeFuture, closeDeadline - Time.now)
        } catch {
          case e: TimeoutException =>
            throw e // we want TimeoutExceptions to propagate
          case NonFatal(_) => ()
        }
      }
    }
  }
}
object App {
  // Holder for the most recently registered App instance.
  private[this] val ref = new AtomicReference[Option[App]](None)

  /**
   * The currently registered App, if any. While the expectation is that there
   * will be a single running App per process, the most-recently registered
   * App will be returned in the event that more than one exists.
   */
  def registered: Option[App] = ref.get

  // Called during the Register lifecycle phase of nonExitingMain.
  private[app] def register(app: App): Unit = ref.set(Some(app))
}
| twitter/util | util-app/src/main/scala/com/twitter/app/App.scala | Scala | apache-2.0 | 16,447 |
import org.scalatest.FunSuite
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
/** Checks that a macro-annotated class retains the MACRO flag on its
 * runtime-reflection symbol.
 */
class Definition extends FunSuite {
  test("macro annotations get the MACRO flag") {
    // staticClass looks up the `identity` class via the current mirror;
    // its MACRO flag must be observable through isMacro.
    assert(cm.staticClass("identity").isMacro === true)
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.spark
import com.azure.cosmos.models.CosmosParameterizedQuery
import org.apache.spark.sql.sources.{
And, EqualNullSafe, EqualTo, Filter, GreaterThan,
GreaterThanOrEqual, In, IsNotNull, IsNull, LessThan, LessThanOrEqual, Not, Or,
StringContains, StringEndsWith, StringStartsWith
}
import scala.collection.mutable.ListBuffer
private case class FilterAnalyzer() {

  // TODO: moderakh it is worth looking at DOM/AST:
  // https://github.com/Azure/azure-cosmos-dotnet-v3/tree/master/Microsoft.Azure.Cosmos/src/SqlObjects
  // https://github.com/Azure/azure-sdk-for-java/pull/17789#discussion_r530574888

  /**
   * Partitions `filters` into predicates that can be pushed down to Cosmos DB
   * (rendered into a parameterized SQL query) and predicates Spark must apply
   * itself. A configured custom query disables push-down entirely.
   */
  def analyze(filters: Array[Filter], cosmosReadConfig: CosmosReadConfig): AnalyzedFilters = {
    if (cosmosReadConfig.customQuery.isDefined) {
      AnalyzedFilters(
        cosmosReadConfig.customQuery.get,
        Array.empty[Filter],
        filters)
    } else {
      val queryBuilder = new StringBuilder
      queryBuilder.append("SELECT * FROM r")

      val list = ListBuffer[(String, Any)]()
      val filtersToBePushedDownToCosmos = ListBuffer[Filter]()
      val filtersNotSupportedByCosmos = ListBuffer[Filter]()

      val whereClauseBuilder = new StringBuilder
      for (filter <- filters) {
        val filterAsCosmosPredicate = new StringBuilder()
        // Remember the parameter count before probing this filter so that
        // parameters appended for a predicate that ultimately cannot be
        // pushed down (e.g. And(supported, unsupported)) can be rolled back.
        val paramCountBefore = list.size
        val canBePushedDownToCosmos = appendCosmosQueryPredicate(filterAsCosmosPredicate, list, filter)
        if (canBePushedDownToCosmos) {
          if (filtersToBePushedDownToCosmos.nonEmpty) {
            whereClauseBuilder.append(" AND ")
          }
          filtersToBePushedDownToCosmos.append(filter)
          whereClauseBuilder.append(filterAsCosmosPredicate)
        } else {
          // Bug fix: drop any parameters accumulated while probing the
          // unsupported filter; previously they stayed in `list`, so the
          // parameterized query carried parameters its text never referenced.
          list.remove(paramCountBefore, list.size - paramCountBefore)
          filtersNotSupportedByCosmos.append(filter)
        }
      }

      if (whereClauseBuilder.nonEmpty) {
        queryBuilder.append(" WHERE ")
        queryBuilder.append(whereClauseBuilder)
      }

      AnalyzedFilters(
        CosmosParameterizedQuery(queryBuilder.toString(), list.map(f => f._1).toList, list.map(f => f._2).toList),
        filtersToBePushedDownToCosmos.toArray,
        filtersNotSupportedByCosmos.toArray)
    }
  }

  /**
   * Provides Json Field path prefixed by the root. For example: "r['id']"
   * @param sparkFilterColumnName the column name of the filter
   * @return cosmosFieldpath
   */
  private def canonicalCosmosFieldPath(sparkFilterColumnName: String): String = {
    val result = new StringBuilder(FilterAnalyzer.rootName)
    sparkFilterColumnName.split('.').foreach(cNamePart => result.append(s"['${normalizedFieldName(cNamePart)}']"))
    result.toString
  }

  /**
   * Parameter name in the parametrized query: e.g. @param1.
   * @param paramNumber parameter index
   * @return the parameter placeholder name
   */
  private def paramName(paramNumber: Integer): String = {
    s"@param$paramNumber"
  }

  // scalastyle:off cyclomatic.complexity
  // scalastyle:off method.length
  // scalastyle:off multiple.string.literals
  /**
   * Appends the Cosmos SQL rendering of `filter` to `queryBuilder`,
   * recording parameter values in `list`.
   * @return true when the filter is fully translatable to Cosmos SQL; false
   *         otherwise (the caller must then discard the appended text and
   *         roll back the appended parameters)
   */
  private def appendCosmosQueryPredicate(queryBuilder: StringBuilder,
                                         list: scala.collection.mutable.ListBuffer[(String, Any)],
                                         filter: Filter): Boolean = {
    val pName = paramName(list.size)
    filter match {
      case EqualTo(attr, value) =>
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append("=").append(pName)
        list.append((pName, value))
        true
      case EqualNullSafe(attr, value) =>
        // TODO moderakh check the difference between EqualTo and EqualNullSafe
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append("=").append(pName)
        list.append((pName, value))
        true
      case LessThan(attr, value) =>
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append("<").append(pName)
        list.append((pName, value))
        true
      case GreaterThan(attr, value) =>
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append(">").append(pName)
        list.append((pName, value))
        true
      case LessThanOrEqual(attr, value) =>
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append("<=").append(pName)
        list.append((pName, value))
        true
      case GreaterThanOrEqual(attr, value) =>
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append(">=").append(pName)
        list.append((pName, value))
        true
      case In(attr, values) =>
        // One fresh parameter per element of the IN list.
        queryBuilder.append(canonicalCosmosFieldPath(attr)).append(" IN ")
        queryBuilder.append("(")
        queryBuilder.append(
          values.map(
            value => {
              val pName = paramName(list.size)
              list.append((pName, value))
              pName
            }
          ).mkString(","))
        queryBuilder.append(")")
        true
      case StringStartsWith(attr, value: String) =>
        queryBuilder.append("STARTSWITH(").append(canonicalCosmosFieldPath(attr)).append(",").append(pName).append(")")
        list.append((pName, value))
        true
      case StringEndsWith(attr, value: String) =>
        queryBuilder.append("ENDSWITH(").append(canonicalCosmosFieldPath(attr)).append(",").append(pName).append(")")
        list.append((pName, value))
        true
      case StringContains(attr, value: String) =>
        queryBuilder.append("CONTAINS(").append(canonicalCosmosFieldPath(attr)).append(",").append(pName).append(")")
        list.append((pName, value))
        true
      case IsNull(attr) =>
        // Spark "null" maps to either an explicit JSON null or an absent field.
        queryBuilder.append(s"(IS_NULL(${canonicalCosmosFieldPath(attr)}) OR NOT(IS_DEFINED(${canonicalCosmosFieldPath(attr)})))")
        true
      case IsNotNull(attr) =>
        queryBuilder.append(s"(NOT(IS_NULL(${canonicalCosmosFieldPath(attr)})) AND IS_DEFINED(${canonicalCosmosFieldPath(attr)}))")
        true
      case And(leftFilter, rightFilter) =>
        queryBuilder.append("(")
        val isLeftCosmosPredicate = appendCosmosQueryPredicate(queryBuilder, list, leftFilter)
        queryBuilder.append(" AND ")
        val isRightCosmosPredicate = appendCosmosQueryPredicate(queryBuilder, list, rightFilter)
        queryBuilder.append(")")
        isLeftCosmosPredicate && isRightCosmosPredicate
      case Or(leftFilter, rightFilter) =>
        queryBuilder.append("(")
        val isLeftCosmosPredicate = appendCosmosQueryPredicate(queryBuilder, list, leftFilter)
        queryBuilder.append(" OR ")
        val isRightCosmosPredicate = appendCosmosQueryPredicate(queryBuilder, list, rightFilter)
        queryBuilder.append(")")
        isLeftCosmosPredicate && isRightCosmosPredicate
      case Not(childFilter) =>
        queryBuilder.append("NOT(")
        val isInnerCosmosPredicate = appendCosmosQueryPredicate(queryBuilder, list, childFilter)
        queryBuilder.append(")")
        isInnerCosmosPredicate
      case _: Filter =>
        // the unsupported filter will be applied by the spark platform itself.
        // TODO: moderakh how count, avg, min, max are pushed down? or orderby?
        // spark 3.0 does not support aggregate push downs, but spark 3.1 will
        // https://issues.apache.org/jira/browse/SPARK-22390
        // https://github.com/apache/spark/pull/29695/files
        false
    }
  }
  // scalastyle:on multiple.string.literals
  // scalastyle:on method.length
  // scalastyle:on cyclomatic.complexity

  private def normalizedFieldName(jsonFieldName: String): String = {
    // TODO: moderakh what happens if jsonFieldName has "[" in it? we need to escape it?
    jsonFieldName
  }
}
private object FilterAnalyzer {
  // All generated predicates address fields relative to this query root
  // alias, e.g. r['a']['b'].
  private val rootName = "r"
}
| Azure/azure-sdk-for-java | sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/FilterAnalyzer.scala | Scala | mit | 7,683 |
package ohnosequences.obesnp
/** A named chromosome exposing the genes and SNPs located on it;
 * implementations supply the underlying data source.
 */
trait Chromosome {
  def name: String
  def getGenes: List[Gene]
  def getSNPs: List[SNP]
}
| ohnosequences/obe.SNP | src/main/scala/ohnosequences/obesnp/Chromosome.scala | Scala | mit | 124 |
/*
* Singleton.scala
* (LucreEvent)
*
* Copyright (c) 2011-2015 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss
package lucre
package event
package impl
import serial.DataOutput
/** A `Singleton` event is one which doesn't carry any state. This is a utility trait
* which provides no-op implementations for `writeData` and `disposeData`.
*/
trait Singleton[S <: stm.Sys[S] /* , A, Repr */ ] extends InvariantSelector[S] {
  // Nothing to dispose: a singleton event carries no transactional state.
  final protected def disposeData()(implicit tx: S#Tx) = ()

  // Nothing to serialize, for the same reason.
  final protected def writeData(out: DataOutput) = ()
}
| Sciss/LucreEvent | core/src/main/scala/de/sciss/lucre/event/impl/Singleton.scala | Scala | lgpl-2.1 | 741 |
package doodle
package image
package examples
import cats.instances.list._
import doodle.core._
import doodle.image.Image
import doodle.syntax._
import doodle.image.syntax._
object Polygon {
  /** A regular polygon with `sides` vertices on a circle of the given
   * radius, stroked 5 units wide in a hue derived from the step angle.
   */
  def polygon(sides: Int, radius: Double) = {
    import PathElement._
    val centerAngle = 360.degrees / sides.toDouble
    val elements = (0 until sides).map { index =>
      val vertex = Point.polar(radius, centerAngle * index.toDouble)
      index match {
        case 0 => moveTo(vertex) // open the path at the first vertex
        case _ => lineTo(vertex)
      }
    }
    Image
      .closedPath(elements)
      .strokeWidth(5)
      .strokeColor(Color.hsl(centerAngle, 1.0, .5))
  }

  /** Polygons from triangle up to 20-gon, stacked on top of one another. */
  def image = (3 to 20).toList.map(polygon(_, 100)).allOn
}
| underscoreio/doodle | image/shared/src/main/scala/doodle/image/examples/Polygon.scala | Scala | apache-2.0 | 678 |
import sbt._
/** Centralizes library coordinates for the sbt build. */
object Dependencies {

  // Apache Flink artifacts; marked Provided in the aggregated sequences
  // below because the Flink runtime supplies them at deployment time.
  private object flink {
    lazy val namespace = "org.apache.flink"
    lazy val version = "1.3.2"
    lazy val core = namespace %% "flink-scala" % version
    lazy val streaming = namespace %% "flink-streaming-scala" % version
    lazy val clients = namespace %% "flink-clients" % version
  }

  // InfluxDB Scala client.
  private object influxdb {
    lazy val namespace = "com.paulgoldbaum"
    lazy val version = "0.5.2"
    lazy val scala = namespace %% "scala-influxdb-client" % version
  }

  // flink-jpmml integration.
  private object jpmml {
    lazy val version = "1.1.4-DEV"
    lazy val namespace = "io.radicalbit"
    lazy val core = namespace %% "flink-jpmml-scala" % version
  }

  // SLF4J logging API.
  private object logging {
    lazy val namespace = "org.slf4j"
    lazy val version = "1.7.7"
    lazy val slf4j = namespace % "slf4j-api" % version
  }

  lazy val simpleDependencies = Seq(
    flink.core % Provided,
    flink.streaming % Provided,
    flink.clients % Provided,
    influxdb.scala
  )

  lazy val jpmmlDependencies = Seq(
    flink.core % Provided,
    flink.streaming % Provided,
    flink.clients % Provided,
    jpmml.core,
    logging.slf4j
  )
}
import java.time.LocalDate
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import org.scalatest.FunSuite
import org.scalatest.Matchers
/** Verifies Gigasecond.addGigaseconds for dates and date-times, including
 * epoch-relative inputs and day roll-over.
 */
class GigasecondTests extends FunSuite with Matchers {

  // Parse an ISO-8601 date-time, e.g. "2043-01-01T01:46:40".
  private def dateTime(str: String): LocalDateTime =
    LocalDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(str))

  // Parse an ISO-8601 date, e.g. "2011-04-25".
  private def date(str: String): LocalDate =
    LocalDate.from(DateTimeFormatter.ISO_DATE.parse(str))

  test("modern time") {
    val input = date("2011-04-25")
    val expected = dateTime("2043-01-01T01:46:40")
    Gigasecond.addGigaseconds(input) should be(expected)
  }

  test("after epoch time") {
    val input = date("1977-06-13")
    val expected = dateTime("2009-02-19T01:46:40")
    Gigasecond.addGigaseconds(input) should be(expected)
  }

  test("before epoch time") {
    val input = date("1959-07-19")
    val expected = dateTime("1991-03-27T01:46:40")
    Gigasecond.addGigaseconds(input) should be(expected)
  }

  test("full time specified") {
    val input = dateTime("2015-01-24T22:00:00")
    val expected = dateTime("2046-10-02T23:46:40")
    Gigasecond.addGigaseconds(input) should be(expected)
  }

  test("full time with day roll-over") {
    val input = dateTime("2015-01-24T23:59:59")
    val expected = dateTime("2046-10-03T01:46:39")
    Gigasecond.addGigaseconds(input) should be(expected)
  }

  test("your birthday") {
    val yourBirthday = date("1982-01-14")
    val expected = dateTime("2013-09-22T01:46:40")
    Gigasecond.addGigaseconds(yourBirthday) should be(expected)
  }
}
| stanciua/exercism | scala/gigasecond/src/test/scala/GigasecondTest.scala | Scala | mit | 1,554 |
import scala.collection.immutable.SortedSet
import scala.math.Ordering.Double.TotalOrdering
import scala.math.Ordering.Implicits._
/** Exercises Ordering instances for primitives, options, collections, and
 * tuples, plus sortBy with a tuple key.
 */
object Test extends App {
  /** Checks that `compare` is antisymmetric for the pair (t1, t2). */
  def test[T](t1 : T, t2 : T)(implicit ord : Ordering[T]) = {
    val cmp = ord.compare(t1, t2);
    val cmp2 = ord.compare(t2, t1);

    assert((cmp == 0) == (cmp2 == 0))
    assert((cmp > 0) == (cmp2 < 0))
    assert((cmp < 0) == (cmp2 > 0))
  }

  /** Requires t1 < t2, then checks antisymmetry on all pairings. */
  def testAll[T](t1 : T, t2 : T)(implicit ord : Ordering[T]) = {
    assert(ord.compare(t1, t2) < 0)
    test(t1, t2);
    test(t1, t1);
    test(t2, t2);
  }

  assert(Ordering[String].compare("australopithecus", "brontausaurus") < 0)
  // assert(Ordering[Unit].compare((), ()) == 0)

  testAll("bar", "foo");
  testAll[Byte](0, 1);
  testAll(false, true)
  testAll(1, 2);
  testAll(1.0, 2.0);
  testAll(None, Some(1));
  testAll[Seq[Int]](List(1), List(1, 2));
  testAll[Seq[Int]](List(1, 2), List(2));
  testAll[SortedSet[Int]](SortedSet(1), SortedSet(1, 2))
  testAll[SortedSet[Int]](SortedSet(1, 2), SortedSet(2))

  testAll((1, "bar"), (1, "foo"))
  testAll((1, "foo"), (2, "bar"))

  // sortBy with a composite (length, first char) key
  val words = "The quick brown fox jumped over the lazy dog".split(' ')
  val result = words.sortBy(x => (x.length, x.head))
  assert(result sameElements Array[String]("The", "dog", "fox", "the", "lazy", "over", "brown", "quick", "jumped"))
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cs.ucla.edu.bwaspark.datatype
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import scala.Serializable
//Original data structure: mem_pestat_t in bwamem.h
/** Paired-end statistics record mirroring bwamem.h's mem_pestat_t.
 *
 * Custom writeObject/readObject serialize the five fields in a fixed order
 * (low, high, failed as Int; avg, std as Double); the two methods must be
 * kept in sync.
 */
class MemPeStat extends Serializable {
  var low: Int = 0      // presumably lower bound of the insert-size range — TODO confirm against bwamem.h
  var high: Int = 0     // presumably upper bound — TODO confirm
  var failed: Int = 0
  var avg: Double = 0
  var std: Double = 0

  // Serializes fields in declaration order; must match readObject below.
  private def writeObject(out: ObjectOutputStream) {
    out.writeInt(low)
    out.writeInt(high)
    out.writeInt(failed)
    out.writeDouble(avg)
    out.writeDouble(std)
  }

  // Reads fields in the same order writeObject wrote them.
  private def readObject(in: ObjectInputStream) {
    low = in.readInt
    high = in.readInt
    failed = in.readInt
    avg = in.readDouble
    std = in.readDouble
  }

  // No stream data: leave the freshly constructed defaults in place.
  private def readObjectNoData() {
  }
}
| ytchen0323/cloud-scale-bwamem | src/main/scala/cs/ucla/edu/bwaspark/datatype/MemPeStat.scala | Scala | apache-2.0 | 1,524 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Expression, Generator}
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{IntegerType, StructType}
/** End-to-end tests for generator expressions (explode, posexplode, stack,
 * inline and their *_outer variants), including error cases and use inside
 * aggregates and lateral views.
 */
class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
  import testImplicits._

  test("stack") {
    val df = spark.range(1)

    // An empty DataFrame suppresses the result generation
    checkAnswer(spark.emptyDataFrame.selectExpr("stack(1, 1, 2, 3)"), Nil)

    // Rows & columns
    checkAnswer(df.selectExpr("stack(1, 1, 2, 3)"), Row(1, 2, 3) :: Nil)
    checkAnswer(df.selectExpr("stack(2, 1, 2, 3)"), Row(1, 2) :: Row(3, null) :: Nil)
    checkAnswer(df.selectExpr("stack(3, 1, 2, 3)"), Row(1) :: Row(2) :: Row(3) :: Nil)
    checkAnswer(df.selectExpr("stack(4, 1, 2, 3)"), Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)

    // Various column types
    checkAnswer(df.selectExpr("stack(3, 1, 1.1, 'a', 2, 2.2, 'b', 3, 3.3, 'c')"),
      Row(1, 1.1, "a") :: Row(2, 2.2, "b") :: Row(3, 3.3, "c") :: Nil)

    // Null values
    checkAnswer(df.selectExpr("stack(3, 1, 1.1, null, 2, null, 'b', null, 3.3, 'c')"),
      Row(1, 1.1, null) :: Row(2, null, "b") :: Row(null, 3.3, "c") :: Nil)

    // Repeat generation at every input row
    checkAnswer(spark.range(2).selectExpr("stack(2, 1, 2, 3)"),
      Row(1, 2) :: Row(3, null) :: Row(1, 2) :: Row(3, null) :: Nil)

    // The first argument must be a positive constant integer.
    val m = intercept[AnalysisException] {
      df.selectExpr("stack(1.1, 1, 2, 3)")
    }.getMessage
    assert(m.contains("The number of rows must be a positive constant integer."))
    val m2 = intercept[AnalysisException] {
      df.selectExpr("stack(-1, 1, 2, 3)")
    }.getMessage
    assert(m2.contains("The number of rows must be a positive constant integer."))

    // The data for the same column should have the same type.
    val m3 = intercept[AnalysisException] {
      df.selectExpr("stack(2, 1, '2.2')")
    }.getMessage
    assert(m3.contains("data type mismatch: Argument 1 (int) != Argument 2 (string)"))

    // stack on column data
    val df2 = Seq((2, 1, 2, 3)).toDF("n", "a", "b", "c")
    checkAnswer(df2.selectExpr("stack(2, a, b, c)"), Row(1, 2) :: Row(3, null) :: Nil)

    val m4 = intercept[AnalysisException] {
      df2.selectExpr("stack(n, a, b, c)")
    }.getMessage
    assert(m4.contains("The number of rows must be a positive constant integer."))

    val df3 = Seq((2, 1, 2.0)).toDF("n", "a", "b")
    val m5 = intercept[AnalysisException] {
      df3.selectExpr("stack(2, a, b)")
    }.getMessage
    assert(m5.contains("data type mismatch: Argument 1 (int) != Argument 2 (double)"))
  }

  test("single explode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
    checkAnswer(
      df.select(explode($"intList")),
      Row(1) :: Row(2) :: Row(3) :: Nil)
  }

  test("single explode_outer") {
    // explode_outer emits a null row for an empty collection.
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
    checkAnswer(
      df.select(explode_outer($"intList")),
      Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)
  }

  test("single posexplode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
    checkAnswer(
      df.select(posexplode($"intList")),
      Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Nil)
  }

  test("single posexplode_outer") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
    checkAnswer(
      df.select(posexplode_outer($"intList")),
      Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Row(null, null) :: Nil)
  }

  test("explode and other columns") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")

    checkAnswer(
      df.select($"a", explode($"intList")),
      Row(1, 1) ::
      Row(1, 2) ::
      Row(1, 3) :: Nil)

    checkAnswer(
      df.select($"*", explode($"intList")),
      Row(1, Seq(1, 2, 3), 1) ::
      Row(1, Seq(1, 2, 3), 2) ::
      Row(1, Seq(1, 2, 3), 3) :: Nil)
  }

  test("explode_outer and other columns") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")

    checkAnswer(
      df.select($"a", explode_outer($"intList")),
      Row(1, 1) ::
      Row(1, 2) ::
      Row(1, 3) ::
      Row(2, null) ::
      Nil)

    checkAnswer(
      df.select($"*", explode_outer($"intList")),
      Row(1, Seq(1, 2, 3), 1) ::
      Row(1, Seq(1, 2, 3), 2) ::
      Row(1, Seq(1, 2, 3), 3) ::
      Row(2, Seq(), null) ::
      Nil)
  }

  test("aliased explode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")

    checkAnswer(
      df.select(explode($"intList").as("int")).select($"int"),
      Row(1) :: Row(2) :: Row(3) :: Nil)

    checkAnswer(
      df.select(explode($"intList").as("int")).select(sum($"int")),
      Row(6) :: Nil)
  }

  test("aliased explode_outer") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")

    checkAnswer(
      df.select(explode_outer($"intList").as("int")).select($"int"),
      Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)

    checkAnswer(
      df.select(explode($"intList").as("int")).select(sum($"int")),
      Row(6) :: Nil)
  }

  test("explode on map") {
    val df = Seq((1, Map("a" -> "b"))).toDF("a", "map")

    checkAnswer(
      df.select(explode($"map")),
      Row("a", "b"))
  }

  test("explode_outer on map") {
    val df = Seq((1, Map("a" -> "b")), (2, Map[String, String]()),
      (3, Map("c" -> "d"))).toDF("a", "map")

    checkAnswer(
      df.select(explode_outer($"map")),
      Row("a", "b") :: Row(null, null) :: Row("c", "d") :: Nil)
  }

  test("explode on map with aliases") {
    val df = Seq((1, Map("a" -> "b"))).toDF("a", "map")

    checkAnswer(
      df.select(explode($"map").as("key1" :: "value1" :: Nil)).select("key1", "value1"),
      Row("a", "b"))
  }

  test("explode_outer on map with aliases") {
    val df = Seq((3, None), (1, Some(Map("a" -> "b")))).toDF("a", "map")

    checkAnswer(
      df.select(explode_outer($"map").as("key1" :: "value1" :: Nil)).select("key1", "value1"),
      Row("a", "b") :: Row(null, null) :: Nil)
  }

  test("self join explode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
    val exploded = df.select(explode($"intList").as("i"))

    checkAnswer(
      exploded.join(exploded, exploded("i") === exploded("i")).agg(count("*")),
      Row(3) :: Nil)
  }

  test("inline raises exception on array of null type") {
    val m = intercept[AnalysisException] {
      spark.range(2).selectExpr("inline(array())")
    }.getMessage
    assert(m.contains("data type mismatch"))
  }

  test("inline with empty table") {
    checkAnswer(
      spark.range(0).selectExpr("inline(array(struct(10, 100)))"),
      Nil)
  }

  test("inline on literal") {
    checkAnswer(
      spark.range(2).selectExpr("inline(array(struct(10, 100), struct(20, 200), struct(30, 300)))"),
      Row(10, 100) :: Row(20, 200) :: Row(30, 300) ::
        Row(10, 100) :: Row(20, 200) :: Row(30, 300) :: Nil)
  }

  test("inline on column") {
    val df = Seq((1, 2)).toDF("a", "b")

    checkAnswer(
      df.selectExpr("inline(array(struct(a), struct(a)))"),
      Row(1) :: Row(1) :: Nil)

    checkAnswer(
      df.selectExpr("inline(array(struct(a, b), struct(a, b)))"),
      Row(1, 2) :: Row(1, 2) :: Nil)

    // Spark think [struct<a:int>, struct<b:int>] is heterogeneous due to name difference.
    val m = intercept[AnalysisException] {
      df.selectExpr("inline(array(struct(a), struct(b)))")
    }.getMessage
    assert(m.contains("data type mismatch"))

    checkAnswer(
      df.selectExpr("inline(array(struct(a), named_struct('a', b)))"),
      Row(1) :: Row(2) :: Nil)

    // Spark think [struct<a:int>, struct<col1:int>] is heterogeneous due to name difference.
    val m2 = intercept[AnalysisException] {
      df.selectExpr("inline(array(struct(a), struct(2)))")
    }.getMessage
    assert(m2.contains("data type mismatch"))

    checkAnswer(
      df.selectExpr("inline(array(struct(a), named_struct('a', 2)))"),
      Row(1) :: Row(2) :: Nil)

    checkAnswer(
      df.selectExpr("struct(a)").selectExpr("inline(array(*))"),
      Row(1) :: Nil)

    checkAnswer(
      df.selectExpr("array(struct(a), named_struct('a', b))").selectExpr("inline(*)"),
      Row(1) :: Row(2) :: Nil)
  }

  test("inline_outer") {
    val df = Seq((1, "2"), (3, "4"), (5, "6")).toDF("col1", "col2")
    val df2 = df.select(
      when($"col1" === 1, null).otherwise(array(struct($"col1", $"col2"))).as("col1"))
    checkAnswer(
      df2.selectExpr("inline(col1)"),
      Row(3, "4") :: Row(5, "6") :: Nil
    )
    checkAnswer(
      df2.selectExpr("inline_outer(col1)"),
      Row(null, null) :: Row(3, "4") :: Row(5, "6") :: Nil
    )
  }

  test("SPARK-14986: Outer lateral view with empty generate expression") {
    checkAnswer(
      sql("select nil from values 1 lateral view outer explode(array()) n as nil"),
      Row(null) :: Nil
    )
  }

  test("outer explode()") {
    checkAnswer(
      sql("select * from values 1, 2 lateral view outer explode(array()) a as b"),
      Row(1, null) :: Row(2, null) :: Nil)
  }

  test("outer generator()") {
    spark.sessionState.functionRegistry
      .createOrReplaceTempFunction("empty_gen", _ => EmptyGenerator())
    checkAnswer(
      sql("select * from values 1, 2 lateral view outer empty_gen() a as b"),
      Row(1, null) :: Row(2, null) :: Nil)
  }

  test("generator in aggregate expression") {
    withTempView("t1") {
      Seq((1, 1), (1, 2), (2, 3)).toDF("c1", "c2").createTempView("t1")
      checkAnswer(
        sql("select explode(array(min(c2), max(c2))) from t1"),
        Row(1) :: Row(3) :: Nil
      )
      checkAnswer(
        sql("select posexplode(array(min(c2), max(c2))) from t1 group by c1"),
        Row(0, 1) :: Row(1, 2) :: Row(0, 3) :: Row(1, 3) :: Nil
      )

      // test generator "stack" which require foldable argument
      checkAnswer(
        sql("select stack(2, min(c1), max(c1), min(c2), max(c2)) from t1"),
        Row(1, 2) :: Row(1, 3) :: Nil
      )

      val msg1 = intercept[AnalysisException] {
        sql("select 1 + explode(array(min(c2), max(c2))) from t1 group by c1")
      }.getMessage
      assert(msg1.contains("Generators are not supported when it's nested in expressions"))

      val msg2 = intercept[AnalysisException] {
        sql(
          """select
            |  explode(array(min(c2), max(c2))),
            |  posexplode(array(min(c2), max(c2)))
            |from t1 group by c1
          """.stripMargin)
      }.getMessage
      assert(msg2.contains("Only one generator allowed per aggregate clause"))
    }
  }
}
/** Test-only generator producing zero rows; used above to exercise `outer`
 * lateral-view semantics when the generator yields nothing.
 */
case class EmptyGenerator() extends Generator {
  override def children: Seq[Expression] = Nil
  override def elementSchema: StructType = new StructType().add("id", IntegerType)
  override def eval(input: InternalRow): TraversableOnce[InternalRow] = Seq.empty
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Codegen mirrors eval: emit an empty Iterator[InternalRow].
    val iteratorClass = classOf[Iterator[_]].getName
    ev.copy(code =
      code"$iteratorClass<InternalRow> ${ev.value} = $iteratorClass$$.MODULE$$.empty();")
  }
}
| jkbradley/spark | sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala | Scala | apache-2.0 | 12,142 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import scala.util.{ Try, Failure }
import java.util.TimeZone
import java.text.DateFormat
trait DateParser extends java.io.Serializable { self =>

  /** Attempt to parse `s` into a RichDate in the given time zone. */
  def parse(s: String)(implicit tz: TimeZone): Try[RichDate]

  // Map the input before parsing (name from functional programming: contravariant map)
  def contramap(fn: String => String): DateParser = new DateParser {
    def parse(s: String)(implicit tz: TimeZone): Try[RichDate] = self.parse(fn(s))
  }

  /** Fall back to `second` when this parser fails on the input. */
  def rescueWith(second: DateParser): DateParser =
    new DateParser {
      def parse(s: String)(implicit tz: TimeZone) = {
        self.parse(s) orElse second.parse(s)
      }
    }
}
object DateParser {

  /**
   * This is scalding's default date parser. You can choose this
   * by setting an implicit val DateParser.
   * Note that DateParsers using SimpleDateFormat from Java are
   * not thread-safe, thus the def here. You can cache the result
   * if you are sure.
   */
  def default: DateParser = new DateParser {
    def parse(s: String)(implicit tz: TimeZone) =
      DateOps.getDateParser(s)
        .map { p => p.parse(s) }
        .getOrElse(Failure(new IllegalArgumentException("Could not find parser for: " + s)))
  }

  /** Try these Parsers in order. Note: `items` must be non-empty —
   * `reduce` throws on an empty collection.
   */
  def apply(items: Iterable[DateParser]): DateParser =
    items.reduce { _.rescueWith(_) }

  /** Using the type-class pattern */
  def parse(s: String)(implicit tz: TimeZone, p: DateParser): Try[RichDate] = p.parse(s)(tz)

  /**
   * Note that DateFormats in Java are generally not thread-safe,
   * so you should not share the result here across threads.
   */
  implicit def from(df: DateFormat): DateParser = new DateParser {
    def parse(s: String)(implicit tz: TimeZone) = Try {
      // Mutates the caller-supplied DateFormat's time zone before parsing.
      df.setTimeZone(tz)
      RichDate(df.parse(s))
    }
  }

  /**
   * This ignores the time-zone assuming it must be in the String
   */
  def from(fn: String => RichDate) = new DateParser {
    def parse(s: String)(implicit tz: TimeZone) = Try(fn(s))
  }

  def from(fn: (String, TimeZone) => RichDate) = new DateParser {
    def parse(s: String)(implicit tz: TimeZone) = Try(fn(s, tz))
  }
}
/**
* //Scalding used to support Natty, this is removed. To add it back, use something like this in your code,
* //possibly with:
* //implicit val myParser = DateParser(Seq(DateParser.default, NattyParser))
*
* object NattyParser extends DateParser {
* def parse(s: String)(implicit tz: TimeZone) = Try {
* val timeParser = new natty.Parser(tz)
* val dateGroups = timeParser.parse(s)
* if (dateGroups.size == 0) {
* throw new IllegalArgumentException("Could not convert string: '" + str + "' into a date.")
* }
* // a DateGroup can have more than one Date (e.g. if you do "Sept. 11th or 12th"),
* // but we're just going to take the first
* val dates = dateGroups.get(0).getDates()
* RichDate(dates.get(0))
* }
* }
*
*/
| bendridi/scalding | scalding-date/src/main/scala/com/twitter/scalding/DateParser.scala | Scala | apache-2.0 | 3,438 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.parquet
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName
import org.apache.parquet.schema.Type.Repetition
import org.apache.parquet.schema.{MessageType, OriginalType, Type, Types}
import org.locationtech.geomesa.features.serialization.ObjectType
import org.locationtech.geomesa.features.serialization.ObjectType.ObjectType
import org.opengis.feature.`type`.AttributeDescriptor
import org.opengis.feature.simple.SimpleFeatureType
object SimpleFeatureParquetSchema {

  // Name of the extra column holding the simple feature ID.
  val FeatureIDField = "__fid__"

  /** Build the Parquet message type for a SimpleFeatureType: one field per
   * attribute descriptor, with the feature-ID column appended last.
   */
  def apply(sft: SimpleFeatureType): MessageType = {
    import scala.collection.JavaConversions._
    val idField =
      Types.primitive(PrimitiveTypeName.BINARY, Repetition.REPEATED)
        .as(OriginalType.UTF8)
        .named(FeatureIDField)

    // NOTE: idField goes at the end of the record
    new MessageType(sft.getTypeName, sft.getAttributeDescriptors.map(convertField) :+ idField)
  }

  /** Map one attribute descriptor to its Parquet field type. */
  def convertField(ad: AttributeDescriptor): Type = {
    import PrimitiveTypeName._
    import Type.Repetition

    val bindings = ObjectType.selectType(ad)

    bindings.head match {
      case ObjectType.GEOMETRY =>
        // TODO: currently only dealing with Points
        Types.buildGroup(Repetition.REQUIRED)
          .primitive(DOUBLE, Repetition.REQUIRED).named("x")
          .primitive(DOUBLE, Repetition.REQUIRED).named("y")
          .named(ad.getLocalName)

      case ObjectType.DATE =>
        // Dates stored as epoch longs.
        Types.primitive(INT64, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.STRING =>
        Types.primitive(BINARY, Repetition.OPTIONAL)
          .as(OriginalType.UTF8)
          .named(ad.getLocalName)

      case ObjectType.INT =>
        Types.primitive(INT32, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.DOUBLE =>
        Types.primitive(DOUBLE, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.LONG =>
        Types.primitive(INT64, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.FLOAT =>
        Types.primitive(FLOAT, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.BOOLEAN =>
        Types.primitive(BOOLEAN, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.BYTES =>
        Types.primitive(BINARY, Repetition.OPTIONAL)
          .named(ad.getLocalName)

      case ObjectType.LIST =>
        // Element type taken from the second binding.
        Types.optionalList().optionalElement(matchType(bindings(1)))
          .named(ad.getLocalName)

      case ObjectType.MAP =>
        // Key/value types taken from the second and third bindings.
        Types.optionalMap()
          .key(matchType(bindings(1)))
          .optionalValue(matchType(bindings(2)))
          .named(ad.getLocalName)

      case ObjectType.UUID =>
        Types.primitive(BINARY, Repetition.OPTIONAL)
          .named(ad.getLocalName)
    }
  }

  // Parquet primitive used for list elements and map keys/values.
  private def matchType(objType: ObjectType): PrimitiveTypeName = {
    import PrimitiveTypeName._

    objType match {
      case ObjectType.DATE => INT64
      case ObjectType.STRING => BINARY
      case ObjectType.INT => INT32
      case ObjectType.DOUBLE => DOUBLE
      case ObjectType.LONG => INT64
      case ObjectType.FLOAT => FLOAT
      case ObjectType.BOOLEAN => BOOLEAN
      case ObjectType.BYTES => BINARY
      case ObjectType.UUID => BINARY
    }
  }

  /** One AttributeWriter per attribute, in declaration order. */
  def buildAttributeWriters(sft: SimpleFeatureType): Array[AttributeWriter] = {
    import scala.collection.JavaConversions._
    sft.getAttributeDescriptors.zipWithIndex.map { case (ad, i) => AttributeWriter(ad, i) }.toArray
  }
}
| ddseapy/geomesa | geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-parquet/src/main/scala/org/locationtech/geomesa/parquet/SimpleFeatureParquetSchema.scala | Scala | apache-2.0 | 4,016 |
package au.com.dius.pact.model.unfiltered
import java.net.URI
import java.util.zip.GZIPInputStream
import au.com.dius.pact.model.{OptionalBody, Request, Response}
import com.typesafe.scalalogging.StrictLogging
import io.netty.handler.codec.http.{HttpResponse => NHttpResponse}
import unfiltered.netty.ReceivedMessage
import unfiltered.request.HttpRequest
import unfiltered.response._
import scala.collection.JavaConversions
@Deprecated
object Conversions extends StrictLogging {

  /** Responder that copies a pact header map onto an unfiltered response. */
  case class Headers(headers: java.util.Map[String, String]) extends unfiltered.response.Responder[Any] {
    def respond(res: HttpResponse[Any]) {
      import collection.JavaConversions._
      // a pact response may legitimately carry no headers at all
      if (headers != null) {
        headers.foreach { case (key, value) => res.header(key, value) }
      }
    }
  }

  /** Converts a pact Response to an unfiltered response function (status + headers + optional body). */
  implicit def pactToUnfilteredResponse(response: Response): ResponseFunction[NHttpResponse] = {
    if (response.getBody.isPresent) {
      Status(response.getStatus) ~> Headers(response.getHeaders) ~> ResponseString(response.getBody.getValue())
    } else Status(response.getStatus) ~> Headers(response.getHeaders)
  }

  /** Collects all request headers into a Java map, comma-joining repeated values. */
  def toHeaders(request: HttpRequest[ReceivedMessage]): java.util.Map[String, String] = {
    JavaConversions.mapAsJavaMap(request.headerNames.map(name =>
      name -> request.headers(name).mkString(",")).toMap)
  }

  /** Collects the query parameters into a Java map of name -> list of values. */
  def toQuery(request: HttpRequest[ReceivedMessage]): java.util.Map[String, java.util.List[String]] = {
    JavaConversions.mapAsJavaMap(request.parameterNames.map(name =>
      name -> JavaConversions.seqAsJavaList(request.parameterValues(name))).toMap)
  }

  /** Extracts the path component of a request URI. */
  def toPath(uri: String) = new URI(uri).getPath

  /**
   * Reads the request body as a string, transparently gunzipping when the request
   * advertises gzip content encoding. Returns the empty string when there is no
   * input stream.
   *
   * Fix: the null check now happens *before* the stream is wrapped; previously a
   * gzip-encoded request with a null input stream would NPE inside the
   * GZIPInputStream constructor (which eagerly reads the gzip header).
   */
  def toBody(request: HttpRequest[ReceivedMessage], charset: String = "UTF-8") = {
    val raw = request.inputStream
    if (raw == null) {
      ""
    } else {
      val is = if (request.headers(ContentEncoding.GZip.name).contains("gzip")) {
        new GZIPInputStream(raw)
      } else {
        raw
      }
      // NOTE(review): `charset` is unused -- Source.fromInputStream falls back to the
      // default codec here; confirm before threading it through (could change behavior)
      scala.io.Source.fromInputStream(is).mkString
    }
  }

  /** Converts an unfiltered request into a pact Request (method, path, query, headers, body). */
  implicit def unfilteredRequestToPactRequest(request: HttpRequest[ReceivedMessage]): Request = {
    new Request(request.method, toPath(request.uri), toQuery(request), toHeaders(request),
      OptionalBody.body(toBody(request)))
  }
}
| algra/pact-jvm | pact-jvm-consumer/src/main/scala/au/com/dius/pact/model/unfiltered/Conversions.scala | Scala | apache-2.0 | 2,213 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.mv.plans.util
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeReference,
AttributeSet, Expression, NamedExpression, PredicateHelper}
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.carbondata.mv.plans.modular.Flags._
import org.apache.carbondata.mv.plans.modular.JoinEdge
/**
* SelectModule is extracted from logical plan of SPJG query. All join conditions
* filter, and project operators in a single Aggregate-less subtree of logical plan
* are collected.
*
* The returned values for this match are as follows:
* - Conditions for equi-join
* - Conditions for filter
* - Project list for project
*
*/
object ExtractSelectModule extends PredicateHelper {
  // (project list, child outputs, filter predicates, child-index -> qualifier map,
  //  join edges, child plans, flags, sort specs, window specs)
  type ReturnType = (Seq[NamedExpression], Seq[Expression], Seq[Expression], Map[Int, String],
    Seq[JoinEdge], Seq[LogicalPlan], FlagSet, Seq[Seq[Any]], Seq[Seq[Any]])
  /** Matches an Aggregate-less Select subtree; None when no project/filter/sort/join was seen. */
  def unapply(plan: LogicalPlan): Option[ReturnType] = {
    val (outputs, inputs, predicates, joinedges, children, isSelect, _, flags, fspecs, wspecs) =
      collectProjectsFiltersJoinsAndSort(plan)
    if (!isSelect) {
      None
    } else {
      Some(
        outputs,
        inputs,
        predicates,
        // qualifier mapping only needs attributes referenced by outputs or predicates
        collectChildAliasMappings(
          AttributeSet(outputs).toSeq ++ AttributeSet(predicates).toSeq,
          children),
        joinedges,
        children,
        flags,
        fspecs,
        wspecs)
    }
  }
  /**
   * Walks Project/Filter/Sort/Join nodes top-down, threading a 10-tuple accumulator:
   * (outputs, inputs, predicates, join edges, leaf children, sawSelectNode,
   *  alias substitution map, flags, sort specs, window specs). Any other node type
   * terminates the walk and becomes a leaf child.
   */
  def collectProjectsFiltersJoinsAndSort(plan: LogicalPlan): (Seq[NamedExpression],
    Seq[Expression], Seq[Expression], Seq[JoinEdge], Seq[LogicalPlan], Boolean, Map[Attribute,
    Expression], FlagSet, Seq[Seq[Any]], Seq[Seq[Any]]) = {
    plan match {
      case Project(fields, child) =>
        val (_, inputs, predicates, joinedges, children, _, aliases, flags, fspecs, wspecs) =
          collectProjectsFiltersJoinsAndSort(child)
        // rewrite the project list through the aliases collected below it
        val substitutedFields = fields.map(substitute(aliases)).asInstanceOf[Seq[NamedExpression]]
        (substitutedFields, inputs, predicates, joinedges, children, true, collectAliases(
          substitutedFields), flags, fspecs, wspecs)
      case Filter(condition, child) =>
        val (outputs, inputs, predicates, joinedges, children, _, aliases, flags, fspecs, wspecs)
        = collectProjectsFiltersJoinsAndSort(child)
        val substitutedCondition = substitute(aliases)(condition)
        // split everything into conjunctive predicates so they can be matched individually
        (outputs, inputs, predicates.flatMap(splitConjunctivePredicates) ++
          splitConjunctivePredicates(substitutedCondition), joinedges, children,
          true, aliases, flags, fspecs, wspecs)
      case Sort(order, global, child) =>
        val (outputs, inputs, predicates, joinedges, children, _, aliases, flags, fspecs, wspecs)
        = collectProjectsFiltersJoinsAndSort(child)
        // NOTE(review): substitutedOrder is computed but the *unsubstituted* `order`
        // is what gets stored in the sort spec below -- confirm this is intended
        val substitutedOrder = order.map(substitute(aliases))
        (outputs, inputs, predicates, joinedges, children, true, aliases, if (global) {
          flags.setFlag(SORT).setFlag(GLOBAL)
        } else {
          flags.setFlag(SORT)
        }, Seq(Seq(order)) ++ fspecs, wspecs)
      case Join(left, right, joinType, condition) =>
        val (loutputs, linputs, lpredicates, ljoinedges, lchildren, _, laliases, lflags, lfspecs,
        lwspecs) = collectProjectsFiltersJoinsAndSort(left)
        val (routputs, rinputs, rpredicates, rjoinedges, rchildren, _, raliases, rflags, rfspecs,
        rwspecs) = collectProjectsFiltersJoinsAndSort(right)
        // partition the join condition into left-only, right-only and cross-side predicates
        val (lcondition, rcondition, ccondition) = split(condition, lchildren, rchildren)
        val joinEdge = collectJoinEdge(ccondition, lchildren, rchildren, joinType)
        // right-side edges were numbered relative to the right subtree; shift them
        // past the left subtree's children
        val adjustedJoinEdges = rjoinedges
          .map(e => JoinEdge(e.left + lchildren.size, e.right + lchildren.size, e.joinType))
        // output nullability follows the join type (outer sides become nullable)
        val output: Seq[Attribute] = {
          joinType match {
            case LeftSemi =>
              left.output
            case LeftOuter =>
              left.output ++ right.output.map(_.withNullability(true))
            case RightOuter =>
              left.output.map(_.withNullability(true)) ++ right.output
            case FullOuter =>
              left.output.map(_.withNullability(true)) ++ right.output.map(_.withNullability(true))
            case LeftAnti =>
              left.output
            case _ =>
              left.output ++ right.output
          }
        }
        // joins below sort/window specs or flagged subtrees are not supported
        if (lfspecs.isEmpty && rfspecs.isEmpty && lflags == NoFlags && rflags == NoFlags &&
            lwspecs.isEmpty && rwspecs.isEmpty) {
          (output, (linputs ++ rinputs), lpredicates.flatMap(splitConjunctivePredicates) ++
            rpredicates.flatMap(splitConjunctivePredicates) ++
            lcondition ++ rcondition ++ ccondition, ljoinedges ++
            joinEdge ++
            adjustedJoinEdges,
            lchildren ++ rchildren, true, laliases ++ raliases, NoFlags, Seq.empty, Seq.empty)
        } else {
          throw new UnsupportedOperationException(
            s"unsupported join: \\n left child ${ left } " +
            s"\\n right child ${ right }")
        }
      case other =>
        // any other node terminates the walk; isSelect stays false here
        (other.output, other.output, Nil, Nil, Seq(other), false, Map.empty, NoFlags, Seq.empty, Seq
          .empty)
    }
  }
  /** Collects attribute -> aliased expression bindings from a project list. */
  def collectAliases(fields: Seq[Expression]): Map[Attribute, Expression] = {
    fields.collect {
      case a@Alias(child, _) => a.toAttribute -> child
    }.toMap
  }
  /** Rewrites attribute references through the alias map, preserving exprIds/qualifiers. */
  def substitute(aliases: Map[Attribute, Expression])(expr: Expression): Expression = {
    expr.transform {
      case a@Alias(ref: AttributeReference, name) =>
        aliases.get(ref).map(Alias(_, name)(a.exprId, a.qualifier)).getOrElse(a)
      case a: AttributeReference =>
        aliases.get(a).map(Alias(_, a.name)(a.exprId, a.qualifier)).getOrElse(a)
    }
  }
  /** Maps each child index to the qualifier of an attribute resolved against that child. */
  def collectChildAliasMappings(attributeSet: Seq[Attribute], children: Seq[LogicalPlan]
  ): Map[Int, String] = {
    val aq = attributeSet.filter(_.qualifier.nonEmpty)
    children.zipWithIndex.flatMap {
      case (child, i) =>
        aq.find(child.outputSet.contains(_)).map(_.qualifier).flatten.map((i, _))
    }.toMap
  }
  /** Splits a join condition into (left-only, right-only, cross-side) conjuncts. */
  def split(condition: Option[Expression],
      lchildren: Seq[LogicalPlan],
      rchildren: Seq[LogicalPlan]): (Seq[Expression], Seq[Expression], Seq[Expression]) = {
    val left = lchildren.map(_.outputSet).foldLeft(AttributeSet(Set.empty))(_ ++ _)
    val right = rchildren.map(_.outputSet).foldLeft(AttributeSet(Set.empty))(_ ++ _)
    val conditions = condition.map(splitConjunctivePredicates).getOrElse(Nil)
    val (leftEvaluationCondition, rest) = conditions.partition(_.references subsetOf left)
    val (rightEvaluationCondition, commonCondition) = rest.partition(_.references subsetOf right)
    (leftEvaluationCondition, rightEvaluationCondition, commonCondition)
  }
  /*
   * collectJoinEdge only valid when condition are common condition of above split, left and
   * right children correspond
   * to respective two children parameters of above split
   *
   */
  def collectJoinEdge(condition: Seq[Expression],
      lchildren: Seq[LogicalPlan],
      rchildren: Seq[LogicalPlan],
      joinType: JoinType): Seq[JoinEdge] = {
    val common = condition.map(_.references).foldLeft(AttributeSet(Set.empty))(_ ++ _)
    // indices of children on each side that contribute attributes to the common condition;
    // right-side indices are offset past the left children
    val lIdxSeq = lchildren
      .collect { case x if x.outputSet.intersect(common).nonEmpty => lchildren.indexOf(x) }
    val rIdxSeq = rchildren
      .collect { case x if x.outputSet.intersect(common).nonEmpty => rchildren.indexOf(x) +
        lchildren.size
      }
    for (l <- lIdxSeq; r <- rIdxSeq) yield {
      JoinEdge(l, r, joinType)
    }
  }
}
/**
 * Extracts a Select module from under a chain of Window operators, accumulating
 * the window expression specs on the way down.
 */
object ExtractSelectModuleForWindow extends PredicateHelper {
  // same shape as ExtractSelectModule.ReturnType; the last element is the window specs
  type ReturnType = (Seq[NamedExpression], Seq[Expression], Seq[Expression], Map[Int, String],
    Seq[JoinEdge], Seq[LogicalPlan], FlagSet, Seq[Seq[Any]], Seq[Seq[Any]])

  def unapply(plan: LogicalPlan): Option[ReturnType] = collectSelectFromWindowChild(plan)

  /** Recursively descends through Window nodes until an Aggregate or Select subtree is found. */
  def collectSelectFromWindowChild(plan: LogicalPlan): Option[ReturnType] = {
    plan match {
      case agg: Aggregate =>
        // an aggregate terminates the descent and becomes the single child
        Some((agg.aggregateExpressions, agg.child.output, Seq.empty, Map.empty, Seq.empty,
          Seq(agg), NoFlags, Seq.empty, Seq.empty))
      case ExtractSelectModule(output, input, predicate, aliasmap, joinedge, children, flags,
      fspec, wspec) =>
        Some((output, input, predicate, aliasmap, joinedge, children, flags, fspec, wspec))
      case Window(exprs, _, _, child) =>
        // prepend this window's expressions to any specs collected further down
        collectSelectFromWindowChild(child).map { r =>
          (r._1, r._2, r._3, r._4, r._5, r._6, r._7, r._8, Seq(Seq(exprs)) ++ r._9)
        }
      case _ => None
    }
  }
}
/**
* GroupByModule is extracted from the Aggregate node of logical plan.
* The groupingExpressions, aggregateExpressions are collected.
*
* The returned values for this match are as follows:
* - Grouping attributes for the Aggregate node.
* - Aggregates for the Aggregate node.
* - Project list for project
*
*/
object ExtractGroupByModule extends PredicateHelper {
  // (aggregate expressions, child outputs, grouping expressions, alias,
  //  child plan, flags, expand-related specs)
  type ReturnType = (Seq[NamedExpression], Seq[Expression], Seq[Expression], Option[String],
    LogicalPlan, FlagSet, Seq[Seq[Any]])
  /** Matches an Aggregate node; grouping sets (Aggregate over Expand) get the EXPAND flag. */
  def unapply(plan: LogicalPlan): Option[ReturnType] = {
    plan match {
      case a@logical.Aggregate(_, _, e@Expand(_, _, p: Project)) if isGroupingSet(a, e, p) =>
        // Assumption: Aggregate's groupingExpressions is composed of
        // 1) the grouping attributes
        // 2) gid, which is always the last one
        val g = a.groupingExpressions.map(_.asInstanceOf[Attribute])
        // columns of the Expand output that precede the grouping attributes
        val numOriginalOutput = e.output.size - g.size
        Some(
          a.aggregateExpressions,
          e.output,
          a.groupingExpressions,
          None,
          p,
          NoFlags.setFlag(EXPAND),
          Seq(Seq(e.projections, e.output, numOriginalOutput)))
      case logical.Aggregate(groupingExpressions, aggregateExpressions, child) =>
        // plain GROUP BY without grouping sets
        Some(
          aggregateExpressions,
          child.output,
          groupingExpressions,
          None,
          child,
          NoFlags,
          Seq.empty)
      case other => None
    }
  }
  // true when the Aggregate/Expand/Project triple is a grouping-set expansion:
  // the trailing Expand output columns must match the aggregate's grouping attributes
  private def isGroupingSet(a: Aggregate, e: Expand, p: Project): Boolean = {
    assert(a.child == e && e.child == p)
    if (a.groupingExpressions.forall(_.isInstanceOf[Attribute])) {
      val g = a.groupingExpressions.map(_.asInstanceOf[Attribute])
      sameOutput(
        e.output.drop(e.output.size - g.size),
        a.groupingExpressions.map(_.asInstanceOf[Attribute]))
    } else {
      false
    }
  }
  // element-wise semantic equality of two attribute lists
  private def sameOutput(output1: Seq[Attribute], output2: Seq[Attribute]): Boolean = {
    output1.size == output2.size &&
    output1.zip(output2).forall(pair => pair._1.semanticEquals(pair._2))
  }
}
/**
 * Matches a Union node, flattening arbitrarily nested unions into a flat child list.
 */
object ExtractUnionModule extends PredicateHelper {
  type ReturnType = (Seq[LogicalPlan], FlagSet, Seq[Seq[Any]])

  def unapply(plan: LogicalPlan): Option[ReturnType] = plan match {
    case u: Union => Some((collectUnionChildren(u), NoFlags, Seq.empty))
    case _        => None
  }

  /** Recursively replaces nested Union nodes by their leaves, preserving child order. */
  private def collectUnionChildren(plan: LogicalPlan): List[LogicalPlan] = plan match {
    case Union(children) => children.toList.flatMap(collectUnionChildren)
    case leaf            => leaf :: Nil
  }
}
/**
 * Matches a leaf relation, extracting its (database, table, output) triple.
 */
object ExtractTableModule extends PredicateHelper {
  type ReturnType = (String, String, Seq[NamedExpression], Seq[LogicalPlan], FlagSet, Seq[Seq[Any]])

  def unapply(plan: LogicalPlan): Option[ReturnType] = {
    plan match {
      // uncomment for cloudera1 version
      // case m: CatalogRelation =>
      //   Some(m.tableMeta.database, m.tableMeta.identifier.table, m.output, Nil, NoFlags,
      //     Seq.empty)
      // uncomment for apache version
      case hive: HiveTableRelation =>
        Some((hive.tableMeta.database, hive.tableMeta.identifier.table, hive.output, Nil, NoFlags,
          Seq.empty))
      case relation: LogicalRelation =>
        // database/table may be undefined (e.g. no catalog table); callers expect nulls then
        val identifier = relation.catalogTable.map(_.identifier)
        val database = identifier.flatMap(_.database).orNull
        val table = identifier.map(_.table).orNull
        Some((database, table, relation.output, Nil, NoFlags, Seq.empty))
      case local: LocalRelation => // used for unit test
        Some((null, null, local.output, Nil, NoFlags, Seq.empty))
      case _ => None
    }
  }
}
| sgururajshetty/carbondata | datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Logical2ModularExtractions.scala | Scala | apache-2.0 | 14,187 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.worker
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.pattern.{ask, pipe}
import com.datastax.driver.core.Cluster
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.Map
import scala.concurrent.{Await, Promise}
import scala.util.{Failure, Success}
import tdb.Adjustable
import tdb.Constants._
import tdb.datastore._
import tdb.list._
import tdb.messages._
import tdb.stats.Stats
import tdb.util._
object Worker {
  /** Props factory: builds a Worker actor from its registration info and the master's ref. */
  def props(info: WorkerInfo, masterRef: ActorRef) =
    Props(classOf[Worker], info, masterRef)
}
/**
 * Worker actor: registers itself with the master on startup, then creates tasks
 * and datastore/modifier actors on demand.
 */
class Worker(_info: WorkerInfo, masterRef: ActorRef)
  extends Actor with ActorLogging {
  import context.dispatcher
  log.info("Worker launched.")
  // Registers with the master synchronously during actor construction; for the
  // "cassandra" store type also opens the cluster connection (closed in postStop).
  // NOTE(review): blocking Await inside a constructor -- startup fails if the
  // master does not answer within DURATION
  private val info = {
    val message = RegisterWorkerMessage(_info)
    val returnedInfo =
      Await.result((masterRef ? message).mapTo[WorkerInfo], DURATION)
    returnedInfo.storeType match {
      case "cassandra" =>
        val cluster = Cluster.builder()
          .addContactPoint(returnedInfo.ip)
          .build()
        val metadata = cluster.getMetadata()
        printf("Connected to cluster: %s\\n",
               metadata.getClusterName())
        for (host <- metadata.getAllHosts()) {
          printf("Datatacenter: %s; Host: %s; Rack: %s\\n",
                 host.getDatacenter(), host.getAddress(), host.getRack())
        }
        returnedInfo.copy(cluster = cluster)
      case _ => returnedInfo
    }
  }
  // datastore/modifier actors created by this worker, keyed by datastore id
  private val datastores = Map[TaskId, ActorRef]()
  def receive = {
    case PebbleMessage(taskId: TaskId, modId: ModId) =>
      sender ! "done"
    // spawns a Task actor named after its id and returns its ref
    case CreateTaskMessage(taskId: TaskId, parentId: TaskId) =>
      val taskProps = Task.props(
        taskId, info.mainDatastoreId, parentId, masterRef)
      val taskRef = context.actorOf(taskProps, taskId + "")
      sender ! taskRef
    // picks the modifier actor implementation based on the list configuration type
    case CreateDatastoreMessage(listConf, datastoreId: TaskId, thisRange, recovery) =>
      val modifierRef = listConf match {
        case null =>
          context.actorOf(DatastoreActor.props(info, datastoreId))
        case aggregatorConf: AggregatorListConf =>
          context.actorOf(AggregatorModifierActor.props(
            aggregatorConf, info, datastoreId, masterRef, recovery))
        case columnConf: ColumnListConf =>
          /*if (columnConf.chunkSize > 1)
            context.actorOf(ColumnChunkModifierActor.props(
              columnConf, info, datastoreId, thisRange))
          else*/
          context.actorOf(ColumnModifierActor.props(
            columnConf, info, datastoreId, thisRange))
        case _ =>
          context.actorOf(ModifierActor.props(
            listConf, info, datastoreId, thisRange, masterRef, recovery))
      }
      datastores(datastoreId) = modifierRef
      sender ! modifierRef
    // splits an input file into partitions; in "fast" mode the split is reused if present
    case SplitFileMessage(dir: String, fileName: String, partitions: Int) =>
      if (tdb.examples.Experiment.fast) {
        if (!OS.exists(dir)) {
          OS.mkdir(dir)
          tdb.scripts.Split.main(Array(
            "-d", dir,
            "-f", fileName,
            "-p", partitions + ""))
        }
      } else {
        // non-fast mode always regenerates the split from scratch
        if (OS.exists(dir)) {
          OS.rmdir(dir)
        }
        OS.mkdir(dir)
        tdb.scripts.Split.main(Array(
          "-d", dir,
          "-f", fileName,
          "-p", partitions + ""))
      }
      sender ! "done"
    // delegates mod creation to the main datastore, forwarding the reply
    case CreateModMessage(value) =>
      (datastores(info.mainDatastoreId) ? CreateModMessage(value)) pipeTo sender
    // stops all datastore actors and resets local state
    case ClearMessage =>
      for ((taskId, datastoreRef) <- datastores) {
        context.stop(datastoreRef)
      }
      datastores.clear()
      Stats.clear()
      sender ! "done"
    case "ping" => sender ! "done"
    case x => println("Worker received unhandled message " + x)
  }
  override def postStop() {
    // release the cassandra connection if one was opened during registration
    if (info.cluster != null) {
      info.cluster.close()
    }
  }
}
| twmarshall/tdb | core/src/main/scala/tdb/worker/Worker.scala | Scala | apache-2.0 | 4,550 |
// Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.spindle.rogue
import io.fsq.field.Field
import io.fsq.spindle.runtime.{FieldDescriptor, MetaRecord, Record, StructFieldDescriptor}
/**
 * A queryable sub-field of an embedded struct: its name is the dotted path
 * combining the parent struct field's name with the sub-field's name.
 */
class SpindleIndexSubField[R <: Record[R], MR <: MetaRecord[R, MR], ER <: Record[ER], EM <: MetaRecord[ER, EM]](
    parent: StructFieldDescriptor[R, MR, ER, EM],
    subField: FieldDescriptor[_, ER, EM]
) extends Field[ER, MR] {
  // "<parent>.<child>" path
  def name = s"${parent.name}.${subField.name}"
  // ownership is delegated to the enclosing struct field
  def owner = parent.owner
}
| foursquare/fsqio | src/jvm/io/fsq/spindle/rogue/SpindleMongoIndex.scala | Scala | apache-2.0 | 511 |
package chrome.tabs.bindings
import chrome.windows.bindings.Window
import scala.scalajs.js
object TabCreateProperties {

  /**
   * Builds a tab-creation properties object for chrome.tabs.create; every
   * field defaults to undefined.
   */
  def apply(
      windowId: js.UndefOr[Window.Id] = js.undefined,
      index: js.UndefOr[Int] = js.undefined,
      url: js.UndefOr[String] = js.undefined,
      active: js.UndefOr[Boolean] = js.undefined,
      pinned: js.UndefOr[Boolean] = js.undefined,
      openerTabId: js.UndefOr[Tab.Id] = js.undefined): TabCreateProperties = {
    // build the raw JS object first, then view it through the facade trait
    val props = js.Dynamic.literal(
      windowId = windowId,
      index = index,
      url = url,
      active = active,
      pinned = pinned,
      openerTabId = openerTabId
    )
    props.asInstanceOf[TabCreateProperties]
  }
}
// Facade over the chrome.tabs create-properties JS object; members are read from JS.
@js.native
trait TabCreateProperties extends js.Object {
  def windowId: js.UndefOr[Window.Id] = js.native
  def index: js.UndefOr[Int] = js.native
  def url: js.UndefOr[String] = js.native
  def active: js.UndefOr[Boolean] = js.native
  def pinned: js.UndefOr[Boolean] = js.native
  def openerTabId: js.UndefOr[Tab.Id] = js.native
}
| lucidd/scala-js-chrome | bindings/src/main/scala/chrome/tabs/bindings/TabCreateProperties.scala | Scala | mit | 1,057 |
/*
* Copyright 2015 Matt Massie
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// GENERATED SOURCE: DO NOT EDIT.
package com.github.massie.avrotuples
import java.io._
import java.util
import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import com.esotericsoftware.kryo.io.{Input, Output}
import org.apache.avro.Schema
import org.apache.avro.generic.GenericData
import org.apache.avro.io.{DecoderFactory, EncoderFactory}
import org.apache.avro.specific.{SpecificDatumReader, SpecificDatumWriter, SpecificRecord}
import org.apache.avro.util.Utf8
// Companion for the generated 4-arity Avro tuple (GENERATED SOURCE: DO NOT EDIT).
object AvroTuple4 {
  val SCHEMA$ = AvroTupleSchemas.SCHEMAS(3)
  val FLAT_SCHEMA = AvroTupleSchemas.FLAT_SCHEMAS(3)
  // NOTE(review): shared reader/writer instances -- confirm single-threaded use
  val reader = new SpecificDatumReader[AvroTuple4[_, _, _, _]](SCHEMA$)
  val writer = new SpecificDatumWriter[AvroTuple4[_, _, _, _]](SCHEMA$)
  // Deserializes into `tuple` (a null target allocates a new record).
  def readFromInputStream(tuple: AvroTuple4[_, _, _, _], in: InputStream) = {
    AvroTuple4.reader.read(tuple, DecoderFactory.get.directBinaryDecoder(in, null))
  }
  // Serializes `tuple` as uncompressed Avro binary onto `out`.
  def writeToOutputStream(tuple: AvroTuple4[_, _, _, _], out: OutputStream) = {
    AvroTuple4.writer.write(tuple, EncoderFactory.get.directBinaryEncoder(out, null))
  }
  def fromInputStream(in: InputStream) : AvroTuple4[_, _, _, _] = {
    readFromInputStream(null.asInstanceOf[AvroTuple4[_, _, _, _]], in)
  }
  // Decodes a tuple from the Avro binary produced by toBytes.
  def fromBytes(bytes: Array[Byte]): AvroTuple4[_, _, _, _] = {
    val in = new ByteArrayInputStream(bytes)
    val tuple = fromInputStream(in)
    in.close()
    tuple
  }
}
// Generated 4-arity tuple serializable via Avro, Kryo and Java serialization
// (GENERATED SOURCE: DO NOT EDIT). Fields are transient: the Avro record
// representation (get/put) is the wire format.
final case class AvroTuple4[T1, T2, T3, T4](
  @transient var _1: T1,
  @transient var _2: T2,
  @transient var _3: T3,
  @transient var _4: T4)
  extends Product4[T1, T2, T3, T4] with SpecificRecord with KryoSerializable with Externalizable {
  // no-arg constructor required by Avro/Kryo/Externalizable
  def this() = this(null.asInstanceOf[T1],
    null.asInstanceOf[T2],
    null.asInstanceOf[T3],
    null.asInstanceOf[T4])
  // Mutates all four slots in place and returns this (avoids reallocation)
  def update(n1: T1, n2: T2, n3: T3, n4: T4): AvroTuple4[T1, T2, T3, T4] = {
    _1 = n1
    _2 = n2
    _3 = n3
    _4 = n4
    this
  }
  // SpecificRecord: field 0 is the single array-of-values column in the schema
  @throws(classOf[IndexOutOfBoundsException])
  override def get(i: Int): AnyRef = i match {
    case 0 => val values = new util.ArrayList[AnyRef](productArity)
      values.add(0, _1.asInstanceOf[AnyRef])
      values.add(1, _2.asInstanceOf[AnyRef])
      values.add(2, _3.asInstanceOf[AnyRef])
      values.add(3, _4.asInstanceOf[AnyRef])
      values.asInstanceOf[AnyRef]
    case _ => throw new IndexOutOfBoundsException(i.toString)
  }
  // Avro decodes strings as Utf8; normalize them back to java.lang.String
  private def utf8string(obj: Any) = obj match {
    case u: Utf8 => u.toString
    case _ => obj
  }
  @throws(classOf[IndexOutOfBoundsException])
  override def put(i: Int, v: scala.Any): Unit = i match {
    case 0 =>
      val array = v match {
        case avroArray: GenericData.Array[_]=> avroArray
        case javaArray: util.ArrayList[_]=> javaArray
      }
      assert(array.size == productArity,
        s"Tried to put ${array.size} values into AvroTuple with productArity of $productArity")
      _1 = utf8string(array.get(0)).asInstanceOf[T1]
      _2 = utf8string(array.get(1)).asInstanceOf[T2]
      _3 = utf8string(array.get(2)).asInstanceOf[T3]
      _4 = utf8string(array.get(3)).asInstanceOf[T4]
    case _ => throw new IndexOutOfBoundsException(i.toString)
  }
  override def getSchema: Schema = AvroTuple4.SCHEMA$
  override def toString: String = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")"
  // Serializes this tuple to Avro binary
  def toBytes: Array[Byte] = {
    val byteStream = new ByteArrayOutputStream()
    AvroTuple4.writeToOutputStream(this, byteStream)
    byteStream.flush()
    val bytes = byteStream.toByteArray
    byteStream.close()
    bytes
  }
  // Java serialization delegates to the Avro codec
  override def readExternal(in: ObjectInput): Unit = {
    AvroTuple4.readFromInputStream(this, ExternalizableInput(in))
  }
  override def writeExternal(out: ObjectOutput): Unit = {
    AvroTuple4.writeToOutputStream(this, ExternalizableOutput(out))
  }
  // Kryo serialization delegates to the Avro codec as well
  override def write(kryo: Kryo, output: Output): Unit = {
    AvroTuple4.writeToOutputStream(this, output.getOutputStream)
  }
  override def read(kryo: Kryo, input: Input): Unit = {
    AvroTuple4.readFromInputStream(this, input.getInputStream)
  }
} | julianpeeters/avrotuples | src/main/scala/com/github/massie/avrotuples/AvroTuple4.scala | Scala | apache-2.0 | 4,688 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.reactive.Observable
import monix.execution.exceptions.DummyException
import scala.concurrent.duration._
/** Operator suite for combineLatestMap4: three constant sources combined with a range. */
object CombineLatest4Suite extends BaseOperatorSuite {
  def waitFirst = Duration.Zero
  def waitNext = Duration.Zero

  def count(sourceCount: Int) = sourceCount

  // each of the `sourceCount` emissions adds the range element plus the
  // three constants (1 + 2 + 3 = 5)
  def sum(sourceCount: Int) =
    sourceCount * (sourceCount + 1) / 2 + (5 * sourceCount)

  def createObservable(sc: Int) = Some {
    val sourceCount = 10
    val one = Observable.now(1)
    val two = Observable.now(2)
    val three = Observable.now(3)
    val range = Observable.range(0L, sourceCount.toLong)
    val combined = Observable.combineLatestMap4(one, two, three, range)(_ + _ + _ + _)
    Sample(combined, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
  }

  def observableInError(sourceCount: Int, ex: Throwable) = Some {
    val one = Observable.now(1)
    val two = Observable.now(2)
    val three = Observable.now(3)
    // the range source fails after its last element, so one emission is lost
    val failing = createObservableEndingInError(Observable.range(0L, sourceCount.toLong), ex)
    val combined = Observable.combineLatestMap4(one, two, three, failing)(_ + _ + _ + _)
    Sample(combined, count(sourceCount - 1), sum(sourceCount - 1), waitFirst, waitNext)
  }

  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
    // the mapping function itself blows up on the final element
    val dummy = DummyException("dummy")
    val one = Observable.now(1)
    val two = Observable.now(2)
    val three = Observable.now(3)
    val range = Observable.range(0L, sourceCount.toLong)
    val combined = Observable.combineLatestMap4(one, two, three, range) { (a1, a2, a3, a4) =>
      if (a4 == sourceCount - 1) throw dummy else a1 + a2 + a3 + a4
    }
    Sample(combined, count(sourceCount - 1), sum(sourceCount - 1), waitFirst, waitNext)
  }

  override def cancelableObservables(): Seq[Sample] = {
    // all four sources are delayed so the combination can be cancelled before emitting
    val delayed = {
      val o1 = Observable.range(0, 10).delayOnNext(1.second)
      val o2 = Observable.range(0, 10).delayOnNext(1.second)
      val o3 = Observable.range(0, 10).delayOnNext(1.second)
      val o4 = Observable.range(0, 10).delayOnNext(1.second)
      Observable.combineLatestMap4(o1, o2, o3, o4)(_ + _ + _ + _)
    }
    Seq(Sample(delayed, 0, 0, 0.seconds, 0.seconds))
  }
}
| alexandru/monifu | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/CombineLatest4Suite.scala | Scala | apache-2.0 | 2,827 |
package mist.api.jdsl
import java.util
import mist.api._
import mist.api.FnContext
import mist.api.data.JsMap
import mist.api.encoding.defaultEncoders._
import mist.api.encoding.JsSyntax._
import org.scalatest.{FunSpec, Matchers}
import org.scalatest.prop.TableDrivenPropertyChecks._
/** Table-driven spec for the Java-facing argument definitions (JArgsDef). */
class JArgDefSpec extends FunSpec with Matchers {
  import JArgsDef._
  // canonical extraction failure used as the "expected miss" marker in the table
  def miss: Failed = Failed.InternalError("fail")
  // builds a java.util.List literal for the list-argument expectations
  def javaList[T](values: T*): java.util.List[T] = {
    val list = new util.ArrayList[T]()
    values.foreach(v => list.add(v))
    list
  }
  // (argument definition, input params, expected extraction result)
  val expected = Table[JUserArg[_], JsMap, Extraction[_]](
    ("arg", "data", "expected"),
    (booleanArg("b"), JsMap("b" -> true.js), Extracted(true)),
    (booleanArg("b", false), JsMap.empty, Extracted(false)),
    (booleanArg("b", false), JsMap("b" -> true.js), Extracted(true)),
    (booleanArg("b"), JsMap.empty, miss),
    (optBooleanArg("b"), JsMap("b" -> true.js), Extracted(java.util.Optional.of(true))),
    (optBooleanArg("b"), JsMap.empty, Extracted(java.util.Optional.empty())),
    (intArg("n"), JsMap("n" -> 2.js), Extracted(2)),
    (intArg("n", 0), JsMap.empty, Extracted(0)),
    (intArg("n"), JsMap.empty, miss),
    (optIntArg("n"), JsMap("n" -> 42.js), Extracted(java.util.Optional.of(42))),
    (optIntArg("n"), JsMap.empty, Extracted(java.util.Optional.empty())),
    (stringArg("s"), JsMap("s" -> "value".js), Extracted("value")),
    (stringArg("s", "value"), JsMap.empty, Extracted("value")),
    (stringArg("s"), JsMap.empty, miss),
    (optStringArg("s"), JsMap("s" -> "yoyo".js), Extracted(java.util.Optional.of("yoyo"))),
    (optStringArg("s"), JsMap.empty, Extracted(java.util.Optional.empty())),
    (doubleArg("d"), JsMap("d" -> 2.4.js), Extracted(2.4)),
    (doubleArg("d", 2.2), JsMap.empty, Extracted(2.2)),
    (doubleArg("d"), JsMap.empty, miss),
    (optDoubleArg("d"), JsMap("d" -> 42.1.js), Extracted(java.util.Optional.of(42.1))),
    (optDoubleArg("d"), JsMap.empty, Extracted(java.util.Optional.empty())),
    (intListArg("ints"), JsMap("ints" -> Seq(1,2,3).js), Extracted(javaList(1, 2, 3))),
    (doubleListArg("doubles"), JsMap("doubles" -> Seq(1.1,2.2,3.3).js), Extracted(javaList(1.1, 2.2, 3.3))),
    (stringListArg("strings"), JsMap("strings" -> Seq("a", "b", "c").js), Extracted(javaList("a", "b", "c"))),
    (booleanListArg("boolean"), JsMap("boolean" -> Seq(true, false).js), Extracted(javaList(true, false)))
  )
  it("should extract expected result") {
    forAll(expected) { (arg, params, expected) =>
      val ctx = FnContext.onlyInput(params)
      val result = arg.asScala.extract(ctx)
      // only compare exact values for successes; any failure matches any expected failure
      (expected, result) match {
        case (extr: Extracted[_], res: Extracted[_]) => res shouldBe extr
        case (extr: Extracted[_], res: Failed) => fail(s"for $arg got $res, expected $extr")
        case (extr: Failed, res: Extracted[_]) => fail(s"for $arg got $res, expected $extr")
        case (extr: Failed, res: Failed) =>
      }
    }
  }
  it("should validate") {
    // validated() should pass values satisfying the predicate and fail the rest
    val arg = intArg("a").validated(new Func1[java.lang.Integer, java.lang.Boolean] {
      override def apply(a1: java.lang.Integer): java.lang.Boolean = a1 > 2
    }).asScala
    arg.extract(FnContext.onlyInput(JsMap("a" -> 5.js))) shouldBe Extracted(5)
    arg.extract(FnContext.onlyInput(JsMap("a" -> 1.js))) shouldBe a[Failed]
  }
}
| Hydrospheredata/mist | mist-lib/src/test/scala/mist/api/jdsl/JArgDefSpec.scala | Scala | apache-2.0 | 3,508 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
/**
* An iterator that wraps around an existing iterator to provide task killing functionality.
* It works by checking the interrupted flag in [[TaskContext]].
*/
class InterruptibleIterator[+T](val context: TaskContext, val delegate: Iterator[T])
  extends Iterator[T] {
  // Cancellation is observed per element: each hasNext call re-reads the task's
  // interrupted flag, so iteration stops at the next record once a kill arrives.
  def hasNext: Boolean = !context.interrupted && delegate.hasNext
  // NOTE(review): next() does not re-check the flag itself; a caller that skips
  // hasNext can still drain the delegate — confirm that is acceptable here.
  def next(): T = delegate.next()
}
| dotunolafunmiloye/spark | core/src/main/scala/org/apache/spark/InterruptibleIterator.scala | Scala | apache-2.0 | 1,207 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.utils
import eu.cdevreeze.yaidom.core.EName
import eu.cdevreeze.yaidom.core.QName
import eu.cdevreeze.yaidom.core.Scope
import eu.cdevreeze.yaidom.parse.DocumentParserUsingSax
import eu.cdevreeze.yaidom.queryapi.ClarkElemApi.withEName
import eu.cdevreeze.yaidom.resolved
import eu.cdevreeze.yaidom.simple
import org.scalatest.funsuite.AnyFunSuite
/**
* ClarkElem test case.
*
* @author Chris de Vreeze
*/
class ClarkElemTest extends AnyFunSuite {
  // SAX-based parser used to load the sample XBRL instance from the classpath.
  private val docParser = DocumentParserUsingSax.newInstance()
  // Builds an XBRL instance programmatically and checks it equals the matching
  // subset of the parsed sample instance (ignoring whitespace and prefixes).
  test("testCreateXbrlInstance") {
    val instance = createInstance()
    import scope._
    assertResult(QName("xbrl").res) {
      instance.resolvedName
    }
    assertResult(Some("1000")) {
      instance.findElem(withEName(QName("gaap:CashAndCashEquivalents").res)).map(_.text)
    }
    val uri = classOf[ClarkElemTest].getResource("sample-xbrl-instance.xml").toURI
    val parsedInstance = ClarkNode.Elem.from(docParser.parse(uri).documentElement)
    // Keep only the schemaRef, context I-2007, unit U-Monetary and the fact
    // referring to both; drop all other children and the xsi:schemaLocation attribute.
    val filteredInstance = parsedInstance.minusAttribute(QName("xsi:schemaLocation").res) transformChildElemsToNodeSeq {
      case e@ClarkNode.Elem(EName(_, "schemaRef"), _, _) =>
        Vector(e)
      case e@ClarkNode.Elem(EName(_, "context"), _, _) if e.attributeOption(EName("id")).contains("I-2007") =>
        Vector(e)
      case e@ClarkNode.Elem(EName(_, "unit"), _, _) if e.attributeOption(EName("id")).contains("U-Monetary") =>
        Vector(e)
      case e@ClarkNode.Elem(EName(_, "CashAndCashEquivalents"), _, _) if e.attributeOption(EName("contextRef")).contains("I-2007") &&
        e.attributeOption(EName("unitRef")).contains("U-Monetary") =>
        Vector(e)
      case e =>
        Vector()
    }
    // Compare as resolved elements so namespace prefixes and ignorable whitespace do not matter.
    assertResult(resolved.Elem.from(filteredInstance).removeAllInterElementWhitespace.coalesceAndNormalizeAllText) {
      resolved.Elem.from(instance).removeAllInterElementWhitespace.coalesceAndNormalizeAllText
    }
  }
  // Round-trips the parsed sample: simple -> Clark -> simple conversion with an
  // invertible scope must preserve the resolved element tree.
  test("testConvertXbrlInstance") {
    val uri = classOf[ClarkElemTest].getResource("sample-xbrl-instance.xml").toURI
    val parsedInstance = docParser.parse(uri).documentElement
    val clarkInstance = ClarkNode.Elem.from(parsedInstance)
    // Replace the default namespace by prefix 'xbrli' so the scope becomes invertible.
    val adaptedScope =
      scope.withoutDefaultNamespace ++ Scope.from("xbrli" -> "http://www.xbrl.org/2003/instance")
    assertResult(adaptedScope) {
      adaptedScope.withoutDefaultNamespace.makeInvertible
    }
    val convertedInstance = simple.Elem.from(clarkInstance, adaptedScope)
    assertResult(resolved.Elem.from(parsedInstance)) {
      resolved.Elem.from(clarkInstance)
    }
    assertResult(resolved.Elem.from(convertedInstance)) {
      resolved.Elem.from(clarkInstance)
    }
  }
  // Creates the xbrli context element: entity (with dimensional segment) plus
  // an instant period, id "I-2007".
  private def createContext(): ClarkNode.Elem = {
    import ClarkNode.Node._
    import scope._
    val identifier =
      textElem(QName("identifier").res, Vector(EName("scheme") -> "http://www.sec.gov/CIK"), "1234567890")
    val segment =
      emptyElem(QName("segment").res)
        .plusChildren(
          Vector(
            textElem(QName("xbrldi:explicitMember").res, Vector(EName("dimension") -> "gaap:EntityAxis"), "gaap:ABCCompanyDomain"),
            textElem(QName("xbrldi:explicitMember").res, Vector(EName("dimension") -> "gaap:BusinessSegmentAxis"), "gaap:ConsolidatedGroupDomain"),
            textElem(QName("xbrldi:explicitMember").res, Vector(EName("dimension") -> "gaap:VerificationAxis"), "gaap:UnqualifiedOpinionMember"),
            textElem(QName("xbrldi:explicitMember").res, Vector(EName("dimension") -> "gaap:PremiseAxis"), "gaap:ActualMember"),
            textElem(QName("xbrldi:explicitMember").res, Vector(EName("dimension") -> "gaap:ReportDateAxis"), "gaap:ReportedAsOfMarch182008Member")))
    val entity = elem(QName("entity").res, Vector(identifier, segment))
    val period =
      emptyElem(QName("period").res)
        .plusChild(textElem(QName("instant").res, "2007-12-31"))
    emptyElem(QName("context").res)
      .plusAttribute(EName("id"), "I-2007")
      .plusChildren(Vector(entity, period))
  }
  // Creates the monetary unit element (USD), id "U-Monetary".
  private def createUnit(): ClarkNode.Elem = {
    import ClarkNode.Node._
    import scope._
    // Mind the prefix 'iso427' below. If we convert this to a simple Elem,
    // we need a Scope that includes that prefix!
    emptyElem(QName("unit").res)
      .plusAttribute(EName("id"), "U-Monetary")
      .plusChild(
        textElem(QName("measure").res, "iso4217:USD"))
  }
  // Creates the single fact, tied to context "I-2007" and unit "U-Monetary".
  private def createFact(): ClarkNode.Elem = {
    import ClarkNode.Node._
    import scope._
    textElem(QName("gaap:CashAndCashEquivalents").res, "1000")
      .plusAttribute(EName("id"), "Item-01")
      .plusAttribute(EName("contextRef"), "I-2007")
      .plusAttribute(EName("unitRef"), "U-Monetary")
      .plusAttribute(EName("decimals"), "INF")
  }
  // Assembles the complete xbrl root: schemaRef + context + unit + fact.
  private def createInstance(): ClarkNode.Elem = {
    import ClarkNode.Node._
    import scope._
    val schemaRef =
      emptyElem(QName("link:schemaRef").res)
        .plusAttribute(QName("xlink:type").res, "simple")
        .plusAttribute(QName("xlink:href").res, "gaap.xsd")
    emptyElem(QName("xbrl").res)
      .plusChild(schemaRef)
      .plusChild(createContext())
      .plusChild(createUnit())
      .plusChild(createFact())
  }
  // Namespace scope used to resolve all QNames above (default ns = xbrli).
  private val scope = Scope.from(
    "" -> "http://www.xbrl.org/2003/instance",
    "xlink" -> "http://www.w3.org/1999/xlink",
    "link" -> "http://www.xbrl.org/2003/linkbase",
    "gaap" -> "http://xasb.org/gaap",
    "xsi" -> "http://www.w3.org/2001/XMLSchema-instance",
    "iso4217" -> "http://www.xbrl.org/2003/iso4217",
    "xbrldi" -> "http://xbrl.org/2006/xbrldi")
}
| dvreeze/yaidom | jvm/src/test/scala/eu/cdevreeze/yaidom/utils/ClarkElemTest.scala | Scala | apache-2.0 | 6,217 |
package org.scalajs.jasmine
import scala.scalajs.js
/** Scala.js facade for the Jasmine environment object; members map onto the
  * underlying JavaScript API (hence the js.native bodies).
  */
trait JasmineEnv extends js.Object {
  def Clock: JasmineEnv.Clock = js.native
}
object JasmineEnv {
  /** Facade for Jasmine's mock clock. */
  trait Clock extends js.Object {
    // Advances the mocked clock by `time` (Jasmine's tick takes milliseconds).
    def tick(time: Double): Unit = js.native
    // Installs the mock clock for the current spec.
    def useMock(): Unit = js.native
  }
}
| jmnarloch/scala-js | jasmine-test-framework/src/main/scala/org/scalajs/jasmine/JasmineEnv.scala | Scala | bsd-3-clause | 277 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Sat Oct 8 12:37:32 EDT 2016
* @see LICENSE (MIT style license file).
*
* @see www.jstatsoft.org/article/view/v051i04/v51i04.pdf
*/
package scalation.calculus
import scala.math.sqrt
import scalation.math.{double_exp, FunctionS2S, int_exp}
import scalation.util.banner
import Integral.β«
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Hilbert` class provides operators to add, subtract, mutiply, divide and
* raise functions. Given two functions, 'f' and 'g', a new function is created.
* It also provides methods for computing dot/inner products, norms and
* distances for functions defined in Hilbert Space.
* On interval [a, b]
* <p>
* Lp-norm (f) = [ β«f(t)^p dt ]^1/p
* <p>
* @see implicit conversion 'functionS2S2Hilbert' in `package.scala`
* @param f the function to convert into a Hilbert function
*/
class Hilbert (f: FunctionS2S)
{
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Negate the function 'f' (unary minus), returning a new function.
     */
    def unary_- = (x: Double) => -f(x)
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add function 'f' and function 'g', returning a new function.
     *  @param g  the other function
     */
    def + (g: FunctionS2S) = (x: Double) => f(x) + g(x)      // function of x
    def + (g: Double)      = (x: Double) => f(x) + g         // constant function
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** From function 'f' subtract function 'g', returning a new function.
     *  @param g  the other function
     */
    def - (g: FunctionS2S) = (x: Double) => f(x) - g(x)
    def - (g: Double)      = (x: Double) => f(x) - g
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply function 'f' by function 'g', returning a new function.
     *  @param g  the other function
     */
    def * (g: FunctionS2S) = (x: Double) => f(x) * g(x)
    def * (g: Double)      = (x: Double) => f(x) * g
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Divide function 'f' by function 'g', returning a new function.
     *  @param g  the other function
     */
    def / (g: FunctionS2S) = (x: Double) => f(x) / g(x)
    def / (g: Double)      = (x: Double) => f(x) / g
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Raise function 'f' to the 'p'th power, returning a new function.
     *  @param p  the integer-valued power/exponent
     */
    def ~^ (p: Int) = (x: Double) => f(x) ~^ p
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Raise function 'f' to the 'p'th power, returning a new function.
     *  @param p  the power/exponent
     */
    def ~^ (p: Double) = (x: Double) => f(x) ~^ p
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the dot/inner product of functions 'f' and 'g': β« f(t)g(t) dt
     *  over [a, b].
     *  @param g  the other function
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def dot (g: FunctionS2S, a: Double = 0.0, b: Double = 1.0): Double =
    {
        β« ((a, b), f * g)
    } // dot
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the L2 norm squared of function 'f': β« f(t)^2 dt over [a, b].
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def normSq (a: Double = 0.0, b: Double = 1.0): Double =
    {
        β« ((a, b), f ~^ 2)
    } // normSq
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the L2 norm of function 'f'.
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def norm (a: Double = 0.0, b: Double = 1.0): Double =
    {
        sqrt (normSq (a, b))
    } // norm
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Lp norm raised to the 'p'th power of function 'f':
     *  β« f(t)^p dt over [a, b] (equals the L2 norm squared when p = 2).
     *  @param p  the level, e.g., 1, 2, ...
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def normSq_p (p: Int, a: Double = 0.0, b: Double = 1.0): Double =
    {
        β« ((a, b), f ~^ p)
    } // normSq_p
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Lp norm of function 'f': [ β« f(t)^p dt ]^1/p.
     *  @param p  the level, e.g., 1, 2, ...
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def norm_p (p: Int, a: Double = 0.0, b: Double = 1.0): Double =
    {
        normSq_p (p, a, b) ~^ (1.0 / p.toDouble)
    } // normP
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the distance in L2 space between function 'f' and function 'g'.
     *  @param g  the other function
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def dist (g: FunctionS2S, a: Double = 0.0, b: Double = 1.0): Double =
    {
        (f - g).norm (a, b)
    } // dist
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the distance in Lp space between function 'f' and function 'g'.
     *  @param g  the other function
     *  @param p  the level, e.g., 1, 2, ...
     *  @param a  the start of the interval
     *  @param b  the end of the interval
     */
    def dist_p (g: FunctionS2S, p: Int, a: Double = 0.0, b: Double = 1.0): Double =
    {
        (f - g).norm_p (p, a, b)
    } // dist_p
} // Hilbert class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `HilbertTest` object is used to test the `Hilbert` class.
* > run-main scalation.calculus.HilbertTest
*/
object HilbertTest extends App
{
    banner ("functions: f(x) = 2t, g(x) = t^2")
    val f = (t: Double) => 2.0 * t              // function definition
    def g (t: Double) = t * t                   // method definition - `g _` converts unapplied method to function
    // Unapplied methods are only converted to functions when a function type is expected.
    // You can make this conversion explicit by writing `g _` or `g(_)` instead of `g`.
    banner ("functional operators")
    // h relies on the implicit conversion FunctionS2S -> Hilbert for the operators
    val h = f * 2 + g _                         // define a new function h
    println ("f(1)         = " + f(1))
    println ("g(1)         = " + g(1))
    println ("(-f)(1)      = " + (-f)(1))
    println ("(f + g)(1)   = " + (f + g _)(1))
    println ("(f - g)(1)   = " + (f - g _)(1))
    println ("(f * g)(1)   = " + (f * g _)(1))
    println ("(f / g)(1)   = " + (f / g _)(1))
    println ("(f ~^ 2)(1)  = " + (f ~^ 2)(1))
    println ("h(1)         = " + h(1))
    banner ("dot product")
    println ("f dot f = " + (f dot f))
    println ("f dot g = " + (f dot g))
    banner ("norm (f)")
    println ("L2 norm = " + f.norm ())
    println ("L1 norm = " + f.norm_p (1))
    println ("L2 norm = " + f.norm_p (2))
    println ("L3 norm = " + f.norm_p (3))
    banner ("dist (f, g)")
    println ("L2 distance = " + f.dist (g))
    println ("L1 distance = " + f.dist_p (g, 1))
    println ("L2 distance = " + f.dist_p (g, 2))
    println ("L3 distance = " + f.dist_p (g, 3))
} // HilbertTest object
| scalation/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/calculus/Hilbert.scala | Scala | mit | 7,577 |
package it.mighe.ssbi.instructions
import it.mighe.ssbi.Instruction
import it.mighe.ssbi.Tape
/** Brainfuck-style instruction that moves the tape's data pointer by a fixed
  * offset (several `>`/`<` ops folded into one step).
  * @param offset signed pointer displacement; positive moves right
  */
class AdjustPointerInstruction(val offset: Int) extends Instruction {
  // Mutates the tape, then hands control to the following instruction
  // (`next` is inherited from Instruction).
  override def execute(tape: Tape): Instruction = {
    tape.adjustPointer(offset)
    next
  }
  override def toString = {
    s"AdjustPointerInstruction(offset: $offset)"
  }
}
/*
Copyright 2013 Ilya Lakhin (ΠΠ»ΡΡ ΠΠ»Π΅ΠΊΡΠ°Π½Π΄ΡΠΎΠ²ΠΈΡ ΠΠ°Ρ
ΠΈΠ½)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package name.lakhin.eliah.projects
package papacarlo.utils
/** A half-open integer interval [from, until). A value with from > until is
  * treated as the "undefined" range: most operations leave the receiver (or
  * the data being operated on) unchanged in that case.
  */
final case class Bounds(from: Int, until: Int) {
  /** True when this value denotes a real (possibly empty) interval. */
  val defined = from <= until

  /** Number of covered positions; negative when undefined. */
  val length = until - from

  /** Iterates each covered position; empty for undefined ranges. */
  def iterator = {
    val positions = if (defined) from until until else 0 until 0
    positions.iterator
  }

  /** The endpoints as a (from, until) tuple. */
  def pair = from -> until

  /** Transforms the endpoints independently; no-op on undefined ranges. */
  def map(from: Int => Int, until: Int => Int) =
    if (!defined) this
    else Bounds(from(this.from), until(this.until))

  /** Transforms both endpoints with one function; no-op on undefined ranges. */
  def map(offset: Int => Int) =
    if (!defined) this
    else Bounds(offset(this.from), offset(this.until))

  /** Smallest range covering both this range and the given position. */
  def union(point: Int) =
    if (!defined) Bounds(point, point + 1)
    else Bounds(point min from, (point + 1) max until)

  /** Smallest range covering both ranges; an undefined operand is ignored. */
  def union(another: Bounds) =
    if (!defined) another
    else if (!another.defined) this
    else Bounds(from min another.from, until max another.until)

  /** Adjusts this range for `injection` being inserted into the underlying
    * sequence: shifts/widens so the previously covered content stays covered.
    */
  def inject(injection: Bounds) =
    if (!defined) injection
    else if (!injection.defined) this
    else if (injection.from <= from) Bounds(injection.from, until + injection.length)
    else if (injection.from <= until) Bounds(from, until + injection.length)
    else Bounds(from, injection.until)

  /** Adjusts this range for `another` being removed from the underlying
    * sequence: shifts left and/or shrinks by the overlap.
    */
  def takeout(another: Bounds) =
    if (!defined || !another.defined) this
    else if (until <= another.from) this
    else if (another.until <= from) Bounds(from - another.length, until - another.length)
    else if (from <= another.from) Bounds(from, (until - another.length) max another.from)
    else Bounds(another.until, until - another.from)

  /** Widens the range by the given radii on each side; no-op when undefined. */
  def enlarge(leftRadius: Int, rightRadius: Int) =
    if (!defined) this
    else Bounds(from - leftRadius, until + rightRadius)

  /** Moves both endpoints by `offset`; no-op when undefined. */
  def shift(offset: Int) =
    if (!defined) this
    else Bounds(from + offset, until + offset)

  /** The covered slice of `seq`; the whole sequence when undefined. */
  def slice[A](seq: IndexedSeq[A]) =
    if (!defined) seq else seq.slice(from, until)

  /** The covered slice of `seq`; the whole list when undefined. */
  def slice[A](seq: List[A]) =
    if (!defined) seq else seq.slice(from, until)

  /** The covered substring; the whole string when undefined. */
  def substring(string: String) =
    if (!defined) string else string.substring(from, until)

  /** Replaces the covered slice of `target` by `replacement`. */
  def replace[A](target: IndexedSeq[A], replacement: Seq[A]) =
    if (!defined) target
    else target.take(from) ++ replacement ++ target.drop(until)

  /** Replaces the covered slice of `target` by `replacement`. */
  def replace[A](target: List[A], replacement: List[A]) =
    if (!defined) target
    else target.take(from) ::: replacement ::: target.drop(until)

  /** Replaces the covered substring of `target` by `replacement`. */
  def replace[A](target: String, replacement: String) =
    if (!defined) target
    else target.substring(0, from) + replacement + target.substring(until)

  /** True when a defined range covers `value` (from inclusive, until exclusive). */
  def includes(value: Int) = defined && from <= value && value < until

  /** True when two defined ranges share at least one position. */
  def intersects(another: Bounds) =
    defined && another.defined && from < another.until && another.from < until

  /** Like intersects, but also true when the ranges merely abut. */
  def touches(another: Bounds) =
    defined && another.defined && from <= another.until && another.from <= until
}
/** Factory helpers for common [[Bounds]] shapes. */
object Bounds {
  /** The canonical undefined (empty/invalid) range. */
  val undefined: Bounds = Bounds(0, -1)

  /** A range covering exactly the single given position. */
  def point(position: Int): Bounds = Bounds(position, position + 1)

  /** A zero-width range (an insertion point) at the given position. */
  def cursor(position: Int): Bounds = Bounds(position, position)
}
| Eliah-Lakhin/papa-carlo | src/main/scala/name.lakhin.eliah.projects/papacarlo/utils/Bounds.scala | Scala | apache-2.0 | 3,618 |
package com.bostontechnologies.quickfixs.fields
import com.bostontechnologies.quickfixs.components.Party
import quickfix.{Group, Message}
import quickfix.field.{PartyID, NoPartyIDs}
import scala.collection.JavaConversions._
/** Enrichment mixin exposing the repeating NoPartyIDs group of a QuickFIX
  * message as Scala-friendly Party values.
  */
trait RichParties {
  // The underlying FIX message whose party groups are read/written.
  val self: Message
  // NOTE(review): getInt throws if the NoPartyIDs field is absent — confirm
  // callers only use this on messages that carry the field.
  def partyCount: Int = self.getInt(NoPartyIDs.FIELD)
  def parties: Seq[Party] = self.getGroups(NoPartyIDs.FIELD).map(Party(_))
  // Appends one party as a new repeating group on the message (mutates self).
  def +=(party: Party) {
    val group = new Group(NoPartyIDs.FIELD, PartyID.FIELD)
    group.setFields(party.toFields)
    self.addGroup(group)
  }
  // Appends every party in order (mutates self).
  def ++=(parties: Iterable[Party]) {
    parties.foreach(this += _)
  }
}
package com.twitter.finagle.thrift
import com.twitter.finagle.{Path, Dtab}
import com.twitter.finagle.tracing.{Flags, SpanId, TraceId}
import java.util.ArrayList
import org.scalatest.FunSuite
/** Unit tests for RichRequestHeader: each test builds a thrift.RequestHeader
  * and checks the corresponding typed accessor (clientId, dest, dtab, traceId).
  */
class RichRequestHeaderTest extends FunSuite {
  test("None if clientId is not set") {
    val header = new thrift.RequestHeader
    val richHeader = new RichRequestHeader(header)
    assert(None == richHeader.clientId)
  }
  test("None if clientId.name is not set") {
    val header = (new thrift.RequestHeader)
      .setClient_id(new thrift.ClientId)
    val richHeader = new RichRequestHeader(header)
    assert(None == richHeader.clientId)
  }
  test("Some(clientId)") {
    val header = (new thrift.RequestHeader)
      .setClient_id(new thrift.ClientId("foo"))
    val richHeader = new RichRequestHeader(header)
    assert(Some(ClientId("foo")) == richHeader.clientId)
  }
  // Fixed typo in the test name: "empth" -> "empty".
  test("empty path if dest is null") {
    val header = new thrift.RequestHeader
    val richHeader = new RichRequestHeader(header)
    assert(Path.empty == richHeader.dest)
  }
  test("path if dest is non-null") {
    val header = (new thrift.RequestHeader)
      .setDest("/foo")
    val richHeader = new RichRequestHeader(header)
    assert(Path.read("/foo") == richHeader.dest)
  }
  test("null dtab") {
    val header = new thrift.RequestHeader
    val richHeader = new RichRequestHeader(header)
    assert(Dtab.empty == richHeader.dtab)
  }
  test("non-null dtab") {
    val delegations = new ArrayList[thrift.Delegation]
    delegations.add(new thrift.Delegation("/foo", "/bar"))
    val header = (new thrift.RequestHeader)
      .setDelegations(delegations)
    val richHeader = new RichRequestHeader(header)
    assert(Dtab.read("/foo=>/bar") == richHeader.dtab)
  }
  test("default traceId") {
    // An empty header still yields a usable (all-zero) trace id.
    val header = new thrift.RequestHeader
    val richHeader = new RichRequestHeader(header)
    assert(TraceId(Some(SpanId(0)), None, SpanId(0), None, Flags()) == richHeader.traceId)
  }
  test("non-default traceId") {
    val header = (new thrift.RequestHeader)
      .setTrace_id(0)
      .setParent_span_id(1)
      .setSpan_id(2)
      .setSampled(true)
      .setFlags(4)
    val richHeader = new RichRequestHeader(header)
    val expected = TraceId(Some(SpanId(0)), Some(SpanId(1)), SpanId(2), Some(true), Flags(4))
    assert(expected == richHeader.traceId)
  }
}
| luciferous/finagle | finagle-thrift/src/test/scala/com/twitter/finagle/thrift/RichRequestHeaderTest.scala | Scala | apache-2.0 | 2,364 |
package com.cloudwick.generator.osge
import java.io.File
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.concurrent.atomic.AtomicLong
import com.cloudwick.generator.avro.{Customer, Fact, OSGERecord, Revenue}
import com.cloudwick.generator.utils._
import org.apache.avro.specific.SpecificRecord
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* Writes events to file
* @author ashrith
*/
/** Generates OSGE (online social gaming) records for an index range and writes
  * them to text (tsv/csv) or Avro files; optionally splits the output into
  * separate customer / revenue / fact tables.
  *
  * Fixes:
  *  - byte-size accounting no longer double-counts: the nested
  *    `sizeCounter.getAndAdd(sizeCounter.getAndAdd(len))` pattern added `len`
  *    and then re-added the counter's previous value; now a single
  *    `getAndAdd(len)` is used, matching the single-table path.
  *  - side-effecting traversals use `foreach` instead of `map`.
  *
  * @param eventsStartRange first event index (inclusive) produced by this writer
  * @param eventsEndRange   last event index (inclusive) produced by this writer
  * @param counter          shared across threads: total records generated
  * @param sizeCounter      shared across threads: total bytes generated
  * @param config           generator options (format, path, batching, ...)
  */
class Writer(eventsStartRange: Int,
             eventsEndRange: Int,
             counter: AtomicLong,
             sizeCounter: AtomicLong,
             config: OptionsConfig) extends Runnable with LazyLogging {
  lazy val utils = new Utils
  lazy val dateUtils = new DateUtils
  lazy val dateFormatter = new SimpleDateFormat("dd-MMM-yy HH:mm:ss")
  // Delay (ms) between events when throttling is configured; 0 means unthrottled.
  lazy val sleepTime = if(config.eventsPerSec == 0) 0 else 1000/config.eventsPerSec

  def threadName = Thread.currentThread().getName

  /** Renders one event as a single delimited line (tsv by default). */
  def formatEventToString(osgeEvent: OSGEEvent, formatter: String) = {
    val formatChar = formatter match {
      case "tsv" => '\t'
      case "csv" => ','
      case _ => '\t'
    }
    "%s%c%s%c%s%c%s%c%d%c%s%c%s%c%d%c%d%c%d%c%d%c%d%c%d%c%d%c%d%c%s\n".format(osgeEvent.cID, formatChar,
      osgeEvent.cName, formatChar, osgeEvent.cEmail, formatChar, osgeEvent.cGender, formatChar, osgeEvent.cAge,
      formatChar, osgeEvent.cAddress, formatChar, osgeEvent.cCountry, formatChar, osgeEvent.cRegisterDate, formatChar,
      osgeEvent.cFriendCount, formatChar, osgeEvent.cLifeTime, formatChar, osgeEvent.cityGamePlayed, formatChar,
      osgeEvent.pictionaryGamePlayed, formatChar, osgeEvent.scrambleGamePlayed, formatChar, osgeEvent.sniperGamePlayed,
      formatChar, osgeEvent.cRevenue, formatChar, osgeEvent.paidSubscriber)
  }

  /** Renders one event as three delimited tables: one customer row, an optional
    * revenue row (only when revenue is non-zero), and one fact row per
    * lifetime game session.
    */
  def formatEventMultiToString(osgeEvent: OSGEEvent, formatter: String) = {
    // Local instance: SimpleDateFormat is not thread-safe to share.
    val dateFormatter = new SimpleDateFormat("dd-MMM-yy HH:mm:ss")
    val formatChar = formatter match {
      case "tsv" => '\t'
      case "csv" => ','
      case _ => '\t'
    }
    val ms = scala.collection.mutable.Map[String, ArrayBuffer[String]](
      "Customer" -> new ArrayBuffer[String](1),
      "Revenue" -> new ArrayBuffer[String],
      "Fact" -> new ArrayBuffer[String](osgeEvent.cLifeTime)
    )
    ms("Customer") += "%s%c%s%c%s%c%d%c%d%c%s%c%d%c%d\n".format(osgeEvent.cID, formatChar, osgeEvent.cName, formatChar,
      osgeEvent.cGender, formatChar, osgeEvent.cAge, formatChar, osgeEvent.cRegisterDate, formatChar,
      osgeEvent.cCountry, formatChar, osgeEvent.cFriendCount, formatChar, osgeEvent.cLifeTime)
    if (osgeEvent.cRevenue != 0) {
      ms("Revenue") += "%s%c%d%c%d\n".format(osgeEvent.cID, formatChar, osgeEvent.paidDate, formatChar, osgeEvent.cRevenue)
    }
    1 to osgeEvent.cLifeTime foreach { _ =>
      // Game popularity differs by gender; pick a game by weighted probability.
      val gamesProbMap = if (osgeEvent.cGender == "female") {
        Customers.GAMES_FEMALE_PROBABILITY
      } else {
        Customers.GAMES_MALE_PROBABILITY
      }
      ms("Fact") += "%s%c%s%c%d\n".format(osgeEvent.cID, formatChar, utils.pickWeightedKey(gamesProbMap), formatChar,
        dateUtils.genDate(dateFormatter.format(osgeEvent.cRegisterDate), dateFormatter.format(Calendar.getInstance().getTimeInMillis)))
    }
    ms
  }

  /** Maps one event onto the single-table Avro record. */
  def avroEvent(event: OSGEEvent) = {
    OSGERecord.newBuilder()
      .setCId(event.cID)
      .setCName(event.cName)
      .setCEmail(event.cEmail)
      .setCGender(event.cGender)
      .setCAge(event.cAge)
      .setCAddress(event.cAddress)
      .setCCountry(event.cCountry)
      .setCRegisterDate(event.cRegisterDate)
      .setCFriendCount(event.cFriendCount)
      .setCLifeTime(event.cLifeTime)
      .setCityPlayedCount(event.cityGamePlayed)
      .setPictionaryPlayedCount(event.pictionaryGamePlayed)
      .setScramblePlayedCount(event.scrambleGamePlayed)
      .setSniperPlayedCount(event.sniperGamePlayed)
      .setCRevenue(event.cRevenue)
      .setPaidSubscriber(event.paidSubscriber)
      .build()
  }

  /** Maps one event onto the customer-dimension Avro record. */
  def customerEvent(event: OSGEEvent) = {
    Customer.newBuilder()
      .setCId(event.cID)
      .setCName(event.cName)
      .setCGender(event.cGender)
      .setCAge(event.cAge)
      .setCRegisterDate(event.cRegisterDate)
      .setCCountry(event.cCountry)
      .setCFriendCount(event.cFriendCount)
      .setCLifeTime(event.cLifeTime)
      .build()
  }

  /** Maps one event onto the revenue Avro record. */
  def revenueEvent(event: OSGEEvent) = {
    Revenue.newBuilder()
      .setCId(event.cID)
      .setCPaidDate(event.paidDate)
      .setCRevenue(event.cRevenue)
      .build()
  }

  /** Builds one game-play fact Avro record for the event's customer, with a
    * weighted game choice and a play date between registration and now.
    */
  def factEvent(event: OSGEEvent) = {
    val gamesProbMap = if (event.cGender == "female") {
      Customers.GAMES_FEMALE_PROBABILITY
    } else {
      Customers.GAMES_MALE_PROBABILITY
    }
    Fact.newBuilder()
      .setCId(event.cID)
      .setCGamePlayed(utils.pickWeightedKey(gamesProbMap))
      .setCGamePlayedDate(
        dateUtils.genDate(dateFormatter.format(event.cRegisterDate),
          dateFormatter.format(Calendar.getInstance().getTimeInMillis))
      )
      .build()
  }

  /** Generates the assigned event range, buffering records and flushing every
    * `config.flushBatch` events to the appropriate file handler(s).
    */
  def run(): Unit = {
    val totalEvents = eventsEndRange - eventsStartRange + 1
    var batchCount: Int = 0
    var outputFileHandler: FileHandler = null
    var outputAvroFileHandler: AvroFileHandler[OSGERecord] = null
    var outputFileCustomerHandler: FileHandler = null
    var outputFileRevenueHandler: FileHandler = null
    var outputFileFactHandler: FileHandler = null
    var outputAvroFileCustomerHandler: AvroFileHandler[Customer] = null
    var outputAvroFileRevenueHandler: AvroFileHandler[Revenue] = null
    var outputAvroFileFactHandler: AvroFileHandler[Fact] = null
    var eventsText: ArrayBuffer[String] = null
    var customerEventsText: ArrayBuffer[String] = null
    var revenueEventsText: ArrayBuffer[String] = null
    var factEventsText: ArrayBuffer[String] = null
    var eventsAvro: ArrayBuffer[OSGERecord] = null
    var customerEventsAvro: ArrayBuffer[Customer] = null
    var revenueEventsAvro: ArrayBuffer[Revenue] = null
    var factEventsAvro: ArrayBuffer[Fact] = null
    var multiTableText: mutable.Map[String, ArrayBuffer[String]] = null
    var multiTableAvro: mutable.Map[String, ArrayBuffer[SpecificRecord]] = null
    var customer: Customer = null
    var fact: Fact = null
    var revenue: Revenue = null
    // Allocate only the handlers/buffers needed for the chosen mode.
    if (config.multiTable) {
      if (config.outputFormat == "avro") {
        outputAvroFileCustomerHandler = new AvroFileHandler[Customer](new File(config.filePath, s"osge_customers_$threadName.data").toString, config.fileRollSize)
        outputAvroFileRevenueHandler = new AvroFileHandler[Revenue](new File(config.filePath, s"osge_revenue_$threadName.data").toString, config.fileRollSize)
        outputAvroFileFactHandler = new AvroFileHandler[Fact](new File(config.filePath, s"osge_fact_$threadName.data").toString, config.fileRollSize)
        customerEventsAvro = new ArrayBuffer[Customer](config.flushBatch)
        revenueEventsAvro = new ArrayBuffer[Revenue](config.flushBatch)
        factEventsAvro = new ArrayBuffer[Fact](config.flushBatch)
      } else {
        outputFileCustomerHandler = new FileHandler(new File(config.filePath, s"osge_customers_$threadName.data").toString, config.fileRollSize)
        outputFileRevenueHandler = new FileHandler(new File(config.filePath, s"osge_revenue_$threadName.data").toString, config.fileRollSize)
        outputFileFactHandler = new FileHandler(new File(config.filePath, s"osge_fact_$threadName.data").toString, config.fileRollSize)
        customerEventsText = new ArrayBuffer[String](config.flushBatch)
        revenueEventsText = new ArrayBuffer[String](config.flushBatch)
        factEventsText = new ArrayBuffer[String](config.flushBatch)
      }
    } else {
      if (config.outputFormat == "avro") {
        outputAvroFileHandler = new AvroFileHandler[OSGERecord](new File(config.filePath,s"osge_$threadName.data").toString, config.fileRollSize)
        eventsAvro = new ArrayBuffer[OSGERecord](config.flushBatch)
      } else {
        outputFileHandler = new FileHandler(new File(config.filePath, s"osge_$threadName.data").toString, config.fileRollSize)
        eventsText = new ArrayBuffer[String](config.flushBatch)
      }
    }
    var osgeEvent: OSGEEvent = null
    try {
      if (config.outputFormat == "avro") {
        if (config.multiTable) {
          outputAvroFileCustomerHandler.openFile()
          outputAvroFileRevenueHandler.openFile()
          outputAvroFileFactHandler.openFile()
        } else {
          outputAvroFileHandler.openFile()
        }
      } else {
        if (config.multiTable) {
          outputFileCustomerHandler.openFile()
          outputFileRevenueHandler.openFile()
          outputFileFactHandler.openFile()
        } else {
          outputFileHandler.openFile()
        }
      }
      var textPlaceHolder:String = null
      var avroPlaceHolder:OSGERecord = null
      (eventsStartRange to eventsEndRange).foreach { eventCount =>
        batchCount += 1
        osgeEvent = new OSGEGenerator().eventGenerate
        /*
         * Fill the buffers and account for the generated bytes.
         */
        if (config.multiTable) {
          if (config.outputFormat == "avro") {
            customer = customerEvent(osgeEvent)
            customerEventsAvro += customer
            sizeCounter.getAndAdd(customer.toString.getBytes.length)
            if (osgeEvent.cRevenue != 0) {
              revenue = revenueEvent(osgeEvent)
              revenueEventsAvro += revenue
              sizeCounter.getAndAdd(revenue.toString.getBytes.length)
            }
            1 to osgeEvent.cLifeTime foreach { _ =>
              fact = factEvent(osgeEvent)
              factEventsAvro += fact
              sizeCounter.getAndAdd(fact.toString.getBytes.length)
            }
          } else {
            multiTableText = formatEventMultiToString(osgeEvent, config.outputFormat)
            customerEventsText ++= multiTableText("Customer")
            multiTableText("Customer").foreach(x => sizeCounter.getAndAdd(x.getBytes.length))
            revenueEventsText ++= multiTableText("Revenue")
            multiTableText("Revenue").foreach(x => sizeCounter.getAndAdd(x.getBytes.length))
            factEventsText ++= multiTableText("Fact")
            multiTableText("Fact").foreach(x => sizeCounter.getAndAdd(x.getBytes.length))
          }
        } else {
          if (config.outputFormat == "avro") {
            avroPlaceHolder = avroEvent(osgeEvent)
            eventsAvro += avroPlaceHolder
            sizeCounter.getAndAdd(avroPlaceHolder.toString.getBytes.length)
          } else {
            textPlaceHolder = formatEventToString(osgeEvent, config.outputFormat)
            eventsText += textPlaceHolder
            sizeCounter.getAndAdd(textPlaceHolder.getBytes.length)
          }
        }
        // increment universal record counter
        counter.getAndIncrement
        // Flush when the batch is full (or the range is exactly one batch long).
        if (batchCount == config.flushBatch || batchCount == totalEvents) {
          if (config.multiTable) {
            if (config.outputFormat == "avro") {
              outputAvroFileCustomerHandler.publishBuffered(customerEventsAvro)
              outputAvroFileRevenueHandler.publishBuffered(revenueEventsAvro)
              outputAvroFileFactHandler.publishBuffered(factEventsAvro)
              customerEventsAvro.clear()
              revenueEventsAvro.clear()
              factEventsAvro.clear()
            } else {
              outputFileCustomerHandler.publishBuffered(customerEventsText)
              outputFileRevenueHandler.publishBuffered(revenueEventsText)
              outputFileFactHandler.publishBuffered(factEventsText)
              customerEventsText.clear()
              revenueEventsText.clear()
              factEventsText.clear()
            }
            batchCount = 0
          } else {
            if (config.outputFormat == "avro") {
              outputAvroFileHandler.publishBuffered(eventsAvro)
              eventsAvro.clear()
            } else {
              outputFileHandler.publishBuffered(eventsText)
              eventsText.clear()
            }
            batchCount = 0
          }
        }
      }
      logger.debug(s"Events generated by $threadName is: $totalEvents from ($eventsStartRange) to ($eventsEndRange)")
    } catch {
      case e: Exception => logger.error("Error:: {}", e)
    }
    finally {
      // Always close the handlers that were opened for the chosen mode.
      if (config.multiTable) {
        if (config.outputFormat == "avro") {
          outputAvroFileCustomerHandler.close()
          outputAvroFileRevenueHandler.close()
          outputAvroFileFactHandler.close()
        } else {
          outputFileCustomerHandler.close()
          outputFileRevenueHandler.close()
          outputFileFactHandler.close()
        }
      } else {
        if (config.outputFormat == "avro") {
          outputAvroFileHandler.close()
        } else {
          outputFileHandler.close()
        }
      }
    }
  }
}
package controllers.s_employment
import org.specs2.mutable._
import utils.WithBrowser
import controllers.{Formulate, BrowserMatchers}
// Suite shell for the employment journey traits below.
// NOTE(review): no examples are declared in this class itself — confirm the
// concrete specs mixing in the traits live elsewhere.
class GEmploymentIntegrationSpec extends Specification
/** Drives the browser to a claim with a claim date and employment details. */
trait EmployedSinceClaimDate extends BrowserMatchers {
  this: WithBrowser[_] =>
  def beginClaim() = {
    Formulate.claimDate(browser)
    Formulate.employment(browser)
  }
}
/** Drives the browser to a claim with education details (no employment). */
trait EducatedSinceClaimDate extends BrowserMatchers {
  this: WithBrowser[_] =>
  def beginClaim() = {
    Formulate.claimDate(browser)
    Formulate.nationalityAndResidency(browser)
    Formulate.paymentsFromAbroad(browser)
    Formulate.yourCourseDetails(browser)
  }
}
/** Drives the browser to a claim with nationality/payments plus employment. */
trait EducatedAndEmployedSinceClaimDate extends BrowserMatchers {
  this: WithBrowser[_] =>
  def beginClaim() = {
    Formulate.claimDate(browser)
    Formulate.nationalityAndResidency(browser)
    Formulate.paymentsFromAbroad(browser)
    Formulate.employment(browser)
  }
}
/** Browser fixture that advances a fresh claim for a claimant who has not been
  * employed since the claim date.
  */
trait NotEmployedSinceClaimDate extends BrowserMatchers {
  this: WithBrowser[_] =>

  // Fills the claim-date page, then records "not in employment".
  def beginClaim() = {
    Formulate.claimDate(browser)
    Formulate.notInEmployment(browser)
  }
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/controllers/s_employment/GEmploymentIntegrationSpec.scala | Scala | mit | 1,119 |
package benchmarks.conflict
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import benchmarks.{EngineParam, Workload}
import org.openjdk.jmh.annotations._
import rescala.core.{Engine, Struct}
import rescala.reactives._
/** Auxiliary JMH counters reported alongside the primary benchmark score.
  * The AuxCounters annotation exposes the public vars below as extra per-iteration
  * metrics; one instance exists per benchmark thread (Scope.Thread).
  */
@AuxCounters
@State(Scope.Thread)
class EvaluationCounter {

  // Evaluation attempts accumulated during the current iteration.
  var tried: Int = _
  // Updates that completed during the current iteration.
  var succeeded: Int = _

  // JMH calls this before every measurement iteration so both counters start at zero.
  @Setup(Level.Iteration)
  def reset() = {
    tried = 0
    succeeded = 0
  }
}
/** Benchmark for conflicting transactions: two threads (Threads(2), one per
  * benchmark method via GroupThreads) update the same reactive graph. One source
  * feeds an expensive derived signal, the other a cheap one, so their updates conflict.
  */
@State(Scope.Group)
@BenchmarkMode(Array(Mode.Throughput))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 5, time = 1000, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 5, time = 1000, timeUnit = TimeUnit.MILLISECONDS)
@Fork(1)
@Threads(2)
class ExpensiveConflict[S <: Struct] {

  // Shared monotonically increasing value source for both benchmark methods.
  var input: AtomicInteger = new AtomicInteger(0)

  var cheapSource: Var[Int, S] = _
  var expensiveSource: Var[Int, S] = _
  var result: Signal[Int, S] = _
  var engine: Engine[S] = _
  // Count of how many times the expensive map re-ran (includes conflict retries).
  var tried: Int = _

  @Setup(Level.Iteration)
  def setup(engine: EngineParam[S], work: Workload) = {
    this.engine = engine.engine
    implicit val e = this.engine
    tried = 0
    cheapSource = Var(input.incrementAndGet())
    expensiveSource = Var(input.incrementAndGet())
    // work.consume() simulates the expensive derivation; tried counts (re)evaluations.
    val expensive = expensiveSource.map{ v => tried += 1; val r = v + 1; work.consume(); r }
    result = Signals.lift(expensive, cheapSource)(_ + _).map{v => val r = v + 1; work.consumeSecondary(); r}
  }

  // Cheap update path: bumps only the cheap source.
  @Benchmark
  @Group("g")
  @GroupThreads(1)
  def cheap() = {
    cheapSource.set(input.incrementAndGet())(engine)
  }

  // Expensive update path: bumps the expensive source and records how many
  // evaluation attempts this (eventually successful) update needed.
  @Benchmark
  @Group("g")
  @GroupThreads(1)
  def expensive(counter: EvaluationCounter) = {
    expensiveSource.set(input.incrementAndGet())(engine)
    counter.tried += tried
    counter.succeeded += 1
    tried = 0
  }
}
| volkc/REScala | Research/Microbenchmarks/src/main/scala/benchmarks/conflict/ExpensiveConflict.scala | Scala | apache-2.0 | 1,781 |
package org.rebeam.tree.view.transition
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.VdomNode
import scala.scalajs.js
import scala.scalajs.js.|
/**
* See https://github.com/reactjs/react-transition-group/tree/v1-stable#high-level-api-csstransitiongroup
*/
object CSSTransitionGroup {

  /** Facade for the props object consumed by the JS CSSTransitionGroup component.
    * Every field is optional (js.UndefOr); fields left undefined fall back to the
    * JS component's own defaults.
    */
  @js.native
  trait Props extends js.Object {
    var transitionName : js.UndefOr[String | GroupNames]
    var transitionAppear : js.UndefOr[Boolean]
    var transitionEnter : js.UndefOr[Boolean]
    var transitionLeave : js.UndefOr[Boolean]
    var transitionAppearTimeout : js.UndefOr[Int]
    var transitionEnterTimeout : js.UndefOr[Int]
    var transitionLeaveTimeout : js.UndefOr[Int]
    // component name for the TransitionGroup itself
    var component : js.UndefOr[String]
    // class for the TransitionGroup itself
    var className : js.UndefOr[String]
  }

  /** Per-phase CSS class names, for when a single string prefix is not enough. */
  @js.native
  trait GroupNames extends js.Object {
    var appear : js.UndefOr[String]
    var enter : js.UndefOr[String]
    var leave : js.UndefOr[String]
    var appearActive : js.UndefOr[String]
    var enterActive : js.UndefOr[String]
    var leaveActive : js.UndefOr[String]
  }

  // The raw component is looked up on the JS global scope, so the corresponding
  // script bundle must be loaded before this object is first used.
  private val rawGroupComponent = js.Dynamic.global.CSSTransitionGroup
  private val groupComponent = JsComponent[Props, Children.Varargs, Null](rawGroupComponent)

  /**
   * [[CSSTransitionGroup]] is based on `ReactTransitionGroup` and is an easy way to perform CSS transitions and
   * animations when a React component enters or leaves the DOM.
   *
   * @param name The prefix for all class-names that will be applied to elements to trigger animations.
   * @param appear Enable/disable animating appear animations.
   * @param enter Enable/disable animating enter animations.
   * @param leave Enable/disable animating leave animations.
   * @param appearTimeout Timeout in milliseconds.
   * @param enterTimeout Timeout in milliseconds.
   * @param leaveTimeout Timeout in milliseconds.
   * @param component The container type around all child elements. By default this renders as a span.
   * @param className Class name to apply to the container.
   * @param children You must provide the key attribute for all children of [[CSSTransitionGroup]],
   *                 even when only rendering a single item. This is how React will determine which
   *                 children have entered, left, or stayed.
   * @see https://facebook.github.io/react/docs/animation.html
   */
  def apply(
    name         : js.UndefOr[String | GroupNames],
    appear       : js.UndefOr[Boolean] = js.undefined,
    enter        : js.UndefOr[Boolean] = js.undefined,
    leave        : js.UndefOr[Boolean] = js.undefined,
    appearTimeout: js.UndefOr[Int] = js.undefined,
    enterTimeout : js.UndefOr[Int] = js.undefined,
    leaveTimeout : js.UndefOr[Int] = js.undefined,
    component    : js.UndefOr[String] = js.undefined,
    className    : js.UndefOr[String] = js.undefined
  )(children: VdomNode*): JsComponent.Unmounted[Props, Null] = {
    // Build the props object field by field; undefined values are simply absent.
    val p = (new js.Object).asInstanceOf[Props]
    p.transitionName = name
    p.transitionAppear = appear
    p.transitionEnter = enter
    p.transitionLeave = leave
    p.transitionAppearTimeout = appearTimeout
    p.transitionEnterTimeout = enterTimeout
    p.transitionLeaveTimeout = leaveTimeout
    p.component = component
    p.className = className
    groupComponent.apply(p)(children:_*)
  }
}
| trepidacious/tree-material-ui | js/src/main/scala/org/rebeam/tree/view/transition/CSSTransitionGroup.scala | Scala | gpl-3.0 | 3,560 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast
import org.neo4j.cypher.internal.frontend.v2_3.ast.Expression.SemanticContext
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
import org.neo4j.cypher.internal.frontend.v2_3.InputPosition
/** Predicate `expr:Label1:Label2...` — true iff `expression` evaluates to a node
  * that carries all of `labels`.
  */
case class HasLabels(expression: Expression, labels: Seq[LabelName])(val position: InputPosition)
  extends Expression with SimpleTyping {

  // The predicate as a whole is always boolean-typed.
  protected def possibleTypes = CTBoolean

  override def semanticCheck(ctx: SemanticContext) = {
    // First check the inner expression, then require it to be node-typed,
    // and finally run the generic SimpleTyping check from the superclass.
    val innerCheck    = expression.semanticCheck(ctx)
    val nodeTypeCheck = expression.expectType(CTNode.covariant)
    innerCheck chain nodeTypeCheck chain super.semanticCheck(ctx)
  }
}
| HuangLS/neo4j | community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/HasLabels.scala | Scala | apache-2.0 | 1,403 |
package org.apache.spark.hbase.keyspace
import org.apache.hadoop.hbase.filter.FuzzyRowFilter
import org.apache.spark.SparkContext
import org.apache.spark.hbase.keyspace.KeySpaceRegistry.KSREG
import org.apache.spark.hbase.{HBaseRDDFunctions, HBaseQuery, HBaseFilter, HBaseRDD}
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
/**
* Created by mharis on 10/07/15.
*
* HBaseRDDKS is an HBaseRDD[(Key, hbase.client.Result)] - Key is defined only in the scope of this special package
* and its purpose is to allow mixing different key types in the same table and preserve even distribution across regions.
*
* In any Key instance, the first 4 bytes are salt for the key generated by the particular KeySpace to
* which the Key belongs and the following 2 bytes are the signature of the KeySpace. With this representation it
*
* is possible to ensure that:
* 1) any type of key can be "made" to be distributed evenly
* 2) different key types can be mixed in a single hbase table (but don't have to be - depends on application)
* 3) fuzzy row filter can be applied on the 2-byte key space signature to fast forward on hbase server-side
*
* columns is a sequence of string identifiers which can either reference a column family, e.g. 'N' or a specific
* column, e.g. 'F:propensity'
*/
/** Key-space-aware HBaseRDD: attaches a FuzzyRowFilter so scans fast-forward over
  * rows whose 2-byte key-space signature (bytes 4-5 of the row key, after the
  * 4 salt bytes) does not match `keySpace`.
  *
  * @param sc                 the SparkContext used by the parent HBaseRDD
  * @param tableNameAsString  target HBase table
  * @param keySpace           signature of the key space to scan (-1 via the
  *                           auxiliary constructor means "unrestricted")
  */
abstract class HBaseRDDKS[V](sc: SparkContext, tableNameAsString: String, keySpace: Short)(implicit reg: KSREG)
  extends HBaseRDD[Key, V](sc, tableNameAsString, Seq(new HBaseFilter() {
    override def configureQuery(query: HBaseQuery): Unit = {
      // Mask semantics: 1 = wildcard byte (the 4 salt bytes), 0 = must match
      // (the 2 key-space signature bytes).
      val fuzzyRowFilter = new org.apache.hadoop.hbase.util.Pair(
        KeySpace(keySpace)(reg).allocate(0),
        Array[Byte](1, 1, 1, 1, 0, 0))
      query.addFilter(new FuzzyRowFilter(List(fuzzyRowFilter).asJava))
    }
  })) {

  /** Auxiliary constructor for scans that do not restrict the key space. */
  def this(sc: SparkContext, tableNameAsString: String)(implicit reg: KSREG) = this(sc, tableNameAsString, (-1.toShort))

  // Honour the (offset, length) window instead of always wrapping the whole
  // array; the common full-array case still avoids a copy.
  override def fromBytes = (rowKey: Array[Byte], o: Int, l: Int) =>
    Key(if (o == 0 && l == rowKey.length) rowKey
        else java.util.Arrays.copyOfRange(rowKey, o, o + l))

  override def toBytes = (key: Key) => key.bytes
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.planning
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.TreeNode
/**
* Given a [[plans.logical.LogicalPlan LogicalPlan]], returns a list of `PhysicalPlan`s that can
* be used for execution. If this strategy does not apply to the give logical operation then an
* empty list should be returned.
*/
abstract class GenericStrategy[PhysicalPlan <: TreeNode[PhysicalPlan]] extends Logging {
  /** Candidate physical plans for `plan`; empty when this strategy does not apply. */
  def apply(plan: LogicalPlan): Seq[PhysicalPlan]
}
/**
* Abstract class for transforming [[plans.logical.LogicalPlan LogicalPlan]]s into physical plans.
* Child classes are responsible for specifying a list of [[Strategy]] objects that each of which
* can return a list of possible physical plan options. If a given strategy is unable to plan all
* of the remaining operators in the tree, it can call [[planLater]], which returns a placeholder
* object that will be filled in using other available strategies.
*
* TODO: RIGHT NOW ONLY ONE PLAN IS RETURNED EVER...
* PLAN SPACE EXPLORATION WILL BE IMPLEMENTED LATER.
*
* @tparam PhysicalPlan The type of physical plan produced by this [[QueryPlanner]]
*/
abstract class QueryPlanner[PhysicalPlan <: TreeNode[PhysicalPlan]] {

  /** The execution strategies this planner consults, in priority order. */
  def strategies: Seq[GenericStrategy[PhysicalPlan]]

  /** Plans `plan` with the full strategy list and takes the first candidate.
   *  Strategies embed this as a placeholder for subtrees they cannot plan themselves.
   */
  protected def planLater(plan: LogicalPlan): PhysicalPlan = this.plan(plan).next()

  /** All physical-plan candidates for `plan`, produced lazily strategy by strategy. */
  def plan(plan: LogicalPlan): Iterator[PhysicalPlan] = {
    // The view keeps strategy application lazy; fail fast if nothing applies.
    val candidates = strategies.view.flatMap(strategy => strategy(plan)).toIterator
    assert(candidates.hasNext, s"No plan for $plan")
    candidates
  }
}
| chenc10/Spark-PAF | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala | Scala | apache-2.0 | 2,766 |
package dotty.tools.dotc
package transform
import core._
import TreeTransforms._
import Contexts.Context
import Flags._
import SymUtils._
import Symbols._
import SymDenotations._
import Types._
import Decorators._
import DenotTransformers._
import StdNames._
import NameOps._
import Phases._
import ast.untpd
import ast.Trees._
import collection.mutable
/** This phase performs the following transformations:
*
* 1. (done in `traitDefs` and `transformSym`) Map every concrete trait getter
*
* <mods> def x(): T = expr
*
* to the pair of definitions:
*
* <mods> def x(): T
* protected def initial$x(): T = { stats; expr }
*
* where `stats` comprises all statements between either the start of the trait
* or the previous field definition which are not definitions (i.e. are executed for
* their side effects).
*
* 2. (done in `traitDefs`) Make every concrete trait setter
*
* <mods> def x_=(y: T) = ()
*
* deferred by mapping it to
*
* <mods> def x_=(y: T)
*
* 3. For a non-trait class C:
*
* For every trait M directly implemented by the class (see SymUtils.mixin), in
* reverse linearization order, add the following definitions to C:
*
* 3.1 (done in `traitInits`) For every parameter accessor `<mods> def x(): T` in M,
* in order of textual occurrence, add
*
* <mods> def x() = e
*
* where `e` is the constructor argument in C that corresponds to `x`. Issue
* an error if no such argument exists.
*
* 3.2 (done in `traitInits`) For every concrete trait getter `<mods> def x(): T` in M
* which is not a parameter accessor, in order of textual occurrence, produce the following:
*
* 3.2.1 If `x` is also a member of `C`, and M is a Dotty trait:
*
* <mods> def x(): T = super[M].initial$x()
*
* 3.2.2 If `x` is also a member of `C`, and M is a Scala 2.x trait:
*
* <mods> def x(): T = _
*
* 3.2.3 If `x` is not a member of `C`, and M is a Dotty trait:
*
* super[M].initial$x()
*
* 3.2.4 If `x` is not a member of `C`, and M is a Scala2.x trait, nothing gets added.
*
*
* 3.3 (done in `superCallOpt`) The call:
*
* super[M].<init>
*
* 3.4 (done in `setters`) For every concrete setter `<mods> def x_=(y: T)` in M:
*
* <mods> def x_=(y: T) = ()
*
* 4. (done in `transformTemplate` and `transformSym`) Drop all parameters from trait
* constructors.
*
* 5. (done in `transformSym`) Drop ParamAccessor flag from all parameter accessors in traits.
*
* Conceptually, this is the second half of the previous mixin phase. It needs to run
* after erasure because it copies references to possibly private inner classes and objects
* into enclosing classes where they are not visible. This can only be done if all references
* are symbolic.
*/
class Mixin extends MiniPhaseTransform with SymTransformer { thisTransform =>
  import ast.tpd._

  override def phaseName: String = "mixin"

  // Must run after erasure: the rewrite copies references to possibly private
  // inner classes/objects into enclosing classes, which needs symbolic refs.
  override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure])

  // Symbol-level part of the rewrite (steps 1, 4 and 5 of the class comment):
  // concrete trait accessors become deferred, non-private, non-ParamAccessor;
  // trait constructors lose their value parameters.
  override def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation =
    if (sym.is(Accessor, butNot = Deferred) && sym.owner.is(Trait))
      sym.copySymDenotation(initFlags = sym.flags &~ ParamAccessor | Deferred).ensureNotPrivate
    else if (sym.isConstructor && sym.owner.is(Trait) && sym.info.firstParamTypes.nonEmpty)
      sym.copySymDenotation(info = MethodType(Nil, sym.info.resultType))
    else
      sym

  // Returns the `initial$x` initializer symbol for getter `sym`, creating and
  // entering it (as of this phase) if it does not exist yet.
  private def initializer(sym: Symbol)(implicit ctx: Context): TermSymbol = {
    val initName = InitializerName(sym.name.asTermName)
    sym.owner.info.decl(initName).symbol
      .orElse(
        ctx.newSymbol(
          sym.owner,
          initName,
          Protected | Synthetic | Method,
          sym.info,
          coord = sym.symbol.coord).enteredAfter(thisTransform))
      .asTerm
  }

  override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo) = {
    val cls = impl.symbol.owner.asClass
    val ops = new MixinOps(cls, thisTransform)
    import ops._

    // Steps 1 and 2 of the class comment: split each concrete getter into a
    // deferred getter plus an `initial$x` method carrying the original rhs and
    // all preceding non-definition (side-effecting) statements; make concrete
    // setters deferred.
    def traitDefs(stats: List[Tree]): List[Tree] = {
      val initBuf = new mutable.ListBuffer[Tree]
      stats.flatMap({
        case stat: DefDef if stat.symbol.isGetter && !stat.rhs.isEmpty && !stat.symbol.is(Flags.Lazy) =>
          // make initializer that has all effects of previous getter,
          // replace getter rhs with empty tree.
          val vsym = stat.symbol
          val isym = initializer(vsym)
          val rhs = Block(
            initBuf.toList.map(_.changeOwner(impl.symbol, isym)),
            stat.rhs.changeOwner(vsym, isym).wildcardToDefault)
          initBuf.clear()
          cpy.DefDef(stat)(rhs = EmptyTree) :: DefDef(isym, rhs) :: Nil
        case stat: DefDef if stat.symbol.isSetter =>
          cpy.DefDef(stat)(rhs = EmptyTree) :: Nil
        case stat: DefTree =>
          stat :: Nil
        case stat =>
          // Non-definition statement: defer it into the next getter's initializer.
          initBuf += stat
          Nil
      }) ++ initBuf
    }

    /** Map constructor call to a pair of a supercall and a list of arguments
     * to be used as initializers of trait parameters if the target of the call
     * is a trait.
     */
    def transformConstructor(tree: Tree): (Tree, List[Tree]) = {
      val Apply(sel @ Select(New(_), nme.CONSTRUCTOR), args) = tree
      val (callArgs, initArgs) = if (tree.symbol.owner.is(Trait)) (Nil, args) else (args, Nil)
      (superRef(tree.symbol, tree.pos).appliedToArgs(callArgs), initArgs)
    }

    // Per parent class: its rewritten super call and (for traits) the arguments
    // that become initializers of its trait parameters.
    val superCallsAndArgs = (
      for (p <- impl.parents if p.symbol.isConstructor)
      yield p.symbol.owner -> transformConstructor(p)
    ).toMap
    val superCalls = superCallsAndArgs.mapValues(_._1)
    val initArgs = superCallsAndArgs.mapValues(_._2)

    // Step 3.3: the super call for `baseCls`, synthesized when missing, or none
    // at all for NoInits traits and phantom classes.
    def superCallOpt(baseCls: Symbol): List[Tree] = superCalls.get(baseCls) match {
      case Some(call) =>
        if (defn.PhantomClasses.contains(baseCls)) Nil else call :: Nil
      case None =>
        if (baseCls.is(NoInitsTrait) || defn.PhantomClasses.contains(baseCls)) Nil
        else {
          //println(i"synth super call ${baseCls.primaryConstructor}: ${baseCls.primaryConstructor.info}")
          transformFollowingDeep(superRef(baseCls.primaryConstructor).appliedToNone) :: Nil
        }
    }

    // Was the symbol already deferred when this phase started?
    def wasDeferred(sym: Symbol) =
      ctx.atPhase(thisTransform) { implicit ctx => sym is Deferred }

    // Steps 3.1 and 3.2: getter implementations added to the inheriting class
    // for every concrete getter of `mixin`.
    def traitInits(mixin: ClassSymbol): List[Tree] = {
      var argNum = 0
      // Consumes the next constructor argument destined for `mixin`'s trait
      // parameters; reports an error if no argument list was supplied.
      def nextArgument() = initArgs.get(mixin) match {
        case Some(arguments) =>
          try arguments(argNum) finally argNum += 1
        case None =>
          val (msg, pos) = impl.parents.find(_.tpe.typeSymbol == mixin) match {
            case Some(parent) => ("lacks argument list", parent.pos)
            case None =>
              ("""is indirectly implemented,
                 |needs to be implemented directly so that arguments can be passed""".stripMargin,
                cls.pos)
          }
          ctx.error(i"parameterized $mixin $msg", pos)
          EmptyTree
      }

      for (getter <- mixin.info.decls.filter(getr => getr.isGetter && !wasDeferred(getr)).toList) yield {
        val isScala2x = mixin.is(Scala2x)
        // NOTE(review): `default` duplicates the Underscore call used below but is
        // itself never referenced — confirm whether it can be removed.
        def default = Underscore(getter.info.resultType)
        def initial = transformFollowing(superRef(initializer(getter)).appliedToNone)

        /** A call to the implementation of `getter` in `mixin`'s implementation class */
        def lazyGetterCall = {
          def canbeImplClassGetter(sym: Symbol) = sym.info.firstParamTypes match {
            case t :: Nil => t.isDirectRef(mixin)
            case _ => false
          }
          val implClassGetter = mixin.implClass.info.nonPrivateDecl(getter.name)
            .suchThat(canbeImplClassGetter).symbol
          ref(mixin.implClass).select(implClassGetter).appliedTo(This(cls))
        }

        if (isCurrent(getter) || getter.is(ExpandedName)) {
          val rhs =
            if (ctx.atPhase(thisTransform)(implicit ctx => getter.is(ParamAccessor))) nextArgument()
            else if (isScala2x)
              if (getter.is(Lazy)) lazyGetterCall
              else Underscore(getter.info.resultType)
            else transformFollowing(superRef(initializer(getter)).appliedToNone)
          // transformFollowing call is needed to make memoize & lazy vals run
          transformFollowing(DefDef(implementation(getter.asTerm), rhs))
        }
        else if (isScala2x) EmptyTree
        else initial
      }
    }

    // Step 3.4: concrete unit-body setters for each mixed-in trait.
    def setters(mixin: ClassSymbol): List[Tree] =
      for (setter <- mixin.info.decls.filter(setr => setr.isSetter && !wasDeferred(setr)).toList)
        yield DefDef(implementation(setter.asTerm), unitLiteral.withPos(cls.pos))

    cpy.Template(impl)(
      constr =
        // Step 4: drop all parameters from trait constructors.
        if (cls.is(Trait) && impl.constr.vparamss.flatten.nonEmpty)
          cpy.DefDef(impl.constr)(vparamss = Nil :: Nil)
        else impl.constr,
      parents = impl.parents.map(p => TypeTree(p.tpe).withPos(p.pos)),
      body =
        if (cls is Trait) traitDefs(impl.body)
        else {
          // Step 3: for a non-trait class, splice in trait initializers, super
          // calls and setters for each directly mixed-in trait, in order.
          val mixInits = mixins.flatMap { mixin =>
            flatten(traitInits(mixin)) ::: superCallOpt(mixin) ::: setters(mixin)
          }
          superCallOpt(superCls) ::: mixInits ::: impl.body
        })
  }
}
| vsalvis/dotty | src/dotty/tools/dotc/transform/Mixin.scala | Scala | bsd-3-clause | 9,463 |
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.framework.model
import play.api.libs.json._
import play.api.libs.json.JodaWrites._
import play.api.libs.json.JodaReads._
/**
* @author nrotstan
*/
// Marker trait for team-membership models; the statuses live in the companion.
trait TeamMember {}
object TeamMember {
  // Active member of the team (mirrors the underlying group-member status).
  val STATUS_MEMBER = GroupMember.STATUS_MEMBER
  // User has been invited to the team but has not yet accepted.
  val STATUS_INVITED = 1
}
/**
* Represents basic user fields relevant to team membership
*/
case class TeamUser(
    id: Long,                 // membership (group-member) row id
    userId: Long,             // internal user id
    osmId: Long,              // the user's OSM profile id
    name: String,             // the user's OSM display name
    teamId: Long,             // the team (group) this membership refers to
    teamGrants: List[Grant],  // only the user's grants targeting this team
    status: Int               // TeamMember.STATUS_MEMBER or STATUS_INVITED
) extends Identifiable
object TeamUser {
  implicit val writes: Writes[TeamUser] = Json.writes[TeamUser]
  implicit val reads: Reads[TeamUser] = Json.reads[TeamUser]

  /** Projects a full User down to the fields relevant to membership of `teamId`,
    * keeping only the grants the user holds on that team.
    */
  def fromUser(teamId: Long, member: GroupMember, user: User) = {
    val target = GrantTarget.group(teamId)
    val grantsOnTeam = user.grants.filter(_.target == target)
    TeamUser(
      id = member.id,
      userId = user.id,
      osmId = user.osmProfile.id,
      name = user.osmProfile.displayName,
      teamId = teamId,
      teamGrants = grantsOnTeam,
      status = member.status
    )
  }
}
/**
* Bundles together a team with grants
*/
case class ManagingTeam(
  team: Group,        // the team (represented as a group)
  grants: List[Grant] // the grants held on that team
)
object ManagingTeam {
  // Play-JSON (de)serialization derived from the case class fields.
  implicit val writes: Writes[ManagingTeam] = Json.writes[ManagingTeam]
  implicit val reads: Reads[ManagingTeam] = Json.reads[ManagingTeam]
}
| mgcuthbert/maproulette2 | app/org/maproulette/framework/model/Team.scala | Scala | apache-2.0 | 1,465 |
package sml.instructions
import sml.Machine
trait Bnz extends Instruction {
  /**
   * Address of the register to evaluate
   */
  val register: Int

  /**
   * Label of the instruction to jump to when the evaluated register is non-zero
   */
  val target: String
}
/**
 * Branch-if-not-zero: instructs the Machine to jump to the instruction labelled
 * `target` when the value in `register` is non-zero; otherwise execution falls through.
 */
case class BnzInstruction(label: String, opcode: String, register: Int, target: String) extends Bnz {

  /**
   * Jumps by setting the program counter to the position of the `target` label.
   * NOTE(review): if no instruction carries `target`, indexWhere yields -1 and
   * the pc becomes -1 — confirm the Machine treats that as a halt/error.
   *
   * @see Instruction#execute(m: Machine)
   */
  override def execute(m: Machine): Unit =
    if (m.regs(register) != 0) m.pc = m.prog.indexWhere(_.label == target)

  /**
   * @see Instruction#toString()
   */
  override def toString: String =
    super.toString + s" if the value in register $register is not 0 execute $target\n"
}
object BnzInstruction {
  // Factory that fixes the opcode to "bnz".
  def apply(label: String, reg: Int, tgt: String): Bnz =
    new BnzInstruction(label, "bnz", reg, tgt)
}
| BBK-PiJ-2015-67/sdp-portfolio | coursework/cw-one/src/main/scala/sml/instructions/BnzInstruction.scala | Scala | unlicense | 926 |
package com.codility.challenge._week_1_codility
import org.scalatest.FunSuite
/**
 * Created by obarros on 13/11/2016.
 *
 * FizzBuzzWoof regression tests: solution(n) lists 1..n where multiples of 3, 5
 * and 7 are replaced by "Fizz", "Buzz" and "Woof" (labels concatenated for
 * common multiples, e.g. 15 -> "FizzBuzz", 21 -> "FizzWoof").
 */
class Solution2$Test extends FunSuite {

  test("testSolution") {
    assert(Solution.solution(5) == List(1, 2, "Fizz", 4, "Buzz"))
  }

  // NOTE(review): the numeric suffixes in the test names below do not match the
  // argument passed to `solution` — confirm whether the names should be aligned.
  test("testSolution 0") {
    assert(Solution.solution(1) == List(1))
  }

  test("testSolution 1") {
    assert(Solution.solution(6) == List(1, 2, "Fizz", 4, "Buzz", "Fizz"))
  }

  test("testSolution 2") {
    assert(Solution.solution(7) == List(1, 2, "Fizz", 4, "Buzz", "Fizz", "Woof"))
  }

  test("testSolution 3") {
    assert(Solution.solution(8) == List(1, 2, "Fizz", 4, "Buzz", "Fizz", "Woof", 8))
  }

  test("testSolution 4") {
    assert(Solution.solution(9) == List(1, 2, "Fizz", 4, "Buzz", "Fizz", "Woof", 8, "Fizz"))
  }

  // Full golden output for the first 100 values.
  test("testSolution 100") {
    assert(Solution.solution(100) == List(1, 2, "Fizz", 4, "Buzz", "Fizz", "Woof", 8, "Fizz", "Buzz", 11, "Fizz", 13, "Woof", "FizzBuzz", 16, 17, "Fizz", 19, "Buzz", "FizzWoof", 22, 23, "Fizz", "Buzz", 26, "Fizz", "Woof", 29, "FizzBuzz", 31, 32, "Fizz", 34, "BuzzWoof", "Fizz", 37, 38, "Fizz", "Buzz", 41, "FizzWoof", 43, 44, "FizzBuzz", 46, 47, "Fizz", "Woof", "Buzz", "Fizz", 52, 53, "Fizz", "Buzz", "Woof", "Fizz", 58, 59, "FizzBuzz", 61, 62, "FizzWoof", 64, "Buzz", "Fizz", 67, 68, "Fizz", "BuzzWoof", 71, "Fizz", 73, 74, "FizzBuzz", 76, "Woof", "Fizz", 79, "Buzz", "Fizz", 82, 83, "FizzWoof", "Buzz", 86, "Fizz", 88, 89, "FizzBuzz", "Woof", 92, "Fizz", 94, "Buzz", "Fizz", 97, "Woof", "Fizz", "Buzz")
    )
  }

  test("testSolution 24") {
    assert(Solution.solution(24) == List(1, 2, "Fizz", 4, "Buzz", "Fizz", "Woof", 8, "Fizz", "Buzz", 11, "Fizz", 13, "Woof", "FizzBuzz", 16, 17, "Fizz", 19, "Buzz", "FizzWoof", 22, 23, "Fizz"))
  }

  // Only checks the size for a larger input.
  test("testSolution 1000") {
    assert(Solution.solution(1000).size == 1000)
  }
}
| Obarros/Codility | src/test/scala-2.11/com/codility/challenge/_week_1_codility/Solution$Test.scala | Scala | mit | 1,857 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.types.Row
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.expressions.utils.ExpressionTestBase
import org.junit.Test
/**
* Tests all SQL expressions that are currently supported according to the documentation.
* This tests should be kept in sync with the documentation to reduce confusion due to the
* large amount of SQL functions.
*
* The tests do not test every parameter combination of a function.
* They are rather a function existence test and simple functional test.
*
* The tests are split up and ordered like the sections in the documentation.
*/
class SqlExpressionTest extends ExpressionTestBase {

  // Each testSqlApi(expr, expected) evaluates the SQL expression and compares
  // the stringified result; the SQL literals below must stay exactly in sync
  // with the documentation.

  @Test
  def testComparisonFunctions(): Unit = {
    testSqlApi("1 = 1", "true")
    testSqlApi("1 <> 1", "false")
    testSqlApi("5 > 2", "true")
    testSqlApi("2 >= 2", "true")
    testSqlApi("5 < 2", "false")
    testSqlApi("2 <= 2", "true")
    testSqlApi("1 IS NULL", "false")
    testSqlApi("1 IS NOT NULL", "true")
    testSqlApi("NULLIF(1,1) IS DISTINCT FROM NULLIF(1,1)", "false")
    testSqlApi("NULLIF(1,1) IS NOT DISTINCT FROM NULLIF(1,1)", "true")
    // NOTE(review): the line below duplicates the previous assertion — confirm
    // whether a different expression was intended.
    testSqlApi("NULLIF(1,1) IS NOT DISTINCT FROM NULLIF(1,1)", "true")
    testSqlApi("12 BETWEEN 11 AND 13", "true")
    testSqlApi("12 BETWEEN ASYMMETRIC 13 AND 11", "false")
    testSqlApi("12 BETWEEN SYMMETRIC 13 AND 11", "true")
    testSqlApi("12 NOT BETWEEN 11 AND 13", "false")
    testSqlApi("12 NOT BETWEEN ASYMMETRIC 13 AND 11", "true")
    testSqlApi("12 NOT BETWEEN SYMMETRIC 13 AND 11", "false")
    testSqlApi("'TEST' LIKE '%EST'", "true")
    //testSqlApi("'%EST' LIKE '.%EST' ESCAPE '.'", "true") // TODO
    testSqlApi("'TEST' NOT LIKE '%EST'", "false")
    //testSqlApi("'%EST' NOT LIKE '.%EST' ESCAPE '.'", "false") // TODO
    testSqlApi("'TEST' SIMILAR TO '.EST'", "true")
    //testSqlApi("'TEST' SIMILAR TO ':.EST' ESCAPE ':'", "true") // TODO
    testSqlApi("'TEST' NOT SIMILAR TO '.EST'", "false")
    //testSqlApi("'TEST' NOT SIMILAR TO ':.EST' ESCAPE ':'", "false") // TODO
    testSqlApi("'TEST' IN ('west', 'TEST', 'rest')", "true")
    testSqlApi("'TEST' IN ('west', 'rest')", "false")
    testSqlApi("'TEST' NOT IN ('west', 'TEST', 'rest')", "false")
    testSqlApi("'TEST' NOT IN ('west', 'rest')", "true")

    // sub-query functions are not listed here
  }

  @Test
  def testLogicalFunctions(): Unit = {
    testSqlApi("TRUE OR FALSE", "true")
    testSqlApi("TRUE AND FALSE", "false")
    testSqlApi("NOT TRUE", "false")
    testSqlApi("TRUE IS FALSE", "false")
    testSqlApi("TRUE IS NOT FALSE", "true")
    testSqlApi("TRUE IS TRUE", "true")
    testSqlApi("TRUE IS NOT TRUE", "false")
    // NULLIF(TRUE,TRUE) yields NULL, i.e. the boolean UNKNOWN.
    testSqlApi("NULLIF(TRUE,TRUE) IS UNKNOWN", "true")
    testSqlApi("NULLIF(TRUE,TRUE) IS NOT UNKNOWN", "false")
  }

  @Test
  def testArithmeticFunctions(): Unit = {
    testSqlApi("+5", "5")
    testSqlApi("-5", "-5")
    testSqlApi("5+5", "10")
    testSqlApi("5-5", "0")
    testSqlApi("5*5", "25")
    testSqlApi("5/5", "1")
    testSqlApi("POWER(5, 5)", "3125.0")
    testSqlApi("ABS(-5)", "5")
    testSqlApi("MOD(-26, 5)", "-1")
    testSqlApi("SQRT(4)", "2.0")
    testSqlApi("LN(1)", "0.0")
    testSqlApi("LOG10(1)", "0.0")
    testSqlApi("EXP(0)", "1.0")
    testSqlApi("CEIL(2.5)", "3")
    testSqlApi("CEILING(2.5)", "3")
    testSqlApi("FLOOR(2.5)", "2")
    testSqlApi("SIN(2.5)", "0.5984721441039564")
    testSqlApi("COS(2.5)", "-0.8011436155469337")
    testSqlApi("TAN(2.5)", "-0.7470222972386603")
    testSqlApi("COT(2.5)", "-1.3386481283041514")
    testSqlApi("ASIN(0.5)", "0.5235987755982989")
    testSqlApi("ACOS(0.5)", "1.0471975511965979")
    testSqlApi("ATAN(0.5)", "0.4636476090008061")
    testSqlApi("DEGREES(0.5)", "28.64788975654116")
    testSqlApi("RADIANS(0.5)", "0.008726646259971648")
    testSqlApi("SIGN(-1.1)", "-1")
    testSqlApi("ROUND(-12.345, 2)", "-12.35")
    testSqlApi("PI", "3.141592653589793")
  }

  @Test
  def testStringFunctions(): Unit = {
    testSqlApi("'test' || 'string'", "teststring")
    testSqlApi("CHAR_LENGTH('string')", "6")
    testSqlApi("CHARACTER_LENGTH('string')", "6")
    testSqlApi("UPPER('string')", "STRING")
    testSqlApi("LOWER('STRING')", "string")
    testSqlApi("POSITION('STR' IN 'STRING')", "1")
    testSqlApi("TRIM(BOTH ' STRING ')", "STRING")
    testSqlApi("TRIM(LEADING 'x' FROM 'xxxxSTRINGxxxx')", "STRINGxxxx")
    testSqlApi("TRIM(TRAILING 'x' FROM 'xxxxSTRINGxxxx')", "xxxxSTRING")
    testSqlApi(
      "OVERLAY('This is a old string' PLACING 'new' FROM 11 FOR 3)",
      "This is a new string")
    testSqlApi("SUBSTRING('hello world', 2)", "ello world")
    testSqlApi("SUBSTRING('hello world', 2, 3)", "ell")
    testSqlApi("INITCAP('hello world')", "Hello World")
  }

  @Test
  def testConditionalFunctions(): Unit = {
    testSqlApi("CASE 2 WHEN 1, 2 THEN 2 ELSE 3 END", "2")
    testSqlApi("CASE WHEN 1 = 2 THEN 2 WHEN 1 = 1 THEN 3 ELSE 3 END", "3")
    testSqlApi("NULLIF(1, 1)", "null")
    testSqlApi("COALESCE(NULL, 5)", "5")
  }

  @Test
  def testTypeConversionFunctions(): Unit = {
    testSqlApi("CAST(2 AS DOUBLE)", "2.0")
  }

  @Test
  def testValueConstructorFunctions(): Unit = {
    // TODO we need a special code path that flattens ROW types
    // testSqlApi("ROW('hello world', 12)", "hello world") // test base only returns field 0
    // testSqlApi("('hello world', 12)", "hello world") // test base only returns field 0
    testSqlApi("ARRAY[TRUE, FALSE][2]", "false")
    testSqlApi("ARRAY[TRUE, TRUE]", "[true, true]")
  }

  @Test
  def testDateTimeFunctions(): Unit = {
    testSqlApi("DATE '1990-10-14'", "1990-10-14")
    testSqlApi("TIME '12:12:12'", "12:12:12")
    testSqlApi("TIMESTAMP '1990-10-14 12:12:12.123'", "1990-10-14 12:12:12.123")
    testSqlApi("INTERVAL '10 00:00:00.004' DAY TO SECOND", "+10 00:00:00.004")
    testSqlApi("INTERVAL '10 00:12' DAY TO MINUTE", "+10 00:12:00.000")
    testSqlApi("INTERVAL '2-10' YEAR TO MONTH", "+2-10")
    testSqlApi("EXTRACT(DAY FROM DATE '1990-12-01')", "1")
    testSqlApi("EXTRACT(DAY FROM INTERVAL '19 12:10:10.123' DAY TO SECOND(3))", "19")
    testSqlApi("FLOOR(TIME '12:44:31' TO MINUTE)", "12:44:00")
    testSqlApi("CEIL(TIME '12:44:31' TO MINUTE)", "12:45:00")
    testSqlApi("QUARTER(DATE '2016-04-12')", "2")
    testSqlApi(
      "(TIME '2:55:00', INTERVAL '1' HOUR) OVERLAPS (TIME '3:30:00', INTERVAL '2' HOUR)",
      "true")
  }

  @Test
  def testArrayFunctions(): Unit = {
    testSqlApi("CARDINALITY(ARRAY[TRUE, TRUE, FALSE])", "3")
    testSqlApi("ELEMENT(ARRAY['HELLO WORLD'])", "HELLO WORLD")
  }

  // The expressions above are all literal-based, so an empty input row suffices.
  override def testData: Any = new Row(0)

  override def typeInfo: TypeInformation[Any] =
    new RowTypeInfo().asInstanceOf[TypeInformation[Any]]
}
| hongyuhong/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/SqlExpressionTest.scala | Scala | apache-2.0 | 7,647 |
package com.twitter.zipkin.collector
import com.twitter.conversions.time._
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.util.{Await, Future}
import org.scalatest._
/**
* Tests the BlockingItemQueue to make sure that it can store and consume elements even when adding
* more elements than what its initial capacity is
*/
class BlockingItemQueueTest extends FunSuite {

  // The queued item carries no payload; only the queue mechanics matter here.
  val Item = ()

  // Adds `items` units to the queue; resolves to true only when every single
  // add completed with a Return (i.e. none was rejected by the queue).
  def fill(queue: BlockingItemQueue[Unit, Unit], items: Int): Future[Boolean] = {
    val results = (0 until items) map { _ =>
      queue.add(Item) transform { e => Future.value(e) }
    }
    Future.collect(results).map(_.forall(_.isReturn))
  }

  // NOTE: timing-sensitive test — the worker sleeps ~100ms per item, so the
  // queue (capacity 10) is guaranteed to fill up before it drains.
  test("Sleeps on a max queue and waits for the worker to drain") {
    val stats:InMemoryStatsReceiver = new InMemoryStatsReceiver()
    val queue = new BlockingItemQueue[Unit, Unit](10, 1, fallingBehindWorker, 100.millis,
      100.millis, stats)

    // Add 11 and not 10 because the first one that's going to be added will be consumed right away
    assert(Await.result(fill(queue, 11)))
    // The 12th add must block until the slow worker frees a slot, then succeed.
    assert(Await.ready(queue.add(Item)).poll.get.isReturn)
    Await.ready(queue.close())

    assert(stats.counter("queueFull").apply() >= 1)
    assert(stats.counter("successes").apply() == 12)
    assert(queue.size() == 0)
  }

  // Simulates a consumer that falls behind by sleeping ~100ms per item.
  def fallingBehindWorker(param: Unit): Future[Unit] = {
    Future { Thread.sleep(100)
      param
    }
  }
}
| zhoffice/zipkin | zipkin-collector/src/test/scala/com/twitter/zipkin/collector/BlockingItemQueueTest.scala | Scala | apache-2.0 | 1,462 |
package gitbucket.core.util
import gitbucket.core.api.JsonFormat
import gitbucket.core.controller.Context
import gitbucket.core.servlet.Database
import javax.servlet.http.{HttpSession, HttpServletRequest}
import scala.util.matching.Regex
import scala.util.control.Exception._
import slick.jdbc.JdbcBackend
/**
 * Provides some usable implicit conversions.
 */
object Implicits {

  // Convert to slick session.
  implicit def request2Session(implicit request: HttpServletRequest): JdbcBackend#Session = Database.getSession(request)

  implicit def context2ApiJsonFormatContext(implicit context: Context): JsonFormat.Context = JsonFormat.Context(context.baseUrl)

  implicit class RichSeq[A](seq: Seq[A]) {

    /**
     * Splits the sequence into consecutive runs: each run starts at an element
     * `x` and extends over the following elements while `condition(x, elem)`
     * holds.
     */
    def splitWith(condition: (A, A) => Boolean): Seq[Seq[A]] = split(seq)(condition)

    @scala.annotation.tailrec
    private def split[A](list: Seq[A], result: Seq[Seq[A]] = Nil)(condition: (A, A) => Boolean): Seq[Seq[A]] =
      list match {
        case x :: xs => {
          xs.span(condition(x, _)) match {
            case (matched, remained) => split(remained, result :+ (x :: matched))(condition)
          }
        }
        case Nil => result
      }
  }

  implicit class RichString(value: String){

    /**
     * Replaces every match of `regex` in the string. When `replace` returns
     * None the original matched text is kept unchanged.
     */
    def replaceBy(regex: Regex)(replace: Regex.MatchData => Option[String]): String = {
      val sb = new StringBuilder()
      var i = 0
      regex.findAllIn(value).matchData.foreach { m =>
        sb.append(value.substring(i, m.start))
        i = m.end
        replace(m) match {
          case Some(s) => sb.append(s)
          case None => sb.append(m.matched)
        }
      }
      if(i < value.length){
        sb.append(value.substring(i))
      }
      sb.toString
    }

    /** Safe Integer.parseInt: returns None instead of NumberFormatException. */
    def toIntOpt: Option[Int] = catching(classOf[NumberFormatException]) opt {
      Integer.parseInt(value)
    }
  }

  implicit class RichRequest(request: HttpServletRequest){

    // Path segments relative to the context path; the API prefix is stripped
    // so API and HTML requests resolve to the same repository paths.
    def paths: Array[String] = (request.getRequestURI.substring(request.getContextPath.length + 1) match{
      case path if path.startsWith("api/v3/repos/") => path.substring(13/* "/api/v3/repos".length */)
      case path => path
    }).split("/")

    def hasQueryString: Boolean = request.getQueryString != null

    def hasAttribute(name: String): Boolean = request.getAttribute(name) != null

  }

  implicit class RichSession(session: HttpSession){

    /** Stores the value in the session and returns the same value. */
    def putAndGet[T](key: String, value: T): T = {
      session.setAttribute(key, value)
      value
    }

    /**
     * Reads the attribute and, when it exists, removes it from the session so
     * it is consumed at most once (flash-style semantics).
     */
    def getAndRemove[T](key: String): Option[T] = {
      val value = session.getAttribute(key).asInstanceOf[T]
      // Fix: remove the attribute when it IS present. The previous check
      // (`value == null`) removed nothing and left consumed values behind.
      if(value != null){
        session.removeAttribute(key)
      }
      Option(value)
    }
  }

}
| skohar/gitbucket | src/main/scala/gitbucket/core/util/Implicits.scala | Scala | apache-2.0 | 2,676 |
package lila.round
import scala.concurrent.duration._
import scala.math
import play.api.libs.json._
import lila.common.Maths.truncateAt
import lila.common.PimpedJson._
import lila.game.JsonView._
import lila.game.{ Pov, Game, PerfPicker, Source, GameRepo, CorrespondenceClock }
import lila.pref.Pref
import lila.user.{ User, UserRepo }
import chess.format.Forsyth
import chess.{ Color, Clock }
import actorApi.SocketStatus
// Builds the JSON payloads for the round pages: one shape for the player,
// one for spectators, one for the analysis board. Left byte-identical except
// for comments; the field set is part of the client contract.
final class JsonView(
    chatApi: lila.chat.ChatApi,
    noteApi: NoteApi,
    userJsonView: lila.user.JsonView,
    getSocketStatus: String => Fu[SocketStatus],
    canTakeback: Game => Fu[Boolean],
    baseAnimationDuration: Duration,
    moretimeSeconds: Int) {

  import JsonView._

  // Per-color check counts only exist for the Three-check variant.
  private def checkCount(game: Game, color: Color) =
    (game.variant == chess.variant.ThreeCheck) option game.checkCount(color)

  // Payload for a participating player: includes private flags such as
  // draw/rematch offers, takeback proposals and the player's preferences.
  def playerJson(
    pov: Pov,
    pref: Pref,
    apiVersion: Int,
    playerUser: Option[User],
    initialFen: Option[String],
    withBlurs: Boolean): Fu[JsObject] =
    getSocketStatus(pov.game.id) zip
      (pov.opponent.userId ?? UserRepo.byId) zip
      canTakeback(pov.game) zip
      getPlayerChat(pov.game, playerUser) map {
        case (((socket, opponentUser), takebackable), chat) =>
          import pov._
          Json.obj(
            "game" -> gameJson(game, initialFen),
            "clock" -> game.clock.map(clockJson),
            "correspondence" -> game.correspondenceClock,
            "player" -> Json.obj(
              "id" -> playerId,
              "color" -> player.color.name,
              "version" -> socket.version,
              "spectator" -> false,
              "user" -> playerUser.map { userJsonView(_, game.perfType) },
              "rating" -> player.rating,
              "ratingDiff" -> player.ratingDiff,
              "provisional" -> player.provisional.option(true),
              "offeringRematch" -> player.isOfferingRematch.option(true),
              "offeringDraw" -> player.isOfferingDraw.option(true),
              "proposingTakeback" -> player.isProposingTakeback.option(true),
              "onGame" -> (player.isAi || socket.onGame(player.color)),
              "checks" -> checkCount(game, player.color),
              "hold" -> (withBlurs option hold(player)),
              "blurs" -> (withBlurs option blurs(game, player))
            ).noNull,
            "opponent" -> Json.obj(
              "color" -> opponent.color.name,
              "ai" -> opponent.aiLevel,
              "user" -> opponentUser.map { userJsonView(_, game.perfType) },
              "rating" -> opponent.rating,
              "ratingDiff" -> opponent.ratingDiff,
              "provisional" -> opponent.provisional.option(true),
              "offeringRematch" -> opponent.isOfferingRematch.option(true),
              "offeringDraw" -> opponent.isOfferingDraw.option(true),
              "proposingTakeback" -> opponent.isProposingTakeback.option(true),
              "onGame" -> (opponent.isAi || socket.onGame(opponent.color)),
              "isGone" -> (!opponent.isAi && socket.isGone(opponent.color)),
              "checks" -> checkCount(game, opponent.color),
              "hold" -> (withBlurs option hold(opponent)),
              "blurs" -> (withBlurs option blurs(game, opponent))
            ).noNull,
            "url" -> Json.obj(
              "socket" -> s"/$fullId/socket/v$apiVersion",
              "round" -> s"/$fullId"
            ),
            "pref" -> Json.obj(
              "blindfold" -> pref.isBlindfold,
              "animationDuration" -> animationDuration(pov, pref),
              "highlight" -> (pref.highlight || pref.isBlindfold),
              "destination" -> (pref.destination && !pref.isBlindfold),
              "coords" -> pref.coords,
              "replay" -> pref.replay,
              "autoQueen" -> (pov.game.variant == chess.variant.Antichess).fold(Pref.AutoQueen.NEVER, pref.autoQueen),
              "clockTenths" -> pref.clockTenths,
              "clockBar" -> pref.clockBar,
              "clockSound" -> pref.clockSound,
              "enablePremove" -> pref.premove,
              "showCaptured" -> pref.captured,
              "submitMove" -> {
                import Pref.SubmitMove._
                // Move confirmation is never offered against an AI.
                pref.submitMove match {
                  case _ if game.hasAi => false
                  case ALWAYS => true
                  case CORRESPONDENCE_UNLIMITED if game.isCorrespondence => true
                  case CORRESPONDENCE_ONLY if game.hasCorrespondenceClock => true
                  case _ => false
                }
              },
              "confirmResign" -> (pref.confirmResign == Pref.ConfirmResign.YES).option(true)),
            "chat" -> chat.map { c =>
              JsArray(c.lines map {
                case lila.chat.UserLine(username, text, _) => Json.obj(
                  "u" -> username,
                  "t" -> text)
                case lila.chat.PlayerLine(color, text) => Json.obj(
                  "c" -> color.name,
                  "t" -> text)
              })
            },
            "possibleMoves" -> possibleMoves(pov),
            "possibleDrops" -> possibleDrops(pov),
            "takebackable" -> takebackable,
            "crazyhouse" -> pov.game.crazyData).noNull
      }

  // Payload for a spectator (or TV watcher): no private flags; chat is the
  // separate watcher chat keyed by "<gameId>/w".
  def watcherJson(
    pov: Pov,
    pref: Pref,
    apiVersion: Int,
    user: Option[User],
    tv: Option[OnTv],
    withBlurs: Boolean,
    initialFen: Option[String] = None,
    withMoveTimes: Boolean) =
    getSocketStatus(pov.game.id) zip
      getWatcherChat(pov.game, user) zip
      UserRepo.pair(pov.player.userId, pov.opponent.userId) map {
        case ((socket, chat), (playerUser, opponentUser)) =>
          import pov._
          Json.obj(
            "game" -> {
              gameJson(game, initialFen) ++ Json.obj(
                "moveTimes" -> withMoveTimes.option(game.moveTimes),
                "opening" -> game.opening,
                "joinable" -> game.joinable,
                "importedBy" -> game.pgnImport.flatMap(_.user)).noNull
            },
            "clock" -> game.clock.map(clockJson),
            "correspondence" -> game.correspondenceClock,
            "player" -> Json.obj(
              "color" -> color.name,
              "version" -> socket.version,
              "spectator" -> true,
              "ai" -> player.aiLevel,
              "user" -> playerUser.map { userJsonView(_, game.perfType) },
              "name" -> player.name,
              "rating" -> player.rating,
              "ratingDiff" -> player.ratingDiff,
              "provisional" -> player.provisional.option(true),
              "onGame" -> (player.isAi || socket.onGame(player.color)),
              "checks" -> checkCount(game, player.color),
              "berserk" -> player.berserk.option(true),
              "hold" -> (withBlurs option hold(player)),
              "blurs" -> (withBlurs option blurs(game, player))
            ).noNull,
            "opponent" -> Json.obj(
              "color" -> opponent.color.name,
              "ai" -> opponent.aiLevel,
              "user" -> opponentUser.map { userJsonView(_, game.perfType) },
              "name" -> opponent.name,
              "rating" -> opponent.rating,
              "ratingDiff" -> opponent.ratingDiff,
              "provisional" -> opponent.provisional.option(true),
              "onGame" -> (opponent.isAi || socket.onGame(opponent.color)),
              "checks" -> checkCount(game, opponent.color),
              "berserk" -> opponent.berserk.option(true),
              "hold" -> (withBlurs option hold(opponent)),
              "blurs" -> (withBlurs option blurs(game, opponent))
            ).noNull,
            "orientation" -> pov.color.name,
            "url" -> Json.obj(
              "socket" -> s"/$gameId/${color.name}/socket",
              "round" -> s"/$gameId/${color.name}"
            ),
            "pref" -> Json.obj(
              "animationDuration" -> animationDuration(pov, pref),
              "highlight" -> pref.highlight,
              "coords" -> pref.coords,
              "replay" -> pref.replay,
              "clockTenths" -> pref.clockTenths,
              "clockBar" -> pref.clockBar,
              "showCaptured" -> pref.captured
            ),
            "tv" -> tv.map { onTv =>
              Json.obj("channel" -> onTv.channel, "flip" -> onTv.flip)
            },
            "chat" -> chat.map { c =>
              JsArray(c.lines map {
                case lila.chat.UserLine(username, text, _) => Json.obj(
                  "u" -> username,
                  "t" -> text)
              })
            }
          ).noNull
      }

  // Payload for the analysis board; the initial FEN is only fetched when the
  // game actually has moves.
  def userAnalysisJson(pov: Pov, pref: Pref, orientation: chess.Color, owner: Boolean) =
    (pov.game.pgnMoves.nonEmpty ?? GameRepo.initialFen(pov.game)) map { initialFen =>
      import pov._
      val fen = Forsyth >> game.toChess
      Json.obj(
        "game" -> Json.obj(
          "id" -> gameId,
          "variant" -> game.variant,
          "opening" -> game.opening,
          "initialFen" -> {
            if (pov.game.pgnMoves.isEmpty) fen
            else (initialFen | chess.format.Forsyth.initial)
          },
          "fen" -> fen,
          "turns" -> game.turns,
          "player" -> game.turnColor.name,
          "status" -> game.status),
        "player" -> Json.obj(
          "id" -> owner.option(pov.playerId),
          "color" -> color.name
        ),
        "opponent" -> Json.obj(
          "color" -> opponent.color.name
        ),
        "orientation" -> orientation.name,
        "pref" -> Json.obj(
          "animationDuration" -> animationDuration(pov, pref),
          "highlight" -> pref.highlight,
          "destination" -> pref.destination,
          "coords" -> pref.coords
        ),
        "path" -> pov.game.turns,
        "userAnalysis" -> true)
    }

  // Common "game" object shared by player and watcher payloads.
  private def gameJson(game: Game, initialFen: Option[String]) = Json.obj(
    "id" -> game.id,
    "variant" -> game.variant,
    "speed" -> game.speed.key,
    "perf" -> PerfPicker.key(game),
    "rated" -> game.rated,
    "initialFen" -> (initialFen | chess.format.Forsyth.initial),
    "fen" -> (Forsyth >> game.toChess),
    "player" -> game.turnColor.name,
    "winner" -> game.winnerColor.map(_.name),
    "turns" -> game.turns,
    "startedAtTurn" -> game.startedAtTurn,
    "lastMove" -> game.castleLastMoveTime.lastMoveString,
    "threefold" -> game.toChessHistory.threefoldRepetition,
    "check" -> game.check.map(_.key),
    "rematch" -> game.next,
    "source" -> game.source.map(sourceJson),
    "status" -> game.status,
    "boosted" -> game.boosted.option(true),
    "tournamentId" -> game.tournamentId).noNull

  // Blur stats are only exposed above a 30% threshold.
  private def blurs(game: Game, player: lila.game.Player) = {
    val percent = game.playerBlurPercent(player.color)
    (percent > 30) option Json.obj(
      "nb" -> player.blurs,
      "percent" -> percent
    )
  }

  private def hold(player: lila.game.Player) = player.holdAlert map { h =>
    Json.obj(
      "ply" -> h.ply,
      "mean" -> h.mean,
      "sd" -> h.sd)
  }

  private def getPlayerChat(game: Game, forUser: Option[User]): Fu[Option[lila.chat.MixedChat]] =
    game.hasChat optionFu {
      chatApi.playerChat find game.id map (_ forUser forUser)
    }

  // Watcher chat lives under its own "<gameId>/w" chat id.
  private def getWatcherChat(game: Game, forUser: Option[User]): Fu[Option[lila.chat.UserChat]] =
    forUser ?? { user =>
      chatApi.userChat find s"${game.id}/w" map (_ forUser user.some) map (_.some)
    }

  // NOTE(review): appears unused within this class — candidate for removal.
  private def getUsers(game: Game) = UserRepo.pair(
    game.whitePlayer.userId,
    game.blackPlayer.userId)

  private def sourceJson(source: Source) = source.name

  // Clock JSON plus the configured "moretime" increment granted per click.
  private def clockJson(clock: Clock): JsObject =
    clockWriter.writes(clock) + ("moretime" -> JsNumber(moretimeSeconds))

  private def possibleMoves(pov: Pov) = (pov.game playableBy pov.player) option {
    pov.game.toChess.situation.destinations map {
      case (from, dests) => from.key -> dests.mkString
    }
  }

  private def possibleDrops(pov: Pov) = (pov.game playableBy pov.player) ?? {
    pov.game.toChess.situation.drops map { drops =>
      JsString(drops.map(_.key).mkString)
    }
  }

  // Maps the user's animation preference (0-3) to a duration multiplier.
  private def animationFactor(pref: Pref): Float = pref.animation match {
    case 0 => 0
    case 1 => 0.5f
    case 2 => 1
    case 3 => 2
    case _ => 1
  }

  // Finished games always animate at full factor; live games scale the
  // animation with the estimated total game time.
  private def animationDuration(pov: Pov, pref: Pref) = math.round {
    animationFactor(pref) * baseAnimationDuration.toMillis * pov.game.finished.fold(
      1,
      math.max(0, math.min(1.2, ((pov.game.estimateTotalTime - 60) / 60) * 0.2))
    )
  }
}
object JsonView {

  // Implicit play-json writers shared by the payload builders above.

  implicit val variantWriter: OWrites[chess.variant.Variant] = OWrites { v =>
    Json.obj(
      "key" -> v.key,
      "name" -> v.name,
      "short" -> v.shortName,
      "title" -> v.title)
  }

  implicit val statusWriter: OWrites[chess.Status] = OWrites { s =>
    Json.obj(
      "id" -> s.id,
      "name" -> s.name)
  }

  // Remaining times are truncated to two decimal places for the client.
  implicit val clockWriter: OWrites[Clock] = OWrites { c =>
    Json.obj(
      "running" -> c.isRunning,
      "initial" -> c.limit,
      "increment" -> c.increment,
      "white" -> truncateAt(c.remainingTime(Color.White), 2),
      "black" -> truncateAt(c.remainingTime(Color.Black), 2),
      "emerg" -> c.emergTime)
  }

  implicit val correspondenceWriter: OWrites[CorrespondenceClock] = OWrites { c =>
    Json.obj(
      "daysPerTurn" -> c.daysPerTurn,
      "increment" -> c.increment,
      "white" -> c.whiteTime,
      "black" -> c.blackTime,
      "emerg" -> c.emerg)
  }

  implicit val openingWriter: OWrites[chess.Opening] = OWrites { o =>
    Json.obj(
      "code" -> o.code,
      "name" -> o.name,
      "size" -> o.size
    )
  }
}
| JimmyMow/lila | modules/round/src/main/JsonView.scala | Scala | mit | 13,672 |
package fos
abstract class Expr
case class Var() extends Expr

object Analyzer {
  // Compiler regression test (t0301): matching against a selection from a
  // tuple-typed value (`cls._2`). Do not "fix" the shape of this match —
  // the unusual pattern is the point of the test case.
  def substitution(expr: Expr, cls: (Var,Var)): Expr =
    expr match {
      case cls._2 => cls._1 // source of the error
      case _ => expr
    }
}
| yusuke2255/dotty | tests/pos/t0301.scala | Scala | bsd-3-clause | 235 |
package de.tu_berlin.formic.example
import java.time.Instant
import akka.actor.ActorSystem
import org.openqa.selenium.chrome.ChromeDriver
import org.scalatest.selenium.WebBrowser
import org.scalatest.time.{Seconds, Span}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
/**
 * End-to-end browser test: drives two Chrome instances against a locally
 * started Formic server and checks collaborative string/tree editing.
 *
 * @author Ronny Bräunlich
 */
class WebSiteSpec extends FlatSpec
  with Matchers
  with WebBrowser
  with BeforeAndAfterAll
  with PersistenceCleanup {

  //val service = new ChromeDriverService.Builder().withVerbose(true).build()
  //implicit val webDriver = new ChromeDriver(service)
  implicit val webDriver = new ChromeDriver()

  val host = "http://localhost:8080"

  var serverThread: ServerThread = _

  override def beforeAll(): Unit = {
    serverThread = new ServerThread
    serverThread.setDaemon(true)
    // NOTE(review): Thread.run() executes on the *calling* thread; together
    // with setDaemon(true) this looks like start() was intended — confirm
    // that ServerThread.run() returns after launching the server.
    serverThread.run()
    println("Starting FormicServer for WebSiteSpec")
    implicitlyWait(Span(10, Seconds))
  }

  override def afterAll(): Unit = {
    deleteStorageLocations(serverThread.exampleServer.server.system)
    serverThread.terminate()
    close()
  }

  "The home page" should "redirect to the index page" in {
    go to host
    currentUrl should be(host + "/index")
  }

  "The creation page" should "offer a button to create a text" in {
    go to host + "/index"
    click on id("new-string-button")
  }

  "The button to create a text" should "write the name and append a text area" in {
    go to host + "/index"
    click on id("new-string-button")
    className("stringId").element.text should include("String data type with id")
    className("stringInput").element shouldNot be(null)
  }

  "A single user" should "be able to write some text" in {
    go to host + "/index"
    click on id("new-string-button")
    val inputTextArea = textArea(className("stringInput"))
    inputTextArea.underlying.sendKeys("abc")
    inputTextArea.value should be("abc")
  }

  // NOTE(review): the fixed Thread.sleep calls below make these tests
  // timing-dependent; they wait for the edits to replicate between clients.
  "A second user" should "be able to subscribe to other string" in {
    go to host + "/index"
    click on id("new-string-button")
    val inputTextArea = textArea(className("stringInput"))
    val stringId = inputTextArea.underlying.getAttribute("id")
    inputTextArea.underlying.sendKeys("abc")
    Thread.sleep(5000)
    val secondUserDriver = new ChromeDriver()
    go.to(host + "/index")(secondUserDriver)
    textField("subscribe-id")(secondUserDriver).value = stringId
    click.on("subscribe-button")(secondUserDriver)
    Thread.sleep(5000)
    textArea(stringId)(secondUserDriver).value should be("abc")
    secondUserDriver.quit()
  }

  "The creation page" should "offer a button to create a tree" in {
    go to host + "/index"
    click on id("new-tree-button")
  }

  "The button to create a tree" should "create a div containing input, buttons and a list" in {
    go to host + "/index"
    click on id("new-tree-button")
    Thread.sleep(2000)
    val treeHeadTag = tagName("div").findElement.get
    // The tree's DOM ids are derived from the head div id minus the "head" prefix.
    val treeId = treeHeadTag.attribute("id").get.replaceFirst("head", "")
    id("insert" + treeId).findElement should not be empty
    id("delete" + treeId).findElement should not be empty
    id("input" + treeId).findElement should not be empty
    Thread.sleep(10000)
    id(treeId).findElement should not be empty
    val treeTag = id(treeId).findElement.get
    treeTag.text should include("Tree data type with id " + treeId)
    id("path" + treeId).findElement should not be empty
    xpath(s"//div[@id='$treeId']/div/ul/li").findElement.get.text should be("empty")
  }

  "A single user" should "be able to modify the tree" in {
    go to host + "/index"
    click on id("new-tree-button")
    Thread.sleep(5000)
    val treeHeadTag = tagName("div").findElement.get
    val treeId = treeHeadTag.attribute("id").get.replaceFirst("head", "")
    numberField("input" + treeId).value = "2"
    click on id("insert" + treeId)
    Thread.sleep(3000)
    singleSel("path" + treeId).value should equal("0")
    xpath(s"//div[@id='$treeId']/div/ul/li").findElement.get.text should be("2")
  }

  "A second user" should "be able to subscribe to a tree" in {
    go to host + "/index"
    click on id("new-tree-button")
    Thread.sleep(5000)
    val treeHeadTag = tagName("div").findElement.get
    val treeId = treeHeadTag.attribute("id").get.replaceFirst("head", "")
    numberField("input" + treeId).value = "3"
    click on id("insert" + treeId)
    Thread.sleep(5000)
    val secondUserDriver = new ChromeDriver()
    go.to(host + "/index")(secondUserDriver)
    textField("subscribe-id")(secondUserDriver).value = treeId
    click.on("subscribe-button")(secondUserDriver)
    Thread.sleep(3000)
    xpath(s"//div[@id='$treeId']/div/ul/li").findElement(secondUserDriver).get.text should be("3")
    secondUserDriver.quit()
  }

  // Ignored: concurrent-edit scenario; insertion order is decided by user id,
  // which is why the inserted values are swapped based on the id comparison.
  "Two users" should "be able to concurrently edit the tree" ignore {
    go to host + "/index"
    click on id("new-tree-button")
    Thread.sleep(3000)
    val treeHeadTag = tagName("div").findElement.get
    val treeId = treeHeadTag.attribute("id").get.replaceFirst("head", "")
    numberField("input" + treeId).value = "1"
    click on id("insert" + treeId)
    Thread.sleep(3000)
    val secondUserDriver = new ChromeDriver()
    go.to(host + "/index")(secondUserDriver)
    textField("subscribe-id")(secondUserDriver).value = treeId
    click.on("subscribe-button")(secondUserDriver)
    Thread.sleep(3000)
    xpath(s"//div[@id='$treeId']/div/ul/li").findElement(secondUserDriver).get.text should be("1")
    val user1Id = id("userId").findElement.get.text.replace("User: ", "")
    val user2Id = id("userId").findElement(secondUserDriver).get.text.replace("User: ", "")
    val firstValue = "10"
    val secondValue = "100"
    numberField("input" + treeId).value = if (user1Id > user2Id) firstValue else secondValue
    numberField("input" + treeId)(secondUserDriver).value = if (user2Id <= user1Id) secondValue else firstValue
    val t1 = new Thread(new Runnable {
      override def run(): Unit = {
        println("Before Click1 " + Instant.now)
        click on id("insert" + treeId)
        println("After Click1 " + Instant.now)
      }
    })
    val t2 = new Thread(new Runnable {
      override def run(): Unit = {
        println("Before Click2 " + Instant.now)
        click.on("insert" + treeId)(secondUserDriver)
        println("After Click2 " + Instant.now)
      }
    })
    t1.start()
    t2.start()
    //println("Before Click1 " + Instant.now)
    //click on id("insert" + treeId)
    //println("After Click1 " + Instant.now)
    //click.on("insert" + treeId)(secondUserDriver)
    //println("After Click2 " + Instant.now)
    Thread.sleep(5000)
    val elementsUser1 = xpath(s"//div[@id='$treeId']/div/ul/li/ul/li").findAllElements.toList
    elementsUser1.head.text should be(firstValue)
    elementsUser1(1).text should be(secondValue)
    val elementsUser2 = xpath(s"//div[@id='$treeId']/div/ul/li/ul/li").findAllElements(secondUserDriver).toList
    elementsUser2.head.text should be(firstValue)
    elementsUser2(1).text should be(secondValue)
    secondUserDriver.quit()
  }
}
| rbraeunlich/formic | example/jvm/src/test/scala/de/tu_berlin/formic/example/WebSiteSpec.scala | Scala | apache-2.0 | 7,064 |
package mageknight.mana
/**
 * The six mana colours of Mage Knight, modelled as a sealed ADT so that
 * matches over [[Mana]] are exhaustiveness-checked.
 */
sealed trait Mana

// Case objects give the variants a readable toString, proper equality
// semantics and serializability, which plain objects lack.
case object Blue extends Mana
case object White extends Mana
case object Green extends Mana
case object Red extends Mana
case object Black extends Mana
case object Gold extends Mana
| fadeddata/mk | src/main/scala/Mana.scala | Scala | mit | 196 |
package at.fh.swengb.resifoAndroid
import android.app.Activity
import android.content.Intent
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import android.view.{View, Window, WindowManager}
import android.view.View.OnClickListener
import android.widget.ImageView
import at.fh.swengb.resifoAndroid.activities.list.{Credits, ListActivity}
//import at.fh.swengb.resifoAndroid.activities.meldezettelEdit.TEST
import at.fh.swengb.resifoAndroid.db.DBHelper
/**
* Created by laszlobalo on 31.12.16.
*/
class MainActivity extends AppCompatActivity {

  // Database helper, created eagerly with this activity as its context.
  val db = new DBHelper(this)

  /** Wires the two image buttons on the main screen to their target activities. */
  override protected def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_main)
    // db.upgradeDB

    val next = findViewById(R.id.nxtButton).asInstanceOf[ImageView]
    val info = findViewById(R.id.infButton).asInstanceOf[ImageView]

    // "Next" navigates to the list screen.
    next.setOnClickListener(new OnClickListener {
      override def onClick(v: View): Unit =
        startActivity(new Intent(getApplicationContext, classOf[ListActivity]))
    })
    // "Info" shows the credits screen.
    info.setOnClickListener(new OnClickListener {
      override def onClick(v: View): Unit =
        startActivity(new Intent(getApplicationContext, classOf[Credits]))
    })
  }
} | Gulasch4ever/resifo-android | app/src/main/scala/at/fh/swengb/resifoAndroid/MainActivity.scala | Scala | gpl-3.0 | 1,322 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
/**
* This is a combinator on semigroups, after you do the plus, you transform B with a fold function
* This will not be valid for all fold functions. You need to prove that it is still associative.
*
* Clearly only values of (a,b) are valid if fold(a,b) == b, so keep that in mind.
*
* I have not yet found a sufficient condition on (A,B) => B that makes it correct
* Clearly a (trivial) constant function {(l,r) => r} works.
* Also, if B is List[T], and (l:A,r:List[T]) = r.sortBy(fn(l))
* this works as well (due to the associativity on A, and the fact that the list never loses data).
*
* For approximate lists (like top-K applications) this might work (or be close enough to associative
* that for approximation algorithms it is fine), and in fact, that is the main motivation of this code:
* Produce some ordering in A, and use it to do sorted-topK on the list in B.
*
* Seems like an open topic here.... you are obliged to think on your own about this.
*/
class SemigroupCombinator[A: Semigroup, B: Semigroup](fold: (A, B) => B) extends Semigroup[(A, B)] {
  /**
   * Combines the A components first, then combines the B components and
   * re-folds them with the freshly combined A value.
   */
  def plus(left: (A, B), right: (A, B)) = {
    val (leftA, leftB) = left
    val (rightA, rightB) = right
    val combinedA = Semigroup.plus(leftA, rightA)
    (combinedA, fold(combinedA, Semigroup.plus(leftB, rightB)))
  }
}
// Monoid variant of the combinator: zero is the pair of component zeros.
// Per the file header, this is only lawful when fold(zeroA, zeroB) == zeroB.
class MonoidCombinator[A: Monoid, B: Monoid](fold: (A, B) => B)
    extends SemigroupCombinator[A, B](fold)
    with Monoid[(A, B)] {
  def zero = (Monoid.zero[A], Monoid.zero[B])
}
| nevillelyh/algebird | algebird-core/src/main/scala/com/twitter/algebird/Combinator.scala | Scala | apache-2.0 | 2,010 |
import scala.swing._
import java.awt.Dimension
/**
 * Minimal scala-swing demo window.
 * (Would extend SimpleGUIApplication in Scala <= 2.8.)
 */
object HelloSwingWorldApp extends SimpleSwingApplication {

  def top = new MainFrame {
    title = "Swing in Scala"
    preferredSize = new Dimension(320, 100)
    // A single greeting label is the frame's only content.
    val greeting = new Label {
      text = "Hello, Swing programming in Scala!"
    }
    contents = greeting
  }
}
| michaelflowersky/geekingspree | swing/HelloSwingWorldApp.scala | Scala | mit | 380 |
package pimpathon
import org.junit.Test

import scala.collection.{mutable ⇒ M}

import pimpathon.any._
import pimpathon.function.Predicate
import pimpathon.stream._
import pimpathon.util._
class StreamTest {
  // Fix: every lambda arrow in this file had been mangled to the mojibake
  // character "β"; restored to "⇒" so the file compiles again.

  /** `stream.cond` must not evaluate the elements when the condition is false. */
  @Test def cond(): Unit = {
    stream.cond[Int](cond = false, util.goBoom) === Stream.empty[Int]
    stream.cond(cond = true, Stream(1, 2, 3)) === Stream(1, 2, 3)
  }

  @Test def continuallyWhile(): Unit = {
    stream.continuallyWhile(1)(_ ⇒ false) === Stream.empty[Int]
    stream.continuallyWhile(1)(_ ⇒ true).take(1000).toList === List.fill(1000)(1)
    M.Stack[Int](1, 2, 3).tap(ints ⇒ stream.continuallyWhile(ints.pop())(_ < 3).toList === List(1, 2))
  }

  @Test def uncons(): Unit = on(Stream.empty[Int], Stream(1, 2, 3))
    .calling(_.uncons("empty", s ⇒ s"size: ${s.size}")).produces("empty", "size: 3")

  @Test def unconsC(): Unit = on(Stream.empty[Int], Stream(1, 2, 3))
    .calling(_.unconsC("empty", h ⇒ t ⇒ s"size: ${1 + t.size}")).produces("empty", "size: 3")

  @Test def tailOption(): Unit = on(Stream.empty[Int], Stream(0), Stream(0, 1)).calling(_.tailOption)
    .produces(None, Some(Stream.empty[Int]), Some(Stream(1)))

  @Test def lazyScanLeft(): Unit =
    blockingInts(start = 1, end = 4).lazyScanLeft(0)(_ + _).take(4).toList === List(0, 1, 3, 6)

  @Test def reverseInits(): Unit = blockingInts(start = 1, end = 4).reverseInits.take(4).toList.map(_.toList) ===
    List(Nil, List(1), List(1, 2), List(1, 2, 3))

  // Ints counting up from `start` that would block the thread forever if the
  // element equal to `end` were ever forced — proves the ops above are lazy.
  private def blockingInts(start: Int, end: Int): Stream[Int] = blockWhen(Stream.iterate(start)(_ + 1))(_ == end)

  private def blockWhen[A](in: Stream[A])(p: Predicate[A]): Stream[A] = in.map(_.tapIf(p)(_ ⇒ block()))

  private def block() = this.synchronized(this.wait(0))
} | raymanoz/pimpathon | src/test/scala/pimpathon/stream.scala | Scala | apache-2.0 | 1,765 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.edu.exam.model
import org.openurp.base.edu.model.Teacher
import org.openurp.edu.clazz.model.Clazz
import org.beangle.data.model.LongId
import org.openurp.base.model.User
/**
 * Invigilation quota attached to a teaching clazz: how much exam-invigilation
 * duty a teacher carries for that clazz.
 * (Original comments were mojibake-encoded Chinese; translated to English —
 * field meanings inferred from names, confirm against the schema.)
 */
class InvigilationClazzQuota extends LongId {

  /** Teaching clazz this quota belongs to. */
  var clazz: Clazz = _

  /** Teacher the quota applies to. */
  var teacher: Teacher = _

  /** Weighting coefficient. */
  var ratio: Float = _

  /** Credit hours. */
  var creditHours: Float = _

  /** Number of invigilation sessions. */
  var amount: Float = _

  // Convenience constructor for the two mandatory associations.
  def this(clazz: Clazz, teacher: Teacher) = {
    this()
    this.clazz = clazz
    this.teacher = teacher
  }
}
| openurp/api | edu/src/main/scala/org/openurp/edu/exam/model/InvigilationClazzQuota.scala | Scala | lgpl-3.0 | 1,334 |
package poker.core
final class CardTest extends org.scalatest.FunSuite {
  import Card._

  // Fixtures: well-formed card strings plus several malformed families.
  private val validCards = Traversable("2H", "9C", "JD", "QS", "KH", "AH")
  private val onlySuits = Traversable("H", "C", "D", "S")
  private val onlyRanks = Traversable("2", "9", "J", "K", "Q", "A")
  private val invalidNumberOfRanksOrSuits = Traversable("", "9C8", "2HJD")
  private val invalidSuits = Traversable("1H", "2I", "0S", "RC", "BH")

  test("Card construction") {
    assert(Card("2H").rank === Rank('2'))
    assert(Card("2H").suit === Suit('H'))

    onlySuits.foreach(expectRejected)
    onlyRanks.foreach(expectRejected)
    invalidNumberOfRanksOrSuits.foreach(expectRejected)
    invalidSuits.foreach(expectRejected)

    // Every malformed string must be rejected at construction time.
    def expectRejected(s: String) = {
      intercept[IllegalArgumentException] {
        Card(s)
      }
    }
  }

  test("isValid") {
    assert(validCards.forall(isValid))
    // None of the malformed families may pass validation.
    assert(onlySuits.forall(!isValid(_)))
    assert(onlyRanks.forall(!isValid(_)))
    assert(invalidNumberOfRanksOrSuits.forall(!isValid(_)))
    assert(invalidSuits.forall(!isValid(_)))
  }

  test("compare") {
    // (left, right, expected signed rank difference)
    val expectations = Seq(
      ("3C", "4C", -1),
      ("3C", "4D", -1),
      ("AH", "JS", 3),
      ("KH", "KC", 0),
      ("TS", "2S", 8))
    expectations.foreach { case (c1, c2, res) =>
      assert(Card(c1).compare(Card(c2)) === res)
    }
  }
}
| kyuksel/poker | src/test/scala/poker/core/CardTest.scala | Scala | mit | 1,435 |
package mesosphere.marathon
package core.task.state
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.state._
import scala.jdk.CollectionConverters._
import org.apache.mesos
import scala.annotation.tailrec
/**
 * Metadata about a task's networking information.
 *
 * @param hostName the agent's hostName
 * @param hostPorts the hostPorts as taken originally from the accepted offer
 * @param ipAddresses all associated IP addresses, computed from mesosStatus
 */
case class NetworkInfo(hostName: String, hostPorts: Seq[Int], ipAddresses: Seq[mesos.Protos.NetworkInfo.IPAddress]) {

  import NetworkInfo._

  /**
   * The address this task is effectively reachable on. In host- and
   * bridge-mode networking that is the agent hostname; only when the runSpec
   * declares container-mode networking is the first Mesos-reported address
   * used instead (container mode is assumed exclusive of bridge mode).
   */
  def effectiveIpAddress(runSpec: RunSpec): Option[String] =
    if (!runSpec.networks.hasContainerNetworking) Some(hostName)
    else pickFirstIpAddressFrom(ipAddresses)

  /**
   * Generates the list of possible port assignments for the app. Assignments
   * lacking an effective address/port are only included when
   * `includeUnresolved` is true; such assignments carry `NoPort` as their
   * effective port.
   */
  def portAssignments(app: AppDefinition, includeUnresolved: Boolean): Seq[PortAssignment] =
    computePortAssignments(app, hostName, hostPorts, effectiveIpAddress(app), includeUnresolved)

  /**
   * Folds a Mesos task status into this network info, refreshing the IP
   * addresses. Returns `this` unchanged when nothing differs. Only meaningful
   * right after the task becomes running, since the reported addresses are
   * not expected to change during a task's lifetime.
   */
  def update(mesosStatus: mesos.Protos.TaskStatus): NetworkInfo = {
    val refreshed = resolveIpAddresses(mesosStatus)
    if (refreshed == ipAddresses) this
    else copy(ipAddresses = refreshed)
  }
}
object NetworkInfo extends StrictLogging {
  /**
   * Picks the single effective IP address from the addresses reported by Mesos.
   *
   * Only applicable if the app definition defines an IP address. PortDefinitions cannot be configured in addition,
   * and we currently expect that there is at most one IP address assigned.
   */
  private[state] def pickFirstIpAddressFrom(ipAddresses: Seq[mesos.Protos.NetworkInfo.IPAddress]): Option[String] = {
    // Explicitly take the ipAddress from the first given object, if available. We do not expect to receive
    // IPAddresses that do not define an ipAddress.
    ipAddresses.headOption.map { ipAddress =>
      require(ipAddress.hasIpAddress, s"$ipAddress does not define an ipAddress")
      ipAddress.getIpAddress
    }
  }
  /**
   * All IP addresses from the container status of the given Mesos task status;
   * Nil when the status carries no container status or no network infos.
   */
  def resolveIpAddresses(mesosStatus: mesos.Protos.TaskStatus): Seq[mesos.Protos.NetworkInfo.IPAddress] = {
    if (mesosStatus.hasContainerStatus && mesosStatus.getContainerStatus.getNetworkInfosCount > 0) {
      mesosStatus.getContainerStatus.getNetworkInfosList.asScala.iterator.flatMap(_.getIpAddressesList.asScala).toSeq
    } else {
      Nil
    }
  }
  /**
   * Builds the port assignments for an app from the allocated host ports and
   * the (possibly absent) effective IP address. Non-host networking goes
   * through the container's port mappings; host networking zips the app's
   * port definitions with the allocated host ports.
   */
  private def computePortAssignments(
    app: AppDefinition,
    hostName: String,
    hostPorts: Seq[Int],
    effectiveIpAddress: Option[String],
    includeUnresolved: Boolean
  ): Seq[PortAssignment] = {
    // Aligns allocated host ports with the container's declared port mappings by
    // walking both lists in lock-step: a mapping that requested a host port
    // consumes one allocated port, a mapping without a host port request does not.
    def fromPortMappings(container: Container): Seq[PortAssignment] = {
      import Container.PortMapping
      @tailrec
      def gen(ports: List[Int], mappings: List[PortMapping], assignments: List[PortAssignment]): List[PortAssignment] = {
        (ports, mappings) match {
          case (hostPort :: xs, PortMapping(containerPort, Some(_), _, _, portName, _, _) :: rs) =>
            // agent port was requested, and we strongly prefer agentIP:hostPort (legacy reasons?)
            val assignment = PortAssignment(
              portName = portName,
              effectiveIpAddress = Option(hostName),
              effectivePort = hostPort,
              hostPort = Option(hostPort),
              // See [[TaskBuilder.computeContainerInfo.boundPortMappings]] for more info.
              containerPort = if (containerPort == 0) Option(hostPort) else Option(containerPort)
            )
            gen(xs, rs, assignment :: assignments)
          case (_, mapping :: rs) if mapping.hostPort.isEmpty =>
            // no port was requested on the agent (really, this is only possible for container networking)
            val assignment = PortAssignment(
              portName = mapping.name,
              // if there's no assigned IP and we have no host port, then this container isn't reachable
              effectiveIpAddress = effectiveIpAddress,
              // just pick containerPort; we don't have an agent port to fall back on regardless,
              // of effectiveIp or hasAssignedIpAddress
              effectivePort = effectiveIpAddress.fold(PortAssignment.NoPort)(_ => mapping.containerPort),
              hostPort = None,
              containerPort = Some(mapping.containerPort)
            )
            gen(ports, rs, assignment :: assignments)
          case (Nil, Nil) =>
            // both lists exhausted together: every allocated port matched a mapping
            assignments
          case _ =>
            throw new IllegalStateException(
              s"failed to align remaining allocated host ports $ports with remaining declared port mappings $mappings in app ${app.id}"
            )
        }
      }
      // gen prepends, so reverse to restore declaration order
      gen(hostPorts.to(List), container.portMappings.to(List), Nil).reverse
    }
    // Host networking: zip declared port definitions with the allocated host ports.
    def fromPortDefinitions: Seq[PortAssignment] =
      app.portDefinitions.zip(hostPorts).map {
        case (portDefinition, hostPort) =>
          PortAssignment(
            portName = portDefinition.name,
            effectiveIpAddress = effectiveIpAddress,
            effectivePort = hostPort,
            hostPort = Some(hostPort)
          )
      }
    app.container.collect {
      case c: Container if app.networks.hasNonHostNetworking =>
        // don't return assignments that haven't yet been allocated a port
        val mappings = fromPortMappings(c)
        if (includeUnresolved) mappings else mappings.filter(_.isResolved)
    }.getOrElse(fromPortDefinitions)
  }
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/core/task/state/NetworkInfo.scala | Scala | apache-2.0 | 6,656 |
package com.asto.dop.streamprocessor.process
import com.asto.dop.streamprocessor.DOPContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{Logging, SparkContext}
object HDFSProcessor extends Serializable {
  /**
   * Appends the RDD's lines to the job's Parquet sink
   * ("<hdfs_outputAddress>messages.parquet") as a single-column DataFrame.
   */
  def save(rdd: RDD[String], context: DOPContext): Unit = {
    val sqlContext = SQLContextInstance.getInstance(rdd.sparkContext)
    import sqlContext.implicits._
    val target = s"${context.hdfs_outputAddress}messages.parquet"
    val frame = rdd.toDF()
    frame.write.mode("append").parquet(target)
  }
}
// Holds a lazily-created, driver-wide SQLContext so all batches reuse one context.
object SQLContextInstance extends Serializable with Logging {
  // @transient: the context must not be captured when enclosing closures are serialized.
  @transient private var instance: SQLContext = _
  /**
   * Returns the shared SQLContext, creating it on first use.
   * NOTE(review): not synchronized - concurrent first calls could each build a
   * context; confirm this is only ever invoked from the driver thread.
   */
  def getInstance(sparkContext: SparkContext): SQLContext = {
    if (instance == null) {
      log.info("Initialization SQLContext Instance.")
      instance = new SQLContext(sparkContext)
    }
    instance
  }
} | zj-lingxin/dop | source/stream-processor/src/main/scala/com/asto/dop/streamprocessor/process/HDFSProcessor.scala | Scala | mit | 854 |
package jp.ac.titech.cs.se.nakamura.cfgen.dao
import org.specs2.Specification
import mockit.Deencapsulation
import scala.collection.mutable
// Integration spec for JwdDao: resolves concept ids for a Japanese word and
// verifies both the expected results and that the DAO's cache is populated.
class JwdDaoSpec extends Specification {
  def is = s2"""$sequential
  εθͺγ§ζ¦εΏ΅γζ£γγεεγγγγγ $test
  """
  def test = {
    // The word below is expected to resolve to exactly these two concept ids:
    // 202960, 292951
    val word = "η θ"
    val ids = JwdDao getConceptIds word
    ids.size must_== 2
    (ids contains ConceptId(202960)) && (ids contains ConceptId(292951)) must_== true
    // After a lookup, JwdDao's private word->ids cache (read reflectively via
    // JMockit's Deencapsulation) should no longer be empty.
    val cache: mutable.HashMap[String, List[ConceptId]]= Deencapsulation.getField(JwdDao, "cache")
    cache.nonEmpty must_== true
  }
}
| satokazuma/caseframe | caseframe-gen/src/test/scala/jp/ac/titech/cs/se/nakamura/cfgen/dao/JwdDaoSpec.scala | Scala | mit | 639 |
///////////////////////////////////////////////////////////////////////////////
// TwitterPullLocationVariance.scala
//
// Copyright (C) 2012 Stephen Roller, The University of Texas at Austin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////
package opennlp.fieldspring.preprocess
import net.liftweb.json
import com.nicta.scoobi.Scoobi._
import java.io._
import java.lang.Double.isNaN
import java.text.{SimpleDateFormat, ParseException}
import math.pow
import opennlp.fieldspring.util.Twokenize
import opennlp.fieldspring.util.distances.{spheredist, SphereCoord}
/*
* This program takes, as input, files which contain one tweet
* per line in json format as directly pulled from the twitter
* api. It outputs a folder that may be used as the
* --input-corpus argument of tg-geolocate.
*/
object TwitterPullLocationVariance extends ScoobiApp {
  // Shorthands: a Tweet is a (lat, lng) pair; a Record pairs the author's
  // screen name with that location.
  type Tweet = (Double, Double)
  type Record = (String, Tweet)
def force_value(value: json.JValue): String = {
if ((value values) == null)
null
else
(value values) toString
}
def is_valid_tweet(id_r: (String, Record)): Boolean = {
// filters out invalid tweets, as well as trivial spam
val (tw_id, (a, (lat, lng))) = id_r
a != "" && tw_id != "" && !isNaN(lat) && !isNaN(lng)
}
def preferred_latlng(latlng1: (Double, Double), latlng2: (Double, Double)): (Double, Double) = {
val (lat1, lng1) = latlng1
val (lat2, lng2) = latlng2
if (!isNaN(lat1) && !isNaN(lng1))
(lat1, lng1)
else
(lat2, lng2)
}
val empty_tweet: (String, Record) = ("", ("", (Double.NaN, Double.NaN)))
  /**
   * Parses one raw Twitter-API JSON line into (tweetId, (author, (lat, lng))).
   *
   * Coordinates come from the geotag "coordinates" Point when present,
   * otherwise from the centroid of the place bounding box; missing pieces
   * become NaN. Any parse or shape failure yields `empty_tweet`.
   */
  def parse_json(line: String): (String, Record) = {
    try {
      val parsed = json.parse(line)
      val author = force_value(parsed \ "user" \ "screen_name")
      val tweet_id = force_value(parsed \ "id_str")
      // Fallback location: centroid of the place bounding box.
      // NOTE(review): this reduce swaps element order on every step, which is a
      // correct (lng,lat)->(lat,lng) sum only for exactly two corner points; for
      // the usual 4-corner Twitter bounding box the lat/lng sums get mixed up -
      // confirm the intended input shape before relying on this centroid.
      val (blat, blng) =
        try {
          val bounding_box = (parsed \ "place" \ "bounding_box" \ "coordinates" values)
            .asInstanceOf[List[List[List[Double]]]](0)
          val bounding_box_sum = bounding_box.reduce((a, b) => List(a(1) + b(1), a(0) + b(0)))
          (bounding_box_sum(0) / bounding_box.length, bounding_box_sum(1) / bounding_box.length)
        } catch {
          case npe: NullPointerException => (Double.NaN, Double.NaN)
          case cce: ClassCastException => (Double.NaN, Double.NaN)
        }
      // Preferred location: the exact Point geotag, when present.
      // GeoJSON stores [lng, lat], hence the index swap below.
      val (plat, plng) =
        if ((parsed \ "coordinates" values) == null ||
            (force_value(parsed \ "coordinates" \ "type") != "Point")) {
          (Double.NaN, Double.NaN)
        } else {
          val latlng: List[Number] =
            (parsed \ "coordinates" \ "coordinates" values).asInstanceOf[List[Number]]
          (latlng(1).doubleValue, latlng(0).doubleValue)
        }
      val (lat, lng) = preferred_latlng((plat, plng), (blat, blng))
      (tweet_id, (author, (lat, lng)))
    } catch {
      case jpe: json.JsonParser.ParseException => empty_tweet
      case npe: NullPointerException => empty_tweet
      case nfe: NumberFormatException => empty_tweet
    }
  }
def tweet_once(id_rs: (String, Iterable[Record])): Record = {
val (id, rs) = id_rs
rs.head
}
def has_latlng(r: Record): Boolean = {
val (a, (lat, lng)) = r
!isNaN(lat) && !isNaN(lng)
}
def cartesian_product[T1, T2](A: Seq[T1], B: Seq[T2]): Iterable[(T1, T2)] = {
for (a <- A; b <- B) yield (a, b)
}
  /**
   * Per-author aggregates over all tweet locations: the mean point, the mean
   * spherical distance of the points from that mean, the variance of those
   * distances, and the maximum pairwise distance between any two points.
   * Assumes at least one location per author (guaranteed by the upstream groupByKey).
   */
  def mean_variance_and_maxdistance(inpt: (String, Iterable[(Double, Double)])):
    // Author, AvgLat, AvgLng, AvgDistance, DistanceVariance, MaxDistance
    (String, Double, Double, Double, Double, Double) = {
    val (author, latlngs_i) = inpt
    val latlngs = latlngs_i.toSeq
    val lats = latlngs.map(_._1)
    val lngs = latlngs.map(_._2)
    // Mean point is the coordinate-wise average (not a true spherical centroid).
    val avgpoint = SphereCoord(lats.sum / lats.length, lngs.sum / lngs.length)
    val allpoints = latlngs.map(ll => SphereCoord(ll._1, ll._2))
    val distances = allpoints.map(spheredist(_, avgpoint))
    val avgdistance = distances.sum / distances.length
    val distancevariance = distances.map(x => pow(x - avgdistance, 2)).sum / distances.length
    // O(n^2) over all ordered pairs; self-pairs contribute distance 0 and are harmless.
    val maxdistance = cartesian_product(allpoints, allpoints)
      .map{case (a, b) => spheredist(a, b)}.max
    (author, avgpoint.lat, avgpoint.long, avgdistance, distancevariance, maxdistance)
  }
def nicely_format(r: (String, Double, Double, Double, Double, Double)): String = {
val (a, b, c, d, e, f) = r
Seq(a, b, c, d, e, f) mkString "\\t"
}
def checkpoint_str(r: Record): String = {
val (a, (lat, lng)) = r
a + "\\t" + lat + "\\t" + lng
}
def from_checkpoint_to_record(s: String): Record = {
val s_a = s.split("\\t")
(s_a(0), (s_a(1).toDouble, s_a(2).toDouble))
}
  /**
   * Scoobi job entry point.
   *
   * Expects two args: input path and output path. The first (commented-out)
   * pass deduplicated tweets into "<input>-st"; this run reads that
   * checkpoint, groups locations per author, computes the per-author
   * aggregates, and writes them as tab-separated text.
   */
  def run() {
    val (inputPath, outputPath) =
      if (args.length == 2) {
        (args(0), args(1))
      } else {
        sys.error("Expecting input and output path.")
      }
    /*
    val lines: DList[String] = TextInput.fromTextFile(inputPath)
    val values_extracted = lines.map(parse_json).filter(is_valid_tweet)
    val single_tweets = values_extracted.groupByKey.map(tweet_once)
      .filter(has_latlng)
    val checkpointed = single_tweets.map(checkpoint_str)
    persist(TextOutput.toTextFile(checkpointed, inputPath + "-st"))
    */
    val single_tweets_lines: DList[String] = TextInput.fromTextFile(inputPath + "-st")
    val single_tweets_reloaded = single_tweets_lines.map(from_checkpoint_to_record)
    val grouped_by_author = single_tweets_reloaded.groupByKey
    val averaged = grouped_by_author.map(mean_variance_and_maxdistance)
    val nicely_formatted = averaged.map(nicely_format)
    persist(TextOutput.toTextFile(nicely_formatted, outputPath))
  }
}
| utcompling/fieldspring | src/main/scala/opennlp/fieldspring/preprocess/TwitterPullLocationVariance.scala | Scala | apache-2.0 | 6,375 |
package com.twitter.finagle.exp.mysql.util
object BufferUtil {
  /**
   * Produces a hex dump of `data`, 16 bytes per row.
   *
   * Each row consists of a 47-character, space-padded column of two-digit
   * uppercase hex values, one separating space, then the printable-ASCII
   * rendering of the same bytes (non-printable bytes render as a space).
   * Every row, including the last, is terminated by "\n". An empty input
   * yields a single blank row.
   *
   * @param data   bytes to dump
   * @param output accumulator the dump is appended to; defaults to a fresh builder
   * @return the accumulated dump as a String
   */
  @scala.annotation.tailrec
  def hex(data: Seq[Byte], output: StringBuilder = new StringBuilder()): String = {
    val (begin, end) = data.splitAt(16)
    val hexline = begin.map("%02X".format(_)).mkString(" ")
    // Map printable ASCII (0x20..0x7E) to its character, everything else to a
    // space. (Fixed: the original mixed Char and String here, producing an
    // Any-typed collection; using a Char keeps the element type uniform with
    // identical output.)
    val charline = begin.map { b => if (0x20 <= b && b <= 0x7E) b.toChar else ' ' }.mkString("")
    val line = "%-47s".format(hexline) + " " + charline
    val res = output ++= line ++= "\n"
    if (end.isEmpty)
      res.toString
    else
      hex(end, res) // tail call, now compiler-checked via @tailrec
  }
}
package ingraph.compiler.cypher2gplan.builders
import java.util.concurrent.atomic.AtomicLong
import ingraph.compiler.cypher2gplan.util.{BuilderUtil, StringUtil}
import ingraph.compiler.exceptions._
import ingraph.model.expr.{EdgeLabelSet, ElementAttribute}
import ingraph.model.{expr, gplan}
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.{expressions => cExpr}
import org.slizaa.neo4j.opencypher.openCypher.RelationshipDetail
import org.slizaa.neo4j.opencypher.{openCypher => oc}
import scala.collection.mutable.ListBuffer
object AttributeBuilder {
def buildAttribute(n: oc.NodePattern): expr.VertexAttribute = {
val nls = BuilderUtil.parseToVertexLabelSet(n.getNodeLabels)
val props = LiteralBuilder.buildProperties(n.getProperties)
Option(n.getVariable) match {
case Some(nv) => expr.VertexAttribute(nv.getName, nls, props, isAnonymous = false)
case None => expr.VertexAttribute(generateUniqueName, nls, props, isAnonymous = true)
}
}
  /**
   * Builds an edge (or variable-length edge-list) attribute from a relationship pattern.
   *
   * A missing detail section yields an anonymous single edge. When the detail
   * carries a range ("*min..max"), an EdgeListAttribute is built; for a
   * non-variable-length range the lower bound is used for both min and max hops.
   */
  def buildAttribute(el: oc.RelationshipPattern): expr.AbstractEdgeAttribute = {
    Option(el.getDetail) match {
      case Some(elDetail) => {
        // Unnamed patterns get a generated name and are flagged anonymous.
        val (name, isAnon) = Option(elDetail.getVariable) match {
          case Some(ev) => (ev.getName, false)
          case None => (generateUniqueName, true)
        }
        val els = BuilderUtil.parseToEdgeLabelSet(elDetail.getRelTypeNames)
        val props = LiteralBuilder.buildProperties(elDetail.getProperties)
        Option(elDetail.getRange) match {
          case Some(r) => {
            val minHops = StringUtil.toOptionInt(r.getLower)
            val maxHops = if (r.isVariableLength) {
              StringUtil.toOptionInt(r.getUpper)
            } else {
              minHops
            }
            expr.EdgeListAttribute(name, els, props, isAnonymous = isAnon, minHops, maxHops)
          }
          case None => expr.EdgeAttribute(name, els, props, isAnonymous = isAnon)
        }
      }
      case None => expr.EdgeAttribute(generateUniqueName, isAnonymous = true)
    }
  }
  /** Unresolved attribute for "variable.property"; only the first property lookup is used. */
  def buildAttribute(e: oc.PropertyLookupExpression): UnresolvedAttribute = {
    UnresolvedAttribute(Seq(e.getLeft.asInstanceOf[oc.VariableRef].getVariableRef.getName, e.getPropertyLookups.get(0).getPropertyKeyName))
  }
  /** Unresolved attribute named after a declared variable. */
  def buildAttribute(v: oc.VariableDeclaration): UnresolvedAttribute = {
    UnresolvedAttribute(v.getName)
  }
  /** Unresolved attribute for a variable reference (delegates to its declaration). */
  def buildAttribute(vr: oc.VariableRef): UnresolvedAttribute = {
    buildAttribute(vr.getVariableRef)
  }
  // always use .getAndIncrement on this object
  private val generatedNameCounterNext = new AtomicLong
  /** Generates a process-wide unique name ("_e<n>") for anonymous vertex/edge attributes. */
  def generateUniqueName: String = {
    s"_e${generatedNameCounterNext.getAndIncrement}"
  }
  /**
   * Extract vertex and edge attributes from the pattern.
   *
   * Check if filter is a chain of Expand operators on top of a single GetVertices, and attributes are properly chained
   * and if not, ExpandChainException is thrown.
   *
   * @param pattern root of the (supposed) expand chain
   * @throws ExpandChainException when Expand/GetVertices are present but not properly chained
   * @return the target/edge attributes of each Expand plus the base GetVertices attribute, top-down
   */
  def extractAttributesFromExpandChain(pattern: gplan.GNode): ListBuffer[Expression] = {
    val relationshipVariableExpressions = ListBuffer.empty[Expression]
    // Walk the operator chain top-down; chainElem holds the attribute the next
    // (child) operator must produce for the chain to be well-formed.
    var currOp = pattern
    var chainElem: ElementAttribute = null
    while (currOp != null) {
      currOp match {
        case gplan.GetVertices(v) if chainElem == null || v == chainElem => relationshipVariableExpressions.append(v); currOp = null; chainElem = null
        case gplan.Expand(src, trg, edge, _, child) if chainElem == null || trg == chainElem => relationshipVariableExpressions.append(trg, edge); currOp = child; chainElem = src
        case gplan.GetVertices(_) | gplan.Expand(_, _, _, _, _) => throw new ExpandChainException("We should never see this condition: Expand and Getvertices not properly chained")
        case e => throw new UnexpectedTypeException(e, "expand chain")
      }
    }
    relationshipVariableExpressions
  }
// protected def AttributeVariable buildPropertyLookupHelper(Variable ev, ExpressionNodeLabelsAndPropertyLookup e) {
// if (e.propertyLookups.length == 1) {
// val attributeName = e.propertyLookups.get(0).propertyKeyName
//
// if (ev instanceof ExpressionVariable) {
// val innerVariable = extractElementVariable(ev)
// if (innerVariable === null) {
// logger.unrecoverableError('''Can't find neither VertexVariable nor EdgeVariable wrapped into the ExpressionVariable, so can't build attribute variable.''')
// }
// }
//
// switch ev {
// VertexVariable
// , EdgeVariable
// , ExpressionVariable
// : {
// ev.createAttribute(attributeName)
// }
// default: {
// logger.unrecoverableError('''Unsupported type received: Β«ev.class.nameΒ»''')
// null
// }
// }
//
// } else {
// logger.unrecoverableError('''PropertyLookup count Β«e.propertyLookups.lengthΒ» not supported.''')
// null
// }
// }
//
// def dispatch Variable buildRelalgVariable(ExpressionNodeLabelsAndPropertyLookup e) {
// val v = buildRelalgVariable(e.left)
// buildPropertyLookupHelper(v, e)
// }
//
// def dispatch Variable buildRelalgVariable(VariableRef varRef) {
// val useExpressionVariables = findOutExpressionVariableUsageFromContext(varRef)
// buildRelalgVariableInternal(varRef, useExpressionVariables)
// }
//
// /**
// * Variable lookup scope depends on the clause in which the variable is being resolved.
// *
// * UNWIND, ORDER BY and WITH...WHERE clauses use expression variables, others don't use them for name lookup
// */
// def findOutExpressionVariableUsageFromContext(EObject eo) {
// EUtil.hasInContainerHierarchy(eo, #[typeof(Unwind)], typeof(Cypher))
// || EUtil.hasInContainerHierarchy(eo, #[typeof(Order)], typeof(Cypher))
// || EUtil.hasInContainerHierarchy(eo, #[typeof(Where), typeof(With)], typeof(Cypher))
// }
//
// /**
// * Resolves a variable by its name.
// *
// * Expression variables from return has the highest priority in name resolution for order by
// * and UNWIND, but they don't play a role when building later return items.
// *
// * @param useExpressionVariables indicate whether we should take expressionvariables into account for name resolution.
// */
// protected def Variable buildRelalgVariableInternal(VariableRef varRef, boolean useExpressionVariables) {
// val v = findVariable(varRef.variableRef.name, useExpressionVariables)
// if (v === null) {
// logger.unrecoverableError('''Variable name not found: Β«varRef.variableRef.nameΒ»''')
// null
// } else {
// v
// }
// }
//
// def buildEdgeVariable(RelationshipDetail r) {
// val edgeVariable = if (r?.range === null) {
// edgeVariableFactory.createElement(r)
// } else {
// edgeListVariableFactory.createElement(r) => [
// minHops = if (r?.range.lower === null) {
// 1
// } else {
// Integer.valueOf(r?.range.lower)
// }
// maxHops = if (r?.range.upper === null) {
// modelFactory.createMaxHops() => [
// maxHopsType = MaxHopsType.UNLIMITED
// ]
// } else {
// modelFactory.createMaxHops() => [
// maxHopsType = MaxHopsType.LIMITED
// hops = Integer.valueOf(r?.range.upper)
// ]
// }
// ]
// }
//
// // add labels to the variable
// edgeVariable.combineLabelSet(r?.types?.relTypeName, edgeLabelFactory)
//
// edgeVariable
// }
//
// def VertexVariable buildVertexVariable(NodePattern n) {
// val vertexVariable = vertexVariableFactory.createElement(n)
//
// // add labels to the variable
// n.nodeLabels?.nodeLabels?.forEach [
// vertexVariable.ensureLabel(vertexLabelFactory.createElement(it.labelName))
// ]
// vertexVariable
// }
//
// /**
// * Wraps an expression into an ExpressionVariable with its name given or inferred.
// *
// * @param name the name, or null to have it inferred.
// * @param expression the expression to wrap
// *
// * @return the ExpressionVariable itself.
// */
// def ExpressionVariable buildExpressionVariable(String name, Expression expression) {
// val iName = if (name === null) {
// ExpressionNameInferencer.inferName(expression, logger)
// } else {
// name
// }
//
// expressionVariableFactoryExtended.createElement(iName, expression) => [
// hasInferredName = (name === null)
// ]
// }
//
// def protected ensureLabel(VertexVariable vertexVariable, VertexLabel label) {
// if (!vertexVariable.vertexLabelSet.vertexLabels.contains(label)) {
// vertexVariable.vertexLabelSet => [
// vertexLabels.add(label)
// status = LabelSetStatus.NON_EMPTY
// ]
// }
// }
//
// def void combineLabelSet(AbstractEdgeVariable edgeVariable, EList<String> labels, EdgeLabelFactory edgeLabelFactory) {
// /*
// * if we receive an empty labelset, this does not change the labelset constraint
// * if the edge variable
// */
// if (labels === null || labels.empty) {
// return
// }
//
// if (edgeVariable.edgeLabelSet.status == LabelSetStatus.EMPTY) {
// // no previous labelset constraint was in effect
// labels.forEach [
// val label = edgeLabelFactory.createElement(it)
// if (!edgeVariable.edgeLabelSet.edgeLabels.contains(label)) {
// edgeVariable.edgeLabelSet.edgeLabels.add(label)
// }
// ]
// edgeVariable.edgeLabelSet.status = if (edgeVariable.edgeLabelSet.edgeLabels.empty) {
// LabelSetStatus.EMPTY
// } else {
// LabelSetStatus.NON_EMPTY
// }
// } else {
// // we had a previous, non-empty labelset
// // we combine (intersect) the labelset received with the previous one
// val List<EdgeLabel> intersection = new ArrayList<EdgeLabel>
//
// labels.forEach [
// val label = edgeLabelFactory.createElement(it)
// if (!intersection.contains(label) && edgeVariable.edgeLabelSet.edgeLabels.contains(label)) {
// intersection.add(label)
// }
// ]
//
// /*
// * a tiny optimization: if a set has the same number of element
// * before and after intersecting with an other set, it is the same.
// *
// * So we need to replace labelset only if their size changed
// */
//
// if (edgeVariable.edgeLabelSet.edgeLabels.size != intersection.size) {
// edgeVariable.edgeLabelSet.edgeLabels.clear
// edgeVariable.edgeLabelSet.edgeLabels.addAll(intersection)
// }
//
// edgeVariable.edgeLabelSet.status = if (edgeVariable.edgeLabelSet.edgeLabels.empty) {
// logger.warning('''Contradicting labelset constraints found for edge variable Β«edgeVariable.nameΒ»''')
// LabelSetStatus.CONTRADICTING
// } else {
// LabelSetStatus.NON_EMPTY
// }
//
// }
// }
//
// /**
// * Packs the appropriate variable into a VariableExpression.
// *
// * This builder method ensures that the new VariableEpression instance
// * is registered to the container registered with this builder.
// */
// def buildVariableExpression(Variable v) {
// modelFactory.createVariableExpression => [
// variable = v
// expressionContainer = topLevelContainer
// ]
// }
//
// /**
// * Builds or resolves the appropriate variable and then packs it into a VariableExpression.
// *
// * This builder method ensures that the new VariableEpression instance
// * is registered to the container registered with this builder.
// */
// def dispatch buildVariableExpression(VariableRef v, boolean useExpressionVariables) {
// modelFactory.createVariableExpression => [
// variable = if (useExpressionVariables) { buildRelalgVariableInternal(v, true) } else { buildRelalgVariable(v) }
// expressionContainer = topLevelContainer
// ]
// }
//
// /**
// * Builds or resolves the appropriate variable and then packs it into a VariableExpression.
// *
// * This builder method ensures that the new VariableEpression instance
// * is registered to the container registered with this builder.
// */
// def dispatch buildVariableExpression(ExpressionNodeLabelsAndPropertyLookup e, boolean useExpressionVariables) {
// modelFactory.createVariableExpression => [
// variable = if (useExpressionVariables) {
// val e_left = e.left
// if (e_left instanceof VariableRef) {
// val v = buildRelalgVariableInternal(e_left, true)
// buildPropertyLookupHelper(v, e)
// } else {
// logger.unrecoverableError('''Unexpected type found as base type for property lookup. Expected: variable reference. Found: Β«e_left.class.nameΒ»''')
// null
// }
// } else { buildRelalgVariable(e) }
// expressionContainer = topLevelContainer
// ]
// }
//
// def getVertexVariableFactoryElements() {
// vertexVariableFactory.elements
// }
// def getEdgeVariableFactoryElements() {
// edgeVariableFactory.elements
// }
//
// /**
// * Finds and returns a variable by name in the variable registers,
// * i.e. in the factories or in the chained variables.
// *
// * @param useExpressionVariables specifies whether to look into the expressionVariableFactory,
// * i.e. if we are interested in variables from the WITH/RETURN/UNWIND clauses of the current subquery.
// * Note, that chained variables are always looked up
// *
// * If not found or null was passed for name, null is returned.
// */
// def findVariable(String name, boolean useExpressionVariables) {
// if (name === null) {
// null
// } else if (useExpressionVariables && expressionVariableFactoryExtended.hasElement(name)) {
// expressionVariableFactoryExtended.getElement(name)
// } else if (expressionVariableChain.get(name) !== null) {
// expressionVariableChain.get(name)
// } else if (vertexVariableFactory.hasElement(name)) {
// vertexVariableFactory.getElement(name)
// } else if (edgeVariableFactory.hasElement(name)) {
// edgeVariableFactory.getElement(name)
// } else if (edgeListVariableFactory.hasElement(name)) {
// edgeListVariableFactory.getElement(name)
// } else {
// null
// }
// }
}
| FTSRG/ingraph | compiler/src/main/scala/ingraph/compiler/cypher2gplan/builders/AttributeBuilder.scala | Scala | epl-1.0 | 14,365 |
package philosophy.finallytagless
import language.higherKinds
import cats.{~>, Monad}
/**
 * A program over an effect algebra `F` in tagless-final encoding: it can be
 * run with any interpreter of `F` into any monad `M`, producing an `M[X]`.
 */
trait Term[F[_[_]],X] { self =>
  /** Interprets this term into the target monad `M` via `interpreter`. */
  def run[M[_] : Monad]( interpreter: Interpreter[F,M] ) : M[X]
  /** Monadic sequencing: run `self`, then the term produced by `f`, under the same interpreter. */
  def flatMap[A]( f : X => Term[F,A] ) : Term[F,A] = new Term[F,A] {
    def run[M[_] : Monad]( interpreter: Interpreter[F, M] ): M[A] =
      Monad[M].flatMap( self.run( interpreter ) ){ x => f(x).run( interpreter ) }
  }
  // map derived from flatMap + pure
  def map[A]( f: X => A ) = flatMap( f andThen { a => Term.pure[F,A](a) } )
  /** Re-targets this term to a larger algebra `G` into which `F` embeds. */
  def embed[G[_[_]]]( implicit E: Embed[F,G] ) : Term[G,X] = new Term[G,X] {
    def run[M[_] : Monad](interpreter: Interpreter[G, M]): M[X] = self.run( E( interpreter ) )
  }
}
object Term {
  /** Lifts a pure value into a Term; running it ignores the interpreter's algebra. */
  def pure[F[_[_]],X]( x:X ) = new Term[F,X] {
    def run[M[_] : Monad](interpreter: Interpreter[F, M]): M[X] = Monad[M].pure( x )
  }
  // cats Monad instance so Terms compose with standard combinators and for-comprehensions
  implicit def monad[F[_[_]]] : Monad[Term[F,?]] = new Monad[Term[F,?]] {
    def flatMap[A, B](fa: Term[F,A])(f: (A) => Term[F,B]): Term[F,B] = fa.flatMap( f )
    def pure[A](x: A): Term[F,A] = Term.pure[F,A]( x )
  }
  /** Entry point for building terms: Term[F] { alg => ... }. */
  def apply[F[_[_]]] : TermBuilder[F] = new TermBuilder[F] {}
}
trait TermBuilder[F[_[_]]] {
  // Abstract carrier: lets callers write `F[X] => X[A]` without naming the eventual monad.
  type X[_]
  def apply[A]( f : F[X]=>X[A] ) : Term[F,A] = new Term[F,A] {
    def run[M[_] : Monad](interpreter: Interpreter[F, M]): M[A] =
      // Cast re-targets f from the abstract carrier X to the interpreter's carrier G.
      // NOTE(review): safe only if callers' f is fully parametric in the carrier - confirm.
      interpreter.apply( f.asInstanceOf[F[interpreter.G] => interpreter.G[A]] )
  }
}
/** Witnesses that an interpreter of algebra G can be turned into one of algebra F. */
trait Embed[F[_[_]],G[_[_]]] {
  def apply[M[_] : Monad]( f: Interpreter[G,M] ) : Interpreter[F,M]
}
/** An Embed given by a direct translation: build an F[M] algebra from a G interpreter. */
trait ~~>[F[_[_]],G[_[_]]] extends Embed[F,G] {
  def embed[M[_] : Monad]( f : Interpreter[G,M] ) : F[M]
  def apply[M[_] : Monad]( f: Interpreter[G,M] ) : Interpreter[F,M] = Interpreter( embed( f ) )
}
object Embed {
  // F embeds into the left component of a paired interpreter.
  implicit def embedLeft[F[_[_]],G[_[_]]] = new Embed[F,InterpreterPair[F,G,?[_]]] {
    def apply[M[_] : Monad](f: Interpreter[InterpreterPair[F,G,?[_]],M]): Interpreter[F,M] = Interpreter.leftOf( f )
  }
  // Every algebra trivially embeds into itself.
  implicit def embedRefl[F[_[_]]] = new Embed[F,F] {
    def apply[M[_] : Monad](f: Interpreter[F, M]): Interpreter[F, M] = f
  }
  // F embeds into the right component of a pair whenever it embeds into that component's algebra.
  implicit def embedRight[F[_[_]],G[_[_]],H[_[_]]]( implicit E: Embed[F,H]) = new Embed[F,InterpreterPair[G,H,?[_]]] {
    def apply[M[_] : Monad](f: Interpreter[InterpreterPair[G,H,?[_]], M]): Interpreter[F, M] = E( Interpreter.rightOf(f) )
  }
}
/**
 * An interpreter of algebra F into monad M, factored through an intermediate
 * carrier G: an algebra instance `init: F[G]` plus a natural transformation `nt: G ~> M`.
 */
trait Interpreter[F[_[_]],M[_]] { self =>
  import Interpreter.~
  type G[_]
  def init : F[G]
  def nt : G ~> M
  /** Post-composes a natural transformation, re-targeting this interpreter to H. */
  def andThen[H[_]]( n : M ~> H ) : Interpreter[F,H] =
    InterpreterNT[F,G,H]( init, nt andThen n )
  /** Pairs this interpreter with one for another algebra into a product interpreter. */
  def and[H[_[_]]]( i: Interpreter[H,M] ) : Interpreter[(F~H)#Pair,M] =
    InterpreterInit[InterpreterPair[F,H,?[_]],M]( InterpreterPair(self, i ) )
  /** Runs one operation of the algebra and maps its result into M. */
  def apply[A]( f: F[G] => G[A] ) : M[A] = nt( f( init ) )
}
// Interpreter whose carrier is M itself; the natural transformation is the identity.
case class InterpreterInit[F[_[_]],M[_]]( init: F[M] ) extends Interpreter[F,M] {
  type G[X] = M[X]
  val nt = new (M ~> M) {
    def apply[A](fa: M[A]): M[A] = fa
  }
}
// Interpreter built from an explicit carrier G0 and a natural transformation G0 ~> M.
case class InterpreterNT[F[_[_]],G0[_],M[_]]( init: F[G0], nt: G0 ~> M) extends Interpreter[F,M] {
  type G[X] = G0[X]
}
// Product of two interpreters that share the same target monad M.
case class InterpreterPair[F[_[_]],G[_[_]],M[_]]( left: Interpreter[F,M], right: Interpreter[G,M] )
object Interpreter {
  /** Interpreter straight from an algebra instance over M (identity transformation). */
  def apply[F[_[_]],M[_]]( fm : F[M] ) : Interpreter[F,M] = InterpreterInit[F,M]( fm )
  /** Interpreter from an algebra over carrier H plus a transformation H ~> M. */
  def apply[F[_[_]],H[_],M[_]]( fm : F[H], nt: H ~> M ) : Interpreter[F,M] = InterpreterNT( fm, nt )
  // Type-level pair constructor: (A ~ B)#Pair is the product algebra of A and B.
  type ~[A[_[_]],B[_[_]]] = ({
    type Pair[X[_]] = InterpreterPair[A,B,X]
    type ![X[_]] = InterpreterPair[A,B,X]
  })
  /** Projects the right interpreter out of a product, keeping the shared transformation. */
  def rightOf[F[_[_]],G[_[_]],M[_]]( i : Interpreter[(F~G)#Pair,M] ) : Interpreter[G,M] = i.init.right andThen i.nt
  /** Projects the left interpreter out of a product, keeping the shared transformation. */
  def leftOf[F[_[_]],G[_[_]],M[_]]( i : Interpreter[(F~G)#Pair,M] ) : Interpreter[F,M] = i.init.left andThen i.nt
  /** Both projections at once. */
  def pairOf[F[_[_]],G[_[_]],M[_]]( i : Interpreter[(F~G)#Pair,M] ) : (Interpreter[F,M],Interpreter[G,M]) = (Interpreter.leftOf(i),Interpreter.rightOf(i))
}
| vtoro/getting-to-philosophy | src/main/scala/philosophy/finallytagless/Term.scala | Scala | mit | 3,800 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.storage
import slamdata.Predef._
import monocle.Prism
import scalaz.Show
/** Errors raised by the storage layer; `detail` carries a human-readable description. */
sealed abstract trait StoreError extends Product with Serializable {
  def detail: String
}
object StoreError {
  /** The stored data could not be interpreted. */
  final case class Corrupt(detail: String) extends StoreError
  // Optic focusing on the Corrupt case's detail message.
  val corrupt: Prism[StoreError, String] =
    Prism.partial[StoreError, String] {
      case Corrupt(details) => details
    } (Corrupt(_))
  // Optic between Throwable and StoreError, via the private exception wrapper below.
  val throwableP: Prism[Throwable, StoreError] =
    Prism.partial[Throwable, StoreError] {
      case StoreErrorException(re) => re
    } (StoreErrorException(_))
  implicit val show: Show[StoreError] = Show.shows {
    case Corrupt(detail) => "StoreError.Corrupt(" + detail + ")"
  }
  ////
  // Wrapper used to smuggle a StoreError through Throwable-based APIs.
  private final case class StoreErrorException(err: StoreError)
      extends Exception(err.detail)
}
| djspiewak/quasar | impl/src/main/scala/quasar/impl/storage/StoreError.scala | Scala | apache-2.0 | 1,408 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.sparkbench.datagen.convert
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkContext}
object PagerankConvert{
  // NOTE(review): this Hadoop Configuration is never used inside this object;
  // kept for source/binary compatibility - confirm with callers before removing.
  val conf = new Configuration()

  /**
   * Converts generated PageRank edge data into plain "src dst" text.
   *
   * Reads every line of args(0) (e.g. "/HiBench/Pagerank/Input/edges"), keeps
   * the last two tab-separated fields, and writes them space-separated to
   * args(1) (e.g. "/HiBench/Pagerank/Input/edges.txt").
   */
  def main(args: Array[String]){
    if (args.length != 2){
      System.err.println("Usage: Convert <input_directory> <output_file_path>")
      System.exit(1)
    }
    val sparkConf = new SparkConf().setAppName("HiBench PageRank Converter")
    val sc = new SparkContext(sparkConf)
    val input_path = args(0)
    val output_name = args(1)
    // Honor spark.default.parallelism when configured, otherwise the context default.
    val parallel = sc.getConf.getInt("spark.default.parallelism", sc.defaultParallelism)
    val data = sc.textFile(input_path).map{case(line)=>
      val elements = line.split('\t')
      val elements_tuple = elements.slice(elements.length-2, elements.length)
      "%s %s".format(elements_tuple(0), elements_tuple(1))
    }
    // BUG FIX: RDD.repartition returns a NEW RDD and does not mutate `data`;
    // the original discarded the result, so the requested parallelism was
    // never applied to the output. Save the repartitioned RDD instead.
    data.repartition(parallel).saveAsTextFile(output_name)
    sc.stop()
  }
}
| yhli365/HiBench | src/sparkbench/src/main/scala/com/intel/sparkbench/datagen/convert/PagerankConvert.scala | Scala | apache-2.0 | 1,844 |
package com.amarjanica.discourse
package api
import java.net.URLEncoder
import com.amarjanica.discourse.models._
class UserApi(client: BaseDiscourseClient) {

  /**
   * Creates a new Discourse user.
   *
   * Note: this will not work if you have local logins disabled!
   */
  def create(createUserRequest: CreateUser): CreateUserResponse =
    client.post("/users", createUserRequest).as[CreateUserResponse]

  /** Fetches the details of a user by username. */
  def get(username: String): UserDetail =
    client.get(s"/users/${URLEncoder.encode(username, "UTF-8")}.json").as[UserDetail]

  /** Updates the given user's profile and returns the server's status. */
  def update(user: User): UpdateUserStatus =
    client.put(s"/users/${URLEncoder.encode(user.username, "UTF-8")}", user).as[UpdateUserStatus]

  /** Changes a user's email address using the client's API credentials. */
  def updateEmail(username: String, email: String): Unit = {
    client.put(s"/users/${URLEncoder.encode(username, "UTF-8")}/preferences/email",
      UpdateEmail(
        email = email,
        apiKey = client.credentials.get.api_key,
        apiUsername = client.credentials.get.api_username
      )
    )
  }

  /** Renames a user from `currentUsername` to `newUsername`. */
  def updateUsername(currentUsername: String, newUsername: String): Unit = {
    client.put(s"/users/${URLEncoder.encode(currentUsername, "UTF-8")}/preferences/username",
      UpdateUsername(
        newUsername = newUsername,
        apiKey = client.credentials.get.api_key
      )
    )
  }

  /** Sets a user's trust level. Admin access needed. */
  def updateTrustLevel(userId: Int, level: Int): Unit = {
    client.put(s"/admin/users/$userId/trust_level",
      UpdateTrustLevel(
        userId = userId,
        level = level
      )
    )
  }

  /** Suspends a user. Admin access needed. */
  def suspend(userId: Int, suspendUser: SuspendUser): Unit = {
    client.put(s"/admin/users/$userId/suspend",
      suspendUser
    )
  }

  /**
   * Lifts a user's suspension. Admin access needed.
   *
   * NOTE(review): `level` is accepted but never used by this endpoint; it is
   * kept only for backward compatibility with existing callers.
   */
  def unsuspend(userId: Int, level: Int): Unit = {
    client.put(s"/admin/users/$userId/unsuspend")
  }

  /** Lists users of the given kind (e.g. "active", "new"). Admin access needed. */
  def list(kind: String): List[User] =
    client.get(s"/admin/users/list/$kind.json").asList[User]
}
| amarjanica/discourse-scala-client | src/main/scala/com/amarjanica/discourse/api/UserApi.scala | Scala | mit | 1,771 |
package org.scalawiki.xml
import java.io.{InputStream, OutputStream}
import java.nio.charset.StandardCharsets
import scala.io.Source
class XmlIndex(val pages: Seq[PageIndex]) {

  // Lookup tables keyed by page id and by title.
  // On duplicate keys the first entry in `pages` wins.
  val _byId = pages.groupBy(_.id).mapValues(_.head)
  val _byTitle = pages.groupBy(_.title).mapValues(_.head)

  val nl = System.lineSeparator()

  /** Writes one UTF-8 encoded index line per page to the given stream. */
  def save(os: OutputStream) = {
    for (pageIndex <- pages) {
      os.write((pageIndex.toString + nl).getBytes(StandardCharsets.UTF_8))
    }
  }
}
object XmlIndex {

  /** Streams page index entries out of a running XML dump parser. */
  def fromParser(parser: XmlParser): Iterator[PageIndex] =
    for (page <- parser.iterator)
      yield PageIndex(parser.pageStartingByteOffset, page.id.get, page.title)

  /** Reads a previously saved index, one serialized PageIndex per line. */
  def fromInputStream(is: InputStream): Iterator[PageIndex] =
    Source.fromInputStream(is).getLines().map(line => PageIndex.fromString(line))
}
package org.openapitools.client.api
import argonaut._
import argonaut.EncodeJson._
import argonaut.DecodeJson._
import org.http4s.{EntityDecoder, EntityEncoder}
import org.http4s.argonaut._
import org.joda.time.DateTime
import QueueBlockedItem._
/**
 * Generated model for a blocked item in the Jenkins build queue.
 * Fields mirror the JSON payload returned by the remote API; all are optional
 * because the server may omit any of them.
 */
case class QueueBlockedItem (
  `class`: Option[String],
  actions: Option[List[CauseAction]],
  blocked: Option[Boolean],
  buildable: Option[Boolean],
  id: Option[Integer],
  inQueueSince: Option[Integer],
  params: Option[String],
  stuck: Option[Boolean],
  task: Option[FreeStyleProject],
  url: Option[String],
  why: Option[String],
  buildableStartMilliseconds: Option[Integer])

object QueueBlockedItem {
  import DateTimeCodecs._
  // Argonaut JSON codec derived from the case class shape.
  implicit val QueueBlockedItemCodecJson: CodecJson[QueueBlockedItem] = CodecJson.derive[QueueBlockedItem]
  // http4s entity (de)serializers built on top of the codec.
  implicit val QueueBlockedItemDecoder: EntityDecoder[QueueBlockedItem] = jsonOf[QueueBlockedItem]
  implicit val QueueBlockedItemEncoder: EntityEncoder[QueueBlockedItem] = jsonEncoderOf[QueueBlockedItem]
}
| cliffano/swaggy-jenkins | clients/scalaz/generated/src/main/scala/org/openapitools/client/api/QueueBlockedItem.scala | Scala | mit | 985 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.common
import com.twitter.algebird.{Semigroup, Moments, Monoid}
import com.twitter.util.Time
import com.twitter.conversions.time._
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
/**
 * Tests for service dependency aggregation: Service equality, combining
 * DependencyLink duration moments via algebird semigroups, and merging
 * whole Dependencies windows via the monoid.
 */
@RunWith(classOf[JUnitRunner])
class DependenciesTest extends FunSuite {
  test("services compare correctly") {
    val s1 = Service("foo")
    val s2 = Service("bar")
    val s3 = Service("foo")
    val s4 = Service("Foo")
    val s5 = Service("FOO")
    assert(s1 === s1)
    assert(s1 === s3)
    assert(s1 != s2)
    assert(s1 != s4) // not sure if case sensitivity is required, but we should be aware if it changes
    assert(s1 != s5)
  }
  test("DependencyLinks") {
    val m1 = Moments(2)
    val m2 = Moments(4)
    val d1 = DependencyLink(Service("tfe"), Service("mobileweb"), m1)
    val d2 = DependencyLink(Service("tfe"), Service("mobileweb"), m2)
    val d3 = DependencyLink(Service("Gizmoduck"), Service("tflock"), m2)
    // combine: links between the same service pair merge their moments
    assert(Semigroup.plus(d1, d2) === d1.copy(durationMoments = Monoid.plus(m1, m2)))
    // assert if incompatible links are combined
    intercept[AssertionError] { Semigroup.plus(d1, d3) }
  }
  test("Dependencies") {
    val m1 = Moments(2)
    val m2 = Moments(4)
    val dl1 = DependencyLink(Service("tfe"), Service("mobileweb"), m1)
    val dl2 = DependencyLink(Service("tfe"), Service("mobileweb"), m2)
    val dl3 = DependencyLink(Service("Gizmoduck"), Service("tflock"), m2)
    val dl4 = DependencyLink(Service("mobileweb"), Service("Gizmoduck"), m2)
    val dl5 = dl1.copy(durationMoments = Monoid.plus(m1,m2))
    val deps1 = Dependencies(Time.fromSeconds(0), Time.fromSeconds(0)+1.hour, List(dl1, dl3))
    val deps2 = Dependencies(Time.fromSeconds(0)+1.hour, Time.fromSeconds(0)+2.hours, List(dl2, dl4))
    // express identity when added to zero
    val result = Monoid.plus(deps1, Monoid.zero[Dependencies])
    assert(result === deps1)
    // combine: the merged window spans both inputs, duplicate links fold together
    val result2 = Monoid.plus(deps1, deps2)
    assert(result2.startTime === Time.fromSeconds(0))
    assert(result2.endTime === Time.fromSeconds(0)+2.hours)
    def counts(e: Traversable[_]) = e groupBy identity mapValues (_.size)
    assert(counts(result2.links) == counts(Seq(dl4, dl5, dl3)))
  }
}
| wyzssw/zipkin | zipkin-common/src/test/scala/com/twitter/zipkin/common/DependenciesTest.scala | Scala | apache-2.0 | 2,900 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.bigquery.validation
import com.google.api.services.bigquery.model.TableFieldSchema
import scala.collection.mutable
import scala.reflect.macros.blackbox
import scala.reflect.runtime.universe._
// A sample implementation to override types under certain conditions
/**
 * Sample [[OverrideTypeProvider]] that swaps selected BigQuery column types
 * for domain types (here: Country). A column opts in by carrying the marker
 * string (e.g. "COUNTRY") in its BigQuery field description. Lookups go
 * through the [[Index]] maps; instances are built reflectively/by macro.
 */
final class SampleOverrideTypeProvider extends OverrideTypeProvider {

  // Resolves the override class from the marker stored in the field's
  // description; None when the description is null or unknown.
  private def getByTypeString(tfs: TableFieldSchema): Option[Class[_]] =
    Option(tfs.getDescription)
      .flatMap(overrideType => Index.getIndexClass.get(overrideType))

  // Finds the (macro-universe type, runtime class) pair whose compile-time
  // type is equivalent (=:=) to `tpe`.
  private def getByTypeObject(c: blackbox.Context)(tpe: c.Type): Option[(c.Type, Class[_])] =
    Index
      .getIndexCompileTimeTypes(c)
      .find { a =>
        val (compileTimeType, _) = a
        compileTimeType =:= tpe
      }

  // Runtime-reflection variant of the lookup above.
  private def getByTypeObject(tpe: Type): Option[(Type, Class[_])] =
    Index.getIndexRuntimeTypes.find { a =>
      val (runtimeType, _) = a
      runtimeType =:= tpe
    }

  def shouldOverrideType(tfs: TableFieldSchema): Boolean =
    getByTypeString(tfs).nonEmpty
  def shouldOverrideType(c: blackbox.Context)(tpe: c.Type): Boolean =
    getByTypeObject(c)(tpe).nonEmpty
  def shouldOverrideType(tpe: Type): Boolean =
    getByTypeObject(tpe).nonEmpty

  // Returns the BigQuery storage type by invoking the override class's
  // static-like `bigQueryType` method reflectively (null receiver: the
  // method is expected to be a companion-forwarded zero-arg method).
  def getBigQueryType(tpe: Type): String = {
    val optionalTuple = getByTypeObject(tpe)
    optionalTuple match {
      case Some(tuple) =>
        val (_, correspondingType) = tuple
        correspondingType
          .getMethod("bigQueryType")
          .invoke(null)
          .asInstanceOf[String]
      case None => throw new IllegalArgumentException("Should never be here")
    }
  }

  // Builds the macro-level type tree (_root_.<package>.<Class>) used in the
  // generated case class field for an overridden column.
  def getScalaType(c: blackbox.Context)(tfs: TableFieldSchema): c.Tree = {
    import c.universe._
    val typeClassOption: Option[Class[_]] = getByTypeString(tfs)
    typeClassOption match {
      case Some(typeClass) =>
        val packageName = typeClass.getPackage.getName
        val className = TypeName(typeClass.getSimpleName)
        tq"${c.parse("_root_." + packageName)}.$className"
      case None => throw new IllegalArgumentException("Should never be here")
    }
  }

  // Emits a tree that parses the raw (String) column value into the override
  // type by calling <Class>.parse(...) at runtime.
  def createInstance(c: blackbox.Context)(tpe: c.Type, tree: c.Tree): c.Tree = {
    import c.universe._
    val optionalTuple = getByTypeObject(c)(tpe)
    optionalTuple match {
      case Some(tuple) =>
        val (_, correspondingType) = tuple
        val name = correspondingType.getPackage.getName + "." + correspondingType.getSimpleName
        val instanceOfType =
          q"${c.parse(name)}.parse(${c.parse(s"$tree.asInstanceOf[String]")})"
        instanceOfType
      case None => throw new IllegalArgumentException("Should never be here")
    }
  }

  // No per-field initialization is needed when writing back to a table.
  def initializeToTable(c: blackbox.Context)(
    modifiers: c.universe.Modifiers,
    variableName: c.universe.TermName,
    tpe: c.universe.Tree
  ): Unit = ()
}
/** Two-letter country code wrapped in an allocation-free value class. */
class Country(val data: String) extends AnyVal

object Country {

  /**
   * Validates and wraps `data`.
   *
   * @throws IllegalArgumentException if `data` is not a valid country code
   */
  def apply(data: String): Country = {
    // Idiomatic precondition check; still throws IllegalArgumentException,
    // but with a message that names the offending value.
    require(isValid(data), s"'$data' is not a valid country code")
    new Country(data)
  }

  /** A country code is valid iff it is exactly two characters long. */
  def isValid(data: String): Boolean = data.length == 2

  /** Parses a raw column value into a Country (used reflectively). */
  def parse(data: String): Country = Country(data)

  /** Marker string placed in BigQuery field descriptions to opt in. */
  def stringType: String = "COUNTRY"

  /** Physical BigQuery storage type backing this logical type. */
  def bigQueryType: String = "STRING"
}
// Internal index to keep track of class mappings this can be done in a number of ways
object Index {
  // Maps macro-universe (compile-time) types to the runtime override class.
  // NOTE(review): none of these maps are mutated anywhere visible; immutable
  // Maps would suffice, but the declared mutable.Map return types are part of
  // the public interface, so they are left as-is.
  def getIndexCompileTimeTypes(c: blackbox.Context): mutable.Map[c.Type, Class[_]] = {
    import c.universe._
    mutable.Map[Type, Class[_]](typeOf[Country] -> classOf[Country])
  }
  // Maps the description marker string (e.g. "COUNTRY") to the override class.
  def getIndexClass: mutable.Map[String, Class[_]] =
    mutable.Map[String, Class[_]](Country.stringType -> classOf[Country])
  // Maps scala-reflect runtime types to the override class.
  def getIndexRuntimeTypes: mutable.Map[Type, Class[_]] =
    mutable.Map[Type, Class[_]](typeOf[Country] -> classOf[Country])
}
| spotify/scio | scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/validation/SampleOverrideTypeProvider.scala | Scala | apache-2.0 | 4,385 |
package org.photon.common
import java.util.concurrent.locks.ReentrantReadWriteLock
/** Implicit helpers for working with Java concurrency types from Scala. */
object JavaConversion {
  import scala.language.implicitConversions

  /**
   * Allows passing a by-name block wherever a java.lang.Runnable is expected.
   * Explicit return type added: implicit defs should always declare one.
   */
  implicit def fn2runnable(fn: => Unit): Runnable = new Runnable {
    def run(): Unit = fn
  }

  /** Loan-pattern helpers for ReentrantReadWriteLock. */
  implicit class RichReentrantReadWriteLock(val l: ReentrantReadWriteLock) extends AnyVal {

    /** Runs `fn` while holding the write lock; the lock is always released. */
    def write[R](fn: => R): R = {
      l.writeLock.lock()
      try {
        fn
      } finally {
        l.writeLock.unlock()
      }
    }

    /** Runs `fn` while holding the read lock; the lock is always released. */
    def read[R](fn: => R): R = {
      l.readLock.lock()
      try {
        fn
      } finally {
        l.readLock.unlock()
      }
    }
  }
}
| Emudofus/Photon | common/main/src/org/photon/common/JavaConversion.scala | Scala | mit | 560 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.handler
import akka.pattern.ask
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import Utilities._
import org.apache.toree.utils.{MessageLogSupport, LogLike}
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Success
/**
 * Handles Jupyter complete_request messages by asking the interpreter actor
 * for completions and relaying a complete_reply back through the kernel relay.
 */
class CodeCompleteHandler(actorLoader: ActorLoader)
  extends BaseHandler(actorLoader) with MessageLogSupport
{
  /** Parses the incoming message content and dispatches to completeRequest. */
  override def process(kernelMessage: KernelMessage): Future[_] = {
    logKernelMessageAction("Generating code completion for", kernelMessage)
    Utilities.parseAndHandle(
      kernelMessage.contentString,
      CompleteRequest.completeRequestReads,
      completeRequest(kernelMessage, _ : CompleteRequest)
    )
  }

  /**
   * Asks the interpreter actor for completions and, on success, relays a
   * complete_reply built from the (cursor position, matches) tuple.
   * Returns the underlying future so callers can observe completion.
   */
  private def completeRequest(km: KernelMessage, cr: CompleteRequest):
      Future[(Int, List[String])] = {
    val interpreterActor = actorLoader.load(SystemActorType.Interpreter)
    val codeCompleteFuture = ask(interpreterActor, cr).mapTo[(Int, List[String])]
    codeCompleteFuture.onComplete {
      case Success(tuple) =>
        val reply = CompleteReplyOk(tuple._2, tuple._1,
          cr.cursor_pos, Metadata())
        val completeReplyType = MessageType.Outgoing.CompleteReply.toString
        logKernelMessageAction("Sending code complete reply for", km)
        actorLoader.load(SystemActorType.KernelMessageRelay) !
          km.copy(
            header = HeaderBuilder.create(completeReplyType),
            parentHeader = km.header,
            contentString = Json.toJson(reply).toString
          )
      case _ =>
        // BUG FIX: the original built `new Exception(...)` and discarded it,
        // silently swallowing interpreter failures. Log it instead (throwing
        // inside onComplete would also be swallowed by the execution context).
        // NOTE(review): `logger` comes from the mixed-in LogLike — confirm.
        logger.error("Parse error in CodeCompleteHandler")
    }
    codeCompleteFuture
  }
}
| chipsenkbeil/incubator-toree | kernel/src/main/scala/org/apache/toree/kernel/protocol/v5/handler/CodeCompleteHandler.scala | Scala | apache-2.0 | 2,761 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.nio.charset.StandardCharsets
import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period, ZoneOffset}
import java.time.temporal.ChronoUnit
import java.util.TimeZone
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, ScalaReflection}
import org.apache.spark.sql.catalyst.encoders.ExamplePointUDT
import org.apache.spark.sql.catalyst.util.DateTimeConstants._
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
/**
 * Tests for Catalyst Literal expressions: null/default literals for every
 * data type, primitive/collection/interval construction, and SQL/string
 * rendering under different time zones.
 */
class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("null") {
    checkEvaluation(Literal.create(null, BooleanType), null)
    checkEvaluation(Literal.create(null, ByteType), null)
    checkEvaluation(Literal.create(null, ShortType), null)
    checkEvaluation(Literal.create(null, IntegerType), null)
    checkEvaluation(Literal.create(null, LongType), null)
    checkEvaluation(Literal.create(null, FloatType), null)
    checkEvaluation(Literal.create(null, DoubleType), null)
    checkEvaluation(Literal.create(null, StringType), null)
    checkEvaluation(Literal.create(null, BinaryType), null)
    checkEvaluation(Literal.create(null, DecimalType.USER_DEFAULT), null)
    checkEvaluation(Literal.create(null, DateType), null)
    checkEvaluation(Literal.create(null, TimestampType), null)
    checkEvaluation(Literal.create(null, CalendarIntervalType), null)
    checkEvaluation(Literal.create(null, YearMonthIntervalType), null)
    checkEvaluation(Literal.create(null, DayTimeIntervalType), null)
    checkEvaluation(Literal.create(null, ArrayType(ByteType, true)), null)
    checkEvaluation(Literal.create(null, ArrayType(StringType, true)), null)
    checkEvaluation(Literal.create(null, MapType(StringType, IntegerType)), null)
    checkEvaluation(Literal.create(null, StructType(Seq.empty)), null)
  }

  test("default") {
    checkEvaluation(Literal.default(BooleanType), false)
    checkEvaluation(Literal.default(ByteType), 0.toByte)
    checkEvaluation(Literal.default(ShortType), 0.toShort)
    checkEvaluation(Literal.default(IntegerType), 0)
    checkEvaluation(Literal.default(LongType), 0L)
    checkEvaluation(Literal.default(FloatType), 0.0f)
    checkEvaluation(Literal.default(DoubleType), 0.0)
    checkEvaluation(Literal.default(StringType), "")
    checkEvaluation(Literal.default(BinaryType), "".getBytes(StandardCharsets.UTF_8))
    checkEvaluation(Literal.default(DecimalType.USER_DEFAULT), Decimal(0))
    checkEvaluation(Literal.default(DecimalType.SYSTEM_DEFAULT), Decimal(0))
    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "false") {
      checkEvaluation(Literal.default(DateType), DateTimeUtils.toJavaDate(0))
      checkEvaluation(Literal.default(TimestampType), DateTimeUtils.toJavaTimestamp(0L))
    }
    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
      checkEvaluation(Literal.default(DateType), LocalDate.ofEpochDay(0))
      checkEvaluation(Literal.default(TimestampType), Instant.ofEpochSecond(0))
    }
    checkEvaluation(Literal.default(CalendarIntervalType), new CalendarInterval(0, 0, 0L))
    checkEvaluation(Literal.default(YearMonthIntervalType), 0)
    checkEvaluation(Literal.default(DayTimeIntervalType), 0L)
    checkEvaluation(Literal.default(ArrayType(StringType)), Array())
    checkEvaluation(Literal.default(MapType(IntegerType, StringType)), Map())
    checkEvaluation(Literal.default(StructType(StructField("a", StringType) :: Nil)), Row(""))
    // ExamplePointUDT.sqlType is ArrayType(DoubleType, false).
    checkEvaluation(Literal.default(new ExamplePointUDT), Array())
  }

  test("boolean literals") {
    checkEvaluation(Literal(true), true)
    checkEvaluation(Literal(false), false)
    checkEvaluation(Literal.create(true), true)
    checkEvaluation(Literal.create(false), false)
  }

  test("int literals") {
    List(0, 1, Int.MinValue, Int.MaxValue).foreach { d =>
      checkEvaluation(Literal(d), d)
      checkEvaluation(Literal(d.toLong), d.toLong)
      checkEvaluation(Literal(d.toShort), d.toShort)
      checkEvaluation(Literal(d.toByte), d.toByte)
      checkEvaluation(Literal.create(d), d)
      checkEvaluation(Literal.create(d.toLong), d.toLong)
      checkEvaluation(Literal.create(d.toShort), d.toShort)
      checkEvaluation(Literal.create(d.toByte), d.toByte)
    }
    checkEvaluation(Literal(Long.MinValue), Long.MinValue)
    checkEvaluation(Literal(Long.MaxValue), Long.MaxValue)
    checkEvaluation(Literal.create(Long.MinValue), Long.MinValue)
    checkEvaluation(Literal.create(Long.MaxValue), Long.MaxValue)
  }

  test("double literals") {
    List(0.0, -0.0, Double.NegativeInfinity, Double.PositiveInfinity).foreach { d =>
      checkEvaluation(Literal(d), d)
      checkEvaluation(Literal(d.toFloat), d.toFloat)
      checkEvaluation(Literal.create(d), d)
      checkEvaluation(Literal.create(d.toFloat), d.toFloat)
    }
    checkEvaluation(Literal(Double.MinValue), Double.MinValue)
    checkEvaluation(Literal(Double.MaxValue), Double.MaxValue)
    checkEvaluation(Literal(Float.MinValue), Float.MinValue)
    checkEvaluation(Literal(Float.MaxValue), Float.MaxValue)
    checkEvaluation(Literal.create(Double.MinValue), Double.MinValue)
    checkEvaluation(Literal.create(Double.MaxValue), Double.MaxValue)
    checkEvaluation(Literal.create(Float.MinValue), Float.MinValue)
    checkEvaluation(Literal.create(Float.MaxValue), Float.MaxValue)
  }

  test("string literals") {
    checkEvaluation(Literal(""), "")
    checkEvaluation(Literal("test"), "test")
    checkEvaluation(Literal("\u0000"), "\u0000")
    checkEvaluation(Literal.create(""), "")
    checkEvaluation(Literal.create("test"), "test")
    checkEvaluation(Literal.create("\u0000"), "\u0000")
  }

  test("sum two literals") {
    checkEvaluation(Add(Literal(1), Literal(1)), 2)
    checkEvaluation(Add(Literal.create(1), Literal.create(1)), 2)
  }

  test("binary literals") {
    checkEvaluation(Literal.create(new Array[Byte](0), BinaryType), new Array[Byte](0))
    checkEvaluation(Literal.create(new Array[Byte](2), BinaryType), new Array[Byte](2))
    checkEvaluation(Literal.create(new Array[Byte](0)), new Array[Byte](0))
    checkEvaluation(Literal.create(new Array[Byte](2)), new Array[Byte](2))
  }

  test("decimal") {
    List(-0.0001, 0.0, 0.001, 1.2, 1.1111, 5).foreach { d =>
      checkEvaluation(Literal(Decimal(d)), Decimal(d))
      checkEvaluation(Literal(Decimal(d.toInt)), Decimal(d.toInt))
      checkEvaluation(Literal(Decimal(d.toLong)), Decimal(d.toLong))
      checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 3)),
        Decimal((d * 1000L).toLong, 10, 3))
      checkEvaluation(Literal(BigDecimal(d.toString)), Decimal(d))
      checkEvaluation(Literal(new java.math.BigDecimal(d.toString)), Decimal(d))
      checkEvaluation(Literal.create(Decimal(d)), Decimal(d))
      checkEvaluation(Literal.create(Decimal(d.toInt)), Decimal(d.toInt))
      checkEvaluation(Literal.create(Decimal(d.toLong)), Decimal(d.toLong))
      checkEvaluation(Literal.create(Decimal((d * 1000L).toLong, 10, 3)),
        Decimal((d * 1000L).toLong, 10, 3))
      checkEvaluation(Literal.create(BigDecimal(d.toString)), Decimal(d))
      checkEvaluation(Literal.create(new java.math.BigDecimal(d.toString)), Decimal(d))
    }
  }

  // Converts a Scala value to its Catalyst internal representation so it can
  // be compared against a Literal's evaluated result.
  private def toCatalyst[T: TypeTag](value: T): Any = {
    val ScalaReflection.Schema(dataType, _) = ScalaReflection.schemaFor[T]
    CatalystTypeConverters.createToCatalystConverter(dataType)(value)
  }

  test("array") {
    def checkArrayLiteral[T: TypeTag](a: Array[T]): Unit = {
      checkEvaluation(Literal(a), toCatalyst(a))
      checkEvaluation(Literal.create(a), toCatalyst(a))
    }
    checkArrayLiteral(Array(1, 2, 3))
    checkArrayLiteral(Array("a", "b", "c"))
    checkArrayLiteral(Array(1.0, 4.0))
    checkArrayLiteral(Array(new CalendarInterval(1, 0, 0), new CalendarInterval(0, 1, 0)))
    val arr = collection.mutable.WrappedArray.make(Array(1.0, 4.0))
    checkEvaluation(Literal(arr), toCatalyst(arr))
  }

  test("seq") {
    def checkSeqLiteral[T: TypeTag](a: Seq[T]): Unit = {
      checkEvaluation(Literal.create(a), toCatalyst(a))
    }
    checkSeqLiteral(Seq(1, 2, 3))
    checkSeqLiteral(Seq("a", "b", "c"))
    checkSeqLiteral(Seq(1.0, 4.0))
    checkSeqLiteral(Seq(new CalendarInterval(1, 0, 0), new CalendarInterval(0, 1, 0)))
    checkSeqLiteral(Seq(Period.ZERO, Period.ofMonths(1)))
    checkSeqLiteral(Seq(Duration.ZERO, Duration.ofDays(1)))
  }

  test("map") {
    def checkMapLiteral[T: TypeTag](m: T): Unit = {
      checkEvaluation(Literal.create(m), toCatalyst(m))
    }
    checkMapLiteral(Map("a" -> 1, "b" -> 2, "c" -> 3))
    checkMapLiteral(Map("1" -> 1.0, "2" -> 2.0, "3" -> 3.0))
    checkMapLiteral(Map(Period.ofMonths(1) -> Duration.ZERO))
    assert(Literal.create(Map("a" -> 1)).toString === "map(keys: [a], values: [1])")
  }

  test("struct") {
    def checkStructLiteral[T: TypeTag](s: T): Unit = {
      checkEvaluation(Literal.create(s), toCatalyst(s))
    }
    checkStructLiteral((1, 3.0, "abcde"))
    checkStructLiteral(("de", 1, 2.0f))
    checkStructLiteral((1, ("fgh", 3.0)))
    checkStructLiteral((Period.ZERO, ("abc", Duration.ofDays(1))))
  }

  test("unsupported types (map and struct) in Literal.apply") {
    def checkUnsupportedTypeInLiteral(v: Any): Unit = {
      val errMsgMap = intercept[RuntimeException] {
        Literal(v)
      }
      assert(errMsgMap.getMessage.startsWith("Unsupported literal type"))
    }
    checkUnsupportedTypeInLiteral(Map("key1" -> 1, "key2" -> 2))
    checkUnsupportedTypeInLiteral(("mike", 29, 1.0))
  }

  test("SPARK-24571: char literals") {
    checkEvaluation(Literal('X'), "X")
    checkEvaluation(Literal.create('0'), "0")
    checkEvaluation(Literal('\u0000'), "\u0000")
    checkEvaluation(Literal.create('\n'), "\n")
  }

  test("SPARK-33390: Make Literal support char array") {
    checkEvaluation(Literal(Array('h', 'e', 'l', 'l', 'o')), "hello")
    checkEvaluation(Literal(Array("hello".toCharArray)), Array("hello"))
    // scalastyle:off
    checkEvaluation(Literal(Array('ζ΅','θ―')), "ζ΅θ―")
    checkEvaluation(Literal(Array('a', 'ζ΅', 'b', 'θ―', 'c')), "aζ΅bθ―c")
    // scalastyle:on
  }

  test("construct literals from java.time.LocalDate") {
    Seq(
      LocalDate.of(1, 1, 1),
      LocalDate.of(1582, 10, 1),
      LocalDate.of(1600, 7, 30),
      LocalDate.of(1969, 12, 31),
      LocalDate.of(1970, 1, 1),
      LocalDate.of(2019, 3, 20),
      LocalDate.of(2100, 5, 17)).foreach { localDate =>
      checkEvaluation(Literal(localDate), localDate)
    }
  }

  test("construct literals from arrays of java.time.LocalDate") {
    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
      val localDate0 = LocalDate.of(2019, 3, 20)
      checkEvaluation(Literal(Array(localDate0)), Array(localDate0))
      val localDate1 = LocalDate.of(2100, 4, 22)
      checkEvaluation(Literal(Array(localDate0, localDate1)), Array(localDate0, localDate1))
    }
  }

  test("construct literals from java.time.Instant") {
    Seq(
      Instant.parse("0001-01-01T00:00:00Z"),
      Instant.parse("1582-10-01T01:02:03Z"),
      Instant.parse("1970-02-28T11:12:13Z"),
      Instant.ofEpochMilli(0),
      Instant.parse("2019-03-20T10:15:30Z"),
      Instant.parse("2100-12-31T22:17:31Z")).foreach { instant =>
      checkEvaluation(Literal(instant), instant)
    }
  }

  test("construct literals from arrays of java.time.Instant") {
    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
      val instant0 = Instant.ofEpochMilli(0)
      checkEvaluation(Literal(Array(instant0)), Array(instant0))
      val instant1 = Instant.parse("2019-03-20T10:15:30Z")
      checkEvaluation(Literal(Array(instant0, instant1)), Array(instant0, instant1))
    }
  }

  // Runs `f` with both the session-local time zone and the JVM default time
  // zone overridden (and Java 8 datetime API enabled), restoring the JVM
  // default afterwards.
  private def withTimeZones(
      sessionTimeZone: String,
      systemTimeZone: String)(f: => Unit): Unit = {
    withSQLConf(
      SQLConf.SESSION_LOCAL_TIMEZONE.key -> sessionTimeZone,
      SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
      val originTimeZone = TimeZone.getDefault
      try {
        TimeZone.setDefault(TimeZone.getTimeZone(systemTimeZone))
        f
      } finally {
        TimeZone.setDefault(originTimeZone)
      }
    }
  }

  test("format timestamp literal using spark.sql.session.timeZone") {
    withTimeZones(sessionTimeZone = "GMT+01:00", systemTimeZone = "GMT-08:00") {
      val timestamp = LocalDateTime.of(2019, 3, 21, 0, 2, 3, 456000000)
        .atZone(ZoneOffset.UTC)
        .toInstant
      val expected = "TIMESTAMP '2019-03-21 01:02:03.456'"
      val literalStr = Literal.create(timestamp).sql
      assert(literalStr === expected)
    }
  }

  test("format date literal independently from time zone") {
    withTimeZones(sessionTimeZone = "GMT-11:00", systemTimeZone = "GMT-10:00") {
      val date = LocalDate.of(2019, 3, 21)
      val expected = "DATE '2019-03-21'"
      val literalStr = Literal.create(date).sql
      assert(literalStr === expected)
    }
  }

  test("SPARK-33860: Make CatalystTypeConverters.convertToCatalyst match special Array value") {
    assert(Literal(Array(1, 2, 3)) == Literal.create(Array(1, 2, 3), ArrayType(IntegerType)))
    assert(Literal(Array(1L, 2L, 3L)) == Literal.create(Array(1L, 2L, 3L), ArrayType(LongType)))
    assert(Literal(Array(1D, 2D, 3D)) == Literal.create(Array(1D, 2D, 3D), ArrayType(DoubleType)))
    assert(Literal("123") == Literal.create(Array('1', '2', '3'), StringType))
    assert(Literal(Array(1.toByte, 2.toByte, 3.toByte)) ==
      Literal.create(Array(1.toByte, 2.toByte, 3.toByte), BinaryType))
    assert(Literal(Array("1", "2", "3")) ==
      Literal.create(Array("1", "2", "3"), ArrayType(StringType)))
    assert(Literal(Array(Period.ofMonths(1))) ==
      Literal.create(Array(Period.ofMonths(1)), ArrayType(YearMonthIntervalType)))
  }

  test("SPARK-34342: Date/Timestamp toString") {
    assert(Literal.default(DateType).toString === "1970-01-01")
    assert(Literal.default(TimestampType).toString === "1969-12-31 16:00:00")
    withTimeZones(sessionTimeZone = "GMT+01:00", systemTimeZone = "GMT-08:00") {
      val timestamp = LocalDateTime.of(2021, 2, 3, 16, 50, 3, 456000000)
        .atZone(ZoneOffset.UTC)
        .toInstant
      val literalStr = Literal.create(timestamp).toString
      assert(literalStr === "2021-02-03 17:50:03.456")
    }
  }

  test("SPARK-34605: construct literals from java.time.Duration") {
    Seq(
      Duration.ofNanos(0),
      Duration.ofSeconds(-1),
      Duration.ofNanos(123456000),
      Duration.ofDays(106751991),
      Duration.ofDays(-106751991)).foreach { duration =>
      checkEvaluation(Literal(duration), duration)
    }
  }

  test("SPARK-34605: construct literals from arrays of java.time.Duration") {
    val duration0 = Duration.ofDays(2).plusHours(3).plusMinutes(4)
    checkEvaluation(Literal(Array(duration0)), Array(duration0))
    val duration1 = Duration.ofHours(-1024)
    checkEvaluation(Literal(Array(duration0, duration1)), Array(duration0, duration1))
  }

  test("SPARK-34615: construct literals from java.time.Period") {
    Seq(
      Period.ofYears(0),
      Period.of(-1, 11, 0),
      Period.of(1, -11, 0),
      Period.ofMonths(Int.MaxValue),
      Period.ofMonths(Int.MinValue)).foreach { period =>
      checkEvaluation(Literal(period), period)
    }
  }

  test("SPARK-34615: construct literals from arrays of java.time.Period") {
    val period0 = Period.ofYears(123).withMonths(456)
    checkEvaluation(Literal(Array(period0)), Array(period0))
    val period1 = Period.ofMonths(-1024)
    checkEvaluation(Literal(Array(period0, period1)), Array(period0, period1))
  }

  test("SPARK-35099: convert a literal of day-time interval to SQL string") {
    Seq(
      Duration.ofDays(-1) -> "-1 00:00:00",
      Duration.of(10, ChronoUnit.MICROS) -> "0 00:00:00.00001",
      Duration.of(MICROS_PER_DAY - 1, ChronoUnit.MICROS) -> "0 23:59:59.999999"
    ).foreach { case (duration, intervalPayload) =>
      val literal = Literal.apply(duration)
      val expected = s"INTERVAL '$intervalPayload' DAY TO SECOND"
      assert(literal.sql === expected)
      assert(literal.toString === expected)
    }
  }

  test("SPARK-35099: convert a literal of year-month interval to SQL string") {
    Seq(
      Period.ofYears(-1) -> "-1-0",
      Period.of(9999, 11, 0) -> "9999-11",
      Period.ofMonths(-11) -> "-0-11"
    ).foreach { case (period, intervalPayload) =>
      val literal = Literal.apply(period)
      val expected = s"INTERVAL '$intervalPayload' YEAR TO MONTH"
      assert(literal.sql === expected)
      assert(literal.toString === expected)
    }
  }
}
| BryanCutler/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala | Scala | apache-2.0 | 17,721 |
package com.thangiee.lolhangouts.ui.custom
import android.content.Context
import android.util.AttributeSet
import android.widget.{TextView, ImageView, FrameLayout}
import com.thangiee.lolhangouts.R
import com.thangiee.lolhangouts.ui.core.{Presenter, CustomView}
import com.thangiee.lolhangouts.ui.utils._
/**
 * Compound view showing a champion's icon with its name underneath.
 * The child layout is inflated lazily when the view is attached.
 */
class ChampIconView(implicit ctx: Context, a: AttributeSet) extends FrameLayout(ctx, a) with CustomView {
  // Resolved lazily: the children only exist after onAttached() inflates them.
  private lazy val champIcon = find[ImageView](R.id.img_champ_icon)
  private lazy val champName = find[TextView](R.id.tv_champ_name)

  // This view has no presentation logic; an empty Presenter satisfies CustomView.
  override val presenter: Presenter = new Presenter {}

  override def onAttached(): Unit = {
    super.onAttached()
    addView(layoutInflater.inflate(R.layout.champion_icon_view, this, false))
  }

  // Updates both the icon drawable and the label for the given champion name.
  def setChampion(name: String) = {
    champIcon.setImageDrawable(ChampIconAsset(name))
    champName.text = name
  }
}
| Thangiee/LoL-Hangouts | src/com/thangiee/lolhangouts/ui/custom/ChampIconView.scala | Scala | apache-2.0 | 869 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.data
import com.typesafe.scalalogging.LazyLogging
import org.geotools.data.Query
import org.locationtech.geomesa.index.geotools.{GeoMesaFeatureCollection, GeoMesaFeatureSource}
import org.locationtech.geomesa.process.GeoMesaProcessVisitor
import org.locationtech.geomesa.process.analytic.SamplingVisitor
import org.opengis.feature.FeatureVisitor
import org.opengis.util.ProgressListener
/**
 * GeoMesa feature collection backed by a Kudu data store.
 *
 * Overrides visitor dispatch so that GeoMesa analytic processes are executed
 * against the source/query directly instead of iterating features client-side.
 */
class KuduFeatureCollection(source: GeoMesaFeatureSource, query: Query)
    extends GeoMesaFeatureCollection(source, query) with LazyLogging {

  override def accepts(visitor: FeatureVisitor, progress: ProgressListener): Unit =
    visitor match {
      // Checked BEFORE the general GeoMesaProcessVisitor case so that sampling
      // falls back to the superclass (client-side) path — keep this ordering.
      case _: SamplingVisitor => super.accepts(visitor, progress) // TODO sampling not implemented yet
      // Push-down: let the process run itself against the source and query.
      case v: GeoMesaProcessVisitor => v.execute(source, query)
      // Anything else: plain client-side iteration via the superclass.
      case v =>
        logger.debug(s"Using fallback FeatureVisitor for process ${v.getClass.getName}.")
        super.accepts(visitor, progress)
    }
}
| ddseapy/geomesa | geomesa-kudu/geomesa-kudu-datastore/src/main/scala/org/locationtech/geomesa/kudu/data/KuduFeatureCollection.scala | Scala | apache-2.0 | 1,462 |
package de.erna.krautmail.collectors.imap.adapters
import de.erna.krautmail.collectors.imap.{ImapConnectionInfo, ImapAdapter}
import scala.concurrent.{ExecutionContext, Future}
import javax.mail.Session
import java.util.Properties
import grizzled.slf4j.Logging
/**
* User: Eros Candelaresi <eros@candelaresi.de>
* Project: server
* Date: 18.10.2013
* Time: 09:39
*/
/**
 * JavaMail-based implementation of the IMAP adapter.
 *
 * All blocking JavaMail calls are wrapped in a Future running on the supplied
 * ExecutionContext; callers must provide a context suitable for blocking I/O.
 */
class JavaMailImapAdapter(implicit val ec: ExecutionContext) extends ImapAdapter[JavaMailImapConnectionContext] with Logging {
  /**
   * Creates a fresh <code>ImapConnectionContext</code> and initializes it with empty/default values.
   * @return A new, empty <code>ImapConnectionContext</code> suitable for keeping the adapters internal data.
   */
  def createContext() = new JavaMailImapConnectionContext()
  /**
   * Connect to the given IMAP server using the provided connection info.
   *
   * For details on possible exceptions thrown by this method, check JavaMail
   * <a href="https://javamail.java.net/nonav/docs/api/javax/mail/Service.html#connect(java.lang.String, int, java.lang.String, java.lang.String)">
   * documentation</a>.
   * @param connectionInfo Connection information like host, username, password, etc.
   * @param context A context object that might have been filled before, eg. from previous failed connection attempts,
   * etc. In most cases it will be empty
   * @return A future that will contain a <b>copy</b> of the context object with updated values.
   */
  def connect(connectionInfo: ImapConnectionInfo, context: JavaMailImapConnectionContext): Future[JavaMailImapConnectionContext] = {
    Future {
      trace("Setting up IMAP session and store...")
      val props = new Properties
      props.setProperty("mail.imap.connectiontimeout", "20000")
      props.setProperty("mail.imap.timeout", "20000")
      props.setProperty("mail.imap.connectionpooltimeout", "20000")
      // Bug fix: Session.getDefaultInstance caches the FIRST session created in the
      // JVM and silently ignores the Properties passed on every later call, so the
      // timeout settings above could be dropped. getInstance always honours `props`.
      val session = Session.getInstance(props)
      val store = session.getStore("imap")
      trace("Done setting up IMAP session and store.")
      debug("Connecting to IMAP server...")
      store.connect(connectionInfo.endpoint.getHostName,
        connectionInfo.endpoint.getPort,
        connectionInfo.username,
        connectionInfo.password)
      debug("Done connecting to IMAP server.")
      // Return an updated copy; the incoming context itself is never mutated.
      context.copy(session = Some(session), store = Some(store))
    }
  }
}
| DerEros/krautmail | server/collectors/imap/src/main/scala/de/erna/krautmail/collectors/imap/adapters/JavaMailImapAdapter.scala | Scala | gpl-3.0 | 2,391 |
package im.tox.antox.wrapper
import java.io.File
import java.sql.Timestamp
/**
 * Read-only view of a contact (friend or group peer) as shown in contact lists.
 *
 * Implementations supply identity (`key`), presence (`online`, `status`,
 * `statusMessage`), avatar state, user-set flags (`blocked`, `ignored`,
 * `favorite`) and the most recent conversation snippet.
 */
trait ContactInfo {
  def key: String
  def name: String
  def avatar: Option[File]
  def online: Boolean
  def status: String
  def statusMessage: String
  def receivedAvatar: Boolean
  def blocked: Boolean
  def ignored: Boolean
  def favorite: Boolean
  def lastMessage: String
  def lastMessageTimestamp: Timestamp
  def unreadCount: Int
  def alias: String
  /**
   * Returns `alias` if it has been set (non-empty), otherwise returns `name`.
   */
  def getAliasOrName: String = {
    if (alias != "") alias else name
  }
}
/*
* Copyright 2013 MaurΓcio Linhares
*
* MaurΓcio Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.postgresql.messages.backend
/**
 * Models the PostgreSQL backend `CommandComplete` message, which signals that a
 * command finished executing.
 *
 * @param rowsAffected  number of rows the command touched
 * @param statusMessage the command tag reported by the server
 */
case class CommandCompleteMessage(rowsAffected: Int, statusMessage: String)
  extends ServerMessage(ServerMessage.CommandComplete)
| dripower/postgresql-async | postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/CommandCompleteMessage.scala | Scala | apache-2.0 | 847 |
//package com.sksamuel.elastic4s.streams
//
//import java.util.concurrent.{CountDownLatch, TimeUnit}
//
//import akka.actor.ActorSystem
//import com.sksamuel.elastic4s.ElasticDsl._
//import com.sksamuel.elastic4s.bulk.{BulkCompatibleDefinition, RichBulkItemResponse}
//import com.sksamuel.elastic4s.jackson.ElasticJackson
//import com.sksamuel.elastic4s.mappings.dynamictemplate.DynamicMapping.Strict
//import com.sksamuel.elastic4s.testkit.ElasticSugar
//import org.reactivestreams.{Publisher, Subscriber, Subscription}
//import org.scalatest.{BeforeAndAfter, Matchers, WordSpec}
//
//import scala.util.Random
//
//class BulkIndexingSubscriberIntegrationTest extends WordSpec with ElasticSugar with Matchers with BeforeAndAfter {
//
// import ReactiveElastic._
// import scala.concurrent.duration._
//
// implicit val system = ActorSystem()
//
// val indexName = "bulkindexsubint"
// val strictIndex = "bulkindexfail"
//
// after {
// deleteIndex(indexName)
// deleteIndex(strictIndex)
// }
//
// "elastic-streams" should {
// "index all received data" in {
// ensureIndexExists(indexName)
// implicit val builder = new ShipRequestBuilder(indexName)
//
// val completionLatch = new CountDownLatch(1)
// val subscriber = client.subscriber[Ship](10, 2, completionFn = () => completionLatch.countDown)
// ShipPublisher.subscribe(subscriber)
// completionLatch.await(5, TimeUnit.SECONDS)
//
// blockUntilCount(Ship.ships.length, indexName)
// }
//
// "index all received data even if the subscriber never completes" in {
// ensureIndexExists(indexName)
// implicit val builder = new ShipRequestBuilder(indexName)
//
// // The short interval is just for the sake of test execution time, it's not a recommendation
// val subscriber = client.subscriber[Ship](8, 2, flushInterval = Some(500.millis))
// ShipEndlessPublisher.subscribe(subscriber)
//
// blockUntilCount(Ship.ships.length, indexName)
// }
//
// "index all received data and ignore failures" in {
//
// client.execute {
// createIndex(strictIndex).mappings(
// mapping("ships").fields(
// textField("name"),
// intField("description"),
// intField("size")
// ) dynamic Strict
// )
// }.await
// implicit val builder = new ShipRequestBuilder(strictIndex)
//
// val errorsExpected = 2
//
// val completionLatch = new CountDownLatch(1)
// val ackLatch = new CountDownLatch(Ship.ships.length - errorsExpected)
// val errorLatch = new CountDownLatch(errorsExpected)
// val subscriber = client.subscriber[Ship](10, 2, listener = new ResponseListener {
// override def onAck(resp: RichBulkItemResponse): Unit = ackLatch.countDown()
// override def onFailure(resp: RichBulkItemResponse): Unit = errorLatch.countDown()
// }, completionFn = () => completionLatch.countDown(), maxAttempts = 2, failureWait = 100.millis)
// ShipPublisher.subscribe(subscriber)
// completionLatch.await(5, TimeUnit.SECONDS)
//
// ackLatch.getCount should be(0)
// errorLatch.getCount should be(0)
//
// blockUntilCount(Ship.ships.length - errorsExpected, strictIndex)
// }
// }
//}
//
//object Ship {
//
// val ships = List(
// Ship("clipper"),
// Ship("anaconda"),
// Ship("courier", Some("Fast ship that delivers")),
// Ship("python"),
// Ship("fer-de-lance"),
// Ship("sidewinder"),
// Ship("cobra"),
// Ship("viper"),
// Ship("eagle"),
// Ship("vulture"),
// Ship("dropship", Some("Drop it while its hot")),
// Ship("orca"),
// Ship("type6"),
// Ship("type7"),
// Ship("type9"),
// Ship("hauler"),
// Ship("adder"),
// Ship("asp explorer"),
// Ship("diamondback")
// )
//
//}
//
//class ShipRequestBuilder(indexName: String = "bulkindexsubint") extends RequestBuilder[Ship] {
//
// import ElasticJackson.Implicits._
//
// override def request(ship: Ship): BulkCompatibleDefinition = {
// indexInto(s"$indexName/ships") source ship
// }
//}
//
//object ShipPublisher extends Publisher[Ship] {
//
// override def subscribe(s: Subscriber[_ >: Ship]): Unit = {
// var remaining = Ship.ships
// s.onSubscribe(new Subscription {
// override def cancel(): Unit = ()
// override def request(n: Long): Unit = {
// remaining.take(n.toInt).foreach(t => s.onNext(t))
// remaining = remaining.drop(n.toInt)
// if (remaining.isEmpty)
// s.onComplete()
// }
// })
// }
//}
//
//object ShipEndlessPublisher extends Publisher[Ship] {
//
// override def subscribe(s: Subscriber[_ >: Ship]): Unit = {
// var remaining = Ship.ships
// s.onSubscribe(new Subscription {
// override def cancel(): Unit = ()
// override def request(n: Long): Unit = {
// remaining.take(n.toInt).foreach(t => s.onNext(t))
// remaining = remaining.drop(n.toInt)
// }
// })
// }
//}
//
//case class Ship(name: String, description: Option[String] = None, size: Int = Random.nextInt(100))
| aroundus-inc/elastic4s | elastic4s-streams/src/test/scala/com/sksamuel/elastic4s/streams/BulkIndexingSubscriberIntegrationTest.scala | Scala | apache-2.0 | 5,081 |
/**
* Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.pantsbuild.zinc.analysis
import java.io.File
import org.pantsbuild.zinc.util.Util
/**
* Configuration for sbt analysis and analysis output options.
*/
/**
 * Configuration for sbt analysis and analysis output options.
 *
 * @param _cache       optional analysis cache file; accessing `cache` without one fails fast
 * @param cacheMap     per-file analysis cache overrides
 * @param rebaseMap    path rebasing applied to analysis entries (defaults to cwd -> /proc/self/cwd)
 * @param clearInvalid whether invalid analysis entries are cleared
 */
case class AnalysisOptions(
  _cache: Option[File] = None,
  cacheMap: Map[File, File] = Map.empty,
  rebaseMap: Map[File, File] = Map(new File(System.getProperty("user.dir")) -> new File("/proc/self/cwd")),
  clearInvalid: Boolean = true
) {

  /** The configured analysis cache file; throws when none was provided. */
  lazy val cache: File = _cache match {
    case Some(file) => file
    case None       => throw new RuntimeException(s"An analysis cache file is required.")
  }

  /** Returns a copy with every configured path resolved against `relativeTo`. */
  def withAbsolutePaths(relativeTo: File): AnalysisOptions = {
    val base = Some(relativeTo)
    copy(
      _cache = Util.normaliseOpt(base)(_cache),
      cacheMap = Util.normaliseMap(base)(cacheMap),
      rebaseMap = Util.normaliseMap(base)(rebaseMap)
    )
  }
}
| tdyas/pants | src/scala/org/pantsbuild/zinc/analysis/AnalysisOptions.scala | Scala | apache-2.0 | 983 |
package com.greencatsoft.greenlight
import com.greencatsoft.greenlight.build.TestFramework
/** Concrete entry point exposing Greenlight's TestFramework under a stable,
  * zero-argument class name — presumably referenced by name from build/test
  * runner configuration; confirm against the build definition. */
class Greenlight extends TestFramework
| greencatsoft/greenlight | shared/main/scala/com/greencatsoft/greenlight/Greenlight.scala | Scala | apache-2.0 | 132 |
package glasskey.model.validation
import glasskey.model.ValidatedAccessToken
import glasskey.resource.{Claim, OIDCTokenData}
import scala.reflect.ClassTag
import scala.util.parsing.combinator.RegexParsers
/**
* Created by loande on 3/20/15.
*/
/**
 * Boolean expression tree evaluated against an optional token of type T.
 *
 * `containsExprs` tells `toString` whether a node is composite and therefore
 * needs parentheses when rendered inside another expression.
 */
sealed trait Expr[T] {
  def validate(token:Option[T]):Boolean
  val containsExprs: Boolean
}
/** Logical conjunction: both sub-expressions must validate. */
// NOTE(review): the `self =>` alias is unused and could be dropped.
case class And[T](left: Expr[T], right: Expr[T]) extends Expr[T] { self =>
  override val containsExprs = true
  override def validate(token: Option[T]): Boolean =
    left.validate(token) && right.validate(token)
  override def toString: String = {
    // Parenthesize only composite children to keep the rendering minimal.
    (if (left.containsExprs) s"(${left})" else s"${left}") + " and " +
      (if (right.containsExprs) s"(${right})" else s"${right}")
  }
}
/** Logical disjunction: at least one sub-expression must validate. */
case class Or[T](left: Expr[T], right: Expr[T]) extends Expr[T] {
  override val containsExprs = true
  override def validate(token: Option[T]): Boolean =
    left.validate(token) || right.validate(token)
  override def toString: String = {
    (if (left.containsExprs) s"(${left})" else s"${left}") + " or " +
      (if (right.containsExprs) s"(${right})" else s"${right}")
  }
}
/** Leaf predicate: the OIDC token must carry a claim `claimName` whose value equals `value`. */
case class OIDCHas[T](claimName: String, value: T)(implicit tag: ClassTag[T]) extends Expr[OIDCTokenData] {
  override val containsExprs = false
  override def validate(token:Option[OIDCTokenData]) : Boolean =
    token.isDefined && checkValue(token.get)
  def checkValue(token: OIDCTokenData): Boolean = {
    token.tokenDetails.find(claim => claim.name == claimName) match {
      // NOTE(review): `Claim[T]` is erased at runtime, so this match cannot actually
      // check the claim's value type — the equality check below does the real work.
      case Some(c: Claim[T]) => c.value == value
      case None => false
    }
  }
  override def toString: String = {
    // Quote only string values, e.g. HAS_ROLE_STRING('admin') vs HAS_AGE_INTEGER(21).
    val withQuotes: String = if (value.isInstanceOf[String]) s"'$value'" else value.toString
    s"HAS_${claimName.toUpperCase}_${tag.runtimeClass.getSimpleName.toUpperCase}(${withQuotes})"
  }
}
/** Leaf predicate: a well-known OAuth token field must equal `value`. */
case class OAuthHas[T](claim: OAuthClaim, value: T)(implicit tag: ClassTag[T]) extends Expr[ValidatedAccessToken] {
  override val containsExprs = false
  override def validate(token:Option[ValidatedAccessToken]) : Boolean =
    token.isDefined && checkValue(token.get)
  def checkValue(token: ValidatedAccessToken): Boolean = {
    // Dispatch on the claim kind to the corresponding token field.
    claim match {
      case Scope => checkClaimOption[String](token.scope)
      case Client_Id => checkClaimOption[String](token.client_id)
      case Org => checkClaimOption[String](token.access_token.orgName)
      case UserName => checkClaimOption[String](token.access_token.username)
    }
  }
  override def toString: String = {
    val withQuotes: String = if (value.isInstanceOf[String]) s"'$value'" else value.toString
    s"HAS_${claim.getClass.getSimpleName.split("\\\\$").last.toUpperCase}_${tag.runtimeClass.getSimpleName.toUpperCase}(${withQuotes})"
  }
  // NOTE(review): the type parameter shadows the outer T; `opt.contains(value)` would
  // be the idiomatic form of this isDefined/get pair.
  private def checkClaimOption[T](opt : Option[T]) = opt.isDefined && opt.get == value
}
/**
 * Recursive-descent parser for boolean authorization expressions over claims.
 *
 * Grammar (standard precedence — `and` binds tighter than `or`):
 *   expr   := term ("or" term)*
 *   term   := factor ("and" factor)*
 *   factor := tokenSpecificExpressions | "(" expr ")"
 *
 * Subclasses plug in the leaf predicates via `tokenSpecificExpressions`.
 */
trait AuthorizationClaimParser[T] extends RegexParsers {
  // Bare identifier token (letters, digits, underscore).
  def param: Parser[String] = "^[a-zA-Z0-9_]*".r ^^ (new String(_))
  /** Leaf-expression parser supplied by the concrete token type's parser. */
  def tokenSpecificExpressions : Parser[Expr[T]]
  // `(a /: b)` is foldLeft: apply each parsed Or/And constructor to the accumulator,
  // producing a left-associative expression tree.
  def expr: Parser[Expr[T]] = term ~ rep(ors) ^^ { case a ~b => (a /: b)((acc,f) => f(acc))}
  def ors: Parser[Expr[T] => Expr[T]] = "or" ~ term ^^ { case "or" ~ b => Or(_, b)}
  def term: Parser[Expr[T]] = factor ~ rep(ands) ^^ { case a ~ b => (a /: b)((acc,f) => f(acc))}
  def ands: Parser[Expr[T] => Expr[T]] = "and" ~ factor ^^ { case "and" ~ b => And(_, b) }
  def factor: Parser[Expr[T]] = tokenSpecificExpressions | "(" ~> expr <~ ")"
  /** Parses the whole input string, throwing on any parse failure or error. */
  def parseExpression(s: String): Expr[T] = parseAll(expr, s) match {
    case Success(res, _) => res
    case Failure(msg, _) => throw new Exception(msg)
    case Error(msg, _) => throw new Exception(msg)
  }
}
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.actorregistry
import java.lang.management.ManagementFactory
import javax.management.MXBean
import akka.actor.{ActorContext, ActorRef}
import org.squbs.unicomplex.JMX._
/**
 * JMX object-name helpers for actor-registry MBeans.
 *
 * `prefix` comes from the `org.squbs.unicomplex.JMX._` import and — presumably —
 * derives a per-actor-system namespace from the implicit ActorContext; confirm.
 */
private[actorregistry] object ActorRegistryBean {
  val Pattern = "org.squbs.unicomplex:type=ActorRegistry,name="
  val Total = Pattern + "*"
  // Bean name for one actor: the "<root>user/" portion of its path is stripped.
  def objName(actor: ActorRef) (implicit context: ActorContext)= prefix + Pattern + actor.path.toString.split(s"${actor.path.root}user/").mkString("")
  // All registry beans currently registered on the platform MBean server.
  def totalBeans(implicit context: ActorContext) = ManagementFactory.getPlatformMBeanServer.queryNames(prefix + Total, null)
}
/** Read-only JMX view of a single registered actor. */
@MXBean
private[actorregistry] trait ActorRegistryMXBean {
  def getPath : String
  def getActorMessageTypeList: java.util.List[String]
}
/** Read-only JMX view of the registry configuration. */
@MXBean
private[actorregistry] trait ActorRegistryConfigMXBean {
  def getCount : Int
  def getTimeout: Int
}
/** Backs ActorRegistryConfigMXBean: live bean count plus the configured timeout. */
private[actorregistry] class ActorRegistryConfigBean(timeout: Int, implicit val context: ActorContext) extends ActorRegistryConfigMXBean {
  def getCount : Int = ActorRegistryBean.totalBeans.size
  def getTimeout: Int = timeout
}
| paypal/squbs | squbs-actorregistry/src/main/scala/org/squbs/actorregistry/ActorRegistryBean.scala | Scala | apache-2.0 | 1,691 |
/**
* Ayla Geometry Core
* (c) 2011-2014 William Harvey
* http://www.aylasoftware.org
*/
package ayla.util
/**
 * Disjoint-set forest over the integers `0 until n`, using union by rank and
 * path compression. Mutable: `union` and `find` update the internal arrays.
 */
class UnionFind(n: Int) {
  // Every element starts as the root of its own singleton set.
  val parents = Array.range(0, n)
  // Upper bound on tree depth per root; all zero initially.
  val ranks = new Array[Int](n)

  /** Merges the sets containing `x` and `y`; no-op when already joined. */
  def union(x: Int, y: Int): Unit = {
    val rootX = find(x)
    val rootY = find(y)
    if (rootX != rootY) {
      // Attach the shallower tree under the deeper one; bump rank on ties.
      if (ranks(rootX) < ranks(rootY)) parents(rootX) = rootY
      else {
        parents(rootY) = rootX
        if (ranks(rootX) == ranks(rootY)) ranks(rootX) += 1
      }
    }
  }

  /** Returns the canonical representative of `x`'s set, compressing the path. */
  def find(x: Int): Int =
    if (parents(x) == x) x
    else {
      val root = find(parents(x))
      parents(x) = root
      root
    }
}
/**
 * Disjoint-set forest over arbitrary (hashable) elements, with union by rank
 * and path compression. Unlike [[UnionFind]], `find` lazily registers unknown
 * elements as fresh singleton sets; `findOpt` never registers anything.
 */
class HashUnionFind[T](initialSet: Set[T] = Set.empty[T]) {
  val parents = new scala.collection.mutable.HashMap[T, T]
  val ranks = new scala.collection.mutable.HashMap[T, Int]

  initialSet.foreach(makeSet)

  def this() = this(Set.empty[T])

  /** Registers `x` as the root of a fresh singleton set. */
  def makeSet(x: T) = {
    parents(x) = x
    ranks(x) = 0
  }

  /** Merges the sets of `x` and `y` and returns the representative of `x`'s set. */
  def union(x: T, y: T): T = {
    val rootX = find(x)
    val rootY = find(y)
    if (ranks(rootX) > ranks(rootY)) parents(rootY) = rootX
    else if (ranks(rootX) < ranks(rootY)) parents(rootX) = rootY
    else if (rootX != rootY) {
      parents(rootY) = rootX
      ranks(rootX) += 1
    }
    find(x)
  }

  /** Like `find`, but yields None for unknown elements instead of registering them. */
  def findOpt(x: T): Option[T] =
    parents.get(x).map { parent =>
      if (parent == x) x
      else {
        val root = find(parent)
        parents(x) = root
        root
      }
    }

  /**
   * Returns the canonical representative of `x`'s set, compressing the path.
   * Unknown elements are automatically registered via `makeSet`.
   */
  def find(x: T): T =
    parents.get(x) match {
      case None =>
        makeSet(x)
        x
      case Some(parent) if parent == x => x
      case Some(parent) =>
        val root = find(parent)
        parents(x) = root
        root
    }
}
| harveywi/Ayla-Geometry-Core | src/ayla/util/UnionFind.scala | Scala | gpl-2.0 | 1,994 |
package mlbigbook.ml
import fif.Data
import mlbigbook.math.{MathVectorOps, RandoMut}
import scala.annotation.tailrec
import scala.language.{higherKinds, reflectiveCalls}
import scala.reflect.ClassTag
/**
 * Lloyd's k-means clustering implemented on top of the ClusteringModule
 * abstractions (vector ops `vops`, `Distance`, `Vectorizer`, `Data` type class).
 */
trait Kmeans extends ClusteringModule {

  /** Creates a pseudo-random number generator for the type N. */
  val mkRandomNumGen: () => RandoMut[N]

  // Brings in the Data type class operations as methods "accessible" using
  // familiar object dot notation.
  // i.e. `data.map` instead of `implicitly[Data[D]].map(data)`
  import Data.ops._

  /**
   * Clusters `data`: vectorizes each item, seeds `conf.nClusters` random
   * centers, then iterates until `conf.maxIterations` or convergence.
   */
  override final def cluster[D[_]: Data](
    conf: ClusteringConf,
    dist: Distance,
    toVec: Vectorizer
  )(data: D[Item]): Seq[Center] =
    cluster_h(
      conf,
      dist,
      toVec,
      0,
      data map { toVec.vectorize },
      initialize(conf.nClusters, toVec.nDimensions)
    )

  /** Seeds `nClusters` centers whose coordinates are independent random draws. */
  final def initialize(
    nClusters: Int,
    nDimensions: Int
  ): Seq[Center] = {
    val r = mkRandomNumGen()
    (0 until nClusters).map { id =>
      Center(
        id = id.toString,
        // ones(..) scaled coordinate-wise by fresh random values.
        mean = vops.map(vops.ones(nDimensions)) { one =>
          vops.n.times(one, r.next())
        }
      )
    }.toSeq
  }

  /**
   * Iteration loop: recompute centers, stop when the summed |distance| moved by
   * the centers drops below `conf.tolerance` or the iteration cap is reached.
   */
  @tailrec
  private[this] final def cluster_h[D[_]: Data](
    conf: ClusteringConf,
    dist: Distance,
    toVec: Vectorizer,
    currIter: Int,
    data: D[V[N]],
    currCenters: Seq[Center]
  ): Seq[Center] =
    if (currIter >= conf.maxIterations)
      currCenters
    else {
      val updatedCenters = updateCenters(dist, toVec, currCenters, data)
      // NOTE(review): debug println left in library code — consider a logger or
      // removing it. Also note updatedCenters can be SHORTER than currCenters
      // (see updateCenters), which the zip below silently truncates to.
      println(
        s"""[center check: currIter=$currIter]
 |[ORIGINAL # ${currCenters.size}] ${currCenters.mkString("\\t")}
 |[UPDATED # ${updatedCenters.size}] ${updatedCenters.mkString(
          "\\t")}
        """.stripMargin
      )
      // NOTE(review): despite the name, this is a sum of absolute distances,
      // not of squared distances.
      val sumSquaredChangeInMeansBetweenIters =
        currCenters.zip(updatedCenters).foldLeft(0.0) {
          case (accum, (existing, updated)) =>
            val d = math.abs(
              implicitly[Numeric[N]].toDouble(
                dist(existing.mean, updated.mean)
              )
            )
            accum + d
        }
      if (sumSquaredChangeInMeansBetweenIters < conf.tolerance)
        updatedCenters
      else
        cluster_h(
          conf,
          dist,
          toVec,
          currIter + 1,
          data,
          updatedCenters
        )
    }

  /**
   * One Lloyd step: assign every vector to its nearest center, then replace
   * each center's mean with the average of its assigned vectors.
   * Centers that receive no assignments disappear from the result (groupBy
   * only yields non-empty groups).
   */
  def updateCenters[D[_]: Data](
    dist: Distance,
    toVec: Vectorizer,
    centers: Seq[Center],
    data: D[V[N]]
  ): Seq[Center] =
    data
      .zip(assign(centers, dist)(data))
      .groupBy { case (_, assignment) => assignment }
      .map {
        case (label, bothDataAndLabel) =>
          // Component-wise sum of all vectors assigned to this center...
          val summed =
            bothDataAndLabel.foldLeft(vops.zeros(toVec.nDimensions)) {
              case (summing, (vector, _)) =>
                vops.addV(summing, vector)
            }
          // ...divided by the assignment count gives the new mean.
          val newMean =
            vops.divS(
              summed,
              implicitly[Numeric[N]].fromInt(bothDataAndLabel.size)
            )
          Center(
            id = label,
            mean = newMean
          )
      }
      .toSeq
}
/** Companion: refinement-type alias and a constructor wiring concrete type members. */
object Kmeans {

  /** A Kmeans instance whose abstract type members are pinned to the given types. */
  type Type[ItemToCluster, Num, Vec[_]] = Kmeans {
    type Item = ItemToCluster
    type N = Num
    type V[_] = Vec[_]
  }

  /**
   * Builds a Kmeans instance from concrete vector ops and an RNG factory.
   *
   * NOTE(review): the asInstanceOf casts below bridge the refinement type
   * `V[_] = Vec[_]` (an existential) with the concrete Vec — the commented-out
   * attempts show this was a deliberate workaround; confirm soundness before
   * changing the type members.
   */
  def apply[ItemToCluster, Num, Vec[_]](
    mathVectorOps: MathVectorOps.Type[Num, Vec],
    mkRando: () => RandoMut[Num]
  )(
    implicit ctForI: ClassTag[ItemToCluster],
    ctForN: ClassTag[Num],
    ctForVn: ClassTag[Vec[Num]]
  ): Type[ItemToCluster, Num, Vec] = {
    //    val okVops: MathVectorOps.Type[Type[ItemToCluster, Num, Vec]#N, Type[ItemToCluster, Num, Vec]#V] =
    //      mathVectorOps
    //      mathVectorOps.asInstanceOf[MathVectorOps.Type[Type[ItemToCluster, Num, Vec]#N, Type[ItemToCluster, Num, Vec]#V]]
    //    val okCtVn: ClassTag[Type[ItemToCluster, Num, Vec]#V[Type[ItemToCluster, Num, Vec]#N]] =
    //      ctForVn
    //      ctForVn.asInstanceOf[ClassTag[Type[ItemToCluster, Num, Vec]#V[Type[ItemToCluster, Num, Vec]#N]]]
    new Kmeans {
      override type Item = ItemToCluster
      override type N = Num
      override type V[_] = Vec[_]
      override lazy val mkRandomNumGen = mkRando
      override lazy val vops =
        mathVectorOps.asInstanceOf[MathVectorOps.Type[N, V]]
      override implicit lazy val ctI = ctForI
      override implicit lazy val ctN = ctForN
      override implicit lazy val ctVn = ctForVn.asInstanceOf[ClassTag[V[N]]]
    }
  }
}
| malcolmgreaves/bigmlbook | fp4ml-main/src/main/scala/mlbigbook/ml/Kmeans.scala | Scala | lgpl-3.0 | 4,528 |
/*
* Copyright ActionML, LLC under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* ActionML licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.actionml.core.validate
/**
* @author The ActionML Team (<a href="http://actionml.com">http://actionml.com</a>)
* Created by Pavlenov Semen on 18.03.17.
*/
/** Sealed ADT of request validation/processing failures; each case carries a
  * human-readable message suitable for returning to the API client. */
sealed trait ValidateError
/** The request payload could not be parsed. */
final case class ParseError(message: String) extends ValidateError
/** Required parameters were absent from the request. */
final case class MissingParams(message: String) extends ValidateError
/** Parameters were present but had unacceptable values. */
final case class WrongParams(message: String) extends ValidateError
/** An event arrived out of its expected order. */
final case class EventOutOfSequence(message: String) extends ValidateError
/** The addressed resource does not exist. */
final case class ResourceNotFound(message: String) extends ValidateError
/** The requested operation is not (yet) supported. */
final case class NotImplemented(message: String = "Not implemented") extends ValidateError
/** The request was valid but failed during execution. */
final case class ValidRequestExecutionError(message: String = "Errors executing a valid request") extends ValidateError
| actionml/harness | rest-server/core/src/main/scala/com/actionml/core/validate/ValidateError.scala | Scala | apache-2.0 | 1,553 |
package org.apache.spark.core.server
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.pattern.ask
import com.typesafe.config.ConfigFactory
import org.apache.spark.core.logging.LoggingOutputStream
import org.apache.spark.core.server.actors._
import scala.concurrent.Await
/**
* Spark-Job-REST entry point.
*/
/**
 * Spark-Job-REST entry point: boots the actor system under a Supervisor and
 * wires the jar, context-manager and job actors into the HTTP Controller.
 */
object Main {
  def main(args: Array[String]) {
    // Route stdout/stderr through the logging framework.
    LoggingOutputStream.redirectConsoleOutput
    // Loads deployment configuration `deploy.conf` on top of application defaults `application.conf`
    val defaultConfig = ConfigFactory.load("deploy").withFallback(ConfigFactory.load())
    val masterConfig = defaultConfig.getConfig("manager")
    val system = ActorSystem("ManagerSystem", masterConfig)
    val supervisor = system.actorOf(Props(classOf[Supervisor]), "Supervisor")
    // Actors are created via the supervisor so it owns their lifecycle/restarts.
    val jarActor = createActor(Props(new JarActor(defaultConfig)), "JarActor", system, supervisor)
    val contextManagerActor = createActor(Props(new ContextManagerActor(defaultConfig, jarActor)), "ContextManager", system, supervisor)
    val jobManagerActor = createActor(Props(new JobActor(defaultConfig, contextManagerActor)), "JobManager", system, supervisor)
    new Controller(defaultConfig, contextManagerActor, jobManagerActor, jarActor, system)
  }
  // Asks the supervisor to create a child actor and blocks until its ActorRef arrives.
  // NOTE(review): `ask`/`Await` rely on an implicit `timeout` not defined in this
  // object — presumably supplied by the `actors._` import; confirm. Also the
  // `customSystem` parameter is unused.
  def createActor(props: Props, name: String, customSystem: ActorSystem, supervisor: ActorRef): ActorRef = {
    val actorRefFuture = ask(supervisor, (props, name))
    Await.result(actorRefFuture, timeout.duration).asInstanceOf[ActorRef]
  }
}
| linzhe/matrix | src/main/scala/org/apache/spark/core/server/Main.scala | Scala | apache-2.0 | 1,515 |
package org.intracer.wmua
import db.scalikejdbc.User
import org.specs2.mutable.Specification
/**
 * specs2 tests for [[User]]: contest membership and bulk parsing of member
 * lists (emails, "Name <email>" pairs, wiki account names, "User:" prefixes).
 *
 * NOTE(review): in blocks with several `===` expectations only the LAST
 * expression is the block's result — unless thrown-expectations mode is active,
 * the earlier checks here are silently discarded; confirm the specs2 setup.
 */
class UserSpec extends Specification {
  "User" should
    {
      "be in contest" in {
        // Membership requires both sides to define the SAME contest id.
        User("", "", contestId = Some(1)).isInContest(Some(1)) === true
        User("", "", contestId = Some(1)).isInContest(Some(2)) === false
        User("", "", contestId = None).isInContest(None) === false
        User("", "", contestId = Some(1)).isInContest(None) === false
        User("", "", contestId = None).isInContest(Some(1)) === false
      }
    }
  "parseList" should {
    "parse empty" in {
      User.parseList("") === Seq.empty
    }
    "parse one email" in {
      val list: Seq[User] = User.parseList("123@abc.com")
      val users: Seq[User] = Seq(User(email = "123@abc.com", id = None, contestId = None, fullname = ""))
      list === users
    }
    "parse one email with name" in {
      // "Name Surname <email>" populates fullname and strips the angle brackets.
      val list: Seq[User] = User.parseList("Name Surname <123@abc.com>")
      val users: Seq[User] = Seq(User(email = "123@abc.com", id = None, contestId = None, fullname = "Name Surname"))
      list === users
    }
    "parse emails" in {
      val emails = Seq("123@abc.com", "234@bcd.com", "345@cde.com")
      User.parseList(emails.mkString("\\n")) === emails.map { email =>
        User(id = None, contestId = None, fullname = "", email = email)
      }
    }
    "parse wiki accounts" in {
      // Entries without an '@' are treated as wiki account names.
      val accounts = Seq("Ilya", "Antanana", "Ahonc", "Base")
      User.parseList(accounts.mkString("\\n")) === accounts.map { account =>
        User(id = None, contestId = None, fullname = "", email = "", wikiAccount = Some(account))
      }
    }
    "parse prefixed accounts" in {
      // A leading "User:" namespace prefix is stripped from account names.
      val withoutUser = Seq("Ilya", "Antanana", "Ahonc", "Base")
      val withUser = withoutUser.map("User:" + _)
      User.parseList(withUser.mkString("\\n")) === withoutUser.map { account =>
        User(id = None, contestId = None, fullname = "", email = "", wikiAccount = Some(account))
      }
    }
    // Separators: comma, newline, and mixed comma+newline are all accepted.
    "parse names and emails commas" in {
      val strings = Seq(
        "Name1 Surname1 <email1@server.com>",
        "Name2 Surname2 <email2@server.com>"
      )
      User.parseList(strings.mkString(",")) === Seq(
        User(id = None, contestId = None, fullname = "Name1 Surname1", email = "email1@server.com"),
        User(id = None, contestId = None, fullname = "Name2 Surname2", email = "email2@server.com")
      )
    }
    "parse names and emails newlines" in {
      val strings = Seq(
        "Name1 Surname1 <email1@server.com>",
        "Name2 Surname2 <email2@server.com>"
      )
      User.parseList(strings.mkString("\\n")) === Seq(
        User(id = None, contestId = None, fullname = "Name1 Surname1", email = "email1@server.com"),
        User(id = None, contestId = None, fullname = "Name2 Surname2", email = "email2@server.com")
      )
    }
    "parse names and emails newlines and commas" in {
      val strings = Seq(
        "Name1 Surname1 <email1@server.com>",
        "Name2 Surname2 <email2@server.com>"
      )
      User.parseList(strings.mkString(",\\n")) === Seq(
        User(id = None, contestId = None, fullname = "Name1 Surname1", email = "email1@server.com"),
        User(id = None, contestId = None, fullname = "Name2 Surname2", email = "email2@server.com")
      )
    }
  }
}
| intracer/wlxjury | test/org/intracer/wmua/UserSpec.scala | Scala | apache-2.0 | 3,309 |
/*
* Copyright 2014 JHC Systems Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sqlest.sql
import java.sql.{ Connection, PreparedStatement, Timestamp => JdbcTimestamp, Types => JdbcTypes, SQLException }
import org.joda.time.DateTime
import sqlest.ast._
import sqlest.util._
trait StatementBuilder extends BaseStatementBuilder
with SelectStatementBuilder
with InsertStatementBuilder
with UpdateStatementBuilder
with DeleteStatementBuilder
with Logging {
def apply(connection: Connection, operation: Operation) =
prepareStatement(connection, operation)
def prepareStatement(connection: Connection, operation: Operation) = {
val querySql = sql(operation)
val queryParameters = parameters(operation)
try {
logger.debug(s"Preparing statement: $querySql - $queryParameters")
val statement = connection.prepareStatement(querySql)
setParameters(operation, statement, queryParameters)
statement
} catch {
case exn: SQLException =>
logger.error(s"Error preparing statement: $querySql - $queryParameters")
throw exn
}
}
def generateRawSql(operation: Operation): String = {
val querySql = sql(operation).split("\\\\?")
val queryParameters = parameters(operation).map(parameter => constantSql(parameter.columnType, parameter.value))
querySql.zipAll(queryParameters, "", "")
.map { case (sql, parameter) => sql + parameter }
.mkString
}
def sql(operation: Operation): String = operation match {
case select: Select => selectSql(select)
case insert: Insert => insertSql(insert)
case update: Update => updateSql(update)
case delete: Delete => deleteSql(delete)
case other => sys.error("Unsupported operation type: " + other)
}
def parameters(operation: Operation): List[LiteralColumn[_]] = operation match {
case select: Select => selectArgs(select)
case insert: Insert => insertArgs(insert)
case update: Update => updateArgs(update)
case delete: Delete => deleteArgs(delete)
case other => sys.error("Unsupported operation type: " + other)
}
private def setParameters(operation: Operation, statement: PreparedStatement, parameters: List[LiteralColumn[_]]) = {
def innerSetParameters(parameters: List[LiteralColumn[_]]) = {
var index = 0
parameters foreach { parameter =>
index = index + 1 // prepared statement parameter indices are 1-based
setParameter(statement, index, parameter.columnType, parameter.value)
}
}
operation match {
case insert: InsertValues => parameters.grouped(insert.columns.size) map {
params =>
innerSetParameters(params)
statement.addBatch
}
case _: InsertFromSelect =>
innerSetParameters(parameters)
statement.addBatch
case _ => innerSetParameters(parameters)
}
}
private def setParameter[A](statement: PreparedStatement, index: Int, columnType: ColumnType[A], value: Any): Unit = columnType match {
case BooleanColumnType => statement.setBoolean(index, value.asInstanceOf[Boolean])
case IntColumnType => statement.setInt(index, value.asInstanceOf[Int])
case LongColumnType => statement.setLong(index, value.asInstanceOf[Long])
case DoubleColumnType => statement.setDouble(index, value.asInstanceOf[Double])
case BigDecimalColumnType => statement.setBigDecimal(index, value.asInstanceOf[BigDecimal].bigDecimal)
case StringColumnType => statement.setString(index, value.asInstanceOf[String])
case DateTimeColumnType => statement.setTimestamp(index, new JdbcTimestamp(value.asInstanceOf[DateTime].getMillis))
case optionType: OptionColumnType[_] =>
val option = value.asInstanceOf[Option[_]]
if (option.isEmpty) statement.setNull(index, jdbcType(optionType.baseType)) else setParameter(statement, index, optionType.baseType, value)
case mappedType: MappedColumnType[A, _] => setParameter(statement, index, mappedType.baseType, mappedType.write(value.asInstanceOf[A]))
}
/**
 * Maps a column type to the JDBC SQL-type constant used when binding NULL
 * (see `setNull` in `setParameter`). Option and mapped columns delegate to
 * their base type.
 */
private def jdbcType[A](columnType: ColumnType[A]): Int = columnType match {
  case BooleanColumnType => JdbcTypes.BOOLEAN
  case IntColumnType => JdbcTypes.INTEGER
  // Bug fix: Long is a 64-bit value — INTEGER (32-bit) was the wrong SQL type
  // for setNull against BIGINT columns; java.sql.Types.BIGINT is correct.
  case LongColumnType => JdbcTypes.BIGINT
  case DoubleColumnType => JdbcTypes.DOUBLE
  case BigDecimalColumnType => JdbcTypes.DECIMAL
  // NOTE(review): CHAR is the fixed-width type; VARCHAR may be more appropriate
  // for String columns — confirm against the drivers this targets.
  case StringColumnType => JdbcTypes.CHAR
  case DateTimeColumnType => JdbcTypes.TIMESTAMP
  case optionType: OptionColumnType[_] => jdbcType(optionType.baseType)
  case mappedType: MappedColumnType[_, _] => jdbcType(mappedType.baseType)
}
}
| andrewjskatz/sqlest | src/main/scala/sqlest/sql/StatementBuilder.scala | Scala | apache-2.0 | 5,134 |
/* RetryPolicyExtensions.scala
*
* Copyright (c) 2013-2014 linkedin.com
* Copyright (c) 2013-2015 zman.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package atmos.dsl
import atmos.monitor.ChainedEvents
/**
* Adds DSL extension methods to the retry policy interface.
*
* @param self The retry policy to add the extension methods to.
*/
case class RetryPolicyExtensions(self: RetryPolicy) extends AnyVal {

  /**
   * Creates a new retry policy by replacing the underlying policy's termination policy.
   *
   * @param termination The termination policy to use.
   */
  def retryFor(termination: TerminationPolicy): RetryPolicy = self.copy(termination = termination)

  /**
   * Creates a new retry policy by replacing the underlying policy's backoff policy.
   *
   * @param backoff The backoff policy to use.
   */
  def using(backoff: BackoffPolicy): RetryPolicy = self.copy(backoff = backoff)

  /**
   * Creates a new retry policy by replacing the underlying policy's monitor.
   *
   * @param monitor The monitor to use.
   */
  def monitorWith(monitor: EventMonitor): RetryPolicy = self.copy(monitor = monitor)

  /**
   * Creates a new retry policy by chaining the specified event monitor to the underlying policy's monitor.
   * Both monitors receive every event.
   *
   * @param monitor The monitor to chain to the underlying policy's monitor.
   */
  def alsoMonitorWith(monitor: EventMonitor): RetryPolicy = self.copy(monitor = ChainedEvents(self.monitor, monitor))

  /**
   * Creates a new retry policy by replacing the underlying policy's result classifier.
   *
   * @param results The result classifier to use.
   */
  def onResult(results: ResultClassifier): RetryPolicy = self.copy(results = results)

  /**
   * Creates a new retry policy by chaining the specified result classifier to the underlying policy's
   * classifier. The underlying classifier is consulted first; `results` handles whatever it declines.
   *
   * @param results The result classifier to chain to the underlying policy's classifier.
   */
  def orOnResult(results: ResultClassifier): RetryPolicy = self.copy(results = self.results orElse results)

  /**
   * Creates a new retry policy by replacing the underlying policy's error classifier.
   *
   * @param errors The error classifier to use.
   */
  def onError(errors: ErrorClassifier): RetryPolicy = self.copy(errors = errors)

  /**
   * Creates a new retry policy by chaining the specified error classifier to the underlying policy's
   * classifier. The underlying classifier is consulted first; `errors` handles whatever it declines.
   *
   * @param errors The error classifier to chain to the underlying policy's classifier.
   */
  def orOnError(errors: ErrorClassifier): RetryPolicy = self.copy(errors = self.errors orElse errors)
} | zmanio/atmos | src/main/scala/atmos/dsl/RetryPolicyExtensions.scala | Scala | apache-2.0 | 3,077 |
package org.scalaide.core.internal.jdt.util
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IFolder
import org.eclipse.core.resources.IProject
import org.eclipse.core.resources.IResource
import org.eclipse.core.resources.ResourcesPlugin
import org.eclipse.core.runtime.CoreException
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.core.runtime.IStatus
import org.eclipse.core.runtime.Status
import org.eclipse.jdt.core.IClasspathEntry
import org.eclipse.jdt.core.IJavaElement
import org.eclipse.jdt.core.IPackageFragment
import org.eclipse.jdt.core.IType
import org.eclipse.jdt.core.JavaCore
import org.eclipse.jdt.core.JavaModelException
import org.eclipse.jdt.internal.core.ImportContainerInfo
import org.eclipse.jdt.internal.core.JavaModelManager
import org.eclipse.jdt.internal.core.NameLookup
import org.eclipse.jdt.internal.ui.packageview.PackageExplorerPart
import org.eclipse.ui.progress.UIJob
import org.scalaide.util.internal.ReflectionUtils
import org.scalaide.core.internal.project.ScalaProject
/** Assorted helpers for working with the JDT Java model from the Scala IDE. */
object JDTUtils {
  // Coalesces concurrent refresh requests: while a refresh job is pending,
  // further calls are no-ops. Guarded by `lock`.
  private var refreshPending = false
  private val lock = new Object

  /** Schedules a refresh of the package explorer tree on the UI thread, coalescing repeated requests. */
  def refreshPackageExplorer() = {
    lock.synchronized {
      if (!refreshPending) {
        refreshPending = true
        new UIJob("Refresh package explorer") {
          def runInUIThread(monitor: IProgressMonitor): IStatus = {
            lock.synchronized {
              refreshPending = false
            }
            val pep = PackageExplorerPart.getFromActivePerspective
            if (pep != null)
              pep.getTreeViewer.refresh()
            Status.OK_STATUS
          }
        }.schedule()
      }
    }
  }

  /**
   * Resolves the type `typeName` within `packageName` using the given name lookup.
   *
   * @return the first matching type across all fragments of the package, or `None`.
   */
  def resolveType(nameLookup: NameLookup, packageName: String, typeName: String, acceptFlags: Int): Option[IType] = {
    // Robustness fix: findPackageFragments returns null when the package is not
    // found; the previous `for (p <- pkgs)` then threw a NullPointerException.
    val pkgs = Option(nameLookup.findPackageFragments(packageName, false)).getOrElse(Array.empty[IPackageFragment])
    // Lazy search replaces the previous non-local `return` (which is
    // implemented with control-flow exceptions inside the foreach lambda).
    pkgs.iterator
      .map(pkg => nameLookup.findType(typeName, pkg, false, acceptFlags, true, true))
      .find(_ != null)
  }

  /**
   * Returns the package fragment containing `scalaFile`, falling back to the
   * default package of the file's folder when the file is not on the classpath.
   */
  def getParentPackage(scalaFile: IFile): IPackageFragment = {
    val jp = JavaCore.create(scalaFile.getProject)
    // A typed pattern does not match null, so this also covers the null case.
    JavaModelManager.determineIfOnClasspath(scalaFile, jp) match {
      case pkg: IPackageFragment => pkg
      case _ =>
        val root = jp.getPackageFragmentRoot(scalaFile.getParent)
        root.getPackageFragment(IPackageFragment.DEFAULT_PACKAGE_NAME)
    }
  }

  /**
   * Iterates over every Scala source file reachable from the project's source
   * classpath entries. Non-Scala projects and Java-model failures yield an
   * empty iterator.
   */
  def flattenProject(project: IProject): Iterator[IFile] = {
    try {
      if (!ScalaProject.isScalaProject(project))
        Iterator.empty
      else {
        val jp = JavaCore.create(project)
        jp.getRawClasspath.iterator
          .filter(_.getEntryKind == IClasspathEntry.CPE_SOURCE)
          .flatMap(entry => flatten(ResourcesPlugin.getWorkspace.getRoot.findMember(entry.getPath)))
      }
    } catch {
      case _: JavaModelException => Iterator.empty
    }
  }

  /**
   * Recursively collects the `.scala` files beneath resource `r`. Null,
   * non-existent, and unreadable (CoreException) resources yield nothing.
   */
  def flatten(r: IResource): Iterator[IFile] = {
    try {
      r match {
        case null => Iterator.empty
        case res if !res.exists => Iterator.empty
        case folder: IFolder if folder.getType == IResource.FOLDER =>
          folder.members.iterator.flatMap(flatten _)
        case file: IFile if file.getType == IResource.FILE && file.getFileExtension == "scala" =>
          Iterator.single(file)
        case _ => Iterator.empty
      }
    } catch {
      case _: CoreException => Iterator.empty
    }
  }
}
/**
 * Reflective access to the source-range setters of the package-private JDT
 * class `SourceRefElementInfo`, which is not visible to this plugin directly.
 */
object SourceRefElementInfoUtils extends ReflectionUtils {
  private val sreiClazz = Class.forName("org.eclipse.jdt.internal.core.SourceRefElementInfo")
  private val setSourceRangeStartMethod = getDeclaredMethod(sreiClazz, "setSourceRangeStart", classOf[Int])
  private val setSourceRangeEndMethod = getDeclaredMethod(sreiClazz, "setSourceRangeEnd", classOf[Int])

  // Fix: `new Integer(_)` is deprecated (and removed in recent JDKs);
  // Integer.valueOf uses the boxed-value cache and is the supported API.
  def setSourceRangeStart(srei: AnyRef, pos: Int) = setSourceRangeStartMethod.invoke(srei, Integer.valueOf(pos))
  def setSourceRangeEnd(srei: AnyRef, pos: Int) = setSourceRangeEndMethod.invoke(srei, Integer.valueOf(pos))
}
/**
 * Reflective read/write access to the private `children` field of the JDT
 * `ImportContainerInfo` class.
 */
object ImportContainerInfoUtils extends ReflectionUtils {
  private val childrenField = getDeclaredField(classOf[ImportContainerInfo], "children")

  /** Overwrites the container's children array. */
  def setChildren(ic: ImportContainerInfo, children: Array[IJavaElement]): Unit =
    childrenField.set(ic, children)

  /** Reads the container's children array. */
  def getChildren(ic: ImportContainerInfo): Array[IJavaElement] =
    childrenField.get(ic).asInstanceOf[Array[IJavaElement]]
}
| scala-ide/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/jdt/util/JDTUtils.scala | Scala | bsd-3-clause | 4,475 |
package feh.phtpe
import feh.phtpe.Prefixes._
/** Root marker trait for physical-type (unit) systems. */
trait AbstractPhysTypeSystem
/**
 * Declares that values typed in system `From` can be converted to system `To`.
 * Concrete compatibility objects expose one implicit conversion per dimension.
 */
trait AbstractPhysTypeSystemCompatibility[From <: AbstractPhysTypeSystem, To <: AbstractPhysTypeSystem]{
// Helper for subclasses to summon the conversion between two phys-types.
protected def conversion[F <: PhysType, T <: PhysType] = PhysTypeConversion.inst[F, T]
}
/**
 * A unit system defined by its base dimensions (abstract type members) plus
 * derived dimensions (type aliases) composed with the /, ** and ^ operators.
 */
trait PhysTypeSystem extends AbstractPhysTypeSystem{
// Base dimensions: concrete systems bind each to a specific unit.
type Mass <: PhysType
type Time <: PhysType
type Distance <: PhysType
type Temperature <: PhysType
// type Luminance <: PhysType
type LuminousIntensity <: PhysType
// Derived dimensions, expressed in terms of the base ones above.
type Speed = Distance / Time
type Acceleration = Distance / (Time ^ _2)
type Force = Mass ** Distance / (Time ^ _2)
}
/**
 * Provides the per-dimension implicit conversions needed to translate values
 * from unit system `From` into unit system `To` (one per base dimension).
 */
trait PhysTypeSystemCompatibility[From <: PhysTypeSystem, To <: PhysTypeSystem] extends AbstractPhysTypeSystemCompatibility[From, To]{
implicit def mass = conversion[From#Mass, To#Mass]
implicit def time = conversion[From#Time, To#Time]
implicit def dist = conversion[From#Distance, To#Distance]
implicit def temp = conversion[From#Temperature, To#Temperature]
implicit def lumI = conversion[From#LuminousIntensity, To#LuminousIntensity]
}
/** Predefined unit systems and their compatibility instances. */
object PhysTypeSystem{

  /** The SI system: kilograms, meters, seconds, kelvin, candela. */
  trait SI extends PhysTypeSystem{
    type Mass = Kilogram
    // Consistency fix: SI previously only defined `Luminance`, which is not a
    // declared member of PhysTypeSystem (it is commented out there), leaving
    // the abstract `LuminousIntensity` unbound — unlike BigScale. Bind it here.
    type LuminousIntensity = Candela
    // Retained for source compatibility with code that refers to SI#Luminance.
    type Luminance = Candela
    type Temperature = Kelvin
    type Distance = Meter
    type Time = Second
  }

  /** Compatibility of an arbitrary system `From` with SI. */
  trait SICompatibility[From <: PhysTypeSystem] extends PhysTypeSystemCompatibility[From, SI]

  /** Astronomical-scale system: light-years and years for distance and time. */
  trait BigScale extends PhysTypeSystem{
    type Mass = Kilogram //todo Kilo@@
    type Temperature = Kelvin
    type LuminousIntensity = Candela
    type Distance = Units.BigScale.LightYear
    type Time = Units.BigScale.Year
  }

  object BigScaleIsSICompatible extends SICompatibility[BigScale]
}
| fehu/phtpe | phtpe/src/main/scala/feh/phtpe/PhysTypeSystem.scala | Scala | mit | 1,796 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.