code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark

import _root_.io.netty.util.internal.logging.{InternalLoggerFactory, Slf4JLoggerFactory}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.Suite

/** Manages a local `sc` `SparkContext` variable, correctly stopping it after each test. */
trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>

  // The context under test; tests assign it and `afterEach` tears it down.
  @transient var sc: SparkContext = _

  // Deprecated procedure syntax (`def f() { ... }`) replaced with explicit `: Unit =`.
  override def beforeAll(): Unit = {
    super.beforeAll()
    // Route netty's internal logging through SLF4J for the whole suite run.
    InternalLoggerFactory.setDefaultFactory(Slf4JLoggerFactory.INSTANCE)
  }

  override def afterEach(): Unit = {
    try {
      resetSparkContext()
    } finally {
      super.afterEach()
    }
  }

  /** Stops the current context (if any) and nulls out `sc` so a stale context is never reused. */
  def resetSparkContext(): Unit = {
    LocalSparkContext.stop(sc)
    sc = null
  }

}

object LocalSparkContext {

  /**
   * Stops `sc` if non-null and clears the driver-port system property.
   * Safe to call with a null context.
   */
  def stop(sc: SparkContext): Unit = {
    if (sc != null) {
      sc.stop()
    }
    // To avoid RPC rebinding to the same port, since it doesn't unbind immediately on shutdown
    System.clearProperty("spark.driver.port")
  }

  /** Runs `f` by passing in `sc` and ensures that `sc` is stopped. */
  def withSpark[T](sc: SparkContext)(f: SparkContext => T): T = {
    try {
      f(sc)
    } finally {
      stop(sc)
    }
  }

}
WindCanDie/spark
core/src/test/scala/org/apache/spark/LocalSparkContext.scala
Scala
apache-2.0
2,033
package org.jetbrains.plugins.hocon.ref

import com.intellij.psi.impl.source.resolve.reference.impl.providers.JavaClassReferenceProvider
import com.intellij.psi.{PsiElement, PsiReference}
import org.jetbrains.plugins.hocon.psi.HString
import org.jetbrains.plugins.hocon.settings.HoconProjectSettings

/**
 * Contributes Java class references for HOCON string elements, so class names
 * written in HOCON files can be resolved/navigated in the IDE.
 * References are only produced when the per-project HOCON settings enable them
 * for the element's string kind (quoted vs unquoted).
 */
class HStringJavaClassReferenceProvider extends JavaClassReferenceProvider {

  import org.jetbrains.plugins.hocon.lexer.HoconTokenType._
  import org.jetbrains.plugins.hocon.parser.HoconElementType._

  // Soft references: unresolved class names are not highlighted as errors.
  setSoft(true)

  // Checks the project-level settings to decide whether this HString should
  // get class references, based on its element type and string flavor.
  private def isEligible(element: HString) = {
    val settings = HoconProjectSettings.getInstance(element.getProject)
    (element.getNode.getElementType, element.stringType) match {
      case (StringValue, UnquotedString) => settings.classReferencesOnUnquotedStrings
      case (StringValue | KeyPart, QuotedString) => settings.classReferencesOnQuotedStrings
      case _ => false
    }
  }

  // Delegates to the platform provider only for eligible HOCON strings;
  // everything else gets no references.
  override def getReferencesByString(str: String, position: PsiElement, offsetInPosition: Int): Array[PsiReference] =
    position match {
      case hstr: HString if isEligible(hstr) => super.getReferencesByString(str, position, offsetInPosition)
      case _ => PsiReference.EMPTY_ARRAY
    }
}
ilinum/intellij-scala
src/org/jetbrains/plugins/hocon/ref/HStringJavaClassReferenceProvider.scala
Scala
apache-2.0
1,214
/*
 * This file is part of the Linux Variability Modeling Tools (LVAT).
 *
 * Copyright (C) 2010 Steven She <shshe@gsd.uwaterloo.ca>
 *
 * LVAT is free software: you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the
 * Free Software Foundation, either version 3 of the License, or (at your
 * option) any later version.
 *
 * LVAT is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
 * more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with LVAT. (See files COPYING and COPYING.LESSER.) If not, see
 * <http://www.gnu.org/licenses/>.
 */
package gsd.linux

object KConfigUtil {

  /**
   * Chunks defaults such that each list of defaults has the same
   * default value (ie. Default.iv).
   *
   * Note: each chunk is formed by partitioning the *remaining* list on the
   * head's `iv`, so a chunk collects every remaining default with that value,
   * not only a consecutive run.
   *
   * Rewritten tail-recursively: the previous direct recursion consumed one
   * stack frame per chunk and could overflow on inputs with many distinct
   * default values.
   */
  def chunkDefaults(defs: List[Default]): List[List[Default]] = {
    @annotation.tailrec
    def loop(remaining: List[Default], acc: List[List[Default]]): List[List[Default]] =
      remaining match {
        case Nil => acc.reverse
        case Default(iv, _) :: _ =>
          // Pull every remaining default sharing the head's value into one chunk.
          val (same, rest) = remaining partition { _.iv == iv }
          loop(rest, same :: acc)
      }
    loop(defs, Nil)
  }
}
scas-mdd/linux-variability-analysis-tools.fm-translation
src/main/scala/gsd/linux/KConfigUtil.scala
Scala
gpl-3.0
1,220
package sp.server

/**
 * Created by daniel on 2015-03-23.
 */

import akka.actor._
import akka.io.Tcp.PeerClosed
import sp.system.messages._
import spray.can.Http
import spray.http.HttpHeaders._
import spray.http.MediaTypes._
import spray.http._
import spray.routing.Directives._
import spray.routing._

// Enable scala features
import scala.language.{implicitConversions, postfixOps}
import scala.concurrent.duration._

/**
 * Spray directive implementing Server-Sent Events (SSE): each client request
 * spawns a per-connection actor that subscribes to `eventHandler` and streams
 * events to the client as HTTP chunks until the connection closes.
 */
trait ServerSideEventsDirectives {

  case class RegisterClosedHandler(handler: () => Unit)

  /**
   * Builds a GET route that serves a `text/event-stream` response for `channel`,
   * forwarding `SPEvent`s published by `eventHandler` to the client.
   */
  def sse(channel: String, eventHandler: ActorRef)(implicit refFactory: ActorRefFactory): Route = {

    // Initial chunk of the response; the padding works around buffering in
    // IE/Yaffle, which only starts dispatching events after ~2KB received.
    val responseStart = HttpResponse(
      headers = `Cache-Control`(CacheDirectives.`no-cache`) :: Nil,
      entity = ":" + (" " * 2049) + "\\n" // 2k padding for IE using Yaffle
    )

    def sseRoute(lei: String): Route = (ctx: RequestContext) => {
      // One anonymous actor per SSE connection; it owns the chunked response.
      refFactory.actorOf(
        Props {
          new Actor {
            // Open the chunked stream immediately, then register for events.
            ctx.responder ! ChunkedResponseStart(responseStart)
            // ReceiveTimeout doubles as a keep-alive tick (see below).
            context.setReceiveTimeout(5 seconds)
            eventHandler ! SubscribeToSSE(self)

            // Tears down the stream: end the chunked response, unsubscribe,
            // and stop this actor.
            def endConnection = {
              ctx.responder ! ChunkedMessageEnd
              //ctx.complete("STOPPING SSE")
              eventHandler ! UnSubscribeToSSE(self)
              self ! PoisonPill
            }

            import sp.domain._
            def receive = {
              // Serialize each domain event as JSON and emit it in SSE
              // "event:/data:" framing, using the class name as event type.
              case e: SPEvent => {
                import sp.system.messages.JsonFormatsMessage._
                import org.json4s.native.JsonMethods._
                val data = org.json4s.native.JsonMethods.compact(render(SPValue(e)))
                val ev = e.getClass.getSimpleName
                ctx.responder ! MessageChunk(s"event: $ev\\ndata:$data\\n\\n")
              }
              case "close" => {
                //ctx.responder ! ChunkedMessageEnd
                println("got sse close")
                endConnection
              }
              // No event for 5s: send an empty chunk as a keep-alive so
              // intermediaries don't drop the idle connection.
              case ReceiveTimeout => {
                //ctx.responder ! ChunkedMessageEnd
                //println("got sse timeout")
                ctx.responder ! MessageChunk("\\n")
              }
              case Http.Closed => {
                println("got HTTP closed")
                endConnection
              }
              case PeerClosed => {
                println("got Peer closed")
                endConnection
              }
              case x => {
                println("sse got " + x)
              }
            }
          }
        }
      )
    }

    // SSE media type; not predefined in this spray version, so registered here.
    val EventStreamType = register(
      MediaType.custom(
        mainType = "text",
        subType = "event-stream",
        compressible = true,
        binary = false
      )
    )

    get {
      respondWithMediaType(EventStreamType) {
        sseRoute(channel)
      }
    } //~
    // Answer preflight requests. Needed for Yaffle
    //      method(HttpMethods.OPTIONS) {
    //        val preflightHeaders = List(
    //          RawHeader("Access-Control-Allow-Methods", "GET"),
    //          RawHeader("Access-Control-Allow-Headers", "Last-Event-ID, Cache-Control"),
    //          RawHeader("Access-Control-Max-Age", "86400")
    //        )
    //        respondWithHeaders(preflightHeaders: _*) {
    //          complete(StatusCodes.OK)
    //        }
    //      }
  }

  //  def lastEventId = optionalHeaderValueByName("Last-Event-ID") | parameter("lastEventId" ?)

}

object ServerSideEventsDirectives extends ServerSideEventsDirectives
kristoferB/SP
sp1/src/main/scala/sp/server/ServerSideEventsDirectives.scala
Scala
mit
3,416
package fpinscala.errorhandling

// Hide the standard library's `Option`, `Some` and `Either`: this chapter builds its own.
import scala.{Option => _, Some => _, Either => _, _}

/** A hand-rolled optional value: either `Some(a)` or `None`. */
sealed trait Option[+A] {

  /** Applies `f` to the contained value, leaving `None` untouched. */
  def map[B](f: A => B): Option[B] =
    flatMap(a => Some(f(a)))

  /** Extracts the value, or evaluates `default` when empty. */
  def getOrElse[B >: A](default: => B): B = this match {
    case Some(a) => a
    case None    => default
  }

  /** Applies `f` and flattens the nested option. */
  def flatMap[B](f: A => Option[B]): Option[B] = this match {
    case Some(a) => f(a)
    case None    => None
  }

  /** Returns this option when non-empty, otherwise evaluates `ob`. */
  def orElse[B >: A](ob: => Option[B]): Option[B] = this match {
    case Some(_) => this
    case None    => ob
  }

  /** Keeps the value only when it satisfies `f`. */
  def filter(f: A => Boolean): Option[A] = this match {
    case Some(a) if f(a) => this
    case _               => None
  }
}

case class Some[+A](get: A) extends Option[A]
case object None extends Option[Nothing]

object Option {

  /**
   * Demonstrates that a thrown exception subverts the type system:
   * `y` is declared `Int` but throws before the `try` is entered,
   * so the catch block never runs and the exception escapes.
   */
  def failingFn(i: Int): Int = {
    val y: Int = throw new Exception("fail!")
    try {
      val x = 42 + 5
      x + y
    } catch {
      case e: Exception => 43
    }
  }

  /** Same idea, but the throw happens inside the `try`, so 43 is returned. */
  def failingFn2(i: Int): Int =
    try {
      val x = 42 + 5
      // A thrown Exception can be given any type; here it is annotated as `Int`.
      x + ((throw new Exception("fail!")): Int)
    } catch {
      case e: Exception => 43
    }

  /** Arithmetic mean, or `None` for an empty sequence. */
  def mean(xs: Seq[Double]): Option[Double] =
    if (xs.isEmpty) None
    else Some(xs.sum / xs.length)

  /** Variance: the mean of squared deviations from the mean. */
  def variance(xs: Seq[Double]): Option[Double] =
    for {
      m <- mean(xs)
      v <- mean(xs.map(x => math.pow(x - m, 2)))
    } yield v

  /** Combines two options with `f`; `None` when either side is empty. */
  def map2[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] =
    for {
      av <- a
      bv <- b
    } yield f(av, bv)

  /** Turns a list of options into an option of a list; `None` if any element is empty. */
  def sequence[A](a: List[Option[A]]): Option[List[A]] =
    a.foldRight(Some(List[A]()): Option[List[A]])((x, acc) => map2(x, acc)(_ :: _))

  /** Maps `f` over `a`, collecting results; `None` as soon as any application is empty. */
  def traverse[A, B](a: List[A])(f: A => Option[B]): Option[List[B]] =
    a.foldRight(Some(List[B]()): Option[List[B]])((x, acc) => map2(f(x), acc)(_ :: _))

  /** `sequence` expressed through `traverse` with the identity function. */
  def sequence2[A](a: List[Option[A]]): Option[List[A]] =
    traverse(a)(x => x)
}
onewheelonly/fpinscala
exercises/src/main/scala/fpinscala/errorhandling/Option.scala
Scala
mit
2,643
/*
 * Skylark
 * http://skylark.io
 *
 * Copyright 2012-2017 Quantarray, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.quantarray.skylark.measure

import com.quantarray.skylark.measure.measures._

/**
 * Length ^n^ converter.
 *
 * Supplies conversion factors between exponential length measures (volumes,
 * areas); any pair not listed here is delegated to the parent converter.
 *
 * @author Araik Grigoryan
 */
trait ExponentialLengthConverter extends SameTypeConverter[ExponentialLengthMeasure]
{
  // `⤇(from, to)` builds a from→to conversion pair that the cases match on.
  override protected def convert(from: ExponentialLengthMeasure, to: ExponentialLengthMeasure): Option[Double] = ⤇(from, to) match
  {
    case `gal` ⤇ `in3` => Some(231) // 1 US gallon is defined as 231 cubic inches
    case `ha` ⤇ `km2` => Some(0.01) // 1 hectare = 0.01 square kilometers
    case _ => super.convert(from, to)
  }
}

object ExponentialLengthConverter
{
  /** Creates an instance of the otherwise stateless trait. */
  def apply(): ExponentialLengthConverter = new ExponentialLengthConverter {}
}
quantarray/skylark
skylark-measure/src/main/scala/com/quantarray/skylark/measure/ExponentialLengthConverter.scala
Scala
apache-2.0
1,257
/*
 * Copyright 2001-2008 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest.events

import org.scalatest._

/**
 * Class each of whose instances hold summary information about one ScalaTest run.
 *
 * @param testsSucceededCount the number of tests that were reported as succeeded during the run
 * @param testsFailedCount the number of tests that were reported as failed during the run
 * @param testsIgnoredCount the number of tests that were reported as ignored during the run
 * @param testsPendingCount the number of tests that were reported as pending during the run
 * @param testsCanceledCount the number of tests that were reported as canceled during the run
 * @param suitesCompletedCount the number of suites that completed during the run
 * @param suitesAbortedCount the number of suites that aborted during the run
 *
 * @author Bill Venners
 */
final case class Summary(testsSucceededCount: Int, testsFailedCount: Int, testsIgnoredCount: Int, testsPendingCount: Int,
  testsCanceledCount: Int, suitesCompletedCount: Int, suitesAbortedCount: Int) {

  /**
   * The number of tests completed, which is the sum of the number of tests that succeeded and failed, excluding any
   * tests that were ignored, canceled, or reported as pending.
   */
  val testsCompletedCount = testsSucceededCount + testsFailedCount

  /**
   * The total number of tests, which is the sum of the number of tests that succeeded, failed, were ignored, canceled, or
   * reported as pending.
   */
  val totalTestsCount = testsSucceededCount + testsFailedCount + testsIgnoredCount + testsPendingCount + testsCanceledCount
}

/**
 * Companion object for case class <a href="Summary.html"><code>Summary</code></a>.
 */
object Summary
hubertp/scalatest
src/main/scala/org/scalatest/events/Summary.scala
Scala
apache-2.0
2,017
package org.scalex
package search
package query

import scala.util.{ Try, Success, Failure }
import scalaz.NonEmptyList

import model.Project

/**
 * A search area: a project name plus an optional version *prefix*.
 * `Area("scalaz", List(7, 0))` covers every 7.0.x release of scalaz.
 */
case class Area(
    name: ProjectName,
    version: List[Int]) {

  // "1.2.3"-style rendering; None when no version digits were given.
  // NOTE(review): `toNel` presumably comes from a scalaz enrichment in scope — verify.
  def versionString: Option[String] = version.toNel map (_.list mkString ".")

  /** True when `p` has this area's name and a version matched by the stored prefix. */
  def covers(p: Project): Boolean = name == p.name && covers(p.version)

  // Compares major/minor/patch positionally against the stored prefix.
  def covers(v: semverfi.Valid): Boolean =
    List(v.major, v.minor, v.patch).zipWithIndex forall {
      case (vv, pos) ⇒ compare(pos, vv)
    }

  // A position absent from `version` matches anything (prefix semantics).
  private def compare(pos: Int, v: Int) = (version lift pos).fold(true)(v==)

  // e.g. "scalaz_7.0"; `??` presumably maps a None versionString to "" — verify.
  override def toString = name + (versionString ?? { "_" + _ })
}

object Area {

  // Splits "name_version" at the first underscore.
  val nameVersionRegex = """^([^_]+)_(.+)$""".r

  /**
   * Parses "name_1.2.3" into an Area; a bare name yields an empty version prefix.
   * Non-numeric version segments are silently dropped by `parseIntOption`.
   */
  def apply(str: String): Area = str match {
    case nameVersionRegex(name, version) ⇒
      Area(name, version.split('.').toList.map(parseIntOption).flatten)
    case name ⇒ Area(name, Nil)
  }
}
ornicar/scalex
src/main/scala/search/query/Area.scala
Scala
mit
926
package shared.dtos

import boopickle.Default._

/**
 * Represents content within a request/response to API endpoints.
 */
/**
 * Wraps all API requests in a standard format.
 */
case class ApiRequest(msgType: String, content: Content)

// Generic response envelope mirroring ApiRequest.
case class ApiResponse[T](msgType: String, content: T)

// Marker trait: every payload that may travel inside an ApiRequest.
sealed trait Content

// --- User creation / registration ---

case class CreateUser(email: String, password: String, jsonBlob: Map[String, String], createBTCWallet: Boolean) extends Content

case class CreateUserStep1(email: String) extends Content

case class CreateUserStep2(email: String, jsonBlob: Map[String, String], createBTCWallet: Boolean, salt: String, verifier: String) extends Content

case class CreateUserStep1Response(salt: String)

case class CreateUserResponse()

case class CreateUserError(reason: String = "")

case class ConfirmEmail(token: String) extends Content

case class ConfirmEmailResponse(agentURI: String)

case class ErrorResponse(reason: String) extends Content

// --- Session lifecycle ---

case class InitializeSession(agentURI: String) extends Content

case class InitializeSessionResponse(sessionURI: String, listOfAliases: Seq[String], defaultAlias: String, listOfLabels: Seq[String], listOfConnections: Seq[Connection], lastActiveLabel: String, jsonBlob: Map[String, String], bitcoinNetworkMode: String = "")

// M2 is presumably the SRP server-proof value — verify against the auth flow.
case class InitializeSessionResponseCheck(M2: String)

case class InitializeSessionErrorResponse(reason: String)

// s/B are presumably SRP salt and server public value — verify against the auth flow.
case class UserLoginResponse(s: String, B: String)

case class ConnectionProfileResponse(sessionURI: String, connection: Connection, jsonBlob: String)

case class ResponseContent(sessionURI: String, pageOfPosts: Seq[String] = Nil, connection: Connection, filter: String)

// A directed, labeled edge between two agents.
case class Connection(source: String = "", label: String = "", target: String = "")

case class SessionPing(sessionURI: String) extends Content

// --- Subscriptions / expressions ---

case class SubscribeRequest(sessionURI: String, expression: Expression) extends Content

case class CancelSubscribeRequest(sessionURI: String, connections: Seq[Connection] = Nil, filter: String = "") extends Content

case class Expression(msgType: String, content: ExpressionContent)

case class ExpressionContent(cnxns: Seq[Connection], label: String, value: String = "", uid: String = "")

//case class Label(text: String, color: String, imgSrc: String)

// --- Introductions / connections between agents ---

case class IntroConnections(sessionURI: String = "", alias: String = "", aConnection: Connection = Connection(), bConnection: Connection = Connection(), aMessage: String = "", bMessage: String = "") extends Content

case class EstablishConnection(sessionURI: String = "", aURI: String = "", bURI: String = "", label: String = "") extends Content

case class LabelPost(sessionURI: String = "", labels: Seq[String] = Nil, alias: String = "") extends Content

case class Introduction(introSessionId: String, correlationId: String, connection: Connection, message: String, introProfile: String)

case class IntroConfirmReq(sessionURI: String = "", alias: String = "", introSessionId: String = "", correlationId: String = "", accepted: Boolean) extends Content

case class IntroductionConfirmationResponse(sessionURI: String = "")

case class SessionPong(sessionURI: String)

case class ConnectNotification(connection: Connection, introProfile: String = "")

case class JsonBlob(name: String = "", imgSrc: String = "")

case class UpdateUserRequest(sessionURI: String = "", jsonBlob: JsonBlob = JsonBlob()) extends Content

case class BeginIntroductionRes(sessionURI: String = "")

case class AddAgentAliasesRequest(sessionURI: String = "", aliases: Seq[String]) extends Content

// --- Misc / infrastructure ---

case class VersionInfoRequest() extends Content

case class VersionInfoResponse(glosevalVersion: String = "", scalaVersion: String = "", mongoDBVersion: String = "", rabbitMQVersion: String = "")

case class CloseSessionRequest(sessionURI: String = "") extends Content

// --- Wallet / AMP transfers ---

case class OmniBalanceResponse(sessionURI: String , amp: String, btc: String, address: String)

// Construction fails fast on empty amount/target.
case class SendAmpsRequest(sessionURI: String, amount: String, target: String) extends Content {
  require(amount.nonEmpty, "Amount of AMPs should be non-zero!")
  require(target.nonEmpty, "Unable to amplify the post: receiver's address is missing!")
}

case class SendAmpsResponse(sessionURI: String, transaction: String)

case class ServerModel(uid: String = "", serverAddress : String = "", isEditable : Boolean = true, serverType: String = "")

/*object Content {
  implicit val todoPriorityPickler: Pickler[Content] = generatePickler[Content]
}*/
LivelyGig/ProductWebUI
shared/src/main/scala/shared/dtos/Content.scala
Scala
apache-2.0
4,620
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql.execution.datasources

import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.expressions.{AttributeMap, AttributeReference}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.sources.BaseRelation

/**
 * Used to link a [[BaseRelation]] in to a logical query plan.
 */
case class LogicalRelation(
    relation: BaseRelation,
    output: Seq[AttributeReference],
    catalogTable: Option[CatalogTable],
    override val isStreaming: Boolean)
  extends LeafNode with MultiInstanceRelation {

  // Only care about relation when canonicalizing.
  // Expression ids in `output` are normalized and the catalog table is dropped
  // so that two plans over the same relation compare equal.
  override def doCanonicalize(): LogicalPlan = copy(
    output = output.map(QueryPlan.normalizeExpressions(_, output)),
    catalogTable = None)

  // Prefer catalog statistics when available; fall back to the relation's
  // own size estimate.
  override def computeStats(): Statistics = {
    catalogTable
      .flatMap(_.stats.map(_.toPlanStats(output, conf.cboEnabled)))
      .getOrElse(Statistics(sizeInBytes = relation.sizeInBytes))
  }

  /** Used to lookup original attribute capitalization */
  val attributeMap: AttributeMap[AttributeReference] = AttributeMap(output.map(o => (o, o)))

  /**
   * Returns a new instance of this LogicalRelation. According to the semantics of
   * MultiInstanceRelation, this method returns a copy of this object with
   * unique expression ids. We respect the `expectedOutputAttributes` and create
   * new instances of attributes in it.
   */
  override def newInstance(): LogicalRelation = {
    this.copy(output = output.map(_.newInstance()))
  }

  // Only file-based relations carry a refreshable location; others are no-ops.
  override def refresh(): Unit = relation match {
    case fs: HadoopFsRelation => fs.location.refresh()
    case _ =>  // Do nothing.
  }

  override def simpleString(maxFields: Int): String = {
    s"Relation[${truncatedString(output, ",", maxFields)}] $relation"
  }
}

object LogicalRelation {
  // Convenience constructors deriving `output` from the relation's schema.
  def apply(relation: BaseRelation, isStreaming: Boolean = false): LogicalRelation =
    LogicalRelation(relation, relation.schema.toAttributes, None, isStreaming)

  def apply(relation: BaseRelation, table: CatalogTable): LogicalRelation =
    LogicalRelation(relation, relation.schema.toAttributes, Some(table), false)
}
pgandhi999/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala
Scala
apache-2.0
3,173
package kv.tests.kissmetrics

import kv.kissmetrics.DynamicService

/**
 * No-op [[DynamicService]] implementation for tests: every tracking call is
 * swallowed so tests exercising code that reports to KISSmetrics perform no I/O.
 *
 * Deprecated procedure syntax (`def f() {}`) replaced with explicit `: Unit =`.
 */
class TestDynamicService extends DynamicService {

  /** Ignores the alias request. */
  def alias(person1Id: String, person2Id: String): Unit = {}

  /** Ignores the event. */
  def event(personId: String, eventName: String, properties: Map[String, String] = Map()): Unit = {}

  /** Ignores the property update. */
  def properties(personId: String, properties: Map[String, String]): Unit = {}
}
kodemaniak/scala-kissmetrics
src/test/scala/kv/tests/kissmetrics/test-base-service.scala
Scala
apache-2.0
335
package sds.classfile.attribute.annotation

/**
 * Enum-constant element of an annotation (`enum_const_value` in a class-file
 * `element_value` structure): a pair of constant-pool indices.
 */
class EnumConstValue(_typeName: Int, _constName: Int) {
  // Constant-pool index of the enum type's descriptor string.
  def typeName: Int = _typeName
  // Constant-pool index of the enum constant's simple name.
  def constName: Int = _constName
}
g1144146/sds_for_scala
src/main/scala/sds/classfile/attribute/annotation/EnumConstValue.scala
Scala
apache-2.0
172
package helloscalafxml

import scalafx.scene.control._
import scalafxml.core.macros._
import scalafx.event.ActionEvent

// I am a sfxml controller file: the @sfxml macro turns this class into an
// FXML controller whose constructor parameters are injected by id.
@sfxml
// FirstScene.fxml passes objects to this class
class FirstSceneController(private val pushMeButton: Button,
                           private val outLabel: Label) {

  // pushMeButton On Action Event: wired from FirstScene.fxml, writes a
  // greeting into the output label.
  def onButtonDown(event: ActionEvent) {
    outLabel.text = "HelloScalaFX"
  }
}
AlexHolly/helloScalaFXML
src/main/scala/helloscalafxml/FirstSceneController.scala
Scala
mit
423
/*
 * Copyright 2015-2020 Noel Welsh
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package doodle
package java2d
package examples

/**
 * Animated example: concentric rings with a colored "pulse" that sweeps
 * outward and back over a dark-gray ring background.
 */
object PulsingCircle {
  import cats.instances.all._
  import doodle.core._
  import doodle.effect.Writer.Gif
  import doodle.syntax._
  import doodle.java2d.effect._
  import doodle.interact.syntax._
  import monix.reactive.Observable

  val frame = Frame.size(600, 600).background(Color.midnightBlue)

  // Ring geometry: stroke thickness, gap between rings, innermost diameter.
  val strokeWidth = 9.0
  val gapWidth = 6.0
  val minimumDiameter = gapWidth + strokeWidth
  val maxNumberOfDisks = 15

  /** Unfilled ring number `count`, sized so successive rings nest with a gap. */
  def disk(count: Int): Picture[Unit] =
    count match {
      case 0 =>
        circle[Algebra, Drawing](minimumDiameter.toDouble)
          .noFill
          .strokeWidth(strokeWidth)
      case n =>
        circle[Algebra, Drawing]((n * 2 * (strokeWidth + gapWidth) + minimumDiameter))
          .noFill
          .strokeWidth(strokeWidth)
    }

  /** All rings 0..count stacked on each other, drawn in dark gray. */
  def background(count: Int): Picture[Unit] = {
    def iter(count: Int): Picture[Unit] =
      count match {
        case 0 => disk(count)
        case n => disk(count).on(iter(n - 1))
      }
    iter(count).strokeWidth(strokeWidth.toDouble).strokeColor(Color.darkGray)
  }

  /** The pulse: ring `count` in crimson with up to two trailing hue-shifted rings. */
  def pulse(count: Int): Picture[Unit] =
    count match {
      case 0 => disk(0).strokeColor(Color.crimson)
      case 1 =>
        disk(1)
          .strokeColor(Color.crimson)
          .on(disk(0).strokeColor(Color.crimson.spin(30.degrees)))
      case n =>
        disk(n)
          .strokeColor(Color.crimson)
          .on(disk(n - 1)
            .strokeColor(Color.crimson.spin(30.degrees)))
          .on(disk(n - 2)
            .strokeColor(Color.crimson.spin(60.degrees)))
    }

  // State is (direction, ring index); direction flips at the inner/outer
  // bounds so the pulse bounces between ring 0 and maxNumberOfDisks.
  val animation: Observable[Picture[Unit]] =
    Observable
      .repeat(1)
      .scan((1, 0)) { (state, _) =>
        val (inc, count) = state
        if (count >= maxNumberOfDisks) (-1, maxNumberOfDisks - 1)
        else if (count <= 0) (1, 1)
        else (inc, count + inc)
      }
      .map { case (_, c) => pulse(c).on(background(maxNumberOfDisks)) }

  /** Renders the animation to a live window. */
  def go() = animation.animateFrames(frame)

  /** Writes the first 100 frames to an animated GIF. */
  def write() =
    animation.take(100).write[Gif]("pulsing-circle-2.gif", frame)
}
underscoreio/doodle
java2d/src/main/scala/doodle/java2d/examples/PulsingCircle.scala
Scala
apache-2.0
2,706
package graphite.relay.backend

/**
 * Network location of a graphite backend.
 *
 * @param host backend host name or address
 * @param port backend port
 */
case class Backend(host: String, port: Int) {
  /** Renders the backend as `host:port`. */
  override def toString = s"$host:$port"
}
markchadwick/graphite-relay
src/main/scala/backend/Backend.scala
Scala
apache-2.0
133
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.mllib.tree

import scala.collection.mutable

import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.model.TreeEnsembleModel
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.util.StatCounter

/** Shared assertions and data generators for tree-ensemble test suites. */
object EnsembleTestHelper {

  /**
   * Aggregates all values in data, and tests whether the empirical mean and stddev are within
   * epsilon of the expected values.
   * @param data Every element of the data should be an i.i.d. sample from some distribution.
   */
  def testRandomArrays(
      data: Array[Array[Double]],
      numCols: Int,
      expectedMean: Double,
      expectedStddev: Double,
      epsilon: Double): Unit = {
    val values = new mutable.ArrayBuffer[Double]()
    data.foreach { row =>
      // Every row must have the expected width before being pooled.
      assert(row.size == numCols)
      values ++= row
    }
    val stats = new StatCounter(values)
    // `~==` / `absTol` come from TestingUtils: approximate equality.
    assert(stats.mean ~== expectedMean absTol epsilon)
    assert(stats.stdev ~== expectedStddev absTol epsilon)
  }

  /** Asserts the model's classification accuracy on `input` meets `requiredAccuracy`. */
  def validateClassifier(
      model: TreeEnsembleModel,
      input: Seq[LabeledPoint],
      requiredAccuracy: Double): Unit = {
    val predictions = input.map(x => model.predict(x.features))
    val numOffPredictions = predictions.zip(input).count { case (prediction, expected) =>
      prediction != expected.label
    }
    val accuracy = (input.length - numOffPredictions).toDouble / input.length
    assert(accuracy >= requiredAccuracy,
      s"validateClassifier calculated accuracy $accuracy but required $requiredAccuracy.")
  }

  /**
   * Validates a tree ensemble model for regression.
   * Supported metrics: "mse" (default) and "mae"; any other `metricName`
   * throws a MatchError.
   */
  def validateRegressor(
      model: TreeEnsembleModel,
      input: Seq[LabeledPoint],
      required: Double,
      metricName: String = "mse"): Unit = {
    val predictions = input.map(x => model.predict(x.features))
    val errors = predictions.zip(input).map { case (prediction, point) =>
      point.label - prediction
    }
    val metric = metricName match {
      case "mse" =>
        errors.map(err => err * err).sum / errors.size
      case "mae" =>
        errors.map(math.abs).sum / errors.size
    }
    assert(metric <= required,
      s"validateRegressor calculated $metricName $metric but required $required.")
  }

  /**
   * Generates deterministic labeled points: labels alternate 0/1 over fixed
   * index bands (first 10% and [50%, 90%) are 0.0, the rest 1.0), and every
   * feature of point i equals i.
   */
  def generateOrderedLabeledPoints(numFeatures: Int, numInstances: Int): Array[LabeledPoint] = {
    val arr = new Array[LabeledPoint](numInstances)
    for (i <- 0 until numInstances) {
      val label = if (i < numInstances / 10) {
        0.0
      } else if (i < numInstances / 2) {
        1.0
      } else if (i < numInstances * 0.9) {
        0.0
      } else {
        1.0
      }
      val features = Array.fill[Double](numFeatures)(i.toDouble)
      arr(i) = new LabeledPoint(label, Vectors.dense(features))
    }
    arr
  }
}
rezasafi/spark
mllib/src/test/scala/org/apache/spark/mllib/tree/EnsembleTestHelper.scala
Scala
apache-2.0
3,637
/*
 * Copyright 2014 Cisco Systems, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cisco.oss.foundation.orchestration.scope.utils

import com.cisco.oss.foundation.orchestration.scope.ScopeConstants
import com.google.common.collect.ImmutableSet
import com.google.common.util.concurrent.MoreExecutors._
import org.apache.commons.io.FilenameUtils
import org.jclouds.ContextBuilder
import org.jclouds.concurrent.config.ExecutorServiceModule
import org.jclouds.sshj.config.SshjSshClientModule
import org.jclouds.vsphere.FileManagerApi

/**
 * Created with IntelliJ IDEA.
 * User: igreenfi
 * Date: 1/27/14
 * Time: 1:05 PM
 * To change this template use File | Settings | File Templates.
 */
object ResourcesUtilsFactory {
  /**
   * Selects the resource-upload API for the configured cloud provider.
   * Only vSphere has a real file API; the other known providers get a no-op.
   */
  def instance() = {
    ScopeUtils.configuration.getString("cloud.provider") match {
      case "rackspace" => NullResourceApi
      case "aws" => NullResourceApi
      case "openstack" => NullResourceApi
      case "vsphere" => VSphereResourcesApi
      case _ => throw new UnsupportedOperationException("Could NOT match provider File API. ( provider : " + ScopeUtils.configuration.getString("cloud.provider") + " )")
    }
  }
}

/**
 * Base for provider file APIs: reads provider credentials/endpoint from
 * configuration and prepares a jclouds ContextBuilder.
 */
trait ScopeResourcesApi extends Slf4jLogger {

  // jclouds wiring: same-thread executors plus SSH support.
  protected val modules = ImmutableSet.of(new ExecutorServiceModule(sameThreadExecutor(), sameThreadExecutor()), new SshjSshClientModule());

  // Provider-specific configuration keys, e.g. "cloud.provider.vsphere.user".
  protected val cloudProvider = ScopeUtils.configuration.getString("cloud.provider")
  protected val fileProviderName = ScopeUtils.configuration.getString(s"cloud.provider.$cloudProvider.file")
  protected val username: String = ScopeUtils.configuration.getString(s"cloud.provider.$cloudProvider.user")
  protected val password: String = ScopeUtils.configuration.getString(s"cloud.provider.$cloudProvider.password")

  // Populated by init(); concrete objects must call init() before using it.
  protected var context: ContextBuilder = null

  // Builds the jclouds context. The endpoint is mandatory for
  // vsphere/openstack and optional (falls back to the default) for aws.
  protected def init() {
    context = ContextBuilder.newBuilder(fileProviderName)
      .credentials(username, password)
      .modules(modules)
    cloudProvider match {
      case "vsphere" | "openstack" => {
        val endpoint = Option(ScopeUtils.configuration.getString(s"cloud.provider.$cloudProvider.endpoint")) match {
          case Some(ep) => ep
          case None => {
            logError(s"Could not find value for 'cloud.provider.$cloudProvider.endpoint'")
            throw new NoSuchElementException(s"'cloud.provider.$cloudProvider.endpoint' doesn't map to an existing object")
          }
        }
        context.endpoint(endpoint)
      }
      case "aws" => {
        val endpoint = Option(ScopeUtils.configuration.getString(s"cloud.provider.$cloudProvider.endpoint")) match {
          case Some(ep) => {
            context.endpoint(ep)
          }
          case None => {
            logInfo(s"Could not find value for 'cloud.provider.$cloudProvider.endpoint'. Using default")
          }
        }
      }
      case _ =>
    }
  }

  /** Uploads `file` to the provider's resource store (abstract). */
  def loadFile(file: String)
}

/** vSphere implementation: uploads via the jclouds FileManagerApi. */
object VSphereResourcesApi extends ScopeResourcesApi {
  // Must run before resourcesApi below is built from the context.
  init()

  private val resourcesApi: FileManagerApi = context.buildInjector().getInstance(classOf[FileManagerApi])

  def loadFile(file: String): Unit = {
    val filename = FilenameUtils.getName(file)
    resourcesApi.uploadFile(file, s"${ScopeConstants.SCOPE_RESOURCES_FOLDER}$filename")
  }
}

/** No-op implementation for providers without a file API. */
object NullResourceApi extends ScopeResourcesApi {
  def loadFile(file: String): Unit = {
    logInfo("**** Unimplemented!!! ****")
  }
}
foundation-runtime/orchestration
src/main/java/com/cisco/oss/foundation/orchestration/scope/utils/ResourcesUtils.scala
Scala
apache-2.0
3,923
/**
 * Prints the `ClassManifest` and the `ClassTag` for several collection types,
 * each spelled both through its alias (`scala.List`, `Predef.Set`) and through
 * its fully-qualified name (`scala.collection.immutable.List` / `Set`).
 *
 * Presumably a regression test (t6329) checking that both spellings resolve to
 * the same runtime representation — the expected-output check file is not
 * visible here, so confirm against the test's `.check` file.
 *
 * `ClassManifest` is deprecated since 2.10; the annotation below only
 * suppresses those deprecation warnings for the whole object.
 */
@deprecated("Suppress warnings", since="2.11")
object Test extends App {
  import scala.reflect.{ClassManifest, classTag}

  // alias vs. fully-qualified List
  println(implicitly[ClassManifest[scala.List[_]]])
  println(classTag[scala.List[_]])
  println(implicitly[ClassManifest[scala.collection.immutable.List[_]]])
  println(classTag[scala.collection.immutable.List[_]])

  // alias vs. fully-qualified Set
  println(implicitly[ClassManifest[Predef.Set[_]]])
  println(classTag[Predef.Set[_]])
  println(implicitly[ClassManifest[scala.collection.immutable.Set[_]]])
  println(classTag[scala.collection.immutable.Set[_]])
}
lrytz/scala
test/files/run/t6329_vanilla.scala
Scala
apache-2.0
555
package controllers.circs.consent_and_declaration import app.ReportChange._ import utils.WithJsBrowser import controllers.CircumstancesScenarioFactory import org.specs2.mutable._ import utils.pageobjects.circumstances.consent_and_declaration.GCircsDeclarationPage import utils.pageobjects.circumstances.report_changes.GOtherChangeInfoPage import utils.pageobjects.circumstances.start_of_process.GCircsYourDetailsPage import utils.pageobjects.{PageObjects, TestData} class GCircsDeclarationIntegrationSpec extends Specification { section("integration", models.domain.CircumstancesConsentAndDeclaration.id) "Declaration" should { val obtainInfoAgreement = "no" val obtainInfoWhy = "Cause I want" val someOneElse = "Yes" "be presented" in new WithJsBrowser with PageObjects{ val page = GCircsDeclarationPage(context) page goToThePage() } "navigate to previous page" in new WithJsBrowser with PageObjects{ val page=GCircsYourDetailsPage(context) page goToThePage() val claim = CircumstancesScenarioFactory.aboutDetails claim.CircumstancesReportChanges = AdditionalInfo.name val otherChangePage = page runClaimWith(claim, GOtherChangeInfoPage.url) otherChangePage must beAnInstanceOf[GOtherChangeInfoPage] val prevPage = otherChangePage.goBack() prevPage must beAnInstanceOf[GCircsYourDetailsPage] } "navigate to next page" in new WithJsBrowser with PageObjects{ val page = GCircsDeclarationPage(context) val claim = CircumstancesScenarioFactory.otherChangeInfo page goToThePage() page fillPageWith claim val nextPage = page submitPage () nextPage must beAnInstanceOf[GCircsDeclarationPage] } "missing obtainInfoAgreement field" in new WithJsBrowser with PageObjects{ val page = GCircsDeclarationPage(context) val claim = new TestData claim.CircumstancesDeclarationWhy = obtainInfoWhy page goToThePage() page fillPageWith claim val errors = page.submitPage().listErrors errors.size mustEqual 1 errors(0) must contain("Do you agree to the Carer's Allowance Unit contacting anyone mentioned in this form? 
- You must complete this section") } "given obtainInfoAgreement is set to 'no' missing obtainInfoWhy field" in new WithJsBrowser with PageObjects{ val page = GCircsDeclarationPage(context) val claim = new TestData claim.CircumstancesDeclarationInfoAgreement = obtainInfoAgreement page goToThePage() page fillPageWith claim val errors = page.submitPage().listErrors errors.size mustEqual 1 errors(0) must contain("List anyone you don't want to be contacted and say why. - You must complete this section") } "given circsSomeOneElse checked and missing name or organisation field" in new WithJsBrowser with PageObjects{ val page = GCircsDeclarationPage(context) val claim = new TestData claim.CircumstancesDeclarationInfoAgreement = obtainInfoAgreement claim.CircumstancesDeclarationWhyNot = obtainInfoWhy claim.CircumstancesSomeOneElseConfirmation = someOneElse page goToThePage() page fillPageWith claim val errors = page.submitPage().listErrors errors.size mustEqual 1 errors(0) must contain("Your name or organisation - You must complete this section") } "not have name or organisation field with optional text" in new WithJsBrowser with PageObjects{ val page = GCircsDeclarationPage(context) val claim = new TestData claim.CircumstancesSomeOneElseConfirmation = someOneElse page goToThePage() page fillPageWith claim page.readLabel("nameOrOrganisation") mustEqual("Your name or organisation") } "page contains JS enabled check" in new WithJsBrowser with PageObjects { val page = GCircsDeclarationPage(context) page goToThePage() page.jsCheckEnabled must beTrue } } section("integration", models.domain.CircumstancesConsentAndDeclaration.id) }
Department-for-Work-and-Pensions/ClaimCapture
c3/test/controllers/circs/consent_and_declaration/GCircsDeclarationIntegrationSpec.scala
Scala
mit
4,062
package com.caibowen.prma.analyse.test

import java.util.Date

import com.caibowen.prma.api.LogLevel
import com.caibowen.prma.query.{Statistician, Q}
import com.zaxxer.hikari.HikariDataSource
import gplume.scala.jdbc.DB

import scala.slick.driver.MySQLDriver.simple._
import scala.slick.jdbc.{GetResult, StaticQuery => SQ}

/**
 * Manual smoke-test driver for [[Statistician.timelineCounter]].
 *
 * NOTE(review): this is not an automated test — it requires a live MySQL
 * instance at localhost:3306 with the `prma_log_event` schema, and it embeds
 * credentials in plain text. Consider moving the credentials to configuration
 * and converting this into a proper test suite.
 *
 * @author BowenCai
 * @since 12/12/2014.
 */
object TestQuery extends App {

  // HikariCP pool pointed at the local log-event database.
  val ds = new HikariDataSource
  ds.setAutoCommit(true)
  ds.setMinimumIdle(2)
  ds.setMaximumPoolSize(32)
  ds.setDriverClassName("com.mysql.jdbc.Driver")
  ds.setUsername("xKommando")
  ds.setPassword("123456")
  ds.setJdbcUrl("jdbc:mysql://localhost:3306/prma_log_event")

  val db = new DB(ds)

  // Query a timeline counter over the full severity range (TRACE..FATAL)
  // and dump the three components of the result tuple.
  // NOTE(review): the null check suggests timelineCounter may return null
  // instead of an Option — confirm against its definition.
  db.readOnlySession{implicit session=>
    val r3 = Statistician.timelineCounter(1L, 1520727416662L, LogLevel.TRACE.id, LogLevel.FATAL.id)
    if (r3 != null) {
      println(r3._1)
      println(r3._2)
      println(r3._3)
    }
  }

  // Earlier experiments with raw Slick static queries, kept for reference:
  //  val db = Database.forDataSource(ds)
  //  val g = GetResult.createGetTuple3[Long, Int, Int]
  //
  //  db.withSession { implicit session =>
  //    val rs = SQ.queryNA[(Long, Int, Int)]("select * from exception where id = -6052837899193958288")
  //      .foreach(a=>println(a._1, a._2, a._3))
  ////    println(rs)
  ////    println(g.children)
  //  }
  //  val q = sql"select * from exception where id = -6052837899193958288".as[(Long, Int, Int)]
  //  println(q.apply)
  //  SQ.update("")
}
xkommando/PRMA
analyze/src/test/scala/com/caibowen/prma/analyse/test/TestQuery.scala
Scala
lgpl-3.0
1,419
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ai.h2o.sparkling.ml.params

import ai.h2o.sparkling.ml.models.SpecificMOJOParameters
import hex.genmodel.MojoModel
import org.apache.spark.expose.Logging

import scala.util.control.NonFatal

/**
 * Exposes the H2O GAM `gam_columns` model parameter on a Sparkling Water MOJO wrapper.
 */
trait HasGamColsOnMOJO extends ParameterConstructorMethods with SpecificMOJOParameters with Logging {

  // Nullable array-of-arrays param: each inner array names the predictor
  // column(s) of one GAM smoother.
  private val gamCols = nullableStringArrayArrayParam(
    name = "gamCols",
    doc = "Arrays of predictor column names for gam for smoothers using single or " +
      "multiple predictors like {{'c1'},{'c2','c3'},{'c4'},...}")

  /** @return the configured GAM columns (may be null, see param definition). */
  def getGamCols(): Array[Array[String]] = $(gamCols)

  /**
   * Copies the `gam_columns` value out of the underlying H2O MOJO's model
   * attributes into the `gamCols` param. Errors while reading the MOJO
   * attributes are logged and otherwise ignored (best-effort).
   *
   * Fix: catch only [[NonFatal]] errors instead of every `Throwable`, so
   * fatal JVM conditions (OutOfMemoryError, InterruptedException, ...) still
   * propagate rather than being swallowed by the best-effort log.
   *
   * @param h2oMojo the loaded H2O MOJO model to read parameters from
   */
  override private[sparkling] def setSpecificParams(h2oMojo: MojoModel): Unit = {
    super.setSpecificParams(h2oMojo)
    try {
      val h2oParameters = h2oMojo._modelAttributes.getModelParameters()
      val h2oParametersMap = h2oParameters.map(i => i.name -> i.actual_value).toMap
      // Only set the param when the MOJO actually carries a gam_columns entry.
      h2oParametersMap.get("gam_columns").foreach(value => set("gamCols", value))
    } catch {
      case NonFatal(e) =>
        logError("An error occurred during a try to access H2O MOJO parameters.", e)
    }
  }
}
h2oai/sparkling-water
scoring/src/main/scala/ai/h2o/sparkling/ml/params/HasGamColsOnMOJO.scala
Scala
apache-2.0
1,849
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.nodes.physical.batch import org.apache.flink.api.dag.Transformation import org.apache.flink.configuration.MemorySize import org.apache.flink.streaming.api.operators.SimpleOperatorFactory import org.apache.flink.table.api.config.ExecutionConfigOptions import org.apache.flink.table.data.RowData import org.apache.flink.table.planner.calcite.FlinkTypeFactory import org.apache.flink.table.planner.codegen.ProjectionCodeGenerator.generateProjection import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, LongHashJoinGenerator} import org.apache.flink.table.planner.delegation.BatchPlanner import org.apache.flink.table.planner.plan.`trait`.{FlinkRelDistribution, FlinkRelDistributionTraitDef} import org.apache.flink.table.planner.plan.cost.{FlinkCost, FlinkCostFactory} import org.apache.flink.table.planner.plan.nodes.FlinkConventions import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil import org.apache.flink.table.planner.plan.utils.{FlinkRelMdUtil, JoinUtil} import org.apache.flink.table.runtime.operators.join.{HashJoinOperator, HashJoinType} import 
org.apache.flink.table.runtime.typeutils.{BinaryRowDataSerializer, InternalTypeInfo} import org.apache.flink.table.types.logical.RowType import org.apache.calcite.plan._ import org.apache.calcite.rel.core._ import org.apache.calcite.rel.metadata.RelMetadataQuery import org.apache.calcite.rel.{RelNode, RelWriter} import org.apache.calcite.rex.RexNode import org.apache.calcite.util.Util import java.util import scala.collection.JavaConversions._ /** * Batch physical RelNode for hash [[Join]]. */ class BatchExecHashJoin( cluster: RelOptCluster, traitSet: RelTraitSet, leftRel: RelNode, rightRel: RelNode, condition: RexNode, joinType: JoinRelType, // true if LHS is build side, else false val leftIsBuild: Boolean, // true if build side is broadcast, else false val isBroadcast: Boolean, val tryDistinctBuildRow: Boolean) extends BatchExecJoinBase(cluster, traitSet, leftRel, rightRel, condition, joinType) { private val (leftKeys, rightKeys) = JoinUtil.checkAndGetJoinKeys(keyPairs, getLeft, getRight, allowEmptyKey = true) val (buildKeys, probeKeys) = if (leftIsBuild) (leftKeys, rightKeys) else (rightKeys, leftKeys) // Inputs could be changed. See [[BiRel.replaceInput]]. 
def buildRel: RelNode = if (leftIsBuild) getLeft else getRight def probeRel: RelNode = if (leftIsBuild) getRight else getLeft val hashJoinType: HashJoinType = HashJoinType.of( leftIsBuild, getJoinType.generatesNullsOnRight(), getJoinType.generatesNullsOnLeft(), getJoinType == JoinRelType.SEMI, getJoinType == JoinRelType.ANTI) override def copy( traitSet: RelTraitSet, conditionExpr: RexNode, left: RelNode, right: RelNode, joinType: JoinRelType, semiJoinDone: Boolean): Join = { new BatchExecHashJoin( cluster, traitSet, left, right, conditionExpr, joinType, leftIsBuild, isBroadcast, tryDistinctBuildRow) } override def explainTerms(pw: RelWriter): RelWriter = { super.explainTerms(pw) .itemIf("isBroadcast", "true", isBroadcast) .item("build", if (leftIsBuild) "left" else "right") .itemIf("tryDistinctBuildRow", "true", tryDistinctBuildRow) } override def computeSelfCost(planner: RelOptPlanner, mq: RelMetadataQuery): RelOptCost = { val leftRowCnt = mq.getRowCount(getLeft) val rightRowCnt = mq.getRowCount(getRight) if (leftRowCnt == null || rightRowCnt == null) { return null } // assume memory is big enough to load into all build size data, spill will not happen. // count in network cost of Exchange node before build size child here val cpuCost = FlinkCost.HASH_CPU_COST * (leftRowCnt + rightRowCnt) val (buildRowCount, buildRowSize) = if (leftIsBuild) { (leftRowCnt, FlinkRelMdUtil.binaryRowAverageSize(getLeft)) } else { (rightRowCnt, FlinkRelMdUtil.binaryRowAverageSize(getRight)) } // We aim for a 200% utilization of the bucket table when all the partition buffers are full. 
// TODO use BinaryHashBucketArea.RECORD_BYTES instead of 8 val bucketSize = buildRowCount * 8 / FlinkCost.HASH_COLLISION_WEIGHT val recordSize = buildRowCount * (buildRowSize + BinaryRowDataSerializer.LENGTH_SIZE_IN_BYTES) val memCost = (bucketSize + recordSize) * shuffleBuildCount(mq) val costFactory = planner.getCostFactory.asInstanceOf[FlinkCostFactory] costFactory.makeCost(mq.getRowCount(this), cpuCost, 0, 0, memCost) } private[flink] def shuffleBuildCount(mq: RelMetadataQuery): Int = { val probeRel = if (leftIsBuild) getRight else getLeft if (isBroadcast) { val rowCount = Util.first(mq.getRowCount(probeRel), 1) val shuffleCount = rowCount * mq.getAverageRowSize(probeRel) / FlinkCost.SQL_DEFAULT_PARALLELISM_WORKER_PROCESS_SIZE Math.max(1, shuffleCount.toInt) } else { 1 } } override def satisfyTraits(requiredTraitSet: RelTraitSet): Option[RelNode] = { if (!isBroadcast) { satisfyTraitsOnNonBroadcastHashJoin(requiredTraitSet) } else { satisfyTraitsOnBroadcastJoin(requiredTraitSet, leftIsBuild) } } private def satisfyTraitsOnNonBroadcastHashJoin( requiredTraitSet: RelTraitSet): Option[RelNode] = { val requiredDistribution = requiredTraitSet.getTrait(FlinkRelDistributionTraitDef.INSTANCE) val (canSatisfyDistribution, leftRequiredDistribution, rightRequiredDistribution) = satisfyHashDistributionOnNonBroadcastJoin(requiredDistribution) if (!canSatisfyDistribution) { return None } val toRestrictHashDistributionByKeys = (distribution: FlinkRelDistribution) => getCluster.getPlanner .emptyTraitSet .replace(FlinkConventions.BATCH_PHYSICAL) .replace(distribution) val leftRequiredTraits = toRestrictHashDistributionByKeys(leftRequiredDistribution) val rightRequiredTraits = toRestrictHashDistributionByKeys(rightRequiredDistribution) val newLeft = RelOptRule.convert(getLeft, leftRequiredTraits) val newRight = RelOptRule.convert(getRight, rightRequiredTraits) val providedTraits = getTraitSet.replace(requiredDistribution) // HashJoin can not satisfy collation. 
Some(copy(providedTraits, Seq(newLeft, newRight))) } //~ ExecNode methods ----------------------------------------------------------- override def getInputEdges: util.List[ExecEdge] = { val (buildRequiredShuffle, probeRequiredShuffle) = if (isBroadcast) { (ExecEdge.RequiredShuffle.broadcast(), ExecEdge.RequiredShuffle.any()) } else { (ExecEdge.RequiredShuffle.hash(buildKeys), ExecEdge.RequiredShuffle.hash(probeKeys)) } val probeDamBehavior = if (hashJoinType.buildLeftSemiOrAnti()) { ExecEdge.DamBehavior.END_INPUT } else { ExecEdge.DamBehavior.PIPELINED } val buildEdge = ExecEdge.builder() .requiredShuffle(buildRequiredShuffle) .damBehavior(ExecEdge.DamBehavior.BLOCKING) .priority(0) .build() val probeEdge = ExecEdge.builder() .requiredShuffle(probeRequiredShuffle) .damBehavior(probeDamBehavior) .priority(1) .build() if (leftIsBuild) { List(buildEdge, probeEdge) } else { List(probeEdge, buildEdge) } } override protected def translateToPlanInternal( planner: BatchPlanner): Transformation[RowData] = { val config = planner.getTableConfig val lInput = getInputNodes.get(0).translateToPlan(planner) .asInstanceOf[Transformation[RowData]] val rInput = getInputNodes.get(1).translateToPlan(planner) .asInstanceOf[Transformation[RowData]] // get type val lType = lInput.getOutputType.asInstanceOf[InternalTypeInfo[RowData]].toRowType val rType = rInput.getOutputType.asInstanceOf[InternalTypeInfo[RowData]].toRowType val keyType = RowType.of(leftKeys.map(lType.getTypeAt): _*) val condFunc = JoinUtil.generateConditionFunction( config, cluster.getRexBuilder, getJoinInfo, lType, rType) // projection for equals val lProj = generateProjection( CodeGeneratorContext(config), "HashJoinLeftProjection", lType, keyType, leftKeys) val rProj = generateProjection( CodeGeneratorContext(config), "HashJoinRightProjection", rType, keyType, rightKeys) val (build, probe, bProj, pProj, bType, pType, reverseJoin) = if (leftIsBuild) { (lInput, rInput, lProj, rProj, lType, rType, false) } else { (rInput, 
lInput, rProj, lProj, rType, lType, true) } val mq = getCluster.getMetadataQuery val buildRowSize = Util.first(mq.getAverageRowSize(buildRel), 24).toInt val buildRowCount = Util.first(mq.getRowCount(buildRel), 200000).toLong val probeRowCount = Util.first(mq.getRowCount(probeRel), 200000).toLong // operator val operator = if (LongHashJoinGenerator.support(hashJoinType, keyType, filterNulls)) { LongHashJoinGenerator.gen( config, hashJoinType, keyType, bType, pType, buildKeys, probeKeys, buildRowSize, buildRowCount, reverseJoin, condFunc) } else { SimpleOperatorFactory.of(HashJoinOperator.newHashJoinOperator( hashJoinType, condFunc, reverseJoin, filterNulls, bProj, pProj, tryDistinctBuildRow, buildRowSize, buildRowCount, probeRowCount, keyType )) } val managedMemory = MemorySize.parse(config.getConfiguration.getString( ExecutionConfigOptions.TABLE_EXEC_RESOURCE_HASH_JOIN_MEMORY)).getBytes ExecNodeUtil.createTwoInputTransformation( build, probe, getRelDetailedDescription, operator, InternalTypeInfo.of(FlinkTypeFactory.toLogicalRowType(getRowType)), probe.getParallelism, managedMemory) } }
aljoscha/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchExecHashJoin.scala
Scala
apache-2.0
10,907
/*
  Author: uberset
  Date: 2015-11-14
  Licence: GPL v2
*/

package uberset.basic_compiler

// Abstract syntax tree for a small BASIC dialect. Each definition carries the
// EBNF production it models; companion objects provide convenience factories.

// string = '"' string-char* '"' ;
// string-char = ? any character except '"' and newline> ? ;
// comment-text = comment-char* ;
// comment-char = ? any character except newline> ? ;
// integer = digit+ ;
// digit = '0' ... '9' ;
// letter = 'A' ... 'Z' ;
// identifier = letter digit? ;

// program = line* ;
case class Program(lines: Seq[Line])

// line = [integer] statement ;
case class Line(nr: Option[Int], stm: Statement)

// statement = print | goto | gosub | return | let | if | rem | input | dim | for | next ;
sealed abstract class Statement

// next = 'NEXT' ;
case class Next(id: String) extends Statement

// for = 'FOR' identifier '=' expression 'TO' expression ( 'STEP' expression ) ;
case class For(id: String, from: Expression, to: Expression, step: Option[Expression]) extends Statement

// dim = 'DIM' identifier '(' integer ')' ;
case class Dim(variable: String, upper: Int) extends Statement

// input = 'INPUT' variable ;
case class Input(variable: Variable) extends Statement

// rem = 'REM' comment-text ;
case class Rem() extends Statement

// if = 'IF' condition 'THEN' integer ;
case class If(cond: Condition, line: Int) extends Statement

// condition = expression relop expression ;
case class Condition(expr1: Expression, op: RelOp, expr2: Expression)

// relop = ( '=' | '<' | '>' | '<=' | '>=' | '<>' ) ,
sealed abstract class RelOp
case class EQ() extends RelOp
case class LT() extends RelOp
case class GT() extends RelOp
case class LE() extends RelOp
case class GE() extends RelOp
case class NE() extends RelOp

// let = 'LET' variable '=' expression ;
case class Let(variable: Variable, expression: Expression) extends Statement

// print = 'PRINT' printargument ;
case class Print(arg: PrintArgument) extends Statement

// printargument = string | expression ;
trait PrintArgument
case class StringArg(string: String) extends PrintArgument

// expression = ['-'|'+'] term (addoperation term)* ;
// `negation` is true when the expression is prefixed with unary '-'.
case class Expression(negation: Boolean, term: Term, ops: List[(AddOp, Term)]) extends Factor with PrintArgument

// term = factor (muloperation factor)* ;
case class Term(factor: Factor, ops: List[(MulOp, Factor)])

// factor = intvalue | variable | "(" expression ")" ;
sealed abstract class Factor

// intvalue = integer ;
case class IntValue(value: Int) extends Factor

// variable = identifier ( '(' expression ')' ) ;
case class Variable(name: String, subscript: Option[Expression]) extends Factor

// addoperation = ( '+' | '-' ) ;
sealed abstract class AddOp
case class Add() extends AddOp
case class Sub() extends AddOp

// muloperation = ( '*' | '/' ) ;
sealed abstract class MulOp
case class Mul() extends MulOp
case class Div() extends MulOp

// goto = 'GOTO' integer ;
case class Goto(nr: Int) extends Statement

// gosub = 'GOSUB' integer ;
case class Gosub(nr: Int) extends Statement

// return = 'RETURN' ;
case class Return() extends Statement

object Line {
    def apply(stm: Statement): Line = Line(None, stm)
    def apply(nr: Int, stm: Statement): Line = Line(Some(nr), stm)
}

object Print {
    def apply(string: String): Print = Print(StringArg(string))
}

object Expression {
    // NOTE(review): this overload hard-codes negation = true, while every
    // sibling overload defaults it to false. Looks like an unintended `true`
    // — verify against the parser's call sites before changing.
    def apply(term: Term, list: List[(AddOp, Term)]): Expression = Expression(true, term, list)
    def apply(term: Term): Expression = Expression(false, term, List())
    def apply(neg: Boolean, term: Term): Expression = Expression(neg, term, List())
    def apply(t1: Term, op: AddOp, t2: Term): Expression = Expression(false, t1, List((op, t2)))
    def apply(neg: Boolean, t1: Term, op: AddOp, t2: Term): Expression = Expression(neg, t1, List((op, t2)))
}

object Term {
    def apply(factor: Factor): Term = Term(factor, List())
}

object Variable {
    def apply(name: String): Variable = Variable(name, None)
    def apply(name: String, expression: Expression): Variable = Variable(name, Some(expression))
}

object For {
    def apply(id: String, from: Expression, to: Expression): For = For(id, from, to, None)
    def apply(id: String, from: Expression, to: Expression, step: Expression): For = For(id, from, to, Some(step))
}
uberset/basic-compiler
src/main/scala/uberset/basic_compiler/Grammar.scala
Scala
gpl-2.0
4,168
/* * Copyright 2015-2016 IBM Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package whisk.core.controller import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.util.Failure import scala.util.Success import spray.http.StatusCode import spray.http.StatusCodes.Conflict import spray.http.StatusCodes.InternalServerError import spray.http.StatusCodes.NotFound import spray.http.StatusCodes.OK import spray.httpx.SprayJsonSupport._ import spray.httpx.marshalling.ToResponseMarshallable.isMarshallable import spray.json.DefaultJsonProtocol._ import spray.json.JsBoolean import spray.json.JsObject import spray.json.JsValue import spray.json.RootJsonFormat import spray.routing.Directives import spray.routing.RequestContext import whisk.common.Logging import whisk.common.TransactionId import whisk.core.controller.PostProcess.PostProcessEntity import whisk.core.database.ArtifactStore import whisk.core.database.ArtifactStoreException import whisk.core.database.DocumentConflictException import whisk.core.database.DocumentFactory import whisk.core.database.DocumentTypeMismatchException import whisk.core.database.NoDocumentException import whisk.core.entity.DocId import whisk.core.entity.WhiskDocument import whisk.core.entity.WhiskEntity import whisk.http.ErrorResponse import whisk.http.ErrorResponse.terminate import whisk.http.Messages._ /** An exception to throw inside a Predicate future. 
*/ protected[core] case class RejectRequest(code: StatusCode, message: Option[ErrorResponse]) extends Throwable { override def toString = s"RejectRequest($code)" + message.map(" " + _.error).getOrElse("") } protected[core] object RejectRequest { /** Creates rejection with default message for status code. */ protected[core] def apply(code: StatusCode)(implicit transid: TransactionId): RejectRequest = { RejectRequest(code, Some(ErrorResponse.response(code)(transid))) } /** Creates rejection with custom message for status code. */ protected[core] def apply(code: StatusCode, m: String)(implicit transid: TransactionId): RejectRequest = { RejectRequest(code, Some(ErrorResponse(m, transid))) } /** Creates rejection with custom message for status code derived from reason for throwable. */ protected[core] def apply(code: StatusCode, t: Throwable)(implicit transid: TransactionId): RejectRequest = { val reason = t.getMessage RejectRequest(code, if (reason != null) reason else "Rejected") } } protected[controller] object FilterEntityList { import WhiskEntity.sharedFieldName /** * Filters from a list of entities serialized to JsObjects only those * that have the shared field ("publish") equal to true and excludes * all others. */ protected[controller] def filter( resources: List[JsValue], excludePrivate: Boolean, additionalFilter: JsObject => Boolean = (_ => true)) = { if (excludePrivate) { resources filter { case obj: JsObject => obj.getFields(sharedFieldName) match { case Seq(JsBoolean(true)) => true && additionalFilter(obj) // a shared entity case _ => false } case _ => false // only expecting JsObject instances } } else resources } } /** * A convenient typedef for functions that post process an entity * on an operation and terminate the HTTP request. 
*/ package object PostProcess { type PostProcessEntity[A] = A => RequestContext => Unit } /** A trait for REST APIs that read entities from a datastore */ trait ReadOps extends Directives { /** An execution context for futures */ protected implicit val executionContext: ExecutionContext protected implicit val logging: Logging /** * Get all entities of type A from datastore that match key. Terminates HTTP request. * * @param factory the factory that can fetch entities of type A from datastore * @param datastore the client to the database * @param key the key to use to match records in the view, optional, if not defined, use namespace * @param view the view to query * @param filter a function List[A] => List[A] that filters the results * * Responses are one of (Code, Message) * - 200 entity A [] as JSON [] * - 500 Internal Server Error */ protected def listEntities(list: Future[List[JsValue]])(implicit transid: TransactionId) = { onComplete(list) { case Success(entities) => logging.info(this, s"[LIST] entity success") complete(OK, entities) case Failure(t: Throwable) => logging.error(this, s"[LIST] entity failed: ${t.getMessage}") terminate(InternalServerError) } } /** * Gets an entity of type A from datastore. Terminates HTTP request. 
* * @param factory the factory that can fetch entity of type A from datastore * @param datastore the client to the database * @param docid the document id to get * @param postProcess an optional continuation to post process the result of the * get and terminate the HTTP request directly * * Responses are one of (Code, Message) * - 200 entity A as JSON * - 404 Not Found * - 500 Internal Server Error */ protected def getEntity[A, Au >: A]( factory: DocumentFactory[A], datastore: ArtifactStore[Au], docid: DocId, postProcess: Option[PostProcessEntity[A]] = None)( implicit transid: TransactionId, format: RootJsonFormat[A], ma: Manifest[A]) = { onComplete(factory.get(datastore, docid)) { case Success(entity) => logging.info(this, s"[GET] entity success") postProcess map { _(entity) } getOrElse complete(OK, entity) case Failure(t: NoDocumentException) => logging.info(this, s"[GET] entity does not exist") terminate(NotFound) case Failure(t: DocumentTypeMismatchException) => logging.info(this, s"[GET] entity conformance check failed: ${t.getMessage}") terminate(Conflict, conformanceMessage) case Failure(t: ArtifactStoreException) => logging.info(this, s"[GET] entity unreadable") terminate(InternalServerError, t.getMessage) case Failure(t: Throwable) => logging.error(this, s"[GET] entity failed: ${t.getMessage}") terminate(InternalServerError) } } /** * Gets an entity of type A from datastore and project fields for response. Terminates HTTP request. 
* * @param factory the factory that can fetch entity of type A from datastore * @param datastore the client to the database * @param docid the document id to get * @param project a function A => JSON which projects fields form A * * Responses are one of (Code, Message) * - 200 project(A) as JSON * - 404 Not Found * - 500 Internal Server Error */ protected def getEntityAndProject[A, Au >: A]( factory: DocumentFactory[A], datastore: ArtifactStore[Au], docid: DocId, project: A => JsObject)( implicit transid: TransactionId, format: RootJsonFormat[A], ma: Manifest[A]) = { onComplete(factory.get(datastore, docid)) { case Success(entity) => logging.info(this, s"[PROJECT] entity success") complete(OK, project(entity)) case Failure(t: NoDocumentException) => logging.info(this, s"[PROJECT] entity does not exist") terminate(NotFound) case Failure(t: DocumentTypeMismatchException) => logging.info(this, s"[PROJECT] entity conformance check failed: ${t.getMessage}") terminate(Conflict, conformanceMessage) case Failure(t: ArtifactStoreException) => logging.info(this, s"[PROJECT] entity unreadable") terminate(InternalServerError, t.getMessage) case Failure(t: Throwable) => logging.error(this, s"[PROJECT] entity failed: ${t.getMessage}") terminate(InternalServerError) } } } /** A trait for REST APIs that write entities to a datastore */ trait WriteOps extends Directives { /** An execution context for futures */ protected implicit val executionContext: ExecutionContext protected implicit val logging: Logging /** * A predicate future that completes with true iff the entity should be * stored in the datastore. Future should fail otherwise with RejectPut. */ protected type PutPredicate = Future[Boolean] /** * Creates or updates an entity of type A in the datastore. First, fetch the entity * by id from the datastore (this is required to get the document revision for an update). * If the entity does not exist, create it. If it does exist, and 'overwrite' is enabled, * update the entity. 
* * @param factory the factory that can fetch entity of type A from datastore * @param datastore the client to the database * @param docid the document id to put * @param overwrite updates an existing entity iff overwrite == true * @param update a function (A) => Future[A] that updates the existing entity with PUT content * @param create a function () => Future[A] that creates a new entity from PUT content * @param treatExistsAsConflict if true and document exists but overwrite is not enabled, respond * with Conflict else return OK and the existing document * * Responses are one of (Code, Message) * - 200 entity A as JSON * - 400 Bad Request * - 409 Conflict * - 500 Internal Server Error */ protected def putEntity[A, Au >: A]( factory: DocumentFactory[A], datastore: ArtifactStore[Au], docid: DocId, overwrite: Boolean, update: A => Future[A], create: () => Future[A], treatExistsAsConflict: Boolean = true, postProcess: Option[PostProcessEntity[A]] = None)( implicit transid: TransactionId, format: RootJsonFormat[A], ma: Manifest[A]) = { // marker to return an existing doc with status OK rather than conflict if overwrite is false case class IdentityPut(self: A) extends Throwable onComplete(factory.get(datastore, docid) flatMap { doc => if (overwrite) { logging.info(this, s"[PUT] entity exists, will try to update '$doc'") update(doc) } else if (treatExistsAsConflict) { logging.info(this, s"[PUT] entity exists, but overwrite is not enabled, aborting") Future failed RejectRequest(Conflict, "resource already exists") } else { Future failed IdentityPut(doc) } } recoverWith { case _: NoDocumentException => logging.info(this, s"[PUT] entity does not exist, will try to create it") create() } flatMap { a => logging.info(this, s"[PUT] entity created/updated, writing back to datastore") factory.put(datastore, a) map { _ => a } }) { case Success(entity) => logging.info(this, s"[PUT] entity success") postProcess map { _(entity) } getOrElse complete(OK, entity) case 
Failure(IdentityPut(a)) => logging.info(this, s"[PUT] entity exists, not overwritten") complete(OK, a) case Failure(t: DocumentConflictException) => logging.info(this, s"[PUT] entity conflict: ${t.getMessage}") terminate(Conflict, conflictMessage) case Failure(RejectRequest(code, message)) => logging.info(this, s"[PUT] entity rejected with code $code: $message") terminate(code, message) case Failure(t: DocumentTypeMismatchException) => logging.info(this, s"[PUT] entity conformance check failed: ${t.getMessage}") terminate(Conflict, conformanceMessage) case Failure(t: ArtifactStoreException) => logging.info(this, s"[PUT] entity unreadable") terminate(InternalServerError, t.getMessage) case Failure(t: Throwable) => logging.error(this, s"[PUT] entity failed: ${t.getMessage}") terminate(InternalServerError) } } /** * Deletes an entity of type A from datastore. * To delete an entity, first fetch the record to identify its revision and then delete it. * Terminates HTTP request. * * @param factory the factory that can fetch entity of type A from datastore * @param datastore the client to the database * @param docid the document id to delete * @param confirm a function (A => Future[Unit]) that confirms the entity is safe to delete (must fail future to abort) * or fails the future with an appropriate message * * Responses are one of (Code, Message) * - 200 entity A as JSON * - 404 Not Found * - 409 Conflict * - 500 Internal Server Error */ protected def deleteEntity[A <: WhiskDocument, Au >: A]( factory: DocumentFactory[A], datastore: ArtifactStore[Au], docid: DocId, confirm: A => Future[Unit], postProcess: Option[PostProcessEntity[A]] = None)( implicit transid: TransactionId, format: RootJsonFormat[A], ma: Manifest[A]) = { onComplete(factory.get(datastore, docid) flatMap { entity => confirm(entity) flatMap { case _ => factory.del(datastore, entity.docinfo) map { _ => entity } } }) { case Success(entity) => logging.info(this, s"[DEL] entity success") postProcess map { 
_(entity) } getOrElse complete(OK, entity) case Failure(t: NoDocumentException) => logging.info(this, s"[DEL] entity does not exist") terminate(NotFound) case Failure(t: DocumentConflictException) => logging.info(this, s"[DEL] entity conflict: ${t.getMessage}") terminate(Conflict, conflictMessage) case Failure(RejectRequest(code, message)) => logging.info(this, s"[DEL] entity rejected with code $code: $message") terminate(code, message) case Failure(t: DocumentTypeMismatchException) => logging.info(this, s"[DEL] entity conformance check failed: ${t.getMessage}") terminate(Conflict, conformanceMessage) case Failure(t: ArtifactStoreException) => logging.info(this, s"[DEL] entity unreadable") terminate(InternalServerError, t.getMessage) case Failure(t: Throwable) => logging.error(this, s"[DEL] entity failed: ${t.getMessage}") terminate(InternalServerError) } } }
CrowdFlower/incubator-openwhisk
core/controller/src/main/scala/whisk/core/controller/ApiUtils.scala
Scala
apache-2.0
15,943
class ExistentialDrawingChild(name: String, box: Box[A] forSome { type A <: Paper }, wasteBin: WasteBin[B] forSome { type B >: Paper }) { def draw = { val paper = box.take println("Drawing on "+paper) wasteBin.throwAway(paper) } override def toString = name }
grzegorzbalcerek/scala-book-examples
examples/ExistentialDrawingChild.scala
Scala
mit
282
/** * Copyright 2015, deepsense.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.deepsense.models.json.workflow import com.google.inject.{AbstractModule, Provides, Singleton} import io.deepsense.deeplang.catalogs.doperations.DOperationsCatalog import io.deepsense.models.json.graph.GraphJsonProtocol.GraphReader class GraphReaderModule extends AbstractModule { override def configure(): Unit = { // Done by 'provides' methods. } @Singleton @Provides def provideGraphReader(dOperationsCatalog: DOperationsCatalog): GraphReader = { new GraphReader(dOperationsCatalog) } }
deepsense-io/seahorse-workflow-executor
workflowjson/src/main/scala/io/deepsense/models/json/workflow/GraphReaderModule.scala
Scala
apache-2.0
1,123
/* * Copyright 2015 LG CNS. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package scouter.server.db.counter; import java.io.File import java.util.Enumeration import java.util.Iterator import java.util.Map import scouter.lang.value.MapValue import scouter.lang.value.Value import scouter.io.DataInputX import scouter.io.DataOutputX import scouter.server.db.DBCtr import scouter.util.FileUtil import scouter.util.IClose import scouter.util.IntKeyMap import scouter.util.StringIntMap import scouter.server.util.EnumerScala object RealtimeCounterDBHelper { def setTagBytes(tag: IntKeyMap[String], buff: Array[Byte]): MapValue = { if (buff == null) return null; val items = new MapValue(); try { val in = new DataInputX(buff) val count = in.readDecimal().toInt for (i <- 0 to count - 1) { val idx = in.readDecimal().toInt; val name = tag.get(idx); val value = in.readValue(); items.put(name, value); } } catch { case e: Exception => } return items; } def getTagBytes(tagMap: StringIntMap, items: MapValue): Array[Byte] = { if (tagMap.size() == 0) return null; val out = new DataOutputX(); try { out.writeDecimal(items.size()); EnumerScala.foreach(items.keys(), (name: String) => { val value = items.get(name); val idx = tagMap.get(name); if (idx >= 0) { out.writeDecimal(idx); out.writeValue(value); } }) } catch { case e: Exception => } return out.toByteArray(); } } class RealtimeCounterDBHelper extends IClose { var counterDbHeader: RealtimeCounterDBHeader = null var counterIndex: 
RealtimeCounterIndex = null var counterData: RealtimeCounterData = null var currentDateUnit = 0L var activeTime = 0L var path: String = null private def getDBPath(objName: String, date: String): String = { val sb = new StringBuffer(); sb.append(DBCtr.getRootPath()); sb.append("/").append(date).append("/counter"); return sb.toString(); } def open(objName: String, date: String, readOnly: Boolean): RealtimeCounterDBHelper = { path = getDBPath(objName, date) val f = new File(path); if (readOnly) { if (f.exists() == false) return null; } else { if (f.exists() == false) f.mkdirs(); } val file = path + "/real"; this.counterDbHeader = RealtimeCounterDBHeader.open(file); this.counterIndex = RealtimeCounterIndex.open(file); this.counterData = RealtimeCounterData.open(file); this.activeTime = System.currentTimeMillis(); return this; } def getKey(objName: String): String = { return objName; } override def close() { FileUtil.close(counterIndex); FileUtil.close(counterData); FileUtil.close(counterDbHeader); counterIndex = null; counterData = null; } def close(dbs: Map[String, RealtimeCounterDBHelper]) { val itr = dbs.values().iterator(); while (itr.hasNext()) { itr.next().close(); } } }
jahnaviancha/scouter
scouter.server/src/scouter/server/db/counter/RealtimeCounterDBHelper.scala
Scala
apache-2.0
4,072
/** * Copyright 2011-2017 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.core.assertion import io.gatling.BaseSpec import io.gatling.commons.stats._ import io.gatling.commons.stats.assertion.{ AssertionValidator, Assertion } import io.gatling.commons.util.StringHelper._ import io.gatling.core.config.GatlingConfiguration import org.mockito.Mockito.when class AssertionValidatorSpec extends BaseSpec with AssertionSupport { implicit val configuration = GatlingConfiguration.loadForTest() private type Conditions[T] = List[AssertionWithPathAndTarget[T] => Assertion] private type StatsModifiers = List[Stats => Stats] private case class Stats( generalStats: GeneralStats, requestName: String = "", groupPath: List[String] = Nil, status: Option[Status] = None ) { def request = requestName.trimToOption def group = if (groupPath.nonEmpty) Some(Group(groupPath)) else None } private val SetRequestThenGroupModifiers: StatsModifiers = List(_.copy(requestName = "foo"), _.copy(groupPath = List("foo"))) private def mockDataReaderWithStats[T: Numeric]( metric: AssertionWithPathAndTarget[T], conditions: Conditions[T], stats: Stats* ) = { def mockAssertion(source: GeneralStatsSource) = when(source.assertions) thenReturn conditions.map(_(metric)) def mockStats(stat: Stats, source: GeneralStatsSource) = { when(source.requestGeneralStats(stat.request, stat.group, stat.status)) thenReturn stat.generalStats stat.group.foreach { group => 
when(source.groupCumulatedResponseTimeGeneralStats(group, stat.status)) thenReturn stat.generalStats } } def statsPaths = stats.map(stat => (stat.request, stat.group)).map { case (Some(request), group) => RequestStatsPath(request, group) case (None, Some(group)) => GroupStatsPath(group) case _ => throw new AssertionError("Can't have neither a request or group stats path") }.toList def mockStatsPath(source: GeneralStatsSource) = when(source.statsPaths) thenReturn statsPaths val mockedGeneralStatsSource = mock[GeneralStatsSource] mockAssertion(mockedGeneralStatsSource) stats.foreach(mockStats(_, mockedGeneralStatsSource)) mockStatsPath(mockedGeneralStatsSource) mockedGeneralStatsSource } private def validateAssertions(source: GeneralStatsSource) = AssertionValidator.validateAssertions(source).map(_.result).forall(identity) "AssertionValidator" should "fail the assertion when the request path does not exist" in { val requestStats = Stats(GeneralStats.NoPlot, requestName = "bar") val reader1 = mockDataReaderWithStats[Int](details("foo").requestsPerSec, List(_.is(100)), requestStats) validateAssertions(reader1) shouldBe false val groupStats = Stats(GeneralStats.NoPlot, groupPath = List("bar")) val reader2 = mockDataReaderWithStats[Int](details("foo").requestsPerSec, List(_.is(100)), groupStats) validateAssertions(reader2) shouldBe false val requestAndGroupStats = Stats(GeneralStats.NoPlot, requestName = "baz", groupPath = List("bar")) val reader3 = mockDataReaderWithStats[Int](details("baz").requestsPerSec, List(_.is(100)), requestAndGroupStats) validateAssertions(reader3) shouldBe false } //TODO : add test on global and forAll it should "be able to validate a meanRequestsPerSec assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestAndGroupStats = modifier(Stats(GeneralStats.NoPlot.copy(meanRequestsPerSec = 5))) val conditions: Conditions[Int] = List(_.lte(10), _.gte(3), _.is(5), _.between(4, 6), _.in(1, 3, 5, 7)) val 
reader3 = mockDataReaderWithStats(details("foo").requestsPerSec, conditions, requestAndGroupStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a successfulRequests.count assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(count = 5), status = Some(OK))) val conditions: Conditions[Long] = List(_.lte(10), _.gte(3), _.is(5), _.between(4, 6), _.in(1, 3, 5, 7)) val reader3 = mockDataReaderWithStats(details("foo").successfulRequests.count, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a failedRequests.count assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(count = 5), status = Some(KO))) val conditions: Conditions[Long] = List(_.lte(10), _.gte(3), _.is(5), _.between(4, 6), _.in(1, 3, 5, 7)) val reader3 = mockDataReaderWithStats(details("foo").failedRequests.count, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a allRequests.count assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(count = 10))) val conditions: Conditions[Long] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").allRequests.count, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a successfulRequests.percent assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val successful = modifier(Stats(GeneralStats.NoPlot.copy(count = 10))) val failed = 
modifier(Stats(GeneralStats.NoPlot.copy(count = 5), status = Some(OK))) val conditions: Conditions[Double] = List(_.lte(60), _.gte(30), _.is(50), _.between(40, 60), _.in(20, 40, 50, 80)) val reader3 = mockDataReaderWithStats(details("foo").successfulRequests.percent, conditions, successful, failed) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a failedRequests.percent assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val failed = modifier(Stats(GeneralStats.NoPlot.copy(count = 10))) val successful = modifier(Stats(GeneralStats.NoPlot.copy(count = 5), status = Some(KO))) val conditions: Conditions[Double] = List(_.lte(60), _.gte(30), _.is(50), _.between(40, 60), _.in(20, 40, 50, 80)) val reader3 = mockDataReaderWithStats(details("foo").failedRequests.percent, conditions, failed, successful) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a allRequests.percent assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(count = 10))) val conditions: Conditions[Double] = List(_.lte(110), _.gte(90), _.is(100), _.between(80, 120), _.in(90, 100, 130)) val reader3 = mockDataReaderWithStats(details("foo").allRequests.percent, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.min assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(min = 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.min, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global 
and forAll it should "be able to validate a responseTime.max assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(max = 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.max, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.mean assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(mean = 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.mean, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.stdDev assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(stdDev = 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.stdDev, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.percentiles1 assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(percentile = _ => 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.percentile1, conditions, requestStats) validateAssertions(reader3) 
shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.percentiles2 assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(percentile = _ => 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.percentile2, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.percentiles3 assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(percentile = _ => 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.percentile3, conditions, requestStats) validateAssertions(reader3) shouldBe true } } //TODO : add test on global and forAll it should "be able to validate a responseTime.percentiles4 assertion for requests and groups" in { for (modifier <- SetRequestThenGroupModifiers) { val requestStats = modifier(Stats(GeneralStats.NoPlot.copy(percentile = _ => 10))) val conditions: Conditions[Int] = List(_.lte(15), _.gte(8), _.is(10), _.between(8, 12), _.in(1, 3, 10, 13)) val reader3 = mockDataReaderWithStats(details("foo").responseTime.percentile4, conditions, requestStats) validateAssertions(reader3) shouldBe true } } }
timve/gatling
gatling-core/src/test/scala/io/gatling/core/assertion/AssertionValidatorSpec.scala
Scala
apache-2.0
12,476
package uk.gov.gds.ier.model import uk.gov.gds.ier.test.UnitTestSuite import uk.gov.gds.ier.service.apiservice.OrdinaryApplication class OrdinaryApplicationTests extends UnitTestSuite { behavior of "OrdinaryApplication.toApiMap" it should "generate the expected payload map - simple case" in { lazy val application = createOrdinaryApplication val expected = Map( "fn" -> "John", "mn" -> "James", "ln" -> "Smith", "applicationType" -> "ordinary", "pfn" -> "James", "pmn" -> "John", "pln" -> "Smith", "dob" -> "1980-12-01", "nino" -> "XX 12 34 56 D", "nat" -> "GB, IE", "oadr" -> "none", "regproperty" -> "The (fake) Manor House", "regstreet" -> "123 Fake Street", "reglocality" -> "North Fake", "regtown" -> "Fakerton", "regarea" -> "Fakesbury", "reguprn" -> "12345", "regpostcode" -> "XX123 4XX", "pproperty" -> "The (fake) Cottage", "pstreet" -> "321 Fake Street", "plocality" -> "South Fake", "ptown" -> "Fakererly", "parea" -> "Fakesborough", "puprn" -> "54321", "ppostcode" -> "XX342 1XX", "pvote" -> "false", "opnreg" -> "false", "post" -> "true", "email" -> "test@email.com", "phone" -> "01234 5678910", "refNum" -> "12345678910", "ip" -> "256.256.256.256", "gssCode" -> "E09000007", "pgssCode" -> "E09000032", "timeTaken" -> "1234", "lang" -> "en", "webHash" -> "860da84c-74df-45b0-8ff8-d2d16ef8367a" ) val apiMap = application.toApiMap apiMap should matchMap(expected) } it should "generate the expected payload when registered while abroad" in { lazy val application = createOrdinaryApplication.copy( lastRegisteredToVote = Some(LastRegisteredToVote( lastRegisteredType = LastRegisteredType.Overseas )) ) val expected = Map( "fn" -> "John", "mn" -> "James", "ln" -> "Smith", "applicationType" -> "ordinary", "pfn" -> "James", "pmn" -> "John", "pln" -> "Smith", "dob" -> "1980-12-01", "nino" -> "XX 12 34 56 D", "nat" -> "GB, IE", "oadr" -> "none", "regproperty" -> "The (fake) Manor House", "regstreet" -> "123 Fake Street", "reglocality" -> "North Fake", "regtown" -> "Fakerton", "regarea" 
-> "Fakesbury", "reguprn" -> "12345", "regpostcode" -> "XX123 4XX", "pproperty" -> "The (fake) Cottage", "pstreet" -> "321 Fake Street", "plocality" -> "South Fake", "ptown" -> "Fakererly", "parea" -> "Fakesborough", "puprn" -> "54321", "ppostcode" -> "XX342 1XX", "pvote" -> "false", "opnreg" -> "false", "post" -> "true", "email" -> "test@email.com", "phone" -> "01234 5678910", "refNum" -> "12345678910", "ip" -> "256.256.256.256", "gssCode" -> "E09000007", "pgssCode" -> "E09000032", "lastcategory" -> "overseas", "timeTaken" -> "1234", "lang" -> "en", "webHash" -> "860da84c-74df-45b0-8ff8-d2d16ef8367a" ) val apiMap = application.toApiMap apiMap should matchMap(expected) } it should "generate the expected payload when application submitted in Welsh" in { lazy val application = createOrdinaryApplication.copy(language = "cy") val expected = Map( "fn" -> "John", "mn" -> "James", "ln" -> "Smith", "applicationType" -> "ordinary", "pfn" -> "James", "pmn" -> "John", "pln" -> "Smith", "dob" -> "1980-12-01", "nino" -> "XX 12 34 56 D", "nat" -> "GB, IE", "oadr" -> "none", "regproperty" -> "The (fake) Manor House", "regstreet" -> "123 Fake Street", "reglocality" -> "North Fake", "regtown" -> "Fakerton", "regarea" -> "Fakesbury", "reguprn" -> "12345", "regpostcode" -> "XX123 4XX", "pproperty" -> "The (fake) Cottage", "pstreet" -> "321 Fake Street", "plocality" -> "South Fake", "ptown" -> "Fakererly", "parea" -> "Fakesborough", "puprn" -> "54321", "ppostcode" -> "XX342 1XX", "pvote" -> "false", "opnreg" -> "false", "post" -> "true", "email" -> "test@email.com", "phone" -> "01234 5678910", "refNum" -> "12345678910", "ip" -> "256.256.256.256", "gssCode" -> "E09000007", "pgssCode" -> "E09000032", "timeTaken" -> "1234", "lang" -> "cy", "webHash" -> "860da84c-74df-45b0-8ff8-d2d16ef8367a" ) val apiMap = application.toApiMap apiMap should matchMap(expected) } it should "generate expected payload with stripped special characters and trailing spaces" in { lazy val application = 
createOrdinaryApplication.copy( name = Some(Name( firstName = "Chars:<>|", middleNames = Some(" Trailing spaces\\t \\t"), lastName = " Tabs are here" )), nino = Some(Nino( nino = Some("\\tAB\\t123\\t456\\t"), noNinoReason = None )) ) val expected = Map( "fn" -> "Chars:", "mn" -> "Trailing spaces", "ln" -> "Tabs are here", "applicationType" -> "ordinary", "pfn" -> "James", "pmn" -> "John", "pln" -> "Smith", "dob" -> "1980-12-01", "nino" -> "AB 123 456", "nat" -> "GB, IE", "oadr" -> "none", "regproperty" -> "The (fake) Manor House", "regstreet" -> "123 Fake Street", "reglocality" -> "North Fake", "regtown" -> "Fakerton", "regarea" -> "Fakesbury", "reguprn" -> "12345", "regpostcode" -> "XX123 4XX", "pproperty" -> "The (fake) Cottage", "pstreet" -> "321 Fake Street", "plocality" -> "South Fake", "ptown" -> "Fakererly", "parea" -> "Fakesborough", "puprn" -> "54321", "ppostcode" -> "XX342 1XX", "pvote" -> "false", "opnreg" -> "false", "post" -> "true", "email" -> "test@email.com", "phone" -> "01234 5678910", "refNum" -> "12345678910", "ip" -> "256.256.256.256", "gssCode" -> "E09000007", "pgssCode" -> "E09000032", "timeTaken" -> "1234", "lang" -> "en", "webHash" -> "860da84c-74df-45b0-8ff8-d2d16ef8367a" ) application.toApiMap should matchMap(expected) } it should "generate expected payload with postal vote and email delivery" in { lazy val application = createOrdinaryApplication.copy( postalVote = Some(PostalVote( postalVoteOption = Some(PostalVoteOption.Yes), deliveryMethod = Some(PostalVoteDeliveryMethod( deliveryMethod = Some("email"), emailAddress = Some("test@email.com") )) )) ) val expected = ordinaryApplicationPayload ++ Map( "pvote" -> "true", "pvoteemail" -> "test@email.com" ) application.toApiMap should matchMap(expected) } it should "generate expected payload with postal vote and post delivery" in { lazy val application = createOrdinaryApplication.copy( postalVote = Some(PostalVote( postalVoteOption = Some(PostalVoteOption.Yes), deliveryMethod = 
Some(PostalVoteDeliveryMethod( deliveryMethod = Some("post"), emailAddress = None )) )) ) val expected = ordinaryApplicationPayload + ("pvote" -> "true") application.toApiMap should matchMap(expected) } it should "generate expected payload with no postal vote - prefer in person" in { lazy val application = createOrdinaryApplication.copy( postalVote = Some(PostalVote( postalVoteOption = Some(PostalVoteOption.NoAndVoteInPerson), deliveryMethod = None )) ) val expected = ordinaryApplicationPayload + ("pvote" -> "false") application.toApiMap should matchMap(expected) } it should "generate expected payload with no postal vote - already has one" in { lazy val application = createOrdinaryApplication.copy( postalVote = Some(PostalVote( postalVoteOption = Some(PostalVoteOption.NoAndAlreadyHave), deliveryMethod = None )) ) val expected = ordinaryApplicationPayload + ("pvote" -> "true") application.toApiMap should matchMap(expected) } private val ordinaryApplicationPayload = Map( "fn" -> "John", "mn" -> "James", "ln" -> "Smith", "applicationType" -> "ordinary", "pfn" -> "James", "pmn" -> "John", "pln" -> "Smith", "dob" -> "1980-12-01", "nino" -> "XX 12 34 56 D", "nat" -> "GB, IE", "oadr" -> "none", "regproperty" -> "The (fake) Manor House", "regstreet" -> "123 Fake Street", "reglocality" -> "North Fake", "regtown" -> "Fakerton", "regarea" -> "Fakesbury", "reguprn" -> "12345", "regpostcode" -> "XX123 4XX", "pproperty" -> "The (fake) Cottage", "pstreet" -> "321 Fake Street", "plocality" -> "South Fake", "ptown" -> "Fakererly", "parea" -> "Fakesborough", "puprn" -> "54321", "ppostcode" -> "XX342 1XX", "pvote" -> "false", "opnreg" -> "false", "post" -> "true", "email" -> "test@email.com", "phone" -> "01234 5678910", "refNum" -> "12345678910", "ip" -> "256.256.256.256", "gssCode" -> "E09000007", "pgssCode" -> "E09000032", "timeTaken" -> "1234", "lang" -> "en", "webHash" -> "860da84c-74df-45b0-8ff8-d2d16ef8367a" ) private def createOrdinaryApplication = OrdinaryApplication( name = 
Some(Name( firstName = "John", middleNames = Some("James"), lastName = "Smith")), previousName = Some(PreviousName( hasPreviousName = true, hasPreviousNameOption = "true", previousName = Some(Name( firstName = "James", middleNames = Some("John"), lastName = "Smith" )) )), lastRegisteredToVote = None, dob = Some(DateOfBirth( dob = Some(DOB( year = 1980, month = 12, day = 1 )), noDob = None )), nationality = Some(IsoNationality( countryIsos = List("GB", "IE"), noNationalityReason = None )), nino = Some(Nino( nino = Some("XX 12 34 56 D"), noNinoReason = None )), address = Some(Address( lineOne = Some("The (fake) Manor House"), lineTwo = Some("123 Fake Street"), lineThree = Some("North Fake"), city = Some("Fakerton"), county = Some("Fakesbury"), postcode = "XX12 34XX", uprn = Some("12345"), gssCode = Some("E09000007") )), previousAddress = Some(Address( lineOne = Some("The (fake) Cottage"), lineTwo = Some("321 Fake Street"), lineThree = Some("South Fake"), city = Some("Fakererly"), county = Some("Fakesborough"), postcode = "XX34 21XX", uprn = Some("54321"), gssCode = Some("E09000032") )), otherAddress = Some(OtherAddress( otherAddressOption = OtherAddress.NoOtherAddress )), openRegisterOptin = Some(false), postalVote = Some(PostalVote( postalVoteOption = Some(PostalVoteOption.NoAndVoteInPerson), deliveryMethod = None )), contact = Some(Contact( post = true, email = Some(ContactDetail( contactMe = true, detail = Some("test@email.com") )), phone = Some(ContactDetail( contactMe = true, detail = Some("01234 5678910") )) )), referenceNumber = Some("12345678910"), ip = Some("256.256.256.256"), timeTaken = "1234", language = "en", sessionId = "860da84c-74df-45b0-8ff8-d2d16ef8367a" ) }
michaeldfallen/ier-frontend
test/uk/gov/gds/ier/model/OrdinaryApplicationTests.scala
Scala
mit
11,765
package org.webant.worker.test

import java.io.File
import java.lang.reflect.{Field, InvocationTargetException}
import java.text.ParseException
import java.util.Date
import javax.xml.bind.{JAXBContext, JAXBException}

import com.google.gson.GsonBuilder
import org.apache.commons.beanutils.{BeanUtils, PropertyUtils}
import org.apache.commons.io.FileUtils
import org.apache.commons.lang3.StringUtils
import org.junit.{After, Before, Test}
import org.scalatest.junit.AssertionsForJUnit
import org.webant.commons.entity.SiteConfig
import org.webant.worker.config.{SiteConfigBuilder, WorkerConfig}

import scala.beans.BeanProperty

/**
 * Ad-hoc JUnit tests for worker-side utilities: link-table DDL generation,
 * worker/site configuration loading (XML and JSON), string re-encoding,
 * URL regex matching, and JavaBean reflection via commons-beanutils.
 *
 * NOTE(review): most of these tests only print to stdout and make no
 * assertions — they verify "does not throw" rather than behavior.
 */
class WorkerTest extends AssertionsForJUnit {

  @Before
  def init(): Unit = {
  }

  @After
  def exit() {
  }

  // Prints the CREATE TABLE statement for the `link` crawl-frontier table,
  // including the secondary indexes. Output is for manual inspection only.
  @Test
  def printSql(): Unit = {
    val table = "link"
    val sql = s"CREATE TABLE IF NOT EXISTS `$table` (" +
      " `id` varchar(64) NOT NULL," +
      " `taskId` varchar(64) DEFAULT NULL," +
      " `siteId` varchar(64) DEFAULT NULL," +
      " `nodeId` varchar(64) DEFAULT NULL," +
      " `url` varchar(1024) DEFAULT NULL," +
      " `body` text DEFAULT NULL," +
      " `referer` varchar(1024) DEFAULT NULL," +
      " `priority` smallint(255) DEFAULT NULL," +
      " `lastCrawlTime` datetime DEFAULT NULL," +
      " `status` varchar(32) DEFAULT NULL," +
      " `dataVersion` int(11) DEFAULT NULL," +
      " `dataCreateTime` datetime DEFAULT NULL," +
      " `dataUpdateTime` datetime DEFAULT NULL," +
      " `dataDeleteTime` datetime DEFAULT NULL," +
      " PRIMARY KEY (`id`)," +
      s" KEY `idx_${table}_taskId` (`taskId`)," +
      s" KEY `idx_${table}_siteId` (`siteId`)," +
      s" KEY `idx_${table}_priority` (`priority`)," +
      s" KEY `idx_${table}_status` (`status`)," +
      s" KEY `idx_${table}_dataCreateTime` (`dataCreateTime`)," +
      s" KEY `idx_${table}_dataUpdateTime` (`dataUpdateTime`)" +
      ")"
    println(sql)
  }

  // Smoke test: loading the worker.xml configuration must not throw.
  @Test
  def testWorkerConfig(): Unit = {
    val path = "worker.xml"
    WorkerConfig(path)
  }

  // Loads a JSON site config and marshals it back out as XML via JAXB,
  // printing the result to stdout.
  @Test
  def testConfigToXml(): Unit = {
    val configPath = ClassLoader.getSystemResource("site/mahua.json").getPath
    val config = new SiteConfigBuilder().loadSiteConfig(configPath).build()
    try {
      val context = JAXBContext.newInstance(classOf[SiteConfig])
      val marshaller = context.createMarshaller
      marshaller.marshal(config, System.out)
    } catch {
      case e: JAXBException => e.printStackTrace()
    }
    // println(config.toString)
  }

  // Verifies the worker.xml resource exists and is a regular file.
  // NOTE(review): the first require checks `path` after it was already used
  // to resolve the resource — it can never fail here.
  @Test
  def testXml(): Unit = {
    val path = "worker.xml"
    val configPath = ClassLoader.getSystemResource(path)
    require(StringUtils.isNotBlank(path), "worker config path can not be empty.")
    val file = new File(configPath.getPath)
    require(file.exists(), "worker config does not exists.")
    require(file.isFile, "worker config can not be a directory.")
    // println(xml.toString())
  }

  // Re-decodes a string that was read with the wrong charset
  // (ISO-8859-1 bytes reinterpreted as UTF-8) and prints the result.
  @Test
  def testEncode(): Unit = {
    val content = "\\u76f4\\u64ad\\u8fbe\\u4eba"
    val newStr = new String(content.getBytes("iso8859-1"), "UTF-8")
    println(newStr)
  }

  // Checks that a toutiao.com article URL matches the crawl-scope regex.
  @Test
  def testRegex(): Unit = {
    val regex = "http://www.toutiao.com/\\w*\\d*/"
    val url = "http://www.toutiao.com/6432977727803884034/"
    println(url.matches(regex))
  }

  // Deserializes a site config from JSON with Gson; silently skipped when the
  // resource is absent.
  @Test
  def testFromJson(): Unit = {
    val configPath = ClassLoader.getSystemResource("mahua_site.json")
    if (configPath == null) return
    val config = FileUtils.readFileToString(new File(configPath.getPath), "UTF-8")
    val builder = new GsonBuilder
    // builder.registerTypeAdapter(classOf[BaseWorkerProcessor], new NewsProviderInstanceCreator(0))
    val gson = builder.create()
    val siteConfig = gson.fromJson(config, classOf[SiteConfig])
    println(siteConfig.id)
  }

  // Iterates over the declared fields of a Person bean and prints every
  // property that commons-beanutils reports as both readable and writeable.
  @Test
  @throws[InstantiationException]
  @throws[IllegalAccessException]
  @throws[ParseException]
  @throws[InvocationTargetException]
  @throws[NoSuchMethodException]
  def testBeanUtils() {
    val person: Person = new Person
    person.name = "name1"
    person.age = 20
    person.birthday = new Date
    val fields: Array[Field] = person.getClass.getDeclaredFields
    fields.foreach(field => {
      val name: String = field.getName
      if (PropertyUtils.isReadable(person, name) && PropertyUtils.isWriteable(person, name)) {
        System.out.println(name + " : " + BeanUtils.getProperty(person, name))
      }
    })
  }

  // Simple JavaBean fixture; @BeanProperty generates the getters/setters
  // that PropertyUtils/BeanUtils discover above.
  class Person {
    @BeanProperty
    var name: String = _
    @BeanProperty
    var age: Int = 0
    @BeanProperty
    var birthday: Date = _
  }
}
sutine/webant
webant-worker/src/test/scala/org/webant/worker/test/WorkerTest.scala
Scala
apache-2.0
4,562
/*
 * Copyright 2018 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package controllers

import java.util.UUID

import akka.stream.Materializer
import org.scalatest.BeforeAndAfterAll
import play.api.Play
import play.api.mvc.Result
import play.api.test.Helpers._
import play.api.test.{FakeApplication, FakeRequest}
import uk.gov.hmrc.play.test.{UnitSpec, WithFakeApplication}

import scala.concurrent.Future
import play.api.libs.concurrent.Execution.Implicits._
import service._
import connector._
import play.api.test._
import play.api.mvc._

/**
 * Tests for [[ErrorHandler]]: the standard error template plus the pages
 * rendered for client errors (400/404/other) and server errors (500).
 *
 * NOTE(review): several imports (UUID, Materializer, Play, ...) appear
 * unused here — candidates for cleanup in a dedicated change.
 */
class ErrorHandlerSpec extends test.BaseSpec {

  "Error Handler" should {

    // The template interpolates title, heading and message verbatim.
    "show error page" in {
      val result = new ErrorHandler().standardErrorTemplate("test title", "test heading", "test body")(FakeRequest(GET, "/paac")).body
      result should include ("test title")
      result should include ("test heading")
      result should include ("test body")
    }

    // Non-special client status codes (here 408) echo the supplied message
    // and preserve the status.
    "show error page in event of client error" in {
      val result = new ErrorHandler().onClientError(FakeRequest(GET, "/paac"), 408, "special message")
      val clientErrorPage = contentAsString(await(result))
      clientErrorPage should include ("special message")
      status(result) shouldBe 408
    }

    // 404 gets a dedicated GOV.UK "Page not found" page; the message is not shown.
    "show 404 error page in event of client error" in {
      val result = new ErrorHandler().onClientError(FakeRequest(GET, "/paac"), 404, "special message")
      val clientErrorPage = contentAsString(await(result))
      clientErrorPage should include ("Page not found - 404 - GOV.UK")
      status(result) shouldBe 404
    }

    // 400 likewise gets a dedicated GOV.UK "Bad request" page.
    "show 400 error page in event of client error" in {
      val result = new ErrorHandler().onClientError(FakeRequest(GET, "/paac"), 400, "special message")
      val clientErrorPage = contentAsString(await(result))
      clientErrorPage should include ("Bad request - 400 - GOV.UK")
      status(result) shouldBe 400
    }

    // Server errors render the exception message into the page and return 500.
    "show error page and exception in event of server error" in {
      val result = new ErrorHandler().onServerError(FakeRequest(GET, "/paac"), new RuntimeException("This is a runtime exception"))
      val clientErrorPage = contentAsString(await(result))
      clientErrorPage should include ("This is a runtime exception")
      status(result) shouldBe 500
    }
  }
}
hmrc/paac-frontend
test/controllers/ErrorHandlerSpec.scala
Scala
apache-2.0
2,867
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest.examples.refspec.loanfixture

import java.util.concurrent.ConcurrentHashMap

// In-memory stand-in for a database server: each "database" is just a
// named StringBuffer kept in a concurrent map.
object DbServer { // Simulating a database server
  type Db = StringBuffer
  private val databases = new ConcurrentHashMap[String, Db]
  def createDb(name: String): Db = {
    val db = new StringBuffer
    databases.put(name, db)
    db
  }
  def removeDb(name: String) {
    databases.remove(name)
  }
}

import org.scalatest.refspec.RefSpec
import DbServer._
import java.util.UUID.randomUUID
import java.io._

/**
 * Documentation example for the "loan-fixture" pattern with RefSpec: each
 * with* method creates a fixture, loans it to the test body, and cleans up
 * in a finally block. Loan-fixture methods compose by nesting.
 */
class ExampleSpec extends RefSpec {

  // Loans a fresh, uniquely named database to the test; always removes it.
  def withDatabase(testCode: Db => Any) {
    val dbName = randomUUID.toString
    val db = createDb(dbName) // create the fixture
    try {
      db.append("ScalaTest is ") // perform setup
      testCode(db) // "loan" the fixture to the test
    }
    finally removeDb(dbName) // clean up the fixture
  }

  // Loans a temp file plus an open writer to the test; always closes the writer.
  def withFile(testCode: (File, FileWriter) => Any) {
    val file = File.createTempFile("hello", "world") // create the fixture
    val writer = new FileWriter(file)
    try {
      writer.write("ScalaTest is ") // set up the fixture
      testCode(file, writer) // "loan" the fixture to the test
    }
    finally writer.close() // clean up the fixture
  }

  object `Testing ` {
    // This test needs the file fixture
    def `should be productive` {
      withFile { (file, writer) =>
        writer.write("productive!")
        writer.flush()
        // "ScalaTest is " (13 chars) + "productive!" (11 chars) = 24 bytes
        assert(file.length === 24)
      }
    }
  }

  object `Test code` {
    // This test needs the database fixture
    def `should be readable` {
      withDatabase { db =>
        db.append("readable!")
        assert(db.toString === "ScalaTest is readable!")
      }
    }

    // This test needs both the file and the database
    def `should be clear and concise` {
      withDatabase { db =>
        withFile { (file, writer) => // loan-fixture methods compose
          db.append("clear!")
          writer.write("concise!")
          writer.flush()
          assert(db.toString === "ScalaTest is clear!")
          assert(file.length === 21)
        }
      }
    }
  }
}
scalatest/scalatest
examples/src/test/scala/org/scalatest/examples/refspec/loanfixture/ExampleSpec.scala
Scala
apache-2.0
2,693
// Typing test input ("good" case): polymorphic recursion.
// foo[T] calls foo[Pair[T,T]], so each recursive call instantiates the type
// parameter at a strictly larger type — a type checker must not try to unify
// the instantiations into a single monomorphic type.
class Pair[A,B] {}
class PolyRec {
  def foo[T](x:T) { foo[Pair[T,T]](new Pair[T,T]()) }
}
object Main {
  def main(args: Array[String]) {
  }
}
tobast/compil-petitscala
tests/typing/good/testfile-polyrec-1.scala
Scala
gpl-3.0
139
package ore.util

import java.io.IOException
import java.lang.Long.numberOfLeadingZeros
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

import com.typesafe.scalalogging

/**
 * File-system helpers: human-readable size formatting plus recursive
 * deletion/cleaning of directory trees. All delete operations log (at debug
 * level, with the implicit MDC context) instead of failing when a path has
 * already disappeared.
 */
object FileUtils {

  private val Logger = scalalogging.Logger("FileUtils")
  private val MDCLogger = scalalogging.Logger.takingImplicit[OreMDC](Logger.underlying)

  /**
   * Formats the number of bytes into a human readable file size
   * (e.g. 100.0 KB).
   *
   * Based on http://stackoverflow.com/a/24805871
   *
   * @param size The size in bytes
   * @return The formatted string
   */
  def formatFileSize(size: Long): String = {
    if (size < 1024) s"$size B"
    else {
      // z is the 1024-based magnitude bucket (1 = K, 2 = M, ...), computed
      // from the position of the highest set bit.
      val z = (63 - numberOfLeadingZeros(size)) / 10
      f"${size.toDouble / (1L << (z * 10))}%.1f ${" KMGTPE".charAt(z)}%sB"
    }
  }

  /**
   * Deletes the directory at the specified [[Path]] with all of its contents.
   *
   * @param dir The directory to delete
   */
  def deleteDirectory(dir: Path)(implicit mdc: OreMDC): Unit = {
    if (Files.exists(dir)) {
      Files.walkFileTree(dir, new DeleteFileVisitor)
      ()
    } else {
      MDCLogger.debug(s"Tried to remove directory that doesn't exist: $dir")
    }
  }

  /**
   * Deletes the contents of a directory without deleting the directory itself.
   *
   * @param dir The directory to clean
   */
  def cleanDirectory(dir: Path)(implicit mdc: OreMDC): Unit = {
    if (Files.exists(dir)) {
      Files.walkFileTree(dir, new CleanFileVisitor(dir))
      ()
    } else {
      MDCLogger.debug(s"Tried to clean directory that doesn't exist: $dir")
    }
  }

  /**
   * Represents a [[java.nio.file.FileVisitor]] which will recursively delete a directory
   * with all its contents: files are deleted on visit, directories after
   * their contents (postVisitDirectory).
   */
  private class DeleteFileVisitor(implicit mdc: OreMDC) extends SimpleFileVisitor[Path] {

    override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
      if (Files.exists(file)) {
        Files.delete(file)
      } else {
        MDCLogger.debug(s"Tried to remove file that doesn't exist: $file")
      }
      FileVisitResult.CONTINUE
    }

    override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
      if (Files.exists(dir)) {
        Files.delete(dir)
      } else {
        MDCLogger.debug(s"Tried to remove directory that doesn't exist: $dir")
      }
      FileVisitResult.CONTINUE
    }
  }

  /**
   * Similar to [[DeleteFileVisitor]], except that it will only clean the folder
   * contents. It will not delete the given [[dir]].
   *
   * @param dir The directory to clean
   */
  private class CleanFileVisitor(private val dir: Path)(implicit mdc: OreMDC)
      extends DeleteFileVisitor {

    override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
      // Skip the root itself so only its contents are removed.
      if (dir != this.dir) {
        super.postVisitDirectory(dir, exc)
      } else {
        FileVisitResult.CONTINUE
      }
    }
  }
}
SpongePowered/Ore
models/src/main/scala/ore/util/FileUtils.scala
Scala
mit
2,998
import java.io.{ FileOutputStream, ObjectOutputStream, FileInputStream, ObjectInputStream }
import net.liftweb.json.{ compact, render }

object Main {
  /**
   * Round-trips a `Dummy` through Java serialization: writes it to
   * "test.txt", reads it back via `CustomObjectInputStream` (which resolves
   * classes against this class's loader), and prints the object's JSON
   * representation to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Write phase. try/finally guarantees the streams are released even if
    // writeObject throws (the original leaked them on failure). Locals are
    // vals — nothing is ever reassigned.
    val fos = new FileOutputStream("test.txt")
    val oos = new ObjectOutputStream(fos)
    try {
      oos.writeObject(new Dummy)
    } finally {
      oos.close() // flushes and also closes the wrapped FileOutputStream
      fos.close() // close() is idempotent, so the extra call is harmless
    }

    // Read phase, guarded the same way.
    val fis = new FileInputStream("test.txt")
    val ois = new CustomObjectInputStream(fis, getClass().getClassLoader())
    try {
      val x = ois.readObject.asInstanceOf[Dummy]
      Console.println(compact(render(x.json)))
    } finally {
      ois.close()
      fis.close()
    }
  }
}
nightuser/scala-json-fail
src/main/scala/Main.scala
Scala
mit
617
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Expression, Generator}
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.{IntegerType, StructType}

/**
 * End-to-end tests for generator (table-generating) functions: stack,
 * explode/explode_outer, posexplode/posexplode_outer, inline/inline_outer,
 * and lateral-view usage with a custom empty generator.
 */
class GeneratorFunctionSuite extends QueryTest with SharedSQLContext {
  import testImplicits._

  test("stack") {
    val df = spark.range(1)

    // Empty DataFrame suppress the result generation
    checkAnswer(spark.emptyDataFrame.selectExpr("stack(1, 1, 2, 3)"), Nil)

    // Rows & columns
    checkAnswer(df.selectExpr("stack(1, 1, 2, 3)"), Row(1, 2, 3) :: Nil)
    checkAnswer(df.selectExpr("stack(2, 1, 2, 3)"), Row(1, 2) :: Row(3, null) :: Nil)
    checkAnswer(df.selectExpr("stack(3, 1, 2, 3)"), Row(1) :: Row(2) :: Row(3) :: Nil)
    checkAnswer(df.selectExpr("stack(4, 1, 2, 3)"), Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)

    // Various column types
    checkAnswer(df.selectExpr("stack(3, 1, 1.1, 'a', 2, 2.2, 'b', 3, 3.3, 'c')"),
      Row(1, 1.1, "a") :: Row(2, 2.2, "b") :: Row(3, 3.3, "c") :: Nil)

    // Repeat generation at every input row
    checkAnswer(spark.range(2).selectExpr("stack(2, 1, 2, 3)"),
      Row(1, 2) :: Row(3, null) :: Row(1, 2) :: Row(3, null) :: Nil)

    // The first argument must be a positive constant integer.
    val m = intercept[AnalysisException] {
      df.selectExpr("stack(1.1, 1, 2, 3)")
    }.getMessage
    assert(m.contains("The number of rows must be a positive constant integer."))
    val m2 = intercept[AnalysisException] {
      df.selectExpr("stack(-1, 1, 2, 3)")
    }.getMessage
    assert(m2.contains("The number of rows must be a positive constant integer."))

    // The data for the same column should have the same type.
    val m3 = intercept[AnalysisException] {
      df.selectExpr("stack(2, 1, '2.2')")
    }.getMessage
    assert(m3.contains("data type mismatch: Argument 1 (IntegerType) != Argument 2 (StringType)"))

    // stack on column data
    val df2 = Seq((2, 1, 2, 3)).toDF("n", "a", "b", "c")
    checkAnswer(df2.selectExpr("stack(2, a, b, c)"), Row(1, 2) :: Row(3, null) :: Nil)

    // A non-foldable column as the row count is rejected.
    val m4 = intercept[AnalysisException] {
      df2.selectExpr("stack(n, a, b, c)")
    }.getMessage
    assert(m4.contains("The number of rows must be a positive constant integer."))

    val df3 = Seq((2, 1, 2.0)).toDF("n", "a", "b")
    val m5 = intercept[AnalysisException] {
      df3.selectExpr("stack(2, a, b)")
    }.getMessage
    assert(m5.contains("data type mismatch: Argument 1 (IntegerType) != Argument 2 (DoubleType)"))
  }

  test("single explode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
    checkAnswer(
      df.select(explode('intList)),
      Row(1) :: Row(2) :: Row(3) :: Nil)
  }

  // explode_outer on a non-empty array behaves like explode; the empty-array
  // row contributes nothing here because no other columns are selected.
  test("single explode_outer") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
    checkAnswer(
      df.select(explode_outer('intList)),
      Row(1) :: Row(2) :: Row(3) :: Nil)
  }

  test("single posexplode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
    checkAnswer(
      df.select(posexplode('intList)),
      Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Nil)
  }

  test("single posexplode_outer") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")
    checkAnswer(
      df.select(posexplode_outer('intList)),
      Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Nil)
  }

  test("explode and other columns") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")

    checkAnswer(
      df.select($"a", explode('intList)),
      Row(1, 1) ::
      Row(1, 2) ::
      Row(1, 3) :: Nil)

    checkAnswer(
      df.select($"*", explode('intList)),
      Row(1, Seq(1, 2, 3), 1) ::
      Row(1, Seq(1, 2, 3), 2) ::
      Row(1, Seq(1, 2, 3), 3) :: Nil)
  }

  // With other columns selected, the empty-array row survives as a null.
  test("explode_outer and other columns") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")

    checkAnswer(
      df.select($"a", explode_outer('intList)),
      Row(1, 1) ::
      Row(1, 2) ::
      Row(1, 3) ::
      Row(2, null) ::
      Nil)

    checkAnswer(
      df.select($"*", explode_outer('intList)),
      Row(1, Seq(1, 2, 3), 1) ::
      Row(1, Seq(1, 2, 3), 2) ::
      Row(1, Seq(1, 2, 3), 3) ::
      Row(2, Seq(), null) ::
      Nil)
  }

  test("aliased explode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")

    checkAnswer(
      df.select(explode('intList).as('int)).select('int),
      Row(1) :: Row(2) :: Row(3) :: Nil)

    checkAnswer(
      df.select(explode('intList).as('int)).select(sum('int)),
      Row(6) :: Nil)
  }

  test("aliased explode_outer") {
    val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList")

    checkAnswer(
      df.select(explode_outer('intList).as('int)).select('int),
      Row(1) :: Row(2) :: Row(3) :: Nil)

    // NOTE(review): this uses explode (not explode_outer); the result is the
    // same for this input, but explode_outer was likely intended here.
    checkAnswer(
      df.select(explode('intList).as('int)).select(sum('int)),
      Row(6) :: Nil)
  }

  test("explode on map") {
    val df = Seq((1, Map("a" -> "b"))).toDF("a", "map")

    checkAnswer(
      df.select(explode('map)),
      Row("a", "b"))
  }

  test("explode_outer on map") {
    val df = Seq((1, Map("a" -> "b")), (2, Map[String, String]()),
      (3, Map("c" -> "d"))).toDF("a", "map")

    checkAnswer(
      df.select(explode_outer('map)),
      Row("a", "b") :: Row("c", "d") :: Nil)
  }

  test("explode on map with aliases") {
    val df = Seq((1, Map("a" -> "b"))).toDF("a", "map")

    checkAnswer(
      df.select(explode('map).as("key1" :: "value1" :: Nil)).select("key1", "value1"),
      Row("a", "b"))
  }

  test("explode_outer on map with aliases") {
    val df = Seq((3, None), (1, Some(Map("a" -> "b")))).toDF("a", "map")

    checkAnswer(
      df.select(explode_outer('map).as("key1" :: "value1" :: Nil)).select("key1", "value1"),
      Row("a", "b") :: Nil)
  }

  test("self join explode") {
    val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList")
    val exploded = df.select(explode('intList).as('i))

    checkAnswer(
      exploded.join(exploded, exploded("i") === exploded("i")).agg(count("*")),
      Row(3) :: Nil)
  }

  test("inline raises exception on array of null type") {
    val m = intercept[AnalysisException] {
      spark.range(2).selectExpr("inline(array())")
    }.getMessage
    assert(m.contains("data type mismatch"))
  }

  test("inline with empty table") {
    checkAnswer(
      spark.range(0).selectExpr("inline(array(struct(10, 100)))"),
      Nil)
  }

  // The struct array is expanded once per input row (range(2) -> twice).
  test("inline on literal") {
    checkAnswer(
      spark.range(2).selectExpr("inline(array(struct(10, 100), struct(20, 200), struct(30, 300)))"),
      Row(10, 100) :: Row(20, 200) :: Row(30, 300) ::
        Row(10, 100) :: Row(20, 200) :: Row(30, 300) :: Nil)
  }

  test("inline on column") {
    val df = Seq((1, 2)).toDF("a", "b")

    checkAnswer(
      df.selectExpr("inline(array(struct(a), struct(a)))"),
      Row(1) :: Row(1) :: Nil)

    checkAnswer(
      df.selectExpr("inline(array(struct(a, b), struct(a, b)))"),
      Row(1, 2) :: Row(1, 2) :: Nil)

    // Spark think [struct<a:int>, struct<b:int>] is heterogeneous due to name difference.
    val m = intercept[AnalysisException] {
      df.selectExpr("inline(array(struct(a), struct(b)))")
    }.getMessage
    assert(m.contains("data type mismatch"))

    checkAnswer(
      df.selectExpr("inline(array(struct(a), named_struct('a', b)))"),
      Row(1) :: Row(2) :: Nil)

    // Spark think [struct<a:int>, struct<col1:int>] is heterogeneous due to name difference.
    val m2 = intercept[AnalysisException] {
      df.selectExpr("inline(array(struct(a), struct(2)))")
    }.getMessage
    assert(m2.contains("data type mismatch"))

    checkAnswer(
      df.selectExpr("inline(array(struct(a), named_struct('a', 2)))"),
      Row(1) :: Row(2) :: Nil)

    checkAnswer(
      df.selectExpr("struct(a)").selectExpr("inline(array(*))"),
      Row(1) :: Nil)

    checkAnswer(
      df.selectExpr("array(struct(a), named_struct('a', b))").selectExpr("inline(*)"),
      Row(1) :: Row(2) :: Nil)
  }

  test("inline_outer") {
    val df = Seq((1, "2"), (3, "4"), (5, "6")).toDF("col1", "col2")
    // col1 == 1 is mapped to a null array; the other rows wrap their values
    // in a single-element struct array.
    val df2 = df.select(when('col1 === 1, null).otherwise(array(struct('col1, 'col2))).as("col1"))
    checkAnswer(
      df2.selectExpr("inline(col1)"),
      Row(3, "4") :: Row(5, "6") :: Nil
    )
    checkAnswer(
      df2.selectExpr("inline_outer(col1)"),
      Row(3, "4") :: Row(5, "6") :: Nil
    )
  }

  test("SPARK-14986: Outer lateral view with empty generate expression") {
    checkAnswer(
      sql("select nil from values 1 lateral view outer explode(array()) n as nil"),
      Row(null) :: Nil
    )
  }

  test("outer explode()") {
    checkAnswer(
      sql("select * from values 1, 2 lateral view outer explode(array()) a as b"),
      Row(1, null) :: Row(2, null) :: Nil)
  }

  test("outer generator()") {
    spark.sessionState.functionRegistry.registerFunction("empty_gen", _ => EmptyGenerator())
    checkAnswer(
      sql("select * from values 1, 2 lateral view outer empty_gen() a as b"),
      Row(1, null) :: Row(2, null) :: Nil)
  }
}

// A generator that always yields zero rows (single int column "id"); used by
// the "outer generator()" test above to exercise the outer lateral view path
// in both interpreted (eval) and codegen (doGenCode) modes.
case class EmptyGenerator() extends Generator {
  override def children: Seq[Expression] = Nil
  override def elementSchema: StructType = new StructType().add("id", IntegerType)
  override def eval(input: InternalRow): TraversableOnce[InternalRow] = Seq.empty
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val iteratorClass = classOf[Iterator[_]].getName
    // Generated Java simply binds the result to Scala's shared empty iterator.
    ev.copy(code = s"$iteratorClass<InternalRow> ${ev.value} = $iteratorClass$$.MODULE$$.empty();")
  }
}
JerryLead/spark
sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
Scala
apache-2.0
10,494
/*
 *  Dimension.scala
 *  (LucreMatrix)
 *
 *  Copyright (c) 2014-2017 Institute of Electronic Music and Acoustics, Graz.
 *  Copyright (c) 2014-2017 by Hanns Holger Rutz.
 *
 *	This software is published under the GNU Lesser General Public License v2.1+
 *
 *
 *	For further information, please contact Hanns Holger Rutz at
 *	contact@sciss.de
 */

package de.sciss.lucre
package matrix

import de.sciss.lucre.expr.{IntObj, StringObj}
import de.sciss.lucre.matrix.impl.{DimensionImpl => Impl}
import de.sciss.lucre.stm.Elem
import de.sciss.lucre.{event => evt}
import de.sciss.serial.{DataInput, Serializer}

/** Types describing a matrix dimension and ways of selecting one. */
object Dimension {
  /**
   * A `Selection` picks one dimension of a matrix, either by index
   * ([[Selection.Index]]) or by name ([[Selection.Name]]); [[Selection.Var]]
   * is a mutable cell holding a selection. All implementations live in
   * `DimensionImpl`; this object is the serialization entry point
   * (`Elem.Type`) for the whole family.
   */
  object Selection extends Elem.Type {
    // ---- Elem.Type ----

    final val typeID = 0x30003

    def readIdentifiedObj[S <: Sys[S]](in: DataInput, access: S#Acc)(implicit tx: S#Tx): Elem[S] =
      Impl.readIdentifiedSelection(in, access)

    // ----

    def read[S <: Sys[S]](in: DataInput, access: S#Acc)(implicit tx: S#Tx): Selection[S] =
      serializer[S].read(in, access)

    implicit def serializer[S <: Sys[S]]: Serializer[S#Tx, S#Acc, Selection[S]] = Impl.selSerializer[S]

    object Index {
      // Operator id distinguishing Index from Name during (de)serialization.
      final val opID = 0

      def apply[S <: Sys[S]](expr: IntObj[S])(implicit tx: S#Tx): Index[S] = Impl.applySelIndex(expr)
    }
    /** Selection of a dimension by its (expression-valued) index. */
    trait Index[S <: Sys[S]] extends Selection[S] with evt.Node[S] {
      def expr: IntObj[S]
    }

    object Name {
      final val opID = 1

      def apply[S <: Sys[S]](expr: StringObj[S])(implicit tx: S#Tx): Name[S] = Impl.applySelName(expr)
    }
    /** Selection of a dimension by its (expression-valued) name. */
    trait Name[S <: Sys[S]] extends Selection[S] with evt.Node[S] {
      def expr: StringObj[S]
    }

    object Var {
      def apply[S <: Sys[S]](init: Selection[S])(implicit tx: S#Tx): Var[S] = Impl.applySelVar(init)

      implicit def serializer[S <: Sys[S]]: Serializer[S#Tx, S#Acc, Selection.Var[S]] =
        Impl.selVarSerializer[S]
    }
    /** A mutable variable holding a `Selection`. */
    trait Var[S <: Sys[S]] extends Selection[S] with matrix.Var[S, Selection[S]]

    /** Update event fired when a selection (variable) changes. */
    case class Update[S <: Sys[S]](selection: Selection[S])
  }
  sealed trait Selection[S <: Sys[S]] extends Elem[S] with evt.Publisher[S, Selection.Update[S]] {
    final def tpe: Elem.Type = Selection
  }

  /** Immutable dimension value: the dimension's name and its extent (number of cells). */
  case class Value(name: String, size: Int)
}
iem-projects/LucreMatrix
core/src/main/scala/de/sciss/lucre/matrix/Dimension.scala
Scala
lgpl-2.1
2,216
package org.http4s
package twirl

import org.http4s.headers.`Content-Type`
import org.http4s.MediaType
import _root_.play.twirl.api._

/**
  * `EntityEncoder` instances for the Twirl template result types
  * (`Html`, `JavaScript`, `Xml`, `Txt`). Each instance renders the template
  * body as a string and sets the matching `Content-Type` header, using the
  * implicitly supplied charset (defaulting to `DefaultCharset`).
  */
trait TwirlInstances {
  implicit def htmlContentEncoder[F[_]](
      implicit charset: Charset = DefaultCharset): EntityEncoder[F, Html] =
    contentEncoder(MediaType.text.html)

  /**
   * Note: Twirl uses a media type of `text/javascript`.  This is obsolete, so we instead return
   * `MediaType.application.javascript` (i.e. `application/javascript`).
   */
  implicit def jsContentEncoder[F[_]](
      implicit charset: Charset = DefaultCharset): EntityEncoder[F, JavaScript] =
    contentEncoder(MediaType.application.javascript)

  implicit def xmlContentEncoder[F[_]](
      implicit charset: Charset = DefaultCharset): EntityEncoder[F, Xml] =
    contentEncoder(MediaType.application.xml)

  implicit def txtContentEncoder[F[_]](
      implicit charset: Charset = DefaultCharset): EntityEncoder[F, Txt] =
    contentEncoder(MediaType.text.plain)

  // Common implementation: encode Content#body as a string entity and tag it
  // with the given media type plus the implicit charset.
  private def contentEncoder[F[_], C <: Content](mediaType: MediaType)(
      implicit charset: Charset): EntityEncoder[F, C] =
    EntityEncoder
      .stringEncoder[F]
      .contramap[C](content => content.body)
      .withContentType(`Content-Type`(mediaType, charset))
}
aeons/http4s
twirl/src/main/scala/org/http4s/twirl/TwirlInstances.scala
Scala
apache-2.0
1,252
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js sbt plugin        **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */

package org.scalajs.testadapter

import sbt.testing._

import org.scalajs.core.tools.json._

import FingerprintSerializers._
import SelectorSerializers._

import language.implicitConversions

/** JSON deserialization of sbt test-interface [[sbt.testing.Event]]s received from the JS side. */
private[testadapter] object EventSerializers {

  implicit object EventDeserializer extends JSONDeserializer[Event] {
    // Adapts an Option[Throwable] to sbt's OptionalThrowable wrapper
    // (None -> the empty OptionalThrowable).
    private implicit def optT2optT(x: Option[Throwable]): OptionalThrowable =
      x.fold(new OptionalThrowable)(t => new OptionalThrowable(t))

    def deserialize(x: JSON): Event = {
      val obj = new JSONObjExtractor(x)
      new DeserializedEvent(
          obj.fld[String]("fullyQualifiedName"),
          obj.fld[Fingerprint]("fingerprint"),
          obj.fld[Selector]("selector"),
          Status.valueOf(obj.fld[String]("status")),
          obj.opt[RemoteException]("throwable"),
          // The 64-bit duration arrives split into two 32-bit halves:
          // "durationMS" holds the most-significant bits, "durationLS" the
          // least-significant ones (masked to avoid sign extension) —
          // presumably because the transport cannot carry a full Long.
          (obj.fld[Int]("durationMS").toLong << 32) |
          (obj.fld[Int]("durationLS").toLong & 0xffffffffL))
    }
  }

  // Plain data carrier implementing sbt's Event interface.
  final class DeserializedEvent(
      val fullyQualifiedName: String,
      val fingerprint: Fingerprint,
      val selector: Selector,
      val status: Status,
      val throwable: OptionalThrowable,
      val duration: Long
  ) extends Event
}
colinrgodsey/scala-js
test-adapter/src/main/scala/org/scalajs/sbttestadapter/EventSerializers.scala
Scala
bsd-3-clause
1,707
package mot.impl

import mot.Address

/**
 * Connection placeholder identified only by the remote address and the local
 * party name — presumably used before an actual connection to
 * `remoteAddress` has been established (TODO confirm against callers).
 */
case class ProspectiveConnection(remoteAddress: Address, localName: String) extends Connection {
  // The remote party has not identified itself yet, so its name is empty.
  def remoteName = ""
  // NOTE(review): returns null — no local socket is bound yet. Callers must
  // guard against this; an Option would be safer if the Connection trait allowed it.
  def localAddress = null
}
marianobarrios/mot
src/main/scala/mot/impl/ProspectiveConnection.scala
Scala
bsd-2-clause
184
import sbt._
import sbt.Keys._

// Legacy sbt 0.13-style build definition (Build.scala with Project.Initialize
// and the `<<=` operator). Kept in the old DSL deliberately; do not modernize
// without also bumping the sbt version.

/** Common build coordinates and cross-compilation settings. */
object BuildSettings {
  val buildVersion = "0.11.0-SNAPSHOT"

  val buildSettings = Defaults.defaultSettings ++ Seq(
    organization := "org.reactivemongo",
    version := buildVersion,
    scalaVersion := "2.11.2",
    crossScalaVersions := Seq("2.11.2", "2.10.4"),
    crossVersion := CrossVersion.binary
  ) ++ Publish.settings
}

/** Sonatype publishing configuration (snapshots vs. staging repo chosen by version suffix). */
object Publish {
  def targetRepository: Project.Initialize[Option[sbt.Resolver]] = version { (version: String) =>
    val nexus = "https://oss.sonatype.org/"
    // SNAPSHOT versions go to the snapshots repo; everything else to staging.
    if (version.trim.endsWith("SNAPSHOT")) Some("snapshots" at nexus + "content/repositories/snapshots")
    else Some("releases" at nexus + "service/local/staging/deploy/maven2")
  }

  lazy val settings = Seq(
    publishMavenStyle := true,
    publishTo <<= targetRepository,
    publishArtifact in Test := false,
    pomIncludeRepository := { _ => false },
    licenses := Seq("Apache 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")),
    homepage := Some(url("http://reactivemongo.org")),
    pomExtra := (
      <scm>
        <url>git://github.com/zenexity/Play-ReactiveMongo.git</url>
        <connection>scm:git://github.com/zenexity/Play-ReactiveMongo.git</connection>
      </scm>
      <developers>
        <developer>
          <id>sgodbillon</id>
          <name>Stephane Godbillon</name>
          <url>http://stephane.godbillon.com</url>
        </developer>
        <developer>
          <id>pauldijou</id>
          <name>Paul Dijou</name>
          <url>http://pauldijou.com</url>
        </developer>
      </developers>)
  )
}

/** Root project wiring resolvers and dependencies together. */
object ProjectBuild extends Build {
  import BuildSettings._

  lazy val reactivemongo = Project(
    "ReactiveMongo-Play-Validation",
    file("."),
    settings = buildSettings ++ Seq(
      resolvers := Seq(
        "Sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/",
        "Sonatype" at "https://oss.sonatype.org/content/groups/public/",
        "Typesafe repository releases" at "https://repo.typesafe.com/typesafe/releases/",
        "Typesafe repository snapshots" at "https://repo.typesafe.com/typesafe/snapshots/",
        "JTO snapshots" at "https://raw.github.com/jto/mvn-repo/master/snapshots"
      ),
      libraryDependencies ++= Seq(
        "org.reactivemongo" %% "play2-reactivemongo" % "0.11.0-SNAPSHOT",
        "com.typesafe.play" %% "play" % "2.3.0" % "provided" cross CrossVersion.binary,
        "io.github.jto" %% "validation-core" % "1.0-1c770f4" cross CrossVersion.binary,
        "com.typesafe.play" %% "play-test" % "2.3.0" % "test" cross CrossVersion.binary,
        "org.specs2" % "specs2" % "2.3.12" % "test" cross CrossVersion.binary,
        "junit" % "junit" % "4.8" % "test" cross CrossVersion.Disabled,
        "org.apache.logging.log4j" % "log4j-to-slf4j" % "2.0-beta9"
      )
    )
  )
}
pauldijou/ReactiveMongo-Play-Validation
project/Build.scala
Scala
apache-2.0
2,859
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.deploy.k8s

import io.fabric8.kubernetes.api.model.{LocalObjectReferenceBuilder, PodBuilder}

import org.apache.spark.{SPARK_VERSION, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit._
import org.apache.spark.resource.ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID

/**
 * Tests for KubernetesConf: resolution of labels, annotations, secrets,
 * env vars, node selectors and scheduler names for driver and executor pods.
 */
class KubernetesConfSuite extends SparkFunSuite {

  private val APP_ARGS = Array("arg1", "arg2")
  private val CUSTOM_NODE_SELECTOR = Map(
    "nodeSelectorKey1" -> "nodeSelectorValue1",
    "nodeSelectorKey2" -> "nodeSelectorValue2")
  private val CUSTOM_DRIVER_NODE_SELECTOR = Map(
    "driverNodeSelectorKey1" -> "driverNodeSelectorValue1",
    "driverNodeSelectorKey2" -> "driverNodeSelectorValue2")
  private val CUSTOM_EXECUTOR_NODE_SELECTOR = Map(
    "execNodeSelectorKey1" -> "execNodeSelectorValue1",
    "execNodeSelectorKey2" -> "execNodeSelectorValue2")
  private val CUSTOM_LABELS = Map(
    "customLabel1Key" -> "customLabel1Value",
    "customLabel2Key" -> "customLabel2Value")
  private val CUSTOM_ANNOTATIONS = Map(
    "customAnnotation1Key" -> "customAnnotation1Value",
    "customAnnotation2Key" -> "customAnnotation2Value")
  private val SECRET_NAMES_TO_MOUNT_PATHS = Map(
    "secret1" -> "/mnt/secrets/secret1",
    "secret2" -> "/mnt/secrets/secret2")
  private val SECRET_ENV_VARS = Map(
    "envName1" -> "name1:key1",
    "envName2" -> "name2:key2")
  private val CUSTOM_ENVS = Map(
    "customEnvKey1" -> "customEnvValue1",
    "customEnvKey2" -> "customEnvValue2")
  private val DRIVER_POD = new PodBuilder().build()
  private val EXECUTOR_ID = "executor-id"
  // Keys 1 and 2 contain characters ('/', '*') that are invalid in env var
  // names and must be dropped by the conf; keys 3-5 are valid.
  private val EXECUTOR_ENV_VARS = Map(
    "spark.executorEnv.1executorEnvVars1/var1" -> "executorEnvVars1",
    "spark.executorEnv.executorEnvVars2*var2" -> "executorEnvVars2",
    "spark.executorEnv.executorEnvVars3_var3" -> "executorEnvVars3",
    "spark.executorEnv.executorEnvVars4-var4" -> "executorEnvVars4",
    "spark.executorEnv.executorEnvVars5-var5" -> "executorEnvVars5/var5")

  test("Resolve driver labels, annotations, secret mount paths, envs, and memory overhead") {
    val sparkConf = new SparkConf(false)
      .set(MEMORY_OVERHEAD_FACTOR, 0.3)
    CUSTOM_LABELS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$key", value)
    }
    CUSTOM_ANNOTATIONS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_DRIVER_ANNOTATION_PREFIX$key", value)
    }
    SECRET_NAMES_TO_MOUNT_PATHS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_DRIVER_SECRETS_PREFIX$key", value)
    }
    SECRET_ENV_VARS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_DRIVER_SECRET_KEY_REF_PREFIX$key", value)
    }
    CUSTOM_ENVS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_DRIVER_ENV_PREFIX$key", value)
    }

    val conf = KubernetesConf.createDriverConf(
      sparkConf,
      KubernetesTestConf.APP_ID,
      JavaMainAppResource(None),
      KubernetesTestConf.MAIN_CLASS,
      APP_ARGS,
      None)
    assert(conf.labels === Map(
      SPARK_VERSION_LABEL -> SPARK_VERSION,
      SPARK_APP_ID_LABEL -> KubernetesTestConf.APP_ID,
      SPARK_APP_NAME_LABEL -> KubernetesConf.getAppNameLabel(conf.appName),
      SPARK_ROLE_LABEL -> SPARK_POD_DRIVER_ROLE) ++
      CUSTOM_LABELS)
    assert(conf.annotations === CUSTOM_ANNOTATIONS)
    assert(conf.secretNamesToMountPaths === SECRET_NAMES_TO_MOUNT_PATHS)
    assert(conf.secretEnvNamesToKeyRefs === SECRET_ENV_VARS)
    assert(conf.environment === CUSTOM_ENVS)
    assert(conf.sparkConf.get(MEMORY_OVERHEAD_FACTOR) === 0.3)
  }

  test("Basic executor translated fields.") {
    val conf = KubernetesConf.createExecutorConf(
      new SparkConf(false),
      EXECUTOR_ID,
      KubernetesTestConf.APP_ID,
      Some(DRIVER_POD))
    assert(conf.executorId === EXECUTOR_ID)
    assert(conf.driverPod.get === DRIVER_POD)
    assert(conf.resourceProfileId === DEFAULT_RESOURCE_PROFILE_ID)
  }

  test("resource profile not default.") {
    val conf = KubernetesConf.createExecutorConf(
      new SparkConf(false),
      EXECUTOR_ID,
      KubernetesTestConf.APP_ID,
      Some(DRIVER_POD),
      10)
    assert(conf.resourceProfileId === 10)
  }

  test("Image pull secrets.") {
    // Trailing whitespace in secret names must be trimmed.
    val conf = KubernetesConf.createExecutorConf(
      new SparkConf(false)
        .set(IMAGE_PULL_SECRETS, Seq("my-secret-1", "my-secret-2 ")),
      EXECUTOR_ID,
      KubernetesTestConf.APP_ID,
      Some(DRIVER_POD))
    assert(conf.imagePullSecrets ===
      Seq(
        new LocalObjectReferenceBuilder().withName("my-secret-1").build(),
        new LocalObjectReferenceBuilder().withName("my-secret-2").build()))
  }

  test("Set executor labels, annotations, and secrets") {
    val sparkConf = new SparkConf(false)
    CUSTOM_LABELS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_EXECUTOR_LABEL_PREFIX$key", value)
    }
    CUSTOM_ANNOTATIONS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_EXECUTOR_ANNOTATION_PREFIX$key", value)
    }
    SECRET_ENV_VARS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_EXECUTOR_SECRET_KEY_REF_PREFIX$key", value)
    }
    SECRET_NAMES_TO_MOUNT_PATHS.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_EXECUTOR_SECRETS_PREFIX$key", value)
    }

    val conf = KubernetesConf.createExecutorConf(
      sparkConf,
      EXECUTOR_ID,
      KubernetesTestConf.APP_ID,
      Some(DRIVER_POD))
    assert(conf.labels === Map(
      SPARK_VERSION_LABEL -> SPARK_VERSION,
      SPARK_EXECUTOR_ID_LABEL -> EXECUTOR_ID,
      SPARK_APP_ID_LABEL -> KubernetesTestConf.APP_ID,
      SPARK_APP_NAME_LABEL -> KubernetesConf.getAppNameLabel(conf.appName),
      SPARK_ROLE_LABEL -> SPARK_POD_EXECUTOR_ROLE,
      SPARK_RESOURCE_PROFILE_ID_LABEL -> DEFAULT_RESOURCE_PROFILE_ID.toString) ++
      CUSTOM_LABELS)
    assert(conf.annotations === CUSTOM_ANNOTATIONS)
    assert(conf.secretNamesToMountPaths === SECRET_NAMES_TO_MOUNT_PATHS)
    assert(conf.secretEnvNamesToKeyRefs === SECRET_ENV_VARS)
  }

  test("Verify that executorEnv key conforms to the regular specification") {
    val sparkConf = new SparkConf(false)
    EXECUTOR_ENV_VARS.foreach { case (key, value) =>
      sparkConf.set(key, value)
    }

    val conf = KubernetesConf.createExecutorConf(
      sparkConf,
      EXECUTOR_ID,
      KubernetesTestConf.APP_ID,
      Some(DRIVER_POD))
    assert(conf.environment ===
      Map(
        "executorEnvVars3_var3" -> "executorEnvVars3",
        "executorEnvVars4-var4" -> "executorEnvVars4",
        "executorEnvVars5-var5" -> "executorEnvVars5/var5"))
  }

  test("SPARK-36075: Set nodeSelector, driverNodeSelector, executorNodeSelect") {
    val sparkConf = new SparkConf(false)
    CUSTOM_NODE_SELECTOR.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_NODE_SELECTOR_PREFIX$key", value)
    }
    CUSTOM_DRIVER_NODE_SELECTOR.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_DRIVER_NODE_SELECTOR_PREFIX$key", value)
    }
    CUSTOM_EXECUTOR_NODE_SELECTOR.foreach { case (key, value) =>
      sparkConf.set(s"$KUBERNETES_EXECUTOR_NODE_SELECTOR_PREFIX$key", value)
    }
    val execConf = KubernetesTestConf.createExecutorConf(sparkConf)
    assert(execConf.nodeSelector === CUSTOM_NODE_SELECTOR)
    assert(execConf.executorNodeSelector === CUSTOM_EXECUTOR_NODE_SELECTOR)
    val driverConf = KubernetesTestConf.createDriverConf(sparkConf)
    assert(driverConf.nodeSelector === CUSTOM_NODE_SELECTOR)
    assert(driverConf.driverNodeSelector === CUSTOM_DRIVER_NODE_SELECTOR)
  }

  test("SPARK-36059: Set driver.scheduler and executor.scheduler") {
    val sparkConf = new SparkConf(false)
    val execUnsetConf = KubernetesTestConf.createExecutorConf(sparkConf)
    // Fixed copy-paste bug: the driver conf must come from createDriverConf,
    // not createExecutorConf, or this assertion never exercises the driver path.
    val driverUnsetConf = KubernetesTestConf.createDriverConf(sparkConf)
    assert(execUnsetConf.schedulerName === "")
    assert(driverUnsetConf.schedulerName === "")

    sparkConf.set(KUBERNETES_DRIVER_SCHEDULER_NAME, "driverScheduler")
    sparkConf.set(KUBERNETES_EXECUTOR_SCHEDULER_NAME, "executorScheduler")
    val execConf = KubernetesTestConf.createExecutorConf(sparkConf)
    assert(execConf.schedulerName === "executorScheduler")
    val driverConf = KubernetesTestConf.createDriverConf(sparkConf)
    assert(driverConf.schedulerName === "driverScheduler")
  }

  test("SPARK-37735: access appId in KubernetesConf") {
    val sparkConf = new SparkConf(false)
    val driverConf = KubernetesTestConf.createDriverConf(sparkConf)
    val execConf = KubernetesTestConf.createExecutorConf(sparkConf)
    assert(driverConf.appId === KubernetesTestConf.APP_ID)
    assert(execConf.appId === KubernetesTestConf.APP_ID)
  }

  test("SPARK-36566: get app name label") {
    // Labels are lower-cased, invalid chars replaced, and truncated to 63 chars.
    assert(KubernetesConf.getAppNameLabel(" Job+Spark-Pi 2021") === "job-spark-pi-2021")
    assert(KubernetesConf.getAppNameLabel("a" * 63) === "a" * 63)
    assert(KubernetesConf.getAppNameLabel("a" * 64) === "a" * 63)
    assert(KubernetesConf.getAppNameLabel("a" * 253) === "a" * 63)
  }
}
shaneknapp/spark
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesConfSuite.scala
Scala
apache-2.0
9,898
/*
 * Copyright 2017-2020 Daniel Urban and contributors listed in AUTHORS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dev.tauri.seals
package macros

import java.util.UUID

import scala.reflect.macros.blackbox.Context

import cats.implicits._

/**
 * Macro backing a UUID string interpolator: validates the literal at
 * compile time and expands to a `new java.util.UUID(msb, lsb)` call,
 * so no parsing happens at runtime.
 */
object UUIDMacro extends StringInterpolatorHelper {

  def impl(c: Context)(): c.Expr[UUID] = {
    import c.universe._
    // Pull the literal string out of the interpolation site; aborts
    // compilation (via the helper) if it is not a constant literal.
    val s: String = extractLiteral(c)
    val u: UUID = try {
      UUID.fromString(s)
    } catch {
      case ex: IllegalArgumentException =>
        // Report the malformed UUID as a compile error at the call site.
        c.abort(c.enclosingPosition, show"not a valid UUID (${ex.getMessage})")
    }
    // Only accept variant 2, i.e., the RFC 4122 (IETF) bit layout.
    if (u.variant =!= 2) {
      c.abort(c.enclosingPosition, "not an RFC-4122 UUID (variant is not 2)")
    }
    val msb: Long = u.getMostSignificantBits
    val lsb: Long = u.getLeastSignificantBits
    // Emit the precomputed 64-bit halves; the UUID is rebuilt cheaply at runtime.
    c.Expr[UUID](q"new _root_.java.util.UUID(${msb}, ${lsb})")
  }
}
durban/seals
macros/src/main/scala/dev/tauri/seals/macros/UUIDMacro.scala
Scala
apache-2.0
1,392
/*
 * Copyright 2012 Comcast Cable Communications Management, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.comcast.money.core.logging

import org.mockito.Mockito._
import org.slf4j.Logger
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.OneInstancePerTest

/** Verifies that TraceLogging only forwards exceptions to the logger when enabled. */
class TraceLoggingSpec extends AnyWordSpec with Matchers with MockitoSugar with OneInstancePerTest {

  val mockLogger = mock[Logger]

  // Builds a TraceLogging instance wired to the mock logger with the
  // requested exception-logging flag.
  private def traceLoggingWith(logExceptions: Boolean): TraceLogging =
    new TraceLogging {
      override lazy val shouldLogExceptions: Boolean = logExceptions
      override val logger: Logger = mockLogger
    }

  "TraceLogging" should {
    "capture exceptions into a log" in {
      val thrown = mock[Throwable]

      traceLoggingWith(logExceptions = true).logException(thrown)

      verify(mockLogger).error("Tracing exception", thrown)
    }

    "not capture exceptions if log exceptions is not enabled" in {
      val thrown = mock[Throwable]

      traceLoggingWith(logExceptions = false).logException(thrown)

      verifyNoMoreInteractions(mockLogger)
    }
  }
}
Comcast/money
money-core/src/test/scala/com/comcast/money/core/logging/TraceLoggingSpec.scala
Scala
apache-2.0
1,756
package com.eugene.impatience_scala

// Nested package declarations: each inner package sees the types of its
// enclosing packages without imports (T2 can use T1, T3 can use both).
package com {
  class T1{}
  package eugene{
    class T2(t1:T1){}
    package impatience{
      class T3(t1:T1, t2:T2){}
    }
  }
}
package com.eugene.impatience{
  // class T4(t1:T1){} //can not find type T1
}

/**
  * "Scala for the Impatient" chapter 7 exercises.
  * Created by eugene on 16/3/4.
  */
object Chap7 {
  def main(args: Array[String]) {
    // Exercise 6: copy a java.util.HashMap into a Scala mutable map,
    // renaming both map types on import to avoid a name clash.
    import java.util.{HashMap => JavaHashMap}
    val javaMap = new JavaHashMap[Int, String]
    javaMap.put(1, "One")
    javaMap.put(2, "Two")
    javaMap.put(3, "Three")

    import collection.mutable.{HashMap => ScalaHashMap, Map => ScalaMap}
    val scalaMap = ScalaHashMap[Int, String]()
    javaMap.keySet().toArray.foreach { key =>
      scalaMap += (key.asInstanceOf[Int] -> javaMap.get(key))
    }
    println(scalaMap.mkString(" "))

    // Exercise 9: greet the user on stdout when the right password is
    // entered, otherwise complain on stderr.
    import java.lang.System._
    val password = Console.readLine()
    if (password.equals("secret")) out.println("Hello " + getProperty("user.name"))
    else err.println("password error")
  }
}
Ernestyj/ScalaStudy
src/com/eugene/impatience_scala/Chap7.scala
Scala
gpl-3.0
1,088
package org.denigma.nlp.annotator

import org.denigma.brat._

/**
 * Hard-coded brat annotation fixture: a small collection model (entity,
 * relation, event and attribute types) plus one annotated document, used
 * to exercise the visualizer.
 */
class TestBratModel {

  /** Builds the collection-level configuration (types, colors, roles). */
  protected def initColData(): ColData = {
    val entityTypes = List(
      new EntityType("Person", Array("Person", "Per"), "#7fa2ff", "darken")
    )
    val anaphoraRoles = List(
      new RelationRole("Anaphor", List("Person")),
      new RelationRole("Entity", List("Person"))
    )
    val relationTypes = List(
      new RelationType("Anaphora", List("Anaphora", "Ana"),"3,3", "purple", anaphoraRoles)
    )
    val eventRoles = List(
      new LabeledType("Victim", List("Victim", "Vict")),
      new LabeledType("Perpetrator", List("Perpetrator", "Perp"))
    )
    val eventTypes = List(
      new EventType("Assassination", List("Assassination", "Assas"), eventRoles, "lightgreen", "darken")
    )
    val attributeTypes = List(new EntityAttributeType("Notorious", Map("glyph"->"★")))
    new ColData(entityTypes, relationTypes, attributeTypes, eventTypes)
  }

  /** Builds one annotated sentence with entities, a relation and two events. */
  protected def initDocData(): DocData = {
    val text = "Ed O'Kelley was the man who shot the man who shot Jesse James."
    // Entity spans are (startOffset, endOffset) pairs into `text`.
    val entities = List(
      Entity("T1", "Person", List((0, 11))),
      Entity("T2", "Person", List((20, 23))),
      Entity("T3", "Person", List((37, 40))),
      Entity("T4", "Person", List((50, 61)))
    )
    val attributes = List(DocAttribute("A1", "Notorious", "T4"))
    val relations = List(Relation("R1", "Anaphora", "Anaphor", "T2", "Entity", "T1"))
    // Events reference trigger spans (T5/T6) declared below.
    val events = List(
      BratEvent("E1", "T5", List("Perpetrator"->"T3", "Victim"->"T4")),
      BratEvent("E2", "T6", List("Perpetrator"->"T2", "Victim"->"T3"))
    )
    val triggers = List(
      Entity("T5", "Assassination", List((45, 49))),
      Entity("T6", "Assassination", List((28, 32)))
    )
    new DocData(text, entities, attributes, relations, events, triggers)
  }

  val colData = initColData()
  val docData = initDocData()
}
antonkulaga/bio-nlp
annotator/js/src/main/scala/org.denigma/nlp/annotator/TestBratModel.scala
Scala
mpl-2.0
1,834
package codes.bytes.macros_intro.test

import codes.bytes.macros_intro.macros.hello

// `@hello` is a macro annotation; this object exists to verify that the
// macro expansion injects a `hello` member into the annotated object.
@hello
object Test extends App {
  // NOTE(review): `this.hello` only resolves if the macro adds the member
  // at expansion time — this line is the test.
  println(this.hello)
}
bwmcadams/scala-macros-intro-talk
implTest/src/main/scala/codes/bytes/macros_intro/test/AnnotationTest.scala
Scala
apache-2.0
141
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.scheduler.cluster

import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef}
import org.apache.spark.scheduler.ExecutorResourceInfo

/**
 * Grouping of data for an executor used by CoarseGrainedSchedulerBackend.
 *
 * @param executorEndpoint The RpcEndpointRef representing this executor
 * @param executorAddress The network address of this executor
 * @param executorHost The hostname that this executor is running on
 * @param freeCores  The current number of cores available for work on the executor
 * @param totalCores The total number of cores available to the executor
 * @param logUrlMap Map of log names to URLs where the executor's logs can be viewed
 * @param attributes Executor attributes (key/value pairs) reported at registration
 * @param resourcesInfo The information of the currently available resources on the executor
 */
private[cluster] class ExecutorData(
    val executorEndpoint: RpcEndpointRef,
    val executorAddress: RpcAddress,
    override val executorHost: String,
    // freeCores is the only mutable field: the backend decrements/increments
    // it as tasks are launched and finished.
    var freeCores: Int,
    override val totalCores: Int,
    override val logUrlMap: Map[String, String],
    override val attributes: Map[String, String],
    override val resourcesInfo: Map[String, ExecutorResourceInfo]
) extends ExecutorInfo(executorHost, totalCores, logUrlMap, attributes, resourcesInfo)
pgandhi999/spark
core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorData.scala
Scala
apache-2.0
1,965
package com.github.haoch.experimental

import akka.actor.{ActorSystem, Props}
import akka.io.IO
import spray.can.Http
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
import akka.actor.Actor
import spray.routing._
import spray.http._
import MediaTypes._

/** Boots an HTTP server on localhost:8080 backed by [[CEPServiceActor]]. */
object Bootstrap extends App {

  // we need an ActorSystem to host our application in
  implicit val system = ActorSystem("on-spray-can")

  // create and start our service actor
  val service = system.actorOf(Props[CEPServiceActor], "control-service")

  implicit val timeout = Timeout(5.seconds)
  // start a new HTTP server on port 8080 with our service actor as the handler
  IO(Http) ? Http.Bind(service, interface = "localhost", port = 8080)
}

// we don't implement our route structure directly in the service actor because
// we want to be able to test it independently, without having to spin up an actor
class CEPServiceActor extends Actor with CEPService {

  // the HttpService trait defines only one abstract member, which
  // connects the services environment to the enclosing actor or test
  def actorRefFactory = context

  // this actor only runs our route, but you could add
  // other things here, like request stream processing
  // or timeout handling
  def receive = runRoute(route)
}

// this trait defines our service behavior independently from the service actor
trait CEPService extends HttpService {

  // Bug fixes relative to the original:
  //  * the three path routes (and the inner put/delete pair) were stacked as
  //    bare block expressions, so only the last expression was ever part of
  //    the route — they must be combined with `~`;
  //  * spray's `path` directive takes a PathMatcher, not a Rails-style string:
  //    "/api/v1/queries/:id" could never match; use "api" / "v1" / "queries" / Segment.
  val route: Route =
    path("") {
      get {
        respondWithMediaType(`text/html`) { // XML is marshalled to `text/xml` by default, so we simply override here
          complete {
            <html>
              <body>
                <h1>Say hello to <i>spray-routing</i> on <i>spray-can</i> !</h1>
              </body>
            </html>
          }
        }
      }
    } ~
    path("api" / "v1" / "queries" / Segment) { id =>
      put {
        respondWithMediaType(`application/json`) {
          complete { ??? } // TODO: update query `id`
        }
      } ~
      delete {
        respondWithMediaType(`application/json`) {
          complete { ??? } // TODO: delete query `id`
        }
      }
    } ~
    path("api" / "v1" / "queries") {
      get {
        respondWithMediaType(`application/json`) {
          complete { ??? } // TODO: list queries
        }
      } ~
      post {
        respondWithMediaType(`application/json`) {
          complete { ??? } // TODO: create query
        }
      }
    }
}
haoch/flink-siddhi
experimental/src/main/scala/com/github/haoch/experimental/CEPService.scala
Scala
apache-2.0
2,453
import sbt._
import Keys._

/** sbt settings that preload the argonaut imports into the `console` REPL. */
object ReplSettings {
  type Sett = Def.Setting[_]

  // Commands run automatically when the REPL starts.
  lazy val all = Seq[Sett](
    initialCommands := """
                         |import argonaut._
                         |import Argonaut._
                       """.stripMargin
  )
}
jedws/argonaut
project/ReplSettings.scala
Scala
bsd-3-clause
268
package debop4s.core.utils

/**
 * Encodes a `Long` value as a base 64 string.
 * Well suited for compact cache keys.
 */
object Base64Long {

  // Standard base 64 alphabet ('+' and '/'), indexed by 6-bit digit value.
  private[this] lazy val standardBase64Alphabet: (Int) => Char = Array[Char](
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
    'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
    'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
  )

  /** The bit width of a base 64 digit. */
  private[this] val digitWidth: Int = 6

  /** Mask for the least-significant digit. */
  private[this] val digitMask: Int = (1 << digitWidth) - 1

  /** The amount to shift right for the first base 64 digit in a Long (64 = 10*6 + 4 high bits). */
  private[this] val startingBitPosition: Int = 60

  /** Enable re-use of the StringBuilder for toBase64(Long): String */
  private[this] val threadLocalBuilder = new ThreadLocal[StringBuilder] {
    override def initialValue = new StringBuilder
  }

  /** Convert this Long to a base 64 String, using the standard base 64 alphabet. */
  def toBase64(n: Long): String = {
    val builder = threadLocalBuilder.get()
    builder.clear()
    setBase64(builder, n)
    builder.toString()
  }

  /**
   * Appends the base 64 encoding of a `Long` value to a `StringBuilder`.
   *
   * The Base64 encoding uses the standard Base64 alphabet (with '+' and '/'). It does not pad the
   * result. The representation is just like base 10 or base 16, where leading zero digits are
   * omitted.
   *
   * The number is treated as unsigned, so there is never a leading negative sign, and the
   * representations of negative numbers are larger than positive numbers.
   */
  private def setBase64(builder: StringBuilder,
                        n: Long,
                        alphabet: Int => Char = standardBase64Alphabet): Unit = {
    if (n == 0) {
      // Zero would otherwise produce an empty string; emit a single zero digit.
      builder append alphabet(0)
    } else {
      // Skip leading zero digits: find the highest digit position that is non-zero.
      var bitPosition = startingBitPosition
      while ((n >>> bitPosition) == 0) {
        bitPosition -= digitWidth
      }
      // Copy in the 6-bit segments, one at a time.
      while (bitPosition >= 0) {
        val shifted = n >>> bitPosition
        val digitValue = (shifted & digitMask).toInt
        builder.append(alphabet(digitValue))
        bitPosition -= digitWidth
      }
    }
  }
}
debop/debop4s
debop4s-core/src/main/scala/debop4s/core/utils/Base64Long.scala
Scala
apache-2.0
2,492
package pl.touk.nussknacker.engine.api.namespaces

import com.typesafe.config.Config

/**
 * Strategy for mapping between logical object names and their physical
 * (e.g. namespaced) counterparts, driven by configuration and a NamingContext.
 */
trait ObjectNaming extends Serializable {
  // Maps a logical name to the physical name to use for the given context.
  def prepareName(originalName: String, config: Config, namingContext: NamingContext): String

  // Returns the parameters used to derive the prepared name, if any;
  // presumably None when no renaming applies — confirm with implementations.
  def objectNamingParameters(originalName: String, config: Config, namingContext: NamingContext): Option[ObjectNamingParameters]

  // Inverse of prepareName: recovers the logical name from a physical one,
  // or None when the physical name does not match this naming scheme.
  def decodeName(preparedName: String, config: Config, namingContext: NamingContext): Option[String]
}

trait ObjectNamingParameters {
  /**
    * This function is used in [[pl.touk.nussknacker.engine.process.runner.FlinkProcessMain FlinkProcessMain]] to pass
    * to the [[pl.touk.nussknacker.engine.flink.api.NkGlobalParameters NkGlobalParameters]] tags that are to be used when
    * producing metrics in [[pl.touk.nussknacker.engine.util.metrics.MetricsProviderForScenario]]. It may be changed in the future.
    */
  def toTags: Map[String, String]
}
TouK/nussknacker
components-api/src/main/scala/pl/touk/nussknacker/engine/api/namespaces/ObjectNaming.scala
Scala
apache-2.0
903
package com.haskforce.utils

import java.io.File

import com.haskforce.highlighting.annotation.external.GhcModUtil
import com.haskforce.settings.HaskellBuildSettings
import com.haskforce.utils.ExecUtil.ExecError
import com.intellij.execution.ExecutionException
import com.intellij.execution.configurations.GeneralCommandLine
import com.intellij.execution.process.CapturingProcessHandler
import com.intellij.openapi.project.Project

/**
 * Helper class to execute cabal based on compiler settings.
 */
object CabalExecutor {

  /** Builds an executor from the project's configured cabal path. */
  def create(project: Project, workDir: Option[String]): Either[CabalExecutorError, CabalExecutor] =
    create(HaskellBuildSettings.getInstance(project).getCabalPath, workDir)

  /** Validates the cabal binary path, then builds an executor bound to it. */
  def create(cabalPath: String, workDir: Option[String]): Either[CabalExecutorError, CabalExecutor] = {
    if (cabalPath.isEmpty) {
      Left(NotConfigured)
    } else if (!new File(cabalPath).canExecute) {
      Left(NotExecutable)
    } else {
      Right(create { () =>
        val commandLine = new GeneralCommandLine(cabalPath)
        workDir.foreach(commandLine.setWorkDirectory)
        commandLine
      })
    }
  }

  def create(factory: () => GeneralCommandLine): CabalExecutor = CabalExecutor(factory)

  sealed trait CabalExecutorError
  object NotConfigured extends CabalExecutorError
  object NotExecutable extends CabalExecutorError
}

/** Runs cabal commands through command lines produced by `factory`. */
case class CabalExecutor private (factory: () => GeneralCommandLine) {

  def getNumericVersion: Either[ExecError, String] = rawExec("--numeric-version")

  def rawCommandLine(): GeneralCommandLine = factory()

  /** A fresh command line with the given extra arguments appended. */
  def rawCommandLine(args: String*): GeneralCommandLine = {
    val cmd = rawCommandLine()
    cmd.addParameters(args: _*)
    cmd
  }

  def rawExec(args: String*): Either[ExecError, String] =
    ExecUtil.readCommandLine(rawCommandLine(args: _*))

  @throws(classOf[ExecutionException])
  def init(project: Project, args: Seq[String]): String = {
    val commandLine = initPreEnv(args)
    // TODO: We need to patch the PATH since `cabal init` doesn't support --with-ghc
    GhcModUtil.updateEnvironment(project, commandLine.getEnvironment)
    initPostEnv(commandLine)
  }

  @throws(classOf[ExecutionException])
  def init(ghcPath: String, args: Seq[String]): String = {
    val commandLine = initPreEnv(args)
    // TODO: We need to patch the PATH since `cabal init` doesn't support --with-ghc
    GhcModUtil.updateEnvironment(commandLine.getEnvironment, ghcPath)
    initPostEnv(commandLine)
  }

  private def initPreEnv(args: Seq[String]) = rawCommandLine("init" +: args: _*)

  /** Runs the prepared `cabal init`, yielding stdout or raising on nonzero exit. */
  private def initPostEnv(commandLine: GeneralCommandLine): String = {
    val output = new CapturingProcessHandler(commandLine).runProcess
    if (output.getExitCode == 0) output.getStdout
    else throw new ExecutionException(output.getStderr)
  }
}
carymrobbins/intellij-haskforce
src/com/haskforce/utils/CabalExecutor.scala
Scala
apache-2.0
2,804
/*
Copyright 2015 Twitter, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.twitter.scalding.db

/** SQL column type tags used when generating table schemas. */
sealed trait SqlType
case object BIGINT extends SqlType
case object INT extends SqlType
case object SMALLINT extends SqlType
case object TINYINT extends SqlType
case object BOOLEAN extends SqlType
case object VARCHAR extends SqlType
case object DATE extends SqlType
case object DATETIME extends SqlType
case object TEXT extends SqlType
case object BLOB extends SqlType
case object DOUBLE extends SqlType

object IsNullable {
  /** Maps a boolean nullability flag to the corresponding SQL keyword wrapper. */
  def apply(isNullable: Boolean): IsNullable = if (isNullable) Nullable else NotNullable
}
/** Nullability marker carrying its SQL rendering (`NULL` / `NOT NULL`). */
sealed abstract class IsNullable(val toStr: String)
case object Nullable extends IsNullable("NULL")
case object NotNullable extends IsNullable("NOT NULL")

/**
 * Mixin providing one helper per SQL type for building ColumnDefinitions.
 * All helpers share the same shape; they delegate to a single private factory.
 */
trait ColumnDefiner {

  // Single factory backing every typed helper below; keeps the ten helpers
  // from repeating the ColumnDefinition construction.
  private def column(sqlType: SqlType)(
    name: String,
    nullable: IsNullable,
    sizeOpt: Option[Int],
    defaultValue: Option[String]): ColumnDefinition =
    ColumnDefinition(sqlType, ColumnName(name), nullable, sizeOpt, defaultValue)

  // Some helper methods that we can use to generate column definitions
  protected def bigint(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(BIGINT)(name, nullable, sizeOpt, defaultValue)

  protected def int(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(INT)(name, nullable, sizeOpt, defaultValue)

  protected def smallint(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(SMALLINT)(name, nullable, sizeOpt, defaultValue)

  protected def tinyint(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(TINYINT)(name, nullable, sizeOpt, defaultValue)

  protected def boolean(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(BOOLEAN)(name, nullable, sizeOpt, defaultValue)

  protected def varchar(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(VARCHAR)(name, nullable, sizeOpt, defaultValue)

  protected def date(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(DATE)(name, nullable, sizeOpt, defaultValue)

  protected def datetime(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(DATETIME)(name, nullable, sizeOpt, defaultValue)

  protected def text(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(TEXT)(name, nullable, sizeOpt, defaultValue)

  protected def double(
      name: String,
      nullable: IsNullable = NotNullable,
      sizeOpt: Option[Int] = None,
      defaultValue: Option[String] = None) =
    column(DOUBLE)(name, nullable, sizeOpt, defaultValue)
}
twitter/scalding
scalding-db/src/main/scala/com/twitter/scalding/db/ColumnDefiner.scala
Scala
apache-2.0
3,859
/* * Copyright 2011 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.scrooge import com.twitter.scrooge.ast.Document import com.twitter.scrooge.backend.GeneratorFactory import com.twitter.scrooge.backend.ScalaGenerator import com.twitter.scrooge.frontend.FileParseException import com.twitter.scrooge.frontend.Importer import com.twitter.scrooge.frontend.NullImporter import com.twitter.scrooge.frontend.ThriftParser import com.twitter.scrooge.frontend.TypeResolver import com.twitter.scrooge.java_generator.ApacheJavaGenerator import java.io.File import java.io.FileWriter import scala.collection.concurrent.TrieMap object CompilerDefaults { val language: String = "scala" val defaultNamespace: String = "thrift" } class Compiler(val config: ScroogeConfig) { var fileMapWriter: scala.Option[FileWriter] = None def run(): Unit = { // if --gen-file-map is specified, prepare the map file. 
fileMapWriter = config.fileMapPath.map { path => val file = new File(path) val dir = file.getParentFile if (dir != null && !dir.exists()) { dir.mkdirs() } if (config.verbose) { println("+ Writing file mapping to %s".format(path)) } new FileWriter(file) } val importer = { val rootImporter = if (config.addRootDirImporter) Importer(new File(".")) else NullImporter rootImporter +: Importer(config.includePaths.toSeq) } val isJava = config.language.equals("java") val documentCache = new TrieMap[String, Document] // compile for (inputFile <- config.thriftFiles) { try { val parser = new ThriftParser( importer, config.strict, defaultOptional = isJava, skipIncludes = false, documentCache ) val doc = parser.parseFile(inputFile).mapNamespaces(config.namespaceMappings) if (config.verbose) println("+ Compiling %s".format(inputFile)) val resolvedDoc = TypeResolver()(doc, Some(inputFile)) val generator = GeneratorFactory( config.language, resolvedDoc, config.defaultNamespace, config.languageFlags) generator match { case g: ScalaGenerator => g.warnOnJavaNamespaceFallback = config.scalaWarnOnJavaNSFallback case g: ApacheJavaGenerator => g.serEnumType = config.javaSerEnumType case _ => () } val generatedFiles = generator( config.flags, new File(config.destFolder), config.dryRun, config.genAdapt ).map { _.getPath } if (config.verbose) { println("+ Generated %s".format(generatedFiles.mkString(", "))) } fileMapWriter.foreach { w => generatedFiles.foreach { path => w.write(inputFile + " -> " + path + "\\n") } } } catch { case e: Throwable => throw new FileParseException(inputFile, e) } } // flush and close the map file fileMapWriter.foreach { _.close() } } }
twitter/scrooge
scrooge-generator/src/main/scala/com/twitter/scrooge/Compiler.scala
Scala
apache-2.0
3,611
/* * Created on 2010/08/07 * Copyright (c) 2010-2012, Wei-ju Wu. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of Wei-ju Wu nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package org.zmpp.zcode.swing import javax.swing._ import javax.swing.text.MutableAttributeSet import java.awt.{Dimension,Font, Color} import java.io.{FileInputStream, FileOutputStream} import org.zmpp.zcode._ sealed trait V6WindowState { } class V6TextGrid extends V6WindowState { } class V6TextWindow extends V6WindowState { } class V6GraphicsWindow extends V6WindowState { } class V6Window { private var currentState: V6WindowState = null def setCurrentState(state: V6WindowState) { currentState = state } } /* * Implementation of the V6 screen model using Swing components. * The V6 screen model is pixel-based, so rendering is entirely done * in a custom way, making this much more difficult and more restrictive. */ class SwingScreenModelV6 extends JComponent with OutputStream with InputStream with SwingScreenModel { var vm: Machine = null val windows = Array.ofDim[V6Window](8) (0 until 7).foreach{ i => windows(i) = new V6Window } private var selected = true val fixedFont = new Font("Courier New", Font.PLAIN, 14) setPreferredSize(new Dimension(640, 480)) def getComponent = this def capabilities = List(SupportsColors, SupportsBoldFont, SupportsItalicFont, SupportsFixedFont, SupportsTimedInput, SupportsSound, SupportsPictures, SupportsScreenSplit, SupportsMouse, SupportsMenus) def activeWindow = { throw new UnsupportedOperationException("Not supported yet") } // OutputStream def isSelected = selected def select(flag: Boolean) = selected = flag def putChar(c: Char) { //println("@put_char: '%c'".format(c)) } def flush { } def flushInterruptOutput { } def cancelInput { } // InputStream def readLine: Int = { println("@read_line (TODO)") 0 } // ScreenModel def initUI { // now the top window "knows" how large the screen is, so we can set // the dimensions and font sizes to the VM val g = getGraphics val fm = g.getFontMetrics(fixedFont) vm.setFontSizeInUnits(fm.charWidth('0'), fm.getHeight) vm.setScreenSizeInUnits(getWidth, getHeight) println("Screen size (units): " + 
vm.screenSizeInUnits) println("Font size (units): " + vm.fontSizeInUnits) } def connect(aVm: Machine) { vm = aVm } def setColour(foreground: Int, background: Int, window: Int) { printf("@set_colour %d %d %d (TODO)\\n", foreground, background, window) } def setFont(font: Int): Int = { printf("@set_font %d (TODO)\\n", font) 1 } def eraseLine(value: Int) { printf("@erase_line %d not implemented yet (TODO)\\n", value) } def setTextStyle(aStyle: Int) { printf("@set_text_style %d (TODO)\\n", aStyle) } def eraseWindow(windowId: Int) { printf("@erase_window %d (TODO)\\n", windowId) windowId match { case -1 => println("reset screen") case -2 => println("clear window, no unsplit") case _ => println("clear selected window") } } def setWindow(windowId: Int) { printf("@set_window %d (TODO)\\n", windowId) } def splitWindow(lines: Int) { printf("@split_window (%d units) (TODO)\\n", lines) } def cursorPosition: (Int, Int) = { throw new UnsupportedOperationException("getCursorPosition() not yet implemented in screen model") } def setCursorPosition(line: Int, column: Int) { printf("@set_cursor %d %d (TODO)\\n", line, column) } def updateStatusLine { } def screenOutputStream = this def keyboardStream = this def bufferMode(flag: Int) { printf("@buffer_mode %d (TODO)\\n", flag) } // SwingStreamModel def readChar { } def requestSaveFile { val fileChooser = new JFileChooser fileChooser.setDialogTitle("Save Game As...") val outputStream = if (fileChooser.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) { new FileOutputStream(fileChooser.getSelectedFile) } else null vm.resumeWithSaveStream(outputStream) ExecutionControl.executeTurn(vm, this) } def requestRestoreFile { val fileChooser = new JFileChooser fileChooser.setDialogTitle("Restore Game From...") val inputStream = if (fileChooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { new FileInputStream(fileChooser.getSelectedFile) } else null vm.resumeWithRestoreStream(inputStream) ExecutionControl.executeTurn(vm, this) } // 
empty for now def styleCharacter(c: Char): Int = 0 def setTransparentAttributeSet(attrs: MutableAttributeSet) {} def setAttributeSet(attrs: MutableAttributeSet, styledChar: Int) {} def resumeWithCharInput(c: Int) {} def resumeWithLineInput(line: String) {} def attributeSetFor(attrs: MutableAttributeSet, style: Int): MutableAttributeSet = null def stdFont: Font = null def backgroundColor: Color = null }
logicmoo/zmpp2
zmpp-zcode/src/main/scala/org/zmpp/zcode/swing/SwingScreenModelV6.scala
Scala
bsd-3-clause
6,415
package controllers import io.flow.common.v0.models.UserReference import io.flow.dependency.v0.Client import io.flow.dependency.v0.models.Organization import io.flow.dependency.www.lib.{DependencyClientProvider, Section, UiData} import io.flow.play.controllers.IdentifiedCookie._ import io.flow.play.controllers._ import io.flow.play.util.AuthHeaders import io.flow.util.Config import play.api.i18n._ import play.api.mvc._ import scala.concurrent.duration.Duration import scala.concurrent.{Await, ExecutionContext, Future} class UserActionBuilder( val parser: BodyParser[AnyContent], onUnauthorized: RequestHeader => Result )( implicit val executionContext: ExecutionContext ) extends ActionBuilder[IdentifiedRequest, AnyContent] { def invokeBlock[A](request: Request[A], block: IdentifiedRequest[A] => Future[Result]): Future[Result] = request.session.get(UserKey) match { case None => Future.successful(onUnauthorized(request)) case Some(userId) => val auth = AuthHeaders.user(UserReference(id = userId)) block(new IdentifiedRequest(auth, request)) } } abstract class BaseController( config: Config, dependencyClientProvider: DependencyClientProvider )(implicit val ec: ExecutionContext) extends FlowController with I18nSupport { protected def onUnauthorized(requestHeader: RequestHeader): Result = Redirect(routes.LoginController.index(return_url = Some(requestHeader.path))).flashing("warning" -> "Please login") private val UserActionBuilder = new UserActionBuilder(controllerComponents.parsers.default, onUnauthorized = onUnauthorized) protected val User: UserActionBuilder = UserActionBuilder private[this] lazy val client = dependencyClientProvider.newClient(user = None, requestId = None) def section: Option[Section] def withOrganization[T]( request: IdentifiedRequest[T], key: String ) ( f: Organization => Future[Result] ) ( implicit ec: scala.concurrent.ExecutionContext ) = { dependencyClient(request).organizations.get(key = Some(key), limit = 1).flatMap { organizations => 
organizations.headOption match { case None => Future { Redirect(routes.ApplicationController.index()).flashing("warning" -> s"Organization not found") } case Some(org) => { f(org) } } } } def organizations[T]( request: IdentifiedRequest[T] ) ( implicit ec: scala.concurrent.ExecutionContext ): Future[Seq[Organization]] = { dependencyClient(request).organizations.get( userId = Some(request.user.id), limit = 100 ) } def uiData[T]( request: IdentifiedRequest[T] ) ( implicit ec: ExecutionContext ): UiData = { val user = Await.result( dependencyClient(request).users.get(id = Some(request.user.id)), Duration(1, "seconds") ).headOption UiData( requestPath = request.path, user = user, section = section, config = config ) } def uiData[T]( request: AnonymousRequest[T], userReferenceOption: Option[UserReference] ) ( implicit ec: ExecutionContext ): UiData = { val user = userReferenceOption.flatMap { ref => Await.result( client.users.get(id = Some(ref.id)), Duration(1, "seconds") ).headOption } UiData( requestPath = request.path, user = user, section = section, config = config ) } def dependencyClient[T](request: IdentifiedRequest[T]): Client = { dependencyClientProvider.newClient(user = Some(request.user), requestId = Some(request.auth.requestId)) } }
flowcommerce/dependency
www/app/controllers/BaseController.scala
Scala
mit
3,627
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.nodes.physical.stream import org.apache.flink.api.dag.Transformation import org.apache.flink.streaming.api.operators.KeyedProcessOperator import org.apache.flink.streaming.api.transformations.OneInputTransformation import org.apache.flink.table.api.TableException import org.apache.flink.table.dataformat.BaseRow import org.apache.flink.table.planner.calcite.FlinkTypeFactory import org.apache.flink.table.planner.codegen.EqualiserCodeGenerator import org.apache.flink.table.planner.codegen.sort.ComparatorCodeGenerator import org.apache.flink.table.planner.delegation.StreamPlanner import org.apache.flink.table.planner.plan.nodes.exec.{ExecNode, StreamExecNode} import org.apache.flink.table.planner.plan.rules.physical.stream.StreamExecRetractionRules import org.apache.flink.table.planner.plan.utils.RelExplainUtil._ import org.apache.flink.table.planner.plan.utils.{RelExplainUtil, SortUtil} import org.apache.flink.table.runtime.keyselector.NullBinaryRowKeySelector import org.apache.flink.table.runtime.operators.rank.{AppendOnlyTopNFunction, ConstantRankRange, RankType, RetractableTopNFunction} import org.apache.flink.table.runtime.typeutils.BaseRowTypeInfo 
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet} import org.apache.calcite.rel._ import org.apache.calcite.rel.core.Sort import org.apache.calcite.rex.RexNode import java.util import scala.collection.JavaConversions._ /** * Stream physical RelNode for [[Sort]]. * * This node will output `limit` records beginning with the first `offset` records without sort. */ class StreamExecLimit( cluster: RelOptCluster, traitSet: RelTraitSet, inputRel: RelNode, offset: RexNode, fetch: RexNode) extends Sort( cluster, traitSet, inputRel, RelCollations.EMPTY, offset, fetch) with StreamPhysicalRel with StreamExecNode[BaseRow] { private lazy val limitStart: Long = SortUtil.getLimitStart(offset) private lazy val limitEnd: Long = SortUtil.getLimitEnd(offset, fetch) override def producesUpdates: Boolean = false override def needsUpdatesAsRetraction(input: RelNode): Boolean = false override def consumesRetractions: Boolean = false override def producesRetractions: Boolean = false override def requireWatermark: Boolean = false override def copy( traitSet: RelTraitSet, newInput: RelNode, newCollation: RelCollation, offset: RexNode, fetch: RexNode): Sort = { new StreamExecLimit(cluster, traitSet, newInput, offset, fetch) } override def explainTerms(pw: RelWriter): RelWriter = { pw.input("input", getInput) .item("offset", limitStart) .item("fetch", RelExplainUtil.fetchToString(fetch)) } //~ ExecNode methods ----------------------------------------------------------- override def getInputNodes: util.List[ExecNode[StreamPlanner, _]] = { List(getInput.asInstanceOf[ExecNode[StreamPlanner, _]]) } override def replaceInputNode( ordinalInParent: Int, newInputNode: ExecNode[StreamPlanner, _]): Unit = { replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode]) } override protected def translateToPlanInternal( planner: StreamPlanner): Transformation[BaseRow] = { if (fetch == null) { throw new TableException( "FETCH is missed, which on streaming table is not supported currently.") } 
val inputRowTypeInfo = BaseRowTypeInfo.of( FlinkTypeFactory.toLogicalRowType(getInput.getRowType)) val generateRetraction = StreamExecRetractionRules.isAccRetract(this) val tableConfig = planner.getTableConfig val minIdleStateRetentionTime = tableConfig.getMinIdleStateRetentionTime val maxIdleStateRetentionTime = tableConfig.getMaxIdleStateRetentionTime // rankStart begin with 1 val rankRange = new ConstantRankRange(limitStart + 1, limitEnd) val rankType = RankType.ROW_NUMBER val outputRankNumber = false // Use TopNFunction underlying StreamExecLimit currently val sortKeySelector = NullBinaryRowKeySelector.INSTANCE val sortKeyComparator = ComparatorCodeGenerator.gen( tableConfig, "AlwaysEqualsComparator", Array(), Array(), Array(), Array()) val processFunction = if (generateRetraction) { val cacheSize = tableConfig.getConfiguration.getLong( StreamExecRank.TABLE_EXEC_TOPN_CACHE_SIZE) new AppendOnlyTopNFunction( minIdleStateRetentionTime, maxIdleStateRetentionTime, inputRowTypeInfo, sortKeyComparator, sortKeySelector, rankType, rankRange, generateRetraction, outputRankNumber, cacheSize) } else { val equaliserCodeGen = new EqualiserCodeGenerator(inputRowTypeInfo.getLogicalTypes) val generatedEqualiser = equaliserCodeGen.generateRecordEqualiser("LimitValueEqualiser") new RetractableTopNFunction( minIdleStateRetentionTime, maxIdleStateRetentionTime, inputRowTypeInfo, sortKeyComparator, sortKeySelector, rankType, rankRange, generatedEqualiser, generateRetraction, outputRankNumber) } val operator = new KeyedProcessOperator(processFunction) processFunction.setKeyContext(operator) val inputTransform = getInputNodes.get(0).translateToPlan(planner) .asInstanceOf[Transformation[BaseRow]] val outputRowTypeInfo = BaseRowTypeInfo.of( FlinkTypeFactory.toLogicalRowType(getRowType)) // as input node is singleton exchange, its parallelism is 1. 
val ret = new OneInputTransformation( inputTransform, getRelDetailedDescription, operator, outputRowTypeInfo, inputTransform.getParallelism) if (inputsContainSingleton()) { ret.setParallelism(1) ret.setMaxParallelism(1) } val selector = NullBinaryRowKeySelector.INSTANCE ret.setStateKeySelector(selector) ret.setStateKeyType(selector.getProducedType) ret } }
fhueske/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamExecLimit.scala
Scala
apache-2.0
6,822
package com.github.ellchow.scaramouch.util import scalaz._, Scalaz._ object Enumeration { def fromString[A <: Enumeration](enum: A)(s: String): Option[enum.Value] = enum.values.find(_.toString == s) def fromId[A <: Enumeration](enum: A)(id: Int): Option[enum.Value] = \/.fromTryCatchNonFatal(enum(id)).toOption }
ellchow/scaramouch
scaramouch-util/src/main/scala/com/github/ellchow/scaramouch/util/Enumeration.scala
Scala
apache-2.0
329
package com.yetu.oauth2resource.services.tokenvalidation case class ValidationTokenException(message: String, cause: Throwable = null) extends Exception case class TokenParseException(message: String, cause: Throwable = null) extends Exception case class TokenExpiredException(message: String, cause: Throwable = null) extends Exception case class InvalidAudienceException(message: String, cause: Throwable = null) extends Exception
yetu/oauth2-resource-server
app/com/yetu/oauth2resource/services/tokenvalidation/ValidationTokenException.scala
Scala
mit
438
package org.finra.datagenerator.scaffolding.dependency.service /** * Created by dkopel on 9/20/16. */ trait ObjectiveService { }
FINRAOS/DataGenerator
rubber-scaffolding/rubber-dependency/src/main/scala/org/finra/datagenerator/scaffolding/dependency/service/ObjectiveService.scala
Scala
apache-2.0
135
package Import.FileImport import java.io.FileNotFoundException import Import.{Player, Routing} import RestConnection.TeamRequest import akka.actor.SupervisorStrategy.{Directive, Escalate} import akka.actor._ /** * handles the file import and the line validation */ object FileImporter { val name = "importer" def props(sendTo: ActorRef) = Props(new FileImporter(sendTo)) trait ResponseResult case class PlayerList(players: List[Player]) extends ResponseResult case class ImportFile(l: List[String]) case class BrokenLines(broken: List[String]) extends ResponseResult } class FileImporter(sendTo: ActorRef) extends Actor with Routing with ImportSupervising { import FileImporter._ import CsvLineImporter._ var linesCount = 0 var players = List.empty[Player] var brokenLines = List.empty[String] val router = createRouter(context, 4, CsvLineImporter.props) //checks if all lines have been imported or errors were found def checkIfReady = { if (players.length + brokenLines.length == linesCount) if (brokenLines.isEmpty) sendTo ! PlayerList(players) else sendTo ! BrokenLines(brokenLines) } def receive = loadFile //state that handles the file import def loadFile: Receive = { case t@TeamRequest(_,_,_) => t.players foreach (line => router.route(InputLine(line), self)) linesCount = t.players.length context become validateFile } //state that handles the validation of the line def validateFile: Receive = { case p@Player(_,_,_,_,_,_) => players = p :: players checkIfReady case ErrorInLine(message) => brokenLines = message :: brokenLines checkIfReady } } trait ImportSupervising extends Actor { val decider: PartialFunction[Throwable, Directive] = { case _: FileNotFoundException => Escalate } override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy()(decider.orElse(SupervisorStrategy.defaultStrategy.decider)) }
yannick-cw/tournament_planer
hatplaner/src/main/scala/Import/FileImport/FileImporter.scala
Scala
mit
1,961
package tests import org.scalatest.FreeSpec class PassingTest extends FreeSpec { "test message" in { assert(main.Main.message == "Hello World!") } }
xuwei-k/xsbt
server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/PassingTest.scala
Scala
apache-2.0
158
package nl.iljabooij.garmintrainer import org.slf4j.{Logger, LoggerFactory} trait LoggerHelper { val loggerName = this.getClass.getName lazy val logger = LoggerFactory.getLogger(loggerName) def debug(s:String) = { if (logger.isDebugEnabled) logger.debug(s) } def error(s:String) = { if (logger.isErrorEnabled) logger.error(s) } def error(s:String, o: Any) = { if (logger.isErrorEnabled) logger.error(s, o) } }
chmandrade/garmintrainer
src/main/scala/nl/iljabooij/garmintrainer/LoggerHelper.scala
Scala
gpl-3.0
447
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.madewithtea.mockedstreams import java.time.Instant import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.common.serialization.Serde import org.apache.kafka.streams.StreamsConfig import org.apache.kafka.streams.kstream.{Materialized, TimeWindows} import org.apache.kafka.streams.processor.TimestampExtractor import org.apache.kafka.streams.scala.ImplicitConversions._ import org.apache.kafka.streams.scala.Serdes.{ Integer => intSerde, String => stringSerde } import org.apache.kafka.streams.scala.StreamsBuilder import org.apache.kafka.streams.scala.kstream.KTable import org.apache.kafka.streams.state.ValueAndTimestamp import org.scalatest.{FlatSpec, Matchers} import java.time.Duration import org.apache.kafka.streams.processor.ProcessorContext import com.madewithtea.mockedstreams.MockedStreams.TopologyNotSet class MockedStreamsSpec extends FlatSpec with Matchers { import CustomEquality._ behavior of "MockedStreams" it should "throw exception when inputs specified before topology" in { an[TopologyNotSet] should be thrownBy MockedStreams().input("input", stringSerde, stringSerde, Seq()) } it should "throw exception when outputs specified before topology" in { 
an[TopologyNotSet] should be thrownBy MockedStreams().output("output", stringSerde, stringSerde) } it should "throw exception when outputTable specified before topology" in { an[TopologyNotSet] should be thrownBy MockedStreams().outputTable("output", stringSerde, stringSerde) } it should "throw exception state store access before topology " in { an[TopologyNotSet] should be thrownBy MockedStreams().windowStateTable( "table", "key", Instant.now(), Instant.now().plusMillis(1) ) } it should "assert correctly when processing strings to uppercase" in { import Fixtures.Uppercase._ val output = MockedStreams() .topology(topology) .input(InputTopic, strings, strings, input) .output(OutputTopic, strings, strings) output shouldEqual expected } it should "assert correctly when processing strings to uppercase match against table" in { import Fixtures.Uppercase._ val output = MockedStreams() .topology(topology) .input(InputTopic, strings, strings, input) .outputTable(OutputTopic, strings, strings) output shouldEqual expected.toMap } it should "assert correctly when processing multi input topology" in { import Fixtures.Multi._ val builder = MockedStreams() .topology(topology1Output) .input(InputATopic, strings, ints, inputA) .input(InputBTopic, strings, ints, inputB) .stores(Seq(StoreName)) builder.output(OutputATopic, strings, ints) shouldEqual expectedA builder.stateTable(StoreName) shouldEqual inputA.toMap } it should "assert correctly when processing multi input output topology" in { import Fixtures.Multi._ val builder = MockedStreams() .topology(topology2Output) .input(InputATopic, strings, ints, inputA) .input(InputBTopic, strings, ints, inputB) .stores(Seq(StoreName)) builder .output(OutputATopic, strings, ints) .shouldEqual(expectedA) builder .output(OutputBTopic, strings, ints) .shouldEqual(expectedB) builder.stateTable(StoreName) shouldEqual inputA.toMap } it should "assert correctly when joining events sent to 2 Ktables in a specific order" in { import 
Fixtures.Multi._ val firstInputForTopicA = Seq(("x", 1), ("y", 2)) val firstInputForTopicB = Seq(("x", 4), ("y", 3), ("y", 5)) val secondInputForTopicA = Seq(("y", 4)) val expectedOutput = Seq(("x", 5), ("y", 5), ("y", 7), ("y", 9)) val builder = MockedStreams() .topology(topologyTables) .input(InputATopic, strings, ints, firstInputForTopicA) .input(InputBTopic, strings, ints, firstInputForTopicB) .input(InputATopic, strings, ints, secondInputForTopicA) builder .output(OutputATopic, strings, ints) .shouldEqual(expectedOutput) } it should "assert correctly when processing windowed state output topology" in { import java.util.Properties import Fixtures.Multi._ val props = new Properties props.put( StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, classOf[TimestampExtractors.CustomTimestampExtractor].getName ) val builder = MockedStreams() .topology(topology1WindowOutput) .input(InputCTopic, strings, ints, inputC) .stores(Seq(StoreName)) .config(props) builder .windowStateTable[String, Int](StoreName, "x") .shouldEqual(expectedCx.toMap) builder .windowStateTable[String, Int](StoreName, "y") .shouldEqual(expectedCy.toMap) builder .windowStateTable[String, Int]( StoreName, "y", Instant.ofEpochMilli(0L), Instant.ofEpochMilli(1L) ) .shouldEqual(expectedCy.toMap) builder .windowStateTable[String, Int]( StoreName, "x", Instant.ofEpochMilli(0L), Instant.ofEpochMilli(1L) ) .shouldEqual(expectedCx.toMap) } it should "accept already built topology" in { import Fixtures.Uppercase._ def getTopology = { val builder = new StreamsBuilder() topology(builder) builder.build() } val output = MockedStreams() .withTopology(() => getTopology) .input(InputTopic, strings, strings, input) .output(OutputTopic, strings, strings) output shouldEqual expected } it should "accept consumer records with custom timestamps" in { import Fixtures.Multi._ val builder = MockedStreams() .topology(topology1WindowOutput) .inputWithTime(InputCTopic, strings, ints, inputCWithTimeStamps) 
.stores(Seq(StoreName)) builder .windowStateTable[String, Int](StoreName, "x") .shouldEqual(expectedCWithTimeStamps.toMap)(valueAndTimestampEq[Int]) builder .windowStateTable[String, Long]( StoreName, "x", Instant.ofEpochMilli(1000L), Instant.ofEpochMilli(1002L) ) .shouldEqual(expectedCWithTimeStamps.toMap) } object Fixtures { object Operations { val lastAggregator = (_: String, v: Int, _: Int) => v val addJoiner = (v1: Int, v2: Int) => v1 + v2 val subJoiner = (v1: Int, v2: Int) => v1 - v2 } object Uppercase { val input = Seq(("x", "v1"), ("y", "v2")) val expected = Seq(("x", "V1"), ("y", "V2")) val strings: Serde[String] = stringSerde val InputTopic = "input" val OutputTopic = "output" def topology(builder: StreamsBuilder) = { builder .stream[String, String](InputTopic) .map((k, v) => (k, v.toUpperCase)) .to(OutputTopic) } } object Multi { val inputA = Seq(("x", 1), ("y", 2)) val inputB = Seq(("x", 4), ("y", 3)) val inputC = Seq(("x", 1), ("x", 1), ("x", 2), ("y", 1)) val inputCWithTimeStamps = Seq( ("x", 1, 1000L), ("x", 1, 1000L), ("x", 1, 1001L), ("x", 1, 1001L), ("x", 1, 1002L) ) val expectedA = Seq(("x", 5), ("y", 5)) val expectedB = Seq(("x", 3), ("y", 1)) val expectedCx = Seq( (1L, ValueAndTimestamp.make(2, 1L)), (2L, ValueAndTimestamp.make(1, 2L)) ) val expectedCy = Seq((1, 1)) val expectedCWithTimeStamps = Seq( 1000 -> 2, 1001 -> 2, 1002 -> 1 ) val strings: Serde[String] = stringSerde val ints: Serde[Int] = intSerde val InputATopic = "inputA" val InputBTopic = "inputB" val InputCTopic = "inputC" val OutputATopic = "outputA" val OutputBTopic = "outputB" val StoreName = "store" val Store2Name = "store2" def topology1Output(builder: StreamsBuilder) = { val streamA = builder.stream[String, Int](InputATopic) val streamB = builder.stream[String, Int](InputBTopic) val table = streamA.groupByKey .aggregate[Int](0)(Operations.lastAggregator)( Materialized .as(StoreName) .withKeySerde(strings) .withValueSerde(ints) ) streamB .leftJoin[Int, 
Int](table)(Operations.addJoiner) .to(OutputATopic) } def topology1WindowOutput(builder: StreamsBuilder) = { val streamA = builder.stream[String, Int](InputCTopic) streamA.groupByKey .windowedBy(TimeWindows.of(Duration.ofMillis(1))) .count()(Materialized.as(StoreName)) } def topology2Output(builder: StreamsBuilder) = { val streamA = builder.stream[String, Int](InputATopic) val streamB = builder.stream[String, Int](InputBTopic) val table = streamA.groupByKey .aggregate(0)(Operations.lastAggregator)( Materialized .as(StoreName) .withKeySerde(strings) .withValueSerde(ints) ) streamB .join(table)(Operations.addJoiner) .to(OutputATopic) streamB .leftJoin(table)(Operations.subJoiner) .to(OutputBTopic) } def topologyTables(builder: StreamsBuilder) = { val streamA = builder.stream[String, Int](InputATopic) val streamB = builder.stream[String, Int](InputBTopic) val tableA: KTable[String, Int] = streamA.groupByKey .aggregate[Int](0)(Operations.lastAggregator) val tableB: KTable[String, Int] = streamB.groupByKey .aggregate[Int](0)(Operations.lastAggregator) val resultTable: KTable[String, Int] = tableA.join[Int, Int](tableB)(Operations.addJoiner) resultTable.toStream .to(OutputATopic) } } } } object TimestampExtractors { class CustomTimestampExtractor extends TimestampExtractor { override def extract( record: ConsumerRecord[AnyRef, AnyRef], previous: Long ): Long = record.value match { case value: Integer => value.toLong case _ => record.timestamp() } } } object CustomEquality { import org.scalactic.Equality implicit def valueAndTimestampEq[A] : Equality[Map[java.lang.Long, ValueAndTimestamp[A]]] = new Equality[Map[java.lang.Long, ValueAndTimestamp[A]]] { override def areEqual( a: Map[java.lang.Long, ValueAndTimestamp[A]], b: Any ): Boolean = { true } } }
jpzk/mockedstreams
src/test/scala/com/madewithtea/mockedstreams/MockedStreamsSpec.scala
Scala
apache-2.0
11,434
class C { private val a = 0 def getA = a } class D(c: C) { def a = c.getA } object Test { implicit def c2d(c: C): D = new D(c) val c = new C (c: D).a // works c.a // error } // to fix this we'd need to check accessibility in the isMatchedBy of a SelectionProto, // so that we can insert an implicit if this does not work. Need to check performance impact of this.
yusuke2255/dotty
tests/pending/pos/t1071.scala
Scala
bsd-3-clause
382
package models.daos import scala.concurrent.Future import utils.misc._ trait ViewDAO { def get(id1: String, id2: String): Future[Option[StringWithIds]] def set(id1: String, id2: String, content: String): Future[Any] }
serversideapps/silhmojs
server/app/models/daos/ViewDAO.scala
Scala
apache-2.0
228
/* active-learning-scala: Active Learning library for Scala Copyright (c) 2014 Davi Pereira dos Santos This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package util /** * http://blog.adamdklein.com/?p=689 */ class Lazy[T](v: => T) { private var state: Option[T] = None def value: T = if (state.isDefined) state.get else { state = Some(v) state.get } def reset() { state = None } } object Lazy { import scala.language.implicitConversions def apply[T](v: => T) = new Lazy[T](v) implicit def unwrap[T](v: Lazy[T]): T = v.value }
machine-learning-scala/mls
src/main/scala/util/Lazy.scala
Scala
gpl-3.0
1,166
/* Copyright 2013 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.summingbird.planner import com.twitter.summingbird._ case class NodeIdentifier(identifier: String) { override def toString = identifier } sealed trait Node[P <: Platform[P]] { val members: List[Producer[P, _]] = List() def toSource: SourceNode[P] = SourceNode(this.members) def toSummer: SummerNode[P] = SummerNode(this.members) def contains(p: Producer[P, _]): Boolean = members.contains(p) def getNameFallback: String = getClass.getName.replaceFirst("com.twitter.summingbird.storm.", "") def getName(dag: Dag[P]): String = dag.getNodeName(this) def collapseNamedNodes(sanitize: String => String): String = { val membersCombined = members.reverse.collect { case NamedProducer(_, n) => sanitize(n) }.mkString(",") if (membersCombined.size > 0) "|" + membersCombined + "|" else "" } def shortName(sanitize: String => String): NodeIdentifier def add(node: Producer[P, _]): Node[P] def reverse: Node[P] def toStringWithPrefix(prefix: String): String = { prefix + getNameFallback + "\\n" + members.foldLeft("") { case (str, producer) => str + prefix + "\\t" + producer.getClass.getName.replaceFirst("com.twitter.summingbird.", "") + "\\n" } } override def toString = toStringWithPrefix("\\t") } // This is the default state for Nodes if there is nothing special about them. // There can be an unbounded number of these and there is no hard restrictions on ordering/where. 
Other than // locations which must be one of the others case class FlatMapNode[P <: Platform[P]](override val members: List[Producer[P, _]] = List()) extends Node[P] { def add(node: Producer[P, _]): Node[P] = if (members.contains(node)) this else this.copy(members = node :: members) def reverse = this.copy(members.reverse) override def shortName(sanitize: String => String) = NodeIdentifier("FlatMap" + collapseNamedNodes(sanitize)) } case class SummerNode[P <: Platform[P]](override val members: List[Producer[P, _]] = List()) extends Node[P] { def add(node: Producer[P, _]): Node[P] = if (members.contains(node)) this else this.copy(members = node :: members) def reverse = this.copy(members.reverse) override def shortName(sanitize: String => String) = NodeIdentifier("Summer" + collapseNamedNodes(sanitize)) } case class SourceNode[P <: Platform[P]](override val members: List[Producer[P, _]] = List()) extends Node[P] { def add(node: Producer[P, _]): Node[P] = if (members.contains(node)) this else this.copy(members = node :: members) def reverse = this.copy(members.reverse) override def shortName(sanitize: String => String) = NodeIdentifier("Source" + collapseNamedNodes(sanitize)) } case class Dag[P <: Platform[P]](originalTail: TailProducer[P, _], producerToPriorityNames: Map[Producer[P, Any], List[String]], tail: TailProducer[P, _], producerToNode: Map[Producer[P, _], Node[P]], nodes: List[Node[P]], nodeToName: Map[Node[P], String] = Map[Node[P], String](), nameToNode: Map[String, Node[P]] = Map[String, Node[P]](), dependenciesOfM: Map[Node[P], List[Node[P]]] = Map[Node[P], List[Node[P]]](), dependantsOfM: Map[Node[P], List[Node[P]]] = Map[Node[P], List[Node[P]]]()) { lazy val producerDependants = Dependants(tail) def connect(src: Node[P], dest: Node[P]): Dag[P] = { if (src == dest) { this } else { assert(!dest.isInstanceOf[SourceNode[_]]) // We build/maintain two maps, // Nodes to which each node depends on // and nodes on which each node depends val oldSrcDependants = 
dependantsOfM.getOrElse(src, List[Node[P]]()) val newSrcDependants = if (oldSrcDependants.contains(dest)) oldSrcDependants else (dest :: oldSrcDependants) val newDependantsOfM = dependantsOfM + (src -> newSrcDependants) val oldDestDependencies = dependenciesOfM.getOrElse(dest, List[Node[P]]()) val newDestDependencies = if (oldDestDependencies.contains(src)) oldDestDependencies else (src :: oldDestDependencies) val newDependenciesOfM = dependenciesOfM + (dest -> newDestDependencies) copy(dependenciesOfM = newDependenciesOfM, dependantsOfM = newDependantsOfM) } } def locateOpt(p: Producer[P, _]): Option[Node[P]] = producerToNode.get(p) def locate(p: Producer[P, _]): Node[P] = locateOpt(p).getOrElse { sys.error("Unexpected node missing when looking for %s".format(p)) } def connect(src: Producer[P, _], dest: Producer[P, _]): Dag[P] = connect(locate(src), locate(dest)) def getNodeName(n: Node[P]): String = nodeToName(n) def tailN: Node[P] = producerToNode(tail) def dependantsOf(n: Node[P]): List[Node[P]] = dependantsOfM.get(n).getOrElse(List()) def dependenciesOf(n: Node[P]): List[Node[P]] = dependenciesOfM.get(n).getOrElse(List()) def dependantsOf(p: Producer[P, _]) = producerDependants.dependantsOf(p) def transitiveDependantsOf(p: Producer[P, _]) = producerDependants.transitiveDependantsOf(p) def dependenciesOf(p: Producer[P, _]) = Producer.dependenciesOf(p) def transitiveDependenciesOf(p: Producer[P, _]) = Producer.transitiveDependenciesOf(p) def toStringWithPrefix(prefix: String): String = { prefix + "Dag\\n" + nodes.foldLeft("") { case (str, node) => str + node.toStringWithPrefix(prefix + "\\t") + "\\n" } } override def toString = toStringWithPrefix("\\t") } object Dag { /** The default name sanitizing */ def apply[P <: Platform[P], T](originalTail: TailProducer[P, Any], producerToPriorityNames: Map[Producer[P, Any], List[String]], tail: TailProducer[P, Any], registry: List[Node[P]]): Dag[P] = apply[P, T](originalTail, producerToPriorityNames, tail, registry, { (s: 
String) => s.replaceAll("""[\\[\\]]|\\-""", "|") }) def apply[P <: Platform[P], T](originalTail: TailProducer[P, Any], producerToPriorityNames: Map[Producer[P, Any], List[String]], tail: TailProducer[P, Any], registry: List[Node[P]], sanitizeName: String => String): Dag[P] = { require(registry.collect { case n @ SourceNode(_) => n }.size > 0, "Valid registries should have at least one source node") def buildProducerToNodeLookUp(stormNodeSet: List[Node[P]]): Map[Producer[P, _], Node[P]] = { stormNodeSet.foldLeft(Map[Producer[P, _], Node[P]]()) { (curRegistry, stormNode) => stormNode.members.foldLeft(curRegistry) { (innerRegistry, producer) => (innerRegistry + (producer -> stormNode)) } } } val producerToNode = buildProducerToNodeLookUp(registry) val dag = registry.foldLeft(Dag(originalTail, producerToPriorityNames, tail, producerToNode, registry)) { (curDag, stormNode) => // Here we are building the Dag's connection topology. // We visit every producer and connect the Node's represented by its dependant and dependancies. // Producers which live in the same node will result in a NOP in connect. stormNode.members.foldLeft(curDag) { (innerDag, dependantProducer) => Producer.dependenciesOf(dependantProducer) .foldLeft(innerDag) { (dag, dep) => dag.connect(dep, dependantProducer) } } } def tryGetName(name: String, seen: Set[String], indxOpt: Option[Int] = None): String = { indxOpt match { case None => if (seen.contains(name)) tryGetName(name, seen, Some(2)) else name case Some(indx) => if (seen.contains(name + "." + indx)) tryGetName(name, seen, Some(indx + 1)) else name + "." 
+ indx } } def genNames(dep: Node[P], dag: Dag[P], outerNodeToName: Map[Node[P], String], usedNames: Set[String]): (Map[Node[P], String], Set[String]) = { dag.dependenciesOf(dep).foldLeft((outerNodeToName, usedNames)) { case ((nodeToName, taken), n) => val name = tryGetName(nodeToName(dep) + "-" + n.shortName(sanitizeName), taken) val useName = nodeToName.get(n) match { case None => name case Some(otherName) => if (otherName.split("-").size > name.split("-").size) name else otherName } genNames(n, dag, nodeToName + (n -> useName), taken + useName) } } def allTails(dag: Dag[P]): List[Node[P]] = { dag.nodes.filter { m => dag.dependantsOf(m).size == 0 } } //start with the true tail val (nodeToName, _) = (dag.tailN :: allTails(dag)).foldLeft((Map[Node[P], String](), Set[String]())) { case ((nodeToName, usedNames), curTail) => if (!nodeToName.contains(curTail)) { val tailN = tryGetName("Tail", usedNames) genNames(curTail, dag, nodeToName + (curTail -> tailN), usedNames + tailN) } else { (nodeToName, usedNames) } } val nameToNode = nodeToName.map((t) => (t._2, t._1)) dag.copy(nodeToName = nodeToName, nameToNode = nameToNode) } }
rangadi/summingbird
summingbird-core/src/main/scala/com/twitter/summingbird/planner/Node.scala
Scala
apache-2.0
9,321
// Firebase Rules Generator // Bloom Technologies Inc. Copyright 2017 // // Authors: Raphael Javaux <raphael@bloomlife.com> // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. package com.bloomlife.fbrules.types import com.bloomlife.fbrules.Rules.Generator import com.bloomlife.fbrules.ruleexpr.{NewData} import com.bloomlife.fbrules.ruleexpr.Implicits._ object FbString { def apply( minLength: Option[Int] = None, maxLength: Option[Int] = None, regex: Option[String] = None): FbNode = { var node = FbNode().validateIf(NewData.isString) if (minLength.isDefined) { node = node.validateIf(NewData.asString.length >= minLength.get) } if (maxLength.isDefined) { node = node.validateIf(NewData.asString.length <= maxLength.get) } if (regex.isDefined) { node = node.validateIf(NewData.asString.matches(regex.get)) } node } }
RaphaelJ/firebase-rules-generator
src/main/scala/types/FbString.scala
Scala
gpl-3.0
1,486
/* Copyright 2012 Anton Kraievoy akraievoy@gmail.com This file is part of Holonet. Holonet is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Holonet is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Holonet. If not, see <http://www.gnu.org/licenses/>. */ package org.akraievoy.holonet.exp import org.akraievoy.holonet.exp case class Param( name: String, mt: Manifest[_ <: Any], valueSpec: Seq[String], strategy: Strategy.Value, chainStrategy: Strategy.Value, desc: String, index: Int ) extends Named { def pos(pos: Int, chained: Boolean = false, expIndex: Int = 0): ParamPos = { fullPosSeq(chained, expIndex)(pos) } def toPosSeq( chained: Boolean, expIndex: Int ): Seq[ParamPos] = { val fullSeq = fullPosSeq(chained, expIndex) actualStrategy(chained) match { case exp.Strategy.USE_FIRST => Seq(fullSeq.head) case exp.Strategy.USE_LAST => Seq(fullSeq.last) case anyFull if exp.Strategy.full(anyFull) => fullSeq case other => throw new IllegalArgumentException( "unable to handle strategy %s".format(other) ) } } protected def fullPosSeq(chained: Boolean, expIndex: Int): Seq[ParamPos] = { valueSpec.zipWithIndex.map { case (str, idx) => ParamPos( name, mt, str, idx, valueSpec.size, isParallel(chained), index, expIndex ) } } def isParallel(chained: Boolean = false) = actualStrategy(chained) != exp.Strategy.ITERATE def actualStrategy(chained: Boolean = false): Strategy.Value = if (chained) { chainStrategy } else { strategy } } object Param{ def apply[T]( paramName: ParamName[T], singleValueSpec: String, strategy: Strategy.Value = exp.Strategy.SPAWN, chainStrategy: 
Strategy.Value = exp.Strategy.SPAWN, desc: String = "" ) = { val valueSpec = if (singleValueSpec.contains(';')) { singleValueSpec.split(";").toSeq } else if (singleValueSpec.contains("--")) { val rangeStr = singleValueSpec.split("--") if (rangeStr.length > 2) { throw new IllegalArgumentException( "param '%s' has more than 2 range limits".format(paramName.name) ) } val range = rangeStr.map(java.lang.Long.parseLong) (range(0) to range(1)).map(String.valueOf) } else { Seq(singleValueSpec) } new Param( paramName.name, paramName.mt, valueSpec, strategy, chainStrategy, desc, -1 ) } }
akraievoy/holonet
src/main/scala/org/akraievoy/holonet/exp/Param.scala
Scala
gpl-3.0
3,049
package gov.uk.dvla.vehicles.acquire.runner import cucumber.api.CucumberOptions import cucumber.api.junit.Cucumber import org.junit.runner.RunWith @RunWith(classOf[Cucumber]) @CucumberOptions( features = Array("acceptance-tests/src/test/resources/gherkin/Version.feature"), glue = Array("gov.uk.dvla.vehicles.acquire.stepdefs"), tags = Array("@working","~@Ignore") ) class Version { }
dvla/vehicles-acquire-online
acceptance-tests/src/test/scala/gov/uk/dvla/vehicles/acquire/runner/Version.scala
Scala
mit
394
package tw.com.chttl.spark.mllib.util import org.apache.spark.mllib.linalg.Vector /** * Created by leorick on 2016/2/19. */ object LRUtil extends Serializable { /** * * @param coef coefficient of LRModel * @param names array of variable name * @param sort if sorted by coef * @return LR model formulation, e.g.: (+0.99987) * X0 + (+1.99975) * X1 + (+0.00043) * X2 */ def printLrModel(coef: Vector, names:Option[Array[String]] = None, sort:Boolean = false): String = { val name: Array[String] = names.getOrElse{ (for (i <- 0 until coef.size) yield {f"X${i}%s"}).toArray } val pair = coef.toArray.zip(name) val ret = if(sort) { coef.toArray.zip(name).sortBy{ case (coef,name) => java.lang.Math.abs(coef) }.reverse } else { coef.toArray.zip(name) } ret.map{ case (coef, name) => f"(${coef}%+8.5f) * ${name}" }.mkString(" + ") } }
leoricklin/spark-util
src/main/scala/tw/com/chttl/spark/mllib/util/LRUtil.scala
Scala
apache-2.0
886
package org.jobimtext.coref.berkeley /** * Interface for objects that hold an unique identifier. The uniqueness is defined as follows: Given a class C * implementing `Identifiable` while no ancestor of C implements it, it holds that if o and o' are instances of C: * o.identifier == o'.identifier => o == o' * * @author Tim Feuerbach */ trait Identifiable { def identifier: String }
timfeu/berkeleycoref-thesaurus
src/main/java/org/jobimtext/coref/berkeley/Identifiable.scala
Scala
gpl-3.0
392
package sample.stream.experiments import rx.Scheduler import rx.lang.scala.{ Scheduler, Observable } import scala.concurrent.duration._ object ReplayBuffer { def main(args: Array[String]) { //replays the last two things val observable = Observable.interval(1.second) val connectableObservable = observable.replay(bufferSize = 3) connectableObservable.subscribe(x => { println("one"); println(x) }) connectableObservable.connect Thread.sleep(10000) connectableObservable.subscribe(x => { println("two"); println(x) }) Thread.sleep(10000) // Scheduler // // observable.replay(scheduler = Scheduler()) observable.share } }
pallavig/akka-examples
src/main/scala/sample/stream/experiments/ReplayBuffer.scala
Scala
cc0-1.0
689
package actors import akka.actor.{Actor, ActorLogging, ActorSystem} import com.bryzek.apidoc.api.v0.models.{Application, Diff, DiffBreaking, DiffNonBreaking, DiffUndefinedType, Publication, Version} import com.bryzek.apidoc.internal.v0.models.{Task, TaskDataDiffVersion, TaskDataIndexApplication, TaskDataUndefinedType} import db.{ApplicationsDao, Authorization, ChangesDao, OrganizationsDao, TasksDao, UsersDao, VersionsDao} import lib.{ServiceDiff, Text} import play.api.Logger import play.api.libs.concurrent.Akka import play.api.Play.current import java.util.UUID import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} object TaskActor { object Messages { case class Created(guid: UUID) case object RestartDroppedTasks case object PurgeOldTasks case object NotifyFailed } } @javax.inject.Singleton class TaskActor @javax.inject.Inject() ( system: ActorSystem, applicationsDao: ApplicationsDao, changesDao: ChangesDao, emails: Emails, organizationsDao: OrganizationsDao, search: Search, tasksDao: TasksDao, usersDao: UsersDao, versionsDao: VersionsDao ) extends Actor with ActorLogging with ErrorHandler { implicit val ec = system.dispatchers.lookup("task-actor-context") private[this] val NumberDaysBeforePurge = 30 private[this] case class Process(guid: UUID) system.scheduler.schedule(1.hour, 1.hour, self, TaskActor.Messages.RestartDroppedTasks) system.scheduler.schedule(1.day, 1.day, self, TaskActor.Messages.NotifyFailed) system.scheduler.schedule(1.day, 1.day, self, TaskActor.Messages.PurgeOldTasks) def receive = { case m @ TaskActor.Messages.Created(guid) => withVerboseErrorHandler(m) { self ! 
Process(guid) } case m @ Process(guid) => withVerboseErrorHandler(m) { tasksDao.findByGuid(guid).map { task => tasksDao.incrementNumberAttempts(usersDao.AdminUser, task) task.data match { case TaskDataDiffVersion(oldVersionGuid, newVersionGuid) => { processTask(task, Try(diffVersion(oldVersionGuid, newVersionGuid))) } case TaskDataIndexApplication(applicationGuid) => { processTask(task, Try(search.indexApplication(applicationGuid))) } case TaskDataUndefinedType(desc) => { tasksDao.recordError(usersDao.AdminUser, task, "Task actor got an undefined data type: " + desc) } } } } case m @ TaskActor.Messages.RestartDroppedTasks => withVerboseErrorHandler(m) { tasksDao.findAll( nOrFewerAttempts = Some(2), nOrMoreMinutesOld = Some(1) ).foreach { task => self ! Process(task.guid) } } case m @ TaskActor.Messages.NotifyFailed => withVerboseErrorHandler(m) { val errors = tasksDao.findAll( nOrMoreAttempts = Some(2) ).map { task => val errorType = task.data match { case TaskDataDiffVersion(a, b) => s"TaskDataDiffVersion($a, $b)" case TaskDataIndexApplication(guid) => s"TaskDataIndexApplication($guid)" case TaskDataUndefinedType(desc) => s"TaskDataUndefinedType($desc)" } val errorMsg = Text.truncate(task.lastError.getOrElse("No information on error"), 500) s"$errorType task ${task.guid}: $errorMsg" } emails.sendErrors( subject = "One or more tasks failed", errors = errors ) } case m @ TaskActor.Messages.PurgeOldTasks => withVerboseErrorHandler(m) { tasksDao.findAll( isDeleted = Some(true), deletedAtLeastNDaysAgo = Some(NumberDaysBeforePurge) ).foreach { task => tasksDao.purge(usersDao.AdminUser, task) } } case m: Any => logUnhandledMessage(m) } private[this] def diffVersion(oldVersionGuid: UUID, newVersionGuid: UUID) { versionsDao.findByGuid(Authorization.All, oldVersionGuid, isDeleted = None).map { oldVersion => versionsDao.findByGuid(Authorization.All, newVersionGuid).map { newVersion => ServiceDiff(oldVersion.service, newVersion.service).differences match { case Nil => { // 
No-op } case diffs => { changesDao.upsert( createdBy = usersDao.AdminUser, fromVersion = oldVersion, toVersion = newVersion, differences = diffs ) versionUpdated(newVersion, diffs) } } } } } private[this] def versionUpdated( version: Version, diffs: Seq[Diff] ) { // Only send email if something has actually changed if (!diffs.isEmpty) { val breakingDiffs = diffs.flatMap { d => d match { case d: DiffBreaking => Some(d.description) case d: DiffNonBreaking => None case d: DiffUndefinedType => Some(d.description) } } val nonBreakingDiffs = diffs.flatMap { d => d match { case d: DiffBreaking => None case d: DiffNonBreaking => Some(d.description) case d: DiffUndefinedType => None } } applicationsDao.findAll(Authorization.All, version = Some(version), limit = 1).headOption.map { application => organizationsDao.findAll(Authorization.All, application = Some(application), limit = 1).headOption.map { org => emails.deliver( context = Emails.Context.Application(application), org = org, publication = Publication.VersionsCreate, subject = s"${org.name}/${application.name}:${version.version} Uploaded", body = views.html.emails.versionCreated( org, application, version, breakingDiffs = breakingDiffs, nonBreakingDiffs = nonBreakingDiffs ).toString ) } } } } def processTask[T](task: Task, attempt: Try[T]) { attempt match { case Success(_) => { tasksDao.softDelete(usersDao.AdminUser, task) } case Failure(ex) => { tasksDao.recordError(usersDao.AdminUser, task, ex) } } } }
Seanstoppable/apidoc
api/app/actors/TaskActor.scala
Scala
mit
6,135
package com.twitter.scalding.serialization.macros.impl.ordered_serialization.providers import scala.reflect.macros.whitebox.Context /** * The `knownDirectSubclasses` method doesn't provide stable ordering since it returns an unordered `Set` and * the `Type` AST nodes don't override the `hashCode` method, relying on the default identity `hashCode`. * * This function makes the ordering stable using a list ordered by the full name of the types. */ object StableKnownDirectSubclasses { def apply(c: Context)(tpe: c.Type): List[c.universe.TypeSymbol] = // linter:ignore:UnusedParameter tpe.typeSymbol.asClass.knownDirectSubclasses.map(_.asType).toList.sortBy(_.fullName) }
twitter/scalding
scalding-serialization/src/main/scala/com/twitter/scalding/serialization/macros/impl/ordered_serialization/providers/StableKnownDirectSubclasses.scala
Scala
apache-2.0
686
package ru.pavkin.todoist.api.core.decoder import cats.{FlatMap, Apply} import shapeless.{HNil, ::} import cats.syntax.flatMap._ import cats.syntax.apply._ trait SingleCommandResponseDecoder[F[_], Command, Base] extends CommandResponseDecoder[F, Command, Base] {self => def combine[Command2, Out2](other: CommandResponseDecoder.Aux[F, Command2, Base, Out2]) (implicit A: Apply[F]) : MultipleCommandResponseDecoder.Aux[F, Command2 :: Command :: HNil, Base, Out2 :: self.Out :: HNil] = new MultipleCommandResponseDecoder[F, Command2 :: Command :: HNil, Base] { type Out = Out2 :: self.Out :: HNil def parse(command: Command2 :: Command :: HNil)(resource: Base): F[Out] = self.parse(command.tail.head)(resource) .map2(other.parse(command.head)(resource))((a, b) => b :: a :: HNil) } def compose[Out2](other: CommandResponseDecoder.Aux[F, Command, Out, Out2]) (implicit F: FlatMap[F]): SingleCommandResponseDecoder.Aux[F, Command, Base, Out2] = new SingleCommandResponseDecoder[F, Command, Base] { type Out = Out2 def parse(command: Command)(resource: Base): F[Out] = self.parse(command: Command)(resource).flatMap(other.parse(command: Command)) } } object SingleCommandResponseDecoder { type Aux[F[_], Command, Base, Out0] = SingleCommandResponseDecoder[F, Command, Base] {type Out = Out0} def using[F[_], Command, Base, Out0](f: (Command, Base) => F[Out0]): Aux[F, Command, Base, Out0] = new SingleCommandResponseDecoder[F, Command, Base] { type Out = Out0 def parse(command: Command)(resource: Base): F[Out] = f(command, resource) } }
vpavkin/todoist-api-scala
core/src/main/scala/ru/pavkin/todoist/api/core/decoder/SingleCommandResponseDecoder.scala
Scala
mit
1,684
package be.wegenenverkeer.atomium.server import be.wegenenverkeer.atomium.format.{Feed, Url} import scala.concurrent.{ExecutionContext, Future} /** * A feed store is responsible for the persistence of feeds. * This abstract class serves as a base class for more specific FeedStore implementations. * * @tparam E type of the elements in the feed */ abstract class AbstractAsyncFeedStore[E, C <: Context](feedName: String, title: Option[String], url: Url) extends AsyncFeedStore[E, C] with FeedStoreSupport[E] { implicit val feedParams = FeedParams(feedName, url, title) /** * Retrieves a page of the feed. * * @param start the starting entry (exclusive), should not be returned in the feed page * @param pageSize the number of entries * @param forward if true navigate to 'previous' elements in feed (towards head of feed) * else navigate to 'next' elements in feed (towards last page of feed) * @return the feed page or `Future.failed` if the page is not found */ override def getFeed(start: Long, pageSize: Int, forward: Boolean) (implicit executionContext: ExecutionContext, context: C): Future[Option[Feed[E]]] = { require(pageSize > 0) val allowedFuture: Future[Boolean] = for { max <- maxId lowerEntries <- getNumberOfEntriesLowerThan(start, forward) } yield start <= max && lowerEntries % pageSize == 0 for { allowed <- allowedFuture entries <- if (allowed) { getFeedEntries(start, pageSize + 2, forward) } else { Future.successful(List.empty) } min <- minId } yield processFeedEntries(start, min, pageSize, forward, entries) } /** * Retrieves the head of the feed. This is the first page containing the most recent entries * @param pageSize the maximum number of feed entries to return. 
The page could contain less entries * @return the head of the feed */ override def getHeadOfFeed(pageSize: Int) (implicit executionContext: ExecutionContext, context: C): Future[Option[Feed[E]]] = { require(pageSize > 0, "page size must be greater than 0") //fetch most recent entries from feed, we ask for one more than the pageSize to check if we are on the last page for { entries <- getMostRecentFeedEntries(pageSize + 1) numberOfEntries <- if (entries.nonEmpty) getNumberOfEntriesLowerThan(entries.head.sequenceNr) else Future.successful(0L) min <- minId } yield if (entries.nonEmpty) processHeadFeedEntries(numberOfEntries, min, pageSize, entries) else None } /** * @return one less than the minimum sequence number used in this feed */ def minId(implicit context: C): Future[Long] /** * @return the maximum sequence number used in this feed or minId if feed is empty */ def maxId(implicit context: C): Future[Long] /** * @param sequenceNr sequence number to match * @param inclusive if true include the specified sequence number * @return the number of entries in the feed with sequence number lower than specified */ def getNumberOfEntriesLowerThan(sequenceNr: Long, inclusive: Boolean = true) (implicit executionContext: ExecutionContext, context: C): Future[Long] /** * Retrieves the most recent entries from the `FeedStore` sorted in descending order * @param count the amount of recent entries to return * @return a list of FeedEntries. 
a FeedEntry is a sequence number and its corresponding entry * and sorted by descending sequence number */ def getMostRecentFeedEntries(count: Int) (implicit executionContext: ExecutionContext, context: C): Future[List[FeedStoreSupport[E]#FeedEntry]] /** * Retrieves entries with their sequence numbers from the feed * * @param start the starting entry (inclusive), MUST be returned in the entries * @param count the number of entries to return * @param ascending if true return entries with sequence numbers >= start in ascending order * else return entries with sequence numbers <= start in descending order * @return the corresponding entries sorted accordingly */ def getFeedEntries(start: Long, count: Int, ascending: Boolean) (implicit executionContext: ExecutionContext, context: C): Future[List[FeedStoreSupport[E]#FeedEntry]] }
joachimvda/atomium
modules/server/src/main/scala/be/wegenenverkeer/atomium/server/AbstractAsyncFeedStore.scala
Scala
mit
4,540
package scala.quoted.runtime import scala.quoted.{Expr, Type} /** Part of the Quotes interface that needs to be implemented by the compiler but is not visible to users */ trait QuoteMatching: val ExprMatch: ExprMatchModule trait ExprMatchModule { self: ExprMatch.type => /** Pattern matches an the scrutineeExpr against the patternExpr and returns a tuple * with the matched holes if successful. * * Examples: * - `ExprMatch.unapply('{ f(0, myInt) })('{ f(0, myInt) }, _)` * will return `Some(())` (where `()` is a tuple of arity 0) * - `ExprMatch.unapply('{ f(0, myInt) })('{ f(patternHole[Int], patternHole[Int]) }, _)` * will return `Some(Tuple2('{0}, '{ myInt }))` * - `ExprMatch.unapply('{ f(0, "abc") })('{ f(0, patternHole[Int]) }, _)` * will return `None` due to the missmatch of types in the hole * * Holes: * - scala.quoted.runtime.Patterns.patternHole[T]: hole that matches an expression `x` of type `Expr[U]` * if `U <:< T` and returns `x` as part of the match. * * @param scrutinee `Expr[Any]` on which we are pattern matching * @param pattern `Expr[Any]` containing the pattern tree * @return None if it did not match, `Some(tup)` if it matched where `tup` contains `Expr[Ti]`` */ def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: Expr[Any])(using pattern: Expr[Any]): Option[Tup] } val TypeMatch: TypeMatchModule trait TypeMatchModule { self: TypeMatch.type => /** Pattern matches an the scrutineeType against the patternType and returns a tuple * with the matched holes if successful. * * @param scrutinee `Type[?]` on which we are pattern matching * @param pattern `Type[?]` containing the pattern tree * @return None if it did not match, `Some(tup)` if it matched where `tup` contains `Type[Ti]`` */ def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: Type[?])(using pattern: Type[?]): Option[Tup] }
dotty-staging/dotty
library/src/scala/quoted/runtime/QuoteMatching.scala
Scala
apache-2.0
2,055
package com.datastax.driver.spark.mapper import com.datastax.driver.spark.connector.{TableDef, RegularColumn, ColumnDef} import com.datastax.driver.spark.types.IntType import com.datastax.driver.spark.util.SerializationUtil import org.junit.Assert._ import org.junit.Test class TupleColumnMapperTest { private val c1 = ColumnDef("test", "table", "column1", RegularColumn, IntType) private val c2 = ColumnDef("test", "table", "column2", RegularColumn, IntType) private val c3 = ColumnDef("test", "table", "column3", RegularColumn, IntType) private val tableDef = TableDef("test", "table", Seq(c1), Seq(c2), Seq(c3)) @Test def testGetters() { val columnMap = new TupleColumnMapper[(Int, String, Boolean)].columnMap(tableDef) val getters = columnMap.getters assertEquals(IndexedColumnRef(0), getters("_1")) assertEquals(IndexedColumnRef(1), getters("_2")) assertEquals(IndexedColumnRef(2), getters("_3")) } @Test def testConstructor() { val columnMap = new TupleColumnMapper[(Int, String, Boolean)].columnMap(tableDef) assertEquals(Seq(IndexedColumnRef(0), IndexedColumnRef(1), IndexedColumnRef(2)), columnMap.constructor) } @Test def testSerialize() { val columnMap = new TupleColumnMapper[(Int, String, Boolean)].columnMap(tableDef) SerializationUtil.serializeAndDeserialize(columnMap) } @Test def testImplicit() { val columnMap = implicitly[ColumnMapper[(Int, String, Boolean)]].columnMap(tableDef) val getters = columnMap.getters assertEquals(IndexedColumnRef(0), getters("_1")) assertEquals(IndexedColumnRef(1), getters("_2")) assertEquals(IndexedColumnRef(2), getters("_3")) } }
bovigny/cassandra-driver-spark
src/test/scala/com/datastax/driver/spark/mapper/TupleColumnMapperTest.scala
Scala
apache-2.0
1,678
package HackerRank.Training.DataStructures.Stacks import java.io.{ByteArrayInputStream, IOException, InputStream, PrintWriter} import java.util.InputMismatchException import scala.collection.generic.CanBuildFrom import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.language.higherKinds import scala.reflect.ClassTag /** * Copyright (c) 2017 A. Roberto Fischer * * @author A. Roberto Fischer <a.robertofischer@gmail.com> on 8/6/2017 */ private[this] object PoisonousPlants { import Reader._ import Writer._ private[this] val TEST_INPUT: Option[String] = None //------------------------------------------------------------------------------------------// // Solution //------------------------------------------------------------------------------------------// private[this] def solve(): Unit = { val n = next[Int]() println( solveInLinearTime(next[Long, Vector](n).map(-_)) ) // println( // next[Long, Vector](n) // .splitDependingOnPrevious[List, List]((current, previous) => current > previous) // .map(xs => new CustomStack[Long](xs)) // .solve() // ) } private[this] def solveInLinearTime[T: Ordering](xs: Seq[T]) = { import Ordering.Implicits._ import scala.util.control.Breaks._ var max = 0 val stack = mutable.ArrayStack[(T, Int)]() for (currentElement <- xs) { if (stack.isEmpty) { stack.push((currentElement, 0)) } else { if (currentElement < stack.head._1) { max = Math.max(max, 1) stack.push((currentElement, 1)) } else { var head = stack.head var pr = head._2 breakable { while (stack.nonEmpty && head._1 <= currentElement) { stack.pop() if (stack.isEmpty) { break } pr = Math.max(pr, head._2) head = stack.head } } if (stack.isEmpty) { stack.push((currentElement, 0)) } else { stack.push((currentElement, pr + 1)) max = Math.max(max, pr + 1) } } } } max } private[this] implicit final class SeqExtension[T](xs: Seq[T]) { /** * * @param predicate ??? * @param cbfA ??? * @param cbfB ??? 
* @tparam CollA outer collection * @tparam CollB inner collection * @return */ def splitDependingOnPrevious[CollA[_], CollB[_]] (predicate: (T, T) => Boolean) (implicit cbfA: CanBuildFrom[CollB[T], T, CollB[T]], cbfB: CanBuildFrom[CollA[CollB[T]], CollB[T], CollA[CollB[T]]]): CollA[CollB[T]] = { val outerBuilder = cbfB() //at start prev == head var previous = xs.headOption var innerBuilder = cbfA() for (i <- xs.indices) { if (previous.fold(false)(prev => predicate(xs(i), prev))) { outerBuilder += innerBuilder.result() innerBuilder = cbfA() } val current = xs(i) innerBuilder += current previous = Option(current) //if this is the last iteration add remaining elements to result if (i == xs.size - 1) { outerBuilder += innerBuilder.result() } } outerBuilder.result() } } private[this] implicit final class CustomList[T: Ordering](private var underlying: List[CustomStack[T]]) { def solve(): Int = { var i = 0 while (underlying.size != 1) { i += 1 popTail() merge() } i } private[this] def popTail(): Unit = { underlying.tail.foreach(_.pop()) } private[this] def merge(): Unit = { import Ordering.Implicits._ underlying = underlying .filter(_.nonEmpty) .foldLeft(new ArrayBuffer[CustomStack[T]]()) { case (acm, value) => if (value.headOption.fold(false)(head => acm.lastOption.fold(false)(nonEmptyAcm => nonEmptyAcm.lastOption.fold(false)(_ >= head)))) { acm.last ++ value acm } else { acm += value } }.toList } } private[this] final class CustomStack[T](private var underlying: List[T] = List.empty[T]) { private var lastElement = underlying.lastOption def pop(): Option[T] = { val res = underlying.headOption res.foreach(_ => underlying = underlying.tail) if (underlying.isEmpty) { lastElement = None } res } def push(x: T): Unit = { if (underlying.isEmpty) { lastElement = Some(x) } underlying = x :: underlying } def lastOption: Option[T] = lastElement def headOption: Option[T] = underlying.headOption def ++(b: CustomStack[T]): Unit = { underlying = underlying ::: b.underlying lastElement 
= b.lastElement } def nonEmpty: Boolean = underlying.nonEmpty override def toString: String = underlying.toString override def hashCode(): Int = underlying.hashCode() override def equals(obj: scala.Any): Boolean = underlying.equals(obj) } //------------------------------------------------------------------------------------------// // Run //------------------------------------------------------------------------------------------// @throws[Exception] def main(args: Array[String]): Unit = { val s = System.currentTimeMillis solve() flush() if (TEST_INPUT.isDefined) System.out.println(System.currentTimeMillis - s + "ms") } //------------------------------------------------------------------------------------------// // Input //------------------------------------------------------------------------------------------// private[this] final object Reader { private[this] implicit val in: InputStream = TEST_INPUT.fold(System.in)(s => new ByteArrayInputStream(s.getBytes)) def next[T: ClassTag](): T = { implicitly[ClassTag[T]].runtimeClass match { case java.lang.Integer.TYPE => nextInt().asInstanceOf[T] case java.lang.Long.TYPE => nextLong().asInstanceOf[T] case java.lang.Double.TYPE => nextDouble().asInstanceOf[T] case java.lang.Character.TYPE => nextChar().asInstanceOf[T] case s if Class.forName("java.lang.String") == s => nextString().asInstanceOf[T] case b if Class.forName("scala.math.BigInt") == b => BigInt(nextString()).asInstanceOf[T] case b if Class.forName("scala.math.BigDecimal") == b => BigDecimal(nextString()).asInstanceOf[T] case _ => throw new RuntimeException("Unsupported input type.") } } def next[T, Coll[_]](reader: => T, n: Int) (implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = { val builder = cbf() builder.sizeHint(n) for (_ <- 0 until n) { builder += reader } builder.result() } def nextWithIndex[T, Coll[_]](reader: => T, n: Int) (implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = { val builder = cbf() 
builder.sizeHint(n) for (i <- 0 until n) { builder += ((reader, i)) } builder.result() } def next[T: ClassTag, Coll[_]](n: Int) (implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = { val builder = cbf() builder.sizeHint(n) for (_ <- 0 until n) { builder += next[T]() } builder.result() } def nextWithIndex[T: ClassTag, Coll[_]](n: Int) (implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = { val builder = cbf() builder.sizeHint(n) for (i <- 0 until n) { builder += ((next[T](), i)) } builder.result() } def nextMultiLine[T: ClassTag](n: Int, m: Int): Seq[Seq[T]] = { val map = Vector.newBuilder[Vector[T]] var i = 0 while (i < n) { map += next[T, Vector](m) i += 1 } map.result() } private[this] def nextDouble(): Double = nextString().toDouble private[this] def nextChar(): Char = skip.toChar private[this] def nextString(): String = { var b = skip val sb = new java.lang.StringBuilder while (!isSpaceChar(b)) { sb.appendCodePoint(b) b = readByte().toInt } sb.toString } private[this] def nextInt(): Int = { var num = 0 var b = 0 var minus = false while ( { b = readByte().toInt b != -1 && !((b >= '0' && b <= '9') || b == '-') }) {} if (b == '-') { minus = true b = readByte().toInt } while (true) { if (b >= '0' && b <= '9') { num = num * 10 + (b - '0') } else { if (minus) return -num else return num } b = readByte().toInt } throw new IOException("Read Int") } private[this] def nextLong(): Long = { var num = 0L var b = 0 var minus = false while ( { b = readByte().toInt b != -1 && !((b >= '0' && b <= '9') || b == '-') }) {} if (b == '-') { minus = true b = readByte().toInt } while (true) { if (b >= '0' && b <= '9') { num = num * 10 + (b - '0') } else { if (minus) return -num else return num } b = readByte().toInt } throw new IOException("Read Long") } private[this] val inputBuffer = new Array[Byte](1024) private[this] var lenBuffer = 0 private[this] var ptrBuffer = 0 private[this] def readByte()(implicit in: java.io.InputStream): Byte = { 
if (lenBuffer == -1) throw new InputMismatchException if (ptrBuffer >= lenBuffer) { ptrBuffer = 0 try { lenBuffer = in.read(inputBuffer) } catch { case _: IOException => throw new InputMismatchException } if (lenBuffer <= 0) return -1 } inputBuffer({ ptrBuffer += 1 ptrBuffer - 1 }) } private[this] def isSpaceChar(c: Int) = !(c >= 33 && c <= 126) private[this] def skip = { var b = 0 while ( { b = readByte().toInt b != -1 && isSpaceChar(b) }) {} b } } //------------------------------------------------------------------------------------------// // Output //------------------------------------------------------------------------------------------// private[this] final object Writer { private[this] val out = new PrintWriter(System.out) def flush(): Unit = out.flush() def println(x: Any): Unit = out.println(x) def print(x: Any): Unit = out.print(x) } }
robertoFischer/hackerrank
src/main/scala/HackerRank/Training/DataStructures/Stacks/PoisonousPlants.scala
Scala
mit
10,892
package temportalist.compression.main.server import net.minecraft.entity.player.EntityPlayer import net.minecraft.tileentity.TileEntity import net.minecraft.world.World import temportalist.compression.main.common.ProxyCommon /** * * Created by TheTemportalist on 4/14/2016. * * @author TheTemportalist */ class ProxyServer extends ProxyCommon { override def getServerElement(ID: Int, player: EntityPlayer, world: World, x: Int, y: Int, z: Int, tileEntity: TileEntity): AnyRef = { null } }
TheTemportalist/Compression
src/main/scala/temportalist/compression/main/server/ProxyServer.scala
Scala
apache-2.0
510
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.{Expression, Generator} import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode} import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.functions._ import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.{IntegerType, StructType} class GeneratorFunctionSuite extends QueryTest with SharedSparkSession { import testImplicits._ test("stack") { val df = spark.range(1) // Empty DataFrame suppress the result generation checkAnswer(spark.emptyDataFrame.selectExpr("stack(1, 1, 2, 3)"), Nil) // Rows & columns checkAnswer(df.selectExpr("stack(1, 1, 2, 3)"), Row(1, 2, 3) :: Nil) checkAnswer(df.selectExpr("stack(2, 1, 2, 3)"), Row(1, 2) :: Row(3, null) :: Nil) checkAnswer(df.selectExpr("stack(3, 1, 2, 3)"), Row(1) :: Row(2) :: Row(3) :: Nil) checkAnswer(df.selectExpr("stack(4, 1, 2, 3)"), Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil) // Various column types checkAnswer(df.selectExpr("stack(3, 1, 1.1, 'a', 2, 2.2, 'b', 3, 3.3, 'c')"), Row(1, 1.1, "a") :: Row(2, 2.2, "b") :: Row(3, 3.3, "c") :: 
Nil) // Null values checkAnswer(df.selectExpr("stack(3, 1, 1.1, null, 2, null, 'b', null, 3.3, 'c')"), Row(1, 1.1, null) :: Row(2, null, "b") :: Row(null, 3.3, "c") :: Nil) // Repeat generation at every input row checkAnswer(spark.range(2).selectExpr("stack(2, 1, 2, 3)"), Row(1, 2) :: Row(3, null) :: Row(1, 2) :: Row(3, null) :: Nil) // The first argument must be a positive constant integer. val m = intercept[AnalysisException] { df.selectExpr("stack(1.1, 1, 2, 3)") }.getMessage assert(m.contains("The number of rows must be a positive constant integer.")) val m2 = intercept[AnalysisException] { df.selectExpr("stack(-1, 1, 2, 3)") }.getMessage assert(m2.contains("The number of rows must be a positive constant integer.")) // The data for the same column should have the same type. val m3 = intercept[AnalysisException] { df.selectExpr("stack(2, 1, '2.2')") }.getMessage assert(m3.contains("data type mismatch: Argument 1 (int) != Argument 2 (string)")) // stack on column data val df2 = Seq((2, 1, 2, 3)).toDF("n", "a", "b", "c") checkAnswer(df2.selectExpr("stack(2, a, b, c)"), Row(1, 2) :: Row(3, null) :: Nil) val m4 = intercept[AnalysisException] { df2.selectExpr("stack(n, a, b, c)") }.getMessage assert(m4.contains("The number of rows must be a positive constant integer.")) val df3 = Seq((2, 1, 2.0)).toDF("n", "a", "b") val m5 = intercept[AnalysisException] { df3.selectExpr("stack(2, a, b)") }.getMessage assert(m5.contains("data type mismatch: Argument 1 (int) != Argument 2 (double)")) } test("single explode") { val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList") checkAnswer( df.select(explode('intList)), Row(1) :: Row(2) :: Row(3) :: Nil) } test("single explode_outer") { val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList") checkAnswer( df.select(explode_outer('intList)), Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil) } test("single posexplode") { val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList") checkAnswer( df.select(posexplode('intList)), Row(0, 1) :: 
Row(1, 2) :: Row(2, 3) :: Nil) } test("single posexplode_outer") { val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList") checkAnswer( df.select(posexplode_outer('intList)), Row(0, 1) :: Row(1, 2) :: Row(2, 3) :: Row(null, null) :: Nil) } test("explode and other columns") { val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList") checkAnswer( df.select($"a", explode('intList)), Row(1, 1) :: Row(1, 2) :: Row(1, 3) :: Nil) checkAnswer( df.select($"*", explode('intList)), Row(1, Seq(1, 2, 3), 1) :: Row(1, Seq(1, 2, 3), 2) :: Row(1, Seq(1, 2, 3), 3) :: Nil) } test("explode_outer and other columns") { val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList") checkAnswer( df.select($"a", explode_outer('intList)), Row(1, 1) :: Row(1, 2) :: Row(1, 3) :: Row(2, null) :: Nil) checkAnswer( df.select($"*", explode_outer('intList)), Row(1, Seq(1, 2, 3), 1) :: Row(1, Seq(1, 2, 3), 2) :: Row(1, Seq(1, 2, 3), 3) :: Row(2, Seq(), null) :: Nil) } test("aliased explode") { val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList") checkAnswer( df.select(explode('intList).as('int)).select('int), Row(1) :: Row(2) :: Row(3) :: Nil) checkAnswer( df.select(explode('intList).as('int)).select(sum('int)), Row(6) :: Nil) } test("aliased explode_outer") { val df = Seq((1, Seq(1, 2, 3)), (2, Seq())).toDF("a", "intList") checkAnswer( df.select(explode_outer('intList).as('int)).select('int), Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil) checkAnswer( df.select(explode('intList).as('int)).select(sum('int)), Row(6) :: Nil) } test("explode on map") { val df = Seq((1, Map("a" -> "b"))).toDF("a", "map") checkAnswer( df.select(explode('map)), Row("a", "b")) } test("explode_outer on map") { val df = Seq((1, Map("a" -> "b")), (2, Map[String, String]()), (3, Map("c" -> "d"))).toDF("a", "map") checkAnswer( df.select(explode_outer('map)), Row("a", "b") :: Row(null, null) :: Row("c", "d") :: Nil) } test("explode on map with aliases") { val df = Seq((1, Map("a" -> "b"))).toDF("a", "map") checkAnswer( 
df.select(explode('map).as("key1" :: "value1" :: Nil)).select("key1", "value1"), Row("a", "b")) } test("explode_outer on map with aliases") { val df = Seq((3, None), (1, Some(Map("a" -> "b")))).toDF("a", "map") checkAnswer( df.select(explode_outer('map).as("key1" :: "value1" :: Nil)).select("key1", "value1"), Row("a", "b") :: Row(null, null) :: Nil) } test("self join explode") { val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList") val exploded = df.select(explode('intList).as('i)) checkAnswer( exploded.join(exploded, exploded("i") === exploded("i")).agg(count("*")), Row(3) :: Nil) } test("inline raises exception on array of null type") { val m = intercept[AnalysisException] { spark.range(2).selectExpr("inline(array())") }.getMessage assert(m.contains("data type mismatch")) } test("inline with empty table") { checkAnswer( spark.range(0).selectExpr("inline(array(struct(10, 100)))"), Nil) } test("inline on literal") { checkAnswer( spark.range(2).selectExpr("inline(array(struct(10, 100), struct(20, 200), struct(30, 300)))"), Row(10, 100) :: Row(20, 200) :: Row(30, 300) :: Row(10, 100) :: Row(20, 200) :: Row(30, 300) :: Nil) } test("inline on column") { val df = Seq((1, 2)).toDF("a", "b") checkAnswer( df.selectExpr("inline(array(struct(a), struct(a)))"), Row(1) :: Row(1) :: Nil) checkAnswer( df.selectExpr("inline(array(struct(a, b), struct(a, b)))"), Row(1, 2) :: Row(1, 2) :: Nil) // Spark think [struct<a:int>, struct<b:int>] is heterogeneous due to name difference. val m = intercept[AnalysisException] { df.selectExpr("inline(array(struct(a), struct(b)))") }.getMessage assert(m.contains("data type mismatch")) checkAnswer( df.selectExpr("inline(array(struct(a), named_struct('a', b)))"), Row(1) :: Row(2) :: Nil) // Spark think [struct<a:int>, struct<col1:int>] is heterogeneous due to name difference. 
val m2 = intercept[AnalysisException] { df.selectExpr("inline(array(struct(a), struct(2)))") }.getMessage assert(m2.contains("data type mismatch")) checkAnswer( df.selectExpr("inline(array(struct(a), named_struct('a', 2)))"), Row(1) :: Row(2) :: Nil) checkAnswer( df.selectExpr("struct(a)").selectExpr("inline(array(*))"), Row(1) :: Nil) checkAnswer( df.selectExpr("array(struct(a), named_struct('a', b))").selectExpr("inline(*)"), Row(1) :: Row(2) :: Nil) } test("inline_outer") { val df = Seq((1, "2"), (3, "4"), (5, "6")).toDF("col1", "col2") val df2 = df.select(when('col1 === 1, null).otherwise(array(struct('col1, 'col2))).as("col1")) checkAnswer( df2.selectExpr("inline(col1)"), Row(3, "4") :: Row(5, "6") :: Nil ) checkAnswer( df2.selectExpr("inline_outer(col1)"), Row(null, null) :: Row(3, "4") :: Row(5, "6") :: Nil ) } test("SPARK-14986: Outer lateral view with empty generate expression") { checkAnswer( sql("select nil from values 1 lateral view outer explode(array()) n as nil"), Row(null) :: Nil ) } test("outer explode()") { checkAnswer( sql("select * from values 1, 2 lateral view outer explode(array()) a as b"), Row(1, null) :: Row(2, null) :: Nil) } test("outer generator()") { spark.sessionState.functionRegistry .createOrReplaceTempFunction("empty_gen", _ => EmptyGenerator()) checkAnswer( sql("select * from values 1, 2 lateral view outer empty_gen() a as b"), Row(1, null) :: Row(2, null) :: Nil) } } case class EmptyGenerator() extends Generator { override def children: Seq[Expression] = Nil override def elementSchema: StructType = new StructType().add("id", IntegerType) override def eval(input: InternalRow): TraversableOnce[InternalRow] = Seq.empty override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val iteratorClass = classOf[Iterator[_]].getName ev.copy(code = code"$iteratorClass<InternalRow> ${ev.value} = $iteratorClass$$.MODULE$$.empty();") } }
techaddict/spark
sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
Scala
apache-2.0
10,851
package shredzzz.kirkwood.utils import jcuda.driver.{CUdevice_attribute => JCudaDeviceAttr} object cuDeviceAttrs { val attributes = Seq( JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_BLOCK, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_X, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_Y, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_BLOCK_DIM_Z, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_X, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_Y, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_GRID_DIM_Z, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_SHARED_MEMORY_PER_BLOCK, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_TOTAL_CONSTANT_MEMORY, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_WARP_SIZE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_PITCH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_REGISTERS_PER_BLOCK, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_CLOCK_RATE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_TEXTURE_ALIGNMENT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_GPU_OVERLAP, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_KERNEL_EXEC_TIMEOUT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_INTEGRATED, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_CAN_MAP_HOST_MEMORY, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_COMPUTE_MODE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_DEPTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LAYERED_LAYERS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_SURFACE_ALIGNMENT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_CONCURRENT_KERNELS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_ECC_ENABLED, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_PCI_BUS_ID, 
JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_PCI_DEVICE_ID, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_TCC_DRIVER, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MEMORY_CLOCK_RATE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_GLOBAL_MEMORY_BUS_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_L2_CACHE_SIZE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_MULTIPROCESSOR, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_ASYNC_ENGINE_COUNT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_UNIFIED_ADDRESSING, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LAYERED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LAYERED_LAYERS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_CAN_TEX2D_GATHER, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_GATHER_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_GATHER_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_WIDTH_ALTERNATE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_HEIGHT_ALTERNATE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE3D_DEPTH_ALTERNATE, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_PCI_DOMAIN_ID, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_TEXTURE_PITCH_ALIGNMENT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_LAYERED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURECUBEMAP_LAYERED_LAYERS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE3D_DEPTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_LAYERED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE1D_LAYERED_LAYERS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_HEIGHT, 
JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACE2D_LAYERED_LAYERS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_LAYERED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_SURFACECUBEMAP_LAYERED_LAYERS, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_LINEAR_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_LINEAR_PITCH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_MIPMAPPED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE2D_MIPMAPPED_HEIGHT, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_MAXIMUM_TEXTURE1D_MIPMAPPED_WIDTH, JCudaDeviceAttr.CU_DEVICE_ATTRIBUTE_STREAM_PRIORITIES_SUPPORTED ) def asStr(n: Int) = { JCudaDeviceAttr.stringFor(n) } }
shredzzz/kirkwood
src/main/scala/shredzzz/kirkwood/utils/cuDeviceAttrs.scala
Scala
apache-2.0
5,304
package scorex.transaction.state.database.state.storage import org.h2.mvstore.{MVMap, MVStore} import scala.collection.JavaConverters._ import scala.util.Try trait MVStoreOrderMatchStorage extends OrderMatchStorageI { val db: MVStore // ============= Saved days val OrderMatchDays = "OrderMatchSavedDays" private lazy val savedDays: MVMap[Long, Boolean] = db.openMap(OrderMatchDays) def putSavedDays(day: Long): Unit = savedDays.put(day, true) def containsSavedDays(day: Long): Boolean = Option(savedDays.get(day)).isDefined def savedDaysKeys: List[Long] = savedDays.keyList().asScala.toList // ============= Order match val OrderMatchTx = "OrderMatchTx" /** * Returns Map Order id -> OrderMatch transactions Ids by Timestamp - starting of the day */ private def orderMatchTxByDay(orderTimestamp: Long): MVMap[String, Array[String]] = db.openMap(OrderMatchTx + orderTimestamp) def getOrderMatchTxByDay(orderTimestamp: Long, key: String): Option[Array[String]] = Option(orderMatchTxByDay(orderTimestamp).get(key)) def putOrderMatchTxByDay(orderTimestamp: Long, key: String, data: Array[String]): Unit = orderMatchTxByDay(orderTimestamp).put(key, data) def removeOrderMatchDays(days: List[Long]): Unit = days.foreach { d => Try(db.removeMap(orderMatchTxByDay(d))) } }
B83YPoj/Waves
src/main/scala/scorex/transaction/state/database/state/storage/MVStoreOrderMatchStorage.scala
Scala
apache-2.0
1,331
package org.karps.row

import scala.util.{Failure, Success, Try}

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._

import karps.core.{row => R}

/** Algebraic representation of a single value inside an [[AlgebraicRow]]. */
sealed trait Cell
case object Empty extends Cell // The null elements
case class IntElement(i: Int) extends Cell
case class DoubleElement(i: Double) extends Cell
case class StringElement(s: String) extends Cell
case class BoolElement(b: Boolean) extends Cell
// Unlike the haskell code, we need to make a distinction between the
// row and the array case during the reconstruction.
case class RowArray(seq: Seq[Cell]) extends Cell
case class RowCell(r: AlgebraicRow) extends Cell

object Cell {
  import org.karps.structures.ProtoUtils.sequence

  /**
   * Decodes a protobuf cell into its algebraic form.
   * Note: proto int values are narrowed to 32-bit with `.toInt`.
   */
  def fromProto(c: R.Cell): Cell = c.element match {
    case R.Cell.Element.Empty => Empty
    case R.Cell.Element.IntValue(i) => IntElement(i.toInt)
    case R.Cell.Element.StringValue(i) => StringElement(i)
    case R.Cell.Element.DoubleValue(i) => DoubleElement(i)
    case R.Cell.Element.BoolValue(i) => BoolElement(i)
    case R.Cell.Element.ArrayValue(R.ArrayCell(i)) => RowArray(i.map(fromProto _))
    case R.Cell.Element.StructValue(arp) => RowCell(AlgebraicRow.fromProto(arp))
  }

  /** Inverse of [[fromProto]]: encodes a cell into its protobuf form. */
  def toProto(c: Cell): R.Cell = {
    import R.Cell.{Element => E}
    R.Cell(c match {
      case Empty => E.Empty
      case IntElement(i) => E.IntValue(i)
      case StringElement(s) => E.StringValue(s)
      case DoubleElement(d) => E.DoubleValue(d)
      case BoolElement(b) => E.BoolValue(b)
      case RowArray(cells) => E.ArrayValue(R.ArrayCell(cells.map(toProto)))
      case RowCell(ar) => E.StructValue(AlgebraicRow.toProto(ar))
    })
  }

  /**
   * Total ordering over cells. Same-typed cells compare by value; differently-typed
   * cells compare by the fixed type precedence encoded in `compareCells`
   * (Int > Double > String > Bool > RowArray > RowCell > Empty).
   */
  object CellOrdering extends Ordering[Cell] {
    override def compare(x: Cell, y: Cell): Int = compareCells(x, y)
  }

  // Lexicographic ordering over cell sequences (used for both arrays and nested rows).
  private def cellsOrdering = Ordering.Iterable[Cell](CellOrdering)

  private def compareCells(x: Cell, y: Cell): Int = (x, y) match {
    case (IntElement(i1), IntElement(i2)) => Ordering.Int.compare(i1, i2)
    case (IntElement(_), _) => 1
    case (_, IntElement(_)) => -1
    case (DoubleElement(i1), DoubleElement(i2)) =>
      Ordering.Double.compare(i1, i2)
    case (DoubleElement(_), _) => 1
    case (_, DoubleElement(_)) => -1
    case (StringElement(s1), StringElement(s2)) => Ordering.String.compare(s1, s2)
    case (StringElement(_), _) => 1
    case (_, StringElement(_)) => -1
    case (BoolElement(b1), BoolElement(b2)) => Ordering.Boolean.compare(b1, b2)
    case (BoolElement(_), _) => 1
    case (_, BoolElement(_)) => -1
    case (RowArray(seq1), RowArray(seq2)) => cellsOrdering.compare(seq1, seq2)
    case (RowArray(_), _) => 1
    case (_, RowArray(_)) => -1
    case (RowCell(c1), RowCell(c2)) => cellsOrdering.compare(c1.cells, c2.cells)
    case (RowCell(_), _) => 1
    case (_, RowCell(_)) => -1
    // By this point every non-Empty pairing has been handled, so both must be Empty.
    case (Empty, Empty) => 0
  }

  /** Converts a cell back to the plain JVM value Spark expects (Empty becomes null). */
  def toAny(c: Cell): Any = c match {
    case Empty => null
    case IntElement(i) => i
    case DoubleElement(d) => d
    case StringElement(s) => s
    case RowArray(s) => s.map(toAny)
    case RowCell(r) => AlgebraicRow.toRow(r)
    case BoolElement(b) => b
  }

  /** Extracts a Spark [[Row]] when the cell is a struct; None for any other cell type. */
  def toStruct(c: Cell): Option[Row] = c match {
    case RowCell(r) => Some(AlgebraicRow.toRow(r))
    case _ => None
  }

  /**
   * Converts a raw Spark value together with its declared [[DataType]] into a cell.
   * Returns Failure when the value is incompatible with the declared type.
   */
  def from(x: Any, dt: DataType): Try[Cell] = (x, dt) match {
    // Nulls, etc.
    case (null, _) => Success(Empty)
    case (None, _) => Success(Empty)
    case (Some(null), _) => Success(Empty)
    case (Some(y), _) => from(y, dt)
    // Primitives
    case (i: Int, t: IntegerType) => Success(IntElement(i))
    case (i: Integer, t: IntegerType) => Success(IntElement(i))
    case (i: Double, t: DoubleType) => Success(DoubleElement(i))
    case (i: java.lang.Double, t: DoubleType) => Success(DoubleElement(i))
    // TODO: proper implementation of the long type
    // NOTE(review): longs are silently truncated to Int here; values outside
    // Int range will wrap — confirm this is acceptable upstream.
    case (i: Long, t: LongType) => Success(IntElement(i.toInt))
    // TODO: special case as long as we do not have long support
    case (i: Long, t: IntegerType) => Success(IntElement(i.toInt))
    case (i: java.lang.Long, t: IntegerType) => Success(IntElement(i.toInt))
    case (s: String, t: StringType) => Success(StringElement(s))
    case (b: Boolean, t: BooleanType) => Success(BoolElement(b))
    case (b: java.lang.Boolean, t: BooleanType) => Success(BoolElement(b))
    // Sequences
    case (a: Array[Any], _) => from(a.toSeq, dt)
    case (s: Seq[Any], t: ArrayType) =>
      sequence(s.map(from(_, t.elementType))).map(RowArray.apply)
    // Structures
    case (r: Row, t: StructType) => from(r.toSeq, t)
    case (s: Seq[Any], t: StructType) =>
      // Pair each element with its field's declared type, convert, then combine.
      val elts = s.zip(t.fields.map(_.dataType))
        .map { case (x2, dt2) => from(x2, dt2) }
      sequence(elts).map(ys => RowCell(AlgebraicRow(ys)))
    case _ => Failure(new Exception(s"Datatype $dt is not compatible with " +
      s"value type ${x.getClass}: $x"))
  }
}
tjhunter/karps
src/main/scala/org/karps/row/Cell.scala
Scala
apache-2.0
4,900
import java.io.File

import scala.util.Random

import org.broadinstitute.gatk.queue.QScript
import org.broadinstitute.gatk.queue.extensions.gatk._
import org.broadinstitute.gatk.queue.function.ListWriterFunction

/**
 * GATK Queue script performing local indel realignment on a single BAM file:
 * RealignerTargetCreator (finds candidate intervals) followed by IndelRealigner.
 *
 * NOTE(review): the class name contains a typo ("IndelRealignmet"); kept as-is
 * because renaming would change how the script is invoked.
 */
class IndelRealignmet extends QScript {
  // Self-type alias so nested traits/classes can refer to the outer script instance.
  qscript =>

  /****************************************************************************
  * Required Parameters
  *****************************************************************************/

  @Input(doc="The reference file for the bam files.", shortName="R", fullName="reference_sequence")
  var referenceFile: File = _ // _ is scala shorthand for null

  @Input(doc="Bam file to relalign", shortName="I", fullName="input_file")
  var myBam: File = _

  /****************************************************************************
  * Optional Parameters
  *****************************************************************************/

  @Argument(doc="scatter parameter", shortName="P", fullName="scatter_parameter", required=false)
  var scatter: Int = _

  @Argument(doc="nt parameter", shortName="N", fullName="num_threads", required=false)
  var nt: Int = _

  @Argument(doc="nct parameter", shortName="C", fullName="num_cpu_threads_per_data_thread", required=false)
  var nct: Int = _

  @Input(doc="dbSNP file", shortName="D", fullName="dbsnp", required=false)
  var dbsnp: File = _

  @Input(doc="Known in/del VCF file", shortName="known", required=false)
  var known: File = _

  @Input(doc="Intervals to realign", shortName="L", required = false)
  var intervals: File = _

  /****************************************************************************
  * Classes (GATK Walkers)
  *****************************************************************************/

  // Shared settings mixed into every GATK command in this script.
  trait CommonArguments extends CommandLineGATK {
    this.reference_sequence = qscript.referenceFile
    // Optional intervals: Nil means "whole genome".
    this.intervals = if (qscript.intervals == null) Nil else List(qscript.intervals)
    this.memoryLimit = 12
  }

  /****************************************************************************
  * Main script
  *****************************************************************************/

  def script() {
    val targetCreator = new RealignerTargetCreator with CommonArguments
    val indelRealigner = new IndelRealigner with CommonArguments

    targetCreator.input_file +:= qscript.myBam
    // Intervals file derives its name from the input BAM (".bam" -> ".intervals").
    targetCreator.out = swapExt(myBam, "bam", "intervals")
    targetCreator.nt = qscript.nt
    // NOTE(review): if --known is not supplied this prepends null to the list — verify
    // the walker tolerates that, or guard like `intervals` above.
    targetCreator.known +:= qscript.known

    indelRealigner.input_file +:= qscript.myBam
    indelRealigner.targetIntervals = targetCreator.out
    // Assumes the input is named *.dd.bam (deduplicated) — TODO confirm naming convention.
    indelRealigner.out = swapExt(myBam, "dd.bam", "dd.ir.bam")
    indelRealigner.known +:= qscript.known

    add(targetCreator, indelRealigner)
  }
}
RodrigoGM/mouseomics
DNAseq/q.indel.scala
Scala
bsd-3-clause
2,823
package models.dao

import play.api.db.slick.Profile

/** Join-table row linking a specimen to an image; `isMain` flags the primary photo. */
case class SpecimenPhotoRecord(id: Option[Long], specimenId: Long, imageId: Long, isMain: Boolean)

/**
 * Slick table component for the `specimen_photos` table.
 * Part of a cake-pattern DAO: the self-type requires the profile plus the
 * Specimen and Image components this table holds foreign keys into.
 */
trait SpecimenPhotoComponent {
  this: Profile with SpecimenComponent with ImageComponent =>

  // Referenced table objects, supplied by the mixed-in components.
  val Specimens: Specimens
  val Images: Images

  import profile.simple._

  class SpecimenPhotos extends Table[SpecimenPhotoRecord]("specimen_photos") {
    def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
    def specimenId = column[Long]("specimen_id", O.NotNull)
    def imageId = column[Long]("image_id", O.NotNull)
    def isMain = column[Boolean]("is_main", O.NotNull)

    def specimen = foreignKey("SpecimenPhotos_SpecimenFK", specimenId, Specimens)(_.id)
    def image = foreignKey("SpecimenPhotos_ImageFK", imageId, Images)(_.id)

    // Default projection mapping rows to SpecimenPhotoRecord; id is optional so
    // inserts can omit the auto-incremented key.
    def * = id.? ~ specimenId ~ imageId ~ isMain <> (SpecimenPhotoRecord, SpecimenPhotoRecord.unapply _)

    // Insert projection that returns the generated id.
    def autoInc = * returning id
  }
}
vokhotnikov/sevstone-play
app/models/dao/SpecimenPhoto.scala
Scala
mit
944
package com.twitter.inject.thrift.utils

import com.twitter.scrooge.ThriftMethod

import com.twitter.util.Memoize

/** Helpers for rendering [[ThriftMethod]] descriptors. */
object ThriftMethodUtils {

  /**
   * Memoized "serviceName.methodName" label for a thrift method.
   * Memoization avoids rebuilding the same string on every lookup.
   */
  val prettyStr = Memoize { method: ThriftMethod =>
    s"${method.serviceName}.${method.name}"
  }
}
syamantm/finatra
inject/inject-thrift/src/main/scala/com/twitter/inject/thrift/utils/ThriftMethodUtils.scala
Scala
apache-2.0
243
/**
 * Copyright 2013-2015 PayPal
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.paypal.cascade.common.tests.util.casts

import org.specs2._
import com.paypal.cascade.common.option._
import com.paypal.cascade.common.util.casts._
import org.scalacheck._
import org.scalacheck.Prop._
import org.scalacheck.Arbitrary.arbitrary
import com.paypal.cascade.common.tests.util.CommonImmutableSpecificationContext

/**
 * Tests for implicit casts in [[com.paypal.cascade.common.util.casts]]
 *
 * Each fragment below is backed by a ScalaCheck property; `cast` checks the
 * unconditional type-safe cast, `castIf` additionally applies a predicate.
 */
class CastSpecs extends Specification with ScalaCheck {
  override def is = s2"""
  The cast utilities provide a type safe way to perform casts without throwing exceptions.

  Cast an Any ${cast().any}
  Cast any primitive ${cast().primitive}
  Cast an Option[_] ${cast().option}
  Cast an Array[_] ${cast().array}
  Cast a Traversable[_] ${cast().traversable}
  Cast a List[_] ${cast().list}

  CastIf an Any ${castIf().any}
  CastIf any primitive ${castIf().primitive}
  CastIf an Option[_] ${castIf().option}
  CastIf an Array[_] ${castIf().array}
  CastIf a Traversable[_] ${castIf().traversable}
  CastIf a List[_] ${castIf().list}
  """

  // Properties for castIf: the cast succeeds only when both the runtime type
  // matches AND the supplied predicate returns true.
  case class castIf() extends CommonImmutableSpecificationContext {

    def any = apply {
      forAll(genFoo, genBar, arbitrary[String], arbitrary[Boolean], arbitrary[Boolean]) {
        (foo: Foo, bar: Bar, s: String, isNull: Boolean, bool: Boolean) =>
          // Exercise the null case explicitly: castIf on null must always be None.
          val f = if (isNull) null else foo
          val b = if (isNull) null else bar
          if (isNull) {
            (f.castIf[Bar](c => bool) must beNone) and
              (f.castIf[Foo](c => bool) must beNone) and
              (b.castIf[Bar](c => bool) must beNone) and
              (b.castIf[Foo](c => bool) must beNone)
          } else {
            (f.castIf[Bar](c => bool) must beNone) and
              (f.castIf[Foo](c => bool) must beLike { case Some(c) if bool => ok; case None if !bool => ok }) and
              (b.castIf[Bar](c => bool) must beLike { case Some(c) if bool => ok; case None if !bool => ok }) and
              (b.castIf[Foo](c => bool) must beNone)
          }
      }
    }

    // NOTE(review): in an immutable specs2 Specification only the LAST expression of
    // the property body becomes the result; the intermediate `must` expectations
    // below are evaluated but their outcomes are discarded — consider chaining
    // them with `and` as the other properties do. Confirm before changing.
    def primitive = apply {
      forAll(arbitrary[Boolean], arbitrary[Byte], arbitrary[Char], arbitrary[Short], arbitrary[Int],
        arbitrary[Long], arbitrary[Float], arbitrary[Double]) {
        (bool: Boolean, b: Byte, c: Char, s: Short, i: Int, l: Long, f: Float, d: Double) =>
          bool.castIf[Boolean](bool => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          b.castIf[Byte](b => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          c.castIf[Char](c => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          s.castIf[Short](s => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          i.castIf[Int](i => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          l.castIf[Long](l => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          f.castIf[Float](f => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
          d.castIf[Double](d => bool) must beLike { case Some(r) if bool => ok; case None if !bool => ok }
      }
    }

    def option = apply {
      forAll(genOptionFoo, genOptionBar, arbitrary[String], arbitrary[Boolean]) {
        (f: Option[Foo], b: Option[Bar], s: String, bool: Boolean) =>
          // Each combination of Some/None is checked against both target types.
          (f, b) match {
            case (Some(_), Some(_)) => {
              (f.castIf[Bar](c => bool) must beNone) and
                (f.castIf[Foo](c => bool) must beLike { case Some(c) if bool => ok; case None if !bool => ok }) and
                (b.castIf[Foo](c => bool) must beNone) and
                (b.castIf[Bar](c => bool) must beLike { case Some(c) if bool => ok; case None if !bool => ok })
            }
            case (Some(_), None) => {
              (f.castIf[Bar](c => bool) must beNone) and
                (f.castIf[Foo](c => bool) must beLike { case Some(c) if bool => ok; case None if !bool => ok }) and
                (b.castIf[Foo](c => bool) must beNone) and
                (b.castIf[Bar](c => bool) must beNone)
            }
            case (None, Some(_)) => {
              (f.castIf[Bar](c => bool) must beNone) and
                (f.castIf[Foo](c => bool) must beNone) and
                (b.castIf[Foo](c => bool) must beNone) and
                (b.castIf[Bar](c => bool) must beLike { case Some(c) if bool => ok; case None if !bool => ok })
            }
            case (None, None) => {
              (f.castIf[Bar](c => bool) must beNone) and
                (f.castIf[Foo](c => bool) must beNone) and
                (b.castIf[Foo](c => bool) must beNone) and
                (b.castIf[Bar](c => bool) must beNone)
            }
          }
      }
    }

    def array = apply {
      forAll(Gen.containerOf[Array, Int](arbitrary[Int]), Gen.containerOf[Array, String](arbitrary[String]),
        arbitrary[Int], arbitrary[String]) {
        (iArray: Array[Int], sArray: Array[String], i: Int, s: String) =>
          // Matching type keeps only elements passing the predicate; wrong type yields empty.
          (iArray.castIf[Int](c => c > i).forall(_ > i) must beTrue) and
            (sArray.castIf[String](c => c > s).forall(_ > s) must beTrue) and
            (iArray.castIf[String](c => c > s) must beEmpty) and
            (sArray.castIf[Int](c => c > i) must beEmpty)
      }
    }

    def traversable = apply {
      forAll(Gen.containerOf[Set, Foo](genFoo), Gen.containerOf[Set, Bar](genBar), arbitrary[String]) {
        (fSet: Set[Foo], bSet: Set[Bar], s: String) =>
          (fSet.castIf[Foo](c => c.msg > s).forall(_.msg > s) must beTrue) and
            (bSet.castIf[Bar](c => c.msg > s).forall(_.msg > s) must beTrue) and
            (fSet.castIf[Bar](c => c.msg > s) must beEmpty) and
            (bSet.castIf[Foo](c => c.msg > s) must beEmpty)
      }
    }

    def list = apply {
      forAll(Gen.listOf(genFoo), Gen.listOf(genBar), arbitrary[String]) {
        (fList: List[Foo], bList: List[Bar], s: String) =>
          (fList.castIf[Foo](c => c.msg > s).forall(_.msg > s) must beTrue) and
            (bList.castIf[Bar](c => c.msg > s).forall(_.msg > s) must beTrue) and
            (fList.castIf[Bar](c => c.msg > s) must beEmpty) and
            (bList.castIf[Foo](c => c.msg > s) must beEmpty)
      }
    }
  }

  // Properties for cast: succeeds iff the runtime type matches; null always yields None.
  case class cast() extends CommonImmutableSpecificationContext {

    def any = apply {
      forAll(genFoo, genBar, arbitrary[Boolean]) {
        (foo: Foo, bar: Bar, isNull: Boolean) =>
          val f = if (isNull) null else foo
          val b = if (isNull) null else bar
          if (isNull) {
            (f.cast[Bar] must beNone) and
              (f.cast[Foo] must beNone) and
              (b.cast[Bar] must beNone) and
              (b.cast[Foo] must beNone)
          } else {
            (f.cast[Bar] must beNone) and
              (f.cast[Foo] must beSome) and
              (b.cast[Bar] must beSome) and
              (b.cast[Foo] must beNone)
          }
      }
    }

    // NOTE(review): same discarded-expectation concern as castIf().primitive above —
    // only the final `d.cast[Double]` expectation determines the property's result.
    def primitive = apply {
      forAll(arbitrary[Boolean], arbitrary[Byte], arbitrary[Char], arbitrary[Short], arbitrary[Int],
        arbitrary[Long], arbitrary[Float], arbitrary[Double]) {
        (bool: Boolean, b: Byte, c: Char, s: Short, i: Int, l: Long, f: Float, d: Double) =>
          bool.cast[Boolean] must beSome
          b.cast[Byte] must beSome
          c.cast[Char] must beSome
          s.cast[Short] must beSome
          i.cast[Int] must beSome
          l.cast[Long] must beSome
          f.cast[Float] must beSome
          d.cast[Double] must beSome
      }
    }

    def option = apply {
      forAll(genOptionFoo, genOptionBar) {
        (f: Option[Foo], b: Option[Bar]) =>
          (f, b) match {
            case (Some(_), Some(_)) => {
              (f.cast[Bar] must beNone) and
                (f.cast[Foo] must beSome) and
                (b.cast[Foo] must beNone) and
                (b.cast[Bar] must beSome)
            }
            case (Some(_), None) => {
              (f.cast[Bar] must beNone) and
                (f.cast[Foo] must beSome) and
                (b.cast[Foo] must beNone) and
                (b.cast[Bar] must beNone)
            }
            case (None, Some(_)) => {
              (f.cast[Bar] must beNone) and
                (f.cast[Foo] must beNone) and
                (b.cast[Foo] must beNone) and
                (b.cast[Bar] must beSome)
            }
            case (None, None) => {
              (f.cast[Bar] must beNone) and
                (f.cast[Foo] must beNone) and
                (b.cast[Foo] must beNone) and
                (b.cast[Bar] must beNone)
            }
          }
      }
    }

    def array = apply {
      forAll(Gen.containerOf[Array, Int](arbitrary[Int]), Gen.containerOf[Array, String](arbitrary[String])) {
        (iArray: Array[Int], sArray: Array[String]) =>
          (iArray.cast[Int] must beEqualTo(iArray)) and
            (sArray.cast[String] must beEqualTo(sArray)) and
            (iArray.cast[String] must beEmpty) and
            (sArray.cast[Int] must beEmpty)
      }
    }

    def traversable = apply {
      forAll(Gen.containerOf[Set, Foo](genFoo), Gen.containerOf[Set, Bar](genBar)) {
        (fSet: Set[Foo], bSet: Set[Bar]) =>
          (fSet.cast[Foo] must containTheSameElementsAs(fSet.toSeq)) and
            (bSet.cast[Bar] must containTheSameElementsAs(bSet.toSeq)) and
            (fSet.cast[Bar] must beEmpty) and
            (bSet.cast[Foo] must beEmpty)
      }
    }

    def list = apply {
      forAll(Gen.listOf(genFoo), Gen.listOf(genBar)) {
        (fList: List[Foo], bList: List[Bar]) =>
          (fList.cast[Foo] must containTheSameElementsAs(fList)) and
            (bList.cast[Bar] must containTheSameElementsAs(bList)) and
            (fList.cast[Bar] must beEmpty) and
            (bList.cast[Foo] must beEmpty)
      }
    }
  }

  // Generators for the two unrelated fixture types used above.
  lazy val genFoo: Gen[Foo] = for(msg <- arbitrary[String]) yield Foo(msg)
  lazy val genBar: Gen[Bar] = for(msg <- arbitrary[String]) yield Bar(msg)
  lazy val genOptionFoo: Gen[Option[Foo]] = Gen.oneOf(for(msg <- arbitrary[String]) yield Foo(msg).some, Gen.const(None))
  lazy val genOptionBar: Gen[Option[Bar]] = Gen.oneOf(for(msg <- arbitrary[String]) yield Bar(msg).some, Gen.const(None))

  // Two structurally similar but unrelated types: casting one to the other must fail.
  case class Foo(msg: String = "foo")
  case class Bar(msg: String = "bar")
}
2rs2ts/cascade
common/src/test/scala/com/paypal/cascade/common/tests/util/casts/CastSpecs.scala
Scala
apache-2.0
11,354
package com.oni.web.dom

import io.udash.properties.{ModelPart, PropertyCreator, seq}

/**
 * Frontend wrapper for an `Sq` domain object, carrying only the fields
 * the UI binds to.
 */
case class SqW(id: String, desc: String, details: Option[String] = None)

object SqW {
  /** Builds a wrapper from a domain `Sq` instance, copying its fields verbatim. */
  def apply(sq: Sq): SqW = SqW(sq.id, sq.desc, sq.details)

  // Udash PropertyCreator so Seq[SqW] can back a SeqProperty in views.
  implicit val pc: PropertyCreator[Seq[SqW]] = PropertyCreator.propertyCreator[Seq[SqW]]
}
ObjectNirvana/oni-web
frontend/src/main/scala/com/oni/web/dom/Sq.scala
Scala
epl-1.0
345
/*
Copyright 2012 Twitter, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird

import algebra.ring.Rng
import com.twitter.algebird.BaseProperties._
import com.twitter.algebird.scalacheck.arbitrary._
import org.scalacheck.Prop.forAll

/** Property tests for the ring laws of [[AdjoinedUnit]]. */
class AdjoinedUnitRingLaws extends CheckProperties {

  // AdjoinedUnit requires this method to be correct, so it is tested here:
  // intTimes must agree with plain multiplication (checked on BigInt to avoid overflow).
  property("intTimes works correctly") {
    forAll((bi0: BigInt, bi1: BigInt) => Group.intTimes(bi0, bi1) == (bi0 * bi1))
  }

  // View any Ring as a Rng so ringLaws can resolve the weaker instance it needs.
  implicit def rng[T: Ring]: Rng[T] = implicitly[Ring[T]]

  property("AdjoinedUnit[Int] is a Ring")(ringLaws[AdjoinedUnit[Int]])

  property("AdjoinedUnit[Long] is a Ring")(ringLaws[AdjoinedUnit[Long]])
}
twitter/algebird
algebird-test/src/test/scala/com/twitter/algebird/AdJoinedUnitRingLaws.scala
Scala
apache-2.0
1,205
package nodes.nlp

import org.scalatest.FunSuite

import workflow.PipelineContext

/**
 * Verifies that the fused NGramsHashingTF node produces exactly the same
 * feature vector as running NGramsFeaturizer followed by HashingTF separately.
 */
class NGramsHashingTFSuite extends FunSuite with PipelineContext {

  // Shared input sentence with repeated tokens so term frequencies exceed 1.
  private val testDatum =
    "this sentence is a sentence is the some there some then there some".split(" ")

  /**
   * Asserts NGramsHashingTF(orders, dims) equals NGramsFeaturizer(orders) + HashingTF(dims)
   * on the shared datum. `dims` small enough to force collisions is also valid.
   */
  private def assertMatchesSeparateStages(orders: Range, dims: Int): Unit = {
    val ngrams = NGramsFeaturizer(orders).apply(testDatum)
    val tfVector = HashingTF(dims).apply(ngrams)
    val ngramsHashingTFVector = NGramsHashingTF(orders, dims).apply(testDatum)
    // Assert that the NGramsHashingTF node returns the same output as first getting n-grams then hashing
    assert(ngramsHashingTFVector === tfVector)
  }

  test("NGramsHashingTF 1 to 1") {
    assertMatchesSeparateStages(1 to 1, 40000)
  }

  test("NGramsHashingTF 1 to 3") {
    assertMatchesSeparateStages(1 to 3, 40000)
  }

  test("NGramsHashingTF 2 to 3") {
    assertMatchesSeparateStages(2 to 3, 40000)
  }

  test("NGramsHashingTF with collisions 1 to 3") {
    // dims = 6 guarantees hash collisions; both paths must collide identically.
    assertMatchesSeparateStages(1 to 3, 6)
  }
}
tomerk/keystone
src/test/scala/nodes/nlp/NGramsHashingTFSuite.scala
Scala
apache-2.0
2,179
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.amaterasu.leader.mesos.executors

import org.apache.amaterasu.common.logging.Logging
import org.apache.mesos.Protos._
import org.apache.mesos.{Executor, ExecutorDriver}

/**
 * Skeleton Mesos executor for running Amaterasu jobs.
 * All driver callbacks are currently no-ops; launchTask's task-decoding logic
 * is stubbed out (see the commented line in its body).
 */
object JobExecutor extends Executor with Logging {

  // Lifecycle/driver callbacks — intentionally empty: no executor-side state yet.
  override def shutdown(driver: ExecutorDriver): Unit = {}

  override def disconnected(driver: ExecutorDriver): Unit = {}

  override def killTask(driver: ExecutorDriver, taskId: TaskID): Unit = {}

  override def reregistered(driver: ExecutorDriver, slaveInfo: SlaveInfo): Unit = {}

  override def error(driver: ExecutorDriver, message: String): Unit = {}

  override def frameworkMessage(driver: ExecutorDriver, data: Array[Byte]): Unit = {}

  override def registered(driver: ExecutorDriver, executorInfo: ExecutorInfo, frameworkInfo: FrameworkInfo, slaveInfo: SlaveInfo): Unit = {}

  override def launchTask(driver: ExecutorDriver, task: TaskInfo): Unit = {
    //val data = mapper.readValue(task.getData.toStringUtf8, JobData.getClass)
  }

  def main(args: Array[String]) {
    // NOTE(review): args(0) is read but never used, and this throws
    // ArrayIndexOutOfBoundsException when no arguments are given — confirm intent.
    val repo = args(0)
  }
}
shintoio/amaterasu
leader/src/main/scala/org/apache/amaterasu/leader/mesos/executors/JobExecutor.scala
Scala
apache-2.0
1,860
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.dynamicpruning

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.internal.SQLConf

/**
 * Dynamic partition pruning optimization is performed based on the type and
 * selectivity of the join operation. During query optimization, we insert a
 * predicate on the partitioned table using the filter from the other side of
 * the join and a custom wrapper called DynamicPruning.
 *
 * The basic mechanism for DPP inserts a duplicated subquery with the filter from the other side,
 * when the following conditions are met:
 *    (1) the table to prune is partitioned by the JOIN key
 *    (2) the join operation is one of the following types: INNER, LEFT SEMI (partitioned on left),
 *    LEFT OUTER (partitioned on right), or RIGHT OUTER (partitioned on left)
 *
 * In order to enable partition pruning directly in broadcasts, we use a custom DynamicPruning
 * clause that incorporates the In clause with the subquery and the benefit estimation.
 * During query planning, when the join type is known, we use the following mechanism:
 *    (1) if the join is a broadcast hash join, we replace the duplicated subquery with the reused
 *    results of the broadcast,
 *    (2) else if the estimated benefit of partition pruning outweighs the overhead of running the
 *    subquery query twice, we keep the duplicated subquery
 *    (3) otherwise, we drop the subquery.
 */
object PartitionPruning extends Rule[LogicalPlan] with PredicateHelper {

  /**
   * Search the partitioned table scan for a given partition column in a logical plan
   */
  def getPartitionTableScan(a: Expression, plan: LogicalPlan): Option[LogicalRelation] = {
    // Trace the expression back to its source relation through the plan lineage.
    val srcInfo: Option[(Expression, LogicalPlan)] = findExpressionAndTrackLineageDown(a, plan)
    srcInfo.flatMap {
      case (resExp, l: LogicalRelation) =>
        l.relation match {
          case fs: HadoopFsRelation =>
            val partitionColumns = AttributeSet(
              l.resolve(fs.partitionSchema, fs.sparkSession.sessionState.analyzer.resolver))
            // Only useful when the traced expression references partition columns exclusively.
            if (resExp.references.subsetOf(partitionColumns)) {
              return Some(l)
            } else {
              None
            }
          case _ => None
        }
      case _ => None
    }
  }

  /**
   * Insert a dynamic partition pruning predicate on one side of the join using the filter on the
   * other side of the join.
   *  - to be able to identify this filter during query planning, we use a custom
   *    DynamicPruning expression that wraps a regular In expression
   *  - we also insert a flag that indicates if the subquery duplication is worthwhile and it
   *  should run regardless of the join strategy, or is too expensive and it should be run only if
   *  we can reuse the results of a broadcast
   */
  private def insertPredicate(
      pruningKey: Expression,
      pruningPlan: LogicalPlan,
      filteringKey: Expression,
      filteringPlan: LogicalPlan,
      joinKeys: Seq[Expression],
      hasBenefit: Boolean): LogicalPlan = {
    val reuseEnabled = SQLConf.get.dynamicPartitionPruningReuseBroadcast
    // Position of the filtering key among the join keys, used later to pick the
    // matching broadcast key.
    val index = joinKeys.indexOf(filteringKey)
    if (hasBenefit || reuseEnabled) {
      // insert a DynamicPruning wrapper to identify the subquery during query planning
      Filter(
        DynamicPruningSubquery(
          pruningKey,
          filteringPlan,
          joinKeys,
          index,
          !hasBenefit),
        pruningPlan)
    } else {
      // abort dynamic partition pruning
      pruningPlan
    }
  }

  /**
   * Given an estimated filtering ratio we assume the partition pruning has benefit if
   * the size in bytes of the partitioned plan after filtering is greater than the size
   * in bytes of the plan on the other side of the join. We estimate the filtering ratio
   * using column statistics if they are available, otherwise we use the config value of
   * `spark.sql.optimizer.joinFilterRatio`.
   */
  private def pruningHasBenefit(
      partExpr: Expression,
      partPlan: LogicalPlan,
      otherExpr: Expression,
      otherPlan: LogicalPlan): Boolean = {

    // get the distinct counts of an attribute for a given table
    def distinctCounts(attr: Attribute, plan: LogicalPlan): Option[BigInt] = {
      plan.stats.attributeStats.get(attr).flatMap(_.distinctCount)
    }

    // the default filtering ratio when CBO stats are missing, but there is a
    // predicate that is likely to be selective
    val fallbackRatio = SQLConf.get.dynamicPartitionPruningFallbackFilterRatio
    // the filtering ratio based on the type of the join condition and on the column statistics
    val filterRatio = (partExpr.references.toList, otherExpr.references.toList) match {
      // filter out expressions with more than one attribute on any side of the operator
      case (leftAttr :: Nil, rightAttr :: Nil)
        if SQLConf.get.dynamicPartitionPruningUseStats =>
          // get the CBO stats for each attribute in the join condition
          val partDistinctCount = distinctCounts(leftAttr, partPlan)
          val otherDistinctCount = distinctCounts(rightAttr, otherPlan)
          val availableStats = partDistinctCount.isDefined && partDistinctCount.get > 0 &&
            otherDistinctCount.isDefined
          if (!availableStats) {
            fallbackRatio
          } else if (partDistinctCount.get.toDouble <= otherDistinctCount.get.toDouble) {
            // there is likely an estimation error, so we fallback
            fallbackRatio
          } else {
            1 - otherDistinctCount.get.toDouble / partDistinctCount.get.toDouble
          }
      case _ => fallbackRatio
    }

    // the pruning overhead is the total size in bytes of all scan relations
    val overhead = otherPlan.collectLeaves().map(_.stats.sizeInBytes).sum.toFloat

    filterRatio * partPlan.stats.sizeInBytes.toFloat > overhead.toFloat
  }

  /**
   * Returns whether an expression is likely to be selective
   */
  private def isLikelySelective(e: Expression): Boolean = e match {
    case Not(expr) => isLikelySelective(expr)
    // AND is selective if either side is; OR needs both sides to be selective.
    case And(l, r) => isLikelySelective(l) || isLikelySelective(r)
    case Or(l, r) => isLikelySelective(l) && isLikelySelective(r)
    case Like(_, _) => true
    case _: BinaryComparison => true
    case _: In | _: InSet => true
    case _: StringPredicate => true
    case _ => false
  }

  /**
   * Search a filtering predicate in a given logical plan
   */
  private def hasSelectivePredicate(plan: LogicalPlan): Boolean = {
    plan.find {
      case f: Filter => isLikelySelective(f.condition)
      case _ => false
    }.isDefined
  }

  /**
   * To be able to prune partitions on a join key, the filtering side needs to
   * meet the following requirements:
   *   (1) it can not be a stream
   *   (2) it needs to contain a selective predicate used for filtering
   */
  private def hasPartitionPruningFilter(plan: LogicalPlan): Boolean = {
    !plan.isStreaming && hasSelectivePredicate(plan)
  }

  // Join types for which the LEFT side may be pruned (see the class doc above).
  private def canPruneLeft(joinType: JoinType): Boolean = joinType match {
    case Inner | LeftSemi | RightOuter => true
    case _ => false
  }

  // Join types for which the RIGHT side may be pruned.
  private def canPruneRight(joinType: JoinType): Boolean = joinType match {
    case Inner | LeftOuter => true
    case _ => false
  }

  // Walks the plan bottom-up and inserts DPP filters on eligible join sides.
  private def prune(plan: LogicalPlan): LogicalPlan = {
    plan transformUp {
      // skip this rule if there's already a DPP subquery on the LHS of a join
      case j @ Join(Filter(_: DynamicPruningSubquery, _), _, _, _, _) => j
      case j @ Join(_, Filter(_: DynamicPruningSubquery, _), _, _, _) => j

      case j @ Join(left, right, joinType, Some(condition), hint) =>
        var newLeft = left
        var newRight = right

        // extract the left and right keys of the join condition
        val (leftKeys, rightKeys) = j match {
          case ExtractEquiJoinKeys(_, lkeys, rkeys, _, _, _, _) => (lkeys, rkeys)
          case _ => (Nil, Nil)
        }

        // checks if two expressions are on opposite sides of the join
        def fromDifferentSides(x: Expression, y: Expression): Boolean = {
          def fromLeftRight(x: Expression, y: Expression) =
            !x.references.isEmpty && x.references.subsetOf(left.outputSet) &&
              !y.references.isEmpty && y.references.subsetOf(right.outputSet)
          fromLeftRight(x, y) || fromLeftRight(y, x)
        }

        splitConjunctivePredicates(condition).foreach {
          case EqualTo(a: Expression, b: Expression) if fromDifferentSides(a, b) =>
            // Normalize so `l` belongs to the left side and `r` to the right.
            val (l, r) = if (a.references.subsetOf(left.outputSet) &&
              b.references.subsetOf(right.outputSet)) {
              a -> b
            } else {
              b -> a
            }

            // there should be a partitioned table and a filter on the dimension table,
            // otherwise the pruning will not trigger
            var partScan = getPartitionTableScan(l, left)
            if (partScan.isDefined && canPruneLeft(joinType) &&
                hasPartitionPruningFilter(right)) {
              val hasBenefit = pruningHasBenefit(l, partScan.get, r, right)
              newLeft = insertPredicate(l, newLeft, r, right, rightKeys, hasBenefit)
            } else {
              // try the other way around: prune the right side with the left
              partScan = getPartitionTableScan(r, right)
              if (partScan.isDefined && canPruneRight(joinType) &&
                  hasPartitionPruningFilter(left) ) {
                val hasBenefit = pruningHasBenefit(r, partScan.get, l, left)
                newRight = insertPredicate(r, newRight, l, left, leftKeys, hasBenefit)
              }
            }
          case _ =>
        }

        Join(newLeft, newRight, joinType, Some(condition), hint)
    }
  }

  override def apply(plan: LogicalPlan): LogicalPlan = plan match {
    // Do not rewrite subqueries.
    case _: Subquery => plan
    case _ if !SQLConf.get.dynamicPartitionPruningEnabled => plan
    case _ => prune(plan)
  }
}
caneGuy/spark
sql/core/src/main/scala/org/apache/spark/sql/dynamicpruning/PartitionPruning.scala
Scala
apache-2.0
11,023
/**
 * Copyright 2015 Frank Austin Nothaft
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.akmorrow13.endive

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.bdgenomics.adam.serialization.AvroSerializer
import org.bdgenomics.formats.avro.{ Genotype, Feature }

/**
 * Kryo registrator for Spark serialization in the Endive project.
 *
 * NOTE(review): registerClasses is currently a no-op even though
 * AvroSerializer, Genotype and Feature are imported above — presumably these
 * classes were meant to be registered here (e.g. with AvroSerializer
 * instances). Confirm the intent before removing the unused imports.
 */
class EndiveKryoRegistrator extends KryoRegistrator {
  // Explicit `: Unit =` replaces the deprecated Scala procedure syntax
  // (`def f() { }`), which is removed in Scala 3.
  override def registerClasses(kryo: Kryo): Unit = {
  }
}
akmorrow13/endive
src/main/scala/net/akmorrow13/endive/EndiveKryoRegistrator.scala
Scala
apache-2.0
944
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka

import consumer.ConsumerConfig
import utils.ZkUtils
import org.I0Itec.zkclient.ZkClient
import org.apache.kafka.common.utils.Utils

/**
 * Command-line utility that recursively deletes a ZooKeeper path.
 *
 * Usage: DeleteZKPath consumer.properties zk_path
 * The first argument is a consumer properties file providing the ZooKeeper
 * connection settings; the second is the path to delete.
 */
object DeleteZKPath {
  def main(args: Array[String]): Unit = {
    if (args.length < 2) {
      println("USAGE: " + DeleteZKPath.getClass.getName + " consumer.properties zk_path")
      System.exit(1)
    }

    // ZooKeeper connect string and timeouts come from the consumer config file.
    val config = new ConsumerConfig(Utils.loadProps(args(0)))
    val zkPath = args(1)
    val zkClient = ZkUtils.createZkClient(config.zkConnect, config.zkSessionTimeoutMs,
      config.zkConnectionTimeoutMs)

    try {
      ZkUtils.deletePathRecursive(zkClient, zkPath)
      System.out.println(zkPath + " is deleted")
    } catch {
      case e: Exception =>
        // BUG FIX: the original code concatenated e.printStackTrace() — which
        // returns Unit — into the message, printing "Path not deleted ()" and
        // sending the stack trace to stdout. Print the exception in the message
        // and direct the full stack trace to stderr instead.
        System.err.println("Path not deleted: " + e)
        e.printStackTrace(System.err)
    }
  }
}
usakey/kafka
core/src/test/scala/other/kafka/DeleteZKPath.scala
Scala
apache-2.0
1,588
/***********************************************************************
 * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/
package org.locationtech.geomesa.process.analytic

import org.geotools.data.collection.ListFeatureCollection
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

/**
 * Specs for MinMaxProcess: verifies min/max/cardinality stats computed over a
 * small in-memory feature collection, for both date and string attributes.
 */
@RunWith(classOf[JUnitRunner])
class MinMaxProcessTest extends Specification {

  // Process under test and the schema shared by every fixture feature.
  val process = new MinMaxProcess
  val sft = SimpleFeatureTypes.createType("stats", "track:String,dtg:Date,*geom:Point:srid=4326")
  val fc = new ListFeatureCollection(sft)

  // Ten features: track alternates between t-0 and t-1, while the timestamp
  // seconds and the point latitude both increase with the index.
  val features = Seq.tabulate(10) { i =>
    val feature = new ScalaSimpleFeature(sft, i.toString)
    feature.setAttribute(0, s"t-${i % 2}")
    feature.setAttribute(1, s"2017-05-24T00:00:0$i.000Z")
    feature.setAttribute(2, s"POINT(45 5$i)")
    feature
  }

  // Load the fixtures once before the examples run.
  step {
    features.foreach(fc.add)
  }

  "MinMaxProcess" should {
    "manually visit a feature collection for dates" in {
      val stats = SelfClosingIterator(process.execute(fc, "dtg", null).features).toSeq
      stats must haveLength(1)
      stats.head.getAttribute(0) mustEqual
          """{"min":"2017-05-24T00:00:00.000Z","max":"2017-05-24T00:00:09.000Z","cardinality":10}"""
    }
    "manually visit a feature collection for strings" in {
      val stats = SelfClosingIterator(process.execute(fc, "track", null).features).toSeq
      stats must haveLength(1)
      stats.head.getAttribute(0) mustEqual """{"min":"t-0","max":"t-1","cardinality":2}"""
    }
  }
}
elahrvivaz/geomesa
geomesa-process/geomesa-process-vector/src/test/scala/org/locationtech/geomesa/process/analytic/MinMaxProcessTest.scala
Scala
apache-2.0
2,047
package com.inocybe.pfm.template.apis

import java.util.UUID

import akka.actor.ActorSystem
import akka.cluster.singleton.{ClusterSingletonProxy, ClusterSingletonProxySettings}
import akka.http.scaladsl.model.{StatusCodes, HttpEntity, HttpResponse}
import akka.http.scaladsl.model.MediaTypes._
import akka.http.scaladsl.server.{StandardRoute, Directives}
import akka.pattern.ask
import akka.util.Timeout
import com.inocybe.pfm.template.apis.SampleService.SampleServiceResponse
import com.inocybe.pfm.template.model.Work

import scala.concurrent.duration._
import scala.util.{Failure, Random, Success, Try}
import com.inocybe.pfm.template.model.JsonProtocol._
import spray.json._

object SampleService {
  /** Marker for actor replies that can render themselves as an HTTP response. */
  trait SampleServiceResponse {
    def marshal(): HttpResponse
  }

  /** Successful reply carrying a single [[Work]] item, rendered as JSON. */
  case class ItemInfo(item: Work) extends SampleServiceResponse {
    def marshal() = HttpResponse(StatusCodes.OK,
      entity = HttpEntity(`application/json`, item.toJson.toString))
  }
}

/**
 * HTTP endpoints under /sample:
 *  - GET  /sample/work : returns a randomly generated Work item
 *  - POST /sample/work : forwards the posted Work to the cluster-singleton master
 *                        and completes with the master's reply
 */
class SampleService(system: ActorSystem) extends Directives {

  def route = pathPrefix("sample") {
    getWork ~ postWork
  }

  val context = system.dispatcher
  // Ask timeout for requests forwarded to the master.
  implicit val timeout = Timeout(5.seconds)

  // Proxy to the cluster singleton at /user/master on a "backend" role node.
  val masterProxy = system.actorOf(
    ClusterSingletonProxy.props(
      settings = ClusterSingletonProxySettings(system).withRole("backend"),
      singletonManagerPath = "/user/master"
    ),
    name = "masterProxy")

  def getWork = path("work") {
    get {
      complete(Work(UUID.randomUUID.toString, Random.nextInt()))
    }
  }

  def postWork = {
    path("work") {
      post {
        entity(as[Work]) { obj =>
          onComplete(masterProxy ? obj) { futureHandler }
        }
      }
    }
  }

  // BUG FIX: the original partial function only matched
  // Success(SampleServiceResponse); an ask timeout (Failure) or any other reply
  // type threw a MatchError inside onComplete, surfacing as an unhandled
  // exception instead of a controlled response. Cover Failure and unexpected
  // Success replies explicitly.
  val futureHandler: PartialFunction[Try[Any], StandardRoute] = {
    case Success(response: SampleServiceResponse) =>
      complete(response.marshal())
    case Success(other) =>
      complete(HttpResponse(StatusCodes.InternalServerError,
        entity = HttpEntity(`application/json`,
          s"""{"error":"unexpected reply: ${other.getClass.getName}"}""")))
    case Failure(e) =>
      complete(HttpResponse(StatusCodes.InternalServerError,
        entity = HttpEntity(`application/json`,
          s"""{"error":"${e.getMessage}"}""")))
  }
}
botekchristophe/akka-http-cluster-remote
src/main/scala/com/inocybe/pfm/template/apis/SampleService.scala
Scala
mit
1,869