code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.socrata.soql
import com.socrata.soql.ast.{Select, UDF}
import com.socrata.soql.exceptions.SoQLException
import com.socrata.soql.types._
import environment.{ColumnName, DatasetContext, TableName, HoleName}
import com.socrata.soql.functions.{SoQLFunctionInfo, SoQLTypeInfo}
import com.rojoma.json.v3.util.JsonUtil
import com.socrata.soql.parsing.{Parser, AbstractParser}
import scala.io.StdIn.readLine
object SoqlToy extends (Array[String] => Unit) {

  /** Prints `msg` to stderr and terminates the JVM with exit status 1. */
  def fail(msg: String) = {
    System.err.println(msg)
    sys.exit(1)
  }

  /**
   * Dataset context for the primary table: a fixed demo schema covering the
   * common SoQL column types (system columns, text, number, timestamps, ...).
   */
  implicit val datasetCtx = Map(TableName.PrimaryTable.qualifier -> new DatasetContext[SoQLType] {
    private implicit def ctx = this

    val locale = com.ibm.icu.util.ULocale.ENGLISH

    val schema = com.socrata.soql.collection.OrderedMap(
      ColumnName(":id") -> SoQLID,
      ColumnName(":updated_at") -> SoQLFixedTimestamp,
      ColumnName(":created_at") -> SoQLFixedTimestamp,
      ColumnName(":version") -> SoQLVersion,
      ColumnName("name_last") -> SoQLText,
      ColumnName("name_first") -> SoQLText,
      ColumnName("visits") -> SoQLNumber,
      ColumnName("last_visit") -> SoQLFixedTimestamp,
      ColumnName("address") -> SoQLLocation,
      ColumnName("balance") -> SoQLMoney,
      ColumnName("object") -> SoQLObject,
      ColumnName("array") -> SoQLArray,
      ColumnName("dbl") -> SoQLDouble,
      ColumnName(":@meta") -> SoQLObject
    )
  })

  /** Prints the columns of the demo dataset. */
  def menu(): Unit = {
    println("Columns:")
    Util.printList(datasetCtx(TableName.PrimaryTable.qualifier).schema)
  }

  /**
   * Interactive read-eval-print loop: each input line is parsed as SoQL,
   * join-function calls are rewritten against the demo UDFs, and the analyzed
   * query is printed. `?` re-prints the column menu; `exit`/`quit` or EOF
   * (null from readLine) terminates.
   */
  def apply(args: Array[String]): Unit = {
    menu()

    val analyzer = new SoQLAnalyzer(SoQLTypeInfo, SoQLFunctionInfo)

    // Demo UDFs usable via join functions; their bodies are parsed with
    // parameter holes (?name) enabled.
    val stored_procs = Map(
      TableName("_is_admin") -> UDF(
        arguments = Seq(HoleName("fn") -> SoQLText.name,
          HoleName("ln") -> SoQLText.name),
        body = new Parser(AbstractParser.defaultParameters.copy(allowHoles = true)).binaryTreeSelect("select 1 where ?fn = 'Adam' and ?ln = 'Admin'")
      ),
      TableName("_positive_balance") -> UDF(
        arguments = Seq(HoleName("balance") -> SoQLNumber.name),
        body = new Parser(AbstractParser.defaultParameters.copy(allowHoles = true)).binaryTreeSelect("select 1 where ?balance > 0")
      )
    )

    while(true) {
      val selection = readLine("> ")
      if(selection == null) return; // EOF: stop the REPL
      if(selection == "?") {
        menu()
      } else if(selection == "exit" || selection == "quit") {
        return
      } else {
        try {
          val parsed = new Parser(AbstractParser.defaultParameters.copy(allowJoinFunctions = true)).binaryTreeSelect(selection)
          val substituted = Select.rewriteJoinFuncs(parsed, stored_procs)
          println(substituted)
          val analyses = analyzer.analyzeFullQuery(substituted.toString)
          println("Outputs:")
          analyses.seq.foreach { analysis =>
            Util.printList(analysis.selection)
            analysis.where.foreach { w =>
              println("where:\n " + w)
            }
            println(Select.itrToString("group bys:\n", analysis.groupBys))
            println(Select.itrToString("having:\n", analysis.having))
            val obs = analysis.orderBys
            if (obs.nonEmpty) {
              println("order bys:")
              // foreach, not map: executed purely for the printing side effect.
              obs.foreach { ob =>
                println(" " + ob.expression + " (" + (if (ob.ascending) "ascending" else "descending") + ", nulls " + (if (ob.nullLast) "last" else "first") + ")")
              }
            }
            println("has aggregates: " + analysis.isGrouped)
          }
        } catch {
          case e: SoQLException =>
            println(e.getMessage)
            println(JsonUtil.renderJson(e, pretty = true))
            // Round-trip the exception through JSON as a serialization sanity check.
            println(JsonUtil.parseJson[SoQLException](JsonUtil.renderJson(e)))
        }
      }
    }
  }
}
| socrata-platform/soql-reference | soql-toy/src/main/scala/com/socrata/soql/SoqlToy.scala | Scala | apache-2.0 | 3,832 |
import PhotoMoney.{SendMoneyTextUsd, SendMoneyText, AddressRequest, BalanceRequest}
import org.bitcoinj.core.{Coin, Address}
import org.bitcoinj.params.MainNetParams
import org.specs2.mutable._
import QueryUnderstand.QueryUnderstand
class QueryUnderstandTest extends Specification with org.specs2.mutable.Tables {
  "QueryUnderstand" should {
    // Address literal reused by the send-command examples below.
    val addr = "1Archive1n2C579dMsAu3iC6tWzuQJz8dN"
    val address = new Address(new MainNetParams(), addr)

    // Balance/address aliases (with surrounding whitespace, case and
    // punctuation noise) must all decode to the corresponding request.
    "understand these valid bal/addr commands" in {
      "Command" | "Expected Result" |
        "bal " ! BalanceRequest() |
        "b " ! BalanceRequest() |
        "balance " ! BalanceRequest() |
        "address " ! AddressRequest() |
        "ad " ! AddressRequest() |
        " Address! " ! AddressRequest() |
        "addr " ! AddressRequest() |> { (command, result) =>
        QueryUnderstand.decodeQuery(command) must beRight(result)
      }
    }

    // Send commands: the amount and the address may appear in either order,
    // with several denominations (ubtc, mbtc, btc, satoshi, $, ¢, usd),
    // thousands separators and decimal amounts.
    "understand these valid send commands" in {
      "Command" | "Expected Result" |
        s" send 1 ubtc $addr " ! SendMoneyText(address, Coin.MICROCOIN) |
        s" send 1ubtc $addr " ! SendMoneyText(address, Coin.MICROCOIN) |
        s" send $addr 1ubtc " ! SendMoneyText(address, Coin.MICROCOIN) |
        s" send $addr 1 mbtc " ! SendMoneyText(address, Coin.MILLICOIN) |
        s" send $addr 1$$ " ! SendMoneyTextUsd(address, 100) |
        s" send $addr 1¢ " ! SendMoneyTextUsd(address, 1) |
        s" send $addr 1 usd " ! SendMoneyTextUsd(address, 100) |
        s" send $addr 10.01 usd " ! SendMoneyTextUsd(address, 1001) |
        s" send $addr 1,000 mbtc" ! SendMoneyText(address, Coin.MILLICOIN.multiply(1000)) |
        s" send $addr 2 mbtc " ! SendMoneyText(address, Coin.MILLICOIN.multiply(2)) |
        s" send $addr 1.5 mbtc " ! SendMoneyText(address, Coin.MILLICOIN.multiply(3).divide(2)) |
        s" $addr 1ubtc " ! SendMoneyText(address, Coin.MICROCOIN) |
        s" $addr 1 btc " ! SendMoneyText(address, Coin.COIN) |
        s" $addr 1 satoshi " ! SendMoneyText(address, Coin.SATOSHI) |
        "addr " ! AddressRequest() |> { (command, result) =>
        QueryUnderstand.decodeQuery(command) must beRight(result)
      }
    }

    // Rejections: empty input, wrong-case addresses, amounts without a unit,
    // and plain chatter must all decode to Left.
    "reject these commands" in {
      "" ::
        s"send ${addr.toLowerCase} 1 btc" ::
        s"send $addr 1.0 " ::
        " oopsie" ::
        "oops wrong person" :: Nil map { command:String =>
        QueryUnderstand.decodeQuery(command) must beLeft
      }
    }
  }
} | asm-products/snapcoin-net | src/test/scala/QueryUnderstandTest.scala | Scala | agpl-3.0 | 2,871 |
/*
* Copyright 2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
package record
package field
import org.bson.types.ObjectId
import org.specs2.mutable.Specification
import net.liftweb.common._
import net.liftweb.json.ext.EnumSerializer
import net.liftweb.record.field.{EnumField, OptionalEnumField}
import net.liftweb.util.Helpers._
import com.mongodb._
package enumfieldspecs {

  /** Sample enumeration used to exercise EnumField serialization. */
  object WeekDay extends Enumeration {
    type WeekDay = Value
    val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
  }

  /** JSON object wrapper holding a single enum value. */
  case class JsonObj(dow: WeekDay.WeekDay) extends JsonObject[JsonObj] {
    def meta = JsonObj
  }
  object JsonObj extends JsonObjectMeta[JsonObj]

  /** Record under test: mandatory, optional and JSON-embedded enum fields. */
  class EnumRec extends MongoRecord[EnumRec] with MongoId[EnumRec] {
    def meta = EnumRec

    object dow extends EnumField(this, WeekDay)
    object dowOptional extends OptionalEnumField(this, WeekDay)
    object jsonobj extends JsonObjectField[EnumRec, JsonObj](this, JsonObj) {
      def defaultValue = JsonObj(WeekDay.Mon)
    }

    // Equality compares the id and the three field values so saved/loaded
    // records can be checked with mustEqual in the spec.
    override def equals(other: Any): Boolean = other match {
      case that: EnumRec =>
        this.id == that.id &&
        this.dow.value == that.dow.value &&
        this.dowOptional.valueBox == that.dowOptional.valueBox &&
        this.jsonobj.value == that.jsonobj.value
      case _ => false
    }

    // Keep hashCode consistent with the overridden equals above: records that
    // compare equal must hash alike (Object contract), e.g. as map/set keys.
    override def hashCode: Int =
      (id, dow.value, dowOptional.valueBox, jsonobj.value).hashCode
  }

  object EnumRec extends EnumRec with MongoMetaRecord[EnumRec] {
    override def collectionName = "enumrecs"
    // Register the enum serializer so WeekDay values round-trip through JSON.
    override def formats = super.formats + new EnumSerializer(WeekDay)
  }
}
/**
 * Systems under specification for EnumField.
 */
class EnumFieldSpec extends Specification with MongoTestKit {
  "EnumFieldSpec Specification".title

  import enumfieldspecs._

  "EnumField" should {
    "work with default values" in {
      checkMongoIsRunning

      // A record saved without touching any field keeps the declared defaults.
      val er = EnumRec.createRecord.save

      val erFromDb = EnumRec.find(er.id)
      erFromDb.isDefined must_== true
      erFromDb.toList map { er2 =>
        er2 mustEqual er
        er2.dow.value mustEqual WeekDay.Mon
        er2.dowOptional.valueBox mustEqual Empty
        er2.jsonobj.value mustEqual JsonObj(WeekDay.Mon)
      }
    }
    "work with set values" in {
      checkMongoIsRunning

      // Explicitly-set enum values must survive a save/find round trip.
      val er = EnumRec.createRecord
        .dow(WeekDay.Tue)
        .jsonobj(JsonObj(WeekDay.Sun))
        .save

      val erFromDb = EnumRec.find(er.id)
      erFromDb.isDefined must_== true
      erFromDb.toList map { er2 =>
        er2 mustEqual er
        er2.dow.value mustEqual WeekDay.Tue
        er2.jsonobj.value mustEqual JsonObj(WeekDay.Sun)
      }
    }
    "work with Empty optional values" in {
      checkMongoIsRunning

      val er = EnumRec.createRecord
      er.dowOptional.setBox(Empty)
      er.save

      val erFromDb = EnumRec.find(er.id)
      erFromDb.isDefined must_== true
      erFromDb.toList map { er2 =>
        er2 mustEqual er
        er2.dowOptional.valueBox mustEqual Empty
      }
    }
    "work with Full optional values" in {
      checkMongoIsRunning

      val er = EnumRec.createRecord
      er.dowOptional.setBox(Full(WeekDay.Sat))
      er.save

      val erFromDb = EnumRec.find(er.id)
      erFromDb.isDefined must_== true
      erFromDb.toList map { er2 =>
        er2 mustEqual er
        er2.dowOptional.valueBox mustEqual Full(WeekDay.Sat)
      }
    }
  }
}
| pbrant/framework | persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumFieldSpec.scala | Scala | apache-2.0 | 3,858 |
package com.github.verhoevenv.fiasco
import com.github.verhoevenv.fiasco.transform.json.CategoryJsonProtocol._
import com.github.verhoevenv.fiasco.route.AllRoutes
import com.github.verhoevenv.fiasco.transform.json.JsonCategory
import org.specs2.mutable.Specification
import spray.http._
import spray.json._
import spray.testkit.Specs2RouteTest
class CategoryServiceSpec extends Specification with Specs2RouteTest with AllRoutes {
  // spray-testkit requires an ActorRefFactory; reuse the test kit's system.
  def actorRefFactory = system

  "CategoryService" should {
    "return a list of categories when queried without id" in {
      Get("/v1/categories") ~> allRoutes ~> check {
        val categories = responseAs[List[JsonCategory]]
        categories.size should be greaterThan 1
      }
    }
    "return a single category when queried with a known id" in {
      Get("/v1/categories/1") ~> allRoutes ~> check {
        val category = responseAs[JsonCategory]
        category.id must beEqualTo(1)
      }
    }
    "not handle a category with an unknown id" in {
      Get("/v1/categories/1337") ~> allRoutes ~> check {
        status must be(StatusCodes.BadRequest)
      }
    }
  }

  // JSON decoders so responseAs[...] above can convert raw HTTP entities.
  implicit def HttpEntityToCategory(httpEntity: HttpEntity): JsonCategory =
    httpEntity.asString(HttpCharsets.`UTF-8`).parseJson.convertTo[JsonCategory]

  implicit def HttpEntityToListOfCategories(httpEntity: HttpEntity): List[JsonCategory] =
    httpEntity.asString(HttpCharsets.`UTF-8`).parseJson.convertTo[List[JsonCategory]]
}
| verhoevenv/fiasco | src/test/scala/com/github/verhoevenv/fiasco/CategoryServiceSpec.scala | Scala | mit | 1,461 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package noop.model;
import collection.mutable.{ArrayBuffer, Buffer};
/**
* Represents the declaration of a method in source code.
*
* @author alexeagle@google.com (Alex Eagle)
* @author tocman@gmail.com (Jeremie Lenfant-Engelmann)
*/
class Method(val name: String, val block: Block, val documentation: String) {

  /** Declared return types, in source order. */
  val returnTypes: Buffer[String] = new ArrayBuffer[String]

  /** Formal parameters, in source order. */
  val parameters: Buffer[Parameter] = new ArrayBuffer[Parameter]

  /** Modifiers applied to this method declaration. */
  val modifiers: Buffer[Modifier.Value] = new ArrayBuffer[Modifier.Value]

  override def toString() = s"Method $name ($parameters) returns $returnTypes"
}
| SeaUrchinBot/noop | core/src/main/scala/noop/model/Method.scala | Scala | apache-2.0 | 1,232 |
package dotty.tools.dotc.util
import reflect.ClassTag
/** A least-recently-used cache for Key -> Value computations
* It currently keeps the last 8 associations, but this can be
* changed to anywhere between 2 and 16 by changing `LRUCache.Retained`.
*
* Implementation: We keep a ring of eight places, linked
* with the `next` data structure. The ring models a priority queue.
* `last` points to the last element of the queue, and
* `next(last)` to the first one. Lookups compare keys
* sequentially from first to last. Elements with successful lookup
* get promoted to be first in the queue. Elements are evicted
* at the `last` position.
*/
class LRUCache[Key >: Null : ClassTag, Value >: Null: ClassTag] {
  import LRUCache._

  // Parallel arrays: slot i of `keys` pairs with slot i of `values`.
  val keys = new Array[Key](Retained)
  val values = new Array[Value](Retained)
  // Successor pointers of the priority ring, packed four bits per slot.
  var next = new SixteenNibbles(initialRing.bits)
  var last = Retained - 1 // value is arbitrary
  var lastButOne: Int = last - 1

  // The highest-priority slot is the successor of `last` in the ring.
  def first = next(last)

  /** Lookup key, returning value or `null` for not found.
   *  As a side effect, sets `lastButOne` to the element before `last`
   *  if key was not found.
   */
  def lookup(key: Key): Value = {
    // Walks the ring from `first`, promoting a hit to the front.
    def lookupNext(prev: Int, current: Int, nx: SixteenNibbles): Value = {
      val follow = nx(current)
      if (keys(current) == key) {
        // arrange so that found element is at position `first`.
        if (current == last) last = prev
        else if (prev != last) {
          // Splice `current` out of its place and relink it right after `last`.
          next = next.updated(prev, follow)
          next = next.updated(current, first)
          next = next.updated(last, current)
        }
        values(current)
      } else if (current == last) {
        // Miss: remember the second-to-last slot so a following `enter`
        // can place the new element at the front.
        lastButOne = prev
        null
      } else
        lookupNext(current, follow, nx)
    }
    lookupNext(last, first, next)
  }

  /** Enter key/value in cache at position `last`.
   *  As a side effect, sets `last` to `lastButOne`.
   *  If `lastButOne` was set by a preceding unsuccessful `lookup`
   *  for the same key, this means that the new element is now the
   *  first in the queue. If there was no preceding lookup, the element
   *  is inserted at a random position in the queue.
   */
  def enter(key: Key, value: Value): Unit = {
    keys(last) = key
    values(last) = value
    last = lastButOne
  }

  /** Invalidate key. The invalidated element becomes
   *  the last in the queue.
   */
  def invalidate(key: Key): Unit =
    if (lookup(key) != null) {
      // lookup promoted the entry to `first`; clearing its key and making it
      // `last` turns it into the next eviction candidate.
      keys(first) = null
      last = first
    }

  // Slot indices in priority order, starting from the highest-priority slot.
  def indices: Iterator[Int] = Iterator.iterate(first)(next.apply)

  // Keys in priority order; empty slots (null keys) are skipped.
  def keysIterator: Iterator[Key] =
    indices take Retained map keys filter (_ != null)

  override def toString = {
    val assocs = keysIterator
      .toList // double reverse so that lookups do not perturb order
      .reverse
      .map(key => s"$key -> ${lookup(key)}")
      .reverse
    s"LRUCache(${assocs.mkString(", ")})"
  }
}
object LRUCache {

  /** The number of retained elements in the cache; must be at most 16. */
  val Retained = 8

  /** The initial ring: 0 -> 1 -> ... -> 7 -> 0 */
  val initialRing =
    // foldLeft instead of the deprecated symbolic alias `/:`.
    (0 until Retained).foldLeft(new SixteenNibbles(0L))((nibbles, idx) =>
      nibbles.updated(idx, (idx + 1) % Retained))
}
| magarciaEPFL/dotty | src/dotty/tools/dotc/util/LRUCache.scala | Scala | bsd-3-clause | 3,219 |
import scalajs.js
import slogging._
object App extends LazyLogging {
  /** Entry point: route slogging output to the "log" HTTP endpoint, then emit a greeting. */
  def main(args: Array[String]): Unit = {
    // Console alternative for local debugging: LoggerConfig.factory = PrintLoggerFactory()
    LoggerConfig.factory = HttpLoggerFactory("log")
    logger.info("hello")
  }
}
| jokade/slogging | test/web/src/main/scala/App.scala | Scala | mit | 248 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala.typeutils
import org.apache.flink.api.common.typeutils.TypeInformationTestBase
/**
* Test for [[UnitTypeInfo]].
*/
class UnitTypeInfoTest extends TypeInformationTestBase[UnitTypeInfo] {
  // A single fresh instance suffices for the base-class contract checks.
  override protected def getTestData: Array[UnitTypeInfo] = Array(new UnitTypeInfo)
}
| tzulitai/flink | flink-scala/src/test/scala/org/apache/flink/api/scala/typeutils/UnitTypeInfoTest.scala | Scala | apache-2.0 | 1,129 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import scala.collection.JavaConversions._
import org.apache.hadoop.hive.metastore.api.FieldSchema
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.execution.RunnableCommand
import org.apache.spark.sql.hive.MetastoreRelation
import org.apache.spark.sql.{Row, SQLContext}
/**
 * Implementation for "describe [extended] table".
 *
 * Produces one Row per column with (name, data type, comment), mimicking
 * (but not exactly matching) Hive's own output format.
 */
private[hive]
case class DescribeHiveTableCommand(
    table: MetastoreRelation,
    override val output: Seq[Attribute],
    isExtended: Boolean) extends RunnableCommand {

  override def run(sqlContext: SQLContext): Seq[Row] = {
    // Trying to mimic the format of Hive's output. But not exactly the same.
    var results: Seq[(String, String, String)] = Nil

    // Data columns first, then (optionally) partition columns.
    val columns: Seq[FieldSchema] = table.hiveQlTable.getCols
    val partitionColumns: Seq[FieldSchema] = table.hiveQlTable.getPartCols
    results ++= columns.map(field => (field.getName, field.getType, field.getComment))

    if (partitionColumns.nonEmpty) {
      val partColumnInfo =
        partitionColumns.map(field => (field.getName, field.getType, field.getComment))
      // Partition columns appear twice, as in Hive: once among the data
      // columns and once under the "# Partition Information" header.
      results ++=
        partColumnInfo ++
        Seq(("# Partition Information", "", "")) ++
        Seq((s"# ${output.get(0).name}", output.get(1).name, output.get(2).name)) ++
        partColumnInfo
    }

    if (isExtended) {
      // "describe extended": append the raw Thrift table metadata as one row.
      results ++= Seq(("Detailed Table Information", table.hiveQlTable.getTTable.toString, ""))
    }

    results.map { case (name, dataType, comment) =>
      Row(name, dataType, comment)
    }
  }
}
| tophua/spark1.52 | sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala | Scala | apache-2.0 | 2,491 |
package keystoneml.pipelines
import org.slf4j.{Logger, LoggerFactory}
/**
* Utility trait for Logging
*/
/**
 * Mix-in that lazily supplies an SLF4J logger named after the concrete class,
 * plus level-guarded convenience methods. All message parameters are by-name
 * so the string is only built when the corresponding level is enabled.
 */
trait Logging {
  // Transient so objects mixing in Logging stay serializable; the logger is
  // recreated lazily on the deserializing machine.
  @transient private var cachedLogger: Logger = null

  /** Returns the logger for this object, creating it on first use. */
  protected def log: Logger = {
    if (cachedLogger == null) {
      // Drop the trailing '$' that Scala appends to companion/object class names.
      val className = this.getClass.getName.stripSuffix("$")
      cachedLogger = LoggerFactory.getLogger(className)
    }
    cachedLogger
  }

  // Message-only variants.
  protected def logInfo(msg: => String): Unit =
    if (log.isInfoEnabled) log.info(msg)

  protected def logDebug(msg: => String): Unit =
    if (log.isDebugEnabled) log.debug(msg)

  protected def logTrace(msg: => String): Unit =
    if (log.isTraceEnabled) log.trace(msg)

  protected def logWarning(msg: => String): Unit =
    if (log.isWarnEnabled) log.warn(msg)

  protected def logError(msg: => String): Unit =
    if (log.isErrorEnabled) log.error(msg)

  // Variants that also record a Throwable (exception or error).
  protected def logInfo(msg: => String, throwable: Throwable): Unit =
    if (log.isInfoEnabled) log.info(msg, throwable)

  protected def logDebug(msg: => String, throwable: Throwable): Unit =
    if (log.isDebugEnabled) log.debug(msg, throwable)

  protected def logTrace(msg: => String, throwable: Throwable): Unit =
    if (log.isTraceEnabled) log.trace(msg, throwable)

  protected def logWarning(msg: => String, throwable: Throwable): Unit =
    if (log.isWarnEnabled) log.warn(msg, throwable)

  protected def logError(msg: => String, throwable: Throwable): Unit =
    if (log.isErrorEnabled) log.error(msg, throwable)
}
| amplab/keystone | src/main/scala/keystoneml/pipelines/Logging.scala | Scala | apache-2.0 | 1,859 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Dmitry Ivanov
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.dmitryivanov.crdt
trait Crdt[E, TotalValue] {

  /**
   * A concrete type of the CRDT implementing this trait.
   */
  type SelfType <: Crdt[E, TotalValue]

  /** Merges this replica's state with `anotherCrdt`, yielding the combined CRDT. */
  def merge(anotherCrdt: SelfType): SelfType

  /** Produces the difference between this CRDT's state and `anotherCrdt`'s. */
  def diff(anotherCrdt: SelfType): SelfType

  /** The observable total value represented by this CRDT's state. */
  def lookup: TotalValue
}
| ajantis/scala-crdt | src/main/scala/io/dmitryivanov/crdt/Crdt.scala | Scala | mit | 1,436 |
package edu.mit.csail.sdg.ormolu.form.ops
import edu.mit.csail.sdg.ormolu.form.Formula
/**
 * The alternative (else) of an implication. impl else formula is equivalent to impl && (!impl.cond => formula)
 *
 * @param impl    the implication whose alternative this is
 * @param formula the formula taken when the implication's condition is false
 */
case class Else(impl: Implies, formula: Formula) extends Formula {
  override def toString: String = "%s else %s".format(impl, formula)

  // Delegates to the equivalent conjunction impl && (!impl.cond => formula).
  override def boolExpr = (impl and (!impl.cond ==> formula)).boolExpr
} | dlreeves/ormolu | src/edu/mit/csail/sdg/ormolu/form/ops/Else.scala | Scala | mit | 430 |
package demo.motto.util
import java.util.ResourceBundle
import javax.annotation.PostConstruct
import javax.faces.context.FacesContext
import javax.inject.Named
import javax.enterprise.context.ContextNotActiveException
import java.util.logging.Logger
import javax.inject.Inject
import javax.enterprise.context.ApplicationScoped
import java.util.Locale
@Named
@ApplicationScoped
class DefaultMessages extends Serializable {

  @Inject
  var log: Logger = _

  // Backing bundle; populated once by init() after CDI construction.
  var messagesResourceBundle: ResourceBundle = _

  /** The underlying resource bundle. */
  def getResourceBundle() = messagesResourceBundle

  /** Looks up the message for `key` in the bundle. */
  def getString(key: String): String = messagesResourceBundle.getString(key)

  /** Alias for [[getString]]. */
  def get(key: String): String = getString(key)

  @PostConstruct
  def init() = {
    /* path: src/main/resources/messages */
    messagesResourceBundle = new MessagesResourceBundle(Locale.US)
  }
}
| oximity/motto | src/main/scala/demo/motto/util/DefaultMessages.scala | Scala | gpl-2.0 | 847 |
package controllers
import controllers.element.{MainTemplate, BaseCtr}
import models.sunerp.{GioiHan, QuyenHanh, QuyenHanhs}
import models.core.AbstractQuery
import play.api.libs.json.{Json, JsValue, Writes}
import play.api.data.Form
import dtos.PagingDto
import play.api.db.slick.Session
import jp.t2v.lab.play2.stackc.RequestWithAttributes
import play.api.mvc.AnyContent
import com.escalatesoft.subcut.inject.BindingModule
/**
* The Class QuyenHanhCtr.
*
* @author Nguyen Duc Dung
* @since 3/22/14 7:24 AM
*
*/
class QuyenHanhCtr(implicit val bindingModule: BindingModule) extends BaseCtr[QuyenHanh, QuyenHanhs] with MainTemplate {
  // Domain key used for the authority checks on this controller's actions.
  override val domainName: String = DomainKey.quyenHanh

  override def editForm(implicit session: Session): Form[QuyenHanh] = QuyenHanhs.editForm

  override implicit val jsonWrite: Writes[QuyenHanh] = QuyenHanhs.jsonFormat

  override val dao: AbstractQuery[QuyenHanh, QuyenHanhs] = QuyenHanhs

  /** Loads a page of QuyenHanh rows for `paging` and renders it as JSON. */
  override protected def doIndex(paging: PagingDto, request: RequestWithAttributes[AnyContent])(implicit session: Session): JsValue = {
    val result = QuyenHanhs.load(paging)
    Json.toJson(result)
  }

  /** Serves GioiHan entries as a combobox data source (guarded by the domain authority). */
  def getGioiHans = StackAction(AuthorityKey -> domainName)(implicit request => {
    Ok(GioiHan.asComboxDataSource)
  })
}
| SunriseSoftVN/sunerp | app/controllers/QuyenHanhCtr.scala | Scala | apache-2.0 | 1,268 |
package mesosphere.marathon
package core.storage.store.impl.cache
import java.time.OffsetDateTime
import akka.http.scaladsl.marshalling.Marshaller
import akka.http.scaladsl.unmarshalling.Unmarshaller
import akka.stream.Materializer
import akka.stream.scaladsl.{Source, Sink}
import akka.{Done, NotUsed}
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.Protos.StorageVersion
import mesosphere.marathon.core.storage.backup.BackupItem
import mesosphere.marathon.core.storage.store.impl.BasePersistenceStore
import mesosphere.marathon.core.storage.store.{IdResolver, PersistenceStore}
import mesosphere.marathon.metrics.{Counter, Metrics}
import mesosphere.marathon.storage.VersionCacheConfig
import mesosphere.marathon.stream.EnrichedSink
import mesosphere.marathon.util.KeyedLock
import scala.async.Async.{async, await}
import scala.collection.concurrent.TrieMap
import scala.collection.immutable.Set
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Random
/**
* A Write Ahead Cache of another persistence store that lazily loads values into the cache.
*
* @param store The store to cache
* @param mat a materializer for Akka Streaming
* @param ctx The execution context for future chaining.
* @tparam K The persistence store's primary key type
* @tparam Serialized The serialized format for the persistence store.
*/
case class LazyCachingPersistenceStore[K, Category, Serialized](metrics: Metrics, store: BasePersistenceStore[K, Category, Serialized])(
    implicit
    mat: Materializer,
    ctx: ExecutionContext
) extends PersistenceStore[K, Category, Serialized]
    with StrictLogging {

  // Serializes cache mutations; keyed both by category name and by storage id
  // (nested acquisition is always category first, then storage id).
  private val lock = KeyedLock[String]("LazyCachingStore", Int.MaxValue)
  // category -> ids currently known for that category.
  private[store] val idCache = TrieMap.empty[Category, Set[Any]]
  // storage key -> cached current value (stored as an Option, so a known
  // "not present" result is cached too).
  private[store] val valueCache = TrieMap.empty[K, Option[Any]]

  private[this] val getHitCounters = TrieMap.empty[Category, Counter]
  private[this] val idsHitCounters = TrieMap.empty[Category, Counter]

  // Open/close state and storage-version bookkeeping delegate to the wrapped store.
  override def markOpen(): Unit = store.markOpen()
  override def markClosed(): Unit = store.markClosed()
  override def isOpen: Boolean = store.isOpen

  override def storageVersion(): Future[Option[StorageVersion]] = store.storageVersion()

  override def setStorageVersion(storageVersion: StorageVersion): Future[Done] =
    store.setStorageVersion(storageVersion)

  /** Streams all ids of a category, serving from `idCache` when present and
    * populating it from the wrapped store on a miss. */
  override def ids[Id, V]()(implicit ir: IdResolver[Id, V, Category, K]): Source[Id, NotUsed] = {
    val category = ir.category
    val idsFuture = lock(category.toString) {
      if (idCache.contains(category)) {
        // TODO - remove special name when MARATHON-7618 is addressed
        idsHitCounters.getOrElseUpdate(ir.category, metrics.counter(s"debug.persistence.cache.ids.${ir.category}.hit")).increment()
        Future.successful(idCache(category).asInstanceOf[Set[Id]])
      } else {
        async {
          val children = await(store.ids.runWith(EnrichedSink.set[Any])) // linter:ignore UndesirableTypeInference
          idCache(category) = children
          children
        }
      }
    }
    Source.fromFuture(idsFuture).mapConcat(_.asInstanceOf[Set[Id]])
  }

  // Runs `delete` against the wrapped store, then evicts the cached value and
  // removes the id from its category set (dropping the set when it empties).
  private def deleteCurrentOrAll[Id, V](k: Id, delete: () => Future[Done])(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
    val category = ir.category
    val storageId = ir.toStorageId(k, None)
    lock(category.toString) {
      lock(storageId.toString) {
        async { // linter:ignore UnnecessaryElseBranch
          await(delete())
          valueCache.remove(storageId)
          val old = idCache.getOrElse(category, Set.empty[Any]) // linter:ignore UndesirableTypeInference
          val children = old - k.asInstanceOf[Any] // linter:ignore UndesirableTypeInference
          if (children.nonEmpty) { // linter:ignore UnnecessaryElseBranch+UseIfExpression
            idCache.put(category, children)
          } else {
            idCache.remove(category)
          }
          Done
        }
      }
    }
  }

  override def deleteCurrent[Id, V](k: Id)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
    deleteCurrentOrAll(k, () => store.deleteCurrent(k))
  }

  override def deleteAll[Id, V](k: Id)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
    deleteCurrentOrAll(k, () => store.deleteAll(k))
  }

  /** Reads the current value of `id`; both hits and misses are cached. */
  override def get[Id, V](id: Id)(implicit ir: IdResolver[Id, V, Category, K], um: Unmarshaller[Serialized, V]): Future[Option[V]] = {
    val storageId = ir.toStorageId(id, None)
    lock(storageId.toString) {
      val cached = valueCache.get(storageId) // linter:ignore OptionOfOption
      cached match {
        case Some(v: Option[V] @unchecked) =>
          // TODO - remove special name when MARATHON-7618 is addressed
          getHitCounters.getOrElseUpdate(ir.category, metrics.counter(s"debug.persistence.cache.get.${ir.category}.hit")).increment()
          Future.successful(v)
        case _ =>
          async { // linter:ignore UnnecessaryElseBranch
            val value = await(store.get(id))
            valueCache.put(storageId, value)
            value
          }
      }
    }
  }

  // Versioned reads bypass the caches entirely.
  override def get[Id, V](id: Id, version: OffsetDateTime)(implicit
      ir: IdResolver[Id, V, Category, K],
      um: Unmarshaller[Serialized, V]
  ): Future[Option[V]] =
    store.get(id, version)

  override def getVersions[Id, V](
      list: Seq[(Id, OffsetDateTime)]
  )(implicit ir: IdResolver[Id, V, Category, K], um: Unmarshaller[Serialized, V]): Source[V, NotUsed] =
    store.getVersions(list)

  /** Writes through to the wrapped store, then updates both caches. */
  override def store[Id, V](id: Id, v: V)(implicit ir: IdResolver[Id, V, Category, K], m: Marshaller[V, Serialized]): Future[Done] = {
    val category = ir.category
    val storageId = ir.toStorageId(id, None)
    lock(category.toString) {
      lock(storageId.toString) {
        async { // linter:ignore UnnecessaryElseBranch
          await(store.store(id, v))
          valueCache.put(storageId, Some(v))
          val cachedIds = idCache.getOrElse(category, Set.empty[Any]) // linter:ignore UndesirableTypeInference
          idCache.put(category, cachedIds + id.asInstanceOf[Any])
          Done
        }
      }
    }
  }

  /** Writes a specific version; only the id cache is updated (the versioned
    * value itself is not cached here). */
  override def store[Id, V](id: Id, v: V, version: OffsetDateTime)(implicit
      ir: IdResolver[Id, V, Category, K],
      m: Marshaller[V, Serialized]
  ): Future[Done] = {
    val category = ir.category
    lock(category.toString) {
      async {
        await(store.store(id, v, version))
        val old = idCache.getOrElse(category, Set.empty[Any]) // linter:ignore UndesirableTypeInference
        idCache.put(category, old + id)
        Done
      }
    }
  }

  override def versions[Id, V](id: Id)(implicit ir: IdResolver[Id, V, Category, K]): Source[OffsetDateTime, NotUsed] =
    store.versions(id)

  override def deleteVersion[Id, V](k: Id, version: OffsetDateTime)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] =
    store.deleteVersion(k, version)

  override def backup(): Source[BackupItem, NotUsed] = store.backup()

  override def restore(): Sink[BackupItem, Future[Done]] = store.restore()

  override def sync(): Future[Done] = store.sync()

  override def startMigration(): Future[Done] = store.startMigration()

  override def endMigration(): Future[Done] = {
    // Clear caches after a migration, some migrations might act on the store directly
    valueCache.clear()
    idCache.clear()
    store.endMigration()
  }

  override def toString: String = s"LazyCachingPersistenceStore($store)"
}
/**
  * A caching decorator around `store` that caches, per (category, id), the
  * set of known versions, and per (id, version), the resolved value --
  * including negative lookups (`None` is cached too). The value cache is
  * bounded probabilistically by `maybePurgeCachedVersions`.
  */
case class LazyVersionCachingPersistentStore[K, Category, Serialized](
    metrics: Metrics,
    store: PersistenceStore[K, Category, Serialized],
    config: VersionCacheConfig = VersionCacheConfig.Default
)(implicit mat: Materializer, ctx: ExecutionContext)
    extends PersistenceStore[K, Category, Serialized]
    with StrictLogging {
  override def markOpen(): Unit = store.markOpen()
  override def markClosed(): Unit = store.markClosed()
  override def isOpen: Boolean = store.isOpen
  // (category, unversioned id) -> all versions known for that id.
  private[store] val versionCache = TrieMap.empty[(Category, K), Set[OffsetDateTime]]
  // (unversioned id, version) -> cached value; None records a known miss.
  private[store] val versionedValueCache = TrieMap.empty[(K, OffsetDateTime), Option[Any]]
  // Lazily-created cache-hit counters, one per category.
  private[this] val hitCounters = TrieMap.empty[Category, Counter]
  // Randomized eviction: while over maxEntries, each pass drops at most
  // purgeCount entries, each with probability pRemove.
  // NOTE(review): if pRemove is 0.0 no entry can ever be evicted and this
  // loop will not terminate while over maxEntries -- presumably the config
  // guarantees pRemove > 0; confirm.
  private[cache] def maybePurgeCachedVersions(
    maxEntries: Int = config.maxEntries,
    purgeCount: Int = config.purgeCount,
    pRemove: Double = config.pRemove
  ): Unit =
    while (versionedValueCache.size > maxEntries) {
      // randomly GC the versions
      var counter = 0
      versionedValueCache.retain { (k, v) =>
        val x = Random.nextDouble()
        x > pRemove || { counter += 1; counter > purgeCount }
      }
    }
  // Records (id, version) -> v in the value cache and, if the version set
  // for this id is already cached, adds `version` to it. Purges first so the
  // cache stays bounded.
  private[this] def updateCachedVersions[Id, V](id: Id, version: OffsetDateTime, v: Option[V])(implicit
    ir: IdResolver[Id, V, Category, K]
  ): Unit = {
    val category = ir.category
    val unversionedId = ir.toStorageId(id, None)
    maybePurgeCachedVersions()
    versionedValueCache.put((unversionedId, version), v)
    if (versionCache.contains((category, unversionedId))) {
      // possible race: there is no way to get/update the value in place
      val cached = versionCache.getOrElse((category, unversionedId), Set.empty) // linter:ignore UndesirableTypeInference
      versionCache.put((category, unversionedId), cached + version)
    }
  }
  // Runs `delete`, then drops every cached value and the cached version set
  // for the id. Skips cache maintenance entirely for unversioned resolvers.
  private def deleteCurrentOrAll[Id, V](id: Id, delete: () => Future[Done])(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] = {
    if (!ir.hasVersions) {
      delete()
    } else {
      val category = ir.category
      val storageId = ir.toStorageId(id, None)
      async {
        await(delete())
        versionedValueCache.retain { case ((sid, version), v) => sid != storageId }
        versionCache.remove((category, storageId))
        Done
      }
    }
  }
  // Current-value read; on a hit in the backing store the (id, version(v))
  // pair is also written through to the version caches.
  override def get[Id, V](id: Id)(implicit ir: IdResolver[Id, V, Category, K], um: Unmarshaller[Serialized, V]): Future[Option[V]] = {
    if (!ir.hasVersions) {
      store.get(id)
    } else {
      async {
        val value = await(store.get(id))
        value.foreach { v =>
          val version = ir.version(v)
          updateCachedVersions(id, version, value)
        }
        value
      }
    }
  }
  // Versioned read: served from versionedValueCache when possible (counting
  // a hit); on a miss the result -- even None -- is cached.
  override def get[Id, V](id: Id, version: OffsetDateTime)(implicit
    ir: IdResolver[Id, V, Category, K],
    um: Unmarshaller[Serialized, V]
  ): Future[Option[V]] = {
    val storageId = ir.toStorageId(id, None)
    val cached = versionedValueCache.get((storageId, version)) // linter:ignore OptionOfOption
    cached match {
      case Some(v: Option[V] @unchecked) =>
        // TODO - remove special name when MARATHON-7618 is addressed
        hitCounters.getOrElseUpdate(ir.category, metrics.counter(s"debug.persistence.cache.get.${ir.category}.hit")).increment()
        Future.successful(v)
      case _ =>
        async {
          val value = await(store.get(id, version))
          updateCachedVersions(id, version, value)
          value
        }
    }
  }
  /**
    * TODO: no caching here yet, intended only for migration (for now)
    */
  override def getVersions[Id, V](
    list: Seq[(Id, OffsetDateTime)]
  )(implicit ir: IdResolver[Id, V, Category, K], um: Unmarshaller[Serialized, V]): Source[V, NotUsed] =
    store.getVersions(list)
  // Current-value write; write-through to the version caches using the
  // version extracted from the value itself.
  override def store[Id, V](id: Id, v: V)(implicit ir: IdResolver[Id, V, Category, K], m: Marshaller[V, Serialized]): Future[Done] = {
    if (!ir.hasVersions) {
      store.store(id, v)
    } else {
      async {
        await(store.store(id, v))
        val version = ir.version(v)
        updateCachedVersions(id, version, Some(v))
        Done
      }
    }
  }
  // Versioned write; write-through to the version caches.
  override def store[Id, V](id: Id, v: V, version: OffsetDateTime)(implicit
    ir: IdResolver[Id, V, Category, K],
    m: Marshaller[V, Serialized]
  ): Future[Done] = {
    async {
      await(store.store(id, v, version))
      updateCachedVersions(id, version, Some(v))
      Done
    }
  }
  override def deleteCurrent[Id, V](k: Id)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] =
    deleteCurrentOrAll(k, () => store.deleteCurrent(k))
  override def deleteAll[Id, V](k: Id)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] =
    deleteCurrentOrAll(k, () => store.deleteAll(k))
  override def deleteVersion[Id, V](k: Id, version: OffsetDateTime)(implicit ir: IdResolver[Id, V, Category, K]): Future[Done] =
    deleteCurrentOrAll(k, () => store.deleteVersion(k, version))
  // Lists versions for an id, populating versionCache on first access.
  override def versions[Id, V](id: Id)(implicit ir: IdResolver[Id, V, Category, K]): Source[OffsetDateTime, NotUsed] = {
    val versionsFuture = {
      val category = ir.category
      val storageId = ir.toStorageId(id, None)
      if (versionCache.contains((category, storageId))) {
        Future.successful(versionCache((category, storageId)))
      } else {
        async {
          val children = await(store.versions(id).runWith(EnrichedSink.set))
          versionCache((category, storageId)) = children
          children
        }
      }
    }
    Source.fromFuture(versionsFuture).mapConcat(identity)
  }
  override def ids[Id, V]()(implicit ir: IdResolver[Id, V, Category, K]): Source[Id, NotUsed] = store.ids()
  override def storageVersion(): Future[Option[StorageVersion]] = store.storageVersion()
  override def setStorageVersion(storageVersion: StorageVersion): Future[Done] =
    store.setStorageVersion(storageVersion)
  override def backup(): Source[BackupItem, NotUsed] = store.backup()
  override def restore(): Sink[BackupItem, Future[Done]] = store.restore()
  override def sync(): Future[Done] = store.sync()
  override def startMigration(): Future[Done] = store.startMigration()
  override def endMigration(): Future[Done] = {
    // Clear caches after a migration, some migrations might act on the store directly
    versionCache.clear()
    versionedValueCache.clear()
    store.endMigration()
  }
  // NOTE(review): toString says "...PersistenceStore" while the class is
  // named "...PersistentStore" -- likely a naming inconsistency; confirm
  // before relying on this string anywhere.
  override def toString: String = s"LazyVersionCachingPersistenceStore($store)"
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/core/storage/store/impl/cache/LazyCachingPersistenceStore.scala | Scala | apache-2.0 | 14,009 |
//
// ExceptionCompileLogger.scala -- Scala class ExceptionCompileLogger
// Project OrcScala
//
// Created by jthywiss on Jun 8, 2010.
//
// Copyright (c) 2017 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.error.compiletime
import orc.ast.AST
import orc.compile.parse.{ OrcInputContext, OrcSourceRange }
import orc.error.compiletime.CompileLogger.Severity
/** A CompileMessageRecorder that throws an exception on a message of
* severity WARNING or higher.
*
* @author jthywiss
*/
class ExceptionCompileLogger() extends CompileLogger {
  // Highest severity recorded since the last beginProcessing call.
  private var maxSeverity = Severity.UNKNOWN

  def beginProcessing(inputContext: OrcInputContext): Unit = {
    maxSeverity = Severity.UNKNOWN
  }

  def endProcessing(inputContext: OrcInputContext): Unit = {
    // Nothing needed
  }

  def beginDependency(inputContext: OrcInputContext): Unit = {
    // Nothing needed
  }

  def endDependency(inputContext: OrcInputContext): Unit = {
    // Nothing needed
  }

  /* (non-Javadoc)
   * @see orc.error.compiletime.CompileLogger#recordMessage(Severity, int, String, Position, AST, Throwable)
   */
  def recordMessage(severity: Severity, code: Int, message: String, location: Option[OrcSourceRange], astNode: AST, exception: Throwable): Unit = {
    maxSeverity = if (severity.ordinal() > maxSeverity.ordinal()) severity else maxSeverity
    ExceptionCompileLogger.throwExceptionIfNeeded(Severity.WARNING, severity, message, location, exception)
  }

  def recordMessage(severity: Severity, code: Int, message: String, location: Option[OrcSourceRange], exception: Throwable): Unit = {
    recordMessage(severity, code, message, location, null, exception)
  }

  def recordMessage(severity: Severity, code: Int, message: String, location: Option[OrcSourceRange], astNode: AST): Unit = {
    recordMessage(severity, code, message, location, astNode, null)
  }

  def recordMessage(severity: Severity, code: Int, message: String): Unit = {
    // BUG FIX: this overload previously passed `null` for the
    // Option[OrcSourceRange] location. throwExceptionIfNeeded evaluates
    // `location foreach`, which would throw a NullPointerException instead
    // of the intended compilation exception. Pass None instead.
    recordMessage(severity, code, message, None, null, null)
  }

  def getMaxSeverity(): Severity = maxSeverity
}
object ExceptionCompileLogger {
  /** Fallback exception used when a message is recorded without one. */
  class GenericCompilationException(message: String) extends CompilationException(message)

  /** Throws `exception` -- or a [[GenericCompilationException]] carrying
    * `message` and `location` when no exception was supplied -- whenever
    * `severity` is at least `minSeverity`; otherwise does nothing.
    */
  def throwExceptionIfNeeded(minSeverity: Severity, severity: Severity, message: String, location: Option[OrcSourceRange], exception: Throwable): Unit = {
    if (severity.ordinal() >= minSeverity.ordinal()) {
      if (exception != null) {
        throw exception
      } else {
        // No exception supplied -- synthesize one from message + location.
        val synthesized = new GenericCompilationException(message)
        location.foreach(synthesized.setPosition)
        throw synthesized
      }
    }
    // below the threshold: disregard
  }
}
| orc-lang/orc | OrcScala/src/orc/error/compiletime/ExceptionCompileLogger.scala | Scala | bsd-3-clause | 2,876 |
/*
* DigiSSHD - DigiControl component for Android Platform
* Copyright (c) 2012, Alexey Aksenov ezh@ezh.msk.ru. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 3 or any later
* version, as published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 3 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 3 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
*/
package org.digimead.digi.ctrl.sshd.session
import scala.actors.Futures.future
import scala.ref.WeakReference
import org.digimead.digi.ctrl.lib.aop.Loggable
import org.digimead.digi.ctrl.lib.block.Block
import org.digimead.digi.ctrl.lib.block.Level
import org.digimead.digi.ctrl.lib.declaration.DIntent
import org.digimead.digi.ctrl.lib.declaration.DOption
import org.digimead.digi.ctrl.lib.declaration.DPreference
import org.digimead.digi.ctrl.lib.log.Logging
import org.digimead.digi.ctrl.lib.util.Android
import org.digimead.digi.ctrl.sshd.Message.dispatcher
import org.digimead.digi.ctrl.sshd.R
import org.digimead.digi.ctrl.sshd.SSHDCommon
import com.commonsware.cwac.merge.MergeAdapter
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.net.Uri
import android.text.Html
import android.view.LayoutInflater
import android.view.MotionEvent
import android.view.View
import android.view.ViewGroup
import android.widget.ArrayAdapter
import android.widget.CheckBox
import android.widget.TextView
/* Renders the "options" section of the session screen: a header row plus a
 * list of checkbox-backed option items. Construction order matters: the
 * header is inflated and the instance registers itself in the companion
 * (OptionBlock.block) so the adapter's touch listener can reach
 * onOptionClick.
 */
class OptionBlock(context: Activity) extends Logging {
  private val header = context.getLayoutInflater.inflate(R.layout.header, null).asInstanceOf[TextView]
  // Currently a single option; WriteConnLog is disabled below.
  private val items = Seq(
    OptionBlock.Item(DOption.ConfirmConn.tag, DOption.ConfirmConn))
  //OptionBlock.Item(DOption.WriteConnLog, DOption.WriteConnLog))
  private lazy val adapter = new OptionBlock.Adapter(context, Android.getId(context, "option_list_item_multiple_choice", "layout"), items)
  OptionBlock.block = Some(this)
  // Appends this block's header view and item adapter to the merged list.
  def appendTo(adapter: MergeAdapter) {
    header.setText(context.getString(R.string.comm_option_block))
    adapter.addView(header)
    adapter.addAdapter(this.adapter)
  }
  // Row tap: flip the row's checkbox on the UI thread, then persist the
  // toggled state via onOptionClick. Only runs if the item's view is still
  // weakly reachable.
  @Loggable
  def onListItemClick(item: OptionBlock.Item) = item.view.get.foreach {
    view =>
      val checkbox = view.findViewById(android.R.id.checkbox).asInstanceOf[CheckBox]
      val lastState = checkbox.isChecked
      context.runOnUiThread(new Runnable { def run = checkbox.setChecked(!lastState) })
      onOptionClick(item, lastState)
  }
  // Persists the inverted state to shared preferences, broadcasts the
  // option-update intent, and raises the "option changed" notification.
  @Loggable
  def onOptionClick(item: OptionBlock.Item, lastState: Boolean) = item.option match {
    case _ =>
      val pref = context.getSharedPreferences(DPreference.Main, Context.MODE_PRIVATE)
      val editor = pref.edit()
      editor.putBoolean(item.option.tag, !lastState)
      editor.commit()
      context.sendBroadcast(new Intent(DIntent.UpdateOption, Uri.parse("code://" + context.getPackageName + "/" + item.option)))
      SSHDCommon.optionChangedNotify(context, item.option, item.getState(context).toString)
  }
}
object OptionBlock extends Logging {
@volatile private var block: Option[OptionBlock] = None
/** A single toggleable option row; `value` is the string shown in lists. */
case class Item(value: String, option: DOption.OptVal) extends Block.Item {
  override def toString() = value

  /** Reads the option's current boolean state from the main shared
    * preferences, falling back to the option's declared default.
    */
  def getState(context: Context): Boolean =
    context
      .getSharedPreferences(DPreference.Main, Context.MODE_PRIVATE)
      .getBoolean(option.tag, option.default.asInstanceOf[Boolean])
}
// List adapter for option rows. Inflated views are cached on the Item via a
// WeakReference, so each row is built at most once while it stays reachable.
class Adapter(context: Activity, textViewResourceId: Int, data: Seq[Item])
  extends ArrayAdapter[Item](context, textViewResourceId, android.R.id.text1, data.toArray) {
  private var inflater: LayoutInflater = context.getLayoutInflater
  override def getView(position: Int, convertView: View, parent: ViewGroup): View = {
    val item = data(position)
    item.view.get match {
      case None =>
        // First request for this row: inflate and wire it up.
        val view = inflater.inflate(textViewResourceId, null)
        val text1 = view.findViewById(android.R.id.text1).asInstanceOf[TextView]
        val text2 = view.findViewById(android.R.id.text2).asInstanceOf[TextView]
        val checkbox = view.findViewById(android.R.id.checkbox).asInstanceOf[CheckBox]
        // Custom touch handling: ACTION_DOWN shows pressed feedback and
        // applies the toggle asynchronously via onOptionClick; any other
        // event flips the checkbox visuals. Returns true to consume all
        // touch events itself.
        checkbox.setOnTouchListener(new View.OnTouchListener {
          def onTouch(v: View, event: MotionEvent): Boolean = {
            // don't want check for tap or TOOL_TYPE_
            val box = v.asInstanceOf[CheckBox]
            val lastState = box.isChecked()
            if (event.getAction() == MotionEvent.ACTION_DOWN) {
              box.setPressed(true)
              box.invalidate()
              box.refreshDrawableState()
              v.getRootView().postInvalidate()
              // apply immediately
              future { block.foreach(_.onOptionClick(item, lastState)) }
            } else {
              box.setChecked(!lastState)
              box.setPressed(false)
              box.invalidate()
              box.refreshDrawableState()
              v.getRootView().postInvalidate()
            }
            true // yes, it is
          }
        })
        checkbox.setFocusable(false)
        checkbox.setFocusableInTouchMode(false)
        checkbox.setChecked(item.getState(context))
        text2.setVisibility(View.VISIBLE)
        text1.setText(Html.fromHtml(item.option.name(context)))
        text2.setText(Html.fromHtml(item.option.description(context)))
        Level.professional(view)
        // Cache the built view weakly so the row is reused next time.
        item.view = new WeakReference(view)
        view
      case Some(view) =>
        view
    }
  }
}
} | ezh/android-component-DigiSSHD | src/main/scala/org/digimead/digi/ctrl/sshd/session/OptionBlock.scala | Scala | gpl-3.0 | 6,138 |
package com.tpl.hamcraft
import cpw.mods.fml.common.FMLLog
import net.minecraftforge.oredict.OreDictionary
import net.minecraft.item.{Item, ItemStack}
import net.minecraft.block.Block
import net.minecraftforge.fluids.FluidStack
object Util {
  /** Registers this mod's ore-dictionary aliases (seeds, mushrooms, wheat,
    * raw meats, root vegetables) so recipes can match any variant.
    */
  def registerOreDictionary(): Unit = {
    OreDictionary.registerOre("seedAny", new ItemStack(Item.seeds))
    OreDictionary.registerOre("seedAny", new ItemStack(Item.melonSeeds))
    OreDictionary.registerOre("seedAny", new ItemStack(Item.pumpkinSeeds))
    OreDictionary.registerOre("mushroomAny", new ItemStack(Block.mushroomRed))
    OreDictionary.registerOre("mushroomAny", new ItemStack(Block.mushroomBrown))
    OreDictionary.registerOre("wheat", new ItemStack(Item.wheat))
    OreDictionary.registerOre("meatRaw", new ItemStack(Item.beefRaw))
    OreDictionary.registerOre("meatRaw", new ItemStack(Item.porkRaw))
    OreDictionary.registerOre("meatRaw", new ItemStack(Item.fishRaw))
    OreDictionary.registerOre("foodRootVegetables", new ItemStack(Item.potato))
    OreDictionary.registerOre("foodRootVegetables", new ItemStack(Item.carrot))
  }

  /** Logs every registered ore-dictionary name with its item stacks. */
  def dumpOreDictionary(): Unit = {
    FMLLog.info("**************************************")
    FMLLog.info("Dumping Ore Dictionary ...")
    FMLLog.info("")
    val ores = OreDictionary.getOreNames
    ores.foreach(oreName => {
      FMLLog.info("%s:", oreName)
      // getOres returns a Java list; toArray erases the element type, so the
      // per-element cast back to ItemStack is required.
      val oreList = OreDictionary.getOres(oreName).toArray
      oreList.foreach(_ore => {
        val ore = _ore.asInstanceOf[ItemStack]
        // stackSize is boxed explicitly because FMLLog.info takes Object varargs.
        FMLLog.info(" - %s (%d)", ore.getDisplayName, ore.stackSize.asInstanceOf[AnyRef])
      })
      FMLLog.info("")
    })
    FMLLog.info("**************************************")
  }

  /** Logs a fluid stack as "label: (id) name (localized) (amount)". */
  def debug(label: String, fluidStack: FluidStack): Unit = {
    FMLLog.info("%s: (id: %d) %s (%s) (%d)", label, fluidStack.fluidID.asInstanceOf[AnyRef], fluidStack.getFluid.getName, fluidStack.getFluid.getLocalizedName, fluidStack.amount.asInstanceOf[AnyRef])
  }

  /** Logs an item stack (or "null") with its names, size and tooltip. */
  def debug(label: String, itemStack: ItemStack): Unit = {
    if (itemStack == null) FMLLog.info("%s: null", label)
    else FMLLog.info("%s: %s (%s) (%d) %s", label, itemStack.getUnlocalizedName, itemStack.getDisplayName, itemStack.stackSize.asInstanceOf[AnyRef], itemStack.getTooltip(null, true).toString)
  }

  def debug(label: String, item: Item): Unit = {
    FMLLog.info("%s: %s", label, item.getUnlocalizedName)
  }

  def debug(label: String, int: Int): Unit = {
    FMLLog.info("%s: %d", label, int.asInstanceOf[AnyRef])
  }

  def debug(label: String, bool: Boolean): Unit = {
    FMLLog.info("%s: %s", label, bool.toString)
  }

  def debug(label: String, string: String): Unit = {
    FMLLog.info("%s: %s", label, string)
  }

  def debug(label: String): Unit = {
    FMLLog.info("%s", label)
  }

  /** Logs `array` as "label: [a, b, c]".
    *
    * BUG FIX: the previous implementation seeded its accumulator with "" and
    * unconditionally prepended ", " before each element, producing output
    * like "[, a, b]". mkString yields the intended "[a, b]" (and "[]" for an
    * empty array, where the old code printed "[]" as well).
    */
  def debug(label: String, array: Array[_]): Unit = {
    FMLLog.info("%s: %s", label, array.mkString("[", ", ", "]"))
  }
}
| piotrb/hamcraft | src/main/scala/com/tpl/hamcraft/Util.scala | Scala | bsd-2-clause | 2,955 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import java.net.{BindException, ServerSocket}
import java.net.URI
import javax.servlet.http.HttpServletRequest
import scala.io.Source
import org.eclipse.jetty.servlet.ServletContextHandler
import org.mockito.Mockito.{mock, when}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.LocalSparkContext._
// Tests for the Spark web UI: basic visibility, Jetty port selection under
// contention (plain and HTTPS), binding to port 0, the advertised UI
// address, and reverse-proxy URI/Location-header rewriting.
class UISuite extends SparkFunSuite {
  /**
   * Create a test SparkContext with the SparkUI enabled.
   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
   */
  private def newSparkContext(): SparkContext = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("test")
      .set("spark.ui.enabled", "true")
    val sc = new SparkContext(conf)
    assert(sc.ui.isDefined)
    sc
  }
  // Conf pair with SSL left disabled for the "ui" namespace.
  private def sslDisabledConf(): (SparkConf, SSLOptions) = {
    val conf = new SparkConf
    (conf, new SecurityManager(conf).getSSLOptions("ui"))
  }
  // Conf pair with SSL enabled, backed by the test keystore resource.
  private def sslEnabledConf(): (SparkConf, SSLOptions) = {
    val keyStoreFilePath = getTestResourcePath("spark.keystore")
    val conf = new SparkConf()
      .set("spark.ssl.ui.enabled", "true")
      .set("spark.ssl.ui.keyStore", keyStoreFilePath)
      .set("spark.ssl.ui.keyStorePassword", "123456")
      .set("spark.ssl.ui.keyPassword", "123456")
    (conf, new SecurityManager(conf).getSSLOptions("ui"))
  }
  // Disabled: fetches the live UI over HTTP and checks the expected tabs.
  ignore("basic ui visibility") {
    withSpark(newSparkContext()) { sc =>
      // test if the ui is visible, and all the expected tabs are visible
      eventually(timeout(10 seconds), interval(50 milliseconds)) {
        val html = Source.fromURL(sc.ui.get.appUIAddress).mkString
        assert(!html.contains("random data that should not be present"))
        assert(html.toLowerCase.contains("stages"))
        assert(html.toLowerCase.contains("storage"))
        assert(html.toLowerCase.contains("environment"))
        assert(html.toLowerCase.contains("executors"))
      }
    }
  }
  // Disabled: checks the UI is reachable at the default port 4040.
  ignore("visibility at localhost:4040") {
    withSpark(newSparkContext()) { sc =>
      // test if visible from http://localhost:4040
      eventually(timeout(10 seconds), interval(50 milliseconds)) {
        val html = Source.fromURL("http://localhost:4040").mkString
        assert(html.toLowerCase.contains("stages"))
      }
    }
  }
  // Occupies a port with a plain ServerSocket, then starts two Jetty servers
  // asking for that same port; both must move to distinct free ports.
  test("jetty selects different port under contention") {
    var server: ServerSocket = null
    var serverInfo1: ServerInfo = null
    var serverInfo2: ServerInfo = null
    val (conf, sslOptions) = sslDisabledConf()
    try {
      server = new ServerSocket(0)
      val startPort = server.getLocalPort
      serverInfo1 = JettyUtils.startJettyServer(
        "0.0.0.0", startPort, sslOptions, Seq[ServletContextHandler](), conf)
      serverInfo2 = JettyUtils.startJettyServer(
        "0.0.0.0", startPort, sslOptions, Seq[ServletContextHandler](), conf)
      // Allow some wiggle room in case ports on the machine are under contention
      val boundPort1 = serverInfo1.boundPort
      val boundPort2 = serverInfo2.boundPort
      assert(boundPort1 != startPort)
      assert(boundPort2 != startPort)
      assert(boundPort1 != boundPort2)
    } finally {
      stopServer(serverInfo1)
      stopServer(serverInfo2)
      closeSocket(server)
    }
  }
  // Same contention scenario as above, but with SSL enabled.
  test("jetty with https selects different port under contention") {
    var server: ServerSocket = null
    var serverInfo1: ServerInfo = null
    var serverInfo2: ServerInfo = null
    try {
      server = new ServerSocket(0)
      val startPort = server.getLocalPort
      val (conf, sslOptions) = sslEnabledConf()
      serverInfo1 = JettyUtils.startJettyServer(
        "0.0.0.0", startPort, sslOptions, Seq[ServletContextHandler](), conf, "server1")
      serverInfo2 = JettyUtils.startJettyServer(
        "0.0.0.0", startPort, sslOptions, Seq[ServletContextHandler](), conf, "server2")
      // Allow some wiggle room in case ports on the machine are under contention
      val boundPort1 = serverInfo1.boundPort
      val boundPort2 = serverInfo2.boundPort
      assert(boundPort1 != startPort)
      assert(boundPort2 != startPort)
      assert(boundPort1 != boundPort2)
    } finally {
      stopServer(serverInfo1)
      stopServer(serverInfo2)
      closeSocket(server)
    }
  }
  // Requesting port 0 must yield a real ephemeral port, proven occupied by
  // the failing ServerSocket bind.
  test("jetty binds to port 0 correctly") {
    var socket: ServerSocket = null
    var serverInfo: ServerInfo = null
    val (conf, sslOptions) = sslDisabledConf()
    try {
      serverInfo = JettyUtils.startJettyServer(
        "0.0.0.0", 0, sslOptions, Seq[ServletContextHandler](), conf)
      val server = serverInfo.server
      val boundPort = serverInfo.boundPort
      assert(server.getState === "STARTED")
      assert(boundPort != 0)
      intercept[BindException] {
        socket = new ServerSocket(boundPort)
      }
    } finally {
      stopServer(serverInfo)
      closeSocket(socket)
    }
  }
  // Same port-0 scenario, but with SSL enabled.
  test("jetty with https binds to port 0 correctly") {
    var socket: ServerSocket = null
    var serverInfo: ServerInfo = null
    try {
      val (conf, sslOptions) = sslEnabledConf()
      serverInfo = JettyUtils.startJettyServer(
        "0.0.0.0", 0, sslOptions, Seq[ServletContextHandler](), conf)
      val server = serverInfo.server
      val boundPort = serverInfo.boundPort
      assert(server.getState === "STARTED")
      assert(boundPort != 0)
      intercept[BindException] {
        socket = new ServerSocket(boundPort)
      }
    } finally {
      stopServer(serverInfo)
      closeSocket(socket)
    }
  }
  test("verify appUIAddress contains the scheme") {
    withSpark(newSparkContext()) { sc =>
      val ui = sc.ui.get
      val uiAddress = ui.appUIAddress
      val uiHostPort = ui.appUIHostPort
      assert(uiAddress.equals("http://" + uiHostPort))
    }
  }
  test("verify appUIAddress contains the port") {
    withSpark(newSparkContext()) { sc =>
      val ui = sc.ui.get
      // splitting "http://host:port" on ':' puts the port at index 2
      val splitUIAddress = ui.appUIAddress.split(':')
      val boundPort = ui.boundPort
      assert(splitUIAddress(2).toInt == boundPort)
    }
  }
  // Checks proxy prefix stripping, query preservation, percent-encoding
  // passthrough, and rejection of non-matching worker prefixes.
  test("verify proxy rewrittenURI") {
    val prefix = "/proxy/worker-id"
    val target = "http://localhost:8081"
    val path = "/proxy/worker-id/json"
    var rewrittenURI = JettyUtils.createProxyURI(prefix, target, path, null)
    assert(rewrittenURI.toString() === "http://localhost:8081/json")
    rewrittenURI = JettyUtils.createProxyURI(prefix, target, path, "test=done")
    assert(rewrittenURI.toString() === "http://localhost:8081/json?test=done")
    rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/proxy/worker-id", null)
    assert(rewrittenURI.toString() === "http://localhost:8081")
    rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/proxy/worker-id/test%2F", null)
    assert(rewrittenURI.toString() === "http://localhost:8081/test%2F")
    rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/proxy/worker-id/%F0%9F%98%84", null)
    assert(rewrittenURI.toString() === "http://localhost:8081/%F0%9F%98%84")
    rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/proxy/worker-noid/json", null)
    assert(rewrittenURI === null)
  }
  // A Location header pointing at the proxied target is rewritten to the
  // client-facing proxy URL; unrelated hosts yield null.
  test("verify rewriting location header for reverse proxy") {
    val clientRequest = mock(classOf[HttpServletRequest])
    var headerValue = "http://localhost:4040/jobs"
    val prefix = "/proxy/worker-id"
    val targetUri = URI.create("http://localhost:4040")
    when(clientRequest.getScheme()).thenReturn("http")
    when(clientRequest.getHeader("host")).thenReturn("localhost:8080")
    var newHeader = JettyUtils.createProxyLocationHeader(
      prefix, headerValue, clientRequest, targetUri)
    assert(newHeader.toString() === "http://localhost:8080/proxy/worker-id/jobs")
    headerValue = "http://localhost:4041/jobs"
    newHeader = JettyUtils.createProxyLocationHeader(
      prefix, headerValue, clientRequest, targetUri)
    assert(newHeader === null)
  }
  // Null-tolerant cleanup helpers used by the finally blocks above.
  def stopServer(info: ServerInfo): Unit = {
    if (info != null && info.server != null) info.server.stop
  }
  def closeSocket(socket: ServerSocket): Unit = {
    if (socket != null) socket.close
  }
}
| Panos-Bletsos/spark-cost-model-optimizer | core/src/test/scala/org/apache/spark/ui/UISuite.scala | Scala | apache-2.0 | 8,951 |
// NOTE(review): `trait Inner <: { ... }` is not standard Scala syntax; this
// file appears to be a compiler test fixture exercising the (never-adopted)
// virtual-classes proposal and will not compile with a stock scalac.
// Intentionally left untouched.
object Virt extends Application {
  class Foo {
    trait Inner <: { val x : Int = 3 }
  }
  class Bar extends Foo {
    trait Inner <: { val y : Int = x }
  }
}
| felixmulder/scala | test/pending/pos/virt.scala | Scala | bsd-3-clause | 165 |
package chat.tox.antox.utils
import java.util.Random
import android.app.Activity
import android.graphics.{Color, Matrix}
import android.util.DisplayMetrics
import android.view.{TextureView, View}
import chat.tox.antox.wrapper.ToxKey
object UiUtils {
// Number of leading key characters intended for display.
val trimedIdLength = 8
//Trims an ID so that it can be displayed to the user
def trimId(id: ToxKey): String = {
  // NOTE(review): substring's end index is exclusive, so this yields only
  // trimedIdLength - 1 = 7 characters despite the constant being 8. If 8
  // characters are intended, the "- 1" is an off-by-one -- confirm before
  // changing, since the shorter string is what users currently see.
  id.toString.substring(0, trimedIdLength - 1)
}
/** Removes the byte-order mark (U+FEFF) and every space from `address`. */
def sanitizeAddress(address: String): String = {
  val withoutBom = address.replaceAll("\\uFEFF", "")
  withoutBom.replace(" ", "")
}
/** Strips the *escaped* newline sequences -- i.e. the literal two-character
  * texts "\n" and "\r" -- from `str`. Note that String.replace performs a
  * plain (non-regex) substitution, so actual newline characters are kept.
  */
def removeNewlines(str: String): String =
  List("\\n", "\\r").foldLeft(str)((acc, escaped) => acc.replace(escaped, ""))
/** Derives a display color from `hash`: the golden-ratio conjugate spreads
  * hues from successive hashes far apart on the wheel; saturation and value
  * are fixed at 0.5 / 0.7. Deterministic for a given hash (seeded Random).
  */
def generateColor(hash: Int): Int = {
  val goldenRatioConjugate = 0.618033988749895
  val hue: Double = (new Random(hash).nextFloat() + goldenRatioConjugate) % 1
  Color.HSVToColor(Array(hue.toFloat * 360, 0.5f, 0.7f))
}
// Shows `visibleView` and hides (View.GONE) every view in `goneViews`.
def toggleViewVisibility(visibleView: View, goneViews: View*): Unit = {
  visibleView.setVisibility(View.VISIBLE)
  goneViews.foreach(_.setVisibility(View.GONE))
}
// Width of the default display, in pixels.
def getScreenWidth(activity: Activity): Int = {
  val metrics = new DisplayMetrics()
  activity.getWindowManager.getDefaultDisplay.getMetrics(metrics)
  metrics.widthPixels
}
// Height of the default display, in pixels.
def getScreenHeight(activity: Activity): Int = {
  val metrics = new DisplayMetrics()
  activity.getWindowManager.getDefaultDisplay.getMetrics(metrics)
  metrics.heightPixels
}
/**
 * Sets the TextureView transform to preserve the aspect ratio of the video.
 */
def adjustAspectRatio(activity: Activity, textureView: TextureView, videoWidth: Int, videoHeight: Int) {
  val viewWidth: Int = textureView.getWidth
  val viewHeight: Int = textureView.getHeight
  val aspectRatio: Double = videoHeight.toDouble / videoWidth
  var newWidth: Int = 0
  var newHeight: Int = 0
  // Fit the video inside the view (letterbox/pillarbox) while keeping
  // videoHeight / videoWidth constant.
  if (viewHeight > (viewWidth * aspectRatio).toInt) {
    newWidth = viewWidth
    newHeight = (viewWidth * aspectRatio).toInt
  }
  else {
    newWidth = (viewHeight / aspectRatio).toInt
    newHeight = viewHeight
  }
  // Offsets that center the scaled video inside the view.
  val xoff: Int = (viewWidth - newWidth) / 2
  val yoff: Int = (viewHeight - newHeight) / 2
  val txform: Matrix = new Matrix()
  // Posted to the UI thread -- presumably because TextureView transforms
  // must be applied there; confirm against the callers' threading.
  activity.runOnUiThread(new Runnable {
    override def run(): Unit = {
      textureView.getTransform(txform)
      txform.setScale(newWidth.toFloat / viewWidth, newHeight.toFloat / viewHeight)
      txform.postTranslate(xoff, yoff)
      textureView.setTransform(txform)
    }
  })
}
} | wiiam/Antox | app/src/main/scala/chat/tox/antox/utils/UiUtils.scala | Scala | gpl-3.0 | 2,547 |
package models.daos
import play.api.db.slick.DatabaseConfigProvider
import slick.driver.JdbcProfile
import play.api.db.slick.HasDatabaseConfig
/**
 * Trait that contains generic Slick DB handling code to be mixed in with DAOs
 */
trait DaoSlick extends DBTableDefinitions with HasDatabaseConfig[JdbcProfile] {
  // Resolves the default Slick database configuration from the running
  // application. NOTE(review): `play.api.Play.current` is the global-state
  // accessor deprecated in newer Play versions -- confirm before upgrading.
  protected val dbConfig = DatabaseConfigProvider.get[JdbcProfile](play.api.Play.current)
  import driver.api._
}
| sne11ius/ideen | app/models/daos/DaoSlick.scala | Scala | gpl-3.0 | 464 |
package doodlebot
package model
// Root of the client model ADT; extending Product with Serializable keeps
// inferred types clean for the case-class variants in the companion below.
sealed abstract class Model extends Product with Serializable
object Model {
  // Not logged in: carries the state of both the signup and login forms.
  final case class NotAuthenticated(signup: Signup, login: Login) extends Model
  // Logged in: user name, session string, and the chat view's sub-model.
  final case class Authenticated(name: String, session: String, chat: view.Chat.Model) extends Model
  // Signup form state; `errors` maps field name -> validation messages.
  final case class Signup(email: String, name: String, password: String, errors: Map[String, List[String]] = Map.empty) {
    def withErrors(errors: Map[String, List[String]]): Signup =
      this.copy(errors = errors)
  }
  object Signup {
    // Blank form with no errors.
    val empty = Signup("","","")
  }
  // Login form state; `errors` maps field name -> validation messages.
  final case class Login(name: String, password: String, errors: Map[String, List[String]] = Map.empty) {
    def withErrors(errors: Map[String, List[String]]): Login =
      this.copy(errors = errors)
  }
  object Login {
    // Blank form with no errors.
    val empty = Login("","")
  }
}
| underscoreio/doodlebot | ui/src/main/scala/doodlebot/model/model.scala | Scala | apache-2.0 | 829 |
package com.softwaremill.streams.complete.util
import scala.util.Random
object Timed {
  /** Evaluates `b`, returning its result paired with the elapsed
    * wall-clock time in milliseconds.
    */
  def timed[T](b: => T): (T, Long) = {
    val startedAt = System.currentTimeMillis()
    val result = b
    val elapsed = System.currentTimeMillis() - startedAt
    (result, elapsed)
  }

  /** Runs each test once as warmup, then `repetitions` shuffled rounds of
    * all tests, printing per-run timings followed by the mean and standard
    * deviation per test name (sorted by mean, fastest first).
    */
  def runTests(tests: List[(String, () => String)], repetitions: Int): Unit = {
    // Runs one test, prints its line, and returns the elapsed millis.
    def runAndReport(name: String, body: () => String): Long = {
      val (result, time) = timed(body())
      println(f"$name%-25s $result%-25s ${time / 1000.0d}%4.2fs")
      time
    }

    // Shuffle up front (matches original ordering of Random usage).
    val allTests = Random.shuffle(List.fill(repetitions)(tests).flatten)

    println("Warmup")
    tests.foreach { case (name, body) => runAndReport(name, body) }

    println("---")
    println(s"Running ${allTests.size} tests")
    val rawResults = allTests.map { case (name, body) => name -> runAndReport(name, body) }

    // Aggregate per name: population mean and standard deviation.
    val results: Map[String, (Double, Double)] = rawResults.groupBy(_._1).map {
      case (name, measurements) =>
        val times = measurements.map(_._2)
        val count = times.size
        val mean = times.sum.toDouble / count
        val squaredDeviations = times.map(t => (t - mean) * (t - mean))
        val stddev = Math.sqrt(squaredDeviations.sum / count)
        name -> (mean, stddev)
    }

    println("---")
    println("Averages (name, mean, stddev)")
    results.toList.sortBy(_._2._1).foreach { case (name, (mean, stddev)) =>
      println(f"$name%-25s ${mean / 1000.0d}%4.2fs $stddev%4.2fms")
    }
  }
}
| adamw/streams-pres | src/main/scala/com/softwaremill/streams/complete/util/Timed.scala | Scala | apache-2.0 | 1,374 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.ws
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._
import scala.util._
/**
* Created with IntelliJ IDEA.
* User: Israel
* Date: 12/09/13
* Time: 16:08
* To change this template use File | Settings | File Templates.
*/
object Settings {
// Local host name; used below to derive per-host default partition names.
val hostName = java.net.InetAddress.getLocalHost.getHostName
// Root Typesafe Config loaded from the default locations.
val config = ConfigFactory.load()
// kafka
lazy val kafkaURL = config.getString("kafka.url")
lazy val persistTopicName = config.getString("kafka.persist.topic.name")
lazy val zkServers = config.getString("kafka.zkServers")
// updates tLog
lazy val updatesTLogName = config.getString("updatesTlog.name")
lazy val updatesTLogPartition = try { config.getString("updatesTlog.partition") } catch {
case _: Throwable => "updatesPar_" + hostName
}
// uuids tLog
lazy val uuidsTLogName = config.getString("uuidsTlog.name")
lazy val uuidsTLogPartition = try { config.getString("uuidsTlog.partition") } catch {
case _: Throwable => "uuidsPar_" + hostName
}
// infotons DAO
lazy val irwServiceDaoHostName = config.getString("irwServiceDao.hostName")
lazy val irwServiceDaoClusterName = config.getString("irwServiceDao.clusterName")
lazy val irwServiceDaoKeySpace = config.getString("irwServiceDao.keySpace")
lazy val irwServiceDaoKeySpace2 = config.getString("irwServiceDao.keySpace2")
lazy val irwReadCacheEnabled = config.getBoolean("webservice.irwServiceDao.readCache.enabled")
lazy val pollingInterval = config.getLong("indexer.pollingInterval")
lazy val bucketsSize = config.getInt("indexer.bucketsSize")
// size is in MB
lazy val maxUploadSize = config.getInt("webservice.max.upload.size")
//maximum weight of a single field value
lazy val maxValueWeight: Long = Try(config.getBytes("webservice.max.value.weight")) match {
case Success(n) => n
case Failure(_) => 16384L
}
lazy val cassandraBulkSize: Int = config.getInt("cassandra.bulk.size")
lazy val consumeBulkThreshold: Long = config.getLong("cmwell.ws.consume.bulk.threshold")
lazy val consumeBulkBinarySearchTimeout: FiniteDuration =
config.getDuration("cmwell.ws.consume.bulk.binarySearchTimeout").toMillis.millis
lazy val elasticsearchScrollBufferSize: Int = config.getInt("elasticsearch.scroll.buffer.size")
//in seconds:
lazy val cacheTimeout: Long =
Try(config.getDuration("webservice.cache.timeout", java.util.concurrent.TimeUnit.SECONDS)).getOrElse(7L)
lazy val fieldsNamesCacheTimeout: Duration =
Try(config.getDuration("cmwell.ws.cache.fieldsNamesTimeout")).toOption.fold(2.minutes) { d =>
Duration.fromNanos(d.toNanos)
}
lazy val maxTypesCacheSize: Long = Try(config.getLong("cmwell.ws.cache.types.max-size")).getOrElse(10000L)
lazy val minimumEntryRefreshRateMillis: Long =
Try(config.getDuration("cmwell.ws.cache.types.minimum-refresh-rate")).fold(_ => 30000L, _.toMillis)
lazy val sstreamParallelism: Int = config.getInt("cmwell.ws.sstream-parallelism")
lazy val pushbackpressure: String = Try(config.getString("cmwell.ws.pushbackpressure.trigger")).getOrElse("enable")
lazy val maximumQueueBuildupAllowedUTLog: Long =
Try(config.getLong("cmwell.ws.tlog.updating.limit")).toOption.getOrElse(13200000L)
lazy val maximumQueueBuildupAllowedITLog: Long =
Try(config.getLong("cmwell.ws.tlog.indexing.limit")).toOption.getOrElse(3500000L)
lazy val maximumQueueBuildupAllowed: Long = Try(config.getLong("cmwell.ws.klog.limit")).toOption.getOrElse(496351L)
lazy val ingestPushbackByServer: FiniteDuration =
Try(config.getDuration("cmwell.ws.klog.pushback.time")).toOption.fold(7.seconds) { d =>
Duration.fromNanos(d.toNanos)
}
lazy val bgMonitorAskTimeout: FiniteDuration =
Try(config.getDuration("cmwell.ws.klog.pushback.timeout")).toOption.fold(5.seconds) { d =>
Duration.fromNanos(d.toNanos)
}
// default timeout for ElasticSearch calls
lazy val esTimeout = config.getInt("ftsService.default.timeout").seconds
lazy val gridBindIP = config.getString("cmwell.grid.bindIP")
lazy val gridBindPort = config.getInt("cmwell.grid.bindPort")
lazy val gridSeeds = Set.empty[String] ++ config.getString("cmwell.grid.seeds").split(";")
lazy val clusterName = config.getString("cmwell.clusterName")
lazy val authSystemVersion = config.getInt("auth.system.version")
lazy val expansionLimit = config.getInt("webservice.xg.limit")
lazy val chunkSize = config.getBytes("webservice.max.chunk.size")
lazy val maxOffset = config.getInt("webservice.max-offset")
lazy val maxLength = Try(config.getInt("webservice.max-length")).getOrElse(expansionLimit)
lazy val maxQueryResultsLength = config.getInt("crashableworker.results.maxLength")
lazy val queryResultsTempFileBaseName = config.getString("crashableworker.results.baseFileName")
lazy val subjectsInSpAreHttps = config.getBoolean("crashableworker.subjectsAreHttps")
lazy val dataCenter = config.getString("dataCenter.id")
lazy val maxDataCenters = config.getInt("dataCenter.maxInstances")
lazy val quadsCacheSize = config.getLong("quads.cache.size")
lazy val xFixNumRetries = config.getInt("xfix.num.retries")
lazy val maxSearchResultsForGlobalQuadOperations = config.getInt("quads.globalOperations.results.maxLength")
lazy val initialMetaNsLoadingAmount = config.getInt("ws.meta.ns.initialLoadingAmount")
lazy val esGracfulDegradationTimeout = config.getInt("ws.es.gracfulDegradationTimeout")
lazy val graphReplaceSearchTimeout = config.getInt("webservice.graphreplace.search.timeoutsec")
lazy val graphReplaceMaxStatements = config.getInt("webservice.graphreplace.maxStatements")
lazy val maxDaysToAllowGenerateTokenFor = config.getInt("authorization.token.expiry.maxDays")
lazy val loginPenalty = config.getInt("webservice.login.penaltysec")
lazy val consumeSimpleChunkSize = config.getInt("cmwell.ws.consume.simple-chunk-size")
lazy val consumeExpandableChunkSize = config.getInt("cmwell.ws.consume.expandable-chunk-size")
lazy val requestsPenaltyThreshold = config.getInt("cmwell.ws.trafficshaping.requests-penalty-threshold")
lazy val checkFrequency = config.getInt("cmwell.ws.trafficshaping.check-frequency-sec")
lazy val defaultLimitForHistoryVersions = config.getInt("cmwell.ws.cassandra-driver.history-versions-limit")
lazy val maxRequestTimeSec = config.getInt("cmwell.ws.trafficshaping.max-request-time-sec")
lazy val stressThreshold = config.getLong("cmwell.ws.trafficshaping.stress-threshold")
lazy val thresholdToUseZStore = config.getBytes("cmwell.ws.switch-over-to-zstore.file-size")
lazy val zCacheSecondsTTL = config.getInt("cmwell.ws.zcache.ttlSeconds")
lazy val zCachePollingMaxRetries = config.getInt("cmwell.ws.zcache.pollingMaxRetries")
lazy val zCachePollingIntervalSeconds = config.getInt("cmwell.ws.zcache.pollingIntervalSeconds")
lazy val zCacheL1Size = config.getInt("cmwell.ws.zcache.L1Size")
}
| bryaakov/CM-Well | server/cmwell-ws/app/Settings.scala | Scala | apache-2.0 | 7,553 |
package net.mkowalski.sparkfim.driver
import net.mkowalski.sparkfim.model.MinSupport
import net.mkowalski.sparkfim.runner.BigFimRunner
import net.mkowalski.sparkfim.util.{CliArgsParser, DriverUtil, Logging}
object BigFimDriver extends Logging {

  /** Entry point: prints usage when invoked without arguments, otherwise runs BigFIM. */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      printUsageInfo()
    } else {
      runWith(args)
    }
  }

  /** Parses CLI parameters and executes the BigFIM pipeline on a fresh Spark context. */
  private def runWith(args: Array[String]): Unit = {
    LOG.debug("Parsing program arguments...")
    val params = CliArgsParser.parse(args, requiredParams = List(
      "inputFile", "bfsStages", "minSup", "outputDir"
    ))

    // `parse` enforces the presence of the required keys, so direct Map access is
    // safe here (and avoids the `Option.get` anti-pattern of the previous version).
    val inputFilePath = params("inputFile")
    val bfsStages = params("bfsStages").toInt
    val minSup = params("minSup").toInt
    val outputDirPath = params("outputDir")
    val persistenceManager = DriverUtil.createPersistenceManager(params)
    // Optional parameter: absence simply leaves repartitioning disabled.
    val forcedPartitionsNum = params.get("forcedPartitionsNum").map(_.toInt)
    LOG.debug("All arguments provided")

    LOG.info("Creating Spark context")
    val sc = SparkContextProvider.provideForApp("Spark BigFIM")

    LOG.info("Starting BigFIM algorithm")
    BigFimRunner(sc, persistenceManager, inputFilePath, bfsStages,
      MinSupport(minSup), outputDirPath, forcedPartitionsNum).run()
    LOG.info("BigFIM algorithm finished")

    sc.stop()
  }

  private def printUsageInfo() =
    println( """BigFIM algorithm implementation (in Scala) for Apache Spark by Michal Kowalski. Required parameters:
               | --inputFile <path to input file>
               | --bfsStages <number of stages using Apriori-like BFS approach>
               | --minSup <min support threshold>
               | --outputDir <path to result directory>
               |Optional parameters:
               | --cacheRemovePolicy auto|normal|aggressive|paranoid
               |     auto - let Spark handle all unused cached RDDs and broadcast
               |     normal - force nonblocking unpersist for some unused objects
               |     aggressive (default) - force nonblocking unpersist for all unused objects
               |     paranoid - force unpersist for all unused objects
               | --serializedStorage serialize objects to cache (default: false)
               | --allowDiskStorage allow storing the partitions that don't fit on disk (default: false)
               | --forcedPartitionsNum force repartitioning of the original data with specified partitions number
             """.stripMargin)
}
| mjkowalski/spark-fim | src/main/scala/net/mkowalski/sparkfim/driver/BigFimDriver.scala | Scala | mit | 2,449 |
package com.arcusys.valamis.web.servlet.base
import javax.servlet.http.HttpServletResponse
import com.arcusys.valamis.util.serialization.JsonHelper
import org.json4s.jackson.Serialization
import org.json4s.{Formats, NoTypeHints}
abstract class BaseApiController extends ServletBase with PermissionSupport {

  /**
   * Evaluates `a` and renders its result as JSON, or responds 204 No Content
   * when the action yields `null`/`Unit`. The Content-Type falls back to
   * `text/html` for IE 8/9, which cannot handle `application/json` responses.
   */
  def jsonAction(a: => Any)(implicit formats: Formats = Serialization.formats(NoTypeHints)): Any = {
    val isLegacyIE = Option(request.getHeader("User-Agent"))
      .exists(ua => ua.contains("MSIE 9") || ua.contains("MSIE 8"))
    val contentType =
      if (isLegacyIE) "text/html; charset=UTF-8"
      else "application/json; charset=UTF-8"
    response.setHeader("Content-Type", contentType)

    a match {
      case null | (_: Unit) => halt(HttpServletResponse.SC_NO_CONTENT)
      case result           => JsonHelper.toJson(result)
    }
  }

  // Disable client-side caching on every response produced by subclasses.
  after() {
    response.setHeader("Cache-control", "must-revalidate,no-cache,no-store")
    response.setHeader("Expires", "-1")
  }
}
| arcusys/Valamis | valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/base/BaseApiController.scala | Scala | gpl-3.0 | 1,077 |
import org.scalatestplus.play.{BaseOneAppPerTest, PlaySpec}
import play.api.libs.json.Json
import play.api.test.Helpers._
import play.api.test._
class AuthControllerSpec extends PlaySpec with AuthenticatedUser with TestHelpers with BaseOneAppPerTest with AppApplicationFactory {

  /** Signup payloads that must be rejected as malformed (HTTP 400). */
  val badSignupCredentialsList = List(
    credentials ++ Json.obj("email" -> "abc"),
    credentials ++ Json.obj("username" -> ""),
    credentials ++ Json.obj("password" -> ""),
    credentials - "username",
    credentials - "email",
    credentials - "password"
  )

  /** Well-formed login payloads whose credentials are simply wrong (HTTP 401). */
  val badLoginCredentialsList = List(
    credentials ++ Json.obj("email" -> "abc@123.com"),
    credentials ++ Json.obj("password" -> "xyz"),
    credentials ++ Json.obj("email" -> "abc@123.com", "password" -> "xyz")
  )

  /** Structurally invalid login payloads with missing fields (HTTP 400). */
  val invalidStructuredLoginCredentialsList = List(
    credentials - "email",
    credentials - "password"
  )

  "POST /signup" should {
    "return HTTP 200 ok with valid signup credentials" in signupWithValidCredentials()

    "return HTTP 400 bad request with invalid signup credentials" in {
      val responses = badSignupCredentialsList.map { creds =>
        route(app, FakeRequest(POST, "/signup").withJsonBody(creds)).get
      }
      responses.foreach(r => status(r) mustBe BAD_REQUEST)
    }
  }

  "POST /login" should {
    "return HTTP 200 ok with valid login credentials" in loginWithValidCredentials()

    "return HTTP 400 bad request with invalid structured login credentials" in {
      val responses = invalidStructuredLoginCredentialsList.map { creds =>
        route(app, FakeRequest(POST, "/login").withJsonBody(creds)).get
      }
      responses.foreach(r => status(r) mustBe BAD_REQUEST)
    }

    "return HTTP 401 unauthorised with invalid login credentials" in {
      val responses = badLoginCredentialsList.map { creds =>
        route(app, FakeRequest(POST, "/login").withJsonBody(creds)).get
      }
      responses.foreach(r => status(r) mustBe UNAUTHORIZED)
    }
  }

  "GET /user" should {
    "return HTTP 200 ok with authorisation cookie" in {
      val authorised = makeSimpleRequest("user", authCookieOption = Some(getAuthCookie), jsonBody = None, GET)
      validateResult(authorised, OK, "username")
    }

    "return HTTP 401 unauthorised without authorisation cookie" in {
      val anonymous = makeSimpleRequest("user", authCookieOption = None, jsonBody = None, GET)
      validateResult(anonymous, UNAUTHORIZED, "error")
    }
  }
}
| stuart-xyz/rate-my-area | test/AuthControllerSpec.scala | Scala | mit | 2,801 |
package org.broadinstitute.dsde.firecloud.integrationtest
import akka.stream.Materializer
import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.integrationtest.ESIntegrationSupport._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
/**
 * Integration tests for free-text search against a real Elasticsearch index,
 * loaded once with the shared fixture documents.
 */
class TextSearchSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with LazyLogging with SearchResultValidation {

  // Rebuilds the index and loads fixtures once, before any test in this suite runs.
  override def beforeAll = {
    // use re-create here, since instantiating the DAO will create it in the first place
    searchDAO.recreateIndex()

    // make sure we specify refresh=true here; otherwise, the documents may not be available in the index by the
    // time the tests start, leading to test failures.
    logger.info("indexing fixtures ...")
    searchDAO.bulkIndex(IntegrationTestFixtures.fixtureDocs, refresh = true)
    logger.info("... fixtures indexed.")
  }

  // Drops the index so repeated runs start from a clean slate.
  override def afterAll = {
    searchDAO.deleteIndex()
  }

  "Library integration" - {
    "Elastic Search" - {
      "Index exists" in {
        assert(searchDAO.indexExists())
      }
    }

    "search for 'brca'" - {
      "should find just the two BRCA datasets" in {
        val searchResponse = searchFor("brca")
        assertResult(2) {searchResponse.total}
        validateResultNames(
          Set("TCGA_BRCA_ControlledAccess", "TCGA_BRCA_OpenAccess"),
          searchResponse
        )
      }
    }

    "search for 'tcga_brca'" - {
      "should find just the two BRCA datasets" in {
        val searchResponse = searchFor("tcga_brca")
        assertResult(2) {searchResponse.total}
        validateResultNames(
          Set("TCGA_BRCA_ControlledAccess", "TCGA_BRCA_OpenAccess"),
          searchResponse
        )
      }
    }

    "search for 'tcga brca'" - {
      "should find just the two BRCA datasets" in {
        val searchResponse = searchFor("tcga brca")
        assertResult(2) {searchResponse.total}
        validateResultNames(
          Set("TCGA_BRCA_ControlledAccess", "TCGA_BRCA_OpenAccess"),
          searchResponse
        )
      }
    }

    "search for 'tcga_brca_openaccess'" - {
      "should find just the single BRCA open-access dataset" in {
        val searchResponse = searchFor("tcga_brca_openaccess")
        assertResult(1) {searchResponse.total}
        validateResultNames(
          Set("TCGA_BRCA_OpenAccess"),
          searchResponse
        )
      }
    }

    "search for 'tcga brca openaccess'" - {
      "should find all openaccess datasets, plus the BRCA controlled access" in {
        // we'll match on 2 of the 3 tokens, so we find "tcga openaccess" as well as "tcga brca" and "brca openaccess"
        val searchResponse = searchFor("tcga brca openaccess")
        assertResult(13) {searchResponse.total}
        val actualNames = getResultField("library:datasetName", searchResponse)
        assert(
          actualNames.forall(name => name.equals("TCGA_BRCA_ControlledAccess") || name.endsWith("_OpenAccess"))
        )
      }
    }

    "search for 'kidney renal papillary cell carcinoma'" - {
      "should find four datasets with two types of kidney carcinomas" in {
        val searchResponse = searchFor("kidney renal papillary cell carcinoma")
        assertResult(4) {searchResponse.total}
        validateResultIndications(
          Set("Kidney Renal Clear Cell Carcinoma","Kidney Renal Papillary Cell Carcinoma"),
          searchResponse
        )
      }
    }

    "search for 'testing123'" - {
      "should find the single dataset named 'testing123'" in {
        val searchResponse = searchFor("testing123")
        assertResult(1) {searchResponse.total}
        validateResultNames(
          Set("testing123"),
          searchResponse
        )
      }
    }
  }
}
| broadinstitute/firecloud-orchestration | src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/TextSearchSpec.scala | Scala | bsd-3-clause | 3,801 |
package spoiwo.natures.xlsx
import java.util.{Calendar, Date}
import java.time.{LocalDate, LocalDateTime, ZoneId}
import Model2XlsxConversions.{convertCell, _}
import org.apache.poi.ss.usermodel.CellType
import org.apache.poi.xssf.usermodel.{XSSFCell, XSSFWorkbook}
import scala.language.postfixOps
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import spoiwo.model.Height.HeightEnrichment
import spoiwo.model._
/**
 * Unit tests for cell conversion in [[Model2XlsxConversions]]: verifies the
 * defaults of an empty cell and the XSSF cell type/value produced for each
 * supported value type (strings, formulas, numbers, booleans, dates, links).
 */
class Model2XlsxConversionsForCellSpec extends AnyFlatSpec with Matchers {

  // A cell converted with no explicit properties — used to probe the defaults.
  private val defaultCell: XSSFCell = convert(Cell.Empty)

  // Converts a model cell inside a fresh sheet/row context.
  private def convert(cell: Cell): XSSFCell = convertCell(Sheet(), Map(), Row(), cell, row)

  // `def`, not `val`: every call creates a brand-new workbook row, keeping tests independent.
  private def row = new XSSFWorkbook().createSheet().createRow(0)

  "Cell conversion" should "return blank cell type with empty string by default" in {
    defaultCell.getCellType shouldBe CellType.BLANK
    defaultCell.getStringCellValue shouldBe ""
  }

  it should "return a 0 column index by default if no other cells specified in the row" in {
    defaultCell.getColumnIndex shouldBe 0
  }

  it should "return cell style with 11pt Calibri by default" in {
    defaultCell.getCellStyle.getFont.getFontHeightInPoints shouldBe 11
    defaultCell.getCellStyle.getFont.getFontName shouldBe "Calibri"
  }

  it should "return cell style with 14pt Arial when explicitly specified" in {
    val cellStyle = CellStyle(font = Font(fontName = "Arial", height = 14 points))
    val model = Cell.Empty.withStyle(cellStyle)
    val xlsx = convert(model)
    xlsx.getCellStyle.getFont.getFontHeightInPoints shouldBe 14
    xlsx.getCellStyle.getFont.getFontName shouldBe "Arial"
  }

  it should "return index of 3 when explicitly specified" in {
    val model = Cell.Empty.withIndex(3)
    val xlsx = convert(model)
    xlsx.getColumnIndex shouldBe 3
  }

  it should "return index of 2 when row has already 2 other cells" in {
    val row = new XSSFWorkbook().createSheet().createRow(0)
    row.createCell(0)
    row.createCell(1)
    val model = Cell.Empty
    val xlsx = convertCell(Sheet(), Map(), Row(), model, row)
    xlsx.getColumnIndex shouldBe 2
  }

  it should "return string cell when set up with 'String'" in {
    val model = Cell("TEST_STRING")
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.STRING
    xlsx.getStringCellValue shouldBe "TEST_STRING"
  }

  it should "return string cell when set up with String with newline value" in {
    val model = Cell("TEST_STRING\nAnd a 2nd line")
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.STRING
    xlsx.getStringCellValue shouldBe "TEST_STRING\nAnd a 2nd line"
  }

  // A leading '=' marks the string as a formula; the '=' itself is stripped.
  it should "return formula cell when set up with string starting with '=' sign" in {
    val model = Cell("=1000/3+7")
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.FORMULA
    xlsx.getCellFormula shouldBe "1000/3+7"
  }

  it should "return numeric cell when set up with double value" in {
    val model = Cell(90.45)
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.NUMERIC
    xlsx.getNumericCellValue shouldBe 90.45
  }

  it should "return numeric cell when set up with big decimal value" in {
    val model = Cell(BigDecimal(90.45))
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.NUMERIC
    xlsx.getNumericCellValue shouldBe 90.45
  }

  it should "return numeric cell when set up with int value" in {
    val model = Cell(90)
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.NUMERIC
    xlsx.getNumericCellValue shouldBe 90
  }

  it should "return numeric cell when set up with long value" in {
    val model = Cell(10000000000000L)
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.NUMERIC
    xlsx.getNumericCellValue shouldBe 10000000000000L
  }

  it should "return boolean cell when set up with boolean value" in {
    val model = Cell(true)
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.BOOLEAN
    xlsx.getBooleanCellValue shouldBe true
  }

  it should "return numeric cell when set up with java.util.Date value" in {
    val localDate = LocalDate.of(2011, 11, 13)
    val model = Cell(Date.from(localDate.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant))
    val xlsx = convert(model)
    val date = xlsx.getDateCellValue.toInstant.atZone(ZoneId.systemDefault()).toLocalDate
    date.getYear shouldBe 2011
    date.getMonthValue shouldBe 11
    date.getDayOfMonth shouldBe 13
  }

  // Calendar months are 0-based (11 == December), hence the expected month of 12.
  it should "return numeric cell when set up with java.util.Calendar value" in {
    val calendar = Calendar.getInstance()
    calendar.set(2011, 11, 13)
    val model = Cell(calendar)
    val xlsx = convert(model)
    val date = xlsx.getDateCellValue.toInstant.atZone(ZoneId.systemDefault()).toLocalDate
    date.getYear shouldBe 2011
    date.getMonthValue shouldBe 12
    date.getDayOfMonth shouldBe 13
  }

  // Exercised with both a summer and a winter date to cover DST offsets.
  it should "return numeric cell when set up with java.time.LocalDate value" in {
    test(LocalDate.of(2011, 6, 13))
    test(LocalDate.of(2011, 11, 13))

    def test(ld: LocalDate): Unit = {
      val model = Cell(ld)
      val xlsx = convert(model)
      val date = xlsx.getDateCellValue.toInstant.atZone(ZoneId.systemDefault())
      date.getYear shouldBe ld.getYear
      date.getMonthValue shouldBe ld.getMonthValue
      date.getDayOfMonth shouldBe ld.getDayOfMonth
      date.getHour shouldBe 0
      date.getMinute shouldBe 0
      date.getSecond shouldBe 0
    }
  }

  it should "return numeric cell when set up with java.time.LocalDateTime value" in {
    test(LocalDateTime.of(2011, 6, 13, 15, 30, 10))
    test(LocalDateTime.of(2011, 11, 13, 15, 30, 10))

    def test(ldt: LocalDateTime): Unit = {
      val model = Cell(ldt)
      val xlsx = convert(model)
      val date = xlsx.getDateCellValue.toInstant.atZone(ZoneId.systemDefault())
      date.getYear shouldBe ldt.getYear
      date.getMonthValue shouldBe ldt.getMonthValue
      date.getDayOfMonth shouldBe ldt.getDayOfMonth
      date.getHour shouldBe ldt.getHour
      date.getMinute shouldBe ldt.getMinute
      date.getSecond shouldBe ldt.getSecond
    }
  }

  // Excel's epoch cannot represent such dates numerically, so a string is emitted instead.
  it should "return string cell with the date formatted yyyy-MM-dd if date before 1904" in {
    val model = Cell(LocalDate.of(1856, 11, 3))
    val xlsx = convert(model)
    "1856-11-03" shouldBe xlsx.getStringCellValue
  }

  it should "apply 14pt Arial cell style for column when set explicitly" in {
    val column = Column(index = 0, style = CellStyle(font = Font(fontName = "Arial", height = 14 points)))
    val sheet = Sheet(Row(Cell("Test"))).withColumns(column)
    val xlsx = sheet.convertAsXlsx()
    val cellStyle = xlsx.getSheetAt(0).getRow(0).getCell(0).getCellStyle
    cellStyle.getFont.getFontName shouldBe "Arial"
    cellStyle.getFont.getFontHeightInPoints shouldBe 14
  }

  it should "return a string cell with hyperlink when setup with HyperLinkUrl value" in {
    val model = Cell(HyperLink("View Item", "https://www.google.com"))
    val xlsx = convert(model)
    xlsx.getCellType shouldBe CellType.STRING
    xlsx.getHyperlink.getAddress shouldBe "https://www.google.com"
  }
}
| norbert-radyk/spoiwo | core/src/test/scala/spoiwo/natures/xlsx/Model2XlsxConversionsForCellSpec.scala | Scala | mit | 7,145 |
package uk.gov.dvla.vehicles.presentation.common.clientsidesession
import com.google.inject.Inject
import play.api.mvc.Cookie
import uk.gov.dvla.vehicles.presentation.common.ConfigProperties.{booleanProp, getOptionalProperty, getProperty, intProp}
import uk.gov.dvla.vehicles.presentation.common.utils.helpers.CommonConfig
/**
 * Applies deployment-specific attributes (e.g. the `secure` flag, max age)
 * to cookies before they are sent to the client.
 */
trait CookieFlags {

  /**
   * Returns a copy of `cookie` with the flags applied.
   * `key` identifies the cookie; both implementations in this file ignore it.
   */
  def applyToCookie(cookie: Cookie, key: String): Cookie

  /** Key-less convenience overload of [[applyToCookie]]. */
  def applyToCookie(cookie: Cookie): Cookie
}
/** A [[CookieFlags]] implementation that leaves every cookie untouched. */
final class NoCookieFlags extends CookieFlags {
  override def applyToCookie(cookie: Cookie, key: String): Cookie = cookie
  override def applyToCookie(cookie: Cookie): Cookie = cookie
}
/**
 * [[CookieFlags]] implementation that reads the `secure` flag and max age
 * from application configuration.
 */
final class CookieFlagsFromConfig @Inject()() extends CookieFlags {

  // Read once at construction time; configuration changes require a new instance.
  private val cookieMaxAgeSeconds = getProperty[Int]("application.cookieMaxAge")
  private val secureCookies = getOptionalProperty[Boolean]("secureCookies").getOrElse(CommonConfig.DEFAULT_SECURE_COOKIES)

  // NOTE(review): the `key` parameter is ignored — flags are applied uniformly to all cookies.
  override def applyToCookie(cookie: Cookie, key: String = ""): Cookie =
    cookie.copy(
      secure = secureCookies,
      maxAge = Some(cookieMaxAgeSeconds)
    )

  override def applyToCookie(cookie: Cookie): Cookie = applyToCookie(cookie, key = "")
}
| dvla/vehicles-presentation-common | app/uk/gov/dvla/vehicles/presentation/common/clientsidesession/CookieFlags.scala | Scala | mit | 1,197 |
package ru.reo7sp.wave
// Application entry point: constructs the view model and shows the main window.
// NOTE(review): `waveViewModel` is never used inside this object; presumably other
// components reach it as `Core.waveViewModel` — confirm before removing or
// converting this `App` object to an explicit `main` method.
object Core extends App {
  val waveViewModel = new WaveViewModel
  val mainWindow = new MainWindow
  mainWindow.setVisible(true)
}
| reo7sp/WaveTest | src/main/scala/ru/reo7sp/wave/Core.scala | Scala | mit | 157 |
/*
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package textapp.server
import ccf.tree.operation.TreeOperation
import textapp.TextDocument
import ccf.session.ChannelId
import java.io.Serializable
import ccf.server.{ShutdownListener, ServerOperationInterceptor}
/**
 * Server-side interceptor that mirrors every accepted operation into a shared
 * [[TextDocument]], so the server always holds the current text state.
 *
 * @param document the document kept in sync with the operation stream
 */
class TextAppOperationInterceptor(document: TextDocument) extends ServerOperationInterceptor {
  // The whole document doubles as the state snapshot for a channel.
  // NOTE(review): the same document is returned for every channelId — this assumes
  // a single shared document; confirm if multiple channels are ever served.
  override def currentStateFor(channelId: ChannelId): Serializable = {
    document
  }

  // Applies the incoming tree operation directly to the shared document.
  override def applyOperation(shutdownListener: ShutdownListener, channelId: ChannelId, op: TreeOperation): Unit = {
    document.applyOp(op)
  }
}
| akisaarinen/ccf | app/src/main/scala/textapp/server/TextAppOperationInterceptor.scala | Scala | apache-2.0 | 1,166 |
//
// StackTrace.scala -- Scala object and class StackTrace
// Project OrcScala
//
// Copyright (c) 2017 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.util
import sun.misc.SharedSecrets
/**
 * An immutable sequence view over a captured array of stack frames.
 *
 * @param frames the captured frames, innermost call first
 */
class StackTrace(val frames: Array[StackTraceElement]) extends scala.collection.immutable.IndexedSeq[StackTraceElement] {
  def length = frames.length
  def apply(i: Int) = frames(i)
  // Join with a real newline; the previous "\\n" inserted a literal
  // backslash-n between frames instead of a line break.
  override def toString() = frames.mkString("\n")
}

object StackTrace {
  /**
   * Captures the current call stack.
   *
   * @param skip number of innermost frames to drop, in addition to this method itself
   * @param n    maximum number of frames to keep
   */
  def getStackTrace(skip: Int = 0, n: Int = Int.MaxValue) = {
    val e = new Exception()
    // Use the portable Throwable API instead of sun.misc.SharedSecrets, which is
    // JDK-internal and unavailable on modern JVMs. drop/take also avoids the
    // previous version reading past the end of the trace (and the potential
    // offset + n overflow) when n is close to the actual stack depth.
    val frames = e.getStackTrace.drop(1 + skip).take(n)
    new StackTrace(frames)
  }
}
| orc-lang/orc | OrcScala/src/orc/util/StackTrace.scala | Scala | bsd-3-clause | 1,110 |
package breeze.stats.distributions
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import scala.collection.mutable.ArrayBuffer
import collection.TraversableLike
import collection.generic.CanBuildFrom
import breeze.linalg.DenseVector
import org.apache.commons.math3.random.{MersenneTwister, RandomGenerator}
/**
 * A trait for monadic distributions. Provides support for use in for-comprehensions:
 * `map`, `flatMap`, and `filter`/`condition` compose samplers into new samplers.
 * @author dlwh
 */
trait Rand[@specialized(Int, Double) +T] { outer =>
  /**
   * Gets one sample from the distribution. Equivalent to sample()
   */
  def draw() : T

  def get() = draw()

  /** Overridden by filter/map/flatMap for monadic invocations. Basically, rejection samplers will return None here */
  def drawOpt():Option[T] = Some(draw())

  /**
   * Gets one sample from the distribution. Equivalent to get()
   */
  def sample() = get()

  /**
   * Gets n samples from the distribution.
   */
  def sample(n : Int) = IndexedSeq.fill(n)(draw())

  /**
   * An infinitely long iterator that samples repeatedly from the Rand
   * @return an iterator that repeatedly samples
   */
  def samples:Iterator[T] = new Iterator[T] {
    def hasNext = true
    def next() = get()
  }

  /**
   * Converts a random sampler of one type to a random sampler of another type.
   * Examples:
   * randInt(10).flatMap(x => randInt(3 * x.asInstanceOf[Int]) gives a Rand[Int] in the range [0,30]
   * Equivalently, for(x <- randInt(10); y <- randInt(30 *x)) yield y
   *
   * @param f the transform to apply to the sampled value.
   *
   */
  def flatMap[E](f : T => Rand[E] ):Rand[E] = new Rand[E] {
    // Rejection-sampling loop: keep redrawing until the inner sampler yields a value.
    def draw() = Iterator.continually(f(outer.draw()).drawOpt()).find(_.nonEmpty).get.get
    // Single trial: fails fast if either the outer or the inner draw is rejected.
    override def drawOpt() = outer.drawOpt().flatMap(t => f(t).drawOpt())
  }

  /**
   * Converts a random sampler of one type to a random sampler of another type.
   * Examples:
   * uniform.map(_*2) gives a Rand[Double] in the range [0,2]
   * Equivalently, for(x <- uniform) yield 2*x
   *
   * @param f the transform to apply to the sampled value.
   *
   */
  def map[E](f : T=>E):Rand[E] = {
    new Rand[E] {
      def draw() = f(outer.get())
      override def drawOpt() = outer.drawOpt().map(f)
    }
  }

  /**
   * Samples one element and applies the provided function to it.
   * Despite the `foreach` name, the function is applied exactly once,
   * to a single freshly drawn sample. Sample usage:
   * <pre> for(x <- Rand.uniform) { println(x) } </pre>
   *
   * @param f the function to be applied
   */
  def foreach(f : T=>Unit) = f(get())

  def filter(p: T=>Boolean) = condition(p)

  def withFilter(p: T=>Boolean) = condition(p)

  // Rejection sampler: draw() retries until the predicate holds, so it will
  // loop forever if p is never satisfiable. Not the most efficient
  // implementation ever, but meh.
  def condition(p : T => Boolean):Rand[T] = new Rand[T] {
    def draw() = {
      var x = outer.get()
      while(!p(x)) {
        x = outer.get()
      }
      x
    }

    // Single trial: returns None if the one drawn sample fails the predicate.
    override def drawOpt() = {
      Some(outer.get()).filter(p)
    }
  }
}
/**
 * Provides standard combinators and such to use
 * to compose new Rands, all backed by a single [[RandomGenerator]].
 */
class RandBasis(val generator: RandomGenerator) {

  /**
   * Chooses an element from a collection.
   * Throws NoSuchElementException for an empty collection (the first
   * `elems.next()` fails) — callers must ensure `c` is non-empty.
   */
  def choose[T](c: Iterable[T]):Rand[T] = new Rand[T] {
    def draw() = {
      // Pick a target position in [0, size) and walk the iterator to it.
      val sz = uniform.get * c.size
      val elems = c.iterator
      var i = 1
      var e = elems.next()
      while(i < sz) {
        e = elems.next()
        i += 1
      }
      e
    }
  }

  def choose[T](c : Seq[T]) = Rand.randInt(c.size).map( c(_))

  /**
   * The trivial random generator: always returns the argument
   */
  def always[T](t : T):Rand[T] = new Rand[T] {
    def draw = t
  }

  /**
   * Simply reevaluate the body every time get is called
   */
  def fromBody[T](f : =>T):Rand[T] = new Rand[T] {
    def draw = f
  }

  /**
   * Convert a Collection of Rand[T] into a Rand[Collection[T]].
   * (Uses the Scala 2.12-era CanBuildFrom machinery to preserve the collection type.)
   */
  def promote[T, CC[X] <: Traversable[X] with TraversableLike[X, CC[X]]]
    (col : CC[Rand[T]])(implicit cbf: CanBuildFrom[CC[Rand[T]], T, CC[T]]):Rand[CC[T]] = fromBody(col.map(_.get))

  /**
   * Convert an Seq of Rand[T] into a Rand[Seq[T]]
   */
  def promote[U](col : Seq[Rand[U]]) = fromBody(col.map(_.get))

  def promote[T1,T2](t : (Rand[T1],Rand[T2])) = fromBody( (t._1.get,t._2.get))
  def promote[T1,T2,T3](t : (Rand[T1],Rand[T2],Rand[T3])) = fromBody( (t._1.get,t._2.get,t._3.get))
  def promote[T1,T2,T3,T4](t : (Rand[T1],Rand[T2],Rand[T3],Rand[T4])) =
    fromBody( (t._1.get,t._2.get,t._3.get,t._4.get))

  /**
   * Uniformly samples in [0,1]
   */
  val uniform:Rand[Double] = new Rand[Double] {
    def draw = generator.nextDouble
  }

  /**
   * Uniformly samples an integer in [0,MAX_INT]
   */
  val randInt:Rand[Int] = new Rand[Int] {
    def draw = generator.nextInt
  }

  /**
   * Uniformly samples an integer in [0,n)
   */
  def randInt(n : Int):Rand[Int] = new Rand[Int] {
    def draw = generator.nextInt(n)
  }

  /**
   * Uniformly samples an integer in [n,m)
   */
  def randInt(n : Int, m: Int):Rand[Int] = new Rand[Int] {
    def draw = generator.nextInt(m-n)+n
  }

  /**
   * Samples a gaussian with 0 mean and 1 std
   */
  val gaussian :Rand[Double] = new Rand[Double] {
    def draw = generator.nextGaussian
  }

  /**
   * Samples a gaussian with m mean and s std
   */
  def gaussian(m : Double, s : Double): Rand[Double] = new Rand[Double] {
    def draw = m + s * gaussian.get
  }

  /**
   * Implements the Knuth (Fisher-Yates) shuffle of numbers from 0 to n.
   */
  def permutation(n : Int):Rand[IndexedSeq[Int]] = new Rand[IndexedSeq[Int]] {
    def draw = {
      val arr = new ArrayBuffer[Int]()
      arr ++= (0 until n)
      var i = n
      while(i > 1) {
        // Swap the last unshuffled slot with a uniformly chosen earlier slot.
        val k = generator.nextInt(i)
        i -= 1
        val tmp = arr(i)
        arr(i) = arr(k)
        arr(k) = tmp
      }
      arr
    }
  }

  /**
   * Knuth shuffle of a subset of size n from a set.
   * Only the first min(n, set.size) positions are shuffled, then mapped
   * back to the set's elements.
   */
  def subsetsOfSize[T](set: IndexedSeq[T], n: Int):Rand[IndexedSeq[T]] = new Rand[IndexedSeq[T]] {
    def draw = {
      val arr = Array.range(0,set.size)
      var i = 0
      while( i < n.min(set.size)) {
        // Partial Fisher-Yates: fix position i with a random pick from [i, size).
        val k = generator.nextInt(set.size-i) + i
        val temp = arr(i)
        arr(i) = arr(k)
        arr(k) = temp
        i+=1
      }
      arr.take(n).map(set)
    }
  }
}
/**
 * Provides a number of random generators.
 *
 * Backed by a thread-local MersenneTwister, so draws from multiple threads
 * use independent generator state.
 */
object Rand extends RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister()))
| ktakagaki/breeze | src/main/scala/breeze/stats/distributions/Rand.scala | Scala | apache-2.0 | 6,868 |
package stainless
package frontends.dotc
import dotty.tools.dotc
import dotc._
import core._
import dotc.util._
import Contexts.{Context => DottyContext}
import plugins._
import Phases._
import transform._
import reporting._
import inox.{Context, DebugSection, utils => InoxPosition}
import stainless.frontend
import stainless.frontend.{CallBack, Frontend}
/** Plugin identity and the prefixes of its command-line options. */
object StainlessPlugin {
  val PluginName = "stainless"
  val PluginDescription = "Inject Stainless verification pipeline"
  // Option values follow the prefix, e.g. "verify:true".
  val EnableVerificationOptionName = "verify:"
  val EnableGhostEliminationOptionName = "ghost-elim:"
}
case class PluginOptions(enableVerification: Boolean, enableGhostElimination: Boolean)
/**
 * Dotty compiler plugin entry point: depending on the parsed options it
 * installs the Stainless extraction/verification phase and/or the ghost-code
 * elimination phase.
 */
class StainlessPlugin extends StandardPlugin {
  import StainlessPlugin._
  override val name: String = PluginName
  override val description: String = PluginDescription
  // Builds the list of phases to add, based on the plugin options.
  def init(options: List[String]): List[PluginPhase] = {
    val pluginOpts = parseOptions(options)
    List(
      if (pluginOpts.enableVerification)
        Some(new ExtractionAndVerification)
      else None,
      if (pluginOpts.enableGhostElimination)
        Some(new GhostAccessRewriter)
      else None
    ).flatten
  }
  // Scans raw "-P:stainless:..." options for the two recognized prefixes;
  // unrecognized options and unparsable boolean values are silently ignored.
  private def parseOptions(options: List[String]): PluginOptions = {
    var enableVerification = false
    var enableGhostElimination = false
    for (option <- options) {
      if (option.startsWith(EnableVerificationOptionName)) {
        val value = option.substring(EnableVerificationOptionName.length)
        parseBoolean(value) foreach { value =>
          enableVerification = value
        }
      }
      else if (option.startsWith(EnableGhostEliminationOptionName)) {
        val value = option.substring(EnableGhostEliminationOptionName.length)
        parseBoolean(value) foreach { value =>
          enableGhostElimination = value
        }
      }
    }
    PluginOptions(enableVerification = enableVerification, enableGhostElimination = enableGhostElimination)
  }
  // Accepts "true"/"yes" and "false"/"no"; anything else yields None.
  private def parseBoolean(str: String): Option[Boolean] =
    str match {
      case "false" | "no" => Some(false)
      case "true" | "yes" => Some(true)
      case _ => None
    }
  /**
   * Compiler phase that extracts each compilation unit into the Stainless
   * representation and feeds it to the verification callback.
   */
  private class ExtractionAndVerification extends PluginPhase {
    override val phaseName = "stainless"
    override val runsAfter = Set(Pickler.name)
    override val runsBefore = Set(FirstTransform.name)
    // Both are populated in runOn before super.runOn triggers run below.
    private var extraction: Option[StainlessExtraction] = None
    private var callback: Option[CallBack] = None
    // This method is called for every compilation unit, and in the same thread.
    // It is called within super.runOn.
    override def run(using DottyContext): Unit =
      extraction.get.extractUnit.foreach(extracted =>
        callback.get(extracted.file, extracted.unit, extracted.classes, extracted.functions, extracted.typeDefs))
    override def runOn(units: List[CompilationUnit])(using dottyCtx: DottyContext): List[CompilationUnit] = {
      val mainHelper = new stainless.MainHelpers {
        override val factory = new frontend.FrontendFactory{
          override def apply(ctx: Context, compilerArgs: Seq[String], callback: CallBack): Frontend =
            sys.error("stainless.MainHelpers#factory should never be called from the dotty plugin")
          override protected val libraryPaths: Seq[String] = Seq.empty
        }
      }
      // Build an inox Context whose reporter forwards messages to dotty.
      val inoxCtx = {
        val base = mainHelper.getConfigContext(inox.Options.empty)(using new stainless.PlainTextReporter(Set.empty))
        val adapter = new ReporterAdapter(base.reporter.debugSections)
        inox.Context(
          reporter = adapter,
          interruptManager = new inox.utils.InterruptManager(adapter),
          options = base.options,
          timers = base.timers,
        )
      }
      val cb = stainless.frontend.getCallBack(using inoxCtx)
      // Not pretty at all... Oh well...
      callback = Some(cb)
      extraction = Some(new StainlessExtraction(inoxCtx))
      cb.beginExtractions()
      val unitRes = super.runOn(units)
      cb.endExtractions()
      cb.join()
      val report = cb.getReport
      report foreach { report =>
        report.emit(inoxCtx)
      }
      unitRes
    }
  }
  /**
   * Bridges inox reporting to the dotty reporter, translating positions and
   * severities.
   */
  class ReporterAdapter(debugSections: Set[DebugSection])(using dottyCtx: DottyContext) extends inox.PlainTextReporter(debugSections) {
    import dotty.tools.io._
    import Diagnostic._
    import Message._
    private def toSourceFile(file: java.io.File): SourceFile =
      SourceFile(AbstractFile.getFile(file.getPath), scala.io.Codec.UTF8)
    // Maps inox positions onto dotty source positions; unknown positions
    // become NoSourcePosition.
    private def toDottyPos(pos: InoxPosition.Position): SourcePosition = pos match {
      case InoxPosition.NoPosition =>
        NoSourcePosition
      case InoxPosition.OffsetPosition(_, _, point, file) =>
        SourcePosition(toSourceFile(file), Spans.Span(point, point, point))
      case InoxPosition.RangePosition(_, _, pointFrom, _, _, pointTo, file) =>
        SourcePosition(toSourceFile(file), Spans.Span(pointFrom, pointFrom, pointTo))
    }
    override def emit(message: Message): Unit = {
      val pos = toDottyPos(message.position)
      message.msg match {
        case msg: ReportMessage =>
          msg.emit(this)
        case msg: String =>
          message.severity match {
            case INFO => dottyCtx.reporter.report(Info(msg, pos))
            case WARNING => dottyCtx.reporter.report(Warning(msg, pos))
            case ERROR | FATAL | INTERNAL => dottyCtx.reporter.report(Diagnostic.Error(msg, pos))
            case _ => dottyCtx.reporter.report(Info(msg, pos)) // DEBUG messages are reported at INFO level
          }
        case _ => ()
      }
    }
  }
}
| epfl-lara/stainless | frontends/dotty/src/main/scala/stainless/frontends/dotc/StainlessPlugin.scala | Scala | apache-2.0 | 5,742 |
import org.specs2.mutable._
import java.io.{File}
import scalaxb.compiler.Module
import scalaxb.compiler.xsd.{Driver}
import org.specs2.matcher
/**
 * Base trait for integration specs: provides the xsd compiler module and a
 * clean "tmp" working directory, recreated each time the trait is initialized.
 */
trait TestBase extends Specification with CompilerMatcher with matcher.FileMatchers {
  val module: Module = new Driver // with Verbose
  val tmp = new File("tmp")
  // Start from a clean slate on every run.
  if (tmp.exists) deleteAll(tmp)
  tmp.mkdirs() // you need this for copyFileFromResource
}
| Fayho/scalaxb | integration/src/test/scala/TestBase.scala | Scala | mit | 401 |
package io.swagger.client.model
/** Generic API response envelope (generated Swagger model). */
case class CommonResponse (
  /* Status code */
  status: Integer,
  /* Message */
  message: String,
  /* Whether the request succeeded */
  success: Boolean)
| QuantiModo/QuantiModo-SDK-Scala | src/main/scala/io/swagger/client/model/CommonResponse.scala | Scala | gpl-2.0 | 161 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.geotools
import org.geotools.data.{DelegatingFeatureReader, FeatureReader, ReTypeFeatureReader}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/** A [[DelegatingFeatureReader]] that re-types simple features. Unlike [[ReTypeFeatureReader]] this
  * feature reader will preserve user data.
  *
  * @param delegate the delegate reader
  * @param featureType the projected type
  */
class TypeUpdatingFeatureReader(delegate: FeatureReader[SimpleFeatureType, SimpleFeature],
                                featureType: SimpleFeatureType)
    extends DelegatingFeatureReader[SimpleFeatureType, SimpleFeature] {
  override val getDelegate: FeatureReader[SimpleFeatureType, SimpleFeature] = delegate
  // Re-types each feature on the fly as it is pulled from the delegate.
  override def next(): SimpleFeature = FeatureUtils.retype(delegate.next(), featureType)
  override def hasNext: Boolean = delegate.hasNext
  override def getFeatureType: SimpleFeatureType = featureType
  // Closing this reader closes the underlying delegate.
  override def close(): Unit = delegate.close()
}
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package logic
import cmwell.domain._
import com.typesafe.scalalogging.LazyLogging
/**
* Created by gilad on 6/18/14.
*/
/**
 * Validation helpers for infoton paths and field values.
 */
object InfotonValidator extends LazyLogging {
  /**
   * A path is valid unless (ignoring leading slashes) it starts with an
   * underscore or addresses one of the reserved roots (ii, zz, proc).
   * Invalid paths are logged at WARN level.
   */
  def isInfotonNameValid(path: String): Boolean = {
    // dropWhile is a no-op when the path does not start with '/', so the
    // original startsWith guard was redundant.
    val noSlash = path.dropWhile(_ == '/')
    val valid = !noSlash.matches("_(.)*|(ii|zz|proc)(/(.)*)?")
    if (!valid) logger.warn(s"validation failed for infoton path: $path")
    valid
  }
  type Fields[K] = Map[K,Set[FieldValue]]
  /**
   * Rejects field maps containing any single value heavier than the
   * configured maximum weight.
   */
  def validateValueSize[K](fields: Fields[K]): Unit =
    if(fields.exists{case (_,s) => s.exists(_.size > cmwell.ws.Settings.maxValueWeight)})
      throw new IllegalArgumentException("uploaded infoton, contains a value heavier than 16K.")
}
| nruppin/CM-Well | server/cmwell-ws/app/logic/InfotonValidator.scala | Scala | apache-2.0 | 1,385 |
/**
* Copyright 2012-2014 Jorge Aliss (jaliss at gmail dot com) - twitter: @jaliss
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package securesocial.core.providers
import org.joda.time.DateTime
import play.api.Play.current
import play.api.data.Form
import play.api.data.Forms._
import play.api.mvc._
import securesocial.controllers.ViewTemplates
import securesocial.core.AuthenticationResult.{ Authenticated, NavigationFlow }
import securesocial.core._
import securesocial.core.providers.utils.PasswordHasher
import securesocial.core.services.{ AvatarService, UserService }
import scala.concurrent.{ ExecutionContext, Future }
/**
 * A username/password identity provider: validates submitted credentials
 * against the user service and, on success, refreshes the profile's avatar.
 */
class UsernamePasswordProvider[U](userService: UserService[U],
    avatarService: Option[AvatarService],
    viewTemplates: ViewTemplates,
    passwordHashers: Map[String, PasswordHasher])(implicit val executionContext: ExecutionContext)
  extends IdentityProvider with ApiSupport with Controller {
  override val id = UsernamePasswordProvider.UsernamePassword
  def authMethod = AuthenticationMethod.UserPassword
  // i18n message key shown for bad credentials in the browser flow.
  val InvalidCredentials = "securesocial.login.invalidCredentials"
  // API flow: failures are returned as AuthenticationResult.Failed.
  def authenticateForApi(implicit request: Request[AnyContent]): Future[AuthenticationResult] = {
    doAuthentication(apiMode = true)
  }
  // Browser flow: failures re-render the login page.
  def authenticate()(implicit request: Request[AnyContent]): Future[AuthenticationResult] = {
    doAuthentication()
  }
  // Resolves a profile only when the stored hasher is known and the supplied
  // password matches the stored PasswordInfo.
  private def profileForCredentials(userId: String, password: String): Future[Option[BasicProfile]] = {
    userService.find(id, userId).map { maybeUser =>
      for (
        user <- maybeUser;
        pinfo <- user.passwordInfo;
        hasher <- passwordHashers.get(pinfo.hasher) if hasher.matches(pinfo, password)
      ) yield {
        user
      }
    }
  }
  protected def authenticationFailedResult[A](apiMode: Boolean)(implicit request: Request[A]) = Future.successful {
    if (apiMode)
      AuthenticationResult.Failed("Invalid credentials")
    else
      NavigationFlow(badRequest(UsernamePasswordProvider.loginForm, Some(InvalidCredentials)))
  }
  // Refreshes the avatar URL from the avatar service when one is configured
  // and the profile has an email; otherwise leaves the profile untouched.
  protected def withUpdatedAvatar(profile: BasicProfile): Future[BasicProfile] = {
    (avatarService, profile.email) match {
      case (Some(service), Some(e)) => service.urlFor(e).map {
        case url if url != profile.avatarUrl => profile.copy(avatarUrl = url)
        case _ => profile
      }
      case _ => Future.successful(profile)
    }
  }
  // Shared implementation for both flows; the username is lower-cased before
  // lookup.
  private def doAuthentication[A](apiMode: Boolean = false)(implicit request: Request[A]): Future[AuthenticationResult] = {
    val form = UsernamePasswordProvider.loginForm.bindFromRequest()
    form.fold(
      errors => Future.successful {
        if (apiMode)
          AuthenticationResult.Failed("Invalid credentials")
        else
          AuthenticationResult.NavigationFlow(badRequest(errors)(request))
      },
      credentials => {
        val userId = credentials._1.toLowerCase
        val password = credentials._2
        profileForCredentials(userId, password).flatMap {
          case Some(profile) => withUpdatedAvatar(profile).map(Authenticated)
          case None => authenticationFailedResult(apiMode)
        }
      })
  }
  // Renders the login page with a 400 status, optionally with an error key.
  private def badRequest[A](f: Form[(String, String)], msg: Option[String] = None)(implicit request: Request[A]): Result = {
    Results.BadRequest(viewTemplates.getLoginPage(f, msg))
  }
}
/**
 * Companion holding the provider id, the login form, and lazily-read
 * configuration flags (with their defaults).
 */
object UsernamePasswordProvider {
  val UsernamePassword = "userpass"
  // Configuration keys.
  private val Key = "securesocial.userpass.withUserNameSupport"
  private val SendWelcomeEmailKey = "securesocial.userpass.sendWelcomeEmail"
  private val Hasher = "securesocial.userpass.hasher"
  private val EnableTokenJob = "securesocial.userpass.enableTokenJob"
  private val SignupSkipLogin = "securesocial.userpass.signupSkipLogin"
  // Both fields are required; validation failures re-render the login page.
  val loginForm = Form(
    tuple(
      "username" -> nonEmptyText,
      "password" -> nonEmptyText
    )
  )
  lazy val withUserNameSupport = current.configuration.getBoolean(Key).getOrElse(false)
  lazy val sendWelcomeEmail = current.configuration.getBoolean(SendWelcomeEmailKey).getOrElse(true)
  lazy val hasher = current.configuration.getString(Hasher).getOrElse(PasswordHasher.id)
  lazy val enableTokenJob = current.configuration.getBoolean(EnableTokenJob).getOrElse(true)
  lazy val signupSkipLogin = current.configuration.getBoolean(SignupSkipLogin).getOrElse(false)
}
/**
 * A token used for reset password and sign up operations
 *
 * @param uuid the token id
 * @param email the user email
 * @param creationTime the creation time
 * @param expirationTime the expiration time
 * @param isSignUp a boolean indicating whether the token was created for a sign up action or not
 */
case class MailToken(uuid: String, email: String, creationTime: DateTime, expirationTime: DateTime, isSignUp: Boolean) {
  def isExpired = expirationTime.isBeforeNow
}
| mojo22jojo/securesocial-test | module-code/app/securesocial/core/providers/UsernamePasswordProvider.scala | Scala | apache-2.0 | 5,365 |
package domain.user
/** Application user; id is optional (e.g. not yet assigned). */
case class User(id: Option[Long], email: String)
| ELAPAKAHARI/satellizer | examples/server/scala/app/domain/user/User.scala | Scala | mit | 94 |
package org.openurp.edu.eams.teach.grade.lesson.web.action
import org.beangle.commons.transfer.TransferListener
import org.beangle.commons.transfer.importer.listener.ImporterForeignerListener
import org.beangle.commons.transfer.importer.listener.ItemImporterListener
import org.openurp.edu.teach.code.GradeType
import org.openurp.edu.eams.teach.grade.course.service.CourseGradeImportListener
import org.openurp.edu.teach.grade.CourseGrade
import org.openurp.edu.eams.teach.lesson.GradeTypeConstants
import org.openurp.edu.eams.web.util.DownloadHelper
import com.opensymphony.xwork2.util.ClassLoaderUtil
/**
 * Web action for revoking (un-publishing) course grades and for importing
 * grades from an uploaded template.
 */
class RevokeAction extends AuditAction {
  protected override def getEntityName(): String = classOf[CourseGrade].getName
  // Streams the import template resource back to the client; returns null so
  // the framework does not render a view. NOTE(review): assumes a null return
  // means "no result" in this action framework — confirm.
  def downloadTemplate(): String = {
    val template = get("template")
    DownloadHelper.download(getRequest, getResponse, ClassLoaderUtil.getResource(template, this.getClass),
      null)
    null
  }
  // Un-publishes grades for the selected lessons. With no gradeTypeId, all
  // grade types are revoked; if the id is a "final candidate" type, the FINAL
  // type is revoked alongside it; otherwise just the single type.
  def revoke(): String = {
    val gradeTypeId = getInt("gradeTypeId")
    var gradeTypes: Array[GradeType] = null
    gradeTypes = if (null == gradeTypeId) baseCodeService.getCodes(classOf[GradeType]).toArray().asInstanceOf[Array[GradeType]] else if (settings.getSetting(getProject).getFinalCandinateTypes
      .contains(new GradeType(gradeTypeId))) entityDao.get(classOf[GradeType], Array(gradeTypeId, GradeTypeConstants.FINAL_ID))
      .toArray(Array.ofDim[GradeType](2)) else Array(baseCodeService.getCode(classOf[GradeType], gradeTypeId))
    // false = revoke publication for the listed lessons.
    courseGradeService.publish(get("lessonIds"), gradeTypes, false)
    redirect("search", "取消发布成功", "status=" + get("status"))
  }
  // Listeners applied during grade import: resolve foreign keys, then apply
  // grade-specific import handling.
  protected override def getImporterListeners(): List[_ <: TransferListener] = {
    val listeners = new ArrayList[ItemImporterListener]()
    listeners.add(new ImporterForeignerListener(entityDao))
    listeners.add(new CourseGradeImportListener(entityDao, getProject, calculator))
    listeners
  }
}
| openurp/edu-eams-webapp | grade/src/main/scala/org/openurp/edu/eams/teach/grade/lesson/web/action/RevokeAction.scala | Scala | gpl-3.0 | 1,932 |
package com.twitter.finagle.mdns
import com.twitter.finagle.{Announcement, Announcer, Addr, Resolver, Name}
import com.twitter.util.{Future, Try, Var}
import java.net.{InetSocketAddress, SocketAddress}
private object Local {
  /** Builds the full mDNS address string finagle announces/resolves under. */
  def mkAddr(name: String) = s"mdns!${name}._finagle._tcp.local."
}
/** Announcer for the "local" scheme: delegates to the mdns announcer. */
class LocalAnnouncer extends Announcer {
  val scheme = "local"
  def announce(ia: InetSocketAddress, addr: String): Future[Announcement] =
    Announcer.announce(ia, Local.mkAddr(addr))
}
/** Resolver for the "local" scheme: delegates to the mdns resolver. */
class LocalResolver extends Resolver {
  val scheme = "local"
  def bind(arg: String): Var[Addr] = {
    // A name that does not evaluate to Name.Bound raises a MatchError,
    // exactly as the original pattern-val binding did.
    Resolver.eval(Local.mkAddr(arg)) match {
      case Name.Bound(va) => va
    }
  }
}
| latur19318/finagle | finagle-mdns/src/main/scala/com/twitter/finagle/mdns/Local.scala | Scala | apache-2.0 | 663 |
package spark.api.java
import java.util.{Map => JMap}
import scala.collection.JavaConversions
import scala.collection.JavaConversions._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapred.InputFormat
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
import spark.{Accumulator, AccumulatorParam, RDD, SparkContext}
import spark.SparkContext.IntAccumulatorParam
import spark.SparkContext.DoubleAccumulatorParam
import spark.broadcast.Broadcast
/**
 * A Java-friendly version of [[spark.SparkContext]] that returns [[spark.api.java.JavaRDD]]s and
 * works with Java collections instead of Scala ones.
 */
class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround {
  /**
   * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4])
   *
   * added by wenzhiguang
   */
  def this(master: String) = this(new SparkContext(master))
  /**
   * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
   * @param jobName A name for your job, to display on the cluster web UI
   */
  def this(master: String, jobName: String) = this(new SparkContext(master, jobName))
  /**
   * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
   * @param jobName A name for your job, to display on the cluster web UI
   * @param sparkHome The SPARK_HOME directory on the slave nodes
   * @param jarFile A JAR to send to the cluster. This can be a path on the local file
   *                system or an HDFS, HTTP, HTTPS, or FTP URL.
   */
  def this(master: String, jobName: String, sparkHome: String, jarFile: String) =
    this(new SparkContext(master, jobName, sparkHome, Seq(jarFile)))
  /**
   * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
   * @param jobName A name for your job, to display on the cluster web UI
   * @param sparkHome The SPARK_HOME directory on the slave nodes
   * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
   *             system or HDFS, HTTP, HTTPS, or FTP URLs.
   */
  def this(master: String, jobName: String, sparkHome: String, jars: Array[String]) =
    this(new SparkContext(master, jobName, sparkHome, jars.toSeq))
  /**
   * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
   * @param jobName A name for your job, to display on the cluster web UI
   * @param sparkHome The SPARK_HOME directory on the slave nodes
   * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
   *             system or HDFS, HTTP, HTTPS, or FTP URLs.
   * @param environment Environment variables to set on worker nodes
   */
  def this(master: String, jobName: String, sparkHome: String, jars: Array[String],
      environment: JMap[String, String]) =
    this(new SparkContext(master, jobName, sparkHome, jars.toSeq, environment))
  private[spark] val env = sc.env
  /** Distribute a local Scala collection to form an RDD. */
  def parallelize[T](list: java.util.List[T], numSlices: Int): JavaRDD[T] = {
    implicit val cm: ClassManifest[T] =
      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
    sc.parallelize(JavaConversions.asScalaBuffer(list), numSlices)
  }
  /** Distribute a local Scala collection to form an RDD. */
  def parallelize[T](list: java.util.List[T]): JavaRDD[T] =
    parallelize(list, sc.defaultParallelism)
  /** Distribute a local Scala collection to form an RDD. */
  def parallelizePairs[K, V](list: java.util.List[Tuple2[K, V]], numSlices: Int)
  : JavaPairRDD[K, V] = {
    implicit val kcm: ClassManifest[K] =
      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[K]]
    implicit val vcm: ClassManifest[V] =
      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[V]]
    JavaPairRDD.fromRDD(sc.parallelize(JavaConversions.asScalaBuffer(list), numSlices))
  }
  /** Distribute a local Scala collection to form an RDD. */
  def parallelizePairs[K, V](list: java.util.List[Tuple2[K, V]]): JavaPairRDD[K, V] =
    parallelizePairs(list, sc.defaultParallelism)
  /** Distribute a local Scala collection to form an RDD. */
  def parallelizeDoubles(list: java.util.List[java.lang.Double], numSlices: Int): JavaDoubleRDD =
    JavaDoubleRDD.fromRDD(sc.parallelize(JavaConversions.asScalaBuffer(list).map(_.doubleValue()),
      numSlices))
  /** Distribute a local Scala collection to form an RDD. */
  def parallelizeDoubles(list: java.util.List[java.lang.Double]): JavaDoubleRDD =
    parallelizeDoubles(list, sc.defaultParallelism)
  /**
   * Read a text file from HDFS, a local file system (available on all nodes), or any
   * Hadoop-supported file system URI, and return it as an RDD of Strings.
   */
  def textFile(path: String): JavaRDD[String] = sc.textFile(path)
  /**
   * Read a text file from HDFS, a local file system (available on all nodes), or any
   * Hadoop-supported file system URI, and return it as an RDD of Strings.
   */
  def textFile(path: String, minSplits: Int): JavaRDD[String] = sc.textFile(path, minSplits)
  /** Get an RDD for a Hadoop SequenceFile with given key and value types. */
  def sequenceFile[K, V](path: String,
    keyClass: Class[K],
    valueClass: Class[V],
    minSplits: Int
    ): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(keyClass)
    implicit val vcm = ClassManifest.fromClass(valueClass)
    new JavaPairRDD(sc.sequenceFile(path, keyClass, valueClass, minSplits))
  }
  /** Get an RDD for a Hadoop SequenceFile. */
  def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]):
  JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(keyClass)
    implicit val vcm = ClassManifest.fromClass(valueClass)
    new JavaPairRDD(sc.sequenceFile(path, keyClass, valueClass))
  }
  /**
   * Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and
   * BytesWritable values that contain a serialized partition. This is still an experimental storage
   * format and may not be supported exactly as is in future Spark releases. It will also be pretty
   * slow if you use the default serializer (Java serialization), though the nice thing about it is
   * that there's very little effort required to save arbitrary objects.
   */
  def objectFile[T](path: String, minSplits: Int): JavaRDD[T] = {
    implicit val cm: ClassManifest[T] =
      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
    sc.objectFile(path, minSplits)(cm)
  }
  /**
   * Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and
   * BytesWritable values that contain a serialized partition. This is still an experimental storage
   * format and may not be supported exactly as is in future Spark releases. It will also be pretty
   * slow if you use the default serializer (Java serialization), though the nice thing about it is
   * that there's very little effort required to save arbitrary objects.
   */
  def objectFile[T](path: String): JavaRDD[T] = {
    implicit val cm: ClassManifest[T] =
      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
    sc.objectFile(path)(cm)
  }
  /**
   * Get an RDD for a Hadoop-readable dataset from a Hadoop JobConf giving its InputFormat and any
   * other necessary info (e.g. file name for a filesystem-based dataset, table name for HyperTable,
   * etc).
   */
  def hadoopRDD[K, V, F <: InputFormat[K, V]](
    conf: JobConf,
    inputFormatClass: Class[F],
    keyClass: Class[K],
    valueClass: Class[V],
    minSplits: Int
    ): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(keyClass)
    implicit val vcm = ClassManifest.fromClass(valueClass)
    new JavaPairRDD(sc.hadoopRDD(conf, inputFormatClass, keyClass, valueClass, minSplits))
  }
  /**
   * Get an RDD for a Hadoop-readable dataset from a Hadoop JobConf giving its InputFormat and any
   * other necessary info (e.g. file name for a filesystem-based dataset, table name for HyperTable,
   * etc).
   */
  def hadoopRDD[K, V, F <: InputFormat[K, V]](
    conf: JobConf,
    inputFormatClass: Class[F],
    keyClass: Class[K],
    valueClass: Class[V]
    ): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(keyClass)
    implicit val vcm = ClassManifest.fromClass(valueClass)
    new JavaPairRDD(sc.hadoopRDD(conf, inputFormatClass, keyClass, valueClass))
  }
  /** Get an RDD for a Hadoop file with an arbitrary InputFormat */
  def hadoopFile[K, V, F <: InputFormat[K, V]](
    path: String,
    inputFormatClass: Class[F],
    keyClass: Class[K],
    valueClass: Class[V],
    minSplits: Int
    ): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(keyClass)
    implicit val vcm = ClassManifest.fromClass(valueClass)
    new JavaPairRDD(sc.hadoopFile(path, inputFormatClass, keyClass, valueClass, minSplits))
  }
  /** Get an RDD for a Hadoop file with an arbitrary InputFormat */
  def hadoopFile[K, V, F <: InputFormat[K, V]](
    path: String,
    inputFormatClass: Class[F],
    keyClass: Class[K],
    valueClass: Class[V]
    ): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(keyClass)
    implicit val vcm = ClassManifest.fromClass(valueClass)
    new JavaPairRDD(sc.hadoopFile(path,
      inputFormatClass, keyClass, valueClass))
  }
  /**
   * Get an RDD for a given Hadoop file with an arbitrary new API InputFormat
   * and extra configuration options to pass to the input format.
   */
  def newAPIHadoopFile[K, V, F <: NewInputFormat[K, V]](
    path: String,
    fClass: Class[F],
    kClass: Class[K],
    vClass: Class[V],
    conf: Configuration): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(kClass)
    implicit val vcm = ClassManifest.fromClass(vClass)
    new JavaPairRDD(sc.newAPIHadoopFile(path, fClass, kClass, vClass, conf))
  }
  /**
   * Get an RDD for a given Hadoop dataset with an arbitrary new API InputFormat
   * and extra configuration options to pass to the input format.
   */
  def newAPIHadoopRDD[K, V, F <: NewInputFormat[K, V]](
    conf: Configuration,
    fClass: Class[F],
    kClass: Class[K],
    vClass: Class[V]): JavaPairRDD[K, V] = {
    implicit val kcm = ClassManifest.fromClass(kClass)
    implicit val vcm = ClassManifest.fromClass(vClass)
    new JavaPairRDD(sc.newAPIHadoopRDD(conf, fClass, kClass, vClass))
  }
  /** Build the union of two or more RDDs. */
  override def union[T](first: JavaRDD[T], rest: java.util.List[JavaRDD[T]]): JavaRDD[T] = {
    val rdds: Seq[RDD[T]] = (Seq(first) ++ asScalaBuffer(rest)).map(_.rdd)
    implicit val cm: ClassManifest[T] = first.classManifest
    sc.union(rdds)(cm)
  }
  /** Build the union of two or more RDDs. */
  override def union[K, V](first: JavaPairRDD[K, V], rest: java.util.List[JavaPairRDD[K, V]])
  : JavaPairRDD[K, V] = {
    val rdds: Seq[RDD[(K, V)]] = (Seq(first) ++ asScalaBuffer(rest)).map(_.rdd)
    implicit val cm: ClassManifest[(K, V)] = first.classManifest
    implicit val kcm: ClassManifest[K] = first.kManifest
    implicit val vcm: ClassManifest[V] = first.vManifest
    new JavaPairRDD(sc.union(rdds)(cm))(kcm, vcm)
  }
  /** Build the union of two or more RDDs. */
  override def union(first: JavaDoubleRDD, rest: java.util.List[JavaDoubleRDD]): JavaDoubleRDD = {
    val rdds: Seq[RDD[Double]] = (Seq(first) ++ asScalaBuffer(rest)).map(_.srdd)
    new JavaDoubleRDD(sc.union(rdds))
  }
  /**
   * Create an [[spark.Accumulator]] integer variable, which tasks can "add" values
   * to using the `+=` method. Only the master can access the accumulator's `value`.
   */
  def intAccumulator(initialValue: Int): Accumulator[Int] =
    sc.accumulator(initialValue)(IntAccumulatorParam)
  /**
   * Create an [[spark.Accumulator]] double variable, which tasks can "add" values
   * to using the `+=` method. Only the master can access the accumulator's `value`.
   */
  def doubleAccumulator(initialValue: Double): Accumulator[Double] =
    sc.accumulator(initialValue)(DoubleAccumulatorParam)
  /**
   * Create an [[spark.Accumulator]] variable of a given type, which tasks can "add" values
   * to using the `+=` method. Only the master can access the accumulator's `value`.
   */
  def accumulator[T](initialValue: T, accumulatorParam: AccumulatorParam[T]): Accumulator[T] =
    sc.accumulator(initialValue)(accumulatorParam)
  /**
   * Broadcast a read-only variable to the cluster, returning a [[spark.Broadcast]] object for
   * reading it in distributed functions. The variable will be sent to each cluster only once.
   */
  def broadcast[T](value: T): Broadcast[T] = sc.broadcast(value)
  /** Shut down the SparkContext. */
  def stop() {
    sc.stop()
  }
  /**
   * Get Spark's home location from either a value set through the constructor,
   * or the spark.home Java property, or the SPARK_HOME environment variable
   * (in that order of preference). If neither of these is set, return None.
   */
  def getSparkHome(): Option[String] = sc.getSparkHome()
}
/** Implicit conversions between the Java wrapper and the Scala SparkContext. */
object JavaSparkContext {
  implicit def fromSparkContext(sc: SparkContext): JavaSparkContext = new JavaSparkContext(sc)
  implicit def toSparkContext(jsc: JavaSparkContext): SparkContext = jsc.sc
}
| joeywen/spark_cpp_api | core/src/main/scala/spark/api/java/JavaSparkContext.scala | Scala | bsd-3-clause | 13,435 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.blueeyes.json.serialization
import quasar.blueeyes._, json._
import IsoSerialization._
import Extractor._
import DefaultSerialization._
import shapeless._
import scalaz._, Scalaz._, Validation._, FlatMap._
/** A version number of the form major.minor[.micro][-classifier]. */
case class Version(major: Int, minor: Int, micro: Option[Int] = None, classifier: Option[String] = None) {
  // Compatible iff this version is at least `other` and shares its major number.
  def isBackwardCompatible(other: Version) = this >= other && this.major == other.major
}
/** Extractor parsing version strings like "1.2", "1.2.3" or "1.2.3-RC1". */
object SVersion {
  // Triple-quoted strings do not process escapes, so the regex must use single
  // backslashes; "\\d" would make the pattern match a literal backslash
  // followed by 'd' and no version string would ever parse.
  val VPattern = """(\d+)\.(\d+)(?:\.(\d+))?(?:-(.*))?""".r
  def unapply(s: String): Option[Version] = s match {
    case VPattern(major, minor, micro, classifier) => Some(Version(major.toInt, minor.toInt, Option(micro).map(_.toInt), Option(classifier)))
    case _ => None
  }
}
/**
 * Order/Show instances and JSON (de)serialization for [[Version]].
 */
object Version {
  implicit val tc: Order[Version] with Show[Version] = new Order[Version] with Show[Version] {
    import scalaz.Ordering
    import scalaz.Ordering._
    // Lexicographic comparison: major, then minor, then micro (when both
    // are present; otherwise the micro components are treated as equal).
    def order(v1: Version, v2: Version): Ordering = {
      import scalaz.syntax.apply._
      (v1.major ?|? v2.major) |+|
        (v1.minor ?|? v2.minor) |+|
        (^(v1.micro, v2.micro) { _ ?|? _ } getOrElse EQ)
    }
    // Renders "major.minor[.micro][-classifier]" — the inverse of SVersion.
    override def shows(v: Version): String = {
      val microS = v.micro.map("." + _).getOrElse("")
      val clS = v.classifier.map("-" + _).getOrElse("")
      v.major + "." + v.minor + microS + clS
    }
  }
  implicit val serialization: Extractor[Version] with Decomposer[Version] = new Extractor[Version] with Decomposer[Version] {
    def decompose(v: Version) = JString(v.shows)
    def validated(jvalue: JValue) = jvalue match {
      case JString(SVersion(v)) => success(v)
      // Fixed: the original message had a stray " + " inside the literal,
      // rendering as "Version <json> + did not match ...".
      case _ => Failure(Invalid("Version " + jvalue.renderCompact + " did not match the expected pattern."))
    }
  }
}
object Versioned {
  // JSON property under which the schema version is stored by default.
  val defaultVersionProperty = JPath(".schemaVersion")
  // Factory entry points for iso-based versioned serialization builders.
  def extractorV[T] = new MkExtractorV[T]
  def decomposerV[T] = new MkDecomposerV[T]
  def serializationV[T] = new MkSerializationV[T]
  // Enables the "1.2.3".v syntax for building Version values from literals.
  implicit def toToVersion(s: String): ToVersion = new ToVersion(s)
  class ToVersion(s: String) {
    // Throws MatchError when the string is not a valid version literal.
    def v: Version = (s: @unchecked) match { case SVersion(v) => v }
  }
implicit def toVersionableExtractor[A](extractor: Extractor[A]): VersionableExtractor[A] = new VersionableExtractor(extractor)
class VersionableExtractor[A](extractor: Extractor[A]) {
def versioned(version: Option[Version], versionField: JPath = defaultVersionProperty): Extractor[A] = new Extractor[A] {
def validated(jv: JValue) = {
import scalaz.syntax.traverse._
version.traverse[Validation[Error, ?], Version] { v =>
jv.validated[Option[Version]]("schemaVersion") flatMap {
case Some(vrec) =>
if (v.isBackwardCompatible(vrec)) success(vrec)
else failure(Invalid(versionField.path + " value " + vrec.shows + " was incompatible with desired version " + v.shows))
case None =>
failure(Invalid(versionField.path + " property missing for value " + jv.renderCompact + "; was expecting " + v.shows))
}
} flatMap { _: Option[Version] =>
extractor.validated(jv)
}
}
}
}
implicit def toVersionableDecomposer[A](decomposer: Decomposer[A]): VersionableDecomposer[A] = new VersionableDecomposer(decomposer)
class VersionableDecomposer[A](decomposer: Decomposer[A]) {
def versioned(version: Option[Version], versionField: JPath = defaultVersionProperty): Decomposer[A] = new Decomposer[A] {
def decompose(a: A): JValue = {
val baseResult = decomposer.decompose(a)
version map { v =>
if (baseResult.isInstanceOf[JObject]) {
baseResult.unsafeInsert(versionField, v.jv)
} else {
sys.error("Cannot version primitive or array values!")
}
} getOrElse {
baseResult
}
}
}
}
class MkDecomposerV[T] {
def apply[F <: HList, L <: HList](fields: F, version: Option[Version], versionProperty: JPath = defaultVersionProperty)(
implicit iso: Generic.Aux[T, L],
decomposer: DecomposerAux[F, L]): Decomposer[T] =
new IsoDecomposer(fields, iso, decomposer).versioned(version, versionProperty)
}
class MkExtractorV[T] {
def apply[F <: HList, L <: HList](fields: F, version: Option[Version], versionProperty: JPath = defaultVersionProperty)(
implicit iso: Generic.Aux[T, L],
extractor: ExtractorAux[F, L]): Extractor[T] =
new IsoExtractor(fields, iso, extractor).versioned(version, versionProperty)
}
class MkSerializationV[T] {
def apply[F <: HList, L <: HList](fields: F, version: Option[Version], versionProperty: JPath = defaultVersionProperty)(
implicit iso: Generic.Aux[T, L],
decomposer: DecomposerAux[F, L],
extractor: ExtractorAux[F, L]): (Decomposer[T], Extractor[T]) =
(new IsoDecomposer(fields, iso, decomposer).versioned(version, versionProperty),
new IsoExtractor(fields, iso, extractor).versioned(version, versionProperty))
}
}
| drostron/quasar | blueeyes/src/main/scala/quasar/blueeyes/json/serialization/Versioned.scala | Scala | apache-2.0 | 5,683 |
package org.jetbrains.plugins.scala.testingSupport.specs2
/**
 * Base for tests of specs2 package- and module-scoped run configurations.
 *
 * Registers two specifications in `testPackage` and one in `otherPackage`.
 * A package-scoped run must execute only the former; a module-scoped run must
 * execute all three. NOTE(review): the file otherPackage/Test1.scala declares
 * a class named Test2 — the assertions below look specs up by class name, so
 * this file-name/class-name mismatch appears deliberate; confirm.
 *
 * @author Roman.Shein
 * @since 06.09.2015.
 */
abstract class Specs2PackageTest extends Specs2TestCase {
  // Package whose specs must be picked up by the package-scoped configuration.
  protected val packageName = "testPackage"
  // Package whose specs must be excluded from it (but included module-wide).
  protected val secondPackageName = "otherPackage"

  addSourceFile(packageName + "/Test1.scala",
    """
      |package testPackage
      |
      |import org.specs2.mutable.Specification
      |
      |class Test1 extends Specification {
      |  "One" should {
      |    "run" in {
      |      success
      |    }
      |  }
      |
      |  "Two" should {
      |    "run" in {
      |      success
      |    }
      |  }
      |}
    """.stripMargin.trim())

  addSourceFile(packageName + "/Test2.scala",
    """
      |package testPackage
      |
      |import org.specs2.mutable.Specification
      |
      |class Test2 extends Specification {
      |  "One" should {
      |    "run" in {
      |      success
      |    }
      |  }
      |
      |  "Two" should {
      |    "run" in {
      |      success
      |    }
      |  }
      |}
    """.stripMargin.trim())

  addSourceFile(secondPackageName + "/Test1.scala",
    """
      |package otherPackage
      |
      |import org.specs2.mutable.Specification
      |
      |class Test2 extends Specification {
      |  "Three" should {
      |    "run" in { success }
      |  }
      |}
    """.stripMargin.trim())

  // Package-scoped run: both testPackage specs execute, otherPackage does not.
  def testPackageTestRun(): Unit = {
    runTestByConfig(createTestFromPackage(packageName), checkPackageConfigAndSettings(_, packageName),
      root => checkResultTreeHasExactNamedPath(root, "[root]", "Test1", "One should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test1", "Two should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test2", "One should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test2", "Two should", "run") &&
        checkResultTreeDoesNotHaveNodes(root, "Three should"))
  }

  // Module-scoped run: all three specifications execute.
  def testModuleTestRun(): Unit = {
    runTestByConfig(createTestFromModule(testClassName),
      checkPackageConfigAndSettings(_, generatedName = "ScalaTests in 'src'"),
      root => checkResultTreeHasExactNamedPath(root, "[root]", "Test1", "One should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test1", "Two should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test2", "One should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test2", "Two should", "run") &&
        checkResultTreeHasExactNamedPath(root, "[root]", "Test2", "Three should", "run"))
  }
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/testingSupport/specs2/Specs2PackageTest.scala | Scala | apache-2.0 | 2,608 |
package org.squeryl.framework
import org.squeryl.test.PrimitiveTypeModeForTests._
import org.squeryl.Session
/**
 * Mixin that runs every non-ignored test inside a database transaction that
 * is always rolled back, so each test observes a clean database state.
 */
trait RunTestsInsideTransaction extends DbTestBase {
  self: DBConnector =>

  override protected def runTest(testName: String,args: org.scalatest.Args): org.scalatest.Status = {
    if(isIgnored(testName))
      super.runTest(testName, args)
    else {
      // each test occur from within a transaction, that way when the test completes _all_ changes
      // made during the test are reverted so each test gets a clean environment to test against
      transaction {
        val res = super.runTest(testName, args)
        // we abort the transaction if we get to here, so changes get rolled back
        Session.currentSession.connection.rollback
        // NOTE(review): `return` inside the transaction closure is a non-local
        // return (it unwinds by throwing NonLocalReturnControl), which exits
        // the `transaction { }` block before it can commit. Together with the
        // explicit rollback above, this is what keeps test changes out of the
        // database — confirm the transaction wrapper does not swallow
        // ControlThrowable before refactoring this.
        return res
      }
    }
  }
}
| xuwei-k/Squeryl | src/test/scala/org/squeryl/framework/RunTestsInsideTransaction.scala | Scala | apache-2.0 | 805 |
package org.yotchang4s.pixiv.http
import java.io._
import java.net._
import scala.collection._
import org.yotchang4s.scala.Loan
import org.yotchang4s.scala.Loan._
import scala.io._
import scala.collection.convert.WrapAsScala._
import java.nio.charset.Charset
import java.io.Closeable
/**
 * Wraps an HttpURLConnection response: status code, headers, cookies, and
 * body accessors as stream/reader/string. Closing this response closes the
 * body stream and disconnects the underlying connection.
 *
 * @param con               the already-connected HTTP connection
 * @param statusCode        the HTTP status code captured by the caller
 * @param requestProperties the request headers that were sent
 */
class HttpResponse(con: HttpURLConnection, statusCode: Int,
  requestProperties: Map[String, List[String]]) extends Closeable {

  // Lazily-populated cookie cache; plain var with no synchronization, so this
  // class is not thread-safe.
  var cacheCookies = immutable.Map[String, List[HttpCookie]]()

  // Prefer the error stream when present (4xx/5xx responses), else the normal
  // input stream; wrapped so that close() also disconnects the connection.
  var in: InputStream = new HttpURLConnectionCloseableInputStream(con,
    Option(con.getErrorStream) getOrElse con.getInputStream)

  /** The request headers that were sent. */
  def requestHeaders: Map[String, List[String]] = this.requestProperties

  /** HTTP status code of the response. */
  def responseStatusCode: Int = statusCode

  /** All response headers as immutable Scala collections. */
  def responseHeaders: immutable.Map[String, immutable.List[String]] =
    con.getHeaderFields.toMap.map { case (k, v) => (k, v.to[List]) }

  /** Cookies parsed from Set-Cookie headers, grouped by name (parsed once, then cached). */
  def cookies: immutable.Map[String, immutable.List[HttpCookie]] = {
    if (cacheCookies.isEmpty) {
      cacheCookies = createCookie
    }
    this.cacheCookies
  }

  // Parses every Set-Cookie header value and groups the cookies by key.
  private def createCookie: immutable.Map[String, List[HttpCookie]] = {
    val rawCookies = responseHeaders.get("Set-Cookie")
    val cookies = rawCookies match {
      case Some(cookies) => cookies.flatMap(HttpCookieParser.parse)
      case None => Nil
    }
    cookies.groupBy(_.key)
  }

  /** First value of the named response header, if any. */
  def responseHeaderFirst(key: String): Option[String] =
    responseHeader(key) match {
      case Nil => None
      case x => Some(x.head)
    }

  /** All values of the named response header (empty list when absent). */
  def responseHeader(key: String): immutable.List[String] =
    Option(con.getHeaderFields.get(key)) match {
      case Some(value) => value.to[List]
      case None => Nil
    }

  /** Raw body stream; closing it disconnects the connection. */
  def asStream: InputStream = in

  /** Body as a Reader using the JVM default charset. */
  def asReader: Reader = asReader(Charset.defaultCharset)
  def asReader(charsetName: String): Reader = asReader(Charset.forName(charsetName))
  def asReader(charset: Charset): Reader = new InputStreamReader(asStream, charset)

  /** Body as a String using the JVM default charset. */
  def asString: String = asString(Charset.defaultCharset)
  def asString(charsetName: String): String = asString(Charset.forName(charsetName))

  // Reads the body fully in 4 KiB chunks and joins it into one String.
  // NOTE(review): this is a for-comprehension WITHOUT `yield`, which desugars
  // to Loan.foreach — returning a String here relies on Loan's foreach
  // yielding the block's result; confirm against org.yotchang4s.scala.Loan.
  def asString(charset: Charset): String = {
    for {
      reader <- Loan(new BufferedReader(asReader(charset)))
    } {
      val buffer = new Array[Char](4096);
      val builder = new StringBuilder
      Stream.continually(reader.read(buffer)).takeWhile(-1 !=).foreach {
        builder.appendAll(buffer, 0, _)
      }
      builder.toString;
    }
  }

  /** Closes the body stream (and thereby disconnects the connection). */
  def close = in.close

  // FilterInputStream that additionally disconnects the HttpURLConnection
  // when closed, so the socket is not leaked.
  private class HttpURLConnectionCloseableInputStream(conn: HttpURLConnection, in: InputStream)
    extends FilterInputStream(in) {
    @throws(classOf[IOException])
    override def close {
      try {
        super.close
      } finally {
        this.conn.disconnect
      }
    }
  }
} | yotchang4s/yapix | src/org/yotchang4s/pixiv/http/HttpResponse.scala | Scala | bsd-3-clause | 2,738 |
package com.codexica.encryption
import play.api.libs.json.Json
import com.codexica.s3crate.filetree.history.snapshotstore.RemoteFileSystemTypes
import com.codexica.common.ByteListFormat
/**
 * Represents the data used to encrypt a blob of binary data. Encrypting the blob requires a symmetric key, but that
 * is pointless if the symmetric key is stored unencrypted, so it is encrypted using the public key identified by
 * encodingKey, and that encrypted version is stored as encodedKey.
 *
 * @param encodedKey The serialized SymmetricKey, encrypted with the public key
 * @param encodingKey The id of the public key used to encrypt encodedKey
 *
 * @author Josh Albrecht (joshalbrecht@gmail.com)
 */
case class EncryptionDetails(
  encodedKey: List[Byte],
  encodingKey: KeyPairReference
)
/** JSON (de)serialization: byte lists via ByteListFormat, the rest via the play-json format macro. */
object EncryptionDetails {
  implicit val keyFormat = new ByteListFormat()
  implicit val format = Json.format[EncryptionDetails]
}
| joshalbrecht/s3crate | src/main/scala/com/codexica/encryption/EncryptionDetails.scala | Scala | mit | 899 |
/*
* ============= Ryft-Customized BSD License ============
* Copyright (c) 2015, Ryft Systems, Inc.
* All rights reserved.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software must display the following acknowledgement:
* This product includes software developed by Ryft Systems, Inc.
* 4. Neither the name of Ryft Systems, Inc. nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY RYFT SYSTEMS, INC. ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL RYFT SYSTEMS, INC. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* ============
*/
package com.ryft.spark.connector.util
import com.ryft.spark.connector.exception.RyftSparkException
import com.ryft.spark.connector._
import com.ryft.spark.connector.query._
import com.ryft.spark.connector.query.value.model._
import com.ryft.spark.connector.query.value.{NumericValue, TimeValue, DateValue, PlainTextValue}
import org.apache.spark.Logging
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import scala.annotation.tailrec
/**
 * Translates Spark SQL data-source [[Filter]]s into Ryft record queries.
 *
 * Unsupported filters raise a [[RyftSparkException]]. String prefix/suffix
 * filters are widened to "contains" — a superset match, which is safe for
 * push-down because Spark re-applies the exact predicate to returned rows.
 */
private[connector] object FilterConverter extends Logging {

  /**
   * Folds all pushed-down filters into a single RecordQuery by AND-ing their
   * individual translations.
   *
   * @param filters    filters Spark asks the source to push down
   * @param schema     dataset schema, used to type each referenced column
   * @param parameters source options (e.g. "date_format", "decimal", "subitizer")
   * @param acc        accumulator for the tail recursion
   */
  @tailrec
  def filtersToRecordQuery(filters: Array[Filter],
                           schema: StructType,
                           parameters: Map[String, String] = Map.empty,
                           acc: RecordQuery = RecordQuery()): RecordQuery = {
    if (filters.isEmpty) acc
    else filtersToRecordQuery(filters.tail, schema, parameters, acc.and(filterToRecordQuery(filters.head, schema, parameters)))
  }

  private def filterToRecordQuery(filter: Filter, schema: StructType, parameters: Map[String, String]): RecordQuery =
    RecordQuery(toRyftFilter(filter, schema, parameters))

  // Ryft addresses record fields as "RECORD.<field>".
  private val recordStr = "RECORD."

  // Extracts the attribute name from the simple comparison filters handled in
  // toRyftFilter. This replaces the former reflective structural-type access
  // (a.asInstanceOf[{ def attribute: String }].attribute), which relied on
  // runtime reflection; an explicit match is both faster and type-checked.
  private def attributeOf(f: Filter): String = f match {
    case EqualTo(attr, _) => attr
    case StringContains(attr, _) => attr
    case StringStartsWith(attr, _) => attr
    case StringEndsWith(attr, _) => attr
    case GreaterThan(attr, _) => attr
    case GreaterThanOrEqual(attr, _) => attr
    case LessThan(attr, _) => attr
    case LessThanOrEqual(attr, _) => attr
    case other =>
      val msg = s"Filter has no single attribute: $other"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // TODO: try to implement it tailrec
  // This method is not tail recursive but large depth isn't assumed here
  private def toRyftFilter(f: Filter, schema: StructType, parameters: Map[String, String]): filter.Filter = f match {
    case Or(left, right) => filter.Or(toRyftFilter(left, schema, parameters), toRyftFilter(right, schema, parameters))
    case And(left, right) => filter.And(toRyftFilter(left, schema, parameters), toRyftFilter(right, schema, parameters))
    case Not(filter) => not(filter, schema, parameters)
    case a@(EqualTo(_, _) |
      StringContains(_, _) |
      StringStartsWith(_, _) |
      StringEndsWith(_, _) |
      GreaterThan(_, _) |
      GreaterThanOrEqual(_, _) |
      LessThan(_, _) |
      LessThanOrEqual(_, _)) =>
      // Dispatch on the referenced column's declared type so the comparison
      // value is rendered in the matching Ryft form.
      val fieldType = schema(attributeOf(a)).dataType
      fieldType match {
        case DateType => toRyftDateFilter(f, parameters.getOrElse("date_format", ""))
        case TimestampType => toRyftTimestampFilter(f, parameters.getOrElse("date_format", ""))
        case ft: NumericType => toRyftNumericFilter(f, parameters)
        case _ => toRyftStringFilter(f)
      }
    case _ =>
      val msg = s"Filter not supported by Ryft: $f"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // String columns: equality plus contains. StartsWith/EndsWith are widened
  // to Contains (a superset; Spark re-evaluates the exact predicate).
  private def toRyftStringFilter(f: Filter): filter.Filter = f match {
    case EqualTo(attr, v) => filter.EqualTo(recordStr + attr, PlainTextValue(v.toString)) // TODO: check if we should not use String everywhere
    case StringContains(attr, v) => filter.Contains(recordStr + attr, PlainTextValue(v))
    case StringStartsWith(attr, v) => filter.Contains(recordStr + attr, PlainTextValue(v))
    case StringEndsWith(attr, v) => filter.Contains(recordStr + attr, PlainTextValue(v))
    case _ =>
      val msg = s"Text filter not supported by Ryft: $f"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // Date columns: comparisons on the date part only, using "date_format".
  private def toRyftDateFilter(f: Filter, format: String): filter.Filter = f match {
    case EqualTo(attr, v) =>
      ryftDateFilter(attr, _.===, v.toString, format)
    case GreaterThan(attr, v) =>
      ryftDateFilter(attr, _.>, v.toString, format)
    case GreaterThanOrEqual(attr, v) =>
      ryftDateFilter(attr, _.>=, v.toString, format)
    case LessThan(attr, v) =>
      ryftDateFilter(attr, _.<, v.toString, format)
    case LessThanOrEqual(attr, v) =>
      ryftDateFilter(attr, _.<=, v.toString, format)
    case _ =>
      val msg = s"Date filter not supported by Ryft: $f"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // Timestamp columns: a combined date AND time comparison (see ryftTimestampFilter).
  private def toRyftTimestampFilter(f: Filter, format: String): filter.Filter = f match {
    case EqualTo(attr, v) =>
      ryftTimestampFilter(attr, _.===, v.toString, format)
    case GreaterThan(attr, v) =>
      ryftTimestampFilter(attr, _.>, v.toString, format)
    case GreaterThanOrEqual(attr, v) =>
      ryftTimestampFilter(attr, _.>=, v.toString, format)
    case LessThan(attr, v) =>
      ryftTimestampFilter(attr, _.<, v.toString, format)
    case LessThanOrEqual(attr, v) =>
      ryftTimestampFilter(attr, _.<=, v.toString, format)
    case _ =>
      val msg = s"Timestamp filter not supported by Ryft: $f"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // Numeric columns, honouring the "decimal" and "subitizer" source options.
  private def toRyftNumericFilter(f: Filter, parameters: Map[String, String]): filter.Filter = f match {
    case EqualTo(attr, v) =>
      ryftNumericFilter(attr, _.===, v.toString, parameters)
    case GreaterThan(attr, v) =>
      ryftNumericFilter(attr, _.>, v.toString, parameters)
    case GreaterThanOrEqual(attr, v) =>
      ryftNumericFilter(attr, _.>=, v.toString, parameters)
    case LessThan(attr, v) =>
      ryftNumericFilter(attr, _.<, v.toString, parameters)
    case LessThanOrEqual(attr, v) =>
      ryftNumericFilter(attr, _.<=, v.toString, parameters)
    case _ =>
      val msg = s"Numeric filter not supported by Ryft: $f"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // Negation: only equality (typed per column) and the string-contains family
  // are expressible under NOT; everything else is rejected.
  private def not(f: Filter, schema: StructType, parameters: Map[String, String]): filter.Filter = f match {
    case EqualTo(attr, v) =>
      val fieldType = schema(attr).dataType
      fieldType match {
        case DateType =>
          ryftDateFilter(attr, _.=/=, v.toString, parameters.getOrElse("date_format", ""))
        case TimestampType =>
          ryftTimestampFilter(attr, _.=/=, v.toString, parameters.getOrElse("date_format", ""))
        case ft: NumericType =>
          ryftNumericFilter(attr, _.=/=, v.toString, parameters)
        case _ =>
          filter.NotEqualTo(recordStr + attr, PlainTextValue(v.toString)) // TODO: check if we should not use String everywhere
      }
    case StringContains(attr, v) => filter.NotContains(recordStr + attr, PlainTextValue(v))
    case StringStartsWith(attr, v) => filter.NotContains(recordStr + attr, PlainTextValue(v))
    case StringEndsWith(attr, v) => filter.NotContains(recordStr + attr, PlainTextValue(v))
    case _ =>
      val msg = s"Filter not supported by Ryft: $f"
      logWarning(msg)
      throw RyftSparkException(msg)
  }

  // Builds a date comparison on the portion of `value` before the first space
  // (the date part), converting between Spark and Ryft date formats.
  private def ryftDateFilter(attr: String,
                             operator: Format => Value => ShortParams,
                             value: String,
                             format: String) = {
    val dateFormat = SparkSqlFormatConverter.dateFormat(format) getOrElse {
      val msg = s"Invalid date format $format, use MM/dd/yyyy instead"
      logError(msg)
      throw RyftSparkException(msg)
    }
    val ryftValue = SparkSqlFormatConverter.fromSparkSqlDate(value.split(' ')(0), dateFormat)
    filter.Contains(recordStr + attr,
      DateValue(operator(SparkSqlFormatConverter.toRyftDateFormat(dateFormat))(Date(ryftValue))))
  }

  // Timestamp comparison = date comparison AND time comparison on the same field.
  private def ryftTimestampFilter(attr: String,
                                  operator: Format => Value => ShortParams,
                                  value: String,
                                  format: String) = {
    val timeFormat = SparkSqlFormatConverter.timeFormat(format) getOrElse {
      val msg = s"Invalid timestamp format $format, use MM/dd/yyyy HH:MM:ss instead"
      logError(msg)
      throw RyftSparkException(msg)
    }
    val ryftValue = SparkSqlFormatConverter.fromSparkSqlTimestamp(value, timeFormat)
    filter.And(
      ryftDateFilter(attr, operator, value, format),
      filter.Contains(recordStr + attr,
        TimeValue(operator(SparkSqlFormatConverter.toRyftTimeFormat(timeFormat))(Time(ryftValue))))
    )
  }

  // "decimal" is the decimal separator (default "."); "subitizer" is passed
  // through to the Ryft numeric primitive unchanged.
  private def ryftNumericFilter(attr: String,
                                operator: Format => Value => ShortParams,
                                value: String,
                                parameters: Map[String, String]) = {
    val decimal = parameters.getOrElse("decimal", ".")
    val subitizer = parameters.getOrElse("subitizer", "")
    filter.Contains(recordStr + attr, NumericValue(operator(NUM)(Number(value)), subitizer, decimal))
  }
}
| getryft/spark-ryft-connector | spark-ryft-connector/src/main/scala/com/ryft/spark/connector/util/FilterConverter.scala | Scala | bsd-3-clause | 9,681 |
// Most types are port of Vue.js official type definitions for TypeScript
// https://github.com/vuejs/vue/tree/dev/types
package vuescale
package facade
import scala.scalajs.js
import scala.scalajs.js.|
import scala.scalajs.js.annotation.JSName
import org.scalajs.dom
/** Scala.js facade for a Vue.js virtual-DOM node (port of the official VNode type). */
@js.native
trait VNode extends js.Object {
  var tag: String = js.native
  var data: VNodeData = js.native
  var children: js.Array[VNode] = js.native
  var text: String = js.native
  // Real DOM node this virtual node is rendered to.
  var elm: dom.Node = js.native
  // Namespace (used e.g. for SVG elements).
  var ns: String = js.native
  var componentOptions: VNodeComponentOptions = js.native
  var componentInstance: Vue = js.native
  var parent: VNode = js.native
  var raw: Boolean = js.native
  var isStatic: js.UndefOr[Boolean] = js.native
  var isRootInsert: Boolean = js.native
  var isComment: Boolean = js.native
}
/** Facade for the options attached to a component VNode (constructor, props, listeners, slot children). */
@js.native
trait VNodeComponentOptions extends js.Object {
  val Ctor: Vue = js.native
  val propsData: js.UndefOr[Object] = js.native
  val listeners: js.UndefOr[Object] = js.native
  val children: js.UndefOr[Any] = js.native // FIXME: give correct type
  val tag: js.UndefOr[String] = js.native
}
/**
 * Facade for the data object carried by a VNode: classes, styles, props,
 * attributes, DOM properties, event handlers and directives
 * (port of Vue.js's VNodeData type).
 */
@js.native
trait VNodeData extends js.Object {
  var key: js.UndefOr[String | Double] = js.native
  var slot: js.UndefOr[String] = js.native
  var scopedSlots: js.UndefOr[js.Dictionary[js.Function1[js.Any, _]]] = js.native
  var ref: js.UndefOr[String] = js.native
  var tag: js.UndefOr[String] = js.native
  var staticClass: js.UndefOr[String] = js.native
  // `class` is a Scala keyword, hence the JSName remapping.
  @JSName("class")
  var jsClass: js.UndefOr[js.Any] = js.native // FIXME: could there be any good name?
  var staticStyle: js.UndefOr[js.Dictionary[js.Any]] = js.native
  var style: js.UndefOr[js.Object] = js.native // FIXME: is there any situation that it needs to be an Array?
  var props: js.UndefOr[js.Dictionary[js.Any]] = js.native
  var attrs: js.UndefOr[js.Dictionary[js.Any]] = js.native
  var domProps: js.UndefOr[js.Dictionary[js.Any]] = js.native
  var hook: js.UndefOr[js.Dictionary[js.Function]] = js.native
  var on: js.UndefOr[js.Dictionary[js.Any]] = js.native // FIXME: give its values a type
  var nativeOn: js.UndefOr[js.Dictionary[js.Any]] = js.native // FIXME: give its values a type
  var transition: js.UndefOr[js.Object] = js.native
  var show: js.UndefOr[Boolean] = js.native
  var inlineTemplate: js.UndefOr[InlineTemplate] = js.native
  var directives: js.UndefOr[js.Array[VNodeDirective]] = js.native
  var keepAlive: js.UndefOr[Boolean] = js.native
}
/** Facade for an inline template: compiled render function plus its static render functions. */
@js.native
trait InlineTemplate extends js.Object {
  val render: js.Function = js.native
  val staticRenderFns: js.Array[js.Function] = js.native
}
/** Facade for a directive applied to a VNode (name, bound value, expression, argument, modifiers). */
@js.native
trait VNodeDirective extends js.Object {
  val name: String = js.native
  val value: js.Any = js.native
  val oldValue: js.Any = js.native
  val expression: js.Any = js.native
  val arg: String = js.native
  val modifiers: js.Dictionary[Boolean] = js.native
}
| lettenj61/vuescale | vuescale-core/src/main/scala/vuescale/facade/VNode.scala | Scala | mit | 2,867 |
package gitbucket.core.controller
import gitbucket.core.issues.priorities.html
import gitbucket.core.service.{RepositoryService, AccountService, IssuesService, PrioritiesService}
import gitbucket.core.util.{ReferrerAuthenticator, WritableUsersAuthenticator}
import gitbucket.core.util.Implicits._
import gitbucket.core.util.SyntaxSugars._
import org.scalatra.forms._
import org.scalatra.i18n.Messages
import org.scalatra.Ok
/** Concrete controller wiring [[PrioritiesControllerBase]] to its service implementations. */
class PrioritiesController extends PrioritiesControllerBase
  with PrioritiesService with IssuesService with RepositoryService with AccountService
  with ReferrerAuthenticator with WritableUsersAuthenticator
/**
 * HTTP routes for managing a repository's issue priorities:
 * list, create, edit, reorder, set default, and delete.
 *
 * Listing is guarded by the referrer check; every mutating route requires a
 * user with write access to the repository.
 */
trait PrioritiesControllerBase extends ControllerBase {
  self: PrioritiesService with IssuesService with RepositoryService
    with ReferrerAuthenticator with WritableUsersAuthenticator =>

  // Form posted by the priority create/edit dialogs. The color arrives as
  // "#rrggbb"; the handlers strip the leading '#' before persisting.
  case class PriorityForm(priorityName: String, description: Option[String], color: String)

  val priorityForm = mapping(
    "priorityName" -> trim(label("Priority name", text(required, priorityName, uniquePriorityName, maxlength(100)))),
    "description" -> trim(label("Description", optional(text(maxlength(255))))),
    "priorityColor" -> trim(label("Color", text(required, color)))
  )(PriorityForm.apply)

  // Lists all priorities together with per-priority issue counts.
  get("/:owner/:repository/issues/priorities")(referrersOnly { repository =>
    html.list(
      getPriorities(repository.owner, repository.name),
      countIssueGroupByPriorities(repository.owner, repository.name, IssuesService.IssueSearchCondition(), Map.empty),
      repository,
      hasDeveloperRole(repository.owner, repository.name, context.loginAccount))
  })

  // Renders the "new priority" dialog.
  ajaxGet("/:owner/:repository/issues/priorities/new")(writableUsersOnly { repository =>
    html.edit(None, repository)
  })

  // Creates a priority and returns its rendered row.
  ajaxPost("/:owner/:repository/issues/priorities/new", priorityForm)(writableUsersOnly { (form, repository) =>
    val priorityId = createPriority(repository.owner, repository.name, form.priorityName, form.description, form.color.substring(1))
    html.priority(
      getPriority(repository.owner, repository.name, priorityId).get,
      countIssueGroupByPriorities(repository.owner, repository.name, IssuesService.IssueSearchCondition(), Map.empty),
      repository,
      hasDeveloperRole(repository.owner, repository.name, context.loginAccount))
  })

  // Renders the edit dialog for an existing priority (404 when missing).
  ajaxGet("/:owner/:repository/issues/priorities/:priorityId/edit")(writableUsersOnly { repository =>
    getPriority(repository.owner, repository.name, params("priorityId").toInt).map { priority =>
      html.edit(Some(priority), repository)
    } getOrElse NotFound()
  })

  // Applies the edit form and returns the re-rendered row.
  ajaxPost("/:owner/:repository/issues/priorities/:priorityId/edit", priorityForm)(writableUsersOnly { (form, repository) =>
    updatePriority(repository.owner, repository.name, params("priorityId").toInt, form.priorityName, form.description, form.color.substring(1))
    html.priority(
      getPriority(repository.owner, repository.name, params("priorityId").toInt).get,
      countIssueGroupByPriorities(repository.owner, repository.name, IssuesService.IssueSearchCondition(), Map.empty),
      repository,
      hasDeveloperRole(repository.owner, repository.name, context.loginAccount))
  })

  // Persists a drag-and-drop ordering; "order" is a comma-separated id list
  // whose position in the list becomes the priority's sort index.
  ajaxPost("/:owner/:repository/issues/priorities/reorder")(writableUsersOnly { (repository) =>
    reorderPriorities(repository.owner, repository.name, params("order")
      .split(",")
      .map(id => id.toInt)
      .zipWithIndex
      .toMap)
    Ok()
  })

  // Marks a priority as the repository default.
  ajaxPost("/:owner/:repository/issues/priorities/default")(writableUsersOnly { (repository) =>
    setDefaultPriority(repository.owner, repository.name, priorityId("priorityId"))
    Ok()
  })

  // Deletes a priority.
  ajaxPost("/:owner/:repository/issues/priorities/:priorityId/delete")(writableUsersOnly { repository =>
    deletePriority(repository.owner, repository.name, params("priorityId").toInt)
    Ok()
  })

  // Reads an optional Int request parameter (None when absent or non-numeric).
  val priorityId: String => Option[Int] = (key: String) => params.get(key).flatMap(_.toIntOpt)

  /**
   * Constraint for the identifier such as user name, repository name or page name.
   */
  private def priorityName: Constraint = new Constraint(){
    override def validate(name: String, value: String, messages: Messages): Option[String] =
      if(value.contains(',')){
        Some(s"${name} contains invalid character.")
      } else if(value.startsWith("_") || value.startsWith("-")){
        Some(s"${name} starts with invalid character.")
      } else {
        None
      }
  }

  // Rejects a name already used by another priority of the same repository
  // (the current priority, when editing, is excluded from the check).
  private def uniquePriorityName: Constraint = new Constraint(){
    override def validate(name: String, value: String, params: Map[String, Seq[String]], messages: Messages): Option[String] = {
      val owner = params.value("owner")
      val repository = params.value("repository")
      params.optionValue("priorityId").map { priorityId =>
        getPriority(owner, repository, value).filter(_.priorityId != priorityId.toInt).map(_ => "Name has already been taken.")
      }.getOrElse {
        getPriority(owner, repository, value).map(_ => "Name has already been taken.")
      }
    }
  }
}
| gencer/gitbucket | src/main/scala/gitbucket/core/controller/PrioritiesController.scala | Scala | apache-2.0 | 5,027 |
package sss
import sss.ancillary.Logging
import sss.db.IsNull.IsNull
import sss.db.NullOrder.NullOrder
import sss.db.TxIsolationLevel.TxIsolationLevel
import java.io.{ByteArrayInputStream, InputStream}
import java.math.BigDecimal
import java.sql.{Blob, Connection}
import java.util
import java.util.Locale
import java.util.regex.Pattern
import javax.sql.DataSource
import scala.collection.mutable
import scala.concurrent.duration.Duration
import scala.concurrent.{ExecutionContext, Future}
import scala.language.implicitConversions
import scala.util.Try
/**
* @author alan
*/
package object db extends Logging {
/** Throws a [[DbException]] built from `msg`; `apply` never returns normally. */
object DbException {
  def apply(msg: String) = throw new DbException(msg)
}

/** Throws a [[DbError]] built from `msg`; `apply` never returns normally. */
object DbError {
  def apply(msg: String) = throw new DbError(msg)
}

/** Raised when an optimistic-lock version check fails. */
class DbOptimisticLockingException(msg: String) extends RuntimeException(msg)
/** Recoverable database failure. */
class DbException(msg: String) extends RuntimeException(msg)
/** Unrecoverable database failure (extends Error, not Exception). */
class DbError(msg: String) extends Error(msg)

// A query yields an indexed sequence of results; Rows specialises it to Row.
type QueryResults[A] = IndexedSeq[A]
type Rows = QueryResults[Row]
// Compound ("with") type listing every column type the library maps to and
// from JDBC. NOTE(review): this intersection appears to serve as a
// compile-time marker/bound rather than an instantiable type — confirm how
// it is used at call sites before changing it.
type SimpleColumnTypes =
  String with
  Long with
  Short with
  Integer with
  Int with
  Float with
  Boolean with
  BigDecimal with
  Byte with
  Double with
  mutable.ArraySeq[Byte] with
  Array[Byte] with
  java.sql.Date with
  java.sql.Time with
  java.sql.Timestamp with
  java.sql.Clob with
  java.sql.Blob with
  java.sql.Array with
  java.sql.Ref with
  java.sql.Struct with
  InputStream

// Columns may also be nullable, modelled as Option of the simple types.
type ColumnTypes = SimpleColumnTypes with Option[SimpleColumnTypes]
/** Transaction isolation levels; each value's id equals the matching java.sql.Connection constant. */
object TxIsolationLevel extends Enumeration {
  type TxIsolationLevel = Value
  val NONE = Value(Connection.TRANSACTION_NONE)
  val READ_COMMITTED = Value(Connection.TRANSACTION_READ_COMMITTED)
  val READ_UNCOMMITTED = Value(Connection.TRANSACTION_READ_UNCOMMITTED)
  val REPEATABLE_READ = Value(Connection.TRANSACTION_REPEATABLE_READ)
  val SERIALIZABLE = Value(Connection.TRANSACTION_SERIALIZABLE)
}
/** Everything needed to execute a FutureTx: a DataSource, an executor, and an optional isolation level. */
trait RunContext {
  implicit val ds: DataSource
  implicit val executor: FutureTxExecutor
  val isolationLevel: Option[TxIsolationLevel]
}
/** RunContext for asynchronous execution; also carries the ExecutionContext that futures run on. */
class AsyncRunContext(aDs: DataSource,
                      anEc: ExecutionContext,
                      val isolationLevel: Option[TxIsolationLevel] = None,
                      anExecutor: FutureTxExecutor = FutureTxExecutor
                     ) extends RunContext {
  implicit val ds: DataSource = aDs
  implicit val ec: ExecutionContext = anEc
  implicit val executor: FutureTxExecutor = anExecutor
}
/** RunContext for synchronous (blocking) execution; `timeout` bounds how long a run may block. */
class SyncRunContext(aDs: DataSource,
                     val timeout: Duration,
                     val isolationLevel: Option[TxIsolationLevel] = None,
                     anExecutor: FutureTxExecutor = FutureTxExecutor
                    ) extends RunContext {
  implicit val ds: DataSource = aDs
  implicit val executor: FutureTxExecutor = anExecutor
}
/** Adds asynchronous `run` / `runRollback` to any FutureTx. */
implicit class RunOp[T](val t: FutureTx[T]) extends AnyVal {
  /** Executes the transaction (the trailing boolean is the rollback flag: false = commit). */
  def run(implicit runContext: AsyncRunContext): Future[T] = {
    runContext.executor.execute(t, runContext, false)
  }
  /** Executes the transaction and then rolls it back (dry-run semantics). */
  def runRollback(implicit runContext: AsyncRunContext): Future[T] = {
    runContext.executor.execute(t, runContext, true)
  }
}
/** Adds synchronous execution (`runSync*`) to any FutureTx, blocking up to the context's timeout. */
implicit class RunSyncOp[T](val t: FutureTx[T]) extends AnyVal {
  /** Executes and commits, returning the outcome as a Try (third argument is the rollback flag). */
  def runSync(implicit runContext: SyncRunContext): Try[T] = {
    runContext.executor.executeSync(
      t,
      runContext.ds,
      false,
      runContext.isolationLevel,
      runContext.timeout
    )
  }
  /** Executes then rolls back, returning the outcome as a Try. */
  def runSyncRollback(implicit runContext: SyncRunContext): Try[T] = {
    runContext.executor.executeSync(
      t,
      runContext.ds,
      true,
      runContext.isolationLevel,
      runContext.timeout
    )
  }
  // The *AndGet variants unwrap the Try and rethrow the underlying exception on failure.
  def runSyncAndGet(implicit runContext: SyncRunContext): T = runSync.get
  def runSyncRollbackAndGet(implicit runContext: SyncRunContext): T = runSyncRollback.get
}
/**
 * String interpolator: ps"... $a ..." yields the SQL text with each
 * interpolated argument replaced by '?', paired with the argument list —
 * ready for use as a parameterized (prepared) statement.
 */
implicit class SqlHelper(val sc: StringContext) extends AnyVal {
  def ps(args: Any*): (String, Seq[Any]) = {
    (sc.parts.mkString("?"), args)
  }
}

// Allows a Row to be used wherever a Map[String, _] is expected.
implicit def toMap(r: Row): Map[String, _] = r.asMap
/** Convenience operations on Map[String, A]. */
implicit class MapHelper[A](val m: Map[String, A]) extends AnyVal {
  /**
   * Splits the map into its keys and its values as two sequences whose
   * positions correspond (keys(i) maps to values(i)).
   *
   * Uses `keys`/`values` — documented to iterate in a consistent order for
   * the same map — instead of the previous foldLeft that appended to two
   * Seqs with `:+` (quadratic for Seq's default List implementation).
   */
  def splitKeyValues: (Seq[String], Seq[A]) = (m.keys.toSeq, m.values.toSeq)
}
/** Paging parameters: `page` is the maximum row count, `start` an optional row offset. */
sealed case class LimitParams(page: Int, start: Option[Long] = None)
type Limit = Option[LimitParams]

/** SQL NULL tests; each value's name is the literal text rendered into the WHERE clause. */
object IsNull extends Enumeration {
  type IsNull = Value
  val Null = Value(" IS NULL")
  val NotNull = Value(" IS NOT NULL")
}

/** NULL placement in ORDER BY clauses; the value's name is the literal SQL text. */
object NullOrder extends Enumeration {
  type NullOrder = Value
  val NullsFirst = Value("NULLS FIRST")
  val NullsLast = Value("NULLS LAST")
}
/**
 * An ORDER BY term for a single column. The column name is validated against
 * a strict identifier pattern at construction time; since the name is later
 * interpolated directly into SQL text (see OrderBys.sql), this check also
 * guards against SQL injection via column names.
 */
trait OrderBy {
  val regex = "^[a-zA-Z_][a-zA-Z0-9_]*$"
  val colName: String
  val pattern = Pattern.compile(regex)
  require(pattern.matcher(colName).matches(), s"Column name must conform to pattern $regex")
}
/** Factory overloads for OrderBys with optional paging. */
object OrderBys {
  /** Order terms, returning `pageSize` rows starting at offset `start`. */
  def apply(start: Int, pageSize: Int, orderBys: OrderBy*): OrderBys =
    OrderBys(orderBys.toSeq, Some(LimitParams(pageSize, Some(start))))
  /** Order terms limited to the first `pageSize` rows. */
  def apply(pageSize: Int, orderBys: OrderBy*): OrderBys = OrderBys(orderBys.toSeq, Some(LimitParams(pageSize)))
  /** Order terms without any limit. */
  def apply(orderBys: OrderBy*): OrderBys = OrderBys(orderBys.toSeq, None)
}
/** A set of ORDER BY terms plus an optional LIMIT, rendered as a SQL suffix. */
sealed case class OrderBys(orderBys: Seq[OrderBy] = Seq.empty, limit: Limit = None) {
  /** Returns a copy limited to `limit` rows starting at offset `start`. */
  def limit(start: Long, limit: Int): OrderBys = OrderBys(orderBys, Some(LimitParams(limit, Some(start))))
  /** Returns a copy limited to the first `limit` rows. */
  def limit(limit: Int): OrderBys = OrderBys(orderBys, Some(LimitParams(limit)))

  // Renders " ORDER BY ..." followed by " LIMIT [start,] n".
  // NOTE(review): "LIMIT start, n" is MySQL/HSQLDB-style syntax — confirm the
  // target database dialect accepts it.
  private[db] def sql: String = {
    orderByClausesToString(orderBys) + (limit match {
      case Some(LimitParams(lim, Some(start))) => s" LIMIT $start, $lim"
      case Some(LimitParams(lim, None)) => s" LIMIT $lim"
      case None => ""
    })
  }

  // Joins the order terms as "col DESC NULLS LAST", "col ASC NULLS FIRST", etc.
  private def orderByClausesToString(orderClauses: Seq[OrderBy]): String = {
    if (orderClauses.nonEmpty)
      " ORDER BY " + orderClauses.map {
        case OrderDesc(col, no) => s"$col DESC $no"
        case OrderAsc(col, no) => s"$col ASC $no"
      }.mkString(",")
    else ""
  }
}
/** Descending order term; NULLs placed last unless overridden. */
sealed case class OrderDesc(colName: String, nullOrder: NullOrder = NullOrder.NullsLast) extends OrderBy
/** Ascending order term; NULLs placed last unless overridden. */
sealed case class OrderAsc(colName: String, nullOrder: NullOrder = NullOrder.NullsLast) extends OrderBy
/**
 * Immutable builder for a SQL WHERE clause: clause text, positional
 * parameters, and ORDER BY / LIMIT settings. Every combinator returns a new
 * instance; construct via the `where(...)` factory functions.
 */
sealed class Where private[db](
  private[db] val clause: String,
  private[db] val params: Seq[Any] = Seq.empty,
  private[db] val orderBys: OrderBys = OrderBys()) {

  // Internal copy helper (constructor is private[db]).
  private def copy(clause: String = this.clause,
                   params: Seq[Any] = this.params,
                   orderBys: OrderBys = this.orderBys): Where =
    new Where(clause, params, orderBys)

  /** Replaces the positional parameters. */
  def apply(prms: Any*): Where = copy(params = prms)

  /** ANDs another Where in: clauses join with " AND ", params append, the other's limit wins when set. */
  def and(w: Where): Where = {
    val newClause = if (clause.nonEmpty && w.clause.nonEmpty) clause + " AND " + w.clause
    else clause + w.clause
    val newLimit = w.orderBys.limit.orElse(orderBys.limit)
    copy(
      clause = newClause,
      params = params ++ w.params,
      orderBys = orderBys.copy(
        orderBys = orderBys.orderBys ++ w.orderBys.orderBys,
        limit = newLimit
      )
    )
  }

  /** Appends "NOT IN (…)" for the given values. */
  def notIn(params: Set[_]): Where = in(params, true)

  /** Appends " IS NULL" / " IS NOT NULL" (the enum value's own text) to the clause. */
  def is(p: IsNull): Where = {
    val newClause = s"$clause$p"
    copy(newClause, params)
  }

  // Appends "[NOT] IN (?,?,…)" with one placeholder per value.
  // NOTE(review): an empty set renders "IN ()", which is invalid SQL on most
  // engines — confirm callers never pass an empty set.
  def in(params: Set[_], neg: Boolean = false): Where = {
    val str = Seq.fill(params.size)("?") mkString(",")
    val isNot = if(neg) " NOT" else ""
    val newClause = s"$clause$isNot IN ($str)"
    copy(newClause, this.params ++ params)
  }

  /** Alias for `apply`. */
  def using(prms: Any*): Where = apply(prms: _*)

  // Ordering combinators: each REPLACES the order terms, keeping any existing limit.
  def orderAsc(colsAsc: String*): Where = copy(orderBys = OrderBys(colsAsc map (OrderAsc(_)), this.orderBys.limit))
  def orderDesc(colsDesc: String*): Where = copy(orderBys = OrderBys(colsDesc map (OrderDesc(_)), this.orderBys.limit))
  def orderBy(orderBys: OrderBys): Where = copy(orderBys = orderBys)
  def orderBy(orderBys: OrderBy*): Where = copy(orderBys = OrderBys(orderBys, this.orderBys.limit))

  /** Limits to `page` rows from offset `start`. */
  def limit(start: Long, page: Int): Where = copy(orderBys = orderBys.limit(start, page))
  /** Limits to the first `page` rows. */
  def limit(page: Int): Where = copy(orderBys = orderBys.limit(page))

  // Full SQL suffix: " WHERE <clause>" (when non-empty) plus ORDER BY / LIMIT.
  private[db] def sql: String = {
    val where = if (clause.nonEmpty) s" WHERE $clause" else ""
    where + orderBys.sql
  }
}
// Opt-in implicit allowing a bare OrderBy where a Where is expected
// (produces an empty predicate carrying only the ordering).
object WhereOps {
implicit def toWhere(orderBy: OrderBy): Where = new Where("", Seq.empty, OrderBys(orderBy))
}
// An empty Where (matches all rows).
def where(): Where = new Where("")
// Where from a pre-built (clause, params) pair.
def where(sqlParams: (String, Seq[Any])): Where = new Where(sqlParams._1, sqlParams._2)
// Where from raw clause text and positional bind parameters.
def where(sql: String, params: Any*): Where = new Where(sql, params.toSeq)
// Builds a Where from (column, value) pairs joined with AND.
// A value of None renders as "col IS NULL" and contributes no bind
// parameter; every other value renders as "col = ?" and is bound in order.
def where(tuples: (String, Any)*): Where = {
  val clause = tuples.map {
    case (col, None) => s"$col IS NULL"
    case (col, _) => s"$col = ?"
  }.mkString(" AND ")
  val boundParams = tuples.collect { case (_, v) if v != None => v }
  where((clause, boundParams))
}
import scala.reflect.runtime.universe._
// Metadata for one table column: name, java.sql.Types code, and whether the
// column is declared NOT NULL.
case class ColumnMetaInfo(name: String, `type`: Int, noNullsAllowed: Boolean)
// Column metadata for a whole table, in column order.
type ColumnsMetaInfo = Seq[ColumnMetaInfo]
// A single result row, backed by a column-name -> value map. Accessors look
// keys up lower-cased, so asMap's keys are expected to already be lower case.
class Row(val asMap: Map[String, _]) {
// Structural comparison that treats any pair of Blob/InputStream values as
// equal (their contents are not read) and compares byte arrays by content.
// NOTE(review): iterates only THIS row's keys — a `that` with extra keys
// still compares equal, and a key missing from `that` throws
// NoSuchElementException rather than returning false; confirm intended.
def shallowEquals(that: Row): Boolean = {
asMap.forall {
case (k, _: Blob | _: InputStream) =>
that.asMap(k) match {
case _: Blob | _ :InputStream => true
case _ => false
}
case (k, v: Array[Byte]) =>
that.asMap(k) match {
case v2: Array[Byte] => util.Arrays.equals(v, v2)
case _ => false
}
case (k, v) =>
that.asMap(k) == v
}
}
// Like shallowEquals but without the Blob/InputStream special case.
// NOTE(review): equals compares Array[Byte] by content while hashCode below
// delegates to asMap.hashCode (array hashCode is identity-based), so two
// equal rows containing byte arrays can have different hash codes — unsafe
// as hash-map keys. Same key-subset asymmetry as shallowEquals applies.
override def equals(o: Any): Boolean = o match {
case that: Row =>
asMap.forall {
case (k, v: Array[Byte]) =>
that.asMap(k) match {
case v2: Array[Byte] => util.Arrays.equals(v, v2)
case _ => false
}
case (k, v) =>
that.asMap(k) == v
}
case x => false
}
// Shortcut for the conventional "id" column.
def id: Long = long("id")
override def hashCode = asMap.hashCode
// Raw value lookup; throws NoSuchElementException for an unknown column.
def get(col: String) = asMap(col.toLowerCase(Locale.ROOT))
// Legacy generic accessor: coerces the raw value toward the requested type
// (Blob -> bytes/stream, null -> None for Option targets, etc.).
@deprecated("Use string(), int(), long() etc. instead.", "1.5-SNAPSHOT")
def apply[T >: ColumnTypes : TypeTag](col: String): T = {
val rawVal = asMap(col.toLowerCase(Locale.ROOT))
val massaged = if (typeOf[T] <:< typeOf[Option[_]] && rawVal == null) {
None
} else if (typeOf[T] == typeOf[Array[Byte]] && rawVal.isInstanceOf[Blob]) {
blobToBytes(rawVal.asInstanceOf[Blob])
} else if (typeOf[T] == typeOf[mutable.ArraySeq[Byte]] && rawVal.isInstanceOf[Blob]) {
blobToWrappedBytes(rawVal.asInstanceOf[Blob])
} else if (typeOf[T] == typeOf[mutable.ArraySeq[Byte]] && rawVal.isInstanceOf[Array[Byte]]) {
(rawVal.asInstanceOf[Array[Byte]]).to(mutable.ArraySeq)
} else if (typeOf[T] == typeOf[InputStream] && rawVal.isInstanceOf[Blob]) {
blobToStream(rawVal.asInstanceOf[Blob])
} else if (typeOf[T] == typeOf[Byte] && rawVal.isInstanceOf[Array[Byte]]) {
shimObjectToByte(rawVal)
} else if (typeOf[T] == typeOf[InputStream] && rawVal.isInstanceOf[Array[Byte]]) {
val aryByte = rawVal.asInstanceOf[Array[Byte]]
new ByteArrayInputStream(aryByte)
} else if (typeOf[T] =:= typeOf[ColumnTypes]) {
//in the case where NO parameter type is passed it defaults to ColumnTypes
// and ColumnTypes will match typeOf[Option[_]] so we must prevent that here
rawVal
} else if (typeOf[T] <:< typeOf[Option[_]])
Some(rawVal)
else rawVal
massaged.asInstanceOf[T]
}
// Unwraps a single-element byte array into its one byte (some drivers
// return tinyint columns this way); fails if the array length is not 1.
private def shimObjectToByte(o: Any): Byte = {
val aryByte = o.asInstanceOf[Array[Byte]]
require(aryByte.length == 1)
aryByte(0)
}
// Typed accessors. The xxxOpt variants map a null column to None; the plain
// variants throw (NoSuchElementException via .get, or ClassCastException on
// a type mismatch). All lower-case the column name before lookup.
def number(col: String): Number = asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Number]
def stringOpt(col: String): Option[String] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[String])
def string(col: String): String = stringOpt(col).get
def longOpt(col: String): Option[Long] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[Long])
def long(col: String): Long = longOpt(col).get
def intOpt(col: String): Option[Int] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[Int])
def int(col: String): Int = intOpt(col).get
def bigDecimal(col: String): BigDecimal = bigDecimalOpt(col).get
def bigDecimalOpt(col: String): Option[BigDecimal] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[BigDecimal])
def byteOpt(col: String): Option[Byte] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(shimObjectToByte)
def byte(col: String): Byte = byteOpt(col).get
def shortOpt(col: String): Option[Short] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[Short])
def short(col: String): Short = asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Short]
def booleanOpt(col: String): Option[Boolean] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[Boolean])
def boolean(col: String): Boolean = asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Boolean]
def arrayByteOpt(col: String): Option[Array[Byte]] = Option(asMap(col.toLowerCase(Locale.ROOT))).map(_.asInstanceOf[Array[Byte]])
def arrayByte(col: String): Array[Byte] = asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Array[Byte]]
// Blob accessors materialize the whole blob in memory via getBytes.
def blobByteArrayOpt(col: String): Option[Array[Byte]] = Option(asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Blob]).map(blobToBytes)
def blobByteArray(col: String): Array[Byte] = blobToBytes(asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Blob])
def blobInputStreamOpt(col: String): Option[InputStream] =
Option(asMap(col.toLowerCase(Locale.ROOT))
.asInstanceOf[Blob])
.map(blobToStream)
def blobInputStream(col: String): InputStream = blobInputStreamOpt(col).get
def blob(col: String): Blob = blobOpt(col).get
def blobOpt(col: String): Option[Blob] = Option(asMap(col.toLowerCase(Locale.ROOT)).asInstanceOf[Blob])
private def blobToStream(jDBCBlobClient: Blob): InputStream = jDBCBlobClient.getBinaryStream
// NOTE: Blob.length.toInt truncates blobs larger than Int.MaxValue bytes.
private def blobToBytes(jDBCBlobClient: Blob): Array[Byte] = jDBCBlobClient.getBytes(1, jDBCBlobClient.length.toInt)
private def blobToWrappedBytes(jDBCBlobClient: Blob): mutable.ArraySeq[Byte] = jDBCBlobClient.getBytes(1, jDBCBlobClient.length.toInt).to(mutable.ArraySeq)
// Debug rendering of all columns; not valid for parsing.
override def toString: String = {
asMap.foldLeft("") { case (a, (k, v)) => a + s" Key:$k, Value: $v" }
}
}
} | mcsherrylabs/sss.db | src/main/scala/sss/db/package.scala | Scala | gpl-3.0 | 15,188 |
package main.scala.projectEulerScala
import scala.annotation.tailrec
import scala.collection.mutable
// 100
// Project Euler problem 33: find the four non-trivial "digit cancelling"
// fractions (e.g. 49/98 = 4/8 by "cancelling" the 9s) and report the
// denominator of their product in lowest terms. Expected answer: 100.
object P33_DigitCancellingFractions {

  /** Greatest common divisor via Euclid's algorithm (gcd(a, 0) == a). */
  @tailrec
  def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b)

  /** Brute-force exploration over all two-digit numerator/denominator pairs,
    * printing each curious fraction and the reduced product as it goes.
    * Side-effecting; kept for reference alongside the compact `euler33`. */
  def firstTry: Unit = {
    val r = 10 to 99
    val special = mutable.Set[(Int, Int)]()
    (for (a <- r; b <- r) yield (a, b)).foreach { case (top, bot) =>
      val topString = top.toString
      val botString = bot.toString
      val topLeft = topString.take(1)
      val topRight = topString.takeRight(1)
      val botLeft = botString.take(1)
      val botRight = botString.takeRight(1)
      // A "curious" fraction shares the numerator's last digit with the
      // denominator's first digit (excluding trivial trailing zeros) and
      // keeps its value when that digit is cancelled.
      if (
        top != bot &&
        topRight == botLeft &&
        topRight != "0" &&
        Integer.parseInt(topLeft, 10) * 1.0 / Integer.parseInt(botRight, 10) == 1.0 * top / bot
      ) {
        println("found " + (top, bot))
        special += ((top, bot))
      }
    }
    val product = (special.map(_._1).product, special.map(_._2).product)
    println("unreduced product fraction: " + product)
    println("reduced product denominator: " + product._2 / gcd(product._1, product._2))
  }

  /** Compact solution: numerator = 10n + i and denominator = 10i + d share
    * digit i; (10n + i)/(10i + d) == n/d exactly when
    * (10n + i) * d == n * (10i + d). Multiplies the surviving (n, d) pairs
    * and returns the product's denominator in lowest terms.
    *
    * @return the reduced denominator of the product (100 for this problem)
    */
  def euler33: Int = {
    val prod = (for {
      i <- 1 until 10
      d <- 1 until i
      n <- 1 until d
      if (n * 10 + i) * d == n * (i * 10 + d)
    } yield (n, d)).reduce { (a, b) => (a._1 * b._1, a._2 * b._2) }
    prod._2 / gcd(prod._1, prod._2)
  }

  // Explicit ": Unit =" — the old procedure syntax is deprecated and removed
  // in Scala 3.
  def main(args: Array[String]): Unit = {
    println(euler33)
  }
}
| rck109d/projectEuler | src/main/scala/projectEulerScala/P33_DigitCancellingFractions.scala | Scala | lgpl-3.0 | 1,479 |
package com.whitepages.cloudmanager.action
import java.nio.file.{Files, Path}
import com.whitepages.cloudmanager.state.ClusterManager
import scala.util.Try
/** Uploads a Solr configuration directory to ZooKeeper under `configName`.
  * Preconditions require the mandatory config files to exist locally;
  * postconditions verify the named config is visible in the cluster. */
case class UploadConfig(path: Path, configName: String) extends Action {
  // Files that must be present in the config directory before uploading.
  val requiredFiles = List("solrconfig.xml", "schema.xml")

  // Attempts the upload; logs and returns false on any failure.
  override def execute(clusterManager: ClusterManager): Boolean =
    Try(clusterManager.client.uploadConfig(path, configName)) match {
      case scala.util.Failure(e) =>
        comment.error("Couldn't upload config", e)
        false
      case _ =>
        true
    }

  // One existence check per required file.
  override val preConditions: List[StateCondition] =
    requiredFiles.map { f =>
      StateCondition(s"$f exists in $path", _ => Files.exists(path.resolve(f)))
    }

  // After upload, the config must be registered under its name.
  override val postConditions: List[StateCondition] =
    List(StateCondition(s"$configName exists", Conditions.configExists(configName)))

  override def toString = s"UploadConfig: dir: ${path.toAbsolutePath} configName: $configName"
}
| randomstatistic/solrcloud_manager | src/main/scala/com/whitepages/cloudmanager/action/UploadConfig.scala | Scala | apache-2.0 | 977 |
/*
* Copyright 2017 FOLIO Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.folio_sec.reladomo.generator
// Factory with the trait's default behavior and no overrides.
class DefaultScalaCodeGeneratorFactory extends ScalaCodeGeneratorFactory {}
// Creates ScalaCodeGenerator instances from generator configuration.
trait ScalaCodeGeneratorFactory {
// Builds a DefaultScalaCodeGenerator, forwarding each setting unchanged:
// the Mithra object XML path, the package suffix for the generated Scala
// API, the output directories for unmodifiable/modifiable files, and the
// Future API flavor to generate against.
def getInstance(mithraObjectXmlPath: String,
scalaApiPackageSuffix: String,
scalaApiUnmodifiableFilesOutputDir: String,
scalaApiModifiableFilesOutputDir: String,
futureApi: String): ScalaCodeGenerator = {
new DefaultScalaCodeGenerator(
mithraObjectXmlPath = mithraObjectXmlPath,
scalaApiPackageSuffix = scalaApiPackageSuffix,
scalaApiUnmodifiableFilesOutputDir = scalaApiUnmodifiableFilesOutputDir,
scalaApiModifiableFilesOutputDir = scalaApiModifiableFilesOutputDir,
futureApi = futureApi
)
}
}
| folio-sec/reladomo-scala | sbt-reladomo-plugin/src/main/scala/com/folio_sec/reladomo/generator/ScalaCodeGeneratorFactory.scala | Scala | apache-2.0 | 1,365 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.aggregate
import org.apache.spark.TaskContext
import org.apache.spark.memory.TaskMemoryManager
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.errors._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.execution.vectorized.MutableColumnarRow
import org.apache.spark.sql.types.{DecimalType, StringType, StructType}
import org.apache.spark.unsafe.KVIterator
import org.apache.spark.util.Utils
/**
* Hash-based aggregate operator that can also fallback to sorting when data exceeds memory size.
*/
case class HashAggregateExec(
requiredChildDistributionExpressions: Option[Seq[Expression]],
groupingExpressions: Seq[NamedExpression],
aggregateExpressions: Seq[AggregateExpression],
aggregateAttributes: Seq[Attribute],
initialInputBufferOffset: Int,
resultExpressions: Seq[NamedExpression],
child: SparkPlan)
extends UnaryExecNode with BlockingOperatorWithCodegen {
// Schema of the per-group mutable aggregation buffer: the concatenated
// aggBufferAttributes of every aggregate function in this operator.
private[this] val aggregateBufferAttributes = {
aggregateExpressions.flatMap(_.aggregateFunction.aggBufferAttributes)
}
// Fail fast if UnsafeRow-based hash aggregation cannot handle this buffer.
require(HashAggregateExec.supportsAggregate(aggregateBufferAttributes))
// Everything expression binding may reference here: child output plus the
// buffer, result and input-side buffer attributes of the aggregates.
override lazy val allAttributes: AttributeSeq =
child.output ++ aggregateBufferAttributes ++ aggregateAttributes ++
aggregateExpressions.flatMap(_.aggregateFunction.inputAggBufferAttributes)
// SQL metrics surfaced in the UI for this operator.
override lazy val metrics = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
"peakMemory" -> SQLMetrics.createSizeMetric(sparkContext, "peak memory"),
"spillSize" -> SQLMetrics.createSizeMetric(sparkContext, "spill size"),
"aggTime" -> SQLMetrics.createTimingMetric(sparkContext, "aggregate time"),
"avgHashProbe" -> SQLMetrics.createAverageMetric(sparkContext, "avg hash probe"))
// The operator's output schema comes from the result expressions.
override def output: Seq[Attribute] = resultExpressions.map(_.toAttribute)
// Aggregation does not repartition; partitioning is inherited from the child.
override def outputPartitioning: Partitioning = child.outputPartitioning
// Attributes produced (not consumed) by this node: aggregate results plus
// any result expressions beyond the grouping keys, plus buffer slots.
override def producedAttributes: AttributeSet =
AttributeSet(aggregateAttributes) ++
AttributeSet(resultExpressions.diff(groupingExpressions).map(_.toAttribute)) ++
AttributeSet(aggregateBufferAttributes)
// Distribution required of the child: Some(Nil) means a global aggregate
// (all tuples on one partition); Some(keys) clusters by the grouping keys;
// None imposes no requirement (the planner decides).
override def requiredChildDistribution: List[Distribution] = {
requiredChildDistributionExpressions match {
case Some(exprs) if exprs.isEmpty => AllTuples :: Nil
case Some(exprs) if exprs.nonEmpty => ClusteredDistribution(exprs) :: Nil
case None => UnspecifiedDistribution :: Nil
}
}
// This is for testing. We force TungstenAggregationIterator to fall back to the unsafe row hash
// map and/or the sort-based aggregation once it has processed a given number of input rows.
// Parsed from the conf as "n" or "n,m" (first = hash-map fallback row count,
// second = sort fallback row count); unset/empty means no forced fallback.
private val testFallbackStartsAt: Option[(Int, Int)] = {
sqlContext.getConf("spark.sql.TungstenAggregate.testFallbackStartsAt", null) match {
case null | "" => None
case fallbackStartsAt =>
val splits = fallbackStartsAt.split(",").map(_.trim)
Some((splits.head.toInt, splits.last.toInt))
}
}
// Interpreted (non-whole-stage-codegen) execution path: runs one
// TungstenAggregationIterator per input partition and records timing,
// memory and spill metrics.
protected override def doExecute(): RDD[InternalRow] = attachTree(this, "execute") {
val numOutputRows = longMetric("numOutputRows")
val peakMemory = longMetric("peakMemory")
val spillSize = longMetric("spillSize")
val avgHashProbe = longMetric("avgHashProbe")
val aggTime = longMetric("aggTime")
child.execute().mapPartitionsWithIndex { (partIndex, iter) =>
val beforeAgg = System.nanoTime()
val hasInput = iter.hasNext
val res = if (!hasInput && groupingExpressions.nonEmpty) {
// This is a grouped aggregate and the input iterator is empty,
// so return an empty iterator.
Iterator.empty
} else {
val aggregationIterator =
new TungstenAggregationIterator(
partIndex,
groupingExpressions,
aggregateExpressions,
aggregateAttributes,
initialInputBufferOffset,
resultExpressions,
(expressions, inputSchema) =>
newMutableProjection(expressions, inputSchema, subexpressionEliminationEnabled),
child.output,
iter,
testFallbackStartsAt,
numOutputRows,
peakMemory,
spillSize,
avgHashProbe)
if (!hasInput && groupingExpressions.isEmpty) {
// A global (no grouping keys) aggregate over empty input still emits
// exactly one row (e.g. COUNT(*) == 0).
numOutputRows += 1
Iterator.single[UnsafeRow](aggregationIterator.outputForEmptyGroupingKeyWithoutInput())
} else {
aggregationIterator
}
}
aggTime += (System.nanoTime() - beforeAgg) / 1000000
res
}
}
// all the mode of aggregate expressions
private val modes = aggregateExpressions.map(_.mode).distinct
// All input attributes may be needed, since buffer update expressions can
// reference any of them.
override def usedInputs: AttributeSet = inputSet
override def supportCodegen: Boolean = {
// ImperativeAggregate is not supported right now
!aggregateExpressions.exists(_.aggregateFunction.isInstanceOf[ImperativeAggregate])
}
// Whole-stage codegen pulls its input RDDs straight from the child.
override def inputRDDs(): Seq[RDD[InternalRow]] = {
child.asInstanceOf[CodegenSupport].inputRDDs()
}
// Codegen entry points: dispatch to the keyed (hash-map based) or unkeyed
// (single in-register buffer) variants depending on grouping keys.
protected override def doProduce(ctx: CodegenContext): String = {
if (groupingExpressions.isEmpty) {
doProduceWithoutKeys(ctx)
} else {
doProduceWithKeys(ctx)
}
}
override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
if (groupingExpressions.isEmpty) {
doConsumeWithoutKeys(ctx, input)
} else {
doConsumeWithKeys(ctx, input)
}
}
// The variables used as aggregation buffer. Only used for aggregation without keys.
private var bufVars: Seq[ExprCode] = _
// Generates the produce-side code for a global aggregate (no grouping
// keys): the buffer lives in generated mutable state, the child is drained
// once inside a helper function, then a single result row is emitted.
private def doProduceWithoutKeys(ctx: CodegenContext): String = {
val initAgg = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "initAgg")
// The generated function doesn't have input row in the code context.
ctx.INPUT_ROW = null
// generate variables for aggregation buffer
val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate])
val initExpr = functions.flatMap(f => f.initialValues)
bufVars = initExpr.map { e =>
val isNull = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "bufIsNull")
val value = ctx.addMutableState(CodeGenerator.javaType(e.dataType), "bufValue")
// The initial expression should not access any column
val ev = e.genCode(ctx)
val initVars = code"""
| $isNull = ${ev.isNull};
| $value = ${ev.value};
""".stripMargin
ExprCode(
ev.code + initVars,
JavaCode.isNullGlobal(isNull),
JavaCode.global(value, e.dataType))
}
val initBufVar = evaluateVariables(bufVars)
// generate variables for output
val (resultVars, genResult) = if (modes.contains(Final) || modes.contains(Complete)) {
// evaluate aggregate results
ctx.currentVars = bufVars
val aggResults = functions.map(_.evaluateExpression).map { e =>
BindReferences.bindReference(e, aggregateBufferAttributes).genCode(ctx)
}
val evaluateAggResults = evaluateVariables(aggResults)
// evaluate result expressions
ctx.currentVars = aggResults
val resultVars = resultExpressions.map { e =>
BindReferences.bindReference(e, aggregateAttributes).genCode(ctx)
}
(resultVars, s"""
|$evaluateAggResults
|${evaluateVariables(resultVars)}
""".stripMargin)
} else if (modes.contains(Partial) || modes.contains(PartialMerge)) {
// output the aggregate buffer directly
(bufVars, "")
} else {
// no aggregate function, the result should be literals
val resultVars = resultExpressions.map(_.genCode(ctx))
(resultVars, evaluateVariables(resultVars))
}
val doAgg = ctx.freshName("doAggregateWithoutKey")
val doAggFuncName = ctx.addNewFunction(doAgg,
s"""
| private void $doAgg() throws java.io.IOException {
| // initialize aggregation buffer
| $initBufVar
|
| ${child.asInstanceOf[CodegenSupport].produce(ctx, this)}
| }
""".stripMargin)
val numOutput = metricTerm(ctx, "numOutputRows")
val aggTime = metricTerm(ctx, "aggTime")
val beforeAgg = ctx.freshName("beforeAgg")
// The while(!initAgg) wrapper ensures the aggregate runs exactly once and
// emits exactly one output row.
s"""
| while (!$initAgg) {
| $initAgg = true;
| long $beforeAgg = System.nanoTime();
| $doAggFuncName();
| $aggTime.add((System.nanoTime() - $beforeAgg) / 1000000);
|
| // output the result
| ${genResult.trim}
|
| $numOutput.add(1);
| ${consume(ctx, resultVars).trim}
| }
""".stripMargin
}
// Generates the consume-side code for a global aggregate: for each input
// row, evaluate the update (or merge) expressions against the current
// buffer variables and write the results back into the buffer.
private def doConsumeWithoutKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = {
// only have DeclarativeAggregate
val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate])
val inputAttrs = functions.flatMap(_.aggBufferAttributes) ++ child.output
// Partial/Complete consume raw input rows; PartialMerge/Final merge
// pre-aggregated buffers.
val updateExpr = aggregateExpressions.flatMap { e =>
e.mode match {
case Partial | Complete =>
e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions
case PartialMerge | Final =>
e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions
}
}
ctx.currentVars = bufVars ++ input
val boundUpdateExpr = updateExpr.map(BindReferences.bindReference(_, inputAttrs))
val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExpr)
val effectiveCodes = subExprs.codes.mkString("\\n")
val aggVals = ctx.withSubExprEliminationExprs(subExprs.states) {
boundUpdateExpr.map(_.genCode(ctx))
}
// aggregate buffer should be updated atomic
val updates = aggVals.zipWithIndex.map { case (ev, i) =>
s"""
| ${bufVars(i).isNull} = ${ev.isNull};
| ${bufVars(i).value} = ${ev.value};
""".stripMargin
}
s"""
| // do aggregate
| // common sub-expressions
| $effectiveCodes
| // evaluate aggregate function
| ${evaluateVariables(aggVals)}
| // update aggregation buffer
| ${updates.mkString("\\n").trim}
""".stripMargin
}
// Keyed-aggregation state: grouping key and buffer schemas, plus the
// declarative subset of the aggregate functions (codegen handles only these).
private val groupingAttributes = groupingExpressions.map(_.toAttribute)
private val groupingKeySchema = StructType.fromAttributes(groupingAttributes)
private val declFunctions = aggregateExpressions.map(_.aggregateFunction)
.filter(_.isInstanceOf[DeclarativeAggregate])
.map(_.asInstanceOf[DeclarativeAggregate])
private val bufferSchema = StructType.fromAttributes(aggregateBufferAttributes)
// The name for Fast HashMap
private var fastHashMapTerm: String = _
private var isFastHashMapEnabled: Boolean = false
// whether a vectorized hashmap is used instead
// we have decided to always use the row-based hashmap,
// but the vectorized hashmap can still be switched on for testing and benchmarking purposes.
private var isVectorizedHashMapEnabled: Boolean = false
// The name for UnsafeRow HashMap
private var hashMapTerm: String = _
private var sorterTerm: String = _
/**
* This is called by generated Java class, should be public.
*/
// Builds the second-level UnsafeRow hash map, seeded with a buffer row that
// holds every aggregate function's initial values.
def createHashMap(): UnsafeFixedWidthAggregationMap = {
// create initialized aggregate buffer
val initExpr = declFunctions.flatMap(f => f.initialValues)
val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow)
// create hashMap
new UnsafeFixedWidthAggregationMap(
initialBuffer,
bufferSchema,
groupingKeySchema,
TaskContext.get(),
1024 * 16, // initial capacity
TaskContext.get().taskMemoryManager().pageSizeBytes
)
}
// Called by the generated class; exposes the current task's memory manager.
def getTaskMemoryManager(): TaskMemoryManager = {
TaskContext.get().taskMemoryManager()
}
// Called by the generated class; a fresh buffer row with initial values.
def getEmptyAggregationBuffer(): InternalRow = {
val initExpr = declFunctions.flatMap(f => f.initialValues)
val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow)
initialBuffer
}
/**
* This is called by generated Java class, should be public.
*/
// Joiner that concatenates a grouping-key row with its buffer row when
// emitting (key, buffer) output.
def createUnsafeJoiner(): UnsafeRowJoiner = {
GenerateUnsafeRowJoiner.create(groupingKeySchema, bufferSchema)
}
/**
* Called by generated Java class to finish the aggregate and return a KVIterator.
*/
// If the hash map never spilled, its iterator is returned directly;
// otherwise the map's remains are merged into the external sorter and an
// iterator is built that merges sorted runs with equal keys on the fly.
def finishAggregate(
hashMap: UnsafeFixedWidthAggregationMap,
sorter: UnsafeKVExternalSorter,
peakMemory: SQLMetric,
spillSize: SQLMetric,
avgHashProbe: SQLMetric): KVIterator[UnsafeRow, UnsafeRow] = {
// update peak execution memory
val mapMemory = hashMap.getPeakMemoryUsedBytes
val sorterMemory = Option(sorter).map(_.getPeakMemoryUsedBytes).getOrElse(0L)
val maxMemory = Math.max(mapMemory, sorterMemory)
val metrics = TaskContext.get().taskMetrics()
peakMemory.add(maxMemory)
metrics.incPeakExecutionMemory(maxMemory)
// Update average hashmap probe
avgHashProbe.set(hashMap.getAverageProbesPerLookup())
if (sorter == null) {
// not spilled
return hashMap.iterator()
}
// merge the final hashMap into sorter
sorter.merge(hashMap.destructAndCreateExternalSorter())
hashMap.free()
val sortedIter = sorter.sortedIterator()
// Create a KVIterator based on the sorted iterator.
new KVIterator[UnsafeRow, UnsafeRow] {
// Create a MutableProjection to merge the rows of same key together
val mergeExpr = declFunctions.flatMap(_.mergeExpressions)
val mergeProjection = newMutableProjection(
mergeExpr,
aggregateBufferAttributes ++ declFunctions.flatMap(_.inputAggBufferAttributes),
subexpressionEliminationEnabled)
val joinedRow = new JoinedRow()
var currentKey: UnsafeRow = null
var currentRow: UnsafeRow = null
// One-row lookahead: nextKey holds the first key of the next group.
var nextKey: UnsafeRow = if (sortedIter.next()) {
sortedIter.getKey
} else {
null
}
override def next(): Boolean = {
if (nextKey != null) {
// Copy key and buffer: the underlying iterator reuses its rows.
currentKey = nextKey.copy()
currentRow = sortedIter.getValue.copy()
nextKey = null
// use the first row as aggregate buffer
mergeProjection.target(currentRow)
// merge the following rows with same key together
var findNextGroup = false
while (!findNextGroup && sortedIter.next()) {
val key = sortedIter.getKey
if (currentKey.equals(key)) {
mergeProjection(joinedRow(currentRow, sortedIter.getValue))
} else {
// We find a new group.
findNextGroup = true
nextKey = key
}
}
true
} else {
// Exhausted: record how much the sorter spilled during the task.
spillSize.add(sorter.getSpillSize)
false
}
}
override def getKey: UnsafeRow = currentKey
override def getValue: UnsafeRow = currentRow
override def close(): Unit = {
sortedIter.close()
}
}
}
/**
* Generate the code for output.
* @return function name for the result code.
*/
// Emits a private Java method that turns one (grouping key, buffer) pair
// into output rows; the body depends on the aggregation modes:
// Final/Complete evaluate the result expressions, Partial/PartialMerge pass
// key + buffer through, and the no-aggregate case outputs from the key only.
private def generateResultFunction(ctx: CodegenContext): String = {
val funcName = ctx.freshName("doAggregateWithKeysOutput")
val keyTerm = ctx.freshName("keyTerm")
val bufferTerm = ctx.freshName("bufferTerm")
val numOutput = metricTerm(ctx, "numOutputRows")
val body =
if (modes.contains(Final) || modes.contains(Complete)) {
// generate output using resultExpressions
ctx.currentVars = null
ctx.INPUT_ROW = keyTerm
val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) =>
BoundReference(i, e.dataType, e.nullable).genCode(ctx)
}
val evaluateKeyVars = evaluateVariables(keyVars)
ctx.INPUT_ROW = bufferTerm
val bufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) =>
BoundReference(i, e.dataType, e.nullable).genCode(ctx)
}
val evaluateBufferVars = evaluateVariables(bufferVars)
// evaluate the aggregation result
ctx.currentVars = bufferVars
val aggResults = declFunctions.map(_.evaluateExpression).map { e =>
BindReferences.bindReference(e, aggregateBufferAttributes).genCode(ctx)
}
val evaluateAggResults = evaluateVariables(aggResults)
// generate the final result
ctx.currentVars = keyVars ++ aggResults
val inputAttrs = groupingAttributes ++ aggregateAttributes
val resultVars = resultExpressions.map { e =>
BindReferences.bindReference(e, inputAttrs).genCode(ctx)
}
s"""
$evaluateKeyVars
$evaluateBufferVars
$evaluateAggResults
${consume(ctx, resultVars)}
"""
} else if (modes.contains(Partial) || modes.contains(PartialMerge)) {
// resultExpressions are Attributes of groupingExpressions and aggregateBufferAttributes.
assert(resultExpressions.forall(_.isInstanceOf[Attribute]))
assert(resultExpressions.length ==
groupingExpressions.length + aggregateBufferAttributes.length)
ctx.currentVars = null
ctx.INPUT_ROW = keyTerm
val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) =>
BoundReference(i, e.dataType, e.nullable).genCode(ctx)
}
val evaluateKeyVars = evaluateVariables(keyVars)
ctx.INPUT_ROW = bufferTerm
val resultBufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) =>
BoundReference(i, e.dataType, e.nullable).genCode(ctx)
}
val evaluateResultBufferVars = evaluateVariables(resultBufferVars)
ctx.currentVars = keyVars ++ resultBufferVars
val inputAttrs = resultExpressions.map(_.toAttribute)
val resultVars = resultExpressions.map { e =>
BindReferences.bindReference(e, inputAttrs).genCode(ctx)
}
s"""
$evaluateKeyVars
$evaluateResultBufferVars
${consume(ctx, resultVars)}
"""
} else {
// generate result based on grouping key
ctx.INPUT_ROW = keyTerm
ctx.currentVars = null
val eval = resultExpressions.map{ e =>
BindReferences.bindReference(e, groupingAttributes).genCode(ctx)
}
consume(ctx, eval)
}
ctx.addNewFunction(funcName,
s"""
private void $funcName(UnsafeRow $keyTerm, UnsafeRow $bufferTerm)
throws java.io.IOException {
$numOutput.add(1);
$body
}
""")
}
/**
* A required check for any fast hash map implementation (basically the common requirements
* for row-based and vectorized).
* Currently fast hash map is supported for primitive data types during partial aggregation.
* This list of supported use-cases should be expanded over time.
*/
private def checkIfFastHashMapSupported(ctx: CodegenContext): Boolean = {
// Keys/buffer must be primitive, decimal or string; there must be at least
// one buffer column; and only partial-side modes qualify.
val isSupported =
(groupingKeySchema ++ bufferSchema).forall(f => CodeGenerator.isPrimitiveType(f.dataType) ||
f.dataType.isInstanceOf[DecimalType] || f.dataType.isInstanceOf[StringType]) &&
bufferSchema.nonEmpty && modes.forall(mode => mode == Partial || mode == PartialMerge)
// For vectorized hash map, We do not support byte array based decimal type for aggregate values
// as ColumnVector.putDecimal for high-precision decimals doesn't currently support in-place
// updates. Due to this, appending the byte array in the vectorized hash map can turn out to be
// quite inefficient and can potentially OOM the executor.
// For row-based hash map, while decimal update is supported in UnsafeRow, we will just act
// conservative here, due to lack of testing and benchmarking.
val isNotByteArrayDecimalType = bufferSchema.map(_.dataType).filter(_.isInstanceOf[DecimalType])
.forall(!DecimalType.isByteArrayDecimalType(_))
isSupported && isNotByteArrayDecimalType
}
// Enables the first-level "fast" hash map when this aggregate qualifies;
// logs (outside testing) when the user asked for it but it cannot be used.
private def enableTwoLevelHashMap(ctx: CodegenContext): Unit = {
if (!checkIfFastHashMapSupported(ctx)) {
if (modes.forall(mode => mode == Partial || mode == PartialMerge) && !Utils.isTesting) {
logInfo("spark.sql.codegen.aggregate.map.twolevel.enabled is set to true, but"
+ " current version of codegened fast hashmap does not support this aggregate.")
}
} else {
isFastHashMapEnabled = true
// This is for testing/benchmarking only.
// We enforce to first level to be a vectorized hashmap, instead of the default row-based one.
isVectorizedHashMapEnabled = sqlContext.getConf(
"spark.sql.codegen.aggregate.map.vectorized.enable", "false") == "true"
}
}
/**
 * Generates the whole-stage-codegen "produce" path for hash aggregation with grouping keys.
 *
 * Builds the (optional) generated fast hash map plus the regular
 * UnsafeFixedWidthAggregationMap, emits a one-shot function that consumes the child
 * plan's rows into those maps, and finally emits the loops that iterate the maps
 * and feed aggregated rows downstream.
 */
private def doProduceWithKeys(ctx: CodegenContext): String = {
  // Guard flag so the aggregation phase is executed only once per partition.
  val initAgg = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "initAgg")
  if (sqlContext.conf.enableTwoLevelAggMap) {
    enableTwoLevelHashMap(ctx)
  } else {
    sqlContext.getConf("spark.sql.codegen.aggregate.map.vectorized.enable", null) match {
      case "true" =>
        logWarning("Two level hashmap is disabled but vectorized hashmap is enabled.")
      case _ =>
    }
  }
  val bitMaxCapacity = sqlContext.conf.fastHashAggregateRowMaxCapacityBit

  val thisPlan = ctx.addReferenceObj("plan", this)

  // Create a name for the iterator from the fast hash map.
  // NOTE(review): when isFastHashMapEnabled is false this `if` has no else branch, so
  // iterTermForFastHashMap is only meaningful on the enabled path.
  val iterTermForFastHashMap = if (isFastHashMapEnabled) {
    // Generates the fast hash map class and creates the fast hash map term.
    val fastHashMapClassName = ctx.freshName("FastHashMap")
    if (isVectorizedHashMapEnabled) {
      val generatedMap = new VectorizedHashMapGenerator(ctx, aggregateExpressions,
        fastHashMapClassName, groupingKeySchema, bufferSchema, bitMaxCapacity).generate()
      ctx.addInnerClass(generatedMap)
      // Inline mutable state since not many aggregation operations in a task
      fastHashMapTerm = ctx.addMutableState(fastHashMapClassName, "vectorizedHastHashMap",
        v => s"$v = new $fastHashMapClassName();", forceInline = true)
      ctx.addMutableState(s"java.util.Iterator<InternalRow>", "vectorizedFastHashMapIter",
        forceInline = true)
    } else {
      val generatedMap = new RowBasedHashMapGenerator(ctx, aggregateExpressions,
        fastHashMapClassName, groupingKeySchema, bufferSchema, bitMaxCapacity).generate()
      ctx.addInnerClass(generatedMap)
      // Inline mutable state since not many aggregation operations in a task
      fastHashMapTerm = ctx.addMutableState(fastHashMapClassName, "fastHashMap",
        v => s"$v = new $fastHashMapClassName(" +
          s"$thisPlan.getTaskMemoryManager(), $thisPlan.getEmptyAggregationBuffer());",
        forceInline = true)
      ctx.addMutableState(
        "org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>",
        "fastHashMapIter", forceInline = true)
    }
  }

  // Create a name for the iterator from the regular hash map.
  // Inline mutable state since not many aggregation operations in a task
  val iterTerm = ctx.addMutableState(classOf[KVIterator[UnsafeRow, UnsafeRow]].getName,
    "mapIter", forceInline = true)
  // create hashMap
  val hashMapClassName = classOf[UnsafeFixedWidthAggregationMap].getName
  hashMapTerm = ctx.addMutableState(hashMapClassName, "hashMap",
    v => s"$v = $thisPlan.createHashMap();", forceInline = true)
  sorterTerm = ctx.addMutableState(classOf[UnsafeKVExternalSorter].getName, "sorter",
    forceInline = true)

  val doAgg = ctx.freshName("doAggregateWithKeys")
  val peakMemory = metricTerm(ctx, "peakMemory")
  val spillSize = metricTerm(ctx, "spillSize")
  val avgHashProbe = metricTerm(ctx, "avgHashProbe")

  // finishAggregate merges any spilled data and returns the final sorted iterator.
  val finishRegularHashMap = s"$iterTerm = $thisPlan.finishAggregate(" +
    s"$hashMapTerm, $sorterTerm, $peakMemory, $spillSize, $avgHashProbe);"
  val finishHashMap = if (isFastHashMapEnabled) {
    s"""
       |$iterTermForFastHashMap = $fastHashMapTerm.rowIterator();
       |$finishRegularHashMap
     """.stripMargin
  } else {
    finishRegularHashMap
  }

  val doAggFuncName = ctx.addNewFunction(doAgg,
    s"""
       |private void $doAgg() throws java.io.IOException {
       |  ${child.asInstanceOf[CodegenSupport].produce(ctx, this)}
       |  $finishHashMap
       |}
     """.stripMargin)

  // generate code for output
  val keyTerm = ctx.freshName("aggKey")
  val bufferTerm = ctx.freshName("aggBuffer")
  val outputFunc = generateResultFunction(ctx)

  // Output loop over the fast map (if enabled); chooses the vectorized or row-based form.
  def outputFromFastHashMap: String = {
    if (isFastHashMapEnabled) {
      if (isVectorizedHashMapEnabled) {
        outputFromVectorizedMap
      } else {
        outputFromRowBasedMap
      }
    } else ""
  }

  def outputFromRowBasedMap: String = {
    s"""
       |while ($iterTermForFastHashMap.next()) {
       |  UnsafeRow $keyTerm = (UnsafeRow) $iterTermForFastHashMap.getKey();
       |  UnsafeRow $bufferTerm = (UnsafeRow) $iterTermForFastHashMap.getValue();
       |  $outputFunc($keyTerm, $bufferTerm);
       |
       |  if (shouldStop()) return;
       |}
       |$fastHashMapTerm.close();
     """.stripMargin
  }

  // Iterate over the aggregate rows and convert them from InternalRow to UnsafeRow
  def outputFromVectorizedMap: String = {
    val row = ctx.freshName("fastHashMapRow")
    ctx.currentVars = null
    ctx.INPUT_ROW = row
    // Projections reading key columns then buffer columns out of the columnar row.
    val generateKeyRow = GenerateUnsafeProjection.createCode(ctx,
      groupingKeySchema.toAttributes.zipWithIndex
        .map { case (attr, i) => BoundReference(i, attr.dataType, attr.nullable) }
    )
    val generateBufferRow = GenerateUnsafeProjection.createCode(ctx,
      bufferSchema.toAttributes.zipWithIndex.map { case (attr, i) =>
        BoundReference(groupingKeySchema.length + i, attr.dataType, attr.nullable)
      })
    s"""
       |while ($iterTermForFastHashMap.hasNext()) {
       |  InternalRow $row = (InternalRow) $iterTermForFastHashMap.next();
       |  ${generateKeyRow.code}
       |  ${generateBufferRow.code}
       |  $outputFunc(${generateKeyRow.value}, ${generateBufferRow.value});
       |
       |  if (shouldStop()) return;
       |}
       |
       |$fastHashMapTerm.close();
     """.stripMargin
  }

  def outputFromRegularHashMap: String = {
    s"""
       |while ($limitNotReachedCond $iterTerm.next()) {
       |  UnsafeRow $keyTerm = (UnsafeRow) $iterTerm.getKey();
       |  UnsafeRow $bufferTerm = (UnsafeRow) $iterTerm.getValue();
       |  $outputFunc($keyTerm, $bufferTerm);
       |  if (shouldStop()) return;
       |}
       |$iterTerm.close();
       |if ($sorterTerm == null) {
       |  $hashMapTerm.free();
       |}
     """.stripMargin
  }

  val aggTime = metricTerm(ctx, "aggTime")
  val beforeAgg = ctx.freshName("beforeAgg")
  s"""
   if (!$initAgg) {
     $initAgg = true;
     long $beforeAgg = System.nanoTime();
     $doAggFuncName();
     $aggTime.add((System.nanoTime() - $beforeAgg) / 1000000);
   }
   // output the result
   $outputFromFastHashMap
   $outputFromRegularHashMap
   """
}
/**
 * Generates the whole-stage-codegen "consume" path for hash aggregation with grouping
 * keys: for each input row, compute the grouping key, find or insert the aggregation
 * buffer (probing the generated fast hash map first when enabled, then the regular
 * map with spill/sort-based fallback), and apply the update expressions to the buffer.
 */
private def doConsumeWithKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = {
  // create grouping key
  val unsafeRowKeyCode = GenerateUnsafeProjection.createCode(
    ctx, groupingExpressions.map(e => BindReferences.bindReference[Expression](e, child.output)))
  val fastRowKeys = ctx.generateExpressions(
    groupingExpressions.map(e => BindReferences.bindReference[Expression](e, child.output)))
  val unsafeRowKeys = unsafeRowKeyCode.value
  val unsafeRowBuffer = ctx.freshName("unsafeRowAggBuffer")
  val fastRowBuffer = ctx.freshName("fastAggBuffer")

  // only have DeclarativeAggregate
  val updateExpr = aggregateExpressions.flatMap { e =>
    e.mode match {
      case Partial | Complete =>
        e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions
      case PartialMerge | Final =>
        e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions
    }
  }

  // generate hash code for key
  // SPARK-24076: HashAggregate uses the same hash algorithm on the same expressions
  // as ShuffleExchange, it may lead to bad hash conflict when shuffle.partitions=8192*n,
  // pick a different seed to avoid this conflict
  val hashExpr = Murmur3Hash(groupingExpressions, 48)
  val hashEval = BindReferences.bindReference(hashExpr, child.output).genCode(ctx)

  // Testing hook: after testFallbackStartsAt rows, stop probing the maps to force the
  // sort-based fallback path.
  val (checkFallbackForGeneratedHashMap, checkFallbackForBytesToBytesMap, resetCounter,
    incCounter) = if (testFallbackStartsAt.isDefined) {
    val countTerm = ctx.addMutableState(CodeGenerator.JAVA_INT, "fallbackCounter")
    (s"$countTerm < ${testFallbackStartsAt.get._1}",
      s"$countTerm < ${testFallbackStartsAt.get._2}", s"$countTerm = 0;", s"$countTerm += 1;")
  } else {
    ("true", "true", "", "")
  }

  val findOrInsertRegularHashMap: String =
    s"""
       |// generate grouping key
       |${unsafeRowKeyCode.code}
       |${hashEval.code}
       |if ($checkFallbackForBytesToBytesMap) {
       |  // try to get the buffer from hash map
       |  $unsafeRowBuffer =
       |    $hashMapTerm.getAggregationBufferFromUnsafeRow($unsafeRowKeys, ${hashEval.value});
       |}
       |// Can't allocate buffer from the hash map. Spill the map and fallback to sort-based
       |// aggregation after processing all input rows.
       |if ($unsafeRowBuffer == null) {
       |  if ($sorterTerm == null) {
       |    $sorterTerm = $hashMapTerm.destructAndCreateExternalSorter();
       |  } else {
       |    $sorterTerm.merge($hashMapTerm.destructAndCreateExternalSorter());
       |  }
       |  $resetCounter
       |  // the hash map had be spilled, it should have enough memory now,
       |  // try to allocate buffer again.
       |  $unsafeRowBuffer = $hashMapTerm.getAggregationBufferFromUnsafeRow(
       |    $unsafeRowKeys, ${hashEval.value});
       |  if ($unsafeRowBuffer == null) {
       |    // failed to allocate the first page
       |    throw new OutOfMemoryError("No enough memory for aggregation");
       |  }
       |}
     """.stripMargin

  val findOrInsertHashMap: String = {
    if (isFastHashMapEnabled) {
      // If fast hash map is on, we first generate code to probe and update the fast hash map.
      // If the probe is successful the corresponding fast row buffer will hold the mutable row.
      s"""
         |if ($checkFallbackForGeneratedHashMap) {
         |  ${fastRowKeys.map(_.code).mkString("\\n")}
         |  if (${fastRowKeys.map("!" + _.isNull).mkString(" && ")}) {
         |    $fastRowBuffer = $fastHashMapTerm.findOrInsert(
         |      ${fastRowKeys.map(_.value).mkString(", ")});
         |  }
         |}
         |// Cannot find the key in fast hash map, try regular hash map.
         |if ($fastRowBuffer == null) {
         |  $findOrInsertRegularHashMap
         |}
       """.stripMargin
    } else {
      findOrInsertRegularHashMap
    }
  }

  val inputAttr = aggregateBufferAttributes ++ child.output
  // Here we set `currentVars(0)` to `currentVars(numBufferSlots)` to null, so that when
  // generating code for buffer columns, we use `INPUT_ROW`(will be the buffer row), while
  // generating input columns, we use `currentVars`.
  ctx.currentVars = new Array[ExprCode](aggregateBufferAttributes.length) ++ input

  val updateRowInRegularHashMap: String = {
    ctx.INPUT_ROW = unsafeRowBuffer
    val boundUpdateExpr = updateExpr.map(BindReferences.bindReference(_, inputAttr))
    val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExpr)
    val effectiveCodes = subExprs.codes.mkString("\\n")
    val unsafeRowBufferEvals = ctx.withSubExprEliminationExprs(subExprs.states) {
      boundUpdateExpr.map(_.genCode(ctx))
    }
    val updateUnsafeRowBuffer = unsafeRowBufferEvals.zipWithIndex.map { case (ev, i) =>
      val dt = updateExpr(i).dataType
      CodeGenerator.updateColumn(unsafeRowBuffer, dt, i, ev, updateExpr(i).nullable)
    }
    s"""
       |// common sub-expressions
       |$effectiveCodes
       |// evaluate aggregate function
       |${evaluateVariables(unsafeRowBufferEvals)}
       |// update unsafe row buffer
       |${updateUnsafeRowBuffer.mkString("\\n").trim}
     """.stripMargin
  }

  val updateRowInHashMap: String = {
    if (isFastHashMapEnabled) {
      if (isVectorizedHashMapEnabled) {
        ctx.INPUT_ROW = fastRowBuffer
        val boundUpdateExpr = updateExpr.map(BindReferences.bindReference(_, inputAttr))
        val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExpr)
        val effectiveCodes = subExprs.codes.mkString("\\n")
        val fastRowEvals = ctx.withSubExprEliminationExprs(subExprs.states) {
          boundUpdateExpr.map(_.genCode(ctx))
        }
        val updateFastRow = fastRowEvals.zipWithIndex.map { case (ev, i) =>
          val dt = updateExpr(i).dataType
          CodeGenerator.updateColumn(
            fastRowBuffer, dt, i, ev, updateExpr(i).nullable, isVectorized = true)
        }

        // If vectorized fast hash map is on, we first generate code to update row
        // in vectorized fast hash map, if the previous loop up hit vectorized fast hash map.
        // Otherwise, update row in regular hash map.
        s"""
           |if ($fastRowBuffer != null) {
           |  // common sub-expressions
           |  $effectiveCodes
           |  // evaluate aggregate function
           |  ${evaluateVariables(fastRowEvals)}
           |  // update fast row
           |  ${updateFastRow.mkString("\\n").trim}
           |} else {
           |  $updateRowInRegularHashMap
           |}
         """.stripMargin
      } else {
        // If row-based hash map is on and the previous loop up hit fast hash map,
        // we reuse regular hash buffer to update row of fast hash map.
        // Otherwise, update row in regular hash map.
        s"""
           |// Updates the proper row buffer
           |if ($fastRowBuffer != null) {
           |  $unsafeRowBuffer = $fastRowBuffer;
           |}
           |$updateRowInRegularHashMap
         """.stripMargin
      }
    } else {
      updateRowInRegularHashMap
    }
  }

  val declareRowBuffer: String = if (isFastHashMapEnabled) {
    val fastRowType = if (isVectorizedHashMapEnabled) {
      classOf[MutableColumnarRow].getName
    } else {
      "UnsafeRow"
    }
    s"""
       |UnsafeRow $unsafeRowBuffer = null;
       |$fastRowType $fastRowBuffer = null;
     """.stripMargin
  } else {
    s"UnsafeRow $unsafeRowBuffer = null;"
  }

  // We try to do hash map based in-memory aggregation first. If there is not enough memory (the
  // hash map will return null for new key), we spill the hash map to disk to free memory, then
  // continue to do in-memory aggregation and spilling until all the rows had been processed.
  // Finally, sort the spilled aggregate buffers by key, and merge them together for same key.
  s"""
   $declareRowBuffer
   $findOrInsertHashMap
   $incCounter
   $updateRowInHashMap
   """
}
// Verbose plan string: includes the output attributes in addition to keys/functions.
override def verboseString: String = toString(verbose = true)

// Compact plan string: grouping keys and aggregate functions only.
override def simpleString: String = toString(verbose = false)
/**
 * Renders this operator for plan strings.
 *
 * @param verbose when true, also includes the operator's output attributes
 */
private def toString(verbose: Boolean): String = {
  val aggExprs = aggregateExpressions
  testFallbackStartsAt match {
    case Some(fallbackStartsAt) =>
      // Testing-only mode that forces sort-based fallback after N rows.
      s"HashAggregateWithControlledFallback $groupingExpressions " +
        s"$aggExprs $resultExpressions fallbackStartsAt=$fallbackStartsAt"
    case None =>
      val keyString = Utils.truncatedString(groupingExpressions, "[", ", ", "]")
      val functionString = Utils.truncatedString(aggExprs, "[", ", ", "]")
      val outputString = Utils.truncatedString(output, "[", ", ", "]")
      if (verbose) {
        s"HashAggregate(keys=$keyString, functions=$functionString, output=$outputString)"
      } else {
        s"HashAggregate(keys=$keyString, functions=$functionString)"
      }
  }
}
}
object HashAggregateExec {

  /**
   * Returns true when every aggregation buffer attribute has a type that the
   * UnsafeFixedWidthAggregationMap can store, i.e. hash aggregation is usable.
   */
  def supportsAggregate(aggregateBufferAttributes: Seq[Attribute]): Boolean =
    UnsafeFixedWidthAggregationMap.supportsAggregationBufferSchema(
      StructType.fromAttributes(aggregateBufferAttributes))
}
| ahnqirage/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala | Scala | apache-2.0 | 37,793 |
package controllers.api.singer
import fixtures.{DBSpecBase, SingerSession, SpecBase}
import models._
import play.api.db.slick._
import play.api.libs.json.{JsSuccess, JsValue, Json}
import play.api.test.FakeRequest
import repositories.SessionSongRepositoryMessages.RequestSongRequest
import repositories.{SingerRepositoryComponent, SessionRepositoryComponent, SessionSongRepositoryComponent}
import org.mockito.Mockito._
import play.api.db.slick.{Session => DBSession}
import play.api.test.Helpers._
import repositories.SessionSongRepositoryMessages.SessionSongComponentFormatter._
import SessionSongFormatter._
import org.mockito.Matchers
import scalaz.Success
/**
 * Unit spec for SessionSongController#requestSong, using mocked repositories
 * and a fake Play request carrying a singer session.
 */
class SessionSongControllerSpec extends DBSpecBase with SingerSession {

  // Controller under test with its repository dependencies replaced by mocks.
  class TestController() extends SessionSongController with SessionSongRepositoryComponent
    with SessionRepositoryComponent with SingerRepositoryComponent {
    override val sessionSongRepository = mock[SessionSongRepository]
    override val singerRepository = mock[SingerRepository]
  }

  "SessionSongController#requestSong" when {

    // Shared fixture: a controller, a request payload, and a singer that the
    // mocked repository resolves for any DB session.
    abstract class WithController {
      implicit def dbSession: DBSession
      val controller = new TestController
      val req = RequestSongRequest("title", "artist")
      val reqJson = Json.toJson(req)
      val singerId = SingerId(1)
      val singer = Singer(Option(singerId), sessionId = SessionId(1), name = "Bob")
      when(controller.singerRepository.findById(Matchers.eq(singerId))(Matchers.any[DBSession])).thenReturn(Some(singer))
      // Issues the request with the given JSON body and the singer in session.
      def getResult(b: JsValue) = controller.requestSong().apply(FakeRequest().withBody(b).withSession(withSingerInSession(singer)))
    }

    "given valid data" should {
      // Fixture where the repository successfully creates the requested song.
      abstract class SuccessfulRequest extends WithController {
        val song = SessionSong(id = Some(SessionSongId(1)), sessionId = singer.sessionId, artist = req.artist, title = req.title, singerId = singer.id.get)
        when(controller.sessionSongRepository.requestSong(Matchers.eq(req), Matchers.eq(singer))(Matchers.any[DBSession])).thenReturn(Success(song))
      }

      // NOTE(review): the description says "200" but the assertion checks CREATED (201);
      // consider renaming the test description to match.
      "return 200" in { implicit s =>
        new SuccessfulRequest {
          override implicit def dbSession = s
          status(getResult(reqJson)) shouldBe CREATED
        }
      }

      "return the new song" in { implicit s =>
        new SuccessfulRequest {
          override implicit def dbSession = s
          // Round-trips the response JSON and requires it to equal the exact song instance.
          Json.fromJson[SessionSong](contentAsJson(getResult(reqJson))) match {
            case JsSuccess(`song`, _) => // pass
            case _ => fail
          }
        }
      }
    }
  }
}
| nagirrab/Karaoke | test/controllers/api/singer/SessionSongControllerSpec.scala | Scala | mit | 2,585 |
package com.dominikgruber.fpinscala.chapter06
/**
 * A purely functional pseudo-random number generator: instead of mutating
 * internal state, each call returns the generated value together with the
 * successor generator.
 */
trait RNG {
  // Returns the next pseudo-random Int and the next generator state.
  def nextInt: (Int, RNG)
}
/**
 * Linear congruential generator keeping 48 bits of state, using the same
 * multiplier and increment constants as java.util.Random.
 */
case class Simple(seed: Long) extends RNG {
  def nextInt: (Int, RNG) = {
    // Advance the 48-bit LCG state: seed' = (seed * 0x5DEECE66D + 0xB) mod 2^48.
    val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL
    val nextRNG = Simple(newSeed)
    // The output value is the top 32 of the 48 state bits.
    val n = (newSeed >>> 16).toInt
    (n, nextRNG)
  }
} | TheDom/functional-programming-in-scala | src/main/scala/com/dominikgruber/fpinscala/chapter06/RNG.scala | Scala | mit | 318 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.internal.collection
import scala.collection.immutable.LongMap
/**
 * An immutable map that remembers the order in which keys were inserted, so
 * `keys`/`values` iterate in insertion order. Alternative to `ListMap` with
 * better asymptotic performance on update/removal, at the cost of extra memory.
 */
private[monix] class LinkedMap[K, +V](
  val entries: Map[K, (V, Long)],
  private[this] val order: LongMap[K],
  private[this] val counter: Long) {

  /** True when this map holds no entries. */
  def isEmpty: Boolean = entries.isEmpty

  /** Returns a copy with `k` bound to `v`; the key moves to the back of the order. */
  def updated[V2 >: V](k: K, v: V2): LinkedMap[K, V2] = {
    val withoutOld = entries.get(k) match {
      case Some((_, staleId)) => order - staleId
      case None => order
    }
    new LinkedMap(entries.updated(k, (v, counter)), withoutOld.updated(counter, k), counter + 1)
  }

  /** Returns a copy without the entry at `k` (no-op if absent). */
  def -(k: K): LinkedMap[K, V] = {
    val prunedOrder = entries.get(k) match {
      case Some((_, staleId)) => order - staleId
      case None => order
    }
    new LinkedMap(entries - k, prunedOrder, counter)
  }

  /** The keys in this map, in the order they were added. */
  def keys: Iterable[K] = order.values

  /** The values in this map, in the order their keys were added. */
  def values: Iterable[V] = keys.flatMap(k => entries.get(k).map(_._1))

  /** Removes and returns the oldest value, FIFO style. */
  def dequeue: (V, LinkedMap[K, V]) = {
    val oldestKey = order.head._2
    (entries(oldestKey)._1, this - oldestKey)
  }

  override def toString: String = {
    val rendered = keys.zip(values)
    rendered.mkString("LinkedMap(", ", ", ")")
  }
}

private[monix] object LinkedMap {
  // Single shared empty instance, cast per type instantiation.
  private val emptyRef =
    new LinkedMap[Nothing, Nothing](Map.empty, LongMap.empty, 0)

  /** The empty map. */
  def empty[K, V]: LinkedMap[K, V] =
    emptyRef.asInstanceOf[LinkedMap[K, V]]
}
| alexandru/monifu | monix-execution/shared/src/main/scala/monix/execution/internal/collection/LinkedMap.scala | Scala | apache-2.0 | 2,603 |
package com.debasish.nlp.sentenceDetectors
import java.io.StringReader
import edu.stanford.nlp.ling.Sentence
import edu.stanford.nlp.process.DocumentPreprocessor
import scala.collection.mutable.ListBuffer
/**
* Created by Debasish Kaushik on 5/21/16.
*/
/**
 * Sentence splitter backed by Stanford CoreNLP's DocumentPreprocessor.
 */
private[sentenceDetectors] class StanfordSentenceDetection extends Sentences {

  /**
   * Splits the given text into sentences.
   *
   * @param string raw input text
   * @return detected sentences, in document order
   */
  def detect(string: String): List[String] = {
    // DocumentPreprocessor yields one tokenized sentence per iteration.
    val dp = new DocumentPreprocessor(new StringReader(string)).iterator
    val sentenceList = ListBuffer[String]()
    while(dp.hasNext) {
      // Re-join the sentence's tokens into a plain string.
      val sentenceString = Sentence.listToString(dp.next)
      sentenceList += sentenceString.toString
    }
    sentenceList.toList
  }
} | DEK11/MoreNLP | src/main/scala/com/debasish/nlp/sentenceDetectors/StanfordSentenceDetection.scala | Scala | apache-2.0 | 671 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import java.util.Date
import java.util.concurrent.ConcurrentHashMap
import java.util.function.Function
import scala.collection.JavaConverters._
import org.apache.spark.{JobExecutionStatus, SparkConf}
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler._
import org.apache.spark.sql.execution.SQLExecution
import org.apache.spark.sql.execution.metric._
import org.apache.spark.sql.internal.StaticSQLConf._
import org.apache.spark.status.{ElementTrackingStore, KVUtils, LiveEntity}
import org.apache.spark.status.config._
/**
 * A SparkListener that tracks the lifecycle and metrics of SQL executions and
 * persists them to an [[ElementTrackingStore]] for the SQL UI / status store.
 *
 * @param conf    Spark configuration (retention and update-period settings)
 * @param kvstore backing store for UI data
 * @param live    true when attached to a running application (periodic flushes);
 *                false when replaying event logs (single final flush)
 */
class SQLAppStatusListener(
    conf: SparkConf,
    kvstore: ElementTrackingStore,
    live: Boolean) extends SparkListener with Logging {

  // How often to flush intermediate state of a live execution to the store. When replaying logs,
  // never flush (only do the very last write).
  private val liveUpdatePeriodNs = if (live) conf.get(LIVE_ENTITY_UPDATE_PERIOD) else -1L

  // Live tracked data is needed by the SQL status store to calculate metrics for in-flight
  // executions; that means arbitrary threads may be querying these maps, so they need to be
  // thread-safe.
  private val liveExecutions = new ConcurrentHashMap[Long, LiveExecutionData]()
  private val stageMetrics = new ConcurrentHashMap[Int, LiveStageMetrics]()

  // Returns true if this listener has no live data. Exposed for tests only.
  private[sql] def noLiveData(): Boolean = {
    liveExecutions.isEmpty && stageMetrics.isEmpty
  }

  // Evict oldest completed executions once the retained-execution limit is exceeded.
  kvstore.addTrigger(classOf[SQLExecutionUIData], conf.get(UI_RETAINED_EXECUTIONS)) { count =>
    cleanupExecutions(count)
  }

  kvstore.onFlush {
    if (!live) {
      val now = System.nanoTime()
      liveExecutions.values.asScala.foreach { exec =>
        // This saves the partial aggregated metrics to the store; this works currently because
        // when the SHS sees an updated event log, all old data for the application is thrown
        // away.
        exec.metricsValues = aggregateMetrics(exec)
        exec.write(kvstore, now)
      }
    }
  }

  /** Associates a Spark job (and its stages) with the SQL execution that launched it. */
  override def onJobStart(event: SparkListenerJobStart): Unit = {
    val executionIdString = event.properties.getProperty(SQLExecution.EXECUTION_ID_KEY)
    if (executionIdString == null) {
      // This is not a job created by SQL
      return
    }

    val executionId = executionIdString.toLong
    val jobId = event.jobId
    val exec = getOrCreateExecution(executionId)

    // Record the accumulator IDs for the stages of this job, so that the code that keeps
    // track of the metrics knows which accumulators to look at.
    val accumIds = exec.metrics.map(_.accumulatorId).sorted.toList
    event.stageIds.foreach { id =>
      stageMetrics.put(id, new LiveStageMetrics(id, 0, accumIds.toArray, new ConcurrentHashMap()))
    }

    exec.jobs = exec.jobs + (jobId -> JobExecutionStatus.RUNNING)
    exec.stages ++= event.stageIds.toSet
    update(exec)
  }

  override def onStageSubmitted(event: SparkListenerStageSubmitted): Unit = {
    if (!isSQLStage(event.stageInfo.stageId)) {
      return
    }

    // Reset the metrics tracking object for the new attempt.
    Option(stageMetrics.get(event.stageInfo.stageId)).foreach { metrics =>
      metrics.taskMetrics.clear()
      metrics.attemptId = event.stageInfo.attemptNumber
    }
  }

  /** Marks the finished job's status on every execution that tracks it. */
  override def onJobEnd(event: SparkListenerJobEnd): Unit = {
    liveExecutions.values().asScala.foreach { exec =>
      if (exec.jobs.contains(event.jobId)) {
        val result = event.jobResult match {
          case JobSucceeded => JobExecutionStatus.SUCCEEDED
          case _ => JobExecutionStatus.FAILED
        }
        exec.jobs = exec.jobs + (event.jobId -> result)
        exec.endEvents += 1
        update(exec)
      }
    }
  }

  /** Folds heartbeat accumulator updates into the per-stage metric state. */
  override def onExecutorMetricsUpdate(event: SparkListenerExecutorMetricsUpdate): Unit = {
    event.accumUpdates.foreach { case (taskId, stageId, attemptId, accumUpdates) =>
      updateStageMetrics(stageId, attemptId, taskId, accumUpdates, false)
    }
  }

  override def onTaskEnd(event: SparkListenerTaskEnd): Unit = {
    if (!isSQLStage(event.stageId)) {
      return
    }

    val info = event.taskInfo
    // SPARK-20342. If processing events from a live application, use the task metrics info to
    // work around a race in the DAGScheduler. The metrics info does not contain accumulator info
    // when reading event logs in the SHS, so we have to rely on the accumulator in that case.
    val accums = if (live && event.taskMetrics != null) {
      event.taskMetrics.externalAccums.flatMap { a =>
        // This call may fail if the accumulator is gc'ed, so account for that.
        try {
          Some(a.toInfo(Some(a.value), None))
        } catch {
          case _: IllegalAccessError => None
        }
      }
    } else {
      info.accumulables
    }
    updateStageMetrics(event.stageId, event.stageAttemptId, info.taskId, accums,
      info.successful)
  }

  /** Returns the (possibly partially aggregated) metrics for an in-flight execution. */
  def liveExecutionMetrics(executionId: Long): Option[Map[Long, String]] = {
    Option(liveExecutions.get(executionId)).map { exec =>
      if (exec.metricsValues != null) {
        exec.metricsValues
      } else {
        aggregateMetrics(exec)
      }
    }
  }

  /** Aggregates task-level and driver-side accumulator values into display strings. */
  private def aggregateMetrics(exec: LiveExecutionData): Map[Long, String] = {
    val metricIds = exec.metrics.map(_.accumulatorId).sorted
    val metricTypes = exec.metrics.map { m => (m.accumulatorId, m.metricType) }.toMap
    val metrics = exec.stages.toSeq
      .flatMap { stageId => Option(stageMetrics.get(stageId)) }
      .flatMap(_.taskMetrics.values().asScala)
      .flatMap { metrics => metrics.ids.zip(metrics.values) }

    val aggregatedMetrics = (metrics ++ exec.driverAccumUpdates.toSeq)
      .filter { case (id, _) => metricIds.contains(id) }
      .groupBy(_._1)
      .map { case (id, values) =>
        id -> SQLMetrics.stringValue(metricTypes(id), values.map(_._2).toSeq)
      }

    // Check the execution again for whether the aggregated metrics data has been calculated.
    // This can happen if the UI is requesting this data, and the onExecutionEnd handler is
    // running at the same time. The metrics calculated for the UI can be innacurate in that
    // case, since the onExecutionEnd handler will clean up tracked stage metrics.
    if (exec.metricsValues != null) {
      exec.metricsValues
    } else {
      aggregatedMetrics
    }
  }

  // Records the latest accumulator values for one task of one stage attempt.
  // NOTE: the `return`s inside the foreach closure are nonlocal returns that exit
  // this whole method, not just the closure.
  private def updateStageMetrics(
      stageId: Int,
      attemptId: Int,
      taskId: Long,
      accumUpdates: Seq[AccumulableInfo],
      succeeded: Boolean): Unit = {
    Option(stageMetrics.get(stageId)).foreach { metrics =>
      if (metrics.attemptId != attemptId || metrics.accumulatorIds.isEmpty) {
        return
      }

      val oldTaskMetrics = metrics.taskMetrics.get(taskId)
      if (oldTaskMetrics != null && oldTaskMetrics.succeeded) {
        return
      }

      val updates = accumUpdates
        .filter { acc => acc.update.isDefined && metrics.accumulatorIds.contains(acc.id) }
        .sortBy(_.id)

      if (updates.isEmpty) {
        return
      }

      val ids = new Array[Long](updates.size)
      val values = new Array[Long](updates.size)
      updates.zipWithIndex.foreach { case (acc, idx) =>
        ids(idx) = acc.id
        // In a live application, accumulators have Long values, but when reading from event
        // logs, they have String values. For now, assume all accumulators are Long and covert
        // accordingly.
        values(idx) = acc.update.get match {
          case s: String => s.toLong
          case l: Long => l
          case o => throw new IllegalArgumentException(s"Unexpected: $o")
        }
      }

      // TODO: storing metrics by task ID can cause metrics for the same task index to be
      // counted multiple times, for example due to speculation or re-attempts.
      metrics.taskMetrics.put(taskId, new LiveTaskMetrics(ids, values, succeeded))
    }
  }

  /** Stores the plan graph and initial metadata for a newly started SQL execution. */
  private def onExecutionStart(event: SparkListenerSQLExecutionStart): Unit = {
    val SparkListenerSQLExecutionStart(executionId, description, details,
      physicalPlanDescription, sparkPlanInfo, time) = event

    // Converts the plan graph into its store-friendly wrapper representation.
    def toStoredNodes(nodes: Seq[SparkPlanGraphNode]): Seq[SparkPlanGraphNodeWrapper] = {
      nodes.map {
        case cluster: SparkPlanGraphCluster =>
          val storedCluster = new SparkPlanGraphClusterWrapper(
            cluster.id,
            cluster.name,
            cluster.desc,
            toStoredNodes(cluster.nodes),
            cluster.metrics)
          new SparkPlanGraphNodeWrapper(null, storedCluster)

        case node =>
          new SparkPlanGraphNodeWrapper(node, null)
      }
    }

    val planGraph = SparkPlanGraph(sparkPlanInfo)
    // De-duplicate metrics by accumulator id across all plan nodes.
    val sqlPlanMetrics = planGraph.allNodes.flatMap { node =>
      node.metrics.map { metric => (metric.accumulatorId, metric) }
    }.toMap.values.toList

    val graphToStore = new SparkPlanGraphWrapper(
      executionId,
      toStoredNodes(planGraph.nodes),
      planGraph.edges)
    kvstore.write(graphToStore)

    val exec = getOrCreateExecution(executionId)
    exec.description = description
    exec.details = details
    exec.physicalPlanDescription = physicalPlanDescription
    exec.metrics = sqlPlanMetrics
    exec.submissionTime = time
    update(exec)
  }

  /** Finalizes an execution's metrics and drops stage state no other execution needs. */
  private def onExecutionEnd(event: SparkListenerSQLExecutionEnd): Unit = {
    val SparkListenerSQLExecutionEnd(executionId, time) = event
    Option(liveExecutions.get(executionId)).foreach { exec =>
      exec.metricsValues = aggregateMetrics(exec)
      exec.completionTime = Some(new Date(time))
      exec.endEvents += 1
      update(exec)

      // Remove stale LiveStageMetrics objects for stages that are not active anymore.
      val activeStages = liveExecutions.values().asScala.flatMap { other =>
        if (other != exec) other.stages else Nil
      }.toSet
      stageMetrics.keySet().asScala
        .filter(!activeStages.contains(_))
        .foreach(stageMetrics.remove)
    }
  }

  /** Replaces the driver-side accumulator snapshot for an execution. */
  private def onDriverAccumUpdates(event: SparkListenerDriverAccumUpdates): Unit = {
    val SparkListenerDriverAccumUpdates(executionId, accumUpdates) = event
    Option(liveExecutions.get(executionId)).foreach { exec =>
      exec.driverAccumUpdates = accumUpdates.toMap
      update(exec)
    }
  }

  // Dispatches the SQL-specific listener events posted via onOtherEvent.
  override def onOtherEvent(event: SparkListenerEvent): Unit = event match {
    case e: SparkListenerSQLExecutionStart => onExecutionStart(e)
    case e: SparkListenerSQLExecutionEnd => onExecutionEnd(e)
    case e: SparkListenerDriverAccumUpdates => onDriverAccumUpdates(e)
    case _ => // Ignore
  }

  private def getOrCreateExecution(executionId: Long): LiveExecutionData = {
    liveExecutions.computeIfAbsent(executionId,
      new Function[Long, LiveExecutionData]() {
        override def apply(key: Long): LiveExecutionData = new LiveExecutionData(executionId)
      })
  }

  // Writes the execution to the store: immediately when it is fully finished
  // (all job-end events plus the execution-end event arrived), otherwise at
  // most once per liveUpdatePeriodNs.
  private def update(exec: LiveExecutionData): Unit = {
    val now = System.nanoTime()
    if (exec.endEvents >= exec.jobs.size + 1) {
      exec.write(kvstore, now)
      liveExecutions.remove(exec.executionId)
    } else if (liveUpdatePeriodNs >= 0) {
      if (now - exec.lastWriteTime > liveUpdatePeriodNs) {
        exec.write(kvstore, now)
      }
    }
  }

  // A stage is "SQL" when some tracked execution claims it.
  private def isSQLStage(stageId: Int): Boolean = {
    liveExecutions.values().asScala.exists { exec =>
      exec.stages.contains(stageId)
    }
  }

  // Deletes the oldest completed executions beyond the retention limit.
  private def cleanupExecutions(count: Long): Unit = {
    val countToDelete = count - conf.get(UI_RETAINED_EXECUTIONS)
    if (countToDelete <= 0) {
      return
    }

    val view = kvstore.view(classOf[SQLExecutionUIData]).index("completionTime").first(0L)
    val toDelete = KVUtils.viewToSeq(view, countToDelete.toInt)(_.completionTime.isDefined)
    toDelete.foreach { e => kvstore.delete(e.getClass(), e.executionId) }
  }
}
/**
 * Mutable in-flight state for a single SQL execution; snapshotted into a
 * SQLExecutionUIData by `doUpdate` whenever it is written to the store.
 */
private class LiveExecutionData(val executionId: Long) extends LiveEntity {

  var description: String = null
  var details: String = null
  var physicalPlanDescription: String = null
  // SQL metric descriptors declared by the physical plan.
  var metrics = Seq[SQLPlanMetric]()
  var submissionTime = -1L
  var completionTime: Option[Date] = None

  // Status of every Spark job launched on behalf of this execution.
  var jobs = Map[Int, JobExecutionStatus]()
  var stages = Set[Int]()
  // Accumulator values reported directly from the driver side.
  var driverAccumUpdates = Map[Long, Long]()

  // Final aggregated metric strings; non-null once the execution has ended.
  @volatile var metricsValues: Map[Long, String] = null

  // Just in case job end and execution end arrive out of order, keep track of how many
  // end events arrived so that the listener can stop tracking the execution.
  var endEvents = 0

  override protected def doUpdate(): Any = {
    new SQLExecutionUIData(
      executionId,
      description,
      details,
      physicalPlanDescription,
      metrics,
      submissionTime,
      completionTime,
      jobs,
      stages,
      metricsValues)
  }
}
// Per-stage tracking state: the accumulator ids that belong to the execution's
// SQL metrics, plus the latest metric values reported by each task.
private class LiveStageMetrics(
    val stageId: Int,
    var attemptId: Int,
    val accumulatorIds: Array[Long],
    val taskMetrics: ConcurrentHashMap[Long, LiveTaskMetrics])
// Parallel arrays of accumulator ids and values for a single task;
// `succeeded` marks values that came from a successfully finished task.
private class LiveTaskMetrics(
    val ids: Array[Long],
    val values: Array[Long],
    val succeeded: Boolean)
| esi-mineset/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala | Scala | apache-2.0 | 13,857 |
package springtastrophe
import java.io.File
sealed trait FileStructure {
  /**
   * Recursively collects every file and directory beneath `f`, depth-first.
   *
   * @param f the directory to search
   * @return all files and directories found beneath `f` (not including `f` itself);
   *         empty when `f` is not a directory or cannot be read
   */
  def whatsInADir(f: File): List[File] = {
    // File.listFiles returns null when `f` is not a directory or an I/O error
    // occurs; guard with Option to avoid the NullPointerException the previous
    // bare `.toList` would have thrown.
    val fs = Option(f.listFiles).fold(List.empty[File])(_.toList)
    fs ++ fs.filter(_.isDirectory).flatMap(whatsInADir)
  }
}
// Sentinel representing an absent/unknown file structure.
case object Empty extends FileStructure
/**
 * A directory on disk with lazily computed (and cached) views of its
 * recursive contents.
 *
 * @param d path of the directory
 */
case class Directory(d: String) extends FileStructure {
  // Every file and directory beneath `d`, walked once on first access.
  lazy val contents: List[File] = whatsInADir(new File(d))
  // Reuse the cached `contents` rather than walking the directory tree again
  // (the previous implementation re-invoked whatsInADir here).
  lazy val isEmpty: Boolean = contents.isEmpty
  // Only the .java files; a plain filter preserves order and is clearer than
  // the previous foldRight that prepended matches.
  lazy val javaFiles: List[File] = contents.filter(FileStructure.isJavaFile)
  /*
  lazy val javaFileNames: List[_] = {
    val fs = javaFiles map (f => f.getName)
    val content = fs map (f => fromInputStream(getClass.getResourceAsStream(f)))
    fs.map(i => i.split("\\\\.").head)
  }*/
}
object FileStructure {

  /**
   * Classifies `f` by file-name extension.
   *
   * @return JavaFile for names ending in ".java", OtherFile otherwise
   */
  def fileType(f: File): FilesICareAbout = {
    if (f.getName.endsWith(".java"))
      JavaFile(f)
    else
      OtherFile(f)
  }

  // True when `f` classifies as a JavaFile.
  def isJavaFile(f: File): Boolean = fileType(f) match {
    case JavaFile(_) => true
    case _ => false
  }
} | hamishdickson/springtastrophe | src/main/scala/springtastrophe/FileStructure.scala | Scala | mit | 1,247 |
package com.theseventhsense.utils.persistence
import akka.stream.scaladsl.Source
import scala.concurrent.{ExecutionContext, Future}
import scala.language.implicitConversions
/**
 * Implicit element-type conversions for [[StreamQueryResult]].
 */
object StreamQueryResult {
  /**
   * Converts the element type using an implicit element converter.
   *
   * Fixed for consistency: delegates to the case class's own `map` instead
   * of duplicating the stream-mapping logic inline.
   */
  implicit def convert[A, B](
    a: StreamQueryResult[A]
  )(implicit converter: (A) => B): StreamQueryResult[B] =
    a.map(converter)

  /**
   * Lifts `convert` over Future. Requires an ExecutionContext because it
   * maps over the Future.
   */
  implicit def convertFuture[A, B](
    a: Future[StreamQueryResult[A]]
  )(implicit
    converter: (A) => B,
    ec: ExecutionContext): Future[StreamQueryResult[B]] =
    a.map(source => convert(source))
}
/**
 * A query result delivered as a stream of elements together with the total
 * number of matching rows.
 */
case class StreamQueryResult[T](
  totalCount: Long,
  stream: Source[T, _]
) extends QueryResult {
  /** Transforms each streamed element; the total count is unchanged. */
  def map[U](converter: (T) => U): StreamQueryResult[U] =
    StreamQueryResult(totalCount, stream.map(converter))
}
| easel/utils-persistence | src/main/scala/com/theseventhsense/utils/persistence/StreamQueryResult.scala | Scala | mit | 993 |
package com.twitter.finagle.buoyant
import com.twitter.finagle._
import com.twitter.finagle.client.{StackClient, StdStackClient, Transporter}
import com.twitter.finagle.dispatch.{SerialClientDispatcher, SerialServerDispatcher}
import com.twitter.finagle.netty4.{Netty4Listener, Netty4Transporter}
import com.twitter.finagle.server.{StackServer, StdStackServer}
import com.twitter.finagle.transport.{Transport, TransportContext}
import com.twitter.io.Charsets
import com.twitter.util.Future
import java.net.SocketAddress
import java.nio.charset.StandardCharsets.UTF_8
import com.twitter.finagle.stats.NullStatsReceiver
import io.netty.channel._
import io.netty.handler.codec.{DelimiterBasedFrameDecoder, Delimiters}
import io.netty.handler.codec.string.{StringDecoder, StringEncoder}
/**
 * A trivial newline-delimited string echo protocol, exposing both a Finagle
 * client and server for end-to-end tests.
 *
 * Lovingly stolen from finagle-core's tests
 *
 * Copyright 2015 Twitter Inc and all that jazz.
 */
object Echo extends Client[String, String] with Server[String, String] {
  // Client/Server interface methods delegate to the concrete instances below.
  def newClient(dest: Name, label: String) =
    client.newClient(dest, label)
  def newService(dest: Name, label: String) =
    client.newService(dest, label)
  def serve(addr: SocketAddress, service: ServiceFactory[String, String]) =
    server.serve(addr, service)

  /*
   * Finagle Client
   */

  // Appends `delim` to outgoing String messages; non-String messages pass
  // through untouched.
  private class DelimEncoder(delim: Char) extends ChannelOutboundHandlerAdapter {
    override def write(ctx: ChannelHandlerContext, msg: Any, p: ChannelPromise): Unit = {
      val delimMsg = msg match {
        case m: String => m + delim
        case m => m
      }
      ctx.write(delimMsg, p)
      ()
    }
  }

  // Client pipeline: UTF-8 string codecs plus newline framing on write.
  private object StringClientPipeline extends (ChannelPipeline => Unit) {
    def apply(pipeline: ChannelPipeline): Unit = {
      pipeline.addLast("stringEncode", new StringEncoder(UTF_8))
      pipeline.addLast("stringDecode", new StringDecoder(UTF_8))
      pipeline.addLast("line", new DelimEncoder('\\n'))
      ()
    }
  }

  // Convenience wrapper exposing a ping() helper over the raw service.
  case class RichClient(underlying: Service[String, String]) {
    def ping(): Future[String] = underlying("ping")
  }

  trait StringRichClient { self: com.twitter.finagle.Client[String, String] =>
    def newRichClient(dest: Name, label: String): RichClient =
      RichClient(newService(dest, label))
  }

  // Standard StdStackClient wiring: String in/out, serial dispatch over a raw
  // Netty4 transport. The abstract type members (In/Out/Context) are required
  // by StdStackClient.
  case class Client(
    stack: Stack[ServiceFactory[String, String]] = StackClient.newStack,
    params: Stack.Params = Stack.Params.empty
  )
    extends StdStackClient[String, String, Client]
    with StringRichClient { self =>
    protected def copy1(
      stack: Stack[ServiceFactory[String, String]] = this.stack,
      params: Stack.Params = this.params
    ): Client = copy(stack, params)
    protected type In = String
    protected type Out = String
    protected type Context = TransportContext
    protected def newTransporter(addr: SocketAddress): Transporter[String, String, TransportContext] =
      Netty4Transporter.raw(StringClientPipeline, addr, params)
    protected def newDispatcher(transport: Transport[super.In, super.Out] {
      type Context <: self.Context
    }) =
      new SerialClientDispatcher(transport, NullStatsReceiver)
  }
  val client = Client()

  /*
   * Finagle Server
   */

  // Server pipeline: newline-based frame decoding (frames capped at 100
  // bytes) plus UTF-8 string codecs.
  object StringServerPipeline extends (ChannelPipeline => Unit) {
    def apply(pipeline: ChannelPipeline): Unit = {
      pipeline.addLast("line", new DelimiterBasedFrameDecoder(100, Delimiters.lineDelimiter: _*))
      pipeline.addLast("stringDecoder", new StringDecoder(UTF_8))
      pipeline.addLast("stringEncoder", new StringEncoder(UTF_8))
      ()
    }
  }

  // Mirror of Client: StdStackServer wiring with serial dispatch.
  case class Server(
    stack: Stack[ServiceFactory[String, String]] = StackServer.newStack,
    params: Stack.Params = StackServer.defaultParams
  ) extends StdStackServer[String, String, Server] { self =>
    protected def copy1(
      stack: Stack[ServiceFactory[String, String]] = this.stack,
      params: Stack.Params = this.params
    ) = copy(stack, params)
    protected type In = String
    protected type Out = String
    protected type Context = TransportContext
    protected def newListener() = Netty4Listener(StringServerPipeline, params)
    protected def newDispatcher(transport: Transport[String, String] {
      type Context <: self.Context
    }, service: Service[String, String]) =
      new SerialServerDispatcher(transport, service)
  }
  val server = Server()
}
| linkerd/linkerd | router/core/src/e2e/scala/com/twitter/finagle/buoyant/Echo.scala | Scala | apache-2.0 | 4,306 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.redis.data
import java.util.{Collections, Date}
import org.geotools.data.{DataStoreFinder, DataUtilities, Query, Transaction}
import org.geotools.filter.text.ecql.ECQL
import org.geotools.geometry.jts.ReferencedEnvelope
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.Configs
import org.locationtech.geomesa.utils.geotools.{CRS_EPSG_4326, FeatureUtils, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.io.WithClose
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class RedisDataStoreIntegrationTest extends Specification {

  import scala.collection.JavaConverters._

  // These specs hit a shared live Redis instance, so run them one at a time.
  sequential

  // Local Redis instance expected by the (normally skipped) integration tests.
  val url = "redis://localhost:6379"

  val sft = SimpleFeatureTypes.createImmutableType("test", "name:String:index=true,dtg:Date,*geom:Point:srid=4326")

  // Ten features: ids 0-9, one per hour on 2019-01-03, at longitudes -40 to -49.
  val features = Seq.tabulate(10) { i =>
    ScalaSimpleFeature.create(sft, i.toString, s"name$i", s"2019-01-03T0$i:00:00.000Z", s"POINT (-4$i 55)")
  }

  // A mix of spatial, spatio-temporal, attribute and id filters.
  val filters = Seq(
    "bbox(geom, -39, 54, -51, 56)",
    "bbox(geom, -45, 54, -49, 56)",
    "bbox(geom, -39, 54, -51, 56) AND dtg >= '2019-01-03T00:00:00.000Z' AND dtg < '2019-01-03T12:00:00.000Z'",
    "bbox(geom, -45, 54, -49, 56) AND dtg >= '2019-01-03T00:00:00.000Z' AND dtg < '2019-01-03T12:00:00.000Z'",
    "bbox(geom, -39, 54, -51, 56) AND dtg during 2019-01-03T04:30:00.000Z/2019-01-03T08:30:00.000Z",
    s"name IN('${features.map(_.getAttribute("name")).mkString("', '")}')",
    "name IN('name0', 'name2') AND dtg >= '2019-01-03T00:00:00.000Z' AND dtg < '2019-01-03T01:00:00.000Z'",
    features.map(_.getID).mkString("IN('", "', '", "')")
  ).map(ECQL.toFilter)

  // null means "no transform" (all attributes returned).
  val transforms = Seq(null, Array("dtg", "geom"), Array("name", "geom"))

  val params = Map(
    RedisDataStoreParams.RedisUrlParam.key -> url,
    RedisDataStoreParams.RedisCatalogParam.key -> "gm-test",
    RedisDataStoreParams.PipelineParam.key -> "false" // "true"
  )

  "RedisDataStore" should {
    "read and write features" in {
      skipped("Integration tests")
      val ds = DataStoreFinder.getDataStore(params.asJava).asInstanceOf[RedisDataStore]
      ds must not(beNull)
      try {
        ds.getSchema(sft.getTypeName) must beNull
        ds.createSchema(sft)
        ds.getSchema(sft.getTypeName) mustEqual sft
        WithClose(ds.getFeatureWriterAppend(sft.getTypeName, Transaction.AUTO_COMMIT)) { writer =>
          features.foreach(FeatureUtils.write(writer, _, useProvidedFid = true))
        }
        // Every filter/transform combination must return exactly the matching features.
        foreach(filters) { filter =>
          val filtered = features.filter(filter.evaluate)
          foreach(transforms) { transform =>
            val query = new Query(sft.getTypeName, filter, transform)
            val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
            val expected = if (transform == null) { filtered } else {
              val tsft = DataUtilities.createSubType(sft, transform)
              filtered.map(DataUtilities.reType(tsft, _)).map(ScalaSimpleFeature.copy)
            }
            result must containTheSameElementsAs(expected)
          }
        }
        ds.stats.getCount(sft) must beSome(10L)
        ds.stats.getBounds(sft) mustEqual new ReferencedEnvelope(-49, -40, 55, 55, CRS_EPSG_4326)
      } finally {
        ds.removeSchema(sft.getTypeName)
        ds.dispose()
      }
    }

    "expire features based on ingest time" in {
      skipped("Integration tests")
      RedisSystemProperties.AgeOffInterval.set("5 seconds")
      // Shadow the outer sft with a copy that expires features 10s after write.
      val sft = SimpleFeatureTypes.immutable(this.sft, Collections.singletonMap(Configs.FeatureExpiration, "10 seconds"))
      val ds = DataStoreFinder.getDataStore(params.asJava).asInstanceOf[RedisDataStore]
      ds must not(beNull)
      try {
        ds.getSchema(sft.getTypeName) must beNull
        ds.createSchema(sft)
        ds.getSchema(sft.getTypeName) mustEqual sft
        WithClose(ds.getFeatureWriterAppend(sft.getTypeName, Transaction.AUTO_COMMIT)) { writer =>
          features.foreach(FeatureUtils.write(writer, _, useProvidedFid = true))
        }
        foreach(filters) { filter =>
          val expected = features.filter(filter.evaluate)
          val query = new Query(sft.getTypeName, filter)
          val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
          result must containTheSameElementsAs(expected)
        }
        ds.stats.getCount(sft) must beSome(10L)
        ds.stats.getBounds(sft) mustEqual new ReferencedEnvelope(-49, -40, 55, 55, CRS_EPSG_4326)
        // Wait past the 10s expiration (plus the 5s age-off interval) so all features expire.
        Thread.sleep(1000 * 20)
        foreach(filters) { filter =>
          val query = new Query(sft.getTypeName, filter)
          val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
          result must beEmpty
        }
        ds.stats.getCount(sft) must beSome(0L)
      } finally {
        RedisSystemProperties.AgeOffInterval.clear()
        ds.removeSchema(sft.getTypeName)
        ds.dispose()
      }
    }

    "expire features based on attribute time" in {
      skipped("Integration tests")
      RedisSystemProperties.AgeOffInterval.set("5 seconds")
      // age off the first feature, since they are one hour apart
      val time = System.currentTimeMillis() + 10000L - features.head.getAttribute("dtg").asInstanceOf[Date].getTime
      val sft = SimpleFeatureTypes.immutable(this.sft, Collections.singletonMap(Configs.FeatureExpiration, s"dtg($time ms)"))
      val ds = DataStoreFinder.getDataStore(params.asJava).asInstanceOf[RedisDataStore]
      ds must not(beNull)
      try {
        ds.getSchema(sft.getTypeName) must beNull
        ds.createSchema(sft)
        ds.getSchema(sft.getTypeName) mustEqual sft
        WithClose(ds.getFeatureWriterAppend(sft.getTypeName, Transaction.AUTO_COMMIT)) { writer =>
          features.foreach(FeatureUtils.write(writer, _, useProvidedFid = true))
        }
        foreach(filters) { filter =>
          val expected = features.filter(filter.evaluate)
          val query = new Query(sft.getTypeName, filter)
          val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
          result must containTheSameElementsAs(expected)
        }
        ds.stats.getCount(sft) must beSome(10L)
        ds.stats.getBounds(sft) mustEqual new ReferencedEnvelope(-49, -40, 55, 55, CRS_EPSG_4326)
        // After 20s only the first (oldest) feature should have aged off.
        Thread.sleep(1000 * 20)
        foreach(filters) { filter =>
          val expected = features.drop(1).filter(filter.evaluate)
          val query = new Query(sft.getTypeName, filter)
          val result = SelfClosingIterator(ds.getFeatureReader(query, Transaction.AUTO_COMMIT)).toList
          result must containTheSameElementsAs(expected)
        }
        ds.stats.getCount(sft) must beSome(9L)
      } finally {
        RedisSystemProperties.AgeOffInterval.clear()
        ds.removeSchema(sft.getTypeName)
        ds.dispose()
      }
    }
  }
}
| locationtech/geomesa | geomesa-redis/geomesa-redis-datastore/src/test/scala/org/locationtech/geomesa/redis/data/RedisDataStoreIntegrationTest.scala | Scala | apache-2.0 | 7,621 |
package com.example.actors
import akka.actor.Actor
import akka.actor.ActorLogging
import com.example.actors.CollectorMessages._
/** Stub geolocation actor: replies to LocateIPs with a canned IP -> (lat, lon) mapping. */
class LocatorActor extends Actor with ActorLogging {
  def receive = {
    case LocateIPs(ips) =>
      /* Your code here to lookup IP address to lat/lon pairs via http://freegeoip.net/ */
      // Optionally rate limit the calls to 10 per second (http://doc.akka.io/docs/akka/snapshot/contrib/throttle.html)
      val dummyMapping: List[Map[String, (Double, Double)]] = List(Map("1.1.1.1" -> (1.1,2.2)))
      log.info("Locating")
      sender ! LocateIPsResult(dummyMapping)
  }
}
| jcudit/collector | src/main/scala/com/example/actors/LocatorActor.scala | Scala | mit | 621 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010, 2011 Mark Harrah
*/
package sbt
import sbt.internal.util.{ AttributeKey, AttributeMap, complete, ConsoleOut, GlobalLogging, LineRange, MainLogging, SimpleReader, Types }
import sbt.util.{ Level, Logger }
import complete.{ DefaultParsers, Parser }
import sbt.internal.inc.{ CompilerCache, ScalaInstance }
import sbt.compiler.EvalImports
import Types.{ const, idFun }
import Aggregation.AnyKeys
import Project.LoadAction
import scala.annotation.tailrec
import sbt.io.IO
import sbt.io.Path._
import StandardMain._
import java.io.File
import java.net.URI
import java.util.Locale
import scala.util.control.NonFatal
/** This class is the entry point for sbt. */
final class xMain extends xsbti.AppMain {
  /** Builds the initial command queue and state, then hands control to the managed main loop. */
  def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
    import BasicCommands.early
    import BasicCommandStrings.runEarly
    import BuiltinCommands.{ initialize, defaults }
    import CommandStrings.{ BootCommand, DefaultsCommand, InitCommand }
    // Schedule the early defaults/init commands ahead of the boot command.
    val preCommands = runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil
    val state = initialState(configuration, Seq(defaults, early), preCommands)
    runManaged(state)
  }
}
/** Entry point for running sbt in script mode. */
final class ScriptMain extends xsbti.AppMain {
  def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
    val state = initialState(configuration, BuiltinCommands.ScriptCommands, Script.Name :: Nil)
    runManaged(state)
  }
}
/** Entry point for running the ivy console. */
final class ConsoleMain extends xsbti.AppMain {
  def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
    val state = initialState(configuration, BuiltinCommands.ConsoleCommands, IvyConsole.Name :: Nil)
    runManaged(state)
  }
}
object StandardMain {
  /**
   * Runs the main loop with a TrapExit manager installed so System.exit
   * calls are trapped; the previous manager is restored in a finally block
   * even if the loop throws.
   */
  def runManaged(s: State): xsbti.MainResult = {
    val previous = TrapExit.installManager()
    try MainLoop.runLogged(s)
    finally TrapExit.uninstallManager(previous)
  }

  /** The common interface to standard output, used for all built-in ConsoleLoggers. */
  val console = ConsoleOut.systemOutOverwrite(ConsoleOut.overwriteContaining("Resolving "))

  // Global logging is backed by a fresh temp file plus the shared console.
  def initialGlobalLogging: GlobalLogging = GlobalLogging.initial(MainLogging.globalDefault(console), File.createTempFile("sbt", ".log"), console)

  /**
   * Builds the initial State: trims user arguments, moves "early" commands
   * (from both preCommands and the user) to the front, and seeds the command
   * queue, history, attributes, logging and classloader cache.
   */
  def initialState(configuration: xsbti.AppConfiguration, initialDefinitions: Seq[Command], preCommands: Seq[String]): State = {
    import BasicCommandStrings.isEarlyCommand
    val userCommands = configuration.arguments.map(_.trim)
    val (earlyCommands, normalCommands) = (preCommands ++ userCommands).partition(isEarlyCommand)
    val commands = earlyCommands ++ normalCommands
    val initAttrs = BuiltinCommands.initialAttributes
    val s = State(configuration, initialDefinitions, Set.empty, None, commands, State.newHistory, initAttrs, initialGlobalLogging, State.Continue)
    s.initializeClassLoaderCache
  }
}
import DefaultParsers._
import CommandStrings._
import BasicCommandStrings._
import BasicCommands._
import CommandUtil._
object BuiltinCommands {
// Fresh attribute map used when constructing the initial State.
def initialAttributes = AttributeMap.empty

// Command sets for the three entry points: ivy console, script mode, full sbt.
def ConsoleCommands: Seq[Command] = Seq(ignore, exit, IvyConsole.command, setLogLevel, early, act, nop)

def ScriptCommands: Seq[Command] = Seq(ignore, exit, Script.command, setLogLevel, early, act, nop)

def DefaultCommands: Seq[Command] = Seq(ignore, help, completionsCommand, about, tasks, settingsCommand, loadProject,
  projects, project, reboot, read, history, set, sessionCommand, inspect, loadProjectImpl, loadFailed, Cross.crossBuild, Cross.switchVersion,
  setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, setLogLevel, plugin, plugins,
  ifLast, multi, shell, continuous, eval, alias, append, last, lastGrep, export, boot, nop, call, exit, early, initialize, act) ++
  compatCommands

// Commands scheduled at boot unless overridden by a user alias (see bootParser).
def DefaultBootCommands: Seq[String] = LoadProject :: (IfLast + " " + Shell) :: Nil
def boot = Command.make(BootCommand)(bootParser)

// `about` logs a human-readable summary of the current sbt/project/Scala setup.
def about = Command.command(AboutCommand, aboutBrief, aboutDetailed) { s => s.log.info(aboutString(s)); s }

def setLogLevel = Command.arb(const(logLevelParser), logLevelHelp)(LogManager.setGlobalLogLevel)

// Accepts exactly one of the Level enum names (debug/info/warn/error).
private[this] def logLevelParser: Parser[Level.Value] = oneOf(Level.values.toSeq.map(v => v.toString ^^^ v))

// This parser schedules the default boot commands unless overridden by an alias
def bootParser(s: State) = {
  val orElse = () => DefaultBootCommands ::: s
  delegateToAlias(BootCommand, success(orElse))(s)
}
// Identity/version accessors read from the launcher-provided app configuration.
def sbtName(s: State): String = s.configuration.provider.id.name
def sbtVersion(s: State): String = s.configuration.provider.id.version
def scalaVersion(s: State): String = s.configuration.provider.scalaProvider.version

/** Human-readable summary of the current project, its Scala version and plugins. */
def aboutProject(s: State): String =
  if (Project.isProjectLoaded(s)) {
    val e = Project.extract(s)
    val version = e.getOpt(Keys.version) match { case None => ""; case Some(v) => " " + v }
    val current = "The current project is " + Reference.display(e.currentRef) + version + "\\n"
    val sc = aboutScala(s, e)
    val built = if (sc.isEmpty) "" else "The current project is built against " + sc + "\\n"
    current + built + aboutPlugins(e)
  } else "No project is currently loaded"

/** Lists the distinct names of auto plugins and plugins across all build units. */
def aboutPlugins(e: Extracted): String = {
  def list(b: BuildUnit) = b.plugins.detected.autoPlugins.map(_.value.label) ++ b.plugins.detected.plugins.names
  val allPluginNames = e.structure.units.values.flatMap(u => list(u.unit)).toSeq.distinct
  if (allPluginNames.isEmpty) "" else allPluginNames.mkString("Available Plugins: ", ", ", "")
}

/**
 * Describes the Scala used by the current project, distinguishing a local
 * scalaHome build from a resolved Scala instance. The instance is computed
 * via `quiet` so a failing scalaInstance task degrades to None.
 */
def aboutScala(s: State, e: Extracted): String = {
  val scalaVersion = e.getOpt(Keys.scalaVersion)
  val scalaHome = e.getOpt(Keys.scalaHome).flatMap(idFun)
  val instance = e.getOpt(Keys.scalaInstance.task).flatMap(_ => quiet(e.runTask(Keys.scalaInstance, s)._2))
  (scalaVersion, scalaHome, instance) match {
    case (sv, Some(home), Some(si)) => "local Scala version " + selectScalaVersion(sv, si) + " at " + home.getAbsolutePath
    case (_, Some(home), None) => "a local Scala build at " + home.getAbsolutePath
    case (sv, None, Some(si)) => "Scala " + selectScalaVersion(sv, si)
    case (Some(sv), None, None) => "Scala " + sv
    case (None, None, None) => ""
  }
}
/** Banner text printed by the `about` command. */
def aboutString(s: State): String = {
  val (name, ver, scalaVer, about) = (sbtName(s), sbtVersion(s), scalaVersion(s), aboutProject(s))
  """This is %s %s
|%s
|%s, %s plugins, and build definitions are using Scala %s
|""".stripMargin.format(name, ver, about, name, name, scalaVer)
}

// Prefer the configured version when it matches the instance; otherwise report the instance's actual version.
private[this] def selectScalaVersion(sv: Option[String], si: ScalaInstance): String = sv match { case Some(si.version) => si.version; case _ => si.actualVersion }

// Evaluates the by-name argument, converting any Exception into None.
private[this] def quiet[T](t: => T): Option[T] = try { Some(t) } catch { case e: Exception => None }
// `settings` lists setting-typed keys; `tasks` lists task-typed keys.
def settingsCommand = showSettingLike(SettingsCommand, settingsPreamble, KeyRanks.MainSettingCutoff, key => !isTask(key.manifest))

def tasks = showSettingLike(TasksCommand, tasksPreamble, KeyRanks.MainTaskCutoff, key => isTask(key.manifest))

/**
 * Shared implementation of the `settings`/`tasks` commands: prints keys that
 * pass `keep`, filtered by rank according to the requested verbosity, or
 * detailed help for a single selected key.
 */
def showSettingLike(command: String, preamble: String, cutoff: Int, keep: AttributeKey[_] => Boolean) =
  Command(command, settingsBrief(command), settingsDetailed(command))(showSettingParser(keep)) {
    case (s: State, (verbosity: Int, selected: Option[String])) =>
      if (selected.isEmpty) System.out.println(preamble)
      val prominentOnly = verbosity <= 1
      val verboseFilter = if (prominentOnly) highPass(cutoff) else topNRanked(25 * verbosity)
      System.out.println(tasksHelp(s, keys => verboseFilter(keys filter keep), selected))
      System.out.println()
      if (prominentOnly) System.out.println(moreAvailableMessage(command, selected.isDefined))
      s
  }

// Parses an optional verbosity flag followed by an optional key name.
def showSettingParser(keepKeys: AttributeKey[_] => Boolean)(s: State): Parser[(Int, Option[String])] =
  verbosityParser ~ selectedParser(s, keepKeys).?

def selectedParser(s: State, keepKeys: AttributeKey[_] => Boolean): Parser[String] =
  singleArgument(allTaskAndSettingKeys(s).filter(keepKeys).map(_.label).toSet)

// `-v...` counts the v's (verbosity = count + 1); `-V` is maximum verbosity; default is 1.
def verbosityParser: Parser[Int] = success(1) | ((Space ~ "-") ~> (
  'v'.id.+.map(_.size + 1) |
  ("V" ^^^ Int.MaxValue)
))
// (label, description) pairs for documented keys, sorted by label.
def taskDetail(keys: Seq[AttributeKey[_]]): Seq[(String, String)] =
  sortByLabel(withDescription(keys)) flatMap taskStrings

/**
 * All distinct attribute keys visible from the current project. Keys whose
 * lookup throws are logged and skipped rather than failing the whole listing.
 */
def allTaskAndSettingKeys(s: State): Seq[AttributeKey[_]] = {
  val extracted = Project.extract(s)
  import extracted._
  val index = structure.index
  index.keyIndex.keys(Some(currentRef)).toSeq.map { key =>
    try
      Some(index.keyMap(key))
    catch {
      case NonFatal(ex) =>
        s.log error ex.getMessage
        None
    }
  }.collect { case Some(s) => s }.distinct
}

def sortByLabel(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.label)
def sortByRank(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.rank)
def withDescription(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.filter(_.description.isDefined)

// A key is a "task" when its manifest's runtime class is Task[_] or InputTask[_].
def isTask(mf: Manifest[_])(implicit taskMF: Manifest[Task[_]], inputMF: Manifest[InputTask[_]]): Boolean =
  mf.runtimeClass == taskMF.runtimeClass || mf.runtimeClass == inputMF.runtimeClass

// Rank-based filters used by showSettingLike.
def topNRanked(n: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).take(n)
def highPass(rankCutoff: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).takeWhile(_.rank <= rankCutoff)

// Either detailed help for one selected key, or an aligned table of all filtered keys.
def tasksHelp(s: State, filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]], arg: Option[String]): String = {
  val commandAndDescription = taskDetail(filter(allTaskAndSettingKeys(s)))
  arg match {
    case Some(selected) => detail(selected, commandAndDescription.toMap)
    case None => aligned(" ", " ", commandAndDescription) mkString ("\\n", "\\n", "")
  }
}

def taskStrings(key: AttributeKey[_]): Option[(String, String)] = key.description map { d => (key.label, d) }
// Installs the full default command set on the state.
def defaults = Command.command(DefaultsCommand) { s =>
  s.copy(definedCommands = DefaultCommands)
}

// Prepends commands read from the sbt rc files to the remaining commands.
def initialize = Command.command(InitCommand) { s =>
  /*"load-commands -base ~/.sbt/commands" :: */ readLines(readable(sbtRCs(s))) ::: s
}

// `eval` compiles and runs a Scala expression, against the loaded build when available.
def eval = Command.single(EvalCommand, Help.more(EvalCommand, evalDetailed)) { (s, arg) =>
  if (Project.isProjectLoaded(s)) loadedEval(s, arg) else rawEval(s, arg)
  s
}
// Evaluates `arg` using the loaded build's eval, auto-imports and classloader.
private[this] def loadedEval(s: State, arg: String): Unit = {
  val extracted = Project extract s
  import extracted._
  val result = session.currentEval().eval(arg, srcName = "<eval>", imports = autoImports(extracted))
  s.log.info(s"ans: ${result.tpe} = ${result.getValue(currentLoader)}")
}

// Evaluates `arg` with only the launcher's classpath (used when no build is loaded).
private[this] def rawEval(s: State, arg: String): Unit = {
  val app = s.configuration.provider
  val classpath = app.mainClasspath ++ app.scalaProvider.jars
  val result = Load.mkEval(classpath, s.baseDir, Nil).eval(arg, srcName = "<eval>", imports = new EvalImports(Nil, ""))
  s.log.info(s"ans: ${result.tpe} = ${result.getValue(app.loader)}")
}
def sessionCommand = Command.make(SessionCommand, sessionBrief, SessionSettings.Help)(SessionSettings.command)

/**
 * Re-evaluates settings with the given session and installs the resulting
 * build structure on the state.
 */
def reapply(newSession: SessionSettings, structure: BuildStructure, s: State): State = {
  s.log.info("Reapplying settings...")
  // Here, for correct behavior, we also need to re-inject a settings logger, as we'll be re-evaluating settings.
  val loggerInject = LogManager.settingsLogger(s)
  val withLogger = newSession.appendRaw(loggerInject :: Nil)
  val newStructure = Load.reapply(withLogger.mergeSettings, structure)(Project.showContextKey(newSession, structure))
  Project.setProject(newSession, newStructure, s)
}

/**
 * The `set` command: compiles the setting expression with the build's DSL
 * classloader and imports, applies it (to all matching scopes when the
 * `every` flag is given), logs a summary, and reapplies the settings.
 */
def set = Command(SetCommand, setBrief, setDetailed)(setParser) {
  case (s, (all, arg)) =>
    val extracted = Project extract s
    import extracted._
    val dslVals = extracted.currentUnit.unit.definitions.dslDefinitions
    // TODO - This is possibly inefficient (or stupid).  We should try to only attach the
    // classloader + imports NEEDED to compile the set command, rather than
    // just ALL of them.
    val ims = (imports(extracted) ++ dslVals.imports.map(i => (i, -1)))
    val cl = dslVals.classloader(currentLoader)
    val settings = EvaluateConfigurations.evaluateSetting(
      session.currentEval(),
      "<set>",
      ims,
      arg,
      LineRange(0, 0)
    )(cl)
    val setResult = if (all) SettingCompletions.setAll(extracted, settings) else SettingCompletions.setThis(s, extracted, settings, arg)
    s.log.info(setResult.quietSummary)
    s.log.debug(setResult.verboseSummary)
    reapply(setResult.session, structure, s)
}

// @deprecated("Use SettingCompletions.setThis", "0.13.0")
def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String) =
  SettingCompletions.setThis(s, extracted, settings, arg)

// `inspect` prints detailed information about a setting/task key.
def inspect = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
  case (s, (option, sk)) =>
    s.log.info(Inspect.output(s, option, sk))
    s
}
// Retained for compatibility; delegates to Inspect.output.
@deprecated("Use Inspect.output", "0.13.0")
def inspectOutput(s: State, option: Inspect.Mode, sk: Def.ScopedKey[_]): String = Inspect.output(s, option, sk)
/**
 * The `last-grep` command: searches either the streams of the given task
 * keys or the global log file for lines matching `pattern`.
 */
def lastGrep = Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) {
  case (s, (pattern, Some(sks))) =>
    val (str, ref, display) = extractLast(s)
    Output.lastGrep(sks, str.streams(s), pattern, printLast(s))(display)
    keepLastLog(s)
  case (s, (pattern, None)) =>
    // Fixed idiom: the previous `for ... yield` built an Option that was
    // immediately discarded; run the side effect without the allocation.
    for (logFile <- lastLogFile(s)) Output.lastGrep(logFile, pattern, printLast(s))
    keepLastLog(s)
}
// (structure, current project reference, key display) used by last/export.
def extractLast(s: State) = {
  val ext = Project.extract(s)
  (ext.structure, Select(ext.currentRef), ext.showKey)
}

// Parses an optional `every` flag followed by a setting definition.
def setParser = (s: State) => {
  val extracted = Project.extract(s)
  import extracted._
  token(Space ~> flag("every" ~ Space)) ~
    SettingCompletions.settingParser(structure.data, structure.index.keyMap, currentProject)
}

// Deprecated forwarders kept for compatibility with 0.13.0 callers.
@deprecated("Use Inspect.parser", "0.13.0")
def inspectParser: State => Parser[(Inspect.Mode, Def.ScopedKey[_])] = Inspect.parser
@deprecated("Use Inspect.spacedModeParser", "0.13.0")
val spacedModeParser: State => Parser[Inspect.Mode] = Inspect.spacedModeParser
@deprecated("Use Inspect.allKeyParser", "0.13.0")
def allKeyParser(s: State): Parser[AttributeKey[_]] = Inspect.allKeyParser(s)
@deprecated("Use Inspect.spacedKeyParser", "0.13.0")
val spacedKeyParser: State => Parser[Def.ScopedKey[_]] = Inspect.spacedKeyParser

val spacedAggregatedParser = (s: State) => Act.requireSession(s, token(Space) ~> Act.aggregatedKeyParser(s))
val aggregatedKeyValueParser: State => Parser[Option[AnyKeys]] = (s: State) => spacedAggregatedParser(s).map(x => Act.keyValues(s)(x)).?

val exportParser: State => Parser[() => State] = (s: State) => Act.requireSession(s, token(Space) ~> exportParser0(s))

/**
 * Parser for the `export` command. `--last` skips re-evaluation; otherwise
 * the keys are evaluated and, even if evaluation throws, the export stream
 * is still written before the exception is rethrown.
 */
private[sbt] def exportParser0(s: State): Parser[() => State] = {
  val extracted = Project extract s
  import extracted.{ showKey, structure }
  val keysParser = token(flag("--last" <~ Space)) ~ Act.aggregatedKeyParser(extracted)
  val show = Aggregation.ShowConfig(settingValues = true, taskValues = false, print = println _, success = false)
  for {
    lastOnly_keys <- keysParser
    kvs = Act.keyValues(structure)(lastOnly_keys._2)
    f <- if (lastOnly_keys._1) success(() => s) else Aggregation.evaluatingParser(s, structure, show)(kvs)
  } yield () => {
    def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream))
    val newS = try f() catch {
      case e: Exception =>
        try export0(s)
        finally { throw e }
    }
    export0(newS)
  }
}

def lastGrepParser(s: State) = Act.requireSession(s, (token(Space) ~> token(NotSpace, "<pattern>")) ~ aggregatedKeyValueParser(s))
/**
 * The `last` command: prints the last output of the given task keys, or of
 * the global log file when no keys are given.
 */
def last = Command(LastCommand, lastBrief, lastDetailed)(aggregatedKeyValueParser) {
  case (s, Some(sks)) => lastImpl(s, sks, None)
  case (s, None) =>
    // Fixed idiom: the previous `for ... yield` produced an Option that was
    // discarded; run the side effect directly.
    lastLogFile(s) foreach { logFile => Output.last(logFile, printLast(s)) }
    keepLastLog(s)
}
def export = Command(ExportCommand, exportBrief, exportDetailed)(exportParser)((s, f) => f())

// Prints the last output for the given keys, optionally for a specific stream id.
private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = {
  val (str, ref, display) = extractLast(s)
  Output.last(sks, str.streams(s), printLast(s), sid)(display)
  keepLastLog(s)
}
/** Determines the log file that last* commands should operate on. See also isLastOnly. */
def lastLogFile(s: State) = {
  val backing = s.globalLogging.backing
  if (isLastOnly(s)) backing.last else Some(backing.file)
}

/**
 * If false, shift the current log file to be the log file that 'last' will operate on.
 * If true, keep the previous log file as the one 'last' operates on because there is nothing useful in the current one.
 */
def keepLastLog(s: State): State = if (isLastOnly(s)) s.keepLastLog else s

/**
 * The last* commands need to determine whether to read from the current log file or the previous log file
 * and whether to keep the previous log file or not. This is selected based on whether the previous command
 * was 'shell', which meant that the user directly entered the 'last' command. If it wasn't directly entered,
 * the last* commands operate on any output since the last 'shell' command and do shift the log file.
 * Otherwise, the output since the previous 'shell' command is used and the log file is not shifted.
 */
def isLastOnly(s: State): Boolean = s.history.previous.forall(_ == Shell)

def printLast(s: State): Seq[String] => Unit = _ foreach println

// Imports made available to `eval`/`set` expressions for the current build.
def autoImports(extracted: Extracted): EvalImports = new EvalImports(imports(extracted), "<auto-imports>")

def imports(extracted: Extracted): Seq[(String, Int)] = {
  val curi = extracted.currentRef.build
  extracted.structure.units(curi).imports.map(s => (s, -1))
}
// Logs the project ids defined in `build`, marking the current project.
def listBuild(uri: URI, build: LoadedBuildUnit, current: Boolean, currentID: String, log: Logger) = {
  log.info("In " + uri)
  def prefix(id: String) = if (currentID != id) " " else if (current) " * " else "(*)"
  for (id <- build.defined.keys.toSeq.sorted) log.info("\\t" + prefix(id) + id)
}

def act = Command.customHelp(Act.actParser, actHelp)

// Help for `act` includes per-key detail only when a project is loaded.
def actHelp = (s: State) => CommandStrings.showHelp ++ CommandStrings.multiTaskHelp ++ keysHelp(s)

def keysHelp(s: State): Help =
  if (Project.isProjectLoaded(s))
    Help.detailOnly(taskDetail(allTaskAndSettingKeys(s)))
  else
    Help.empty
// `plugins` prints help for every available plugin.
def plugins = Command.command(PluginsCommand, pluginsBrief, pluginsDetailed) { s =>
  val helpString = PluginsDebug.helpAll(s)
  System.out.println(helpString)
  s
}

// Parses a known auto-plugin name after whitespace.
val pluginParser: State => Parser[AutoPlugin] = s => {
  val autoPlugins: Map[String, AutoPlugin] = PluginsDebug.autoPluginMap(s)
  token(Space) ~> Act.knownPluginParser(autoPlugins, "plugin")
}

// `plugin <name>` prints help for a single plugin.
def plugin = Command(PluginCommand)(pluginParser) { (s, plugin) =>
  val helpString = PluginsDebug.help(plugin, s)
  System.out.println(helpString)
  s
}

// `projects` lists projects, or adds/removes extra builds when given arguments.
def projects = Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s => projectsParser(s).?) {
  case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds)
  case (s, None) => showProjects(s); s
}
// Lists the current build first, then every other build.
def showProjects(s: State): Unit = {
  val extracted = Project extract s
  import extracted._
  import currentRef.{ build => curi, project => cid }
  listBuild(curi, structure.units(curi), true, cid, s.log)
  for ((uri, build) <- structure.units if curi != uri) listBuild(uri, build, false, cid, s.log)
}

/**
 * Applies `f` to the extra-build list and reloads the project; on any load
 * failure the previous extra builds are restored.
 * NOTE(review): the caught exception is not itself logged, only a generic
 * message — consider surfacing `e` at debug level.
 */
def transformExtraBuilds(s: State, f: List[URI] => List[URI]): State = {
  val original = Project.extraBuilds(s)
  val extraUpdated = Project.updateExtraBuilds(s, f)
  try doLoadProject(extraUpdated, LoadAction.Current)
  catch {
    case e: Exception =>
      s.log.error("Project loading failed: reverting to previous state.")
      Project.setExtraBuilds(s, original)
  }
}

// `projects add <uri>...` / `projects remove <uri>...`
def projectsParser(s: State): Parser[List[URI] => List[URI]] = {
  val addBase = token(Space ~> "add") ~> token(Space ~> basicUri, "<build URI>").+
  val removeBase = token(Space ~> "remove") ~> token(Space ~> Uri(Project.extraBuilds(s).toSet)).+
  addBase.map(toAdd => (xs: List[URI]) => (toAdd.toList ::: xs).distinct) |
    removeBase.map(toRemove => (xs: List[URI]) => xs.filterNot(toRemove.toSet))
}
def project = Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command)
def loadFailed = Command(LoadFailed)(loadProjectParser)(doLoadFailed)
@deprecated("No longer used.", "0.13.2")
def handleLoadFailed(s: State): State = doLoadFailed(s, "")
  /**
   * Interactive recovery loop shown when project loading fails: retry, quit,
   * re-run the last command, or ignore the failure. Recurses (tail-call) on
   * unrecognized input.
   */
  @tailrec
  private[this] def doLoadFailed(s: State, loadArg: String): State =
    {
      val result = (SimpleReader.readLine("Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? ") getOrElse Quit).toLowerCase(Locale.ENGLISH)
      // A non-empty answer matches when it is a prefix of the candidate, so
      // "r", "re" and "retry" all select retry. NOTE: this `s` shadows the
      // State parameter of the enclosing method.
      def matches(s: String) = !result.isEmpty && (s startsWith result)

      // Empty input (e.g. plain Enter) also retries.
      if (result.isEmpty || matches("retry"))
        loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog
      else if (matches(Quit))
        s.exit(ok = false)
      else if (matches("ignore")) {
        val hadPrevious = Project.isProjectLoaded(s)
        s.log.warn("Ignoring load failure: " + (if (hadPrevious) "using previously loaded project." else "no project loaded."))
        s
      } else if (matches("last"))
        // Re-run the last command, then fall back into this failure handler.
        LastCommand :: loadProjectCommand(LoadFailed, loadArg) :: s
      else {
        println("Invalid response.")
        doLoadFailed(s, loadArg)
      }
    }
def loadProjectCommands(arg: String) =
StashOnFailure ::
(OnFailure + " " + loadProjectCommand(LoadFailed, arg)) ::
loadProjectCommand(LoadProjectImpl, arg) ::
PopOnFailure ::
State.FailureWall ::
Nil
  /** `reload` (load-project) command: prepends the load command sequence to the remaining commands. */
  def loadProject = Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser) { (s, arg) => loadProjectCommands(arg) ::: s }
  // Captures the raw matched text of the load-action argument.
  private[this] def loadProjectParser = (s: State) => matched(Project.loadActionParser)
private[this] def loadProjectCommand(command: String, arg: String): String = s"$command $arg".trim
def loadProjectImpl = Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject)
  /**
   * Loads (or reloads) the build definition for `action` and installs the
   * resulting build structure and session settings into the state.
   */
  def doLoadProject(s0: State, action: LoadAction.Value): State =
    {
      val (s1, base) = Project.loadAction(SessionVar.clear(s0), action)
      IO.createDirectory(base)
      // Register a resident-compiler cache exactly once per state.
      val s = if (s1 has Keys.stateCompilerCache) s1 else registerCompilerCache(s1)
      val (eval, structure) =
        try Load.defaultLoad(s, base, s.log, Project.inPluginProject(s), Project.extraBuilds(s))
        catch {
          case ex: compiler.EvalException =>
            // Log the failure and its stack trace at debug level, then rethrow
            // the exception with an emptied stack trace (already logged).
            s0.log.debug(ex.getMessage)
            // NOTE(review): `\\t` in this interpolator renders a literal
            // backslash followed by "t", not a tab — possibly intended to be
            // "\t"; confirm against upstream sbt.
            ex.getStackTrace map (ste => s"\\tat $ste") foreach (s0.log.debug(_))
            ex.setStackTrace(Array.empty)
            throw ex
        }
      val session = Load.initialSession(structure, eval, s0)
      SessionSettings.checkSession(session, s)
      Project.setProject(session, structure, s)
    }
def registerCompilerCache(s: State): State =
{
val maxCompilers = System.getProperty("sbt.resident.limit")
val cache =
if (maxCompilers == null)
CompilerCache.fresh
else {
val num = try maxCompilers.toInt catch {
case e: NumberFormatException => throw new RuntimeException("Resident compiler limit must be an integer.", e)
}
if (num <= 0) CompilerCache.fresh else CompilerCache(num)
}
s.put(Keys.stateCompilerCache, cache)
}
}
| dansanduleac/sbt | main/src/main/scala/sbt/Main.scala | Scala | bsd-3-clause | 24,035 |
//Copyright 2014, Alex Khilko.
//This file is part of MoonGene which is released under MIT.
//See file LICENSE.TXT or go to www.alexkhilko.com for full license details.
package com.moongene.models.track
import org.joda.time.DateTime
// Models the event emitted when a user's very first session is tracked.
object FirstSessionEventObj {
  /**
   * Immutable payload describing a first-session event: device/user identity,
   * app authorization data, the client timestamp (UTC), and the from/to
   * state-and-event transition that triggered it.
   */
  case class FirstSessionEvent(//Device and user info
                        deviceId: String, //User's device ID as string
                        deviceBinId: Option[Array[Byte]],//User's device ID as byte array
                        version: String, //User defined application version
                        auth: Common.SysAuth, //App authorization data
                        timestamp: DateTime, //User's timestamp in UTC
                        //From - to event
                        fromState: Common.StateInfo, //State prior to the event
                        fromEvent: String, //Event prior to the current event
                        toState: Common.StateInfo, //Current event state
                        toEvent: String) //Current event
}
| InfiniteCode/MoonGene | src/gene/src/main/scala/com/moongene/models/track/FirstSessionEventObj.scala | Scala | mit | 1,234 |
package lila
package object game {
  /** The moves of a game as a sequence of PGN move strings, in play order. */
  type PgnMoves = Vector[String]
}
| ornicar/lichess-db | src/main/scala/lila/game/package.scala | Scala | agpl-3.0 | 71 |
package spinoco.protocol.http.header.value
import scodec.Codec
import spinoco.protocol.http.codec.helper._
/** An HTTP encoding token (e.g. a content- or transfer-coding name). */
sealed case class HttpEncoding(value: String)

object HttpEncoding {

  /** Codec that serializes the encoding as a trimmed ASCII token. */
  val codec: Codec[HttpEncoding] =
    trimmedAsciiToken.xmap(token => HttpEncoding(token), encoding => encoding.value)
}
package uk.gov.homeoffice.crypt
import java.nio.charset.StandardCharsets._
import javax.crypto.Cipher
import javax.crypto.spec.{IvParameterSpec, SecretKeySpec}
import scala.util.Try
import org.apache.commons.codec.binary.Base64._
import org.json4s.JValue
import org.json4s.JsonDSL._
import uk.gov.homeoffice.json.JsonFormats
import org.json4s.jackson.JsonMethods._
/**
 * Encrypts text to a signed JSON payload and decrypts it back.
 *
 * The cipher transformation, algorithm, encryption key and signing password
 * all come from the implicit [[Secrets]]. Encrypted data and the IV are
 * base64 encoded, then signed by appending an HMAC-SHA256 signature after a
 * "----" separator.
 */
trait Crypto extends JsonFormats {
  /**
   * Encrypts `data` with the given initialisation vector and returns a JSON
   * object of the form `{"data": <signed base64>, "iv": <signed base64>}`.
   */
  def encrypt(data: String, iv: String)(implicit secrets: Secrets): JValue = {
    val cipher: Cipher = Cipher.getInstance(secrets.transformation)
    val secretKey = new SecretKeySpec(secrets.encryptionKey.getBytes(UTF_8), secrets.algorithm)
    cipher.init(Cipher.ENCRYPT_MODE, secretKey, new IvParameterSpec(iv.getBytes(UTF_8), 0, cipher.getBlockSize))
    val encryptedData = cipher.doFinal(data.getBytes(UTF_8))
    ("data" -> sign(encodeBase64String(encryptedData), secrets.signingPassword)) ~ ("iv" -> sign(encodeBase64String(iv.getBytes()), secrets.signingPassword))
  }

  /** Encrypts a JSON value by rendering it to a string first. */
  def encrypt(data: JValue, iv: String)(implicit secrets: Secrets): JValue = encrypt(pretty(render(data)), iv)

  /**
   * Verifies the signatures on the `data` and `iv` fields of `j` and, when
   * both verify, decrypts and returns the original plain text. The returned
   * Try fails with IllegalAccessException when either signature is invalid.
   */
  def decrypt(j: JValue)(implicit secrets: Secrets): Try[String] = Try {
    val signedData = (j \\ "data").extract[String]
    val signedIV = (j \\ "iv").extract[String]

    if (verifySignatureFor(signedData, secrets.signingPassword) && verifySignatureFor(signedIV, secrets.signingPassword)) {
      val key = new SecretKeySpec(secrets.encryptionKey.getBytes(UTF_8), secrets.algorithm)
      val cipher: Cipher = Cipher.getInstance(secrets.transformation)
      cipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(decodeBase64(signedIV.getBytes()), 0, cipher.getBlockSize))
      new String(cipher.doFinal(decodeBase64(signedData.split("----").head.getBytes(UTF_8))))
    } else {
      throw new IllegalAccessException(s"Badly signed data $signedData & signedIV $signedIV")
    }
  }

  /**
   * Re-signs the payload portion (text before the "----" separator) and
   * compares it with the received signed value.
   */
  private def verifySignatureFor(signedData: String, signingPassword: String): Boolean = {
    require(signedData.nonEmpty)
    val signedDataWithClientPassword = sign(signedData.split("----").head, signingPassword)
    // Fixed: was the redundant `if (cond) true else false`.
    // NOTE(review): a plain String comparison is not constant-time; if timing
    // attacks are a concern, compare with java.security.MessageDigest.isEqual.
    signedData == signedDataWithClientPassword
  }

  /** Appends a base64 HMAC-SHA256 signature to `data`, separated by "----". */
  private def sign(data: String, signingPassword: String): String = {
    val hmac256 = new HmacSha256
    val signature = encodeBase64String(hmac256.create(signingPassword, data))
    s"$data----$signature"
  }
}
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tools.data.utils.akka
import akka.actor.ActorSystem
import akka.http.scaladsl._
import akka.http.scaladsl.settings.{ClientConnectionSettings, ConnectionPoolSettings}
import akka.stream.Materializer
import cmwell.tools.data.utils.logging.LabelId
import com.typesafe.config.ConfigFactory
/**
 * Factories for akka-http connections and connection pools that tag outgoing
 * requests with a cmwell-data-tools user-agent header.
 */
object HttpConnections extends DataToolsConfig {

  /** User-agent header value: the caller-supplied label, or a generic default. */
  private def userAgentFor(label: Option[LabelId]): String =
    label.fold(s"cmwell-data-tools using akka-http/${config.getString("akka.version")}")(
      l => s"cmwell-data-tools ${l.id}"
    )

  /** Connection-pool settings with the pool's user-agent header overridden. */
  private def poolSettings(userAgent: String): ConnectionPoolSettings =
    ConnectionPoolSettings(
      ConfigFactory
        .parseString(s"akka.http.host-connection-pool.client.user-agent-header=$userAgent")
        .withFallback(config)
    )

  /** Single outgoing connection flow to host:port, speaking http or https per `protocol`. */
  def outgoingConnection(host: String, port: Int, protocol: String = "http")(implicit system: ActorSystem,
                                                                             label: Option[LabelId] = None) = {
    val settings = ClientConnectionSettings(
      ConfigFactory
        .parseString(s"akka.http.host-connection-pool.client.user-agent-header=${userAgentFor(label)}")
        .withFallback(config)
    )
    protocol match {
      case "https" => Http().outgoingConnectionHttps(host, port, settings = settings)
      case _       => Http().outgoingConnection(host, port, settings = settings)
    }
  }

  /** New (non-shared) host connection pool to host:port. */
  def newHostConnectionPool[T](host: String, port: Int, protocol: String = "http")(implicit system: ActorSystem,
                                                                                   mat: Materializer,
                                                                                   label: Option[LabelId] = None) = {
    val settings = poolSettings(userAgentFor(label))
    protocol match {
      case "https" => Http().newHostConnectionPoolHttps[T](host, port, settings = settings)
      case _       => Http().newHostConnectionPool[T](host, port, settings = settings)
    }
  }

  /** Cached (shared per host:port) connection pool. */
  def cachedHostConnectionPool[T](host: String, port: Int, protocol: String = "http")(implicit system: ActorSystem,
                                                                                      mat: Materializer,
                                                                                      label: Option[LabelId] = None) = {
    // Fixed: this variant previously prefixed the setting path with
    // "data-tools." unlike its two siblings, so ConnectionPoolSettings most
    // likely never picked up the custom user-agent for cached pools.
    val settings = poolSettings(userAgentFor(label))
    protocol match {
      case "https" => Http().cachedHostConnectionPoolHttps[T](host, port, settings = settings)
      case _       => Http().cachedHostConnectionPool[T](host, port, settings = settings)
    }
  }
}
| thomsonreuters/CM-Well | server/cmwell-data-tools/src/main/scala/cmwell/tools/data/utils/akka/HttpConnections.scala | Scala | apache-2.0 | 3,536 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl
import org.apache.camel.processor.PolicyPerRouteTest
import org.apache.camel.scala.dsl.builder.{RouteBuilder, RouteBuilderSupport}
/**
 * Scala DSL version of Camel's [[PolicyPerRouteTest]]: the "foo" policy wraps
 * only the first route; assertions are inherited from the Java test via
 * RouteBuilderSupport.
 */
class SPolicyPerRouteTest extends PolicyPerRouteTest with RouteBuilderSupport {
  override def createRouteBuilder = new RouteBuilder {
    from("direct:start")
      .policy("foo").to("mock:foo").to("mock:bar").to("mock:result")
    from("direct:send")
      .to("direct:start")
      .to("mock:response")
  }
}
| YMartsynkevych/camel | components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/SPolicyPerRouteTest.scala | Scala | apache-2.0 | 1,286 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
 * Gatling performance test for the DurchfuehrungPruefung entity:
 * authenticates against the JHipster API, then repeatedly lists, creates,
 * reads and deletes entities, ramping up a configurable number of users.
 */
class DurchfuehrungPruefungGatlingTest extends Simulation {

    val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    // Log all HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
    // Log failed HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))

    // Target base URL; override with -DbaseURL=...
    val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""

    val httpConf = http
        .baseURL(baseURL)
        .inferHtmlResources()
        .acceptHeader("*/*")
        .acceptEncodingHeader("gzip, deflate")
        .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
        .connectionHeader("keep-alive")
        .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")

    val headers_http = Map(
        "Accept" -> """application/json"""
    )

    val headers_http_authentication = Map(
        "Content-Type" -> """application/json""",
        "Accept" -> """application/json"""
    )

    // The JWT obtained at login is injected into the session as ${access_token}.
    val headers_http_authenticated = Map(
        "Accept" -> """application/json""",
        "Authorization" -> "${access_token}"
    )

    val scn = scenario("Test the DurchfuehrungPruefung entity")
        .exec(http("First unauthenticated request")
        .get("/api/account")
        .headers(headers_http)
        .check(status.is(401))).exitHereIfFailed
        .pause(10)
        .exec(http("Authentication")
        .post("/api/authenticate")
        .headers(headers_http_authentication)
        .body(StringBody("""{"username":"admin", "password":"admin"}""")).asJSON
        .check(header.get("Authorization").saveAs("access_token"))).exitHereIfFailed
        .pause(1)
        .exec(http("Authenticated request")
        .get("/api/account")
        .headers(headers_http_authenticated)
        .check(status.is(200)))
        .pause(10)
        .repeat(2) {
            exec(http("Get all durchfuehrungPruefungs")
            .get("/api/durchfuehrung-pruefungs")
            .headers(headers_http_authenticated)
            .check(status.is(200)))
            .pause(10 seconds, 20 seconds)
            .exec(http("Create new durchfuehrungPruefung")
            .post("/api/durchfuehrung-pruefungs")
            .headers(headers_http_authenticated)
            .body(StringBody("""{"id":null, "datum":"2020-01-01T00:00:00.000Z", "kosten":"0"}""")).asJSON
            .check(status.is(201))
            .check(headerRegex("Location", "(.*)").saveAs("new_durchfuehrungPruefung_url"))).exitHereIfFailed
            .pause(10)
            .repeat(5) {
                exec(http("Get created durchfuehrungPruefung")
                .get("${new_durchfuehrungPruefung_url}")
                .headers(headers_http_authenticated))
                .pause(10)
            }
            .exec(http("Delete created durchfuehrungPruefung")
            .delete("${new_durchfuehrungPruefung_url}")
            .headers(headers_http_authenticated))
            .pause(10)
        }

    val users = scenario("Users").exec(scn)

    // User count and ramp duration are overridable via -Dusers / -Dramp.
    setUp(
        users.inject(rampUsers(Integer.getInteger("users", 100)) over (Integer.getInteger("ramp", 1) minutes))
    ).protocols(httpConf)
}
| t08094a/ffManagementSuite | src/test/gatling/user-files/simulations/DurchfuehrungPruefungGatlingTest.scala | Scala | gpl-3.0 | 3,539 |
package reshapes.ui.dialogs
import scala.swing.Action
import scala.swing.BoxPanel
import scala.swing.Button
import scala.swing.CheckBox
import scala.swing.Dialog
import scala.swing.Label
import scala.swing.Orientation
import scala.swing.TextField
import java.awt.Point
/**
 * Base class for modal dialogs that report whether the user confirmed them:
 * `showDialog` blocks until `hideDialog(result)` is called and then returns
 * that result.
 */
abstract class CustomDialog extends Dialog {

  // Set by hideDialog; reset to false each time the dialog is shown.
  private var dialogResult = false

  modal = true

  /** Shows the dialog (optionally at `position`) and returns the confirmation result. */
  def showDialog(position: Point = null) = {
    Option(position).foreach(p => location = p)
    dialogResult = false
    visible = true
    dialogResult
  }

  /** Hides the dialog, recording whether it was confirmed. */
  def hideDialog(result: Boolean): Unit = {
    dialogResult = result
    visible = false
  }
}
/**
 * Dialog collecting the collaboration-server connection parameters
 * (hostname plus command/exchange/listener ports). After the dialog closes,
 * the parsed values are available in the mutable fields below; on a parse
 * failure they are reset to null/-1 so `inputIsValid` reports false.
 */
class ServerDialog extends CustomDialog {
  val hostnameInput = new TextField(50) { text = "localhost" }
  val commandPortInput = new TextField(10) { text = "9998" }
  val exchangePortInput = new TextField(10) { text = "9999" }
  val listenerPortInput = new TextField(10) { text = "1337" }
  // Parsed results; sentinel values (null / -1) mean "not valid yet".
  var hostname: String = null
  var commandPort: Int = -1
  var exchangePort: Int = -1
  var listenerPort: Int = -1

  contents = new BoxPanel(Orientation.Vertical) {
    contents += new Label("Server hostname")
    contents += hostnameInput
    contents += new Label("Server Command-Port")
    contents += commandPortInput
    contents += new Label("Server Shapes-Exchange-Port")
    contents += exchangePortInput
    contents += new Label("Client listener port")
    contents += listenerPortInput

    contents += new BoxPanel(Orientation.Horizontal) {
      contents += new Button(Action("OK") {
        applyPorts
        hideDialog(true)
      })
      contents += new Button(Action("Cancel") { hideDialog(false) })
    }
  }

  /**
   * Parses the text fields into the result fields. If any port fails to parse
   * as an integer, all result fields are reset to their invalid sentinels.
   */
  def applyPorts(): Unit = {
    try {
      hostname = hostnameInput.text
      commandPort = commandPortInput.text.toInt
      exchangePort = exchangePortInput.text.toInt
      listenerPort = listenerPortInput.text.toInt
    }
    catch {
      case e: NumberFormatException =>
        hostname = null
        commandPort = -1
        exchangePort = -1
        listenerPort = -1
        e.printStackTrace
    }
  }

  /** True when every field parsed successfully into a usable value. */
  def inputIsValid() =
    hostname != null &&
      hostname.length > 0 &&
      commandPort > 0 &&
      exchangePort > 0 &&
      listenerPort > 0
}
/** Dialog with per-tab display options (intersections, coordinates, shape names). */
class NewTabDialog extends CustomDialog {

  val showIntersections = new CheckBox("show intersections")
  val showCoordinates = new CheckBox("show coordinates")
  val showNames = new CheckBox("show shape names")

  contents = new BoxPanel(Orientation.Vertical) {
    contents += showIntersections
    contents += showCoordinates
    contents += showNames
    contents += new Button(Action("OK") { hideDialog(true) })
  }
}
| volkc/REScala | Examples/Shapes/src/main/scala/reshapes/ui/dialogs/Dialogs.scala | Scala | apache-2.0 | 2,636 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.index.legacy
import org.apache.hadoop.hbase.client._
import org.locationtech.geomesa.hbase.data._
import org.locationtech.geomesa.hbase.index.HBaseIndexAdapter.ScanConfig
import org.locationtech.geomesa.hbase.index.{HBaseFeatureIndex, HBasePlatform}
import org.locationtech.geomesa.index.index.legacy.AttributeDateIndex
/** Version 1 of the HBase attribute (+ date) index, kept for reading legacy tables. */
case object HBaseAttributeIndexV1 extends HBaseLikeAttributeIndexV1 with HBasePlatform

trait HBaseLikeAttributeIndexV1 extends HBaseFeatureIndex
    with AttributeDateIndex[HBaseDataStore, HBaseFeature, Mutation, Scan, ScanConfig] {
  // Legacy schema version; newer writes use later index versions.
  override val version: Int = 1
}
| ddseapy/geomesa | geomesa-hbase/geomesa-hbase-datastore/src/main/scala/org/locationtech/geomesa/hbase/index/legacy/HBaseAttributeIndexV1.scala | Scala | apache-2.0 | 1,094 |
package org.scalarules.example
import org.scalarules.finance.nl._
import org.scalarules.utils.Glossary
/**
 * Glossary of facts used by the example tax rules: input amounts and rates,
 * plus the intermediate and final amounts derived from them. Each entry only
 * declares a typed fact; the rules that compute them live elsewhere.
 */
object ExampleGlossary extends Glossary {
  // Inputs
  val BaseIncome = defineFact[Bedrag]
  val TotalPrepaidTaxes = defineFact[Bedrag]
  val TotalPaidHealthCost = defineFact[Bedrag]
  // Defaults
  val DefaultPaidHealthCost = defineFact[Bedrag]
  val DefaultMinimumOwedTaxes = defineFact[Bedrag]
  // Rates and ceilings
  val FlatTaxRate = defineFact[Percentage]
  val HealthCostReimbursementPercentage = defineFact[Percentage]
  val HealthCostReimbursementCeiling = defineFact[Bedrag]
  // Derived amounts
  val BaseIncomeTax = defineFact[Bedrag]
  val BaseHealthCosts = defineFact[Bedrag]
  val HealthCostEligibleForReimbursement = defineFact[Bedrag]
  val TaxesReducedByReimbursements = defineFact[Bedrag]
  val LegallyOwedTaxes = defineFact[Bedrag]
  val ActualHealthCostReimbursement = defineFact[Bedrag]
  // Results
  val TaxReturnAmount = defineFact[Bedrag]
  val TaxDueAmount = defineFact[Bedrag]
}
| scala-rules/examples | src/main/scala/org/scalarules/example/ExampleGlossary.scala | Scala | mit | 941 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.forms.registration
import iht.config.AppConfig
import iht.forms.mappings.DateMapping
import iht.models._
import iht.utils.IhtFormValidator
import play.api.data.Forms._
import play.api.data.{Form, Mapping}
import play.api.i18n.Messages
/**
 * Play forms for capturing a co-executor's details during IHT registration:
 * UK/international addresses, personal details (new and edit variants), and
 * simple yes/no confirmation forms.
 */
trait CoExecutorForms extends IhtFormValidator {
  implicit val appConfig: AppConfig

  /** Mapping for a co-executor's address outside the UK (country instead of postcode). */
  def addressMappingCoexecInternational(implicit messages: Messages): Mapping[UkAddress] = mapping(
    "ukAddressLine1" -> of(ihtInternationalAddress("ukAddressLine2", "ukAddressLine3",
      "ukAddressLine4", "countryCode",
      "error.address.give", "error.address.giveInLine1And2",
      "error.address.giveUsing35CharsOrLess", "error.address.giveUsingOnlyValidChars",
      "error.country.select")),
    "ukAddressLine2" -> text,
    "ukAddressLine3" -> optional(text),
    "ukAddressLine4" -> optional(text),
    "countryCode" -> optional(text)
  )(UkAddress.applyInternational)(UkAddress.unapplyInternational)

  /** Mapping for a co-executor's UK address (postcode required). */
  lazy val addressMappingCoexecUk: Mapping[UkAddress] = mapping(
    "ukAddressLine1" -> of(ihtAddress("ukAddressLine2", "ukAddressLine3",
      "ukAddressLine4", "postCode", "countryCode",
      "error.address.give", "error.address.giveInLine1And2",
      "error.address.giveUsing35CharsOrLess", "error.address.giveUsingOnlyValidChars",
      "error.address.givePostcode",
      "error.address.givePostcodeUsingNumbersAndLetters", "error.country.select")),
    "ukAddressLine2" -> text,
    "ukAddressLine3" -> optional(text),
    "ukAddressLine4" -> optional(text),
    "postCode" -> text
  )(UkAddress.applyUk)(UkAddress.unapplyUk)

  lazy val coExecutorAddressUkForm = Form(addressMappingCoexecUk)

  def coExecutorAddressAbroadForm(implicit messages: Messages) = Form(addressMappingCoexecInternational(messages))

  // --- Shared field mappings -----------------------------------------------
  // These were previously duplicated verbatim between the "new" and "edit"
  // personal-details forms; they are factored out so the two forms cannot
  // drift apart.

  /** First name: required, max length from config, restricted character set. */
  private def firstNameMapping: Mapping[String] =
    ihtNonEmptyText("error.firstName.give")
      .verifying("error.firstName.giveUsingXCharsOrLess",
        f => f.length <= appConfig.validationMaxLengthFirstName)
      .verifying("error.firstName.giveUsingOnlyValidChars", f => nameAndAddressRegex.findFirstIn(f).fold(false)(_ => true))

  /** Last name: required, max length from config, restricted character set. */
  private def lastNameMapping: Mapping[String] =
    ihtNonEmptyText("error.lastName.give")
      .verifying("error.lastName.giveUsingXCharsOrLess",
        f => f.length <= appConfig.validationMaxLengthLastName)
      .verifying("error.lastName.giveUsingOnlyValidChars", f => nameAndAddressRegex.findFirstIn(f).fold(false)(_ => true))

  /** Date of birth: full, valid, non-future date. */
  private def dateOfBirthMapping =
    DateMapping(
      "error.dateOfBirth.giveFull",
      "error.dateOfBirth.giveCorrectDateUsingOnlyNumbers",
      "error.dateOfBirth.giveCorrectDay",
      "error.dateOfBirth.giveCorrectDayForMonth",
      "error.dateOfBirth.giveCorrectMonth",
      "error.dateOfBirth.giveCorrectYear",
      "error.dateOfBirth.giveFull",
      "error.dateOfBirth.giveNoneFuture",
      "error.dateOfBirth.giveCorrectDayMonth",
      "error.dateOfBirth.giveCorrectDayYear",
      "error.dateOfBirth.giveCorrectMonthYear"
    )

  /** NINO: validated against existing co-executors and must differ from the logged-in user's. */
  private def coExecutorNinoMapping(oRegDetails: Option[RegistrationDetails], loginNino: String) =
    ninoForCoExecutor(
      "error.nino.give",
      "error.nino.giveUsing8Or9Characters",
      "error.nino.giveUsingOnlyLettersAndNumbers",
      "id",
      oRegDetails,
      loginNino
    ).verifying("error.nino.coexec.sameaslogin", _ != loginNino)

  /** Phone number: required, bounded length, restricted character set. */
  private def phoneNumberMapping =
    mandatoryPhoneNumber(
      "error.phoneNumber.give",
      "error.phoneNumber.giveUsing27CharactersOrLess",
      "error.phoneNumber.giveUsingOnlyLettersAndNumbers"
    )

  /** Form for entering a new co-executor's personal details (includes the UK-address question). */
  def coExecutorPersonalDetailsForm(oRegDetails: Option[RegistrationDetails] = None, loginNino: String) = Form(
    mapping(
      "id" -> optional(text),
      "firstName" -> firstNameMapping,
      "lastName" -> lastNameMapping,
      "dateOfBirth" -> dateOfBirthMapping,
      "nino" -> coExecutorNinoMapping(oRegDetails, loginNino),
      "phoneNo" -> phoneNumberMapping,
      "isAddressInUk" -> yesNoQuestion("error.address.isInUK.give")
    )
    (
      (id, firstName, lastName, dateOfBirth, nino, phoneNo, isAddressInUk) =>
        CoExecutor(id, firstName, None, lastName, dateOfBirth, nino, None, None, ContactDetails(phoneNo), None,
          isAddressInUk)
    )
    (
      (c: CoExecutor) => Some(Tuple7(c.id, c.firstName, c.lastName, c.dateOfBirth, c.nino, c.contactDetails.phoneNo,
        c.isAddressInUk))
    )
  )

  /** Form for editing an existing co-executor's personal details (no UK-address question). */
  def coExecutorPersonalDetailsEditForm(oRegDetails: Option[RegistrationDetails] = None, loginNino: String) = Form(
    mapping(
      "id" -> optional(text),
      "firstName" -> firstNameMapping,
      "lastName" -> lastNameMapping,
      "dateOfBirth" -> dateOfBirthMapping,
      "nino" -> coExecutorNinoMapping(oRegDetails, loginNino),
      "phoneNo" -> phoneNumberMapping
    )
    (
      (id, firstName, lastName, dateOfBirth, nino, phoneNo) =>
        CoExecutor(id, firstName, None, lastName, dateOfBirth, nino, None, None, ContactDetails(phoneNo), None, None)
    )
    (
      (c: CoExecutor) => Some(Tuple6(c.id, c.firstName, c.lastName, c.dateOfBirth, c.nino, c.contactDetails.phoneNo))
    )
  )

  lazy val othersApplyingForProbateForm = Form(
    single(
      "areOthersApplyingForProbate" -> yesNoQuestion("error.applicant.selectIfAnyoneElseApplyingForProbate")
    )
  )

  // NOTE(review): this form reuses the "anyone else applying for probate"
  // error key for the "add more co-executors" question — confirm intended.
  lazy val executorOverviewForm = Form(
    single(
      "addMoreCoExecutors" -> yesNoQuestion("error.applicant.selectIfAnyoneElseApplyingForProbate")
    )
  )

  lazy val deleteConfirmationForm = Form(
    single(
      "hidden" -> optional(text)
    )
  )
}
| hmrc/iht-frontend | app/iht/forms/registration/CoExecutorForms.scala | Scala | apache-2.0 | 7,251 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
// SKIP-SCALATESTJS-START
import java.io.{ObjectOutputStream, ByteArrayOutputStream}
// SKIP-SCALATESTJS-END
import org.scalatest.EitherValues._
import org.scalatest.OptionValues._
import org.scalatest.SharedHelpers.thisLineNumber
import org.scalatest.exceptions.TestFailedException
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
/**
 * Regression tests for the EitherValues syntax: `.left.value`, `.right.value`
 * and `.value` must return the contained value when the projection matches,
 * and fail with a (serializable) TestFailedException pointing at the calling
 * line when it does not.
 */
class EitherValuesSpec extends AnyFunSpec {
  describe("values on Either") {

    it("should return the left value inside an either if left.value is defined") {
      val e: Either[String, String] = Left("hi there")
      e.left.value should === ("hi there")
      e.left.value should startWith ("hi")
    }

    it("should throw TestFailedException if left.value is empty") {
      val e: Either[String, String] = Right("hi there")
      val caught =
        the [TestFailedException] thrownBy {
          e.left.value should startWith ("hi")
        }
      // The failure must point at the line invoking `.value`, two lines up.
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 2)
      caught.failedCodeFileName.value should be ("EitherValuesSpec.scala")
      caught.message.value should be (Resources.eitherLeftValueNotDefined(e))
    }

    // SKIP-SCALATESTJS-START
    it("should throw a serialized TestFailedException") {
      val objectOutputStream: ObjectOutputStream = new ObjectOutputStream(new ByteArrayOutputStream())
      val e: Either[String, String] = Right("hi there")
      val caught =
        the [TestFailedException] thrownBy {
          e.left.value should startWith ("hi")
        }
      // The exception must survive Java serialization (JVM only).
      noException should be thrownBy objectOutputStream.writeObject(caught)
    }
    // SKIP-SCALATESTJS-END

    it("should return the right value inside an either if right.value is defined") {
      val e: Either[String, String] = Right("hi there")
      e.right.value should === ("hi there")
      e.right.value should startWith ("hi")
    }

    it("should throw TestFailedException if right.value is empty") {
      val e: Either[String, String] = Left("hi there")
      val caught =
        the [TestFailedException] thrownBy {
          e.right.value should startWith ("hi")
        }
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 2)
      caught.failedCodeFileName.value should be ("EitherValuesSpec.scala")
      caught.message.value should be (Resources.eitherRightValueNotDefined(e))
    }

    it("should return the right value inside an either if the either is a Right") {
      val e: Either[String, String] = Right("hi there")
      e.value should === ("hi there")
      e.value should startWith ("hi")
    }

    it("should throw TestFailedException if either is not a Right") {
      val e: Either[String, String] = Left("hi there")
      val caught =
        the [TestFailedException] thrownBy {
          e.value should startWith ("hi")
        }
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 2)
      caught.failedCodeFileName.value should be ("EitherValuesSpec.scala")
      caught.message.value should be (Resources.eitherValueNotDefined(e))
    }

    it("should allow an immediate application of parens to invoke apply on the type contained in the Left") {
      val lefty: Either[Map[String, Int], String] = Left(Map("I" -> 1, "II" -> 2))
      lefty.left.value("II") shouldBe 2
    }

    it("should allow an immediate application of parens to invoke apply on the type contained in the Right") {
      val righty: Either[String, Map[String, Int]] = Right(Map("I" -> 1, "II" -> 2))
      righty.right.value("II") shouldBe 2
    }

    it("should allow an immediate application of parens to invoke apply on the type contained in the Right if the Either is a Right") {
      val righty: Either[String, Map[String, Int]] = Right(Map("I" -> 1, "II" -> 2))
      righty.value("II") shouldBe 2
    }

    // Compile-only check: EitherValues must be mixable alongside OptionValues.
    it("should be able to used with OptionValues") {
      class TestSpec extends AnyFunSpec with EitherValues with OptionValues
    }
  }
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/EitherValuesSpec.scala | Scala | apache-2.0 | 4,577 |
// Initialization-order test: in C, `val d = f` calls `f`, which reads `d`
// itself before it is initialized, so the init checker must reject it
// (asserted by the inline `// error` marker).
abstract class Base(val x: Int) {
  val d: Int
  def f: Int = d
  val a = x
}
class C(x: Int) extends Base(x) {
  val d = f // error
}
// SAM-conversion arity tests: each function literal is ascribed a SAM trait
// whose single abstract method has a *different* arity, so every definition
// in Test must be rejected by the compiler.
trait T0 { def ap(): Int }
trait T1 { def ap(a: Any): Int }
trait T2 { def ap(a: Any, b: Any): Int }
class Test {
  def f0 = (() => 0): T1
  def f1 = ((x: Any) => 0): T2
  def f2 = ((x: Any) => 0): T0
  def f3 = ((x: Any) => 0): T2
  def f4 = ((x: Any, y: Any) => 0): T0
  def f5 = ((x: Any, y: Any) => 0): T1
  def f6 = ((x) => 0): T2
  def f7 = ((x) => 0): T0
  def f8 = ((x) => 0): T2
  def f9 = ((x, y) => 0): T0
  def g0 = ((x, y) => 0): T1
}
| scala/scala | test/files/neg/sammy_wrong_arity.scala | Scala | apache-2.0 | 454 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import com.twitter.util.{Future, Time}
import com.twitter.conversions.time._
import com.twitter.zipkin.common.{Service, DependencyLink, Dependencies}
import com.twitter.zipkin.storage.Aggregates
import java.sql.Connection
import anorm._
import anorm.SqlParser._
import com.twitter.algebird.Moments
import AnormThreads.inNewThread
/**
* Retrieve and store aggregate dependency information.
*
* The top annotations methods are stubbed because they're not currently
* used anywhere; that feature was never completed.
*/
case class AnormAggregates(db: DB, openCon: Option[Connection] = None) extends Aggregates {
  // Database connection: reuse the caller-supplied connection when given,
  // otherwise open a fresh one from the pool.
  private implicit val conn = openCon match {
    case None => db.getConnection()
    case Some(con) => con
  }

  /**
   * Closes the underlying connection.
   * NOTE(review): this also closes a connection supplied via `openCon`;
   * confirm that callers expect this ownership transfer.
   */
  def close() { conn.close() }
  /**
   * Gets the dependency links recorded in a time range.
   *
   * Both bounds are optional: `startDate` defaults to one day ago and
   * `endDate` defaults to now. (The earlier doc claimed `endDate` defaulted
   * to `startDate` plus one day, which did not match the implementation.)
   */
  def getDependencies(startDate: Option[Time], endDate: Option[Time]=None): Future[Dependencies] = inNewThread {
    val startMs = startDate.getOrElse(Time.now - 1.day).inMicroseconds
    val endMs = endDate.getOrElse(Time.now).inMicroseconds

    // Each row is one parent->child link plus the five raw moments (m0..m4)
    // of the call-duration distribution, rebuilt into a Moments instance.
    val links: List[DependencyLink] = SQL(
      """SELECT parent, child, m0, m1, m2, m3, m4
        |FROM zipkin_dependency_links AS l
        |LEFT JOIN zipkin_dependencies AS d
        |  ON l.dlid = d.dlid
        |WHERE start_ts >= {startTs}
        |  AND end_ts <= {endTs}
        |ORDER BY l.dlid DESC
      """.stripMargin)
      .on("startTs" -> startMs)
      .on("endTs" -> endMs)
      .as((str("parent") ~ str("child") ~ long("m0") ~ get[Double]("m1") ~ get[Double]("m2") ~ get[Double]("m3") ~ get[Double]("m4") map {
        case parent ~ child ~ m0 ~ m1 ~ m2 ~ m3 ~ m4 => new DependencyLink(
          new Service(parent),
          new Service(child),
          new Moments(m0, m1, m2, m3, m4)
        )
      }) *)

    new Dependencies(Time.fromMicroseconds(startMs), Time.fromMicroseconds(endMs), links)
  }
/**
* Write dependencies
*
* Synchronize these so we don't do concurrent writes from the same box
*/
def storeDependencies(dependencies: Dependencies): Future[Unit] = inNewThread {
db.withTransaction(conn, { implicit conn: Connection =>
val dlid = SQL("""INSERT INTO zipkin_dependencies
| (start_ts, end_ts)
|VALUES ({startTs}, {endTs})
""".stripMargin)
.on("startTs" -> dependencies.startTime.inMicroseconds)
.on("endTs" -> dependencies.endTime.inMicroseconds)
.executeInsert()
dependencies.links.foreach { link =>
SQL("""INSERT INTO zipkin_dependency_links
| (dlid, parent, child, m0, m1, m2, m3, m4)
|VALUES ({dlid}, {parent}, {child}, {m0}, {m1}, {m2}, {m3}, {m4})
""".stripMargin)
.on("dlid" -> dlid)
.on("parent" -> link.parent.name)
.on("child" -> link.child.name)
.on("m0" -> link.durationMoments.m0)
.on("m1" -> link.durationMoments.m1)
.on("m2" -> link.durationMoments.m2)
.on("m3" -> link.durationMoments.m3)
.on("m4" -> link.durationMoments.m4)
.execute()
}
})
}
/**
* Get the top annotations for a service name
*/
def getTopAnnotations(serviceName: String): Future[Seq[String]] = {
Future.value(Seq.empty[String])
}
/**
* Get the top key value annotation keys for a service name
*/
def getTopKeyValueAnnotations(serviceName: String): Future[Seq[String]] = {
Future.value(Seq.empty[String])
}
/**
* Override the top annotations for a service
*/
def storeTopAnnotations(serviceName: String, a: Seq[String]): Future[Unit] = {
Future.Unit
}
/**
* Override the top key value annotation keys for a service
*/
def storeTopKeyValueAnnotations(serviceName: String, a: Seq[String]): Future[Unit] = {
Future.Unit
}
}
| tangyang/zipkin | zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/AnormAggregates.scala | Scala | apache-2.0 | 4,587 |
package knot.core.stream.sinks
import knot.core.stream.Sink
import knot.core.stream.ops.SinkOps
import knot.core.stream.ops.StreamOps.{Completed, ErrorEmitted}
import knot.core.stream.plugs.Input
import scala.collection.immutable
/** A sink that collects every received element, in arrival order, into an immutable sequence. */
case class SeqSink[T]() extends Sink[T, immutable.Seq[T]] {
  override def newOps = new SinkOps[T, immutable.Seq[T]] {
    // Accumulates elements until the upstream terminates.
    private[this] val collected = Vector.newBuilder[T]

    override protected def onUpstreamFinish: LifecycleBehavior = {
      // Normal completion: hand the collected sequence downstream.
      case Completed              => complete(collected.result())
      // Upstream failed: propagate the failure cause.
      case ErrorEmitted(cause, _) => error(cause)
    }

    override protected def onNext(in: Input[T], element: T): Unit = {
      collected += element
      requestIfNeeded(in)
    }

    override def toDebugString = "SeqSink"
  }
}
| defvar/knot | knot-core/src/main/scala/knot/core/stream/sinks/SeqSink.scala | Scala | mit | 783 |
package scala.annotation
/** This annotation can only be used on a field which defines a lazy val.
 * When this annotation is used, the initialization of the lazy val will use a
 * faster mechanism which is not thread-safe; callers must ensure the
 * annotated lazy val is only accessed from a single thread.
 */
final class threadUnsafe extends StaticAnnotation
| som-snytt/dotty | library/src/scala/annotation/threadUnsafe.scala | Scala | apache-2.0 | 281 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.optim
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{T, Table}
import scala.reflect.ClassTag
/**
 * Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701
 * @param decayRate decayRate, also called interpolation parameter rho
 * @param Epsilon Epsilon added for numerical stability before taking square roots
 * @tparam T numeric element type of the parameter tensors (Float or Double)
 */
class Adadelta[@specialized(Float, Double) T: ClassTag](
  var decayRate: Double = 0.9,
  var Epsilon: Double = 1e-10
)(implicit ev: TensorNumeric[T])
  extends OptimMethod[T] {
  /**
   * Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701
   *
   * @param feval a function that takes a single input (X), the point of a evaluation, and
   *              returns f(X) and df/dX
   * @param parameter the initial point
   * state("paramVariance") : vector of temporal variances of parameters
   * state("accDelta"): vector of accumulated delta of gradients
   * @return the new x vector and the function list {fx}, evaluated before the update
   */
  override def optimize(feval: (Tensor[T]) => (T, Tensor[T]),
    parameter: Tensor[T]): (Tensor[T], Array[T]) = {
    val nevals = state.getOrElse[Int]("evalCounter", 0)
    val dr = this.decayRate
    val eps = this.Epsilon
    val (fx, dfdx) = feval(parameter)
    // Fetch the four accumulator tensors from state, or lazily create them
    // zero-filled (same shape as the gradient) on the first call.
    val (_paramVariance, _paramStd, _delta, _accDelta) =
      if (state.get[Tensor[T]]("paramVariance").isDefined) {
        (state.get[Tensor[T]]("paramVariance").get, state.get[Tensor[T]]("paramStd").get,
          state.get[Tensor[T]]("delta").get, state.get[Tensor[T]]("accDelta").get)
      } else {
        (Tensor[T]().resizeAs(dfdx).zero(), Tensor[T]().resizeAs(dfdx).zero(),
          Tensor[T]().resizeAs(dfdx).zero(), Tensor[T]().resizeAs(dfdx).zero())
      }
    // E[g^2]_t = rho * E[g^2]_{t-1} + (1 - rho) * g_t^2  (in-place)
    _paramVariance.mul(ev.fromType[Double](dr)).addcmul(ev.fromType[Double](1-dr), dfdx, dfdx)
    // RMS[g]_t = sqrt(E[g^2]_t + eps)
    _paramStd.copy(_paramVariance).add(ev.fromType[Double](eps)).sqrt()
    // delta_t = (RMS[delta]_{t-1} / RMS[g]_t) * g_t
    _delta.copy(_accDelta).add(ev.fromType[Double](eps)).sqrt()
      .cdiv(_paramStd).cmul(dfdx)
    // x_{t+1} = x_t - delta_t  (parameter updated in place)
    parameter.add(ev.fromType[Double](-1), _delta)
    // E[delta^2]_t = rho * E[delta^2]_{t-1} + (1 - rho) * delta_t^2
    _accDelta.mul(ev.fromType[Double](dr)).addcmul(ev.fromType[Double](1-dr), _delta, _delta)
    state("evalCounter") = nevals + 1
    state("paramVariance") = _paramVariance
    state("paramStd") = _paramStd
    state("delta") = _delta
    state("accDelta") = _accDelta
    (parameter, Array(fx))
  }

  // Restore hyper-parameters from a configuration table, keeping current
  // values for any key that is absent.
  override def loadFromTable(config: Table): this.type = {
    this.decayRate = config.get[Double]("decayRate").getOrElse(this.decayRate)
    this.Epsilon = config.get[Double]("Epsilon").getOrElse(this.Epsilon)
    this
  }

  // Drop all accumulated state so the next optimize() call starts fresh.
  override def clearHistory(): Unit = {
    state.delete("paramVariance")
    state.delete("paramStd")
    state.delete("delta")
    state.delete("accDelta")
  }

  // Adadelta derives per-coordinate step sizes from its accumulators, so
  // there is no single scalar learning rate to report.
  override def getLearningRate(): Double = 0.0
}
| jenniew/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/optim/Adadelta.scala | Scala | apache-2.0 | 3,562 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.planner
import org.neo4j.cypher.internal.frontend.v2_3.CypherException
import org.neo4j.cypher.internal.frontend.v2_3.spi.MapToPublicExceptions
import org.neo4j.kernel.api.exceptions.Status
/**
 * Thrown when this planner cannot produce a plan for a query and the caller
 * should fall back to another implementation. If the default message is ever
 * seen by a user, the fallback mechanism itself has failed.
 */
class CantHandleQueryException(message: String = "Internal error - should have used fall back to execute query, but something went horribly wrong")
  extends CypherException(message, null) {
  // Status reported when this exception escapes to the client.
  def status = Status.Statement.ExecutionFailure
  // This internal exception has no public counterpart: re-throw it unchanged.
  def mapToPublic[T <: Throwable](thrower: MapToPublicExceptions[T]) = throw this
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/planner/CantHandleQueryException.scala | Scala | apache-2.0 | 1,362 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical.statsEstimation
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, Expression}
import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical.{ColumnStat, Join, Statistics}
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils._
import org.apache.spark.sql.internal.SQLConf
object JoinEstimation extends Logging {

  /**
   * Estimates output statistics for `join`, dispatching on the join type.
   *
   * @return `None` when the join type is unsupported or the children do not
   *         carry enough statistics for estimation.
   */
  def estimate(conf: SQLConf, join: Join): Option[Statistics] = join.joinType match {
    case Inner | Cross | LeftOuter | RightOuter | FullOuter =>
      InnerOuterEstimation(conf, join).doEstimate()
    case LeftSemi | LeftAnti =>
      LeftSemiAntiEstimation(conf, join).doEstimate()
    case _ =>
      logDebug(s"[CBO] Unsupported join type: ${join.joinType}")
      None
  }
}
case class InnerOuterEstimation(conf: SQLConf, join: Join) extends Logging {
  // Statistics of the two join children; doEstimate requires row counts on both.
  private val leftStats = join.left.stats(conf)
  private val rightStats = join.right.stats(conf)
  /**
   * Estimate output size and number of rows after a join operator, and update output column stats.
   */
  def doEstimate(): Option[Statistics] = join match {
    // Bail out when either side lacks a row count: nothing can be estimated.
    case _ if !rowCountsExist(conf, join.left, join.right) =>
      None
    case ExtractEquiJoinKeys(joinType, leftKeys, rightKeys, _, _, _) =>
      // 1. Compute join selectivity
      val joinKeyPairs = extractJoinKeysWithColStats(leftKeys, rightKeys)
      val selectivity = joinSelectivity(joinKeyPairs)
      // 2. Estimate the number of output rows
      val leftRows = leftStats.rowCount.get
      val rightRows = rightStats.rowCount.get
      val innerJoinedRows = ceil(BigDecimal(leftRows * rightRows) * selectivity)
      // Make sure outputRows won't be too small based on join type.
      val outputRows = joinType match {
        case LeftOuter =>
          // All rows from left side should be in the result.
          leftRows.max(innerJoinedRows)
        case RightOuter =>
          // All rows from right side should be in the result.
          rightRows.max(innerJoinedRows)
        case FullOuter =>
          // T(A FOJ B) = T(A LOJ B) + T(A ROJ B) - T(A IJ B)
          leftRows.max(innerJoinedRows) + rightRows.max(innerJoinedRows) - innerJoinedRows
        case _ =>
          // Don't change for inner or cross join
          innerJoinedRows
      }
      // 3. Update statistics based on the output of join
      val inputAttrStats = AttributeMap(
        leftStats.attributeStats.toSeq ++ rightStats.attributeStats.toSeq)
      val attributesWithStat = join.output.filter(a => inputAttrStats.contains(a))
      val (fromLeft, fromRight) = attributesWithStat.partition(join.left.outputSet.contains(_))
      val outputStats: Seq[(Attribute, ColumnStat)] = if (outputRows == 0) {
        // The output is empty, we don't need to keep column stats.
        Nil
      } else if (selectivity == 0) {
        joinType match {
          // For outer joins, if the join selectivity is 0, the number of output rows is the
          // same as that of the outer side. And column stats of join keys from the outer side
          // keep unchanged, while column stats of join keys from the other side should be updated
          // based on added null values.
          case LeftOuter =>
            fromLeft.map(a => (a, inputAttrStats(a))) ++
              fromRight.map(a => (a, nullColumnStat(a.dataType, leftRows)))
          case RightOuter =>
            fromRight.map(a => (a, inputAttrStats(a))) ++
              fromLeft.map(a => (a, nullColumnStat(a.dataType, rightRows)))
          case FullOuter =>
            // Disjoint sides: each side's columns gain one null per row of the other side.
            fromLeft.map { a =>
              val oriColStat = inputAttrStats(a)
              (a, oriColStat.copy(nullCount = oriColStat.nullCount + rightRows))
            } ++ fromRight.map { a =>
              val oriColStat = inputAttrStats(a)
              (a, oriColStat.copy(nullCount = oriColStat.nullCount + leftRows))
            }
          case _ => Nil
        }
      } else if (selectivity == 1) {
        // Cartesian product, just propagate the original column stats
        inputAttrStats.toSeq
      } else {
        val joinKeyStats = getIntersectedStats(joinKeyPairs)
        join.joinType match {
          // For outer joins, don't update column stats from the outer side.
          case LeftOuter =>
            fromLeft.map(a => (a, inputAttrStats(a))) ++
              updateAttrStats(outputRows, fromRight, inputAttrStats, joinKeyStats)
          case RightOuter =>
            updateAttrStats(outputRows, fromLeft, inputAttrStats, joinKeyStats) ++
              fromRight.map(a => (a, inputAttrStats(a)))
          case FullOuter =>
            inputAttrStats.toSeq
          case _ =>
            // Update column stats from both sides for inner or cross join.
            updateAttrStats(outputRows, attributesWithStat, inputAttrStats, joinKeyStats)
        }
      }
      val outputAttrStats = AttributeMap(outputStats)
      Some(Statistics(
        sizeInBytes = getOutputSize(join.output, outputRows, outputAttrStats),
        rowCount = Some(outputRows),
        attributeStats = outputAttrStats))
    case _ =>
      // When there is no equi-join condition, we do estimation like cartesian product.
      val inputAttrStats = AttributeMap(
        leftStats.attributeStats.toSeq ++ rightStats.attributeStats.toSeq)
      // Propagate the original column stats
      val outputRows = leftStats.rowCount.get * rightStats.rowCount.get
      Some(Statistics(
        sizeInBytes = getOutputSize(join.output, outputRows, inputAttrStats),
        rowCount = Some(outputRows),
        attributeStats = inputAttrStats))
  }
  // scalastyle:off
  /**
   * The number of rows of A inner join B on A.k1 = B.k1 is estimated by this basic formula:
   * T(A IJ B) = T(A) * T(B) / max(V(A.k1), V(B.k1)), where V is the number of distinct values of
   * that column. The underlying assumption for this formula is: each value of the smaller domain
   * is included in the larger domain.
   * Generally, inner join with multiple join keys can also be estimated based on the above
   * formula:
   * T(A IJ B) = T(A) * T(B) / (max(V(A.k1), V(B.k1)) * max(V(A.k2), V(B.k2)) * ... * max(V(A.kn), V(B.kn)))
   * However, the denominator can become very large and excessively reduce the result, so we use a
   * conservative strategy to take only the largest max(V(A.ki), V(B.ki)) as the denominator.
   *
   * Returns 1 (cartesian) when no key pair has column stats, and 0 when any
   * key pair's value ranges are disjoint (the join produces no matches).
   */
  // scalastyle:on
  def joinSelectivity(joinKeyPairs: Seq[(AttributeReference, AttributeReference)]): BigDecimal = {
    // -1 is a sentinel meaning "no usable key pair found yet".
    var ndvDenom: BigInt = -1
    var i = 0
    while(i < joinKeyPairs.length && ndvDenom != 0) {
      val (leftKey, rightKey) = joinKeyPairs(i)
      // Check if the two sides are disjoint
      val leftKeyStats = leftStats.attributeStats(leftKey)
      val rightKeyStats = rightStats.attributeStats(rightKey)
      val lRange = Range(leftKeyStats.min, leftKeyStats.max, leftKey.dataType)
      val rRange = Range(rightKeyStats.min, rightKeyStats.max, rightKey.dataType)
      if (Range.isIntersected(lRange, rRange)) {
        // Get the largest ndv among pairs of join keys
        val maxNdv = leftKeyStats.distinctCount.max(rightKeyStats.distinctCount)
        if (maxNdv > ndvDenom) ndvDenom = maxNdv
      } else {
        // Set ndvDenom to zero to indicate that this join should have no output
        ndvDenom = 0
      }
      i += 1
    }
    if (ndvDenom < 0) {
      // We can't find any join key pairs with column stats, estimate it as cartesian join.
      1
    } else if (ndvDenom == 0) {
      // One of the join key pairs is disjoint, thus the two sides of join is disjoint.
      0
    } else {
      1 / BigDecimal(ndvDenom)
    }
  }
  /**
   * Propagate or update column stats for output attributes.
   *
   * @param outputRows   estimated number of rows produced by the join
   * @param attributes   output attributes whose stats should be produced
   * @param oldAttrStats column stats of the join children
   * @param joinKeyStats intersected stats for join-key attributes (take priority)
   */
  private def updateAttrStats(
      outputRows: BigInt,
      attributes: Seq[Attribute],
      oldAttrStats: AttributeMap[ColumnStat],
      joinKeyStats: AttributeMap[ColumnStat]): Seq[(Attribute, ColumnStat)] = {
    val outputAttrStats = new ArrayBuffer[(Attribute, ColumnStat)]()
    val leftRows = leftStats.rowCount.get
    val rightRows = rightStats.rowCount.get
    attributes.foreach { a =>
      // check if this attribute is a join key
      if (joinKeyStats.contains(a)) {
        outputAttrStats += a -> joinKeyStats(a)
      } else {
        // Non-key attribute: scale its distinct count by the row-count change
        // of whichever side it came from.
        val oldColStat = oldAttrStats(a)
        val oldNdv = oldColStat.distinctCount
        val newNdv = if (join.left.outputSet.contains(a)) {
          updateNdv(oldNumRows = leftRows, newNumRows = outputRows, oldNdv = oldNdv)
        } else {
          updateNdv(oldNumRows = rightRows, newNumRows = outputRows, oldNdv = oldNdv)
        }
        val newColStat = oldColStat.copy(distinctCount = newNdv)
        // TODO: support nullCount updates for specific outer joins
        outputAttrStats += a -> newColStat
      }
    }
    outputAttrStats
  }
  /** Get intersected column stats for join keys. */
  private def getIntersectedStats(joinKeyPairs: Seq[(AttributeReference, AttributeReference)])
    : AttributeMap[ColumnStat] = {
    val intersectedStats = new mutable.HashMap[Attribute, ColumnStat]()
    joinKeyPairs.foreach { case (leftKey, rightKey) =>
      val leftKeyStats = leftStats.attributeStats(leftKey)
      val rightKeyStats = rightStats.attributeStats(rightKey)
      val lRange = Range(leftKeyStats.min, leftKeyStats.max, leftKey.dataType)
      val rRange = Range(rightKeyStats.min, rightKeyStats.max, rightKey.dataType)
      // When we reach here, join selectivity is not zero, so each pair of join keys should be
      // intersected.
      assert(Range.isIntersected(lRange, rRange))
      // Update intersected column stats
      assert(leftKey.dataType.sameType(rightKey.dataType))
      val newNdv = leftKeyStats.distinctCount.min(rightKeyStats.distinctCount)
      val (newMin, newMax) = Range.intersect(lRange, rRange, leftKey.dataType)
      val newMaxLen = math.min(leftKeyStats.maxLen, rightKeyStats.maxLen)
      val newAvgLen = (leftKeyStats.avgLen + rightKeyStats.avgLen) / 2
      val newStats = ColumnStat(newNdv, newMin, newMax, 0, newAvgLen, newMaxLen)
      // Both sides of each key pair share the same intersected stats.
      intersectedStats.put(leftKey, newStats)
      intersectedStats.put(rightKey, newStats)
    }
    AttributeMap(intersectedStats.toSeq)
  }
  // Keep only key pairs where both sides are plain attribute references with
  // available column stats; other equi-join key shapes are skipped.
  private def extractJoinKeysWithColStats(
      leftKeys: Seq[Expression],
      rightKeys: Seq[Expression]): Seq[(AttributeReference, AttributeReference)] = {
    leftKeys.zip(rightKeys).collect {
      // Currently we don't deal with equal joins like key1 = key2 + 5.
      // Note: join keys from EqualNullSafe also fall into this case (Coalesce), consider to
      // support it in the future by using `nullCount` in column stats.
      case (lk: AttributeReference, rk: AttributeReference)
        if columnStatsExist((leftStats, lk), (rightStats, rk)) => (lk, rk)
    }
  }
}
case class LeftSemiAntiEstimation(conf: SQLConf, join: Join) {
  /**
   * Estimates statistics for LeftSemi/LeftAnti joins by propagating the left
   * child's statistics unchanged.
   *
   * TODO: It's error-prone to estimate cardinalities for LeftSemi and LeftAnti based on basic
   * column stats. Now we just propagate the statistics from left side. We should do more
   * accurate estimation when advanced stats (e.g. histograms) are available.
   */
  def doEstimate(): Option[Statistics] = {
    if (!rowCountsExist(conf, join.left)) {
      None
    } else {
      val leftChildStats = join.left.stats(conf)
      // Propagate the original column stats for cartesian product
      val estimatedRows = leftChildStats.rowCount.get
      Some(Statistics(
        sizeInBytes = getOutputSize(join.output, estimatedRows, leftChildStats.attributeStats),
        rowCount = Some(estimatedRows),
        attributeStats = leftChildStats.attributeStats))
    }
  }
}
| wangyixiaohuihui/spark2-annotation | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statsEstimation/JoinEstimation.scala | Scala | apache-2.0 | 13,168 |
package ore
import shapeless.tag._
package object permission {
  // A single shared Tagger instance: tagging a Long never allocates.
  private object SharedTagger extends Tagger[Nothing]
  private def tagS[U]: Tagger[U] = SharedTagger.asInstanceOf[Tagger[U]]
  // A permission set encoded as a bit mask over a shapeless-tagged Long
  // (zero runtime overhead compared to a raw Long).
  type Permission = Long @@ Permission.type
  //noinspection TypeAnnotation
  object Permission {
    private[permission] def apply(long: Long): Long @@ Permission.type = tagS[Permission.type](long)
    /**
     * Create a permission that has all the permissions passed in.
     */
    // NOTE: `None` below is Permission.None (the empty mask), not Option's None.
    def apply(permissions: Permission*): Permission = permissions.fold(None)(_ ++ _)
    /**
     * Create a permission from an int.
     */
    def fromLong(long: Long): Long @@ Permission.type = apply(long)
    // Parse a binary-string representation (as produced by toBinString);
    // returns scala.None when the string is not valid binary.
    def fromBinString(str: String): Option[Long @@ Permission.type] =
      scodec.bits.BitVector.fromBin(str).map(_.toLong(signed = false)).map(fromLong)
    // The empty permission set and the set of all permissions.
    val None = Permission(0)
    val All = Permission(0XFFFFFFFFFFFFFFFFL)
    // Each base permission occupies one distinct bit of the Long mask.
    val ViewPublicInfo = Permission(1L << 0)
    val EditOwnUserSettings = Permission(1L << 1)
    val EditApiKeys = Permission(1L << 2)
    // Generic "subject" permissions, aliased below for projects and organizations.
    val EditSubjectSettings = Permission(1L << 4)
    val ManageSubjectMembers = Permission(1L << 5)
    val IsSubjectOwner = Permission(1L << 6)
    val IsSubjectMember = Permission(1L << 7)
    val CreateProject = Permission(1L << 8)
    val EditPage = Permission(1L << 9)
    val DeleteProject = Permission(1L << 10)
    // Project permissions reuse the subject bits rather than new ones.
    val EditProjectSettings = EditSubjectSettings
    val ManageProjectMembers = ManageSubjectMembers
    val IsProjectMember = IsSubjectMember
    // Owner is a composite: owner bit plus settings, member management and membership.
    val IsProjectOwner = IsSubjectOwner ++ EditProjectSettings ++ ManageProjectMembers ++ IsProjectMember
    val CreateVersion = Permission(1L << 12)
    val EditVersion = Permission(1L << 13)
    val DeleteVersion = Permission(1L << 14)
    val EditChannel = Permission(1L << 15) //To become edit tags later
    val CreateOrganization = Permission(1L << 20)
    val PostAsOrganization = Permission(1L << 21)
    // Organization permissions alias the same subject/project bits.
    val EditOrganizationSettings = EditSubjectSettings
    val ManageOrganizationMembers = ManageSubjectMembers
    val IsOrganizationMember = IsProjectMember
    val IsOrganizationOwner = IsProjectOwner
    val ModNotesAndFlags = Permission(1L << 24)
    val SeeHidden = Permission(1L << 25)
    val IsStaff = Permission(1L << 26)
    val Reviewer = Permission(1L << 27)
    val ViewHealth = Permission(1L << 32)
    val ViewIp = Permission(1L << 33)
    val ViewStats = Permission(1L << 34)
    val ViewLogs = Permission(1L << 35)
    val ManualValueChanges = Permission(1L << 40)
    val HardDeleteProject = Permission(1L << 41)
    val HardDeleteVersion = Permission(1L << 42)
    val EditAllUserSettings = Permission(1L << 43)
  }
  // Bitwise operations on permission masks; a value class, so no boxing in most call sites.
  implicit class PermissionSyntax(private val permission: Permission) extends AnyVal {
    /**
     * Add a permission to this permission.
     * @param other The other permission.
     */
    def add(other: Permission): Permission = Permission(permission | other)
    /**
     * Add a permission to this permission.
     * @param other The other permission.
     */
    def ++(other: Permission): Permission = add(other)
    /**
     * Remove a permission from this permission.
     * @param other The permission to remove.
     */
    def remove(other: Permission): Permission = Permission(permission & ~other)
    /**
     * Remove a permission from this permission.
     * @param other The permission to remove.
     */
    def --(other: Permission): Permission = remove(other)
    /**
     * Toggle a permission in this permission.
     * @param other The permission to toggle.
     */
    def toggle(other: Permission): Permission = Permission(permission ^ other)
    /**
     * Check if this permission has a permission (i.e. all bits of `other` are set).
     * @param other The permission to check against.
     */
    def has(other: Permission): Boolean = (permission & other) == other
    /**
     * Check if this permission grants any permissions.
     */
    def isNone: Boolean = permission == 0
    // Binary-string form, parseable back via Permission.fromBinString.
    def toBinString: String = scodec.bits.BitVector.fromLong(permission).toBin
    // Expand this mask into the named permissions it contains.
    def toNamedSeq: Seq[NamedPermission] = NamedPermission.values.filter(perm => has(perm.permission))
  }
}
| SpongePowered/Ore | models/src/main/scala/ore/permission/package.scala | Scala | mit | 4,301 |
package cn.edu.neu.chiewen.roadDemo.moving
import akka.actor.ActorSystem
import cn.edu.neu.chiewen.roadDemo.road.Node
import cn.edu.neu.chiewen.roadDemo.ui.{RepaintEvent, RoadDemoData, TrajectoryFinishBuildingEvent}
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.swing.Publisher
import scala.swing.event.Event
/** Published each time the moving object advances one step along its trajectory. */
case object ObjectMovesEvent extends Event
/** Published when the k-nearest-neighbour result set changes. */
case object KnnRefreshEvent extends Event
/**
 * Drives a [[MovingObject]] along a user-built trajectory, refreshing kNN /
 * Voronoi-neighbour data and publishing UI events as it advances.
 *
 * Created by Chiewen on 2015/9/29.
 */
object MovingController extends Publisher {
  var stepDistance: Double = 1
  var stepTime: Int = 50
  val stepTimeMax: Int = 200
  var obj: MovingObject = null
  var knn = List.empty[Node]
  var voronoiNeighbor = List.empty[Node]
  var dataKnn = Array[Array[String]]()
  var dataVoronoi = Array[Array[String]]()
  var trajectoryNodes: List[Node] = List.empty[Node]

  // Shared scheduling infrastructure. The previous implementation created a
  // brand-new ActorSystem on every move() call and never terminated it,
  // leaking its thread pools on repeated runs; a single lazily-created
  // system fixes that while keeping behaviour otherwise identical.
  private lazy val actorSystem = ActorSystem()

  /** Discard any partially built trajectory. */
  def resetTrajectory() {
    trajectoryNodes = List.empty[Node]
  }

  /** Turn the collected nodes into a MovingObject and notify listeners. */
  def buildTrajectory() {
    obj = new MovingObject(trajectoryNodes.reverse: _*)
    trajectoryNodes = List.empty[Node]
    publish(TrajectoryFinishBuildingEvent)
  }

  /**
   * Append a node to the trajectory under construction. The node is accepted
   * only when it is the first node, or a road connects it to the last one.
   */
  def addTrajectoryNode(node: Option[Node]) {
    node match {
      case Some(n) =>
        if (trajectoryNodes.isEmpty || trajectoryNodes.head.roads.exists(_.terminals.contains(n)))
          trajectoryNodes ::= n
      case None =>
    }
  }

  /**
   * Advance the current object step by step on a timer until it finishes,
   * recomputing the kNN tables whenever the result set changes and publishing
   * repaint/refresh events for the UI.
   */
  def move() {
    val scheduler = actorSystem.scheduler
    implicit val executor = actorSystem.dispatcher
    def _move(ob: MovingObject): Unit = {
      scheduler.scheduleOnce(stepTime milliseconds) {
        ob.advance(stepDistance)
        publish(ObjectMovesEvent)
        if (!ob.isFinished) {
          val newKnn = ob.kNN(RoadDemoData.k)
          // Only rebuild the display tables when the kNN set actually changed.
          if (newKnn != knn) {
            knn = newKnn
            voronoiNeighbor = MovingObject.neighbors(knn)
            dataKnn = Array[Array[String]]()
            dataVoronoi = Array[Array[String]]()
            var i = knn.size + 1
            for (n <- knn.reverse) { i -= 1; dataKnn +:= Array(i.toString, n.id.toString, n.x.toString, n.y.toString)}
            i = voronoiNeighbor.size + 1
            for (n <- voronoiNeighbor) { i -= 1; dataVoronoi +:= Array(i.toString, n.id.toString, n.x.toString, n.y.toString)}
            publish(KnnRefreshEvent)
          }
        }
        publish(RepaintEvent)
        // Schedule the next step until the trajectory is exhausted.
        if (!ob.isFinished) _move(ob)
      }
    }
    if (obj != null && !obj.isFinished)
      _move(obj)
  }
}
| chiewen/CkNN | CkNN/src/main/scala/cn/edu/neu/chiewen/roadDemo/moving/MovingController.scala | Scala | gpl-2.0 | 2,452 |
package im.actor.bots
import derive.key
import upickle.Js
import upickle.Js.Obj
import upickle.default._
import scala.annotation.meta.beanGetter
import scala.collection.JavaConversions._
import scala.compat.java8.OptionConverters._
object BotMessages {
sealed trait BotMessage
sealed trait BotMessageIn extends BotMessage
sealed trait BotMessageOut extends BotMessage
object Services {
val KeyValue = "keyvalue"
val Messaging = "messaging"
val Bots = "bots"
val WebHooks = "webhooks"
val Users = "users"
val Groups = "groups"
val Stickers = "stickers"
val Files = "files"
}
final case class FileLocation(
@beanGetter fileId: Long,
@beanGetter accessHash: Long
)
final case class AvatarImage(
@beanGetter fileLocation: FileLocation,
@beanGetter width: Int,
@beanGetter height: Int,
@beanGetter fileSize: Int
)
final case class ImageLocation(
@beanGetter fileLocation: FileLocation,
@beanGetter width: Int,
@beanGetter height: Int,
@beanGetter fileSize: Int
)
final case class Avatar(
@beanGetter smallImage: Option[AvatarImage],
@beanGetter largeImage: Option[AvatarImage],
@beanGetter fullImage: Option[AvatarImage]
)
final case class BotCommand(
@beanGetter slashCommand: String,
@beanGetter description: String,
locKey: Option[String]
) { def getLocKey = locKey.asJava }
final case class ContactInfo(
phones: Seq[Long],
emails: Seq[String]
)
sealed trait ContactRecord
@key("Email")
final case class EmailContactRecord(email: String) extends ContactRecord
@key("Phone")
final case class PhoneContactRecord(phone: Long) extends ContactRecord
final case class User(
@beanGetter id: Int,
@beanGetter accessHash: Long,
@beanGetter name: String,
@beanGetter sex: Option[Int],
about: Option[String],
avatar: Option[Avatar],
username: Option[String],
isBot: Option[Boolean],
contactRecords: Seq[ContactRecord],
timeZone: Option[String],
preferredLanguages: Seq[String],
botCommands: Seq[BotCommand]
) {
def isMale = sex.contains(1)
def isFemale = sex.contains(2)
def isABot = isBot.contains(true)
def getSex = sex.asJava
def getAbout = about.asJava
def getAvatar = avatar.asJava
def getUsername = username.asJava
def getIsBot = isBot.asJava
def getContactRecords = seqAsJavaList(contactRecords)
def getEmailContactRecords = seqAsJavaList(contactRecords collect {
case e: EmailContactRecord ⇒ e
})
def getPhoneContactRecords = seqAsJavaList(contactRecords collect {
case p: PhoneContactRecord ⇒ p
})
def getTimeZone = timeZone.asJava
def getPreferredLanguages = seqAsJavaList(preferredLanguages)
def getBotCommands = seqAsJavaList(botCommands)
}
final case class GroupMember(
@beanGetter userId: Int,
@beanGetter inviterUserId: Int,
@beanGetter memberSince: Long,
isAdmin: Option[Boolean]
) {
def getIsAdmin = isAdmin.asJava
}
final case class Group(
@beanGetter id: Int,
@beanGetter accessHash: Long,
@beanGetter title: String,
about: Option[String],
avatar: Option[Avatar],
@beanGetter isMember: Boolean,
@beanGetter creatorUserId: Int,
members: Seq[GroupMember]
) {
def getAbout = about.asJava
def getAvatar = about.asJava
def getMembers = seqAsJavaList(members)
}
final object OutPeer {
def privat(id: Int, accessHash: Long) = UserOutPeer(id, accessHash)
def user(id: Int, accessHash: Long) = privat(id, accessHash)
def group(id: Int, accessHash: Long) = GroupOutPeer(id, accessHash)
}
sealed trait Peer {
val id: Int
}
@key("User")
final case class UserPeer(@beanGetter id: Int) extends Peer
@key("Group")
final case class GroupPeer(@beanGetter id: Int) extends Peer
sealed trait OutPeer extends Peer {
val id: Int
val accessHash: Long
val isPrivate: Boolean
val isGroup: Boolean
}
@key("Group")
final case class GroupOutPeer(
@beanGetter id: Int,
@beanGetter accessHash: Long
) extends OutPeer {
override val isPrivate = false
override val isGroup = true
}
@key("User")
final case class UserOutPeer(
@beanGetter id: Int,
@beanGetter accessHash: Long
) extends OutPeer {
override val isPrivate = true
override val isGroup = false
}
sealed trait RequestBody {
type Response <: ResponseBody
val service: String
def readResponse(obj: Js.Obj): Response
}
trait ResponseBody
@key("Request")
final case class BotRequest(
id: Long,
service: String,
body: RequestBody
) extends BotMessageIn
@key("Response")
final case class BotResponse(
id: Long,
body: BotResponseBody
) extends BotMessageOut
sealed trait BotResponseBody
sealed trait BotUpdate extends BotMessageOut {
val seq: Int
val body: UpdateBody
}
sealed trait UpdateBody
@key("SeqUpdate")
final case class BotSeqUpdate(
seq: Int,
body: UpdateBody
) extends BotUpdate
@key("FatSeqUpdate")
final case class BotFatSeqUpdate(
seq: Int,
body: UpdateBody,
users: Map[Int, User],
groups: Map[Int, Group]
) extends BotUpdate
@key("Error")
case class BotError(code: Int, tag: String, data: Js.Obj, retryIn: Option[Int]) extends RuntimeException(
s"code: $code, tag: $tag, data: ${write(data)}, retryIn: $retryIn"
) with BotResponseBody
object BotError {
def apply(code: Int, tag: String): BotError = BotError(code, tag, Js.Obj(), None)
def apply(code: Int, tag: String, retryIn: Option[Int]): BotError = BotError(code, tag, Js.Obj(), retryIn)
}
/** Successful reply carrying the raw JSON result object. */
@key("Success")
case class BotSuccess(obj: Js.Obj) extends BotResponseBody

// Identity picklers: a Js.Obj is written/read as itself, unchanged.
implicit val objWriter = Writer[Js.Obj] {
  case obj ⇒ obj
}
implicit val objReader = Reader[Js.Obj] {
  case obj: Js.Obj ⇒ obj
}

// BotSuccess is encoded as its bare payload object — no wrapper field.
implicit val botSuccessWriter = upickle.default.Writer[BotSuccess] {
  case BotSuccess(obj) ⇒ obj
}
implicit val botSuccessReader = upickle.default.Reader[BotSuccess] {
  case obj: Js.Obj ⇒ BotSuccess(obj)
}

// BotError is flattened into a JSON object; an absent retryIn becomes JSON null.
implicit val botErrorWriter = upickle.default.Writer[BotError] {
  case BotError(code, tag, data, retryInOpt) ⇒
    Js.Obj(
      "code" → Js.Num(code.toDouble),
      "tag" → Js.Str(tag),
      "data" → data,
      "retryIn" → retryInOpt.map(n ⇒ Js.Num(n.toDouble)).getOrElse(Js.Null)
    )
}

/** Single-value response wrapper; `@beanGetter` adds a Java-style accessor. */
case class Container[T](@beanGetter value: T) extends ResponseBody

/** Sequence-valued response wrapper with a Java-friendly accessor. */
final case class ContainerList[T](value: Seq[T]) extends ResponseBody {
  def getValue = seqAsJavaList(value)
}

/** Empty response body, encoded on the wire as the empty JSON object. */
trait Void extends ResponseBody
case object Void extends Void
implicit val voidReader = upickle.default.Reader[Void] {
  case Js.Obj() ⇒ Void
}
implicit val voidWriter = upickle.default.Writer[Void] {
  case _ ⇒ Js.Obj()
}
// ---- Messaging service requests ----

/** Sends `message` to `peer`. `randomId` presumably deduplicates resends — confirm
  * against the server protocol. Replies with the send timestamp. */
@key("SendMessage")
final case class SendMessage(
  @beanGetter peer: OutPeer,
  @beanGetter randomId: Long,
  @beanGetter message: MessageBody
) extends RequestBody {
  override type Response = MessageSent
  override val service = Services.Messaging
  override def readResponse(obj: Js.Obj) = readJs[MessageSent](obj)
}

/** Replaces the body of the message identified by (`peer`, `randomId`) with `updatedMessage`. */
@key("UpdateMessageContent")
final case class UpdateMessageContent(
  @beanGetter peer: OutPeer,
  @beanGetter randomId: Long,
  @beanGetter updatedMessage: MessageBody
) extends RequestBody {
  override type Response = MessageContentUpdated
  override val service = Services.Messaging
  override def readResponse(obj: Js.Obj) = readJs[MessageContentUpdated](obj)
}

// ---- Key-value service requests ----

/** Stores `value` under (`keyspace`, `key`); empty response. */
@key("SetValue")
final case class SetValue(
  @beanGetter keyspace: String,
  @beanGetter key: String,
  @beanGetter value: String
) extends RequestBody {
  override type Response = Void
  override val service = Services.KeyValue
  override def readResponse(obj: Js.Obj) = readJs[Void](obj)
}

/** Fetches the value under (`keyspace`, `key`); `None` inside the container when absent. */
@key("GetValue")
final case class GetValue(
  @beanGetter keyspace: String,
  @beanGetter key: String
) extends RequestBody {
  override type Response = Container[Option[String]]
  override val service = Services.KeyValue
  override def readResponse(obj: Js.Obj) = readJs[Container[Option[String]]](obj)
}

/** Removes the entry under (`keyspace`, `key`); empty response. */
@key("DeleteValue")
final case class DeleteValue(
  @beanGetter keyspace: String,
  @beanGetter key: String
) extends RequestBody {
  override type Response = Void
  override val service = Services.KeyValue
  override def readResponse(obj: Js.Obj) = readJs[Void](obj)
}

/** Lists all keys stored in `keyspace`. */
@key("GetKeys")
final case class GetKeys(@beanGetter keyspace: String) extends RequestBody {
  override type Response = ContainerList[String]
  override val service = Services.KeyValue
  override def readResponse(obj: Js.Obj) = readJs[ContainerList[String]](obj)
}
// ---- Bot / webhook / user service requests ----

//username is nickname
/** Registers a new bot; replies with its token and user id. */
@key("CreateBot")
final case class CreateBot(
  @beanGetter username: String,
  @beanGetter name: String
) extends RequestBody {
  override type Response = BotCreated
  override val service = Services.Bots
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

@key("BotCreated")
final case class BotCreated(
  @beanGetter token: String,
  @beanGetter userId: Int
) extends ResponseBody

@key("BotToken")
final case class BotToken(
  @beanGetter token: String
) extends ResponseBody

/** Looks up the auth token of the bot with user id `botUserId`. */
@key("GetBotToken")
final case class GetBotToken(
  @beanGetter botUserId: Int
) extends RequestBody {
  override type Response = BotToken
  override val service = Services.Bots
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

/** Registers a named webhook; the reply container carries a single string. */
@key("RegisterHook")
final case class RegisterHook(@beanGetter name: String) extends RequestBody {
  override type Response = Container[String]
  override val service = Services.WebHooks
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

// Parameterless request modeled as a sealed trait plus a case object so the
// singleton still serializes under the "GetHooks" key.
@key("GetHooks")
sealed trait GetHooks extends RequestBody {
  override type Response = ContainerList[String]
  override val service = Services.WebHooks
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}
@key("GetHooks")
case object GetHooks extends GetHooks

/** Sets the avatar of user `userId` to the uploaded file at `fileLocation`. */
@key("ChangeUserAvatar")
final case class ChangeUserAvatar(
  @beanGetter userId: Int,
  @beanGetter fileLocation: FileLocation
) extends RequestBody {
  override type Response = Void
  override val service = Services.Users
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

@key("ChangeUserName")
final case class ChangeUserName(
  @beanGetter userId: Int,
  @beanGetter name: String
) extends RequestBody {
  override type Response = Void
  override val service = Services.Users
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

/** Sets or clears (`None`) the user's nickname. */
@key("ChangeUserNickname")
final case class ChangeUserNickname(
  @beanGetter userId: Int,
  @beanGetter nickname: Option[String]
) extends RequestBody {
  override type Response = Void
  override val service = Services.Users
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

/** Sets or clears (`None`) the user's "about" text. */
@key("ChangeUserAbout")
final case class ChangeUserAbout(
  @beanGetter userId: Int,
  about: Option[String]
) extends RequestBody {
  override type Response = Void
  override val service = Services.Users
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
  // Java-friendly accessor: Option -> java.util.Optional.
  def getAbout = about.asJava
}

/** Registers a slash command on user `userId`. */
@key("AddSlashCommand")
final case class AddSlashCommand(
  @beanGetter userId: Int,
  @beanGetter command: BotCommand
) extends RequestBody {
  override type Response = Void
  override val service = Services.Users
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

/** Removes the slash command named `slashCommand` from user `userId`. */
@key("RemoveSlashCommand")
final case class RemoveSlashCommand(
  @beanGetter userId: Int,
  @beanGetter slashCommand: String
) extends RequestBody {
  override type Response = Void
  override val service = Services.Users
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}
/** Attaches a string extension value under `key` to user `userId`; empty response. */
@key("AddUserExtString")
final case class AddUserExtString(
  @beanGetter userId: Int,
  @beanGetter key: String,
  @beanGetter value: String
) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Users
  // `Js.Obj` spelled out for consistency with every other RequestBody implementation.
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}
/** Attaches a boolean extension value under `key` to user `userId`; empty response. */
@key("AddUserExtBool")
final case class AddUserExtBool(
  @beanGetter userId: Int,
  @beanGetter key: String,
  @beanGetter value: Boolean
) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Users
  // `Js.Obj` spelled out for consistency with every other RequestBody implementation.
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}
/** Removes the extension value under `key` from user `userId`; empty response. */
@key("RemoveUserExt")
final case class RemoveUserExt(
  @beanGetter userId: Int,
  @beanGetter key: String
) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Users
  // `Js.Obj` spelled out for consistency with every other RequestBody implementation.
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}
/** Asks whether user `userId` has admin rights. */
@key("IsAdmin")
final case class IsAdmin(@beanGetter userId: Int) extends RequestBody {
  override type Response = ResponseIsAdmin
  override val service = Services.Users
  override def readResponse(obj: Js.Obj) = readJs[Response](obj)
}

final case class ResponseIsAdmin(isAdmin: Boolean) extends ResponseBody {
  // Boxed accessor for Java callers.
  def getIsAdmin: java.lang.Boolean = isAdmin.booleanValue()
}

/** Full-text user search by `query`. */
@key("FindUser")
final case class FindUser(
  @beanGetter query: String
) extends RequestBody {
  override type Response = FoundUsers
  override val service: String = Services.Users
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

final case class FoundUsers(users: Seq[User]) extends ResponseBody {
  def getUsers = seqAsJavaList(users)
}

/** Reply to [[SendMessage]]: the server-assigned send date. */
final case class MessageSent(@beanGetter date: Long) extends ResponseBody

// Singleton response encoded as the empty JSON object under the
// "MessageContentUpdated" key; trait + case object keeps the @key on both.
@key("MessageContentUpdated")
sealed trait MessageContentUpdated extends ResponseBody
@key("MessageContentUpdated")
case object MessageContentUpdated extends MessageContentUpdated
implicit val messageContentUpdatedReader = upickle.default.Reader[MessageContentUpdated] {
  case Js.Obj() ⇒ MessageContentUpdated
}
implicit val messageContentUpdatedWriter = upickle.default.Writer[MessageContentUpdated] {
  case _ ⇒ Js.Obj()
}
// ---- Group service requests ----

/** Creates a group titled `title`; replies with the new group's peer. */
@key("CreateGroup")
final case class CreateGroup(
  @beanGetter title: String
) extends RequestBody {
  override type Response = ResponseCreateGroup
  override val service: String = Services.Groups
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

/** Creates a group owned by `user`, pre-populated with `members`. */
@key("CreateGroupWithOwner")
final case class CreateGroupWithOwner(
  @beanGetter title: String,
  @beanGetter user: UserPeer,
  members: Seq[UserPeer]
) extends RequestBody {
  override type Response = ResponseCreateGroup
  override val service: String = Services.Groups
  // Java-friendly auxiliary constructor taking a java.util.List.
  def this(title: String, user: UserPeer, members: java.util.List[UserPeer]) =
    this(title, user, members.toIndexedSeq)
  def getMembers = seqAsJavaList(members)
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

final case class ResponseCreateGroup(@beanGetter peer: GroupOutPeer) extends ResponseBody

/** Sets or clears (`None`) the group's short name. */
@key("UpdateGroupShortName")
final case class UpdateGroupShortName(
  @beanGetter groupId: Int,
  shortName: Option[String]
) extends RequestBody {
  // Java-friendly constructor; a null shortName clears it (Option(null) == None).
  def this(groupId: Int, shortName: String) = this(groupId, Option(shortName))
  def getShortName = shortName.asJava
  override type Response = Void
  override val service: String = Services.Groups
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

/** Attaches a string extension value under `key` to the group. */
@key("AddGroupExtString")
final case class AddGroupExtString(
  @beanGetter groupId: Int,
  @beanGetter key: String,
  @beanGetter value: String
) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Groups
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

/** Attaches a boolean extension value under `key` to the group. */
@key("AddGroupExtBool")
final case class AddGroupExtBool(
  @beanGetter groupId: Int,
  @beanGetter key: String,
  @beanGetter value: Boolean
) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Groups
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

/** Removes the extension value under `key` from the group. */
@key("RemoveGroupExt")
final case class RemoveGroupExt(
  @beanGetter groupId: Int,
  @beanGetter key: String
) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Groups
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}

/** Invites `userPeer` into the group `groupPeer`. */
@key("InviteUser")
final case class InviteUser(@beanGetter groupPeer: GroupOutPeer, @beanGetter userPeer: UserOutPeer) extends RequestBody {
  override type Response = Void
  override val service: String = Services.Groups
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
}
// ---- Sticker and file service requests, plus update payloads ----

/** Creates a sticker pack for `creatorUserId`; the container carries the new pack's id. */
@key("CreateStickerPack")
final case class CreateStickerPack(@beanGetter creatorUserId: Int) extends RequestBody {
  override type Response = Container[String]
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
}

/** Lists the pack ids owned by `ownerUserId`. */
@key("ShowStickerPacks")
final case class ShowStickerPacks(@beanGetter ownerUserId: Int) extends RequestBody {
  override type Response = StickerPackIds
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
}

final case class StickerPackIds(ids: Seq[String]) extends ResponseBody {
  def getIds = seqAsJavaList(ids)
}

/** Lists the sticker ids inside pack `packId` of `ownerUserId`. */
@key("ShowStickers")
final case class ShowStickers(
  @beanGetter ownerUserId: Int,
  @beanGetter packId: Int
) extends RequestBody {
  override type Response = StickerIds
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
}

final case class StickerIds(ids: Seq[String]) extends ResponseBody {
  def getIds = seqAsJavaList(ids)
}

/** Downloads the file at `fileLocation`; replies with its raw bytes. */
@key("DownloadFile")
final case class DownloadFile(@beanGetter fileLocation: FileLocation) extends RequestBody {
  override type Response = ResponseDownloadFile
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Files
}

final case class ResponseDownloadFile(fileBytes: Array[Byte]) extends ResponseBody

/** Uploads `bytes`; replies with the stored file's location. */
@key("UploadFile")
final case class UploadFile(@beanGetter bytes: Array[Byte]) extends RequestBody {
  override type Response = ResponseUploadFile
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Files
}

final case class ResponseUploadFile(@beanGetter location: FileLocation) extends ResponseBody

/** Adds a sticker (small/medium/large renditions with dimensions) to a pack. */
@key("AddSticker")
final case class AddSticker(
  @beanGetter ownerUserId: Int,
  @beanGetter packId: Int,
  emoji: Option[String],
  @beanGetter small: Array[Byte],
  @beanGetter smallW: Int,
  @beanGetter smallH: Int,
  @beanGetter medium: Array[Byte],
  @beanGetter mediumW: Int,
  @beanGetter mediumH: Int,
  @beanGetter large: Array[Byte],
  @beanGetter largeW: Int,
  @beanGetter largeH: Int
) extends RequestBody {
  override type Response = Void
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
  def getEmoji = emoji.asJava
}

/** Deletes sticker `stickerId` from pack `packId`. */
@key("DeleteSticker")
final case class DeleteSticker(
  @beanGetter ownerUserId: Int,
  @beanGetter packId: Int,
  @beanGetter stickerId: Int
) extends RequestBody {
  override type Response = Void
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
}

/** Marks pack `packId` as a default pack for `userId`. */
@key("MakeStickerPackDefault")
final case class MakeStickerPackDefault(
  @beanGetter userId: Int,
  @beanGetter packId: Int
) extends RequestBody {
  override type Response = Void
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
}

/** Reverts [[MakeStickerPackDefault]] for `userId`/`packId`. */
@key("UnmakeStickerPackDefault")
final case class UnmakeStickerPackDefault(
  @beanGetter userId: Int,
  @beanGetter packId: Int
) extends RequestBody {
  override type Response = Void
  override def readResponse(obj: Js.Obj): Response = readJs[Response](obj)
  override val service: String = Services.Stickers
}

/** Update payload: a message delivered to `peer` from `sender`. */
@key("Message")
final case class Message(
  @beanGetter peer: OutPeer,
  @beanGetter sender: UserOutPeer,
  @beanGetter date: Long,
  @beanGetter randomId: Long,
  @beanGetter message: MessageBody
) extends UpdateBody

/** Update payload carrying opaque string data with an optional type tag. */
@key("RawUpdate")
final case class RawUpdate(
  `type`: Option[String],
  @beanGetter data: String
) extends UpdateBody {
  def getType = `type`.asJava
}
/** The content of a chat message. */
sealed trait MessageBody

//ext has default value for backward compatibility with old bots
@key("Text")
final case class TextMessage(@beanGetter text: String, ext: Option[TextMessageEx] = None) extends MessageBody {
  def getExt = ext.asJava
}

/** Message whose content is an arbitrary JSON string. */
@key("Json")
final case class JsonMessage(@beanGetter rawJson: String) extends MessageBody

/** Sticker message; all fields optional. The `asPrimitive`/`asJava` accessors
  * convert Options for Java callers (asPrimitive is a project helper — see its
  * definition for the exact boxing behavior). */
@key("Sticker")
final case class StickerMessage(
  stickerId: Option[Int],
  fastPreview: Option[Array[Byte]],
  image512: Option[ImageLocation],
  image256: Option[ImageLocation],
  stickerCollectionId: Option[Int],
  stickerCollectionAccessHash: Option[Long]
) extends MessageBody {
  def getStickerId = stickerId.asPrimitive
  def getFastPreview = fastPreview.asJava
  def getImage512 = image512.asJava
  def getImage256 = image256.asJava
  def getStickerCollectionId = stickerCollectionId.asPrimitive
  def getStickerCollectionAccessHash = stickerCollectionAccessHash.asPrimitive
}

/** Extension payload attached to a [[TextMessage]]. */
sealed trait TextMessageEx

@key("TextCommand")
final case class TextCommand(text: String, args: String) extends TextMessageEx

/** Rich text message with optional sender overrides, styling and attachments. */
@key("TextModernMessage")
final case class TextModernMessage(
  text: Option[String],
  senderNameOverride: Option[String],
  senderPhotoOverride: Option[Avatar],
  style: Option[ParagraphStyle],
  attaches: IndexedSeq[TextModernAttach]
) extends TextMessageEx {
  // Java-friendly constructor: nulls become None via Option(_).
  def this(
    text: String,
    senderNameOverride: String,
    senderPhotoOverride: Avatar,
    style: ParagraphStyle,
    attaches: java.util.List[TextModernAttach]
  ) =
    this(Option(text), Option(senderNameOverride), Option(senderPhotoOverride), Option(style), attaches.toIndexedSeq)
  def getText = text.asJava
  def getSenderNameOverride = senderNameOverride.asJava
  def getSenderPhotoOverride = senderPhotoOverride.asJava
  def getStyle = style.asJava
  def getAttaches = seqAsJavaList(attaches)
}

/** One attachment of a [[TextModernMessage]]: title/link/icon/text plus fields. */
final case class TextModernAttach(
  title: Option[String],
  titleUrl: Option[String],
  titleIcon: Option[ImageLocation],
  text: Option[String],
  style: Option[ParagraphStyle],
  fields: IndexedSeq[TextModernField]
) {
  // Java-friendly constructor: nulls become None via Option(_).
  def this(
    title: String,
    titleUrl: String,
    titleIcon: ImageLocation,
    text: String,
    style: ParagraphStyle,
    fields: java.util.List[TextModernField]
  ) = this(Option(title), Option(titleUrl), Option(titleIcon), Option(text), Option(style), fields.toIndexedSeq)
  def getTitle = title.asJava
  def getTitleUrl = titleUrl.asJava
  def getTitleIcon = titleIcon.asJava
  def getText = text.asJava
  def getStyle = style.asJava
  def getFields = seqAsJavaList(fields) //fields.toSeq.seqAsJavaList doesn't work for some reason
}

final case class TextModernField(@beanGetter title: String, @beanGetter value: String, isShort: Option[Boolean]) {
  def getIsShort = isShort.asJava
}

/** Visual styling for modern text paragraphs; all knobs optional. */
final case class ParagraphStyle(
  showParagraph: Option[Boolean],
  paragraphColor: Option[Color],
  bgColor: Option[Color]
) {
  def getShowParagraph = showParagraph.asJava
  def getParagraphColor = paragraphColor.asJava
  def getBgColor = bgColor.asJava
}

/** Named preset colors. */
sealed trait Colors
@key("Red") case object Red extends Colors
@key("Yellow") case object Yellow extends Colors
@key("Green") case object Green extends Colors

/** Either a named preset or a raw RGB value. */
sealed trait Color
@key("PredefinedColor")
final case class PredefinedColor(color: Colors) extends Color
@key("RgbColor")
final case class RgbColor(rgb: Int) extends Color

/** Document (file) message with optional thumbnail and media-specific extension. */
@key("Document")
final case class DocumentMessage(
  @beanGetter fileId: Long,
  @beanGetter accessHash: Long,
  @beanGetter fileSize: Long,
  @beanGetter name: String,
  @beanGetter mimeType: String,
  thumb: Option[FastThumb],
  ext: Option[DocumentEx]
) extends MessageBody {
  def getThumb = thumb.asJava
  def getExt = ext.asJava
}

/** Server-generated service message (e.g. system notices) carrying plain text. */
@key("Service")
final case class ServiceMessage(@beanGetter text: String) extends MessageBody

// Singleton "unsupported content" marker; trait + case object keeps the @key on both.
@key("Unsupported")
sealed trait UnsupportedMessage extends MessageBody
@key("Unsupported")
final case object UnsupportedMessage extends UnsupportedMessage

/** Inline low-resolution thumbnail (`thumb` is its encoded payload string). */
@key("FastThumb")
final case class FastThumb(
  @beanGetter width: Int,
  @beanGetter height: Int,
  @beanGetter thumb: String
)

/** Media-type-specific extension of a [[DocumentMessage]]. */
sealed trait DocumentEx
@key("Photo")
final case class DocumentExPhoto(
  @beanGetter width: Int,
  @beanGetter height: Int
) extends DocumentEx
@key("Video")
final case class DocumentExVideo(
  @beanGetter width: Int,
  @beanGetter height: Int,
  @beanGetter duration: Int
) extends DocumentEx
@key("Voice")
final case class DocumentExVoice(@beanGetter duration: Int) extends DocumentEx
@key("Animation")
final case class DocumentExAnimation(
  @beanGetter width: Int,
  @beanGetter height: Int
) extends DocumentEx
@key("AnimationVid")
final case class DocumentExAnimationVid(
  @beanGetter width: Int,
  @beanGetter height: Int,
  @beanGetter duration: Int
) extends DocumentEx
}
| dfsilva/actor-platform | actor-server/actor-bots-shared/src/main/scala/im/actor/bots/BotMessages.scala | Scala | agpl-3.0 | 26,939 |
package templemore.liftjson.provider
import net.liftweb.json.JsonAST.JValue
/** Hook for transforming a parsed lift-json AST before it is consumed. */
trait JsonASTTransformer {
  /** Returns a (possibly) rewritten copy of `json`. */
  def transform(json: JValue): JValue
}
package gameover.fwk.libgdx.gfx
import java.util.Comparator
import com.badlogic.gdx.math.{Rectangle, Vector2}
import gameover.fwk.libgdx.utils.LibGDXHelper
import gameover.fwk.pool.Vector2Pool
/** Geometry helpers built on the libGDX math types. */
object GeometryUtils extends LibGDXHelper {

  /** Computes the points where the segment (x1,y1)->(x2,y2) crosses the unit
    * grid lines, ordered along the segment's direction of travel, with
    * duplicate points (exact grid corners) collapsed. Returned vectors come
    * from [[Vector2Pool]].
    */
  def computeTiledIntersectionPoints(x1: Float, y1: Float, x2: Float, y2: Float): GdxArray[Vector2] = {
    val ret = new GdxArray[Vector2]
    val diffX = x2 - x1
    val fX1 = com.badlogic.gdx.math.MathUtils.floor(x1)
    val diffY = y2 - y1
    val fY1 = com.badlogic.gdx.math.MathUtils.floor(y1)
    // Direction angle of the segment; `angle` folds it into a quadrant so
    // alphaTan is the absolute slope used by the aux projections below.
    val alpha = Math.atan2(diffY, diffX)
    val angle = Math.abs(alpha % (Math.PI / 2d))
    // Pointing into the left half-plane swaps the x/y roles when projecting.
    val inverseAxis = alpha > Math.PI / 2d || alpha < -Math.PI / 2d
    val alphaTan = Math.tan(angle).toFloat
    val xSign: Float = Math.signum(diffX)
    val ySign: Float = Math.signum(diffY)
    // Enumerate the vertical (xs) and horizontal (ys) grid lines strictly
    // between the endpoints, walking outward from (x1, y1).
    val xs = new GdxArray[Float]
    val ys = new GdxArray[Float]
    var finished: Boolean = false
    while (!finished) {
      val x: Float = if (xSign < 0f) fX1 - (if (fX1 == x1) 1 else 0) - xs.size else fX1 + 1 + xs.size
      if ((xSign > 0 && x >= x2) || (xSign < 0 && x <= x2) || xSign == 0)
        finished = true
      else
        xs.add(x)
    }
    finished = false
    while (!finished) {
      val y: Float = if (ySign < 0f) fY1 - (if (fY1 == y1) 1 else 0) - ys.size else fY1 + 1 + ys.size
      if ((ySign > 0 && y >= y2) || (ySign < 0 && y <= y2) || ySign == 0)
        finished = true
      else
        ys.add(y)
    }
    // Project each crossed grid line onto the segment to get the crossing point.
    for (x <- xs) {
      if (!inverseAxis)
        ret.add(Vector2Pool.obtain(x, computeYAux(x1, y1, x, ySign, alphaTan)))
      else
        ret.add(Vector2Pool.obtain(x, computeXAux(y1, x1, x, ySign, alphaTan)))
    }
    for (y <- ys) {
      if (!inverseAxis)
        ret.add(Vector2Pool.obtain(computeXAux(x1, y1, y, xSign, alphaTan), y))
      else
        ret.add(Vector2Pool.obtain(computeYAux(y1, x1, y, xSign, alphaTan), y))
    }
    // Sort along the travel direction, then drop adjacent duplicates
    // (a crossing at a grid corner appears once per axis).
    ret.sort(new Function2DPointComparator(xSign, ySign))
    var i = ret.size - 2
    while ((i >= 0) && (i + 1 < ret.size)) {
      if (ret.get(i).equals(ret.get(i + 1)))
        ret.removeIndex(i)
      else
        i = i - 1
    }
    ret
  }

  // y-coordinate on the segment at vertical grid line x2 (slope * horizontal distance).
  def computeYAux(x1: Float, y1: Float, x2: Float, ySign: Float, alphaTan: Float) = y1 + ySign * (alphaTan * Math.abs(x2 - x1))

  // x-coordinate on the segment at horizontal grid line y2; guards a zero slope.
  def computeXAux(x1: Float, y1: Float, y2: Float, xSign: Float, alphaTan: Float) = x1 + (if (alphaTan != 0) xSign * (Math.abs(y2 - y1) / alphaTan) else 0f)

  /** Center plus the four corners of `area`, all obtained from [[Vector2Pool]]. */
  def centerAndEdges(area: Rectangle): List[Vector2] =
    area.getCenter(Vector2Pool.obtain()) ::
    area.getPosition(Vector2Pool.obtain()) ::
    area.getPosition(Vector2Pool.obtain()).add(area.width, 0) ::
    area.getPosition(Vector2Pool.obtain()).add(0, area.height) ::
    area.getPosition(Vector2Pool.obtain()).add(area.width, area.height) :: Nil
}
/** Orders 2D points along a traversal direction given by the signs of the
  * direction vector's components: primarily by x (scaled by `xSign`), and by
  * y (scaled by `ySign`) when the x coordinates are equal.
  */
class Function2DPointComparator(xSign: Float, ySign: Float) extends Comparator[Vector2] {
  override def compare(o1: Vector2, o2: Vector2): Int = {
    val signum: Float = Math.signum(o1.x - o2.x)
    if (signum == 0) {
      // Bug fix: points with equal x must be ordered by their *y* difference.
      // The original recomputed Math.signum(o1.x - o2.x) here, which is always
      // zero in this branch, so ties on x always compared as equal.
      (Math.signum(o1.y - o2.y) * ySign).toInt
    }
    else {
      (signum * xSign).toInt
    }
  }
}
| PixelDuck/gameover-game-framework | src/main/scala/gameover/fwk/libgdx/gfx/GeometryUtils.scala | Scala | mit | 3,134 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
/**
* A <code>Reporter</code> that prints test status information to
* the standard error stream.
*
* @author Bill Venners
*/
/** Prints test status to `Console.err` by delegating everything to
  * [[PrintReporter]]; the boolean flags select what detail is shown.
  */
private[scalatest] class StandardErrReporter(
  presentAllDurations: Boolean,
  presentInColor: Boolean,
  presentShortStackTraces: Boolean,
  presentFullStackTraces: Boolean,
  presentUnformatted: Boolean,
  presentReminder: Boolean,
  presentReminderWithShortStackTraces: Boolean,
  presentReminderWithFullStackTraces: Boolean,
  presentReminderWithoutCanceledTests: Boolean
) extends PrintReporter(
  Console.err,
  presentAllDurations,
  presentInColor,
  presentShortStackTraces,
  presentFullStackTraces,
  presentUnformatted,
  presentReminder,
  presentReminderWithShortStackTraces,
  presentReminderWithFullStackTraces,
  presentReminderWithoutCanceledTests
) {

  /**
   * Does nothing, because we don't want to dispose the standard error stream.
   */
  // Explicit result type and `= ()` instead of deprecated procedure syntax.
  override def dispose(): Unit = ()
}
| travisbrown/scalatest | src/main/scala/org/scalatest/tools/StandardErrReporter.scala | Scala | apache-2.0 | 1,577 |
import scala.quoted.*
/** Macro that expands `test(obj)` into a match over all direct children of the
  * sealed type of `obj`, each case binding the narrowed value and doing nothing.
  */
object MatchTest {

  /** Inline entry point; splices the generated match at the call site. */
  inline def test[T](inline obj: T): Unit = ${testImpl('obj)}

  def testImpl[T](objExpr: Expr[T])(using qctx: Quotes, t: Type[T]): Expr[Unit] = {
    import qctx.reflect.*
    val obj = objExpr.asTerm
    // One case per direct child of the sealed hierarchy: `case c: Child => ()`.
    val cases = obj.tpe.typeSymbol.children.map { child =>
      val subtype = TypeIdent(child)
      // Fresh binder `c` typed as the child; Bind + Typed forms the type-test pattern.
      val bind = Symbol.newBind(Symbol.spliceOwner, "c", Flags.EmptyFlags, subtype.tpe)
      CaseDef(Bind(bind, Typed(Ref(bind), subtype)), None, '{()}.asTerm)
    }
    val result = Match(obj, cases)
    //println(result.show(using Printer.TreeAnsiCode))
    result.asExprOf[Unit]
  }
}
| dotty-staging/dotty | tests/neg-custom-args/fatal-warnings/i12188/Macro.scala | Scala | apache-2.0 | 631 |
package com.seanshubin.scala.training.web.http.client
import java.io.ByteArrayOutputStream
import com.seanshubin.scala.training.web.HttpResponseCode
import org.apache.http.client.methods.{HttpGet, HttpRequestBase}
import org.apache.http.client.utils.URIBuilder
import org.apache.http.impl.client.HttpClientBuilder
/** HTTP client wrapper over Apache HttpComponents; decodes response bodies
  * using `charsetName`.
  */
class HttpClientImpl(host: String, port: Int, charsetName: String) extends HttpClient {
  private val scheme = "http"

  /** Issues `GET scheme://host:port/path?parameterKey=parameterValue` and
    * returns the status code plus the body decoded with `charsetName`.
    */
  def get(path: String, parameterKey: String, parameterValue: String): (HttpResponseCode, String) = {
    // Fix: the original leaked the client and response; both are Closeable and
    // must be released so pooled connections are returned.
    val httpClient = HttpClientBuilder.create().build()
    try {
      val uri = new URIBuilder().setScheme(scheme).setHost(host).setPort(port).setPath(path).addParameter(parameterKey, parameterValue).build()
      val httpRequestBase: HttpRequestBase = new HttpGet(uri)
      val response = httpClient.execute(httpRequestBase)
      try {
        val statusCode = response.getStatusLine.getStatusCode
        val byteArrayOutputStream = new ByteArrayOutputStream()
        response.getEntity.writeTo(byteArrayOutputStream)
        val body = new String(byteArrayOutputStream.toByteArray, charsetName)
        (HttpResponseCode.fromCode(statusCode), body)
      } finally response.close()
    } finally httpClient.close()
  }
}
| SeanShubin/scala-training | web/src/main/scala/com/seanshubin/scala/training/web/http/client/HttpClientImpl.scala | Scala | unlicense | 1,153 |
/* ---------------------------------------------------------------------
%%
%% Copyright (c) 2007-2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------*/
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import com.amazonaws.services.dynamodbv2.model._
// TODO: add await for result to converge
Table.create("books_range_bulk", "Id", "N", Some("Title"), Some("S"))

// Index Shape Controls: t_items hash keys, per_item range rows under each.
val t_items = 10
val per_item = 1000

// One future per hash key, each bulk-inserting per_item items.
val tasks: Seq[Future[Array[PutItemResult]]] =
  for (i <- 1 to t_items) yield future {
    println("Executing task " + i)
    // Fix: the original declared an immutable empty Array and appended with
    // `results :+ ...`, discarding every appended element — each task always
    // returned an empty array. Build the array from the puts instead.
    val results: Array[PutItemResult] =
      (1 to per_item).map { j =>
        val item = new Item(
          ("Id", "N", i.toString()),
          ("Title", "S", "Title " + i.toString() + "::" + j.toString()),
          ("ISBN", "S", i.toString() + "::" + j.toString()))
        Table.put("books_range_bulk", item)
      }.toArray
    results
  }

val aggregated: Future[Seq[Array[PutItemResult]]] =
  Future.sequence(tasks)

val results: Seq[Array[PutItemResult]] =
  Await.result(aggregated, 300.seconds)

val result: QueryResult = Table.range_query("books_range_bulk", "Id", "1", Some("Title"), Some("BEGINS_WITH"), Some("Title 1"))

// TODO: add await for result count to converge to #per_item count
// assert(per_item == result.getCount())
| basho-labs/rinamo | tests/com.basho.dynamodb.integ/console/range_async.scala | Scala | apache-2.0 | 2,026 |
package sbt
import java.io.File
import Def.Classpath
import scala.annotation.implicitNotFound
import sbt.internal.util.Attributed
/** Type classes describing how a single value (`Value`) or a collection of
  * values (`Values`) can be appended to an existing setting value of type `A`.
  */
object Append {
  @implicitNotFound(msg = "No implicit for Append.Value[${A}, ${B}] found,\n  so ${B} cannot be appended to ${A}")
  trait Value[A, B] {
    def appendValue(a: A, b: B): A
  }
  @implicitNotFound(msg = "No implicit for Append.Values[${A}, ${B}] found,\n  so ${B} cannot be appended to ${A}")
  trait Values[A, -B] {
    def appendValues(a: A, b: B): A
  }
  /** Both single-value and bulk append for sequence-like targets. */
  trait Sequence[A, -B, T] extends Value[A, T] with Values[A, B]

  implicit def appendSeq[T, V <: T]: Sequence[Seq[T], Seq[V], V] = new Sequence[Seq[T], Seq[V], V] {
    def appendValues(a: Seq[T], b: Seq[V]): Seq[T] = a ++ b
    def appendValue(a: Seq[T], b: V): Seq[T] = a :+ b
  }
  // View-bounded variants convert each element V => T before appending.
  implicit def appendSeqImplicit[T, V <% T]: Sequence[Seq[T], Seq[V], V] = new Sequence[Seq[T], Seq[V], V] {
    def appendValues(a: Seq[T], b: Seq[V]): Seq[T] = a ++ (b map { x => (x: T) })
    def appendValue(a: Seq[T], b: V): Seq[T] = a :+ (b: T)
  }
  implicit def appendList[T, V <: T]: Sequence[List[T], List[V], V] = new Sequence[List[T], List[V], V] {
    def appendValues(a: List[T], b: List[V]): List[T] = a ::: b
    def appendValue(a: List[T], b: V): List[T] = a :+ b
  }
  implicit def appendListImplicit[T, V <% T]: Sequence[List[T], List[V], V] = new Sequence[List[T], List[V], V] {
    def appendValues(a: List[T], b: List[V]): List[T] = a ::: (b map { x => (x: T) })
    def appendValue(a: List[T], b: V): List[T] = a :+ (b: T)
  }
  implicit def appendString: Value[String, String] = new Value[String, String] {
    def appendValue(a: String, b: String) = a + b
  }
  // Explicit result types added below: public implicit members should not rely
  // on inference (fragile API, slower implicit search diagnostics).
  implicit def appendInt: Value[Int, Int] = new Value[Int, Int] {
    def appendValue(a: Int, b: Int) = a + b
  }
  implicit def appendLong: Value[Long, Long] = new Value[Long, Long] {
    def appendValue(a: Long, b: Long) = a + b
  }
  implicit def appendDouble: Value[Double, Double] = new Value[Double, Double] {
    def appendValue(a: Double, b: Double) = a + b
  }
  // Plain files appended to a classpath get blank attribute maps.
  implicit def appendClasspath: Sequence[Classpath, Seq[File], File] = new Sequence[Classpath, Seq[File], File] {
    def appendValues(a: Classpath, b: Seq[File]): Classpath = a ++ Attributed.blankSeq(b)
    def appendValue(a: Classpath, b: File): Classpath = a :+ Attributed.blank(b)
  }
  implicit def appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] = new Sequence[Set[T], Set[V], V] {
    def appendValues(a: Set[T], b: Set[V]): Set[T] = a ++ b
    def appendValue(a: Set[T], b: V): Set[T] = a + b
  }
  implicit def appendMap[A, B, X <: A, Y <: B]: Sequence[Map[A, B], Map[X, Y], (X, Y)] = new Sequence[Map[A, B], Map[X, Y], (X, Y)] {
    def appendValues(a: Map[A, B], b: Map[X, Y]): Map[A, B] = a ++ b
    def appendValue(a: Map[A, B], b: (X, Y)): Map[A, B] = a + b
  }
}
| dansanduleac/sbt | main/settings/src/main/scala/sbt/Append.scala | Scala | bsd-3-clause | 2,764 |
package stackoverflow
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import annotation.tailrec
import scala.reflect.ClassTag
/** A raw stackoverflow posting: a question (postingType == 1, may reference an
  * acceptedAnswer and carry language tags) or an answer (postingType == 2,
  * parentId points at its question). */
case class Posting(postingType: Int, id: Int, acceptedAnswer: Option[Int], parentId: Option[Int], score: Int, tags: Option[String]) extends Serializable
/** Entry point: wires up a local Spark context and runs the full pipeline
  * (parse -> group -> score -> vectorize -> k-means -> report). */
object StackOverflow extends StackOverflow {

  // @transient + lazy: created on first use on the driver and never captured
  // into serialized task closures.
  @transient lazy val conf: SparkConf = new SparkConf().setMaster("local").setAppName("StackOverflow")
  @transient lazy val sc: SparkContext = new SparkContext(conf)

  /** Main function */
  def main(args: Array[String]): Unit = {
    sc.setLogLevel("WARN")
    val lines = sc.textFile("src/main/resources/stackoverflow/stackoverflow.csv")
    val raw = rawPostings(lines)
    val grouped = groupedPostings(raw)
    val scored = scoredPostings(grouped)//.sample(true, 0.1, 0)
    // Cached: the vectors RDD is iterated repeatedly by kmeans.
    val vectors = vectorPostings(scored).persist()
    // assert(vectors.count() == 2121822, "Incorrect number of vectors: " + vectors.count())
    val means = kmeans(sampleVectors(vectors), vectors, debug = true)
    val results = clusterResults(means, vectors)
    printResults(results)
  }
}
/** The parsing and kmeans methods */
class StackOverflow extends Serializable {
/** Languages */
val langs =
  List(
    "JavaScript", "Java", "PHP", "Python", "C#", "C++", "Ruby", "CSS",
    "Objective-C", "Perl", "Scala", "Haskell", "MATLAB", "Clojure", "Groovy")

/** K-means parameter: How "far apart" languages should be for the kmeans algorithm?
  * The language's index in `langs` is multiplied by this to form the first
  * vector component (see vectorPostings). */
def langSpread = 50000
assert(langSpread > 0, "If langSpread is zero we can't recover the language from the input data!")

/** K-means parameter: Number of clusters */
def kmeansKernels = 45

/** K-means parameter: Convergence criteria */
def kmeansEta: Double = 20.0D

/** K-means parameter: Maximum iterations */
def kmeansMaxIterations = 120
//
//
// Parsing utilities:
//
//
/** Parse each CSV line into a [[Posting]] record. */
def rawPostings(lines: RDD[String]): RDD[Posting] =
  lines.map { line =>
    val fields = line.split(",")
    // Columns 2 and 3 are optional ids; column 5 (tags) may be missing entirely.
    Posting(
      postingType = fields(0).toInt,
      id = fields(1).toInt,
      acceptedAnswer = if (fields(2) == "") None else Some(fields(2).toInt),
      parentId = if (fields(3) == "") None else Some(fields(3).toInt),
      score = fields(4).toInt,
      tags = if (fields.length >= 6) Some(fields(5).intern()) else None)
  }
/** Pair every question with all of its answers, keyed by the question id. */
def groupedPostings(postings: RDD[Posting]): RDD[(Int, Iterable[(Posting, Posting)])] = {
  // Questions keyed by their own id; answers keyed by the question they answer.
  val questions = postings.collect { case p if p.postingType == 1 => (p.id, p) }
  val answers = postings.collect { case p if p.postingType == 2 => (p.parentId.get, p) }
  questions.join(answers).groupByKey()
}
/** Pair each question with the highest score among its answers. */
def scoredPostings(grouped: RDD[(Int, Iterable[(Posting, Posting)])]): RDD[(Posting, Int)] = {

  // Highest answer score, 0 when there are no answers (same as the original
  // imperative scan that started from 0).
  def highestAnswerScore(answers: Iterable[Posting]): Int =
    answers.foldLeft(0)((best, answer) => math.max(best, answer.score))

  grouped.map { case (_, questionAnswerPairs) =>
    val question = questionAnswerPairs.head._1
    (question, highestAnswerScore(questionAnswerPairs.map(_._2)))
  }
}
/** Compute the vectors for the kmeans */
def vectorPostings(scored: RDD[(Posting, Int)]): RDD[(Int, Int)] = {
/** Return optional index of first language that occurs in `tags`. */
def firstLangInTag(tag: Option[String], ls: List[String]): Option[Int] = {
if (tag.isEmpty) None
else if (ls.isEmpty) None
else if (tag.get == ls.head) Some(0) // index: 0
else {
val tmp = firstLangInTag(tag, ls.tail)
tmp match {
case None => None
case Some(i) => Some(i + 1) // index i in ls.tail => index i+1
}
}
}
scored.map({ case (post, score) => (firstLangInTag(post.tags, langs).get * langSpread, score) })
}
/** Sample the vectors */
def sampleVectors(vectors: RDD[(Int, Int)]): Array[(Int, Int)] = {
assert(kmeansKernels % langs.length == 0, "kmeansKernels should be a multiple of the number of languages studied.")
val perLang = kmeansKernels / langs.length
// http://en.wikipedia.org/wiki/Reservoir_sampling
def reservoirSampling(lang: Int, iter: Iterator[Int], size: Int): Array[Int] = {
val res = new Array[Int](size)
val rnd = new util.Random(lang)
for (i <- 0 until size) {
assert(iter.hasNext, s"iterator must have at least $size elements")
res(i) = iter.next
}
var i = size.toLong
while (iter.hasNext) {
val elt = iter.next
val j = math.abs(rnd.nextLong) % i
if (j < size)
res(j.toInt) = elt
i += 1
}
res
}
val res =
if (langSpread < 500)
// sample the space regardless of the language
vectors.takeSample(false, kmeansKernels, 42)
else
// sample the space uniformly from each language partition
vectors.groupByKey.flatMap({
case (lang, vectors) => reservoirSampling(lang, vectors.toIterator, perLang).map((lang, _))
}).collect()
assert(res.length == kmeansKernels, res.length)
res
}
//
//
// Kmeans method:
//
//
/** Main kmeans computation */
@tailrec final def kmeans(means: Array[(Int, Int)], vectors: RDD[(Int, Int)], iter: Int = 1, debug: Boolean = false): Array[(Int, Int)] = {
val newMeans = means.clone()
vectors.cache()
.map(v => (findClosest(v, means), v))
.groupByKey()
.mapValues(averageVectors).collect().par.foreach(v => newMeans(v._1) = v._2)
val distance = euclideanDistance(means, newMeans)
if (debug) {
println(s"""Iteration: $iter
| * current distance: $distance
| * desired distance: $kmeansEta
| * means:""".stripMargin)
for (idx <- 0 until kmeansKernels)
println(f" ${means(idx).toString}%20s ==> ${newMeans(idx).toString}%20s " +
f" distance: ${euclideanDistance(means(idx), newMeans(idx))}%8.0f")
}
if (converged(distance))
newMeans
else if (iter < kmeansMaxIterations)
kmeans(newMeans, vectors, iter + 1, debug)
else {
println("Reached max iterations!")
newMeans
}
}
//
//
// Kmeans utilities:
//
//
/** Decide whether the kmeans clustering converged */
def converged(distance: Double) =
distance < kmeansEta
/** Return the euclidean distance between two points */
def euclideanDistance(v1: (Int, Int), v2: (Int, Int)): Double = {
val part1 = (v1._1 - v2._1).toDouble * (v1._1 - v2._1)
val part2 = (v1._2 - v2._2).toDouble * (v1._2 - v2._2)
part1 + part2
}
/** Return the euclidean distance between two points */
def euclideanDistance(a1: Array[(Int, Int)], a2: Array[(Int, Int)]): Double = {
assert(a1.length == a2.length)
var sum = 0d
var idx = 0
while(idx < a1.length) {
sum += euclideanDistance(a1(idx), a2(idx))
idx += 1
}
sum
}
/** Return the closest point */
def findClosest(p: (Int, Int), centers: Array[(Int, Int)]): Int = {
var bestIndex = 0
var closest = Double.PositiveInfinity
for (i <- 0 until centers.length) {
val tempDist = euclideanDistance(p, centers(i))
if (tempDist < closest) {
closest = tempDist
bestIndex = i
}
}
bestIndex
}
/** Average the vectors */
def averageVectors(ps: Iterable[(Int, Int)]): (Int, Int) = {
val iter = ps.iterator
var count = 0
var comp1: Long = 0
var comp2: Long = 0
while (iter.hasNext) {
val item = iter.next
comp1 += item._1
comp2 += item._2
count += 1
}
((comp1 / count).toInt, (comp2 / count).toInt)
}
//
//
// Displaying results:
//
//
def clusterResults(means: Array[(Int, Int)], vectors: RDD[(Int, Int)]): Array[(String, Double, Int, Int)] = {
val closest = vectors.map(p => (findClosest(p, means), p))
val closestGrouped = closest.groupByKey()
val median = closestGrouped.mapValues { vs =>
val langLabel: String = langs(vs.groupBy(_._1).maxBy(_._2.size)._1 / langSpread) // most common language in the cluster
val langPercent: Double = vs.count(v => langs(v._1 / langSpread) == langLabel) * 100.0 / vs.size // percent of the questions in the most common language
val clusterSize: Int = vs.size
val (l, u) = vs.map(_._2).toList.sortWith(_<_).splitAt(vs.size / 2)
val medianScore: Int = if (vs.size % 2 == 0) (l.last + u.head) / 2 else u.head
(langLabel, langPercent, clusterSize, medianScore)
}
median.collect().map(_._2).sortBy(_._4)
}
def printResults(results: Array[(String, Double, Int, Int)]): Unit = {
println("Resulting clusters:")
println(" Score Dominant language (%percent) Questions")
println("================================================")
for ((lang, percent, size, score) <- results)
println(f"${score}%7d ${lang}%-17s (${percent}%-5.1f%%) ${size}%7d")
}
}
| syhan/coursera | scala-spark-big-data/stackoverflow/src/main/scala/stackoverflow/StackOverflow.scala | Scala | gpl-3.0 | 9,336 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.