code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package se.culvertsoft.mgen.javapack.generator
import se.culvertsoft.mgen.compiler.internal.FancyHeaders
import se.culvertsoft.mgen.api.model.Field
import se.culvertsoft.mgen.api.model.RuntimeEnumType
import se.culvertsoft.mgen.api.model.RuntimeClassType
import se.culvertsoft.mgen.javagenerator.BuildVersion
/**
 * String constants (class names, package names and generated-file headers)
 * referenced by the Java back-end when emitting generated source code.
 */
object JavaConstants {

  // Java collection types used in generated signatures.
  val colClsString = "java.util.Collection"
  val listClsString = "java.util.List"
  val arrayListClsString = "java.util.ArrayList"

  // mgen runtime package names.
  val utilPkg = "se.culvertsoft.mgen.javapack.util"
  val metadataPkg = "se.culvertsoft.mgen.javapack.metadata"
  val apiPkg = "se.culvertsoft.mgen.api"
  val modelPkg = s"${apiPkg}.model"
  val javapackPkg = "se.culvertsoft.mgen.javapack"

  // Simple (unqualified) runtime helper class names. The redundant
  // s-interpolators on these plain literals have been dropped.
  val validatorClsString = "Validator"
  val setFieldSetClsString = "Marker"
  val deepCopyerClsString = "DeepCopyer"
  val fieldHasherClsString = "FieldHasher"
  val listMakerClsString = "ListMaker"
  val mapMakerClsString = "MapMaker"

  // Fully qualified ("Q") variants of the helper class names above.
  val listMakerClsStringQ = s"${utilPkg}.${listMakerClsString}"
  val mapMakerClsStringQ = s"${utilPkg}.${mapMakerClsString}"
  val validatorClsStringQ = s"${utilPkg}.${validatorClsString}"
  val setFieldSetClsStringQ = s"${utilPkg}.${setFieldSetClsString}"
  val deepCopyerClsStringQ = s"${utilPkg}.${deepCopyerClsString}"
  val fieldHasherClsStringQ = s"${utilPkg}.${fieldHasherClsString}"
  val stringifyerCls = "Stringifyer"
  val stringifyerClsQ = s"${utilPkg}.${stringifyerCls}"

  // Base classes of generated classes/enums, plus class registry types.
  val mgenBaseClsString = s"${javapackPkg}.classes.MGenBase"
  val mgenEnumClsString = s"${javapackPkg}.classes.MGenEnum"
  val clsRegistryClsString = s"${javapackPkg}.classes.ClassRegistryBase"
  val clsRegistryEntryClsString = "ClassRegistryEntry"
  val clsRegistryEntryClsStringQ = s"$javapackPkg.classes.$clsRegistryEntryClsString"
  val eqTesterClsString = "EqualityTester"
  val eqTesterClsStringQ = s"${javapackPkg}.util.${eqTesterClsString}"

  // Model and serialization interfaces referenced by generated code.
  // Runtime type names are taken from the classes themselves so they cannot drift.
  val fieldIfcClsString = "Field"
  val readerClsString = "Reader"
  val fieldVisitorClsString = "FieldVisitor"
  val runtimeEnumClsString = classOf[RuntimeEnumType].getSimpleName
  val runtimeEnumClsStringQ = classOf[RuntimeEnumType].getName
  val runtimeClassClsString = classOf[RuntimeClassType].getSimpleName
  val runtimeClassClsStringQ = classOf[RuntimeClassType].getName
  val fieldIfcClsStringQ = s"${modelPkg}.${fieldIfcClsString}"
  val readerClsStringQ = s"${javapackPkg}.serialization.${readerClsString}"
  val fieldVisitorClsStringQ = s"${javapackPkg}.serialization.${fieldVisitorClsString}"
  val fieldSetDepthClsString = "FieldSetDepth"
  val fieldVisitSelectionClsString = "FieldVisitSelection"
  val fieldSetDepthClsStringQ = s"${metadataPkg}.${fieldSetDepthClsString}"
  val fieldVisitSelectionClsStringQ = s"${metadataPkg}.${fieldVisitSelectionClsString}"

  // Headers stamped into every generated file, tagged with the build version.
  val fileHeader = FancyHeaders.fileHeader(BuildVersion.GIT_TAG + " " + BuildVersion.GIT_COMMIT_DATE)
  val serializationSectionHeader = FancyHeaders.serializationSectionHeader
  val metadataSectionHeader = FancyHeaders.metadataSectionHeader
} | culvertsoft/mgen | mgen-javagenerator/src/main/scala/se/culvertsoft/mgen/javapack/generator/JavaConstants.scala | Scala | mit | 3,029 |
/** soar
*
* Copyright (c) 2017 Hugo Firth
* Email: <me@hugofirth.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ncl.la.soar.glance.web.client.view
import cats._
import cats.implicits._
import diode.react.ModelProxy
import uk.ac.ncl.la.soar.ModuleCode
import uk.ac.ncl.la.soar.data.{Module, StudentRecords}
import uk.ac.ncl.la.soar.glance.eval.Survey
import uk.ac.ncl.la.soar.glance.web.client.style.Icon
import uk.ac.ncl.la.soar.glance.web.client.{ChangeRanks, Main, ProgressSimpleSurvey, SurveyModel}
import diode.data._
import diode.react.ReactPot._
import diode.react._
import japgolly.scalajs.react._
import japgolly.scalajs.react.extra.router.RouterCtl
import japgolly.scalajs.react.vdom.html_<^._
import uk.ac.ncl.la.soar.glance.web.client.component.StudentsSortableTable
import scala.collection.immutable.SortedMap
/**
* React Component for Simple Survey View
*/
object SimpleSurveyView {

  /**
   * Component props: a proxy to the (possibly still-loading) survey model and
   * the router control used to navigate to the next survey stage.
   */
  case class Props(proxy: ModelProxy[Pot[SurveyModel]], ctrl: RouterCtl[Main.SurveyLoc])

  class Backend(bs: BackendScope[Props, Unit]) {

    // Heading for the first (student identifier) table column.
    val indexCol = "Student Number"

    /** Construct the presentation of the modules as a sorted list to fill some table headings */
    private def modules(survey: Survey, moduleInfo: Map[ModuleCode, Module]) =
      survey.modules.map(k => k -> moduleInfo.get(k).flatMap(_.title)).toList.sorted

    /** Construct the full presentation of table headings, including modules and tool tips */
    private def headings(survey: Survey, moduleInfo: Map[ModuleCode, Module]) =
      (indexCol, none[String]) :: modules(survey, moduleInfo)

    /** Construct the presentation of the students to fill table rows */
    private def students(survey: Survey) = survey.entries

    /** Construct the presentation of the query students to fill the rankable table rows */
    private def queryStudents(survey: Survey) = survey.entries.filter( r => survey.queries.contains(r.number) )

    /** Construct the function which provides the presentation of a table cell, given a StudentRecord and string key */
    private def renderCell(default: String)(student: StudentRecords[SortedMap, ModuleCode, Double], key: String) =
      key match {
        case k if k == indexCol => student.number
        // Missing module scores fall back to the supplied default string.
        case k => student.record.get(k).fold(default)(_.toString)
      }

    // Inner component rendering the sortable ranking table. Rank changes are
    // dispatched back to the circuit as ChangeRanks actions.
    private val rankingTable =
      ScalaComponent.builder[ModelProxy[Pot[SurveyModel]]]("RankingTable")
        .render($ => {
          val proxy = $.props
          val model = proxy()
          <.div(
            ^.id := "ranking",
            <.span(
              ^.className := "sub-title",
              Icon.listOl(Icon.Medium),
              <.h2("Rank students")
            ),
            // Only rendered once the SurveyModel Pot is ready.
            model.render { sm =>
              val rankModule = sm.survey.moduleToRank
              <.div(
                ^.className := "table-responsive",
                StudentsSortableTable.component(
                  StudentsSortableTable.Props(
                    rankModule,
                    queryStudents(sm.survey),
                    headings(sm.survey, sm.modules),
                    renderCell(" "),
                    _ => Callback.empty,
                    (ranks, change) => proxy.dispatchCB(ChangeRanks(ranks, change))
                  )
                )
              )
            }
          )
        })
        .build

    def render(p: Props): VdomElement = {
      //Get the necessary data from the model
      val model = p.proxy()
      //TODO: find a neater way to do the below than two rendering blocks.
      <.div(
        // Welcome banner describing the module the respondent should rank by.
        model.render { sm =>
          val rankModule = sm.survey.moduleToRank
          <.div(
            ^.className := "alert alert-success welcome-banner",
            ^.role := "alert",
            <.p(
              <.strong("Welcome"),
              " Please rank students below by how you believe they will perform in the module ",
              <.strong(s"$rankModule: ${sm.modules.get(rankModule).flatMap(m => m.title).getOrElse("")}"),
              ". Higher is better."
            ),
            //Why is the type annotation necessary below?
            <.p(<.strong("Module aims: "), sm.modules.get(rankModule).flatMap(m => m.description).getOrElse(""): String),
            <.p(
              <.strong("Module keywords: "),
              sm.modules.get(rankModule).fold(List.empty[String])(_.keywords).mkString(", ")
            )
          )
        },
        // The ranking table itself.
        <.div(
          ^.id := "training",
          rankingTable(p.proxy)
        ),
        // Continue button: progresses the survey and routes to the form page.
        model.render { sm =>
          <.div(
            ^.className := "row",
            <.div(
              ^.className := "col-md-4 col-md-offset-8",
              <.button(
                ^.`type` := "button",
                ^.className := "btn btn-primary pull-right",
                "Continue",
                ^.onClick --> {
                  p.proxy.dispatchCB(ProgressSimpleSurvey) >> p.ctrl.set(Main.SurveyFormLoc(sm.survey.id))
                }
              )
            )
          )
        }
      )
    }
  }

  // Top-level component; all rendering logic lives in Backend.
  val component = ScalaComponent.builder[Props]("SimpleSurveyView")
    .renderBackend[Backend]
    .build
}
| NewcastleComputingScience/student-outcome-accelerator | glance-eval/js/src/main/scala/uk/ac/ncl/la/soar/glance/web/client/view/SimpleSurveyView.scala | Scala | apache-2.0 | 5,703 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart.graphics
import java.awt.Graphics2D
/**
* Draws vertical grid lines based on a time axis.
*
* @param xaxis
* Axis to use for creating the scale and determining the the tick marks that correspond with
* the major grid lines.
* @param major
* Style to use for drawing the major tick lines.
* @param minor
* Style to use for drawing the minor tick lines.
*/
/**
 * Draws the vertical grid lines for the chart region bounded by (x1, y1) and
 * (x2, y2). Each tick of the time axis yields one line; major ticks use the
 * major style and minor ticks the minor style. Lines that would fall exactly
 * on (or outside) the left/right borders are skipped.
 */
case class TimeGrid(xaxis: TimeAxis, major: Style, minor: Style) extends Element {

  def draw(g: Graphics2D, x1: Int, y1: Int, x2: Int, y2: Int): Unit = {
    val tickMarks = xaxis.ticks(x1, x2)
    val toX = xaxis.scale(x1, x2)
    tickMarks.foreach { t =>
      // Select and apply the stroke/color for this tick before drawing.
      val lineStyle = if (t.major) major else minor
      lineStyle.configure(g)
      val xPos = toX(t.timestamp)
      if (xPos > x1 && xPos < x2) {
        g.drawLine(xPos, y1, xPos, y2)
      }
    }
  }
}
| copperlight/atlas | atlas-chart/src/main/scala/com/netflix/atlas/chart/graphics/TimeGrid.scala | Scala | apache-2.0 | 1,475 |
/*
* Copyright (c) 2012-2019 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich.common
package enrichments
// Java
import java.lang.{Integer => JInteger}
// Scala
import scala.util.control.NonFatal
// Scalaz
import scalaz._
import Scalaz._
/**
* Contains enrichments related to the
* client - where the client is the
* software which is using the SnowPlow
* tracker.
*
* Enrichments relate to browser resolution
*/
object ClientEnrichments {

  /**
   * The Tracker Protocol's pattern
   * for a screen resolution - for
   * details see:
   *
   * https://github.com/snowplow/snowplow/wiki/snowplow-tracker-protocol#wiki-browserandos
   */
  private val ResRegex = """(\\d+)x(\\d+)""".r

  /**
   * Extracts view dimensions (e.g. screen resolution,
   * browser/app viewport) stored as per the Tracker
   * Protocol:
   *
   * https://github.com/snowplow/snowplow/wiki/snowplow-tracker-protocol#wiki-browserandos
   *
   * @param field The name of the field
   *        holding the screen dimensions
   * @param res The packed string
   *        holding the screen dimensions
   * @return the ResolutionTuple or an
   *         error message, boxed in a
   *         Scalaz Validation
   */
  val extractViewDimensions: (String, String) => Validation[String, ViewDimensionsTuple] = (field, res) =>
    res match {
      case ResRegex(width, height) =>
        try {
          // Boxed to java.lang.Integer to match the ViewDimensionsTuple alias.
          (width.toInt: JInteger, height.toInt: JInteger).success
        } catch {
          // toInt throws NumberFormatException when the digit string exceeds
          // Int range; NonFatal keeps fatal errors (OOM etc.) propagating.
          case NonFatal(e) => "Field [%s]: view dimensions [%s] exceed Integer's max range".format(field, res).fail
        }
      // Input did not match the <digits>x<digits> pattern at all.
      case _ => "Field [%s]: [%s] does not contain valid view dimensions".format(field, res).fail
    }
}
| RetentionGrid/snowplow | 3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/ClientEnrichments.scala | Scala | apache-2.0 | 2,361 |
package geotrellis.core.spark
import geotrellis.raster._
import geotrellis.spark._
import java.time.ZonedDateTime
/**
 * Stitching operations over the spatial dimension of a space-time tile layer:
 * all tiles sharing a temporal key are merged into a single raster.
 */
trait SpaceTimeTileLayerRDDMethods {

  /** The space-time layer the stitch operations act upon. */
  val rdd: TileLayerRDD[SpaceTimeKey]

  /** Stitch together every tile belonging to the given temporal key. */
  def stitch(tk: TemporalKey): Raster[Tile] = {
    rdd.withContext {
      _.filter { case (key, value) => key.temporalKey == tk }
        .map { case (key, tile) => key.spatialKey -> tile }
    }.stitch
  }

  /**
   * Stitch for an optional date-time. The previous `dt.get` raised a bare
   * `NoSuchElementException` on `None`; this keeps the same exception type
   * but fails fast with a descriptive message.
   */
  def stitch(dt: Option[ZonedDateTime]): Raster[Tile] = dt match {
    case Some(d) => stitch(TemporalKey(d))
    case None    => throw new NoSuchElementException("stitch(dt): dt must be defined (was None)")
  }
} | pomadchin/geotrellis-integration-tests | src/main/scala/geotrellis/core/spark/SpaceTimeTileLayerRDDMethods.scala | Scala | apache-2.0 | 486 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import scala.reflect.ClassTag
import org.apache.spark._
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.serializer.Serializer
/** A partition of a ShuffledRDD; it carries no data, only its index. */
private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
  override val index: Int = idx
}
/**
* :: DeveloperApi ::
* The resulting RDD from a shuffle (e.g. repartitioning of data).
* @param prev the parent RDD.
* @param part the partitioner used to partition the RDD
* @tparam K the key class.
* @tparam V the value class.
* @tparam C the combiner class.
*/
// TODO: Make this return RDD[Product2[K, C]] or have some way to configure mutable pairs
@DeveloperApi
class ShuffledRDD[K: ClassTag, V: ClassTag, C: ClassTag](
    @transient var prev: RDD[_ <: Product2[K, V]],
    part: Partitioner)
  extends RDD[(K, C)](prev.context, Nil) {

  // Optional serializer override; when None, one is derived from the class
  // tags in getDependencies (K/C with map-side combine, K/V without).
  private var userSpecifiedSerializer: Option[Serializer] = None

  // Optional key ordering used for sorted shuffles.
  private var keyOrdering: Option[Ordering[K]] = None

  // Optional aggregator combining values of type V into combiners of type C.
  private var aggregator: Option[Aggregator[K, V, C]] = None

  // Whether to partially aggregate on the map side before shuffling.
  private var mapSideCombine: Boolean = false

  /** Set a serializer for this RDD's shuffle, or null to use the default (spark.serializer) */
  def setSerializer(serializer: Serializer): ShuffledRDD[K, V, C] = {
    this.userSpecifiedSerializer = Option(serializer)
    this
  }

  /** Set key ordering for RDD's shuffle. */
  def setKeyOrdering(keyOrdering: Ordering[K]): ShuffledRDD[K, V, C] = {
    this.keyOrdering = Option(keyOrdering)
    this
  }

  /** Set aggregator for RDD's shuffle. */
  def setAggregator(aggregator: Aggregator[K, V, C]): ShuffledRDD[K, V, C] = {
    this.aggregator = Option(aggregator)
    this
  }

  /** Set mapSideCombine flag for RDD's shuffle. */
  def setMapSideCombine(mapSideCombine: Boolean): ShuffledRDD[K, V, C] = {
    this.mapSideCombine = mapSideCombine
    this
  }

  // The single dependency is a ShuffleDependency on the parent RDD, carrying
  // the (possibly derived) serializer and the optional ordering/aggregator.
  override def getDependencies: Seq[Dependency[_]] = {
    val serializer = userSpecifiedSerializer.getOrElse {
      val serializerManager = SparkEnv.get.serializerManager
      if (mapSideCombine) {
        serializerManager.getSerializer(implicitly[ClassTag[K]], implicitly[ClassTag[C]])
      } else {
        serializerManager.getSerializer(implicitly[ClassTag[K]], implicitly[ClassTag[V]])
      }
    }
    List(new ShuffleDependency(prev, part, serializer, keyOrdering, aggregator, mapSideCombine))
  }

  override val partitioner = Some(part)

  // One ShuffledRDDPartition per output partition of the partitioner.
  override def getPartitions: Array[Partition] = {
    Array.tabulate[Partition](part.numPartitions)(i => new ShuffledRDDPartition(i))
  }

  // Ask the map output tracker where most of this partition's shuffle data lives.
  override protected def getPreferredLocations(partition: Partition): Seq[String] = {
    val tracker = SparkEnv.get.mapOutputTracker.asInstanceOf[MapOutputTrackerMaster]
    val dep = dependencies.head.asInstanceOf[ShuffleDependency[K, V, C]]
    tracker.getPreferredLocationsForShuffle(dep, partition.index)
  }

  // Read this partition's shuffle output via the shuffle manager.
  override def compute(split: Partition, context: TaskContext): Iterator[(K, C)] = {
    val dep = dependencies.head.asInstanceOf[ShuffleDependency[K, V, C]]
    val metrics = context.taskMetrics().createTempShuffleReadMetrics()
    SparkEnv.get.shuffleManager.getReader(
      dep.shuffleHandle, split.index, split.index + 1, context, metrics)
      .read()
      .asInstanceOf[Iterator[(K, C)]]
  }

  // Null out the parent reference so it can be garbage collected after
  // checkpointing severs the lineage.
  override def clearDependencies(): Unit = {
    super.clearDependencies()
    prev = null
  }

  // A shuffle boundary is never part of a barrier stage.
  private[spark] override def isBarrier(): Boolean = false
}
| shaneknapp/spark | core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala | Scala | apache-2.0 | 4,216 |
package com.scalableQuality.quick.mantle.parsing
import com.scalableQuality.quick.mantle.error.UnrecoverableError
import com.scalableQuality.quick.mantle.parsing.errorMessages.FixedRowIdentifierErrorMessages
/**
 * Identifies fixed-width rows by requiring that every configured column
 * identifier matches the given raw row.
 */
class FixedRowIdentifier(
    columnsIdentifiers: List[FixedColumnIdentifier]
) extends RowIdentifier {

  /**
   * True when every column identifier accepts `rawRow`; with no identifiers
   * configured, the default identification result is returned.
   * `forall` short-circuits on the first failing identifier instead of
   * evaluating all of them as the previous `foldLeft(_ && _)` did — the
   * result is unchanged since identifier application is a boolean predicate.
   */
  def canIdentify(rawRow: RawRow): Boolean = columnsIdentifiers match {
    case Nil => RowToRowDescriptionMatcher.defaultIdentificationResult
    case _   => columnsIdentifiers.forall(_(rawRow))
  }
}
object FixedRowIdentifier {

  /**
   * Builds a FixedRowIdentifier from the supplied column identifiers, or
   * yields the "no column identifier provided" error for an empty list.
   */
  def apply(
      columnIdentifiers: List[FixedColumnIdentifier]
  ): Either[UnrecoverableError, FixedRowIdentifier] =
    if (columnIdentifiers.isEmpty)
      FixedRowIdentifierErrorMessages.noColumnIdentifierIsProvided
    else
      Right(new FixedRowIdentifier(columnIdentifiers))
}
| MouslihAbdelhakim/Quick | src/main/scala/com/scalableQuality/quick/mantle/parsing/FixedRowIdentifier.scala | Scala | apache-2.0 | 1,006 |
package io.rout.generic
import cats.data.Xor
import com.twitter.io.Buf
import com.twitter.util.Try
import io.rout.{Decode, Encode, Error}
import io.rout.parse.xml._
import shapeless._
import scala.xml.XML
object xml {

  // Semi-automatic derivation entry points: callers explicitly request
  // derivation of XML codecs instead of getting them implicitly.
  object semiauto {

    /** Builder for application/xml encoders of T. */
    class Encoder[T]{
      // Wraps a ToXml instance as an Encode that renders the XML to a Buf.
      def encodeXml(implicit toXml: ToXml[T]): Encode.ApplicationXml[T] = Encode.xml(t =>
        Buf.Utf8(toXml.toXml(t).toString()))
      // Derives ToXml[T] generically via shapeless LabelledGeneric.
      def derive[LKV](implicit
        gen:LabelledGeneric.Aux[T,LKV],
        toXml: Lazy[ToXml.Wrap[LKV]]
      ): ToXml[T] = ToXml.deriveInstance(gen,toXml)
    }

    /** Builder for application/xml decoders of T. */
    class Decoder[T]{
      // Parses the input string as XML and decodes it; parse/decode failures
      // become a Left(Error), successes a Right(Try(value)).
      def decodeXml(implicit fromXml: FromXml[T]): Decode.ApplicationXml[String,T] = Decode.applicationXml(i =>
        fromXml.fromXml(XML.loadString(i)).fold[Xor[Error,Try[T]]](
          err => Xor.Left(Error(err.getMessage)),
          value => Xor.Right(Try(value))
        ))
      // Derives FromXml[T] generically via shapeless LabelledGeneric.
      def derive[LKV](implicit
        gen:LabelledGeneric.Aux[T,LKV],
        fromXml: Lazy[FromXml.Wrap[LKV]]
      ): FromXml[T] = FromXml.deriveInstance(gen,fromXml)
    }

    def decode[T] = new Decoder[T]
    def encode[T] = new Encoder[T]
  }
}
| teodimoff/rOut | xml/src/io/rout/xml/xml.scala | Scala | apache-2.0 | 1,076 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
import org.apache.flink.api.common.state.ValueStateDescriptor
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.common.state.ValueState
import org.apache.flink.table.codegen.{GeneratedAggregationsFunction, Compiler}
import org.slf4j.LoggerFactory
/**
* Process Function for processing-time unbounded OVER window
*
* @param genAggregations Generated aggregate helper function
* @param aggregationStateType row type info of aggregation
*/
/**
 * Process Function for processing-time unbounded OVER window.
 *
 * Keeps one accumulator row per key in keyed state and, for every input row,
 * emits the forwarded fields plus the aggregates over all rows seen so far.
 *
 * @param genAggregations Generated aggregate helper function
 * @param aggregationStateType row type info of the accumulator state
 */
class ProcTimeUnboundedPartitionedOver(
    genAggregations: GeneratedAggregationsFunction,
    aggregationStateType: RowTypeInfo)
  extends ProcessFunction[Row, Row]
    with Compiler[GeneratedAggregations] {

  // Reused output row: forwarded fields and aggregate results are written into it.
  private var output: Row = _
  // Keyed state holding the accumulators for the current key.
  private var state: ValueState[Row] = _
  val LOG = LoggerFactory.getLogger(this.getClass)
  private var function: GeneratedAggregations = _

  override def open(config: Configuration) {
    // Fixed: the messages previously interpolated `$genAggregations.name`,
    // which logged the object's toString followed by the literal ".name"/".code"
    // instead of the actual fields.
    LOG.debug(s"Compiling AggregateHelper: ${genAggregations.name} \\n\\n " +
      s"Code:\\n${genAggregations.code}")
    val clazz = compile(
      getRuntimeContext.getUserCodeClassLoader,
      genAggregations.name,
      genAggregations.code)
    LOG.debug("Instantiating AggregateHelper.")
    function = clazz.newInstance()

    output = function.createOutputRow()
    val stateDescriptor: ValueStateDescriptor[Row] =
      new ValueStateDescriptor[Row]("overState", aggregationStateType)
    state = getRuntimeContext.getState(stateDescriptor)
  }

  override def processElement(
      input: Row,
      ctx: ProcessFunction[Row, Row]#Context,
      out: Collector[Row]): Unit = {

    // Lazily create the accumulators on the first element for this key.
    var accumulators = state.value()
    if (null == accumulators) {
      accumulators = function.createAccumulators()
    }

    // Forward the pass-through fields, fold the input into the aggregates,
    // write the aggregate results and persist the updated accumulators.
    function.setForwardedFields(input, output)
    function.accumulate(accumulators, input)
    function.setAggregationResults(accumulators, output)
    state.update(accumulators)

    out.collect(output)
  }
}
| DieBauer/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/ProcTimeUnboundedPartitionedOver.scala | Scala | apache-2.0 | 2,989 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.nn.internal.{ZeroPadding3D => BigDLZeroPadding3D}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.Net
import com.intel.analytics.bigdl.dllib.keras.layers.utils.KerasUtils
import scala.reflect.ClassTag
/**
* Zero-padding layer for 3D data (spatial or spatio-temporal).
* The input of this layer should be 5D.
*
* When you use this layer as the first layer of a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
*
* @param padding Int array of length 3.
* How many zeros to add at the beginning and end of the 3 padding dimensions.
* Symmetric padding will be applied to each dimension. Default is (1, 1, 1).
* @param dimOrdering Format of the input data. Either "CHANNEL_FIRST" (dimOrdering='th') or
* "CHANNEL_LAST" (dimOrdering='tf'). Default is "CHANNEL_FIRST".
* @param inputShape A Single Shape, does not include the batch dimension.
* @tparam T The numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
// Thin Keras-style wrapper: all padding behaviour is inherited from the BigDL
// layer; mixing in Net adds no logic here.
class ZeroPadding3D[T: ClassTag](
    override val padding: Array[Int] = Array(1, 1, 1),
    override val dimOrdering: String = "CHANNEL_FIRST",
    override val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
  extends BigDLZeroPadding3D[T](padding, dimOrdering, inputShape) with Net {}
object ZeroPadding3D {

  /**
   * Convenience factory taking the padding as an Int triple ("th"/"tf"
   * dim-ordering strings are translated to the 5D BigDL formats).
   * A null padding is rejected up front.
   */
  def apply[@specialized(Float, Double) T: ClassTag](
      padding: (Int, Int, Int) = (1, 1, 1),
      dimOrdering: String = "th",
      inputShape: Shape = null)(implicit ev: TensorNumeric[T]) : ZeroPadding3D[T] = {
    if (padding == null) {
      throw new IllegalArgumentException("For ZeroPadding3D, " +
        "padding can not be null, please input int tuple of length 3")
    }
    val paddingArray = Array(padding._1, padding._2, padding._3)
    new ZeroPadding3D[T](paddingArray, KerasUtils.toBigDLFormat5D(dimOrdering), inputShape)
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/layers/ZeroPadding3D.scala | Scala | apache-2.0 | 2,760 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.Inside._
import org.scalatest.SharedHelpers.thisLineNumber
import Matchers._
import OptionValues._
/* Uncomment after remove type aliases in org.scalatest package object
import org.scalatest.exceptions.TestFailedException
*/
/** Specification for the `inside` construct, covering both the success path
  * and the failure messages produced when the partial function is undefined
  * or an inner matcher fails (including nested `inside` clauses).
  */
class InsideSpec extends FunSpec {

  // Fixture types: a nested record so `inside` can be exercised at two levels.
  case class Address(street: String, city: String, state: String, zip: String)
  case class Name(first: String, middle: String, last: String)
  case class Record(name: Name, address: Address, age: Int)

  describe("The inside construct") {

    val rec = Record(
      Name("Sally", "Mary", "Jones"),
      Address("123 Main St", "Bluesville", "KY", "12345"),
      29
    )

    it("should return normally when nested properties are inspected with matcher expressions that all succeed") {
      inside (rec) { case Record(name, address, age) =>
        inside (name) { case Name(first, middle, last) =>
          first should be ("Sally")
          middle should startWith ("Ma")
          last should endWith ("nes")
        }
        inside (address) { case Address(street, city, state, zip) =>
          street should be ("123 Main St")
          city should be ("Bluesville")
          state.toLowerCase should be ("ky")
          zip should be ("12345")
        }
        age should be >= 21
      }
    }

    it("should throw a TFE when the partial function isn't defined at the passed value") {
      // The pattern requires age == 99, which rec does not satisfy.
      val caught = evaluating {
        inside (rec) { case Record(name, _, 99) =>
          name.first should be ("Sally")
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionNotDefined", rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 5)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }

    it("should include an inside clause when a matcher fails inside") {
      val caught = evaluating {
        inside (rec) { case Record(_, _, age) =>
          age should be <= 21
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendSomeMsg", Resources("wasNotLessThanOrEqualTo", "29", "21"), "", rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 4)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }

    it("should include a nested inside clause when a matcher fails inside a nested inside") {
      val caught = evaluating {
        inside (rec) { case Record(name, _, _) =>
          inside (name) { case Name(first, _, _) =>
            first should be ("Harry")
          }
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendSomeMsg", Resources("insidePartialFunctionAppendSomeMsg", Resources("wasNotEqualTo", "\\"[Sall]y\\"", "\\"[Harr]y\\""), " ", rec.name.toString), "", rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 5)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }

    it("should throw a TFE when matcher fails inside due to exception") {
      val caught = evaluating {
        inside (rec) { case Record(name, address, age) =>
          throw new TestFailedException(None, None, 0)
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendNone", "", rec))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 4)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }

    it("should include a nested inside clause when a matcher fails inside due to exception") {
      val caught = evaluating {
        inside (rec) { case Record(name, _, _) =>
          inside (name) { case Name(first, _, _) =>
            throw new TestFailedException(None, None, 0)
          }
        }
      } should produce [TestFailedException]
      caught.message.value should be (Resources("insidePartialFunctionAppendSomeMsg", Resources("insidePartialFunctionAppendNone", " ", rec.name), "", rec.toString))
      caught.failedCodeLineNumber.value should equal (thisLineNumber - 5)
      caught.failedCodeFileName.value should be ("InsideSpec.scala")
    }
  }
}
| travisbrown/scalatest | src/test/scala/org/scalatest/InsideSpec.scala | Scala | apache-2.0 | 4,898 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.plugin.output.print
import java.io.{Serializable => JSerializable}
import com.stratio.sparta.sdk._
import org.apache.spark.Logging
import org.apache.spark.sql._
/**
* This output prints all AggregateOperations or DataFrames information on screen. Very useful to debug.
*
* @param keyName
* @param properties
* @param schemas
*/
class PrintOutput(keyName: String,
                  version: Option[Int],
                  properties: Map[String, JSerializable],
                  schemas: Seq[TableSchema])
  extends Output(keyName, version, properties, schemas) with Logging {

  /**
   * Logs the dataframe: table metadata, row count and schema at debug level,
   * then one info line per row with comma-separated fields.
   * NOTE(review): `log.info` runs inside `foreach` on the distributed
   * dataset, so row output presumably lands in executor logs rather than the
   * driver's — confirm when debugging with this output.
   */
  override def upsert(dataFrame: DataFrame, options: Map[String, String]): Unit = {
    if (log.isDebugEnabled) {
      log.debug(s"> Table name : ${Output.getTableNameFromOptions(options)}")
      log.debug(s"> Time dimension : ${Output.getTimeFromOptions(options)}")
      log.debug(s"> Version policy : $version")
      // count() triggers a full pass over the data; only paid when debug is on.
      log.debug(s"> Data frame count : " + dataFrame.count())
      log.debug(s"> DataFrame schema")
      dataFrame.printSchema()
    }
    dataFrame.foreach(row => log.info(row.mkString(",")))
  }
}
| danielcsant/sparta | plugins/src/main/scala/com/stratio/sparta/plugin/output/print/PrintOutput.scala | Scala | apache-2.0 | 1,767 |
package com.tam.cobol_interpreter
import java.io._
import com.tam.cobol_interpreter.parser.ParserFactory
import com.tam.cobol_interpreter.parser.schema.ParserSchema
import com.tam.cobol_interpreter.writer.WriterFactory
import com.tam.cobol_interpreter.writer.schema.WriterSchema
/**
* Created by tamu on 1/6/15.
*/
/**
 * Drives one interpretation run: parses the input stream according to the
 * parser schema and writes the result to the output stream according to the
 * writer schema. The fluent setters configure the underlying writer factory
 * and return this instance so calls can be chained.
 */
class Interpreter(parserSchema: ParserSchema, writerSchema: WriterSchema) {

  val writerFactory = new WriterFactory()

  def setQuote(q: Char): Interpreter = {
    writerFactory.setQuote(q)
    this
  }

  def setTerminator(q: Char): Interpreter = {
    writerFactory.setTerminator(q)
    this
  }

  def setSeparator(q: Char): Interpreter = {
    writerFactory.setSeparator(q)
    this
  }

  def setWriteHeader(b: Boolean): Interpreter = {
    writerFactory.setWriteHeader(b)
    this
  }

  def interpret(fIn: InputStream, fOut: OutputStream): Unit = {
    val recordParser = ParserFactory.createParser(parserSchema, fIn)
    val recordWriter = writerFactory.createWriter(writerSchema, fOut)
    recordWriter.write(recordParser)
  }
}
object Interpreter {

  /**
   * Command line entry point. All accepted shapes start with the parser and
   * writer schema locations:
   *  - 2 args: read stdin, write stdout
   *  - 3 args: read the given input file, write stdout
   *  - 4 args: read the given input file, write the given output file
   * Any other arity prints the usage message. The previously-triplicated
   * interpreter setup is now shared across the three valid arities.
   */
  def main(args: Array[String]): Unit = {
    args.length match {
      case 2 | 3 | 4 =>
        val parserSchemaFile = new File(args(0))
        val writerSchemaFile = new File(args(1))
        val interpreter = InterpreterFactory.createInterpreter(parserSchemaFile, writerSchemaFile)
        interpreter.setWriteHeader(true)
        // The optional third argument selects the input file, the fourth the output file.
        val input: InputStream = if (args.length >= 3) new FileInputStream(args(2)) else System.in
        val output: OutputStream = if (args.length == 4) new FileOutputStream(args(3)) else System.out
        interpreter.interpret(input, output)
      case _ =>
        val usage =
          """
            |interpreter [Parser Schema Location] [Writer Schema Location]
            | Reads from stdin and writes to stdout
            |
            |interpreter [Parser Schema Location] [Writer Schema Location] [Input File Location]
            | Writes to stdout
            |
            |interpreter [Parser Schema Location] [Writer Schema Location] [Input File Location] [Output File Location]
          """.stripMargin.trim()
        System.out.print(usage + "\\n")
    }
  }
}
| tamsanh/scala-cobol-interpreter | src/main/scala/com/tam/cobol_interpreter/Interpreter.scala | Scala | apache-2.0 | 2,585 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.kudu
/**
* Created by andrew@datamountaineer.com on 24/02/16.
* stream-reactor
*/
import java.nio.ByteBuffer
import java.util
import com.datamountaineer.streamreactor.connect.kudu.config.KuduConfigConstants
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.record.TimestampType
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.apache.kudu.ColumnSchema.ColumnSchemaBuilder
import org.apache.kudu.{ColumnSchema, Type}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
 * Shared fixture trait for the Kudu sink connector tests: provides connector
 * configuration maps, matching Connect/Kudu schema pairs, and helpers that
 * build test records and sink records.
 */
trait TestBase extends AnyWordSpec with BeforeAndAfter with Matchers {
  val TOPIC = "sink_test"
  val TABLE = "table1"
  val KUDU_MASTER = "127.0.0.1"
  // KCQL mappings exercised by the tests: plain insert, auto-create, and
  // auto-create + auto-evolve variants.
  val KCQL_MAP = s"INSERT INTO $TABLE SELECT * FROM $TOPIC"
  val KCQL_MAP_AUTOCREATE = KCQL_MAP + " AUTOCREATE DISTRIBUTEBY name,adult INTO 10 BUCKETS"
  val KCQL_MAP_AUTOCREATE_AUTOEVOLVE = KCQL_MAP + " AUTOCREATE AUTOEVOLVE DISTRIBUTEBY name,adult INTO 10 BUCKETS"
  // Avro "Person" record schema as JSON text.
  // NOTE(review): the stray double quote after "]}" makes this JSON invalid --
  // confirm whether any test actually parses this string.
  val schema =
    """
      |{ "type": "record",
      |"name": "Person",
      |"namespace": "com.datamountaineer",
      |"fields": [
      |{ "name": "name", "type": "string"},
      |{ "name": "adult", "type": "boolean"},
      |{ "name": "integer8", "type": "int"},
      |{ "name": "integer16", "type": "int"},
      |{ "name": "integer32", "type": "long"},
      |{ "name": "integer64", "type": "long"},
      |{ "name": "float32", "type": "float"},
      |{ "name": "float64", "type": "double"}
      |]}"
    """.stripMargin
  // Same record but with float64 nullable and defaulted (schema-evolution case).
  val schemaDefaults =
    """
      |{ "type": "record",
      |"name": "Person",
      |"namespace": "com.datamountaineer",
      |"fields": [
      |{ "name": "name", "type": "string"},
      |{ "name": "adult", "type": "boolean"},
      |{ "name": "integer8", "type": "int"},
      |{ "name": "integer16", "type": "int"},
      |{ "name": "integer32", "type": "long"},
      |{ "name": "integer64", "type": "long"},
      |{ "name": "float32", "type": "float"},
      |{ "name": "float64", "type": ["double", "null"], "default" : 10.0}
      |]}"
    """.stripMargin
  // Two partitions of the same topic assigned to the sink task under test.
  protected val PARTITION: Int = 12
  protected val PARTITION2: Int = 13
  protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
  protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
  protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
  //Set topic assignments
  ASSIGNMENT.add(TOPIC_PARTITION)
  ASSIGNMENT.add(TOPIC_PARTITION2)
  // No per-test setup/teardown needed; hooks kept for subclasses to rely on.
  before {
  }
  after {
  }
  // Minimal sink configuration: plain insert with the THROW error policy.
  def getConfig = {
    Map(
      "topics" -> TOPIC,
      KuduConfigConstants.KUDU_MASTER -> KUDU_MASTER,
      KuduConfigConstants.KCQL -> KCQL_MAP,
      KuduConfigConstants.ERROR_POLICY -> "THROW"
    ).asJava
  }
  // Auto-create configuration pointing at the given schema registry url.
  def getConfigAutoCreate(url: String) = {
    Map(KuduConfigConstants.KUDU_MASTER -> KUDU_MASTER,
      KuduConfigConstants.KCQL -> KCQL_MAP_AUTOCREATE,
      KuduConfigConstants.ERROR_POLICY -> "THROW",
      KuduConfigConstants.SCHEMA_REGISTRY_URL -> url
    ).asJava
  }
  // Auto-create + auto-evolve configuration.
  def getConfigAutoCreateAndEvolve(url: String) = {
    Map(KuduConfigConstants.KUDU_MASTER -> KUDU_MASTER,
      KuduConfigConstants.KCQL -> KCQL_MAP_AUTOCREATE_AUTOEVOLVE,
      KuduConfigConstants.ERROR_POLICY -> "THROW",
      KuduConfigConstants.SCHEMA_REGISTRY_URL -> url
    ).asJava
  }
  // Auto-create configuration with the RETRY error policy.
  def getConfigAutoCreateRetry(url: String) = {
    Map(KuduConfigConstants.KUDU_MASTER -> KUDU_MASTER,
      KuduConfigConstants.KCQL -> KCQL_MAP_AUTOCREATE,
      KuduConfigConstants.ERROR_POLICY -> "RETRY",
      KuduConfigConstants.SCHEMA_REGISTRY_URL -> url
    ).asJava
  }
  // As above, but additionally flushing writes in background batches.
  def getConfigAutoCreateRetryWithBackgroundFlush(url: String) = {
    Map(KuduConfigConstants.KUDU_MASTER -> KUDU_MASTER,
      KuduConfigConstants.KCQL -> KCQL_MAP_AUTOCREATE,
      KuduConfigConstants.ERROR_POLICY -> "RETRY",
      KuduConfigConstants.SCHEMA_REGISTRY_URL -> url,
      KuduConfigConstants.WRITE_FLUSH_MODE -> "BATCH_BACKGROUND"
    ).asJava
  }
  // Connect schema with one field per primitive type (v1).
  def createSchema2: Schema = {
    SchemaBuilder.struct.name("record")
      .version(1)
      .field("id", Schema.STRING_SCHEMA)
      .field("int_field", Schema.INT32_SCHEMA)
      .field("long_field", Schema.INT64_SCHEMA)
      .field("string_field", Schema.STRING_SCHEMA)
      .field("float_field", Schema.FLOAT32_SCHEMA)
      .field("float64_field", Schema.FLOAT64_SCHEMA)
      .field("boolean_field", Schema.BOOLEAN_SCHEMA)
      .field("int64_field", Schema.INT64_SCHEMA)
      .build
  }
  // Kudu-side equivalent of createSchema2; "id" is the key column and
  // "int64_field" carries a default of 20.
  def createKuduSchema2: org.apache.kudu.Schema = {
    val columns = new util.ArrayList[ColumnSchema]
    val idField = new ColumnSchema.ColumnSchemaBuilder("id", Type.STRING).key(true).build
    columns.add(idField)
    val intField = new ColumnSchema.ColumnSchemaBuilder("int_field", Type.INT32).build
    columns.add(intField)
    val longField = new ColumnSchema.ColumnSchemaBuilder("long_field", Type.INT64).build
    columns.add(longField)
    val stringField = new ColumnSchema.ColumnSchemaBuilder("string_field", Type.STRING).build
    columns.add(stringField)
    val floatField = new ColumnSchema.ColumnSchemaBuilder("float_field", Type.FLOAT).build
    columns.add(floatField)
    val float64Field = new ColumnSchema.ColumnSchemaBuilder("float64_field", Type.DOUBLE).build
    columns.add(float64Field)
    val booleanField = new ColumnSchema.ColumnSchemaBuilder("boolean_field", Type.BOOL).build
    columns.add(booleanField)
    val int64Field = new ColumnSchema.ColumnSchemaBuilder("int64_field", Type.INT64).defaultValue(20.toLong).build
    columns.add(int64Field)
    new org.apache.kudu.Schema(columns)
  }
  // createSchema2 plus an extra string column (simulates evolution).
  def createSchema3: Schema = {
    SchemaBuilder.struct.name("record")
      .version(1)
      .field("id", Schema.STRING_SCHEMA)
      .field("int_field", Schema.INT32_SCHEMA)
      .field("long_field", Schema.INT64_SCHEMA)
      .field("string_field", Schema.STRING_SCHEMA)
      .field("float_field", Schema.FLOAT32_SCHEMA)
      .field("float64_field", Schema.FLOAT64_SCHEMA)
      .field("boolean_field", Schema.BOOLEAN_SCHEMA)
      .field("int64_field", Schema.INT64_SCHEMA)
      .field("new_field", Schema.STRING_SCHEMA)
      .build
  }
  // createSchema2 plus a bytes column and a defaulted int64 column.
  def createSchema4: Schema = {
    SchemaBuilder.struct.name("record")
      .version(1)
      .field("id", Schema.STRING_SCHEMA)
      .field("int_field", Schema.INT32_SCHEMA)
      .field("long_field", Schema.INT64_SCHEMA)
      .field("string_field", Schema.STRING_SCHEMA)
      .field("float_field", Schema.FLOAT32_SCHEMA)
      .field("float64_field", Schema.FLOAT64_SCHEMA)
      .field("boolean_field", Schema.BOOLEAN_SCHEMA)
      .field("byte_field", Schema.BYTES_SCHEMA)
      .field("int64_field", SchemaBuilder.int64().defaultValue(20.toLong).build())
      .build
  }
  // Kudu-side equivalent of createSchema4.
  def createKuduSchema4: org.apache.kudu.Schema = {
    val columns = new util.ArrayList[ColumnSchema]
    val idField = new ColumnSchema.ColumnSchemaBuilder("id", Type.STRING).key(true).build
    columns.add(idField)
    val intField = new ColumnSchema.ColumnSchemaBuilder("int_field", Type.INT32).build
    columns.add(intField)
    val longField = new ColumnSchema.ColumnSchemaBuilder("long_field", Type.INT64).build
    columns.add(longField)
    val stringField = new ColumnSchema.ColumnSchemaBuilder("string_field", Type.STRING).build
    columns.add(stringField)
    val floatField = new ColumnSchema.ColumnSchemaBuilder("float_field", Type.FLOAT).build
    columns.add(floatField)
    val float64Field = new ColumnSchema.ColumnSchemaBuilder("float64_field", Type.DOUBLE).build
    columns.add(float64Field)
    val booleanField = new ColumnSchema.ColumnSchemaBuilder("boolean_field", Type.BOOL).build
    columns.add(booleanField)
    val byteField = new ColumnSchema.ColumnSchemaBuilder("byte_field", Type.BINARY).build
    columns.add(byteField)
    val int64Field = new ColumnSchema.ColumnSchemaBuilder("int64_field", Type.INT64).defaultValue(20.toLong).build
    columns.add(int64Field)
    new org.apache.kudu.Schema(columns)
  }
  // createSchema4 bumped to version 2 with an extra defaulted string column.
  def createSchema5: Schema = {
    SchemaBuilder.struct.name("record")
      .version(2)
      .field("id", Schema.STRING_SCHEMA)
      .field("int_field", Schema.INT32_SCHEMA)
      .field("long_field", Schema.INT64_SCHEMA)
      .field("string_field", Schema.STRING_SCHEMA)
      .field("float_field", Schema.FLOAT32_SCHEMA)
      .field("float64_field", Schema.FLOAT64_SCHEMA)
      .field("boolean_field", Schema.BOOLEAN_SCHEMA)
      .field("byte_field", Schema.BYTES_SCHEMA)
      .field("int64_field", SchemaBuilder.int64().defaultValue(20.toLong).build())
      .field("new_field", SchemaBuilder.string().defaultValue("").build())
      .build
  }
  // Kudu-side equivalent of createSchema5.
  def createKuduSchema5: org.apache.kudu.Schema = {
    val columns = new util.ArrayList[ColumnSchema]
    val idField = new ColumnSchema.ColumnSchemaBuilder("id", Type.STRING).key(true).build
    columns.add(idField)
    val intField = new ColumnSchema.ColumnSchemaBuilder("int_field", Type.INT32).build
    columns.add(intField)
    val longField = new ColumnSchema.ColumnSchemaBuilder("long_field", Type.INT64).build
    columns.add(longField)
    val stringField = new ColumnSchema.ColumnSchemaBuilder("string_field", Type.STRING).build
    columns.add(stringField)
    val floatField = new ColumnSchema.ColumnSchemaBuilder("float_field", Type.FLOAT).build
    columns.add(floatField)
    val float64Field = new ColumnSchema.ColumnSchemaBuilder("float64_field", Type.DOUBLE).build
    columns.add(float64Field)
    val booleanField = new ColumnSchema.ColumnSchemaBuilder("boolean_field", Type.BOOL).build
    columns.add(booleanField)
    val byteField = new ColumnSchema.ColumnSchemaBuilder("byte_field", Type.BINARY).build
    columns.add(byteField)
    val int64Field = new ColumnSchema.ColumnSchemaBuilder("int64_field", Type.INT64).defaultValue(20.toLong).build
    columns.add(int64Field)
    val newField = new ColumnSchema.ColumnSchemaBuilder("new_field", Type.STRING).defaultValue("").build
    columns.add(newField)
    new org.apache.kudu.Schema(columns)
  }
  //build a test record matching createSchema5
  def createRecord5(schema: Schema, id: String): Struct = {
    new Struct(schema)
      .put("id", id)
      .put("int_field", 12)
      .put("long_field", 12L)
      .put("string_field", "foo")
      .put("float_field", 0.1.toFloat)
      .put("float64_field", 0.199999)
      .put("boolean_field", true)
      .put("byte_field", ByteBuffer.wrap("bytes".getBytes))
      .put("int64_field", 12L)
      .put("new_field", "teststring")
  }
  // Wraps a struct into a SinkRecord on partition 1 with a fixed string key.
  def createSinkRecord(record: Struct, topic: String, offset: Long) = {
    new SinkRecord(topic, 1, Schema.STRING_SCHEMA, "key", record.schema(), record, offset, System.currentTimeMillis(), TimestampType.CREATE_TIME)
  }
  //generate one test record per assigned topic partition
  def getTestRecords: Set[SinkRecord] = {
    val schema = createSchema
    val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
    assignment.flatMap(a => {
      (1 to 1).map(i => {
        // record id encodes topic, partition and sequence number
        val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
        new SinkRecord(a.topic(), a.partition(), Schema.STRING_SCHEMA, "key", schema, record, i, System.currentTimeMillis(), TimestampType.CREATE_TIME)
      })
    }).toSet
  }
  //get the assignment of topic partitions for the sinkTask
  def getAssignment: util.Set[TopicPartition] = {
    ASSIGNMENT
  }
  //build a test record schema, including an optional logical Decimal field
  def createSchema: Schema = {
    import org.apache.kafka.connect.data._
    val o = SchemaBuilder.bytes()
      .name(Decimal.LOGICAL_NAME)
      .optional()
      .parameter("connect.decimal.precision", "18")
      .parameter("scale", "4")
      .build()
    SchemaBuilder.struct.name("record")
      .version(1)
      .field("id", Schema.STRING_SCHEMA)
      .field("int_field", Schema.INT32_SCHEMA)
      .field("long_field", Schema.INT64_SCHEMA)
      .field("string_field", Schema.STRING_SCHEMA)
      .field("float_field", Schema.FLOAT32_SCHEMA)
      .field("float64_field", Schema.FLOAT64_SCHEMA)
      .field("boolean_field", Schema.BOOLEAN_SCHEMA)
      .field("byte_field", Schema.BYTES_SCHEMA)
      .field("optional", o)
      .build
  }
  // Kudu-side equivalent of createSchema (without the optional decimal field).
  def createKuduSchema: org.apache.kudu.Schema = {
    val columns = new util.ArrayList[ColumnSchema]
    val idField = new ColumnSchemaBuilder("id", Type.STRING).key(true).build()
    columns.add(idField)
    val intField = new ColumnSchemaBuilder("int_field", Type.INT32).build()
    columns.add(intField)
    val longField = new ColumnSchemaBuilder("long_field", Type.INT64).build()
    columns.add(longField)
    val stringField = new ColumnSchemaBuilder("string_field", Type.STRING).build()
    columns.add(stringField)
    val floatField = new ColumnSchemaBuilder("float_field", Type.FLOAT).build()
    columns.add(floatField)
    val float64Field = new ColumnSchemaBuilder("float64_field", Type.DOUBLE).build()
    columns.add(float64Field)
    val booleanField = new ColumnSchemaBuilder("boolean_field", Type.BOOL).build()
    columns.add(booleanField)
    val byteField = new ColumnSchemaBuilder("byte_field", Type.BINARY).build()
    columns.add(byteField)
    new org.apache.kudu.Schema(columns)
  }
  //build a test record (the "optional" decimal field is deliberately unset)
  def createRecord(schema: Schema, id: String): Struct = {
    new Struct(schema)
      .put("id", id)
      .put("int_field", 12)
      .put("long_field", 12L)
      .put("string_field", "foo")
      .put("float_field", 0.1.toFloat)
      .put("float64_field", 0.199999)
      .put("boolean_field", true)
      .put("byte_field", ByteBuffer.wrap("bytes".getBytes))
  }
}
| datamountaineer/stream-reactor | kafka-connect-kudu/src/test/scala/com/datamountaineer/streamreactor/connect/kudu/TestBase.scala | Scala | apache-2.0 | 14,474 |
package TAPLcomp2
import TAPLcomp2.Print._
import TAPLcomp2.arith.{ArithParsers, ArithPrinter}
import TAPLcomp2.bot.{BotParsers, BotPrinter}
import TAPLcomp2.equirec.{EquiRecParsers, EquiRecPrinter}
import TAPLcomp2.fullequirec.{FullEquiRecParsers, FullEquiRecPrinter}
import TAPLcomp2.fullerror.{FullErrorParsers, FullErrorPrinter}
import TAPLcomp2.fullisorec.{FullIsoRecParsers, FullIsoRecPrinter}
import TAPLcomp2.fullomega.{FullOmegaParsers, FullOmegaPrinter}
import TAPLcomp2.fullpoly.{FullPolyParsers, FullPolyPrinter}
import TAPLcomp2.fullrecon.{FullReconParsers, FullReconPrinter}
import TAPLcomp2.fullref.{FullRefParsers, FullRefPrinter}
import TAPLcomp2.fullsimple.{FullSimpleParsers, FullSimplePrinter}
import TAPLcomp2.fullsub.{FullSubParsers, FullSubPrinter}
import TAPLcomp2.fulluntyped.{FullUntypedParsers, FullUntypedPrinter}
import TAPLcomp2.rcdsubbot.{RcdSubBotParsers, RcdSubBotPrinter}
import TAPLcomp2.recon.{ReconParsers, ReconPrinter}
import TAPLcomp2.simplebool.{SimpleBoolParsers, SimpleBoolPrinter}
import TAPLcomp2.tyarith.{TyArithParsers, TyArithPrinter}
import TAPLcomp2.untyped.{UntypedParsers, UntypedPrinter}
import scala.io.Source
import scala.text.Document
/**
 * Benchmark/driver for the TAPL parsers: parses and pretty-prints every line
 * of the example file matching the calculus named on the command line, then
 * reports the elapsed time in milliseconds.
 */
object Test {

  /** Applies `f` to every line of examples/<name>.tapl and prints elapsed ms. */
  def runTest(name: String, f: => (String => Unit)): Unit = {
    val path = "examples/" + name + ".tapl"
    val exampleLines: List[String] = Source.fromFile(path).getLines().toList
    val started = System.nanoTime()
    exampleLines.foreach(f)
    val finished = System.nanoTime()
    // nanoseconds -> milliseconds
    println((finished - started) / 1000000)
  }

  /** Parses one input line and pretty-prints the result at 60 columns. */
  def parseAndPrint[E](parse: String => E, print: E => Document)(inp: String): Unit = {
    val parsed = parse(inp)
    println(printDoc(print(parsed), 60))
  }

  def main(args: Array[String]): Unit = {
    if (args.isEmpty) sys.error("No argument")
    val name = args(0).toLowerCase
    // Dispatch on the calculus name to the matching parser/printer pair.
    val fn: String => Unit = name match {
      case "arith" => parseAndPrint(ArithParsers.input, ArithPrinter.ptm)
      case "untyped" => parseAndPrint(UntypedParsers.input, UntypedPrinter.ptm)
      case "fulluntyped" => parseAndPrint(FullUntypedParsers.input, FullUntypedPrinter.ptm)
      case "tyarith" => parseAndPrint(TyArithParsers.input, TyArithPrinter.ptm)
      case "simplebool" => parseAndPrint(SimpleBoolParsers.input, SimpleBoolPrinter.ptm)
      case "fullsimple" => parseAndPrint(FullSimpleParsers.input, FullSimplePrinter.ptm)
      case "bot" => parseAndPrint(BotParsers.input, BotPrinter.ptm)
      case "fullref" => parseAndPrint(FullRefParsers.input, FullRefPrinter.ptm)
      case "fullerror" => parseAndPrint(FullErrorParsers.input, FullErrorPrinter.ptm)
      case "rcdsubbot" => parseAndPrint(RcdSubBotParsers.input, RcdSubBotPrinter.ptm)
      case "fullsub" => parseAndPrint(FullSubParsers.input, FullSubPrinter.ptm)
      case "fullequirec" => parseAndPrint(FullEquiRecParsers.input, FullEquiRecPrinter.ptm)
      case "fullisorec" => parseAndPrint(FullIsoRecParsers.input, FullIsoRecPrinter.ptm)
      case "equirec" => parseAndPrint(EquiRecParsers.input, EquiRecPrinter.ptm)
      case "recon" => parseAndPrint(ReconParsers.input, ReconPrinter.ptm)
      case "fullrecon" => parseAndPrint(FullReconParsers.input, FullReconPrinter.ptm)
      case "fullpoly" => parseAndPrint(FullPolyParsers.input, FullPolyPrinter.ptm)
      case "fullomega" => parseAndPrint(FullOmegaParsers.input, FullOmegaPrinter.ptm)
      case _ => sys.error("Incorrect name")
    }
    runTest(name, fn)
  }
}
| hy-zhang/parser | Scala/Parser/src/TAPLcomp2/Test.scala | Scala | bsd-3-clause | 3,501 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model
import org.openapitools.client.core.ApiModel
/**
 * Generated Jenkins API model: a build parameter carrying a string value.
 *
 * @param `class` presumably the Jenkins "class" type discriminator from the
 *                JSON payload -- TODO confirm against the API response
 * @param name    parameter name
 * @param value   parameter value
 */
case class StringParameterValue (
  `class`: Option[String] = None,
  name: Option[String] = None,
  value: Option[String] = None
) extends ApiModel
| cliffano/swaggy-jenkins | clients/scala-akka/generated/src/main/scala/org/openapitools/client/model/StringParameterValue.scala | Scala | mit | 582 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct
import scala.math.BigDecimal.RoundingMode
/**
 * Helpers that collapse a BigDecimal to a whole-number Int.
 *
 * RoundingMode.UP rounds away from zero and RoundingMode.DOWN rounds towards
 * zero, so for negative inputs "up" increases the magnitude.
 */
object RoundingFunctions {

  /** Rounds away from zero, e.g. 1.1 -> 2 and -1.1 -> -2. */
  def roundUpToInt(bigDecimal: BigDecimal): Int =
    bigDecimal.setScale(0, RoundingMode.UP).intValue

  /** Rounds towards zero, e.g. 1.9 -> 1 and -1.9 -> -1. */
  def roundDownToInt(bigDecimal: BigDecimal): Int =
    bigDecimal.setScale(0, RoundingMode.DOWN).intValue
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/RoundingFunctions.scala | Scala | apache-2.0 | 920 |
package slick.test.jdbc
import java.io.PrintWriter
import java.sql.{Connection, Driver, DriverPropertyInfo, SQLException}
import java.util.Properties
import java.util.logging.Logger
import javax.sql.DataSource
import com.typesafe.config.ConfigFactory
import org.junit.Test
import org.junit.Assert._
import slick.basic.DatabaseConfig
import slick.jdbc.{JdbcBackend, JdbcProfile}
import scala.concurrent.Await
import scala.concurrent.duration.Duration
/**
 * Tests for the ways a Slick Database can be built from DataSource-style
 * configuration: JdbcDataSource wrappers, direct DataSource instances and
 * "databaseUrl" configs handled by DatabaseUrlDataSource.
 */
class DataSourceTest {
  // Config "ds1": Slick-managed JdbcDataSource; appears to run against H2
  // (lock_mode() is an H2 function) -- the query doubles as a liveness check.
  @Test def testDataSourceJdbcDataSource: Unit = {
    val dc = DatabaseConfig.forConfig[JdbcProfile]("ds1")
    import dc.profile.api._
    try {
      assertEquals(1, Await.result(dc.db.run(sql"select lock_mode()".as[Int].head), Duration.Inf))
    } finally dc.db.close
  }
  // Config "ds2": a DataSource instance supplied directly.
  @Test def testDirectDataSource: Unit = {
    val dc = DatabaseConfig.forConfig[JdbcProfile]("ds2")
    import dc.profile.api._
    try {
      assertEquals(2, Await.result(dc.db.run(sql"select lock_mode()".as[Int].head), Duration.Inf))
    } finally dc.db.close
  }
  // A "postgres://user:pass@host/dbname" style url should be expanded into a
  // JDBC url plus credentials/properties, all captured by MockDriver.
  @Test def testDatabaseUrlDataSource: Unit = {
    import slick.jdbc.H2Profile.api.actionBasedSQLInterpolation
    MockDriver.reset
    val db = JdbcBackend.Database.forConfig("databaseUrl")
    try {
      assertEquals(Some(100), db.source.maxConnections)
      // the query is expected to fail -- we only need the driver to record
      // the connection attempt
      try Await.result(db.run(sqlu"dummy"), Duration.Inf) catch { case ex: SQLException => }
      val (url, info) = MockDriver.getLast.getOrElse(fail("No connection data recorded").asInstanceOf[Nothing])
      assertEquals("jdbc:postgresql://host/dbname", url)
      assertEquals("user", info.getProperty("user"))
      assertEquals("pass", info.getProperty("password"))
      assertEquals("bar", info.getProperty("foo"))
    } finally db.close
  }
  // An explicit maxConnections setting must be honoured as-is.
  @Test def testMaxConnections: Unit = {
    MockDriver.reset
    val db = JdbcBackend.Database.forConfig("databaseUrl", ConfigFactory.parseString(
      """
        |databaseUrl {
        |  dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
        |  maxConnections = 20
        |  url = "postgres://user:pass@host/dbname"
        |}
        |""".stripMargin))
    try {
      assertEquals("maxConnections should be respected", Some(20), db.source.maxConnections)
    } finally db.close
  }
  // Without maxConnections, the pool size is derived from numThreads.
  @Test def testMaxConnectionsNumThreads: Unit = {
    MockDriver.reset
    val db = JdbcBackend.Database.forConfig("databaseUrl", ConfigFactory.parseString(
      """
        |databaseUrl {
        |  dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
        |  numThreads = 10
        |  url = "postgres://user:pass@host/dbname"
        |}
        |""".stripMargin
    ))
    try {
      assertEquals("maxConnections should be numThreads * 5", Some(50), db.source.maxConnections)
    } finally db.close
  }
  // With connection pooling disabled there is no connection limit to report.
  @Test def testConnectionPoolDisabled: Unit = {
    MockDriver.reset
    val db = JdbcBackend.Database.forConfig("databaseUrl", ConfigFactory.parseString(
      """
        |databaseUrl {
        |  dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
        |  connectionPool = "disabled"
        |  url = "postgres://user:pass@host/dbname"
        |}
        |
      """.stripMargin))
    try {
      assertEquals("maxConnections should be None when not using a pool", None, db.source.maxConnections)
    } finally db.close
  }
}
/**
 * Companion recording the most recent connection attempt made through a
 * [[MockDriver]], so tests can inspect the url and properties it received.
 */
object MockDriver {
  @volatile private var recorded: Option[(String, Properties)] = None
  /** Last (url, properties) pair passed to `connect`, if any. */
  def getLast = recorded
  /** Forgets any previously recorded connection attempt. */
  def reset: Unit = recorded = None
}

/**
 * JDBC driver stub: accepts every url, records the connection data in the
 * companion object, then fails -- no real connection is ever opened.
 */
class MockDriver extends Driver {
  def connect(url: String, info: Properties): Connection = {
    MockDriver.recorded = Some((url, info))
    throw new SQLException("Connection data has been recorded")
  }
  def acceptsURL(url: String): Boolean = true
  def getPropertyInfo(url: String, info: Properties): Array[DriverPropertyInfo] = Array()
  def getMajorVersion: Int = 0
  def getMinorVersion: Int = 0
  def jdbcCompliant(): Boolean = false
  def getParentLogger: Logger = throw new SQLException("feature not implemented")
}
| AtkinsChang/slick | slick-testkit/src/test/scala/slick/test/jdbc/DataSourceTest.scala | Scala | bsd-2-clause | 3,951 |
package ch.epfl.scala.index.model
/**
 * Licence representation.
 *
 * @param name      the full licence name
 * @param shortName the short (SPDX-style) identifier
 * @param url       optional url for further reading
 */
case class License(name: String, shortName: String, url: Option[String])

object License {
  /** Builds a licence whose url points at its page on spdx.org. */
  def spdx(id: String, name: String) =
    License(name, id, Some(s"https://spdx.org/licenses/$id.html"))
  /** inspired by: https://github.com/NixOS/nixpkgs/blob/master/lib/licenses.nix#L1 */
  val Academic = spdx("AFL-3.0", "Academic Free License")
  val Affero = spdx("AGPL-3.0", "GNU Affero General Public License v3.0")
  val Apache2 = spdx("Apache-2.0", "Apache License 2.0")
  val Apple2_0 = spdx("APSL-2.0", "Apple Public Source License 2.0")
  val Beerware = spdx("Beerware", "Beerware License")
  val Bsd2Clause = spdx("BSD-2-Clause", """BSD 2-clause "Simplified" License""")
  val Bsd3Clause = spdx("BSD-3-Clause", """BSD 3-clause "New" or "Revised" License""")
  val BsdOriginal = spdx("BSD-4-Clause", """BSD 4-clause "Original" or "Old" License""")
  val CreativeCommonsZeroUniversal = spdx("CC0-1.0", "Creative Commons Zero v1.0 Universal")
  val CreativeCommonsAttributionNonCommercialShareAlike_2_0 =
    spdx("CC-BY-NC-SA-2.0", "Creative Commons Attribution Non Commercial Share Alike 2.0")
  val CreativeCommonsAttributionNonCommercialShareAlike_2_5 =
    spdx("CC-BY-NC-SA-2.5", "Creative Commons Attribution Non Commercial Share Alike 2.5")
  val CreativeCommonsAttributionNonCommercialShareAlike_3_0 =
    spdx("CC-BY-NC-SA-3.0", "Creative Commons Attribution Non Commercial Share Alike 3.0")
  val CreativeCommonsAttributionNonCommercialShareAlike_4_0 =
    spdx("CC-BY-NC-SA-4.0", "Creative Commons Attribution Non Commercial Share Alike 4.0")
  val CreativeCommonsAttributionShareAlike_2_5 =
    spdx("CC-BY-SA-2.5", "Creative Commons Attribution Share Alike 2.5")
  val CreativeCommonsAttribution_3_0 = spdx("CC-BY-3.0", "Creative Commons Attribution 3.0")
  val CreativeCommonsAttributionShareAlike_3_0 =
    spdx("CC-BY-SA-3.0", "Creative Commons Attribution Share Alike 3.0")
  val CreativeCommonsAttribution_4_0 = spdx("CC-BY-4.0", "Creative Commons Attribution 4.0")
  val CreativeCommonsAttributionShareAlike_4_0 =
    spdx("CC-BY-SA-4.0", "Creative Commons Attribution Share Alike 4.0")
  val Eclipse = spdx("EPL-1.0", "Eclipse Public License 1.0")
  val GPL1 = spdx("GPL-1.0", "GNU General Public License v1.0 only")
  val GPL1Plus = spdx("GPL-1.0+", "GNU General Public License v1.0 or later")
  val GPL2 = spdx("GPL-2.0", "GNU General Public License v2.0 only")
  val GPL2Plus = spdx("GPL-2.0+", "GNU General Public License v2.0 or later")
  val GPl3 = spdx("GPL-3.0", "GNU General Public License v3.0 only")
  val GPL3Plus = spdx("GPL-3.0+", "GNU General Public License v3.0 or later")
  val ISC = spdx("ISC", "ISC License")
  val LGPL2 = spdx("LGPL-2.0", "GNU Library General Public License v2 only")
  // @deprecated("-", "-")
  val LGPL2_Plus = spdx("LGPL-2.0+", "GNU Library General Public License v2 or later")
  val LGPL2_1 = spdx("LGPL-2.1", "GNU Library General Public License v2.1 only")
  // @deprecated("-", "-")
  val LGPL2_1_Plus = spdx("LGPL-2.1+", "GNU Library General Public License v2.1 or later")
  val LGPL3 = spdx("LGPL-3.0", "GNU Lesser General Public License v3.0 only")
  // @deprecated("use LGPL3", "2.0rc2")
  val LGPL3_Plus = spdx("LGPL-3.0+", "GNU Lesser General Public License v3.0 or later")
  // Spdx.org does not (yet) differentiate between the X11 and Expat versions
  // for details see http://en.wikipedia.org/wiki/MIT_License#Various_versions
  val MIT = spdx("MIT", "MIT License")
  val MPL_1_0 = spdx("MPL-1.0", "Mozilla Public License 1.0")
  val MPL_1_1 = spdx("MPL-1.1", "Mozilla Public License 1.1")
  val MPL2 = spdx("MPL-2.0", "Mozilla Public License 2.0")
  val PublicDomain = License("Public Domain", "Public Domain", None)
  val Scala =
    License("Scala License", "Scala License", Some("http://www.scala-lang.org/license.html"))
  val TypesafeSubscriptionAgreement = License(
    "Typesafe Subscription Agreement",
    "Typesafe Subscription Agreement",
    Some("http://downloads.typesafe.com/website/legal/TypesafeSubscriptionAgreement.pdf"))
  val Unlicense = spdx("Unlicense", "The Unlicense")
  val W3C = spdx("W3C", "W3C Software Notice and License")
  val WTFPL = spdx("WTFPL", "Do What The F*ck You Want To Public License")
  /**
   * All known licences. Materialised once as a `val` (the original `def`
   * rebuilt the list on every call even though it never changes).
   */
  val all: List[License] = List(
    Academic,
    Affero,
    Apache2,
    Apple2_0,
    Beerware,
    Bsd2Clause,
    Bsd3Clause,
    BsdOriginal,
    CreativeCommonsZeroUniversal,
    CreativeCommonsAttributionNonCommercialShareAlike_2_0,
    CreativeCommonsAttributionNonCommercialShareAlike_2_5,
    CreativeCommonsAttributionNonCommercialShareAlike_3_0,
    CreativeCommonsAttributionNonCommercialShareAlike_4_0,
    CreativeCommonsAttributionShareAlike_2_5,
    CreativeCommonsAttribution_3_0,
    CreativeCommonsAttributionShareAlike_3_0,
    CreativeCommonsAttribution_4_0,
    CreativeCommonsAttributionShareAlike_4_0,
    Eclipse,
    GPL1,
    GPL1Plus,
    GPL2,
    GPL2Plus,
    GPl3,
    GPL3Plus,
    ISC,
    LGPL2,
    LGPL2_Plus,
    LGPL2_1,
    LGPL2_1_Plus,
    LGPL3,
    LGPL3_Plus,
    MIT,
    MPL_1_0,
    MPL_1_1,
    MPL2,
    PublicDomain,
    Scala,
    TypesafeSubscriptionAgreement,
    Unlicense,
    W3C,
    WTFPL
  )
}
| adamwy/scaladex | model/src/main/scala/ch.epfl.scala.index.model/License.scala | Scala | bsd-3-clause | 5,389 |
package com.mle.sbt.cloud
import AppFogKeys._
import sbt.Keys._
import java.nio.file.Paths
import PaasKeys._
import sbt._
/**
 * sbt settings for deploying an application to the AppFog PaaS through the
 * `af` command line tool (AppFog's CloudFoundry-derived CLI).
 *
 * @author Michael
 */
object AppFogPlugin extends CloudFoundryBasedPlugin {
  val settings: Seq[Project.Setting[_]] = cfBasedSettings ++ inConfig(AppFog)(Seq(
    // location of the `af` launcher on the build machine
    cmdLineTool := Paths.get( """C:\\Program Files (x86)\\ruby-1.9.2\\bin\\af.bat"""),
    infra := EuAws,
    // deployed app url = project name + infrastructure-specific suffix
    deployUrl <<= (name, infra)((n, i) => n + i.urlSuffix),
    runtime := JavaRuntime,
    // flattened "--flag value" parameter list for `af push`
    pushOptions <<= (deployUrl, instances, memoryMb, infra, runtime)(
      (u, inst, mem, inf, r) => {
        val params = Map(
          "--url" -> u,
          "--instances" -> inst.toString,
          "--mem" -> (mem + "M"),
          "--runtime" -> r.name,
          "--infra" -> inf.name
        )
        params.map(kv => Seq(kv._1, kv._2)).flatten.toSeq ++ Seq("--non-interactive")
      }),
    // full `af push` command: tool + subcommand + app + package + options
    pushCommand <<= (cmdLineTool, name, packagedApp, pushOptions) map ((cmdPath, app, appPackage, params) => {
      toCommand(cmdPath, "push", app, appPackage) ++ params
    }),
    paasPush <<= (pushCommand, deployUrl, streams) map (executeDeploy),
    // `af update` redeploys an existing app and needs no extra options
    updateCommand <<= (cmdLineTool, name, packagedApp) map ((cmdPath, app, appPackage) => {
      toCommand(cmdPath, "update", app, appPackage)
    }),
    paasUpdate <<= (updateCommand, deployUrl, streams) map (executeDeploy),
    // debugging helpers: log the commands without running them
    printUpdate <<= (updateCommand, streams) map logIt,
    printPush <<= (pushCommand, streams) map logIt,
    login <<= (cmdLineTool, streams) map executeLogin
  ))
}
| malliina/sbt-paas-deployer | src/main/scala/com/mle/sbt/cloud/AppFogPlugin.scala | Scala | bsd-3-clause | 1,528 |
package com.twitter.scrooge.swift_generator
import com.twitter.scrooge.ast._
import com.twitter.scrooge.java_generator.{FieldTypeController => JavaFieldTypeController}
/**
 * Template view-model for a Swift field type: adds the Swift element type
 * names of a container field on top of the Java field type controller.
 */
class FieldTypeController(fieldType: FunctionType, generator: SwiftGenerator)
  extends JavaFieldTypeController(fieldType, generator) {
  // Type names for the left (key) and right (value) element of a container
  // type, with and without their generic parameters.
  val left_element_type_name_in_container: String = generator.leftElementTypeName(fieldType)
  val right_element_type_name_in_container: String = generator.rightElementTypeName(fieldType)
  val left_element_type_name_in_container_skip_generic: String =
    generator.leftElementTypeName(fieldType, skipGeneric = true)
  val right_element_type_name_in_container_skip_generic: String =
    generator.rightElementTypeName(fieldType, skipGeneric = true)
}
| twitter/scrooge | scrooge-generator/src/main/scala/com/twitter/scrooge/swift_generator/FieldTypeController.scala | Scala | apache-2.0 | 761 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.index.z2
import org.locationtech.geomesa.index.index.z2._
import org.locationtech.geomesa.kudu.index.KuduFeatureIndex
/** Singleton instance of the XZ2 spatial index for the Kudu data store. */
case object KuduXZ2Index extends KuduXZ2Index

/**
 * XZ2 index implementation backed by Kudu: combines the shared XZ2 key space
 * with the Kudu Z2 table schema.
 */
trait KuduXZ2Index extends KuduFeatureIndex[XZ2IndexValues, Long] with KuduZ2Schema[XZ2IndexValues] {
  override val name: String = XZ2Index.Name
  // schema version of this particular index implementation
  override val version: Int = 1
  override protected val keySpace: XZ2IndexKeySpace = XZ2IndexKeySpace
}
| ddseapy/geomesa | geomesa-kudu/geomesa-kudu-datastore/src/main/scala/org/locationtech/geomesa/kudu/index/z2/KuduXZ2Index.scala | Scala | apache-2.0 | 924 |
package com.quantifind.charts.highcharts
import org.json4s.NoTypeHints
import org.json4s.JsonDSL.WithDouble._
import org.json4s.jackson.Serialization
import scala.collection._
import scala.language.implicitConversions
/**
* User: austin
* Date: 9/9/13
*
* Tries to closely follow : api.highcharts.com/highcharts
*
* Original built for Highcharts 3.0.6, and we are now porting to 4.0.4 (12/12/14)
*/
/**
* implicits to offer conversions from scala types to Highcharts objects.
* Including wrappers around Option, and transformations from Traversable / Array to Highcharts series
*/
object Highchart {
  // Data: lift (x, y) pairs into Data points.
  implicit def traversableToTraversableData[X: Numeric, Y: Numeric](seq: Traversable[(X, Y)]) = seq.map{case(x, y) => Data(x, y)}
  // Series: lift (x, y) pairs all the way to a one-element series collection.
  implicit def traversableToTraversableSeries[X: Numeric, Y: Numeric](seq: Traversable[(X, Y)]) = seriesToTraversableSeries(traversableToSeries(seq))
  implicit def traversableToSeries[X: Numeric, Y: Numeric](seq: Traversable[(X, Y)]) = Series(traversableToTraversableData(seq))
  implicit def seriesToTraversableSeries(series: Series) = Seq(series)
  implicit def traversableToSomeArray(t: Traversable[Any]) = Some(t.toArray) // for axes
  // Axis: wrap titles/axes into the Option[Array[Axis]] shape Highchart expects.
  implicit def axisTitleOptionToArrayAxes(axisTitle: Option[AxisTitle]) = Some(Array(Axis(axisTitle)))
  implicit def axisToArrayAxes(axis: Axis) = Some(Array(axis))
  implicit def axesSeqToSomeAxesArray(axes: Seq[Axis]) = Some(axes.toArray)
  implicit def stringToAxisTitle(s: String) = Some(AxisTitle(s))
  implicit def stringToAxis(s: String): Option[Array[Axis]] = axisTitleOptionToArrayAxes(stringToAxisTitle(s))
  // Colors
  implicit def colorToSomeColorArray(c: Color.Type) = Some(Array(c))
  // Exporting
  implicit def stringToExporting(s: String) = Some(Exporting(s))
  // Title
  implicit def stringToTitle(s: String) = Some(Title(text = s))
  // value -> Some(value)
  // NOTE(review): this conversion applies to every type T, which is very broad
  // and can mask type errors; kept for API compatibility.
  implicit def optionWrap[T](value: T): Option[T] = Option(value)
}
/**
 * Base class for every Highcharts configuration fragment. Subclasses serialize
 * themselves to a Map[String, Any] that is later rendered as json. `_name` is
 * the JSON key the fragment is emitted under; it is a var because some
 * fragments are renamed after construction (e.g. Axis "yAxis" -> "xAxis").
 * @param _name JSON key this fragment is serialized under
 */
abstract class HighchartKey(var _name: String) {
  def toServiceFormat: Map[String, Any]
}
/** Internal helpers for mapping config fragments to the service format (which gets parsed to json). */
object HighchartKey {
  /** Wraps a fragment as a single-entry map keyed by its serialization name. */
  def highchartKeyToServiceFormat(hck: HighchartKey): Map[String, Any] = Map(hck._name -> hck.toServiceFormat)
  /** Serializes an optional fragment; None contributes no entries. */
  def optionToServiceFormat(o: Option[HighchartKey]): Map[String, Any] =
    o.fold(Map.empty[String, Any])(highchartKeyToServiceFormat)
  /** Serializes an axis array under the __name of its first axis ("xAxis"/"yAxis"). */
  def optionArrayAxisToServiceFormat(o: Option[Array[Axis]]): Map[String, Any] =
    o.fold(Map.empty[String, Any])(axes => Map(axes.head.__name -> axes.map(_.toServiceFormat)))
  /** Serializes an optional color array under the "colors" key. */
  def optionArrayColorToServiceFormat(o: Option[Array[Color.Type]]): Map[String, Any] =
    o.fold(Map.empty[String, Any])(colors => Map("colors" -> colors))
  def axisToTitleId(a: Axis) = a.id
  // NOTE(review): the two branches yield different types (String vs
  // Option[String]), so the result type is a wide supertype; kept verbatim
  // for binary/source compatibility.
  def someAxisToTitleId(oa: Option[Axis]) = oa match {
    case None => "0"
    case Some(a) => a.id
  }
  /** Serializes a collection of fragments under the name of the first one. */
  def hckTraversableToServiceFormat(t: Traversable[HighchartKey]): Map[String, Any] = {
    if(t.isEmpty) Map()
    else Map(t.head._name -> t.map(_.toServiceFormat))
  }
  /** Drops entries whose value is None, unwrapping the defined ones. */
  def flatten(o: (String, Option[Any])): Option[(String, Any)] = o._2.map(v => (o._1, v))
  /** Renders an optional CSS style as its string form under the "style" key. */
  def someStyleToServiceFormat(style: Option[CSSObject]) =
    style.map(s => Map("style" -> s.toString())).getOrElse(Map())
}
// Not going to implement: loading
// Not done yet:
// css object wrappers limited support
// navigation :: (styling for exporting)
// pane: for guages (where are guages?)
//
/**
* Top-most level Highcharts object. Overrides some of the Highcharts defaults
* @param series
* @param title
* @param chart
* @param colors
* @param credits
* @param exporting
* @param legend
* @param plotOptions
* @param subtitle
* @param setTurboThreshold
* @param tooltip
* @param xAxis
* @param yAxis
*/
case class Highchart(
  series: Traversable[Series],
  title: Option[Title] = Some(Title()),
  chart: Option[Chart] = Some(Chart()),
  colors: Option[Array[Color.Type]] = None,
  credits: Option[Credits] = Some(Credits()),
  exporting: Option[Exporting] = Some(Exporting()),
  legend: Option[Legend] = None,
  plotOptions: Option[PlotOptions] = Some(PlotOptions()),
  subtitle: Option[Title] = None,
  setTurboThreshold: Boolean = true,
  tooltip: Option[ToolTip] = None,
  xAxis: Option[Array[Axis]] = None,
  yAxis: Option[Array[Axis]] = Some(Array(Axis()))
) {
  import HighchartKey._
  // json4s setup; no type hints are embedded in the serialized output.
  implicit val formats = Serialization.formats(NoTypeHints)
  def toJson = Serialization.write(jsonMap)
  /** Builds the complete configuration map that is later rendered as json.
   *  NOTE: mutates the passed-in axes and series (id/name assignment below). */
  def jsonMap: Map[String, Any] = {
    if(series.size == 0) System.err.println("Tried to create a chart with no series")
    // Because we want to default to turboThreshold off (0) we control it as a boolean at the top-most level
    // As otherwise it is a per-type plotOption
    val turboOutput: Map[String, Any] =
      if(setTurboThreshold) {
        series.filter(_.chart != Some(SeriesType.pie)).flatMap(_.chart).map { s =>
          s -> Map("turboThreshold" -> 0)
        }.toMap
      } else Map.empty[String, Any]
    // Todo: can we do better?
    // Check if it exists
    val finalPlotOption = if(plotOptions.isDefined) {
      Map(PlotOptions.name -> {optionToServiceFormat(plotOptions)(PlotOptions.name).asInstanceOf[Map[String, Any]] ++ turboOutput})
    } else {
      Map(PlotOptions.name -> turboOutput)
    }
    // Fall back to per-axis title colors when no explicit color array is given
    // and the number of styled axis titles matches the number of series.
    val colorWrapper = (colors, yAxis) match {
      case (Some(c), _) if c.size > 0 => colors
      case (_, Some(y)) => {
        val styleColors = y.flatMap(_.title).flatMap(_.style).flatMap(_.color)
        if(styleColors.size == series.size) Some(styleColors)
        else None
      }
      case _ => None
    }
    // Pair axes and series positionally when their counts match (> 1 axis),
    // assigning ids in place so each series references its own axis.
    (yAxis, series) match {
      case (Some(y), s) => if(y.size > 1 && y.size == s.size) y.zip(s.toSeq).zipWithIndex.foreach{case((axis, ser), index) =>
        if(axis.id.isEmpty) axis.id = Some(index.toString())
        if(ser.yAxis.isEmpty) ser.yAxis = axis.id
      }
      case _ =>
    }
    (xAxis, series) match {
      case (Some(x), s) => if(x.size > 1 && x.size == s.size) x.zip(s.toSeq).zipWithIndex.foreach{case((axis, ser), index) =>
        if(axis.id.isEmpty) axis.id = Some(index.toString())
        if(ser.xAxis.isEmpty) ser.xAxis = axis.id
      }
      case _ =>
    }
    // Axis defaults to yAxis, rename xAxes
    xAxis.map(_.foreach(_.__name = "xAxis"))
    finalPlotOption ++
      hckTraversableToServiceFormat(series) ++
      Seq(xAxis, yAxis).flatMap(optionArrayAxisToServiceFormat) ++
      optionArrayColorToServiceFormat(colorWrapper) ++
      Seq(chart, title, exporting, credits, legend, tooltip, subtitle).flatMap(optionToServiceFormat)
  }
  /** Wraps the config map under the top-level "highcharts" key. */
  def toServiceFormat: (String, Map[String, Any]) = {
    "highcharts" -> jsonMap
  }
}
// can we do better than specifying every field manually? (probably...)
// but I was not happy with Enumeration returning type Value instead of type String
// I need to look into jerkson or something similar for case class -> json conversion
/** Chart (or subtitle) title; text defaults to empty to suppress Highcharts' "Chart title". */
case class Title(
  text: String = "", // Override default "Chart title"
  align: Option[Alignment.Type] = None,
  floating: Option[Boolean] = None,
  style: Option[CSSObject] = None,
  useHTML: Option[Boolean] = None,
  verticalAlign: Option[VerticalAlignment.Type] = None,
  x: Option[Int] = None,
  y: Option[Int] = None,
  var __name: String = "title"
) extends HighchartKey(__name) {
  def toServiceFormat = {
    // Optional fields contribute entries only when set.
    val optionalFields = Seq(
      "align" -> align,
      "floating" -> floating,
      "useHTML" -> useHTML,
      "verticalAlign" -> verticalAlign,
      "x" -> x,
      "y" -> y
    ).flatMap(HighchartKey.flatten).toMap
    Map("text" -> text) ++ optionalFields ++ HighchartKey.someStyleToServiceFormat(style)
  }
}
/** Top-level chart behaviour options; currently only the zoom mode is modeled. */
case class Chart(
  // todo, many other chart options
  zoomType: Option[Zoom.Type] = Some(Zoom.xy)
) extends HighchartKey("chart") {
  def toServiceFormat =
    Seq("zoomType" -> zoomType).flatMap(HighchartKey.flatten).toMap
}
/**
 * Credits label shown in a chart corner. `href` and `text` default to empty
 * strings to suppress the Highcharts-branded defaults.
 */
case class Credits(
  enabled: Option[Boolean] = None,
  href: String = "", // Override default Highcharts
  position: Option[Position] = None,
  style: Option[CSSObject] = None,
  text: String = "" // Override default Highcharts
) extends HighchartKey("credits") {
  def toServiceFormat = Map(
    "href" -> href,
    "text" -> text
  ) ++
    // "style" is emitted solely via someStyleToServiceFormat (string form).
    // Previously the raw CSSObject was also placed in this map, only to be
    // shadowed by the string entry on concatenation — removed as dead code.
    Map("enabled" -> enabled).flatMap(HighchartKey.flatten) ++
    HighchartKey.optionToServiceFormat(position) ++
    HighchartKey.someStyleToServiceFormat(style)
}
/**
 * Export-to-image options. `_type` maps to the json "type" key (the
 * underscore avoids the Scala keyword clash with `type`).
 */
case class Exporting(
  //buttons
  //chartOptions
  filename: String = "chart",
  scale: Option[Int] = None,
  sourceHeight: Option[Int] = None,
  sourceWidth: Option[Int] = None,
  _type: Option[String] = None,
  url: Option[String] = None,
  width: Option[Int] = None
) extends HighchartKey("exporting") {
  def toServiceFormat =
    Map("filename" -> filename) ++
    Map(
      "scale" -> scale,
      "type" -> _type,
      "url" -> url,
      "sourceHeight" -> sourceHeight,
      "sourceWidth" -> sourceWidth,
      "width" -> width
    ).flatMap(HighchartKey.flatten)
}
/** Alignment/offset of a positioned element (used e.g. by Credits). */
case class Position(
  align: Option[Alignment.Type] = None,
  x: Option[Int] = None,
  verticalAlign: Option[VerticalAlignment.Type] = None,
  y: Option[Int] = None
) extends HighchartKey("position") {
  def toServiceFormat =
    Seq(
      "align" -> align,
      "x" -> x,
      "verticalAlign" -> verticalAlign,
      "y" -> y
    ).collect { case (key, Some(value)) => key -> value }.toMap
}
/**
 * Tooltip configuration.
 * Fixed: the serialization key was "ToolTip", but the Highcharts config key is
 * lowercase "tooltip" — the mis-cased key was silently ignored by Highcharts.
 */
case class ToolTip(
  animation: Option[Boolean] = None,
  backgroundColor: Option[Color.Type] = None,
  borderColor: Option[Color.Type] = None,
  borderRadius: Option[Int] = None,
  borderWidth: Option[Int] = None,
  // crosshairs
  dateTimeLabelFormats: Option[DateTimeFormats] = None, // this has different defaults than the Axis
  enabled: Option[Boolean] = None,
  followPointer: Option[Boolean] = None,
  followTouchMove: Option[Boolean] = None,
  footerFormat: Option[String] = None,
  //formatter
  //headerFormat
  hideDelay: Option[Int] = None,
  //pointFormat
  //positioner
  shadow: Option[Boolean] = None,
  shared: Option[Boolean] = None,
  snap: Option[Int] = None,
  //style
  useHTML: Option[Boolean] = None,
  valueDecimals: Option[Int] = None,
  valuePrefix: Option[String] = None,
  valueSuffix: Option[String] = None,
  xDateFormat: Option[String] = None
) extends HighchartKey("tooltip") {
  def toServiceFormat =
    Map(
      "animation" -> animation,
      "backgroundColor" -> backgroundColor,
      "borderColor" -> borderColor,
      "borderRadius" -> borderRadius,
      "borderWidth" -> borderWidth,
      "enabled" -> enabled,
      "followPointer" -> followPointer,
      "followTouchMove" -> followTouchMove,
      "footerFormat" -> footerFormat,
      "hideDelay" -> hideDelay,
      "shadow" -> shadow,
      "shared" -> shared,
      "snap" -> snap,
      "useHTML" -> useHTML,
      "valueDecimals" -> valueDecimals,
      "valuePrefix" -> valuePrefix,
      "valueSuffix" -> valueSuffix,
      "xDateFormat" -> xDateFormat
    ).flatMap(HighchartKey.flatten) ++
    HighchartKey.optionToServiceFormat(dateTimeLabelFormats)
}
/**
 * A single data series. `chart` carries the series type (emitted as the json
 * "type" field). xAxis/yAxis are vars because ids are assigned late by
 * Highchart.jsonMap when multiple axes are paired with series.
 */
case class Series(
  data: Traversable[Data[_, _]],
  index: Option[Int] = None,
  legendIndex: Option[Int] = None,
  name: Option[String] = None,
  chart: Option[SeriesType.Type] = Some(SeriesType.line), // for turbo threshold default
  visible: Option[Boolean] = None,
  color: Option[Color.Type] = None,
  var xAxis: Option[String] = None,
  var yAxis: Option[String] = None,
  __name: String = "series"
) extends HighchartKey(__name) {
  def toServiceFormat: Map[String, Any] = {
    if (data.size == 0) System.err.println("Tried to create a series with no data")
    // Fixed: the "index" key previously contained a stray leading space
    // (" index"), which Highcharts silently ignored.
    Map("data" -> data.map(_.toServiceFormat).toSeq) ++
    Map("xAxis" -> xAxis, "yAxis" -> yAxis, "type" -> chart, "color" -> color, "visible" -> visible, "index" -> index, "legendIndex" -> legendIndex, "name" -> name).flatMap{HighchartKey.flatten}
  }
}
/** A single chart point; x/y are always serialized, color/name only when set. */
case class Data[X: Numeric, Y: Numeric](
  x: X,
  y: Y,
  color: Option[Color.Type] = None,
  //dataLabels
  //events
  // id
  name: Option[String] = None
) {
  def toServiceFormat = {
    val optionalFields = Seq("color" -> color, "name" -> name).flatMap(HighchartKey.flatten)
    Map("x" -> x, "y" -> y) ++ optionalFields
  }
}
// TODO PieData for legendIndex, slice
// No more than 22 members in a case class TODO
/**
 * Legend configuration. Disabled by default (enabled = Some(false)), which
 * overrides the Highcharts default of showing a legend. Commented-out fields
 * are casualties of the Scala 2.10-era 22-member case-class limit.
 */
case class Legend(
  align: Option[Alignment.Type] = None,
  backgroundColor: Option[Color.Type] = None,
  borderColor: Option[Color.Type] = None,
  // borderRadius: Int = 5,
  // borderWidth: Int = 2,
  enabled: Option[Boolean] = Some(false), // override default
  floating: Option[Boolean] = None,
  itemDistance: Option[Int] = None,
  //itemHiddenStyle
  //itemHoverStyle
  itemMarginBottom: Option[Int] = None,
  itemMarginTop: Option[Int] = None,
  //itemStyle
  itemWidth: Option[Int] = None,
  labelFormat: Option[String] = None, // TODO - format string helpers
  //labelFormatter
  layout: Option[Layout.Type] = None,
  margin: Option[Int] = None,
  maxHeight: Option[Int] = None,
  //navigation
  padding: Option[Int] = None,
  reversed: Option[Boolean] = None,
  rtl: Option[Boolean] = None, // right-to-left
  //shadow
  //style
  // symbolPadding: Int = 5,
  // symbolWidth: Int = 30,
  title: Option[String] = None, // todo - css title
  // useHTML: Boolean = false,
  verticalAlign: Option[VerticalAlignment.Type] = None,
  width: Option[Int] = None,
  x: Option[Int] = None,
  y: Option[Int] = None
) extends HighchartKey("legend") {
  def toServiceFormat =
    Map(
      "align" -> align,
      "backgroundColor" -> backgroundColor,
      "borderColor" -> borderColor,
      // "borderRadius" -> borderRadius,
      // "borderWidth" -> borderWidth,
      "enabled" -> enabled,
      "floating" -> floating,
      "itemDistance" -> itemDistance,
      "itemMarginBottom" -> itemMarginBottom,
      "itemMarginTop" -> itemMarginTop,
      "labelFormat" -> labelFormat,
      "layout" -> layout,
      "margin" -> margin,
      "padding" -> padding,
      "reversed" -> reversed,
      "rtl" -> rtl,
      // "symbolPadding" -> symbolPadding,
      // "symbolWidth" -> symbolWidth,
      // "useHTML" -> useHTML,
      "verticalAlign" -> verticalAlign,
      "x" -> x,
      "y" -> y,
      "itemWidth" -> itemWidth,
      "maxHeight" -> maxHeight,
      "title" -> title,
      "width" -> width
    ).flatMap{case(s, a) => HighchartKey.flatten(s, a)}
}
/**
 * Axis configuration. `id` and `__name` are vars because Highchart.jsonMap
 * assigns ids when pairing axes with series and renames x-axes from the
 * default "yAxis" to "xAxis" before serialization.
 */
case class Axis(
  title: Option[AxisTitle] = Some(AxisTitle()),
  allowDecimals: Option[Boolean] = None,
  alternateGridColor: Option[Color.Type] = None,
  categories: Option[Array[String]] = None,
  dateTimeLabelFormats: Option[DateTimeFormats] = None,
  endOnTick: Option[Boolean] = None,
  //events
  // gridLineColor: Color.Type = "#C0C0C0",
  // gridLineDashStyle: String = "Solid", // TODO Bundle
  // gridLineWidth: Int = 2,
  var id: Option[String] = None,
  labels: Option[AxisLabel] = None,
  lineColor: Option[Color.Type] = None,
  lineWidth: Option[Int] = None,
  //linkedTo
  max: Option[Int] = None,
  // maxPadding: Double = 0.01,
  min: Option[Int] = None,
  // minPadding: Double = 0.01,
  minRange: Option[Int] = None,
  minTickInterval: Option[Int] = None,
  //minor
  offset: Option[Int] = None,
  opposite: Option[Boolean] = None, // opposite side of chart, ie left / right for y-axis
  //plotBands
  //plotLines // TODO Kevin wants these
  reversed: Option[Boolean] = None,
  // showEmpty: Boolean = false,
  showFirstLabel: Option[Boolean] = None,
  showLastLabel: Option[Boolean] = None,
  //startOfWeek
  startOnTick: Option[Boolean] = None,
  // tickColor: Color.Type = "#C0D0E0",
  // TICK STUFF TODO
  axisType: Option[AxisType.Type] = None,
  var __name: String = "yAxis"
) extends HighchartKey(__name) {
  // NOTE(review): max/min/minRange/minTickInterval/alternateGridColor are
  // accepted as parameters but not serialized below — presumably an omission;
  // verify before relying on them.
  def toServiceFormat: Map[String, Any] =
    Map(
      "allowDecimals" -> allowDecimals,
      "categories" -> categories,
      "endOnTick" -> endOnTick,
      "lineColor" -> lineColor,
      "lineWidth" -> lineWidth,
      // "maxPadding" -> maxPadding,
      // "minPadding" -> minPadding,
      "offset" -> offset,
      "opposite" -> opposite,
      "reversed" -> reversed,
      "showFirstLabel" -> showFirstLabel,
      "showLastLabel" -> showLastLabel,
      "startOnTick" -> startOnTick,
      "type" -> axisType,
      "title" -> title,
      "id" -> id
    ).flatMap(HighchartKey.flatten) ++
    HighchartKey.optionToServiceFormat(dateTimeLabelFormats) ++
    HighchartKey.optionToServiceFormat(labels)
}
/** Axis label styling; serialized under the "labels" key of its axis. */
case class AxisLabel(
  align: Option[String] = None,
  enabled: Option[Boolean] = None,
  format: Option[String] = None,
  // formatter
  maxStaggerLines: Option[Int] = None,
  overflow: Option[Overflow.Type] = None,
  rotation: Option[Int] = None,
  step: Option[Int] = None,
  style: Option[CSSObject] = None,
  useHTML: Option[Boolean] = None,
  x: Option[Int] = None,
  y: Option[Int] = None,
  zIndex: Option[Int] = None
) extends HighchartKey("labels") {
  def toServiceFormat =
    Map(
      "align" -> align,
      "enabled" -> enabled,
      "format" -> format,
      "maxStaggerLines" -> maxStaggerLines,
      "overflow" -> overflow,
      "rotation" -> rotation,
      "step" -> step,
      "useHTML" -> useHTML,
      "x" -> x,
      "y" -> y,
      "zIndex" -> zIndex
    ).flatMap(HighchartKey.flatten) ++
    HighchartKey.someStyleToServiceFormat(style)
}
/**
 * Date-time label formats, serialized under "dateTimeLabelFormats".
 * Defaults mirror the Highcharts axis defaults.
 */
case class DateTimeFormats(
  millisecond: String = "%H:%M:%S.%L",
  second: String = "%H:%M:%S",
  minute: String = "%H:%M",
  hour: String = "%H:%M",
  day: String = "%e. %b",
  week: String = "%e. %b", // fixed: was "%e. b" — missing '%' rendered a literal 'b' instead of the month
  month: String = "%b \\\\ %y", // NOTE(review): Highcharts' default is "%b '%y"; verify this escaping is intentional
  year: String = "%Y"
) extends HighchartKey("dateTimeLabelFormats") {
  def toServiceFormat = Map("dateTimeLabelFormats" ->
    Map(
      "millisecond" -> millisecond,
      "second" -> second,
      "minute" -> minute,
      "hour" -> hour,
      "day" -> day,
      "week" -> week,
      "month" -> month,
      "year" -> year
    )
  )
}
// Axis title. Highcharts defaults apply when the options are unset:
// align=middle, margin=(x=0, y=10), relative offset, rotation=0.
case class AxisTitle(
  text: String = "", // Override default y-axis "value"
  align: Option[AxisAlignment.Type] = None,
  margin: Option[Int] = None,
  offset: Option[Int] = None,
  rotation: Option[Int] = None,
  style: Option[CSSObject] = None
) {
  def toServiceFormat = {
    val optionalFields = Seq(
      "align" -> align,
      "margin" -> margin,
      "offset" -> offset,
      "rotation" -> rotation
    ).flatMap(HighchartKey.flatten)
    Map("text" -> text) ++ optionalFields ++ HighchartKey.someStyleToServiceFormat(style)
  }
}
/** Convenience constructors that wrap a text color into the title's CSS style. */
object AxisTitle {
  def apply(text: String, color: Color.Type) =
    new AxisTitle(text, style = Some(CSSObject(Some(color))))
  def apply(text: String, color: Color.Type, rotation: Option[Int]) =
    new AxisTitle(text, rotation = rotation, style = Some(CSSObject(Some(color))))
}
| MartinSenne/wisp | core/src/main/scala/com/quantifind/charts/highcharts/Highchart.scala | Scala | apache-2.0 | 22,640 |
package com.github.j5ik2o.chatwork.infrastructure.api.contact
import org.json4s._
import org.json4s.DefaultReaders._
/**
* {
"account_id": 123,
"room_id": 322,
"name": "John Smith",
"chatwork_id": "tarochatworkid",
"organization_id": 101,
"organization_name": "Hello Company",
"department": "Marketing",
"avatar_image_url": "https://example.com/abc.png"
}
*/
/** A ChatWork contact, as returned by the contacts API (see example above). */
case class Contact
(accountId: Int,
 roomId: Int,
 name: String,
 chatWorkId: String,
 organizationId: Int,
 organizationName: String,
 department: String,
 avatarImageUrl: String)
object Contact {
  /** Builds a Contact from the parsed json response.
   *  NOTE(review): uses json4s recursive lookup (\\) rather than direct field
   *  access (\); equivalent for this flat payload, but would mis-merge nested
   *  duplicates — confirm the payload stays flat. */
  def apply(jValue: JValue): Contact = {
    Contact(
      (jValue \\ "account_id").as[Int],
      (jValue \\ "room_id").as[Int],
      (jValue \\ "name").as[String],
      (jValue \\ "chatwork_id").as[String],
      (jValue \\ "organization_id").as[Int],
      (jValue \\ "organization_name").as[String],
      (jValue \\ "department").as[String],
      (jValue \\ "avatar_image_url").as[String]
    )
  }
}
| j5ik2o/chatwork-client | src/main/scala/com/github/j5ik2o/chatwork/infrastructure/api/contact/Contact.scala | Scala | apache-2.0 | 1,001 |
package amailp.intellij.robot.extensions
import amailp.intellij.robot.ast.Keyword
import amailp.intellij.robot.psi.reference.PythonKeywordToDefinitionReference
import com.intellij.openapi.util.TextRange
import com.intellij.patterns.PlatformPatterns
import com.intellij.psi._
import com.intellij.util.ProcessingContext
/**
 * Registers a PSI reference provider that links Robot keyword usages to their
 * Python keyword definitions, enabling navigation/resolution in the IDE.
 */
class PythonKeywordReferenceContributor extends PsiReferenceContributor {
  override def registerReferenceProviders(registrar: PsiReferenceRegistrar): Unit = {
    registrar.registerReferenceProvider(
      PlatformPatterns.psiElement(Keyword),
      new PsiReferenceProvider() {
        override def getReferencesByElement(element: PsiElement, context: ProcessingContext): Array[PsiReference] = {
          // A single reference covering the whole keyword text.
          List[PsiReference](
            new PythonKeywordToDefinitionReference(element, new TextRange(0, element.getText.length))
          )
        }.toArray
      }
    )
  }
}
| AmailP/robot-plugin | src/main/scala/amailp/intellij/robot/extensions/PythonKeywordReferenceContributor.scala | Scala | gpl-3.0 | 919 |
/*
* Copyright (C) 04/06/13 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gridscale.authentication
import java.io.{ File, FileInputStream, IOException }
import java.security.cert.X509Certificate
import scala.util.Try
object P12Authentication {
  /**
   * Opens the PKCS#12 keystore and extracts the first key entry as the user's
   * credentials (certificate, private key, single-element chain).
   * Throws if the file cannot be read, the password is wrong, or — via the
   * unchecked .get below — no key entry exists in the store.
   */
  def loadPKCS12Credentials(a: P12Authentication): Loaded = {
    val ks = java.security.KeyStore.getInstance("pkcs12")
    ks.load(new java.io.FileInputStream(a.certificate), a.password.toCharArray)
    val aliases = ks.aliases
    import collection.JavaConverters._
    // FIXME GET: fails with NoSuchElementException when the store has no key entry
    val alias = aliases.asScala.find(e ⇒ ks.isKeyEntry(e)).get
    //if (alias == null) throw new VOMSException("No aliases found inside pkcs12 certificate!")
    val userCert = ks.getCertificate(alias).asInstanceOf[X509Certificate]
    val userKey = ks.getKey(alias, a.password.toCharArray).asInstanceOf[java.security.PrivateKey]
    val userChain = Array[X509Certificate](userCert)
    Loaded(userCert, userKey, userChain)
  }
  /** Credentials extracted from a PKCS#12 store. */
  case class Loaded(certificate: X509Certificate, key: java.security.PrivateKey, chain: Array[X509Certificate])
  /** Success(true) when the password opens the store; Failure otherwise. */
  def testPassword(p12Authentication: P12Authentication): Try[Boolean] =
    util.Try(loadPKCS12Credentials(p12Authentication)).map(_ ⇒ true)
}
// A PKCS#12 certificate file together with its password.
case class P12Authentication(certificate: File, password: String)
| openmole/gridscale | gridscale/src/main/scala/gridscale/authentication/P12Authentication.scala | Scala | agpl-3.0 | 1,941 |
/*-
* #%L
* FWAPP Framework
* %%
* Copyright (C) 2016 - 2017 Open Design Flow
* %%
* This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.odfi.wsb.fwapp.swing
import org.odfi.wsb.fwapp.SiteApp
import java.awt.GraphicsEnvironment
import javax.swing.JFrame
import javax.swing.JLabel
import javax.swing.Icon
import javax.swing.ImageIcon
import java.awt.BorderLayout
import org.odfi.wsb.fwapp.Site
import org.apache.batik.swing.JSVGCanvas
import org.apache.batik.swing.svg.JSVGComponent
import org.apache.batik.util.XMLResourceDescriptor
//import org.apache.batik.anim.dom.SAXSVGDocumentFactory
import org.w3c.dom.svg.SVGDocument
import com.idyria.osi.wsb.webapp.http.connector.HTTPConnector
import java.awt.Font
import java.awt.Color
import javax.swing.SwingUtilities
import org.odfi.indesign.core.module.swing.SwingUtilsTrait
import java.awt.event.MouseEvent
import java.awt.event.MouseAdapter
import java.awt.Desktop
import java.net.URI
import java.awt.Cursor
import java.awt.event.WindowStateListener
import java.awt.event.WindowEvent
import org.odfi.indesign.core.heart.Heart
import org.odfi.indesign.core.brain.Brain
import org.w3c.dom.svg.SVGElement
import org.w3c.dom.svg.SVGSVGElement
import org.apache.batik.dom.svg.SVGDocumentFactory
import org.apache.batik.swing.svg.SVGDocumentLoader
import org.apache.batik.anim.dom.SAXSVGDocumentFactory
import java.awt.event.WindowAdapter
import org.apache.batik.swing.svg.SVGDocumentLoaderAdapter
import org.apache.batik.swing.svg.SVGDocumentLoaderEvent
/**
 * A Site that, when a display is available, shows a small startup window with
 * the application's SVG logo and a clickable local URL for each HTTP
 * connector. Runs silently in headless mode or when disableGUI is set.
 */
class SwingPanelSite(path: String) extends Site(path) with SwingUtilsTrait {
  // Set to true to suppress the startup window even when a display exists.
  var disableGUI = false
  var startupFrame: Option[JFrame] = None
  this.onInit {
    GraphicsEnvironment.isHeadless() match {
      case true =>
      case false if (disableGUI) =>
      case other =>
        //frame.add(new JLabel(new ImageIcon(getClass.getClassLoader.getResource("fwapp/ui/logo.png"))), BorderLayout.CENTER)
        // One startup frame per HTTP connector (each advertises its own port).
        this.engine.network.connectors.foreach {
          case hc: HTTPConnector =>
            onSwingThreadLater {
              var frame = new JFrame()
              startupFrame = Some(frame)
              frame.setSize(600, 300)
              frame.getContentPane.setBackground(Color.WHITE)
              var svgPanel = new JSVGCanvas
              // Use Document Loading to start managing colors at the right time
              svgPanel.setDocumentState(JSVGComponent.ALWAYS_DYNAMIC)
              svgPanel.addSVGDocumentLoaderListener(new SVGDocumentLoaderAdapter() {
                override def documentLoadingCompleted(e: SVGDocumentLoaderEvent) = {
                  println("Document Loaded")
                  // Status indicator: red while starting, dark green once the
                  // Brain reports the application has started.
                  svgPanel.getSVGDocument.getElementById("bottom").setAttribute("style", "fill:red")
                  Brain.onStarted {
                    println("Started -> OK")
                    onSwingThreadLater {
                      svgPanel.getSVGDocument.getElementById("bottom").setAttribute("style", "fill:darkgreen")
                      /*svgPanel.getUpdateManager.dispatchSVGZoomEvent()
                      svgPanel.getUpdateManager.getUpdateRunnableQueue.invokeAndWait(new Runnable {
                        def run = {
                          svgPanel.getSVGDocument.getElementById("bottom").setAttribute("style", "fill:darkgreen")
                        }
                      })*/
                    }
                  }
                }
              });
              svgPanel.loadSVGDocument(getClass.getClassLoader.getResource("fwapp/ui/logo.svg").toString())
              frame.add(svgPanel, BorderLayout.CENTER)
              // Clickable label opening the local site URL in the default browser.
              var l = new JLabel(s"${getDisplayName} : http://localhost:${hc.port}${this.basePath}/")
              l.setCursor(new Cursor(Cursor.HAND_CURSOR))
              l.setFont(new Font("Sans Serif", Font.BOLD, 22))
              l.addMouseListener(new MouseAdapter {
                override def mouseClicked(e: MouseEvent) = {
                  if (Desktop.isDesktopSupported()) {
                    Desktop.getDesktop.browse(new URI(s"http://localhost:${hc.port}${basePath}/"))
                  }
                }
              })
              frame.add(l, BorderLayout.SOUTH)
              frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
              /*frame.addWindowListener( new WindowAdapter {
                override def windowClosing(e : WindowEvent) = {
                  println(s"window closing")
                  Brain.moveToShutdown
                }
              })*/
              frame.setVisible(true)
              centerOnScreen(frame)
              // Shutdown hook
              //---------
              /*this.onShutdown {
                onSwingThreadAndWait {
                  startupFrame.get.dispose()
                }
              }*/
              // Shutdown on Closing
              //------------
              sys.addShutdownHook {
                println("Shutdown hook")
                moveToShutdown
              }
              /*frame.addWindowStateListener(new WindowStateListener {
                def windowStateChanged(ev: java.awt.event.WindowEvent) : Unit = {
                  if(ev.getNewState==WindowEvent.
                }
              })*/
            }
          case other =>
        }
    }
  }
  this.onStart {
  }
}
| opendesignflow/fwapp | src/main/scala/org/odfi/wsb/fwapp/swing/SwingPanelSiteApp.scala | Scala | agpl-3.0 | 6,910 |
import scala.language.higherKinds
// Compiler regression test: pattern matching must recover F-bounded and
// higher-kinded type information from extractor patterns.
trait Bound[B]
package p1 {
  // Simple F-bounded case: matching Sub(p) must bind p at a type usable as Bound[A].
  case class Sub[B <: Bound[B]](p: B)
  object Test {
    def g[A](x: Bound[A]) = ()
    def f(x: Any) = x match { case Sub(p) => g(p) }
  }
}
package p2 {
  trait Traversable[+A] { def head: A = ??? }
  trait Seq[+A] extends Traversable[A] { def length: Int = ??? }
  // Higher-kinded case: the container type constructor CC must also be inferred.
  case class SubHK[B <: Bound[B], CC[X] <: Traversable[X]](xs: CC[B])
  class MyBound extends Bound[MyBound]
  class MySeq extends Seq[MyBound]
  object Test {
    def g[B](x: Bound[B]) = ()
    def f1(x: Any) = x match { case SubHK(xs) => xs }
    def f2[B <: Bound[B], CC[X] <: Traversable[X]](sub: SubHK[B, CC]): CC[B] = sub match { case SubHK(xs) => xs }
    def f3 = g(f1(SubHK(new MySeq)).head)
    def f4 = g(f2(SubHK(new MySeq)).head)
  }
}
| felixmulder/scala | test/pending/pos/pattern-typing.scala | Scala | bsd-3-clause | 784 |
package cwl
import better.files.{File => BFile}
import cwl.CwlDecoder.decodeAllCwl
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import shapeless.Coproduct
import wom.expression.WomExpression
import wom.graph.Graph.ResolvedExecutableInput
import wom.graph.GraphNodePort
import wom.values._
/**
 * Validates resolution of CWL workflow inputs against a WOM executable:
 * a temporary workflow with one input per CWL primitive type is parsed once,
 * then exercised with valid and invalid input files.
 */
class CwlInputValidationSpec extends FlatSpec with Matchers with TableDrivenPropertyChecks with BeforeAndAfterAll {
  behavior of "CWL Wom executable"
  // Temporary workflow definition written before the suite and deleted after.
  var cwlFile: BFile = _
  override def beforeAll(): Unit = {
    cwlFile = BFile.newTemporaryFile().write(
      """
        |cwlVersion: v1.0
        |class: Workflow
        |inputs:
        | w0:
        | type: string
        | default: "hi w0 !"
        | w1: File
        | w2:
        | type: string
        | default: "hi w2 !"
        | w3: int
        | w4: long
        | w5: double
        | w6: float
        | w7: boolean
        |steps: []
        |outputs: []
      """.stripMargin
    )
  }
  override def afterAll(): Unit = {
    cwlFile.delete()
    ()
  }
  // Parse the workflow once; any parse/definition failure fails the suite eagerly.
  lazy val cwlWorkflow = decodeAllCwl(cwlFile).map {
    _.select[Workflow].get
  }.value.unsafeRunSync.fold(error => throw new RuntimeException(s"broken parse! msg was $error"), identity)
  lazy val graph = cwlWorkflow.womDefinition match {
    case Left(errors) => fail(s"Failed to build wom definition: ${errors.toList.mkString(", ")}")
    case Right(womDef) => womDef.graph
  }
  // Output ports for each declared workflow input, looked up by local name.
  lazy val w0OutputPort = graph.inputNodes.find(_.localName == "w0").getOrElse(fail("Failed to find an input node for w0")).singleOutputPort
  lazy val w1OutputPort = graph.inputNodes.find(_.localName == "w1").getOrElse(fail("Failed to find an input node for w1")).singleOutputPort
  lazy val w2OutputPort = graph.inputNodes.find(_.localName == "w2").getOrElse(fail("Failed to find an input node for w2")).singleOutputPort
  lazy val w3OutputPort = graph.inputNodes.find(_.localName == "w3").getOrElse(fail("Failed to find an input node for w3")).singleOutputPort
  lazy val w4OutputPort = graph.inputNodes.find(_.localName == "w4").getOrElse(fail("Failed to find an input node for w4")).singleOutputPort
  lazy val w5OutputPort = graph.inputNodes.find(_.localName == "w5").getOrElse(fail("Failed to find an input node for w5")).singleOutputPort
  lazy val w6OutputPort = graph.inputNodes.find(_.localName == "w6").getOrElse(fail("Failed to find an input node for w6")).singleOutputPort
  lazy val w7OutputPort = graph.inputNodes.find(_.localName == "w7").getOrElse(fail("Failed to find an input node for w7")).singleOutputPort
  // Resolves the given input file against the workflow, failing the test on errors.
  def validate(inputFile: String): Map[GraphNodePort.OutputPort, ResolvedExecutableInput] = {
    cwlWorkflow.womExecutable(Option(inputFile)) match {
      case Left(errors) => fail(s"Failed to build a wom executable: ${errors.toList.mkString(", ")}")
      case Right(executable) => executable.resolvedExecutableInputs
    }
  }
  it should "parse and validate a valid input file" in {
    val inputFile =
      """
        w1:
          class: File
          path: my_file.txt
        w2: hello !
        w3: 3
        w4: 4
        w5: 5.1
        w6: 6.1
        w7: true
      """.stripMargin
    val validInputs = validate(inputFile).map {
      case (port, resolvedInput) => (port.name, resolvedInput)
    }
    // w0 has no input value in the input file, so it should fallback to the default value
    // TODO WOM: when we have string value for wom expression, check that it's "hi !"
    validInputs(w0OutputPort.name).select[WomExpression].isDefined shouldBe true
    validInputs(w1OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomFile("my_file.txt"): WomValue)
    validInputs(w2OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomString("hello !"): WomValue)
    validInputs(w3OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomInteger(3): WomValue)
    validInputs(w4OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomInteger(4): WomValue)
    validInputs(w5OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomFloat(5.1F): WomValue)
    validInputs(w6OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomFloat(6.1F): WomValue)
    validInputs(w7OutputPort.name) shouldBe Coproduct[ResolvedExecutableInput](WomBoolean(true): WomValue)
  }
  it should "not validate when required inputs are missing" in {
    val inputFile =
      """
        w2: hello !
      """.stripMargin
    cwlWorkflow.womExecutable(Option(inputFile)) match {
      case Right(booh) => fail(s"Expected failed validation but got valid input map: $booh")
      case Left(errors) => errors.toList.toSet shouldBe Set(
        "Required workflow input 'w1' not specified",
        "Required workflow input 'w3' not specified",
        "Required workflow input 'w4' not specified",
        "Required workflow input 'w5' not specified",
        "Required workflow input 'w6' not specified",
        "Required workflow input 'w7' not specified"
      )
    }
  }
}
| ohsu-comp-bio/cromwell | cwl/src/test/scala/cwl/CwlInputValidationSpec.scala | Scala | bsd-3-clause | 5,046 |
/*
* Copyright (c) 2016
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.lt.flinkdt.tasks
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala.{ExecutionEnvironment, DataSet}
import org.apache.flink.core.fs.Path
import scala.reflect.ClassTag
/**
* Created by Steffen Remus.
*/
/**
 * Factory for [[Checkpointed]]: wraps a [[DSTask]] so that its output is
 * materialized at a checkpoint location and reused on subsequent runs.
 */
object Checkpointed {

  /**
   * Wraps `f` with filesystem checkpointing.
   *
   * @param f the task whose result should be checkpointed
   * @param out path of the checkpoint; if data already exists there, running
   *            `f` is skipped and the checkpoint is read back instead
   * @param jobname name handed to the writer job (may be null)
   * @param reReadFromCheckpoint if true, the written data is re-read from the
   *        checkpoint and provided to the upcoming task (this might improve
   *        performance on big datasets); otherwise the in-memory intermediate
   *        result is re-used directly
   * @param env execution environment handed to the reader (may be null)
   * @tparam I input element type
   * @tparam O output element type
   * @return a [[Checkpointed]] task wrapping `f`
   */
  def apply[I : ClassTag : TypeInformation, O : ClassTag : TypeInformation](f:DSTask[I,O], out:String, jobname:String = null, reReadFromCheckpoint:Boolean=false, env:ExecutionEnvironment = null) = new Checkpointed[I,O](f, out, jobname, reReadFromCheckpoint, env)
}
/**
 * Wraps a [[DSTask]] with filesystem checkpointing: if a checkpoint already
 * exists at `out`, the wrapped task is skipped entirely and the checkpoint is
 * read back; otherwise the task runs and (when `out` is non-null) its result
 * is written to `out` for later runs.
 */
class Checkpointed[I : ClassTag : TypeInformation, O : ClassTag : TypeInformation](f:DSTask[I,O], out:String, jobname:String = null, reReadFromCheckpoint:Boolean = false, env:ExecutionEnvironment = null) extends DSTask[I,O] {

  // Lazy so that a null `out` (checkpointing disabled) no longer crashes:
  // previously this was a strict val, and `new Path(null)` would throw during
  // construction, before the `out != null` guard in process() could take effect.
  lazy val output_path: Path = new Path(out)

  /**
   * Runs `f` on `ds` unless a checkpoint already exists at `out`.
   *
   * @param ds input dataset
   * @return data read from the checkpoint (when it pre-existed, or when
   *         `reReadFromCheckpoint` is set after writing), otherwise the
   *         in-memory result of `f`
   */
  override def process(ds: DataSet[I]): DataSet[O] = {
    if (out != null && output_path.getFileSystem.exists(output_path)) {
      // Checkpoint already present: skip the computation and read it back.
      DSReader[O](out, env).process()
    } else {
      val result = f(ds)
      if (out == null) {
        // Checkpointing disabled: pass the computed data straight through.
        result
      } else {
        DSWriter[O](out, jobname).process(result)
        if (reReadFromCheckpoint)
          // Throw away the in-memory intermediate and continue with re-read data.
          DSReader[O](out, env).process()
        else
          // Re-use the already-computed dataset directly.
          result
      }
    }
  }
}
| remstef/flinkfun | src/main/scala/de/tudarmstadt/lt/flinkdt/tasks/Checkpointed.scala | Scala | apache-2.0 | 2,223 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.nio
/** Factory methods for [[LongBuffer]], mirroring the JDK API. */
object LongBuffer {
  // Seed mixed into generic_hashCode; equals "java.nio.LongBuffer".##
  private final val HashSeed = -1709696158 // "java.nio.LongBuffer".##

  /** A fresh zero-filled heap buffer of the given capacity. */
  def allocate(capacity: Int): LongBuffer = {
    val backingArray = new Array[Long](capacity)
    wrap(backingArray, 0, backingArray.length)
  }

  /** Wraps `array` with the given offset/length (JDK `LongBuffer.wrap` contract). */
  def wrap(array: Array[Long], offset: Int, length: Int): LongBuffer =
    HeapLongBuffer.wrap(array, 0, array.length, offset, length, false)

  /** Wraps the whole of `array`. */
  def wrap(array: Array[Long]): LongBuffer =
    HeapLongBuffer.wrap(array, 0, array.length, 0, array.length, false)
}
/**
 * java.nio.LongBuffer for Scala.js. Most operations delegate to the
 * GenBuffer "template" helpers, which are instantiated per buffer type via
 * the private type members declared below.
 */
abstract class LongBuffer private[nio] (
    _capacity: Int, private[nio] val _array: Array[Long],
    private[nio] val _arrayOffset: Int)
    extends Buffer(_capacity) with Comparable[LongBuffer] {

  // Type members consumed by the GenBuffer generic implementations.
  private[nio] type ElementType = Long
  private[nio] type BufferType = LongBuffer
  // Long buffers have no JavaScript typed-array counterpart.
  private[nio] type TypedArrayType = Null

  // Constructor for buffers without an accessible backing array.
  def this(_capacity: Int) = this(_capacity, null, -1)

  def slice(): LongBuffer

  def duplicate(): LongBuffer

  def asReadOnlyBuffer(): LongBuffer

  // Relative and absolute single-element accessors (implemented by subclasses).
  def get(): Long

  def put(l: Long): LongBuffer

  def get(index: Int): Long

  def put(index: Int, l: Long): LongBuffer

  // Bulk transfers, implemented generically.
  @noinline
  def get(dst: Array[Long], offset: Int, length: Int): LongBuffer =
    GenBuffer(this).generic_get(dst, offset, length)

  def get(dst: Array[Long]): LongBuffer =
    get(dst, 0, dst.length)

  @noinline
  def put(src: LongBuffer): LongBuffer =
    GenBuffer(this).generic_put(src)

  @noinline
  def put(src: Array[Long], offset: Int, length: Int): LongBuffer =
    GenBuffer(this).generic_put(src, offset, length)

  final def put(src: Array[Long]): LongBuffer =
    put(src, 0, src.length)

  @inline final def hasArray(): Boolean =
    GenBuffer(this).generic_hasArray()

  @inline final def array(): Array[Long] =
    GenBuffer(this).generic_array()

  @inline final def arrayOffset(): Int =
    GenBuffer(this).generic_arrayOffset()

  // Covariant overrides of Buffer's cursor methods, returning LongBuffer
  // so calls can be chained.
  @inline override def position(newPosition: Int): LongBuffer = {
    super.position(newPosition)
    this
  }

  @inline override def limit(newLimit: Int): LongBuffer = {
    super.limit(newLimit)
    this
  }

  @inline override def mark(): LongBuffer = {
    super.mark()
    this
  }

  @inline override def reset(): LongBuffer = {
    super.reset()
    this
  }

  @inline override def clear(): LongBuffer = {
    super.clear()
    this
  }

  @inline override def flip(): LongBuffer = {
    super.flip()
    this
  }

  @inline override def rewind(): LongBuffer = {
    super.rewind()
    this
  }

  def compact(): LongBuffer

  def isDirect(): Boolean

  // toString(): String inherited from Buffer

  @noinline
  override def hashCode(): Int =
    GenBuffer(this).generic_hashCode(LongBuffer.HashSeed)

  // Content-based equality, defined consistently with compareTo.
  override def equals(that: Any): Boolean = that match {
    case that: LongBuffer => compareTo(that) == 0
    case _                => false
  }

  @noinline
  def compareTo(that: LongBuffer): Int =
    GenBuffer(this).generic_compareTo(that)(java.lang.Long.compare(_, _))

  def order(): ByteOrder

  // Internal API

  // Raw element access at an absolute index, implemented by subclasses.
  private[nio] def load(index: Int): Long

  private[nio] def store(index: Int, elem: Long): Unit

  @inline
  private[nio] def load(startIndex: Int,
      dst: Array[Long], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_load(startIndex, dst, offset, length)

  @inline
  private[nio] def store(startIndex: Int,
      src: Array[Long], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_store(startIndex, src, offset, length)
}
| scala-js/scala-js | javalib/src/main/scala/java/nio/LongBuffer.scala | Scala | apache-2.0 | 3,630 |
import leon.lang._
import leon.lang.synthesis._
import leon.annotation._
import leon.collection._
/**
 * Leon synthesis benchmark: run-length encoding.
 * `decode` and `legal` specify the problem; `encode` is the synthesis hole
 * (`???`) whose intended implementation is kept in the comment for reference.
 */
object RunLength {

  // Expands (count, value) pairs into `count` repetitions of `value` each.
  def decode[A](l: List[(BigInt, A)]): List[A] = {
    // `i` copies of `a`; empty for i <= 0. (The type parameter A here shadows
    // the outer one, but both are instantiated identically at the call site.)
    def fill[A](i: BigInt, a: A): List[A] = {
      if (i > 0) a :: fill(i - 1, a)
      else Nil[A]()
    }
    l match {
      case Nil() => Nil[A]()
      case Cons((i, x), xs) =>
        fill(i, x) ++ decode(xs)
    }
  }

  // A well-formed encoding: every count is strictly positive and no two
  // adjacent runs carry the same value.
  def legal[A](l: List[(BigInt, A)]): Boolean = l match {
    case Nil() => true
    case Cons((i, _), Nil()) => i > 0
    case Cons((i, x), tl@Cons((_, y), _)) =>
      i > 0 && x != y && legal(tl)
  }

  // Synthesis goal: produce a legal encoding that decodes back to `l`.
  def encode[A](l: List[A]): List[(BigInt, A)] = {
    // Solution
    /*l match {
      case Nil() => Nil[(BigInt, A)]()
      case Cons(x, xs) =>
        val rec = encode(xs)
        rec match {
          case Nil() =>
            Cons( (BigInt(1), x), Nil[(BigInt,A)]())
          case Cons( (recC, recEl), recTl) =>
            if (x == recEl) {
              Cons( (1+recC, x), recTl)
            } else {
              Cons( (BigInt(1), x), rec )
            }
        }
    }*/
    ???[List[(BigInt, A)]]
  } ensuring {
    (res: List[(BigInt, A)]) =>
      legal(res) && decode(res) == l
  }
}
| epfl-lara/leon | testcases/synthesis/current/RunLength/RunLength.scala | Scala | gpl-3.0 | 1,204 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2010 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.messaging
import java.util.Random
import java.util.concurrent.atomic.AtomicInteger
import scala.Array.canBuildFrom
import com.signalcollect.Edge
import com.signalcollect.GraphEditor
import com.signalcollect.Vertex
import com.signalcollect.interfaces.Coordinator
import com.signalcollect.interfaces.EdgeId
import com.signalcollect.interfaces.MessageBus
import com.signalcollect.interfaces.Request
import com.signalcollect.interfaces.SignalMessage
import com.signalcollect.interfaces.VertexToWorkerMapper
import com.signalcollect.interfaces.WorkerApi
import akka.actor.ActorRef
import akka.actor.actorRef2Scala
import akka.event.Logging.LogEvent
import com.signalcollect.interfaces.AddVertex
import com.signalcollect.interfaces.AddEdge
/**
 * Shared implementation of [[MessageBus]]: routes messages to workers, nodes,
 * the coordinator and the logger, and maintains per-recipient send counters
 * plus a received counter (read by termination detection).
 *
 * NOTE(review): the recipient ActorRef arrays are plain arrays populated
 * during registration, while the counters are atomic — presumably callers
 * wait for `isInitialized` before sending; confirm.
 */
trait AbstractMessageBus[@specialized(Int, Long) Id, @specialized(Int, Long, Float, Double) Signal]
  extends MessageBus[Id, Signal] with GraphEditor[Id, Signal] {

  def reset {}

  // Number of recipients that have registered so far.
  protected val registrations = new AtomicInteger()

  def flush = {}

  // Fully initialized once all workers, all nodes, the coordinator and the
  // logger (hence the "+ 2") have registered.
  def isInitialized = registrations.get == numberOfWorkers + numberOfNodes + 2

  // Results of requests are received using temporary actors, but for termination detection to work,
  // the send count should be still credited to the actual recipient of the reply.
  protected def sendCountIncrementorForRequests: MessageBus[_, _] => Unit

  // Maps a vertex id (or its hash) to the worker responsible for it.
  protected val mapper: VertexToWorkerMapper[Id] = new DefaultVertexToWorkerMapper[Id](numberOfWorkers)

  protected val workers = new Array[ActorRef](numberOfWorkers)
  protected val nodes = new Array[ActorRef](numberOfNodes)
  protected val workerIds = (0 until numberOfWorkers).toList
  protected var logger: ActorRef = _
  protected var coordinator: ActorRef = _

  // --- Send counters, credited per recipient ---
  def incrementMessagesSentToWorker(workerId: Int) = sentWorkerMessageCounters(workerId).incrementAndGet
  def incrementMessagesSentToNode(nodeId: Int) = sentNodeMessageCounters(nodeId).incrementAndGet
  def incrementMessagesSentToCoordinator = sentCoordinatorMessageCounter.incrementAndGet
  def incrementMessagesSentToOthers = sentOtherMessageCounter.incrementAndGet

  protected val sentWorkerMessageCounters: Array[AtomicInteger] = getInitializedAtomicArray(numberOfWorkers)
  protected val sentNodeMessageCounters: Array[AtomicInteger] = getInitializedAtomicArray(numberOfNodes)
  protected val sentCoordinatorMessageCounter = new AtomicInteger(0)
  protected val sentOtherMessageCounter = new AtomicInteger(0)

  // Snapshots of the counters above.
  def messagesSentToWorkers: Array[Int] = sentWorkerMessageCounters.map((c: AtomicInteger) => c.get)
  def messagesSentToNodes: Array[Int] = sentNodeMessageCounters.map((c: AtomicInteger) => c.get)
  def messagesSentToCoordinator: Int = sentCoordinatorMessageCounter.get
  def messagesSentToOthers: Int = sentOtherMessageCounter.get

  // Allocates an array of `numberOfEntries` counters, each initialized to 0.
  protected def getInitializedAtomicArray(numberOfEntries: Int): Array[AtomicInteger] = {
    val atomicInts = new Array[AtomicInteger](numberOfEntries)
    for (i <- 0 until numberOfEntries) {
      atomicInts(i) = new AtomicInteger(0)
    }
    atomicInts
  }

  protected val receivedMessagesCounter = new AtomicInteger(0)
  def getReceivedMessagesCounter: AtomicInteger = receivedMessagesCounter

  // One blocking proxy per worker; message counts for proxy calls are credited
  // to that worker's send counter.
  lazy val workerProxies: Array[WorkerApi[Id, Signal]] = {
    val result = new Array[WorkerApi[Id, Signal]](numberOfWorkers)
    for (workerId <- workerIds) {
      result(workerId) = AkkaProxy.newInstanceWithIncrementor[WorkerApi[Id, Signal]](
        workers(workerId),
        sendCountIncrementorForRequests,
        sentWorkerMessageCounters(workerId),
        receivedMessagesCounter)
    }
    result
  }

  def workerApi: WorkerApi[Id, Signal]

  def messagesReceived = receivedMessagesCounter.get

  //--------------------MessageRecipientRegistry--------------------

  override def registerWorker(workerId: Int, worker: ActorRef) {
    workers(workerId) = worker
    registrations.incrementAndGet
  }

  override def registerNode(nodeId: Int, node: ActorRef) {
    nodes(nodeId) = node
    registrations.incrementAndGet
  }

  override def registerCoordinator(c: ActorRef) {
    coordinator = c
    registrations.incrementAndGet
  }

  override def registerLogger(l: ActorRef) {
    logger = l
    registrations.incrementAndGet
  }

  //--------------------MessageBus--------------------

  // Direct send; intentionally not counted against any recipient counter.
  override def sendToActor(actor: ActorRef, message: Any) {
    actor ! message
  }

  override def sendToWorkerForVertexId(message: Any, recipientId: Id) {
    val workerId = mapper.getWorkerIdForVertexId(recipientId)
    sendToWorker(workerId, message)
  }

  override def sendToWorkerForVertexIdHash(message: Any, recipientIdHash: Int) {
    val workerId = mapper.getWorkerIdForVertexIdHash(recipientIdHash)
    sendToWorker(workerId, message)
  }

  override def sendToWorker(workerId: Int, message: Any) {
    incrementMessagesSentToWorker(workerId)
    workers(workerId) ! message
  }

  // Broadcast; counting is optional because some control messages are
  // excluded from termination detection.
  override def sendToWorkers(message: Any, messageCounting: Boolean) {
    for (workerId <- 0 until numberOfWorkers) {
      if (messageCounting) {
        incrementMessagesSentToWorker(workerId)
      }
      workers(workerId) ! message
    }
  }

  override def sendToNode(nodeId: Int, message: Any) {
    incrementMessagesSentToNode(nodeId)
    nodes(nodeId) ! message
  }

  override def sendToNodes(message: Any, messageCounting: Boolean) {
    for (nodeId <- 0 until numberOfNodes) {
      if (messageCounting) {
        incrementMessagesSentToNode(nodeId)
      }
      nodes(nodeId) ! message
    }
  }

  override def sendToCoordinator(message: Any) {
    incrementMessagesSentToCoordinator
    coordinator ! message
  }

  override def getWorkerIdForVertexId(vertexId: Id): Int = mapper.getWorkerIdForVertexId(vertexId)

  override def getWorkerIdForVertexIdHash(vertexIdHash: Int): Int = mapper.getWorkerIdForVertexIdHash(vertexIdHash)

  //--------------------GraphEditor--------------------

  /**
   * Sends `signal` to the vertex with id `targetId`. Blocking variant goes
   * through the worker proxy; otherwise a fire-and-forget message is routed
   * to the responsible worker.
   */
  override def sendSignal(signal: Signal, targetId: Id, sourceId: Option[Id], blocking: Boolean = false) {
    if (blocking) {
      // Use proxy.
      workerApi.processSignal(signal, targetId, sourceId)
    } else {
      // Manually send a fire & forget request.
      sendToWorkerForVertexId(SignalMessage(targetId, sourceId, signal), targetId)
    }
  }

  override def addVertex(vertex: Vertex[Id, _], blocking: Boolean = false) {
    if (blocking) {
      // Use proxy.
      workerApi.addVertex(vertex)
    } else {
      // Manually send a fire & forget request.
      sendToWorkerForVertexId(AddVertex(vertex), vertex.id)
    }
  }

  override def addEdge(sourceId: Id, edge: Edge[Id], blocking: Boolean = false) {
    // thread that uses an object should instantiate it (performance)
    if (blocking) {
      // use proxy
      workerApi.addEdge(sourceId, edge)
    } else {
      // Manually send a fire & forget request.
      sendToWorkerForVertexId(AddEdge(sourceId, edge), sourceId)
    }
  }

  override def removeVertex(vertexId: Id, blocking: Boolean = false) {
    if (blocking) {
      // use proxy
      workerApi.removeVertex(vertexId)
    } else {
      // manually send a fire & forget request
      val request = Request[WorkerApi[Id, Signal]](
        (_.removeVertex(vertexId)),
        returnResult = false,
        sendCountIncrementorForRequests)
      sendToWorkerForVertexId(request, vertexId)
    }
  }

  override def removeEdge(edgeId: EdgeId[Id], blocking: Boolean = false) {
    if (blocking) {
      // use proxy
      workerApi.removeEdge(edgeId)
    } else {
      // manually send a fire & forget request
      val request = Request[WorkerApi[Id, Signal]](
        (_.removeEdge(edgeId)),
        returnResult = false,
        sendCountIncrementorForRequests)
      // Edge removal is handled by the worker owning the source vertex.
      sendToWorkerForVertexId(request, edgeId.sourceId)
    }
  }

  // Applies an arbitrary graph modification on one worker; the hint pins the
  // worker, otherwise a random worker is chosen.
  // NOTE(review): in the non-blocking branch the hint is used only for
  // routing and is not forwarded inside the request — presumably the
  // worker-side overload defaults it; confirm.
  override def modifyGraph(graphModification: GraphEditor[Id, Signal] => Unit, vertexIdHint: Option[Id] = None, blocking: Boolean = false) {
    if (blocking) {
      workerApi.modifyGraph(graphModification, vertexIdHint)
    } else {
      val request = Request[WorkerApi[Id, Signal]](
        (_.modifyGraph(graphModification)),
        returnResult = false,
        sendCountIncrementorForRequests)
      if (vertexIdHint.isDefined) {
        val workerId = mapper.getWorkerIdForVertexId(vertexIdHint.get)
        sendToWorker(workerId, request)
      } else {
        val rand = new Random
        sendToWorker(rand.nextInt(numberOfWorkers), request)
      }
    }
  }

  // Bulk-loads graph modifications on one worker (hinted or random);
  // always fire-and-forget (second argument: returnResult = false).
  override def loadGraph(graphModifications: Iterator[GraphEditor[Id, Signal] => Unit], vertexIdHint: Option[Id]) {
    val request = Request[WorkerApi[Id, Signal]](
      (_.loadGraph(graphModifications)),
      false,
      sendCountIncrementorForRequests)
    if (vertexIdHint.isDefined) {
      val workerId = mapper.getWorkerIdForVertexId(vertexIdHint.get)
      sendToWorker(workerId, request)
    } else {
      val rand = new Random
      sendToWorker(rand.nextInt(numberOfWorkers), request)
    }
  }

  //--------------------Access to high-level messaging constructs--------------------

  def getGraphEditor: GraphEditor[Id, Signal] = this

  def getWorkerApi: WorkerApi[Id, Signal] = workerApi

  def getWorkerProxies: Array[WorkerApi[Id, Signal]] = workerProxies
}
| gmazlami/dcop-maxsum | src/main/scala/com/signalcollect/messaging/AbstractMessageBus.scala | Scala | apache-2.0 | 9,949 |
// Negative compilation test for scala.util.NotGiven: NotGiven[T] is
// summonable exactly when no implicit/given T is available. The `// error`
// marker below is checked by the compiler test harness — do not move it.
object Test {
  import scala.util.NotGiven

  class Foo
  class Bar

  // An implicit Foo instance exists in scope ...
  implicit def foo: Foo = ???

  implicitly[Foo]
  // ... so NotGiven[Foo] must fail to resolve (expected compile error):
  implicitly[NotGiven[Foo]] // error
  // No implicit Bar exists, so NotGiven[Bar] resolves successfully.
  implicitly[NotGiven[Bar]]
}
| lampepfl/dotty | tests/neg/i5234a.scala | Scala | apache-2.0 | 183 |
package com.olegych.scastie.client.components.editor
import org.scalajs.dom.raw.{HTMLElement, HTMLPreElement}
import com.olegych.scastie.api.Completion
import codemirror.{Hint, HintConfig, CodeMirror, TextAreaEditor, modeScala}
import codemirror.CodeMirror.{Pos => CMPosition}
import japgolly.scalajs.react.Callback
import org.scalajs.dom
import scala.scalajs.js
/**
 * Renders scastie completion results as a CodeMirror hint (autocomplete)
 * popup attached to the editor.
 */
object AutocompletionRender {

  // Characters considered part of an identifier when scanning backwards for
  // the start of the word under the cursor. Hoisted to the object so the set
  // is built once instead of on every render call.
  private val wordChars: Set[Char] =
    ('a' to 'z').toSet ++ ('A' to 'Z').toSet ++ ('0' to '9').toSet

  /**
   * Builds the DOM for one entry of the hint list: the completion name plus
   * its signature and result type, both syntax-highlighted via runMode.
   */
  private def renderSingleAutocompletion(el: HTMLElement,
                                         completion: Completion,
                                         nextProps: Editor): Unit = {
    val hint = dom.document
      .createElement("span")
      .asInstanceOf[HTMLPreElement]
    hint.className = "name cm-def"
    hint.textContent = completion.hint

    val signature = dom.document
      .createElement("pre")
      .asInstanceOf[HTMLPreElement]
    signature.className = "signature"
    CodeMirror.runMode(completion.signature, modeScala, signature)

    val resultType = dom.document
      .createElement("pre")
      .asInstanceOf[HTMLPreElement]
    resultType.className = "result-type"
    CodeMirror.runMode(completion.resultType, modeScala, resultType)

    el.appendChild(hint)
    el.appendChild(signature)
    el.appendChild(resultType)

    if (nextProps.isPresentationMode) {
      // The hints container sits two levels above the rendered signature.
      val hintsDiv = signature.parentElement.parentElement
      hintsDiv.className = hintsDiv.className.concat(" presentation-mode")
    }
  }

  /** Callback wrapper around [[render]]. */
  def apply(editor: TextAreaEditor,
            currentProps: Option[Editor],
            nextProps: Editor,
            state: EditorState,
            modState: (EditorState => EditorState) => Callback): Callback = {
    Callback(render(editor, currentProps, nextProps, state, modState))
  }

  /**
   * Shows (or dismisses) the completion popup. Runs when a completion request
   * is pending (Requested/NeedRender) or when the completion list changed
   * relative to the previous props; otherwise does nothing.
   */
  def render(editor: TextAreaEditor,
             currentProps: Option[Editor],
             nextProps: Editor,
             state: EditorState,
             modState: (EditorState => EditorState) => Callback): Unit = {
    val completionsChanged =
      !nextProps.completions.equals(currentProps.getOrElse(nextProps).completions)

    if (state.completionState == Requested ||
        state.completionState == NeedRender ||
        completionsChanged) {

      state.loadingMessage.hide()

      val doc = editor.getDoc()
      val cursor = doc.getCursor()
      val currLine = cursor.line
      val to = cursor.ch
      val lineContent = doc.getLine(currLine).getOrElse("")

      // Column of the start of the identifier under the cursor: find the last
      // non-word character before the cursor; lastIndexWhere yields -1 when
      // the whole prefix is word characters, so `+ 1` gives column 0 then.
      val fr = lineContent.lastIndexWhere(c => !wordChars.contains(c), to - 1) + 1

      val text = doc.getValue()
      val currPos = doc.indexFromPos(doc.getCursor())
      // The typed prefix used to filter the completion list.
      val filter = text.substring(doc.indexFromPos(new CMPosition {
        line = currLine; ch = fr
      }), currPos)

      // Stop autocompleting once the user has reached the brackets "()".
      // Bounds are guarded: at the very start/end of the document the
      // two-character window around the cursor does not exist (previously
      // this substring could throw at position 0).
      val enteredBrackets =
        currPos > 0 && currPos < text.length &&
          text.substring(currPos - 1, currPos + 1) == "()" &&
          state.completionState != Requested

      if (enteredBrackets || nextProps.completions.isEmpty) {
        modState(_.copy(completionState = Idle)).runNow()
      } else {
        // Auto-pick a single completion only on the user's first request.
        val completeSingle =
          nextProps.completions.length == 1 &&
            state.completionState == Requested

        CodeMirror.showHint(
          editor,
          (_, _) => {
            js.Dictionary(
              "from" -> new CMPosition {
                line = currLine; ch = fr
              },
              "to" -> new CMPosition {
                line = currLine; ch = to
              },
              "list" ->
                nextProps.completions
                  .filter(_.hint.startsWith(filter)) // FIXME: can place not 'important' completions first
                  .map { completion =>
                    HintConfig
                      .className("autocomplete")
                      .text(completion.hint)
                      .render(
                        (el, _, _) =>
                          renderSingleAutocompletion(el, completion, nextProps)
                      ): Hint
                  }
                  .to[js.Array]
            )
          },
          js.Dictionary[Any](
            "container" -> dom.document.querySelector(".CodeMirror"),
            "alignWithWord" -> true,
            "completeSingle" -> completeSingle
          )
        )

        modState(_.copy(completionState = Active)).runNow()
        if (completeSingle) {
          modState(_.copy(completionState = Idle)).runNow()
          nextProps.clearCompletions.runNow()
        }
      }
    }
    // Previously the else-branch constructed a Callback(()) that was never
    // run — this method returns Unit, so nothing is needed here.
  }
}
| OlegYch/scastie | client/src/main/scala/com.olegych.scastie.client/components/editor/AutocompletionRender.scala | Scala | apache-2.0 | 4,719 |
package com.wavesplatform.it.sync.smartcontract
import com.typesafe.config.Config
import com.wavesplatform.account.KeyPair
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.it.NodeConfigs
import com.wavesplatform.it.api.SyncHttpApi._
import com.wavesplatform.it.api.TransactionInfo
import com.wavesplatform.it.sync.{setScriptFee, _}
import com.wavesplatform.it.transactions.BaseTransactionSuite
import com.wavesplatform.lang.v1.compiler.Terms
import com.wavesplatform.lang.v1.estimator.v2.ScriptEstimatorV2
import com.wavesplatform.state.{BinaryDataEntry, DataEntry}
import com.wavesplatform.transaction.TxVersion
import com.wavesplatform.transaction.smart.script.ScriptCompiler
/**
 * Verifies that a DataTransaction's `bodyBytes`, as observed from a smart
 * account script, hits the expected maximum size for both encodings:
 *  - V3 script + DataTransaction V1 (binary body),
 *  - V4 script + DataTransaction V2 (protobuf body, additionally checked
 *    with sigVerify over the full body bytes).
 */
class DataTransactionBodyBytesByteVectorSuite extends BaseTransactionSuite {

  // Compiles a script source and returns its base64 representation.
  private def compile(scriptText: String) =
    ScriptCompiler.compile(scriptText, ScriptEstimatorV2).explicitGet()._1.bytes().base64

  // Single-node network with quorum 0 so blocks are produced immediately.
  override protected def nodeConfigs: Seq[Config] =
    NodeConfigs.newBuilder
      .overrideBase(_.quorum(0))
      .withDefault(1)
      .buildNonConflicting()

  // Body size of a V1 data transaction filled with maxDataEntriesV1 below.
  private val maxDataTxV1bodyBytesSize = 153530
  // actually lower than Terms.DataTxMaxBytes

  // Asserts the exact V1 body size and entry count from within the script.
  private val scriptV3 =
    compile(
      s"""
         |{-# STDLIB_VERSION 3 #-}
         |{-# CONTENT_TYPE EXPRESSION #-}
         |
         | match tx {
         |   case dtx: DataTransaction =>
         |     dtx.bodyBytes.size() == $maxDataTxV1bodyBytesSize &&
         |     dtx.data.size() == 5
         |
         |   case _ =>
         |     throw("unexpected")
         | }
         |
       """.stripMargin
    )

  // Asserts the protobuf body size limit, entry count, and a signature check
  // over the full body bytes.
  private val scriptV4 =
    compile(
      s"""
         |{-# STDLIB_VERSION 4 #-}
         |{-# CONTENT_TYPE EXPRESSION #-}
         |
         | match tx {
         |   case dtx: DataTransaction =>
         |     dtx.bodyBytes.size() == ${Terms.DataTxMaxProtoBytes} &&
         |     dtx.data.size() == 6 &&
         |     sigVerify(dtx.bodyBytes, dtx.proofs[0], dtx.senderPublicKey)
         |
         |   case _ =>
         |     throw("unexpected")
         | }
         |
       """.stripMargin
    )

  // Five binary entries sized so a V1 data tx body reaches
  // maxDataTxV1bodyBytesSize exactly (presumably tuned by hand; the first
  // entry absorbs the remainder).
  private val maxDataEntriesV1 =
    List(
      BinaryDataEntry("a", ByteStr.fill(22380)(1)),
      BinaryDataEntry("b", ByteStr.fill(DataEntry.MaxValueSize)(1)),
      BinaryDataEntry("c", ByteStr.fill(DataEntry.MaxValueSize)(1)),
      BinaryDataEntry("d", ByteStr.fill(DataEntry.MaxValueSize)(1)),
      BinaryDataEntry("e", ByteStr.fill(DataEntry.MaxValueSize)(1))
    )

  // One extra entry pads the V2 (protobuf) body up to DataTxMaxProtoBytes.
  private val maxDataEntriesV2 =
    maxDataEntriesV1 :+ BinaryDataEntry("f", ByteStr.fill(12378)(1))

  test("filled data transaction body bytes") {
    checkByteVectorLimit(firstKeyPair, maxDataEntriesV1, scriptV3, TxVersion.V1)
    checkByteVectorLimit(secondKeyPair, maxDataEntriesV2, scriptV4, TxVersion.V2)
  }

  // Installs `script` on `address`, sends a maximal data tx (must pass the
  // script's size assertions), then sends a one-byte-larger payload and
  // expects rejection.
  private def checkByteVectorLimit(address: KeyPair, data: List[BinaryDataEntry], script: String, version: TxVersion) = {
    val setScriptId = sender.setScript(address, Some(script), setScriptFee, waitForTx = true).id
    sender.transactionInfo[TransactionInfo](setScriptId).script.get.startsWith("base64:") shouldBe true

    val scriptInfo = sender.addressScriptInfo(address.toAddress.toString)
    scriptInfo.script.isEmpty shouldBe false
    scriptInfo.scriptText.isEmpty shouldBe false
    scriptInfo.script.get.startsWith("base64:") shouldBe true

    // Maximal payload: accepted (the account script validates the body size).
    sender.putData(address, data, version = version, fee = calcDataFee(data, version) + smartFee, waitForTx = true).id

    // One byte over the limit must be rejected at the API level.
    // NOTE(review): the fee is computed from `data`, not `increasedData` —
    // presumably irrelevant since the tx is rejected for size; confirm.
    val increasedData = data.head.copy(value = data.head.value ++ ByteStr.fromBytes(1)) :: data.tail
    assertBadRequestAndMessage(
      sender.putData(address, increasedData, version = version, fee = calcDataFee(data, version) + smartFee),
      "Too big sequence requested"
    )
  }
}
| wavesplatform/Waves | node-it/src/test/scala/com/wavesplatform/it/sync/smartcontract/DataTransactionBodyBytesByteVectorSuite.scala | Scala | mit | 3,871 |
// NOTE(review): this file reads as generated code — apparently the expected
// output of the definiti Scala code generator (case classes paired with
// companion `verification`/`universe` builders). If it is compared
// byte-for-byte against generator output as a test fixture, even comment-only
// edits would break that comparison — confirm before keeping these notes.
import definiti.native._
import java.time.LocalDateTime

package object my {
  // Verified aggregate: Parent -> Child -> GrandChild.
  case class Parent(date: LocalDateTime, child: Child)
  object Parent {
    // Context-free verification: only delegates to the child's verification.
    val verification: Verification[Parent] = Verification.all(Verification.all(Child.verification).from[Parent](_.child, "child"))
    // Context-dependent verification, parameterized by the current time and a
    // configured minimum for GrandChild.value.
    def universe(now: LocalDateTime, minimalFromConfig: BigDecimal): Verification[Parent] = {
      object Parent0 extends SimpleVerification[Parent]("Date should be after now") {
        override def isValid(nominal: Parent): Boolean = DateExtension.isAfter(nominal.date, now)
      }
      Verification.all(Verification.all(Child.universe(now, minimalFromConfig)).from[Parent](_.child, "child"), Parent0)
    }
  }
  case class Child(date: LocalDateTime, grandChild: GrandChild)
  object Child {
    val verification: Verification[Child] = Verification.all(Verification.all(GrandChild.verification).from[Child](_.grandChild, "grandChild"))
    def universe(now: LocalDateTime, minimalFromConfig: BigDecimal): Verification[Child] = {
      // Child's own rule: its date must be after now + 1 day.
      object Child0 extends SimpleVerification[Child]("Date should be after tomorrow") {
        override def isValid(child: Child): Boolean = DateExtension.isAfter(child.date, DateExtension.plusDays(now, BigDecimal(1)))
      }
      Verification.all(Verification.all(GrandChild.universe(minimalFromConfig)).from[Child](_.grandChild, "grandChild"), Child0)
    }
  }
  case class GrandChild(value: BigDecimal)
  object GrandChild {
    // No context-free constraints on GrandChild.
    val verification: Verification[GrandChild] = Verification.none[GrandChild]
    def universe(minimalFromConfig: BigDecimal): Verification[GrandChild] = {
      object GrandChild0 extends DefinedVerification[GrandChild] {
        private val message: String = "grandChild.universe"
        override def verify(grandChild: GrandChild): Option[Message] = {
          // Fails (with the parameterized message) when value is below the
          // configured minimum.
          if (grandChild.value >= minimalFromConfig) None
          else Some(Message(message, grandChild.value, minimalFromConfig))
        }
      }
      Verification.all(GrandChild0)
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.internal
import java.util.{NoSuchElementException, Properties}
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.collection.immutable
import org.apache.hadoop.fs.Path
import org.apache.parquet.hadoop.ParquetOutputCommitter
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.util.Utils
////////////////////////////////////////////////////////////////////////////////////////////////////
// This file defines the configuration options for Spark SQL.
////////////////////////////////////////////////////////////////////////////////////////////////////
object SQLConf {
private val sqlConfEntries = java.util.Collections.synchronizedMap(
new java.util.HashMap[String, ConfigEntry[_]]())
private def register(entry: ConfigEntry[_]): Unit = sqlConfEntries.synchronized {
require(!sqlConfEntries.containsKey(entry.key),
s"Duplicate SQLConfigEntry. ${entry.key} has been registered")
sqlConfEntries.put(entry.key, entry)
}
private[sql] object SQLConfigBuilder {
def apply(key: String): ConfigBuilder = new ConfigBuilder(key).onCreate(register)
}
val WAREHOUSE_PATH = SQLConfigBuilder("spark.sql.warehouse.dir")
.doc("The default location for managed databases and tables.")
.stringConf
.createWithDefault(Utils.resolveURI("spark-warehouse").toString)
val OPTIMIZER_MAX_ITERATIONS = SQLConfigBuilder("spark.sql.optimizer.maxIterations")
.internal()
.doc("The max number of iterations the optimizer and analyzer runs.")
.intConf
.createWithDefault(100)
val OPTIMIZER_INSET_CONVERSION_THRESHOLD =
SQLConfigBuilder("spark.sql.optimizer.inSetConversionThreshold")
.internal()
.doc("The threshold of set size for InSet conversion.")
.intConf
.createWithDefault(10)
val COMPRESS_CACHED = SQLConfigBuilder("spark.sql.inMemoryColumnarStorage.compressed")
.internal()
.doc("When set to true Spark SQL will automatically select a compression codec for each " +
"column based on statistics of the data.")
.booleanConf
.createWithDefault(true)
val COLUMN_BATCH_SIZE = SQLConfigBuilder("spark.sql.inMemoryColumnarStorage.batchSize")
.internal()
.doc("Controls the size of batches for columnar caching. Larger batch sizes can improve " +
"memory utilization and compression, but risk OOMs when caching data.")
.intConf
.createWithDefault(10000)
val IN_MEMORY_PARTITION_PRUNING =
SQLConfigBuilder("spark.sql.inMemoryColumnarStorage.partitionPruning")
.internal()
.doc("When true, enable partition pruning for in-memory columnar tables.")
.booleanConf
.createWithDefault(true)
val PREFER_SORTMERGEJOIN = SQLConfigBuilder("spark.sql.join.preferSortMergeJoin")
.internal()
.doc("When true, prefer sort merge join over shuffle hash join.")
.booleanConf
.createWithDefault(true)
val RADIX_SORT_ENABLED = SQLConfigBuilder("spark.sql.sort.enableRadixSort")
.internal()
.doc("When true, enable use of radix sort when possible. Radix sort is much faster but " +
"requires additional memory to be reserved up-front. The memory overhead may be " +
"significant when sorting very small rows (up to 50% more in this case).")
.booleanConf
.createWithDefault(true)
val AUTO_BROADCASTJOIN_THRESHOLD = SQLConfigBuilder("spark.sql.autoBroadcastJoinThreshold")
.doc("Configures the maximum size in bytes for a table that will be broadcast to all worker " +
"nodes when performing a join. By setting this value to -1 broadcasting can be disabled. " +
"Note that currently statistics are only supported for Hive Metastore tables where the " +
"command<code>ANALYZE TABLE <tableName> COMPUTE STATISTICS noscan</code> has been run.")
.longConf
.createWithDefault(10L * 1024 * 1024)
val ENABLE_FALL_BACK_TO_HDFS_FOR_STATS =
SQLConfigBuilder("spark.sql.statistics.fallBackToHdfs")
.doc("If the table statistics are not available from table metadata enable fall back to hdfs." +
" This is useful in determining if a table is small enough to use auto broadcast joins.")
.booleanConf
.createWithDefault(false)
val DEFAULT_SIZE_IN_BYTES = SQLConfigBuilder("spark.sql.defaultSizeInBytes")
.internal()
.doc("The default table size used in query planning. By default, it is set to a larger " +
"value than `spark.sql.autoBroadcastJoinThreshold` to be more conservative. That is to say " +
"by default the optimizer will not choose to broadcast a table unless it knows for sure " +
"its size is small enough.")
.longConf
.createWithDefault(-1)
val SHUFFLE_PARTITIONS = SQLConfigBuilder("spark.sql.shuffle.partitions")
.doc("The default number of partitions to use when shuffling data for joins or aggregations.")
.intConf
.createWithDefault(200)
val SHUFFLE_TARGET_POSTSHUFFLE_INPUT_SIZE =
SQLConfigBuilder("spark.sql.adaptive.shuffle.targetPostShuffleInputSize")
.doc("The target post-shuffle input size in bytes of a task.")
.bytesConf(ByteUnit.BYTE)
.createWithDefault(64 * 1024 * 1024)
val ADAPTIVE_EXECUTION_ENABLED = SQLConfigBuilder("spark.sql.adaptive.enabled")
.doc("When true, enable adaptive query execution.")
.booleanConf
.createWithDefault(false)
val SHUFFLE_MIN_NUM_POSTSHUFFLE_PARTITIONS =
SQLConfigBuilder("spark.sql.adaptive.minNumPostShufflePartitions")
.internal()
.doc("The advisory minimal number of post-shuffle partitions provided to " +
"ExchangeCoordinator. This setting is used in our test to make sure we " +
"have enough parallelism to expose issues that will not be exposed with a " +
"single partition. When the value is a non-positive value, this setting will " +
"not be provided to ExchangeCoordinator.")
.intConf
.createWithDefault(-1)
val SUBEXPRESSION_ELIMINATION_ENABLED =
SQLConfigBuilder("spark.sql.subexpressionElimination.enabled")
.internal()
.doc("When true, common subexpressions will be eliminated.")
.booleanConf
.createWithDefault(true)
val CASE_SENSITIVE = SQLConfigBuilder("spark.sql.caseSensitive")
.internal()
.doc("Whether the query analyzer should be case sensitive or not. " +
"Default to case insensitive. It is highly discouraged to turn on case sensitive mode.")
.booleanConf
.createWithDefault(false)
// --- Parquet data source ------------------------------------------------------
val PARQUET_SCHEMA_MERGING_ENABLED = SQLConfigBuilder("spark.sql.parquet.mergeSchema")
  .doc("When true, the Parquet data source merges schemas collected from all data files, " +
    "otherwise the schema is picked from the summary file or a random data file " +
    "if no summary file is available.")
  .booleanConf
  .createWithDefault(false)
// Only meaningful when schema merging is enabled (previous entry).
val PARQUET_SCHEMA_RESPECT_SUMMARIES = SQLConfigBuilder("spark.sql.parquet.respectSummaryFiles")
  .doc("When true, we make assumption that all part-files of Parquet are consistent with " +
    "summary files and we will ignore them when merging schema. Otherwise, if this is " +
    "false, which is the default, we will merge all part-files. This should be considered " +
    "as expert-only option, and shouldn't be enabled before knowing what it means exactly.")
  .booleanConf
  .createWithDefault(false)
val PARQUET_BINARY_AS_STRING = SQLConfigBuilder("spark.sql.parquet.binaryAsString")
  .doc("Some other Parquet-producing systems, in particular Impala and older versions of " +
    "Spark SQL, do not differentiate between binary data and strings when writing out the " +
    "Parquet schema. This flag tells Spark SQL to interpret binary data as a string to provide " +
    "compatibility with these systems.")
  .booleanConf
  .createWithDefault(false)
val PARQUET_INT96_AS_TIMESTAMP = SQLConfigBuilder("spark.sql.parquet.int96AsTimestamp")
  .doc("Some Parquet-producing systems, in particular Impala, store Timestamp into INT96. " +
    "Spark would also store Timestamp as INT96 because we need to avoid precision lost of the " +
    "nanoseconds field. This flag tells Spark SQL to interpret INT96 data as a timestamp to " +
    "provide compatibility with these systems.")
  .booleanConf
  .createWithDefault(true)
val PARQUET_CACHE_METADATA = SQLConfigBuilder("spark.sql.parquet.cacheMetadata")
  .doc("Turns on caching of Parquet schema metadata. Can speed up querying of static data.")
  .booleanConf
  .createWithDefault(true)
// Codec values are normalised to lower case before validation, so users may
// spell them in any case. Fixed a typo in the user-visible doc string
// ("codec use when" -> "codec used when").
val PARQUET_COMPRESSION = SQLConfigBuilder("spark.sql.parquet.compression.codec")
  .doc("Sets the compression codec used when writing Parquet files. Acceptable values " +
    "include: uncompressed, snappy, gzip, lzo.")
  .stringConf
  .transform(_.toLowerCase())
  .checkValues(Set("uncompressed", "snappy", "gzip", "lzo"))
  .createWithDefault("snappy")
val PARQUET_FILTER_PUSHDOWN_ENABLED = SQLConfigBuilder("spark.sql.parquet.filterPushdown")
  .doc("Enables Parquet filter push-down optimization when set to true.")
  .booleanConf
  .createWithDefault(true)
val PARQUET_WRITE_LEGACY_FORMAT = SQLConfigBuilder("spark.sql.parquet.writeLegacyFormat")
  .doc("Whether to follow Parquet's format specification when converting Parquet schema to " +
    "Spark SQL schema and vice versa.")
  .booleanConf
  .createWithDefault(false)
// NOTE: despite living in SQLConf, this key must be set in the Hadoop
// Configuration (see doc text); it also overrides the generic
// spark.sql.sources.outputCommitterClass option.
val PARQUET_OUTPUT_COMMITTER_CLASS = SQLConfigBuilder("spark.sql.parquet.output.committer.class")
  .doc("The output committer class used by Parquet. The specified class needs to be a " +
    "subclass of org.apache.hadoop.mapreduce.OutputCommitter. Typically, it's also a subclass " +
    "of org.apache.parquet.hadoop.ParquetOutputCommitter. NOTE: 1. Instead of SQLConf, this " +
    "option must be set in Hadoop Configuration. 2. This option overrides " +
    "\\"spark.sql.sources.outputCommitterClass\\".")
  .stringConf
  .createWithDefault(classOf[ParquetOutputCommitter].getName)
val PARQUET_VECTORIZED_READER_ENABLED =
  SQLConfigBuilder("spark.sql.parquet.enableVectorizedReader")
    .doc("Enables vectorized parquet decoding.")
    .booleanConf
    .createWithDefault(true)
// --- ORC and Hive interaction -------------------------------------------------
val ORC_FILTER_PUSHDOWN_ENABLED = SQLConfigBuilder("spark.sql.orc.filterPushdown")
  .doc("When true, enable filter pushdown for ORC files.")
  .booleanConf
  .createWithDefault(false)
val HIVE_VERIFY_PARTITION_PATH = SQLConfigBuilder("spark.sql.hive.verifyPartitionPath")
  .doc("When true, check all the partition paths under the table\\'s root directory " +
    "when reading data stored in HDFS.")
  .booleanConf
  .createWithDefault(false)
val HIVE_METASTORE_PARTITION_PRUNING =
  SQLConfigBuilder("spark.sql.hive.metastorePartitionPruning")
    .doc("When true, some predicates will be pushed down into the Hive metastore so that " +
      "unmatching partitions can be eliminated earlier.")
    .booleanConf
    .createWithDefault(false)
// Experimental switch that moves CREATE VIEW handling from Hive to Spark SQL.
// Fixed a typo in the user-visible doc string ("ony" -> "only"); the key,
// type and default are unchanged.
val NATIVE_VIEW = SQLConfigBuilder("spark.sql.nativeView")
  .internal()
  .doc("When true, CREATE VIEW will be handled by Spark SQL instead of Hive native commands. " +
    "Note that this function is experimental and should only be used when you are using " +
    "non-hive-compatible tables written by Spark SQL. The SQL string used to create " +
    "view should be fully qualified, i.e. use `tbl1`.`col1` instead of `*` whenever " +
    "possible, or you may get wrong result.")
  .booleanConf
  .createWithDefault(true)
// Only takes effect when spark.sql.nativeView (NATIVE_VIEW above) is also true.
val CANONICAL_NATIVE_VIEW = SQLConfigBuilder("spark.sql.nativeView.canonical")
  .internal()
  .doc("When this option and spark.sql.nativeView are both true, Spark SQL tries to handle " +
    "CREATE VIEW statement using SQL query string generated from view definition logical " +
    "plan. If the logical plan doesn't have a SQL representation, we fallback to the " +
    "original native view implementation.")
  .booleanConf
  .createWithDefault(true)
val COLUMN_NAME_OF_CORRUPT_RECORD = SQLConfigBuilder("spark.sql.columnNameOfCorruptRecord")
  .doc("The name of internal column for storing raw/un-parsed JSON records that fail to parse.")
  .stringConf
  .createWithDefault("_corrupt_record")
val BROADCAST_TIMEOUT = SQLConfigBuilder("spark.sql.broadcastTimeout")
  .doc("Timeout in seconds for the broadcast wait time in broadcast joins.")
  .intConf
  .createWithDefault(5 * 60)
// This is only used for the thriftserver
val THRIFTSERVER_POOL = SQLConfigBuilder("spark.sql.thriftserver.scheduler.pool")
  .doc("Set a Fair Scheduler pool for a JDBC client session.")
  .stringConf
  .createOptional
val THRIFTSERVER_UI_STATEMENT_LIMIT =
  SQLConfigBuilder("spark.sql.thriftserver.ui.retainedStatements")
    .doc("The number of SQL statements kept in the JDBC/ODBC web UI history.")
    .intConf
    .createWithDefault(200)
val THRIFTSERVER_UI_SESSION_LIMIT = SQLConfigBuilder("spark.sql.thriftserver.ui.retainedSessions")
  .doc("The number of SQL client sessions kept in the JDBC/ODBC web UI history.")
  .intConf
  .createWithDefault(200)
// This is used to set the default data source
val DEFAULT_DATA_SOURCE_NAME = SQLConfigBuilder("spark.sql.sources.default")
  .doc("The default data source to use in input/output.")
  .stringConf
  .createWithDefault("parquet")
// The target data source format is taken from spark.sql.sources.default above.
val CONVERT_CTAS = SQLConfigBuilder("spark.sql.hive.convertCTAS")
  .internal()
  .doc("When true, a table created by a Hive CTAS statement (no USING clause) " +
    "without specifying any storage property will be converted to a data source table, " +
    "using the data source set by spark.sql.sources.default.")
  .booleanConf
  .createWithDefault(false)
val GATHER_FASTSTAT = SQLConfigBuilder("spark.sql.hive.gatherFastStats")
  .internal()
  .doc("When true, fast stats (number of files and total size of all files) will be gathered" +
    " in parallel while repairing table partitions to avoid the sequential listing in Hive" +
    " metastore.")
  .booleanConf
  .createWithDefault(true)
// This is used to control when we will split a schema's JSON string into multiple pieces
// in order to fit the JSON string in metastore's table property (by default, the value has
// a length restriction of 4000 characters). We will split the JSON string of a schema
// if its length exceeds the threshold.
val SCHEMA_STRING_LENGTH_THRESHOLD =
  SQLConfigBuilder("spark.sql.sources.schemaStringLengthThreshold")
    .doc("The maximum length allowed in a single cell when " +
      "storing additional schema information in Hive's metastore.")
    .internal()
    .intConf
    .createWithDefault(4000)
// --- File-based data sources --------------------------------------------------
val PARTITION_DISCOVERY_ENABLED = SQLConfigBuilder("spark.sql.sources.partitionDiscovery.enabled")
  .doc("When true, automatically discover data partitions.")
  .booleanConf
  .createWithDefault(true)
val PARTITION_COLUMN_TYPE_INFERENCE =
  SQLConfigBuilder("spark.sql.sources.partitionColumnTypeInference.enabled")
    .doc("When true, automatically infer the data types for partitioned columns.")
    .booleanConf
    .createWithDefault(true)
val PARTITION_MAX_FILES =
  SQLConfigBuilder("spark.sql.sources.maxConcurrentWrites")
    .doc("The maximum number of concurrent files to open before falling back on sorting when " +
      "writing out files using dynamic partitioning.")
    .intConf
    .createWithDefault(1)
val BUCKETING_ENABLED = SQLConfigBuilder("spark.sql.sources.bucketing.enabled")
  .doc("When false, we will treat bucketed table as normal table")
  .booleanConf
  .createWithDefault(true)
// --- Query semantics ----------------------------------------------------------
val CROSS_JOINS_ENABLED = SQLConfigBuilder("spark.sql.crossJoin.enabled")
  .doc("When false, we will throw an error if a query contains a cross join")
  .booleanConf
  .createWithDefault(false)
val ORDER_BY_ORDINAL = SQLConfigBuilder("spark.sql.orderByOrdinal")
  .doc("When true, the ordinal numbers are treated as the position in the select list. " +
    "When false, the ordinal numbers in order/sort by clause are ignored.")
  .booleanConf
  .createWithDefault(true)
val GROUP_BY_ORDINAL = SQLConfigBuilder("spark.sql.groupByOrdinal")
  .doc("When true, the ordinal numbers in group by clauses are treated as the position " +
    "in the select list. When false, the ordinal numbers are ignored.")
  .booleanConf
  .createWithDefault(true)
// The output committer class used by HadoopFsRelation. The specified class needs to be a
// subclass of org.apache.hadoop.mapreduce.OutputCommitter.
//
// NOTE:
//
// 1. Instead of SQLConf, this option *must be set in Hadoop Configuration*.
// 2. This option can be overridden by "spark.sql.parquet.output.committer.class".
val OUTPUT_COMMITTER_CLASS =
  SQLConfigBuilder("spark.sql.sources.outputCommitterClass").internal().stringConf.createOptional
val PARALLEL_PARTITION_DISCOVERY_THRESHOLD =
  SQLConfigBuilder("spark.sql.sources.parallelPartitionDiscovery.threshold")
    .doc("The degree of parallelism for schema merging and partition discovery of " +
      "Parquet data sources.")
    .intConf
    .createWithDefault(32)
// Whether to automatically resolve ambiguity in join conditions for self-joins.
// See SPARK-6231.
val DATAFRAME_SELF_JOIN_AUTO_RESOLVE_AMBIGUITY =
  SQLConfigBuilder("spark.sql.selfJoinAutoResolveAmbiguity")
    .internal()
    .booleanConf
    .createWithDefault(true)
// Whether to retain group by columns or not in GroupedData.agg.
val DATAFRAME_RETAIN_GROUP_COLUMNS = SQLConfigBuilder("spark.sql.retainGroupColumns")
  .internal()
  .booleanConf
  .createWithDefault(true)
val DATAFRAME_PIVOT_MAX_VALUES = SQLConfigBuilder("spark.sql.pivotMaxValues")
  .doc("When doing a pivot without specifying values for the pivot column this is the maximum " +
    "number of (distinct) values that will be collected without error.")
  .intConf
  .createWithDefault(10000)
val RUN_SQL_ON_FILES = SQLConfigBuilder("spark.sql.runSQLOnFiles")
  .internal()
  .doc("When true, we could use `datasource`.`path` as table in SQL query.")
  .booleanConf
  .createWithDefault(true)
// --- Whole-stage code generation ----------------------------------------------
val WHOLESTAGE_CODEGEN_ENABLED = SQLConfigBuilder("spark.sql.codegen.wholeStage")
  .internal()
  .doc("When true, the whole stage (of multiple operators) will be compiled into single java" +
    " method.")
  .booleanConf
  .createWithDefault(true)
val WHOLESTAGE_MAX_NUM_FIELDS = SQLConfigBuilder("spark.sql.codegen.maxFields")
  .internal()
  .doc("The maximum number of fields (including nested fields) that will be supported before" +
    " deactivating whole-stage codegen.")
  .intConf
  .createWithDefault(100)
val WHOLESTAGE_FALLBACK = SQLConfigBuilder("spark.sql.codegen.fallback")
  .internal()
  .doc("When true, whole stage codegen could be temporary disabled for the part of query that" +
    " fail to compile generated code")
  .booleanConf
  .createWithDefault(true)
val MAX_CASES_BRANCHES = SQLConfigBuilder("spark.sql.codegen.maxCaseBranches")
  .internal()
  .doc("The maximum number of switches supported with codegen.")
  .intConf
  .createWithDefault(20)
// --- File scan planning -------------------------------------------------------
val FILES_MAX_PARTITION_BYTES = SQLConfigBuilder("spark.sql.files.maxPartitionBytes")
  .doc("The maximum number of bytes to pack into a single partition when reading files.")
  .longConf
  .createWithDefault(128 * 1024 * 1024) // parquet.block.size
val FILES_OPEN_COST_IN_BYTES = SQLConfigBuilder("spark.sql.files.openCostInBytes")
  .internal()
  .doc("The estimated cost to open a file, measured by the number of bytes could be scanned in" +
    " the same time. This is used when putting multiple files into a partition. It's better to" +
    " over estimated, then the partitions with small files will be faster than partitions with" +
    " bigger files (which is scheduled first).")
  .longConf
  .createWithDefault(4 * 1024 * 1024)
val EXCHANGE_REUSE_ENABLED = SQLConfigBuilder("spark.sql.exchange.reuse")
  .internal()
  .doc("When true, the planner will try to find out duplicated exchanges and re-use them.")
  .booleanConf
  .createWithDefault(true)
// --- Structured streaming state store -----------------------------------------
val STATE_STORE_MIN_DELTAS_FOR_SNAPSHOT =
  SQLConfigBuilder("spark.sql.streaming.stateStore.minDeltasForSnapshot")
    .internal()
    .doc("Minimum number of state store delta files that needs to be generated before they " +
      "consolidated into snapshots.")
    .intConf
    .createWithDefault(10)
val STATE_STORE_MIN_VERSIONS_TO_RETAIN =
  SQLConfigBuilder("spark.sql.streaming.stateStore.minBatchesToRetain")
    .internal()
    .doc("Minimum number of versions of a state store's data to retain after cleaning.")
    .intConf
    .createWithDefault(2)
val CHECKPOINT_LOCATION = SQLConfigBuilder("spark.sql.streaming.checkpointLocation")
  .doc("The default location for storing checkpoint data for streaming queries.")
  .stringConf
  .createOptional
val UNSUPPORTED_OPERATION_CHECK_ENABLED =
  SQLConfigBuilder("spark.sql.streaming.unsupportedOperationCheck")
    .internal()
    .doc("When true, the logical plan for streaming query will be checked for unsupported" +
      " operations.")
    .booleanConf
    .createWithDefault(true)
// --- Variable substitution ----------------------------------------------------
val VARIABLE_SUBSTITUTE_ENABLED =
  SQLConfigBuilder("spark.sql.variable.substitute")
    .doc("This enables substitution using syntax like ${var} ${system:var} and ${env:var}.")
    .booleanConf
    .createWithDefault(true)
// Depth limit guards against cyclic/runaway substitutions.
val VARIABLE_SUBSTITUTE_DEPTH =
  SQLConfigBuilder("spark.sql.variable.substitute.depth")
    .doc("The maximum replacements the substitution engine will do.")
    .intConf
    .createWithDefault(40)
// Fixed the user-visible doc string: the original string concatenation was
// missing separating spaces, rendering as "aggregate withkeys" and
// "effectivelydisables". Key, type and default are unchanged.
val VECTORIZED_AGG_MAP_MAX_COLUMNS =
  SQLConfigBuilder("spark.sql.codegen.aggregate.map.columns.max")
    .internal()
    .doc("Sets the maximum width of schema (aggregate keys + values) for which aggregate " +
      "with keys uses an in-memory columnar map to speed up execution. Setting this to 0 " +
      "effectively disables the columnar map")
    .intConf
    .createWithDefault(3)
// --- Streaming file sink/source metadata logs ---------------------------------
val FILE_SINK_LOG_DELETION = SQLConfigBuilder("spark.sql.streaming.fileSink.log.deletion")
  .internal()
  .doc("Whether to delete the expired log files in file stream sink.")
  .booleanConf
  .createWithDefault(true)
val FILE_SINK_LOG_COMPACT_INTERVAL =
  SQLConfigBuilder("spark.sql.streaming.fileSink.log.compactInterval")
    .internal()
    .doc("Number of log files after which all the previous files " +
      "are compacted into the next log file.")
    .intConf
    .createWithDefault(10)
val FILE_SINK_LOG_CLEANUP_DELAY =
  SQLConfigBuilder("spark.sql.streaming.fileSink.log.cleanupDelay")
    .internal()
    .doc("How long that a file is guaranteed to be visible for all readers.")
    .timeConf(TimeUnit.MILLISECONDS)
    .createWithDefault(TimeUnit.MINUTES.toMillis(10)) // 10 minutes
// The three fileSource.* entries below mirror the fileSink.* entries above.
val FILE_SOURCE_LOG_DELETION = SQLConfigBuilder("spark.sql.streaming.fileSource.log.deletion")
  .internal()
  .doc("Whether to delete the expired log files in file stream source.")
  .booleanConf
  .createWithDefault(true)
val FILE_SOURCE_LOG_COMPACT_INTERVAL =
  SQLConfigBuilder("spark.sql.streaming.fileSource.log.compactInterval")
    .internal()
    .doc("Number of log files after which all the previous files " +
      "are compacted into the next log file.")
    .intConf
    .createWithDefault(10)
val FILE_SOURCE_LOG_CLEANUP_DELAY =
  SQLConfigBuilder("spark.sql.streaming.fileSource.log.cleanupDelay")
    .internal()
    .doc("How long in milliseconds a file is guaranteed to be visible for all readers.")
    .timeConf(TimeUnit.MILLISECONDS)
    .createWithDefault(TimeUnit.MINUTES.toMillis(10)) // 10 minutes
val STREAMING_SCHEMA_INFERENCE =
  SQLConfigBuilder("spark.sql.streaming.schemaInference")
    .internal()
    .doc("Whether file-based streaming sources will infer its own schema")
    .booleanConf
    .createWithDefault(false)
val STREAMING_POLLING_DELAY =
  SQLConfigBuilder("spark.sql.streaming.pollingDelay")
    .internal()
    .doc("How long to delay polling new data when no data is available")
    .timeConf(TimeUnit.MILLISECONDS)
    .createWithDefault(10L)
val STREAMING_METRICS_ENABLED =
  SQLConfigBuilder("spark.sql.streaming.metricsEnabled")
    .doc("Whether Dropwizard/Codahale metrics will be reported for active streaming queries.")
    .booleanConf
    .createWithDefault(false)
// Holder for deprecated/legacy configuration key names.
// NOTE(review): presumably mapred.reduce.tasks is accepted as an alias for
// spark.sql.shuffle.partitions by the SET handling — confirm at the call site.
object Deprecated {
  val MAPRED_REDUCE_TASKS = "mapred.reduce.tasks"
}
}
/**
* A class that enables the setting and getting of mutable config parameters/hints.
*
* In the presence of a SQLContext, these can be set and queried by passing SET commands
* into Spark SQL's query functions (i.e. sql()). Otherwise, users of this class can
* modify the hints by programmatically calling the setters and getters of this class.
*
* SQLConf is thread-safe (internally synchronized, so safe to be used in multiple threads).
*/
private[sql] class SQLConf extends Serializable with CatalystConf with Logging {
  import SQLConf._
  /** Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap. */
  @transient protected[spark] val settings = java.util.Collections.synchronizedMap(
    new java.util.HashMap[String, String]())
  /** ************************ Spark SQL Params/Hints ******************* */
  // Typed accessors: each method reads one ConfigEntry declared in the
  // companion object and converts the stored string to the entry's type.
  def optimizerMaxIterations: Int = getConf(OPTIMIZER_MAX_ITERATIONS)
  def optimizerInSetConversionThreshold: Int = getConf(OPTIMIZER_INSET_CONVERSION_THRESHOLD)
  def checkpointLocation: Option[String] = getConf(CHECKPOINT_LOCATION)
  def filesMaxPartitionBytes: Long = getConf(FILES_MAX_PARTITION_BYTES)
  def filesOpenCostInBytes: Long = getConf(FILES_OPEN_COST_IN_BYTES)
  def useCompression: Boolean = getConf(COMPRESS_CACHED)
  def parquetCompressionCodec: String = getConf(PARQUET_COMPRESSION)
  def parquetCacheMetadata: Boolean = getConf(PARQUET_CACHE_METADATA)
  def parquetVectorizedReaderEnabled: Boolean = getConf(PARQUET_VECTORIZED_READER_ENABLED)
  def columnBatchSize: Int = getConf(COLUMN_BATCH_SIZE)
  def numShufflePartitions: Int = getConf(SHUFFLE_PARTITIONS)
  def targetPostShuffleInputSize: Long =
    getConf(SHUFFLE_TARGET_POSTSHUFFLE_INPUT_SIZE)
  def adaptiveExecutionEnabled: Boolean = getConf(ADAPTIVE_EXECUTION_ENABLED)
  def minNumPostShufflePartitions: Int =
    getConf(SHUFFLE_MIN_NUM_POSTSHUFFLE_PARTITIONS)
  def parquetFilterPushDown: Boolean = getConf(PARQUET_FILTER_PUSHDOWN_ENABLED)
  def orcFilterPushDown: Boolean = getConf(ORC_FILTER_PUSHDOWN_ENABLED)
  def verifyPartitionPath: Boolean = getConf(HIVE_VERIFY_PARTITION_PATH)
  def metastorePartitionPruning: Boolean = getConf(HIVE_METASTORE_PARTITION_PRUNING)
  def nativeView: Boolean = getConf(NATIVE_VIEW)
  def gatherFastStats: Boolean = getConf(GATHER_FASTSTAT)
  def wholeStageEnabled: Boolean = getConf(WHOLESTAGE_CODEGEN_ENABLED)
  def wholeStageMaxNumFields: Int = getConf(WHOLESTAGE_MAX_NUM_FIELDS)
  def wholeStageFallback: Boolean = getConf(WHOLESTAGE_FALLBACK)
  def maxCaseBranchesForCodegen: Int = getConf(MAX_CASES_BRANCHES)
  def exchangeReuseEnabled: Boolean = getConf(EXCHANGE_REUSE_ENABLED)
  def canonicalView: Boolean = getConf(CANONICAL_NATIVE_VIEW)
  def caseSensitiveAnalysis: Boolean = getConf(SQLConf.CASE_SENSITIVE)
  def subexpressionEliminationEnabled: Boolean =
    getConf(SUBEXPRESSION_ELIMINATION_ENABLED)
  def autoBroadcastJoinThreshold: Long = getConf(AUTO_BROADCASTJOIN_THRESHOLD)
  def fallBackToHdfsForStatsEnabled: Boolean = getConf(ENABLE_FALL_BACK_TO_HDFS_FOR_STATS)
  def preferSortMergeJoin: Boolean = getConf(PREFER_SORTMERGEJOIN)
  def enableRadixSort: Boolean = getConf(RADIX_SORT_ENABLED)
  // Falls back to Long.MaxValue when the entry is unset, overriding the
  // entry's own default.
  def defaultSizeInBytes: Long = getConf(DEFAULT_SIZE_IN_BYTES, Long.MaxValue)
  def isParquetBinaryAsString: Boolean = getConf(PARQUET_BINARY_AS_STRING)
  def isParquetINT96AsTimestamp: Boolean = getConf(PARQUET_INT96_AS_TIMESTAMP)
  def writeLegacyParquetFormat: Boolean = getConf(PARQUET_WRITE_LEGACY_FORMAT)
  def inMemoryPartitionPruning: Boolean = getConf(IN_MEMORY_PARTITION_PRUNING)
  def columnNameOfCorruptRecord: String = getConf(COLUMN_NAME_OF_CORRUPT_RECORD)
  def broadcastTimeout: Int = getConf(BROADCAST_TIMEOUT)
  def defaultDataSourceName: String = getConf(DEFAULT_DATA_SOURCE_NAME)
  def convertCTAS: Boolean = getConf(CONVERT_CTAS)
  def partitionDiscoveryEnabled(): Boolean =
    getConf(SQLConf.PARTITION_DISCOVERY_ENABLED)
  def partitionColumnTypeInferenceEnabled(): Boolean =
    getConf(SQLConf.PARTITION_COLUMN_TYPE_INFERENCE)
  def parallelPartitionDiscoveryThreshold: Int =
    getConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD)
  def bucketingEnabled: Boolean = getConf(SQLConf.BUCKETING_ENABLED)
  def crossJoinEnabled: Boolean = getConf(SQLConf.CROSS_JOINS_ENABLED)
  // Do not use a value larger than 4000 as the default value of this property.
  // See the comments of SCHEMA_STRING_LENGTH_THRESHOLD above for more information.
  def schemaStringLengthThreshold: Int = getConf(SCHEMA_STRING_LENGTH_THRESHOLD)
  def dataFrameSelfJoinAutoResolveAmbiguity: Boolean =
    getConf(DATAFRAME_SELF_JOIN_AUTO_RESOLVE_AMBIGUITY)
  def dataFrameRetainGroupColumns: Boolean = getConf(DATAFRAME_RETAIN_GROUP_COLUMNS)
  override def runSQLonFile: Boolean = getConf(RUN_SQL_ON_FILES)
  def vectorizedAggregateMapMaxColumns: Int = getConf(VECTORIZED_AGG_MAP_MAX_COLUMNS)
  def variableSubstituteEnabled: Boolean = getConf(VARIABLE_SUBSTITUTE_ENABLED)
  def variableSubstituteDepth: Int = getConf(VARIABLE_SUBSTITUTE_DEPTH)
  // Expands the literal "${system:user.dir}" placeholder in the configured
  // warehouse path using the JVM's current working directory.
  def warehousePath: String = {
    new Path(getConf(WAREHOUSE_PATH).replace("${system:user.dir}",
      System.getProperty("user.dir"))).toString
  }
  override def orderByOrdinal: Boolean = getConf(ORDER_BY_ORDINAL)
  override def groupByOrdinal: Boolean = getConf(GROUP_BY_ORDINAL)
  /** ********************** SQLConf functionality methods ************ */
  /** Set Spark SQL configuration properties. */
  def setConf(props: Properties): Unit = settings.synchronized {
    props.asScala.foreach { case (k, v) => setConfString(k, v) }
  }
  /** Set the given Spark SQL configuration property using a `string` value. */
  def setConfString(key: String, value: String): Unit = {
    require(key != null, "key cannot be null")
    require(value != null, s"value cannot be null for key: $key")
    val entry = sqlConfEntries.get(key)
    if (entry != null) {
      // Only verify configs in the SQLConf object
      // (valueConverter throws if the string is not parseable for the entry).
      entry.valueConverter(value)
    }
    setConfWithCheck(key, value)
  }
  /** Set the given Spark SQL configuration property. */
  def setConf[T](entry: ConfigEntry[T], value: T): Unit = {
    require(entry != null, "entry cannot be null")
    require(value != null, s"value cannot be null for key: ${entry.key}")
    require(sqlConfEntries.get(entry.key) == entry, s"$entry is not registered")
    setConfWithCheck(entry.key, entry.stringConverter(value))
  }
  /** Return the value of Spark SQL configuration property for the given key. */
  @throws[NoSuchElementException]("if key is not set")
  def getConfString(key: String): String = {
    Option(settings.get(key)).
      orElse {
        // Try to use the default value
        Option(sqlConfEntries.get(key)).map(_.defaultValueString)
      }.
      getOrElse(throw new NoSuchElementException(key))
  }
  /**
   * Return the value of Spark SQL configuration property for the given key. If the key is not set
   * yet, return `defaultValue`. This is useful when `defaultValue` in ConfigEntry is not the
   * desired one.
   */
  def getConf[T](entry: ConfigEntry[T], defaultValue: T): T = {
    require(sqlConfEntries.get(entry.key) == entry, s"$entry is not registered")
    Option(settings.get(entry.key)).map(entry.valueConverter).getOrElse(defaultValue)
  }
  /**
   * Return the value of Spark SQL configuration property for the given key. If the key is not set
   * yet, return `defaultValue` in [[ConfigEntry]].
   */
  def getConf[T](entry: ConfigEntry[T]): T = {
    require(sqlConfEntries.get(entry.key) == entry, s"$entry is not registered")
    Option(settings.get(entry.key)).map(entry.valueConverter).orElse(entry.defaultValue).
      getOrElse(throw new NoSuchElementException(entry.key))
  }
  /**
   * Return the value of an optional Spark SQL configuration property for the given key. If the key
   * is not set yet, returns None.
   */
  def getConf[T](entry: OptionalConfigEntry[T]): Option[T] = {
    require(sqlConfEntries.get(entry.key) == entry, s"$entry is not registered")
    Option(settings.get(entry.key)).map(entry.rawValueConverter)
  }
  /**
   * Return the `string` value of Spark SQL configuration property for the given key. If the key is
   * not set yet, return `defaultValue`.
   */
  def getConfString(key: String, defaultValue: String): String = {
    val entry = sqlConfEntries.get(key)
    // "<undefined>" is a sentinel meaning "no real default"; skip validation then.
    if (entry != null && defaultValue != "<undefined>") {
      // Only verify configs in the SQLConf object
      entry.valueConverter(defaultValue)
    }
    Option(settings.get(key)).getOrElse(defaultValue)
  }
  /**
   * Return all the configuration properties that have been set (i.e. not the default).
   * This creates a new copy of the config properties in the form of a Map.
   */
  def getAllConfs: immutable.Map[String, String] =
    settings.synchronized { settings.asScala.toMap }
  /**
   * Return all the configuration definitions that have been defined in [[SQLConf]]. Each
   * definition contains key, defaultValue and doc.
   */
  def getAllDefinedConfs: Seq[(String, String, String)] = sqlConfEntries.synchronized {
    // Internal entries (isPublic == false) are deliberately excluded.
    sqlConfEntries.values.asScala.filter(_.isPublic).map { entry =>
      (entry.key, getConfString(entry.key, entry.defaultValueString), entry.doc)
    }.toSeq
  }
  /**
   * Return whether a given key is set in this [[SQLConf]].
   */
  def contains(key: String): Boolean = {
    settings.containsKey(key)
  }
  // Single choke point for writes; keeps set/unset symmetric with `settings`.
  private def setConfWithCheck(key: String, value: String): Unit = {
    settings.put(key, value)
  }
  def unsetConf(key: String): Unit = {
    settings.remove(key)
  }
  def unsetConf(entry: ConfigEntry[_]): Unit = {
    settings.remove(entry.key)
  }
  def clear(): Unit = {
    settings.clear()
  }
}
| gioenn/xSpark | sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | Scala | apache-2.0 | 35,549 |
package integrationtest
import org.scalatra.test.scalatest._
import skinny.test._
import model._
import controller.Controllers
// Integration tests for the companies controller: exercises list/show/new/
// create/edit/update/delete routes, including CSRF protection behaviour
// (403 without a token, 400 with a token but invalid params, 302 on success).
class CompaniesControllerSpec extends ScalatraFlatSpec with unit.SkinnyTesting {
  addFilter(Controllers.companies, "/*")
  // Reuses an existing company when one is present; otherwise creates a
  // fixture via FactoryGirl. Note: evaluated on every call (def, not val).
  def company = Company.findAllWithLimitOffset(1, 0).headOption.getOrElse {
    FactoryGirl(Company).create()
  }
  it should "show companies" in {
    get("/companies") {
      status should equal(200)
    }
    get("/companies/") {
      status should equal(200)
    }
    get("/companies.json") {
      logger.debug(body)
      status should equal(200)
    }
    get("/companies.xml") {
      logger.debug(body)
      status should equal(200)
      body should include("<companies><company>")
    }
  }
  it should "show a company in detail" in {
    get(s"/companies/${company.id}") {
      status should equal(200)
    }
    get(s"/companies/${company.id}.xml") {
      logger.debug(body)
      status should equal(200)
    }
    get(s"/companies/${company.id}.json") {
      logger.debug(body)
      status should equal(200)
    }
  }
  it should "show new entry form" in {
    get(s"/companies/new") {
      status should equal(200)
    }
  }
  it should "create a company" in {
    val newName = s"Created at ${System.currentTimeMillis}"
    // Without a CSRF token the create must be rejected.
    post(s"/companies", "name" -> newName, "url" -> "http://www.example.com/") {
      status should equal(403)
    }
    withSession("csrf-token" -> "12345") {
      // Token present but required params missing -> validation failure.
      post(s"/companies", "csrf-token" -> "12345") {
        status should equal(400)
      }
      // Successful create redirects; the new id is the last path segment of
      // the Location header.
      post(s"/companies", "name" -> newName, "url" -> "http://www.example.com/", "updatedAt" -> "2013-01-02 12:34:56", "csrf-token" -> "12345") {
        status should equal(302)
        val id = header("Location").split("/").last.toLong
        Company.findById(CompanyId(id)).isDefined should equal(true)
      }
    }
  }
  it should "show the edit form" in {
    get(s"/companies/${company.id}/edit") {
      status should equal(200)
    }
  }
  it should "update a company" in {
    val newName = s"Updated at ${System.currentTimeMillis}"
    // Without a CSRF token the update must be rejected and leave data intact.
    put(s"/companies/${company.id}", "name" -> newName) {
      status should equal(403)
    }
    Company.findById(company.id).get.name should not equal (newName)
    withSession("csrf-token" -> "12345") {
      put(s"/companies/${company.id}", "name" -> newName, "updatedAt" -> "2013-01-02 12:34:56", "csrf-token" -> "12345") {
        status should equal(302)
      }
      put(s"/companies/${company.id}", "csrf-token" -> "12345") {
        status should equal(400)
      }
    }
    Company.findById(company.id).get.name should equal(newName)
  }
  it should "delete a company" in {
    // Creates its own fixture so deleting it cannot break sibling tests.
    val company = FactoryGirl(Company).create()
    delete(s"/companies/${company.id}") {
      status should equal(403)
    }
    withSession("csrf-token" -> "aaaaaa") {
      delete(s"/companies/${company.id}?csrf-token=aaaaaa") {
        status should equal(200)
      }
    }
  }
}
| BlackPrincess/skinny-framework | example/src/test/scala/integrationtest/CompaniesControllerSpec.scala | Scala | mit | 2,982 |
package mesosphere.marathon.api
import javax.servlet.http.HttpServletRequest
import mesosphere.marathon.plugin.http.HttpRequest
import scala.collection.JavaConverters._
/**
 * Immutable snapshot of an [[HttpServletRequest]] exposed through the plugin
 * [[HttpRequest]] interface.
 */
class RequestFacade(request: HttpServletRequest, path: String) extends HttpRequest {
  def this(request: HttpServletRequest) = this(request, request.getRequestURI)
  // Jersey forbids touching the servlet request from another thread, so every
  // piece of data needed later is copied eagerly while still on the request
  // thread. Header names are lower-cased so lookups are case-insensitive.
  val headers = {
    val pairs = for (name <- request.getHeaderNames.asScala)
      yield name.toLowerCase -> request.getHeaders(name).asScala.toSeq
    pairs.toMap
  }
  val cookies = request.getCookies
  val params = request.getParameterMap
  val remoteAddr = request.getRemoteAddr
  override def header(name: String): Seq[String] =
    headers.get(name.toLowerCase) match {
      case Some(values) => values
      case None => Seq.empty
    }
  override def requestPath: String = path
  override def cookie(name: String): Option[String] =
    cookies.collectFirst { case c if c.getName == name => c.getValue }
  override def queryParam(name: String): Seq[String] =
    params.asScala.get(name).fold(Seq.empty[String])(_.toSeq)
  override def method: String = request.getMethod
}
| ss75710541/marathon | src/main/scala/mesosphere/marathon/api/RequestFacade.scala | Scala | apache-2.0 | 1,136 |
package nodes.learning
import breeze.linalg.{DenseVector, SparseVector}
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import pipelines._
import workflow.{TransformerLabelEstimatorChain, PipelineContext, WorkflowUtils}
// Verifies that LeastSquaresEstimator.optimize picks the expected concrete
// solver for different problem shapes. Each test builds a small sample of the
// data (n * sampleRatio rows) and scales numPerPartition back up so the
// optimizer reasons about the full problem size without materialising it.
class LeastSquaresEstimatorSuite extends FunSuite with PipelineContext with Logging {
  test("Big n small d dense") {
    sc = new SparkContext("local", "test")
    val n = 1000000
    val sampleRatio = 0.001
    val d = 1000
    val k = 1000
    val numMachines = 16
    val data = sc.parallelize(Seq.fill((n * sampleRatio).toInt)(DenseVector.rand[Double](d)))
    val labels = data.map(_ => DenseVector.rand[Double](k))
    val numPerPartition = WorkflowUtils.numPerPartition(data).mapValues(x => (x / sampleRatio).toInt)
    val solver = new LeastSquaresEstimator[DenseVector[Double]](numMachines = Some(numMachines))
    val optimizedSolver = solver.optimize(data, labels, numPerPartition)
    // The optimizer wraps the chosen solver in a chain; inspect its second
    // stage to identify which concrete solver was selected.
    val isLinearMapEstimator = optimizedSolver match {
      case t: TransformerLabelEstimatorChain[_,_,_,_] => {
        t.second match {
          case _: LinearMapEstimator => true
          case _ => false
        }
      }
      case _ => false
    }
    assert(isLinearMapEstimator, "Expected exact distributed solver")
  }
  test("big n big d dense") {
    sc = new SparkContext("local", "test")
    val n = 1000000
    val sampleRatio = 0.0001
    val d = 10000
    val k = 1000
    val numMachines = 16
    val data = sc.parallelize(Seq.fill((n * sampleRatio).toInt)(DenseVector.rand[Double](d)))
    val labels = data.map(_ => DenseVector.rand[Double](k))
    val numPerPartition = WorkflowUtils.numPerPartition(data).mapValues(x => (x / sampleRatio).toInt)
    val solver = new LeastSquaresEstimator[DenseVector[Double]](numMachines = Some(numMachines))
    val optimizedSolver = solver.optimize(data, labels, numPerPartition)
    val isBlockSolver = optimizedSolver match {
      case t: TransformerLabelEstimatorChain[_,_,_,_] => {
        t.second match {
          case _: BlockLeastSquaresEstimator => true
          case _ => false
        }
      }
      case _ => false
    }
    assert(isBlockSolver, "Expected block solver")
  }
  test("big n big d sparse") {
    sc = new SparkContext("local", "test")
    val n = 1000000
    val sampleRatio = 0.0001
    val d = 10000
    val k = 2
    val sparsity = 0.01
    val numMachines = 16
    // Builds sparse rows whose first (sparsity * d) entries are random and the
    // rest zero; the solver choice only depends on shape/sparsity, not values.
    val data = sc.parallelize(Seq.fill((n * sampleRatio).toInt) {
      val sparseVec = SparseVector.zeros[Double](d)
      DenseVector.rand[Double]((sparsity * d).toInt).toArray.zipWithIndex.foreach {
        case (value, i) =>
          sparseVec(i) = value
      }
      sparseVec
    })
    val labels = data.map(_ => DenseVector.rand[Double](k))
    val numPerPartition = WorkflowUtils.numPerPartition(data).mapValues(x => (x / sampleRatio).toInt)
    val solver = new LeastSquaresEstimator[SparseVector[Double]](numMachines = Some(numMachines))
    val optimizedSolver = solver.optimize(data, labels, numPerPartition)
    val isSparseLBFGS = optimizedSolver match {
      case t: TransformerLabelEstimatorChain[_,_,_,_] => {
        t.second match {
          case _: SparseLBFGSwithL2 => true
          case _ => false
        }
      }
      case _ => false
    }
    assert(isSparseLBFGS, "Expected sparse LBFGS solver")
  }
}
} | tomerk/keystone | src/test/scala/nodes/learning/LeastSquaresEstimatorSuite.scala | Scala | apache-2.0 | 3,341 |
package wow.common.network
import java.net.InetSocketAddress
import akka.actor.{Actor, ActorLogging, Props, SupervisorStrategy}
import akka.io.Tcp._
import akka.io.{IO, Tcp}
case object GetAddress
/**
* This class defines the behaviour of the TCP server.
*
* @constructor send a Bind command to the TCP manager
*/
class TCPServer[A <: TCPSessionFactory](val factory: A, val address: String, val port: Int)
  extends Actor with ActorLogging {

  log.debug("Binding server with socket")
  // Ask the Tcp manager extension to bind; the reply (Bound or CommandFailed)
  // arrives in receive below.
  IO(Tcp)(context.system) ! Bind(self, new InetSocketAddress(address, port))

  // Child session actors are simply stopped on failure (no restarts).
  override def supervisorStrategy: SupervisorStrategy = SupervisorStrategy.stoppingStrategy

  override def postStop(): Unit = log.debug(s"Stopped TCP server for $address:$port")

  def receive: PartialFunction[Any, Unit] = {
    case Bound(localAddress) =>
      log.debug(s"TCP port opened at: ${localAddress.getHostString}:${localAddress.getPort}")
    case Connected(remote, local) =>
      log.debug(s"Remote connection set from $remote to $local")
      // `sender` here is the per-connection Tcp connection actor; a session
      // actor is created for it and registered as its handler.
      val handlerRef = context.actorOf(factory.props(sender), factory.PreferredName + TCPSession.PreferredName(remote))
      sender ! Register(handlerRef)
    // Binding failed (e.g. port already in use): shut the server down.
    case CommandFailed(_: Bind) => context stop self
  }
}
object TCPServer {
  /** Props for a [[TCPServer]]; built via classOf so constructor arguments are
    * matched reflectively by Akka. */
  def props[A <: TCPSessionFactory](companion: A, address: String, port: Int): Props = Props(classOf[TCPServer[A]],
    companion,
    address,
    port)

  // Base actor name used when instantiating TCP servers.
  val PreferredName = "tcp"
}
| SKNZ/SpinaciCore | wow/core/src/main/scala/wow/common/network/TCPServer.scala | Scala | mit | 1,452 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.https
import javax.inject.Inject
import com.typesafe.config.ConfigFactory
import play.api.{ Configuration, Environment, _ }
import play.api.http.HttpFilters
import play.api.inject.bind
import play.api.inject.guice.{ GuiceApplicationBuilder, GuiceableModule }
import play.api.mvc.Results._
import play.api.mvc._
import play.api.mvc.request.RemoteConnection
import play.api.test.{ WithApplication, _ }
// Filter chain used by the test application: only the HTTPS-redirect filter.
private[https] class TestFilters @Inject() (redirectPlainFilter: RedirectHttpsFilter) extends HttpFilters {
  override def filters: Seq[EssentialFilter] = Seq(redirectPlainFilter)
}
// Behavioural spec for RedirectHttpsFilter: plain-HTTP requests are redirected
// to HTTPS (in Prod, or when explicitly enabled) and HTTPS responses carry the
// Strict-Transport-Security header.
class RedirectHttpsFilterSpec extends PlaySpecification {

  "RedirectHttpsConfigurationProvider" should {
    "throw configuration error on invalid redirect status code" in {
      // 200 is not a redirect status, so provider construction must fail.
      val configuration = Configuration.from(Map("play.filters.https.redirectStatusCode" -> "200"))
      val environment = Environment.simple()
      val configProvider = new RedirectHttpsConfigurationProvider(configuration, environment)
      {
        configProvider.get
      } must throwA[com.typesafe.config.ConfigException.Missing]
    }
  }

  "RedirectHttpsFilter" should {

    "redirect when not on https including the path and url query parameters" in new WithApplication(
      buildApp(mode = Mode.Prod)) with Injecting {
      val req = request("/please/dont?remove=this&foo=bar")
      val result = route(app, req).get

      status(result) must_== PERMANENT_REDIRECT
      // Path and query string must be preserved in the redirect target.
      header(LOCATION, result) must beSome("https://playframework.com/please/dont?remove=this&foo=bar")
    }

    "redirect with custom redirect status code if configured" in new WithApplication(buildApp(
      """
        |play.filters.https.redirectStatusCode = 301
      """.stripMargin, mode = Mode.Prod)) with Injecting {
      val req = request("/please/dont?remove=this&foo=bar")
      val result = route(app, req).get

      status(result) must_== 301
    }

    // In Test mode the filter is inert by default.
    "not redirect when on http in test" in new WithApplication(buildApp(mode = Mode.Test)) {
      val secure = RemoteConnection(remoteAddressString = "127.0.0.1", secure = false, clientCertificateChain = None)
      val result = route(app, request().withConnection(secure)).get

      header(STRICT_TRANSPORT_SECURITY, result) must beNone
      status(result) must_== OK
    }

    "redirect when on http in test and redirectEnabled = true" in new WithApplication(
      buildApp("play.filters.https.redirectEnabled = true", mode = Mode.Test)) {
      val secure = RemoteConnection(remoteAddressString = "127.0.0.1", secure = false, clientCertificateChain = None)
      val result = route(app, request().withConnection(secure)).get

      header(STRICT_TRANSPORT_SECURITY, result) must beNone
      status(result) must_== PERMANENT_REDIRECT
    }

    "not redirect when on https but send HSTS header" in new WithApplication(buildApp(mode = Mode.Prod)) {
      val secure = RemoteConnection(remoteAddressString = "127.0.0.1", secure = true, clientCertificateChain = None)
      val result = route(app, request().withConnection(secure)).get

      header(STRICT_TRANSPORT_SECURITY, result) must beSome("max-age=31536000; includeSubDomains")
      status(result) must_== OK
    }

    "redirect to custom HTTPS port if configured" in new WithApplication(
      buildApp("play.filters.https.port = 9443", mode = Mode.Prod)) {
      val result = route(app, request("/please/dont?remove=this&foo=bar")).get

      header(LOCATION, result) must beSome("https://playframework.com:9443/please/dont?remove=this&foo=bar")
    }

    "not contain default HSTS header if secure in test" in new WithApplication(buildApp(mode = Mode.Test)) {
      val secure = RemoteConnection(remoteAddressString = "127.0.0.1", secure = true, clientCertificateChain = None)
      val result = route(app, request().withConnection(secure)).get

      header(STRICT_TRANSPORT_SECURITY, result) must beNone
    }

    "contain default HSTS header if secure in production" in new WithApplication(buildApp(mode = Mode.Prod)) {
      val secure = RemoteConnection(remoteAddressString = "127.0.0.1", secure = true, clientCertificateChain = None)
      val result = route(app, request().withConnection(secure)).get

      header(STRICT_TRANSPORT_SECURITY, result) must beSome("max-age=31536000; includeSubDomains")
    }

    "contain custom HSTS header if configured explicitly in prod" in new WithApplication(buildApp(
      """
        |play.filters.https.strictTransportSecurity="max-age=12345; includeSubDomains"
      """.stripMargin, mode = Mode.Prod)) {
      val secure = RemoteConnection(remoteAddressString = "127.0.0.1", secure = true, clientCertificateChain = None)
      val result = route(app, request().withConnection(secure)).get

      header(STRICT_TRANSPORT_SECURITY, result) must beSome("max-age=12345; includeSubDomains")
    }

  }

  // GET request with a fixed Host header so redirect targets are predictable.
  private def request(uri: String = "/") = {
    FakeRequest(method = "GET", path = uri)
      .withHeaders(HOST -> "playframework.com")
  }

  // Minimal Guice app with the redirect filter installed, accepting extra
  // HOCON config and a run mode.
  private def buildApp(config: String = "", mode: Mode = Mode.Test) = GuiceApplicationBuilder(Environment.simple(mode = mode))
    .configure(Configuration(ConfigFactory.parseString(config)))
    .load(
      new play.api.inject.BuiltinModule,
      new play.api.mvc.CookiesModule,
      new play.api.i18n.I18nModule,
      new play.filters.https.RedirectHttpsModule)
    .appRoutes(app => {
      case ("GET", "/") =>
        val action = app.injector.instanceOf[DefaultActionBuilder]
        action(Ok(""))
    }).overrides(
      bind[HttpFilters].to[TestFilters]
    ).build()
}
| Shruti9520/playframework | framework/src/play-filters-helpers/src/test/scala/play/filters/https/RedirectHttpsFilterSpec.scala | Scala | apache-2.0 | 5,658 |
package dotty
object DottyPredef {
  import compiletime.summonFrom

  /** Inline assertion with a lazily-built failure message; the condition is
    * inlined at each call site. */
  inline final def assert(inline assertion: Boolean, inline message: => Any): Unit = {
    if (!assertion)
      assertFail(message)
  }

  /** Inline assertion without a message. */
  inline final def assert(inline assertion: Boolean) <: Unit = {
    if (!assertion)
      assertFail()
  }

  /** Out-of-line failure paths so the inlined assert stays small. */
  def assertFail(): Nothing = throw new java.lang.AssertionError("assertion failed")
  def assertFail(message: => Any): Nothing = throw new java.lang.AssertionError("assertion failed: " + message)

  /** Summons the implicit value of type `T` (inline counterpart of Predef's). */
  inline final def implicitly[T](implicit ev: T): T = ev

  /** Used to mark code blocks as being expressions, instead of being taken as part of anonymous classes and the like.
   *  This is just a different name for [[identity]].
   *
   *  @example Separating code blocks from `new`:
   *  {{{
   *  val x = new AnyRef
   *  {
   *    val y = ...
   *    println(y)
   *  }
   *  // the { ... } block is seen as the body of an anonymous class
   *
   *  val x = new AnyRef
   *
   *  {
   *    val y = ...
   *    println(y)
   *  }
   *  // an empty line is a brittle "fix"
   *
   *  val x = new AnyRef
   *  locally {
   *    val y = ...
   *    println(y)
   *  }
   *  // locally guards the block and helps communicate intent
   *  }}}
   *  @group utilities
   */
  inline def locally[T](inline body: T): T = body

  /**
   * Retrieve the single value of a type with a unique inhabitant.
   *
   * @example {{{
   * object Foo
   * val foo = valueOf[Foo.type]
   * // foo is Foo.type = Foo
   *
   * val bar = valueOf[23]
   * // bar is 23.type = 23
   * }}}
   * @group utilities
   */
  inline def valueOf[T]: T = summonFrom {
    case ev: ValueOf[T] => ev.value
  }

  /** Summon a given value of type `T`. Usually, the argument is not passed explicitly.
   *
   *  @tparam T the type of the value to be summoned
   *  @return the given value typed as the provided type parameter
   */
  inline def summon[T](using x: T): x.type = x

  // Extension methods for working with explicit nulls

  /** Strips away the nullability from a value.
   *  e.g.
   *    val s1: String|Null = "hello"
   *    val s: String = s1.nn
   *
   *  Note that `.nn` performs a checked cast, so if invoked on a null value it'll throw an NPE.
   */
  def[T] (x: T|Null) nn: x.type & T =
    if (x == null) throw new NullPointerException("tried to cast away nullability, but value is null")
    else x.asInstanceOf[x.type & T]
}
| som-snytt/dotty | library/src/dotty/DottyPredef.scala | Scala | apache-2.0 | 2,645 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt.internal.util
import sbt.util._
/**
* Provides a `java.io.Writer` interface to a `Logger`. Content is line-buffered and logged at `level`.
* A line is delimited by `nl`, which is by default the platform line separator.
*/
/**
 * Adapts a `Logger` to the `java.io.Writer` interface. Content is buffered
 * until a full line (terminated by `nl`) is available; complete lines are then
 * either logged immediately at `unbufferedLevel` or accumulated until
 * `flushLines` is called with an explicit level.
 *
 * All public operations are synchronized on `this`, matching `Writer`'s
 * locking convention.
 */
class LoggerWriter(delegate: Logger, unbufferedLevel: Option[Level.Value], nl: String = System.getProperty("line.separator")) extends java.io.Writer {
  def this(delegate: Logger, level: Level.Value) = this(delegate, Some(level))
  def this(delegate: Logger) = this(delegate, None)

  // Partial (not yet newline-terminated) content.
  private[this] val buffer = new StringBuilder
  // Complete lines awaiting flushLines, used only in buffered mode (unbufferedLevel == None).
  private[this] val lines = new collection.mutable.ListBuffer[String]

  override def close() = flush()

  /** Forces any partial line out as if it were complete. */
  override def flush(): Unit =
    synchronized {
      if (buffer.nonEmpty) {
        log(buffer.toString)
        buffer.clear()
      }
    }

  /** Emits all buffered lines to the delegate at `level` and clears the buffer. */
  def flushLines(level: Level.Value): Unit =
    synchronized {
      for (line <- lines)
        delegate.log(level, line)
      lines.clear()
    }

  override def write(content: Array[Char], offset: Int, length: Int): Unit =
    synchronized {
      buffer.appendAll(content, offset, length)
      process()
    }

  // Repeatedly extracts and logs complete lines from the front of the buffer.
  // @tailrec asserts the recursion compiles to a loop, so pathological inputs
  // with very many lines per write cannot overflow the stack.
  @scala.annotation.tailrec
  private[this] def process(): Unit = {
    val i = buffer.indexOf(nl)
    if (i >= 0) {
      log(buffer.substring(0, i))
      buffer.delete(0, i + nl.length)
      process()
    }
  }

  // Routes a complete line: buffer it, or log immediately at the fixed level.
  private[this] def log(s: String): Unit = unbufferedLevel match {
    case None =>
      lines += s; ()
    case Some(level) => delegate.log(level, s)
  }
}
| Duhemm/util | internal/util-logging/src/main/scala/sbt/internal/util/LoggerWriter.scala | Scala | bsd-3-clause | 1,555 |
package us.feliscat.text.analyzer.mor.mecab
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import us.feliscat.text.StringOption
/**
* @author K. Sakamoto
* Created on 2017/05/29
*/
// Checks the exact morphological-analysis output for a fixed Japanese sentence.
// NOTE(review): the class is named IpadicMecabTestSuite but it exercises
// UnidicMecab — confirm which dictionary this suite is meant to cover.
class IpadicMecabTestSuite extends AssertionsForJUnit {
  @Test
  def testAnalysisResult(): Unit = {
    val result: String = UnidicMecab.analysisResult(
      StringOption(
        "運輸・通信手段の発展が,アジア・アフリカの植民地化をうながした。"
      )
    ).get
    // Expected output is compared verbatim, one analyzed token per line,
    // terminated by EOS.
    assert(result ==
      """運輸 名詞,普通名詞,一般,*,*,*,ウンユ,運輸,運輸,ウンユ,運輸,ウンユ,漢,*,*,*,*,ウンユ,ウンユ,ウンユ,ウンユ,*,*,1,C1,*
        |・ 補助記号,一般,*,*,*,*,,・,・,,・,,記号,*,*,*,*,・,・,,,*,*,*,*,*
        |通信 名詞,普通名詞,サ変可能,*,*,*,ツウシン,通信,通信,ツーシン,通信,ツーシン,漢,*,*,*,*,ツウシン,ツウシン,ツウシン,ツウシン,*,*,0,C2,*
        |手段 名詞,普通名詞,一般,*,*,*,シュダン,手段,手段,シュダン,手段,シュダン,漢,*,*,*,*,シュダン,シュダン,シュダン,シュダン,*,*,1,C1,*
        |の 助詞,格助詞,*,*,*,*,ノ,の,の,ノ,の,ノ,和,*,*,*,*,ノ,ノ,ノ,ノ,*,*,*,名詞%F1,*
        |発展 名詞,普通名詞,サ変可能,*,*,*,ハッテン,発展,発展,ハッテン,発展,ハッテン,漢,*,*,*,*,ハッテン,ハッテン,ハッテン,ハッテン,*,*,0,C2,*
        |が 助詞,格助詞,*,*,*,*,ガ,が,が,ガ,が,ガ,和,*,*,*,*,ガ,ガ,ガ,ガ,*,*,*,"動詞%F2@0,名詞%F1",*
        |, 補助記号,読点,*,*,*,*,,,,,,,,,,記号,*,*,*,*,,,,,*,*,*,*,*
        |アジア 名詞,普通名詞,一般,*,*,*,世界史用語辞書,世界史用語辞書,basyo,basyo,地域,地域,固,*,*,*,*
        |・ 補助記号,一般,*,*,*,*,,・,・,,・,,記号,*,*,*,*,・,・,,,*,*,*,*,*
        |アフリカ 名詞,普通名詞,一般,*,*,*,世界史用語辞書,世界史用語辞書,basyo,basyo,地域,地域,固,*,*,*,*
        |の 助詞,格助詞,*,*,*,*,ノ,の,の,ノ,の,ノ,和,*,*,*,*,ノ,ノ,ノ,ノ,*,*,*,名詞%F1,*
        |植民地 名詞,普通名詞,一般,*,*,*,世界史用語辞書,世界史用語辞書,syakaigainen,syakaigainen,土地が担うロール@役割@土地,土地が担うロール@役割@土地,固,*,*,*,*
        |化 接尾辞,名詞的,サ変可能,*,*,*,カ,化,化,カ,化,カ,漢,*,*,*,*,カ,カ,カ,カ,*,*,*,C4,*
        |を 助詞,格助詞,*,*,*,*,ヲ,を,を,オ,を,オ,和,*,*,*,*,ヲ,ヲ,ヲ,ヲ,*,*,*,"動詞%F2@0,名詞%F1,形容詞%F2@-1",*
        |うながし 動詞,一般,*,*,五段-サ行,連用形-一般,ウナガス,促す,うながし,ウナガシ,うながす,ウナガス,和,*,*,*,*,ウナガシ,ウナガス,ウナガシ,ウナガス,*,*,"0,3",C2,*
        |た 助動詞,*,*,*,助動詞-タ,終止形-一般,タ,た,た,タ,た,タ,和,*,*,*,*,タ,タ,タ,タ,*,*,*,"動詞%F2@1,形容詞%F4@-2",*
        |。 補助記号,句点,*,*,*,*,,。,。,,。,,記号,*,*,*,*,,,,,*,*,*,*,*
        |EOS
        |""".stripMargin)
  }
}
| ktr-skmt/FelisCatusZero-multilingual | libraries/src/test/scala/us/feliscat/text/analyzer/mor/mecab/IpadicMecabTestSuite.scala | Scala | apache-2.0 | 3,108 |
package com.twitter.finagle
import com.twitter.finagle.client._
import com.twitter.finagle.dispatch.PipeliningDispatcher
import com.twitter.finagle.netty3.Netty3Transporter
import com.twitter.finagle.pool.ReusingPool
import com.twitter.finagle.redis.protocol.{Command, Reply}
import com.twitter.finagle.stats.StatsReceiver
import java.net.SocketAddress
// Mixin adding "rich" (typed command API) client constructors on top of a raw
// Command/Reply Finagle client.
trait RedisRichClient { self: Client[Command, Reply] =>

  def newRichClient(group: Group[SocketAddress]): redis.Client = redis.Client(newClient(group).toService)
  def newRichClient(group: String): redis.Client = redis.Client(newClient(group).toService)
}
object RedisTransporter extends Netty3Transporter[Command, Reply]("redis", redis.RedisClientPipelineFactory)
// Default Redis client: pipelined dispatch over a single reused connection per
// host (ReusingPool), since the Redis protocol is pipelining-friendly.
object RedisClient extends DefaultClient[Command, Reply](
  name = "redis",
  endpointer = Bridge[Command, Reply, Command, Reply](RedisTransporter, new PipeliningDispatcher(_)),
  pool = (sr: StatsReceiver) => new ReusingPool(_, sr)
) with RedisRichClient
// Public entry point; delegates to the default RedisClient configuration.
object Redis extends Client[Command, Reply] with RedisRichClient {
  def newClient(group: Group[SocketAddress]): ServiceFactory[Command, Reply] =
    RedisClient.newClient(group)
}
| firebase/finagle | finagle-redis/src/main/scala/com/twitter/finagle/Redis.scala | Scala | apache-2.0 | 1,160 |
package pureconfig.module.scalaxml
import scala.xml.Elem
import com.typesafe.config.ConfigFactory.parseString
import org.scalatest.EitherValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import pureconfig.generic.auto._
import pureconfig.syntax._
// Round-trip tests for pureconfig's scala-xml support: an XML document stored
// as a HOCON string value is decoded into a scala.xml.Elem.
class ScalaXMLSuite extends AnyFlatSpec with Matchers with EitherValues {

  case class Config(people: Elem)

  val sampleXML: Elem =
    <people>
      <person firstName="foo" lastName="bar"/>
      <person firstName="blah" lastName="stuff"/>
    </people>

  it should "be able to read a config with XML" in {
    // The XML document is embedded as a triple-quoted HOCON string.
    val config = parseString(s"""{ people =
                                |  \\"\\"\\"$sampleXML\\"\\"\\"
                                | }""".stripMargin)
    config.to[Config] shouldEqual Right(Config(sampleXML))
  }

  it should "return an error when reading invalid XML " in {
    val config = parseString("{ people: <people> }")
    // NOTE(review): the symbol-based matcher (`'left`) is deprecated in newer
    // ScalaTest versions; behavior is unchanged here.
    config.to[Config] shouldBe 'left
  }
}
| melrief/pureconfig | modules/scala-xml/src/test/scala/pureconfig/module/scalaxml/ScalaXMLSuite.scala | Scala | mpl-2.0 | 939 |
import sbt._
import Keys._
package org.scommon.sbt.settings {
// Configuration surface for publishing artifacts. The repository/realm/
// credentials values are consumed by PublishBehavior below.
trait PublishSettings extends Settings {
  def publishRoot           : Boolean  // NOTE(review): not referenced in this file — confirm usage elsewhere
  def realm                 : String   // default auth realm, used when auto-discovery is off or fails
  def releaseCredentialsID  : String   // maven settings.xml <server> id for releases
  def releaseRepository     : String   // URL releases are published to
  def snapshotCredentialsID : String   // maven settings.xml <server> id for snapshots
  def snapshotRepository    : String   // URL snapshots are published to
  def autoDiscoverRealm     : Boolean  // if true, probe the repository for its auth realm
  def signArtifacts         : Boolean  // NOTE(review): not referenced in this file — confirm usage elsewhere
  def behavior              : PublishBehavior
}
// Pluggable publishing strategy: resolver selection, realm discovery, and
// credential loading from sbt/ivy/maven/environment sources.
trait PublishBehavior extends Behavior {
  /** Selects the snapshot or release resolver based on the version suffix. */
  def publishTo(version: String, settings: CoreSettings): Resolver = {
    //http://www.scala-sbt.org/release/docs/Detailed-Topics/Publishing
    if (version.trim.endsWith("SNAPSHOT"))
      "snapshots" at settings.publish.snapshotRepository
    else
      "releases" at settings.publish.releaseRepository
  }

  /**
   * Discovers the authentication realm by POSTing to the destination and
   * parsing the WWW-Authenticate response header. Falls back to the configured
   * default realm when the request or the parse fails.
   */
  def autoDiscoverRealm(destination: String, settings: CoreSettings): String = {
    //Make a POST to the destination in order to extract the WWW-Authenticate header
    //and discover the realm to use.
    //
    //Example of what the header might look like:
    //  WWW-Authenticate: BASIC realm="Sonatype Nexus Repository Manager"
    POST(destination) map { dest =>
      val response = dest.headers.getOrElse("WWW-Authenticate", s"""BASIC realm="${settings.publish.realm}" """)
      //Parse the return value to extract the realm.
      val pattern = """(?<=realm=").+(?=")""".r
      val realm = pattern findFirstIn response
      //If unable to extract the realm, attempt to use the default.
      realm getOrElse settings.publish.realm
    } getOrElse settings.publish.realm
  }

  /** Extracts the host of a repository URI, or None when it cannot be parsed. */
  protected def hostFor(destination: String, settings: CoreSettings): Option[String] = {
    try Some(new URI(destination).getHost)
    catch {
      //Fix: previously this swallowed ALL Throwables (including fatal errors
      //such as OutOfMemoryError); only recoverable failures should map to None.
      case scala.util.control.NonFatal(_) => None
    }
  }

  /**
   * Gathers credentials for the repository matching this version, in priority
   * order: environment variables, ~/.sbt/.credentials, ~/.ivy2/.credentials,
   * then matching <server> entries in ~/.m2/settings.xml.
   */
  def loadCredentials(version: String, settings: CoreSettings): Seq[Credentials] = {
    val (destination, credentials_id) =
      if (version.trim.endsWith("SNAPSHOT"))
        (settings.publish.snapshotRepository, settings.publish.snapshotCredentialsID)
      else
        (settings.publish.releaseRepository, settings.publish.releaseCredentialsID)

    val realm =
      if (settings.publish.autoDiscoverRealm)
        autoDiscoverRealm(destination, settings)
      else
        settings.publish.realm

    (for {
      host <- hostFor(destination, settings)
    } yield {
      // Pulls username/password out of the <server> whose id matches.
      def loadMavenCredentials(file: java.io.File): Seq[Credentials] = {
        for {
          s <- xml.XML.loadFile(file) \\ "servers" \\ "server"
          id = (s \\ "id").text
          if id == credentials_id
          username = (s \\ "username").text
          password = (s \\ "password").text
        } yield Credentials(realm, host, username, password)
      }

      val sbt_credentials = Path.userHome / ".sbt"  / ".credentials"
      val ivy_credentials = Path.userHome / ".ivy2" / ".credentials"
      val mvn_credentials = Path.userHome / ".m2"   / "settings.xml"

      //Attempt to gather credentials from the environment if possible.
      val env = Seq(sys.env.get("PUBLISH_USER"), sys.env.get("PUBLISH_PASSWORD")).flatten match {
        case Seq(username, password) =>
          Seq(Credentials(realm, host, username, password))
        case _ =>
          Seq()
      }

      //Attempt to read in all the credentials.
      val sbt = if (sbt_credentials.canRead) Seq(Credentials(sbt_credentials)) else Seq()
      val ivy = if (ivy_credentials.canRead) Seq(Credentials(ivy_credentials)) else Seq()
      val mvn = if (mvn_credentials.canRead) loadMavenCredentials(mvn_credentials) else Seq()

      env ++ sbt ++ ivy ++ mvn
    }) getOrElse Seq()
  }
}
}
//Unnamed package
import org.scommon.sbt.settings._
case object PublishBehavior extends PublishBehavior
// sbt setting definitions wiring the PublishBehavior strategy into the
// standard publish keys (credentials, publishTo, publishMavenStyle, ...).
object PublishSettings {
  // Append the credentials resolved by the configured behavior.
  val credentials =
    sbt.Keys.credentials ++=
      delegateLoadCredentials(
          sbt.Keys.version.value
        , CoreSettings(
            org.scommon.sbt.settings.primarySettings.value
          , org.scommon.sbt.settings.promptSettings.value
          , org.scommon.sbt.settings.compilerSettings.value
          , org.scommon.sbt.settings.scaladocSettings.value
          , org.scommon.sbt.settings.mavenSettings.value
          , org.scommon.sbt.settings.publishSettings.value
          , org.scommon.sbt.settings.releaseProcessSettings.value
        )
      )

  // Never publish test artifacts.
  val publishArtifact =
    sbt.Keys.publishArtifact in Test :=
      false

  // Resolve the destination repository from the behavior at publish time.
  val publishTo =
    sbt.Keys.publishTo :=
      Some(delegatePublishTo(
          sbt.Keys.version.value
        , CoreSettings(
            org.scommon.sbt.settings.primarySettings.value
          , org.scommon.sbt.settings.promptSettings.value
          , org.scommon.sbt.settings.compilerSettings.value
          , org.scommon.sbt.settings.scaladocSettings.value
          , org.scommon.sbt.settings.mavenSettings.value
          , org.scommon.sbt.settings.publishSettings.value
          , org.scommon.sbt.settings.releaseProcessSettings.value
        )
      ))

  val publishMavenStyle =
    sbt.Keys.publishMavenStyle :=
      true

  // Settings bundle to add to a project's configuration.
  val defaults =
    Seq(
        credentials
      , publishArtifact
      , publishTo
      , publishMavenStyle
    )

  private[this] def delegatePublishTo(version: String, settings: CoreSettings): Resolver =
    settings.publish.behavior.publishTo(version, settings)

  private[this] def delegateLoadCredentials(version: String, settings: CoreSettings): Seq[Credentials] =
    settings.publish.behavior.loadCredentials(version, settings)
}
| scommon/sbt-settings-simple | src/main/scala/publish-settings.scala | Scala | apache-2.0 | 5,675 |
import java.io.{FileInputStream, FileNotFoundException, File}
import scala.collection.immutable.{ListSet, Iterable}
import scala.concurrent.duration._
import akka.util.{ByteString, CompactByteString}
import akka.actor.{ Actor, Props, ActorSystem }
import akka.testkit.{ ImplicitSender, TestKit, TestActorRef }
import org.scalatest.{ BeforeAndAfterAll, FlatSpec }
import org.scalatest.concurrent._
import org.scalatest._
import org.scalatest.matchers.{ClassicMatchers, Matchers, ShouldMatchers}
import rtmp.amf.{AmfMixedMap, AmfNull, AMF0Encoding}
import rtmp.packet.{Notify, NotifyDecoder, Invoke, InvokeDecoder}
import rtmp.amf.amf0.Amf0Deserializer
/**
* Testing AMF parsing
*/
/**
 * Testing AMF parsing: each fixture file holds the raw bytes of one RTMP
 * packet; the decoded Invoke/Notify structure is compared with the expected
 * value.
 */
class ParseAmfSpec extends FlatSpec with ClassicMatchers with BinaryTester {

  "A parsed data in packet_invoke_connect_1.rtmp" should "match to test data" in {
    val binaryData = readData("packet_invoke_connect_1.rtmp")

    val decoder = new InvokeDecoder(new DummyLogger())
    val packet = decoder.decode(new AMF0Encoding(), binaryData)

    // connect(1) carries a single command-object map.
    assert(packet.equals(Invoke("connect", 1, List(Map(
      "app" -> "live",
      "type" -> "nonprivate",
      "flashVer" -> "FMLE/3.0 (compatible; Lavf55.2.0)",
      "tcUrl" -> "rtmp://127.0.0.1:1935/live"
    )))))
  }

  "A parsed data in packet_invoke_releaseStream_2.rtmp" should "match to test data" in {
    val binaryData = readData("packet_invoke_releaseStream_2.rtmp")

    val decoder = new InvokeDecoder(new DummyLogger())
    val packet = decoder.decode(new AMF0Encoding(), binaryData)

    // AMF null placeholder precedes the stream name argument.
    assert(packet.equals(Invoke("releaseStream", 2, List(
      AmfNull(),
      "mystream.sdp"
    ))))
  }

  "A parsed data in packet_invoke_FCPublish_3.rtmp" should "match to test data" in {
    val binaryData = readData("packet_invoke_FCPublish_3.rtmp")

    val decoder = new InvokeDecoder(new DummyLogger())
    val packet = decoder.decode(new AMF0Encoding(), binaryData)

    assert(packet.equals(Invoke("FCPublish", 3, List(
      AmfNull(),
      "mystream.sdp"
    ))))
  }

  "A parsed data in packet_invoke_createStream_4.rtmp" should "match to test data" in {
    val binaryData = readData("packet_invoke_createStream_4.rtmp")

    val decoder = new InvokeDecoder(new DummyLogger())
    val packet = decoder.decode(new AMF0Encoding(), binaryData)

    assert(packet.equals(Invoke("createStream", 4, List(
      AmfNull()
    ))))
  }

  "A parsed data in packet_invoke_publish_5.rtmp" should "match to test data" in {
    val binaryData = readData("packet_invoke_publish_5.rtmp")

    val decoder = new InvokeDecoder(new DummyLogger())
    val packet = decoder.decode(new AMF0Encoding(), binaryData)

    assert(packet.equals(Invoke("publish", 5, List(
      AmfNull(),
      "mystream.sdp",
      "live"
    ))))
  }

  "A parsed data in in_stream_metadata.rtmp" should "match to test data" in {
    val binaryData = readData("in_stream_metadata.rtmp")

    val decoder = new NotifyDecoder(new DummyLogger())
    val packet = decoder.decode(new AMF0Encoding(), binaryData)

    // onMetaData payload is a mixed (ECMA) array of stream properties.
    assert(packet.equals(Notify("@setDataFrame", List(
      "onMetaData",
      AmfMixedMap(Map(
        "duration" -> 0.0,
        "filesize" -> 0.0,
        "videocodecid" -> 2.0,
        "height" -> 720.0,
        "videodatarate" -> 195.3125,
        "compatible_brands" -> "isomavc1",
        "stereo" -> true,
        "encoder" -> "Lavf55.12.100",
        "audiosamplesize" -> 16.0,
        "minor_version" -> "1",
        "major_brand" -> "isom",
        "width" -> 1280.0,
        "audiosamplerate" -> 44100.0,
        "framerate" -> 23.976023976023978,
        "audiocodecid" -> 2.0,
        "audiodatarate" -> 0.0
      ))
    ))))
  }
}
| vimvim/AkkaTest | src/test/scala/ParseAmfSpec.scala | Scala | agpl-3.0 | 3,674 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import cascading.tuple._
import java.lang.reflect.Method
import java.lang.reflect.Constructor
import scala.reflect.Manifest
/**
* Typeclass for packing a cascading Tuple into some type T,
* this is used to put fields of a cascading tuple into Thrift, Protobuf,
* or case classes, for instance, but you can add your own instances to control
* how this is done.
*
* @author Argyris Zymnis
* @author Oscar Boykin
*/
trait TuplePacker[T] extends java.io.Serializable {
  // Produces a converter that packs a cascading tuple with these fields into a T.
  def newConverter(fields: Fields): TupleConverter[T]
}
object TuplePacker extends CaseClassPackers
// Higher-priority packer for Products (case classes), via constructor matching.
trait CaseClassPackers extends LowPriorityTuplePackers {
  implicit def caseClassPacker[T <: Product](implicit mf: Manifest[T]): OrderedTuplePacker[T] = new OrderedTuplePacker[T]
}
// Fallback packer using setter reflection; applies to any type with a Manifest.
trait LowPriorityTuplePackers extends java.io.Serializable {
  implicit def genericTuplePacker[T: Manifest]: ReflectionTuplePacker[T] = new ReflectionTuplePacker[T]
}
/**
* Packs a tuple into any object with set methods, e.g. thrift or proto objects.
* TODO: verify that protobuf setters for field camel_name are of the form setCamelName.
* In that case this code works for proto.
*
* @author Argyris Zymnis
* @author Oscar Boykin
*/
class ReflectionTuplePacker[T](implicit m: Manifest[T]) extends TuplePacker[T] {
  // Converter construction is deferred so reflection happens on the task side.
  override def newConverter(fields: Fields) = new ReflectionTupleConverter[T](fields)(m)
}
/**
 * Converts a cascading TupleEntry into a T by instantiating T with its no-arg
 * constructor and invoking `setX` methods, one per tuple field. The field name
 * `camelName` is mapped to the setter `setCamelName`.
 */
class ReflectionTupleConverter[T](fields: Fields)(implicit m: Manifest[T]) extends TupleConverter[T] {
  override val arity = fields.size

  def lowerFirst(s: String) = s.substring(0, 1).toLowerCase + s.substring(1)

  // Cut out "set" and lower case the first after
  def setterToFieldName(setter: Method) = lowerFirst(setter.getName.substring(3))

  /* The `_.get` is safe because of the `_.isEmpty` check. ScalaTest does not
   * seem to support a more type safe way of doing this.
   */
  @SuppressWarnings(Array("org.wartremover.warts.OptionPartial"))
  def validate(): Unit = {
    //We can't touch setters because that shouldn't be accessed until map/reduce side, not
    //on submitter.
    val missing = Dsl.asList(fields).find { f => !getSetters.contains(f.toString) }
    assert(missing.isEmpty, "Field: " + missing.get.toString + " not in setters")
  }
  validate()

  // Strictly materialized map of field name -> setter. (A `mapValues` view here
  // would be lazily re-evaluated on every lookup and is not serializable.)
  def getSetters: Map[String, Method] = m.runtimeClass
    .getDeclaredMethods
    .filter { _.getName.startsWith("set") }
    .groupBy { setterToFieldName(_) }
    .map { case (name, methods) => name -> methods.head }

  // Do all the reflection for the setters we need:
  // This needs to be lazy because Method is not serializable
  // TODO: filter by isAccessible, which somehow seems to fail
  lazy val setters = getSetters

  override def apply(input: TupleEntry): T = {
    // Class.newInstance is deprecated since Java 9; go through the nullary
    // constructor explicitly instead.
    val newInst = m.runtimeClass.getDeclaredConstructor().newInstance().asInstanceOf[T]
    val fields = input.getFields
    (0 until fields.size).foreach { idx =>
      val thisField = fields.get(idx)
      val setMethod = setters(thisField.toString)
      setMethod.invoke(newInst, input.getObject(thisField))
    }
    newInst
  }
}
/**
* This just blindly uses the first public constructor with the same arity as the fields size
*/
class OrderedTuplePacker[T](implicit m: Manifest[T]) extends TuplePacker[T] {
  // Uses positional constructor matching rather than setter reflection.
  override def newConverter(fields: Fields) = new OrderedConstructorConverter[T](fields)(m)
}
// Packs a tuple by passing its values, in field order, to the first public
// constructor of T whose arity matches the number of fields.
class OrderedConstructorConverter[T](fields: Fields)(implicit mf: Manifest[T]) extends TupleConverter[T] {
  override val arity = fields.size

  // Keep this as a method, so we can validate by calling, but don't serialize it, and keep it lazy
  // below
  def getConstructor = mf.runtimeClass
    .getConstructors
    .filter { _.getParameterTypes.size == fields.size }
    .head.asInstanceOf[Constructor[T]]

  //Make sure we can actually get a constructor:
  getConstructor

  // Lazy so the non-serializable Constructor is re-resolved on each task JVM.
  lazy val cons = getConstructor

  override def apply(input: TupleEntry): T = {
    val tup = input.getTuple
    val args = (0 until tup.size).map { tup.getObject(_) }
    cons.newInstance(args: _*)
  }
}
| tdyas/scalding | scalding-core/src/main/scala/com/twitter/scalding/TuplePacker.scala | Scala | apache-2.0 | 4,530 |
import scala.compiletime.ops.int._
object Test {
type Max2[A <: Int, B <: Int] <: Int = (A < B) match {
case true => B
case false => A
}
val t0: Max2[-1, 10] = 10
val t1: Max2[4, 2] = 4
val t2: Max2[2, 2] = 1 // error
val t3: Max2[-1, -1] = 0 // error
}
| som-snytt/dotty | tests/neg/singleton-ops-match-type-scrutinee.scala | Scala | apache-2.0 | 275 |
package io.fintrospect.templating
import com.twitter.io.Buf
// Renders a view model into response bytes using some templating engine.
trait TemplateRenderer {
  def toBuf(view: View): Buf
}
| daviddenton/fintrospect | core/src/main/scala/io/fintrospect/templating/TemplateRenderer.scala | Scala | apache-2.0 | 118 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
import scala.concurrent.{Future, ExecutionContext}
import org.scalatest._
import SharedHelpers.EventRecordingReporter
// Verifies that fixture.AsyncFunSpec reports the right events for succeeded,
// failed, pending, canceled and ignored tests — both for bodies that return a
// Future and for plain (synchronous) bodies.
class AsyncFunSpecSpec extends org.scalatest.FunSpec {

  describe("AsyncFunSpec") {

    it("can be used for tests that return Future") {

      class ExampleSpec extends AsyncFunSpec {

        implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global

        type FixtureParam = String
        def withAsyncFixture(test: OneArgAsyncTest): Future[Outcome] =
          test("testing")

        val a = 1

        it("test 1") { fixture =>
          Future {
            assert(a == 1)
          }
        }

        // Intentionally failing assertion.
        it("test 2") { fixture =>
          Future {
            assert(a == 2)
          }
        }

        it("test 3") { fixture =>
          Future {
            pending
          }
        }

        it("test 4") { fixture =>
          Future {
            cancel
          }
        }

        ignore("test 5") { fixture =>
          Future {
            cancel
          }
        }

        override def newInstance = new ExampleSpec
      }

      val rep = new EventRecordingReporter
      val spec = new ExampleSpec
      val status = spec.run(None, Args(reporter = rep))
      // Block until the async suite finishes before inspecting recorded events.
      status.waitUntilCompleted()
      assert(rep.testStartingEventsReceived.length == 4)
      assert(rep.testSucceededEventsReceived.length == 1)
      assert(rep.testSucceededEventsReceived(0).testName == "test 1")
      assert(rep.testFailedEventsReceived.length == 1)
      assert(rep.testFailedEventsReceived(0).testName == "test 2")
      assert(rep.testPendingEventsReceived.length == 1)
      assert(rep.testPendingEventsReceived(0).testName == "test 3")
      assert(rep.testCanceledEventsReceived.length == 1)
      assert(rep.testCanceledEventsReceived(0).testName == "test 4")
      assert(rep.testIgnoredEventsReceived.length == 1)
      assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
    }

    it("can be used for tests that did not return Future") {

      class ExampleSpec extends AsyncFunSpec {

        implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global

        type FixtureParam = String
        def withAsyncFixture(test: OneArgAsyncTest): Future[Outcome] =
          test("testing")

        val a = 1

        it("test 1") { fixture =>
          assert(a == 1)
        }

        // Intentionally failing assertion.
        it("test 2") { fixture =>
          assert(a == 2)
        }

        it("test 3") { fixture =>
          pending
        }

        it("test 4") { fixture =>
          cancel
        }

        ignore("test 5") { fixture =>
          cancel
        }

        override def newInstance = new ExampleSpec
      }

      val rep = new EventRecordingReporter
      val spec = new ExampleSpec
      val status = spec.run(None, Args(reporter = rep))
      status.waitUntilCompleted()
      assert(rep.testStartingEventsReceived.length == 4)
      assert(rep.testSucceededEventsReceived.length == 1)
      assert(rep.testSucceededEventsReceived(0).testName == "test 1")
      assert(rep.testFailedEventsReceived.length == 1)
      assert(rep.testFailedEventsReceived(0).testName == "test 2")
      assert(rep.testPendingEventsReceived.length == 1)
      assert(rep.testPendingEventsReceived(0).testName == "test 3")
      assert(rep.testCanceledEventsReceived.length == 1)
      assert(rep.testCanceledEventsReceived(0).testName == "test 4")
      assert(rep.testIgnoredEventsReceived.length == 1)
      assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
    }
  }
}
} | rahulkavale/scalatest | scalatest-test/src/test/scala/org/scalatest/fixture/AsyncFunSpecSpec.scala | Scala | apache-2.0 | 4,200 |
package spire
package benchmark
import spire.implicits._
object LongRational {
  val Zero = new LongRational(0, 1)
  val One = new LongRational(1, 1)

  /**
   * Builds a rational reduced to lowest terms with a non-negative denominator.
   * The sign is carried by the numerator.
   */
  def apply(n: Long, d: Long): LongRational = {
    val g = gcd(n, d)
    // Flip both components when the denominator is negative so the invariant
    // d >= 0 holds (g is non-negative, so the flip commutes with the division).
    val sign = if (d < 0) -1L else 1L
    new LongRational(sign * (n / g), sign * (d / g))
  }

  @inline final def gcd(a: Long, b: Long) = spire.math.gcd(a, b)
}
/**
 * Unboxed-friendly rational over Longs. Invariant (maintained by the companion
 * `apply` and by every operation here): `d > 0` and gcd(|n|, d) == 1, so the
 * sign lives in the numerator. `compare` and `signum` rely on this.
 */
final class LongRational private (val n: Long, val d: Long) {
  import LongRational.gcd

  /** Negation: only the numerator flips since `d` is kept positive. */
  def unary_-(): LongRational = new LongRational(-n, d)

  /** Addition, using gcds of the denominators to keep intermediates small. */
  def +(r: LongRational): LongRational = {
    val dgcd: Long = gcd(d, r.d)
    if (dgcd == 1) {
      new LongRational(r.d * n + r.n * d, r.d * d)
    } else {
      val lden: Long = d / dgcd
      val rden: Long = r.d / dgcd
      val num: Long = rden * n + r.n * lden
      val ngcd: Long = gcd(num, dgcd)
      if (ngcd == 1)
        new LongRational(num, lden * r.d)
      else
        new LongRational(num / ngcd, (r.d / ngcd) * lden)
    }
  }

  /** Subtraction; mirrors `+` with the numerator term negated. */
  def -(r: LongRational): LongRational = {
    val dgcd: Long = gcd(d, r.d)
    if (dgcd == 1) {
      new LongRational(r.d * n - r.n * d, r.d * d)
    } else {
      val lden: Long = d / dgcd
      val rden: Long = r.d / dgcd
      val num: Long = rden * n - r.n * lden
      val ngcd: Long = gcd(num, dgcd)
      if (ngcd == 1)
        new LongRational(num, lden * r.d)
      else
        new LongRational(num / ngcd, (r.d / ngcd) * lden)
    }
  }

  /** Multiplication with cross-cancellation to avoid overflow. */
  def *(r: LongRational): LongRational = {
    val a = gcd(n, r.d)
    val b = gcd(d, r.n)
    new LongRational((n / a) * (r.n / b), (d / b) * (r.d / a))
  }

  /** Division; re-normalizes the sign since r.n may be negative. */
  def /(r: LongRational): LongRational = {
    val a = gcd(n, r.n)
    val b = gcd(d, r.d)
    val num = (n / a) * (r.d / b)
    val den = (d / b) * (r.n / a)
    if (den < 0L) {
      new LongRational(-num, -den)
    } else {
      new LongRational(num, den)
    }
  }

  def pow(exp: Int): LongRational = if (exp == 0) {
    LongRational.One
  } else if (exp < 0) {
    // Inverting swaps numerator and denominator; when n < 0 and |exp| is odd
    // the new denominator would be negative, so re-normalize the sign.
    // (Fix: the previous code skipped this, leaving a negative denominator
    // that broke the class invariant used by compare/signum.)
    // NOTE(review): Math.abs(Int.MinValue) stays negative — pre-existing caveat.
    val e = java.lang.Math.abs(exp)
    val num = d pow e
    val den = n pow e
    if (den < 0L) new LongRational(-num, -den) else new LongRational(num, den)
  } else {
    new LongRational(n pow exp, d pow exp)
  }

  /** Three-way comparison; relies on both denominators being positive. */
  def compare(r: LongRational): Int = {
    val dgcd = gcd(d, r.d)
    if (dgcd == 1)
      java.lang.Math.signum(n * r.d - r.n * d).toInt
    else
      java.lang.Math.signum((r.d / dgcd) * n - (d / dgcd) * r.n).toInt
  }

  /** Sign of the rational, i.e. the sign of the numerator. */
  def signum: Int = if (n < 0) -1 else if (n > 0) 1 else 0
}
| non/spire | benchmark/src/main/scala/spire/benchmark/LongRational.scala | Scala | mit | 2,425 |
package com.nudemeth.example.web.controller
import com.nudemeth.example.web.engine._
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.ScalatraServlet
import org.slf4j.{Logger, LoggerFactory}
// Base servlet for all controllers: provides json4s formats, a logger, and a
// server-side JavaScript renderer for isomorphic (server-rendered) React views.
trait BaseController extends ScalatraServlet {
// Default json4s formats used by (de)serialization in subclasses.
implicit val jsonFormats: Formats = DefaultFormats
protected val logger: Logger = LoggerFactory.getLogger(getClass)
// Loads the polyfill and application bundle, then instantiates the Frontend
// entry point inside the JS engine.
// NOTE(review): J2V8Engine.instance looks like a shared singleton — confirm
// that registerScripts/build is safe to invoke once per servlet instance.
protected val renderer: JavaScriptEngine = J2V8Engine.instance.registerScripts(
Seq(
ScriptURL(getClass.getResource("/webapp/js/polyfill/j2v8-polyfill.js")),
ScriptURL(getClass.getResource("/webapp/js/bundle.js")),
ScriptText("var frontend = new com.nudemeth.example.web.Frontend();")
)
).build
// Release the JS engine's native resources when the servlet is destroyed.
override def destroy(): Unit = {
renderer.destroy
super.destroy()
}
}
| nudemeth/scalatra-react-isomorphic | src/main/scala/com/nudemeth/example/web/controller/BaseController.scala | Scala | unlicense | 786 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.util
import io.gatling.BaseSpec
class StringReplaceSpec extends BaseSpec {
// Replacement must apply to every occurrence of the target, not just the first.
"StringReplace.replace" should "replace all occurrences" in {
StringReplace.replace("1234foo5678foo9012foo", "foo", "bar") shouldBe "1234bar5678bar9012bar"
}
}
| wiacekm/gatling | gatling-commons/src/test/scala/io/gatling/commons/util/StringReplaceSpec.scala | Scala | apache-2.0 | 892 |
package gitbucket.core.model
/** Slick mapping for the ACCESS_TOKEN table; mixed into the profile cake. */
trait AccessTokenComponent { self: Profile =>
  import profile.simple._

  /** Query entry point for the ACCESS_TOKEN table. */
  lazy val AccessTokens = TableQuery[AccessTokens]

  class AccessTokens(tag: Tag) extends Table[AccessToken](tag, "ACCESS_TOKEN") {
    // Auto-incremented surrogate key.
    val accessTokenId = column[Int]("ACCESS_TOKEN_ID", O.AutoInc)
    val userName = column[String]("USER_NAME")
    val tokenHash = column[String]("TOKEN_HASH")
    val note = column[String]("NOTE")
    // Bidirectional mapping between a table row and the AccessToken case class.
    def * = (accessTokenId, userName, tokenHash, note) <> (AccessToken.tupled, AccessToken.unapply)
  }
}
/**
 * A personal API access token.
 *
 * @param accessTokenId auto-generated surrogate key (0 until persisted)
 * @param userName owner of the token
 * @param tokenHash stored hash of the token — presumably the raw token is not
 *                  persisted; verify against the token-issuing code
 * @param note free-form description of the token
 */
case class AccessToken(
accessTokenId: Int = 0,
userName: String,
tokenHash: String,
note: String
)
| yonglehou/gitbucket | src/main/scala/gitbucket/core/model/AccessToken.scala | Scala | apache-2.0 | 650 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.internal.config
import java.util.concurrent.TimeUnit
// Configuration entries for the standalone-mode Worker daemon.
private[spark] object Worker {
// Common prefix shared by standalone-worker configuration keys.
val SPARK_WORKER_PREFIX = "spark.worker"
// Internal-only: resource allocation file consumed by the worker itself.
val SPARK_WORKER_RESOURCE_FILE =
ConfigBuilder("spark.worker.resourcesFile")
.internal()
.doc("Path to a file containing the resources allocated to the worker. " +
"The file should be formatted as a JSON array of ResourceAllocation objects. " +
"Only used internally in standalone mode.")
.version("3.0.0")
.stringConf
.createOptional
// Seconds without a heartbeat before the master marks the worker dead.
val WORKER_TIMEOUT = ConfigBuilder("spark.worker.timeout")
.version("0.6.2")
.longConf
.createWithDefault(60)
// How long to wait for a driver process to terminate before giving up.
val WORKER_DRIVER_TERMINATE_TIMEOUT = ConfigBuilder("spark.worker.driverTerminateTimeout")
.version("2.1.2")
.timeConf(TimeUnit.MILLISECONDS)
.createWithDefaultString("10s")
// Periodic cleanup of finished-application work directories (off by default).
val WORKER_CLEANUP_ENABLED = ConfigBuilder("spark.worker.cleanup.enabled")
.version("1.0.0")
.booleanConf
.createWithDefault(false)
// Cleanup sweep interval, in seconds (default: 30 minutes).
val WORKER_CLEANUP_INTERVAL = ConfigBuilder("spark.worker.cleanup.interval")
.version("1.0.0")
.longConf
.createWithDefault(60 * 30)
// How long finished application data is retained, in seconds (default: 7 days).
val APP_DATA_RETENTION = ConfigBuilder("spark.worker.cleanup.appDataTtl")
.version("1.0.0")
.longConf
.createWithDefault(7 * 24 * 3600)
// Prefer the configured master hostname over the resolved address.
val PREFER_CONFIGURED_MASTER_ADDRESS = ConfigBuilder("spark.worker.preferConfiguredMasterAddress")
.version("2.2.1")
.booleanConf
.createWithDefault(false)
// Web UI settings.
val WORKER_UI_PORT = ConfigBuilder("spark.worker.ui.port")
.version("1.1.0")
.intConf
.createOptional
// Maximum number of finished executors shown in the UI.
val WORKER_UI_RETAINED_EXECUTORS = ConfigBuilder("spark.worker.ui.retainedExecutors")
.version("1.5.0")
.intConf
.createWithDefault(1000)
// Maximum number of finished drivers shown in the UI.
val WORKER_UI_RETAINED_DRIVERS = ConfigBuilder("spark.worker.ui.retainedDrivers")
.version("1.5.0")
.intConf
.createWithDefault(1000)
// Cache size for uncompressed lengths of compressed log files.
val UNCOMPRESSED_LOG_FILE_LENGTH_CACHE_SIZE_CONF =
ConfigBuilder("spark.worker.ui.compressedLogFileLengthCacheSize")
.version("2.0.2")
.intConf
.createWithDefault(100)
// Feature flag for graceful worker decommissioning.
private[spark] val WORKER_DECOMMISSION_ENABLED =
ConfigBuilder("spark.worker.decommission.enabled")
.version("3.1.0")
.booleanConf
.createWithDefault(false)
}
| goldmedal/spark | core/src/main/scala/org/apache/spark/internal/config/Worker.scala | Scala | apache-2.0 | 3,057 |
package serialization
import play.api.libs.json._
import models._
/** play-json (de)serializers for the model types. */
trait Json {
  implicit val gnipSourceFmt = Json.format[GnipSource]
  implicit val datasiftSourceFmt = Json.format[DatasiftSource]

  /**
   * Polymorphic (de)serializer for [[Source]].
   *
   * Serialization tags each concrete source with a "type" discriminator field;
   * deserialization dispatches on that tag. Unknown or missing tags produce a
   * JsError, matching the previous behavior.
   */
  implicit val sourceFmt = new Format[Source] {
    def writes(s: Source): JsValue = s match {
      case g: GnipSource => Json.obj("type" -> "gnip", "rules" -> g.rules)
      case d: DatasiftSource => Json.obj("type" -> "datasift", "csdl" -> d.csdl)
      case _ => Json.obj("type" -> "unknown")
    }
    def reads(json: JsValue): JsResult[Source] = {
      // BUG FIX: use the direct child lookup `\` instead of the recursive
      // search `\\`: `json \\ "type"` returns Seq[JsValue] (which has no
      // `asOpt`) and would also match "type" fields nested anywhere in the
      // document, not just the top-level discriminator.
      (json \ "type").asOpt[String] match {
        case Some("gnip") => json.validate[GnipSource]
        case Some("datasift") => json.validate[DatasiftSource]
        case _ => JsError()
      }
    }
  }

  implicit val focussetFmt = Json.format[Focusset]
}
| fernando-romero/salat-test | app/serialization/Json.scala | Scala | mit | 826 |
package leibniz.inhabitance
//import cats.~>
// Evidence that `F[A]` has no inhabitants for any choice of `A`.
sealed abstract class TotallyUninhabited[F[_]] {
// An Uninhabited witness for F[A], valid at every A.
def proof[A]: Uninhabited[F[A]]
// def contramapK[G[_]](f: G ~> F): TotallyUninhabited[G]
// If both F and G are totally uninhabited, so is their pointwise Either.
def cozipK[G[_]](G: TotallyUninhabited[G]): TotallyUninhabited[λ[x => Either[F[x], G[x]]]]
}
object TotallyUninhabited {
} | alexknvl/leibniz | src/main/scala/leibniz/inhabitance/TotallyUninhabited.scala | Scala | mit | 317 |
/*-------------------------------------------------------------------------*\\
** ScalaCheck **
** Copyright (c) 2007-2019 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\\*------------------------------------------------------------------------ */
package org.scalacheck.example
import org.scalacheck._
// Example ScalaCheck property suite over a small set of string helpers.
object StringUtils extends Properties("Examples.StringUtils") {
// Functions under test. NOTE: `truncate` deliberately leaves n < 0 unguarded
// so the properties below can demonstrate Prop.throws; `truncate2` is the
// guarded variant.
private object StringUtils {
// Throws StringIndexOutOfBoundsException for n < 0 (intentional — see above).
def truncate(s: String, n: Int) = {
if(s.length <= n) s
else s.substring(0, n) ++ "..."
}
// Safe variant: negative n yields the empty string.
def truncate2(s: String, n: Int) = {
if(n < 0) ""
else if(s.length <= n) s
else s.substring(0, n) ++ "..."
}
// Splits on the delimiter using java.util.StringTokenizer; written in a
// deliberately imperative, Java-like style as example code under test.
def tokenize(s: String, delim: Char) = {
val delimStr = Character.valueOf(delim).toString
val st = new java.util.StringTokenizer(s, delimStr)
val tokens = Array.ofDim[String](st.countTokens)
var i = 0;
while(st.hasMoreTokens) {
tokens(i) = st.nextToken()
i += 1;
}
tokens;
}
// Substring containment via indexOf.
def contains(s: String, subString: String) = {
s.indexOf(subString) != -1
}
}
// For n < 0 truncate must throw; otherwise it returns s unchanged or a
// truncated prefix with "...". `t` is lazy so evaluation happens inside
// Prop.throws when n < 0.
property("truncate") = Prop.forAll { (s: String, n: Int) =>
lazy val t = StringUtils.truncate(s, n)
if(n < 0)
Prop.throws(classOf[StringIndexOutOfBoundsException]) { t }
else
(s.length <= n && t == s) ||
(s.length > n && t == s.take(n)+"...")
}
// Same property expressed with a precondition (==>) instead of branching.
property("truncate.precond") = Prop.forAll { (s: String, n: Int) =>
import Prop.propBoolean
(n >= 0) ==> {
val t = StringUtils.truncate(s, n)
(s.length <= n && t == s) ||
(s.length > n && t == s.take(n)+"...")
}
}
// The guarded variant must return "" for negative n and truncate otherwise.
property("truncate2") = Prop.forAll { (s: String, n: Int) =>
val t = StringUtils.truncate2(s, n)
if(n < 0)
t == ""
else
(s.length <= n && t == s) ||
(s.length > n && t == s.take(n)+"...")
}
//property("tokenize") = {
// import Prop.AnyOperators
// Prop.forAll(Gen.listOf(Gen.alphaStr), Gen.numChar) { (ts, d) =>
// val str = ts.mkString(d.toString)
// StringUtils.tokenize(str, d).toList ?= ts
// }
//}
// A string always contains any of its own substrings.
property("contains") =
Prop.forAll { (s1: String, s2: String, s3: String) =>
StringUtils.contains(s1+s2+s3, s2)
}
}
| xuwei-k/scalacheck | jvm/src/test/scala/org/scalacheck/examples/StringUtils.scala | Scala | bsd-3-clause | 2,586 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.feature.image
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.augmentation
/**
 * Flips the input image horizontally (mirror along the vertical axis).
 * Thin wrapper that delegates to the vision-augmentation HFlip transformer.
 */
class ImageHFlip() extends ImageProcessing {
  // Underlying transformer that performs the actual flip.
  private val hflip = new augmentation.HFlip()

  override def apply(prev: Iterator[ImageFeature]): Iterator[ImageFeature] =
    hflip.apply(prev)

  override def transformMat(feature: ImageFeature): Unit =
    hflip.transformMat(feature)
}
object ImageHFlip {
// Factory for a new horizontal-flip transformer.
def apply(): ImageHFlip = new ImageHFlip()
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/feature/image/ImageHFlip.scala | Scala | apache-2.0 | 1,250 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.encoders
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.{typeTag, TypeTag}
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.catalyst.{InternalRow, JavaTypeInference, ScalaReflection}
import org.apache.spark.sql.catalyst.analysis.{Analyzer, GetColumnByOrdinal, SimpleAnalyzer, UnresolvedAttribute, UnresolvedExtractValue}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{GenerateSafeProjection, GenerateUnsafeProjection}
import org.apache.spark.sql.catalyst.expressions.objects.{AssertNotNull, Invoke, NewInstance}
import org.apache.spark.sql.catalyst.optimizer.SimplifyCasts
import org.apache.spark.sql.catalyst.plans.logical.{CatalystSerde, DeserializeToObject, LocalRelation}
import org.apache.spark.sql.types.{BooleanType, ObjectType, StructField, StructType}
import org.apache.spark.util.Utils
/**
 * A factory for constructing encoders that convert objects and primitives to and from the
 * internal row format using catalyst expressions and code generation. By default, the
 * expressions used to retrieve values from an input row when producing an object will be created as
 * follows:
 * - Classes will have their sub fields extracted by name using [[UnresolvedAttribute]] expressions
 * and [[UnresolvedExtractValue]] expressions.
 * - Tuples will have their subfields extracted by position using [[BoundReference]] expressions.
 * - Primitives will have their values extracted from the first ordinal with a schema that defaults
 * to the name `value`.
 */
object ExpressionEncoder {
// Builds an encoder for a Scala type T via runtime reflection.
def apply[T : TypeTag](): ExpressionEncoder[T] = {
// We convert the not-serializable TypeTag into StructType and ClassTag.
val mirror = typeTag[T].mirror
val tpe = typeTag[T].tpe
if (ScalaReflection.optionOfProductType(tpe)) {
throw new UnsupportedOperationException(
"Cannot create encoder for Option of Product type, because Product type is represented " +
"as a row, and the entire row can not be null in Spark SQL like normal databases. " +
"You can wrap your type with Tuple1 if you do want top level null Product objects, " +
"e.g. instead of creating `Dataset[Option[MyClass]]`, you can do something like " +
"`val ds: Dataset[Tuple1[MyClass]] = Seq(Tuple1(MyClass(...)), Tuple1(null)).toDS`")
}
val cls = mirror.runtimeClass(tpe)
// "flat" means T maps to a single column (primitives etc.) rather than a row.
val flat = !ScalaReflection.definedByConstructorParams(tpe)
val inputObject = BoundReference(0, ScalaReflection.dataTypeFor[T], nullable = true)
val nullSafeInput = if (flat) {
inputObject
} else {
// For input object of Product type, we can't encode it to row if it's null, as Spark SQL
// doesn't allow top-level row to be null, only its columns can be null.
AssertNotNull(inputObject, Seq("top level Product input object"))
}
val serializer = ScalaReflection.serializerFor[T](nullSafeInput)
val deserializer = ScalaReflection.deserializerFor[T]
// Non-struct types are wrapped in a single-column schema named "value".
val schema = ScalaReflection.schemaFor[T] match {
case ScalaReflection.Schema(s: StructType, _) => s
case ScalaReflection.Schema(dt, nullable) => new StructType().add("value", dt, nullable)
}
new ExpressionEncoder[T](
schema,
flat,
serializer.flatten,
deserializer,
ClassTag[T](cls))
}
// TODO: improve error message for java bean encoder.
// Builds an encoder for a Java bean class using JavaTypeInference.
def javaBean[T](beanClass: Class[T]): ExpressionEncoder[T] = {
val schema = JavaTypeInference.inferDataType(beanClass)._1
assert(schema.isInstanceOf[StructType])
val serializer = JavaTypeInference.serializerFor(beanClass)
val deserializer = JavaTypeInference.deserializerFor(beanClass)
new ExpressionEncoder[T](
schema.asInstanceOf[StructType],
flat = false,
serializer.flatten,
deserializer,
ClassTag[T](beanClass))
}
/**
 * Given a set of N encoders, constructs a new encoder that produce objects as items in an
 * N-tuple. Note that these encoders should be unresolved so that information about
 * name/positional binding is preserved.
 */
def tuple(encoders: Seq[ExpressionEncoder[_]]): ExpressionEncoder[_] = {
encoders.foreach(_.assertUnresolved())
// Tuple fields are named _1, _2, ... matching scala.TupleN accessors.
val schema = StructType(encoders.zipWithIndex.map {
case (e, i) =>
val (dataType, nullable) = if (e.flat) {
e.schema.head.dataType -> e.schema.head.nullable
} else {
e.schema -> true
}
StructField(s"_${i + 1}", dataType, nullable)
})
val cls = Utils.getContextOrSparkClassLoader.loadClass(s"scala.Tuple${encoders.size}")
// Rewrite each child serializer to read its input from the tuple's _N field
// instead of the original top-level BoundReference.
val serializer = encoders.zipWithIndex.map { case (enc, index) =>
val originalInputObject = enc.serializer.head.collect { case b: BoundReference => b }.head
val newInputObject = Invoke(
BoundReference(0, ObjectType(cls), nullable = true),
s"_${index + 1}",
originalInputObject.dataType)
val newSerializer = enc.serializer.map(_.transformUp {
case b: BoundReference if b == originalInputObject => newInputObject
})
if (enc.flat) {
newSerializer.head
} else {
// For non-flat encoder, the input object is not top level anymore after being combined to
// a tuple encoder, thus it can be null and we should wrap the `CreateStruct` with `If` and
// null check to handle null case correctly.
// e.g. for Encoder[(Int, String)], the serializer expressions will create 2 columns, and is
// not able to handle the case when the input tuple is null. This is not a problem as there
// is a check to make sure the input object won't be null. However, if this encoder is used
// to create a bigger tuple encoder, the original input object becomes a filed of the new
// input tuple and can be null. So instead of creating a struct directly here, we should add
// a null/None check and return a null struct if the null/None check fails.
val struct = CreateStruct(newSerializer)
val nullCheck = Or(
IsNull(newInputObject),
Invoke(Literal.fromObject(None), "equals", BooleanType, newInputObject :: Nil))
If(nullCheck, Literal.create(null, struct.dataType), struct)
}
}
// Rebind each child deserializer to read column `index` of the combined row.
val childrenDeserializers = encoders.zipWithIndex.map { case (enc, index) =>
if (enc.flat) {
enc.deserializer.transform {
case g: GetColumnByOrdinal => g.copy(ordinal = index)
}
} else {
val input = GetColumnByOrdinal(index, enc.schema)
val deserialized = enc.deserializer.transformUp {
case UnresolvedAttribute(nameParts) =>
assert(nameParts.length == 1)
UnresolvedExtractValue(input, Literal(nameParts.head))
case GetColumnByOrdinal(ordinal, _) => GetStructField(input, ordinal)
}
If(IsNull(input), Literal.create(null, deserialized.dataType), deserialized)
}
}
val deserializer =
NewInstance(cls, childrenDeserializers, ObjectType(cls), propagateNull = false)
new ExpressionEncoder[Any](
schema,
flat = false,
serializer,
deserializer,
ClassTag(cls))
}
// Tuple1
def tuple[T](e: ExpressionEncoder[T]): ExpressionEncoder[Tuple1[T]] =
tuple(Seq(e)).asInstanceOf[ExpressionEncoder[Tuple1[T]]]
def tuple[T1, T2](
e1: ExpressionEncoder[T1],
e2: ExpressionEncoder[T2]): ExpressionEncoder[(T1, T2)] =
tuple(Seq(e1, e2)).asInstanceOf[ExpressionEncoder[(T1, T2)]]
def tuple[T1, T2, T3](
e1: ExpressionEncoder[T1],
e2: ExpressionEncoder[T2],
e3: ExpressionEncoder[T3]): ExpressionEncoder[(T1, T2, T3)] =
tuple(Seq(e1, e2, e3)).asInstanceOf[ExpressionEncoder[(T1, T2, T3)]]
def tuple[T1, T2, T3, T4](
e1: ExpressionEncoder[T1],
e2: ExpressionEncoder[T2],
e3: ExpressionEncoder[T3],
e4: ExpressionEncoder[T4]): ExpressionEncoder[(T1, T2, T3, T4)] =
tuple(Seq(e1, e2, e3, e4)).asInstanceOf[ExpressionEncoder[(T1, T2, T3, T4)]]
def tuple[T1, T2, T3, T4, T5](
e1: ExpressionEncoder[T1],
e2: ExpressionEncoder[T2],
e3: ExpressionEncoder[T3],
e4: ExpressionEncoder[T4],
e5: ExpressionEncoder[T5]): ExpressionEncoder[(T1, T2, T3, T4, T5)] =
tuple(Seq(e1, e2, e3, e4, e5)).asInstanceOf[ExpressionEncoder[(T1, T2, T3, T4, T5)]]
}
/**
 * A generic encoder for JVM objects.
 *
 * @param schema The schema after converting `T` to a Spark SQL row.
 * @param flat Whether `T` maps to a single column rather than a struct row.
 * @param serializer A set of expressions, one for each top-level field that can be used to
 * extract the values from a raw object into an [[InternalRow]].
 * @param deserializer An expression that will construct an object given an [[InternalRow]].
 * @param clsTag A classtag for `T`.
 */
case class ExpressionEncoder[T](
schema: StructType,
flat: Boolean,
serializer: Seq[Expression],
deserializer: Expression,
clsTag: ClassTag[T])
extends Encoder[T] {
if (flat) require(serializer.size == 1)
// serializer expressions are used to encode an object to a row, while the object is usually an
// intermediate value produced inside an operator, not from the output of the child operator. This
// is quite different from normal expressions, and `AttributeReference` doesn't work here
// (intermediate value is not an attribute). We assume that all serializer expressions use a same
// `BoundReference` to refer to the object, and throw exception if they don't.
assert(serializer.forall(_.references.isEmpty), "serializer cannot reference to any attributes.")
assert(serializer.flatMap { ser =>
val boundRefs = ser.collect { case b: BoundReference => b }
assert(boundRefs.nonEmpty,
"each serializer expression should contains at least one `BoundReference`")
boundRefs
}.distinct.length <= 1, "all serializer expressions must use the same BoundReference.")
/**
 * Returns a new copy of this encoder, where the `deserializer` is resolved and bound to the
 * given schema.
 *
 * Note that, ideally encoder is used as a container of serde expressions, the resolution and
 * binding stuff should happen inside query framework. However, in some cases we need to
 * use encoder as a function to do serialization directly(e.g. Dataset.collect), then we can use
 * this method to do resolution and binding outside of query framework.
 */
def resolveAndBind(
attrs: Seq[Attribute] = schema.toAttributes,
analyzer: Analyzer = SimpleAnalyzer): ExpressionEncoder[T] = {
// Wrap the deserializer in a dummy plan so the analyzer can resolve it.
val dummyPlan = CatalystSerde.deserialize(LocalRelation(attrs))(this)
val analyzedPlan = analyzer.execute(dummyPlan)
analyzer.checkAnalysis(analyzedPlan)
val resolved = SimplifyCasts(analyzedPlan).asInstanceOf[DeserializeToObject].deserializer
val bound = BindReferences.bindReference(resolved, attrs)
copy(deserializer = bound)
}
// Code-generated projection used by toRow; built lazily and reused.
@transient
private lazy val extractProjection = GenerateUnsafeProjection.generate(serializer)
// Single-slot row reused across toRow calls (hence toRow's reuse caveat).
@transient
private lazy val inputRow = new GenericInternalRow(1)
// Code-generated projection used by fromRow; built lazily and reused.
@transient
private lazy val constructProjection = GenerateSafeProjection.generate(deserializer :: Nil)
/**
 * Returns a new set (with unique ids) of [[NamedExpression]] that represent the serialized form
 * of this object.
 */
def namedExpressions: Seq[NamedExpression] = schema.map(_.name).zip(serializer).map {
case (_, ne: NamedExpression) => ne.newInstance()
case (name, e) => Alias(e, name)()
}
/**
 * Returns an encoded version of `t` as a Spark SQL row. Note that multiple calls to
 * toRow are allowed to return the same actual [[InternalRow]] object. Thus, the caller should
 * copy the result before making another call if required.
 */
def toRow(t: T): InternalRow = try {
inputRow(0) = t
extractProjection(inputRow)
} catch {
case e: Exception =>
throw new RuntimeException(
s"Error while encoding: $e\n${serializer.map(_.treeString).mkString("\n")}", e)
}
/**
 * Returns an object of type `T`, extracting the required values from the provided row. Note that
 * you must `resolveAndBind` an encoder to a specific schema before you can call this
 * function.
 */
def fromRow(row: InternalRow): T = try {
constructProjection(row).get(0, ObjectType(clsTag.runtimeClass)).asInstanceOf[T]
} catch {
case e: Exception =>
throw new RuntimeException(s"Error while decoding: $e\n${deserializer.treeString}", e)
}
/**
 * The process of resolution to a given schema throws away information about where a given field
 * is being bound by ordinal instead of by name. This method checks to make sure this process
 * has not been done already in places where we plan to do later composition of encoders.
 */
def assertUnresolved(): Unit = {
(deserializer +: serializer).foreach(_.foreach {
case a: AttributeReference if a.name != "loopVar" =>
sys.error(s"Unresolved encoder expected, but $a was found.")
case _ =>
})
}
// Human-readable binding markers for toString: "" for unresolved,
// "#exprId" for attributes, "[ordinal]" for bound references.
protected val attrs = serializer.flatMap(_.collect {
case _: UnresolvedAttribute => ""
case a: Attribute => s"#${a.exprId}"
case b: BoundReference => s"[${b.ordinal}]"
})
protected val schemaString =
schema
.zip(attrs)
.map { case(f, a) => s"${f.name}$a: ${f.dataType.simpleString}"}.mkString(", ")
override def toString: String = s"class[$schemaString]"
}
| kimoonkim/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala | Scala | apache-2.0 | 14,325 |
package ipfix.ie
import ipfix.ie.Elements._
object DefaultIEMap extends IEMap {
val ieByID = Map(
1 -> OctetDeltaCount,
2 -> PacketDeltaCount,
3 -> DeltaFlowCount,
4 -> ProtocolIdentifier,
5 -> IpClassOfService,
6 -> TcpControlBits,
7 -> SourceTransportPort,
8 -> SourceIPv4Address,
9 -> SourceIPv4PrefixLength,
10 -> IngressInterface,
11 -> DestinationTransportPort,
12 -> DestinationIPv4Address,
13 -> DestinationIPv4PrefixLength,
14 -> EgressInterface,
15 -> IpNextHopIPv4Address,
16 -> BgpSourceAsNumber,
17 -> BgpDestinationAsNumber,
18 -> BgpNextHopIPv4Address,
19 -> PostMCastPacketDeltaCount,
20 -> PostMCastOctetDeltaCount,
21 -> FlowEndSysUpTime,
22 -> FlowStartSysUpTime,
23 -> PostOctetDeltaCount,
24 -> PostPacketDeltaCount,
25 -> MinimumIpTotalLength,
26 -> MaximumIpTotalLength,
27 -> SourceIPv6Address,
28 -> DestinationIPv6Address,
29 -> SourceIPv6PrefixLength,
30 -> DestinationIPv6PrefixLength,
31 -> FlowLabelIPv6,
32 -> IcmpTypeCodeIPv4,
33 -> IgmpType,
34 -> SamplingInterval,
35 -> SamplingAlgorithm,
36 -> FlowActiveTimeout,
37 -> FlowIdleTimeout,
38 -> EngineType,
39 -> EngineId,
40 -> ExportedOctetTotalCount,
41 -> ExportedMessageTotalCount,
42 -> ExportedFlowRecordTotalCount,
43 -> Ipv4RouterSc,
44 -> SourceIPv4Prefix,
45 -> DestinationIPv4Prefix,
46 -> MplsTopLabelType,
47 -> MplsTopLabelIPv4Address,
48 -> SamplerId,
49 -> SamplerMode,
50 -> SamplerRandomInterval,
51 -> ClassId,
52 -> MinimumTTL,
53 -> MaximumTTL,
54 -> FragmentIdentification,
55 -> PostIpClassOfService,
56 -> SourceMacAddress,
57 -> PostDestinationMacAddress,
58 -> VlanId,
59 -> PostVlanId,
60 -> IpVersion,
61 -> FlowDirection,
62 -> IpNextHopIPv6Address,
63 -> BgpNextHopIPv6Address,
64 -> Ipv6ExtensionHeaders,
70 -> MplsTopLabelStackSection,
71 -> MplsLabelStackSection2,
72 -> MplsLabelStackSection3,
73 -> MplsLabelStackSection4,
74 -> MplsLabelStackSection5,
75 -> MplsLabelStackSection6,
76 -> MplsLabelStackSection7,
77 -> MplsLabelStackSection8,
78 -> MplsLabelStackSection9,
79 -> MplsLabelStackSection10,
80 -> DestinationMacAddress,
81 -> PostSourceMacAddress,
82 -> InterfaceName,
83 -> InterfaceDescription,
84 -> SamplerName,
85 -> OctetTotalCount,
86 -> PacketTotalCount,
87 -> FlagsAndSamplerId,
88 -> FragmentOffset,
89 -> ForwardingStatus,
90 -> MplsVpnRouteDistinguisher,
91 -> MplsTopLabelPrefixLength,
92 -> SrcTrafficIndex,
93 -> DstTrafficIndex,
94 -> ApplicationDescription,
95 -> ApplicationId,
96 -> ApplicationName,
98 -> PostIpDiffServCodePoint,
99 -> MulticastReplicationFactor,
100 -> ClassName,
101 -> ClassificationEngineId,
102 -> Layer2packetSectionOffset,
103 -> Layer2packetSectionSize,
104 -> Layer2packetSectionData,
128 -> BgpNextAdjacentAsNumber,
129 -> BgpPrevAdjacentAsNumber,
130 -> ExporterIPv4Address,
131 -> ExporterIPv6Address,
132 -> DroppedOctetDeltaCount,
133 -> DroppedPacketDeltaCount,
134 -> DroppedOctetTotalCount,
135 -> DroppedPacketTotalCount,
136 -> FlowEndReason,
137 -> CommonPropertiesId,
138 -> ObservationPointId,
139 -> IcmpTypeCodeIPv6,
140 -> MplsTopLabelIPv6Address,
141 -> LineCardId,
142 -> PortId,
143 -> MeteringProcessId,
144 -> ExportingProcessId,
145 -> TemplateId,
146 -> WlanChannelId,
147 -> WlanSSID,
148 -> FlowId,
149 -> ObservationDomainId,
150 -> FlowStartSeconds,
151 -> FlowEndSeconds,
152 -> FlowStartMilliseconds,
153 -> FlowEndMilliseconds,
154 -> FlowStartMicroseconds,
155 -> FlowEndMicroseconds,
156 -> FlowStartNanoseconds,
157 -> FlowEndNanoseconds,
158 -> FlowStartDeltaMicroseconds,
159 -> FlowEndDeltaMicroseconds,
160 -> SystemInitTimeMilliseconds,
161 -> FlowDurationMilliseconds,
162 -> FlowDurationMicroseconds,
163 -> ObservedFlowTotalCount,
164 -> IgnoredPacketTotalCount,
165 -> IgnoredOctetTotalCount,
166 -> NotSentFlowTotalCount,
167 -> NotSentPacketTotalCount,
168 -> NotSentOctetTotalCount,
169 -> DestinationIPv6Prefix,
170 -> SourceIPv6Prefix,
171 -> PostOctetTotalCount,
172 -> PostPacketTotalCount,
173 -> FlowKeyIndicator,
174 -> PostMCastPacketTotalCount,
175 -> PostMCastOctetTotalCount,
176 -> IcmpTypeIPv4,
177 -> IcmpCodeIPv4,
178 -> IcmpTypeIPv6,
179 -> IcmpCodeIPv6,
180 -> UdpSourcePort,
181 -> UdpDestinationPort,
182 -> TcpSourcePort,
183 -> TcpDestinationPort,
184 -> TcpSequenceNumber,
185 -> TcpAcknowledgementNumber,
186 -> TcpWindowSize,
187 -> TcpUrgentPointer,
188 -> TcpHeaderLength,
189 -> IpHeaderLength,
190 -> TotalLengthIPv4,
191 -> PayloadLengthIPv6,
192 -> IpTTL,
193 -> NextHeaderIPv6,
194 -> MplsPayloadLength,
195 -> IpDiffServCodePoint,
196 -> IpPrecedence,
197 -> FragmentFlags,
198 -> OctetDeltaSumOfSquares,
199 -> OctetTotalSumOfSquares,
200 -> MplsTopLabelTTL,
201 -> MplsLabelStackLength,
202 -> MplsLabelStackDepth,
203 -> MplsTopLabelExp,
204 -> IpPayloadLength,
205 -> UdpMessageLength,
206 -> IsMulticast,
207 -> Ipv4IHL,
208 -> Ipv4Options,
209 -> TcpOptions,
210 -> PaddingOctets,
211 -> CollectorIPv4Address,
212 -> CollectorIPv6Address,
213 -> ExportInterface,
214 -> ExportProtocolVersion,
215 -> ExportTransportProtocol,
216 -> CollectorTransportPort,
217 -> ExporterTransportPort,
218 -> TcpSynTotalCount,
219 -> TcpFinTotalCount,
220 -> TcpRstTotalCount,
221 -> TcpPshTotalCount,
222 -> TcpAckTotalCount,
223 -> TcpUrgTotalCount,
224 -> IpTotalLength,
225 -> PostNATSourceIPv4Address,
226 -> PostNATDestinationIPv4Address,
227 -> PostNAPTSourceTransportPort,
228 -> PostNAPTDestinationTransportPort,
229 -> NatOriginatingAddressRealm,
230 -> NatEvent,
231 -> InitiatorOctets,
232 -> ResponderOctets,
233 -> FirewallEvent,
234 -> IngressVRFID,
235 -> EgressVRFID,
236 -> VRFname,
237 -> PostMplsTopLabelExp,
238 -> TcpWindowScale,
239 -> BiflowDirection,
240 -> EthernetHeaderLength,
241 -> EthernetPayloadLength,
242 -> EthernetTotalLength,
243 -> Dot1qVlanId,
244 -> Dot1qPriority,
245 -> Dot1qCustomerVlanId,
246 -> Dot1qCustomerPriority,
247 -> MetroEvcId,
248 -> MetroEvcType,
249 -> PseudoWireId,
250 -> PseudoWireType,
251 -> PseudoWireControlWord,
252 -> IngressPhysicalInterface,
253 -> EgressPhysicalInterface,
254 -> PostDot1qVlanId,
255 -> PostDot1qCustomerVlanId,
256 -> EthernetType,
257 -> PostIpPrecedence,
258 -> CollectionTimeMilliseconds,
259 -> ExportSctpStreamId,
260 -> MaxExportSeconds,
261 -> MaxFlowEndSeconds,
262 -> MessageMD5Checksum,
263 -> MessageScope,
264 -> MinExportSeconds,
265 -> MinFlowStartSeconds,
266 -> OpaqueOctets,
267 -> SessionScope,
268 -> MaxFlowEndMicroseconds,
269 -> MaxFlowEndMilliseconds,
270 -> MaxFlowEndNanoseconds,
271 -> MinFlowStartMicroseconds,
272 -> MinFlowStartMilliseconds,
273 -> MinFlowStartNanoseconds,
274 -> CollectorCertificate,
275 -> ExporterCertificate,
276 -> DataRecordsReliability,
277 -> ObservationPointType,
278 -> NewConnectionDeltaCount,
279 -> ConnectionSumDurationSeconds,
280 -> ConnectionTransactionId,
281 -> PostNATSourceIPv6Address,
282 -> PostNATDestinationIPv6Address,
283 -> NatPoolId,
284 -> NatPoolName,
285 -> AnonymizationFlags,
286 -> AnonymizationTechnique,
287 -> InformationElementIndex,
288 -> P2pTechnology,
289 -> TunnelTechnology,
290 -> EncryptedTechnology,
294 -> BgpValidityState,
295 -> IPSecSPI,
296 -> GreKey,
297 -> NatType,
298 -> InitiatorPackets,
299 -> ResponderPackets,
300 -> ObservationDomainName,
301 -> SelectionSequenceId,
302 -> SelectorId,
303 -> InformationElementId,
304 -> SelectorAlgorithm,
305 -> SamplingPacketInterval,
306 -> SamplingPacketSpace,
307 -> SamplingTimeInterval,
308 -> SamplingTimeSpace,
309 -> SamplingSize,
310 -> SamplingPopulation,
311 -> SamplingProbability,
312 -> DataLinkFrameSize,
313 -> IpHeaderPacketSection,
314 -> IpPayloadPacketSection,
315 -> DataLinkFrameSection,
316 -> MplsLabelStackSection,
317 -> MplsPayloadPacketSection,
318 -> SelectorIdTotalPktsObserved,
319 -> SelectorIdTotalPktsSelected,
320 -> AbsoluteError,
321 -> RelativeError,
322 -> ObservationTimeSeconds,
323 -> ObservationTimeMilliseconds,
324 -> ObservationTimeMicroseconds,
325 -> ObservationTimeNanoseconds,
326 -> DigestHashValue,
327 -> HashIPPayloadOffset,
328 -> HashIPPayloadSize,
329 -> HashOutputRangeMin,
330 -> HashOutputRangeMax,
331 -> HashSelectedRangeMin,
332 -> HashSelectedRangeMax,
333 -> HashDigestOutput,
334 -> HashInitialiserValue,
335 -> SelectorName,
336 -> UpperCILimit,
337 -> LowerCILimit,
338 -> ConfidenceLevel,
339 -> InformationElementDataType,
340 -> InformationElementDescription,
341 -> InformationElementName,
342 -> InformationElementRangeBegin,
343 -> InformationElementRangeEnd,
344 -> InformationElementSemantics,
345 -> InformationElementUnits,
346 -> PrivateEnterpriseNumber,
347 -> VirtualStationInterfaceId,
348 -> VirtualStationInterfaceName,
349 -> VirtualStationUUID,
350 -> VirtualStationName,
351 -> Layer2SegmentId,
352 -> Layer2OctetDeltaCount,
353 -> Layer2OctetTotalCount,
354 -> IngressUnicastPacketTotalCount,
355 -> IngressMulticastPacketTotalCount,
356 -> IngressBroadcastPacketTotalCount,
357 -> EgressUnicastPacketTotalCount,
358 -> EgressBroadcastPacketTotalCount,
359 -> MonitoringIntervalStartMilliSeconds,
360 -> MonitoringIntervalEndMilliSeconds,
361 -> PortRangeStart,
362 -> PortRangeEnd,
363 -> PortRangeStepSize,
364 -> PortRangeNumPorts,
365 -> StaMacAddress,
366 -> StaIPv4Address,
367 -> WtpMacAddress,
368 -> IngressInterfaceType,
369 -> EgressInterfaceType,
370 -> RtpSequenceNumber,
371 -> UserName,
372 -> ApplicationCategoryName,
373 -> ApplicationSubCategoryName,
374 -> ApplicationGroupName,
375 -> OriginalFlowsPresent,
376 -> OriginalFlowsInitiated,
377 -> OriginalFlowsCompleted,
378 -> DistinctCountOfSourceIPAddress,
379 -> DistinctCountOfDestinationIPAddress,
380 -> DistinctCountOfSourceIPv4Address,
381 -> DistinctCountOfDestinationIPv4Address,
382 -> DistinctCountOfSourceIPv6Address,
383 -> DistinctCountOfDestinationIPv6Address,
384 -> ValueDistributionMethod,
385 -> Rfc3550JitterMilliseconds,
386 -> Rfc3550JitterMicroseconds,
387 -> Rfc3550JitterNanoseconds,
388 -> Dot1qDEI,
389 -> Dot1qCustomerDEI,
390 -> FlowSelectorAlgorithm,
391 -> FlowSelectedOctetDeltaCount,
392 -> FlowSelectedPacketDeltaCount,
393 -> FlowSelectedFlowDeltaCount,
394 -> SelectorIDTotalFlowsObserved,
395 -> SelectorIDTotalFlowsSelected,
396 -> SamplingFlowInterval,
397 -> SamplingFlowSpacing,
398 -> FlowSamplingTimeInterval,
399 -> FlowSamplingTimeSpacing,
400 -> HashFlowDomain,
401 -> TransportOctetDeltaCount,
402 -> TransportPacketDeltaCount,
403 -> OriginalExporterIPv4Address,
404 -> OriginalExporterIPv6Address,
405 -> OriginalObservationDomainId,
406 -> IntermediateProcessId,
407 -> IgnoredDataRecordTotalCount,
408 -> DataLinkFrameType,
409 -> SectionOffset,
410 -> SectionExportedOctets,
411 -> Dot1qServiceInstanceTag,
412 -> Dot1qServiceInstanceId,
413 -> Dot1qServiceInstancePriority,
414 -> Dot1qCustomerSourceMacAddress,
415 -> Dot1qCustomerDestinationMacAddress,
417 -> PostLayer2OctetDeltaCount,
418 -> PostMCastLayer2OctetDeltaCount,
420 -> PostLayer2OctetTotalCount,
421 -> PostMCastLayer2OctetTotalCount,
422 -> MinimumLayer2TotalLength,
423 -> MaximumLayer2TotalLength,
424 -> DroppedLayer2OctetDeltaCount,
425 -> DroppedLayer2OctetTotalCount,
426 -> IgnoredLayer2OctetTotalCount,
427 -> NotSentLayer2OctetTotalCount,
428 -> Layer2OctetDeltaSumOfSquares,
429 -> Layer2OctetTotalSumOfSquares,
430 -> Layer2FrameDeltaCount,
431 -> Layer2FrameTotalCount,
432 -> PseudoWireDestinationIPv4Address,
433 -> IgnoredLayer2FrameTotalCount
)
} | ConnorDillon/ipfix | src/main/scala/ipfix/ie/DefaultIEMap.scala | Scala | gpl-3.0 | 12,920 |
package com.datastax.spark.connector.types
/**
 * Type adapters that serve as a middle step in conversion from one type to another.
 *
 * Adapters are utilized by types with a schema ([[UserDefinedType]], [[TupleType]]) to convert an instance of
 * a type to the corresponding adapter and then to the final value of the given type.
 */
private[spark] object TypeAdapters {
  /**
   * Adapter for multi-value types that may be returned as a sequence.
   *
   * It is used to extend the conversion capabilities offered by the Tuple type.
   */
  trait ValuesSeqAdapter {
    def toSeq(): Seq[Any]
  }
  /**
   * Adapter for multi-value types that may return values by name.
   *
   * It is used to extend the conversion capabilities offered by the UDT type.
   */
  trait ValueByNameAdapter {
    def getByName(name: String): Any
  }
}
| datastax/spark-cassandra-connector | driver/src/main/scala/com/datastax/spark/connector/types/TypeAdapters.scala | Scala | apache-2.0 | 825 |
package es.weso.wiFetcher.utils
import scala.collection.mutable.Queue
import es.weso.wiFetcher.entities.issues._
import org.slf4j.LoggerFactory
import org.slf4j.Logger
/**
 * A template describing issues (error or warning messages plus their
 * location) that should be filtered out. A field left as None matches
 * any value; a defined field must equal the corresponding issue field.
 */
case class FilterIssue(
  val message: Option[String] = None,
  val path: Option[String] = None,
  val sheetName: Option[String] = None,
  val col: Option[Int] = None,
  val row: Option[Int] = None,
  val cell: Option[String] = None) {

  /**
   * Returns true when the given issue matches this template, i.e. every
   * defined field of the template equals the corresponding issue field.
   *
   * NOTE(review): exactly as in the previous implementation, each defined
   * Option is compared directly against the issue's field
   * (`Option == issueField`). Confirm that the corresponding fields of
   * [[Issue]] are Options as well — otherwise a defined field can never
   * match and such a template filters nothing.
   */
  def filter(issue: Issue): Boolean =
    (message.isEmpty || message == issue.message) &&
      (path.isEmpty || path == issue.path) &&
      (sheetName.isEmpty || sheetName == issue.sheetName) &&
      (col.isEmpty || col == issue.col) &&
      (row.isEmpty || row == issue.row) &&
      (cell.isEmpty || cell == issue.cell)
}
/**
 * This class represents an issue manager. It collects all errors and
 * warnings generated during the execution of the application, together
 * with a set of [[FilterIssue]] templates used to suppress matching issues.
 */
class IssueManagerUtils() {

  // Recorded issues, in insertion order.
  private val issues: Queue[Issue] = Queue.empty
  // Templates describing issues that filteredAsSeq should suppress.
  private val filters: Queue[FilterIssue] = Queue.empty

  private val logger: Logger = LoggerFactory.getLogger(this.getClass())

  /** Removes all recorded issues. */
  def clear: Unit = issues.clear

  /** Removes all registered filters. */
  def clearFilters: Unit = filters.clear

  /**
   * Adds a new issue to the list, logging it at a level that matches its
   * severity.
   */
  def add(issue: Issue): Unit = {
    issue match {
      case e: Error => logger.error(e.message)
      // Fixed: warnings were previously logged at error level here
      // (and at info level in addWarn); log them consistently as warnings.
      case w: Warn => logger.warn(w.message)
    }
    issues += issue
  }

  /**
   * Registers a filter template used to suppress matching issues.
   */
  def addFilter(filter: FilterIssue): Unit = {
    filters += filter
  }

  /**
   * Records an error message (and its optional location) in the list.
   */
  def addError(message: String, path: Option[String] = None,
    sheetName: Option[String] = None, col: Option[Int] = None,
    row: Option[Int] = None, cell: Option[String] = None): Unit = {
    logger.error(message)
    issues += Error(message, path, sheetName, col, row, cell)
  }

  /**
   * Records a warning message (and its optional location) in the list.
   */
  def addWarn(message: String, path: Option[String] = None,
    sheetName: Option[String] = None, col: Option[Int] = None,
    row: Option[Int] = None, cell: Option[String] = None): Unit = {
    // Fixed for consistency with add(): warnings are logged at warn level.
    logger.warn(message)
    issues += Warn(message, path, sheetName, col, row, cell)
  }

  /**
   * Returns the recorded issues with every issue matched by at least one
   * registered filter removed.
   */
  def filteredAsSeq: List[Issue] =
    issues.toList.filterNot(issue => filters.exists(_.filter(issue)))

  /** Returns all recorded issues, unfiltered. */
  def asSeq: List[Issue] = issues.toList
}
package com.metl.model
import com.metl.TimeSpanParser
import com.metl.external.{Detail, ExternalGradebook, LiftAuthStateData, LiftAuthStateDataForbidden, LtiIntegration, MeTLingPotAdaptor, Member, OrgUnit, GroupsProvider => ExternalGroupsProvider}
import com.metl.utils._
import net.liftweb.http.{LiftRules, LiftSession, SessionVar, SessionVarHelper}
import net.liftweb.common._
import net.liftweb.util.Helpers._
import net.liftweb.util.Props
import scala.xml._
import scala.util._
import com.metl.renderer.RenderDescription
/** Raised when a required configuration property cannot be resolved. */
case class PropertyNotFoundException(key: String) extends Exception(key) {
  override def getMessage: String = s"Property not found: $key"
}
/**
 * Mixin providing helpers for reading configuration values from Lift
 * properties and from XML nodes. Every reader traces the looked-up value
 * through `traceIt` at trace level.
 */
trait PropertyReader extends Logger {
  /**
   * Reads a Lift property, falling back to the given default.
   *
   * @throws PropertyNotFoundException when the key is absent and no default
   *                                   was supplied.
   */
  def readProperty(key: String, default: Option[String] = None): String = traceIt("readProperty",key,{
    Props.get(key).getOrElse(default.getOrElse(throw PropertyNotFoundException(key)))
  })
  /**
   * Evaluates the by-name argument exactly once, traces the result and
   * returns it.
   */
  protected def traceIt[A](label:String,param:String,in: => A):A = {
    val res = in
    // Log the already-computed result. The previous version formatted the
    // message with `in`, which evaluated the by-name parameter a second
    // time (repeating any side effects and work).
    trace("%s(%s) : %s".format(label,param,res))
    res
  }
  /** All child nodes of `node` with the given tag name. */
  def readNodes(node: NodeSeq, tag: String): Seq[NodeSeq] = traceIt("readNodes",tag,node \\\\ tag)
  /** First child node with the given tag, or NodeSeq.Empty when absent. */
  def readNode(node: NodeSeq, tag: String): NodeSeq = traceIt("readNode",tag,readNodes(node, tag).headOption.getOrElse(NodeSeq.Empty))
  /** Text of the first matching child node, if any. */
  def readText(node: NodeSeq, tag: String): Option[String] = traceIt("readText",tag,readNodes(node, tag).headOption.map(_.text))
  def readInt(node:NodeSeq,tag:String):Option[Int] = traceIt("readInt",tag,readNodes(node,tag).headOption.map(_.text.toInt))
  def readLong(node:NodeSeq,tag:String):Option[Long] = traceIt("readLong",tag,readNodes(node,tag).headOption.map(_.text.toLong))
  def readBool(node:NodeSeq,tag:String):Option[Boolean] = traceIt("readBool",tag,readNodes(node,tag).headOption.map(_.text.toBoolean))
  def readTimespan(node:NodeSeq,tag:String):Option[TimeSpan] = traceIt("readTimespan",tag,readNodes(node,tag).headOption.map(v => TimeSpanParser.parse(v.text)))
  /**
   * Text of the first matching child node; throws when the node is missing
   * or its text is blank.
   */
  def readMandatoryText(node: NodeSeq, tag: String): String = traceIt("readMandatoryText",tag,readNodes(node, tag).headOption.map(_.text match {
    case s: String if s.trim.isEmpty => throw new Exception("mandatory field (%s) not supplied in expected node %s".format(tag, node))
    case other => other.trim
  }).getOrElse({
    throw new Exception("mandatory field (%s) not supplied in expected node %s".format(tag, node))
  }))
  /** Attribute value on an element node, or "" when absent / not an element. */
  def readAttribute(node:NodeSeq,attrName:String):String = traceIt("readAttribute",attrName,node match {
    case e:Elem => e.attribute(attrName).map(a => a.text).getOrElse("")
    case _ => ""
  })
  /** Attribute value; throws when missing or blank. */
  def readMandatoryAttribute(node:NodeSeq,attrName:String):String = traceIt("readMandatoryAttribute",attrName,readAttribute(node,attrName) match {
    case s: String if s.trim.isEmpty => throw new Exception("mandatory attr (%s) not supplied in expected node %s".format(attrName, node))
    case other => other.trim
  })
}
// Application-wide configuration and session state. All of the vals below are
// initialized eagerly when this object is first touched; a missing
// -Dmetlx.configurationFile system property aborts startup with an exception.
object Globals extends PropertyReader with Logger {
  // Integrations (TokBox etc.) are only wired up when -Dstackable.spending=enabled.
  val liveIntegration = System.getProperty("stackable.spending") match {
    case "enabled" => true
    case _ => false
  }
  // MeTLing-pot chunking tuning, with defaults when the properties are absent
  // or unparsable (Try swallows the NumberFormatException).
  val chunkingTimeout = Try(System.getProperty("metlingpot.chunking.timeout").toInt).toOption match {
    case Some(milis) => milis
    case _ => 3000
  }
  val chunkingThreshold = Try(System.getProperty("metlingpot.chunking.strokeThreshold").toInt).toOption match {
    case Some(strokes) => strokes
    case _ => 5
  }
  warn("Integrations are live: %s".format(liveIntegration))
  warn("Chunking: %s %s".format(chunkingTimeout,chunkingThreshold))
  // Fail fast when the configuration file location is null or empty.
  val configurationFileLocation = System.getProperty("metlx.configurationFile")
  List(configurationFileLocation).filter(prop => prop match {
    case null => true
    case "" => true
    case _ => false
  }) match {
    case Nil => {}
    case any => {
      val e = new Exception("properties not provided, server cannot start")
      error("please ensure that the following properties are set on the command-line when starting the WAR: %s".format(any),e)
      throw e
    }
  }
  // The XML configuration file everything below is read from.
  val propFile = XML.load(configurationFileLocation)
  val scheme = readText((propFile \\\\ "serverAddress"),"scheme").filterNot(_ == "")
  val host = readText((propFile \\\\ "serverAddress"),"hostname").filterNot(_ == "")
  val port = readText((propFile \\\\ "serverAddress"),"port").filterNot(_ == "").map(_.toInt)
  val importerParallelism = (propFile \\\\ "importerPerformance").headOption.map(ipn => readAttribute(ipn,"parallelism").toInt).filter(_ > 0).getOrElse(1)
  var isDevMode:Boolean = true
  // TokBox video integration; only constructed when liveIntegration is set
  // and the <tokBox apiKey=".." secret=".."/> node is present.
  var tokBox = if(liveIntegration) for {
    tbNode <- (propFile \\\\ "tokBox").headOption
    apiKey <- (tbNode \\\\ "@apiKey").headOption.map(_.text.toInt)
    secret <- (tbNode \\\\ "@secret").headOption.map(_.text)
  } yield {
    new TokBox(apiKey,secret)
  } else None
  // Optional overrides for Lift's runtime tuning knobs; each setter is only
  // invoked when the corresponding element exists in <liftConfiguration>.
  val liftConfig = (propFile \\\\ "liftConfiguration")
  readBool(liftConfig,"allowParallelSnippets").foreach(allowParallelSnippets => {
    LiftRules.allowParallelSnippets.session.set(allowParallelSnippets)
  })
  readInt(liftConfig,"maxConcurrentRequestsPerSession").foreach(maxRequests => {
    LiftRules.maxConcurrentRequests.session.set((r:net.liftweb.http.Req)=>maxRequests)
  })
  readInt(liftConfig,"cometRequestTimeout").foreach(cometTimeout => {
    LiftRules.cometRequestTimeout = Full(cometTimeout) //defaults to Empty, which results in 120000
  })
  readLong(liftConfig,"cometRenderTimeout").foreach(cometTimeout => {
    LiftRules.cometRenderTimeout = cometTimeout //defaults to 30000
  })
  readLong(liftConfig,"cometFailureRetryTimeout").foreach(cometTimeout => {
    LiftRules.cometFailureRetryTimeout = cometTimeout //defaults to 10000
  })
  readLong(liftConfig,"cometProcessingTimeout").foreach(cometTimeout => {
    LiftRules.cometProcessingTimeout = cometTimeout //defaults to 5000
  })
  readInt(liftConfig,"cometGetTimeout").foreach(cometTimeout => {
    LiftRules.cometGetTimeout = cometTimeout // this defaults to 140000
  })
  readLong(liftConfig,"maxMimeFileSize").foreach(maxUploadSize => {
    LiftRules.maxMimeFileSize = maxUploadSize
  })
  readLong(liftConfig,"maxMimeSize").foreach(maxMimeSize => {
    LiftRules.maxMimeSize = maxMimeSize
  })
  readBool(liftConfig,"bufferUploadsOnDisk").filter(_ == true).foreach(y => {
    LiftRules.handleMimeFile = net.liftweb.http.OnDiskFileParamHolder.apply
  })
  readInt(liftConfig,"ajaxPostTimeout").foreach(ajaxTimeout => {
    LiftRules.ajaxPostTimeout = ajaxTimeout // this defaults to 5000
  })
  readInt(liftConfig,"ajaxRetryCount").foreach(retryCount => {
    LiftRules.ajaxRetryCount = Full(retryCount) // this defaults to empty, which means keep retrying forever
  })
  readBool(liftConfig,"enableLiftGC").foreach(gc => {
    LiftRules.enableLiftGC = gc
  })
  readLong(liftConfig,"liftGCFailureRetryTimeout").foreach(value => {
    LiftRules.liftGCFailureRetryTimeout = value
  })
  readLong(liftConfig,"liftGCPollingInterval").foreach(value => {
    LiftRules.liftGCPollingInterval = value
  })
  readLong(liftConfig,"unusedFunctionsLifeTime").foreach(value => {
    LiftRules.unusedFunctionsLifeTime = value
  })
  readInt(liftConfig,"stdRequestTimeout").foreach(value => {
    LiftRules.stdRequestTimeout = Full(value)
  })
  // Comet actor lifespans, each with a two-minute default.
  val cometConfig = (propFile \\\\ "cometConfiguration")
  val metlActorLifespan = Full(readTimespan(cometConfig,"metlActorLifespan").getOrElse(2 minutes))
  val searchActorLifespan = Full(readTimespan(cometConfig,"conversationSearchActorLifespan").getOrElse(2 minutes))
  val conversationChooserActorLifespan = Full(readTimespan(cometConfig,"remotePluginConversationChooserActorLifespan").getOrElse(2 minutes))
  val remotePluginConversationChooserActorLifespan = Full(readTimespan(cometConfig,"remotePluginConversationChooserActorLifespan").getOrElse(2 minutes))
  val editConversationActorLifespan = Full(readTimespan(cometConfig,"conversationEditActorLifespan").getOrElse(2 minutes))
  // External plugin adaptors configured from XML; configuration failures
  // (the Left side) are silently dropped and result in empty lists.
  var metlingPots:List[MeTLingPotAdaptor] = ExternalMeTLingPotAdaptors.configureFromXml(readNode(propFile,"metlingPotAdaptors")).right.toOption.getOrElse(Nil)
  var ltiIntegrationPlugins:List[LtiIntegration] = ExternalLtiIntegrations.configureFromXml(readNode(propFile,"lti")).right.toOption.getOrElse(Nil)
  val cloudConverterApiKey = readText(propFile,"cloudConverterApiKey").getOrElse("")
  val themeName = readText(propFile,"themeName").getOrElse("neutral")
  val googleAnalytics = ("stackable",readText(propFile,"googleAnalytics"))
  val clientGoogleAnalytics = ("client",readText(propFile,"clientGoogleAnalytics"))
  val h2ThreadPoolMultiplier = readInt(propFile,"h2ThreadPoolMultiplier").getOrElse(8)
  // Naming helpers for per-user/per-location message buses.
  def stackOverflowName(location:String):String = "%s_StackOverflow_%s".format(location,currentUser.is)
  def stackOverflowName(who:String,location:String):String = "%s_StackOverflow_%s".format(location,who)
  def noticesName(user:String):String = "%s_Notices".format(user)
  // NOTE(review): duplicates the top-level PropertyNotFoundException case
  // class defined in this same file; this nested definition shadows it
  // inside Globals. Confirm whether the duplication is intentional.
  case class PropertyNotFoundException(key: String) extends Exception(key) {
    override def getMessage: String = "Property not found: " + key
  }
  object currentStack extends SessionVar[Topic](Topic.defaultValue)
  def getUserGroups:List[OrgUnit] = {
    casState.is.eligibleGroups.toList
  }
  var userProfileProvider:Option[UserProfileProvider] = Some(new CachedInMemoryProfileProvider())
  var groupsProviders:List[ExternalGroupsProvider] = Nil
  var gradebookProviders:List[ExternalGradebook] = Nil
  def getGradebookProvider(providerId:String):Option[ExternalGradebook] = gradebookProviders.find(_.id == providerId)
  def getGradebookProviders:List[ExternalGradebook] = gradebookProviders
  def getGroupsProvider(providerStoreId:String):Option[ExternalGroupsProvider] = getGroupsProviders.find(_.storeId == providerStoreId)
  def getGroupsProviders:List[ExternalGroupsProvider] = groupsProviders
  // Optional SMTP mailer, only built when every <mailer> child is present.
  var mailer:Option[SimpleMailer] = for {
    mailerNode <- (propFile \\\\ "mailer").headOption
    smtp <- readText(mailerNode, "smtp")
    port <- readInt(mailerNode, "port")
    ssl <- readBool(mailerNode, "ssl")
    username <- readText(mailerNode, "username")
    password <- readText(mailerNode, "password")
    fromAddress <- readText(mailerNode, "fromAddress")
    recipients <- Some(readNodes(readNode(mailerNode, "recipients"),"recipient").map(_.text).toList)
  } yield {
    SimpleMailer(smtp, port, ssl, username, password, Full(fromAddress), recipients)
  }
  // Per-session authentication state, derived from the servlet container's
  // session attributes (user, authenticated, userGroups, userAttributes).
  object casState {
    import net.liftweb.http.S
    // Cached auth state for this session; None until assumeContainerSession runs.
    private object validState extends SessionVar[Option[LiftAuthStateData]](None){
      def getForSession(s:LiftSession):Option[LiftAuthStateData] = {
        SessionVarHelper.getFromSession(s,name).openOr(None)
      }
    }
    def getAuthStateForSession(s:LiftSession):Option[LiftAuthStateData] = validState.getForSession(s)
    def is:LiftAuthStateData = {
      validState.is.getOrElse({
        assumeContainerSession
      })
    }
    // The username actually authenticated by the container (survives impersonation).
    private object actualUsername extends SessionVar[String]("forbidden")
    private object actuallyIsImpersonator extends SessionVar[Boolean](false)
    // Special roles are modelled as membership of "special" org units.
    def isSuperUser:Boolean = {
      is.eligibleGroups.exists(g => g.ouType == "special" && g.name == "superuser")
    }
    def isAnalyst:Boolean = {
      is.eligibleGroups.exists(g => g.ouType == "special" && g.name == "analyst")
    }
    def isImpersonator:Boolean = actuallyIsImpersonator.is
    def authenticatedUsername:String = actualUsername.is
    // Switches the session's effective identity to newUsername; only
    // permitted for members of the "impersonator" special group, otherwise
    // returns the forbidden sentinel state.
    def impersonate(newUsername:String,personalAttributes:List[Tuple2[String,String]] = Nil):LiftAuthStateData = {
      if (isImpersonator){
        val prelimAuthStateData = LiftAuthStateData(true,newUsername,Nil,(personalAttributes.map(pa => Detail(pa._1,pa._2)) ::: userProfileProvider.toList.flatMap(_.getProfiles(newUsername).right.toOption.toList.flatten.flatMap(_.foreignRelationships.toList)).map(pa => Detail(pa._1,pa._2))))
        val groups = Globals.groupsProviders.filter(_.canRestrictConversations).flatMap(_.getGroupsFor(prelimAuthStateData))
        val personalDetails = Globals.groupsProviders.flatMap(_.getPersonalDetailsFor(prelimAuthStateData))
        val impersonatedState = LiftAuthStateData(true,newUsername,groups,personalDetails)
        validState(Some(impersonatedState))
        SecurityListener.ensureSessionRecord
        impersonatedState
      } else {
        LiftAuthStateDataForbidden
      }
    }
    // Builds the auth state from the servlet container session attributes,
    // enriches it via the groups providers, caches it, and records the
    // profile. Returns the forbidden sentinel when unauthenticated or when
    // no container session exists.
    def assumeContainerSession:LiftAuthStateData = {
      S.containerSession.map(s => {
        val username = s.attribute("user").asInstanceOf[String]
        val authenticated = s.attribute("authenticated").asInstanceOf[Boolean]
        val userGroups = s.attribute("userGroups").asInstanceOf[List[Tuple2[String,String]]].map(t => OrgUnit(t._1,t._2,List(Member(username,Nil,None)),Nil))
        val userAttributes = s.attribute("userAttributes").asInstanceOf[List[Tuple2[String,String]]]
        val prelimAuthStateData = LiftAuthStateData(authenticated,username,userGroups,userAttributes.map(ua => Detail(ua._1,ua._2)))
        if (authenticated){
          actualUsername(username)
          val groups = Globals.groupsProviders.filter(_.canRestrictConversations).flatMap(_.getGroupsFor(prelimAuthStateData))
          actuallyIsImpersonator(groups.exists(g => g.ouType == "special" && g.name == "impersonator"))
          val personalDetails = Globals.groupsProviders.flatMap(_.getPersonalDetailsFor(prelimAuthStateData))
          val lasd = LiftAuthStateData(true,username,groups,personalDetails)
          validState(Some(lasd))
          userProfileProvider.foreach(upp => {
            upp.updateUserProfile(lasd)
          })
          info("generated authState: %s".format(lasd))
          lasd
        } else {
          LiftAuthStateDataForbidden
        }
      }).getOrElse({
        LiftAuthStateDataForbidden
      })
    }
  }
  // Effective username of the current session (impersonated name when impersonating).
  object currentUser {
    def is:String = casState.is.username
  }
  // special roles
  def isSuperUser:Boolean = casState.isSuperUser
  def isImpersonator:Boolean = casState.isImpersonator
  def isAnalyst:Boolean = casState.isAnalyst
  def assumeContainerSession:LiftAuthStateData = casState.assumeContainerSession
  def impersonate(newUsername:String,personalAttributes:List[Tuple2[String,String]] = Nil):LiftAuthStateData = casState.impersonate(newUsername,personalAttributes)
}
// Per-session flag marking whether this session belongs to an interactive user.
object IsInteractiveUser extends SessionVar[Box[Boolean]](Full(true))
// Per-session crypto helpers; Empty until an encrypted stream / handshake is set up.
object CurrentStreamEncryptor extends SessionVar[Box[Crypto]](Empty)
object CurrentHandshakeEncryptor extends SessionVar[Box[Crypto]](Empty)
/** Render target sizes used for conversation snapshots and printing. */
object ThumbnailSizes {
  val printDpi = 100
  val ThumbnailSize = new RenderDescription(320,240)
  val SmallSize = new RenderDescription(640,480)
  val MediumSize = new RenderDescription(1024,768)
  val LargeSize = new RenderDescription(1920,1080)
  val PrintSize = new RenderDescription(21 * printDpi, 29 * printDpi)
  // Only thumbnails are rendered as snapshots at present; the larger sizes
  // stay available through parse.
  val snapshotSizes = List(ThumbnailSize)
  // Lookup table backing parse; keys are lowercase size names.
  private val sizesByName: Map[String, RenderDescription] = Map(
    "thumbnail" -> ThumbnailSize,
    "small" -> SmallSize,
    "medium" -> MediumSize,
    "large" -> LargeSize,
    "print" -> PrintSize)
  /**
   * Resolves a size name (trimmed, case-insensitive) to its
   * RenderDescription, defaulting to ThumbnailSize for unknown names.
   */
  def parse(size:String):RenderDescription =
    sizesByName.getOrElse(size.trim.toLowerCase, ThumbnailSize)
}
//object UserAgent extends SessionVar[Box[String]](S.userAgent)
| StackableRegiments/analyticalmetlx | src/main/scala/com/metl/model/Globals.scala | Scala | apache-2.0 | 15,250 |
/** Project Euler problem 2: sum of even Fibonacci numbers below four million. */
object Prob2 extends App {
  val num = 4000000
  // Fibonacci numbers generated by iterating consecutive pairs:
  // (0,1) -> (1,1) -> (1,2) -> ... ; taking the first component yields 0, 1, 1, 2, 3, 5, ...
  val fibonacci: Stream[BigInt] =
    Stream.iterate((BigInt(0), BigInt(1))) { case (a, b) => (b, a + b) }.map(_._1)
  println(fibonacci.takeWhile(_ < num).filter(_ % 2 == 0).sum)
}
package com.sfxcode.nosql.mongo.bson
import java.math.BigInteger
import java.time.{LocalDate, LocalDateTime, ZoneId}
import java.util.Date
import org.mongodb.scala.Document
import org.mongodb.scala.bson.BsonArray.fromIterable
import org.mongodb.scala.bson.{ObjectId, _}
import scala.jdk.CollectionConverters._
import scala.collection.mutable
import scala.util.matching.Regex
/**
 * Conversions between plain Scala/Java values and BSON values, plus
 * dotted-key-path lookup and update helpers for mongo Documents.
 */
object BsonConverter {

  // Separator used in compound keys such as "address.city".
  val DocumentKeyDivider = "."

  def hasRelation(key: String): Boolean = key.indexOf(DocumentKeyDivider) != -1

  // "a.b.c" -> "a" (the first path segment).
  def relationKey(key: String): String = key.substring(0, key.indexOf(DocumentKeyDivider))

  // "a.b.c" -> "b.c" (everything after the first segment).
  def newKeyFromRelation(key: String): String = key.substring(key.indexOf(DocumentKeyDivider) + 1)

  // "a.b.c" -> "c" (the last path segment).
  def lastKeyFromRelation(key: String): String = key.substring(key.lastIndexOf(DocumentKeyDivider) + 1)

  /**
   * Looks up a (possibly dotted) key path in a document, recursing through
   * embedded documents; returns the converted value or None.
   *
   * NOTE(review): for a relational key the sub-document is looked up three
   * times (contains + isDefined + get) — correct but redundant work.
   */
  def documentValueOption(document: Document, key: String): Option[Any] =
    if (hasRelation(key)) {
      val newKey = newKeyFromRelation(key)
      val relation = relationKey(key)
      if (document.contains(relation) && documentValueOption(document, relation).isDefined) {
        val value = documentValueOption(document, relation).get
        value match {
          case document: Document =>
            documentValueOption(document, newKey)
          case _ =>
            None
        }
      }
      else
        None
    }
    else if (document.contains(key))
      Some(fromBson(document(key)))
    else
      None

  /**
   * Returns a copy of the document with the (possibly dotted) key path set
   * to the given value; intermediate documents are created as needed.
   * The immutable/mutable conversion is done by round-tripping through JSON.
   */
  def updateDocumentValue(document: Document, key: String, value: Any): Document = {
    val doc = org.mongodb.scala.bson.collection.mutable.Document(document.toJson())
    val result = updateDocumentValueInternal(doc, key, value)
    Document(result.toJson())
  }

  // Recursive worker for updateDocumentValue: walks/creates the chain of
  // sub-documents for each path segment, mutating `document` in place.
  // `root` carries the top-level document so the outermost result is
  // returned once recursion bottoms out.
  private def updateDocumentValueInternal(
      document: org.mongodb.scala.bson.collection.mutable.Document,
      key: String,
      value: Any,
      root: Option[org.mongodb.scala.bson.collection.mutable.Document] = None
  ): org.mongodb.scala.bson.collection.mutable.Document =
    if (hasRelation(key)) {
      val newKey = newKeyFromRelation(key)
      val relation = relationKey(key)
      var relatedDocument = Document()
      val relationValue = documentValueOption(Document(document.toJson()), relation)
      if (relationValue.isDefined) {
        val value = relationValue.get
        value match {
          case document: Document =>
            relatedDocument = document
          case _ =>
        }
      }
      val mutableDoc = org.mongodb.scala.bson.collection.mutable.Document.apply(relatedDocument.toJson())
      document.put(relation, mutableDoc)
      if (root.isEmpty)
        updateDocumentValueInternal(mutableDoc, newKey, value, Some(document))
      else
        updateDocumentValueInternal(mutableDoc, newKey, value, root)
    }
    else {
      document.put(key, toBson(value))
      if (root.isEmpty)
        document
      else
        root.get
    }

  // Pluggable hook for converting application-specific classes; replace to
  // extend toBson/fromBson with custom types.
  var converterPlugin: AbstractConverterPlugin = new BaseConverterPlugin()

  /**
   * Converts an arbitrary value to a BsonValue. Match order matters:
   * plugin-known classes take precedence over the built-in mappings, and
   * unmatched values fall through to the plugin's objectToBson / BsonNull.
   */
  def toBson(value: Any): BsonValue =
    value match {
      case bsonValue: BsonValue => bsonValue
      case option: Option[Any] =>
        if (option.isDefined)
          toBson(option.get)
        else
          BsonNull()
      case v: Any if converterPlugin.hasCustomClass(v) =>
        converterPlugin.toBson(v)
      case b: Boolean => BsonBoolean(b)
      case s: String => BsonString(s)
      case c: Char => BsonString(c.toString)
      case bytes: Array[Byte] => BsonBinary(bytes)
      case r: Regex => BsonRegularExpression(r)
      case d: Date => BsonDateTime(d)
      case ld: LocalDate =>
        // Local dates are anchored at start-of-day in the system default zone.
        BsonDateTime(Date.from(ld.atStartOfDay(ZoneId.systemDefault()).toInstant))
      case ldt: LocalDateTime =>
        BsonDateTime(Date.from(ldt.atZone(ZoneId.systemDefault()).toInstant))
      case oid: ObjectId => BsonObjectId(oid)
      case i: Int => BsonInt32(i)
      case l: Long => BsonInt64(l)
      case bi: BigInt => BsonInt64(bi.toLong)
      case bi: BigInteger => BsonInt64(bi.longValue())
      case d: Double => BsonDouble(d)
      case f: Float => BsonDouble(f)
      case bd: BigDecimal => BsonDecimal128.apply(bd)
      case bd: java.math.BigDecimal => BsonDecimal128.apply(bd)
      case doc: Document => BsonDocument(doc)
      case map: scala.collection.Map[_, _] =>
        // NOTE(review): `doc.+=` on a var holding an immutable Document
        // appears to rely on Scala's `l op= r` => `l = l op r` rewriting;
        // confirm the Document variant in scope supports this as intended.
        var doc = Document()
        map.keys.foreach { key =>
          val v = map(key)
          doc.+=(key.toString -> toBson(v))
        }
        BsonDocument(doc)
      case map: java.util.Map[_, _] =>
        var doc = Document()
        map
          .keySet()
          .asScala
          .foreach { key =>
            val v = map.get(key)
            doc.+=(key.toString -> toBson(v))
          }
        BsonDocument(doc)
      case it: Iterable[Any] =>
        fromIterable(it.map(v => toBson(v)))
      case list: java.util.List[_] =>
        fromIterable(list.asScala.map(v => toBson(v)))
      case v: AnyRef => converterPlugin.objectToBson(v)
      case _ =>
        BsonNull()
    }

  /**
   * Converts a BsonValue back to a plain Scala/Java value. BsonNull maps to
   * null (not None); unhandled BSON types are returned unchanged.
   */
  def fromBson(value: BsonValue): Any =
    value match {
      case b: BsonBoolean => b.getValue
      case s: BsonString => s.getValue
      case bytes: BsonBinary => bytes.getData
      case r: BsonRegularExpression => r.getPattern
      case d: BsonDateTime => new Date(d.getValue)
      case d: BsonTimestamp => new Date(d.getTime)
      case oid: BsonObjectId => oid.getValue
      case i: BsonInt32 => i.getValue
      case l: BsonInt64 => l.getValue
      case d: BsonDouble => d.doubleValue()
      case d: BsonDecimal128 => d.getValue.bigDecimalValue()
      case doc: BsonDocument => Document(doc)
      case array: BsonArray =>
        array.getValues.asScala.toList.map(v => fromBson(v))
      case n: BsonNull => null
      case _ => value
    }

  /**
   * Recursively converts a Document into an immutable Map, turning nested
   * documents into nested maps.
   */
  def asMap(document: Document): Map[String, Any] = {
    val result = new mutable.HashMap[String, Any]()
    document.keySet.foreach { key =>
      val value = fromBson(document(key))
      value match {
        case d: Document =>
          result.+=(key -> asMap(d))
        case _ => result.+=(key -> value)
      }
    }
    result.toMap
  }

  /** Converts each document in the list via asMap, preserving order. */
  def asMapList(documents: List[Document]): List[Map[String, Any]] = {
    val result = new mutable.ArrayBuffer[Map[String, Any]]()
    documents.foreach(document => result.+=(asMap(document)))
    result.toList
  }
}
| sfxcode/simple-mongo | src/main/scala/com/sfxcode/nosql/mongo/bson/BsonConverter.scala | Scala | apache-2.0 | 6,602 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka
import java.util.Properties
import scala.collection.Map
import scala.reflect.{classTag, ClassTag}
import kafka.consumer.{Consumer, ConsumerConfig, ConsumerConnector, KafkaStream}
import kafka.serializer.Decoder
import kafka.utils.VerifiableProperties
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream._
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.util.ThreadUtils
/**
 * Input stream that pulls messages from a Kafka Broker.
 *
 * @param kafkaParams Map of kafka configuration parameters.
 *                    See: http://kafka.apache.org/configuration.html
 * @param topics Map of (topic_name to numPartitions) to consume. Each partition is consumed
 *               in its own thread.
 * @param useReliableReceiver when true, a ReliableKafkaReceiver is created
 *                            instead of the plain KafkaReceiver.
 * @param storageLevel RDD storage level.
 */
private[streaming]
class KafkaInputDStream[
    K: ClassTag,
    V: ClassTag,
    U <: Decoder[_]: ClassTag,
    T <: Decoder[_]: ClassTag](
    _ssc: StreamingContext,
    kafkaParams: Map[String, String],
    topics: Map[String, Int],
    useReliableReceiver: Boolean,
    storageLevel: StorageLevel
  ) extends ReceiverInputDStream[(K, V)](_ssc) with Logging {

  /** Builds the receiver variant selected by `useReliableReceiver`. */
  def getReceiver(): Receiver[(K, V)] = {
    if (useReliableReceiver) {
      new ReliableKafkaReceiver[K, V, U, T](kafkaParams, topics, storageLevel)
    } else {
      new KafkaReceiver[K, V, U, T](kafkaParams, topics, storageLevel)
    }
  }
}
// Receiver that connects to Kafka via a high-level consumer and stores each
// (key, message) pair into Spark. One handler thread is started per
// requested stream (sum of partition counts across topics).
private[streaming]
class KafkaReceiver[
    K: ClassTag,
    V: ClassTag,
    U <: Decoder[_]: ClassTag,
    T <: Decoder[_]: ClassTag](
    kafkaParams: Map[String, String],
    topics: Map[String, Int],
    storageLevel: StorageLevel
  ) extends Receiver[(K, V)](storageLevel) with Logging {

  // Connection to Kafka
  var consumerConnector: ConsumerConnector = null

  // Shuts down the consumer connection (and thereby the message streams);
  // safe to call when the receiver never started.
  def onStop() {
    if (consumerConnector != null) {
      consumerConnector.shutdown()
      consumerConnector = null
    }
  }

  def onStart() {
    logInfo("Starting Kafka Consumer Stream with group: " + kafkaParams("group.id"))

    // Kafka connection properties
    val props = new Properties()
    kafkaParams.foreach(param => props.put(param._1, param._2))

    val zkConnect = kafkaParams("zookeeper.connect")
    // Create the connection to the cluster
    logInfo("Connecting to Zookeeper: " + zkConnect)
    val consumerConfig = new ConsumerConfig(props)
    consumerConnector = Consumer.create(consumerConfig)
    logInfo("Connected to " + zkConnect)

    // Decoders are instantiated reflectively from the U/T class tags; both
    // must expose a (VerifiableProperties) constructor.
    val keyDecoder = classTag[U].runtimeClass.getConstructor(classOf[VerifiableProperties])
      .newInstance(consumerConfig.props)
      .asInstanceOf[Decoder[K]]
    val valueDecoder = classTag[T].runtimeClass.getConstructor(classOf[VerifiableProperties])
      .newInstance(consumerConfig.props)
      .asInstanceOf[Decoder[V]]

    // Create threads for each topic/message Stream we are listening
    val topicMessageStreams = consumerConnector.createMessageStreams(
      topics, keyDecoder, valueDecoder)

    val executorPool =
      ThreadUtils.newDaemonFixedThreadPool(topics.values.sum, "KafkaMessageHandler")
    try {
      // Start the messages handler for each partition
      topicMessageStreams.values.foreach { streams =>
        streams.foreach { stream => executorPool.submit(new MessageHandler(stream)) }
      }
    } finally {
      executorPool.shutdown() // Just causes threads to terminate after work is done
    }
  }

  // Handles Kafka messages: drains one KafkaStream, storing every
  // (key, message) pair; errors are reported to the receiver supervisor.
  private class MessageHandler(stream: KafkaStream[K, V])
    extends Runnable {
    def run() {
      logInfo("Starting MessageHandler.")
      try {
        val streamIterator = stream.iterator()
        while (streamIterator.hasNext()) {
          val msgAndMetadata = streamIterator.next()
          store((msgAndMetadata.key, msgAndMetadata.message))
        }
      } catch {
        case e: Throwable => reportError("Error handling message; exiting", e)
      }
    }
  }
}
| akopich/spark | external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala | Scala | apache-2.0 | 4,822 |
package core.http
import akka.http.scaladsl.server.Directives._
import akka.util.Timeout
import scala.concurrent.duration._
/**
 * Minimal HTTP API: answers GET /v2 (or /v2/) with a greeting, wrapped in the
 * CORS handler supplied by [[CorsSupport]].
 *
 * Public members now carry explicit type annotations: implicit definitions
 * without a declared type are fragile (and rejected in Scala 3).
 */
trait HttpRoute extends CorsSupport {
  // Upper bound for ask-pattern interactions initiated while handling a request.
  val timeoutPeriod: FiniteDuration = 3.seconds
  implicit val timeout: Timeout = Timeout(timeoutPeriod)
  // GET /v2 or /v2/ -> 200 "hello world!"
  val route = pathPrefix("v2") {
    corsHandler {
      pathEndOrSingleSlash {
        get {
          complete("hello world!")
        }
      }
    }
  }
} | sumanyu/n-body-akka | src/main/scala/core/http/HttpRoute.scala | Scala | apache-2.0 | 405 |
package net.fehmicansaglam.bson.element
import net.fehmicansaglam.bson.Implicits.BsonValueBinary
/** BSON element wrapping binary data under the given field name. */
case class BsonBinary(name: String, value: BsonValueBinary) extends BsonElement {
  // 0x05 is the element-type tag for "binary data" in the BSON specification.
  val code: Byte = 0x05
}
| cancobanoglu/tepkin | bson/src/main/scala/net/fehmicansaglam/bson/element/BsonBinary.scala | Scala | apache-2.0 | 207 |
/* *\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\* */
package squants.electro
import squants._
/**
* @author garyKeorkunian
* @since 0.1
*
* @param value value in [[squants.electro.Siemens]]
*/
final class ElectricalConductance private (val value: Double, val unit: ElectricalConductanceUnit)
    extends Quantity[ElectricalConductance] {
  def dimension = ElectricalConductance
  // conductance / length = conductivity (S/m)
  def /(that: Length): Conductivity = SiemensPerMeter(this.toSiemens / that.toMeters)
  // conductance / conductivity = length (m)
  def /(that: Conductivity): Length = Meters(this.toSiemens / that.toSiemensPerMeter)
  def toSiemens = to(Siemens)
  // Reciprocal: resistance in ohms. NOTE(review): uses the raw `value` rather than
  // `toSiemens`; equivalent only while Siemens is the sole registered unit — confirm
  // before adding more units to this dimension.
  def inOhms = Ohms(1.0 / value)
}
/** Dimension companion: factory, string parsing and the unit registry. */
object ElectricalConductance extends Dimension[ElectricalConductance] {
  private[electro] def apply[A](n: A, unit: ElectricalConductanceUnit)(implicit num: Numeric[A]) = new ElectricalConductance(num.toDouble(n), unit)
  def apply(value: Any) = parse(value)
  def name = "ElectricalConductance"
  def primaryUnit = Siemens
  def siUnit = Siemens
  def units = Set(Siemens)
}
/** Marker trait for units of this dimension; provides the value factory. */
trait ElectricalConductanceUnit extends UnitOfMeasure[ElectricalConductance] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = ElectricalConductance(n, this)
}
/** SI unit of electrical conductance (symbol "S"); also the primary unit here. */
object Siemens extends ElectricalConductanceUnit with PrimaryUnit with SiUnit {
  val symbol = "S"
}
/** Implicit DSL helpers (e.g. `3.siemens`) plus a Numeric instance for sums/sorts. */
object ElectricalConductanceConversions {
  lazy val siemen = Siemens(1)
  implicit class ElectricalConductanceConversions[A](n: A)(implicit num: Numeric[A]) {
    def siemens = Siemens(n)
  }
  implicit object ElectricalConductanceNumeric extends AbstractQuantityNumeric[ElectricalConductance](ElectricalConductance.primaryUnit)
}
| garyKeorkunian/squants | shared/src/main/scala/squants/electro/ElectricalConductance.scala | Scala | apache-2.0 | 2,097 |
import a.C1
println( /* path: a.C1 */ C1.getClass)
println(classOf[ /* path: a.C1 */ C1])
| katejim/intellij-scala | testdata/resolve2/import/path/Dir.scala | Scala | apache-2.0 | 91 |
/**
* COPYRIGHT (C) 2015 Alpine Data Labs Inc. All Rights Reserved.
*/
package com.alpine.plugin.core.io.defaults
import com.alpine.plugin.core.io.{HdfsRawTextDataset, OperatorInfo}
/**
* Default implementation.
* Developers wanting to change behaviour can extend HdfsRawTextDataset.
*/
abstract class AbstractHdfsRawTextDataset(val path: String,
                                          val sourceOperatorInfo: Option[OperatorInfo],
                                          val addendum: Map[String, AnyRef]
                                           )
  extends HdfsRawTextDataset {
  // The HDFS path doubles as the dataset's display name.
  override def displayName: String = path
}
/** Concrete HDFS raw-text dataset carrying only path, provenance and addendum. */
case class HdfsRawTextDatasetDefault(override val path: String,
                                     override val sourceOperatorInfo: Option[OperatorInfo],
                                     override val addendum: Map[String, AnyRef] = Map[String, AnyRef]()
                                      )
extends AbstractHdfsRawTextDataset(path, sourceOperatorInfo, addendum) | holdenk/PluginSDK | plugin-io-impl/src/main/scala/com/alpine/plugin/core/io/defaults/HdfsRawTextDatasetDefault.scala | Scala | apache-2.0 | 1,012 |
/*
* Copyright 2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package common {
import _root_.org.specs._
import _root_.org.specs.log.{Log => _}
import _root_.net.liftweb.common.Box._
import _root_.org.specs.runner._
import _root_.org.specs.Sugar._
import _root_.org.specs.ScalaCheck
import _root_.org.scalacheck.Gen._
import _root_.org.scalacheck._
import _root_.org.scalacheck.Arbitrary._
import _root_.org.scalacheck.Prop.{forAll}
// JUnit runner hook for the specification below.
class LoggingTest extends Runner(LoggingUnit) with JUnit
// Logger mixed into a top-level class: logs during construction.
class MyTopClass extends Logger {
  val x=1
  debug("Top level class logging")
}
// Logger mixed into a top-level object: logs during initialization.
object MyTopObj extends Logger {
  val x=1
  debug("Top level object logging")
}
/**
* Test relies on logback being on the classpath, so no configuration necessary
*/
/**
 * Specification exercising the Lift logging facade. Most examples only verify
 * that logging calls compile and run; the `must_==` assertions are structural.
 *
 * Fix: `MDC.clear` is a side-effecting arity-0 call and is now written with
 * parentheses everywhere, consistent with the "log static MDC values" example.
 */
object LoggingUnit extends Specification {
  "Logging" can {
    "be mixed directly into object" in {
      object MyObj extends Logger {
        info("direct Hello")
        val x = 2
      }
      MyObj.x must_== 2
      (new MyTopClass).x must_== 1
      MyTopObj.x must_==1
    }
    "be nested in object" in {
      object MyObj extends Loggable {
        logger.info("nested Hello")
        val x = 2
      }
      MyObj.x must_== 2
    }
    "create named loggers" in {
      val logger = Logger("MyLogger")
      logger.info("Logged with my named logger")
      1 must_== 1
    }
    // Mapped Diagnostic Context: values attached to every subsequent log line.
    "log static MDC values" in {
      val logger = Logger("StaticMDC")
      logger.info("Logged with no MDC")
      MDC.put("mdc1" -> (1,2))
      logger.info("Logged with mdc1=(1,2)")
      MDC.put("mdc2" -> "yy")
      logger.info("Logged with mdc1=(1,2), mdc2=yy")
      MDC.put("mdc1" -> 99)
      logger.info("Logged with mdc1=99, mdc2=yy")
      MDC.remove("mdc1")
      logger.info("Logged with mdc2=yy")
      MDC.clear()
      logger.info("Logged with no MDC")
      1 must_== 1
    }
    // logWith scopes MDC overrides and restores the previous values on exit.
    "save MDC context with logWith" in {
      val logger = Logger("logWith")
      logger.info("Logged with no MDC")
      MDC.put("mdc1" -> (1,2), "mdc2" -> "yy")
      logger.info("Logged with mdc1=(1,2), mdc2=yy")
      Logger.logWith("mdc2" -> "xx") {
        logger.info("Logged with mdc1=(1,2), mdc2=xx")
        Logger.logWith("mdc1" -> 99) {
          logger.info("Logged with mdc1=99, mdc2=xx")
        }
        logger.info("Logged with mdc1=(1,2), mdc2=xx")
      }
      logger.info("Logged with mdc1=(1,2), mdc2=yy")
      MDC.clear()
      logger.info("No MDC values")
      1 must_== 1
    }
    // trace(label, value) logs the value and returns it, so it can be threaded
    // through expressions without changing the result.
    "trace function results" in {
      object MyObj extends Logger {
        val l = 1 to 10
        info("Starting test")
        trace("result",l.foldLeft(0)(trace("lhs",_) + trace("rhs",_))) must_== l.foldLeft(0)(_+_)
        val x = 1
      }
      MyObj.x
    }
    // Each mixin site gets its own underlying logger instance.
    "be used in different levels and yield different loggers" in {
      class First {
        First.info("In first")
      }
      object First extends Logger
      trait Second {
        private val logger = Logger(classOf[Second])
        logger.info("In second")
      }
      class C extends First with Second with Logger {
        info("In C")
        val x = 2
      }
      (new C).x must_== 2
    }
  }
}
}
}
| wsaccaco/lift | framework/lift-base/lift-common/src/test/scala/net/liftweb/common/LoggingSpec.scala | Scala | apache-2.0 | 3,776 |
package edu.cmu.lti.oaqa.bagpipes.run
import edu.cmu.lti.oaqa.bagpipes.space.explorer.Explorer
import edu.cmu.lti.oaqa.bagpipes.space.explorer.SimpleExplorer._
import edu.cmu.lti.oaqa.bagpipes.configuration.Descriptors.ConfigurationDescriptor
import edu.cmu.lti.oaqa.bagpipes.space.ConfigurationSpace
import edu.cmu.lti.oaqa.bagpipes.configuration.Descriptors.CollectionReaderDescriptor
import edu.cmu.lti.oaqa.bagpipes.configuration.AbstractDescriptors._
import edu.cmu.lti.oaqa.bagpipes.controller.ExecutionController
import edu.cmu.lti.oaqa.bagpipes.executor.uima.UimaExecutor
import edu.cmu.lti.oaqa.bagpipes.space.explorer.DepthExplorer
import edu.cmu.lti.oaqa.bagpipes.space.explorer.BreadthExplorer
import edu.cmu.lti.oaqa.bagpipes.space.explorer.KBestPathExplorer
import edu.cmu.lti.oaqa.bagpipes.configuration.YAMLParser
object BagPipesRun {
  // TODO: expose the controller/explorer/executor choices as arguments or via
  // the YAML descriptor instead of hard-coding them here.
  /**
   * Parses the YAML pipeline descriptor, expands it into a configuration
   * space, and hands the resulting tree to the execution controller.
   */
  def run(descPath: String, baseDir: Option[String] = None, fromFile: Boolean = true): Unit = {
    // Descriptor -> ConfigurationDescriptor -> configuration-space tree.
    val descriptor = YAMLParser(baseDir).parse(descPath, fromFile)
    val space = ConfigurationSpace(descriptor).getSpace
    // Execute every pipeline in the space.
    SimpleUimaExecutionController.execute(space)
  }
}
// Default controller: depth-first exploration of the space, executed via UIMA.
object SimpleUimaExecutionController extends ExecutionController(DepthExplorer, UimaExecutor)
| oaqa/bagpipes | src/main/scala/edu/cmu/lti/oaqa/bagpipes/run/BagPipesRun.scala | Scala | apache-2.0 | 1,512 |
// Simple functional-programming exercise.
/*
def printlnArgs(args: Array[String]):Unit={
	args.foreach(println);
}
printlnArgs(args)
println("-----------------")
def formatArgs(args:Array[String])= args.mkString("fff")
println(formatArgs(Array("z","n"," t f")))
*/
import scala.io.Source
// For each line of the file named by the first argument, log its length and
// the width of that length when printed.
if(args.length>0){
	for(line<-Source.fromFile(args(0)).getLines)
	//print(line.length+" "+line)
	widthOfLength(line);
}
else
	Console.err.println("请输入文件名")
// Prints a diagnostic for the line and returns its length. The second number is
// the number of decimal digits in the length (length.toString.length).
def widthOfLength(s:String):Int={
	//println(s);
	println("xx"+s.length+"---toStringLength:"+s.length.toString.length)
	s.length
}
| peachyy/scalastu | 4fun.scala | Scala | apache-2.0 | 578 |
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.script.ScriptDefinition
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import com.sksamuel.exts.OptionImplicits._
import org.elasticsearch.search.aggregations.Aggregator
import org.elasticsearch.search.aggregations.bucket.terms.Terms
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
import org.elasticsearch.search.aggregations.support.ValueType
/**
 * Immutable builder for an Elasticsearch "terms" bucket aggregation.
 * Every setter returns an updated copy; unset options fall back to
 * Elasticsearch defaults when the aggregation is built.
 */
case class TermsAggregationDefinition(name: String,
                                      field: Option[String] = None,
                                      script: Option[ScriptDefinition] = None,
                                      missing: Option[AnyRef] = None,
                                      size: Option[Int] = None,
                                      minDocCount: Option[Long] = None,
                                      showTermDocCountError: Option[Boolean] = None,
                                      valueType: Option[ValueType] = None,
                                      executionHint: Option[String] = None,
                                      shardMinDocCount: Option[Long] = None,
                                      collectMode: Option[Aggregator.SubAggCollectionMode] = None,
                                      order: Option[Terms.Order] = None,
                                      shardSize: Option[Int] = None,
                                      includeExclude: Option[IncludeExclude] = None,
                                      pipelines: Seq[PipelineAggregationDefinition] = Nil,
                                      subaggs: Seq[AggregationDefinition] = Nil,
                                      metadata: Map[String, AnyRef] = Map.empty)
  extends AggregationDefinition {
  type T = TermsAggregationDefinition
  def field(field: String): TermsAggregationDefinition = copy(field = field.some)
  def script(script: ScriptDefinition): TermsAggregationDefinition = copy(script = script.some)
  def missing(missing: AnyRef): TermsAggregationDefinition = copy(missing = missing.some)
  def size(size: Int): TermsAggregationDefinition = copy(size = size.some)
  def minDocCount(min: Long): TermsAggregationDefinition = copy(minDocCount = min.some)
  def showTermDocCountError(showError: Boolean): TermsAggregationDefinition = copy(showTermDocCountError = showError.some)
  def valueType(valueType: ValueType): TermsAggregationDefinition = copy(valueType = valueType.some)
  def executionHint(hint: String): TermsAggregationDefinition = copy(executionHint = hint.some)
  def shardMinDocCount(min: Long): TermsAggregationDefinition = copy(shardMinDocCount = min.some)
  def collectMode(mode: Aggregator.SubAggCollectionMode): TermsAggregationDefinition = copy(collectMode = mode.some)
  def order(order: Terms.Order): TermsAggregationDefinition = copy(order = order.some)
  def shardSize(shardSize: Int): TermsAggregationDefinition = copy(shardSize = shardSize.some)
  def includeExclude(include: String, exclude: String): TermsAggregationDefinition =
    copy(includeExclude = new IncludeExclude(include.some.orNull, exclude.some.orNull).some)
  def includeExclude(include: Iterable[String], exclude: Iterable[String]): TermsAggregationDefinition = {
    // empty array doesn't work, has to be null
    val inc = if (include.isEmpty) null else include.toArray
    val exc = if (exclude.isEmpty) null else exclude.toArray
    copy(includeExclude = new IncludeExclude(inc, exc).some)
  }
  def includeExcludeLongs(include: Iterable[Long], exclude: Iterable[Long]): TermsAggregationDefinition = {
    // empty array doesn't work, has to be null
    val inc = if (include.isEmpty) null else include.toArray
    val exc = if (exclude.isEmpty) null else exclude.toArray
    copy(includeExclude = new IncludeExclude(inc, exc).some)
  }
  def includeExcludeDoubles(include: Iterable[Double], exclude: Iterable[Double]): TermsAggregationDefinition = {
    // empty array doesn't work, has to be null
    val inc = if (include.isEmpty) null else include.toArray
    val exc = if (exclude.isEmpty) null else exclude.toArray
    copy(includeExclude = new IncludeExclude(inc, exc).some)
  }
  override def pipelines(pipelines: Iterable[PipelineAggregationDefinition]): T = copy(pipelines = pipelines.toSeq)
  override def subAggregations(aggs: Iterable[AggregationDefinition]): T = copy(subaggs = aggs.toSeq)
  // BUG FIX: previously `copy(metadata = metadata)`, which ignored the `map`
  // argument entirely and returned an unchanged copy, making this setter a no-op.
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
| tyth/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/TermsAggregationDefinition.scala | Scala | apache-2.0 | 4,528 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.scalalib
import org.junit.Test
import org.junit.Assert._
import org.junit.Assume._
import scala.collection.immutable.NumericRange
import scala.math.BigDecimal
import org.scalajs.testsuite.utils.Platform._
/**
 * Regression tests for Range/NumericRange behavior under Scala.js, gated by
 * Scala version because several upstream bugs were fixed mid-2.11/2.12.
 */
class RangesTest {
  @Test def Iterable_range_should_not_emit_dce_warnings_issue_650(): Unit = {
    Iterable.range(1, 10)
  }
  @Test def Iterable_range_and_simple_range_should_be_equal(): Unit = {
    // Mostly to exercise more methods of ranges for dce warnings
    assertEquals((0 until 10).toList, Iterable.range(0, 10).toList)
  }
  // SI-style overflow bug: sum over a range spanning the full Int domain.
  @Test def NumericRange_overflow_issue_2407(): Unit = {
    assumeFalse("Assumed not on JVM for 2.11.{0-7}",
        executingInJVM && (0 to 7).map("2.11." + _).contains(scalaVersion))
    val nr = NumericRange(Int.MinValue, Int.MaxValue, 1 << 23)
    assertEquals(Int.MinValue, nr.sum)
  }
  @Test def Range_foreach_issue_2409(): Unit = {
    assumeFalse("Assumed not on JVM for 2.11.{0-7}",
        executingInJVM && (0 to 7).map("2.11." + _).contains(scalaVersion))
    val r = Int.MinValue to Int.MaxValue by (1 << 23)
    var i = 0
    r.foreach(_ => i += 1)
    assertEquals(512, i)
    assertEquals(512, r.length)
    assertEquals(Int.MinValue, r.sum)
  }
  // toString formats diverge between 2.11 and 2.12+; assert both shapes.
  @Test def Range_toString_issue_2412(): Unit = {
    if (scalaVersion.startsWith("2.11.")) {
      assertEquals("Range(1, 3, 5, 7, 9)", (1 to 10 by 2).toString)
      assertEquals("Range()", (1 until 1 by 2).toString)
      assertTrue(
          (BigDecimal(0.0) to BigDecimal(1.0)).toString.startsWith("scala.collection.immutable.Range$Partial"))
      assertEquals("Range(0, 1)", (0 to 1).toString)
    } else {
      assertEquals("inexact Range 1 to 10 by 2", (1 to 10 by 2).toString)
      assertEquals("empty Range 1 until 1 by 2", (1 until 1 by 2).toString)
      assertEquals("Range requires step", (BigDecimal(0.0) to BigDecimal(1.0)).toString)
      assertEquals("Range 0 to 1", (0 to 1).toString)
    }
  }
  @Test def NumericRange_toString_issue_2412(): Unit = {
    if (scalaVersion.startsWith("2.11.")) {
      assertEquals("NumericRange(0, 2, 4, 6, 8, 10)",
          NumericRange.inclusive(0, 10, 2).toString())
      assertEquals("NumericRange(0, 2, 4, 6, 8)",
          NumericRange(0, 10, 2).toString)
    } else {
      assertEquals("NumericRange 0 to 10 by 2",
          NumericRange.inclusive(0, 10, 2).toString())
      assertEquals("NumericRange 0 until 10 by 2",
          NumericRange(0, 10, 2).toString)
    }
  }
  // NumericRange.min/max with a custom Integral and with a reversed Ordering.
  @Test def NumericRange_with_arbitrary_integral(): Unit = {
    // This is broken in Scala JVM up to (including) 2.11.8, 2.12.1 (SI-10086).
    assumeFalse("Assumed not on JVM for 2.11.{0-8}",
        executingInJVM && (0 to 8).map("2.11." + _).contains(scalaVersion))
    assumeFalse("Assumed not on JVM for 2.12.{0-1}",
        executingInJVM && (0 to 1).map("2.12." + _).contains(scalaVersion))
    // Our custom integral type.
    case class A(v: Int)
    implicit object aIsIntegral extends scala.math.Integral[A] {
      def compare(x: A, y: A): Int = x.v.compare(y.v)
      def fromInt(x: Int): A = A(x)
      def minus(x: A, y: A): A = A(x.v - y.v)
      def negate(x: A): A = A(-x.v)
      def plus(x: A, y: A): A = A(x.v + y.v)
      def times(x: A, y: A): A = A(x.v * y.v)
      def quot(x: A, y: A): A = A(x.v / y.v)
      def rem(x: A, y: A): A = A(x.v % y.v)
      def toDouble(x: A): Double = x.v.toDouble
      def toFloat(x: A): Float = x.v.toFloat
      def toInt(x: A): Int = x.v
      def toLong(x: A): Long = x.v.toLong
      def parseString(str: String): Option[A] = Some(A(str.toInt))
    }
    val r = NumericRange(A(1), A(10), A(1))
    assertEquals(A(1), r.min)
    assertEquals(A(9), r.max)
    // Also test with custom ordering.
    assertEquals(A(9), r.min(aIsIntegral.reverse))
    assertEquals(A(1), r.max(aIsIntegral.reverse))
  }
}
| nicolasstucki/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/scalalib/RangesTest.scala | Scala | apache-2.0 | 4,132 |
package services
import domain.{MatchWithGames, MatchWithResults}
import models.{Game, MatchResult, Match, PlayerElo}
import org.joda.time.DateTime
import org.specs2.mutable._
/**
 * Specs for StatisticsService: boundary Elo ratings, win/loss streaks, score
 * differences, partnerships and per-player nemesis. The builders at the bottom
 * fabricate minimal fixtures; positional arguments double as timestamps so the
 * "latest record wins" tie-breaking can be asserted.
 */
class StatisticsServiceSpec extends Specification {
  // ===== Tests =====
  "The boundary Elos" should {
    "be empty when no matches have been played" in {
      val lowestElos = StatisticsService.determineBoundaryElos(Seq(), Ordering[Double])
      lowestElos must beEmpty
    }
    "only contain a single elo per player" in {
      val playerElos = List(
        buildPlayerElo(1, "Ronaldo", 1100.0),
        buildPlayerElo(2, "Ronaldo", 1000.0),
        buildPlayerElo(3, "Ronaldo", 1200.0)
      )
      val lowestElos = StatisticsService.determineBoundaryElos(playerElos, Ordering[Double])
      lowestElos must haveSize(1)
      lowestElos.head.record.value must beEqualTo(1000)
    }
    "contain the lowest elos" in {
      val playerElos = List(
        buildPlayerElo(1, "Ronaldo", 1000.0),
        buildPlayerElo(2, "Zidane", 800.0),
        buildPlayerElo(3, "Maradona", 1100.0),
        buildPlayerElo(4, "Messi", 1200.0),
        buildPlayerElo(5, "Zidane", 1100.0),
        buildPlayerElo(6, "Ronaldo", 900.0)
      )
      val lowestElos = StatisticsService.determineBoundaryElos(playerElos, Ordering[Double])
      lowestElos must haveSize(3)
      lowestElos.map(x => (x.player, x.record.value)) must contain(exactly(
        ("Zidane", 800), ("Ronaldo", 900), ("Maradona", 1100)
      ))
    }
    // On equal values the most recent entries should win the top-3 slots.
    "contain the latest elos" in {
      val playerElos = List(
        buildPlayerElo(1, "Ronaldo", 1000.0),
        buildPlayerElo(2, "Zidane", 1000.0),
        buildPlayerElo(3, "Maradona", 1000.0),
        buildPlayerElo(4, "Messi", 1000.0)
      )
      val lowestElos = StatisticsService.determineBoundaryElos(playerElos, Ordering[Double])
      lowestElos must haveSize(3)
      lowestElos.map(x => (x.player, x.record.value)) must contain(exactly(
        ("Messi", 1000), ("Maradona", 1000), ("Zidane", 1000)
      ))
    }
  }
  "The longest streaks" should {
    "be empty when no matches have been played" in {
      val longestWinningStreaks = StatisticsService.determineLongestStreaks(Seq(), MatchResult.Result.Winner)
      longestWinningStreaks must beEmpty
    }
    "be empty when no match has been won" in {
      val matches = List(
        buildMatchForResult(1, "Ronaldo", "Zidane", "Maradona", "Messi", None, Some("Ronaldo")),
        buildMatchForResult(2, "Ronaldo", "Zidane", "Maradona", "Messi", None, Some("Ronaldo")),
        buildMatchForResult(3, "Ronaldo", "Zidane", "Maradona", "Messi", None, Some("Zidane"))
      )
      val longestWinningStreaks = StatisticsService.determineLongestStreaks(matches, MatchResult.Result.Winner)
      longestWinningStreaks must beEmpty
    }
    "be interrupted if the player played and did not win" in {
      val matches = List(
        buildMatchForResult(1, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(2, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(3, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(4, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Zidane"), None),
        buildMatchForResult(5, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(6, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None)
      )
      val longestWinningStreaks = StatisticsService.determineLongestStreaks(matches, MatchResult.Result.Winner)
      longestWinningStreaks must haveSize(3)
      longestWinningStreaks.map(x => (x.player, x.record.value)) must contain(exactly(
        ("Ronaldo", 3), ("Ronaldo", 2), ("Zidane", 1)
      ))
    }
    // Sitting a match out must not break the streak (match 4 has Pele instead).
    "not be interrupted if the player did not play" in {
      val matches = List(
        buildMatchForResult(1, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(2, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(3, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(4, "Pele", "Zidane", "Maradona", "Messi", Some("Zidane"), None),
        buildMatchForResult(5, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(6, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None)
      )
      val longestWinningStreaks = StatisticsService.determineLongestStreaks(matches, MatchResult.Result.Winner)
      longestWinningStreaks must haveSize(2)
      longestWinningStreaks.map(x => (x.player, x.record.value)) must contain(exactly(
        ("Ronaldo", 5), ("Zidane", 1)
      ))
    }
    "contain the latest streaks" in {
      val matches = List(
        buildMatchForResult(1, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Ronaldo"), None),
        buildMatchForResult(2, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Zidane"), None),
        buildMatchForResult(3, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Maradona"), None),
        buildMatchForResult(4, "Ronaldo", "Zidane", "Maradona", "Messi", Some("Messi"), None)
      )
      val longestWinningStreaks = StatisticsService.determineLongestStreaks(matches, MatchResult.Result.Winner)
      longestWinningStreaks must haveSize(3)
      longestWinningStreaks.map(x => (x.player, x.record.value)) must contain(exactly(
        ("Messi", 1), ("Maradona", 1), ("Zidane", 1)
      ))
    }
  }
  "The highest score differences" should {
    "be empty when no matches have been played" in {
      val highestScoreDifferences = StatisticsService.determineHighestScoreDifferences(Seq())
      highestScoreDifferences must beEmpty
    }
    "ignore draws" in {
      val matches = List(
        buildMatchWithGame(1, "Ronaldo", "Zidane", "Maradona", "Messi", 0)
      )
      val highestScoreDifferences = StatisticsService.determineHighestScoreDifferences(matches)
      highestScoreDifferences must beEmpty
    }
    "contain the highest score differences" in {
      val matches = List(
        buildMatchWithGame(1, "Ronaldo", "Zidane", "Maradona", "Messi", 5),
        buildMatchWithGame(2, "Ronaldo", "Zidane", "Maradona", "Messi", 3),
        buildMatchWithGame(3, "Ronaldo", "Zidane", "Maradona", "Messi", 10),
        buildMatchWithGame(4, "Ronaldo", "Zidane", "Maradona", "Messi", 7)
      )
      val highestScoreDifferences = StatisticsService.determineHighestScoreDifferences(matches)
      val winners = Seq("Ronaldo", "Zidane")
      val losers = Seq("Maradona", "Messi")
      highestScoreDifferences must haveSize(3)
      highestScoreDifferences.map(x => (x.winners, x.losers, x.record.value)) must contain(exactly(
        (winners, losers, 10), (winners, losers, 7), (winners, losers,5)
      ))
    }
    // A tied record value must be superseded by newer matches (the first, older
    // win with the same margin should drop off the top-3 list).
    "contain the latest score differences" in {
      val matches = List(
        buildMatchWithGame(1, "Maradona", "Messi", "Ronaldo", "Zidane", 10),
        buildMatchWithGame(2, "Ronaldo", "Zidane", "Maradona", "Messi", 10),
        buildMatchWithGame(3, "Ronaldo", "Zidane", "Maradona", "Messi", 10),
        buildMatchWithGame(4, "Ronaldo", "Zidane", "Maradona", "Messi", 10)
      )
      val highestScoreDifferences = StatisticsService.determineHighestScoreDifferences(matches)
      val oldWinners = Seq("Maradona", "Messi")
      val oldLosers = Seq("Ronaldo", "Zidane")
      highestScoreDifferences must haveSize(3)
      highestScoreDifferences.map(x => (x.winners, x.losers, x.record.value)) must not contain(
        (oldWinners, oldLosers, 10)
      )
    }
  }
  "The boundary partnerships" should {
    "be empty when no matches have been played" in {
      val weakestPartnerships = StatisticsService.determineBoundaryPartnerships(Seq(), Ordering[Double])
      weakestPartnerships must beEmpty
    }
    "contain the weakest partnerships" in {
      val games = List(
        buildGame(1, "Ronaldo", "Zidane", "Maradona", "Messi", 8),
        buildGame(2, "Ronaldo", "Zidane", "Maradona", "Messi", 4),
        buildGame(3, "Ronaldo", "Zidane", "Beckham", "Pele", 6),
        buildGame(4, "Ronaldo", "Zidane", "Beckham", "Pele", 10),
        buildGame(5, "Ronaldo", "Zidane", "Beckham", "Messi", 10)
      )
      val weakestPartnerships = StatisticsService.determineBoundaryPartnerships(games, Ordering[Double])
      weakestPartnerships must haveSize(3)
      weakestPartnerships.map(x => (x.players, x.record.value)) must contain(exactly(
        (Seq("Beckham", "Messi"), -10), (Seq("Beckham", "Pele"), -8), (Seq("Maradona", "Messi"), -6)
      ))
    }
  }
  "The player nemesis" should {
    "be empty when no matches have been played" in {
      val playerNemesis = StatisticsService.determinePlayerNemesis(Seq())
      playerNemesis must beEmpty
    }
    "be the opponent with whom the player performs worst" in {
      val matches = List(
        buildMatchWithGame(1, "Ronaldo", "Zidane", "Maradona", "Messi", 4),
        buildMatchWithGame(2, "Ronaldo", "Zidane", "Beckham", "Pele", 0),
        buildMatchWithGame(3, "Ronaldo", "Maradona", "Beckham", "Pele", 8)
      )
      val playerNemesis = StatisticsService.determinePlayerNemesis(matches)
      playerNemesis.keys must haveSize(6)
      playerNemesis("Ronaldo") must beEqualTo("Zidane")
    }
  }
  // ===== Builders =====
  // `eloPos` orders the entries in time (minutes offset from "now").
  private def buildPlayerElo(eloPos: Int, player: String, elo: Double): PlayerElo = PlayerElo(None, player, DateTime.now.plusMinutes(eloPos), 1L, 0.0, elo)
  // Builds a complete match with one result row per player; `winner`/`loser`
  // mark a single player each, everyone else gets NoResult.
  private def buildMatchForResult(matchPos: Int, player1: String, player2: String, player3: String, player4: String, winner: Option[String], loser: Option[String]): MatchWithResults = {
    val foosMatch = Match(Some(matchPos), DateTime.now.plusMinutes(matchPos), "User", Match.Format.CompleteMatch)
    val playerResults = Seq(player1, player2, player3, player4).map { player =>
      val result = Some(player) match {
        case `winner` => MatchResult.Result.Winner
        case `loser` => MatchResult.Result.Loser
        case _ => MatchResult.Result.NoResult
      }
      MatchResult(matchPos, player, result, 1, 2, 10)
    }
    MatchWithResults(foosMatch, playerResults)
  }
  private def buildMatchWithGame(matchPos: Int, leftPlayer1: String, leftPlayer2: String, rightPlayer1: String, rightPlayer2: String, goalDifference: Int): MatchWithGames = {
    val foosMatch = Match(Some(matchPos), DateTime.now.plusMinutes(matchPos), "User", Match.Format.CompleteMatch)
    val game = buildGame(matchPos, leftPlayer1, leftPlayer2, rightPlayer1, rightPlayer2, goalDifference)
    MatchWithGames(foosMatch, Seq(game))
  }
  // Splits `goalDifference` across the two left players; right side scores 0,
  // so the left side wins by exactly `goalDifference` (0 means a draw).
  private def buildGame(gamePos: Int, leftPlayer1: String, leftPlayer2: String, rightPlayer1: String, rightPlayer2: String, goalDifference: Int): Game = {
    val leftScore1 = if (goalDifference == 0) 0 else goalDifference / 2
    val leftScore2 = if (goalDifference == 0) 0 else if (goalDifference % 2 == 0) goalDifference / 2 else goalDifference / 2 + 1
    Game(Some(gamePos), gamePos, leftPlayer1, leftPlayer2, rightPlayer1, rightPlayer2, leftScore1, leftScore2, 0, 0)
  }
}
| PriscH/Foosball | test/services/StatisticsServiceSpec.scala | Scala | gpl-3.0 | 11,214 |
/*
* Copyright 2010 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.webui.profile
import javax.ws.rs._
import javax.ws.rs.core.Response.Status._
import org.codehaus.jackson.annotate.JsonProperty
import io.fabric8.webui._
import scala.Some
import scala.Some
import io.fabric8.webui.{Services, ByID, BaseResource}
import com.sun.jersey.multipart.FormDataParam
import java.io.{FileOutputStream, File, InputStream}
import com.sun.jersey.core.header.FormDataContentDisposition
import scala.Some
import java.util
import java.io._
import org.apache.commons.io.IOUtils
import org.apache.commons.compress.archivers.zip.ZipFile
import scala.Some
import io.fabric8.webui.patching.BaseUpgradeResource
/** Request body for creating a new version, optionally derived from another. */
class CreateVersionDTO {
  @JsonProperty
  var id: String = _
  @JsonProperty
  var derived_from: String = _
}
/** Request body for deleting a set of versions by id. */
class DeleteVersionsDTO {
  @JsonProperty
  var ids: Array[String] = _
}
/** Request body for marking a version as the fabric-wide default. */
class SetDefaultVersionDTO {
  @JsonProperty
  var id: String = _
}
@Path("/versions")
class VersionsResource extends BaseResource {
@GET
override def get: Array[VersionResource] = {
fabric_service.getVersions.map(new VersionResource(_)).sortWith(ByID(_, _))
}
@Path("{id}")
def get(@PathParam("id") id: String): VersionResource = {
val rc = get.find(_.id == id)
rc getOrElse not_found
}
@POST
@Path("import")
@Consumes(Array("multipart/form-data"))
@Produces(Array("text/html"))
def import_version( @FormDataParam("target-name") target_name: String,
@FormDataParam("import-file") file: InputStream,
@FormDataParam("import-file") file_detail: FormDataContentDisposition): String = {
val filename = file_detail.getFileName
if (!filename.endsWith(".zip")) {
respond(BAD_REQUEST, "Profile must be stored in a .zip file")
}
var name = filename.replace(".zip", "")
Services.LOG.debug("Received file : {}", filename)
val tmp = File.createTempFile("imp", ".zip")
tmp.deleteOnExit()
val fout = new FileOutputStream(tmp)
IOUtils.copy(file, fout)
fout.close
val zip = new ZipFile(tmp)
val profiles = new util.HashMap[String, util.HashMap[String, Array[Byte]]]
import collection.JavaConverters._
def get_profile(name: String) = {
Option(profiles.get(name)) match {
case Some(data) =>
data
case None =>
profiles.put(name, new util.HashMap[String, Array[Byte]]())
profiles.get(name)
}
}
zip.getEntries.asScala.foreach((x) => {
if (x.isDirectory()) {
val profile_name = x.getName.replace("/", "")
get_profile(profile_name)
} else {
val Array(profile, property_name, _*) = x.getName.split("/")
Services.LOG.debug("Found entry profile: {}, property: {}", profile, property_name)
Services.LOG.debug("Entry is (supposedly) {} bytes", x.getSize)
val in = new BufferedInputStream(zip.getInputStream(x))
val buffer = IOUtils.toByteArray(in);
in.close()
Services.LOG.debug("Read {} bytes", buffer.length)
profiles.get(profile).put(property_name, buffer)
}
})
zip.close
tmp.delete
val version = if (target_name.equals("")) {
val rc = BaseUpgradeResource.create_version(BaseUpgradeResource.last_version_id)
Services.LOG.info("Creating new version {}", rc.getId());
rc
} else {
try {
Option(fabric_service.getVersion(target_name)) match {
case Some(rc) =>
Services.LOG.info("Overwriting existing version {}", rc.getId());
rc
case None =>
Services.LOG.info("Creating new emtpy version {}", target_name);
BaseUpgradeResource.create_version(BaseUpgradeResource.last_version_id)
}
} catch {
case _ =>
Services.LOG.info("Creating new emtpy version {}", target_name);
BaseUpgradeResource.create_version(BaseUpgradeResource.last_version_id)
}
}
val ps = version.getProfiles
ps.foreach(_.delete)
profiles.keySet.foreach( (p) =>
try {
version.createProfile(p)
} catch {
case _ =>
// ignore
})
profiles.asScala.foreach {
case (profile: String, data: util.HashMap[String, Array[Byte]]) => {
VersionResource.create_profile(version, data, profile)
}
}
version.getId
}
//@GET @Path("default")
//def default : VersionResource = new VersionResource(fabric_service.getDefaultVersion)
/**
 * Creates a new fabric version.
 *
 * If no id is supplied (null / empty / "<unspecified>") the next name in the
 * latest version's sequence is used. If `derived_from` names an existing
 * version, the new version derives from it; otherwise (including unknown
 * parent names) it silently derives from the latest version.
 */
@POST
def create(options: CreateVersionDTO) = {
  val latestVersion = get.last.self

  // Fall back to the next sequence name when the client did not pick an id.
  val new_id = if (options.id == null || options.id == "" || options.id == "<unspecified>") {
    latestVersion.getSequence.next.getName
  } else {
    options.id
  }

  // Normalize the "no parent" sentinels to null so Option(...) yields None below.
  val derived_from: String = if (options.derived_from == null || options.derived_from == "" || options.derived_from == "none") {
    null
  } else {
    options.derived_from
  }

  // Use the bound `id` inside the lambda (the previous code ignored the lambda
  // parameter and re-read `derived_from`, obscuring intent — same behavior).
  val rc = Option(derived_from).flatMap(id => get.find(_.id == id)) match {
    case Some(version) =>
      fabric_service.createVersion(version.self, new_id)
    case None =>
      fabric_service.createVersion(latestVersion, new_id)
  }
  new VersionResource(rc)
}
// Deletes the version identified by `id` via its underlying fabric Version.
// NOTE(review): behavior for unknown ids depends on get(id) — confirm it fails fast.
def delete_version(id: String) = get(id).self.delete
/** Deletes every version whose id appears in the request payload. */
@POST
@Path("delete")
def delete_versions(args: DeleteVersionsDTO): Unit = args.ids.foreach(delete_version)
// Makes the version named in the request the fabric default version.
// Fails the require() when no id was supplied; resolution of unknown ids
// is delegated to get(args.id) — NOTE(review): confirm it fails fast.
@POST
@Path("set_default")
def set_default(args: SetDefaultVersionDTO) = {
  require(args.id != null, "Must specify a version name")
  val new_default = get(args.id).self
  fabric_service.setDefaultVersion(new_default)
}
}
| alexeev/jboss-fuse-mirror | sandbox/fmc/fmc-rest/src/main/scala/org/fusesource/fabric/webui/profile/VersionsResource.scala | Scala | apache-2.0 | 6,255 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.api
import org.apache.predictionio.data.storage.Storage
import akka.testkit.TestProbe
import akka.actor.ActorSystem
import akka.actor.Props
import spray.http.HttpEntity
import spray.http.HttpResponse
import spray.http.ContentTypes
import spray.httpx.RequestBuilding.Get
import org.specs2.mutable.Specification
/**
 * Spec for the event server's HTTP endpoint: boots an EventServiceActor wired
 * to the configured storage backends and checks the health-check route.
 * NOTE(review): Storage.get* presumably requires live/embedded storage
 * configuration — confirm test environment setup.
 */
class EventServiceSpec extends Specification {

  val system = ActorSystem("EventServiceSpecSystem")

  // Storage clients the actor under test depends on.
  val eventClient = Storage.getLEvents()
  val accessKeysClient = Storage.getMetaDataAccessKeys()
  val channelsClient = Storage.getMetaDataChannels()

  // Actor under test, created with a default server configuration.
  val eventServiceActor = system.actorOf(
    Props(
      new EventServiceActor(
        eventClient,
        accessKeysClient,
        channelsClient,
        EventServerConfig()
      )
    )
  )

  "GET / request" should {
    "properly produce OK HttpResponses" in {
      // Drive the actor directly with a TestProbe instead of a full HTTP server.
      val probe = TestProbe()(system)
      probe.send(eventServiceActor, Get("/"))
      // The root route is a JSON health check.
      probe.expectMsg(
        HttpResponse(
          200,
          HttpEntity(
            contentType = ContentTypes.`application/json`,
            string = """{"status":"alive"}"""
          )
        )
      )
      success
    }
  }

  // Tear the actor system down after all examples have run.
  step(system.shutdown())
}
| pferrel/PredictionIO | data/src/test/scala/org/apache/predictionio/data/api/EventServiceSpec.scala | Scala | apache-2.0 | 2,037 |
import sbt._
import Keys._
// sbt 0.13-style Build definition for the single-module "timesheet" project.
object ProjectBuild extends Build {

  // Pin the Scala version for all projects.
  // NOTE(review): `resolvers := Seq()` *replaces* the resolver list with an
  // empty one rather than appending — confirm this is intentional.
  override lazy val settings = super.settings ++
    Seq(scalaVersion := "2.10.4", resolvers := Seq())

  // Third-party libraries used by the application.
  val appDependencies = Seq(
    "com.github.nscala-time" %% "nscala-time" % "0.6.0"
  )

  // Root project built from the repository root directory.
  lazy val root = Project(
    id = "timesheet",
    base = file("."),
    settings = Project.defaultSettings ++ Seq(
      libraryDependencies ++= appDependencies)
  )
}
| ThStock/timesheet | project/Build.scala | Scala | apache-2.0 | 434 |
package scray.cassandra.tools
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import com.typesafe.scalalogging.LazyLogging
import scray.cassandra.tools.types.ScrayColumnTypes._
import scray.cassandra.tools.api.LucenIndexedColumn
import scray.cassandra.tools.types.LuceneColumnTypes
import scray.querying.description.TableIdentifier
import scala.annotation.tailrec
/**
 * Specs for the Lucene index CQL statement generator: verifies generated
 * CREATE CUSTOM INDEX / ALTER TABLE statements (whitespace-insensitively)
 * and that unsupported plugin versions yield no statement.
 */
@RunWith(classOf[JUnitRunner])
class CassandraLuceneIndexStatementGeneratorImplSpecs extends WordSpec with LazyLogging {

  "LuceneIndexStatementGenerator " should {
    "create index statement for one column " in {
      val statementGenerator = new CassandraLuceneIndexStatementGeneratorImpl
      // Single sorted string column on cassandra/ks1/cf1, plugin version 2.2.7.
      val configurationString = statementGenerator.getIndexString(
        TableIdentifier("cassandra", "ks1", "cf1"),
        List(LucenIndexedColumn("col1", LuceneColumnTypes.String(""), true)),
        (2, 2, 7))

      val expectedResult = s"""
        CREATE CUSTOM INDEX "cf1_lucene_index" ON "ks1"."cf1" (lucene)
        USING 'com.stratio.cassandra.lucene.Index'
        WITH OPTIONS = {
        'refresh_seconds' : '1',
        'schema' : '{
        fields : {
        col1 : {type: "string", sorted: true }
        }
        }'
        };"""

      assert(configurationString.isDefined)
      // Compare modulo whitespace/pretty-printing differences.
      assert(removePrettyPrintingChars(configurationString.getOrElse("")) == removePrettyPrintingChars(expectedResult))
    }

    "create index statement for multiple columns " in {
      val statementGenerator = new CassandraLuceneIndexStatementGeneratorImpl
      val configurationString = statementGenerator.getIndexString(
        TableIdentifier("cassandra", "ks", "cf1"),
        List(LucenIndexedColumn("col1", LuceneColumnTypes.String(""), false), LucenIndexedColumn("col2", LuceneColumnTypes.String(""), false)),
        (2, 2, 7))

      val expectedResult = s"""
        CREATE CUSTOM INDEX "cf1_lucene_index" ON "ks"."cf1" (lucene)
        USING 'com.stratio.cassandra.lucene.Index'
        WITH OPTIONS = {
        'refresh_seconds' : '1',
        'schema' : '{
        fields : {
        col1 : {type: "string", sorted: false }
        col2 : {type: "string", sorted: false }
        }
        }'
        };"""

      assert(configurationString.isDefined)
      assert(removePrettyPrintingChars(configurationString.getOrElse("")) == removePrettyPrintingChars(expectedResult))
    }

    "create no index statement if lucene plugin version is wrong " in {
      val statementGenerator = new CassandraLuceneIndexStatementGeneratorImpl
      // Version 1.0.0 is below the supported plugin version range.
      val configurationString = statementGenerator.getIndexString(
        TableIdentifier("cassandra", "ks", "cf1"),
        List(LucenIndexedColumn("col1", LuceneColumnTypes.String(""), false)),
        (1, 0, 0))

      assert( ! configurationString.isDefined)
    }

    " create alter table statement " in {
      val statementGenerator = new CassandraLuceneIndexStatementGeneratorImpl
      val alterStatement = statementGenerator.getAlterTableStatement(TableIdentifier("cassandra", "ks1", "col1"))
      assert(alterStatement == "ALTER TABLE \\"ks1\\".\\"col1\\" ADD lucene text;")
    }
  }

  // Collapses runs of spaces and strips newlines/tabs so generated and expected
  // statements can be compared independently of formatting.
  private def removePrettyPrintingChars(prettyString: java.lang.String): java.lang.String = {

    // Repeatedly halve double spaces until none remain.
    @tailrec
    def removeSpaces(string: java.lang.String): java.lang.String = {
      if(string.contains("  ")) {
        removeSpaces(string.replace("  ", " "))
      } else {
        string
      }
    }

    removeSpaces(removeSpaces(prettyString).replace("\\n ", "\\n").replace(" \\n", "\\n").replace("\\n", " ").replace("\\t","").trim())
  }
}
package com.sparcedge.turbine.data
import java.io.{StringWriter,PrintWriter}
import akka.actor.{Actor,ActorRef}
import akka.util.Timeout
import akka.pattern.ask
import scala.concurrent.{ExecutionContext,Await,Future}
import scala.concurrent.duration._
import scala.util.{Try,Success,Failure}
import scala.collection.mutable
import spray.routing.RequestContext
import spray.http.{HttpResponse,HttpEntity,StatusCodes}
import com.sparcedge.turbine.event.{EventPackage,EventIngressPackage,Event}
import com.sparcedge.turbine.BladeManagerRepository
import com.sparcedge.turbine.util.{WrappedTreeMap,CustomJsonSerializer}
import com.sparcedge.turbine.data._
import com.sparcedge.turbine.Blade
object WriteHandler {
  // Message asking the handler to deserialize and persist one serialized
  // ingress event package under the given id.
  case class WriteEventRequest(id: String, eventPkgBytes: Array[Byte])
}
import BladeManagerRepository._
import BladeManager._
import WriteHandler._
/**
 * Actor that deserializes incoming event packages and forwards the contained
 * event to the blade manager responsible for the package's blade, creating
 * the manager on demand via the repository.
 */
class WriteHandler(bladeManagerRepository: ActorRef) extends Actor {
  implicit val timeout = Timeout(240.seconds)
  implicit val ec: ExecutionContext = context.dispatcher

  def receive = {
    case WriteEventRequest(id, eventPkgBytes) =>
      // IN THE FUTURE!!!
      val eventIngressPkg = EventIngressPackage.fromBytes(eventPkgBytes)
      val eventPkg = EventPackage.fromEventIngressPackage(eventIngressPkg)
      val manager = retrieveAndOrCreateManager(eventPkg.blade)
      writeEvent(manager, id, eventPkg.event)
    // All other messages are silently ignored.
    case _ =>
  }

  // Asks the repository for (or has it create) the manager for this blade.
  def retrieveAndOrCreateManager(blade: Blade): Future[ActorRef] = {
    val bladeManResponseFuture = (bladeManagerRepository ? BladeManagerGetOrCreateRequest(blade)).mapTo[BladeManagerGetOrCreateResponse]
    bladeManResponseFuture.map { response => response.manager }
  }

  // Fire-and-forget: once the manager future completes, send it the event.
  // NOTE(review): a failed repository lookup is silently dropped — confirm
  // that losing the write in that case is acceptable.
  def writeEvent(manager: Future[ActorRef], id: String, event: Event) {
    manager.foreach { man => man ! AddEvent(id, event) }
  }
}
} | bobwilliams/turbinedb | src/main/scala/com/sparcedge/turbine/data/WriteHandler.scala | Scala | gpl-3.0 | 1,781 |
package cromwell.docker
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
/**
 * Table-driven spec for DockerImageIdentifier parsing: host/repo/image/tag
 * decomposition, digest and tag+digest forms, and rejection of invalid names.
 */
class DockerImageIdentifierSpec extends FlatSpec with Matchers with TableDrivenPropertyChecks {
  behavior of "DockerImageID"

  it should "parse valid docker images" in {
    // Each row: input string and the expected parsed components.
    val valid = Table(
      ("sourceString", "host", "repo", "image", "reference"),
      // Without tags -> latest
      ("ubuntu", None, None, "ubuntu", "latest"),
      ("broad/cromwell", None, Some("broad"), "cromwell", "latest"),
      ("index.docker.io/ubuntu", Option("index.docker.io"), None, "ubuntu", "latest"),
      ("broad/cromwell/submarine", None, Some("broad/cromwell"), "submarine", "latest"),
      ("gcr.io/google/alpine", Option("gcr.io"), Some("google"), "alpine", "latest"),
      // With tags
      ("ubuntu:latest", None, None, "ubuntu", "latest"),
      ("ubuntu:1235-SNAP", None, None, "ubuntu", "1235-SNAP"),
      ("ubuntu:V3.8-5_1", None, None, "ubuntu", "V3.8-5_1"),
      ("quay.io/biocontainers/platypus-variant:0.8.1.1--htslib1.5_0", Option("quay.io"), Some("biocontainers"), "platypus-variant", "0.8.1.1--htslib1.5_0")
    )

    forAll(valid) { (dockerString, host, repo, image, reference) =>
      val imageId = DockerImageIdentifier.fromString(dockerString)
      imageId.isSuccess shouldBe true
      val successfulId = imageId.get
      successfulId.host shouldBe host
      successfulId.repository shouldBe repo
      successfulId.image shouldBe image
      successfulId.reference shouldBe reference
    }

    // With digest: the digest becomes the reference and yields the hashed identifier type.
    val withDigest = DockerImageIdentifier.fromString("ubuntu@sha256:45168651")
    withDigest.isSuccess shouldBe true
    val successfulDigest = withDigest.get
    successfulDigest.host shouldBe None
    successfulDigest.repository shouldBe None
    successfulDigest.image shouldBe "ubuntu"
    successfulDigest.reference shouldBe "sha256:45168651"
    successfulDigest.isInstanceOf[DockerImageIdentifierWithHash] shouldBe true
    successfulDigest.asInstanceOf[DockerImageIdentifierWithHash].hash shouldBe DockerHashResult("sha256", "45168651")

    // With tag + digest: both appear in the reference, still a hashed identifier.
    val withTagDigest = DockerImageIdentifier.fromString("ubuntu:latest@sha256:45168651")
    withTagDigest.isSuccess shouldBe true
    val successfulTagDigest = withTagDigest.get
    successfulTagDigest.host shouldBe None
    successfulTagDigest.repository shouldBe None
    successfulTagDigest.image shouldBe "ubuntu"
    successfulTagDigest.reference shouldBe "latest@sha256:45168651"
    successfulTagDigest.isInstanceOf[DockerImageIdentifierWithHash] shouldBe true
    successfulTagDigest.asInstanceOf[DockerImageIdentifierWithHash].hash shouldBe DockerHashResult("sha256", "45168651")
  }

  it should "not parse invalid docker images" in {
    // Malformed names that the parser must reject.
    val invalid = List(
      "_notvalid:latest",
      "NotValid:latest",
      "not:_valid",
      "/not:valid",
      "not%valid",
      "not@sha256:digest:tag",
      "not:sha256:digest@tag"
    )

    invalid foreach { image =>
      DockerImageIdentifier.fromString(image).isSuccess shouldBe false
    }
  }
}
| ohsu-comp-bio/cromwell | dockerHashing/src/test/scala/cromwell/docker/DockerImageIdentifierSpec.scala | Scala | bsd-3-clause | 3,559 |
package com.learning.akka.hello
import akka.actor.{Props, Actor}
/**
* Created by lgrcyanny on 15/10/7.
*/
/** Root actor: spawns a Greeter child, asks it to greet, and stops itself once done. */
class HelloWorld extends Actor {

  // Create the child and kick off the greeting as soon as this actor starts.
  override def preStart(): Unit =
    context.actorOf(Props[Greeter], "greeter") ! Greeter.Greet

  // Shut down after the greeter reports completion.
  override def receive: Receive = {
    case Greeter.Done => context.stop(self)
  }
}
| lgrcyanny/LearningAkka | src/main/scala/com/learning/akka/hello/HelloWorld.scala | Scala | gpl-2.0 | 349 |
package org.jetbrains.plugins.scala.lang.resolve.testAllResolve
package generated
// Generated test class: runs the resolve-all suite over the "self/" fixtures.
class TestAllResolveSelfTest extends TestAllResolveTestBase {
  //This class was generated by build script, please don't change this
  override def folderPath: String = super.folderPath + "self/"

  // Executes the base-class test runner against every fixture in folderPath.
  def testSelf = doTest
}
package ch.ethz.inf.da.tipstersearch.io
import ch.ethz.inf.da.tipstersearch.Query
import scala.io.Source
/**
* Reads binary relevance truth values from the tipster dataset
*/
/**
 * Reads binary relevance truth values from the tipster dataset
 */
object RelevanceReader {

  /**
   * Reads binary relevance truth values from the given file and stores them in
   * their appropriate query objects. Lines are expected to look like
   * "<queryId> 0 <documentId> ... <0|1>".
   *
   * Fixes over the previous implementation: the file is read once (it used to
   * be re-opened for every query and the Source was never closed), and the
   * query id is matched against the exact first field — startsWith("" + id)
   * wrongly let query 1 claim lines belonging to queries 10, 11, etc.
   *
   * @param path the path of the file to read from
   * @param queries the list of queries to store the values in
   */
  def read(path: String, queries: List[Query]): Unit = {
    // Read the whole file once and always close the underlying Source.
    val source = Source.fromFile(path)
    val lines = try source.getLines().toList finally source.close()

    for (query <- queries) {
      val id = query.id.toString
      query.truth = lines
        .filter(line => line.split(" ", 2).head == id) // exact first-field match
        .map(line => (line.replaceAll("^[0-9]+ 0 ([^ ]+).*", "$1"), line.replaceAll(".*([01])$", "$1").toInt))
    }
  }
}
| rjagerman/TipsterSearch | src/main/scala/io/RelevanceReader.scala | Scala | mit | 865 |
package mesosphere.marathon
package core.storage.store.impl.zk
import java.time.OffsetDateTime
import java.time.format.DateTimeFormatter
import java.util.UUID
import akka.actor.Scheduler
import akka.stream.Materializer
import akka.stream.scaladsl.{ Flow, Keep, Merge, Sink, Source }
import akka.util.ByteString
import akka.{ Done, NotUsed }
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.Protos.{ StorageVersion, ZKStoreEntry }
import mesosphere.marathon.core.storage.backup.BackupItem
import mesosphere.marathon.core.storage.store.impl.{ BasePersistenceStore, CategorizedKey }
import mesosphere.marathon.storage.migration.{ Migration, StorageVersions }
import mesosphere.marathon.util.{ Retry, WorkQueue, toRichFuture }
import org.apache.zookeeper.KeeperException
import org.apache.zookeeper.KeeperException.{ NoNodeException, NodeExistsException }
import org.apache.zookeeper.data.Stat
import scala.async.Async.{ async, await }
import scala.collection.immutable.Seq
import scala.concurrent.duration.Duration
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }
/**
 * Identifies a stored entity in ZooKeeper: a category, an id, and an optional
 * version timestamp. Ids are spread across hash buckets under each category.
 */
case class ZkId(category: String, id: String, version: Option[OffsetDateTime]) {
  // Bucket index derived from the id's hash; rendered as lowercase hex in the path.
  private val bucket = math.abs(id.hashCode % ZkId.HashBucketSize)

  /** ZNode path: /<category>/<bucket-hex>/<id>[/<ISO-8601 version timestamp>]. */
  def path: String = {
    val base = s"/$category/${Integer.toHexString(bucket)}/$id"
    version match {
      case Some(v) => s"$base/${ZkId.DateFormat.format(v)}"
      case None    => base
    }
  }
}

object ZkId {
  /** Timestamp format used for versioned node names. */
  val DateFormat = DateTimeFormatter.ISO_OFFSET_DATE_TIME
  /** Number of hash buckets ids are spread across under a category. */
  val HashBucketSize = 16
}
// Wrapper marking a ByteString as the ZooKeeper-serialized form of a stored value.
case class ZkSerialized(bytes: ByteString)
/**
 * Persistence store backed by ZooKeeper via Curator. All requests go through a
 * bounded work queue (maxConcurrent / maxQueued) and are retried on connection
 * loss up to `timeout`; other ZK errors are wrapped in StoreCommandFailedException.
 */
class ZkPersistenceStore(
    val client: RichCuratorFramework,
    timeout: Duration,
    maxConcurrent: Int = 8,
    maxQueued: Int = 100
)(
    implicit
    mat: Materializer,
    ctx: ExecutionContext,
    scheduler: Scheduler) extends BasePersistenceStore[ZkId, String, ZkSerialized]() with StrictLogging {

  // Bounds the number of simultaneous and queued ZK requests.
  private val limitRequests = WorkQueue("ZkPersistenceStore", maxConcurrent = maxConcurrent, maxQueueLength = maxQueued)

  // Retry only on connection loss and non-fatal, non-ZK errors; other
  // KeeperExceptions are treated as permanent failures.
  private val retryOn: Retry.RetryOnFn = {
    case _: KeeperException.ConnectionLossException => true
    case _: KeeperException => false
    case NonFatal(_) => true
  }

  // Wraps a request with the retry policy and the shared work queue.
  private def retry[T](name: String)(f: => Future[T]) = Retry(name, retryOn = retryOn, maxDuration = timeout) {
    limitRequests(f)
  }

  // Reads the global storage-version node, if it exists.
  @SuppressWarnings(Array("all")) // async/await
  override def storageVersion(): Future[Option[StorageVersion]] =
    retry("ZkPersistenceStore::storageVersion") {
      async {
        await(client.data(s"/${Migration.StorageVersionName}").asTry) match {
          case Success(GetData(_, _, byteString)) =>
            // The version is wrapped in a ZKStoreEntry envelope.
            val wrapped = ZKStoreEntry.parseFrom(byteString.toArray)
            Some(StorageVersion.parseFrom(wrapped.getValue))
          case Failure(_: NoNodeException) =>
            None
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException("Unable to get version", e)
          case Failure(e) =>
            throw e
        }
      }
    }

  // Writes the storage version (forcing PERSISTENCE_STORE format), creating the
  // node on first write.
  @SuppressWarnings(Array("all")) // async/await
  override def setStorageVersion(storageVersion: StorageVersion): Future[Done] =
    retry(s"ZkPersistenceStore::setStorageVersion($storageVersion)") {
      async {
        val path = s"/${Migration.StorageVersionName}"
        val actualVersion = storageVersion.toBuilder.setFormat(StorageVersion.StorageFormat.PERSISTENCE_STORE).build()
        val data = ByteString(
          ZKStoreEntry.newBuilder().setValue(com.google.protobuf.ByteString.copyFrom(actualVersion.toByteArray))
            .setName(Migration.StorageVersionName)
            .setCompressed(false)
            .setUuid(com.google.protobuf.ByteString.copyFromUtf8(UUID.randomUUID().toString))
            .build.toByteArray
        )
        await(client.setData(path, data).asTry) match {
          case Success(_) =>
            Done
          case Failure(_: NoNodeException) =>
            // First write: the node does not exist yet.
            await(client.create(path, data = Some(data)))
            Done
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException(s"Unable to update storage version $storageVersion", e)
          case Failure(e) =>
            throw e
        }
      }
    }

  // Lists all ids in a category by scanning each hash bucket under it.
  @SuppressWarnings(Array("all")) // async/await
  override protected def rawIds(category: String): Source[ZkId, NotUsed] = {
    val childrenFuture = retry(s"ZkPersistenceStore::ids($category)") {
      async {
        // A missing category node simply means "no ids".
        val buckets = await(client.children(s"/$category").recover {
          case _: NoNodeException => Children(category, new Stat(), Nil)
        }).children
        val childFutures = buckets.map { bucket =>
          retry(s"ZkPersistenceStore::ids($category/$bucket)") {
            client.children(s"/$category/$bucket").map(_.children)
          }
        }
        val children = await(Future.sequence(childFutures))
        children.flatten.map { child =>
          ZkId(category, child, None)
        }
      }
    }
    Source.fromFuture(childrenFuture).mapConcat(identity)
  }

  // Lists the stored versions of an entity (child node names parsed as timestamps).
  @SuppressWarnings(Array("all")) // async/await
  override protected def rawVersions(id: ZkId): Source[OffsetDateTime, NotUsed] = {
    val unversioned = id.copy(version = None)
    val path = unversioned.path
    val versions = retry(s"ZkPersistenceStore::versions($path)") {
      async {
        await(client.children(path).asTry) match {
          case Success(Children(_, _, nodes)) =>
            nodes.map { path =>
              OffsetDateTime.parse(path, ZkId.DateFormat)
            }
          case Failure(_: NoNodeException) =>
            Seq.empty
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException(s"Unable to get versions of $id", e)
          case Failure(e) =>
            throw e
        }
      }
    }
    Source.fromFuture(versions).mapConcat(identity)
  }

  // Reads the raw bytes for an id; empty node data is treated as deleted (None).
  @SuppressWarnings(Array("all")) // async/await
  override protected[store] def rawGet(id: ZkId): Future[Option[ZkSerialized]] =
    retry(s"ZkPersistenceStore::get($id)") {
      async {
        await(client.data(id.path).asTry) match {
          case Success(GetData(_, _, bytes)) =>
            if (bytes.nonEmpty) { // linter:ignore UseIfExpression
              Some(ZkSerialized(bytes))
            } else {
              None
            }
          case Failure(_: NoNodeException) =>
            None
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException(s"Unable to get $id", e)
          case Failure(e) =>
            throw e
        }
      }
    }

  // Deletes a single version node; missing nodes are a no-op.
  @SuppressWarnings(Array("all")) // async/await
  override protected def rawDelete(id: ZkId, version: OffsetDateTime): Future[Done] =
    retry(s"ZkPersistenceStore::delete($id, $version)") {
      async {
        await(client.delete(id.copy(version = Some(version)).path).asTry) match {
          case Success(_) | Failure(_: NoNodeException) => Done
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException(s"Unable to delete $id", e)
          case Failure(e) =>
            throw e
        }
      }
    }

  // Marks the "current" value deleted by overwriting the node with empty data
  // (versions under the node are kept — consistent with rawGet's empty check).
  @SuppressWarnings(Array("all")) // async/await
  override protected def rawDeleteCurrent(id: ZkId): Future[Done] = {
    retry(s"ZkPersistenceStore::deleteCurrent($id)") {
      async {
        await(client.setData(id.path, data = ByteString()).asTry) match {
          case Success(_) | Failure(_: NoNodeException) => Done
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException(s"Unable to delete current $id", e)
          case Failure(e) =>
            throw e
        }
      }
    }
  }

  // Writes a value, creating the node (and parent containers) if needed; a
  // creation race falls back to a plain setData.
  @SuppressWarnings(Array("all")) // async/await
  override protected def rawStore[V](id: ZkId, v: ZkSerialized): Future[Done] = {
    retry(s"ZkPersistenceStore::store($id, $v)") {
      async {
        await(client.setData(id.path, v.bytes).asTry) match {
          case Success(_) =>
            Done
          case Failure(_: NoNodeException) =>
            await(limitRequests(client.create(
              id.path,
              creatingParentContainersIfNeeded = true, data = Some(v.bytes))).asTry) match {
              case Success(_) =>
                Done
              case Failure(_: NodeExistsException) =>
                // it could have been created by another call too... (e.g. creatingParentContainers if needed could
                // have created the node when creating the parent's, e.g. the version was created first)
                await(limitRequests(client.setData(id.path, v.bytes)))
                Done
              case Failure(e: KeeperException) =>
                throw new StoreCommandFailedException(s"Unable to store $id", e)
              case Failure(e) =>
                throw e
            }
          case Failure(e: KeeperException) =>
            throw new StoreCommandFailedException(s"Unable to store $id", e)
          case Failure(e) =>
            throw e
        }
      }
    }
  }

  // Recursively deletes the entity node and all of its versions.
  @SuppressWarnings(Array("all")) // async/await
  override protected def rawDeleteAll(id: ZkId): Future[Done] = {
    val unversionedId = id.copy(version = None)
    retry(s"ZkPersistenceStore::delete($unversionedId)") {
      client.delete(unversionedId.path, guaranteed = true, deletingChildrenIfNeeded = true).map(_ => Done).recover {
        case _: NoNodeException =>
          Done
      }
    }
  }

  // Scans every category under the root and emits each id tagged with its category.
  @SuppressWarnings(Array("all")) // async/await
  override protected[store] def allKeys(): Source[CategorizedKey[String, ZkId], NotUsed] = {
    val sources = retry("ZkPersistenceStore::keys()") {
      async {
        val rootChildren = await(client.children("/").map(_.children))
        val sources = rootChildren.map(rawIds)
        sources.foldLeft(Source.empty[ZkId])(_.concat(_))
      }
    }
    Source.fromFuture(sources).flatMapConcat(identity).map { k => CategorizedKey(k.category, k) }
  }

  // Streams every current and versioned entry, followed by a final item carrying
  // the storage version, as backup items.
  @SuppressWarnings(Array("all")) // async/await
  override def backup(): Source[BackupItem, NotUsed] = {
    val ids: Source[ZkId, NotUsed] = allKeys().map(_.key)
    val versions: Source[ZkId, NotUsed] = ids.flatMapConcat(id => rawVersions(id).map(v => id.copy(version = Some(v))))
    val combined = Source.combine(ids, versions)(Merge(_))
    combined.mapAsync(maxConcurrent) { id =>
      rawGet(id).filter(_.isDefined).map(ser => BackupItem(id.category, id.id, id.version, ser.get.bytes))
    }.concat {
      Source.fromFuture(storageVersion()).map { storedVersion =>
        val version = storedVersion.getOrElse(StorageVersions.current)
        val name = Migration.StorageVersionName
        BackupItem(name, name, None, ByteString(version.toByteArray))
      }
    }
  }

  // Sink that wipes the whole tree, then replays backup items one at a time
  // (the storage-version item updates the version node instead of a data node).
  override def restore(): Sink[BackupItem, Future[Done]] = {
    def store(item: BackupItem): Future[Done] = {
      val id = ZkId(item.category, item.key, item.version)
      rawStore(id, ZkSerialized(item.data))
    }
    def clean(): Future[Done] = {
      client.delete("/", guaranteed = true, deletingChildrenIfNeeded = true).map(_ => Done)
    }
    def setVersion(item: BackupItem): Future[Done] = {
      setStorageVersion(StorageVersion.parseFrom(item.data.toArray))
    }
    Flow[BackupItem]
      .map {
        case item if item.key == Migration.StorageVersionName => () => setVersion(item)
        case item => () => store(item)
      }
      .prepend { Source.single(() => clean()) }
      .mapAsync(1) { _.apply() } // no parallelization: first element needs to be processed before the second
      .toMat(Sink.ignore)(Keep.right)
  }
}
| natemurthy/marathon | src/main/scala/mesosphere/marathon/core/storage/store/impl/zk/ZkPersistenceStore.scala | Scala | apache-2.0 | 11,551 |
// scalac: -Ydelambdafy:method
// Compiler test fixture (run with -Ydelambdafy:method, see header directive):
// exercises lambdas that take/return plain, value-like, and generic classes.
// The code shape is intentional — do not restructure the lambdas.
object Test {

  // Lambdas over class C: read a field, construct, pair up, then read back.
  def testC: Unit = {
    val f1 = (c: C) => c.value
    val f2 = (x: Int) => new C(x)
    val f3 = (c1: C) => (c2: C) => (c1, c2)
    val r1 = f2(2)
    val r2 = f2(2)
    val r3 = f3(r1)(r2)
    val result = f1(r3._2)
    assert(result == 2)
  }

  // Same pattern over class D with a String payload.
  def testD: Unit = {
    val f1 = (c: D) => c.value
    val f2 = (x: String) => new D(x)
    val f3 = (c1: D) => (c2: D) => (c1, c2)
    val r1 = f2("2")
    val r2 = f2("2")
    val r3 = f3(r1)(r2)
    val result = f1(r3._2)
    assert(result == "2")
  }

  // Same pattern over the generic class E[Int].
  def testE: Unit = {
    val f1 = (c: E[Int]) => c.value
    val f2 = (x: Int) => new E(x)
    val f3 = (c1: E[Int]) => (c2: E[Int]) => (c1, c2)
    val r1 = f2(2)
    val r2 = f2(2)
    val r3 = f3(r1)(r2)
    val result = f1(r3._2)
    assert(result == 2)
  }

  def main(args: Array[String]): Unit = {
    testC
    testD
    testE
  }
}
| scala/scala | test/files/run/t8017/value-class-lambda.scala | Scala | apache-2.0 | 894 |
package org.fedoraproject.coprscala
// Connection settings for a Copr instance: the API base URL plus an optional
// credential pair — NOTE(review): presumably (login, token); confirm with callers.
case class CoprConfig(baseurl: String, authentication: Option[(String, String)])
| fedora-infra/copr-scala | src/main/scala/CoprConfig.scala | Scala | apache-2.0 | 118 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.metadata
import java.io.Closeable
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit
import com.github.benmanes.caffeine.cache.{CacheLoader, Caffeine}
import com.typesafe.scalalogging.LazyLogging
import org.locationtech.geomesa.utils.collection.{CloseableIterator, IsSynchronized, MaybeSynchronized, NotSynchronized}
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
import org.locationtech.geomesa.utils.io.WithClose
import scala.util.control.NonFatal
/**
* Backs metadata with a cache to save repeated database reads. Underlying table will be lazily created
* when required.
*
* @tparam T type param
*/
/**
 * Backs metadata with a cache to save repeated database reads. Underlying table will be lazily created
 * when required.
 *
 * Row keys are `<escaped typeName><typeNameSeparator><key>` (see the companion's
 * encodeRow/decodeRow). Cache entries expire after CachedLazyMetadata.Expiry.
 *
 * @tparam T type param
 */
trait CachedLazyMetadata[T] extends GeoMesaMetadata[T] with LazyLogging {

  this: MetadataAdapter =>

  // separator used between type names and keys
  val typeNameSeparator: Char = '~'

  // Serializer used to (de)serialize stored values of T.
  protected def serializer: MetadataSerializer[T]

  // only synchronize if table doesn't exist - otherwise it's ready only and we can avoid synchronization
  private val tableExists: MaybeSynchronized[Boolean] =
    if (checkIfTableExists) { new NotSynchronized(true) } else { new IsSynchronized(false) }

  // cache for our metadata - invalidate every 10 minutes so we keep things current
  private val metaDataCache = {
    val expiry = CachedLazyMetadata.Expiry.toDuration.get.toMillis
    Caffeine.newBuilder().expireAfterWrite(expiry, TimeUnit.MILLISECONDS).build(
      new CacheLoader[(String, String), Option[T]] {
        override def load(k: (String, String)): Option[T] = {
          // Missing table means nothing has been written yet — treat as absent.
          if (tableExists.get) {
            val (typeName, key) = k
            scanValue(encodeRow(typeName, key)).flatMap(b => Option(serializer.deserialize(typeName, key, b)))
          } else {
            None
          }
        }
      }
    )
  }

  // Scans all rows and returns the type names that carry an ATTRIBUTES_KEY entry;
  // undecodable rows are logged and skipped.
  override def getFeatureTypes: Array[String] = {
    if (tableExists.get) {
      WithClose(scanRows(None)) { rows =>
        rows.flatMap { row =>
          try {
            val (name, key) = CachedLazyMetadata.decodeRow(row, typeNameSeparator)
            if (key == GeoMesaMetadata.ATTRIBUTES_KEY) { Iterator.single(name) } else { Iterator.empty }
          } catch {
            case NonFatal(_) =>
              logger.warn(s"Ignoring unexpected row in catalog table: ${new String(row, StandardCharsets.UTF_8)}")
              Iterator.empty
          }
        }.toArray
      }
    } else {
      Array.empty
    }
  }

  // Reads a value; `cache = false` forces a fresh read by invalidating first.
  override def read(typeName: String, key: String, cache: Boolean): Option[T] = {
    if (!cache) {
      metaDataCache.invalidate((typeName, key))
    }
    metaDataCache.get((typeName, key))
  }

  override def insert(typeName: String, key: String, value: T): Unit = insert(typeName, Map(key -> value))

  // Writes the pairs to the table, pre-populating the cache as a side effect.
  override def insert(typeName: String, kvPairs: Map[String, T]): Unit = {
    ensureTableExists()
    val inserts = kvPairs.toSeq.map { case (k, v) =>
      // note: side effect in .map - pre-fetch into the cache
      metaDataCache.put((typeName, k), Option(v))
      (encodeRow(typeName, k), serializer.serialize(typeName, k, v))
    }
    write(inserts)
  }

  override def invalidateCache(typeName: String, key: String): Unit = metaDataCache.invalidate((typeName, key))

  // Removes a single key from the table and the cache (no-op if table absent).
  override def remove(typeName: String, key: String): Unit = {
    if (tableExists.get) {
      delete(encodeRow(typeName, key))
      // also remove from the cache
      metaDataCache.invalidate((typeName, key))
    } else {
      logger.debug(s"Trying to delete '$typeName:$key' but table does not exist")
    }
  }

  // Removes every row for the type, then drops all of its cache entries.
  override def delete(typeName: String): Unit = {
    import scala.collection.JavaConversions._
    if (tableExists.get) {
      // Rows for a type share the prefix produced by an empty key.
      val prefix = encodeRow(typeName, "")
      val rows = scanRows(Some(prefix))
      try {
        val all = rows.toBuffer
        if (all.nonEmpty) {
          delete(all)
        }
      } finally {
        rows.close()
      }
    } else {
      logger.debug(s"Trying to delete type '$typeName' but table does not exist")
    }
    metaDataCache.asMap.keys.foreach(k => if (k._1 == typeName) { metaDataCache.invalidate(k) })
  }

  // Encodes a (typeName, key) pair into a storage row key.
  def encodeRow(typeName: String, key: String): Array[Byte] =
    CachedLazyMetadata.encodeRow(typeName, key, typeNameSeparator)

  // checks that the table is already created, and creates it if not
  def ensureTableExists(): Unit = tableExists.set(true, false, createTable())
}
object CachedLazyMetadata {

  // How long cached metadata entries live before being re-read from the table.
  val Expiry = SystemProperty("geomesa.metadata.expiry", "10 minutes")

  /**
   * Builds the row key `<escaped typeName><separator><key>` as UTF-8 bytes.
   * A literal separator inside the type name is escaped as %U+XXXX so that
   * decodeRow can safely split on the first separator occurrence.
   */
  def encodeRow(typeName: String, key: String, separator: Char): Array[Byte] = {
    val escape = "%%U+%04X".format(separator.toInt)
    val safeName = typeName.replace(separator.toString, escape)
    (safeName + separator + key).getBytes(StandardCharsets.UTF_8)
  }

  /** Inverse of encodeRow: splits on the first separator and un-escapes the type name. */
  def decodeRow(row: Array[Byte], separator: Char): (String, String) = {
    val escape = "%%U+%04X".format(separator.toInt)
    val all = new String(row, StandardCharsets.UTF_8)
    val split = all.indexOf(separator)
    val typeName = all.substring(0, split).replace(escape, separator.toString)
    (typeName, all.substring(split + 1))
  }
}
/**
* Binding for underlying database. Methods should not be synchronized
*/
/**
 * Binding for underlying database. Methods should not be synchronized
 */
trait MetadataAdapter extends Closeable {

  /**
    * Checks if the underlying table exists
    *
    * @return
    */
  protected def checkIfTableExists: Boolean

  /**
    * Creates the underlying table
    */
  protected def createTable(): Unit

  /**
    * Writes row/value pairs
    *
    * @param rows row/values
    */
  protected def write(rows: Seq[(Array[Byte], Array[Byte])]): Unit

  /**
    * Deletes a row
    *
    * @param row row
    */
  // explicit result type: deprecated procedure syntax removed
  protected def delete(row: Array[Byte]): Unit

  /**
    * Deletes multiple rows
    *
    * @param rows rows
    */
  // explicit result type: deprecated procedure syntax removed
  protected def delete(rows: Seq[Array[Byte]]): Unit

  /**
    * Reads a value from the underlying table
    *
    * @param row row
    * @return value, if it exists
    */
  protected def scanValue(row: Array[Byte]): Option[Array[Byte]]

  /**
    * Reads row keys from the underlying table
    *
    * @param prefix row key prefix
    * @return matching row keys (not values)
    */
  protected def scanRows(prefix: Option[Array[Byte]]): CloseableIterator[Array[Byte]]
}
import java.time._
import scala.concurrent._
import scala.concurrent.duration._
import scala.util._
import ExecutionContext.Implicits.global
object Main extends App {

  /** Simulates slow work (printing progress) and returns a random 0-9. */
  def getData1() = {
    for (i <- 1 to 10) {
      Thread.sleep(100)
      print("1...")
    }
    val result = Random.nextInt(10)
    println(result)
    result
  }

  /** Simulates slow work (printing progress) and returns a random multiple of 10. */
  def getData2() = {
    for (i <- 1 to 10) {
      Thread.sleep(100)
      print("2...")
    }
    val result = 10 * Random.nextInt(10)
    println(result)
    result
  }

  // Both futures start running as soon as they are created, so the two
  // computations proceed in parallel.
  val future1 = Future { getData1() }
  val future2 = Future { getData2() }
  val combined = future1.flatMap(n1 => future2.map(n2 => n1 + n2))

  // Block for the actual sum instead of sleeping a fixed 3 seconds and printing
  // the Future object itself — the old code was racy and printed "Future(...)"
  // rather than the combined value.
  println(Await.result(combined, 5.seconds))
}
| yeahnoob/scala-impatient-2e-code | src/ch17/sec05/Futures4.scala | Scala | gpl-3.0 | 698 |
package com.outr.arango.api.model
import io.circe.Json
/**
 * Request body for creating a skiplist index via the ArangoDB index API.
 *
 * @param type        index type discriminator — presumably "skiplist"; confirm against the API docs
 * @param deduplicate optional flag forwarded to the API (semantics per ArangoDB docs)
 * @param fields      attribute paths the index should cover
 * @param sparse      optional sparseness flag forwarded to the API
 * @param unique      optional uniqueness flag forwarded to the API
 */
case class PostAPIIndexSkiplist(`type`: String,
                                deduplicate: Option[Boolean] = None,
                                fields: Option[List[String]] = None,
                                sparse: Option[Boolean] = None,
                                unique: Option[Boolean] = None)
package com.jeffharwell.commoncrawl.warcparser
import com.typesafe.config.ConfigFactory
import java.net.URL
import java.io.File
import java.util.zip.GZIPInputStream
import java.io.BufferedInputStream
import java.io.BufferedReader
import java.io.InputStreamReader
import java.io.FileInputStream
object Main extends App {
  val conf = ConfigFactory.load()

  /*
   * Try with the local WET file bundled in the test resources
   */
  val testwetfilename = conf.getString("general.testwetfile")
  val gzipFile: File = getTheFile(testwetfilename)

  val parser = Parser(new BufferedInputStream(new FileInputStream(gzipFile)))

  // Count the records by folding over the parser's iterator.
  // (The original used a `var records = 0` that was immediately overwritten
  // by the fold result — a val is sufficient.)
  val records: Int = parser.foldLeft(0) { (acc, _) =>
    acc + 1
  }
  println(s"Found $records")

  /**
   * Resolves the named classpath resource to a File, or fails fast if it is missing.
   * Rewritten without the non-local `return` the original used inside the match.
   *
   * @param testwetfilename resource name to look up on the classpath
   * @return the resource as a java.io.File
   */
  def getTheFile(testwetfilename: String): File =
    Option(this.getClass().getClassLoader().getResource(testwetfilename)) match {
      case Some(url) => new File(url.getFile())
      case None      => throw new RuntimeException(s"Didn't find a resource $testwetfilename")
    }
}
| jeffharwell/CommonCrawlScalaTools | warcparser/src/main/scala/com/jeffharwell/commoncrawl/warcparser/main.scala | Scala | mit | 1,150 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package genc
import java.io.File
import java.io.FileWriter
import java.io.BufferedWriter
object CFileOutputPhase extends UnitPhase[CAST.Prog] {

  val name = "C File Output"
  val description = "Output converted C program to the specified file (default leon.c)"

  val optOutputFile = new LeonOptionDef[String] {
    val name = "o"
    val description = "Output file"
    val default = "leon.c"
    val usageRhs = "file"
    val parser = OptionParsers.stringParser
  }

  override val definedOptions: Set[LeonOptionDef[Any]] = Set(optOutputFile)

  /**
   * Pretty-prints the given C AST and writes it to the file selected by the
   * `o` option, creating parent directories as needed.
   * (Explicit `: Unit =` replaces the original's deprecated procedure syntax.)
   */
  def apply(ctx: LeonContext, program: CAST.Prog): Unit = {
    val timer = ctx.timers.genc.print.start()

    // Get the output file name from command line options, or use default
    val outputFile = new File(ctx.findOptionOrDefault(optOutputFile))
    val parent = outputFile.getParentFile()
    try {
      if (parent != null) {
        parent.mkdirs()
      }
    } catch {
      case _: java.io.IOException => ctx.reporter.fatalError("Could not create directory " + parent)
    }

    // Output C code to the file; close the writer even when printing or
    // writing fails (the original leaked the FileWriter on the error path).
    try {
      val out = new BufferedWriter(new FileWriter(outputFile))
      try {
        val p = new CPrinter
        p.print(program)
        out.write(p.toString)
      } finally {
        out.close()
      }
      ctx.reporter.info(s"Output written to $outputFile")
    } catch {
      case _: java.io.IOException => ctx.reporter.fatalError("Could not write on " + outputFile)
    }

    timer.stop()
  }
}
| epfl-lara/leon | src/main/scala/leon/genc/CFileOutputPhase.scala | Scala | gpl-3.0 | 1,518 |
/*******************************************************************************
Copyright (c) 2013-2014, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.builtin
import kr.ac.kaist.jsaf.analysis.cfg.{CFGExpr, CFG, FunctionId}
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.{AccessHelper=>AH}
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
object BuiltinNumber extends ModelData {
// Abstract locations modeling the Number constructor object and Number.prototype.
val ConstLoc = newSystemLoc("NumberConst", Recent)
val ProtoLoc = newSystemLoc("NumberProto", Recent)

// Properties of the Number constructor (ES5 15.7.3), including its numeric
// range constants. "@"-prefixed entries are internal analysis properties.
private val prop_const: List[(String, AbsProperty)] = List(
  ("@class", AbsConstValue(PropValue(AbsString.alpha("Function")))),
  ("@proto", AbsConstValue(PropValue(ObjectValue(Value(FunctionProtoLoc), F, F, F)))),
  ("@extensible", AbsConstValue(PropValue(T))),
  ("@scope", AbsConstValue(PropValueNullTop)),
  ("@function", AbsInternalFunc("Number")),
  ("@construct", AbsInternalFunc("Number.constructor")),
  ("@hasinstance", AbsConstValue(PropValueNullTop)),
  ("prototype", AbsConstValue(PropValue(ObjectValue(Value(ProtoLoc), F, F, F)))),
  ("length", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(1), F, F, F)))),
  ("MAX_VALUE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(Double.MaxValue), F, F, F)))),
  // java.lang.Double.MIN_VALUE is the smallest *positive* double, matching
  // ECMAScript Number.MIN_VALUE (Scala's Double.MinValue is the most negative
  // double and would be wrong here).
  ("MIN_VALUE", AbsConstValue(PropValue(ObjectValue(AbsNumber.alpha(java.lang.Double.MIN_VALUE), F, F, F)))),
  ("NaN", AbsConstValue(PropValue(ObjectValue(NaN, F, F, F)))),
  ("NEGATIVE_INFINITY", AbsConstValue(PropValue(ObjectValue(NegInf, F, F, F)))),
  ("POSITIVE_INFINITY", AbsConstValue(PropValue(ObjectValue(PosInf, F, F, F))))
)

// Properties of Number.prototype (ES5 15.7.4); built-in methods are modeled
// as AbsBuiltinFunc entries whose semantics live in the maps below.
private val prop_proto: List[(String, AbsProperty)] = List(
  ("@class", AbsConstValue(PropValue(AbsString.alpha("Number")))),
  ("@proto", AbsConstValue(PropValue(ObjectValue(ObjProtoLoc, F, F, F)))),
  ("@extensible", AbsConstValue(PropValue(BoolTrue))),
  ("@primitive", AbsConstValue(PropValue(AbsNumber.alpha(+0)))),
  ("constructor", AbsConstValue(PropValue(ObjectValue(ConstLoc, F, F, F)))),
  ("toString", AbsBuiltinFunc("Number.prototype.toString", 0)),
  ("toLocaleString", AbsBuiltinFunc("Number.prototype.toLocaleString", 0)),
  ("valueOf", AbsBuiltinFunc("Number.prototype.valueOf", 0)),
  ("toFixed", AbsBuiltinFunc("Number.prototype.toFixed", 1)),
  ("toExponential", AbsBuiltinFunc("Number.prototype.toExponential", 1)),
  ("toPrecision", AbsBuiltinFunc("Number.prototype.toPrecision", 1))
)
/** Seed (location, properties) pairs used to populate the initial heap. */
def getInitList(): List[(Loc, List[(String, AbsProperty)])] =
  List(ConstLoc -> prop_const, ProtoLoc -> prop_proto)
/**
 * Abstract transfer functions for the Number built-ins (ES5, section 15.7).
 * Each SemanticFun maps the abstract (heap, context) to the post-state plus
 * the accumulated exception heap/context.
 */
def getSemanticMap(): Map[String, SemanticFun] = {
  Map(
    "Number" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        // 15.7.1.1 Number( [value] )
        val v_1 = getArgValue(h, ctx, args, "0")
        val arg_length = getArgValue(h, ctx, args, "length")._1._4
        // If value is not supplied, +0 is returned.
        val value_1 =
          if (AbsNumber.alpha(0) <= arg_length) Value(AbsNumber.alpha(0))
          else ValueBot
        // Returns a Number value computed by ToNumber(value).
        val value_2 =
          if (AbsNumber.alpha(0) != arg_length && !(arg_length <= NumBot)) Value(Helper.toNumber(Helper.toPrimitive_better(h, v_1)))
          else ValueBot
        // Join of the "no argument" and "with argument" cases.
        val value = value_1 + value_2
        ((Helper.ReturnStore(h, value), ctx), (he, ctxe))
      }),
    "Number.constructor" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        // 15.7.2.1 new Number( [value] )
        val v_1 = getArgValue(h, ctx, args, "0")
        val arg_length = getArgValue(h, ctx, args, "length")._1._4
        // [[PrimitiveValue]]: +0 when no argument, ToNumber(value) otherwise.
        val value_1 =
          if (AbsNumber.alpha(0) <= arg_length) AbsNumber.alpha(0)
          else NumBot
        val value_2 =
          if (AbsNumber.alpha(0) != arg_length && !(arg_length <= NumBot)) Helper.toNumber(Helper.toPrimitive_better(h, v_1))
          else NumBot
        val primitive_value = value_1 + value_2
        // Initialize every possible `this` location as a fresh Number wrapper object.
        val h_1 = lset_this.foldLeft(h)((_h, l) => _h.update(l, Helper.NewNumber(primitive_value)))
        if (primitive_value </ NumBot)
          ((Helper.ReturnStore(h_1, Value(lset_this)), ctx), (he, ctxe))
        else
          ((HeapBot, ContextBot), (he, ctxe))
      }),
    "Number.prototype.toString" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
        // Restrict `this` to Number wrapper objects; others raise TypeError below.
        val lset_num = lset_this.filter((l) => AbsString.alpha("Number") <= h(l)("@class")._2._1._5)
        val es = notGenericMethod(h, lset_this, "Number")
        val (v, es2) =
          n_arglen.getAbsCase match {
            case AbsBot => (ValueBot, ExceptionBot)
            case _ => AbsNumber.getUIntSingle(n_arglen) match {
              // No radix argument: default decimal string conversion.
              case Some(n_arglen) if n_arglen == 0 =>
                (Value(Helper.defaultToString(h, lset_num)), ExceptionBot)
              // With a radix argument: RangeError when radix may be outside [2, 36] (15.7.4.2).
              case Some(n_arglen) if n_arglen > 0 => {
                val es =
                  if (BoolTrue <= Operator.bopGreater(getArgValue(h, ctx, args, "0"), Value(AbsNumber.alpha(36)))._1._3)
                    Set[Exception](RangeError)
                  else if (BoolTrue <= Operator.bopLess(getArgValue(h, ctx, args, "0"), Value(AbsNumber.alpha(2)))._1._3)
                    Set[Exception](RangeError)
                  else
                    ExceptionBot
                (Value(StrTop), es)
              }
              // Unknown argument count: conservatively any string plus a possible RangeError.
              case _ => {
                (Value(StrTop), Set[Exception](RangeError))
              }
            }
          }
        val (h_e, ctx_e) = Helper.RaiseException(h, ctx, es ++ es2)
        if (v </ ValueBot)
          ((Helper.ReturnStore(h, v), ctx), (he + h_e, ctxe + ctx_e))
        else
          ((HeapBot, ContextBot), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toLocaleString" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        // Modeled as plain ToString of the wrapper's [[PrimitiveValue]].
        val v_prim = lset_this.foldLeft(ValueBot)((_v, _l) => _v + h(_l)("@primitive")._2)
        val v = Value(Helper.toString(v_prim._1))
        if (v </ ValueBot)
          ((Helper.ReturnStore(h, v), ctx), (he, ctxe))
        else
          ((HeapBot, ContextBot), (he, ctxe))
      }),
    "Number.prototype.valueOf" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val es = notGenericMethod(h, lset_this, "Number")
        val lset_num = lset_this.filter((l) => AbsString.alpha("Number") <= h(l)("@class")._2._1._5)
        // Join the [[PrimitiveValue]] numbers over all Number wrapper locations.
        val n = lset_num.foldLeft[AbsNumber](NumBot)((_b, l) => _b + h(l)("@primitive")._2._1._4)
        val (h_1, c_1) =
          if (n == NumBot)
            (HeapBot, ContextBot)
          else
            (Helper.ReturnStore(h, Value(n)), ctx)
        val (h_e, ctx_e) = Helper.RaiseException(h, ctx, es)
        ((h_1, c_1), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toFixed" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        // An undefined fractionDigits argument behaves like 0 (15.7.4.5).
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, AbsNumber.alpha(0) + v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        // RangeError when fractionDigits may lie outside [0, 20].
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        val (h_e, ctx_e) = Helper.RaiseException(h, ctx, es)
        ((Helper.ReturnStore(h, Value(StrTop)), ctx), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toExponential" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        // Drop undefined from the argument; unlike toFixed, no default digit count is added.
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        // RangeError when fractionDigits may lie outside [0, 20] (15.7.4.6).
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        val (h_e, ctx_e) = Helper.RaiseException(h, ctx, es)
        ((Helper.ReturnStore(h, Value(StrTop)), ctx), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toPrecision" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        // RangeError when precision may lie outside [1, 21] (15.7.4.7).
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(21)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(1)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        val (h_e, ctx_e) = Helper.RaiseException(h, ctx, es)
        ((Helper.ReturnStore(h, Value(StrTop)), ctx), (he + h_e, ctxe + ctx_e))
      })
  )
}
/**
 * Pre-analysis variants of the Number semantics. These mirror getSemanticMap
 * but use PreHelper and resolve the pure-local location from the control
 * point (cfg.getPureLocal(cp)) instead of the single global pure-local.
 */
def getPreSemanticMap(): Map[String, SemanticFun] = {
  Map(
    "Number" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        // 15.7.1.1 Number( [value] )
        val v_1 = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
        val arg_length = getArgValue_pre(h, ctx, args, "length", PureLocalLoc)._1._4
        // If value is not supplied, +0 is returned.
        val value_1 =
          if (AbsNumber.alpha(0) <= arg_length) Value(AbsNumber.alpha(0))
          else ValueBot
        // Returns a Number value computed by ToNumber(value).
        val value_2 =
          if (AbsNumber.alpha(0) != arg_length && !(arg_length <= NumBot)) Value(PreHelper.toNumber(PreHelper.toPrimitive(v_1)))
          else ValueBot
        val value = value_1 + value_2
        ((PreHelper.ReturnStore(h, PureLocalLoc, value), ctx), (he, ctxe))
      }),
    "Number.constructor" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val lset_this = h(PureLocalLoc)("@this")._2._2
        // 15.7.2.1 new Number( [value] )
        val v_1 = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
        val arg_length = getArgValue_pre(h, ctx, args, "length", PureLocalLoc)._1._4
        // [[PrimitiveValue]]
        val value_1 =
          if (AbsNumber.alpha(0) <= arg_length) AbsNumber.alpha(0)
          else NumBot
        val value_2 =
          if (AbsNumber.alpha(0) != arg_length && !(arg_length <= NumBot)) PreHelper.toNumber(PreHelper.toPrimitive(v_1))
          else NumBot
        val primitive_value = value_1 + value_2
        val h_1 = lset_this.foldLeft(h)((_h, l) => _h.update(l, PreHelper.NewNumber(primitive_value)))
        if (primitive_value </ NumBot)
          ((PreHelper.ReturnStore(h_1, PureLocalLoc, Value(lset_this)), ctx), (he, ctxe))
        else
          // Pre-analysis keeps the input state rather than going to bottom.
          ((h, ctx), (he, ctxe))
      }),
    "Number.prototype.toString" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val lset_this = h(PureLocalLoc)("@this")._2._2
        val n_arglen = Operator.ToUInt32(getArgValue_pre(h, ctx, args, "length", PureLocalLoc))
        val lset_num = lset_this.filter((l) => AbsString.alpha("Number") <= h(l)("@class")._2._1._5)
        val v_prim = lset_num.foldLeft(ValueBot)((_v, _l) => _v + h(_l)("@primitive")._2)
        val es = notGenericMethod(h, lset_this, "Number")
        val (v, es2) =
          n_arglen.getAbsCase match {
            case AbsBot => (ValueBot, ExceptionBot)
            case _ => AbsNumber.getUIntSingle(n_arglen) match {
              case Some(n_arglen) if n_arglen == 0 =>
                (Value(PreHelper.toString(v_prim._1)), ExceptionBot)
              // Radix argument present: RangeError when it may be outside [2, 36].
              case Some(n_arglen) if n_arglen > 0 => {
                val es =
                  if (BoolTrue <= Operator.bopGreater(getArgValue_pre(h, ctx, args, "0", PureLocalLoc), Value(AbsNumber.alpha(36)))._1._3)
                    Set[Exception](RangeError)
                  else if (BoolTrue <= Operator.bopLess(getArgValue_pre(h, ctx, args, "0", PureLocalLoc), Value(AbsNumber.alpha(2)))._1._3)
                    Set[Exception](RangeError)
                  else
                    ExceptionBot
                (Value(StrTop), es)
              }
              case _ => {
                (Value(StrTop), Set[Exception](RangeError))
              }
            }
          }
        val (h_e, ctx_e) = PreHelper.RaiseException(h, ctx, PureLocalLoc, es ++ es2)
        if (v </ ValueBot)
          ((PreHelper.ReturnStore(h_e, PureLocalLoc, v), ctx_e), (he + h_e, ctxe + ctx_e))
        else
          ((h, ctx), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toLocaleString" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val lset_this = h(PureLocalLoc)("@this")._2._2
        val v_prim = lset_this.foldLeft(ValueBot)((_v, _l) => _v + h(_l)("@primitive")._2)
        val v = Value(PreHelper.toString(v_prim._1))
        if (v </ ValueBot)
          ((PreHelper.ReturnStore(h, PureLocalLoc, v), ctx), (he, ctxe))
        else
          ((h, ctx), (he, ctxe))
      }),
    "Number.prototype.valueOf" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val lset_this = h(PureLocalLoc)("@this")._2._2
        val es = notGenericMethod(h, lset_this, "Number")
        val lset_num = lset_this.filter((l) => AbsString.alpha("Number") <= h(l)("@class")._2._1._5)
        val n = lset_num.foldLeft[AbsNumber](NumBot)((_b, l) => _b + h(l)("@primitive")._2._1._4)
        val (h_1, c_1) =
          if (n == NumBot)
            (h, ctx)
          else
            (PreHelper.ReturnStore(h, PureLocalLoc, Value(n)), ctx)
        val (h_e, ctx_e) = PreHelper.RaiseException(h_1, c_1, PureLocalLoc, es)
        ((h_1, c_1), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toFixed" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val lset_this = h(PureLocalLoc)("@this")._2._2
        val v_1 = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
        // Undefined fractionDigits behaves like 0.
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, AbsNumber.alpha(0) + v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        // RangeError when fractionDigits may lie outside [0, 20].
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        val (h_e, ctx_e) = PreHelper.RaiseException(h, ctx, PureLocalLoc, es)
        ((PreHelper.ReturnStore(h_e, PureLocalLoc, Value(StrTop)), ctx_e), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toExponential" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val v_1 = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        // RangeError when fractionDigits may lie outside [0, 20].
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        val (h_e, ctx_e) = PreHelper.RaiseException(h, ctx, PureLocalLoc, es)
        ((PreHelper.ReturnStore(h_e, PureLocalLoc, Value(StrTop)), ctx_e), (he + h_e, ctxe + ctx_e))
      }),
    "Number.prototype.toPrecision" -> (
      (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
        val PureLocalLoc = cfg.getPureLocal(cp)
        val v_1 = getArgValue_pre(h, ctx, args, "0", PureLocalLoc)
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        // RangeError when precision may lie outside [1, 21].
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(21)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(1)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        val (h_e, ctx_e) = PreHelper.RaiseException(h, ctx, PureLocalLoc, es)
        ((PreHelper.ReturnStore(h_e, PureLocalLoc, Value(StrTop)), ctx_e), (he + h_e, ctxe + ctx_e))
      })
  )
}
/**
 * Definition sets for access analysis: for each built-in, the set of
 * (location, property) pairs the function may *write*.
 */
def getDefMap(): Map[String, AccessFun] = {
  Map(
    "Number" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        // Only the return slot of the pure local is written.
        LPSet((SinglePureLocalLoc, "@return"))
      }),
    "Number.constructor" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        // Every property NewNumber initializes, on every possible `this` location.
        val LP1 = lset_this.foldLeft(LPBot)((lpset, l) =>
          AH.NewNumber_def.foldLeft(lpset)((_lpset, prop) => _lpset +(l, prop)))
        LP1 +(SinglePureLocalLoc, "@return")
      }),
    "Number.prototype.toString" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
        val es1 = notGenericMethod(h, lset_this, "Number")
        // Mirrors the exception logic of the concrete semantics (radix in [2, 36]).
        val es2 =
          n_arglen.getAbsCase match {
            case AbsBot => ExceptionBot
            case _ => AbsNumber.getUIntSingle(n_arglen) match {
              case Some(n_arglen) if n_arglen == 0 => ExceptionBot
              case Some(n_arglen) if n_arglen > 0 => {
                if (BoolTrue <= Operator.bopGreater(getArgValue(h, ctx, args, "0"), Value(AbsNumber.alpha(36)))._1._3)
                  Set[Exception](RangeError)
                else if (BoolTrue <= Operator.bopLess(getArgValue(h, ctx, args, "0"), Value(AbsNumber.alpha(2)))._1._3)
                  Set[Exception](RangeError)
                else
                  ExceptionBot
              }
              case _ => Set[Exception](RangeError)
            }
          }
        AH.RaiseException_def(es1 ++ es2) +(SinglePureLocalLoc, "@return")
      }),
    "Number.prototype.toLocaleString" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        LPSet((SinglePureLocalLoc, "@return"))
      }),
    "Number.prototype.valueOf" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val es = notGenericMethod(h, lset_this, "Number")
        AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return")
      }),
    "Number.prototype.toFixed" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, AbsNumber.alpha(0) + v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return")
      }),
    "Number.prototype.toExponential" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return")
      }),
    "Number.prototype.toPrecision" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(21)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(1)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return")
      })
  )
}
/**
 * Use sets for access analysis: for each built-in, the set of
 * (location, property) pairs the function may *read*.
 */
def getUseMap(): Map[String, AccessFun] = {
  Map(
    "Number" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        getArgValue_use(h, ctx, args, "0") ++
          getArgValue_use(h, ctx, args, "length") +(SinglePureLocalLoc, "@return")
      }),
    "Number.constructor" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        /* may def */
        val LP1 = lset_this.foldLeft(LPBot)((lpset, l) =>
          AH.NewNumber_def.foldLeft(lpset)((_lpset, prop) => _lpset +(l, prop)))
        LP1 ++ getArgValue_use(h, ctx, args, "0") ++
          getArgValue_use(h, ctx, args, "length") +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      }),
    "Number.prototype.toString" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val n_arglen = Operator.ToUInt32(getArgValue(h, ctx, args, "length"))
        val LP1 = getArgValue_use(h, ctx, args, "length")
        val es1 = notGenericMethod(h, lset_this, "Number")
        val lset_num = lset_this.filter((l) => AbsString.alpha("Number") <= h(l)("@class")._2._1._5)
        val LP2 = lset_this.foldLeft(LPBot)((lpset, l) => lpset +(l, "@class"))
        val v_prim = lset_num.foldLeft(ValueBot)((_v, _l) => _v + h(_l)("@primitive")._2)
        val LP3 = lset_num.foldLeft(LPBot)((lpset, l) => lpset +(l, "@primitive"))
        val (es2, lpset4) =
          n_arglen.getAbsCase match {
            case AbsBot => (ExceptionBot, LPBot)
            case _ => AbsNumber.getUIntSingle(n_arglen) match {
              case Some(n_arglen) if n_arglen == 0 => (ExceptionBot, LPBot)
              case Some(n_arglen) if n_arglen > 0 => {
                if (BoolTrue <= Operator.bopGreater(getArgValue(h, ctx, args, "0"), Value(AbsNumber.alpha(36)))._1._3)
                  (Set[Exception](RangeError), getArgValue_use(h, ctx, args, "0"))
                else if (BoolTrue <= Operator.bopLess(getArgValue(h, ctx, args, "0"), Value(AbsNumber.alpha(2)))._1._3)
                  (Set[Exception](RangeError), getArgValue_use(h, ctx, args, "0"))
                else
                  (ExceptionBot, getArgValue_use(h, ctx, args, "0"))
              }
              case _ => (Set[Exception](RangeError), getArgValue_use(h, ctx, args, "0"))
            }
          }
        val LP5 = AH.RaiseException_use(es1 ++ es2)
        LP1 ++ LP2 ++ LP3 ++ lpset4 ++ LP5 +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      }),
    "Number.prototype.toLocaleString" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val LP1 = lset_this.foldLeft(LPBot)((lpset, l) => lpset +(l, "@primitive"))
        LP1 ++ getArgValue_use(h, ctx, args, "length") +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      }),
    "Number.prototype.valueOf" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val lset_this = h(SinglePureLocalLoc)("@this")._2._2
        val lset_num = lset_this.filter((l) => AbsString.alpha("Number") <= h(l)("@class")._2._1._5)
        val es = notGenericMethod(h, lset_this, "Number")
        val LP1 = lset_this.foldLeft(LPBot)((lpset, l) => lpset +(l, "@class"))
        val LP2 = lset_num.foldLeft(LPBot)((lpset, l) => lpset +(l, "@primitive"))
        LP1 ++ LP2 ++ AH.RaiseException_use(es) +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      }),
    "Number.prototype.toFixed" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, AbsNumber.alpha(0) + v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        // NOTE(review): this *use* map calls AH.RaiseException_def, whereas the
        // toString/valueOf entries above call AH.RaiseException_use — looks like
        // a copy-paste from getDefMap; confirm whether _use was intended here.
        getArgValue_use(h, ctx, args, "0") ++ AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      }),
    "Number.prototype.toExponential" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(20)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(0)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        // NOTE(review): same RaiseException_def-in-a-use-map question as toFixed above.
        getArgValue_use(h, ctx, args, "0") ++ AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      }),
    "Number.prototype.toPrecision" -> (
      (h: Heap, ctx: Context, cfg: CFG, fun: String, args: CFGExpr, fid: FunctionId) => {
        val v_1 = getArgValue(h, ctx, args, "0")
        val v_2 =
          if (UndefTop <= v_1._1._1)
            Value(PValue(UndefBot, v_1._1._2, v_1._1._3, v_1._1._4, v_1._1._5), v_1._2)
          else
            v_1
        val es =
          if (BoolTrue <= Operator.bopGreater(v_2, Value(AbsNumber.alpha(21)))._1._3)
            Set[Exception](RangeError)
          else if (BoolTrue <= Operator.bopLess(v_2, Value(AbsNumber.alpha(1)))._1._3)
            Set[Exception](RangeError)
          else
            ExceptionBot
        // NOTE(review): same RaiseException_def-in-a-use-map question as toFixed above.
        getArgValue_use(h, ctx, args, "0") ++ AH.RaiseException_def(es) +(SinglePureLocalLoc, "@return") +(SinglePureLocalLoc, "@this")
      })
  )
}
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/analysis/typing/models/builtin/BuiltinNumber.scala | Scala | bsd-3-clause | 30,646 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset to code snippets matching specific criteria and retrieves a sample of them, giving a quick, surface-level preview of the dataset's contents.