code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
// Play 2.1 Scala-template output for app/views/feedback/feedbackIndex.scala.html.
// GENERATED CODE — do not edit by hand; regenerate from the .scala.html template.
// (Only whitespace between statements was re-flowed; all tokens, including the
// contents of the raw-format string literals, are unchanged.)
package views.html.feedback

import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import java.lang._
import java.util._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.api.i18n._
import play.core.j.PlayMagicForJava._
import play.mvc._
import play.data._
import play.api.data.Field
import play.mvc.Http.Context.Implicit._
import views.html._

/**/
object feedbackIndex extends BaseScalaTemplate[play.api.templates.Html,Format[play.api.templates.Html]](play.api.templates.HtmlFormat) with play.api.templates.Template3[com.mnt.core.helper.SearchContext,dto.MenuBar,models.user.User,play.api.templates.Html] {

  /**/
  // Renders the feedback index page inside the `main` layout, with the menu
  // bar for `_menuContext`/`user` and a search context set up for "add".
  def apply/*1.2*/(context:com.mnt.core.helper.SearchContext,_menuContext: dto.MenuBar,user : models.user.User):play.api.templates.Html = {
    _display_ {
      Seq[Any](format.raw/*1.95*/(""" """),_display_(Seq[Any](/*2.2*/main("Feedback")/*2.18*/ {_display_(Seq[Any](format.raw/*2.20*/(""" """),_display_(Seq[Any](/*3.6*/menuContext(_menuContext,user))),format.raw/*3.36*/(""" """),_display_(Seq[Any](/*4.6*/searchContext(context,null,"add"))),format.raw/*4.39*/(""" """)))})),format.raw/*5.2*/(""" """))}
  }

  // Entry point used by the Play runtime and Java callers.
  def render(context:com.mnt.core.helper.SearchContext,_menuContext:dto.MenuBar,user:models.user.User): play.api.templates.Html = apply(context,_menuContext,user)

  // Function form of the template, for composition.
  def f:((com.mnt.core.helper.SearchContext,dto.MenuBar,models.user.User) => play.api.templates.Html) = (context,_menuContext,user) => apply(context,_menuContext,user)

  def ref: this.type = this

}

/*
    -- GENERATED --
    DATE: Mon Apr 21 14:16:21 EDT 2014
    SOURCE: /Users/Secret/Technology/play-2.1.0/Time_Trotter/app/views/feedback/feedbackIndex.scala.html
    HASH: 35b74d239bcf12ca304c551c36e26bfe25fa7a51
    MATRIX: 796->1|966->94|1003->97|1027->113|1066->115|1107->122|1158->152|1199->159|1253->192|1286->195
    LINES: 26->1|29->1|30->2|30->2|30->2|31->3|31->3|32->4|32->4|33->5
    -- GENERATED --
*/
paperlotus/Time-Trotter
target/scala-2.10/src_managed/main/views/html/feedback/feedbackIndex.template.scala
Scala
apache-2.0
2,262
package dzufferey.utils

object Ratio {

  /** Greatest common divisor of |i| and |j|.
    * Returns 1 for gcd(0, 0) so normalisation never divides by zero. */
  def gcd(i : Long, j : Long) : Long = {
    @annotation.tailrec
    def gcd1(i : Long, j : Long) : Long = if (j == 0) i else gcd1(j, i % j)
    if (i.abs == 0 && j.abs == 0) 1 //TODO better
    else if (i.abs == 0) j.abs
    else if (j.abs == 0) i.abs
    else gcd1(i.abs, j.abs)
  }

  val zero = new Ratio(0, 1)
  val one  = new Ratio(1, 1)
  val mone = new Ratio(-1, 1)

  /** Larger of the two ratios. */
  def max(a: Ratio, b: Ratio) = if (a > b) a else b
}

object RatioImplicits {
  import scala.language.implicitConversions
  implicit def i2r(i: Int): Ratio = new Ratio(i, 1L)
  implicit def l2r(l: Long): Ratio = new Ratio(l, 1L)
}

//TODO those operations are prone to overflow
/** Rational number kept in normalised form: gcd(|n|, d) == 1, d >= 0,
  * sign carried by the numerator. */
class Ratio(_num: Long, _denom: Long) extends Ordered[Ratio] {

  def this(n: Long) = this(n, 1)
  def this(n: Int) = this(n, 1)
  def this() = this(0, 1)

  // Normalise once at construction; the assert guards against overflow in
  // the sign/gcd arithmetic silently changing the value.
  val (n, d) = {
    val gcd = Ratio.gcd(_num.abs, _denom.abs)
    val sign = if (_num < 0L ^ _denom < 0L) -1 else 1
    val _num2 = sign * _num.abs / gcd
    val _denom2 = _denom.abs / gcd
    assert(_num * _denom2 == _num2 * _denom)
    (_num2, _denom2)
  }

  def num = n
  def denom = d

  def toDouble = num.toDouble / denom.toDouble
  def abs = if (num < 0) new Ratio(-num, denom) else this

  def *(r : Ratio) = new Ratio(num * r.num, denom * r.denom)
  def +(r : Ratio) = new Ratio(num * r.denom + r.num * denom, denom * r.denom)
  def -(r : Ratio) = new Ratio(num * r.denom - r.num * denom, denom * r.denom)
  def /(r : Ratio) = new Ratio(num * r.denom, denom * r.num)
  def %(r : Ratio) = new Ratio((num * r.denom) % (denom * r.num), denom * r.denom)

  def *(l : Long) = new Ratio(num * l, denom)
  def +(l : Long) = new Ratio(num + l * denom, denom)
  def -(l : Long) = new Ratio(num - l * denom, denom)
  def /(l : Long) = new Ratio(num, denom * l)
  def %(l : Long) = new Ratio(num % (denom * l), denom)

  override def <(r : Ratio) : Boolean = num * r.denom < r.num * denom
  override def <=(r : Ratio) : Boolean = num * r.denom <= r.num * denom
  override def >(r : Ratio) : Boolean = num * r.denom > r.num * denom
  override def >=(r : Ratio) : Boolean = num * r.denom >= r.num * denom

  // Typed overloads kept for source compatibility with existing callers.
  def ==(r : Ratio) : Boolean = num * r.denom == r.num * denom
  def !=(r : Ratio) : Boolean = num * r.denom != r.num * denom

  // FIX: the typed ==/!= overloads above do not affect universal equality, so
  // `Any`-typed comparisons and hash-based collections previously fell back to
  // reference identity. Instances are normalised, so structural equality is
  // simply (n, d) equality; hashCode is kept consistent with equals.
  override def equals(other: Any): Boolean = other match {
    case r: Ratio => n == r.n && d == r.d
    case _ => false
  }
  override def hashCode: Int = (n, d).hashCode

  override def compare(r : Ratio) : Int = (num * r.denom) compare (r.num * denom)
  override def toString = s"$num/$denom"

  def isWhole = denom == 1L
  /** Integral value; only valid when `isWhole`. */
  def toLong = { assert(isWhole); num }
}
dzufferey/misc-scala-utils
src/main/scala/dzufferey/utils/Ratio.scala
Scala
apache-2.0
2,470
import sbt._
import sbt.Keys._
import android.Dependencies.LibraryProject
import android.Keys._

/** sbt build for the android-sdk-plugin multi-project test: two Android apps
  * (`guidemate`, `geophon`) sharing one apklib library project. */
object MyProjectBuild extends Build {

  // meta project
  // Root aggregates everything and rewires `install` so installing the root
  // installs both apps.
  lazy val root = Project(id = "gmroot", base = file(".")).settings(
    android.Plugin.androidCommands :+
    (install <<= ( // install all apps
      install in (guidemate, Android),
      install in (geophon, Android)) map { (_,_) => () }): _*
    //:+
  ).aggregate(guidemate, geophon, guidemate_lib)

  // android application project
  lazy val guidemate = Project(id = "app", base = file("app")).settings(
    appSettings:_*).dependsOn(guidemate_lib)

  lazy val geophon = Project(id = "app2", base = file("app2")).settings(
    appSettings:_*).dependsOn(guidemate_lib)

  // Shared apklib with its test/runtime dependencies.
  // NOTE(review): declared as a strict `val` unlike the lazy projects above —
  // presumably safe because nothing above forces it at init; confirm.
  val guidemate_lib = Project(id = "lib", base = file("lib-with-resources")).settings(
    android.Plugin.androidBuildApklib: _*)
    .settings(libraryDependencies ++= Seq(
      "org.scalatest" % "scalatest_2.10" % "1.9.1" % "test",
      "com.pivotallabs" % "robolectric" % "1.1" % "test",
      "junit" % "junit" % "4.10" % "test",
      "commons-io" % "commons-io" % "2.1",
      "com.javadocmd" % "simplelatlng" % "1.0.0",
      "org.joda" % "joda-convert" % "1.2",
      "joda-time" % "joda-time" % "2.0",
      "commons-lang" % "commons-lang" % "2.6",
      "org.osmdroid" % "osmdroid-android" % "3.0.10",
      "org.slf4j" % "slf4j-simple" % "1.7.5"))

  // Settings shared by both apps: build against the library project, target
  // android-17, run ProGuard (also in debug builds) with all warnings off, and
  // drop duplicate META-INF entries when packaging.
  lazy val appSettings = android.Plugin.androidBuild(guidemate_lib) ++ List(
    localProjects in Android += LibraryProject(guidemate_lib.base),
    platformTarget in Android := "android-17",
    useProguard in Android := true,
    useProguardInDebug in Android := true,
    proguardScala in Android := true,
    proguardOptions in Android += "-dontwarn **",
    packagingOptions in Android := PackagingOptions(excludes = Seq(
      "META-INF/LICENSE.txt",
      "META-INF/NOTICE.txt")))
}
wpc009/android-sdk-plugin
sbt-test/android-sdk-plugin/multiproject-lib-with-resources/project/build.scala
Scala
bsd-3-clause
2,096
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.analytics.bigdl.dllib.nn.ops

import com.intel.analytics.bigdl.dllib.nn.Graph
import com.intel.analytics.bigdl.dllib.nn.tf.Const
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.T
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest

/** Serialization round-trip test for the TensorArray scatter/gather ops:
  * builds a dynamic graph that scatters a 3x4 tensor into a TensorArray,
  * gathers it back, then closes the array. */
class TensorArrayScatterSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    import com.intel.analytics.bigdl.dllib.nn.tf._
    // TensorArray plus constant data (3x4 random) and indices (rows 0..2).
    val tensorArray = new TensorArrayCreator[Float, Float]().inputs()
    val data = Const[Float, Float](Tensor[Float](3, 4).rand()).inputs()
    val indices = Const[Float, Int](Tensor[Int](T(0, 1, 2))).inputs()
    val scatter = new TensorArrayScatter[Float, Float]().inputs((tensorArray, 1),
      (indices, 1), (data, 1))
    // Control dependencies enforce scatter -> gather -> close ordering,
    // since the ops only share the tensor-array handle, not data edges.
    val ctr = new com.intel.analytics.bigdl.dllib.nn.tf.ControlDependency[Float]().inputs(scatter)
    val gather = new TensorArrayGather[Float, Float]().inputs((tensorArray, 1),
      (indices, 1), (ctr, 1))
    val ctr2 = new com.intel.analytics.bigdl.dllib.nn.tf.ControlDependency[Float]().inputs(gather)
    val close = new TensorArrayClose[Float]().inputs((tensorArray, 1), (ctr2, 1))
    val model = Graph.dynamic[Float](Array(tensorArray), Array(gather, close))
    // Serialize/deserialize the whole graph; the listed classes are the ones
    // whose (de)serializers this test is meant to cover.
    runSerializationTestWithMultiClass(model, Tensor.scalar[Int](10), Array(
      tensorArray.element.getClass.asInstanceOf[Class[_]],
      scatter.element.getClass.asInstanceOf[Class[_]],
      gather.element.getClass.asInstanceOf[Class[_]],
      close.element.getClass.asInstanceOf[Class[_]]
    ))
  }
}
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/ops/TensorArrayScatterSpec.scala
Scala
apache-2.0
2,188
package controllers

import play.api.data.Form
import play.api.mvc._, Results._

import lila.api.Context
import lila.app._
import lila.common.LilaCookie
import views._

/** User-preference endpoints: full preference form plus whitelisted
  * single-value setters that are persisted server-side and mirrored
  * into a session cookie. */
object Pref extends LilaController {

  private def api = Env.pref.api
  private def forms = Env.pref.forms

  /** GET: renders the account preference form for the signed-in user. */
  def form = Auth { implicit ctx =>
    me =>
      forms prefOf me map { form =>
        Ok(html.account.pref(me, form))
      }
  }

  /** POST: validates the whole preference form; re-renders it on error,
    * otherwise saves and answers "saved". */
  def formApply = AuthBody { implicit ctx =>
    me =>
      implicit val req = ctx.body
      FormFuResult(forms.pref) { err =>
        fuccess(html.account.pref(me, err))
      } { data =>
        api getPref me flatMap { pref =>
          api.setPref(data(pref), notifyChange = true)
        } inject Ok("saved")
      }
  }

  /** POST: sets one named preference. Names not in `setters` fall through
    * to the `??` zero result (no-op response). */
  def set(name: String) = OpenBody { implicit ctx =>
    implicit val req = ctx.body
    (setters get name) ?? {
      case (form, fn) => FormResult(form) { v =>
        fn(v, ctx) map { Ok(()) withCookies _ }
      }
    }
  }

  /** Persists a per-user tag value. */
  def saveTag(name: String, value: String) = Auth { implicit ctx =>
    me =>
      api.saveTag(me, name, value)
  }

  // Whitelist of settable preferences: form validator + persistence function.
  private lazy val setters = Map(
    "theme" -> (forms.theme -> save("theme") _),
    "pieceSet" -> (forms.pieceSet -> save("pieceSet") _),
    "theme3d" -> (forms.theme3d -> save("theme3d") _),
    "pieceSet3d" -> (forms.pieceSet3d -> save("pieceSet3d") _),
    "bg" -> (forms.bg -> save("bg") _),
    "is3d" -> (forms.is3d -> save("is3d") _))

  // Persists the value for logged-in users (`??` skips persistence when
  // anonymous), then always writes it into the session cookie.
  private def save(name: String)(value: String, ctx: Context): Fu[Cookie] =
    ctx.me ?? {
      api.setPrefString(_, name, value, notifyChange = false)
    } inject LilaCookie.session(name, value)(ctx.req)
}
danilovsergey/i-bur
app/controllers/Pref.scala
Scala
mit
1,646
package org.randi3.edc.model.openClinica

import org.randi3.model.criterion.Criterion
import scala.collection.mutable.ListBuffer
import org.randi3.model.criterion.constraint.Constraint
import org.randi3.model.{Entity, Trial}

/** OpenClinica view of a trial: the event -> form -> item-group -> item tree
  * plus the randi3 `Trial` it maps to.
  *
  * @param events        study events, each holding forms, item groups and items
  * @param treatmentItem location of the treatment item in the tree, if mapped
  * @param sites         site OID -> site name
  */
case class TrialOC(val id: Int = Int.MinValue, val version: Int = 0, val identifier: String, val oid: String, val name: String, val description: String, val metaDataVersionOID: String = "", val events: List[EventOC] = List(), val trial: Option[Trial], connection: ConnectionOC, val treatmentItem: Option[(EventOC, FormOC, ItemGroupOC, ItemOC)], val sites: Map[String, String] = Map()) extends Entity {

  /** All criteria attached to any item in the event tree; items whose
    * criterion is null are skipped. Order follows the tree traversal, as in
    * the previous nested-loop version. */
  def getAllCriteria(): List[Criterion[Any, Constraint[Any]]] =
    // Refactored (resolves the old "TODO refactor loops"): one comprehension
    // instead of four nested loops appending to a ListBuffer.
    for {
      event <- events
      form <- event.forms
      itemGroup <- form.items
      item <- itemGroup.items
      if item.criterion != null
    } yield item.criterion

  /** Finds the (event, form, itemGroup, item) path of the item whose OID
    * equals `criterion.name`, or `None` if no item matches. */
  def getMappedElementsFromCriteria(criterion: Criterion[Any, Constraint[Any]]): Option[(EventOC, FormOC, ItemGroupOC, ItemOC)] = {
    val criteriaOid = criterion.name
    // Lazy iterator search: stops at the first match, replacing the nested
    // loops with a non-local `return` in the original.
    val hits = for {
      event <- events.iterator
      form <- event.forms.iterator
      itemGroup <- form.items.iterator
      item <- itemGroup.items.iterator
      if item.oid == criteriaOid
    } yield (event, form, itemGroup, item)
    if (hits.hasNext) Some(hits.next()) else None
  }
}
dschrimpf/randi3-edc
src/main/scala/org/randi3/edc/model/openClinica/TrialOC.scala
Scala
gpl-3.0
1,553
// Copyright 2017 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package squid

import utils._

/** This contains the virtualized definitions of core Scala constructs such as if-then-else's, loops and variables. */
package object lib {

  @transparent // Note: `IfThenElse` is NOT @transparencyPropagating as it executes the closures it is passed
  def IfThenElse[A](cond: Boolean, thn: => A, els: => A): A =
    if (cond) thn else els

  @transparent
  def While(cond: => Boolean, loop: => Unit): Unit =
    while (cond) loop

  // Evaluates `effects` for their side effects, returns `result`.
  @transparencyPropagating
  def Imperative[A](effects: Any*)(result: A): A = result

  // Short-circuiting boolean ops virtualized as methods (rhs is by-name).
  @inline @transparent
  def And(lhs: Boolean, rhs: => Boolean) = lhs && rhs
  @inline @transparent
  def Or(lhs: Boolean, rhs: => Boolean) = lhs || rhs

  // Marker parameter type used to represent thunks as unary functions;
  // a single shared instance is handed out by ThunkArg.
  final class ThunkParam private[lib]()
  private val ThunkParam = new ThunkParam
  def ThunkArg: ThunkParam = ThunkParam

  /** Virtualized mutable variable: `:=` assigns, `!` dereferences. */
  abstract class MutVar[A] {
    def := (that: A) : Unit
    def ! : A
    def get = this.!
    // Equality compares current contents with another MutVar, not identity.
    override def equals(that: Any) = that |>? { case v: MutVar[_] => this.! == v.! } Else false
    override def toString = s"MutVar(${this!})"
  }
  object MutVar {
    def apply[A](init: A): MutVar[A] = new MutVar[A] {
      private[this] var cur = init
      def := (that: A) = cur = that
      def ! = cur
    }
  }

  /** Used in ReinterpreterToScala when a local variable "escapes", to preserve the validity and semantics of the program. */
  private class MutVarProxy[A](getter: => A, setter: A => Unit) extends MutVar[A] {
    def := (that: A) = setter(that)
    def ! = getter
  }
  @transparencyPropagating
  def MutVarProxy[A](get: => A, set: A => Unit): MutVar[A] = // upcasted to MutVar so code uses the same symbol...
    new MutVarProxy(get, set)

  // More confusing than useful, especially since it seems to be automatically imported along with Var:
  //implicit def readVar[A](v: Var[A]): A = v!

  // Uncurrying helpers: convert curried function values to tupled arity-N ones.
  @transparencyPropagating
  def uncurried0[b](f: => b): () => b = () => f
  @transparencyPropagating
  def uncurried1[a, b](f: a => b): a => b = // Not actually used, just here for syntactic completeness ^_^
    (x1) => f(x1)
  @transparencyPropagating
  def uncurried2[a1, a2, b](f: a1 => a2 => b): (a1, a2) => b =
    (x1, x2) => f(x1)(x2)
  @transparencyPropagating
  def uncurried3[a1, a2, a3, b](f: a1 => a2 => a3 => b): (a1, a2, a3) => b =
    (x1, x2, x3) => f(x1)(x2)(x3)
  @transparencyPropagating
  def uncurried4[a1, a2, a3, a4, b](f: a1 => a2 => a3 => a4 => b): (a1, a2, a3, a4) => b =
    (x1, x2, x3, x4) => f(x1)(x2)(x3)(x4)
  @transparencyPropagating
  def uncurried5[a1, a2, a3, a4, a5, b](f: a1 => a2 => a3 => a4 => a5 => b): (a1, a2, a3, a4, a5) => b =
    (x1, x2, x3, x4, x5) => f(x1)(x2)(x3)(x4)(x5)

  @transparent
  def nullValue[T] = null.asInstanceOf[T]

  /** Communicates the intention that this null value is never checked; mainly used to initialize variables with a value
    * that is never supposed to be accessed. */
  @transparent
  def uncheckedNullValue[T] = null.asInstanceOf[T]

  /** A dummy function to be used by internal compiler passes to hold temporary code; mainly used to avoid hygiene
    * problems arising from recursive extrusion (until hygienic context polymorphism is implemented). */
  @transparencyPropagating
  def placeHolder[T](id: String): T =
    throw new AssertionError(s"Tried to execute `$id` placeholder, which was not supposed to be compiled into final program.")

  import scala.annotation.{StaticAnnotation, compileTimeOnly}

  // Marker annotations consumed by squid's macros.
  class ExtractedBinder extends StaticAnnotation

  class Implicit extends StaticAnnotation

}
epfldata/squid
core/src/main/scala/squid/lib/package.scala
Scala
apache-2.0
4,187
package spatial.codegen.scalagen

import argon.core._
import argon.codegen.scalagen.ScalaCodegen
import spatial.aliases._
import spatial.nodes._

/** Scala-codegen mixin for bit-based numeric types: registers the runtime
  * support sources the generated code depends on and emits the shared
  * implicit-conversions import at the top of every generated file. */
trait ScalaGenBits extends ScalaCodegen {

  // Runtime support files shipped alongside the generated code.
  // NOTE(review): `::=` presumably prepends (list-cons style), so these end up
  // in reverse declaration order in `dependencies` — confirm in argon.
  dependencies ::= FileDep("scalagen", "Bool.scala")
  dependencies ::= FileDep("scalagen", "FixedPoint.scala")
  dependencies ::= FileDep("scalagen", "FixedPointRange.scala")
  dependencies ::= FileDep("scalagen", "FloatPoint.scala")
  dependencies ::= FileDep("scalagen", "DataImplicits.scala")
  dependencies ::= FileDep("scalagen", "Number.scala")

  /** Scala literal representing an "invalid" value of `tp`. The base trait
    * supports no types (the sole `case _` always throws); mixins are expected
    * to handle their own types before falling through to this. */
  def invalid(tp: Type[_]): String = tp match {
    case _ => throw new Exception(u"Don't know how to generate invalid for type $tp")
  }

  /** Prepends the DataImplicits import before the standard file header. */
  override def emitFileHeader() = {
    emit(src"import DataImplicits._")
    super.emitFileHeader()
  }
}
stanford-ppl/spatial-lang
spatial/core/src/spatial/codegen/scalagen/ScalaGenBits.scala
Scala
mit
787
package controllers

import play.api._
import play.api.mvc._
import play.api.libs._
import play.api.libs.iteratee._
import Play.current
import play.api.libs.concurrent.Akka
import akka.actor.Props
import utils.{ CleanUpActor, CleanUp }
import java.text.DecimalFormat
import java.io.File
import models.Dump

/** Admin pages: storage statistics, listing of unreferenced ("dangling")
  * dump files on disk, and a manual clean-up trigger. */
object Admin extends Controller {

  /** Shows total/free disk space of the dump directory plus all files on
    * disk that no `Dump` row references. */
  def index = Action {
    val dmpPath = Play.current.configuration.getString("dmpster.dmp.path").getOrElse("dmps")
    val filePath = new File(dmpPath)
    val totalSpace = filePath.getTotalSpace
    val freeSpace = filePath.getFreeSpace
    val referencedFiles = Dump.all.map(_.pathInStorageDirectory)

    // Recursively lists regular files below `filePath`.
    // FIX: File.listFiles() returns null for non-directories or on I/O
    // errors; treat that as "no children" instead of throwing an NPE when
    // the configured dump directory does not exist.
    def getActualFiles(filePath: File): List[File] = {
      val all = Option(filePath.listFiles()).map(_.toList).getOrElse(Nil)
      val (dirs, files) = all.partition(_.isDirectory)
      files ++ dirs.flatMap(getActualFiles)
    }

    val referencedFilesAbsolute = referencedFiles.map(f => new File(filePath, f)).map(_.getPath)
    val danglingFiles = getActualFiles(filePath).map(_.getPath).filterNot { f =>
      referencedFilesAbsolute.contains(f)
    }
    Ok(views.html.admin(totalSpace, freeSpace, formatFileSize(totalSpace),
      formatFileSize(freeSpace), danglingFiles))
  }

  /** Asks the clean-up actor to run immediately, then returns to the index. */
  def cleanUpNow = Action {
    Logger.info("clean up requested")
    val actor = Akka.system.actorSelection("/user/cleanUpActor")
    actor ! CleanUp
    Redirect(routes.Admin.index)
  }

  /** Human-readable file size, e.g. 1536 -> "1.5 KB"; "0" for non-positive. */
  private def formatFileSize(size: Long) = {
    if (size <= 0) "0"
    else {
      val units = List("B", "KB", "MB", "GB", "TB")
      // FIX: clamp the exponent so sizes >= 1024^5 format as TB instead of
      // throwing IndexOutOfBoundsException on `units(digitGroups)`.
      val digitGroups = (Math.log10(size) / Math.log10(1024)).toInt.min(units.size - 1)
      s"${new DecimalFormat("#,##0.#").format(size / Math.pow(1024, digitGroups))} ${units(digitGroups)}"
    }
  }
}
codingphil/dmpster
app/controllers/Admin.scala
Scala
mit
1,823
package model

/** Minimal mutable bean: a duck with a settable name and a constant quack. */
class Duck() {
  /** Current name; starts out empty. */
  var name: String = ""

  /** Returns the current name. */
  def getName: String = name

  /** Replaces the current name with `n`. */
  def setName(n: String): Unit = {
    name = n
  }

  /** The noise this duck makes — always the same. */
  def quack: String = "quack!!"
}
lsgro/cliwe
cliwe-sample/app/model/Duck.scala
Scala
mit
163
package domala.tests.entity

import domala._
import domala.jdbc.{Config, Result}
import domala.tests.TestConfig
import org.scalatest.{BeforeAndAfter, FunSuite}

/** Exercises domala @Column mappings: a quoted column name, an
  * insertable = false column and an updatable = false column. */
class ColumnTestSuite extends FunSuite with BeforeAndAfter {
  implicit val config: Config = TestConfig

  val dao: ColumnTestDao = ColumnTestDao.impl

  // Fresh table per test.
  before {
    Required {
      dao.create()
    }
  }

  after {
    Required {
      dao.drop()
    }
  }

  test("insert & update") {
    Required {
      // `bar` is insertable = false -> stays NULL after insert;
      // `baz` is updatable = false -> keeps its inserted value after update.
      dao.insert(ColumnTest(Some(1), foo = Some("aaa"), bar = Some("bbb"), baz = Some("ccc")))
      assert(dao.selectAll == Seq(
        ColumnTest(Some(1), Some("aaa"), None, Some("ccc"))
      ))
      dao.update(ColumnTest(Some(1), foo = Some("ddd"), bar = Some("eee"), baz = Some("fff")))
      assert(dao.selectAll == Seq(
        ColumnTest(Some(1), Some("ddd"), Some("eee"), Some("ccc"))
      ))
    }
  }
}

// Entity demonstrating column-name quoting and write restrictions; the
// database column for `foo` is the quoted name "fooo".
@Entity
case class ColumnTest (
  @domala.Id
  id: Option[Int],
  @Column(name = "fooo", quote = true)
  foo: Option[String],
  @Column(insertable = false)
  bar: Option[String],
  @Column(updatable = false)
  baz: Option[String]
)

// DAO with DDL scripts and CRUD for the entity above; implementation is
// generated by domala's @Dao macro.
@Dao
trait ColumnTestDao {

  @Script(sql = """ create table column_test( id long not null identity primary key, "fooo" varchar(20), bar varchar(20), baz varchar(20) ); """)
  def create()

  @Script(sql = """ drop table column_test; """)
  def drop()

  @Select(sql=""" select * from column_test """)
  def selectAll: Seq[ColumnTest]

  @Insert
  def insert(entity: ColumnTest): Result[ColumnTest]

  @Update
  def update(entity: ColumnTest): Result[ColumnTest]
}
bakenezumi/domala
paradise/src/test/scala/domala/tests/entity/ColumnTestSuite.scala
Scala
apache-2.0
1,603
package edu.berkeley.cs.amplab.mlmatrix

import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag

import breeze.linalg._

import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.PartitionPruningRDD
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkException

// One dense block of the matrix, addressed by (row-block, col-block) id.
case class BlockPartition(
  blockIdRow: Int,
  blockIdCol: Int,
  mat: DenseMatrix[Double]) extends Serializable

// Information about BlockPartitionedMatrix maintained on the driver
case class BlockPartitionInfo(
  partitionId: Int,
  blockIdRow: Int,
  blockIdCol: Int,
  startRow: Long,
  numRows: Int,
  startCol: Long,
  numCols: Int) extends Serializable

/** Distributed dense matrix stored as an RDD of rectangular blocks laid out
  * on a numRowBlocks x numColBlocks grid. */
class BlockPartitionedMatrix(
  val numRowBlocks: Int,
  val numColBlocks: Int,
  val rdd: RDD[BlockPartition]) extends DistributedMatrix with Logging {

  // Driver-side cache of per-block metadata; built lazily by getBlockInfo.
  @transient var blockInfo_ : Map[(Int, Int), BlockPartitionInfo] = null

  // Total (rows, cols): one representative block per row-block id (resp.
  // col-block id), dimensions summed.
  override def getDim = {
    val bi = getBlockInfo
    val xDim = bi.map { x =>
      (x._1._1, x._2.numRows.toLong)
    }.groupBy(x => x._1).values.map { x =>
      x.head._2.toLong
    }.reduceLeft {
      _ + _
    }
    val yDim = bi.map { x =>
      (x._1._2, x._2.numCols.toLong)
    }.groupBy(x => x._1).values.map { x =>
      x.head._2.toLong
    }.reduceLeft {
      _ + _
    }
    (xDim, yDim)
  }

  // Collects each block's id, dimensions and Spark partition id, then derives
  // cumulative start offsets per row/col block. Runs one Spark job.
  private def calculateBlockInfo() {
    // TODO: Part of this is repeated in the fromArray code. See if we can
    // avoid this duplication.
    val blockStartRowColsParts = rdd.mapPartitionsWithIndex { case (partId, iter) =>
      val partDimIter = iter.map { part =>
        val dims = new ArrayBuffer[(Int, Int, Long, Long)]
        val partIdMap = (part.blockIdRow, part.blockIdCol) -> partId
        dims += ((part.blockIdRow, part.blockIdCol, part.mat.rows.toLong, part.mat.cols.toLong))
        (partIdMap, dims)
      }
      partDimIter
    }.collect()
    val blockStartRowCols = blockStartRowColsParts.flatMap(x => x._2).sortBy(x => (x._1, x._2))

    // One representative block per row-block id / per col-block id.
    val rowReps = blockStartRowCols.groupBy(x => x._1).values.map { x =>
      x.head
    }.toSeq.sortBy { x =>
      (x._1, x._2)
    }
    val colReps = blockStartRowCols.groupBy(x => x._2).values.map { x =>
      x.head
    }.toSeq.sortBy { x =>
      (x._1, x._2)
    }

    // Calculate startRows
    val cumulativeRowSum = rowReps.scanLeft(0L) { case (x1, x2) =>
      x1 + x2._3
    }.dropRight(1)
    val rowStarts = rowReps.zip(cumulativeRowSum).map { x =>
      (x._1._1, (x._1._3, x._2))
    }.toMap

    val cumulativeColSum = colReps.scanLeft(0L) { case (x1, x2) =>
      x1 + x2._4
    }.dropRight(1)
    val colStarts = colReps.zip(cumulativeColSum).map { x =>
      (x._1._2, (x._1._4, x._2))
    }.toMap

    val partitionIdsMap = blockStartRowColsParts.map(x => x._1).toMap

    blockInfo_ = rowStarts.keys.flatMap { r =>
      colStarts.keys.filter { c =>
        partitionIdsMap.contains(r, c)
      }.map { c =>
        ((r, c), BlockPartitionInfo(partitionIdsMap((r, c)), r, c, rowStarts(r)._2,
          rowStarts(r)._1.toInt, colStarts(c)._2, colStarts(c)._1.toInt))
      }
    }.toMap
  }

  // Lazily computed; not thread-safe (plain null check on a @transient var).
  def getBlockInfo = {
    if (blockInfo_ == null) {
      calculateBlockInfo()
    }
    blockInfo_
  }

  // Element-wise scalar addition; block layout unchanged.
  override def +(other: Double) = {
    new BlockPartitionedMatrix(numRowBlocks, numColBlocks, rdd.map { lm =>
      BlockPartition(lm.blockIdRow, lm.blockIdCol, lm.mat :+ other)
    })
  }

  // Element-wise scalar multiplication.
  override def *(other: Double) = {
    new BlockPartitionedMatrix(numRowBlocks, numColBlocks, rdd.map { lm =>
      BlockPartition(lm.blockIdRow, lm.blockIdCol, lm.mat :* other)
    })
  }

  // Applies f to every element, preserving block shapes.
  override def mapElements(f: Double => Double) = {
    new BlockPartitionedMatrix(numRowBlocks, numColBlocks, rdd.map { lm =>
      BlockPartition(lm.blockIdRow, lm.blockIdCol,
        new DenseMatrix[Double](lm.mat.rows, lm.mat.cols, lm.mat.data.map(f)))
    })
  }

  override def aggregateElements[U: ClassTag](zeroValue: U)(seqOp: (U, Double) => U, combOp: (U, U) => U): U = {
    rdd.map { part =>
      part.mat.data.aggregate(zeroValue)(seqOp, combOp)
    }.reduce(combOp)
  }

  // Reduces every row to one value, yielding a (numRows x 1) matrix.
  // NOTE(review): rows are taken as `data.grouped(cols)`, which assumes the
  // backing array is row-major; breeze DenseMatrix is column-major by default
  // (fromArray compensates with `.t`) — verify for matrices from other sources.
  override def reduceRowElements(f: (Double, Double) => Double): DistributedMatrix = {
    val blockReduced = rdd.map { block =>
      val rows = block.mat.data.grouped(block.mat.cols)
      val reduced = rows.map(_.reduce(f)).toArray
      BlockPartition(block.blockIdRow, block.blockIdCol,
        new DenseMatrix[Double](block.mat.rows, 1, reduced)
      )
    }
    // Combines the per-block row results of two blocks in the same block-row.
    def rowWiseReduce(block1: Array[Double], block2: Array[Double]): Array[Double] = {
      block1.zip(block2).map { case (d1, d2) => f(d1, d2) }
    }
    val reduced = blockReduced
      .map { block => (block.blockIdRow, block.mat.data) }
      .groupByKey(numRowBlocks)
      .map { case (blockRow, blocks) =>
        val reducedBlocks = blocks.reduce(rowWiseReduce).toArray
        BlockPartition(blockRow, 0, new DenseMatrix[Double](reducedBlocks.length, 1, reducedBlocks))
      }
    new BlockPartitionedMatrix(numRowBlocks, 1, reduced)
  }

  // Reduces every column to one value, yielding a (1 x numCols) matrix.
  // Same row-major layout assumption as reduceRowElements.
  override def reduceColElements(f: (Double, Double) => Double): DistributedMatrix = {
    val blockReduced = rdd.map { block =>
      val cols = block.mat.data.grouped(block.mat.cols).toArray.transpose
      val reduced = cols.map(_.reduce(f))
      BlockPartition(block.blockIdRow, block.blockIdCol,
        new DenseMatrix[Double](1, block.mat.cols, reduced)
      )
    }
    def colWiseReduce(block1: Array[Double], block2: Array[Double]): Array[Double] = {
      block1.zip(block2).map { case (d1, d2) => f(d1, d2) }
    }
    val reduced = blockReduced
      .map { block => (block.blockIdCol, block.mat.data) }
      .groupByKey(numColBlocks)
      .map { case (blockCol, blocks) =>
        val reducedBlocks = blocks.reduce(colWiseReduce).toArray
        BlockPartition(0, blockCol, new DenseMatrix[Double](1, reducedBlocks.length, reducedBlocks))
      }
    new BlockPartitionedMatrix(1, numColBlocks, reduced)
  }

  // Block-wise matrix addition; requires identical block layout.
  override def +(other: DistributedMatrix) = {
    other match {
      // We really need a function to check if two matrices are partitioned similarly
      case otherBlocked: BlockPartitionedMatrix =>
        if (getBlockInfo == otherBlocked.getBlockInfo) {
          // TODO: Optimize if the blockIds are in the same order.
          val blockRDD = rdd.map(x => ((x.blockIdRow, x.blockIdCol), x.mat)).join {
            otherBlocked.rdd.map(y => ((y.blockIdRow, y.blockIdCol), y.mat))
          }.map { x =>
            new BlockPartition(x._1._1, x._1._2, x._2._1 + x._2._2)
          }
          new BlockPartitionedMatrix(numRowBlocks, numColBlocks, blockRDD)
        } else {
          throw new SparkException(
            "Cannot add matrices with unequal partitions")
        }
      case _ =>
        throw new IllegalArgumentException("Cannot add matrices of different types")
    }
  }

  // Frobenius norm: sqrt of the sum of squared elements.
  def normFro() = {
    math.sqrt(rdd.map { lm =>
      lm.mat.data.map(x => math.pow(x, 2)).sum
    }.reduce(_ + _))
  }

  // Row slice: keeps only blocks intersecting `rowRange`, prunes the
  // untouched Spark partitions, renumbers row-block ids from 0 and trims each
  // surviving block to the requested rows.
  // NOTE(review): assumes the requested rows within a block are contiguous
  // (uses `localIdx.head to localIdx.last`) — true for a Range with step 1.
  override def apply(rowRange: Range, colRange: ::.type) = {
    val blockInfos = getBlockInfo
    val blockInfosBcast = rdd.context.broadcast(blockInfos)
    val blocksWithRows = blockInfos.filter { bi =>
      rowRange.filter(i => i >= bi._2.startRow && i < bi._2.startRow + bi._2.numRows).nonEmpty
    }.values.toSeq.sortBy(x => x.blockIdRow)

    val blocksFilteredIds = blocksWithRows.map(bi => bi.partitionId).toSet
    val prunedRdd = PartitionPruningRDD.create(rdd, part => blocksFilteredIds.contains(part))
    // Renumber the blockIdRows from 0 to number of row blocks
    val newBlockIdMap = blocksWithRows.map(x => x.blockIdRow).distinct.zipWithIndex.toMap
    val newBlockIdBcast = rdd.context.broadcast(newBlockIdMap)
    val blockRDD = prunedRdd.filter { part =>
      newBlockIdBcast.value.contains(part.blockIdRow)
    }.map { part =>
      // Get a new blockIdRow, keep same blockIdCol and update the matrix
      val newBlockIdRow = newBlockIdBcast.value(part.blockIdRow)
      val blockInfo = blockInfosBcast.value((part.blockIdRow, part.blockIdCol))
      val validIdx = rowRange.filter { i =>
        i >= blockInfo.startRow && i < blockInfo.startRow + blockInfo.numRows
      }
      val localIdx = validIdx.map(x => x - blockInfo.startRow).map(x => x.toInt)
      val newMat = part.mat(localIdx.head to localIdx.last, ::)
      BlockPartition(newBlockIdRow, blockInfo.blockIdCol, newMat)
    }
    new BlockPartitionedMatrix(newBlockIdMap.size, numColBlocks, blockRDD)
  }

  // Column slice; mirror image of the row slice above.
  override def apply(rowRange: ::.type, colRange: Range) = {
    val blockInfos = getBlockInfo
    val blockInfosBcast = rdd.context.broadcast(blockInfos)
    val blocksWithCols = blockInfos.filter { bi =>
      colRange.filter(i => i >= bi._2.startCol && i < bi._2.startCol + bi._2.numCols).nonEmpty
    }.values.toSeq.sortBy(x => x.blockIdCol)

    val blocksFilteredIds = blocksWithCols.map(bi => bi.partitionId).toSet
    val prunedRdd = PartitionPruningRDD.create(rdd, part => blocksFilteredIds.contains(part))
    // Renumber the blockIdRows from 0 to number of row blocks
    val newBlockIdMap = blocksWithCols.map(x => x.blockIdCol).distinct.zipWithIndex.toMap
    val newBlockIdBcast = rdd.context.broadcast(newBlockIdMap)
    val blockRDD = prunedRdd.filter { part =>
      newBlockIdBcast.value.contains(part.blockIdCol)
    }.map { part =>
      // Get a new blockIdRow, keep same blockIdCol and update the matrix
      val newBlockIdCol = newBlockIdBcast.value(part.blockIdCol)
      val blockInfo = blockInfosBcast.value((part.blockIdRow, part.blockIdCol))
      val validIdx = colRange.filter { i =>
        i >= blockInfo.startCol && i < blockInfo.startCol + blockInfo.numCols
      }
      val localIdx = validIdx.map(x => x - blockInfo.startCol).map(x => x.toInt)
      val newMat = part.mat(::, localIdx.head to localIdx.last)
      BlockPartition(blockInfo.blockIdRow, newBlockIdCol, newMat)
    }
    new BlockPartitionedMatrix(numRowBlocks, newBlockIdMap.size, blockRDD)
  }

  // Simultaneous row+col slicing is not implemented.
  override def apply(rowRange: Range, colRange: Range) = ???

  override def cache() = {
    rdd.cache()
    this
  }

  // TODO: This is terribly inefficient if we have more partitions.
  // Make this more efficient
  // Materializes the full matrix on the driver; only safe for small matrices.
  override def collect(): DenseMatrix[Double] = {
    val parts = rdd.map(x => ((x.blockIdRow, x.blockIdCol), x.mat)).collect()
    val dims = getDim
    val mat = new DenseMatrix[Double](dims._1.toInt, dims._2.toInt)
    val blockInfos = getBlockInfo
    parts.foreach { part =>
      val blockInfo = blockInfos((part._1._1, part._1._2))
      // Figure out where this part should be put
      val rowRange = blockInfo.startRow.toInt until (blockInfo.startRow + blockInfo.numRows).toInt
      val colRange = blockInfo.startCol.toInt until (blockInfo.startCol + blockInfo.numCols).toInt
      mat(rowRange, colRange) := part._2
    }
    mat
  }

  // Sub-matrix of whole blocks, end indices exclusive; block ids are shifted
  // so the result's ids start at (0, 0).
  def getBlockRange(
      startRowBlock: Int,
      endRowBlock: Int,
      startColBlock: Int,
      endColBlock: Int) = {
    val blockInfos = getBlockInfo
    val blocksFiltered = blockInfos.filter { bi =>
      bi._2.blockIdRow >= startRowBlock && bi._2.blockIdRow < endRowBlock &&
      bi._2.blockIdCol >= startColBlock && bi._2.blockIdCol < endColBlock
    }
    val blocksFilteredIds = blocksFiltered.values.map(bi => bi.partitionId).toSet
    val newBlockIds = blocksFiltered.mapValues { bi =>
      (bi.blockIdRow - startRowBlock, bi.blockIdCol - startColBlock)
    }.map(identity)
    val newBlockIdBcast = rdd.context.broadcast(newBlockIds)
    val prunedRdd = PartitionPruningRDD.create(rdd, part => blocksFilteredIds.contains(part))
    val blockRDD = prunedRdd.filter { part =>
      newBlockIdBcast.value.contains((part.blockIdRow, part.blockIdCol))
    }.map { part =>
      // Get a new blockIdRow, blockIdCol
      val newBlockIds = newBlockIdBcast.value((part.blockIdRow, part.blockIdCol))
      BlockPartition(newBlockIds._1, newBlockIds._2, part.mat)
    }
    new BlockPartitionedMatrix(endRowBlock - startRowBlock, endColBlock - startColBlock, blockRDD)
  }

  // Get a single column block as a row partitioned matrix
  def getColBlock(colBlock: Int): RowPartitionedMatrix = {
    val blockRDD = getBlockRange(0, numRowBlocks, colBlock, colBlock + 1)
    RowPartitionedMatrix.fromMatrix(blockRDD.rdd.map(_.mat))
  }
}

object BlockPartitionedMatrix {
  // def fromColumnBlocks(colBlocks: Seq[RowPartitionedMatrix])
  // def fromRowBlocks(rowBlocks: Seq[ColumnPartitionedMatrix])

  // Assumes each row is represented as an array of Doubles
  // Builds a block matrix from a row-oriented RDD: computes global row
  // offsets per partition, slices each row into column chunks, then groups
  // chunks by destination block.
  def fromArray(
      matrixRDD: RDD[Array[Double]],
      numRowsPerBlock: Int,
      numColsPerBlock: Int): BlockPartitionedMatrix = {
    // Collect how many rows are there in each partition of RDD
    val perPartDims = matrixRDD.mapPartitionsWithIndex { case (part, iter) =>
      var numRows = 0L
      var numCols = 0
      while (iter.hasNext) {
        numRows += 1L
        numCols = iter.next().length
      }
      Iterator.single((part, numRows, numCols))
    }.collect().sortBy(x => x._1)

    val cumulativeSum = perPartDims.scanLeft(0L) { case (x1, x2) =>
      x1 + x2._2
    }
    val numRows = cumulativeSum.takeRight(1).head
    val rowStarts = perPartDims.zip(cumulativeSum.dropRight(1)).map { x =>
      (x._1._1, (x._2, x._1._3))
    }.toMap
    val rowStartsBroadcast = matrixRDD.context.broadcast(rowStarts)

    val numColBlocks = math.ceil(rowStarts.head._2._2.toFloat / numColsPerBlock.toFloat).toInt
    // NOTE(review): `numRows / numRowsPerBlock` is Long integer division, so
    // it truncates BEFORE ceil — a trailing partial row block appears to be
    // dropped from the count; likely should divide as doubles. Verify.
    val numRowBlocks = math.ceil(numRows / numRowsPerBlock).toInt

    val blockRDD = matrixRDD.mapPartitionsWithIndex { case (part, iter) =>
      val startRow = rowStartsBroadcast.value(part)._1
      val ret = new ArrayBuffer[(Int, Array[Double])]
      var rowNo = 0L
      while (iter.hasNext) {
        // For each row
        val rowBlock = ((rowNo + startRow) / numRowsPerBlock).toInt
        val arr = iter.next()
        rowNo += 1
        (0 until numColBlocks).foreach { col =>
          ret.append(
            (rowBlock * numColBlocks + col,
              arr.slice(col * numColsPerBlock, (col + 1) * numColsPerBlock))
          )
        }
      }
      ret.iterator
    }.groupByKey(numColBlocks * numRowBlocks).map { item =>
      // Reassemble one block from its row slices; rows arrive appended, so
      // build the transpose (numCols x numRows) and flip with `.t`.
      val matData = new ArrayBuffer[Double]
      var numRows = 0
      var numCols = 0
      val iter = item._2.iterator
      while (iter.hasNext) {
        val arr = iter.next()
        matData ++= arr
        numRows += 1
        numCols = arr.length
      }
      new BlockPartition(item._1 / numColBlocks, item._1 % numColBlocks,
        new DenseMatrix[Double](numCols, numRows, matData.toArray).t)
    }
    new BlockPartitionedMatrix(numRowBlocks, numColBlocks, blockRDD)
  }
}
amplab/ml-matrix
src/main/scala/edu/berkeley/cs/amplab/mlmatrix/BlockPartitionedMatrix.scala
Scala
apache-2.0
14,694
package io.buoyant.linkerd
package telemeter

import io.buoyant.namer.{NamerInitializer, TestNamerInitializer}
import io.buoyant.telemetry.{TelemeterInitializer, TestTelemeterInitializer}
import io.buoyant.test.FunSuite

/**
 * Verifies how the usage-data telemeter is wired into a parsed linker
 * configuration: present by default, configurable via the `usage:` stanza,
 * and removable with `enabled: false`.
 */
class UsageDataTelemeterTest extends FunSuite {

  /** Builds linker initializers backed entirely by test stubs. */
  def initializer(
    protos: Seq[ProtocolInitializer] = Seq(TestProtocol.Plain, TestProtocol.Fancy),
    namers: Seq[NamerInitializer] = Seq(TestNamerInitializer),
    telemeters: Seq[TelemeterInitializer] = Seq(new TestTelemeterInitializer)
  ) = Linker.Initializers(protocol = protos, namer = namers, telemetry = telemeters)

  /** Parses a linker config from YAML using the test initializers. */
  def parse(yaml: String) = initializer().load(yaml)

  test("usage telemeter is enabled by default") {
    val config =
      """|routers:
         |- protocol: plain
         |  servers:
         |  - port: 1
         |""".stripMargin
    // No `usage:` stanza at all — the telemeter must still be installed.
    val telemeters = parse(config).telemeters
    assert(telemeters.exists(_.isInstanceOf[UsageDataTelemeter]))
  }

  test("usage telemeter can be configured") {
    val config =
      """|routers:
         |- protocol: plain
         |  servers:
         |  - port: 1
         |usage:
         |  orgId: my-org
         |""".stripMargin
    // The configured orgId must survive parsing and land on the telemeter.
    val configuredOrgId = parse(config).telemeters.collectFirst {
      case t: UsageDataTelemeter => t.orgId
    }.flatten
    assert(configuredOrgId == Some("my-org"))
  }

  test("usage telemeter can be disabled") {
    val config =
      """|routers:
         |- protocol: plain
         |  servers:
         |  - port: 1
         |usage:
         |  enabled: false
         |""".stripMargin
    val hasUsageTelemeter =
      parse(config).telemeters.exists(_.isInstanceOf[UsageDataTelemeter])
    assert(!hasUsageTelemeter)
  }
}
denverwilliams/linkerd
linkerd/core/src/test/scala/io/buoyant/linkerd/telemeter/UsageDataTelemeterTest.scala
Scala
apache-2.0
1,671
package functions

/**
 * Created by Yuichiro on 2015/01/12.
 */

import scalaz._
import Scalaz._

/**
 * Demonstrates rewriting a computation that uses an explicit temporary value
 * into point-free style with scalaz combinators:
 *   - `|>`  pipe a value into a function,
 *   - `>>>` left-to-right function composition (Compose/Arrow),
 *   - `***` split: apply two functions to the two halves of a pair.
 */
object PointFree {

  def main(args: Array[String]) {
    tupleAsTmp()
  }

  /**
   * Runs the same computation twice — once "pointfully" with a named `tmp`
   * value, once point-free — and prints the (identical) boolean result of
   * each. With `t = ("a", 1)`, `f` fires (since `"a".size == 1`) producing
   * `("a1", 1)`, so both versions print `false`.
   */
  def tupleAsTmp(): Unit = {
    // Pointful version: compute tmp = f(t), then compare it back to t.
    val t = ("a", 1)
    // f appends the Int to the String when the String's length equals the Int;
    // otherwise it returns its argument unchanged.
    val f = (t: (String, Int)) => if (t._1.size == t._2) (t._1 + t._2.toString, t._2) else t
    val tmp = t |> f
    val result = tmp == t
    println(result)

    // Point-free version: thread the pair through a pipeline instead of
    // naming the intermediate value.
    // Duplicate the input so one copy can be transformed and one kept as-is.
    val makeTmp = (t: (String, Int)) => (t, t)
    // Identity on (String, Int); partner of `f` under `***`.
    val i = (t: (String, Int)) => t
    // Compare transformed copy against the original, carrying the original along.
    val comp = (tt: ((String, Int), (String, Int))) => (tt._1 == tt._2, tt._2)
    // Drop the carried value, keeping only the comparison result.
    val rmTmp = (t: (Boolean, (String, Int))) => t._1
    val result2 = makeTmp >>> (f *** i) >>> comp >>> rmTmp
    // Precedence note: `>`-initial operators bind tighter than `|`-initial
    // ones, so this parses as `t |> (result2 >>> println)` — the pipeline is
    // composed with println first, then t is piped through the whole thing.
    t |> result2 >>> println
  }
}
YuichiroSato/ScratchScalaz
src/functions/PointFree.scala
Scala
apache-2.0
699
package maker

import maker.utils.FileUtils._
import java.io.File
import maker.utils.Int
import org.eclipse.aether.util.artifact.JavaScopes
import maker.project.DependencyPimps

/**
 * Configuration knobs for a Maker build, sourced from JVM system properties
 * (`maker.*`) and environment variables. Methods either fall back to a
 * sensible default or fail fast when a required setting is absent.
 */
trait MakerConfig extends DependencyPimps {

  // Option(...) maps an unset property/variable (null) to None.
  private def maybeProperty(name: String): Option[String] = Option(System.getProperty(name))
  private def maybeEnvVar(name: String): Option[String] = Option(System.getenv(name))

  /** True when `-Dmaker.exec-mode=true` was passed; defaults to false. */
  def runningInExecMode = {
    maybeProperty("maker.exec-mode").map(_.toBoolean).getOrElse(false)
  }

  // if a debug port is set then enable remote debugging
  /**
   * JVM flags enabling a remote-debug agent on `maker.debug.port`, or Nil
   * when no port is configured. Note `suspend=y`: the debugged JVM waits
   * for a debugger to attach before running.
   */
  def remoteDebuggingOption = {
    maybeProperty("maker.debug.port") match {
      case Some(port) => Seq(s"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=$port")
      case None => Nil
    }
  }

  /** None means "use the default pool size" — presumably decided by the caller; TODO confirm. */
  protected def taskThreadPoolSize: Option[Int] = None

  /**
   * Heap size (MB) for forked unit-test JVMs. Taken from
   * `maker.unit-test-heap-size` when set, otherwise half of this JVM's max
   * heap, capped at 1024 MB.
   */
  def unitTestHeapSize : Int = {
    maybeProperty("maker.unit-test-heap-size") match {
      case Some(size) => size.toInt
      case None =>
        val runtimeMemory = (Runtime.getRuntime.maxMemory / 1024 / 1024).toInt
        (runtimeMemory / 2) min 1024
    }
  }

  /**
   * The JDK root directory, from JAVA_HOME or JDK_HOME.
   * @throws IllegalStateException when neither variable is set
   */
  def javaHome = {
    maybeEnvVar("JAVA_HOME") orElse maybeEnvVar("JDK_HOME") match {
      case Some(dir) => file(dir)
      case None => throw new IllegalStateException("JAVA_HOME or JDK_HOME must be specified")
    }
  }

  /** The `java` launcher under [[javaHome]]. */
  def javaExecutable = {
    file(javaHome, "bin", "java")
  }

  /** (name, url) pairs of the Maven repositories consulted for dependencies. */
  def httpResolvers = Seq(
    ("maven", "http://repo1.maven.org/maven2/"),
    ("typesafe", "http://repo.typesafe.com/typesafe/releases/"),
    ("sonatype-snapshots", "https://oss.sonatype.org/content/repositories/snapshots/"),
    ("sonatype-releases", "https://oss.sonatype.org/content/repositories/releases/")
  )

  /**
   * GPG passphrase used for artifact signing.
   * @throws Exception when MAKER_GPG_PASS_PHRASE is not set
   */
  def gpgPassPhrase = maybeEnvVar("MAKER_GPG_PASS_PHRASE") match {
    case Some(phrase) => phrase
    case None => throw new Exception(s"MAKER_GPG_PASS_PHRASE variable not set")
  }

  /**
   * Sonatype credentials split on ':' — presumably user:password; verify
   * against the publishing code.
   * @throws Exception when MAKER_SONATYPE_CREDENTIALS is not set
   */
  def sonatypeCredentials = maybeEnvVar("MAKER_SONATYPE_CREDENTIALS") match {
    case Some(cred) => cred.split(":")
    case None => throw new Exception(s"MAKER_SONATYPE_CREDENTIALS variable not set")
  }

  /** Reporter flags passed to ScalaTest's runner. */
  def scalatestOutputParameters : String = "-oHL"

  /* Methods that are overriden by maker unit tests projects/modules */
  def reportBuildResult : Boolean = true
  def systemExitOnExecModeFailures : Boolean = true
  def updateIncludesSourceJars : Boolean = true
}
cage433/maker
maker/src/maker/MakerConfig.scala
Scala
bsd-2-clause
2,397
package com.github.cthulhu314.scalaba.captcha

/**
 * No-op [[Captcha]] implementation: generates an empty image and rejects
 * every validation attempt.
 */
class NopCaptcha extends Captcha {

  /** Produces a placeholder captcha: empty id, no image bytes. */
  def generate(): CaptchaImage = CaptchaImage("", Array.empty)

  /** Never validates successfully, regardless of input. */
  def validate(id: String, result: String): Boolean = false
}
cthulhu314/scalaba
src/main/scala/com/github/cthulhu314/scalaba/captcha/NopCaptcha.scala
Scala
mit
197
package ch.octo.cffpoc.gtfs

/**
 * Immutable collection of [[Trip]]s keyed by [[TripId]], wrapping a plain Map.
 */
class TripCollection(trips: Map[TripId, Trip]) {

  /** All trip ids in the collection; computed lazily, once. */
  lazy val tripIds: Set[TripId] = trips.keySet

  /** Number of trips held. */
  def size = trips.size

  /** Looks up a trip by id (throws NoSuchElementException when absent). */
  def apply(tripId: TripId) = trips(tripId)

  /** All trips as a list, in the underlying map's iteration order. */
  def toList = trips.valuesIterator.toList

  /** Applies `f` to every trip. */
  def map[T](f: (Trip) => T) = trips.values.map(f)

  /** Keeps only the trips satisfying `f`, preserving their ids. */
  def filter(f: (Trip) => Boolean): TripCollection =
    TripCollection(trips.filter { case (_, trip) => f(trip) })
}

object TripCollection {
  /** Factory mirroring the primary constructor. */
  def apply(trips: Map[TripId, Trip]) = new TripCollection(trips)
}
alexmasselot/gtfs-simulation-play
src/main/scala/ch/octo/cffpoc/gtfs/TripCollection.scala
Scala
apache-2.0
514
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite        **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.testsuite.javalib.io

import scala.language.implicitConversions

import java.io._

import org.junit.Test
import org.junit.Assert._

import org.scalajs.testsuite.utils.Platform.executingInJVM

/**
 * Tests java.io.PrintWriter semantics: flush/close propagation to the
 * underlying Writer, autoFlush behavior per operation family (write/print
 * never auto-flush, println/printf/format do), and the error-flag protocol
 * (IOExceptions are trapped and surfaced only via checkError()).
 */
class PrintWriterTest {

  /** Builds a PrintWriter over a mock StringWriter so tests can observe flushes/closes. */
  private def newPrintWriter(
      autoFlush: Boolean = false): (MockPrintWriter, MockStringWriter) = {
    val sw = new MockStringWriter
    val pw = new MockPrintWriter(sw, autoFlush)
    (pw, sw)
  }

  // flush() must propagate to the underlying writer; mere printing must not.
  @Test def flush(): Unit = {
    val (pw, sw) = newPrintWriter()
    pw.print("hello")
    assertFalse(sw.flushed)
    pw.flush()
    assertTrue(sw.flushed)
  }

  // close() closes the underlying writer; subsequent operations set the
  // error flag instead of throwing, and never reach the underlying writer.
  @Test def close(): Unit = {
    val (pw, sw) = newPrintWriter()
    pw.write("begin")
    assertFalse(sw.flushed)
    pw.close()
    // The JVM does not flush on close here; Scala.js does — platform-specific check.
    if (!executingInJVM)
      assertTrue(sw.flushed)
    assertTrue(sw.closed)
    assertFalse(pw.checkError())

    // can double-close without error
    pw.close()
    assertFalse(pw.checkError())
    pw.clearError()

    // when closed, other operations cause error
    def expectCausesError(body: => Unit): Unit = {
      body
      assertTrue(pw.checkError())
      pw.clearError()
    }
    expectCausesError(pw.print("never printed"))
    expectCausesError(pw.write(Array('a', 'b')))
    expectCausesError(pw.append("hello", 1, 3))
    expectCausesError(pw.flush())

    // at the end of it all, sw is still what it was when it was closed
    assertEquals("begin", sw.toString())
  }

  // write() never auto-flushes, even with autoFlush = true and newlines present.
  @Test def write_does_not_flush_even_with_new_line(): Unit = {
    def test(body: PrintWriter => Unit, expected: String): Unit = {
      val (pw, sw) = newPrintWriter(autoFlush = true)
      body(pw)
      assertFalse(sw.flushed)
      assertFalse(pw.checkError())
      assertEquals(expected, sw.toString())
    }

    test(_.write('\n'), "\n")
    test(_.write("hello\n"), "hello\n")
    test(_.write("hello\nworld", 3, 3), "lo\n")
    test(_.write(Array('A', '\n')), "A\n")
    test(_.write(Array('A', 'B', '\n', 'C'), 1, 2), "B\n")
  }

  // print() never auto-flushes either; also pins the string rendering of
  // each primitive/reference overload (including null handling).
  @Test def print_does_not_flush_even_with_new_line(): Unit = {
    def test(body: PrintWriter => Unit, expected: String): Unit = {
      val (pw, sw) = newPrintWriter(autoFlush = true)
      body(pw)
      assertFalse(sw.flushed)
      assertFalse(pw.checkError())
      assertEquals(expected, sw.toString())
    }

    test(_.print(true), "true")
    test(_.print('Z'), "Z")
    test(_.print('\n'), "\n")
    test(_.print(5), "5")
    test(_.print(1234567891011L), "1234567891011")
    test(_.print(1.5f), "1.5")
    test(_.print(Math.PI), "3.141592653589793")
    test(_.print(Array('A', '\n')), "A\n")
    test(_.print("hello\n"), "hello\n")
    test(_.print(null: String), "null")
    test(_.print((1, 2)), "(1,2)")
    test(_.print(null: AnyRef), "null")
  }

  // println() appends the line terminator AND flushes when autoFlush is on.
  @Test def println_forwards_and_flushes_when_autoFlush_is_true(): Unit = {
    testPrintlnForward(_.println(), "\n", autoFlush = true)
    testPrintlnForward(_.println(true), "true\n", autoFlush = true)
    testPrintlnForward(_.println('Z'), "Z\n", autoFlush = true)
    testPrintlnForward(_.println('\n'), "\n\n", autoFlush = true)
    testPrintlnForward(_.println(5), "5\n", autoFlush = true)
    testPrintlnForward(_.println(1234567891011L), "1234567891011\n", autoFlush = true)
    testPrintlnForward(_.println(1.5f), "1.5\n", autoFlush = true)
    testPrintlnForward(_.println(Math.PI), "3.141592653589793\n", autoFlush = true)
    testPrintlnForward(_.println(Array('A', '\n')), "A\n\n", autoFlush = true)
    testPrintlnForward(_.println("hello\n"), "hello\n\n", autoFlush = true)
    testPrintlnForward(_.println(null: String), "null\n", autoFlush = true)
    testPrintlnForward(_.println((1, 2)), "(1,2)\n", autoFlush = true)
    testPrintlnForward(_.println(null: AnyRef), "null\n", autoFlush = true)
  }

  // Same outputs as above, but no flush when autoFlush is off.
  @Test def println_and_forwards_do_not_flush_when_autoFlush_is_false(): Unit = {
    testPrintlnForward(_.println(), "\n", autoFlush = false)
    testPrintlnForward(_.println(true), "true\n", autoFlush = false)
    testPrintlnForward(_.println('Z'), "Z\n", autoFlush = false)
    testPrintlnForward(_.println('\n'), "\n\n", autoFlush = false)
    testPrintlnForward(_.println(5), "5\n", autoFlush = false)
    testPrintlnForward(_.println(1234567891011L), "1234567891011\n", autoFlush = false)
    testPrintlnForward(_.println(1.5f), "1.5\n", autoFlush = false)
    testPrintlnForward(_.println(Math.PI), "3.141592653589793\n", autoFlush = false)
    testPrintlnForward(_.println(Array('A', '\n')), "A\n\n", autoFlush = false)
    testPrintlnForward(_.println("hello\n"), "hello\n\n", autoFlush = false)
    testPrintlnForward(_.println(null: String), "null\n", autoFlush = false)
    testPrintlnForward(_.println((1, 2)), "(1,2)\n", autoFlush = false)
    testPrintlnForward(_.println(null: AnyRef), "null\n", autoFlush = false)
  }

  /** Shared driver: runs `body`, asserts flush state per autoFlush, no error, exact output. */
  private def testPrintlnForward(body: PrintWriter => Unit, expected: String,
      autoFlush: Boolean): Unit = {
    val (pw, sw) = newPrintWriter(autoFlush = autoFlush)
    body(pw)
    if (autoFlush) assertTrue(sw.flushed)
    else assertFalse(sw.flushed)
    assertFalse(pw.checkError())
    assertEquals(expected, sw.toString())
  }

  // printf/format flush under autoFlush = true (unlike write/print).
  @Test def printf_and_format_which_flushes_when_autoFlush_is_true(): Unit = {
    testPrintfFormat(_.printf("%04d", Int.box(5)), "0005", autoFlush = true)
    testPrintfFormat(_.format("%.5f", Double.box(Math.PI)), "3.14159", autoFlush = true)
  }

  @Test def printf_and_format_do_not_flush_when_autoFlush_is_false(): Unit = {
    testPrintfFormat(_.printf("%04d", Int.box(5)), "0005", autoFlush = false)
    testPrintfFormat(_.format("%.5f", Double.box(Math.PI)), "3.14159", autoFlush = false)
  }

  /** Shared driver for the printf/format tests; mirrors testPrintlnForward. */
  private def testPrintfFormat(body: PrintWriter => Unit, expected: String,
      autoFlush: Boolean): Unit = {
    val (pw, sw) = newPrintWriter(autoFlush = autoFlush)
    body(pw)
    if (autoFlush) assertTrue(sw.flushed)
    else assertFalse(sw.flushed)
    assertFalse(pw.checkError())
    assertEquals(expected, sw.toString())
  }

  // append() behaves like write(): no auto-flush; null CharSequence renders as "null".
  @Test def append_does_not_flush_even_with_new_line(): Unit = {
    def test(body: PrintWriter => Unit, expected: String): Unit = {
      val (pw, sw) = newPrintWriter(autoFlush = true)
      body(pw)
      assertFalse(sw.flushed)
      assertFalse(pw.checkError())
      assertEquals(expected, sw.toString())
    }

    test(_.append("hello\n"), "hello\n")
    test(_.append(null: CharSequence), "null")
    test(_.append("hello\nworld", 3, 6), "lo\n")
    // The (start, end) slice applies to the "null" rendering of a null CharSequence.
    test(_.append(null: CharSequence, 1, 2), "u")
    test(_.append('A'), "A")
    test(_.append('\n'), "\n")
  }

  // Every operation family must swallow IOExceptions from the underlying
  // writer and record them in the error flag instead of propagating.
  @Test def traps_all_IOException_and_updates_checkError(): Unit = {
    def test(body: PrintWriter => Unit): Unit = {
      val (pw, sw) = newPrintWriter()
      sw.throwing = true
      body(pw)
      assertTrue(pw.checkError())
    }

    test(_.flush())
    test(_.close())

    test(_.write('Z'))
    test(_.write("booh"))
    test(_.write("booh", 1, 1))
    test(_.write(Array('A', 'B')))
    test(_.write(Array('A', 'B'), 1, 1))

    test(_.print(true))
    test(_.print('Z'))
    test(_.print('\n'))
    test(_.print(5))
    test(_.print(1234567891011L))
    test(_.print(1.5f))
    test(_.print(Math.PI))
    test(_.print(Array('A', '\n')))
    test(_.print("hello\n"))
    test(_.print(null: String))
    test(_.print((1, 2)))
    test(_.print(null: AnyRef))

    test(_.println())
    test(_.println(true))
    test(_.println('Z'))
    test(_.println('\n'))
    test(_.println(5))
    test(_.println(1234567891011L))
    test(_.println(1.5f))
    test(_.println(Math.PI))
    test(_.println(Array('A', '\n')))
    test(_.println("hello\n"))
    test(_.println(null: String))
    test(_.println((1, 2)))
    test(_.println(null: AnyRef))

    test(_.append("hello\n"))
    test(_.append(null: CharSequence))
    test(_.append("hello\nworld", 3, 6))
    test(_.append(null: CharSequence, 1, 2))
    test(_.append('A'))
    test(_.append('\n'))
  }

  /** A PrintWriter that exposes various hooks for testing purposes. */
  private class MockPrintWriter(out: Writer, autoFlush: Boolean)
      extends PrintWriter(out, autoFlush) {
    def this(out: Writer) = this(out, false)

    // Widens the protected clearError() to public so tests can reset the flag.
    override def clearError(): Unit = super.clearError()
  }

  /** A StringWriter that exposes various hooks for testing purposes. */
  private class MockStringWriter extends StringWriter {
    // _flushed starts true: nothing unflushed has been written yet.
    private var _flushed: Boolean = true
    private var _closed: Boolean = false

    // When true, every operation throws IOException (to test error trapping).
    var throwing: Boolean = false

    def flushed: Boolean = _flushed
    def closed: Boolean = _closed

    private def maybeThrow(): Unit = {
      if (throwing)
        throw new IOException("MockStringWriter throws")
    }

    // Any write marks the stream as unflushed before delegating.
    private def writeOp[A](op: => A): A = {
      maybeThrow()
      _flushed = false
      op
    }

    override def flush(): Unit = {
      maybeThrow()
      super.flush()
      _flushed = true
    }

    override def close(): Unit = {
      maybeThrow()
      super.close()
      _closed = true
    }

    override def append(c: Char): StringWriter =
      writeOp(super.append(c))
    override def append(csq: CharSequence): StringWriter =
      writeOp(super.append(csq))
    override def append(csq: CharSequence, start: Int, end: Int): StringWriter =
      writeOp(super.append(csq, start, end))

    override def write(c: Int): Unit =
      writeOp(super.write(c))
    override def write(cbuf: Array[Char]): Unit =
      writeOp(super.write(cbuf))
    override def write(cbuf: Array[Char], off: Int, len: Int): Unit =
      writeOp(super.write(cbuf, off, len))
    override def write(str: String): Unit =
      writeOp(super.write(str))
    override def write(str: String, off: Int, len: Int): Unit =
      writeOp(super.write(str, off, len))
  }
}
lrytz/scala-js
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/io/PrintWriterTest.scala
Scala
bsd-3-clause
10,237
/*
 * @author Flavio Keller
 *
 * Copyright 2014 University of Zurich
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.signalcollect.sna.metrics

import java.math.MathContext
import scala.BigDecimal
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.SynchronizedBuffer
import com.signalcollect.DataGraphVertex
import com.signalcollect.ExecutionConfiguration
import com.signalcollect.Graph
import com.signalcollect.Vertex
import com.signalcollect.configuration.ExecutionMode
import com.signalcollect.sna.ComputationResults
import com.signalcollect.sna.ExecutionResult
import com.signalcollect.DefaultEdge

/**
 * Executes the calculation of the local cluster coefficient values of a graph's vertices
 */
object LocalClusterCoefficient {

  /**
   * Function responsible for the execution.
   * Runs the signal/collect computation synchronously, gathers all vertices,
   * then computes each vertex's local cluster coefficient (rounded to 3
   * significant digits) plus the graph-wide average.
   * @param the parsed graph, instance of {@link com.signalcollect.Graph}
   * @return {@link com.signalcollect.sna.ExecutionResult} object
   */
  def run(graph: Graph[Any, Any]): ExecutionResult = {
    val execmode = ExecutionConfiguration(ExecutionMode.Synchronous)
    val stats = graph.execute(execmode)
    graph.awaitIdle
    // NOTE(review): SynchronizedBuffer is deprecated in modern Scala; the
    // mixin is used here because foreachVertex may call back concurrently —
    // TODO confirm against the signal/collect threading model.
    var vertexArray = new ArrayBuffer[Vertex[Any, _, Any, Any]] with SynchronizedBuffer[Vertex[Any, _, Any, Any]]
    graph.foreachVertex(v => vertexArray += v)
    // Index the vertices by their (Int) id for neighbour lookups below.
    var vertexMap = scala.collection.mutable.Map[Int, LocalClusterCoefficientVertex]()
    for (vertex <- vertexArray) {
      vertexMap.put(vertex.id.asInstanceOf[Int], vertex.asInstanceOf[LocalClusterCoefficientVertex])
    }
    graph.shutdown
    var sumOfLCC = 0.0
    // TreeMap keyed by vertex-id string -> per-vertex coefficient (3 sig. digits).
    var treeMap = new java.util.TreeMap[String, Object]()
    /*
     * determining the single local cluster coefficient values and adding them up
     * in order to determine the average later
     */
    for (d <- vertexMap) {
      val lcc = gatherNeighbours(d._2, vertexMap.toMap)
      sumOfLCC += lcc
      treeMap.put(d._1.toString, BigDecimal(lcc).round(new MathContext(3)).toDouble.asInstanceOf[Object])
    }
    val averageclcoeff = sumOfLCC / vertexMap.toMap.size.toDouble
    new ExecutionResult(new ComputationResults(BigDecimal(averageclcoeff).round(new MathContext(3)).toDouble, treeMap), vertexArray, stats)
  }

  /**
   * Function which is responsible to gather a vertex' neighbours together
   * and calculate its local cluster coefficient value out of it.
   * The neighbourhood is the union of incoming (vertex.state keys) and
   * outgoing (vertex.outgoingEdges keys) neighbours; each neighbour is
   * counted at most once via the `passedNeighbours` set.
   * Returns 0.0 for an isolated vertex.
   */
  def gatherNeighbours(vertex: LocalClusterCoefficientVertex, vertexMap: Map[Int, LocalClusterCoefficientVertex]): Double = {
    var connectedNeighbours = 0.0
    // Neighbour ids already processed (prevents double-counting a neighbour
    // that is both an in- and out-neighbour).
    var passedNeighbours = scala.collection.mutable.Set[Int]()
    /*
     * set that represents the incoming edges and outgoing edges together
     */
    val neighbourSet = vertex.state.keySet.union(vertex.outgoingEdges.keySet.asInstanceOf[Set[Int]])
    if (neighbourSet.isEmpty) {
      0.0
    } else {
      // k*(k-1) ordered pairs among k neighbours; special-cased to 1 when
      // k == 1 to avoid dividing by zero.
      val nrOfPossibleConnections = if (neighbourSet.size == 1) 1 else (neighbourSet.size * (neighbourSet.size - 1)).toDouble
      /*
       * iterating through outgoing edges
       */
      for (outgoingNeighbour <- vertex.outgoingEdges) {
        val neighbourVertex = vertexMap.get(outgoingNeighbour._2.targetId.asInstanceOf[Int]).get
        if (!passedNeighbours.contains(outgoingNeighbour._1.asInstanceOf[Int])) {
          // Count the neighbour's links that stay inside this vertex's neighbourhood.
          val outgoingneighboursOfneighbour = neighbourVertex.state.filter(p => neighbourSet.contains(p._1))
          connectedNeighbours += outgoingneighboursOfneighbour.size
        }
        passedNeighbours.add(outgoingNeighbour._1.asInstanceOf[Int])
      }
      /*
       * iterating through incoming edges
       */
      for (incomingNeighbour <- vertex.state) {
        val neighbourVertex = vertexMap.get(incomingNeighbour._1).get
        // NOTE(review): this shadows the outer `neighbourSet` with an
        // identical recomputation (same `vertex`) — redundant but harmless.
        val neighbourSet = vertex.state.keySet.union(vertex.outgoingEdges.keySet.asInstanceOf[Set[Int]])
        if (!passedNeighbours.contains(incomingNeighbour._1)) {
          val outgoingneighboursOfneighbour = neighbourVertex.state.filter(p => neighbourSet.contains(p._1))
          connectedNeighbours += outgoingneighboursOfneighbour.size
        }
        passedNeighbours.add(incomingNeighbour._1.asInstanceOf[Int])
      }
      val localClusterCoefficient = connectedNeighbours / nrOfPossibleConnections
      localClusterCoefficient
    }
  }
}

/**
 * Represents a vertex of a Local Cluster Coefficient graph, extends {@link com.signalcollect.DataGraphVertex}
 * @param the vertex' id
 */
class LocalClusterCoefficientVertex(id: Any) extends DataGraphVertex(id, Map[Int, Set[Int]]()) {

  type Signal = Set[Int]
  type State = Map[Int, Set[Int]]

  /**
   * The collect function stores all ids and outgoing edges of the incoming edges' source vertices in a map
   */
  def collect: State = {
    var neighbours = scala.collection.mutable.Map[Int, Set[Int]]()
    for (neighbour <- mostRecentSignalMap) {
      neighbours.put(neighbour._1.asInstanceOf[Int], neighbour._2)
    }
    neighbours.toMap
  }
}

/**
 * Represents an edge of a Local Cluster Coefficient graph, extends {@link com.signalcollect.DefaultEdge}
 * @param the traget vertex' id
 */
class LocalClusterCoefficientEdge(t: Int) extends DefaultEdge(t) {

  type Source = DataGraphVertex[Any, Any]

  /**
   * The signal function transmits all outgoing edges as a set to its target vertex
   */
  def signal = source.outgoingEdges.values.toSet
}
fkzrh/signal-collect-sna
src/main/scala/com/signalcollect/sna/metrics/LocalClusterCoefficient.scala
Scala
apache-2.0
5,859
/*
 * Copyright 2012 Twitter Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.twitter.zipkin.config

import com.twitter.zipkin.storage.cassandra.CassandraStorage
import com.twitter.zipkin.gen
import com.twitter.cassie.codecs.{Utf8Codec, LongCodec}
import com.twitter.cassie.{ReadConsistency, WriteConsistency}
import com.twitter.logging.Logger

/**
 * Storage configuration that materializes a [[CassandraStorage]] over the
 * keyspace supplied by [[cassandraConfig]].
 */
trait CassandraStorageConfig extends StorageConfig {

  val log = Logger.get(getClass.getName)

  /** Connection/keyspace settings; supplied by the concrete config. */
  def cassandraConfig: CassandraConfig

  // this is how many traces we fetch from cassandra in one request
  var traceFetchBatchSize = 500

  // Name of the column family holding trace rows.
  var tracesCf : String = "Traces"

  /**
   * Builds a [[CassandraStorage]] wired to this config's keyspace.
   * Called once per storage instantiation.
   */
  def apply(): CassandraStorage = {
    // Captured under fresh names so the anonymous subclass below can refer
    // to them without shadowing its own members.
    val _storageConfig = this
    val _keyspace = cassandraConfig.keyspace

    /**
     * Row key is the trace id.
     * Column name is the span identifier.
     * Value is a Thrift serialized Span.
     */
    // Both write and read consistency are set to One — presumably distinct
    // overloads of `consistency`; verify against the cassie API.
    val _traces = _keyspace.columnFamily[Long, String, gen.Span](tracesCf, LongCodec, Utf8Codec, cassandraConfig.spanCodec)
      .consistency(WriteConsistency.One)
      .consistency(ReadConsistency.One)

    new CassandraStorage() {
      val cassandraConfig = _storageConfig.cassandraConfig
      val storageConfig = _storageConfig
      keyspace = _keyspace
      val traces = _traces
    }
  }
}
rodzyn0688/zipkin
zipkin-server/src/main/scala/com/twitter/zipkin/config/CassandraStorageConfig.scala
Scala
apache-2.0
1,818
package com.artclod.mathml.scalar

import com.artclod.mathml._
import com.artclod.mathml.scalar.apply._
import com.artclod.mathml.scalar.concept._

import scala.annotation.tailrec
import scala.util._
import scala.xml._

/**
 * Base class for all MathML expression elements. Each element is itself an
 * XML [[Elem]] and supports numeric evaluation, simplification,
 * differentiation, and operator-style construction of larger expressions.
 */
abstract class MathMLElem(
  prefix: String,
  label: String,
  attributes1: MetaData,
  scope: NamespaceBinding,
  minimizeEmpty: Boolean,
  child: Node*)
  extends Elem(prefix, label, attributes1, scope, minimizeEmpty, child: _*) with MathMLChildren {

  // LATER it would be nice if this was just called eval but Map[A,B] is Iterable[(A,B)] so the signatures conflict
  /** Varargs convenience wrapper around [[eval]]. */
  def evalT(boundVariables: (String, Double)*) : Try[Double] = eval(Map(boundVariables:_*))

  /** Evaluates this expression with the given variable bindings. */
  def eval(boundVariables: Map[String, Double] = Map()): Try[Double]

  /** True when this element is the constant zero (per its [[c]] value). */
  def isZero: Boolean = c.map(_.isZero).getOrElse(false)

  /** True when this element is the constant one (per its [[c]] value). */
  def isOne: Boolean = c.map(_.isOne).getOrElse(false)

  // Null-based lazy cache for the fully simplified form (computed at most once).
  private var s_ : MathMLElem = null
  /** The fully simplified form of this expression (cached). */
  def s = {
    if (s_ == null) { s_ = simplifyRecurse(this) }
    s_
  }

  /** Alias for [[s]]. */
  def simplify = s

  // Repeatedly applies one simplification step until a fixed point is
  // reached (the step returns an element equal to its input).
  @tailrec private def simplifyRecurse(elem: MathMLElem): MathMLElem = {
    val simp = elem.simplifyStepWithCNCheck
    if (simp == elem) { elem }
    else { simplifyRecurse(simp) }
  }

  // If the element reduces to a constant, prefer the constant; otherwise
  // take one subclass-defined simplification step.
  private def simplifyStepWithCNCheck: MathMLElem = c.getOrElse(simplifyStep)

  /**
   * Does one round of simplification on this element.
   * Implementations of this method should not use the "s".
   */
  protected def simplifyStep: MathMLElem

  /** The constant value of this expression, if it is one; subclass-defined. */
  protected def constant: Option[Constant]
  // Null-based lazy cache for `constant` (null = not yet computed).
  private var c_ : Option[Constant] = null
  /** Cached constant value of this expression, if any. */
  def c = {
    if (c_ == null) { c_ = constant }
    c_
  }

  /** Names of all free variables appearing in this expression. */
  def variables: Set[String]

  /** Derivative with respect to `wrt`; subclass-defined, unsimplified. */
  protected def derivative(wrt: String): MathMLElem

  /**
   * Simplified derivative w.r.t. `wrt`; short-circuits to `0` (presumably a
   * zero-constant element in scope — TODO confirm) when the expression does
   * not mention the variable.
   */
  def d(wrt: String) = if (!variables.contains(wrt)) `0` else derivative(wrt).s

  /** Derivative with respect to "x". */
  def dx = d("x")

  // Operator-style constructors. Each primitive overload wraps its argument
  // in a Cn constant node and delegates to the MathMLElem version.
  def +(m: MathMLElem) = ApplyPlus(this, m)
  def +(m: Short) = ApplyPlus(this, Cn(m))
  def +(m: Int) = ApplyPlus(this, Cn(m))
  def +(m: Long) = ApplyPlus(this, Cn(m))
  def +(m: Float) = ApplyPlus(this, Cn(m))
  def +(m: Double) = ApplyPlus(this, Cn(m))

  def *(m: MathMLElem) = ApplyTimes(this, m)
  def *(m: Short) = ApplyTimes(this, Cn(m))
  def *(m: Int) = ApplyTimes(this, Cn(m))
  def *(m: Long) = ApplyTimes(this, Cn(m))
  def *(m: Float) = ApplyTimes(this, Cn(m))
  def *(m: Double) = ApplyTimes(this, Cn(m))

  def -(m: MathMLElem) = ApplyMinusB(this, m)
  def -(m: Short) = ApplyMinusB(this, Cn(m))
  def -(m: Int) = ApplyMinusB(this, Cn(m))
  def -(m: Long) = ApplyMinusB(this, Cn(m))
  def -(m: Float) = ApplyMinusB(this, Cn(m))
  def -(m: Double) = ApplyMinusB(this, Cn(m))

  /** Unary negation. */
  def unary_-() = ApplyMinusU(this)

  def /(m: MathMLElem) = ApplyDivide(this, m)
  def /(m: Short) = ApplyDivide(this, Cn(m))
  def /(m: Int) = ApplyDivide(this, Cn(m))
  def /(m: Long) = ApplyDivide(this, Cn(m))
  def /(m: Float) = ApplyDivide(this, Cn(m))
  def /(m: Double) = ApplyDivide(this, Cn(m))

  def ^(m: MathMLElem) = ApplyPower(this, m)
  def ^(m: Short) = ApplyPower(this, Cn(m))
  def ^(m: Int) = ApplyPower(this, Cn(m))
  def ^(m: Long) = ApplyPower(this, Cn(m))
  def ^(m: Float) = ApplyPower(this, Cn(m))
  def ^(m: Double) = ApplyPower(this, Cn(m))

  /** Mathematical equality check against `e`, delegated to MathMLEq over "x". */
  def ?=(e: MathMLElem) = MathMLEq.checkEq("x", this, e)

  /** Whether the expression is defined (evaluable) under the given bindings. */
  def isDefinedAt(boundVariables: (String, Double)*) = MathMLDefined.isDefinedAt(this, boundVariables:_*)

  /** Renders this expression as MathJS source. */
  def toMathJS : String
}

/** Marker hierarchy describing how many MathML children an element carries. */
sealed trait MathMLChildren { }

/** Element with no MathML children (e.g. leaves). */
trait NoMathMLChildren extends MathMLChildren { }

/** Element with exactly one MathML child. */
trait OneMathMLChild extends MathMLChildren {
  def mathMLChild : MathMLElem
  /** Rebuilds this element with a replacement child. */
  def copy(child: MathMLElem) : MathMLElem
}

/** Element with exactly two MathML children. */
trait TwoMathMLChildren extends MathMLChildren {
  def mathMLChildren : (MathMLElem, MathMLElem)
  /** Rebuilds this element with replacement children. */
  def copy(first: MathMLElem, second: MathMLElem) : MathMLElem
}

/** Element with an arbitrary number of MathML children. */
trait SomeMathMLChildren extends MathMLChildren {
  def mathMLChildren : Seq[MathMLElem]
  /** Rebuilds this element with replacement children. */
  def copy(children: MathMLElem*) : MathMLElem
}
kristiankime/calc-tutor
app/com/artclod/mathml/scalar/MathMLElem.scala
Scala
mit
3,793
package org.vaslabs.granger.reminders

import java.io.File
import java.time.ZonedDateTime

import akka.actor.typed.scaladsl.Behaviors
import akka.actor.typed.{ ActorRef, Behavior }
import monocle.macros.Lenses
import org.eclipse.jgit.api.Git
import org.vaslabs.granger.modelv2.PatientId
import org.vaslabs.granger.repo.git.{ EmptyProvider, GitRepo }
import org.vaslabs.granger.repo.{ EmptyRepo, RepoErrorState }

/**
 * Typed actor managing RCT (root canal treatment) reminders: setting,
 * snoozing, soft-deleting and querying them. Every state change is persisted
 * to a git-backed JSON repository so reminders survive restarts.
 */
object RCTReminderActor {

  import io.circe.java8.time._
  import io.circe.generic.auto._

  // An empty repository materialises as "no reminders yet".
  implicit val emptyNotificationsProvider: EmptyProvider[List[Reminder]] = () => List.empty

  /**
   * Initial behaviour: load persisted reminders from `repoLocation`, then
   * switch to the running behaviour. The load result (state or error) is
   * delivered back to the actor as a self-message.
   */
  def behaviour(repoLocation: String)(implicit git: Git): Behavior[Protocol] = Behaviors.setup { ctx =>
    val notificationsRepo: GitRepo[List[Reminder]] =
      new GitRepo[List[Reminder]](new File(repoLocation), "notification_changes.json")

    // Left => loading failed, Right => loaded reminder list; both become self-messages.
    notificationsRepo.getState().left.map(err => ctx.self ! LoadingError(err)).map(ReminderState).foreach(ctx.self ! _)

    Behaviors.receive {
      case (ctx, ReminderState(reminders)) =>
        ctx.log.info("Notification system for RCT reminders is initialised")
        behaviourWithReminders(reminders.toSet, notificationsRepo)
      case (ctx, LoadingError(EmptyRepo)) =>
        // First run: create the backing file, then start with an empty set.
        notificationsRepo.saveNew()
        ctx.log.info("Notification system for RCT reminders is initialised for the first time")
        behaviourWithReminders(Set.empty, notificationsRepo)
      case (ctx, LoadingError(genericError)) =>
        // Any other repo error disables the reminder subsystem entirely.
        ctx.log.info("Unhandled error {}, reminders are disabled", genericError)
        Behaviors.ignore
      case _ =>
        Behaviors.ignore
    }
  }

  /**
   * Running behaviour. `reminders` is the authoritative in-memory state; the
   * repo is written on every mutation.
   */
  private[this] def behaviourWithReminders(
      reminders: Set[Reminder],
      notificationsRepo: GitRepo[List[Reminder]]): Behavior[Protocol] =
    Behaviors.receiveMessage {
      case CheckReminders(now, replyTo: ActorRef[Notify]) =>
        // Report reminders that are due (remindOn <= now) and not soft-deleted.
        val remindersToSend = reminders.filter(_.remindOn.compareTo(now) <= 0).filterNot(_.deletedOn.isDefined)
        val notify = Notify(remindersToSend.map(r => Notification(r.submitted, r.remindOn, r.externalReference)).toList)
        replyTo ! notify
        Behaviors.same

      case SetReminder(submitted, remindOn, externalReference, replyTo: ActorRef[ReminderSetAck]) =>
        val newReminder = Reminder(submitted, remindOn, externalReference)
        val allReminders = reminders + newReminder
        // FIX: previously new reminders were kept only in memory, while modify and
        // delete persisted their changes — a restart would silently lose freshly
        // set reminders. Persist here too. The ack is still sent unconditionally
        // to preserve the original protocol towards the caller.
        notificationsRepo.save(
          s"Saving new reminder $submitted of patient id $externalReference",
          allReminders.toList)
        replyTo ! ReminderSetAck(externalReference, submitted, remindOn)
        behaviourWithReminders(allReminders, notificationsRepo)

      case ModifyReminder(timestamp, snoozeTo, externalReference, replyTo: ActorRef[SnoozeAck]) =>
        // Snooze an existing, non-deleted reminder; ack only on successful persist.
        val savedReminders = for {
          reminderToModify <- reminders.find(
            r => r.externalReference == externalReference && r.submitted.compareTo(timestamp) == 0 && r.deletedOn.isEmpty)
          modifiedReminder = Reminder.remindOn.set(snoozeTo)(reminderToModify)
          newReminders = (reminders - reminderToModify) + (modifiedReminder)
          _ <- notificationsRepo
            .save(s"Saving reminder modification $timestamp of patient id $externalReference", newReminders.toList)
            .toOption
          _ = replyTo ! SnoozeAck(modifiedReminder.externalReference, timestamp, modifiedReminder.remindOn)
        } yield newReminders
        // On lookup or persistence failure the state is left untouched.
        savedReminders.map(behaviourWithReminders(_, notificationsRepo)).getOrElse(Behaviors.same)

      case DeleteReminder(timestamp, externalReference, deletionTime, replyTo: ActorRef[DeletedAck]) =>
        // Soft-delete: the reminder is kept with a deletedOn timestamp.
        val savedReminders = for {
          reminderToDelete <- reminders.find(r => r.externalReference == externalReference && r.submitted == timestamp)
          reminderAfterDeletion = Reminder.deletedOn.set(Some(deletionTime))(reminderToDelete)
          newReminders = (reminders - reminderToDelete) + reminderAfterDeletion
          _ <- notificationsRepo
            .save(s"Stopping reminder $timestamp of patient id $externalReference", newReminders.toList)
            .toOption
        } yield newReminders
        savedReminders.foreach(_ => replyTo ! DeletedAck(timestamp, externalReference))
        savedReminders.map(behaviourWithReminders(_, notificationsRepo)).getOrElse(Behaviors.same)

      case PatientReminders(patientId, replyTo: ActorRef[AllPatientReminders]) =>
        // Read-only query: all non-deleted reminders of one patient.
        replyTo ! AllPatientReminders(
          reminders
            .filter(_.externalReference == patientId)
            .filter(_.deletedOn.isEmpty)
            .map(r => Notification(r.submitted, r.remindOn, r.externalReference))
            .toList)
        Behaviors.same
    }
}

/** Message protocol of [[RCTReminderActor]]. */
sealed trait Protocol

case class SetReminder(
    submitted: ZonedDateTime,
    remindOn: ZonedDateTime,
    externalReference: PatientId,
    actorRef: ActorRef[ReminderSetAck]) extends Protocol

case class ModifyReminder(
    reminderTimestamp: ZonedDateTime,
    snoozeTo: ZonedDateTime,
    externalReference: PatientId,
    replyTo: ActorRef[SnoozeAck]) extends Protocol

case class DeleteReminder(
    reminderTimestamp: ZonedDateTime,
    externalReference: PatientId,
    deletionTime: ZonedDateTime,
    actorRef: ActorRef[DeletedAck]) extends Protocol

case class ReminderSetAck(externalId: PatientId, timestamp: ZonedDateTime, notificationTime: ZonedDateTime)

case class SnoozeAck(externalReference: PatientId, timestamp: ZonedDateTime, movedAt: ZonedDateTime)

case class DeletedAck(timestamp: ZonedDateTime, externalRefernce: PatientId)

case class Notification(timestamp: ZonedDateTime, notificationTime: ZonedDateTime, externalReference: PatientId)

case class Notify(notifications: List[Notification]) extends Protocol

case class CheckReminders(now: ZonedDateTime, replyTo: ActorRef[Notify]) extends Protocol

case class PatientReminders(externalId: PatientId, replyTo: ActorRef[AllPatientReminders]) extends Protocol

case class AllPatientReminders(notifications: List[Notification])

// Internal state record; @Lenses generates the remindOn/deletedOn lenses used above.
@Lenses private[reminders] case class Reminder(
    submitted: ZonedDateTime,
    remindOn: ZonedDateTime,
    externalReference: PatientId,
    deletedOn: Option[ZonedDateTime] = None) extends Protocol

private[reminders] case class ReminderState(reminders: List[Reminder]) extends Protocol

private[reminders] case class LoadingError(repoErrorState: RepoErrorState) extends Protocol
vaslabs/granger
src/main/scala/org/vaslabs/granger/reminders/RCTReminderActor.scala
Scala
lgpl-3.0
6,202
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql

import org.apache.spark.sql.test.SharedSQLContext

/** Tests for the JSON extraction SQL functions (get_json_object, json_tuple). */
class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
  import testImplicits._

  test("function get_json_object") {
    val df: DataFrame = Seq(("""{"name": "alice", "age": 5}""", "")).toDF("a", "b")
    val projected = df.selectExpr("get_json_object(a, '$.name')", "get_json_object(a, '$.age')")
    checkAnswer(projected, Row("alice", "5"))
  }

  // Fixture rows: key -> JSON payload, including a null, an empty-string field
  // and a malformed document.
  val tuples: Seq[(String, String)] = Seq(
    ("1", """{"f1": "value1", "f2": "value2", "f3": 3, "f5": 5.23}"""),
    ("2", """{"f1": "value12", "f3": "value3", "f2": 2, "f4": 4.01}"""),
    ("3", """{"f1": "value13", "f4": "value44", "f3": "value33", "f2": 2, "f5": 5.01}"""),
    ("4", null),
    ("5", """{"f1": "", "f5": null}"""),
    ("6", "[invalid JSON string]"))

  // Expected extraction of fields f1..f5 for every fixture row above; shared by
  // the get_json_object and json_tuple tests, which must agree field-by-field.
  private val fiveFieldRows = Seq(
    Row("1", "value1", "value2", "3", null, "5.23"),
    Row("2", "value12", "2", "value3", "4.01", null),
    Row("3", "value13", "2", "value33", "value44", "5.01"),
    Row("4", null, null, null, null, null),
    Row("5", "", null, null, null, null),
    Row("6", null, null, null, null, null))

  test("function get_json_object - null") {
    val df: DataFrame = tuples.toDF("key", "jstring")
    val extracted = Seq("$.f1", "$.f2", "$.f3", "$.f4", "$.f5")
      .map(path => functions.get_json_object($"jstring", path))
    checkAnswer(df.select(($"key" +: extracted): _*), fiveFieldRows)
  }

  test("json_tuple select") {
    val df: DataFrame = tuples.toDF("key", "jstring")
    checkAnswer(
      df.select($"key", functions.json_tuple($"jstring", "f1", "f2", "f3", "f4", "f5")),
      fiveFieldRows)
  }

  test("json_tuple filter and group") {
    val df: DataFrame = tuples.toDF("key", "jstring")
    // json_tuple names its output columns c0, c1, ...
    val expr = df
      .select(functions.json_tuple($"jstring", "f1", "f2"))
      .where($"c0".isNotNull)
      .groupBy($"c1")
      .count()
    checkAnswer(expr, Seq(Row(null, 1), Row("2", 2), Row("value2", 1)))
  }
}
chenc10/Spark-PAF
sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
Scala
apache-2.0
3,342
package scala.tools.nsc.doc.html

import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4

/** Checks scaladoc's syntax highlighter against quote-heavy string literals. */
@RunWith(classOf[JUnit4])
class StringLiteralTest {

  /** Runs the highlighter over `in` and flattens the result to one string. */
  private def highlighted(in: String): String =
    SyntaxHigh(in).map(_.toText).mkString

  @Test
  def testHighlightingQuote(): Unit =
    assertEquals("""<span class="lit">"</span>""", highlighted("\\""))

  @Test
  def testHighlightingDoubleQuotes(): Unit =
    assertEquals("""<span class="lit">"content"</span>""", highlighted("\\"content\\""))

  @Test
  def testHighlightingQuoteFollowingBackslash(): Unit =
    assertEquals("""\\<span class="lit">"</span>""", highlighted("\\\\\\""))

  @Test
  def testHighlightingQuotesIgnoringEscapedQuote(): Unit =
    assertEquals("""<span class="lit">"\\""</span>""", highlighted("\\"\\\\\\"\\""))

  @Test
  def testHighlightingTripleQuotes(): Unit =
    assertEquals("<span class=\\"lit\\">\\"\\"\\"</span>", highlighted("\\"\\"\\""))

  @Test
  def testHighlightingRawStringLiteralIgnoringQuote(): Unit =
    assertEquals(
      "<span class=\\"lit\\">\\"\\"\\"content\\"\\"content\\"\\"\\"</span>",
      highlighted("\\"\\"\\"content\\"\\"content\\"\\"\\""))
}
martijnhoekstra/scala
test/junit/scala/tools/nsc/doc/html/StringLiteralTest.scala
Scala
apache-2.0
1,394
package specs.analysis

import helpers.Utils
import org.scalatest._
import utils.TestUtils

/**
 * Mutability-analysis specs for anonymous classes: function literals and
 * `new A with B` instantiations, checked against the expected mutability
 * verdicts of the plugin under test.
 */
class AnonymousClassSpec extends FlatSpec {

  // Monotonically increasing test names ("1", "2", ...). An Iterator replaces
  // the previous `var i` counter so the spec exposes no mutable field.
  private val testNumbers = Iterator.from(1)

  // Fresh, unique name for each `it should ... in` clause.
  def testNr: String = testNumbers.next().toString

  // Function literals compile to anonymous classes and must be deeply immutable.
  it should testNr in {
    TestUtils.expectMutability(Map(List("Fn") -> Utils.IsDeeplyImmutable)) {
      """
      class Fn {
        val fn = (x: Int) => x + 1
      }
      """
    }
    TestUtils.expectMutability(Map(List("Fn") -> Utils.IsDeeplyImmutable)) {
      """
      class Fn {
        val fn = (x: Int) => (x: Int) => x + 1
      }
      """
    }
  }

  // An anonymous `new A with B` over immutable parents is deeply immutable.
  it should testNr in {
    TestUtils.expectMutability(Map(List("Test") -> Utils.IsDeeplyImmutable)) {
      """
      trait B {}
      class A {}
      class Test {
        val fn = new A with B
      }
      """
    }
  }

  // Mixing in a trait with a `var` makes the composition shallow-immutable at
  // best; declaration order of A/B/Test must not change the verdicts.
  it should testNr in {
    TestUtils.expectMutability(Map(List("Test", "A") -> Utils.IsShallowImmutable, List("B") -> Utils.IsMutable)) {
      """
      trait B {
        var a: String = "mutable"
      }
      class A {
        val b: B = null
      }
      class Test {
        val fn = new A with B
      }
      """
    }
    TestUtils.expectMutability(Map(List("Test", "A") -> Utils.IsShallowImmutable, List("B") -> Utils.IsMutable)) {
      """
      trait B {
        var a: String = "mutable"
      }
      class Test {
        val fn = new A with B
      }
      class A {
        val b: B = null
      }
      """
    }
    TestUtils.expectMutability(Map(List("Test", "A") -> Utils.IsShallowImmutable, List("B") -> Utils.IsMutable)) {
      """
      class Test {
        val fn = new A with B
      }
      trait B {
        var a: String = "mutable"
      }
      class A {
        val b: B = null
      }
      """
    }
  }
}
luax/scala-immutability-plugin
plugin/src/test/scala/specs/analysis/AnonymousClassSpec.scala
Scala
mit
1,747
/*
 * Copyright (C) 2010 Romain Reuillon
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package org.openmole.plugin.domain.file

import java.io.File

import org.openmole.core.dsl._
import org.openmole.core.dsl.extension._

/**
 * Companion of [[ListFilesDomain]]: declares it as a discrete domain of File
 * values and provides the user-facing factory.
 */
object ListFilesDomain extends JavaLogger {

  // Exposes the domain to the workflow engine: the iterator supplies the files,
  // and the inputs/validations of the optional `directory` and `filter`
  // FromContext expressions are aggregated so the engine can check them upfront.
  implicit def isDiscrete: DiscreteFromContextDomain[ListFilesDomain, File] =
    domain ⇒
      Domain(
        domain.iterator,
        domain.directory.toSeq.flatMap(_.inputs) ++ domain.filter.toSeq.flatMap(_.inputs),
        domain.directory.toSeq.map(_.validate) ++ domain.filter.toSeq.map(_.validate)
      )

  /**
   * Builds a domain enumerating files under `base`.
   *
   * @param base      root directory to list
   * @param directory optional context-evaluated sub-directory of `base`
   * @param recursive whether to descend into sub-directories
   * @param filter    optional context-evaluated regex matched against file names
   */
  // NOTE(review): OptionalArgument appears to convert implicitly to Option via
  // the openmole DSL imports — confirm against org.openmole.core.dsl.
  def apply(
    base: File,
    directory: OptionalArgument[FromContext[String]] = OptionalArgument(),
    recursive: Boolean = false,
    filter: OptionalArgument[FromContext[String]] = OptionalArgument()
  ): ListFilesDomain = new ListFilesDomain(base, directory, recursive, filter)
}

import org.openmole.plugin.domain.file.ListFilesDomain.Log

/**
 * A domain whose values are the files found under `base` (optionally within a
 * context-dependent sub-directory), filtered by an optional file-name regex.
 */
class ListFilesDomain(
  base: File,
  private val directory: Option[FromContext[String]] = None,
  recursive: Boolean = false,
  private val filter: Option[FromContext[String]] = None
) {

  // Lazily enumerates the files once the workflow context is available.
  def iterator = FromContext { p ⇒
    import p._
    // Absent filter means "accept every file".
    def toFilter(f: File) = filter.map(e ⇒ f.getName.matches(e.from(context))).getOrElse(true)
    // Resolve the effective directory; falls back to `base` when unset.
    val dir = directory.map(s ⇒ new File(base, s.from(context))).getOrElse(base)

    if (!dir.exists) {
      // Best-effort: a missing directory yields an empty domain rather than failing.
      Log.logger.warning("Directory " + dir + " in ListFilesDomain doesn't exists, returning an empty list of values.")
      Iterator.empty
    }
    // listRecursive / listFilesSafe are openmole extension methods on File
    // (from the dsl imports above) — they handle null results from listFiles.
    else if (recursive) dir.listRecursive(toFilter _).iterator
    else dir.listFilesSafe(toFilter _).iterator
  }
}
openmole/openmole
openmole/plugins/org.openmole.plugin.domain.file/src/main/scala/org/openmole/plugin/domain/file/ListFilesDomain.scala
Scala
agpl-3.0
2,357
package org.fusesource.cloudmix.agent.resources

import java.io.File

/**
 * Directory resource representing the root of a served tree: its path is the
 * parent path itself, and its parent link points one level up (or "/").
 *
 * @version $Revision: 1.1 $
 */
class RootDirectoryResource(file: File, parentPath: String) extends DirectoryResource(file, parentPath) {

  /** The root resource is addressed directly by its parent path. */
  override def path = parentPath

  /** Everything before the last '/' of the parent path, or "/" when already at the top. */
  override def parentLink = parentPath.lastIndexOf('/') match {
    case cut if cut > 0 => parentPath.substring(0, cut)
    case _              => "/"
  }
}
chirino/cloudmix
org.fusesource.cloudmix.agent.mop.web/src/main/scala/org/fusesource/cloudmix/agent/resources/RootDirectoryResource.scala
Scala
agpl-3.0
411
package tethys.derivation.builder

import scala.annotation.compileTimeOnly
import scala.language.implicitConversions

// Sealed root of the reader-derivation builder DSL. The visible chunk declares
// no cases; NOTE(review): being sealed, any subclasses must live in this same
// file — confirm whether the DSL constructs instances only at macro time
// (the compileTimeOnly import suggests so, but no usage is visible here).
sealed trait ReaderField[A]
tethys-json/tethys
modules/macro-derivation/src/main/scala/tethys/derivation/builder/ReaderField.scala
Scala
apache-2.0
145
package apps

/** The `montecarlo` package contains example applications for Monte Carlo
 *  Simulation.
 */
package object montecarlo {
  // Intentionally empty: the package object exists to carry the package-level
  // scaladoc above; shared helpers for the montecarlo apps would be added here.
}
NBKlepp/fda
scalation_1.3/scalation_models/src/main/scala/apps/montecarlo/package.scala
Scala
mit
142
/*
 * The MIT License (MIT)
 * <p>
 * Copyright (c) 2018
 * <p>
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * <p>
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 * <p>
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package io.techcode.streamy.event

import akka.actor.DeadLetterSuppression
import io.techcode.streamy.plugin.Plugin

/**
 * All plugin lifecycle events.
 */
object PluginEvent {

  /**
   * Base class of every plugin lifecycle event.
   * Extends DeadLetterSuppression so undelivered events don't flood the dead-letter log.
   *
   * @param name name of the plugin the event refers to.
   */
  abstract class All(val name: String) extends DeadLetterSuppression {

    /**
     * Converts this plugin event to the corresponding plugin lifecycle state.
     *
     * @return plugin lifecycle state.
     */
    def toState: Plugin.State.State

    override def toString: String = toState.toString
  }

  /**
   * Represents a plugin loading event.
   * This event is fired when a plugin is loading.
   */
  case class Loading(override val name: String) extends All(name) {
    def toState: Plugin.State.State = Plugin.State.Loading
  }

  /**
   * Represents a plugin running event.
   * This event is fired when a plugin is running.
   */
  case class Running(override val name: String) extends All(name) {
    def toState: Plugin.State.State = Plugin.State.Running
  }

  /**
   * Represents a plugin stopping event.
   * This event is fired when a plugin is stopping.
   */
  case class Stopping(override val name: String) extends All(name) {
    def toState: Plugin.State.State = Plugin.State.Stopping
  }

  /**
   * Represents a plugin stopped event.
   * This event is fired when a plugin is stopped.
   */
  case class Stopped(override val name: String) extends All(name) {
    def toState: Plugin.State.State = Plugin.State.Stopped
  }
}
amannocci/streamy
core/src/main/scala/io/techcode/streamy/event/PluginEvent.scala
Scala
mit
2,614
package org.jetbrains.plugins.scala.codeInspection

import org.jetbrains.annotations.Nls

package object scaladoc {
  // Localized family name shared by all Scaladoc inspections in this package;
  // shown in the IDE's inspection settings tree to group them together.
  @Nls private[scaladoc] val FamilyName = ScalaInspectionBundle.message("family.name.scala.scaladoc")
}
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/scaladoc/package.scala
Scala
apache-2.0
223
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature

import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.codeStyle.JavaCodeStyleManager
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.refactoring.changeSignature.JavaParameterInfo
import com.intellij.refactoring.util.CanonicalTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.{FunctionType, JavaArrayType}
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext

import scala.beans.{BeanProperty, BooleanBeanProperty}

/**
 * Describes one parameter during a Change Signature refactoring of a Scala
 * method, including how that parameter must be presented to Java call sites
 * (by-name params as Function0, repeated params as Seq).
 *
 * Nikolay.Tropin
 * 2014-08-10
 */
class ScalaParameterInfo(@BeanProperty var name: String,
                         @BeanProperty val oldIndex: Int,
                         var scType: ScType,
                         val project: Project,
                         var isRepeatedParameter: Boolean,
                         var isByName: Boolean,
                         @BeanProperty var defaultValue: String = "",
                         var keywordsAndAnnotations: String = "",
                         val isIntroducedParameter: Boolean = false)
  extends JavaParameterInfo {

  // Convenience constructor from an existing PSI parameter.
  def this(p: ScParameter) {
    this(p.name, p.index, p.getType(TypingContext.empty).getOrAny, p.getProject, p.isRepeatedParameter,
      p.isCallByNameParameter, keywordsAndAnnotations = ScalaParameterInfo.keywordsAndAnnotations(p))
  }

  // Default-value text used when rewriting Java call sites; starts equal to
  // the Scala default but may be edited independently.
  var defaultForJava = defaultValue

  @BooleanBeanProperty
  var useAnySingleVariable: Boolean = false

  // Whether the original parameter type was a Java array.
  val wasArrayType: Boolean = scType match {
    case JavaArrayType(_) => true
    case _ => false
  }

  val isVarargType = false //overriders in java of method with repeated parameters are not varargs

  // Maps the Scala parameter type to the PsiType a Java override/call site sees:
  // by-name A becomes (() => A), repeated A becomes Seq[A], otherwise a direct
  // conversion. Returns null when no Scala type is set.
  protected def psiType: PsiType = {
    if (scType == null) return null

    val allScope = GlobalSearchScope.allScope(project)
    if (isByName) {
      val functionType = FunctionType(scType, Seq())(project, allScope)
      functionType.toPsiType(project, allScope)
    }
    else if (isRepeatedParameter) {
      val seqType = ScDesignatorType.fromClassFqn("scala.collection.Seq", project, allScope)
      ScParameterizedType(seqType, Seq(scType)).toPsiType(project, allScope)
    }
    else scType.toPsiType(project, allScope)
  }

  override def createType(context: PsiElement, manager: PsiManager): PsiType = psiType

  // Builds the PsiExpression to substitute for this parameter at a Java call
  // site. For Scala default-argument accessors ("...$default$N"), the call's
  // qualifier (if any, and not `super`) is prepended so the accessor resolves.
  override def getValue(expr: PsiCallExpression): PsiExpression = {
    if (defaultForJava.isEmpty) return null
    val defaultText =
      if (defaultForJava.contains("$default$")) {
        val qual = expr match {
          case mc: PsiMethodCallExpression =>
            mc.getMethodExpression.getQualifierExpression match {
              case s: PsiSuperExpression => ""
              case null => ""
              case q => q.getText + "."
            }
          case _ => ""
        }
        qual + defaultForJava
      } else defaultForJava

    val expression = JavaPsiFacade.getElementFactory(project).createExpressionFromText(defaultText, expr)
    JavaCodeStyleManager.getInstance(project).shortenClassReferences(expression).asInstanceOf[PsiExpression]
  }

  override def getTypeWrapper: CanonicalTypes.Type = {
    if (scType != null) CanonicalTypes.createTypeWrapper(psiType) else null
  }

  override def getTypeText: String =
    if (scType != null) getTypeWrapper.getTypeText else null

  // Scala presentation of the type: "A*" for repeated, " => A" for by-name.
  def typeText = {
    val baseText = Option(scType).fold("")(_.presentableText)
    if (isRepeatedParameter) baseText + "*"
    else if (isByName) " => " + baseText
    else baseText
  }
}

object ScalaParameterInfo {
  def apply(p: ScParameter) = new ScalaParameterInfo(p)

  // Placeholder info for a not-yet-named new parameter (oldIndex = -1).
  def apply(project: Project) = new ScalaParameterInfo("", -1, null, project, false, false)

  // Text of everything preceding the parameter's name (keywords such as
  // `implicit`/`val` and annotations), preserved verbatim across the refactoring.
  def keywordsAndAnnotations(p: ScParameter) = {
    val nameId = p.nameId
    val elems = p.children.takeWhile(_ != nameId)
    elems.map(_.getText).mkString
  }

  // One Seq[ScalaParameterInfo] per parameter clause of the method.
  def allForMethod(methodLike: ScMethodLike): Seq[Seq[ScalaParameterInfo]] = {
    def infos(clause: ScParameterClause): Seq[ScalaParameterInfo] = clause.parameters.map(new ScalaParameterInfo(_))
    methodLike.parameterList.clauses.map(infos)
  }
}
katejim/intellij-scala
src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala
Scala
apache-2.0
4,360
package incompatibleMacroEngine

import scala.tools.nsc.Global
import scala.tools.nsc.plugins.{Plugin => NscPlugin}

/**
 * Test-only compiler plugin: rewrites the macro impl binding of every typed
 * macro body so it advertises a non-standard macro engine, exercising the
 * compiler's incompatible-engine diagnostics.
 */
class Plugin(val global: Global) extends NscPlugin {
  import global._
  import analyzer._

  val name = "incompatibleMacroEngine"
  val description = "A sample analyzer plugin that crafts a macro impl binding with a non-standard macro engine."
  val components = Nil
  addMacroPlugin(MacroPlugin)

  object MacroPlugin extends MacroPlugin {
    // The rewritten annotation argument tree is synthetic, so some nodes have
    // no type yet; assign ConstantType to literals and NoType elsewhere so the
    // annotation arguments are well-formed trees.
    def fixupBinding(tree: Tree) = new AstTransformer {
      override def transform(tree: Tree) = {
        tree match {
          case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const)
          case _ if tree.tpe == null => tree setType NoType
          case _ => ;
        }
        super.transform(tree)
      }
    }.transform(tree)

    override def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = {
      // Let the standard machinery type-check the macro body first.
      val result = standardTypedMacroBody(typer, ddef)
      // Destructure the single macro impl binding annotation; the first
      // argument (dropped via `_ :: others`) is the engine field we replace.
      val List(AnnotationInfo(atp, List(Apply(nucleus, _ :: others)), Nil)) = ddef.symbol.annotations
      val updatedBinding = Apply(nucleus,
        Assign(Literal(Constant("macroEngine")),
          Literal(Constant("vxxx (implemented in the incompatibleMacroEngine plugin)"))) :: others)
      ddef.symbol.setAnnotations(List(AnnotationInfo(atp, List(fixupBinding(updatedBinding)), Nil)))
      Some(result)
    }
  }
}
scala/scala
test/files/neg/macro-incompatible-macro-engine-b/Plugin_1.scala
Scala
apache-2.0
1,381
package com.twitter.finagle.buoyant.linkerd import com.twitter.finagle._ import com.twitter.finagle.tracing.{Trace, TraceInitializerFilter, Tracer} object ThriftTraceInitializer { val role = TraceInitializerFilter.role def serverModule[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] = new Stack.Module1[param.Tracer, ServiceFactory[Req, Rep]] { val role = ThriftTraceInitializer.role val description = "Ensure that there is a trace id set" def make(_tracer: param.Tracer, next: ServiceFactory[Req, Rep]) = { val param.Tracer(tracer) = _tracer new ServerFilter(tracer) andThen next } } class ServerFilter[Req, Rep](tracer: Tracer) extends SimpleFilter[Req, Rep] { def apply(req: Req, service: Service[Req, Rep]) = { if (!Trace.hasId) Trace.letTracerAndNextId(tracer) { service(req) } else service(req) } } }
denverwilliams/linkerd
linkerd/protocol/thrift/src/main/scala/com/twitter/finagle/buoyant/linkerd/ThriftTraceInitializer.scala
Scala
apache-2.0
919
/* * Copyright (C) 2016 Nikos Katzouris * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package utils import logic.{Clause, Literal} /** * Created by nkatz on 11/7/16. */ object LookAheads { /* * Whenever a new lookahead policy is defined (and declared in a mode declarations file), * the policy should be implemented in the LookAheadImplementations object and a mapping * between the policy's name and its implementation should be defined here. * */ val policyMap = Map("appearsEarlier" -> LookAheadImplementations.appearsEarlier_<-) class LookAheadSpecification(val lookAheadDefinition: String) { /* * A lookahead specification is a declaration of the form: * * lookahead( transaction(X,Y,T), before(T,T1), appearsEarlier(2) ) * * In this definition: * - transaction(X,Y,T) is the current atom * - before(T,T1) is the lookahead atom * - appearsEarlier(2) is the policy atom * * The lookahead specification says that when a version of a current atom is to be * added to the clause, then it must be accompanied by a version of the lookahead atom. * Moreover these two atoms must share (have in common) their 3rd and 1rst variable respectively * (denoted by the same variable T in the respective positions in the atom signatures). Also, the * policy atom enforces additional constrains on the remaining variables of the lookahead atom. 
* For instance the policy atom above (appearsEarlier(2)) states that the second variable in a * lookahead atom must appear earlier in the clause in which the lookahead atom is about to be added. * Policies are implemented in the LookAheadImplementations object * */ val parsed = Literal.parse(lookAheadDefinition) // the current atom val currentLiteralSignature = parsed.terms.head.asInstanceOf[Literal] // the lookahead val lookAheadLiteralSignature = parsed.terms(1).asInstanceOf[Literal] // the policy atom val policySignature = parsed.terms(2).asInstanceOf[Literal] // the linking (shared) variable between the current and the lookahead atom. // The variable itself is not important, we only need it to extract the positions // in the actual current and lookahead atoms that we'll encounter during learning, // which the linking variable should occupy. val targetLookAheadSharedVariable = { val vars = currentLiteralSignature.getVars vars.toSet.intersect(lookAheadLiteralSignature.getVars.toSet) } // fail if no shared variable is found require(targetLookAheadSharedVariable.nonEmpty, s"No shared variables between current and lookahead atoms inthe lookahead specification $lookAheadDefinition") // index of shared variable in a current atom val sharedVarIndex_in_CurrentAtom = currentLiteralSignature.getVars.indexOf(targetLookAheadSharedVariable.head) // index of shared variable in a lookahead atom val sharedVarIndex_in_LookAheadAtom = lookAheadLiteralSignature.getVars.indexOf(targetLookAheadSharedVariable.head) // index of linking variable in a lookahead atom val linkingVar_in_LookAheadAtom = policySignature.terms.head.name.toInt - 1 def policy = policyMap(policySignature.predSymbol) } object LookAheadImplementations { /* * All lookahead implementations should be declared here. */ /* * This is an implementation of the "appearsEarlier_<-" lookahead policy. 
This policy is declared in the * mode declarations file as follows (predicate and variable names, arities etc are random, just for demonstration): * * lookahead( p(X,Y,T), q(T,T1), appearsEarlier(2) ) * * The intended meaning of this declaration is: Whenever a p/3 literal is added to a clause r, a q/2 literal * should also be added. Both these literals are drawn from a bottom clause (bottomClause in the method's signature), while * policyLiteral in the method's signature is the literal appearsEarlier(2). * The relation between these two literals is that they should share a variable T. Also, the * remaining variable T1 of q/2 should appear in some literal r' that already appears in clause r. * This is specified by appearsEarlier(2), which means that the second variable of q/2 should "appearEarlier". The "<-" in the * name of the policy means that we search clause r for literal r' "from right to left" i.e. from the last body literal * to the head atom. * * This method returns the lookahead literal for which the linking variable appears "as closer" to the end of the clause as possible, i.e. * it appears in a literal closer to the end of the clause. 
* */ val appearsEarlier_<- = (lit: Literal, specification: LookAheadSpecification, clause: Clause, bottomClause: Clause) => { val currentAtomSignature = specification.currentLiteralSignature if (lit.predSymbol == currentAtomSignature.predSymbol && lit.arity == currentAtomSignature.arity) { val sharedVarIndex_in_CurrentAtom = specification.sharedVarIndex_in_CurrentAtom val sharedVarIndex_in_LookAheadAtom = specification.sharedVarIndex_in_LookAheadAtom val sharedVar = lit.getVars(sharedVarIndex_in_CurrentAtom) // for the shared variable find in the bottom clause all literals that match the lookahead atom // signature and contain the shared variable in the proper position val candidateLookAheads = bottomClause.body.filter { p => p.predSymbol == specification.lookAheadLiteralSignature.predSymbol && p.arity == specification.lookAheadLiteralSignature.arity && p.getVars(sharedVarIndex_in_LookAheadAtom).name == sharedVar.name && clause.toLiteralList.filter(l => List("fraud", "transaction").contains(l.predSymbol)).exists(s => s.getVars.map(_.name).contains(p.getVars(specification.linkingVar_in_LookAheadAtom).name)) } val f = (x: logic.Variable) => { // get max to get the literal closest to the end of the clause clause.toLiteralList.filter(l => List("fraud", "transaction").contains(l.predSymbol)).map(y => if (y.getVars.map(_.name).contains(x.name)) clause.toLiteralList.indexOf(y) + 1 else 0).max } if (candidateLookAheads.nonEmpty) { candidateLookAheads.map{ q => (q, f(q.getVars(specification.linkingVar_in_LookAheadAtom))) }.sortBy(z => z._2).last._1 } else { Literal() } } else { Literal() } } def appearsEarlier_<-(lit: Literal, specification: String, clause: Clause, bottomClause: Clause) = { // This is a total hack, just to make it work. 
I'll see how to make it generic // A lookahead link looks like that: // -------------------------------------------------------------------------------------- // transaction/4 -> { before/2, (4,1) }, { greaterThan/2, (2,1) }, { lessThan/2, (2,1) } // -------------------------------------------------------------------------------------- // -------------------------------------------------------------------------------------- /* * I'll use a specific example to see how it plays out. The atoms involved will be before/2, after/2, greaterThan/2, lessThan/2. * The way these atoms will be used in the example will be indicative of what I want to achieve. When that's done, I'll find a * way to make it generic, specify lookaheads in the mode declarations, parse them into objects for easy handling and search etc * * So, here comes the working example * * Assume that r is the clause that we are currently learning and * α = transaction(Card, A1, Cntry, T1) is the atom that we are about to add to r. Assume also that * β = transaction(Card, A2, Cntry, T2) is the last atom that appears in r. * if T1 is not the time variable that appears in head(r): * STEP 1: Find either a before/2, or after/2 atom in the bottom clause that links T1 and T2. * STEP 2: Find either a greaterThan/2, lessThan/2 or * else: * simply add α to r */ var foundLookAheadAtom = Literal() } /* * This policy is similar to "appearsEarlier_<-" but the search for a literal that contains a linking variable is done * "left to right", i.e. from the head to the last body literal */ def appearsEarlier_->(lit: Literal, lookAheadLit: Literal, clause: Clause, searchSpace: Clause) = { } /* * This policy is similar to "appearsEarlier_<-" but the search for a literal that contains a linking variable is done randomly */ def appearsEarlier_*(lit: Literal, lookAheadLit: Literal, clause: Clause, searchSpace: Clause) = { } } }
nkatzz/OLED
src/main/scala/utils/LookAheads.scala
Scala
gpl-3.0
9,479
/** * Licensed to Gravity.com under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Gravity.com licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gravity.goose.network import org.apache.http.Header import org.apache.http.HeaderElement import org.apache.http.HttpEntity import org.apache.http.HttpHost import org.apache.http.HttpResponse import org.apache.http.HttpVersion import org.apache.http.{HttpRequest, HttpRequestInterceptor, HttpResponse, HttpResponseInterceptor, HeaderElementIterator} import org.apache.http.client.entity.GzipDecompressingEntity import org.apache.http.client.CookieStore import org.apache.http.impl.client.BasicCookieStore import org.apache.http.client.entity.GzipDecompressingEntity import org.apache.http.client.HttpClient import org.apache.http.client.methods.HttpGet import org.apache.http.client.params.CookiePolicy import org.apache.http.client.protocol.ClientContext import org.apache.http.conn.params.ConnRoutePNames import org.apache.http.conn.ConnectionKeepAliveStrategy import org.apache.http.conn.scheme.PlainSocketFactory import org.apache.http.conn.ssl.SSLSocketFactory import org.apache.http.conn.scheme.{Scheme, SchemeRegistry} import org.apache.http.cookie.Cookie import org.apache.http.impl.conn.PoolingClientConnectionManager import org.apache.http.message.BasicHeaderElementIterator import 
org.apache.http.params.{HttpParams, BasicHttpParams, HttpConnectionParams, HttpProtocolParams} import org.apache.http.protocol.{HTTP, BasicHttpContext, HttpContext} import org.apache.http.entity.ContentType import java.io._ import java.net.SocketException import java.net.SocketTimeoutException import java.net.URL import java.net.URLConnection import java.util.ArrayList import java.util.List import java.util.Date import com.gravity.goose.utils.Logging import com.gravity.goose.Configuration import org.apache.http.impl.client.{DefaultHttpRequestRetryHandler, AbstractHttpClient, DefaultHttpClient} import org.apache.commons.io.IOUtils import com.ibm.icu.text.CharsetDetector import org.apache.http.util.EntityUtils import org.apache.http.conn.ClientConnectionManager import com.gravity.goose.network.gae.GAEConnectionManager import org.apache.http.conn.HttpClientConnectionManager /** * User: Jim Plush * Date: 12/16/10 * This guy is kind of a doozy because goose is meant to pull millions of articles per day so the legitimacy of these links * is in question. For example many times you'll see mp3, mov, wav, etc.. files mislabeled as HTML with HTML content types, * only through inspection of the actual content will you learn what the real type of content is. Also spam sites could * contain up to 1GB of text that is just wasted resources so we set a max bytes level on how much content we're going * to try and pull back before we say screw it. 
*/ object HtmlFetcher extends AbstractHtmlFetcher with Logging { /** * holds a reference to our override cookie store, we don't want to store * cookies for head requests, only slows shit down */ var emptyCookieStore: CookieStore = null /** * holds the HttpClient object for making requests */ private var httpClient: HttpClient = null initClient() def getHttpClient: HttpClient = { httpClient } /** * Makes an http fetch to go retrieve the HTML from a url, store it to disk and pass it off * @param config Goose Configuration * @param url The web address to fetch * @return If all goes well, a `Some[String]` otherwise `None` * @throws NotFoundException(String) * @throws BadRequestException(String) * @throws NotAuthorizedException(String, Int) * @throws ServerErrorException(String, Int) * @throws UnhandledStatusCodeException(String, Int) * @throws MaxBytesException() */ def getHtml(config: Configuration, url: String): Option[String] = { var httpget: HttpGet = null var htmlResult: String = null var entity: HttpEntity = null var instream: InputStream = null var contentType: ContentType = null // Identified the the apache http client does not drop URL fragments before opening the request to the host // more info: http://stackoverflow.com/questions/4251841/400-error-with-httpclient-for-a-link-with-an-anchor val cleanUrl = { val foundAt = url.indexOf("#") if (foundAt >= 0) url.substring(0, foundAt) else url } try { val localContext: HttpContext = new BasicHttpContext localContext.setAttribute(ClientContext.COOKIE_STORE, new BasicCookieStore) httpget = new HttpGet(cleanUrl) httpget.setHeader("referer", config.getBrowserReferer()) val params = httpClient.getParams HttpProtocolParams.setUserAgent(params, config.getBrowserUserAgent()) trace("Setting UserAgent To: " + HttpProtocolParams.getUserAgent(httpClient.getParams)) HttpConnectionParams.setConnectionTimeout(params, config.getConnectionTimeout()) HttpConnectionParams.setSoTimeout(params, config.getSocketTimeout()) val response: 
HttpResponse = httpClient.execute(httpget, localContext) HttpStatusValidator.validate(cleanUrl, response.getStatusLine.getStatusCode) match { case Left(ex) => throw ex case _ => } entity = response.getEntity // via http://hc.apache.org/httpcomponents-client-ga/httpclient/examples/org/apache/http/examples/client/ClientGZipContentCompression.java if (entity != null) { try { val ceheader: Header = entity.getContentEncoding(); if (ceheader != null) { val codecs: Array[HeaderElement] = ceheader.getElements(); for(i <- 0 until codecs.length) { if (codecs(i).getName().equalsIgnoreCase("gzip")) { entity = new GzipDecompressingEntity(response.getEntity()) } } } } catch { case e: Exception => { trace("Unable to get header elements: " + cleanUrl) } } } if (entity != null) { instream = entity.getContent val encodingType: String = config.resolveCharSet(url, entity) try { contentType = ContentType.get(entity) trace("Got contentType: " + contentType) } catch { case e: Exception => { if (logger.isDebugEnabled) { trace("Unable to get charset for: " + cleanUrl) trace("Encoding Type is: " + encodingType) } } } try { htmlResult = HtmlFetcher.convertStreamToString(instream, encodingType).trim } finally { EntityUtils.consume(entity) } } else { trace("Unable to fetch URL Properly: " + cleanUrl) } } catch { case e: NullPointerException => { logger.warn(e.toString + " " + e.getMessage + " Caught for URL: " + cleanUrl) } case e: MaxBytesException => { trace("GRVBIGFAIL: " + cleanUrl + " Reached max bytes size") throw e } case e: SocketException => { logger.warn(e.getMessage + " Caught for URL: " + cleanUrl) } case e: SocketTimeoutException => { trace(e.toString) throw new GatewayTimeoutException(e.toString + " " + e.getMessage) } case e: LoggableException => { logger.warn(e.getMessage) throw e } case e: Exception => { warn("FAILURE FOR LINK: " + cleanUrl + " " + e.toString) throw e } } finally { if (instream != null) { try { instream.close() } catch { case e: Exception => { 
logger.warn(e.getMessage + " Caught for URL: " + cleanUrl) } } } if (httpget != null) { try { httpget.abort() httpget.releaseConnection() entity = null } catch { case e: Exception => { } } } } if (logger.isDebugEnabled) { logger.debug("starting...") } if (htmlResult == null || htmlResult.length < 1) { if (logger.isDebugEnabled) { logger.debug("HTMLRESULT is empty or null") } throw new NotHtmlException(cleanUrl) } var is: InputStream = null var mimeType: String = null try { is = new ByteArrayInputStream(htmlResult.getBytes("UTF-8")) mimeType = URLConnection.guessContentTypeFromStream(is) if (mimeType != null || contentType != null) { if(mimeType == null) { mimeType = contentType.getMimeType() trace("no guessed mimetype? using contentType: " + mimeType + " - " + cleanUrl) } if ((mimeType == "text/html") || (mimeType == "application/xml") || (mimeType == "application/xhtml+xml") || (mimeType == "text/xml") ) { return Some(htmlResult) } else { if (htmlResult.contains("<title>") == true && htmlResult.contains("<p>") == true) { return Some(htmlResult) } trace("GRVBIGFAIL: " + mimeType + " - " + cleanUrl) throw new NotHtmlException(cleanUrl) } } else { trace("no mimetype?: " + mimeType + " - " + cleanUrl) throw new NotHtmlException(cleanUrl) } } catch { case e: UnsupportedEncodingException => { logger.warn(e.getMessage + " Caught for URL: " + cleanUrl) } case e: IOException => { logger.warn(e.getMessage + " Caught for URL: " + cleanUrl) } } None } private def initClient() { trace("Initializing HttpClient") val httpParams: HttpParams = new BasicHttpParams HttpConnectionParams.setConnectionTimeout(httpParams, 10 * 1000) // 10 seconds HttpConnectionParams.setSoTimeout(httpParams, 10 * 1000) // 10 seconds HttpProtocolParams.setVersion(httpParams, HttpVersion.HTTP_1_1) emptyCookieStore = new CookieStore { def addCookie(cookie: Cookie) { } def getCookies: List[Cookie] = { emptyList } def clearExpired(date: Date): Boolean = { false } def clear() { } private[network] var 
emptyList: ArrayList[Cookie] = new ArrayList[Cookie] } httpParams.setParameter("http.protocol.single-cookie-header", true) httpParams.setParameter("http.protocol.cookie-policy", CookiePolicy.BROWSER_COMPATIBILITY) httpParams.setParameter("http.User-Agent", "Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2.8) Gecko/20100723 Ubuntu/10.04 (lucid) Firefox/3.6.8") httpParams.setParameter("http.language.Accept-Language", "en-us") httpParams.setParameter("http.protocol.content-charset", "UTF-8") httpParams.setParameter("Accept", "application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5") httpParams.setParameter("Cache-Control", "max-age=0") httpParams.setParameter("http.connection.stalecheck", true) //gae??? val cm = createConnectionManager httpClient = new DefaultHttpClient(cm, httpParams) httpClient.asInstanceOf[AbstractHttpClient].setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler(0, false)) httpClient.getParams.setParameter("http.connection-manager.timeout", 20000L) // timeout for retrieving a connection from the pool httpClient.getParams.setParameter("http.protocol.wait-for-continue", 5000L) // timeout for how long the client waits for 100-continue before sending request body httpClient.getParams.setParameter("http.tcp.nodelay", true) // First check proxy configured from java properties, otherwise use env var if set if (scala.sys.props.isDefinedAt("http.proxyHost")) { val host = scala.sys.props.getOrElse("http.proxyHost", "") val port = scala.sys.props.getOrElse("http.proxyPort", "80").toInt httpClient.getParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, new HttpHost(host, port)) } else if (sys.env.isDefinedAt("http_proxy")) { val url = new URL(sys.env.getOrElse("http_proxy", "")) val host = url.getHost val port = url.getPort httpClient.getParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, new HttpHost(host, port)) } // 
http://hc.apache.org/httpcomponents-client-ga/httpclient/examples/org/apache/http/examples/client/ClientGZipContentCompression.java httpClient.asInstanceOf[AbstractHttpClient].addRequestInterceptor(new HttpRequestInterceptor() { def process( request: HttpRequest, context: HttpContext) { if (!request.containsHeader("Accept-Encoding")) { request.addHeader("Accept-Encoding", "gzip") } // First check proxy configured from java properties, otherwise use env var if set if (scala.sys.props.isDefinedAt("http.proxyHost")) { val host = scala.sys.props.getOrElse("http.proxyHost", "") val port = scala.sys.props.getOrElse("http.proxyPort", "80").toInt httpClient.getParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, new HttpHost(host, port)) } else if (sys.env.isDefinedAt("http_proxy")) { val url = new URL(sys.env.getOrElse("http_proxy", "")) val host = url.getHost val port = url.getPort httpClient.getParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, new HttpHost(host, port)) } } }) httpClient.asInstanceOf[AbstractHttpClient].addResponseInterceptor(new HttpResponseInterceptor() { def process( response: HttpResponse, context: HttpContext) { val entity: HttpEntity = response.getEntity() if (entity != null) { val ceheader: Header = entity.getContentEncoding() if (ceheader != null) { val codecs = ceheader.getElements() for ( c <- codecs) { if (c.getName().equalsIgnoreCase("gzip")) { response.setEntity( new GzipDecompressingEntity(response.getEntity())) return } } } } } }) httpClient.asInstanceOf[AbstractHttpClient].setKeepAliveStrategy(new ConnectionKeepAliveStrategy() { def getKeepAliveDuration( response: HttpResponse, context: HttpContext): Long = { // Honor 'keep-alive' header val it: HeaderElementIterator = new BasicHeaderElementIterator(response.headerIterator(HTTP.CONN_KEEP_ALIVE)) while (it.hasNext()) { val he: HeaderElement = it.nextElement() val param: String = he.getName() val value: String = he.getValue() if (value != null && param.equalsIgnoreCase("timeout")) { try { 
return value.toLong * 1000 } catch { case e: NumberFormatException => {} // ignore numberformat errors } } } // otherwise keep alive for 10 seconds return 10 * 1000 } }) } /** * reads bytes off the string and returns a string * * @param is the source stream from the response * @param maxBytes The max bytes that we want to read from the input stream * @return String */ def convertStreamToString(is: InputStream, httpEncodingType: String): String = { try { var buf : Array[Byte] = IOUtils.toByteArray(is) return encodedText(buf) } catch { case e: SocketTimeoutException => { logger.warn(e.toString + " " + e.getMessage) } case e: UnsupportedEncodingException => { logger.warn(e.toString + " " + e.getMessage) } case e: IOException => { logger.warn(e.toString + " " + e.getMessage) } } null } def encodedText(buf : Array[Byte]) : String = { val detector = new CharsetDetector() detector.setText(buf) val matched = detector.detect() matched.getLanguage matched.getString } def createConnectionManager:ClientConnectionManager = createDefaultConnectionManager //enable gae connection manager //def createConnectionManager:ClientConnectionManager = createGaeConnectionManager def createGaeConnectionManager = new GAEConnectionManager def createDefaultConnectionManager:ClientConnectionManager = { val schemeRegistry: SchemeRegistry = new SchemeRegistry schemeRegistry.register(new Scheme("http", 80, PlainSocketFactory.getSocketFactory)) schemeRegistry.register(new Scheme("https", 443, SSLSocketFactory.getSocketFactory)) //gae?? // val cm = new ThreadSafeClientConnManager(schemeRegistry) val cm = new PoolingClientConnectionManager(schemeRegistry) cm.setMaxTotal(4000) cm.setDefaultMaxPerRoute(20) cm } }
raisercostin/goose
src/main/scala/com/gravity/goose/network/HtmlFetcher.scala
Scala
apache-2.0
17,254
package io.iohk.ethereum.jsonrpc

import java.time.Duration

import akka.actor.ActorSystem
import akka.testkit.TestKit
import akka.util.ByteString
import io.iohk.ethereum.{LongPatience, WithActorSystemShutDown}
import io.iohk.ethereum.domain._
import io.iohk.ethereum.jsonrpc.serialization.JsonSerializers.{
  OptionNoneToJNullSerializer,
  QuantitiesSerializer,
  UnformattedDataJsonSerializer
}
import io.iohk.ethereum.jsonrpc.PersonalService._
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.bouncycastle.util.encoders.Hex
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.{DefaultFormats, Formats}
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks

/**
 * Specification for the `personal_*` JSON-RPC endpoints.
 *
 * Each test follows the same pattern:
 *   1. set a scalamock expectation on `personalService` (provided by
 *      `JsonRpcControllerFixture`, declared elsewhere in this project),
 *   2. build a raw JSON-RPC request with `newJsonRpcRequest`,
 *   3. run it through `jsonRpcController.handleRequest` synchronously, and
 *   4. assert on the JSON response via the `JRCMatchers` matchers
 *      (`haveStringResult`, `haveBooleanResult`, `haveResult`, `haveError`).
 *
 * NOTE(review): `sig` used in the sign/ecRecover tests is presumably a fixture-provided
 * signature matching the hard-coded hex strings below — defined outside this file.
 */
class JsonRpcControllerPersonalSpec
    extends TestKit(ActorSystem("JsonRpcControllerPersonalSpec_System"))
    with AnyFlatSpecLike
    with WithActorSystemShutDown
    with Matchers
    with JRCMatchers
    with ScalaCheckPropertyChecks
    with ScalaFutures
    with LongPatience
    with Eventually {

  // json4s formats used when (de)serializing request/response payloads in these tests.
  implicit val formats: Formats = DefaultFormats.preservingEmptyValues + OptionNoneToJNullSerializer +
    QuantitiesSerializer + UnformattedDataJsonSerializer

  // Importing a raw private key returns the derived account address as a hex string.
  it should "personal_importRawKey" in new JsonRpcControllerFixture {
    val key = "7a44789ed3cd85861c0bbf9693c7e1de1862dd4396c390147ecf1275099c6e6f"
    val keyBytes = ByteString(Hex.decode(key))
    val addr = Address("0x00000000000000000000000000000000000000ff")
    val pass = "aaa"

    (personalService.importRawKey _)
      .expects(ImportRawKeyRequest(keyBytes, pass))
      .returning(Task.now(Right(ImportRawKeyResponse(addr))))

    val params = JString(key) :: JString(pass) :: Nil
    val rpcRequest = newJsonRpcRequest("personal_importRawKey", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveStringResult(addr.toString)
  }

  // Creating a new account with a passphrase returns the new address.
  it should "personal_newAccount" in new JsonRpcControllerFixture {
    val addr = Address("0x00000000000000000000000000000000000000ff")
    val pass = "aaa"

    (personalService.newAccount _)
      .expects(NewAccountRequest(pass))
      .returning(Task.now(Right(NewAccountResponse(addr))))

    val params = JString(pass) :: Nil
    val rpcRequest = newJsonRpcRequest("personal_newAccount", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveStringResult(addr.toString)
  }

  // Listing accounts returns a JSON array of address strings.
  it should "personal_listAccounts" in new JsonRpcControllerFixture {
    val addresses = List(34, 12391, 123).map(Address(_))
    // `pass` is not used by this endpoint; kept to mirror the other fixtures.
    val pass = "aaa"

    (personalService.listAccounts _)
      .expects(ListAccountsRequest())
      .returning(Task.now(Right(ListAccountsResponse(addresses))))

    val rpcRequest = newJsonRpcRequest("personal_listAccounts")
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveResult(JArray(addresses.map(a => JString(a.toString))))
  }

  // Unlocking without a duration maps to `UnlockAccountRequest(..., None)`.
  it should "personal_unlockAccount" in new JsonRpcControllerFixture {
    val address = Address(42)
    val pass = "aaa"
    val params = JString(address.toString) :: JString(pass) :: Nil

    (personalService.unlockAccount _)
      .expects(UnlockAccountRequest(address, pass, None))
      .returning(Task.now(Right(UnlockAccountResponse(true))))

    val rpcRequest = newJsonRpcRequest("personal_unlockAccount", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveBooleanResult(true)
  }

  // A string duration parameter is parsed as a number of seconds.
  it should "personal_unlockAccount for specified duration" in new JsonRpcControllerFixture {
    val address = Address(42)
    val pass = "aaa"
    val dur = "1"
    val params = JString(address.toString) :: JString(pass) :: JString(dur) :: Nil

    (personalService.unlockAccount _)
      .expects(UnlockAccountRequest(address, pass, Some(Duration.ofSeconds(1))))
      .returning(Task.now(Right(UnlockAccountResponse(true))))

    val rpcRequest = newJsonRpcRequest("personal_unlockAccount", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveBooleanResult(true)
  }

  // Invalid durations are rejected before the service is ever called:
  // a non-numeric string yields a generic invalid-params error, and a numeric
  // value that does not fit in an Int yields a dedicated error message.
  it should "personal_unlockAccount should handle possible duration errors" in new JsonRpcControllerFixture {
    val address = Address(42)
    val pass = "aaa"
    val dur = "alksjdfh"
    val params = JString(address.toString) :: JString(pass) :: JString(dur) :: Nil

    val rpcRequest = newJsonRpcRequest("personal_unlockAccount", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()
    response should haveError(JsonRpcError(-32602, "Invalid method parameters", None))

    val dur2 = Long.MaxValue
    val params2 = JString(address.toString) :: JString(pass) :: JInt(dur2) :: Nil
    val rpcRequest2 = newJsonRpcRequest("personal_unlockAccount", params2)
    val response2 = jsonRpcController.handleRequest(rpcRequest2).runSyncUnsafe()
    response2 should haveError(
      JsonRpcError(-32602, "Duration should be an number of seconds, less than 2^31 - 1", None)
    )
  }

  // `null` duration is treated exactly like an absent duration (Parity/web3j quirk).
  it should "personal_unlockAccount should handle null passed as a duration for compatibility with Parity and web3j" in new JsonRpcControllerFixture {
    val address = Address(42)
    val pass = "aaa"
    val params = JString(address.toString) :: JString(pass) :: JNull :: Nil

    (personalService.unlockAccount _)
      .expects(UnlockAccountRequest(address, pass, None))
      .returning(Task.now(Right(UnlockAccountResponse(true))))

    val rpcRequest = newJsonRpcRequest("personal_unlockAccount", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveBooleanResult(true)
  }

  // Locking an account returns a plain boolean.
  it should "personal_lockAccount" in new JsonRpcControllerFixture {
    val address = Address(42)
    val params = JString(address.toString) :: Nil

    (personalService.lockAccount _)
      .expects(LockAccountRequest(address))
      .returning(Task.now(Right(LockAccountResponse(true))))

    val rpcRequest = newJsonRpcRequest("personal_lockAccount", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveBooleanResult(true)
  }

  // The transaction hash is rendered as 0x-prefixed hex. The mock accepts any
  // request (`*`) — only the response formatting is under test here.
  it should "personal_sendTransaction" in new JsonRpcControllerFixture {
    val params = JObject(
      "from" -> Address(42).toString,
      "to" -> Address(123).toString,
      "value" -> 1000
    ) :: JString("passphrase") :: Nil
    val txHash = ByteString(1, 2, 3, 4)

    (personalService
      .sendTransaction(_: SendTransactionWithPassphraseRequest))
      .expects(*)
      .returning(Task.now(Right(SendTransactionWithPassphraseResponse(txHash))))

    val rpcRequest = newJsonRpcRequest("personal_sendTransaction", params)
    val response = jsonRpcController.handleRequest(rpcRequest).runSyncUnsafe()

    response should haveResult(JString(s"0x${Hex.toHexString(txHash.toArray)}"))
  }

  // Signing decodes the 0x-prefixed message and address params and returns the
  // fixture signature `sig` as a hex string.
  it should "personal_sign" in new JsonRpcControllerFixture {
    (personalService.sign _)
      .expects(
        SignRequest(
          ByteString(Hex.decode("deadbeaf")),
          Address(ByteString(Hex.decode("9b2055d370f73ec7d8a03e965129118dc8f5bf83"))),
          Some("thePassphrase")
        )
      )
      .returns(Task.now(Right(SignResponse(sig))))

    val request: JsonRpcRequest = newJsonRpcRequest(
      "personal_sign",
      List(
        JString(s"0xdeadbeaf"),
        JString(s"0x9b2055d370f73ec7d8a03e965129118dc8f5bf83"),
        JString("thePassphrase")
      )
    )

    val response = jsonRpcController.handleRequest(request).runSyncUnsafe()
    response should haveStringResult(
      "0xa3f20717a250c2b0b729b7e5becbff67fdaef7e0699da4de7ca5895b02a170a12d887fd3b17bfdce3481f10bea41f45ba9f709d39ce8325427b57afcfc994cee1b"
    )
  }

  // ecRecover is the inverse of sign: message + signature recover the address.
  it should "personal_ecRecover" in new JsonRpcControllerFixture {
    (personalService.ecRecover _)
      .expects(EcRecoverRequest(ByteString(Hex.decode("deadbeaf")), sig))
      .returns(
        Task.now(
          Right(EcRecoverResponse(Address(ByteString(Hex.decode("9b2055d370f73ec7d8a03e965129118dc8f5bf83")))))
        )
      )

    val request: JsonRpcRequest = newJsonRpcRequest(
      "personal_ecRecover",
      List(
        JString(s"0xdeadbeaf"),
        JString(
          s"0xa3f20717a250c2b0b729b7e5becbff67fdaef7e0699da4de7ca5895b02a170a12d887fd3b17bfdce3481f10bea41f45ba9f709d39ce8325427b57afcfc994cee1b"
        )
      )
    )

    val response = jsonRpcController.handleRequest(request).runSyncUnsafe()
    response should haveStringResult("0x9b2055d370f73ec7d8a03e965129118dc8f5bf83")
  }
}
input-output-hk/etc-client
src/test/scala/io/iohk/ethereum/jsonrpc/JsonRpcControllerPersonalSpec.scala
Scala
mit
8,726
package uk.co.mattthomson.coursera.ggp.gresley

import akka.actor.ActorSystem
import org.scalatra.LifeCycle
import javax.servlet.ServletContext
import uk.co.mattthomson.coursera.ggp.gresley.servlet.GresleyServlet
import uk.co.mattthomson.coursera.ggp.gresley.moveselector.MoveSelectorPropsFactoryImpl

/**
 * Scalatra bootstrap: creates the application's `ActorSystem` and mounts the
 * `GresleyServlet` at the context root when the servlet container starts.
 */
class GresleyBootstrap extends LifeCycle {
  // One actor system for the lifetime of the web application.
  private val system = ActorSystem()

  // Factory passed to the servlet so it can build move-selector actors.
  private val moveSelectorPropsFactory = new MoveSelectorPropsFactoryImpl

  /**
   * Mounts the servlet at "/".
   *
   * @param context the servlet context provided by the container
   */
  override def init(context: ServletContext): Unit = {
    // Explicit `: Unit =` replaces the deprecated procedure syntax;
    // dotted call replaces infix notation for a non-operator method.
    context.mount(new GresleyServlet(system, moveSelectorPropsFactory), "/")
  }

  // NOTE(review): `system` is never terminated on undeploy; consider overriding
  // `destroy(context)` to shut it down so container redeploys do not leak
  // actor-system threads. Left unchanged here because the Akka version (and
  // therefore `shutdown()` vs `terminate()`) is not visible from this file.
}
matt-thomson/gresley
src/main/scala/uk/co/mattthomson/coursera/ggp/gresley/GresleyBootstrap.scala
Scala
mit
587
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.exchange import java.util.{HashMap => JHashMap, Map => JMap} import javax.annotation.concurrent.GuardedBy import scala.collection.mutable.ArrayBuffer import org.apache.spark.{MapOutputStatistics, ShuffleDependency, SimpleFutureAction} import org.apache.spark.internal.Logging import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.execution.{ShuffledRowRDD, SparkPlan} /** * A coordinator used to determines how we shuffle data between stages generated by Spark SQL. * Right now, the work of this coordinator is to determine the number of post-shuffle partitions * for a stage that needs to fetch shuffle data from one or multiple stages. * * A coordinator is constructed with three parameters, `numExchanges`, * `targetPostShuffleInputSize`, and `minNumPostShufflePartitions`. * - `numExchanges` is used to indicated that how many [[ShuffleExchangeExec]]s that will be * registered to this coordinator. So, when we start to do any actual work, we have a way to * make sure that we have got expected number of [[ShuffleExchangeExec]]s. 
* - `targetPostShuffleInputSize` is the targeted size of a post-shuffle partition's * input data size. With this parameter, we can estimate the number of post-shuffle partitions. * This parameter is configured through * `spark.sql.adaptive.shuffle.targetPostShuffleInputSize`. * - `minNumPostShufflePartitions` is an optional parameter. If it is defined, this coordinator * will try to make sure that there are at least `minNumPostShufflePartitions` post-shuffle * partitions. * * The workflow of this coordinator is described as follows: * - Before the execution of a [[SparkPlan]], for a [[ShuffleExchangeExec]] operator, * if an [[ExchangeCoordinator]] is assigned to it, it registers itself to this coordinator. * This happens in the `doPrepare` method. * - Once we start to execute a physical plan, a [[ShuffleExchangeExec]] registered to this * coordinator will call `postShuffleRDD` to get its corresponding post-shuffle * [[ShuffledRowRDD]]. * If this coordinator has made the decision on how to shuffle data, this [[ShuffleExchangeExec]] * will immediately get its corresponding post-shuffle [[ShuffledRowRDD]]. * - If this coordinator has not made the decision on how to shuffle data, it will ask those * registered [[ShuffleExchangeExec]]s to submit their pre-shuffle stages. Then, based on the * size statistics of pre-shuffle partitions, this coordinator will determine the number of * post-shuffle partitions and pack multiple pre-shuffle partitions with continuous indices * to a single post-shuffle partition whenever necessary. * - Finally, this coordinator will create post-shuffle [[ShuffledRowRDD]]s for all registered * [[ShuffleExchangeExec]]s. So, when a [[ShuffleExchangeExec]] calls `postShuffleRDD`, this * coordinator can lookup the corresponding [[RDD]]. * * The strategy used to determine the number of post-shuffle partitions is described as follows. * To determine the number of post-shuffle partitions, we have a target input size for a * post-shuffle partition. 
Once we have size statistics of pre-shuffle partitions from stages
 * corresponding to the registered [[ShuffleExchangeExec]]s, we will do a pass of those statistics
 * and pack pre-shuffle partitions with continuous indices to a single post-shuffle partition until
 * adding another pre-shuffle partition would cause the size of a post-shuffle partition to be
 * greater than the target size.
 *
 * For example, we have two stages with the following pre-shuffle partition size statistics:
 * stage 1: [100 MiB, 20 MiB, 100 MiB, 10MiB, 30 MiB]
 * stage 2: [10 MiB, 10 MiB, 70 MiB, 5 MiB, 5 MiB]
 * assuming the target input size is 128 MiB, we will have four post-shuffle partitions,
 * which are:
 *  - post-shuffle partition 0: pre-shuffle partition 0 (size 110 MiB)
 *  - post-shuffle partition 1: pre-shuffle partition 1 (size 30 MiB)
 *  - post-shuffle partition 2: pre-shuffle partition 2 (size 170 MiB)
 *  - post-shuffle partition 3: pre-shuffle partition 3 and 4 (size 50 MiB)
 */
class ExchangeCoordinator(
    advisoryTargetPostShuffleInputSize: Long,
    minNumPostShufflePartitions: Option[Int] = None)
  extends Logging {

  // The registered Exchange operators.
  private[this] val exchanges = ArrayBuffer[ShuffleExchangeExec]()

  // `lazy val` is used here so that we could notice the wrong use of this class, e.g., all the
  // exchanges should be registered before `postShuffleRDD` called first time. If a new exchange is
  // registered after the `postShuffleRDD` call, `assert(exchanges.length == numExchanges)` fails
  // in `doEstimationIfNecessary`.
  private[this] lazy val numExchanges = exchanges.size

  // This map is used to lookup the post-shuffle ShuffledRowRDD for an Exchange operator.
  private[this] lazy val postShuffleRDDs: JMap[ShuffleExchangeExec, ShuffledRowRDD] =
    new JHashMap[ShuffleExchangeExec, ShuffledRowRDD](numExchanges)

  // A boolean that indicates if this coordinator has made decision on how to shuffle data.
  // This variable will only be updated by doEstimationIfNecessary, which is protected by
  // synchronized.
  @volatile private[this] var estimated: Boolean = false

  /**
   * Registers a [[ShuffleExchangeExec]] operator to this coordinator. This method is only allowed
   * to be called in the `doPrepare` method of a [[ShuffleExchangeExec]] operator.
   */
  @GuardedBy("this")
  def registerExchange(exchange: ShuffleExchangeExec): Unit = synchronized {
    exchanges += exchange
  }

  /** Whether this coordinator has already decided how to shuffle data (volatile read). */
  def isEstimated: Boolean = estimated

  /**
   * Estimates partition start indices for post-shuffle partitions based on
   * mapOutputStatistics provided by all pre-shuffle stages.
   */
  def estimatePartitionStartIndices(
      mapOutputStatistics: Array[MapOutputStatistics]): Array[Int] = {
    // If minNumPostShufflePartitions is defined, it is possible that we need to use a
    // value less than advisoryTargetPostShuffleInputSize as the target input size of
    // a post shuffle task.
    val targetPostShuffleInputSize = minNumPostShufflePartitions match {
      case Some(numPartitions) =>
        val totalPostShuffleInputSize = mapOutputStatistics.map(_.bytesByPartitionId.sum).sum
        // The max at here is to make sure that when we have an empty table, we
        // only have a single post-shuffle partition.
        // There is no particular reason that we pick 16. We just need a number to
        // prevent maxPostShuffleInputSize from being set to 0.
        val maxPostShuffleInputSize =
          math.max(math.ceil(totalPostShuffleInputSize / numPartitions.toDouble).toLong, 16)
        math.min(maxPostShuffleInputSize, advisoryTargetPostShuffleInputSize)

      case None => advisoryTargetPostShuffleInputSize
    }

    logInfo(
      s"advisoryTargetPostShuffleInputSize: $advisoryTargetPostShuffleInputSize, " +
        s"targetPostShuffleInputSize $targetPostShuffleInputSize.")

    // Make sure we do get the same number of pre-shuffle partitions for those stages.
    val distinctNumPreShufflePartitions =
      mapOutputStatistics.map(stats => stats.bytesByPartitionId.length).distinct
    // The reason that we are expecting a single value of the number of pre-shuffle partitions
    // is that when we add Exchanges, we set the number of pre-shuffle partitions
    // (i.e. map output partitions) using a static setting, which is the value of
    // spark.sql.shuffle.partitions. Even if two input RDDs are having different
    // number of partitions, they will have the same number of pre-shuffle partitions
    // (i.e. map output partitions).
    assert(
      distinctNumPreShufflePartitions.length == 1,
      "There should be only one distinct value of the number pre-shuffle partitions " +
        "among registered Exchange operator.")
    val numPreShufflePartitions = distinctNumPreShufflePartitions.head

    val partitionStartIndices = ArrayBuffer[Int]()
    // The first element of partitionStartIndices is always 0.
    partitionStartIndices += 0

    var postShuffleInputSize = 0L

    // Greedy single pass: keep extending the current post-shuffle partition with
    // consecutive pre-shuffle partitions until adding one more would exceed the target.
    var i = 0
    while (i < numPreShufflePartitions) {
      // We calculate the total size of ith pre-shuffle partitions from all pre-shuffle stages.
      // Then, we add the total size to postShuffleInputSize.
      var nextShuffleInputSize = 0L
      var j = 0
      while (j < mapOutputStatistics.length) {
        nextShuffleInputSize += mapOutputStatistics(j).bytesByPartitionId(i)
        j += 1
      }

      // If including the nextShuffleInputSize would exceed the target partition size, then start a
      // new partition. (`i > 0` guarantees the first partition always starts at index 0, so a
      // single oversized pre-shuffle partition never creates an empty post-shuffle partition.)
      if (i > 0 && postShuffleInputSize + nextShuffleInputSize > targetPostShuffleInputSize) {
        partitionStartIndices += i
        // reset postShuffleInputSize.
        postShuffleInputSize = nextShuffleInputSize
      } else postShuffleInputSize += nextShuffleInputSize

      i += 1
    }

    partitionStartIndices.toArray
  }

  /**
   * Submits all registered pre-shuffle map stages (once), blocks for their statistics,
   * computes the post-shuffle partitioning, and populates `postShuffleRDDs`.
   * Idempotent: guarded by `synchronized` and the `estimated` flag.
   */
  @GuardedBy("this")
  private def doEstimationIfNecessary(): Unit = synchronized {
    // It is unlikely that this method will be called from multiple threads
    // (when multiple threads trigger the execution of THIS physical)
    // because in common use cases, we will create new physical plan after
    // users apply operations (e.g. projection) to an existing DataFrame.
    // However, if it happens, we have synchronized to make sure only one
    // thread will trigger the job submission.
    if (!estimated) {
      // Make sure we have the expected number of registered Exchange operators.
      assert(exchanges.length == numExchanges)

      val newPostShuffleRDDs = new JHashMap[ShuffleExchangeExec, ShuffledRowRDD](numExchanges)

      // Submit all map stages
      val shuffleDependencies = ArrayBuffer[ShuffleDependency[Int, InternalRow, InternalRow]]()
      val submittedStageFutures = ArrayBuffer[SimpleFutureAction[MapOutputStatistics]]()
      var i = 0
      while (i < numExchanges) {
        val exchange = exchanges(i)
        val shuffleDependency = exchange.shuffleDependency
        shuffleDependencies += shuffleDependency
        if (shuffleDependency.rdd.partitions.length != 0) {
          // submitMapStage does not accept RDD with 0 partition.
          // So, we will not submit this dependency.
          submittedStageFutures +=
            exchange.sqlContext.sparkContext.submitMapStage(shuffleDependency)
        }
        i += 1
      }

      // Wait for the finishes of those submitted map stages.
      val mapOutputStatistics = new Array[MapOutputStatistics](submittedStageFutures.length)
      var j = 0
      while (j < submittedStageFutures.length) {
        // This call is a blocking call. If the stage has not finished, we will wait at here.
        mapOutputStatistics(j) = submittedStageFutures(j).get()
        j += 1
      }

      // If we have mapOutputStatistics.length < numExchange, it is because we do not submit
      // a stage when the number of partitions of this dependency is 0.
      assert(mapOutputStatistics.length <= numExchanges)

      // Now, we estimate partitionStartIndices. partitionStartIndices.length will be the
      // number of post-shuffle partitions.
      val partitionStartIndices =
        if (mapOutputStatistics.length == 0) {
          Array.empty[Int]
        } else {
          estimatePartitionStartIndices(mapOutputStatistics)
        }

      var k = 0
      while (k < numExchanges) {
        val exchange = exchanges(k)
        val rdd =
          exchange.preparePostShuffleRDD(shuffleDependencies(k), Some(partitionStartIndices))
        newPostShuffleRDDs.put(exchange, rdd)
        k += 1
      }

      // Finally, we set postShuffleRDDs and estimated.
      assert(postShuffleRDDs.isEmpty)
      assert(newPostShuffleRDDs.size() == numExchanges)
      postShuffleRDDs.putAll(newPostShuffleRDDs)
      estimated = true
    }
  }

  /**
   * Returns the post-shuffle [[ShuffledRowRDD]] for a registered exchange, triggering the
   * one-time estimation on first call.
   *
   * @throws IllegalStateException if `exchange` was never registered with this coordinator
   */
  def postShuffleRDD(exchange: ShuffleExchangeExec): ShuffledRowRDD = {
    doEstimationIfNecessary()

    if (!postShuffleRDDs.containsKey(exchange)) {
      throw new IllegalStateException(
        s"The given $exchange is not registered in this coordinator.")
    }

    postShuffleRDDs.get(exchange)
  }

  override def toString: String = {
    s"coordinator[target post-shuffle partition size: $advisoryTargetPostShuffleInputSize]"
  }
}
aosagie/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ExchangeCoordinator.scala
Scala
apache-2.0
13,291
/*
    MET-API

    Copyright (C) 2014 met.no
    Contact information:
    Norwegian Meteorological Institute
    Box 43 Blindern
    0313 OSLO
    NORWAY
    E-mail: met-api@met.no

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
    MA 02110-1301, USA
*/

package modules.observations

import play.api._
import play.api.inject.guice._
import play.api.inject.guice.GuiceableModule.fromGuiceModule
import com.google.inject.AbstractModule
import services.observations._

/**
 * Guice bindings for Production Mode: real KDVH database access and the
 * production element-info getter.
 */
// $COVERAGE-OFF$ Can't test the production binding in Test mode
class ObservationsProdModule extends AbstractModule {
  def configure(): Unit = {
    bind(classOf[DatabaseAccess]).to(classOf[KdvhDatabaseAccess])
    bind(classOf[ElementInfoGetter]).to(classOf[ProdElementInfoGetter])
  }
}
// $COVERAGE-ON$

/**
 * Guice bindings for Development/Test Mode: mock implementations that do not
 * require a database.
 */
class ObservationsNonProdModule extends AbstractModule {
  def configure(): Unit = {
    bind(classOf[DatabaseAccess]).to(classOf[MockDatabaseAccess])
    bind(classOf[ElementInfoGetter]).to(classOf[MockElementInfoGetter])
  }
}

/**
 * Set up the Guice injector and provide the mechanism to return objects from
 * the dependency graph, selecting prod or mock bindings from the application's
 * run mode.
 */
// $COVERAGE-OFF$ Can't test the production binding in Test mode
class ObservationsApplicationLoader extends GuiceApplicationLoader() {
  override def builder(context: ApplicationLoader.Context): GuiceApplicationBuilder = {
    val builder = initialBuilder
      .in(context.environment)
      .loadConfig(context.initialConfiguration)
      .overrides(overrides(context): _*)
    // BUG FIX: the original matched on the literal `Mode.Prod`, which made the
    // `case _` (non-prod) branch unreachable and always installed the production
    // module. Dispatch on the actual run mode from the loader context instead.
    context.environment.mode match {
      case Mode.Prod => builder.bindings(new ObservationsProdModule)
      case _ => builder.bindings(new ObservationsNonProdModule)
    }
  }
}
// $COVERAGE-ON$
metno/metapi-observations
app/modules/observations/ObservationsApplicationLoader.scala
Scala
gpl-2.0
2,431
import sbt._
import Keys._

/**
 * sbt 0.12-style build definition for the sbt-onejar plugin project.
 *
 * NOTE: the val is named `sbtXjc` while the project id is "sbt-onejar";
 * the name is kept as-is because sbt resolves project references by val name.
 */
object build extends Build {

  // Plugin-specific settings, kept separate from the project wiring for readability.
  private val pluginSettings: Seq[Project.Setting[_]] = Seq(
    organization := "org.scala-sbt.plugins",
    version := "0.9-SNAPSHOT",
    sbtPlugin := true,
    scalacOptions in Compile ++= Seq("-deprecation"),
    // Publish via Ivy patterns to the community sbt-plugin repository.
    publishTo := Some(
      Resolver.url(
        "sbt-plugin-releases",
        new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/")
      )(Resolver.ivyStylePatterns)
    ),
    publishMavenStyle := false
  )

  // Root (and only) project; scripted-plugin settings enable `scripted` integration tests.
  val sbtXjc = Project(
    id = "sbt-onejar",
    base = file("."),
    settings = Defaults.defaultSettings ++ ScriptedPlugin.scriptedSettings ++ pluginSettings
  )
}
jforge/sbt-onejar
project/build.scala
Scala
mit
591
package poly.collection.mut import poly.collection._ import scala.language.higherKinds /** * Represents a queue whose elements are kept distinct. * @author Tongfei Chen */ class DistinctQueue[T: Eq] private(private val inner: Queue[T]) extends Queue[T] { private[this] val seen = AutoSet[T]() def enqueue(x: T) = if (!seen(x)) { inner += x seen += x } override def enqueueAll(xs: Traversable[T]) = { val buf = ArraySeq[T]() for (x <- xs) if (!seen(x)) { seen += x buf :+= x } // buffers succeeding elements and push in batch inner ++= buf } def front = inner.front def dequeue() = inner.dequeue() def elements = inner.elements } object DistinctQueue { def apply[T: Eq](inner: Queue[T]): DistinctQueue[T] = { new DistinctQueue(inner) } }
ctongfei/poly-collection
core/src/main/scala/poly/collection/mut/DistinctQueue.scala
Scala
mit
822
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.api import _root_.java.util.concurrent.atomic.AtomicInteger import org.apache.calcite.plan.RelOptUtil import org.apache.calcite.plan.hep.HepMatchOrder import org.apache.calcite.rel.RelNode import org.apache.calcite.rel.`type`.RelDataType import org.apache.calcite.sql2rel.RelDecorrelator import org.apache.calcite.tools.RuleSet import org.apache.flink.api.common.functions.MapFunction import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.io.DiscardingOutputFormat import org.apache.flink.api.java.typeutils.GenericTypeInfo import org.apache.flink.api.java.{DataSet, ExecutionEnvironment} import org.apache.flink.table.explain.PlanJsonParser import org.apache.flink.table.expressions.{Expression, TimeAttribute} import org.apache.flink.table.plan.nodes.FlinkConventions import org.apache.flink.table.plan.nodes.dataset.DataSetRel import org.apache.flink.table.plan.rules.FlinkRuleSets import org.apache.flink.table.plan.schema.{DataSetTable, RowSchema, TableSinkTable, TableSourceTable} import org.apache.flink.table.runtime.MapRunner import org.apache.flink.table.sinks.{BatchTableSink, TableSink} import 
org.apache.flink.table.sources.{BatchTableSource, TableSource} import org.apache.flink.types.Row /** * The abstract base class for batch TableEnvironments. * * A TableEnvironment can be used to: * - convert a [[DataSet]] to a [[Table]] * - register a [[DataSet]] in the [[TableEnvironment]]'s catalog * - register a [[Table]] in the [[TableEnvironment]]'s catalog * - scan a registered table to obtain a [[Table]] * - specify a SQL query on registered tables to obtain a [[Table]] * - convert a [[Table]] into a [[DataSet]] * - explain the AST and execution plan of a [[Table]] * * @param execEnv The [[ExecutionEnvironment]] which is wrapped in this [[BatchTableEnvironment]]. * @param config The [[TableConfig]] of this [[BatchTableEnvironment]]. */ abstract class BatchTableEnvironment( private[flink] val execEnv: ExecutionEnvironment, config: TableConfig) extends TableEnvironment(config) { // a counter for unique table names. private val nameCntr: AtomicInteger = new AtomicInteger(0) // the naming pattern for internally registered tables. private val internalNamePattern = "^_DataSetTable_[0-9]+$".r /** * Checks if the chosen table name is valid. * * @param name The table name to check. */ override protected def checkValidTableName(name: String): Unit = { val m = internalNamePattern.findFirstIn(name) m match { case Some(_) => throw new TableException(s"Illegal Table name. " + s"Please choose a name that does not contain the pattern $internalNamePattern") case None => } } /** Returns a unique table name according to the internal naming pattern. */ protected def createUniqueTableName(): String = "_DataSetTable_" + nameCntr.getAndIncrement() /** * Registers an external [[BatchTableSource]] in this [[TableEnvironment]]'s catalog. * Registered tables can be referenced in SQL queries. * * @param name The name under which the [[TableSource]] is registered. * @param tableSource The [[TableSource]] to register. 
*/ override def registerTableSource(name: String, tableSource: TableSource[_]): Unit = { checkValidTableName(name) tableSource match { case batchTableSource: BatchTableSource[_] => registerTableInternal(name, new TableSourceTable(batchTableSource)) case _ => throw new TableException("Only BatchTableSource can be registered in " + "BatchTableEnvironment") } } /** * Registers an external [[TableSink]] with given field names and types in this * [[TableEnvironment]]'s catalog. * Registered sink tables can be referenced in SQL DML statements. * * Example: * * {{{ * // create a table sink and its field names and types * val fieldNames: Array[String] = Array("a", "b", "c") * val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG) * val tableSink: BatchTableSink = new YourTableSinkImpl(...) * * // register the table sink in the catalog * tableEnv.registerTableSink("output_table", fieldNames, fieldsTypes, tableSink) * * // use the registered sink * tableEnv.sqlUpdate("INSERT INTO output_table SELECT a, b, c FROM sourceTable") * }}} * * @param name The name under which the [[TableSink]] is registered. * @param fieldNames The field names to register with the [[TableSink]]. * @param fieldTypes The field types to register with the [[TableSink]]. * @param tableSink The [[TableSink]] to register. 
*/ def registerTableSink( name: String, fieldNames: Array[String], fieldTypes: Array[TypeInformation[_]], tableSink: TableSink[_]): Unit = { checkValidTableName(name) if (fieldNames == null) throw TableException("fieldNames must not be null.") if (fieldTypes == null) throw TableException("fieldTypes must not be null.") if (fieldNames.length == 0) throw new TableException("fieldNames must not be empty.") if (fieldNames.length != fieldTypes.length) { throw new TableException("Same number of field names and types required.") } tableSink match { case batchTableSink: BatchTableSink[_] => val configuredSink = batchTableSink.configure(fieldNames, fieldTypes) registerTableInternal(name, new TableSinkTable(configuredSink)) case _ => throw new TableException("Only BatchTableSink can be registered in BatchTableEnvironment.") } } /** * Writes a [[Table]] to a [[TableSink]]. * * Internally, the [[Table]] is translated into a [[DataSet]] and handed over to the * [[TableSink]] to write it. * * @param table The [[Table]] to write. * @param sink The [[TableSink]] to write the [[Table]] to. * @param queryConfig The configuration for the query to generate. * @tparam T The expected type of the [[DataSet]] which represents the [[Table]]. */ override private[flink] def writeToSink[T]( table: Table, sink: TableSink[T], queryConfig: QueryConfig): Unit = { // We do not pass the configuration on, because there is nothing to configure for batch queries. queryConfig match { case _: BatchQueryConfig => case _ => throw new TableException("BatchQueryConfig required to configure batch query.") } sink match { case batchSink: BatchTableSink[T] => val outputType = sink.getOutputType // translate the Table into a DataSet and provide the type that the TableSink expects. val result: DataSet[T] = translate(table)(outputType) // Give the DataSet to the TableSink to emit it. 
batchSink.emitDataSet(result) case _ => throw new TableException("BatchTableSink required to emit batch Table.") } } /** * Creates a final converter that maps the internal row type to external type. * * @param physicalTypeInfo the input of the sink * @param schema the input schema with correct field names (esp. for POJO field mapping) * @param requestedTypeInfo the output type of the sink * @param functionName name of the map function. Must not be unique but has to be a * valid Java class identifier. */ protected def getConversionMapper[IN, OUT]( physicalTypeInfo: TypeInformation[IN], schema: RowSchema, requestedTypeInfo: TypeInformation[OUT], functionName: String) : Option[MapFunction[IN, OUT]] = { val converterFunction = generateRowConverterFunction[OUT]( physicalTypeInfo.asInstanceOf[TypeInformation[Row]], schema, requestedTypeInfo, functionName ) // add a runner if we need conversion converterFunction.map { func => new MapRunner[IN, OUT]( func.name, func.code, func.returnType) } } /** * Returns the AST of the specified Table API and SQL queries and the execution plan to compute * the result of the given [[Table]]. * * @param table The table for which the AST and execution plan will be returned. * @param extended Flag to include detailed optimizer estimates. 
*/ private[flink] def explain(table: Table, extended: Boolean): String = { val ast = table.getRelNode val optimizedPlan = optimize(ast) val dataSet = translate[Row](optimizedPlan, ast.getRowType) (new GenericTypeInfo(classOf[Row])) dataSet.output(new DiscardingOutputFormat[Row]) val env = dataSet.getExecutionEnvironment val jasonSqlPlan = env.getExecutionPlan val sqlPlan = PlanJsonParser.getSqlExecutionPlan(jasonSqlPlan, extended) s"== Abstract Syntax Tree ==" + System.lineSeparator + s"${RelOptUtil.toString(ast)}" + System.lineSeparator + s"== Optimized Logical Plan ==" + System.lineSeparator + s"${RelOptUtil.toString(optimizedPlan)}" + System.lineSeparator + s"== Physical Execution Plan ==" + System.lineSeparator + s"$sqlPlan" } /** * Returns the AST of the specified Table API and SQL queries and the execution plan to compute * the result of the given [[Table]]. * * @param table The table for which the AST and execution plan will be returned. */ def explain(table: Table): String = explain(table: Table, extended = false) /** * Registers a [[DataSet]] as a table under a given name in the [[TableEnvironment]]'s catalog. * * @param name The name under which the table is registered in the catalog. * @param dataSet The [[DataSet]] to register as table in the catalog. * @tparam T the type of the [[DataSet]]. */ protected def registerDataSetInternal[T](name: String, dataSet: DataSet[T]): Unit = { val (fieldNames, fieldIndexes) = getFieldInfo[T](dataSet.getType) val dataSetTable = new DataSetTable[T]( dataSet, fieldIndexes, fieldNames ) registerTableInternal(name, dataSetTable) } /** * Registers a [[DataSet]] as a table under a given name with field names as specified by * field expressions in the [[TableEnvironment]]'s catalog. * * @param name The name under which the table is registered in the catalog. * @param dataSet The [[DataSet]] to register as table in the catalog. * @param fields The field expressions to define the field names of the table. 
* @tparam T The type of the [[DataSet]]. */ protected def registerDataSetInternal[T]( name: String, dataSet: DataSet[T], fields: Array[Expression]): Unit = { val (fieldNames, fieldIndexes) = getFieldInfo[T]( dataSet.getType, fields) if (fields.exists(_.isInstanceOf[TimeAttribute])) { throw new ValidationException( ".rowtime and .proctime time indicators are not allowed in a batch environment.") } val dataSetTable = new DataSetTable[T](dataSet, fieldIndexes, fieldNames) registerTableInternal(name, dataSetTable) } /** * Returns the built-in normalization rules that are defined by the environment. */ protected def getBuiltInNormRuleSet: RuleSet = FlinkRuleSets.DATASET_NORM_RULES /** * Returns the built-in optimization rules that are defined by the environment. */ protected def getBuiltInPhysicalOptRuleSet: RuleSet = FlinkRuleSets.DATASET_OPT_RULES /** * Generates the optimized [[RelNode]] tree from the original relational node tree. * * @param relNode The original [[RelNode]] tree * @return The optimized [[RelNode]] tree */ private[flink] def optimize(relNode: RelNode): RelNode = { // 0. convert sub-queries before query decorrelation val convSubQueryPlan = runHepPlanner( HepMatchOrder.BOTTOM_UP, FlinkRuleSets.TABLE_SUBQUERY_RULES, relNode, relNode.getTraitSet) // 0. convert table references val fullRelNode = runHepPlanner( HepMatchOrder.BOTTOM_UP, FlinkRuleSets.TABLE_REF_RULES, convSubQueryPlan, relNode.getTraitSet) // 1. decorrelate val decorPlan = RelDecorrelator.decorrelateQuery(fullRelNode) // 2. normalize the logical plan val normRuleSet = getNormRuleSet val normalizedPlan = if (normRuleSet.iterator().hasNext) { runHepPlanner(HepMatchOrder.BOTTOM_UP, normRuleSet, decorPlan, decorPlan.getTraitSet) } else { decorPlan } // 3. 
optimize the logical Flink plan val logicalOptRuleSet = getLogicalOptRuleSet val logicalOutputProps = relNode.getTraitSet.replace(FlinkConventions.LOGICAL).simplify() val logicalPlan = if (logicalOptRuleSet.iterator().hasNext) { runVolcanoPlanner(logicalOptRuleSet, normalizedPlan, logicalOutputProps) } else { normalizedPlan } // 4. optimize the physical Flink plan val physicalOptRuleSet = getPhysicalOptRuleSet val physicalOutputProps = relNode.getTraitSet.replace(FlinkConventions.DATASET).simplify() val physicalPlan = if (physicalOptRuleSet.iterator().hasNext) { runVolcanoPlanner(physicalOptRuleSet, logicalPlan, physicalOutputProps) } else { logicalPlan } physicalPlan } /** * Translates a [[Table]] into a [[DataSet]]. * * The transformation involves optimizing the relational expression tree as defined by * Table API calls and / or SQL queries and generating corresponding [[DataSet]] operators. * * @param table The root node of the relational expression tree. * @param tpe The [[TypeInformation]] of the resulting [[DataSet]]. * @tparam A The type of the resulting [[DataSet]]. * @return The [[DataSet]] that corresponds to the translated [[Table]]. */ protected def translate[A](table: Table)(implicit tpe: TypeInformation[A]): DataSet[A] = { val relNode = table.getRelNode val dataSetPlan = optimize(relNode) translate(dataSetPlan, relNode.getRowType) } /** * Translates a logical [[RelNode]] into a [[DataSet]]. Converts to target type if necessary. * * @param logicalPlan The root node of the relational expression tree. * @param logicalType The row type of the result. Since the logicalPlan can lose the * field naming during optimization we pass the row type separately. * @param tpe The [[TypeInformation]] of the resulting [[DataSet]]. * @tparam A The type of the resulting [[DataSet]]. * @return The [[DataSet]] that corresponds to the translated [[Table]]. 
*/ protected def translate[A]( logicalPlan: RelNode, logicalType: RelDataType) (implicit tpe: TypeInformation[A]): DataSet[A] = { TableEnvironment.validateType(tpe) logicalPlan match { case node: DataSetRel => val plan = node.translateToPlan(this) val conversion = getConversionMapper( plan.getType, new RowSchema(logicalType), tpe, "DataSetSinkConversion") conversion match { case None => plan.asInstanceOf[DataSet[A]] // no conversion necessary case Some(mapFunction: MapFunction[Row, A]) => plan.map(mapFunction) .returns(tpe) .name(s"to: ${tpe.getTypeClass.getSimpleName}") .asInstanceOf[DataSet[A]] } case _ => throw TableException("Cannot generate DataSet due to an invalid logical plan. " + "This is a bug and should not happen. Please file an issue.") } } }
haohui/flink
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/BatchTableEnvironment.scala
Scala
apache-2.0
16,454
/* * La Trobe University - Distributed Deep Learning System * Copyright 2016 Matthias Langer (t3l@threelights.de) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.latrobe.blaze.objectives import edu.latrobe._ import edu.latrobe.blaze._ import edu.latrobe.time._ /** * Objectives that derive from this are dummy objectives that inject a new * value for sub-objectives. */ abstract class ReplaceValue[TBuilder <: ReplaceValueBuilder[_]] extends DependentObjectiveEx[TBuilder] { final override protected def doEvaluate(sink: Sink, optimizer: OptimizerLike, runBeginIterationNo: Long, runBeginTime: Timestamp, runNoSamples: Long, model: Module, batch: Batch, output: Tensor, value: Real) : Option[ObjectiveEvaluationResult] = { val newValue = doEvaluate( optimizer, runBeginIterationNo, runBeginTime, runNoSamples, model, batch, output, value ) super.doEvaluate( sink, optimizer, runBeginIterationNo, runBeginTime, runNoSamples, model, batch, output, newValue ) } protected def doEvaluate(optimizer: OptimizerLike, runBeginIterationNo: Long, runBeginTime: Timestamp, runNoSamples: Long, model: Module, batch: Batch, output: Tensor, value: Real) : Real } abstract class ReplaceValueBuilder[TThis <: ReplaceValueBuilder[_]] extends DependentObjectiveExBuilder[TThis] { }
bashimao/ltudl
blaze/src/main/scala/edu/latrobe/blaze/objectives/ReplaceValue.scala
Scala
apache-2.0
2,577
object A { opaque type T = Int object T println }
som-snytt/dotty
tests/pos/i6287.scala
Scala
apache-2.0
61
import scala.collection.mutable.Builder class DDD[S,T,A] trait NN[S, T, A, K[_], +D <: DDD[Set[S],T,K[A]]] class NNN[S, T, K[_], A] extends NN[S, T, A, K, DDD[Set[S],T,K[A]]] object NN { def newBuilder[S, T, A, K[_]]: NNbuilder[S, T, K, A, DDD[Set[S],T,K[A]], NN[S,T,A,K,?], Unit] = new NNNbuilder[S, T, K, A] } // Remove the type parameter E, hardcoding in E := Unit, and the issue // goes away. trait NNbuilder [S, T, K[_], A, +D <: DDD[Set[S],T,K[A]], +N <: NN[S,T,A,K,D], E] extends Builder[Unit, N] { def clear(): Unit = throw new UnsupportedOperationException() final def addOne(builder: E): this.type = this } // Unfold this class defn, and the issue goes away abstract class AbstractNNNbuilder [S, T, K[_], A, +D <: DDD[Set[S],T,K[A]], +N <: NN[S,T,A,K,D], E] extends NNbuilder[S,T,K,A,D,N,E] class NNNbuilder[S, T, K[_], A] extends AbstractNNNbuilder[ S, T, K, A, DDD[Set[S], T, K[A]], NNN[S, T, K, A], Unit ] { override def result(): NNN[S, T, K, A] = new NNN[S, T, K, A] } @main def Test: Unit = { val builder = NN.newBuilder[String, Char, Int, Set] builder += () builder.result() }
dotty-staging/dotty
tests/run/i13087.scala
Scala
apache-2.0
1,138
/* * Copyright (c) 2012 Orderly Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ package co.orderly.narcolepsy package adapters // Java import java.net.{URLEncoder, URI} import java.io.InputStream // Apache HttpClient import org.apache.http.{Header, StatusLine} import org.apache.http.message._ import org.apache.http.auth._ import org.apache.http.params._ import org.apache.http.client._ import org.apache.http.client.methods._ import org.apache.http.client.utils.URLEncodedUtils import org.apache.http.entity.StringEntity import org.apache.http.impl.client._ import org.apache.http.protocol.HTTP // Scala import scala.io.Source import scala.collection.immutable.TreeMap // Narcolepsy import co.orderly.narcolepsy.utils._ // Full path because Apache HttpClient has a utils sub-package too trait ApacheHttpClientAdapter extends HttpAdapter { // Borrow these from the Client... self: { val configuration: ClientConfiguration } => // Private immutable copy of an Apache HttpClient, we use this to access the API private val httpClient = new DefaultHttpClient /** * Handles an HTTP request to the web service * @param requestMethod HttpMethod to apply to this request * @param requestData The payload * @param requestUri Relative path to resource. 
Attach rootUri to get the absolute URI * @return The RestfulResponse (response code, response body and response header) */ def execute(requestMethod: HttpMethod, requestData: Option[String], requestUri: String): RestfulResponse = { // Validate the URI string // TODO: this would really be common to all adapters. Should be moved out of here val uriString = configuration.rootUri + requestUri val uri = try { new URI(uriString) } catch { case _ => throw new HttpAdapterException("Web service URI for action is malformed: %s".format(uriString)) } // Construct the right type of HttpRequest object for our given HttpMethod, // and validate that our requestData payload is set appropriately val request = (requestMethod, requestData) match { case (GetMethod, _) => new HttpGet(uri) // _ because a GET action may have a payload case (DeleteMethod, _) => new HttpDelete(uri) // _ because a DELETE action may have a payload case (PutMethod, Some(d)) => new HttpPut(uri) case (PostMethod,Some(d)) => new HttpPost(uri) case (PutMethod, None) => throw new HttpAdapterException("Request data missing for HTTP PUT action") case (PostMethod, None) => throw new HttpAdapterException("Request data missing for HTTP POST action") case _ => throw new HttpAdapterException("Http action not supported") } // TODO: move to spray-style HttpMethod.GET structure so we can specify the unsupported verb in the error message // Configure the authentication httpClient.getCredentialsProvider().setCredentials( new AuthScope(request.getURI.getHost, request.getURI.getPort), new UsernamePasswordCredentials(configuration.username, configuration.password) ) // Attach the payload to the request if we have some - how we pass it in depends on whether it's a POST or PUT request.setHeader("Accept", configuration.contentType) // TODO - also figure out how encodeXML works with JSON request match { case r:HttpPut => r.setEntity(new StringEntity(requestData.get, configuration.contentType, configuration.encoding)) case r:HttpPost 
=> r.setEntity(new StringEntity(requestData.get, configuration.contentType, configuration.encoding)) case _ => } // Execute the request and retrieve the response code and headers val response = httpClient.execute(request) val code = response.getStatusLine().getStatusCode() // TODO are we throwing away any info here? val headers = convertHeaders(response.getAllHeaders()) // Now get the response body if we have one val responseEntity = Option(response.getEntity()) val data = responseEntity match { case None => None case _ => Option(Source.fromInputStream(responseEntity.get.getContent(), configuration.encoding).mkString) } // Finally let's return the RestfulResponse // TODO: headers not working (code, headers, data) } /** * Converts a Java Array of Apache HTTP Headers into a Narcolepsy-friendly * RestfulHeaders type (a Map[String, String]). * @param apacheHeaders An array of HTTP headers in Apache HTTP Header format * @return The Narcolepsy-friendly RestfulHeaders */ protected def convertHeaders(apacheHeaders: Array[Header]): RestfulHeaders = (TreeMap.empty[String, String] /: apacheHeaders) { (tree, ah) => tree + (ah.getName() -> ah.getValue()) } }
orderly/narcolepsy-scala
src/main/scala/co/orderly/narcolepsy/adapters/ApacheHttpClientAdapter.scala
Scala
apache-2.0
5,368
object Test { trait S { type T; val x: AnyRef } trait A extends S { type T <: A; val x: A = null } trait B extends S { type T <: B; val x: B = null } val a = new A{} val b = new B{} val y = if (true) a else b // lub of y should allow for this println(y.x.x) }
yusuke2255/dotty
tests/pending/pos/t5317.scala
Scala
bsd-3-clause
278
package io.github.hbase4s.serializer import scala.collection.concurrent.TrieMap import scala.reflect.runtime.universe._ /** * Encoders (serializers) registry that allow implicitly transform different types * to and from byte array (that is necessary by HBase). * * Created by Volodymyr.Glushak on 31/05/2017. */ object EncoderRegistry { def fromString(someType: String, someVal: String) = { val enc = queryCache.getOrElse(someType, sys.error(s"$someType does not supported in query.")) enc.fromString(someVal) } private val cache = new TrieMap[Type, Encoder[_]]() private val queryCache = new TrieMap[String, QueryEncoder[_]]() def add(t: Type, enc: Encoder[_]): Option[Encoder[_]] = { cache.put(t, enc) enc match { case qenc: QueryEncoder[_] => queryCache.put(qenc.name, qenc) } } // there are two ways to encode - by type or by value (extracting type from it def encodeByType(tt: Type): Encoder[_] = cache.getOrElse(tt, { val (_, enc) = cache.find { case (kt, _) => tt =:= kt }.getOrElse( cache.find { case (kt, _) => tt <:< kt }.getOrElse(// try to use encoder for super type. is that correct? // do not implement global encoders... 
sys.error(s"Can't find encoder for type $tt.") ) ) enc }) // this method was introduced to handle cases when information about type lost in runtime // (for ex.: value of some specific type defined as Any) def encodeByValue[T: TypeTag](value: T): Encoder[_] = { val tValue = typeOf[T] val (_, enc) = cache.find { case (kt, _) => tValue =:= kt || tValue <:< kt }.getOrElse( cache.find { case (kt, _) => kt.typeSymbol.fullName == value.getClass.getTypeName }.getOrElse( sys.error(s"Can't find encoder for value with unexpected type $value.") ) ) enc } // some option encoders below add(typeOf[scala.Some[_]], new OptionEncoder[String]("option") { override def fromString(s: String): Option[String] = Option(s) }) add(typeOf[Option[_]], new OptionEncoder[String]("option") { override def fromString(s: String): Option[String] = Option(s) }) add(typeOf[Option[String]], new OptionEncoder[String]("option_str") { override def fromString(s: String): Option[String] = Option(s) }) add(typeOf[Option[Int]], new OptionEncoder[Int]("option_int") { override def fromString(s: String): Option[Int] = Option(s.toInt) }) add(typeOf[Option[Long]], new OptionEncoder[Long]("option_long") { override def fromString(s: String): Option[Long] = Option(s.toLong) }) add(typeOf[Option[Short]], new OptionEncoder[Short]("option_short") { override def fromString(s: String): Option[Short] = Option(s.toShort) }) add(typeOf[Option[Float]], new OptionEncoder[Float]("option_float") { override def fromString(s: String): Option[Float] = Option(s.toFloat) }) add(typeOf[Option[Double]], new OptionEncoder[Double]("option_double") { override def fromString(s: String): Option[Double] = Option(s.toDouble) }) add(typeOf[Option[Boolean]], new OptionEncoder[Boolean]("option_bool") { override def fromString(s: String): Option[Boolean] = Option(s.toBoolean) }) add(typeOf[Option[BigDecimal]], new OptionEncoder[BigDecimal]("option_bigdecimal") { override def fromString(s: String): Option[BigDecimal] = Option(BigDecimal(s)) }) 
add(typeOf[None.type], new OptionEncoder[String]("none") { override def fromString(s: String): Option[String] = None }) }
hbase4s/hbase4s
src/main/scala/io/github/hbase4s/serializer/EncoderRegistry.scala
Scala
mit
3,482
package io.youi.http.cookie import io.youi.http.DateHeaderKey sealed trait Cookie { def name: String def value: String def http: String } case class RequestCookie(name: String, value: String) extends Cookie { override def http: String = s"$name=$value" } case class ResponseCookie(name: String, value: String, expires: Option[Long] = None, maxAge: Option[Long] = None, domain: Option[String] = None, path: Option[String] = None, secure: Boolean = false, httpOnly: Boolean = false, sameSite: SameSite = SameSite.Normal) extends Cookie { override def http: String = { val b = new StringBuilder b.append(s"$name=$value") expires.foreach(l => b.append(s"; Expires=${DateHeaderKey.format(l)}")) maxAge.foreach(l => b.append(s"; Max-Age=$l")) domain.foreach(s => b.append(s"; Domain=$s")) path.foreach(s => b.append(s"; Path=$s")) if (secure) b.append("; Secure") if (httpOnly) b.append("; HttpOnly") sameSite match { case SameSite.Normal => // Nothing to set case SameSite.Lax => b.append("; SameSite=lax") case SameSite.Strict => b.append("; SameSite=strict") } b.toString() } override def hashCode(): Int = name.hashCode override def equals(obj: Any): Boolean = obj match { case that: Cookie => this.name == that.name case _ => false } }
outr/youi
core/shared/src/main/scala/io/youi/http/cookie/Cookie.scala
Scala
mit
1,537
package dynamite import java.io.File import zio.{Task, ZManaged} import zio.test._ import zio.test.Assertion._ object DynamiteConfigSpec extends DefaultRunnableSpec { def deleteFile(file: File): Task[Unit] = if (file.exists) { for { deleted <- if (file.isFile) { Task(file.delete()) } else { Task.foreach(file.listFiles().toSeq)(deleteFile(_)) *> Task( file.delete() ) } () <- if (!deleted) { Task.fail(new Exception(s"Failed to delee file $file")) } else Task.unit } yield () } else Task.unit // based on https://github.com/robey/scalatest-mixins/blob/master/src/main/scala/com/twitter/scalatest/TestFolder.scala def tempDirectory[A] = ZManaged.make(Task { val tempFolder = System.getProperty("java.io.tmpdir") var folder: File = null do { folder = new File(tempFolder, "scalatest-" + System.nanoTime) } while (!folder.mkdir()) folder })(folder => deleteFile(folder).orDie) def spec = suite("config")( testM("load default values for empty file")( tempDirectory.use { directory => for { result <- DynamiteConfig.loadConfig(directory) configFile = new File(directory, "config") config <- DynamiteConfig.parseConfig(configFile) } yield assert(configFile.exists)(isTrue) && assert(config)( equalTo(DynamiteConfig()) ) } ), testM("load existing config file")( tempDirectory.use { directory => val configFile = new File(directory, "config") DynamiteConfig.write( configFile, """dynamite { | pageSize = 30 |} """.stripMargin ) assertM(DynamiteConfig.loadConfig(directory))( equalTo(DynamiteConfig(pageSize = 30)) ) } ) ) }
joprice/dynamite
src/test/scala/dynamite/DynamiteConfigSpec.scala
Scala
apache-2.0
1,899
package probability.random import au.id.cxd.math.probability.analysis.AndersonDarlingTest import au.id.cxd.math.probability.continuous.Normal import au.id.cxd.math.probability.random.RUniform import org.scalatest.{FlatSpec, Matchers} class TestRandom extends FlatSpec with Matchers { def testRandom (sample:Seq[Double], min:Double, max:Double):Boolean = sample.forall { s => min <= s && s <= max } "uniform random" should "generate random between -1 and 1" in { val min = -1.0 val max = 1.0 val rand = RUniform(min,max) val sample = rand.draw(100) val flag = testRandom (sample, min, max) println(sample) flag should be (true) } "uniform random" should "generate sequence" in { val min = -1.0 val max= 1.0 val rand = RUniform(min,max) val sample = rand.generate .take(10) val flag = testRandom(sample, min, max) val lengthFlag = sample.length == 10 flag should be (true) lengthFlag should be (true) } "uniform random" should "generate scaled values between -5 and 5" in { val min = -5.0 val max = 5.0 val rand = RUniform(min,max) val sample = rand.draw(100) val flag = testRandom(sample, min, max) println(sample) flag should be(true) } "uniform random" should "not be normal" in { val min = 0.0 val max = 1.0 val rand = RUniform(min,max) val sample = rand.draw(100) val flag = testRandom(sample, min, max) println(sample) flag should be(true) } }
cxd/scala-au.id.cxd.math
math/src/test/scala/probability/random/TestRandom.scala
Scala
mit
1,498
/* Copyright 2013 Anton Kraievoy akraievoy@gmail.com This file is part of Holonet. Holonet is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Holonet is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Holonet. If not, see <http://www.gnu.org/licenses/>. */ package org.akraievoy.holonet.exp.data import org.akraievoy.holonet.exp._ import org.akraievoy.cnet.gen.vo.{ConnPreferenceYookJeongBarabasi, MetricEuclidean, EntropySourceRandom} import org.akraievoy.cnet.gen.domain.{OverlayNetFactory, MetricEDataGenStructural, LocationGeneratorRecursive} import org.akraievoy.cnet.net.vo._ import org.akraievoy.cnet.metrics.domain._ import scala.collection.JavaConversions._ import org.akraievoy.base.ref.{RefRO, Ref} import org.akraievoy.cnet.opt.api._ import org.akraievoy.cnet.soo.domain._ import org.akraievoy.cnet.opt.domain.ExperimentGeneticOpt import scala.Some import org.akraievoy.cnet.net.vo.Store.Width object OverlayGA { import java.lang.{ Byte => JByte, Integer => JInt, Long => JLong, Float => JFloat, Double => JDouble } object ParamNames { // stage 1 inputs val p1locProbSeed = ParamName[JLong]("p1locProbSeed") val p1locSeed = ParamName[JLong]("p1locSeed") val p1structSeed = ParamName[JLong]("p1structSeed") val p1physNodes = ParamName[JInt]("p1physNodes") val p1physPowFactor = ParamName[JDouble]("p1physAlpha") val p1physDistFactor = ParamName[JDouble]("p1physBeta") val p1physDegree = ParamName[JInt]("p1physDegree") // stage 1 outputs val p1physInit = ParamName[EdgeDataSparse]("p1physInit") val p1phys = ParamName[EdgeDataSparse]("p1phys") val p1dist = 
ParamName[EdgeDataDense]("p1dist") val p1routeLen = ParamName[EdgeDataDense]("p1routeLen") val p1locX = ParamName[VertexData]("p1locX") val p1locY = ParamName[VertexData]("p1locY") val p1density = ParamName[VertexData]("p1density") val p1lambda = ParamName[JDouble]("p1lambda") val p1powers = ParamName[StoreInt]("p1powers") val p1densities = ParamName[StoreDouble]("p1densities") val p1distances = ParamName[StoreDouble]("p1distances") // stage 2 inputs val p2nodeSeed = ParamName[JLong]("p2nodeSeed") val p2reqSeed = ParamName[JLong]("p2reqSeed") val p2nodePowFactor = ParamName[JDouble]("p2nodePowFactor") val p2nodeRatio = ParamName[JDouble]("p2nodeRatio") val p2reqClientFactor = ParamName[JDouble]("p2reqClientFactor") val p2reqServerFactor = ParamName[JDouble]("p2reqServerFactor") val p2reqVariance = ParamName[JDouble]("p2reqVariance") val p2reqMinVal = ParamName[JDouble]("p2reqMinVal") val p2reqMinRatio = ParamName[JDouble]("p2reqMinRatio") val p2reqStoreVol = ParamName[StoreDouble]("p2reqStoreVol") val p2reqStoreDist = ParamName[StoreDouble]("p2reqStoreDist") val p2reqStoreFromDensity = ParamName[StoreDouble]("p2reqStoreFromDensity") val p2reqStoreIntoDensity = ParamName[StoreDouble]("p2reqStoreIntoDensity") // stage 2 outputs val p2nodeIndex = ParamName[VertexData]("p2nodeIndex") val p2nodeDist = ParamName[EdgeDataDense]("p2nodeDist") val p2req = ParamName[EdgeDataSparse]("p2req") val p2locX = ParamName[VertexData]("p2locX") val p2locY = ParamName[VertexData]("p2locY") val p2density = ParamName[VertexData]("p2density") // stage 3 inputs val p3seed = ParamName[JLong]("p3gaSeed") val p3generation = ParamName[JInt]("p3generation") val p3specimen = ParamName[JInt]("p3specimen") val p3generateMax = ParamName[JDouble]("p3generateMax") val p3generatePow = ParamName[JDouble]("p3generatePow") val p3elite = ParamName[JDouble]("p3gaElite") val p3crossover = ParamName[JDouble]("p3crossover") val p3mutate = ParamName[JDouble]("p3mutate") val p3stateCrossoverMax = 
ParamName[JDouble]("p3stateCrossoverMax") val p3stateFitPowMax = ParamName[JDouble]("p3stateFitPowMax") val p3stateMutateMax = ParamName[JDouble]("p3stateMutateMax") val p3netDensityMax = ParamName[JDouble]("p3netDensityMax") val p3nodeDensityMin = ParamName[JDouble]("p3nodeDensityMin") val p3nodeDensityMax = ParamName[JDouble]("p3nodeDensityMax") val p3stepDelta = ParamName[JInt]("p3stepDelta") val p3minEff = ParamName[JDouble]("p3minEff") val p3flags = ParamName[String]("p3flags") val p3fitCap = ParamName[JDouble]("p3fitCap") // stage 3 outputs val p3genome = ParamName[JDouble]("p3genome") val p3genomeBest = ParamName[EdgeDataSparse]("p3genomeBest.0") val p3genomeBestDist = ParamName[StoreDouble]("p3genomeBestDist") val p3genomeBestPowers = ParamName[StoreInt]("p3genomeBestPowers") val p3time = ParamName[String]("p3time") val p3timeMillis = ParamName[JLong]("p3timeMillis") } import ParamNames._ val experiment1physDataset = Experiment( "overlayGO-1-physDataset", "Overlay GO [stage1] Physical Dataset", Nil, { rs => val entropySourceLocGen = new EntropySourceRandom() entropySourceLocGen.setSeed( rs.lens(p1locProbSeed).get.get ) val locationGenerator = new LocationGeneratorRecursive( entropySourceLocGen ) locationGenerator.setGridSize(1024) locationGenerator.setDimensionRatio(1.5) val netFactory = new MetricEDataGenStructural() netFactory.setNetNodeNum(3) netFactory.setType("path") netFactory.setTarget(rs.lens(p1physInit)) val entropySourceLocation = new EntropySourceRandom() entropySourceLocation.setSeed( rs.lens(p1locSeed).get.get ) val locationMetric = new MetricVDataLocation( entropySourceLocation, locationGenerator ) locationMetric.setTargetX(rs.lens(p1locX)) locationMetric.setTargetY(rs.lens(p1locY)) locationMetric.setNodes(rs.lens(p1physNodes).get.get) val distMetric = new MetricEDataDistance( new MetricEuclidean() ) distMetric.setSourceX(rs.lens(p1locX)) distMetric.setSourceY(rs.lens(p1locY)) distMetric.setTarget(rs.lens(p1dist)) val entropySource = new 
EntropySourceRandom() entropySource.setSeed( rs.lens(p1structSeed).get.get ) val connPreference = new ConnPreferenceYookJeongBarabasi() connPreference.setAlpha(rs.lens(p1physPowFactor).get.get) connPreference.setBeta(rs.lens(p1physDistFactor).get.get) val structure = new MetricEDataStructure( connPreference, entropySource ) structure.setDistSource(rs.lens(p1dist)) structure.setStructureSource(rs.lens(p1physInit)) structure.setTarget(rs.lens(p1phys)) structure.setBaseDegree(rs.lens(p1physDegree).get.get) val densityMetric = new MetricVDataDensity( locationGenerator ) densityMetric.setSourceX(rs.lens(p1locX)) densityMetric.setSourceY(rs.lens(p1locY)) densityMetric.setTarget(rs.lens(p1density)) val metricRoutesJohnson = new MetricRoutesJohnson() metricRoutesJohnson.setSource(rs.lens(p1phys)) metricRoutesJohnson.setDistSource(rs.lens(p1dist)) val routeLenMetric = new MetricEDataRouteLen( metricRoutesJohnson ) routeLenMetric.setTarget(rs.lens(p1routeLen)) val eigenGapMetric = new MetricScalarEigenGap() eigenGapMetric.setSource(rs.lens(p1phys)) eigenGapMetric.setTarget(rs.lens(p1lambda)) val powersMetric = new MetricVDataPowers( rs.lens(p1phys) ) val powersStoreMetric = new MetricStoreVData( powersMetric, rs.lens(p1powers), Width.INT ) val densitiesStoreMetric = new MetricStoreVData( rs.lens(p1density), rs.lens(p1densities), Width.DOUBLE ) val distancesStoreMetric = new MetricStoreEData( rs.lens(p1phys), rs.lens(p1dist), rs.lens(p1distances), Width.DOUBLE ) val main = new RunnableComposite() main.setGroup( Seq( locationGenerator, netFactory, locationMetric, distMetric, structure, densityMetric, routeLenMetric, eigenGapMetric, powersStoreMetric, densitiesStoreMetric, distancesStoreMetric ) ) main.run() // d'oh, at last }, Config( Param(p1locProbSeed, "123098"), Param(p1locSeed, "579384"), Param(p1structSeed, "780293"), Param(p1physNodes, "32"), Param(p1physPowFactor, "1.5"), Param(p1physDistFactor, "5"), Param(p1physDegree, "3") ), Config( "vis-requests", "Visual for 
Requests", Param(p1physNodes, "64"), Param(p1physDegree, "2") ), Config( "vis-byAlpha", "Visual : By Alpha (64 nodes)", Param(p1physNodes, "64"), Param(p1physPowFactor, "-4;2;4"), Param(p1physDegree, "2") ), Config( "vis-byBeta", "Visual : By Beta (64 nodes)", Param(p1physNodes, "64"), Param(p1physDistFactor, "-4;2;4"), Param(p1physDegree, "2") ), Config( "phys-1024", "Physical (1k nodes, 1 seed)", Param(p1physNodes, "1024") ), Config( "phys-64", "Physical (64 nodes, 1 seed)", Param(p1physNodes, "64") ) ).withGraphvizExport( GraphvizExport( name = "physical", desc = "physical network structure export", edgeStructure = {_.lens(p1phys)}, edgeLabel = {rs => Some(rs.lens(p1dist))}, edgeColor = {rs => Some(rs.lens(p1dist))}, vertexColor = {rs => Some(rs.lens(p1density))}, vertexCoordX = {rs => Some(rs.lens(p1locX))}, vertexCoordY = {rs => Some(rs.lens(p1locY))}, vertexRadius = {rs => Some(rs.lens(p1density))} ) ).withStoreExport( StoreExport( "powers", desc = "physical network, power distribution", Seq(p1powers) ) ).withStoreExport( StoreExport( "distances", desc = "physical network, distance distribution", Seq(p1distances) ) ).withStoreExport( StoreExport( "densities", desc = "physical network, density distribution", Seq(p1densities) ) ) val experiment2overlayDataset = Experiment( "overlayGO-2-ovlDataset", "Overlay GO [stage2] Overlay Dataset", Seq("overlayGO-1-physDataset"), { rs => val ovlESource = new EntropySourceRandom() ovlESource.setSeed( rs.lens(p2nodeSeed).get.get ) val reqESource = new EntropySourceRandom() reqESource.setSeed( rs.lens(p2reqSeed).get.get ) val ovlNetFactory = new OverlayNetFactory(ovlESource) ovlNetFactory.setOmega( rs.lens(p2nodePowFactor).get.get ) ovlNetFactory.setNu( rs.lens(p2nodeRatio).get.get ) ovlNetFactory.setSource( rs.lens(p1phys) ) ovlNetFactory.setTarget(rs.lens(p2nodeIndex)) ovlNetFactory.setEdgeDataMap( Map[RefRO[_ <: EdgeData], Ref[EdgeData]]( rs.lens(p1routeLen) -> rs.lens(p2nodeDist).asInstanceOf[Ref[EdgeData]] ) ) 
ovlNetFactory.setVertexDataMap( Map[RefRO[VertexData], Ref[VertexData]]( rs.lens(p1locX) -> rs.lens(p2locX), rs.lens(p1locY) -> rs.lens(p2locY), rs.lens(p1density) -> rs.lens(p2density) ) ) val ovlRequests = new MetricEDataOverlayRequest(reqESource) ovlRequests.setSource(rs.lens(p2density)) ovlRequests.setPhi(rs.lens(p2reqClientFactor).get.get) ovlRequests.setPsi(rs.lens(p2reqServerFactor).get.get) ovlRequests.setSigma(rs.lens(p2reqVariance).get.get) val reqThreshold = new MetricEDataThreshold(ovlRequests) reqThreshold.setTarget(rs.lens(p2req)) reqThreshold.setMinAbsValue(rs.lens(p2reqMinVal).get.get) reqThreshold.setMinToMaxRatio(rs.lens(p2reqMinRatio).get.get) val volumesStoreMetric = new MetricStoreEData( rs.lens(p2req), rs.lens(p2req), rs.lens(p2reqStoreVol), Width.DOUBLE ).withFromData( rs.lens(p2density), rs.lens(p2reqStoreFromDensity) ).withIntoData( rs.lens(p2density), rs.lens(p2reqStoreIntoDensity) ) val distancesStoreMetric = new MetricStoreEData( rs.lens(p2req), rs.lens(p2nodeDist), rs.lens(p2reqStoreDist), Width.DOUBLE ) ovlNetFactory.run() reqThreshold.run() volumesStoreMetric.run() distancesStoreMetric.run() }, Config( Param(p2nodeSeed, "31013"), Param(p2reqSeed, "11311"), Param(p2nodePowFactor, "-1"), Param(p2nodeRatio, "0.25"), Param(p2reqClientFactor, "0.25"), Param(p2reqServerFactor, "1.25"), Param(p2reqVariance, "2"), Param(p2reqMinVal, "0.2"), Param(p2reqMinRatio, "0.02") ), Config( "nu25", "Default (select 25% of nodes)", Param(p2nodeRatio, "0.25") ), Config( "nu50", "Default (select 50% of nodes)", Param(p2nodeRatio, "0.5") ), Config( "nu20_omegaProf", "Visual (select 20% of nodes), profile by Omega", Param(p2nodeRatio, "0.2"), Param(p2nodePowFactor, "-4;2;4"), Param(p2reqClientFactor, "1"), Param(p2reqServerFactor, "1"), Param(p2reqMinVal, "0.05"), Param(p2reqMinRatio, "0.05") ) ).withGraphvizExport( GraphvizExport( name = "request", desc = "overlay request network", edgeStructure = {_.lens(p2req)}, edgeWidth = {rs => Some(rs.lens(p2req))}, 
edgeLabel = {rs => Some(rs.lens(p2req))}, edgeColor = {rs => Some(rs.lens(p2nodeDist))}, vertexColor = { rs => val powers = new MetricVDataPowers() powers.setSource(rs.lens(p2req)) Some(powers) }, vertexCoordX = {rs => Some(rs.lens(p2locX))}, vertexCoordY = {rs => Some(rs.lens(p2locY))}, vertexRadius = { rs => Some(rs.lens(p2density)) }, vertexLabel = {rs => Some(rs.lens(p2nodeIndex))} ) ).withStoreExport( StoreExport( "requests", desc = "overlay network, request distribution", Seq(p2reqStoreVol, p2reqStoreDist, p2reqStoreFromDensity, p2reqStoreIntoDensity) ) ) val experiment3genetics = Experiment( "overlayGO-3-genetics", "Overlay GO [stage3] Overlay Genetics", Seq("overlayGO-2-ovlDataset"), { rs => val entropySourceGenetics = new EntropySourceRandom() entropySourceGenetics.setSeed( rs.lens(p3seed).get.get ) val gaState = new GeneticState() gaState.setFitnessDeviationMax(0.98) gaState.setFitnessDeviationMin(0.02) gaState.setMinElemFitnessNorm(0.005) gaState.setMaxCrossover(rs.lens(p3stateCrossoverMax).get.get) gaState.setMaxElemFitPow(rs.lens(p3stateFitPowMax).get.get) gaState.setMaxMutation(rs.lens(p3stateMutateMax).get.get) val gaStrategy = new GeneticStrategySoo( new MetricRoutesFloydWarshall() ) gaStrategy.setMinEff(rs.lens(p3minEff).get.get) gaStrategy.setNetDensityMax(rs.lens(p3netDensityMax).get.get) gaStrategy.setNodeDensityMin(rs.lens(p3nodeDensityMin).get.get) gaStrategy.setNodeDensityMax(rs.lens(p3nodeDensityMax).get.get) gaStrategy.setModes(rs.lens(p3flags).get.get) gaStrategy.setSteps(rs.lens(p3stepDelta).get.get) gaStrategy.setDistSource(rs.lens(p2nodeDist)) gaStrategy.setRequestSource(rs.lens(p2req)) gaStrategy.setFitnessCap(rs.lens(p3fitCap).get.get) val ga = new ExperimentGeneticOpt( gaStrategy.asInstanceOf[GeneticStrategy[Genome]], entropySourceGenetics ) ga.setState(gaState) ga.setSeedSource(new SeedSourceHeuristic().asInstanceOf[SeedSource[Genome]]) ga.setBreeders( Seq( new BreederSooExpand(), new BreederSooLocalize() 
).map(_.asInstanceOf[Breeder[Genome]]) ) ga.setMutators( Seq( new MutatorSooRewireExpand(), new MutatorSooRewireLocalize() ).map(_.asInstanceOf[Mutator[Genome]]) ) ga.setAdaptMutators( Seq( new MutatorSooRegularize(), new MutatorSooClusterize(), new MutatorSooNoop() ).map(_.asInstanceOf[Mutator[Genome]]) ) ga.setConditions( Seq( new ConditionSooFitnessCapping(), new ConditionSooVertexDensity(), new ConditionSooEffectiveness(), new ConditionSooDensity(), new ConditionUnique() ).map(_.asInstanceOf[Condition[Genome]]) ) ga.setEliteRatio(rs.lens(p3elite).get.get) ga.setCrossoverRatio(rs.lens(p3crossover).get.get) ga.setMutationRatio(rs.lens(p3mutate).get.get) ga.setGenerateLimitRatioMax(rs.lens(p3generateMax).get.get) ga.setGenerateLimitRatioPow(rs.lens(p3generatePow).get.get) ga.setSpecimenLens(rs.lens(p3specimen)) ga.setGenerationLens(rs.lens(p3generation)) ga.setGenomeLens(rs.lens(p3genome)) val timing = new ExperimentTiming(ga) timing.setDurationTextRef(rs.lens(p3time)) timing.setDurationRef(rs.lens(p3timeMillis)) timing.run() if (rs.lens(p3genomeBest).get.isDefined) { val powersMetric = new MetricVDataPowers( rs.lens(p3genomeBest) ) powersMetric.run() val powersStoreMetric = new MetricStoreVData( powersMetric, rs.lens(p3genomeBestPowers), Width.INT ) powersStoreMetric.run() new MetricStoreEData( rs.lens(p3genomeBest), rs.lens(p2nodeDist), rs.lens(p3genomeBestDist), Width.DOUBLE ).run() } }, Config( Param(p3seed, "42600--42602"), Param(p3generation, "0--100", Strategy.ITERATE, Strategy.USE_LAST), Param(p3specimen, "0--90", Strategy.USE_FIRST, Strategy.USE_FIRST), Param(p3generateMax, "233"), Param(p3generatePow, "2"), Param(p3elite, "0.2"), Param(p3stateCrossoverMax, "0.1"), Param(p3crossover, "0.05"), Param(p3mutate, "0.05"), Param(p3stateFitPowMax, "2"), Param(p3stateMutateMax, "0.025"), Param(p3netDensityMax, "1.75"), Param(p3nodeDensityMin, "0.75"), Param(p3stepDelta, "1"), Param(p3flags, ""), Param(p3minEff, "1.25"), Param(p3fitCap, "1") ), Config( 
"corrStudy-smoke", "Correlation study --- smoke", Param(p3minEff, "0.6"), Param(p3seed, "42600"), Param(p3flags, "R"), Param(p3specimen, "0--21", Strategy.USE_FIRST, Strategy.USE_FIRST), Param( p3fitCap, "0.05;0.25" ), Param(p3generateMax, "64"), Param(p3generatePow, "3"), Param(p3netDensityMax, "0.75"), Param(p3nodeDensityMin, "0.75"), Param(p3nodeDensityMax, "0.75"), Param(p3generation, "0--10", Strategy.ITERATE, Strategy.USE_LAST), Param(p3elite, "0.2") ), Config( "corrStudy-full", "Correlation study --- full", Param(p3minEff, "0.6"), Param(p3seed, "42600--42607"), Param(p3flags, "R"), Param(p3specimen, "0--21", Strategy.USE_FIRST, Strategy.USE_FIRST), Param( p3fitCap, "0.55;0.1;0.15;0.2;0.3" ), Param(p3generateMax, "64"), Param(p3generatePow, "3"), Param(p3netDensityMax, "0.75"), Param(p3nodeDensityMin, "0.75"), Param(p3nodeDensityMax, "0.75"), Param(p3generation, "0--22", Strategy.ITERATE, Strategy.USE_LAST), Param(p3elite, "0.2") ), Config( "minEff12x2x3", "MinEff: 1.2 * 2 seeds * 3 gens (debug)", Param(p3minEff, "1.2"), Param(p3seed, "42600--42601"), Param(p3generation, "0--2", Strategy.ITERATE, Strategy.USE_LAST) ), Config("minEff13", "MinEff: 1.3", Param(p3minEff, "1.3")), Config("minEff14", "MinEff: 1.4", Param(p3minEff, "1.4")), Config("minEff15", "MinEff: 1.5", Param(p3minEff, "1.5")), Config("minEff16", "MinEff: 1.6", Param(p3minEff, "1.6")), Config("minEff17", "MinEff: 1.7", Param(p3minEff, "1.7")), Config( "minEff13-smoke", "MinEff: 1.3 (smoke)", Param(p3minEff, "1.3"), Param(p3seed, "42600"), Param(p3generation, "0--12", Strategy.ITERATE, Strategy.USE_LAST), Param(p3specimen, "0--9", Strategy.USE_FIRST, Strategy.USE_FIRST) ), Config("minEff19", "MinEff: 1.9", Param(p3minEff, "1.9")), Config( "minEff19x50x768", "MinEff: 1.9 * 50 seeds * 768 gens (full opt)", Param(p3minEff, "1.9"), Param(p3seed, "42600--42649"), Param(p3generation, "0--767", Strategy.ITERATE, Strategy.USE_LAST) ), Config( "minEff19x50x256", "MinEff: 1.9 * 50 seeds * 256 gens 
(tuning)", Param(p3minEff, "1.9"), Param(p3seed, "42600--42649"), Param(p3generation, "0--255", Strategy.ITERATE, Strategy.USE_LAST), Param(p3elite, "0.01;0.03;0.05;0.1;0.2") ), Config("minEff20", "MinEff: 2.0", Param(p3minEff, "2.0")), Config("minEff21", "MinEff: 2.1", Param(p3minEff, "2.1")), Config("minEff215", "MinEff: 2.15", Param(p3minEff, "2.15")), Config("minEff23", "MinEff: 2.3", Param(p3minEff, "2.3")), Config( "thetaProf", "Varying Density (Theta)", Param(p3netDensityMax, "1;1.25;1.5;1.75") ), Config( "stepsProf", "Varying Steps", Param(p3stepDelta, "1;3;5"), Param(p3seed, "42600--42620") ) ).withGraphvizExport( GraphvizExport( name = "overlay-best", desc = "overlay network - best specimen", edgeStructure = {_.lens(p3genomeBest)}, edgeColor = {rs => Some(rs.lens(p2nodeDist))}, vertexColor = { rs => val powers = new MetricVDataPowers() powers.setSource(rs.lens(p2req).asInstanceOf[Ref[EdgeData]]) Some(powers) }, vertexCoordX = {rs => Some(rs.lens(p2locX))}, vertexCoordY = {rs => Some(rs.lens(p2locY))}, vertexRadius = { rs => val powers = new MetricVDataPowers() powers.setSource(rs.lens(p3genomeBest).asInstanceOf[Ref[EdgeData]]) Some(powers) }, vertexLabel = {rs => Some(rs.lens(p2nodeIndex))} ) )/*.withStoreExport( // FIXME breaks with out of memory StoreExport( "distances", desc = "overlay network, distance distribution", Seq(p3genomeBestDist) ) ).withStoreExport( StoreExport( "powers", desc = "overlay network, power distribution", Seq(p3genomeBestPowers) ) )*/ }
akraievoy/holonet
src/main/scala/org/akraievoy/holonet/exp/data/OverlayGA.scala
Scala
gpl-3.0
23,473
/** * See <a href="https://www.codeeval.com/open_challenges/44/">Following Integer</a> */ object FollowingInteger extends Challenge { val lines = scala.io.Source.fromFile(args(0)).getLines().filter(_.length > 0) lines.collect { case Input(number, digits) => eval(number, digits) } foreach println import scala.annotation.tailrec @tailrec def eval(number: Int, digits: List[Char]): Int = { val result = compose(digits, Nil).sorted.find(_ > number) if (result.isEmpty) eval(number, '0' :: digits) else result.get } def compose(digits: List[Char], res: List[Char]): List[Int] = digits match { case Nil => res.reverse.mkString.toInt :: Nil case _ => (for (next <- digits if res != Nil || next != '0') yield compose(digits diff List(next), next :: res)).flatten } object Input { // 115 def unapply(line: String) = Some(line.toInt, line.toList) } }
zelca/codeeval
src/FollowingInteger.scala
Scala
mit
917
package com.lucidchart.open.nark.models.records import java.util.UUID trait HasId { def id: UUID } case class Tag( recordId: UUID, tag: String ) extends AppRecord case class TagMap[T]( contents: Map[String, List[T]] ) extends AppRecord
lucidsoftware/nark
app/com/lucidchart/open/nark/models/records/Tag.scala
Scala
apache-2.0
244
package com.example import com.twitter.logging.{ConsoleHandler, Level, Logger, LoggerFactory} import com.twitter.util.{Duration, Time, Timer, TimerTask} import com.twitter.conversions.time._ import com.twitter.app.App object Main extends App { LoggerFactory( node = "", level = Some(Level.INFO), handlers = ConsoleHandler() :: Nil ).apply() private val log = Logger.get(getClass) log.info("Hello World!") log.info("Using logger %s", log) }
manjuraj/scala-abc
src/main/scala/com/example/Main.scala
Scala
apache-2.0
466
/* * Copyright 2011-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.vertx.scala.core.http import org.vertx.java.core.Handler import org.vertx.java.core.http.{ RouteMatcher => JRouteMatcher } import org.vertx.scala.core.FunctionConverters._ import org.vertx.scala.Self /** * Not sure whether this kind of RouteMatcher should stay in Scala... * * @author swilliams * @author <a href="http://www.campudus.com/">Joern Bernhardt</a> * @author Galder Zamarreño */ class RouteMatcher private[scala] (val asJava: JRouteMatcher = new JRouteMatcher()) extends Handler[HttpServerRequest] with (HttpServerRequest => Unit) with Self { def all(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.all(uri, wrapHandler(handler))) def allWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.allWithRegEx(regex, wrapHandler(handler))) def apply(request: HttpServerRequest): Unit = handle(request) def connect(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.connect(uri, wrapHandler(handler))) def connectWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.connectWithRegEx(regex, wrapHandler(handler))) def delete(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.delete(uri, wrapHandler(handler))) def deleteWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = 
wrap(asJava.deleteWithRegEx(regex, wrapHandler(handler))) def get(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.get(uri, wrapHandler(handler))) def getWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.getWithRegEx(regex, wrapHandler(handler))) def handle(request: HttpServerRequest): Unit = asJava.handle(request.asJava) def head(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.head(uri, wrapHandler(handler))) def headWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.headWithRegEx(regex, wrapHandler(handler))) def options(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.options(uri, wrapHandler(handler))) def optionsWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.optionsWithRegEx(regex, wrapHandler(handler))) def patch(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.patch(uri, wrapHandler(handler))) def patchWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.patchWithRegEx(regex, wrapHandler(handler))) def post(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.post(uri, wrapHandler(handler))) def postWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.postWithRegEx(regex, wrapHandler(handler))) def put(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.put(uri, wrapHandler(handler))) def putWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.putWithRegEx(regex, wrapHandler(handler))) def trace(uri: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.trace(uri, wrapHandler(handler))) def traceWithRegEx(regex: String, handler: HttpServerRequest => Unit): RouteMatcher = wrap(asJava.traceWithRegEx(regex, wrapHandler(handler))) private def wrapHandler(handler: 
HttpServerRequest => Unit) = fnToHandler(handler.compose(HttpServerRequest.apply)) } /** Factory for [[org.vertx.scala.core.http.RouteMatcher]] instances. */ object RouteMatcher { def apply(actual: JRouteMatcher): RouteMatcher = new RouteMatcher(actual) def apply(): RouteMatcher = new RouteMatcher() }
vert-x/mod-lang-scala
src/main/scala/org/vertx/scala/core/http/RouteMatcher.scala
Scala
apache-2.0
4,461
/* * Copyright 2015 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.domain import org.scalatest.{ShouldMatchers, WordSpec} import play.api.libs.json.{JsError, JsNumber, JsString, JsSuccess} class TaxCodeFormatsSpec extends WordSpec with ShouldMatchers { import TaxCodeFormats._ "TaxCode reads" should { "correctly read legal string" in { taxCodeReads.reads(JsString("K100")) shouldBe JsSuccess(TaxCode("K100")) } "return JsError if the json is not a string" in { taxCodeReads.reads(JsNumber(2)) shouldBe JsError("Expected a single string") } "return JsError if the string is not a legal taxcode" in { taxCodeReads.reads(JsString("foo")) shouldBe JsError("The code foo is not a legal tax code") } } "TaxCode writes" should { "correctly write a legal taxcode" in { taxCodeWrites.writes(TaxCode("K100")) shouldBe JsString("K100") } } }
howyp/domain
src/test/scala/uk/gov/hmrc/domain/TaxCodeFormatsSpec.scala
Scala
apache-2.0
1,462
package knot.msgpack import java.nio.ByteOrder import java.nio.channels.{ReadableByteChannel, WritableByteChannel} import java.util.concurrent.ConcurrentHashMap import knot.data.buffers.{InputSink, OutputSink} import knot.data.serialization._ import knot.msgpack.gen.{GenericSerializerGenerator, NonGenericSerializerGenerator, ObjectSerializerGenerator, Reflect} class MsgPackProvider extends SerializationProvider { type F[Data] = MsgPackSerializer[Data] import scala.reflect.runtime.universe._ private[this] val _serializerCache = new ConcurrentHashMap[String, MsgPackSerializer[_]] private[this] val generators = Array( new NonGenericSerializerGenerator(), new GenericSerializerGenerator(), new ObjectSerializerGenerator() ) override def get[T: TypeTag](): F[T] = { val key = Reflect.fullTypeName(typeOf[T]) _serializerCache.get(key) match { case null => val created = generators .collectFirst { case g if g.is(typeOf[T]) => g.generate[T] } created match { case None => throw new MsgPackException(s"not found serializer. type:$key") case Some(s) => _serializerCache.putIfAbsent(key, s) match { case null => s case some => some.asInstanceOf[MsgPackSerializer[T]] } } case ser => ser.asInstanceOf[MsgPackSerializer[T]] } } override def register[T: TypeTag](): Unit = get[T]() override def register[T: TypeTag](serializer: F[T]): Unit = { _serializerCache.putIfAbsent(Reflect.fullTypeName(typeOf[T]), serializer) } override def toArray(): Encoder = MsgPackEncoder() override def toChannel(channel: WritableByteChannel, bufferSize: Int): Encoder = MsgPackEncoder { new MsgPackOutput(OutputSink.toChannel(channel, bufferSize, ByteOrder.nativeOrder())) } override def fromArray(bytes: Array[Byte]): Decoder = MsgPackDecoder(bytes) override def fromChannel(channel: ReadableByteChannel, bufferSize: Int): Decoder = MsgPackDecoder { new MsgPackInput(InputSink.fromChannel(channel, bufferSize, ByteOrder.nativeOrder())) } }
defvar/knot
knot-msgpack/src/main/scala/knot/msgpack/MsgPackProvider.scala
Scala
mit
2,140
package prohax import scala.util.matching.Regex object Bootstrap { implicit def regexToRule(r: Regex) = Rule.r(r) def defineInflections_! = { Inflector.configure(inflect => { inflect.plural("$".r, "s") inflect.plural("(?i)s$".r, "s") inflect.plural("(?i)(ax|test)is$".r, "$1es") inflect.plural("(?i)(octop|vir)us$".r, "$1i") inflect.plural("(?i)(alias|status)$".r, "$1es") inflect.plural("(?i)(bu)s$".r, "$1ses") inflect.plural("(?i)(buffal|tomat)o$".r, "$1oes") inflect.plural("(?i)([ti])um$".r, "$1a") inflect.plural("(?i)sis$".r, "ses") inflect.plural("(?i)(?:([^f])fe|([lr])f)$".r, "$1$2ves") inflect.plural("(?i)(hive)$".r, "$1s") inflect.plural("(?i)([^aeiouy]|qu)y$".r, "$1ies") inflect.plural("(?i)(x|ch|ss|sh)$".r, "$1es") inflect.plural("(?i)(matr|vert|ind)(?:ix|ex)$".r, "$1ices") inflect.plural("(?i)([m|l])ouse$".r, "$1ice") inflect.plural("(?i)^(ox)$".r, "$1en") inflect.plural("(?i)(quiz)$".r, "$1zes") // inflect.singular(/s$/i, '') // inflect.singular(/(n)ews$/i, '\\1ews') // inflect.singular(/([ti])a$/i, '\\1um') // inflect.singular(/((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)ses$/i, '\\1\\2sis') // inflect.singular(/(^analy)ses$/i, '\\1sis') // inflect.singular(/([^f])ves$/i, '\\1fe') // inflect.singular(/(hive)s$/i, '\\1') // inflect.singular(/(tive)s$/i, '\\1') // inflect.singular(/([lr])ves$/i, '\\1f') // inflect.singular(/([^aeiouy]|qu)ies$/i, '\\1y') // inflect.singular(/(s)eries$/i, '\\1eries') // inflect.singular(/(m)ovies$/i, '\\1ovie') // inflect.singular(/(x|ch|ss|sh)es$/i, '\\1') // inflect.singular(/([m|l])ice$/i, '\\1ouse') // inflect.singular(/(bus)es$/i, '\\1') // inflect.singular(/(o)es$/i, '\\1') // inflect.singular(/(shoe)s$/i, '\\1') // inflect.singular(/(cris|ax|test)es$/i, '\\1is') // inflect.singular(/(octop|vir)i$/i, '\\1us') // inflect.singular(/(alias|status)es$/i, '\\1') // inflect.singular(/^(ox)en/i, '\\1') // inflect.singular(/(vert|ind)ices$/i, '\\1ex') // inflect.singular(/(matr)ices$/i, '\\1ix') // 
inflect.singular(/(quiz)zes$/i, '\\1') // inflect.singular(/(database)s$/i, '\\1') inflect.irregular("person", "people") inflect.irregular("man", "men") inflect.irregular("child", "children") inflect.irregular("sex", "sexes") inflect.irregular("move", "moves") inflect.irregular("cow", "kine") inflect.uncountable("equipment","information","rice", "money", "species", "series", "fish", "sheep") }) } }
nkpart/scala-inflector
src/main/scala/bootstrap.scala
Scala
mit
2,579
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gearpump.streaming.partitioner import org.apache.gearpump.Message /** * Partition messages by applying group by function first. * * For example: * {{{ * case class People(name: String, gender: String) * * object Test{ * * val groupBy: (People => String) = people => people.gender * val partitioner = GroupByPartitioner(groupBy) * } * }}} * * @param fn First apply message with groupBy function, then pick the hashCode of the output * to do the partitioning. You must define hashCode() for output type of groupBy function. */ class GroupByPartitioner[T, GROUP](fn: T => GROUP) extends UnicastPartitioner { override def getPartition(message: Message, partitionNum: Int, currentPartitionId: Int): Int = { val hashCode = fn(message.value.asInstanceOf[T]).hashCode() (hashCode & Integer.MAX_VALUE) % partitionNum } }
manuzhang/incubator-gearpump
streaming/src/main/scala/org/apache/gearpump/streaming/partitioner/GroupByPartitioner.scala
Scala
apache-2.0
1,677
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.spark import java.io.{BufferedReader, File} import java.net.URLClassLoader import java.nio.file.{Files, Paths} import java.util.Properties import org.apache.spark.SparkConf import org.apache.spark.repl.SparkILoop import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion import org.apache.zeppelin.interpreter.util.InterpreterOutputStream import org.apache.zeppelin.interpreter.{InterpreterContext, InterpreterGroup} import org.slf4j.LoggerFactory import org.slf4j.Logger import scala.tools.nsc.Settings import scala.tools.nsc.interpreter._ /** * SparkInterpreter for scala-2.12 */ class SparkScala212Interpreter(override val conf: SparkConf, override val depFiles: java.util.List[String], override val properties: Properties, override val interpreterGroup: InterpreterGroup, override val sparkInterpreterClassLoader: URLClassLoader) extends BaseSparkScalaInterpreter(conf, depFiles, properties, interpreterGroup, sparkInterpreterClassLoader) { lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass) private var sparkILoop: SparkILoop = _ override val interpreterOutput = new InterpreterOutputStream(LOGGER) override def open(): Unit = { super.open() if 
(conf.get("spark.master", "local") == "yarn-client") { System.setProperty("SPARK_YARN_MODE", "true") } // Only Spark1 requires to create http server, Spark2 removes HttpServer class. val rootDir = conf.get("spark.repl.classdir", System.getProperty("java.io.tmpdir")) val outputDir = Files.createTempDirectory(Paths.get(rootDir), "spark").toFile outputDir.deleteOnExit() conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath) val settings = new Settings() settings.processArguments(List("-Yrepl-class-based", "-Yrepl-outdir", s"${outputDir.getAbsolutePath}"), true) settings.embeddedDefaults(sparkInterpreterClassLoader) settings.usejavacp.value = true settings.classpath.value = getUserJars.mkString(File.pathSeparator) val printReplOutput = properties.getProperty("zeppelin.spark.printREPLOutput", "true").toBoolean val replOut = if (printReplOutput) { new JPrintWriter(interpreterOutput, true) } else { new JPrintWriter(Console.out, true) } sparkILoop = new SparkILoop(None, replOut) sparkILoop.settings = settings sparkILoop.createInterpreter() val in0 = getDeclareField(sparkILoop, "in0").asInstanceOf[Option[BufferedReader]] val reader = in0.fold(sparkILoop.chooseReader(settings))(r => SimpleReader(r, replOut, interactive = true)) sparkILoop.in = reader sparkILoop.initializeSynchronous() sparkILoop.in.postInit() this.scalaCompletion = reader.completion createSparkContext() createZeppelinContext() } protected override def completion(buf: String, cursor: Int, context: InterpreterContext): java.util.List[InterpreterCompletion] = { val completions = scalaCompletion.complete(buf.substring(0, cursor), cursor).candidates .map(e => new InterpreterCompletion(e, e, null)) scala.collection.JavaConversions.seqAsJavaList(completions) } protected def bind(name: String, tpe: String, value: Object, modifier: List[String]): Unit = { sparkILoop.beQuietDuring { val result = sparkILoop.bind(name, tpe, value, modifier) if (result != IR.Success) { throw new RuntimeException("Fail to bind 
variable: " + name) } } } override def close(): Unit = { super.close() if (sparkILoop != null) { sparkILoop.closeInterpreter() } } def scalaInterpret(code: String): scala.tools.nsc.interpreter.IR.Result = sparkILoop.interpret(code) }
sergeymazin/zeppelin
spark/scala-2.12/src/main/scala/org/apache/zeppelin/spark/SparkScala212Interpreter.scala
Scala
apache-2.0
4,673
/*********************************************************************** * Copyright (c) 2013-2015 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 which * accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. *************************************************************************/ package org.locationtech.geomesa.accumulo.iterators import com.typesafe.scalalogging.LazyLogging import org.apache.accumulo.core.client.mock.MockInstance import org.apache.accumulo.core.client.security.tokens.PasswordToken import org.apache.accumulo.core.client.{Connector, IteratorSetting} import org.apache.accumulo.core.iterators.user.RegExFilter import org.junit.runner.RunWith import org.locationtech.geomesa.CURRENT_SCHEMA_VERSION import org.locationtech.geomesa.accumulo.GEOMESA_ITERATORS_VERSION import org.locationtech.geomesa.accumulo.iterators.TestData._ import org.locationtech.geomesa.accumulo.util.GeoMesaBatchWriterConfig import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner import scala.collection.GenSeq import scala.collection.JavaConversions._ import scala.util.{Random, Try} @RunWith(classOf[JUnitRunner]) class SpatioTemporalIntersectingIteratorTest extends Specification with LazyLogging { def getRandomSuffix: String = { val chars = Array[Char]('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F') (1 to 20).map(i => chars(Random.nextInt(chars.size))).mkString } def setupMockAccumuloTable(entries: GenSeq[Entry], tableName: String): Connector = { val mockInstance = new MockInstance() val c = mockInstance.getConnector(TEST_USER, new PasswordToken(Array[Byte]())) c.tableOperations.create(tableName) val bw = c.createBatchWriter(tableName, GeoMesaBatchWriterConfig()) logger.debug(s"Add mutations to table $tableName.") entries.foreach { entry => 
bw.addMutations(createObject(entry.id, entry.wkt, entry.dt)) } logger.debug(s"Done adding mutations to table $tableName.") bw.flush() c } "Consistency Iterator" should { "verify consistency of table" in { val table = "consistentTest" val c = setupMockAccumuloTable(TestData.shortListOfPoints, table) val s = c.createScanner(table, TEST_AUTHORIZATIONS) val cfg = new IteratorSetting(1000, "consistency-iter", classOf[ConsistencyCheckingIterator]) cfg.addOption(GEOMESA_ITERATORS_VERSION, CURRENT_SCHEMA_VERSION.toString) s.addScanIterator(cfg) // validate the total number of query-hits s.iterator().size mustEqual 0 } "verify inconsistency of table" in { val table = "inconsistentTest" val c = setupMockAccumuloTable(TestData.shortListOfPoints, table) val bd = c.createBatchDeleter(table, TEST_AUTHORIZATIONS, 2, GeoMesaBatchWriterConfig()) bd.addScanIterator({ val cfg = new IteratorSetting(100, "regex", classOf[RegExFilter]) RegExFilter.setRegexs(cfg, ".*~1~.*", null, ".*\\\\|data\\\\|1", null, false) cfg }) bd.setRanges(List(new org.apache.accumulo.core.data.Range())) bd.delete() bd.flush() val s = c.createScanner(table, TEST_AUTHORIZATIONS) val cfg = new IteratorSetting(1000, "consistency-iter", classOf[ConsistencyCheckingIterator]) cfg.addOption(GEOMESA_ITERATORS_VERSION, CURRENT_SCHEMA_VERSION.toString) s.addScanIterator(cfg) // validate the total number of query-hits s.iterator().size mustEqual 1 } } "Feature with a null ID" should { "not fail to insert" in { val c = Try(setupMockAccumuloTable(TestData.pointWithNoID, "nullIdTest")) c.isFailure must be equalTo false } } }
vpipkt/geomesa
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/iterators/SpatioTemporalIntersectingIteratorTest.scala
Scala
apache-2.0
3,849
/* * Copyright 2017 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.computations import uk.gov.hmrc.ct.box._ import uk.gov.hmrc.ct.computations.Validators.TradingLossesValidation import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever case class CP287(value: Option[Int]) extends CtBoxIdentifier(name = "Amount of loss carried back to earlier periods") with CtOptionalInteger with Input with ValidatableBox[ComputationsBoxRetriever] with TradingLossesValidation { override def validate(boxRetriever: ComputationsBoxRetriever): Set[CtValidation] = { import boxRetriever._ val max = cp118().value - cp998().orZero collectErrors( requiredErrorIf(value.isEmpty && boxRetriever.cpQ20.isTrue), cannotExistErrorIf({ value.nonEmpty && !boxRetriever.cpQ20().orFalse }), exceedsMax(value, max), belowMin(value, 1) ) } } object CP287 { def apply(int: Int): CP287 = CP287(Some(int)) }
liquidarmour/ct-calculations
src/main/scala/uk/gov/hmrc/ct/computations/CP287.scala
Scala
apache-2.0
1,507
package pump.uno.service import pump.uno.model.TopicPage import spray.http.HttpCookie import scala.concurrent.Future trait TopicPageFetcherComponent { def topicPageFetcher: TopicPageFetcher trait TopicPageFetcher { def fetch(url: String, auth: HttpCookie): Future[TopicPage] } }
gkonst/pump
src/main/scala/pump/uno/service/TopicPageFetcherComponent.scala
Scala
mit
294
package com.evojam.mongodb import com.evojam.mongodb.client.codec.{Writer, Reader} import org.bson.codecs.{Codec, DocumentCodec, BsonDocumentCodec} package object client { implicit lazy val bsonDocumentCodec = new BsonDocumentCodec() implicit lazy val documentCodec = new DocumentCodec() implicit def identityReader[T: Codec]: Reader[T] = new Reader[T] { override type R = T override val codec = implicitly[Codec[T]] override def read(doc: T): T = doc } implicit def identityWriter[T: Codec]: Writer[T] = new Writer[T] { override type R = T override val codec = implicitly[Codec[T]] override def write(doc: T): T = doc } }
evojam/mongodb-driver-scala
src/main/scala/com/evojam/mongodb/client/package.scala
Scala
apache-2.0
663
/* * Copyright (c) 2010 Thorsten Berger <berger@informatik.uni-leipzig.de> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package gsd.buildanalysis.linux.model import org.kiama.attribution.Attribution._ import org.kiama.attribution.Attributable import org.kiama.rewriting.Rewriter._ import org.kiama.==> import gsd.common.Logging import gsd.buildanalysis.linux.{PersistenceManager, Expression} /** * Represents the AST we derive from the whole build system. 
*/ case class BNode( ntype: BNodeType, subnodes: List[BNode], // "children" collides with a field in Attributable exp: Option[Expression], details: BNodeDetails ) extends Attributable{ override def toString = this match{ case BNode( RootNode, _, _, _ ) => "[root]" case BNode( _, _, _, MakefileDetails(m) ) => "[Makefile: " + m + "]" case BNode( _, _, _, ObjectDetails(oF,_,_,_,_,_,_) ) => "[Object: " + oF + "]" case BNode( _, _, _, VariableDefinitionDetails(v) ) => "[VariableDefinition: " + v + "]" case BNode( _, _, _, VariableReferenceDetails(v) ) => "[VariableReference: " + v + "]" case BNode( _, _, _, VariableAssignmentDetails( n, v, o) ) => "[VariableAssignment: " + n + "]" case BNode( _, _, _, TempCompositeListDetails( n, p ) ) => "[TempCompositeList: " + n + "]" case BNode( IfBNode, _, e, _ ) => "[IF: " + e + "]" case _ => super.toString } } sealed abstract class BNodeType case object RootNode extends BNodeType case object IfBNode extends BNodeType case object MakefileBNode extends BNodeType case object ObjectBNode extends BNodeType case object TempCompositeListBNode extends BNodeType case object TempReferenceBNode extends BNodeType /** * other variables that define lists of objects, e.g. like: COMMON_FILES:= \\ \\ data_skip.o \\ data_extract_all.o \\ data_extract_to_stdout.o */ case object VariableDefinitionBNode extends BNodeType /** * arbitrary variables that get referenced * represents a reference to a variable in one of the list assignments, e.g. like: * obj-y += ${COMMON_FILES} test.o */ case object VariableReferenceBNode extends BNodeType /** * Similar to VariableDefinitionBNode, but catches all other variable definitions * and assignments (i.e. 
where the values aren't lists of objects) */ case object VariableAssignmentBNode extends BNodeType /** * Detailed information per BNode */ sealed abstract class BNodeDetails case object NoDetails extends BNodeDetails case class ObjectDetails( objectFile: String, built_as: Option[String], extension: String, generated: Boolean, addedByList: String, sourceFile: Option[String], fullPathToObject: Option[String] ) extends BNodeDetails //case class SourceFile( name: List[Any] ) case class MakefileDetails( makefile: String ) extends BNodeDetails case class TempReferenceDetails( variable: String, selectionSuffix: String ) extends BNodeDetails case class TempCompositeListDetails( listName: String, suffix: Option[String] ) extends BNodeDetails case class VariableReferenceDetails( varName: String ) extends BNodeDetails case class VariableDefinitionDetails( varName: String ) extends BNodeDetails case class VariableAssignmentDetails( varName: String, op: String, value: String ) extends BNodeDetails /** * Attribute grammar implementation... */ trait TreeHelper extends Logging{ /** * Attribute that returns the "Makefile scope" of nodes, i.e. the next * Makefile node in the hierarchy (without current node). */ val mfScope: BNode => BNode = attr{ case BNode( RootNode, _, _, _ ) => sys.error( "No containing Makefile found!" ) case b => b.parent[BNode] match{ case p@BNode( MakefileBNode, _, _, _ ) => p case _ => { if(b.parent==null) println("parent NULL for: " + b); b.parent[BNode]->mfScope } } } /** * Attribute that calculates possible predecessors in terms of control and * data flow. 
*/ val moveUp: BNode => List[BNode] = attr{ case BNode( RootNode, _, _, _ ) => List() case b@BNode( TempCompositeListBNode, _, _, TempCompositeListDetails( ln, _ ) ) =>{ val compositeObjects = findCompositeObjectNodes( ln, b->mfScope ) val referenceNodes = findTempReferenceNodes( ln, b->mfScope ) compositeObjects ::: referenceNodes } case b@BNode( VariableDefinitionBNode, _, _, VariableDefinitionDetails( vN ) ) =>{ trace("trying to find variable reference, var: " + vN ) scopedCollectl{ case b@BNode( VariableReferenceBNode, _, _, VariableReferenceDetails( vRN ) ) if vN == vRN => b }( b->mfScope ) } case b:BNode =>{ trace( "parent of " + node2String(b) + " is: " + node2String(b.parent[BNode]) ) b.parent[BNode] :: Nil } } // val varAssignments: BNode => List[BNode] = // attr{ // case b:BNode => b.prev[BNode] match{ // case null => b.parent[BNode]->varAssignments // case b2:BNode( t, _, _, _ ) if t!=MakefileBNode => b2->varAssignments // } // } private def node2String( b: BNode ) = b.ntype.toString + " --> " + PersistenceManager.getDetails( b ).toString def findCompositeObjectNodes( listName: String, scope: BNode ): List[BNode] ={ trace("trying to find comp. object node, list: " + listName ) scopedCollectl{ case b@BNode( ObjectBNode, _, _, ObjectDetails( oF, _, _, false, _, None, _ ) ) if oF == listName => b }( scope ) } /** * Ignore references in patterns like in crypto/Makefile * crypto_algapi-$(CONFIG_PROC_FS) += proc.o * crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y) * obj-$(CONFIG_CRYPTO_ALGAPI2) += crypto_algapi.o * * i.e. ignore the inclusion of $(list-y) in list-objs, since it's unnecessary and doesn't affect variability conditions (and causes troubles...) 
* */ def findTempReferenceNodes( name: String, scope: BNode ): List[BNode] = scopedCollectl{ case b@BNode( TempReferenceBNode, _, _, TempReferenceDetails( variable, _ ) ) if( variable == name && ( b.parent[BNode] match{ case BNode( _, _, _, TempCompositeListDetails( lN, _ ) ) if lN == variable => false case _ => true } ) ) => b }( scope ) def getSourceFile( b: BNode ) = b match{ case BNode( _, _, _, ObjectDetails( _, _, _, _, _, Some( sF ), _ ) ) => sF case _ => sys.error( "Not an ObjectBNode with an associated source file!" ) } def getMakeFile( b: Term ) = b match{ case BNode( _, _, _, MakefileDetails( mF ) ) => mF case _ => sys.error( "Not a MakefileBNode!" ) } /** * Like Kiama's collectl, but stops when it finds a makefile, i.e. the current * makefile determines the scope, i.e. the query remains in it. */ def scopedCollectl[T] (f : PartialFunction[Term,T]) : Term => List[T] = (t : Term) => { trace("# traversing " + getMakeFile(t) ) var collection = List[T]() def collect = (v : T) => { trace(" # collecting: " + v.toString ) collection = collection ::: List (v) } ( mytd( query( f andThen collect ) ) ) (t) collection } def mytd( s: => Strategy ): Strategy = attempt(s) <* visitAllTDExceptMakefiles( mytd( attempt(s) ) ) /** * Visit all children of the term, as long as they aren't Makefiles. Further, the * strategy never applies any rewriting, it just runs the given strategy for its * side-effects. 
*/ def visitAllTDExceptMakefiles(s : => Strategy): Strategy = new Strategy { def apply( t: Term ): Option[Term] = { t match { case p: Product => for (i <- 0 until p.productArity) p.productElement (i) match { case b@BNode( MakefileBNode, _, _, _ ) => ; case ct: Term => s(ct) case _ => ; } case _ => ; } Some( t ) } } // control-flow attributes val succ: BNode => Set[BNode] = attr { case BNode( RootNode, c, _, _ ) => if( c.isEmpty ) Set() else Set( c.head ) case b@BNode( IfBNode, c, _, _ ) => if( c.isEmpty ) b->following else Set( c.head ) // case b@BNode( IfBNode, c, _, _ ) => (b->following) ++ ( if( !c.isEmpty ) Set( c.head ) else Set.empty ) case b@BNode( MakefileBNode, c, _, _ ) => if( c.isEmpty ) b->following else Set( c.head ) // case b@BNode( MakefileBNode, c, _, _ ) => (b->following) ++ ( if( !c.isEmpty ) Set( c.head ) else Set.empty ) case b@BNode( TempCompositeListBNode, c, _, _ ) => if( c.isEmpty ) b->following else Set( c.head ) case b@BNode( VariableDefinitionBNode, c, _, _ ) => if( c.isEmpty ) b->following else Set( c.head ) case b => b->following } val following: BNode => Set[BNode] = attr { ( s: BNode) => s.parent match{ case b@BNode( _, _, _, _ ) if s isLast => b->following case b@BNode( _, _, _, _ ) => Set( s.next[BNode] ) // case b @ lock (_*) if s isLast => b->following // case lock (_*) => Set (s.next) case _ => Set () } } case class VarAssign( name: String /*, value: String, origin: BNode */) // case class VarUse( name: String ) val defines: BNode => Set[VarAssign] = attr { case b@BNode( _, _, _, VariableAssignmentDetails( varName, op, value ) ) => Set ( VarAssign( varName/*, value, b*/ ) ) case _ => Set () } val varOcc = """\\$\\((.+)\\)""".r val uses: BNode => Set[VarAssign] = attr { case BNode( ObjectBNode, _, _, ObjectDetails(oF,_,_,_,_,_,_) ) => varOcc.findAllIn( oF ).map{ case varOcc( v ) => VarAssign( v ) }.toSet case _ => Set () } // val previousDefinitions: BNode => Set[VarAssign] = // attr{ // // } val in: BNode => Set[VarAssign] = 
circular (Set[VarAssign]()) ( s => { uses (s) ++ (out (s) -- defines (s)) } ) val out : BNode => Set[VarAssign] = circular (Set[VarAssign]()) ( s => { (s->succ) flatMap (in) } ) // val definedBefore: BNode => Set[String] = // attr{ // // } // val in : BNode => Set[String] = // circular ( Set[String]() ) ( // (s:BNode) => { (s->uses) ++ ( (s->out) -- (s->defines) ) } // ) // // val out : BNode => Set[String] = // circular ( Set[String]() ) ( // (s:BNode) => { (s->succ) flatMap (in) } // ) // val in : BNode => Set[String] = // circular (Set[String]()) { // case s => uses (s) ++ (out (s) -- defines (s)) // } // // val out : BNode => Set[String] = // circular (Set[String]()) { // case s => (s->succ) flatMap (in) // } }
ckaestne/KBuildMiner
src/main/scala/gsd/buildanalysis/linux/model/Model.scala
Scala
gpl-3.0
11,824
package pl.touk.nussknacker.sql.db.ignite import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine.api.typed.TypedObjectDefinition import pl.touk.nussknacker.engine.api.typed.typing.Typed import pl.touk.nussknacker.sql.db.schema.TableDefinition import java.sql.{Connection, PreparedStatement, ResultSet} import scala.collection.mutable.ArrayBuffer import scala.util.Using class IgniteQueryHelper(getConnection: () => Connection) extends LazyLogging { private val tablesInSchemaQuery = """ |select t.TABLE_NAME, c.COLUMN_NAME, c.TYPE, c.AFFINITY_COLUMN |from SYS.TABLES t |join SYS.TABLE_COLUMNS c on t.TABLE_NAME = c.TABLE_NAME and t.SCHEMA_NAME = c.SCHEMA_NAME |where t.SCHEMA_NAME = ? and c.COLUMN_NAME not in ('_KEY', '_VAL') |""".stripMargin def fetchTablesMeta: Map[String, TableDefinition] = { Using.resource(getConnection()) { connection => getIgniteQueryResults(connection = connection, query = tablesInSchemaQuery, setArgs = List(_.setString(1, connection.getSchema))) { r => (r.getString("TABLE_NAME"), r.getString("COLUMN_NAME"), r.getString("TYPE"), r.getBoolean("AFFINITY_COLUMN")) }.groupBy { case (tableName, _, _, _) => tableName } .map { case (tableName, entries) => val columnTypings = entries.map { case (_, columnName, klassName, _) => columnName -> Typed.typedClass(Class.forName(klassName)) } tableName -> TableDefinition(typedObjectDefinition = TypedObjectDefinition(columnTypings)) } } } private def getIgniteQueryResults[T](connection: Connection, query: String, setArgs: List[PreparedStatement => Unit] = Nil)(f: ResultSet => T): List[T] = { Using.resource(connection.prepareStatement(query)) { statement => logger.debug(s"Executing query: $query") setArgs.foreach(setArg => setArg(statement)) val resultSet = statement.executeQuery() val arr = ArrayBuffer.empty[T] while (resultSet.next()) { arr += f(resultSet) } arr.toList } } }
TouK/nussknacker
components/sql/src/main/scala/pl/touk/nussknacker/sql/db/ignite/IgniteQueryHelper.scala
Scala
apache-2.0
2,052
package name.mikulskibartosz.parkinglot.clock import java.time.ZonedDateTime import cats.free.{Free, Inject} /** * Operations related to generating ids. */ sealed trait ClockAction[A] case class CurrentDateTime() extends ClockAction[ZonedDateTime] class ClockActions[F[_]](implicit I: Inject[ClockAction, F]) { def currentTime(): Free[F, ZonedDateTime] = Free.inject[ClockAction, F](CurrentDateTime()) } object ClockActions { implicit def idGenerationActions[F[_]](implicit I: Inject[ClockAction, F]): ClockActions[F] = new ClockActions[F] }
mikulskibartosz/clean-arch-example-parking-lot
domain/src/main/scala/name/mikulskibartosz/parkinglot/clock/ClockAction.scala
Scala
gpl-3.0
554
package com.etsy.scalding.jobs.conjecture import com.twitter.scalding.{Args, Job, Mode, SequenceFile, Tsv} import com.etsy.conjecture.scalding.evaluate.MulticlassCrossValidator import com.etsy.conjecture.scalding.train.MulticlassModelTrainer import com.etsy.conjecture.data.{MulticlassLabeledInstance, StringKeyedVector} import com.etsy.conjecture.model.MulticlassLogisticRegression import com.google.gson.Gson import cascading.tuple.Fields class AdHocMulticlassClassifier(args : Args) extends Job(args) { val input = args("input") val out_dir = args("out_dir") val folds = args.getOrElse("folds", "0").toInt val categories = args("categories").split(",").toArray val xmx = args.getOrElse("xmx", "3").toInt // Let the user configure the field names on the command line. val data_field_names = args.getOrElse("data_fields", "instance").split(",") val data_fields = data_field_names.tail.foldLeft(new Fields(data_field_names.head)) { (x,y) => x.append(new Fields(y)) } val instance_field = Symbol(args.getOrElse("instance_field", "instance")) // assumes input instances are a sequence file val instances = SequenceFile(input, data_fields).project(instance_field) val model_pipe = new MulticlassModelTrainer(args, categories) .train(instances, instance_field, 'model) model_pipe .write(SequenceFile(out_dir + "/model")) .mapTo('model -> 'json) { x : MulticlassLogisticRegression => new Gson().toJson(x) } .write(Tsv(out_dir + "/model_json")) if(folds > 0) { val eval_pred = new MulticlassCrossValidator(args, folds, categories) .crossValidateWithPredictions(instances, instance_field, 'pred) eval_pred._1 .write(Tsv(out_dir + "/xval")) eval_pred._2 .write(SequenceFile(out_dir + "/pred")) } override def config(implicit mode : Mode) = super.config ++ Map("mapred.child.java.opts" -> "-Xmx%dG".format(xmx)) }
zviri/Conjecture
src/main/scala/com/etsy/scalding/jobs/conjecture/AdHocMulticlassClassifier.scala
Scala
mit
1,899
/* * Copyright 2017 Datamountaineer. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datamountaineer.streamreactor.connect.pulsar.config import com.datamountaineer.streamreactor.connect.converters.source.{AvroConverter, BytesConverter} import org.apache.kafka.common.config.ConfigException import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec import scala.collection.JavaConverters._ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers { "PulsarSourceSetting" should { val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic" "create an instance of settings" in { val config = PulsarSourceConfig(Map( PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650", PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM $pulsarTopic WITHCONVERTER=`${classOf[AvroConverter].getCanonicalName}`", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) val settings = PulsarSourceSettings(config, 1) settings.sourcesToConverters shouldBe Map(pulsarTopic -> classOf[AvroConverter].getCanonicalName) settings.throwOnConversion shouldBe true settings.pollingTimeout shouldBe 500 settings.connection shouldBe "pulsar://localhost:6650" } "converted defaults to BytesConverter if not provided" in { val config = PulsarSourceConfig(Map( PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650", PulsarConfigConstants.KCQL_CONFIG -> "INSERT 
INTO kTopic SELECT * FROM pulsarSource", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) val settings = PulsarSourceSettings (config, 1) settings.sourcesToConverters shouldBe Map("pulsarSource" -> classOf[BytesConverter].getCanonicalName) } "throw an config exception if no kcql is set" in { intercept[ConfigException] { PulsarSourceConfig(Map( PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) } } "throw an config exception if HOSTS_CONFIG is not defined" in { intercept[ConfigException] { val config = PulsarSourceConfig(Map( PulsarConfigConstants.KCQL_CONFIG -> "INSERT INTO kTopic SELECT * FROM pulsarSource", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) PulsarSourceSettings(config, 1) } } "throw an config exception if the converter class can't be found" in { intercept[ConfigException] { PulsarSourceConfig(Map( PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM pulsarSource WITHCONVERTER=`com.non.existance.SomeConverter`", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) } } "throw an config exception if the converter settings with invalid source" in { intercept[ConfigException] { PulsarSourceConfig(Map( PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM pulsarSource WITHCONVERTER=`${classOf[AvroConverter].getCanonicalName}`", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) } } "throw an config exception if the converter topic doesn't match the KCQL settings" in { intercept[ConfigException] { PulsarSourceConfig(Map( PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO 
kTopic SELECT * FROM pulsarSource WITHCONVERTER=`${classOf[AvroConverter].getCanonicalName}`", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) } } "throw an config exception if exclusive and max tasks > 1" in { intercept[ConfigException] { val config = PulsarSourceConfig(Map( PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650", PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM $pulsarTopic WITHSUBSCRIPTION = exclusive", PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true", PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500" ).asJava) val settings = PulsarSourceSettings(config, 2) } } } }
datamountaineer/stream-reactor
kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSourceSettingsTest.scala
Scala
apache-2.0
5,319
/* * Licensed to Intel Corporation under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Intel Corporation licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.nn import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule import com.intel.analytics.bigdl.tensor.Tensor import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric import com.intel.analytics.bigdl.utils.RandomGenerator._ import com.intel.analytics.bigdl.utils.{T, Table} import scala.reflect.ClassTag class RnnCell[T : ClassTag] ( inputSize: Int = 4, hiddenSize: Int = 3, private var initMethod: InitializationMethod = Default) (implicit ev: TensorNumeric[T]) extends AbstractModule[Table, Tensor[T], T] { val parallelTable = ParallelTable[T]() val i2h = Linear[T](inputSize, hiddenSize) val h2h = Linear[T](hiddenSize, hiddenSize) parallelTable.add(i2h) parallelTable.add(h2h) val cAddTable = CAddTable[T]() def setInitMethod(initMethod: InitializationMethod): this.type = { this.initMethod = initMethod this } override def reset(): Unit = { initMethod match { case Default => parallelTable.modules.foreach( m => { val inputSize = m.asInstanceOf[Linear[T]].weight.size(1).toFloat val outputSize = m.asInstanceOf[Linear[T]].weight.size(2).toFloat val stdv = 6.0 / (inputSize + outputSize) m.asInstanceOf[Linear[T]].weight.apply1( _ => ev.fromType[Double](RNG.uniform(0, 1) 
* 2 * stdv - stdv)) m.asInstanceOf[Linear[T]].bias.apply1( _ => ev.fromType[Double](0.0)) }) case _ => throw new IllegalArgumentException(s"Unsupported initMethod type ${initMethod}") } zeroGradParameters() } override def updateOutput(input: Table): Tensor[T] = { output = cAddTable.updateOutput(parallelTable.updateOutput(input)) output } override def updateGradInput(input: Table, gradOutput: Tensor[T]): Table = { val _gradOutput = cAddTable.updateGradInput(input, gradOutput) parallelTable.updateGradInput(input, _gradOutput) } override def accGradParameters(input: Table, gradOutput: Tensor[T], scale: Double = 1.0): Unit = { parallelTable.accGradParameters(input, cAddTable.updateGradInput(input, gradOutput)) } override def updateParameters(learningRate: T): Unit = { parallelTable.updateParameters(learningRate) } override def zeroGradParameters(): Unit = { parallelTable.zeroGradParameters() } override def parameters(): (Array[Tensor[T]], Array[Tensor[T]]) = { parallelTable.parameters() } override def getParametersTable(): Table = { parallelTable.getParametersTable() } override def toString(): String = { var str = "nn.RnnCell" str } } object RnnCell { def apply[@specialized(Float, Double) T: ClassTag]( inputSize: Int = 4, hiddenSize: Int = 3) (implicit ev: TensorNumeric[T]) : RnnCell[T] = { new RnnCell[T](inputSize, hiddenSize) } }
dding3/BigDL
dl/src/main/scala/com/intel/analytics/bigdl/nn/RNN.scala
Scala
apache-2.0
3,666
package net.kinetc.biryo.actor import akka.actor.{Actor, ActorRef, Props} import akka.util.Timeout import net.kinetc.biryo.parser.WikiParser import net.kinetc.biryo.renderer.{ ASTPostProcessor, FrameRenderer, HTMLRenderer, KatexRenderer } import org.parboiled2.{ErrorFormatter, ParseError} import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future} import scala.util.{Failure, Success, Try} /** Created by KINETC on 2017-07-27. */ object MDictMaker { def props(printActor: ActorRef, framePrinterActor: ActorRef) = Props(new MDictMaker(printActor, framePrinterActor)) final case class MDictDoc( title: String, text: String, printRaw: Boolean = false ) final case class FrameDoc(title: String, text: String) case object ParseEnd } class MDictMaker(printActor: ActorRef, framePrinterActor: ActorRef) extends Actor { import FramePrinterActor._ import MDictMaker._ import PrinterActor._ implicit val ec: ExecutionContext = context.system.dispatchers.lookup("biryo-blocking-dispatcher") val katex = new KatexRenderer implicit val askTimeout = Timeout(1 minutes) val compileTimeout = Timeout(10 seconds) var sendCount = 0 def makeMDictHtml(title: String, text: String): Unit = { val parser = new WikiParser(text) val renderer = new HTMLRenderer(katex) val postProcessor = new ASTPostProcessor(title) val futureText: Future[Option[String]] = Future { parser.NamuMarkRule.run() match { case Success(result) => val postResult = postProcessor.postProcessAST(result) val compiledText = title + "\n" + renderer.generateHTML(title, postResult) + "\n</>" Some(compiledText) case Failure(e: ParseError) => println(parser.formatError(e, new ErrorFormatter(showTraces = true))) None case Failure(e) => e.printStackTrace() None } } Try(Await.result(futureText, compileTimeout.duration)) match { case Success(Some(compiledText)) => printActor ! PrintText(compiledText) case _ => printActor ! 
GetError(title, text) } } sendCount += 1 if (sendCount % 1000 == 0) { println(s"Actor ${self.path.name}: $sendCount") } def makeRawHtml(title: String, text: String): Unit = { printActor ! PrintText( title + "\n<pre>" + HTMLRenderer.escapeHTML(text) + "</pre>\n</>" ) } def makeFrameJS(title: String, text: String): Unit = { val parser = new WikiParser(text) val renderer = new FrameRenderer(katex) val postProcessor = new ASTPostProcessor(title) val futureText: Future[Option[String]] = Future { parser.NamuMarkRule.run() match { case Success(result) => val postResult = postProcessor.postProcessAST(result) val compiledText = renderer.generateHTML(title, postResult) Some(compiledText) case Failure(e: ParseError) => println(parser.formatError(e, new ErrorFormatter(showTraces = true))) None case Failure(e) => e.printStackTrace() None } } Try(Await.result(futureText, compileTimeout.duration)) match { case Success(Some(compiledText)) => framePrinterActor ! MakeJSFile(title, compiledText) case _ => printActor ! GetError(s"$title - frame", text) } } def receive = { case MDictDoc(title, text, printRaw) => { if (printRaw) makeRawHtml(title, text) else makeMDictHtml(title, text) } case FrameDoc(title, text) => makeFrameJS(title, text) case ParseEnd => printActor ! Close framePrinterActor ! CloseFPA } }
MerHS/biryo
src/main/scala/net/kinetc/biryo/actor/MDictMaker.scala
Scala
apache-2.0
3,739
package com.github.dronegator.nlp import java.io.File import com.github.dronegator.nlp.utils.Match /** * Created by cray on 9/3/16. */
dronegator/nlp
utils/src/main/scala/com/github/dronegator/nlp/match.scala
Scala
apache-2.0
140
package net.liftmodules import _root_.net.liftweb._ import http._ /** * ==FoBo Highlight API Module== * * This FoBo API module provides FoBo/Lift API components for the Highlight Toolkit Module, * but can also be used as-is, see below for setup information. * * If you are using this module via the FoBo/FoBo module see also [[net.liftmodules.fobo]] for setup information. */ package object fobohlapi { override def toString() = fobohlapi.API.toString() /** * Initiate FoBo's Highlight API in you bootstrap liftweb Boot. * * @example * {{{ * import net.liftmodules.{fobohlapi => fobo} * : * fobo.API.init=fobo.API.[API Object] * }}} * '''Note:''' To see available objects click on the round trait icon in the header of this page. */ sealed trait API object API extends API { //we don't actually need to store the objects (for now) so lets just save //the object name, we can easily change this if we need to private type Store = List[String] //List[API] private var store: Store = List() def init: Store = store def init_=(t: API): Store = { store = if (store contains t.toString) store else t.toString :: store store } override def toString() = "fobohlapi.API = " + store.toString() /** * Enable usage of FoBo's Highlight API for Highlight version 9&#8228;X&#8228;X in your bootstrap liftweb Boot. * @version 9.X.X * * @example * * {{{ * import net.liftmodules.{fobohlapi => fobo} * : * fobo.API.init=fobo.API.HighlightJS9 * }}} * */ case object HighlightJS9 extends API { FoBoAPI.init } } private object FoBoAPI { lazy val init: Unit = { LiftRules.addToPackages("net.liftmodules.fobohl") } } }
karma4u101/FoBo
Highlight/HighlightJS-API/src/main/scala/net/liftmodules/fobohl/fobohlapi.scala
Scala
apache-2.0
1,838
class I0 { super.+("") }
som-snytt/dotty
tests/fuzzy/RE-341cc574ffab7ad1cca7c0683c727f4cd8ca9f68.scala
Scala
apache-2.0
25
package ml.sparkling.graph.operators.algorithms.aproximation import java.util import java.util.function.BiConsumer import ml.sparkling.graph.api.operators.IterativeComputation.{VertexPredicate, _} import ml.sparkling.graph.api.operators.algorithms.coarsening.CoarseningAlgorithm.Component import ml.sparkling.graph.api.operators.algorithms.shortestpaths.ShortestPathsTypes.{JDouble, JLong, JMap} import ml.sparkling.graph.operators.algorithms.coarsening.labelpropagation.LPCoarsening import ml.sparkling.graph.operators.algorithms.shortestpaths.pathprocessors.{PathProcessor, SingleVertexProcessor} import ml.sparkling.graph.operators.algorithms.shortestpaths.pathprocessors.fastutils.{FastUtilWithDistance, FastUtilWithPath} import ml.sparkling.graph.operators.predicates.{AllPathPredicate, ByIdPredicate, ByIdsPredicate} import org.apache.spark.graphx.{EdgeTriplet, Graph, _} import ml.sparkling.graph.operators.algorithms.shortestpaths.ShortestPathsAlgorithm import ml.sparkling.graph.operators.algorithms.shortestpaths.pathprocessors.fastutils.FastUtilWithDistance.DataMap import org.apache.log4j.Logger import org.apache.spark.rdd.RDD import scala.collection.mutable.ListBuffer import scala.collection.JavaConversions._ import scala.reflect.ClassTag /** * Created by Roman Bartusiak <riomus@gmail.com> on 07.02.17. 
*/ case object ApproximatedShortestPathsAlgorithm { val logger=Logger.getLogger(ApproximatedShortestPathsAlgorithm.getClass()) type PathModifier=(VertexId,VertexId,JDouble)=>JDouble val defaultNewPath:(JDouble=>JDouble)= (path:JDouble)=>3*path+2; val defaultPathModifier:PathModifier= (fromVertex:VertexId, toVertex:VertexId, path:JDouble)=>defaultNewPath(path) def computeShortestPathsLengthsUsing[VD:ClassTag, ED: ClassTag](graph: Graph[VD, ED], vertexPredicate: SimpleVertexPredicate= AllPathPredicate, treatAsUndirected: Boolean = true,modifier:PathModifier=defaultPathModifier)(implicit num: Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED] = { val coarsedGraph=LPCoarsening.coarse(graph,treatAsUndirected) computeShortestPathsLengthsWithoutCoarsingUsing(graph,coarsedGraph,vertexPredicate,treatAsUndirected,modifier) } def computeShortestPathsLengthsWithoutCoarsingUsing[VD:ClassTag, ED: ClassTag](graph: Graph[VD, ED], coarsedGraph: Graph[Component, ED], vertexPredicate: SimpleVertexPredicate= AllPathPredicate, treatAsUndirected: Boolean = true,modifier:PathModifier=defaultPathModifier)(implicit num: Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED] = { val newVertexPredicate:VertexPredicate[Component]=AnyMatchingComponentPredicate(vertexPredicate); val coarsedShortestPaths: Graph[DataMap, ED] =ShortestPathsAlgorithm.computeShortestPathsLengths(coarsedGraph,newVertexPredicate,treatAsUndirected) aproximatePaths(graph, coarsedGraph, coarsedShortestPaths,modifier,vertexPredicate,treatAsUndirected) } def computeShortestPathsForDirectoryComputationUsing[VD:ClassTag, ED: ClassTag](graph: Graph[VD, ED], coarsedGraph: Graph[Component, ED], vertexPredicate: SimpleVertexPredicate= AllPathPredicate, treatAsUndirected: Boolean = true, modifier:PathModifier=defaultPathModifier)(implicit num: Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED] = { val newVertexPredicate:VertexPredicate[Component]=SimpleWrapper(vertexPredicate) val newIds: Set[VertexId] 
=coarsedGraph.vertices.filter{ case (vid, component)=>vertexPredicate(vid) }.treeAggregate[Set[VertexId]](Set())(seqOp=(agg, id)=>agg++id._2,combOp= (agg1, agg2)=>agg1++agg2) val coarsedShortestPaths: Graph[DataMap, ED] =ShortestPathsAlgorithm.computeShortestPathsLengths(coarsedGraph,newVertexPredicate,treatAsUndirected) aproximatePaths(graph, coarsedGraph, coarsedShortestPaths,modifier,vertexPredicate=ByIdsPredicate(newIds),treatAsUndirected=treatAsUndirected) } def aproximatePaths[ED: ClassTag, VD:ClassTag](graph: Graph[VD, ED], coarsedGraph: Graph[Component, ED], coarsedShortestPaths: Graph[DataMap, ED], modifier:PathModifier=defaultPathModifier, vertexPredicate: SimpleVertexPredicate= AllPathPredicate, treatAsUndirected:Boolean)(implicit num:Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED] = { logger.info("Aproximating shortes paths"); val modifiedPaths = coarsedShortestPaths.vertices.mapPartitions(iter=>iter.map{ case (vertexId: VertexId, paths: DataMap) => { paths.forEach(new BiConsumer[JLong,JDouble] { override def accept(t: JLong, u: JDouble): Unit = { paths.put(t,modifier(vertexId,t,u)) } }); paths.remove(vertexId) (vertexId,paths) } }) val fromMapped: RDD[(VertexId, (List[VertexId], JDouble))] =modifiedPaths.join(coarsedGraph.vertices,100).mapPartitions( iter=>iter.flatMap{ case (_,(data,componentFrom) )=>{ data.map{ case (to,len)=>(to.toLong,(componentFrom,len)) } } } ) val toJoined: RDD[(VertexId, ((List[VertexId], JDouble), List[VertexId]))] =fromMapped.join(coarsedGraph.vertices) val toMapped: RDD[(VertexId, (List[VertexId], JDouble))] =toJoined.mapPartitions((iter)=>{ iter.flatMap{ case (_,((componentFrom,len),componentTo))=>{ componentFrom.map( (fromId)=>(fromId,(componentTo,len)) ) } } }) val toMappedGroups=toMapped.aggregateByKey(ListBuffer[(List[VertexId], JDouble)]())( (agg,data)=>{agg+=data;agg}, (agg1,agg2)=>{agg1++=agg2;agg1} ).mapPartitions((iter: Iterator[(VertexId, ListBuffer[(List[VertexId], JDouble)])]) =>{ iter.map{ case 
(from,data)=>(from,data.flatMap{ case (datas,len)=>datas.map((id)=>(id,len)) }) } }) val outGraph=Graph(toMappedGroups, graph.edges,ListBuffer[(VertexId, JDouble)]()) val one:JDouble=1.0 val two:JDouble=2.0 val neighboursExchanged: RDD[(VertexId,ListBuffer[VertexId])] =outGraph.edges .mapPartitions((data)=>{ data.flatMap((edge)=>{ val toSrc= if(vertexPredicate(edge.dstId)) Iterable((edge.srcId,edge.dstId)) else Iterable() val toDst= if(vertexPredicate(edge.srcId) && treatAsUndirected) Iterable((edge.dstId,edge.srcId)) else Iterable() toSrc++toDst }) }) .aggregateByKey[ListBuffer[VertexId]](ListBuffer[VertexId]())((agg,e)=>{agg+=e;agg},(agg1,agg2)=>{agg1++=agg2;agg1}) val graphWithNeighbours=outGraph.outerJoinVertices(neighboursExchanged) { case (_, _, Some(newData)) => newData case (_, _, None) => ListBuffer[VertexId]() } val secondLevelNeighbours: RDD[(VertexId, ListBuffer[VertexId])] =graphWithNeighbours.triplets.mapPartitions( (data)=>{ data.flatMap((edge)=>{ val toSrc= Iterable((edge.srcId,edge.dstAttr)) val toDst= if(treatAsUndirected) Iterable((edge.dstId,edge.srcAttr)) else Iterable() toSrc++toDst }) } ).aggregateByKey[ListBuffer[VertexId]](ListBuffer[VertexId]())((agg, e)=>{agg++=e;agg}, (agg1, agg2)=>{agg1++=agg2;agg1}) val neighbours=neighboursExchanged.fullOuterJoin(secondLevelNeighbours).map{ case (vId,(firstOpt,secondOpt))=>(vId,(firstOpt.map(d=>d.map(id=>(id,one)))::(secondOpt.map(_.map(id=>(id,two))))::Nil).flatten.flatten.filter(_._1!=vId)) } val out: Graph[ListBuffer[(VertexId, JDouble)], ED] =outGraph.joinVertices(neighbours){ case (_,data,newData)=>data++newData } out.mapVertices{ case (id,data)=> val out=data.groupBy(_._1).mapValues(l=>l.map(_._2).min).map(identity) if(vertexPredicate(id)){ out + (id -> 0.0) } else{ out } } } def computeSingleShortestPathsLengths[VD:ClassTag, ED: ClassTag](graph: Graph[VD, ED], vertexId: VertexId, treatAsUndirected: Boolean = true, modifier:PathModifier=defaultPathModifier)(implicit num: 
Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED]= { computeShortestPathsLengthsUsing(graph,ByIdPredicate(vertexId),treatAsUndirected,modifier=defaultPathModifier) } def computeShortestPaths[VD:ClassTag, ED: ClassTag](graph: Graph[VD, ED], vertexPredicate: SimpleVertexPredicate = AllPathPredicate, treatAsUndirected: Boolean = true,modifier:PathModifier=defaultPathModifier)(implicit num: Numeric[ED]) = { computeShortestPathsLengthsUsing(graph,vertexPredicate,treatAsUndirected,modifier=defaultPathModifier) } def computeShortestPathsLengthsIterativeUsing[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED], coarsedGraph: Graph[Component, ED], bucketSizeProvider: BucketSizeProvider[Component,ED], treatAsUndirected: Boolean = true,modifier:PathModifier=defaultPathModifier)(implicit num: Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED] = { val coarsedShortestPaths: Graph[DataMap, ED] =ShortestPathsAlgorithm.computeShortestPathsLengthsIterative[Component,ED](coarsedGraph,bucketSizeProvider,treatAsUndirected) aproximatePaths(graph, coarsedGraph, coarsedShortestPaths,modifier,treatAsUndirected=treatAsUndirected) } def computeShortestPathsLengthsIterative[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED], bucketSizeProvider: BucketSizeProvider[Component,ED], treatAsUndirected: Boolean = true,modifier:PathModifier=defaultPathModifier)(implicit num: Numeric[ED]):Graph[Iterable[(VertexId,JDouble)],ED] = { val coarsedGraph=LPCoarsening.coarse(graph,treatAsUndirected) computeShortestPathsLengthsIterativeUsing(graph,coarsedGraph,bucketSizeProvider,treatAsUndirected) } def computeAPSPToDirectory[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED], outDirectory: String, treatAsUndirected: Boolean, bucketSize:Long)(implicit num: Numeric[ED]): Unit = { val coarsedGraph=LPCoarsening.coarse(graph,treatAsUndirected) logger.info(s"Coarsed graph has size ${coarsedGraph.vertices.count()} in comparision to ${graph.vertices.count()}") val verticesGroups = coarsedGraph.vertices.map(_._1).sortBy(k 
=> k).collect().grouped(bucketSize.toInt).zipWithIndex.toList val numberOfIterations=verticesGroups.length; (verticesGroups).foreach{ case (group,iteration) => { logger.info(s"Approximated Shortest Paths iteration ${iteration+1} from ${numberOfIterations}") val shortestPaths = ApproximatedShortestPathsAlgorithm.computeShortestPathsForDirectoryComputationUsing(graph,coarsedGraph, new ByIdsPredicate(group.toSet), treatAsUndirected) val joinedGraph = graph .outerJoinVertices(shortestPaths.vertices)((vId, data, newData) => (data, newData.getOrElse(Iterable()))) joinedGraph.vertices.values.map { case (vertex, data) => { val dataStr = data .map{ case (key,value)=>s"${key}:${value}" }.mkString(";") s"$vertex;$dataStr" } }.saveAsTextFile(s"${outDirectory}/from_${group.head}") shortestPaths.unpersist(blocking = false) } } graph.vertices.map(t => List(t._1, t._2).mkString(";")).saveAsTextFile(s"${outDirectory}/index") } }
sparkling-graph/sparkling-graph
operators/src/main/scala/ml/sparkling/graph/operators/algorithms/aproximation/ApproximatedShortestPathsAlgorithm.scala
Scala
bsd-2-clause
11,061
/* * Copyright 2014–2018 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.impl.datasources import slamdata.Predef.{Stream => _, _} import quasar.Condition import quasar.api.MockSchemaConfig import quasar.api.datasource._ import quasar.api.datasource.DatasourceError._ import quasar.api.resource.ResourcePath import quasar.contrib.cats.stateT._ import quasar.contrib.cats.writerT._ import quasar.contrib.cats.effect.stateT.catsStateTEffect import quasar.contrib.fs2.stream._ import quasar.contrib.scalaz.MonadState_ import quasar.impl.storage.PureIndexedStore import DefaultDatasourcesSpec._ import java.io.IOException import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.FiniteDuration import cats.data.{StateT, WriterT} import cats.effect.IO import cats.instances.list._ import cats.syntax.applicative._ import cats.syntax.flatMap._ import eu.timepit.refined.auto._ import fs2.Stream import matryoshka.data.Fix import monocle.macros.Lenses import scalaz.{-\\/, \\/-, IMap, ISet, Monoid} import scalaz.std.anyVal._ import scalaz.std.string._ import scalaz.syntax.std.option._ import shims._ final class DefaultDatasourcesSpec extends DatasourcesSpec[DefaultM, Stream[DefaultM, ?], Int, String, MockSchemaConfig.type] { val monadIdx: MonadState_[DefaultM, Int] = MonadState_.zoom[DefaultM](DefaultState.idx) implicit val monadInitd: MonadState_[DefaultM, ISet[Int]] = MonadState_.zoom[DefaultM](DefaultState.initd) implicit val 
monadRefs: MonadState_[DefaultM, Refs] = MonadState_.zoom[DefaultM](DefaultState.refs) def datasources = mkDatasources(IMap.empty, c => c)(_ => None) def sanitizedDatasources = mkDatasources(IMap.empty, _ => "sanitized")(_ => None) def supportedType = DatasourceType("test-type", 3L) def validConfigs = ("one", "two") val schemaConfig = MockSchemaConfig def gatherMultiple[A](as: Stream[DefaultM, A]) = as.compile.toList def mkDatasources( errs: IMap[Int, Exception], sanitize: String => String)( init: String => Option[InitializationError[String]]) : Datasources[DefaultM, Stream[DefaultM, ?], Int, String, MockSchemaConfig.type] = { val freshId = for { i <- monadIdx.get _ <- monadIdx.put(i + 1) } yield i val refs = PureIndexedStore[DefaultM, Int, DatasourceRef[String]] val errors = DatasourceErrors.fromMap(errs.pure[DefaultM]) val manager = MockDatasourceManager[Int, String, Fix, DefaultM, Stream[DefaultM, ?], Unit]( ISet.singleton(supportedType), init, sanitize, ()) val schema = new ResourceSchema[DefaultM, MockSchemaConfig.type, (ResourcePath, Unit)] { def apply(c: MockSchemaConfig.type, r: (ResourcePath, Unit), d: FiniteDuration) = MockSchemaConfig.MockSchema.some.pure[DefaultM] } DefaultDatasources(freshId, refs, errors, manager, schema) } "implementation specific" >> { "add datasource" >> { "initializes datasource" >> { val addB = refB >>= datasources.addDatasource addB.runEmpty.value.map(_ must beLike { case (s, \\/-(i)) => s.initd.member(i) must beTrue }).unsafeRunSync() } "doesn't store config when initialization fails" >> { val err3 = MalformedConfiguration(supportedType, "three", "3 isn't a config!") val ds = mkDatasources(IMap.empty, _ => "") { case "three" => Some(err3) case _ => None } val add = refA .map(DatasourceRef.config.set("three")) .flatMap(ds.addDatasource) add.runEmpty.value.map(_ must beLike { case (s, -\\/(e)) => s.refs.isEmpty must beTrue s.initd.isEmpty must beTrue (e: DatasourceError[Int, String]) must_= err3 }).unsafeRunSync() } } "lookup 
status" >> { "includes errors" >> { val errs = IMap[Int, Exception](1 -> new IOException()) val ds = mkDatasources(errs, _ => "")(_ => None) val lbar = for { a <- refA _ <- ds.addDatasource(a) b <- refB _ <- ds.addDatasource(b) r <- ds.datasourceStatus(1) } yield r lbar.runEmptyA.value.map(_ must beLike { case \\/-(Condition.Abnormal(ex)) => ex must beAnInstanceOf[IOException] }).unsafeRunSync() } } "all metadata" >> { "includes errors" >> { val errs = IMap[Int, Exception](0 -> new IOException()) val ds = mkDatasources(errs, _ => "")(_ => None) val lbar = for { a <- refA _ <- ds.addDatasource(a) b <- refB _ <- ds.addDatasource(b) g <- ds.allDatasourceMetadata l <- gatherMultiple(g) m = IMap.fromList(l) } yield m lbar.runEmptyA.value.map(_.lookup(0) must beLike { case Some(DatasourceMeta(t, _, Condition.Abnormal(ex))) => (t must_= supportedType) and (ex must beAnInstanceOf[IOException]) }).unsafeRunSync() } } "remove datasource" >> { "shutdown existing" >> { val sdown = for { a <- refA _ <- datasources.addDatasource(a) cond <- datasources.removeDatasource(0) } yield cond sdown.runEmpty.run.map(_ must beLike { case (sdowns, (s, Condition.Normal())) => s.initd must_= ISet.empty sdowns must_= List(0) }).unsafeRunSync() } } "replace datasource" >> { "updates manager" >> { val replaced = for { a <- refA b <- refB r <- datasources.addDatasource(a) i = r.toOption.get c <- datasources.replaceDatasource(i, b) } yield c replaced.runEmpty.run.map(_ must beLike { case (sdowns, (s, Condition.Normal())) => s.initd must_= ISet.singleton(0) sdowns must_= List(0) }).unsafeRunSync() } "doesn't update manager when only name changed" >> { val renamed = for { a <- refA n <- randomName b = DatasourceRef.name.set(n)(a) r <- datasources.addDatasource(a) i = r.toOption.get c <- datasources.replaceDatasource(i, b) } yield c renamed.runEmpty.run.map(_ must beLike { case (sdowns, (s, Condition.Normal())) => s.initd must_= ISet.singleton(0) sdowns must_= Nil }).unsafeRunSync() } } "sanitize 
config" >> { "ref is sanitized" >> { val ds = for { a <- refA _ <- sanitizedDatasources.addDatasource(a) l <- sanitizedDatasources.datasourceRef(0) } yield l ds.runEmpty.run.map(x => x must beLike { case (_, (_, \\/-(ref))) => { ref.config must_= "sanitized" } }).unsafeRunSync() } } } } object DefaultDatasourcesSpec { import MockDatasourceManager.{Initialized, Shutdowns} type Refs = IMap[Int, DatasourceRef[String]] type DefaultM[A] = StateT[WriterT[IO, Shutdowns[Int], ?], DefaultState, A] @Lenses final case class DefaultState(idx: Int, initd: Initialized[Int], refs: Refs) object DefaultState { implicit val monoid: Monoid[DefaultState] = new Monoid[DefaultState] { val zero = DefaultState(0, ISet.empty, IMap.empty) def append(x: DefaultState, y: => DefaultState) = DefaultState( x.idx + y.idx, x.initd union y.initd, x.refs union y.refs) } } }
slamdata/slamengine
impl/src/test/scala/quasar/impl/datasources/DefaultDatasourcesSpec.scala
Scala
apache-2.0
8,220
package monocle.std

import monocle.MonocleSuite
import monocle.std.{validated => validatedOptics}
import cats.syntax.validated._

/** Exercises the Prisms provided by [[monocle.std.validated]] for cats' `Validated`. */
class ValidatedExample extends MonocleSuite {

  test("success defines a Prism that can get or set the underlying value of a Success instance") {
    val success = validatedOptics.success
    // getOption yields the wrapped value only on the Valid side.
    success.getOption(123.valid) shouldEqual Some(123)
    success.getOption("abc".invalid) shouldEqual None
    // set replaces the value of a Valid, and leaves an Invalid untouched.
    success.set(555)(123.valid) shouldEqual 555.valid
    success.set(123)("abc".invalid) shouldEqual "abc".invalid
  }

  test("failure defines a Prism that can modify the underlying value of a Failure instance") {
    // modify only fires on the Invalid side.
    validatedOptics.failure[String, Int].modify(_.reverse)("abc".invalid) shouldEqual "cba".invalid
  }
}
aoiroaoino/Monocle
example/src/test/scala/monocle/std/ValidatedExample.scala
Scala
mit
743
/* Author: Kristal Curtis */
package siren

import it.unimi.dsi.fastutil.longs.LongList
import it.unimi.dsi.fastutil.longs.LongArrayList

/**
 * Union-find (disjoint-set) structure over a contiguous range of genome
 * positions; `range` is inclusive on both endpoints. Backing stores are
 * indexed by `toIndex(pos)` in [0, rangeLen).
 *
 * Fix: `find` is now iterative with two-pass path compression. The previous
 * recursive implementation (`parent(v) = find(parent(v))`) was not
 * tail-recursive and overflowed the stack on long parent chains (a problem
 * the original author had already flagged in a comment).
 */
class UnionFindGridDiagonal(range: (Long, Long)) extends UnionFindAbstract {
  val rangeLen = range._2 - range._1 + 1

  // Union Find data structures
  val rank = Array.fill(rangeLen.toInt)(0) // can be int
  val clusterSize = Array.fill(rangeLen.toInt)(0) // can be int because a cluster size will in all likelihood be < 2B
  var totalClusters = 0
  // needs to be long b/c you're storing cluster ids, which are positions of a member in the cluster
  var nonTrivialClusters = List[Long]()
  var nonTrivialMembers = scala.collection.mutable.Map[Long, LongArrayList]()

  // parent/firstMember hold positions (Longs), hence LongArrayList rather than Array[Int].
  val parent = new LongArrayList(rangeLen.toInt)
  val firstMember = new LongArrayList(rangeLen.toInt)
  var i = 0
  while (i < rangeLen) {
    parent.add(i, range._1 + i) // every position starts as its own root
    firstMember.add(i, range._1 + i)
    i += 1
  }

  // Methods
  override def size = rangeLen

  override def getNonTrivialMembers = nonTrivialMembers

  /** Converts a position in [range._1, range._2] to an index into the backing stores. */
  def toIndex(pos: Long): Int = {
    (pos - range._1).toInt
  }

  /**
   * Returns the representative (root) position of v's cluster.
   * Iterative, with full path compression, so it cannot overflow the stack.
   */
  override def find(v: Long): Long = {
    // Pass 1: walk up to the root.
    var root = v
    while (root != parent.getLong(toIndex(root))) {
      root = parent.getLong(toIndex(root))
    }
    // Pass 2: path compression — repoint every node on the walk at the root.
    var cur = v
    while (cur != root) {
      val next = parent.getLong(toIndex(cur))
      parent.set(toIndex(cur), root)
      cur = next
    }
    root
  }

  /** Merges the clusters containing v and w, using union by rank. */
  override def union(v: Long, w: Long) = {
    val x = find(v)
    val y = find(w)
    if (rank(toIndex(x)) > rank(toIndex(y))) {
      parent.set(toIndex(y), x)
    } else {
      parent.set(toIndex(x), y)
      if (rank(toIndex(y)) == rank(toIndex(x)))
        rank(toIndex(y)) += 1
    }
  }

  // assumes nontrivial cluster size is 2
  override def findClusters(readLen: Int, containsN: (Array[Byte] => Boolean)) = {
    var pos = range._1
    val posStr = new Array[Byte](readLen) // reused buffer for each substring read
    while (pos <= range._2 - readLen) {
      GenomeLoader.genome.getSubstring(pos, pos + readLen, posStr)
      // Positions whose read-length substring contains an 'N' are skipped as invalid.
      if (!containsN(posStr)) {
        val p = find(pos)
        clusterSize(toIndex(p)) += 1
        if (clusterSize(toIndex(p)) == 1) {
          totalClusters += 1
          firstMember.set(toIndex(p), pos) // Remember in case cluster becomes non-trivial
        } else if (clusterSize(toIndex(p)) == 2) {
          // Just found a new non-trivial cluster (with more than one element)
          nonTrivialClusters = p :: nonTrivialClusters
          val l = new LongArrayList
          l.add(pos)
          l.add(firstMember.getLong(toIndex(p)))
          nonTrivialMembers += ((p, l))
        } else if (clusterSize(toIndex(p)) > 2) {
          val members = nonTrivialMembers.get(p).get
          members.add(pos)
        }
      }
      pos += 1
    }
    // Largest clusters first.
    nonTrivialClusters = nonTrivialClusters.sortWith((c1, c2) => clusterSize(toIndex(c1)) > clusterSize(toIndex(c2)))
  }

  // NOTE(review): both stats methods divide by nonTrivialClusters.size; with no
  // non-trivial clusters the mean prints as NaN (Double division) — presumably acceptable.
  override def printClusterStats(validPositions: Long) = {
    println("Valid positions: " + validPositions)
    println("Total clusters: " + totalClusters)
    println("Non-trivial clusters: " + nonTrivialClusters.size)
    val nonTrivialPositions = validPositions - (totalClusters - nonTrivialClusters.size)
    println("Positions in non-trivial clusters: " + nonTrivialPositions)
    println("Mean size of non-trivial clusters: " + nonTrivialPositions / nonTrivialClusters.size.toDouble)
  }

  override def getStats(validPositions: Long): String = {
    val nonTrivialPositions = validPositions - (totalClusters - nonTrivialClusters.size)
    "Valid positions: " + validPositions + "\n" +
    "Total clusters: " + totalClusters + "\n" +
    "Non-trivial clusters: " + nonTrivialClusters.size + "\n" +
    "Positions in non-trivial clusters: " + nonTrivialPositions + "\n" +
    "Mean size of non-trivial clusters: " + (nonTrivialPositions / nonTrivialClusters.size.toDouble) + "\n"
  }
}
fnothaft/siren-release
src/main/scala/siren/UnionFindGridDiagonal.scala
Scala
bsd-2-clause
4,242
// IntelliJ Scala plugin resolve-test fixture: the inline `/* file: Predef */`
// marker asserts that `Manifest` resolves to the alias declared in scala.Predef.
// NOTE(review): the marker comment is consumed by the test harness — do not edit it.
println(classOf[/* file: Predef */ Manifest[String]])
katejim/intellij-scala
testdata/resolve2/predef/element/TypeAlias.scala
Scala
apache-2.0
54
package org.aprsdroid.app

import _root_.android.app.ListActivity
import _root_.android.os.Bundle
import _root_.android.view.Window

/**
 * ListActivity base class that requests the window progress features and
 * implements the UIHelper loading callbacks by toggling the indeterminate
 * progress indicator.
 */
class LoadingListActivity extends ListActivity with UIHelper {
	override def onCreate(savedInstanceState: Bundle): Unit = {
		super.onCreate(savedInstanceState)
		// Window features must be requested before a content view is set.
		Seq(Window.FEATURE_INDETERMINATE_PROGRESS, Window.FEATURE_PROGRESS)
			.foreach(requestWindowFeature)
	}

	// Show/hide the spinner while background loading is in flight.
	override def onStartLoading(): Unit = setProgressBarIndeterminateVisibility(true)
	override def onStopLoading(): Unit = setProgressBarIndeterminateVisibility(false)

	override def onResume(): Unit = {
		super.onResume()
		setKeepScreenOn()
		setVolumeControls()
	}
}
ge0rg/aprsdroid
src/LoadingListActivity.scala
Scala
gpl-2.0
669
package it.polimi.genomics.core.DataStructures.MetadataCondition

/**
 * Metadata condition satisfied exactly when the wrapped `predicate`
 * is not satisfied (logical negation).
 */
case class NOT(predicate: MetadataCondition) extends MetadataCondition
DEIB-GECO/GMQL
GMQL-Core/src/main/scala/it/polimi/genomics/core/DataStructures/MetadataCondition/NOT.scala
Scala
apache-2.0
194
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala
package math

import java.util.Comparator

import scala.language.higherKinds

/** A trait for representing equivalence relations.  It is important to
 *  distinguish between a type that can be compared for equality or
 *  equivalence and a representation of equivalence on some type. This
 *  trait is for representing the latter.
 *
 *  An [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]
 *  is a binary relation on a type. This relation is exposed as
 *  the `equiv` method of the `Equiv` trait.  The relation must be:
 *
 *    1. reflexive: `equiv(x, x) == true` for any x of type `T`.
 *    1. symmetric: `equiv(x, y) == equiv(y, x)` for any `x` and `y` of type `T`.
 *    1. transitive: if `equiv(x, y) == true` and `equiv(y, z) == true`, then
 *       `equiv(x, z) == true` for any `x`, `y`, and `z` of type `T`.
 */
trait Equiv[T] extends Any with Serializable {
  /** Returns `true` iff `x` is equivalent to `y`. */
  def equiv(x: T, y: T): Boolean
}

trait LowPriorityEquiv {
  self: Equiv.type =>

  /**
   * @deprecated since 2.13.0. This implicit universal `Equiv` instance allows accidentally
   * comparing instances of types for which equality isn't well-defined or implemented.
   * (For example, it does not make sense to compare two `Function1` instances.)
   *
   * Use `Equiv.universal` explicitly instead. If you really want an implicit universal `Equiv` instance
   * despite the potential problems, consider `implicit def universalEquiv[T]: Equiv[T] = universal[T]`.
   */
  @deprecated("Use explicit Equiv.universal instead. See Scaladoc entry for more information: " +
    "https://www.scala-lang.org/api/2.13.0/scala/math/Equiv$.html#universalEquiv[T]:scala.math.Equiv[T]",
    since = "2.13.0")
  implicit def universalEquiv[T] : Equiv[T] = universal[T]
}

object Equiv extends LowPriorityEquiv {
  // Reference identity (`eq`) for AnyRef types.
  def reference[T <: AnyRef]: Equiv[T] = { _ eq _ }
  // Universal equality (`==`); prefer this explicitly over the deprecated implicit.
  def universal[T]: Equiv[T] = { _ == _ }
  def fromComparator[T](cmp: Comparator[T]): Equiv[T] = { (x, y) => cmp.compare(x, y) == 0 }
  def fromFunction[T](cmp: (T, T) => Boolean): Equiv[T] = { (x, y) => cmp(x, y) }
  def by[T, S: Equiv](f: T => S): Equiv[T] = ((x, y) => implicitly[Equiv[S]].equiv(f(x), f(y)))

  @inline def apply[T: Equiv]: Equiv[T] = implicitly[Equiv[T]]

  /* copied from Ordering */
  // Seeds keep the wrapper instances' hashCodes distinct from the wrapped Equiv's.
  private final val optionSeed = 43
  private final val iterableSeed = 47

  private final class IterableEquiv[CC[X] <: Iterable[X], T](private val eqv: Equiv[T]) extends Equiv[CC[T]] {
    // Element-wise equivalence; also requires both iterators to be exhausted together.
    def equiv(x: CC[T], y: CC[T]): Boolean = {
      val xe = x.iterator
      val ye = y.iterator

      while (xe.hasNext && ye.hasNext) {
        if (!eqv.equiv(xe.next(), ye.next())) return false
      }

      xe.hasNext == ye.hasNext
    }

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: IterableEquiv[CC, T] => this.eqv == that.eqv
      case _ => false
    }
    override def hashCode(): Int = eqv.hashCode() * iterableSeed
  }

  trait ExtraImplicits {
    /** Not in the standard scope due to the potential for divergence:
     *  For instance `implicitly[Equiv[Any]]` diverges in its presence.
     */
    implicit def seqEquiv[CC[X] <: scala.collection.Seq[X], T](implicit eqv: Equiv[T]): Equiv[CC[T]] =
      new IterableEquiv[CC, T](eqv)

    implicit def sortedSetEquiv[CC[X] <: scala.collection.SortedSet[X], T](implicit eqv: Equiv[T]): Equiv[CC[T]] =
      new IterableEquiv[CC, T](eqv)
  }

  /** An object containing implicits which are not in the default scope. */
  object Implicits extends ExtraImplicits { }

  implicit object Unit extends Equiv[Unit] {
    def equiv(x: Unit, y: Unit): Boolean = true
  }

  implicit object Boolean extends Equiv[Boolean] {
    def equiv(x: Boolean, y: Boolean): Boolean = x == y
  }

  implicit object Byte extends Equiv[Byte] {
    def equiv(x: Byte, y: Byte): Boolean = x == y
  }

  implicit object Char extends Equiv[Char] {
    def equiv(x: Char, y: Char): Boolean = x == y
  }

  implicit object Short extends Equiv[Short] {
    def equiv(x: Short, y: Short): Boolean = x == y
  }

  implicit object Int extends Equiv[Int] {
    def equiv(x: Int, y: Int): Boolean = x == y
  }

  implicit object Long extends Equiv[Long] {
    def equiv(x: Long, y: Long): Boolean = x == y
  }

  /** `Equiv`s for `Float`s.
   *
   * @define floatEquiv Because the behaviour of `Float`s specified by IEEE is
   *                    not consistent with behaviors required of an equivalence
   *                    relation for `NaN` (it is not reflexive), there are two
   *                    equivalences defined for `Float`: `StrictEquiv`, which
   *                    is reflexive, and `IeeeEquiv`, which is consistent
   *                    with IEEE spec and floating point operations defined in
   *                    [[scala.math]].
   */
  object Float {
    /** An equivalence for `Float`s which is reflexive (treats all `NaN`s
     *  as equivalent), and treats `-0.0` and `0.0` as not equivalent; it
     *  behaves the same as [[java.lang.Float#compare]].
     *
     *  $floatEquiv
     *
     *  This equivalence may be preferable for collections.
     *
     *  @see [[IeeeEquiv]]
     */
    trait StrictEquiv extends Equiv[Float] {
      def equiv(x: Float, y: Float): Boolean = java.lang.Float.compare(x, y) == 0
    }
    implicit object StrictEquiv extends StrictEquiv

    /** An equivalence for `Float`s which is consistent with IEEE specifications.
     *
     *  $floatEquiv
     *
     *  This equivalence may be preferable for numeric contexts.
     *
     *  @see [[StrictEquiv]]
     */
    trait IeeeEquiv extends Equiv[Float] {
      override def equiv(x: Float, y: Float): Boolean = x == y
    }
    implicit object IeeeEquiv extends IeeeEquiv
  }

  @deprecated("There are multiple equivalences for Floats (Equiv.Float.TotalEquiv, " +
    "Equiv.Float.IeeeEquiv). Specify one by using a local import, assigning an implicit val, or passing it " +
    "explicitly. See the documentation for details.", since = "2.13.0")
  implicit object DeprecatedFloatEquiv extends Float.StrictEquiv

  /** `Equiv`s for `Double`s.
   *
   * @define doubleEquiv Because the behaviour of `Double`s specified by IEEE is
   *                     not consistent with behaviors required of an equivalence
   *                     relation for `NaN` (it is not reflexive), there are two
   *                     equivalences defined for `Double`: `StrictEquiv`, which
   *                     is reflexive, and `IeeeEquiv`, which is consistent
   *                     with IEEE spec and floating point operations defined in
   *                     [[scala.math]].
   */
  object Double {
    /** An equivalence for `Double`s which is reflexive (treats all `NaN`s
     *  as equivalent), and treats `-0.0` and `0.0` as not equivalent; it
     *  behaves the same as [[java.lang.Double#compare]].
     *
     *  $doubleEquiv
     *
     *  This equivalence may be preferable for collections.
     *
     *  @see [[IeeeEquiv]]
     */
    trait StrictEquiv extends Equiv[Double] {
      def equiv(x: Double, y: Double): Boolean = java.lang.Double.compare(x, y) == 0
    }
    implicit object StrictEquiv extends StrictEquiv

    /** An equivalence for `Double`s which is consistent with IEEE specifications.
     *
     *  $doubleEquiv
     *
     *  This equivalence may be preferable for numeric contexts.
     *
     *  @see [[StrictEquiv]]
     */
    trait IeeeEquiv extends Equiv[Double] {
      def equiv(x: Double, y: Double): Boolean = x == y
    }
    implicit object IeeeEquiv extends IeeeEquiv
  }

  @deprecated("There are multiple equivalences for Doubles (Equiv.Double.TotalEquiv, " +
    "Equiv.Double.IeeeEquiv). Specify one by using a local import, assigning an implicit val, or passing it " +
    "explicitly. See the documentation for details.", since = "2.13.0")
  implicit object DeprecatedDoubleEquiv extends Double.StrictEquiv

  implicit object BigInt extends Equiv[BigInt] {
    def equiv(x: BigInt, y: BigInt): Boolean = x == y
  }

  implicit object BigDecimal extends Equiv[BigDecimal] {
    def equiv(x: BigDecimal, y: BigDecimal): Boolean = x == y
  }

  implicit object String extends Equiv[String] {
    def equiv(x: String, y: String): Boolean = x == y
  }

  implicit object Symbol extends Equiv[Symbol] {
    def equiv(x: Symbol, y: Symbol): Boolean = x == y
  }

  implicit def Option[T](implicit eqv: Equiv[T]): Equiv[Option[T]] =
    new OptionEquiv[T](eqv)

  private[this] final class OptionEquiv[T](private val eqv: Equiv[T]) extends Equiv[Option[T]] {
    def equiv(x: Option[T], y: Option[T]): Boolean = (x, y) match {
      case (None, None) => true
      case (Some(x), Some(y)) => eqv.equiv(x, y)
      case _ => false
    }

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: OptionEquiv[T] => this.eqv == that.eqv
      case _ => false
    }
    override def hashCode(): Int = eqv.hashCode() * optionSeed
  }

  // Tuple2 through Tuple9 instances below are mechanical: component-wise
  // equivalence, with structural equals/hashCode on the wrapper.

  implicit def Tuple2[T1, T2](implicit eqv1: Equiv[T1], eqv2: Equiv[T2]): Equiv[(T1, T2)] =
    new Tuple2Equiv(eqv1, eqv2)

  private[this] final class Tuple2Equiv[T1, T2](private val eqv1: Equiv[T1],
                                                private val eqv2: Equiv[T2]) extends Equiv[(T1, T2)] {
    def equiv(x: (T1, T2), y: (T1, T2)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple2Equiv[T1, T2] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2).hashCode()
  }

  implicit def Tuple3[T1, T2, T3](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3]) : Equiv[(T1, T2, T3)] =
    new Tuple3Equiv(eqv1, eqv2, eqv3)

  private[this] final class Tuple3Equiv[T1, T2, T3](private val eqv1: Equiv[T1],
                                                    private val eqv2: Equiv[T2],
                                                    private val eqv3: Equiv[T3]) extends Equiv[(T1, T2, T3)] {
    def equiv(x: (T1, T2, T3), y: (T1, T2, T3)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple3Equiv[T1, T2, T3] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3).hashCode()
  }

  implicit def Tuple4[T1, T2, T3, T4](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4]) : Equiv[(T1, T2, T3, T4)] =
    new Tuple4Equiv(eqv1, eqv2, eqv3, eqv4)

  private[this] final class Tuple4Equiv[T1, T2, T3, T4](private val eqv1: Equiv[T1],
                                                        private val eqv2: Equiv[T2],
                                                        private val eqv3: Equiv[T3],
                                                        private val eqv4: Equiv[T4])
    extends Equiv[(T1, T2, T3, T4)] {
    def equiv(x: (T1, T2, T3, T4), y: (T1, T2, T3, T4)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3) &&
        eqv4.equiv(x._4, y._4)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple4Equiv[T1, T2, T3, T4] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3 &&
          this.eqv4 == that.eqv4
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4).hashCode()
  }

  implicit def Tuple5[T1, T2, T3, T4, T5](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5]): Equiv[(T1, T2, T3, T4, T5)] =
    new Tuple5Equiv(eqv1, eqv2, eqv3, eqv4, eqv5)

  private[this] final class Tuple5Equiv[T1, T2, T3, T4, T5](private val eqv1: Equiv[T1],
                                                            private val eqv2: Equiv[T2],
                                                            private val eqv3: Equiv[T3],
                                                            private val eqv4: Equiv[T4],
                                                            private val eqv5: Equiv[T5])
    extends Equiv[(T1, T2, T3, T4, T5)] {
    def equiv(x: (T1, T2, T3, T4, T5), y: (T1, T2, T3, T4, T5)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3) &&
        eqv4.equiv(x._4, y._4) &&
        eqv5.equiv(x._5, y._5)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple5Equiv[T1, T2, T3, T4, T5] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3 &&
          this.eqv4 == that.eqv4 &&
          this.eqv5 == that.eqv5
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5).hashCode()
  }

  implicit def Tuple6[T1, T2, T3, T4, T5, T6](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6]): Equiv[(T1, T2, T3, T4, T5, T6)] =
    new Tuple6Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6)

  private[this] final class Tuple6Equiv[T1, T2, T3, T4, T5, T6](private val eqv1: Equiv[T1],
                                                                private val eqv2: Equiv[T2],
                                                                private val eqv3: Equiv[T3],
                                                                private val eqv4: Equiv[T4],
                                                                private val eqv5: Equiv[T5],
                                                                private val eqv6: Equiv[T6])
    extends Equiv[(T1, T2, T3, T4, T5, T6)] {
    def equiv(x: (T1, T2, T3, T4, T5, T6), y: (T1, T2, T3, T4, T5, T6)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3) &&
        eqv4.equiv(x._4, y._4) &&
        eqv5.equiv(x._5, y._5) &&
        eqv6.equiv(x._6, y._6)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple6Equiv[T1, T2, T3, T4, T5, T6] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3 &&
          this.eqv4 == that.eqv4 &&
          this.eqv5 == that.eqv5 &&
          this.eqv6 == that.eqv6
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6).hashCode()
  }

  implicit def Tuple7[T1, T2, T3, T4, T5, T6, T7](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6], eqv7: Equiv[T7]): Equiv[(T1, T2, T3, T4, T5, T6, T7)] =
    new Tuple7Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7)

  private[this] final class Tuple7Equiv[T1, T2, T3, T4, T5, T6, T7](private val eqv1: Equiv[T1],
                                                                    private val eqv2: Equiv[T2],
                                                                    private val eqv3: Equiv[T3],
                                                                    private val eqv4: Equiv[T4],
                                                                    private val eqv5: Equiv[T5],
                                                                    private val eqv6: Equiv[T6],
                                                                    private val eqv7: Equiv[T7])
    extends Equiv[(T1, T2, T3, T4, T5, T6, T7)] {
    def equiv(x: (T1, T2, T3, T4, T5, T6, T7), y: (T1, T2, T3, T4, T5, T6, T7)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3) &&
        eqv4.equiv(x._4, y._4) &&
        eqv5.equiv(x._5, y._5) &&
        eqv6.equiv(x._6, y._6) &&
        eqv7.equiv(x._7, y._7)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple7Equiv[T1, T2, T3, T4, T5, T6, T7] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3 &&
          this.eqv4 == that.eqv4 &&
          this.eqv5 == that.eqv5 &&
          this.eqv6 == that.eqv6 &&
          this.eqv7 == that.eqv7
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7).hashCode()
  }

  implicit def Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6], eqv7: Equiv[T7], eqv8: Equiv[T8]): Equiv[(T1, T2, T3, T4, T5, T6, T7, T8)] =
    new Tuple8Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8)

  private[this] final class Tuple8Equiv[T1, T2, T3, T4, T5, T6, T7, T8](private val eqv1: Equiv[T1],
                                                                        private val eqv2: Equiv[T2],
                                                                        private val eqv3: Equiv[T3],
                                                                        private val eqv4: Equiv[T4],
                                                                        private val eqv5: Equiv[T5],
                                                                        private val eqv6: Equiv[T6],
                                                                        private val eqv7: Equiv[T7],
                                                                        private val eqv8: Equiv[T8])
    extends Equiv[(T1, T2, T3, T4, T5, T6, T7, T8)] {
    def equiv(x: (T1, T2, T3, T4, T5, T6, T7, T8), y: (T1, T2, T3, T4, T5, T6, T7, T8)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3) &&
        eqv4.equiv(x._4, y._4) &&
        eqv5.equiv(x._5, y._5) &&
        eqv6.equiv(x._6, y._6) &&
        eqv7.equiv(x._7, y._7) &&
        eqv8.equiv(x._8, y._8)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple8Equiv[T1, T2, T3, T4, T5, T6, T7, T8] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3 &&
          this.eqv4 == that.eqv4 &&
          this.eqv5 == that.eqv5 &&
          this.eqv6 == that.eqv6 &&
          this.eqv7 == that.eqv7 &&
          this.eqv8 == that.eqv8
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8).hashCode()
  }

  implicit def Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](implicit eqv1: Equiv[T1], eqv2: Equiv[T2], eqv3: Equiv[T3], eqv4: Equiv[T4], eqv5: Equiv[T5], eqv6: Equiv[T6], eqv7: Equiv[T7], eqv8 : Equiv[T8], eqv9: Equiv[T9]): Equiv[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] =
    new Tuple9Equiv(eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8, eqv9)

  private[this] final class Tuple9Equiv[T1, T2, T3, T4, T5, T6, T7, T8, T9](private val eqv1: Equiv[T1],
                                                                            private val eqv2: Equiv[T2],
                                                                            private val eqv3: Equiv[T3],
                                                                            private val eqv4: Equiv[T4],
                                                                            private val eqv5: Equiv[T5],
                                                                            private val eqv6: Equiv[T6],
                                                                            private val eqv7: Equiv[T7],
                                                                            private val eqv8: Equiv[T8],
                                                                            private val eqv9: Equiv[T9])
    extends Equiv[(T1, T2, T3, T4, T5, T6, T7, T8, T9)] {
    def equiv(x: (T1, T2, T3, T4, T5, T6, T7, T8, T9), y: (T1, T2, T3, T4, T5, T6, T7, T8, T9)): Boolean =
      eqv1.equiv(x._1, y._1) &&
        eqv2.equiv(x._2, y._2) &&
        eqv3.equiv(x._3, y._3) &&
        eqv4.equiv(x._4, y._4) &&
        eqv5.equiv(x._5, y._5) &&
        eqv6.equiv(x._6, y._6) &&
        eqv7.equiv(x._7, y._7) &&
        eqv8.equiv(x._8, y._8) &&
        eqv9.equiv(x._9, y._9)

    override def equals(obj: scala.Any): Boolean = obj match {
      case that: AnyRef if this eq that => true
      case that: Tuple9Equiv[T1, T2, T3, T4, T5, T6, T7, T8, T9] =>
        this.eqv1 == that.eqv1 &&
          this.eqv2 == that.eqv2 &&
          this.eqv3 == that.eqv3 &&
          this.eqv4 == that.eqv4 &&
          this.eqv5 == that.eqv5 &&
          this.eqv6 == that.eqv6 &&
          this.eqv7 == that.eqv7 &&
          this.eqv8 == that.eqv8 &&
          this.eqv9 == that.eqv9
      case _ => false
    }
    override def hashCode(): Int = (eqv1, eqv2, eqv3, eqv4, eqv5, eqv6, eqv7, eqv8, eqv9).hashCode()
  }
}
martijnhoekstra/scala
src/library/scala/math/Equiv.scala
Scala
apache-2.0
21,634
package com.zxg.chat.lightdd.ui

import org.gnome.gtk.Align
import org.gnome.gtk.Button
import org.gnome.gtk.CellRendererText
import org.gnome.gtk.ComboBox
import org.gnome.gtk.DataColumnString
import org.gnome.gtk.Entry
import org.gnome.gtk.Grid
import org.gnome.gtk.Label
import org.gnome.gtk.ListStore
import org.gnome.gtk.Window
import org.gnome.gtk.WindowPosition
import org.gnome.gtk.WindowType

import iqq.im.bean.QQStatus

import com.zxg.chat.lightdd.Main
// Fix: the original had this import duplicated; one copy removed.
import com.zxg.chat.lightdd.service.UserService

/**
 * GTK login window: an ID field, a password field, a status combo box and a
 * Login button laid out on a Grid. Clicking Login delegates to the
 * application's [[UserService]].
 */
class LoginWindow(windowType: WindowType = WindowType.TOPLEVEL) extends Window(windowType) {
  setTitle("light-dd login")

  val rootGrid = new Grid
  rootGrid.setColumnSpacing(5)
  rootGrid.setRowSpacing(5)
  rootGrid.setBorderWidth(5)

  // ID row (label kept in a local scope; only the entry needs a field).
  {
    val idLabel = new Label("ID:")
    idLabel.setAlignHorizontal(Align.END)
    rootGrid.attach(idLabel, 0, 0, 1, 1)
  }
  val idEntry = new Entry
  idEntry.setAlignHorizontal(Align.FILL)
  idEntry.setExpandHorizontal(true)
  rootGrid.attach(idEntry, 1, 0, 3, 1)

  // Password row; entry is masked.
  {
    val passwordLabel = new Label("Password:")
    rootGrid.attach(passwordLabel, 0, 1, 1, 1)
  }
  val passwordEntry = new Entry
  passwordEntry.setVisibility(false)
  rootGrid.attach(passwordEntry, 1, 1, 3, 1)

  val loginButton = new Button("Login")
  loginButton.setAlignHorizontal(Align.CENTER)
  rootGrid.attach(loginButton, 0, 2, 2, 1)

  // Status combo box backed by a single-column ListStore of display labels.
  val statusLabelColumn = new DataColumnString
  val statusModel = new ListStore(Array(statusLabelColumn))
  // (label, status) pairs; indices must stay aligned with the model rows,
  // since login() maps the combo's active index back into this list.
  // NOTE(review): "slient"/SLIENT spelling comes from the QQStatus API — keep as is.
  val statusList = List(
    ("online", QQStatus.ONLINE),
    ("call me", QQStatus.CALLME),
    ("away", QQStatus.AWAY),
    ("busy", QQStatus.BUSY),
    ("slient", QQStatus.SLIENT),
    ("hidden", QQStatus.HIDDEN))
  statusList.foreach({ (t) =>
    statusModel.setValue(statusModel.appendRow(), statusLabelColumn, t._1)
  })
  val comboBox = new ComboBox(statusModel);
  {
    val comboBoxRenderer = new CellRendererText(comboBox);
    comboBoxRenderer.setText(statusLabelColumn);
    comboBox.setAlignHorizontal(Align.END)
    comboBox.setActive(0) // default selection keeps getActive() >= 0 in login()
  }
  rootGrid.attach(comboBox, 2, 2, 2, 1)

  add(rootGrid)
  // setDefaultSize(300, 150)
  setPosition(WindowPosition.CENTER)

  loginButton.connect(new Button.Clicked {
    override def onClicked(source: Button) = {
      LoginWindow.this.login()
    }
  })

  /** Reads the form fields and hands the credentials to the UserService. */
  def login() = {
    val userName = idEntry.getText()
    val password = passwordEntry.getText()
    val status = statusList(comboBox.getActive())._2
    Main.getService(classOf[UserService]).login(userName, password, status)
  }
}
Xianguang-Zhou/light-dd
light-dd/src/main/scala/com/zxg/chat/lightdd/ui/LoginWindow.scala
Scala
gpl-3.0
2,570
/* Copyright 2009-2011 Jay Conrod
 *
 * This file is part of Tungsten.
 *
 * Tungsten is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation, either version 2 of
 * the License, or (at your option) any later version.
 *
 * Tungsten is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Tungsten.  If not, see
 * <http://www.gnu.org/licenses/>.
 */

package tungsten

import Utilities._

/**
 * A global struct definition: a named aggregate whose members are the
 * fields referenced by `fields`.
 */
final case class Struct(name: Symbol,
                        fields: List[Symbol],
                        annotations: List[AnnotationValue] = Nil)
  extends Definition
  with AggregateDefinition
{
  // Structs always live at module scope.
  override def isGlobal = true

  override def validateComponents(module: Module) = {
    // Base validation first, then check each field symbol resolves to a Field.
    val baseErrors = super.validateComponents(module)
    baseErrors ++ validateComponentsOfClass[Field](module, fields)
  }

  override def validateScope(module: Module, scope: Set[Symbol]): List[CompileException] =
    validateComponentsScope(module, scope, fields)
}
jayconrod/tungsten
core/src/main/scala/tungsten/Struct.scala
Scala
gpl-2.0
1,316
package views

import play.api.test.PlaySpecification

/**
 * Tests the <code>index.scala.html</code> view template.
 */
class IndexSpec extends PlaySpecification {

  "index template" should {
    "render the page" in {
      // Render the template directly and inspect the produced markup.
      val rendered = views.html.index()
      contentAsString(rendered) must contain("Welcome to the Home Media System")
    }
  }
}
chrisnappin/home-media-microservices
web-front-end/test/views/IndexSpec.scala
Scala
apache-2.0
349
/** found in genericNest.jar, compiled from OuterTParams.java */

// NOTE(review): partest fixture for bug #695 — the test passes if this file
// simply compiles and runs; do not "modernize" it (extends App is intentional).
import nestpkg._;

// bug #695
object ForceParse extends OuterTParams[AnyRef] {
  // Force import of HarderToParse<A>.InnerClass,
  // which has confusing method signature.
  var field: InnerClass = null
}

object Test extends App {
  // Touching ForceParse forces its parsing/initialization, which is the test.
  ForceParse
}
felixmulder/scala
test/files/jvm/genericNest.scala
Scala
bsd-3-clause
313
package filodb.jmh import java.nio.charset.StandardCharsets import java.util.concurrent.TimeUnit import ch.qos.logback.classic.{Level, Logger} import com.typesafe.scalalogging.StrictLogging import org.jboss.netty.buffer.ChannelBuffers import org.openjdk.jmh.annotations._ import remote.RemoteStorage.{LabelPair, Sample, TimeSeries} import filodb.core.binaryrecord2.RecordBuilder import filodb.gateway.conversion.{InfluxProtocolParser, PrometheusInputRecord} import filodb.memory.MemFactory /** * Measures the shard calculation, deserialization, and ingestion record creation logic used in the Gateway */ @State(Scope.Thread) class GatewayBenchmark extends StrictLogging { org.slf4j.LoggerFactory.getLogger("filodb").asInstanceOf[Logger].setLevel(Level.WARN) val tagMap = Map( "__name__" -> "heap_usage", "dc" -> "DC1", "_ws_" -> "demo", "_ns_" -> "App-123", "partition" -> "partition-2", "host" -> "abc.xyz.company.com", "instance" -> s"Instance-123" ) val influxTags = tagMap.filterKeys(_ != "__name__").toSeq.sortBy(_._1) val initTimestamp = System.currentTimeMillis val value: Double = 2.5 def timeseries(tags: Map[String, String], dblValue: Double = value): TimeSeries = { val builder = TimeSeries.newBuilder .addSamples(Sample.newBuilder.setTimestampMs(initTimestamp).setValue(dblValue).build) tags.foreach { case (k, v) => builder.addLabels(LabelPair.newBuilder.setName(k).setValue(v).build) } builder.build } val singlePromTSBytes = timeseries(tagMap).toByteArray val singleInfluxRec = s"${tagMap("__name__")},${influxTags.map{case (k, v) => s"$k=$v"}.mkString(",")} " + s"counter=$value ${initTimestamp}000000" val singleInfluxBuf = ChannelBuffers.buffer(1024) singleInfluxBuf.writeBytes(singleInfluxRec.getBytes(StandardCharsets.UTF_8)) // Histogram containing 8 buckets + sum and count val histBuckets = Map("0.025" -> 0, "0.05" -> 0, "0.1" -> 2, "0.25" -> 2, "0.5" -> 5, "1.0" -> 9, "2.5" -> 11, "+Inf" -> 11) val histSum = histBuckets.values.sum val histPromSeries = histBuckets.map { 
case (bucket, count) => timeseries(tagMap ++ Map("__name__" -> "heap_usage_bucket", "le" -> bucket), count) } ++ Seq(timeseries(tagMap ++ Map("__name__" -> "heap_usage_sum"), histSum), timeseries(tagMap ++ Map("__name__" -> "heap_usage_count"), histBuckets.size)) val histPromBytes = histPromSeries.map(_.toByteArray) val histInfluxRec = s"${tagMap("__name__")},${influxTags.map{case (k, v) => s"$k=$v"}.mkString(",")} " + s"${histBuckets.map { case (k, v) => s"$k=$v"}.mkString(",") },sum=$histSum,count=8 " + s"${initTimestamp}000000" val histInfluxBuf = ChannelBuffers.buffer(1024) histInfluxBuf.writeBytes(histInfluxRec.getBytes(StandardCharsets.UTF_8)) val builder = new RecordBuilder(MemFactory.onHeapFactory, reuseOneContainer = true) @Benchmark @BenchmarkMode(Array(Mode.Throughput)) @OutputTimeUnit(TimeUnit.SECONDS) def promCounterProtoConversion(): Int = { val record = PrometheusInputRecord(TimeSeries.parseFrom(singlePromTSBytes)).head val partHash = record.partitionKeyHash val shardHash = record.shardKeyHash record.getMetric record.nonMetricShardValues.length record.addToBuilder(builder) partHash | shardHash } @Benchmark @BenchmarkMode(Array(Mode.Throughput)) @OutputTimeUnit(TimeUnit.SECONDS) def influxCounterConversion(): Int = { // reset the ChannelBuffer so it can be read every timeseries singleInfluxBuf.resetReaderIndex() val record = InfluxProtocolParser.parse(singleInfluxBuf).get val partHash = record.partitionKeyHash val shardHash = record.shardKeyHash record.getMetric record.nonMetricShardValues.length record.addToBuilder(builder) partHash | shardHash } @Benchmark @BenchmarkMode(Array(Mode.Throughput)) @OutputTimeUnit(TimeUnit.SECONDS) def promHistogramProtoConversion(): Int = { var overallHash = 7 histPromBytes.foreach { tsBytes => val record = PrometheusInputRecord(TimeSeries.parseFrom(tsBytes)).head val partHash = record.partitionKeyHash val shardHash = record.shardKeyHash record.getMetric record.nonMetricShardValues.length record.addToBuilder(builder) 
overallHash |= partHash | shardHash } overallHash } @Benchmark @BenchmarkMode(Array(Mode.Throughput)) @OutputTimeUnit(TimeUnit.SECONDS) def influxHistogramConversion(): Int = { // reset the ChannelBuffer so it can be read every timeseries histInfluxBuf.resetReaderIndex() val record = InfluxProtocolParser.parse(histInfluxBuf).get val partHash = record.partitionKeyHash val shardHash = record.shardKeyHash record.getMetric record.nonMetricShardValues.length record.addToBuilder(builder) partHash | shardHash } }
filodb/FiloDB
jmh/src/main/scala/filodb.jmh/GatewayBenchmark.scala
Scala
apache-2.0
4,951
package objektwerks.types import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers final case class Glucose(datetime: Long, level: Int) final case class Med(datetime: Long, medtype: Int, dosage: Int) class TypeReflectTest extends AnyFunSuite with Matchers { test("class tag") { import scala.reflect._ def isTypeof[T: ClassTag](list: List[T]): ClassTag[T] = classTag[T] match { case g if g === classTag[Glucose] => println(s"*** glucose class tag: ${list.toString}"); g case m if m === classTag[Med] => println(s"*** med class tag: ${list.toString}"); m case _ => fail("class tag test failed!") } isTypeof(List.empty[Glucose]) === classTag[Glucose] shouldBe true isTypeof(List.empty[Med]) === classTag[Med] shouldBe true } test("type tag") { import scala.reflect.runtime.universe._ def isTypeof[T: TypeTag](list: List[T]): TypeTag[T] = typeTag[T] match { case g if g === typeTag[Glucose] => println(s"*** glucose type tag: ${list.toString}"); g case m if m === typeTag[Med] => println(s"*** med type tag: ${list.toString}"); m case _ => fail("type tag test failed!") } isTypeof(List.empty[Glucose]) === typeTag[Glucose] shouldBe true isTypeof(List.empty[Med]) === typeTag[Med] shouldBe true } }
objektwerks/scala
src/test/scala/objektwerks/types/TypeReflectTest.scala
Scala
apache-2.0
1,316
package spinoco.protocol.http.header import spinoco.protocol.http.HostPort import spinoco.protocol.http.header.value.HeaderCodecDefinition /** * RFC 7231 section 5.4 * * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Host */ sealed case class Host(value: HostPort) extends DefaultHeader object Host { val codec = HeaderCodecDefinition[Host](HostPort.codec.xmap (Host.apply, _.value)) }
Spinoco/protocol
http/src/main/scala/spinoco/protocol/http/header/Host.scala
Scala
mit
423
/* * Copyright 2009-2010 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.linkedin.norbert import javacompat.cluster.{JavaNode, Node => JNode} import com.linkedin.norbert.cluster.{Node => SNode} package object javacompat { implicit def scalaSetToJavaSet[T](set: Set[T]): java.util.Set[T] = { val s = new java.util.HashSet[T] set.foreach { elem => s.add(elem) } s } implicit def javaSetToImmutableSet[T](nodes: java.util.Set[T]): Set[T] = { collection.JavaConversions.asScalaSet(nodes).foldLeft(Set[T]()) { (set, n) => set + n } } implicit def javaIntegerSetToScalaIntSet(set: java.util.Set[java.lang.Integer]): Set[Int] = { collection.JavaConversions.asScalaSet(set).foldLeft(collection.immutable.Set.empty[Int]) { _ + _.intValue } } implicit def scalaIntSetToJavaIntegerSet(set: Set[Int]): java.util.Set[java.lang.Integer] = { val result = new java.util.HashSet[java.lang.Integer](set.size) set.foreach (result add _) result } implicit def scalaNodeToJavaNode(node: SNode): JNode = { if (node == null) null else JavaNode(node) } implicit def javaNodeToScalaNode(node: JNode): SNode = { if (node == null) null else { val iter = node.getPartitionIds.iterator var partitionIds = Set.empty[Int] while(iter.hasNext) { partitionIds += iter.next.intValue } SNode(node.getId, node.getUrl, node.isAvailable, partitionIds, if (node.getCapability == null) None else Some(node.getCapability.longValue), if (node.getPersistentCapability == null) None else 
Some(node.getPersistentCapability.longValue)) } } implicit def convertSNodeSet(set: Set[SNode]): java.util.Set[JNode] = { var result = new java.util.HashSet[JNode](set.size) set.foreach(elem => result.add(scalaNodeToJavaNode(elem))) result } implicit def convertJNodeSet(set: java.util.Set[JNode]): Set[SNode] = { val iter = set.iterator var result = Set.empty[SNode] while(iter.hasNext) result += javaNodeToScalaNode(iter.next) result } }
linkedin/norbert
java-cluster/src/main/scala/com/linkedin/norbert/javacompat/package.scala
Scala
apache-2.0
2,588
package com.github.gigurra.glasciia /** * Created by johan on 2016-11-10. */ sealed trait Orientation object Orientation { case object Landscape extends Orientation case object Portrait extends Orientation }
GiGurra/glasciia
glasciia-core/src/main/scala/com/github/gigurra/glasciia/Orientation.scala
Scala
mit
217
package spoker.hand import spoker._ import spoker.hand.HandSpecializations.{FourOfAKind, ThreeOfAKind, OnePair} private object HandExtractors { object Broadway { def unapply(cards: Cards) = { cards.take(5).map(_.rank) match { case List(Ace, King, Queen, Jack, Ten) => Option(cards) case _ => None } } } object Straight { def unapply(cards: Cards): Option[Cards] = { val longestStraight: Cards = cards.tail.foldLeft(List(cards.head))((result, card) => { if (result.size == 5 || (result.size > 0 && card.rank == result.head.rank)) //already found the straight or have a pair result else if (result.size == 0 || card.rank.id < (result.head.rank.id - 1)) //restart straight counting card :: Nil else card :: result }) if (longestStraight.size == 5) Option(longestStraight.reverse) else if (longestStraight.size == 4 && longestStraight.head.rank == Two && cards.head.rank == Ace) //wheel straight Option(longestStraight.reverse :+ cards.head) else None } } object Flush { def unapply(cards: Cards) = { val upper = (cards groupBy { _.suit } maxBy { _._2.size })._1 cards filter { _.suit == upper } match { case it if it.size > 4 => Option(it) case _ => None } } } object MatchedAndRest { def apply(cards: Cards, matched: Cards) = Option(matched, cards diff matched) } object Pair { def unapply(cards: Cards) = groupsOfTwo(cards) lastOption match { case None => None case Some(matched) => MatchedAndRest(cards, matched) } } object Three { def unapply(cards: Cards) = groupOf(3)(cards) match { case None => None case Some(matched) => MatchedAndRest(cards, matched) } } object Four { def unapply(cards: Cards) = groupOf(4)(cards) match { case None => None case Some(matched) => MatchedAndRest(cards, matched) } } object OfAKind { def unapply(t: (Cards, Cards)): Option[Hand] = { val (matched, rest) = t val kickers = rest.take(5 - matched.size) matched.size match { case 4 => Option(new FourOfAKind(matched, kickers)) case 3 => Option(new ThreeOfAKind(matched, kickers)) case 2 => Option(new OnePair(matched, 
kickers)) } } } private def groupOf(size: Int)(cards: Cards) = cards groupBy { _.rank } map { _._2 } find { size == _.size } private def groupsOfTwo(cards: Cards) = (cards groupBy { _.rank } map { _._2 } filter { 2 == _.size }).toList sortBy { _.head.rank } }
vladmm/scala-poker
src/main/scala/spoker/hand/HandExtractors.scala
Scala
mit
2,784
package launchers import java.io.{BufferedWriter, File, FileWriter} import java.util.TimerTask import models.Project import org.apache.spark.launcher.SparkLauncher import org.joda.time.{Period, DateTime} import scala.io.Source import scala.sys.process.{ProcessLogger, _} class ReprocessLauncher(orchestratorLauncher: MEOrchestratorLauncher, foldersToReprocessPath: String, reprocessLogPath: String) extends TimerTask { def run { println("Going to reprocess stuff") reprocessFolders(foldersToReprocessPath, reprocessLogPath) } def reprocessFolders(foldersToReprocessPath: String, reprocessLogPath: String): Unit = { val folderList = Source.fromFile(foldersToReprocessPath).getLines val file = new File(reprocessLogPath) val bw = new BufferedWriter(new FileWriter(file)) bw.write("Starting\\n") for (folder <- folderList) { val start = DateTime.now() println(s"${start.toString} - Going to process folder ${folder}") bw.write(s"${start.toString} - Going to process folder ${folder}\\n") try { val launchedStatus : Int = orchestratorLauncher.launchOrchestratorAndWait(folder) val spent = new Period(start, DateTime.now) if(launchedStatus==0) { println(s"Success! Status ${launchedStatus} Took ${spent.getHours}:${spent.getMinutes}:${spent.getSeconds} with folder ${folder}") bw.write(s"Success! 
Status ${launchedStatus} Took ${spent.getHours}:${spent.getMinutes}:${spent.getSeconds} with folder ${folder}\\n") }else{ println(s"Error ${launchedStatus} after (${spent.getHours}:${spent.getMinutes}:${spent.getSeconds}) with folder ${folder}") bw.write(s"Error ${launchedStatus} after ${spent} (${spent.getHours}:${spent.getMinutes}:${spent.getSeconds}) with folder ${folder}\\n") } println("----------------------------------") bw.write("---------------------------------\\n") } catch { case e: Exception => { println(s"Error executing folder: ${folder}") println(e) println(e.getMessage) bw.write(s"Error executing folder ${folder}\\n") bw.write(e.toString + "\\n") bw.write(e.getMessage + "\\n") } } } val end = DateTime.now() println(s"${end.toString} - Finished reprocessing\\n") bw.write(s"${end.toString} - Finished reprocessing\\n\\n") bw.close() } }
canademar/me_extractors
BRMProjectManager/src/main/scala/launchers/ReprocessLauncher.scala
Scala
gpl-2.0
2,422
/* bisect.scala */ object Bisect { def findRoot(low: Double, high: Double)(f: Double => Double): Double = ??? } /* eof */
darrenjw/scala-course
exercises/bisection/src/main/scala/bisect.scala
Scala
gpl-3.0
133
package test import scala.slick.driver.SQLiteDriver.simple._ import Database.threadLocalSession import java.io.File import play.api.test.FakeApplication import fr.lium.api.{AudioFileApi, MediaFileApi, SegApi} import tasks.{DropCreateSchema, LoadFixtures} final class TestEnv() { lazy val database: Database = Database.forURL( "jdbc:sqlite:liumtest.db", driver = "org.sqlite.JDBC") lazy val baseDir = new File("/tmp/testaudio/") lazy val basename = "audio" lazy val dropCreateSchema = new DropCreateSchema lazy val loadFixtures = new LoadFixtures lazy val spkPublicSegFile = "audio.iv.seg" def mediaFileApi() = new MediaFileApi( baseDirectory = baseDir, audioFileBasename = basename, database, None ) def audioFileApi()(implicit app: FakeApplication) = { new AudioFileApi(baseDir, basename, database) } def segApi() = { new SegApi( spkPublicSegFile, mediaFileApi ) } } object Env { lazy val current = new TestEnv }
bsalimi/speech-recognition-api
test/TestEnv.scala
Scala
mit
999
import sbt.Keys._ import sbt._ object ProjectBuild extends Build { val ScalaVersion = "2.11.7" val ScalatestVersion = "3.0.0-M7" val ApacheCommonsVersion = "2.4" val SpecsVersion = "3.0.0-M9" val Json4sVersion = "3.3.0" val AkkaStreamVersion = "1.0" val GuiceVersion = "4.0" val SprayTestkitVersion = "1.3.1" val ScalamockVersion = "3.2.2" lazy val parent = Project( id = "parent", base = file("."), settings = super.settings ++ sharedSettings ) .settings( name := "Many thanks!" ) .aggregate(gcmTransport, repository, webService, serviceAPI, serviceImpl) lazy val gcmTransport = Project( id = "gcmTransport", base = file("gcmTransport"), settings = super.settings ++ sharedSettings ) .settings( libraryDependencies ++= Seq( "commons-io" % "commons-io" % "2.4", "org.json4s" % "json4s-native_2.11" % Json4sVersion ) ) lazy val webService = Project( id = "webService", base = file("webService"), settings = super.settings ++ sharedSettings ) .settings( libraryDependencies ++= Seq( "com.typesafe.akka" % "akka-stream-experimental_2.11" % AkkaStreamVersion, "com.typesafe.akka" % "akka-http-core-experimental_2.11" % AkkaStreamVersion, "com.typesafe.akka" % "akka-http-spray-json-experimental_2.11" % AkkaStreamVersion, "com.google.inject" % "guice" % GuiceVersion, "org.scalatest" % "scalatest_2.11" % ScalatestVersion % "test", "com.typesafe.akka" % "akka-http-testkit-experimental_2.11" % AkkaStreamVersion % "it,test", "io.spray" % "spray-routing_2.11" % SprayTestkitVersion % "test", "org.scalamock" % "scalamock-scalatest-support_2.11" % ScalamockVersion % "test" ) ) .configs(IntegrationTest) .dependsOn(serviceAPI) lazy val repository = project.in(file("repository")) lazy val serviceAPI = project.in(file("serviceAPI")) lazy val serviceImpl = project .in(file("serviceImpl")) .dependsOn(serviceAPI, repository, gcmTransport) lazy val sharedSettings = super.settings ++ Seq( version := "1.0.0", scalaVersion := ScalaVersion, scalaBinaryVersion:= CrossVersion.binaryScalaVersion(ScalaVersion), 
autoCompilerPlugins := true, scalacOptions ++= Seq( "-language:postfixOps", "-language:implicitConversions", "-language:reflectiveCalls", "-language:higherKinds", "-language:existentials", "-Yinline-warnings", "-Xlint", "-deprecation", "-feature", "-unchecked" ), ivyScala := ivyScala.value map { _.copy(overrideScalaVersion = true) } ) }
nyavro/manythanks
project/ProjectBuild.scala
Scala
apache-2.0
2,795
package regolic.qelim import regolic.asts.core.Trees._ import regolic.asts.core.Manip._ import regolic.asts.fol.Trees._ import regolic.asts.fol.Manip.prenexNormalForm abstract class QuantifierElimination { //apply full quantifier elimination to a formula, return a quantifier free //equivalent formula. Note that this will take care of any combination of //quantifiers //defaut implementation will use elimExistential, but optimized version //can be redefined for each theory def apply(formula: Formula): Formula = { val prenexFormula = prenexNormalForm(formula) def rec(f: Formula): Formula = f match { case Forall(x, b) => { val qf = rec(b) Not(elimExistential(Not(qf), x)) } case Exists(x, b) => { val qf = rec(b) elimExistential(qf, x) } case f => f } rec(prenexFormula) } //one step of the quantifier elimination, assume the input formula //is quantifier free and eliminate the variable that is assumed to //be existentially quantified. def elimExistential(formula: Formula, variable: Variable): Formula = { val (f, _) = constructiveElimExistential(formula, variable) f } //the basic building block, it returns a witness for the variable that prove //the existence of a term. It also return an equivalent formula, which simply //could be the formula with x substituted by the witness term, but could //also be more optimized def constructiveElimExistential(formula: Formula, variable: Variable): (Formula, Term) }
regb/scabolic
src/main/scala/regolic/qelim/QuantifierElimination.scala
Scala
mit
1,544
package sharry.backend.account import cats.effect.Sync import cats.implicits._ import sharry.common._ case class NewAccount( id: Ident, login: Ident, source: AccountSource, state: AccountState = AccountState.Active, password: Password = Password.empty, email: Option[String] = None, admin: Boolean = false ) { def validate: Either[String, NewAccount] = if (id.isEmpty) Left("An id is required") else if (login.isEmpty) Left("A login name is required") else Right(this) } object NewAccount { def create[F[_]: Sync]( login: Ident, source: AccountSource, state: AccountState = AccountState.Active, password: Password = Password.empty, email: Option[String] = None, admin: Boolean = false ): F[NewAccount] = for { id <- Ident.randomId[F] } yield NewAccount(id, login, source, state, password, email, admin) }
eikek/sharry
modules/backend/src/main/scala/sharry/backend/account/NewAccount.scala
Scala
gpl-3.0
908