code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/*
 * Copyright 2017 Datamountaineer.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.datamountaineer.streamreactor.connect.config.base.traits

import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.WRITE_TIMEOUT_SUFFIX

/**
 * Created by andrew@datamountaineer.com on 31/07/2017.
 * stream-reactor
 *
 * Mixin that exposes the connector's write-timeout setting. The config key is
 * built from the connector-specific prefix supplied by [[BaseSettings]] plus
 * the shared WRITE_TIMEOUT_SUFFIX constant, so every connector reads the same
 * suffix under its own namespace.
 */
trait WriteTimeoutSettings extends BaseSettings {
  // Fully-qualified config key, e.g. "<connectorPrefix>.<WRITE_TIMEOUT_SUFFIX>".
  val writeTimeoutSettingsConst = s"$connectorPrefix.$WRITE_TIMEOUT_SUFFIX"

  // Looks up the configured write timeout via BaseSettings.getInt.
  // NOTE(review): return type is whatever getInt yields (likely java.lang.Integer
  // if backed by Kafka's AbstractConfig) — confirm before annotating as Int.
  def getWriteTimeout = getInt(writeTimeoutSettingsConst)
}
datamountaineer/kafka-connect-common
src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/WriteTimeoutSettings.scala
Scala
apache-2.0
1,057
package org.eclairjs.tools.generate.org.eclairjs.tools.generate.model

import scala.util.matching.Regex

/**
 * Created by berkland on 11/25/15.
 *
 * Converts a Scaladoc comment into a JSDoc comment. The raw comment text is
 * split into lines; tag lines (@return, @tparam, ...) are rewritten, removed
 * or moved to the end, Scaladoc [[links]] become JSDoc {@link ...}, and
 * {{{...}}} example blocks become @example blocks.
 */
class Comment(comment:String)   {
  // Working buffer of comment body lines. For a multi-line comment the
  // opening "/**" and closing "*/" lines are dropped; a single-line comment
  // has the markers stripped in place instead.
  var lines= comment.split("\n")
  if (lines.length>1)
    lines= lines.slice(1,lines.length-1)
  else {
    val trimmedComment=comment.replace("/**"," * ").replace("*/","")
    lines.update(0, trimmedComment)
  }

  // newLines collects the JSDoc header + body; endLines collects trailing
  // tags (e.g. @returns) that must come after the body.
  val newLines= scala.collection.mutable.ListBuffer.empty[String]
  val endLines= scala.collection.mutable.ListBuffer.empty[String]

  newLines +="/**"

  // make one leading space for each line
  val docStart="""\s+\*"""
  lines=lines.map(str=> str.replaceFirst(docStart," *"))

  // Replaces any Scaladoc @return tag with a JSDoc "@returns {type} text"
  // line, preserving the original description text.
  def addReturn(returnType: String) = {
    val returnsText=getTagValue("return")
    removeTag("return")
    endLines+=s""" * @returns {$returnType} $returnsText"""
  }

  // Appends an arbitrary trailing JSDoc tag line.
  def addTag(tag:String, tagValue:String) = {
    endLines+=s""" * @$tag $tagValue"""
  }

  // Returns the text following "@<tag>" on the first matching line,
  // or "" when the tag is absent.
  def getTagValue(tag:String): String = {
    val rx=tagRX(tag)
    var lineOpt=lines.find(str=> {
      str match {
        case rx(rest) => true
        case _ => false
      }
    })
    lineOpt match {
      case Some(line) => line match {
        case rx(rest) => rest
        case _ => ""
      }
      case None =>""
    }
  }

  // Rewrites every "@<tag> value" line, passing the old value through callback.
  def changeTagValue(tag:String, callback: String => String) {
    val rx=tagRX(tag)
    changeLines(rx,rest=> s"@$tag ${callback(rest)}")
  }

  // Maps every line matching rx to " * <callback(capture)>"; other lines
  // are left untouched.
  def changeLines(rx:Regex, callback: String => String) {
    lines=lines.map(str=> {
      str match {
        case rx(rest) => s" * ${callback(rest)}"
        case _ => str
      }
    })
  }

  // Rewrites Scaladoc {{{ ... }}} code fences into JSDoc @example blocks.
  def fixExamples() {
    val rxStart="\\s+\\*\\s+\\{\\{\\{(.*)".r
    val rxEnd="\\s+\\*(.*)\\}\\}\\}.*".r
    changeLines(rxStart,rest=> s"@example $rest")
    changeLines(rxEnd,rest=> rest)
  }

  // Regex matching a comment line of the form " * @<tag> <rest>",
  // capturing <rest>.
  def tagRX(tag:String) = {
    val str="\\s\\*\\s+@"+tag+"(.*)"
    str.r
  }

  // Drops every line carrying the given tag.
  def removeTag(tag:String): Unit = {
    val rx=tagRX(tag)
    lines=lines.filter(str=> {
      str match {
        case rx(rest) => false
        case _ => true
      }
    })
  }

  // Strips Scaladoc tags that have no JSDoc counterpart in this generator.
  def removeUnusedTags()={
    removeTag("tparam")
    removeTag("group")
    removeTag("author")
    removeTag("version")
    removeTag("groupname")
    removeTag("groupdesc")
    removeTag("groupprio")
  }

  //  should we convert org.apache.spark.d1.cls to ./d1/cls ??
  // for now, just return last segment
  def convertQualifiedName(name:String):String = {
    val parts = name.split("\\.")
    parts.last
  }

  // Rewrites Scaladoc [[qualified.Name]] links to JSDoc {@link Name}.
  // NOTE(review): the regex only rewrites the first link on a line.
  def fixLinks()={
    val regx="(.+)\\[\\[([\\w\\.]*)\\]\\](.*)".r
    lines=lines.map(str=> {
      str match {
        case regx(p1,name,p3) => {p1+"{@link "+convertQualifiedName(name)+"}"+p3}
        case _ => str
      }
    })
  }

  // Runs the full conversion pipeline and renders the final JSDoc block.
  // Returns "" when nothing but the open/close markers would remain.
  def asJSDoc():String = {
    removeUnusedTags()
    fixLinks();
    fixExamples()
    newLines++=lines
    newLines++=endLines
    newLines +=" */"
    if (newLines.length>2)
      newLines.mkString("\n")
    else
      "" // dont' generate empty jsdoc
  }
}
conker84/eclairjs-nashorn
tools/generateJS/src/main/scala/org/eclairjs/tools/generate/org/eclairjs/tools/generate/model/Comment.scala
Scala
apache-2.0
3,024
/**
 * Compares its two command-line arguments and reports, one line per result:
 * case-sensitive equality, case-insensitive equality, and lexical ordering.
 */
object StringComparison {
  def main(args: Array[String]) {
    val s1 = args(0)
    val s2 = args(1)

    // Equality checks: exact, then ignoring case.
    if (s1.equals(s2)) println(s"'$s1' is equal to '$s2' with case.")
    if (s1.equalsIgnoreCase(s2)) println(s"'$s1' is equal to '$s2' without case.")

    // compareTo: negative = before, zero = equal, positive = after.
    // Exactly one branch fires, so a match on the sign is equivalent to
    // three independent if-statements on the raw comparison value.
    Integer.signum(s1.compareTo(s2)) match {
      case -1 => println(s"'$s1' is lexically before '$s2'.")
      case 0  => println(s"'$s1' is lexically equal to '$s2'.")
      case 1  => println(s"'$s1' is lexically after '$s2'.")
    }
  }
}
DWiechert/rosetta-stone
basics/string-comparison/scala/StringComparison.scala
Scala
apache-2.0
498
import sbt._
import Keys._
import play.Project._

// sbt build definition for the "geebooks" Play application.
// NOTE(review): project/Build.scala with the `Build` trait is the legacy
// pre-sbt-1.x style; modern sbt expects a build.sbt instead — fine here as
// long as the project stays on the matching old sbt/Play versions.
object ApplicationBuild extends Build {

  val appName         = "geebooks"
  val appVersion      = "1.0-SNAPSHOT"

  val appDependencies = Seq(
    // Add your project dependencies here,
    "com.typesafe.play" %% "play-slick" % "0.4.0",
    "postgresql" % "postgresql" % "8.4-702.jdbc4"
  )

  // Root Play project; settings block intentionally left empty.
  val main = play.Project(appName, appVersion, appDependencies).settings(
    // Add your own project settings here
  )

}
bobeal/geebooks
project/Build.scala
Scala
agpl-3.0
475
/*
 * Copyright 2017 helloscala.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package helloscala.algorithm

import scala.annotation.tailrec

object Searchs {

  /**
   * Generic binary search over a sorted indexed sequence.
   *
   * @param list sorted (ascending) sequence to search
   * @param item value to look for
   * @param ev1  implicit view making elements comparable
   * @return index of `item` in `list`, or -1 when absent
   */
  def genericBinarySearch[T](list: IndexedSeq[T], item: T)(implicit ev1: T => Ordered[T]): Int = {
    @tailrec
    def _search(low: Int, high: Int): Int = {
      // BUG FIX: the original terminated on `mid > high`, but mid = (low+high)/2
      // is always <= high, so that branch could never fire once the window was
      // exhausted. Searching for a value absent between two elements (e.g. 2 in
      // [1,3,5]) recursed into _search(low, high) with low > high forever.
      // The correct empty-window check is low > high.
      if (low > high) -1
      else {
        // low + (high - low) / 2 avoids Int overflow of (low + high) / 2
        // on very large indices.
        val mid = low + (high - low) / 2
        val guess = list(mid)
        // println(s"low: $low, high: $high, mid: $mid, guess: $guess ? item: $item")
        if (guess == item) mid
        else if (guess > item) _search(low, mid - 1)
        else _search(mid + 1, high)
      }
    }

    if (list.isEmpty) -1 else _search(0, list.size - 1)
  }
}
helloscala/helloscala
hs-core/src/main/scala/helloscala/algorithm/Searchs.scala
Scala
apache-2.0
1,463
package cats.examples.datatypes

/**
 * Validated provides a means to accumulate validation errors so all errors
 * for a given set of inputs can be reported in a single result.
 *
 * See http://typelevel.org/cats/datatypes/validated.html
 */
object ValidatedExample extends App {

  // Imagine you are filling out a web form to signup for an account. You input
  // your username and password and submit. Response comes back saying your
  // username can’t have dashes in it, so you make some changes and resubmit.
  // Can’t have special characters either. Change, resubmit. Passwords need to
  // have at least one capital letter. Change, resubmit. Password needs to have
  // at least one number.

  // Or perhaps you’re reading from a configuration file. One could imagine the
  // configuration library you’re using returns a scala.util.Try, or maybe a
  // scala.util.Either. Your parsing may look something like:
  {
    import scala.util.Try

    val fakeConfig = Map(
      "endpoint" -> "www.example.com",
      "port" -> "this is not an int"
    )

    def config[T](s: String): Try[T] = Try {
      fakeConfig(s).asInstanceOf[T]
    }

    case class ConnectionParams(endpoint: String, port: Int)

    // Fails fast: only the FIRST error ("url" missing) is reported.
    val invalidKey = for {
      url <- config[String]("url")
      port <- config[Int]("port")
    } yield ConnectionParams(url, port)

    assert(invalidKey.isFailure)

    // NOTE(review): the cast-based config stub means the port failure only
    // surfaces when the Int is used, but the assertion below only checks
    // isFailure, which holds either way.
    val invalidPort = for {
      url <- config[String]("endpoint")
      port <- config[Int]("port")
    } yield ConnectionParams(url, port)

    assert(invalidPort.isFailure)
  }

  // You run your program and it says key “url” not found, turns out the key was
  // “endpoint”. So you change your code and re-run. Now it says the “port” key
  // was not a well-formed integer.

  // It would be nice to have all of these errors be reported simultaneously.
  // That the username can’t have dashes can be validated separately from it not
  // having special characters, as well as from the password needing to have
  // certain requirements. A misspelled (or missing) field in a config can be
  // validated separately from another field not being well-formed.

  // Enter Validated.

  // Parallel validation

  // Our goal is to report any and all errors across independent bits of data.
  // For instance, when we ask for several pieces of configuration, each
  // configuration field can be validated separately from one another. How then
  // do we enforce that the data we are working with is independent? We ask for
  // both of them up front.

  // As our running example, we will look at config parsing. Our config will be
  // represented by a Map[String, String]. Parsing will be handled by a Read
  // type class - we provide instances just for String and Int for brevity.
  trait Read[A] {
    def read(s: String): Option[A]
  }

  object Read {
    def apply[A](implicit A: Read[A]): Read[A] = A

    implicit val stringRead: Read[String] = new Read[String] {
      def read(s: String): Option[String] = Some(s)
    }

    implicit val intRead: Read[Int] = new Read[Int] {
      def read(s: String): Option[Int] =
        if (s.matches("-?[0-9]+")) Some(s.toInt)
        else None
    }
  }

  // Then we enumerate our errors - when asking for a config value, one of two
  // things can go wrong: the field is missing, or it is not well-formed with
  // regards to the expected type.
  sealed abstract class ConfigError
  final case class MissingConfig(field: String) extends ConfigError
  final case class ParseError(field: String) extends ConfigError

  // We need a data type that can represent either a successful value (a parsed
  // configuration), or an error.
  // (Shadowed local sketch of the data type; the real cats version is
  // imported right below this block.)
  {
    sealed abstract class Validated[+E, +A]

    object Validated {
      final case class Valid[+A](a: A) extends Validated[Nothing, A]
      final case class Invalid[+E](e: E) extends Validated[E, Nothing]
    }
  }

  // Note - the example continues using the cats Validated implementation.
  import cats.data.Validated
  import cats.data.Validated.{Invalid, Valid}

  case class Config(map: Map[String, String]) {
    // Missing key -> MissingConfig; present but unparseable -> ParseError.
    def parse[A : Read](key: String): Validated[ConfigError, A] =
      map.get(key) match {
        case None => Invalid(MissingConfig(key))
        case Some(value) => Read[A].read(value) match {
          case None => Invalid(ParseError(key))
          case Some(a) => Valid(a)
        }
      }
  }

  // Everything is in place to write the parallel validator. Recall that we can
  // only do parallel validation if each piece is independent. How do we enforce
  // the data is independent? By asking for all of it up front. Let’s start with
  // two pieces of data.
  def parallelValidate1[E, A, B, C](v1: Validated[E, A], v2: Validated[E, B])(f: (A, B) => C): Validated[E, C] =
    (v1, v2) match {
      case (Valid(a), Valid(b)) => Valid(f(a, b))
      case (Valid(_), i@Invalid(_)) => i
      case (i@Invalid(_), Valid(_)) => i
      case (Invalid(e1), Invalid(e2)) => ??? // deliberately unimplemented — see below
    }

  // We’ve run into a problem. In the case where both have errors, we want to
  // report both. But we have no way of combining the two errors into one error!
  // Perhaps we can put both errors into a List, but that seems needlessly
  // specific - clients may want to define their own way of combining errors.
  // How then do we abstract over a binary operation? The Semigroup type class
  // captures this idea.
  import cats.Semigroup

  def parallelValidate2[E : Semigroup, A, B, C](v1: Validated[E, A], v2: Validated[E, B])(f: (A, B) => C): Validated[E, C] =
    (v1, v2) match {
      case (Valid(a), Valid(b)) => Valid(f(a, b))
      case (Valid(_), i@Invalid(_)) => i
      case (i@Invalid(_), Valid(_)) => i
      case (Invalid(e1), Invalid(e2)) => Invalid(Semigroup[E].combine(e1, e2))
    }

  // Perfect! But.. going back to our example, we don’t have a way to combine
  // ConfigErrors. But as clients, we can change our Validated values where the
  // error can be combined, say, a List[ConfigError]. It is more common however
  // to use a NonEmptyList[ConfigError] - the NonEmptyList statically guarantees
  // we have at least one value, which aligns with the fact that if we have an
  // Invalid, then we most certainly have at least one error. This technique is
  // so common there is a convenient method on Validated called toValidatedNel
  // that turns any Validated[E, A] value to a Validated[NonEmptyList[E], A].
  // Additionally, the type alias ValidatedNel[E, A] is provided.

  // Time to parse.
  import cats.SemigroupK
  import cats.data.NonEmptyList

  case class ConnectionParams(endpoint: String, port: Int)

  val config = Config(
    Map(
      "endpoint" -> "127.0.0.1",
      "port" -> "not an int"
    )
  )

  implicit val nelSemigroup: Semigroup[NonEmptyList[ConfigError]] =
    SemigroupK[NonEmptyList].algebra[ConfigError]

  implicit val readString: Read[String] = Read.stringRead
  implicit val readInt: Read[Int] = Read.intRead

  // Any and all errors are reported!
  val v1 = parallelValidate2(
    config.parse[String]("url").toValidatedNel,
    config.parse[Int]("port").toValidatedNel
  )(ConnectionParams.apply)

  assert(v1 == Invalid(NonEmptyList.of(MissingConfig("url"), ParseError("port"))))

  val v2 = parallelValidate2(
    config.parse[String]("endpoint").toValidatedNel,
    config.parse[Int]("port").toValidatedNel
  )(ConnectionParams.apply)

  assert(v2 == Invalid(NonEmptyList.of(ParseError("port"))))

  val validConfig = Config(
    Map(
      "endpoint" -> "127.0.0.1",
      "port" -> "1234"
    )
  )

  val v3 = parallelValidate2(
    validConfig.parse[String]("endpoint").toValidatedNel,
    validConfig.parse[Int]("port").toValidatedNel
  )(ConnectionParams.apply)

  assert(v3 == Valid(ConnectionParams("127.0.0.1", 1234)))

  // Apply

  // Our parallelValidate function looks awfully like the Apply#map2 function.
  def map2[F[_], A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C] = ???

  // Which can be defined in terms of Apply#ap and Apply#map, the very functions
  // needed to create an Apply instance.

  // Can we perhaps define an Apply instance for Validated? Better yet, can we
  // define an Applicative instance?

  // Note: the example below assumes usage of the kind-projector compiler plugin
  // and will not compile if it is not being used in a project.
  import cats.Applicative

  implicit def validatedApplicative[E : Semigroup]: Applicative[Validated[E, ?]] =
    new Applicative[Validated[E, ?]] {
      def ap[A, B](f: Validated[E, A => B])(fa: Validated[E, A]): Validated[E, B] =
        (fa, f) match {
          case (Valid(a), Valid(fab)) => Valid(fab(a))
          case (i@Invalid(_), Valid(_)) => i
          case (Valid(_), i@Invalid(_)) => i
          case (Invalid(e1), Invalid(e2)) => Invalid(Semigroup[E].combine(e1, e2))
        }

      def pure[A](x: A): Validated[E, A] = Validated.valid(x)
    }

  // Awesome! And now we also get access to all the goodness of Applicative,
  // which includes map{2-22}, as well as the Cartesian syntax |@|.

  // We can now easily ask for several bits of configuration and get any and all
  // errors returned back.
  import cats.Apply
  import cats.data.ValidatedNel

  val personConfig = Config(
    Map(
      "name" -> "cat",
      "age" -> "not a number",
      "houseNumber" -> "1234",
      "lane" -> "feline street"
    )
  )

  case class Address(houseNumber: Int, street: String)
  case class Person(name: String, age: Int, address: Address)

  // Thus.
  // NOTE(review): "name" is deliberately looked up in `config` (not
  // `personConfig`), and the keys "house_number"/"street" do not exist in
  // personConfig — this is what produces the four errors asserted below.
  val personFromConfig: ValidatedNel[ConfigError, Person] =
    Apply[ValidatedNel[ConfigError, ?]].map4(config.parse[String]("name").toValidatedNel,
      personConfig.parse[Int]("age").toValidatedNel,
      personConfig.parse[Int]("house_number").toValidatedNel,
      personConfig.parse[String]("street").toValidatedNel) {
      case (name, age, houseNumber, street) => Person(name, age, Address(houseNumber, street))
    }

  assert(personFromConfig == Invalid(
    NonEmptyList.of(
      MissingConfig("street"),
      MissingConfig("house_number"),
      ParseError("age"),
      MissingConfig("name")
    )
  ))

  // Of flatMaps and Eithers

  // Option has flatMap, Either has flatMap, where’s Validated’s? Let’s try to
  // implement it - better yet, let’s implement the Monad type class.
  {
    import cats.Monad

    implicit def validatedMonad[E]: Monad[Validated[E, ?]] =
      new Monad[Validated[E, ?]] {
        def flatMap[A, B](fa: Validated[E, A])(f: A => Validated[E, B]): Validated[E, B] =
          fa match {
            case Valid(a) => f(a)
            case i@Invalid(_) => i
          }

        def pure[A](x: A): Validated[E, A] = Valid(x)

        @annotation.tailrec
        def tailRecM[A, B](a: A)(f: A => Validated[E, Either[A, B]]): Validated[E, B] =
          f(a) match {
            case Valid(Right(b)) => Valid(b)
            case Valid(Left(a)) => tailRecM(a)(f)
            case i@Invalid(_) => i
          }
      }

    // Note that all Monad instances are also Applicative instances, where ap is
    // defined as
    /**
    trait Monad[F[_]] {
      def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B]
      def pure[A](x: A): F[A]

      def map[A, B](fa: F[A])(f: A => B): F[B] =
        flatMap(fa)(f.andThen(pure))

      def ap[A, B](fa: F[A])(f: F[A => B]): F[B] =
        flatMap(fa)(a => map(f)(fab => fab(a)))
    }
    **/

    // However, the ap behavior defined in terms of flatMap does not behave the
    // same as that of our ap defined above. Observe:
    val v = validatedMonad.tuple2(
      Validated.invalidNel[String, Int]("oops"),
      Validated.invalidNel[String, Double]("uh oh")
    )

    assert(v == Invalid(NonEmptyList.of("oops")))

    // This one short circuits! Therefore, if we were to define a Monad (or
    // FlatMap) instance for Validated we would have to override ap to get the
    // behavior we want. But then the behavior of flatMap would be inconsistent
    // with that of ap, not good. Therefore, Validated has only an Applicative
    // instance.
  }

  // Validated vs Either

  // We’ve established that an error-accumulating data type such as Validated
  // can’t have a valid Monad instance. Sometimes the task at hand requires
  // error-accumulation. However, sometimes we want a monadic structure that we
  // can use for sequential validation (such as in a for-comprehension). This
  // leaves us in a bit of a conundrum.

  // Cats has decided to solve this problem by using separate data structures
  // for error-accumulation (Validated) and short-circuiting monadic behavior
  // (Either).

  // If you are trying to decide whether you want to use Validated or Either, a
  // simple heuristic is to use Validated if you want error-accumulation and to
  // otherwise use Either.

  // Sequential Validation

  // If you do want error accumulation but occasionally run into places where
  // sequential validation is needed, then Validated provides a couple methods
  // that may be helpful.

  // andThen

  // The andThen method is similar to flatMap (such as Either.flatMap). In the
  // case of success, it passes the valid value into a function that returns a
  // new Validated instance.
  val houseNumber = config.parse[Int]("house_number").andThen{ n =>
    if (n >= 0) Validated.valid(n)
    else Validated.invalid(ParseError("house_number"))
  }

  assert(houseNumber == Invalid(MissingConfig("house_number")))

  // withEither

  // The withEither method allows you to temporarily turn a Validated instance
  // into an Either instance and apply it to a function.
  def positive(field: String, i: Int): Either[ConfigError, Int] = {
    if (i >= 0) Right(i)
    else Left(ParseError(field))
  }

  // Thus.
  val houseNumber2 = config.parse[Int]("house_number").withEither{
    either: Either[ConfigError, Int] =>
      either.flatMap{ i => positive("house_number", i) }
  }

  assert(houseNumber2 == Invalid(MissingConfig("house_number")))
}
carwynellis/cats-examples
src/main/scala/cats/examples/datatypes/ValidatedExample.scala
Scala
mit
14,118
package com.arcusys.valamis.hook.utils

import java.util.Locale

import com.liferay.portal.kernel.language.LanguageUtil
import com.liferay.portal.kernel.log.LogFactoryUtil
import com.liferay.portal.kernel.template.TemplateConstants._
import com.liferay.portal.kernel.util.{FileUtil, LocaleUtil}
import com.liferay.portal.model._
import com.liferay.portal.service._
import com.liferay.portal.service.permission.PortletPermissionUtil
import com.liferay.portlet.dynamicdatamapping.model.{DDMStructure, DDMStructureConstants, DDMTemplate, DDMTemplateConstants}
import com.liferay.portlet.dynamicdatamapping.service.{DDMStructureLocalServiceUtil, DDMTemplateLocalServiceUtil}
import com.liferay.portlet.journal.model.JournalArticle

import scala.collection.JavaConverters._
import scala.util.control.NonFatal

/**
 * Created by Igor Borisov on 12.05.16.
 *
 * Liferay provisioning helpers: idempotently create/update DDM structures and
 * templates, sites, layouts (pages) and portlet placements. Most methods
 * follow a fetch-then-update-or-add pattern so repeated deployments are safe.
 */
object Utils {

  private val log = LogFactoryUtil.getLog(this.getClass)

  /** Convenience overload: adds a DDM structure with a single template. */
  def addStructureWithTemplate(
    groupId: Long,
    userId: Long,
    structureInfo: StructureInfo,
    templateInfo: TemplateInfo): Unit = {
    addStructureWithTemplates(groupId, userId, structureInfo, Array(templateInfo))
  }

  /**
   * Creates (or updates) a DDM structure from "structures/<key>.xml" on the
   * classpath, then attaches each template from "templates/<key>.ftl".
   * Names/descriptions are localised for the portal's default locale only.
   */
  def addStructureWithTemplates(groupId: Long,
                                userId: Long,
                                structureInfo: StructureInfo,
                                templates: Array[TemplateInfo]): Unit = {
    val defaultLocale = LocaleUtil.getDefault

    val structureId = addStructure(
      groupId,
      userId,
      structureInfo.key,
      Map(defaultLocale -> LanguageUtil.get(defaultLocale, structureInfo.name)),
      Map(defaultLocale -> LanguageUtil.get(defaultLocale, structureInfo.description)),
      getFileAsString(s"structures/${structureInfo.key}.xml")
    )

    templates.foreach { template =>
      addStructureTemplate(
        groupId,
        userId,
        structureId,
        template.key,
        Map(defaultLocale -> LanguageUtil.get(defaultLocale, template.name)),
        Map(defaultLocale -> LanguageUtil.get(defaultLocale, template.description)),
        getFileAsString(s"templates/${template.key}.ftl"),
        LANG_TYPE_FTL
      )
    }
  }

  /** Attaches a template to a DDM structure (classNameId = DDMStructure). */
  private def addStructureTemplate(
    groupId: Long,
    userId: Long,
    structureId: Long,
    templateKey: String,
    nameMap: Map[Locale, String],
    descriptionMap: Map[Locale, String],
    body: String,
    langType: String) {
    val templateClassNameId = ClassNameLocalServiceUtil.getClassNameId(classOf[DDMStructure])

    addTemplate(
      groupId,
      userId,
      Some(structureId),
      templateKey,
      nameMap,
      descriptionMap,
      body,
      langType,
      templateClassNameId)
  }

  /**
   * Upserts a DDM display template: updates the existing template when one
   * with the same key exists in the group, otherwise adds a new one.
   * NOTE(review): the `null` positional arguments follow the Liferay 6.x
   * service signatures (type, mode, small image etc.) — keep argument order
   * exactly as-is when touching this code.
   */
  def addTemplate(
    groupId: Long,
    userId: Long,
    classPK: Option[Long],
    templateKey: String,
    nameMap: Map[Locale, String],
    descriptionMap: Map[Locale, String],
    body: String,
    langType: String,
    templateClassNameId: Long
  ) = {
    val serviceContext = new ServiceContext
    serviceContext.setAddGuestPermissions(true)

    DDMTemplateLocalServiceUtil.fetchTemplate(groupId, templateClassNameId, templateKey, true) match {
      case template: DDMTemplate =>
        log.info("Existing template found with id: " + template.getTemplateId)
        DDMTemplateLocalServiceUtil.updateTemplate(
          template.getTemplateId,
          classPK getOrElse 0,
          nameMap.asJava,
          descriptionMap.asJava,
          DDMTemplateConstants.TEMPLATE_TYPE_DISPLAY,
          null,
          langType,
          body,
          false,
          false,
          null,
          null,
          serviceContext)
        log.info("Template " + template.getTemplateId + " updated successfully.")
      case _ =>
        log.info("Could not find an existing template. Adding a new template with id: " + templateKey +
          " for structure with id: " + classPK)
        DDMTemplateLocalServiceUtil.addTemplate(
          userId,
          groupId,
          templateClassNameId,
          classPK getOrElse 0,
          templateKey,
          nameMap.asJava,
          descriptionMap.asJava,
          DDMTemplateConstants.TEMPLATE_TYPE_DISPLAY,
          null,
          langType,
          body,
          false,
          false,
          null,
          null,
          serviceContext)
    }
  }

  /**
   * Upserts a JournalArticle-bound DDM structure and returns its id.
   * Existing structures get their XSD replaced; missing ones are created.
   */
  def addStructure(groupId: Long,
                   userId: Long,
                   structureKey: String,
                   nameMap: Map[Locale, String],
                   descriptionMap: Map[Locale, String],
                   xsd: String): Long = {
    val serviceContext = new ServiceContext
    serviceContext.setAddGuestPermissions(true)

    val structureClassNameId = ClassNameLocalServiceUtil.getClassNameId(classOf[JournalArticle])

    val structure = DDMStructureLocalServiceUtil.fetchStructure(groupId, structureClassNameId, structureKey) match {
      case structure: DDMStructure =>
        log.info("Existing structure found with id: " + structure.getStructureId)
        structure.setXsd(xsd)
        DDMStructureLocalServiceUtil.updateStructure(
          structure.getStructureId,
          DDMStructureConstants.DEFAULT_PARENT_STRUCTURE_ID,
          nameMap.asJava,
          descriptionMap.asJava,
          xsd,
          serviceContext)
        log.info("Structure " + structure.getStructureId + " updated successfully.")
        structure
      case _ =>
        log.info("Could not find an existing structure. Adding a new structure with id: " + structureKey)
        DDMStructureLocalServiceUtil.addStructure(
          userId,
          groupId,
          DDMStructureConstants.DEFAULT_PARENT_STRUCTURE_ID,
          structureClassNameId,
          structureKey,
          nameMap.asJava,
          descriptionMap.asJava,
          xsd,
          "xml",
          DDMStructureConstants.TYPE_DEFAULT,
          serviceContext)
    }

    structure.getStructureId
  }

  /**
   * Reads a classpath resource into a String.
   * NOTE(review): uses the platform default charset via new String(bytes) —
   * confirm resources are encoded compatibly (usually UTF-8).
   */
  def getFileAsString(path: String): String = {
    val classLoader = Thread.currentThread().getContextClassLoader
    val is = classLoader.getResourceAsStream(path)
    new String(FileUtil.getBytes(is))
  }

  /** Finds a layout (page) by friendly URL; None when it does not exist. */
  def getLayout(siteGroupId: Long, isPrivate: Boolean, friendlyUrl: String): Option[Layout] = {
    Option(LayoutLocalServiceUtil.fetchLayoutByFriendlyURL(siteGroupId, isPrivate, friendlyUrl))
  }

  /** Deletes a layout by friendly URL; logs (debug) when it does not exist. */
  def removeLayout(siteGroupId: Long, isPrivate: Boolean, friendlyUrl: String) {
    val layout = getLayout(siteGroupId, isPrivate, friendlyUrl)
    layout match {
      case Some(l) => LayoutLocalServiceUtil.deleteLayout(l)
      case None =>
        log.debug("Cannot remove layout - layout does not exist")
        log.debug(s"groupId: $siteGroupId private: $isPrivate friendlyUrl: $friendlyUrl")
    }
  }

  /** Creates a new portlet-type page with empty title/description, visible. */
  def addLayout(siteGroupId: Long,
                userId: Long,
                layoutName: String,
                isPrivate: Boolean,
                friendlyURL: String,
                parentId: Long): Layout = {
    val serviceContext = new ServiceContext
    serviceContext.setAddGuestPermissions(true)

    val title = ""
    val description = ""
    val layoutType = LayoutConstants.TYPE_PORTLET
    val isHidden = false

    LayoutLocalServiceUtil.addLayout(
      userId,
      siteGroupId,
      isPrivate,
      parentId,
      layoutName,
      title,
      description,
      layoutType,
      isHidden,
      friendlyURL,
      serviceContext
    )
  }

  /** Creates/updates every page in `layouts` (recursively, via addLayout). */
  def setupPages(siteId: Long, userId: Long, isSitePrivate: Boolean, layouts: Seq[PageLayout]) {
    log.info("Create site pages")
    layouts foreach { layout =>
      addLayout(siteId, userId, isSitePrivate, layout)
    }
  }

  /**
   * Upserts a page described by PageLayout, applies its layout template and
   * recurses into its children. Failures are logged and swallowed (NonFatal)
   * so one bad page does not abort the whole site setup.
   */
  def addLayout(
    siteId: Long,
    userId: Long,
    isSitePrivate: Boolean,
    pageLayout: PageLayout,
    parentId: Long = LayoutConstants.DEFAULT_PARENT_LAYOUT_ID): Unit = {
    try {
      log.info(s"Update layout ${pageLayout.name}")
      val serviceContext = new ServiceContext
      serviceContext.setAddGuestPermissions(true)

      val layout = getLayout(siteId, pageLayout.isPrivate, pageLayout.friendlyUrl) match {
        case Some(l) =>
          log.info(s"Found existed layout ${pageLayout.name}. Existed layout will be updated")
          Option(l)
        case None =>
          log.info(s"Layout ${pageLayout.name} does not exist. New layout will be created")
          Option(addLayout(siteId, userId, pageLayout.name, isSitePrivate, pageLayout.friendlyUrl, parentId))
      }

      layout foreach { l =>
        val layoutType = l.getLayoutType.asInstanceOf[LayoutTypePortlet]
        layoutType.setLayoutTemplateId(userId, pageLayout.templateId)
        Utils.updateLayout(l)
        pageLayout.children
          .foreach(childLayout => addLayout(siteId, userId, isSitePrivate, childLayout, l.getLayoutId))
      }
    } catch {
      // Best-effort: log and continue with the remaining layouts.
      case NonFatal(e) => log.warn(s"Failed to add layout ${pageLayout.name} to group $siteId")
    }
  }

  /** Creates an open site group with default membership settings. */
  def addSite(userId: Long, name: String, friendlyURL: String): Group = {
    val groupType = GroupConstants.TYPE_SITE_OPEN
    val parentGroupId = GroupConstants.DEFAULT_PARENT_GROUP_ID
    val liveGroupId = GroupConstants.DEFAULT_LIVE_GROUP_ID
    val membershipRestriction = GroupConstants.DEFAULT_MEMBERSHIP_RESTRICTION
    val description = ""
    val manualMembership = true
    val isSite = true
    val isActive = true

    val serviceContext = new ServiceContext
    serviceContext.setAddGuestPermissions(true)

    GroupLocalServiceUtil.addGroup(
      userId,
      parentGroupId,
      classOf[Group].getName,
      0, //classPK
      liveGroupId,
      name,
      description,
      groupType,
      manualMembership,
      membershipRestriction,
      friendlyURL,
      isSite,
      isActive,
      serviceContext)
  }

  /** Applies a theme to both the public and private layout sets of a site. */
  def setupTheme(siteGroupId: Long, themeId: String): LayoutSet = {
    LayoutSetLocalServiceUtil
      .updateLookAndFeel(siteGroupId, false, themeId, "", "", false)
    LayoutSetLocalServiceUtil
      .updateLookAndFeel(siteGroupId, true, themeId, "", "", false)
  }

  //portlets: Map[String, String] -> ( portletId, columnId )
  def addPortletsToLayout(layout: Layout, portlets: Map[String, String]):Unit = {
    portlets.foreach {
      case (portletId, columnId) => Utils.addPortletById(layout, portletId, columnId)
    }
  }

  /** Places a portlet in a layout column unless it is already present. */
  def addPortletById(layout: Layout, portletId: String, columnId: String): Unit = {
    log.info(s"Add portlet $portletId to ${layout.getNameCurrentValue} at $columnId")

    val layoutTypePortlet = layout.getLayoutType.asInstanceOf[LayoutTypePortlet]

    if (!layoutTypePortlet.hasPortletId(portletId)) {
      val newPortletId = layoutTypePortlet.addPortletId(0, portletId, columnId, -1, false)
      addResources(layout, newPortletId)
      updateLayout(layout)
    }
  }

  /** Persists a layout's (possibly mutated) type settings. */
  def updateLayout(layout: Layout) {
    LayoutLocalServiceUtil.updateLayout(layout.getGroupId,
      layout.isPrivateLayout,
      layout.getLayoutId,
      layout.getTypeSettings)
  }

  /** Adds default portlet resource permissions for a placed portlet. */
  def addResources(layout: Layout, portletId: String) {
    val rootPortletId = PortletConstants.getRootPortletId(portletId)
    val portletPrimaryKey = PortletPermissionUtil.getPrimaryKey(layout.getPlid, portletId)
    ResourceLocalServiceUtil.addResources(
      layout.getCompanyId,
      layout.getGroupId,
      0, //userId
      rootPortletId,
      portletPrimaryKey,
      true,
      true,
      true)
  }

  /** True when a page with the given friendly URL exists in the group. */
  def hasPage(groupId: Long, isPrivate: Boolean, friendlyUrl: String): Boolean = {
    getLayout(groupId, isPrivate, friendlyUrl).isDefined
  }

  /** Adds the given portlets to an existing page; logs when the page is missing. */
  def updatePortletsForLayout(siteId: Long, pageLayout: PageLayout, portlets: Map[String, String]):Unit = {
    val layout = Utils.getLayout(siteId, pageLayout.isPrivate, pageLayout.friendlyUrl)
    layout match {
      case Some(l) => Utils.addPortletsToLayout(l, portlets)
      case None =>
        log.info(s"Cannot add portlets to layout - layout ${pageLayout.name} does not exist")
    }
  }
}
igor-borisov/valamis
hook-utils/src/main/scala/com/arcusys/valamis/hook/utils/Utils.scala
Scala
gpl-3.0
12,449
import scala.io.Source
import java.io.PrintStream
import Math._

/**
 * Google Code Jam style template: reads T problems from an input file,
 * runs doProblem on each, and prints "Case #i: <answer>" to stdout and an
 * output file. Customize Input, parseInput and doProblem per problem.
 */
object Solver {
  type Input = String

  //return remaining input and one parsed out problem; t will not be Nil
  def parseInput(t: Seq[String]): (Seq[String], Input) = {
    //val first = t.head.split(' ').map(x => x.toInt)
    val input = t(0)
    (t.drop(1), input)
  }

  // Placeholder algorithm: identity. Replace per problem.
  def doProblem(input: Input) = {
    //...
    //algorithm
    //...
    input
  }

  // Renders an answer in Code Jam format: doubles with 7 decimals,
  // sequences space-separated (recursively), everything else via toString.
  def jamFormat(obj: Any): String = obj match {
    case d: Double => "%1.7f" format d
    case s: Seq[_] => s.map(jamFormat).mkString(" ")
    case a => a.toString
  }

  // Writes "Case #i: <answer>" to every output stream (stdout + file).
  def display(i :Int, a :Any, os: Seq[PrintStream]) {
    os.foreach(_.println(s"Case #$i: ${jamFormat(a)}"))
  }

  // Recursively consumes the raw lines, accumulating one Input per case.
  // Accumulates in reverse (prepend) and reverses once at the end.
  def splitInput(l: Seq[String], v: Seq[Input]): Seq[Input] = {
    if(l.length == 0) v.reverse
    else {
      val (remaining, input) = parseInput(l)
      splitInput(remaining, input +: v)
    }
  }

  def main(args: Array[String]) {
    val ifname = if(args.length >= 1) args(0) else "input.txt"
    val ofname = if(args.length >= 2) args(1) else "output.txt"
    val outfs = Seq(System.out, new PrintStream(ofname))

    //dropped line is the "T test cases follow" line
    val source = Source.fromFile(ifname).getLines().drop(1).toSeq
    val input = splitInput(source, Nil)

    //change to "input.par" to try parallel execution
    val output = input.map(doProblem)

    // Case numbers are 1-based.
    output.zipWithIndex foreach { case(a,i) => display(i+1, a, outfs) }
  }
}

import Solver._

/**
 * Self-test harness: runs doProblem over testInput and diffs the results
 * against the answers embedded in testOutput.
 */
object Test{
  // input without the first N-line-follow line
  val testInput: String = """
1
"""

  val testOutput: String = """
Case #1: 1
"""

  // Matches the "Case #N: " prefixes; the text between two prefixes is one
  // expected answer.
  val caseRegex = """Case #\d*: """.r

  // Slices testOutput into the expected answer for each case, trimmed.
  def expected = {
    val result = collection.mutable.Buffer[String]()
    val itr = caseRegex.findAllIn(testOutput)
    if(itr.hasNext){
      var startIdx = itr.end
      itr.next
      while(itr.hasNext){
        result += testOutput.slice(startIdx, itr.start).trim
        startIdx = itr.end
        itr.next
      }
      // Last case runs to the end of testOutput.
      result += testOutput.slice(startIdx, testOutput.size).trim
    }
    result
  }

  def main(args: Array[String]) {
    val input = splitInput(testInput.split("\n").drop(1), Nil)
    val output = input.map(doProblem).map(jamFormat)
    var error = false
    for(i <- 0 until input.size){
      if(output(i) != expected(i)){
        error = true
        println(s"""|Error in case $i:
                    | expected: ${expected(i)}
                    | result : ${output(i)}""".stripMargin)
      }
    }
    if(!error) println("Tests passed!")
  }
}
BrettAM/AlgorithmTemplates
scalaTemplate/Solver.scala
Scala
apache-2.0
2,842
/**
 * Copyright 2016, deepsense.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.deepsense.deeplang.utils

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.scalatest.Matchers

import io.deepsense.deeplang.doperables.dataframe.DataFrame

/**
 * ScalaTest matchers for comparing DataFrames by schema and content.
 * Comparison is semantic (schemas rendered via treeString, rows collected
 * to the driver) — suitable for test-sized data only.
 */
trait DataFrameMatchers extends Matchers {

  /**
   * Asserts two DataFrames are equal: schemas first (optionally ignoring
   * nullability), then rows (optionally ignoring order).
   */
  def assertDataFramesEqual(
      actualDf: DataFrame,
      expectedDf: DataFrame,
      checkRowOrder: Boolean = true,
      checkNullability: Boolean = true): Unit = {
    // Checks only semantic identity, not objects location in memory
    assertSchemaEqual(
      actualDf.sparkDataFrame.schema,
      expectedDf.sparkDataFrame.schema,
      checkNullability)
    // NOTE(review): collect() materialises both frames on the driver.
    val collectedRows1: Array[Row] = actualDf.sparkDataFrame.collect()
    val collectedRows2: Array[Row] = expectedDf.sparkDataFrame.collect()
    if (checkRowOrder) {
      collectedRows1 shouldBe collectedRows2
    } else {
      collectedRows1 should contain theSameElementsAs collectedRows2
    }
  }

  /**
   * Compares schemas; when checkNullability is false, every field's
   * nullable flag is normalised to false on both sides before comparing.
   */
  def assertSchemaEqual(
      actualSchema: StructType,
      expectedSchema: StructType,
      checkNullability: Boolean): Unit = {
    val (actual, expected) = if (checkNullability) {
      (actualSchema, expectedSchema)
    } else {
      val actualNonNull = StructType(actualSchema.map(_.copy(nullable = false)))
      val expectedNonNull = StructType(expectedSchema.map(_.copy(nullable = false)))
      (actualNonNull, expectedNonNull)
    }
    assertSchemaEqual(actual, expected)
  }

  // Textual schema comparison: treeString captures field names, types,
  // nullability and nesting in a readable diff.
  def assertSchemaEqual(actualSchema: StructType, expectedSchema: StructType): Unit = {
    actualSchema.treeString shouldBe expectedSchema.treeString
  }
}

// Standalone instance so tests can `import DataFrameMatchers._`-style use it
// without mixing the trait in.
object DataFrameMatchers extends DataFrameMatchers
deepsense-io/seahorse-workflow-executor
deeplang/src/it/scala/io/deepsense/deeplang/utils/DataFrameMatchers.scala
Scala
apache-2.0
2,211
package BIDMach.models import BIDMat.{Mat,SBMat,CMat,DMat,FMat,IMat,HMat,GMat,GIMat,GSMat,SMat,SDMat} import BIDMat.MatFunctions._ import BIDMat.SciFunctions._ import edu.berkeley.bid.CUMAT import BIDMach.datasources._ import BIDMach.updaters._ import BIDMach.mixins._ import BIDMach._ /** * Factorization Machine Model. * This class computes a factorization machine model a la * * Steffen Rendle (2012): Factorization Machines with libFM, in ACM Trans. Intell. Syst. Technol., 3(3), May. * * We depart slightly from the original FM formulation by including both positive definite and negative definite factors. * While the positive definite factor can approximate any matrix in the limit, using both positive and negative definite factors * should give better performance for a fixed number of factors. This is what we observed on several datasets. * With both positive definite and negative definite factors, there should also be no need to remove diagonal terms, since * the positive and negative factorizations already form a conventional eigendecomposition (a best least-squares fit for a given * number of factors) of the matrix of second-order interactions. * * The types of model are given by the values of opts.links (IMat) and are the same as for GLM models. They are: - 0 = linear model (squared loss) - 1 = logistic model (logistic loss) - 2 = absolute logistic (hinge loss on logistic prediction) - 3 = SVM model (hinge loss) * * Options are: - links: an IMat whose nrows should equal the number of targets. Values as above. Can be different for different targets. - iweight: an FMat typically used to select a weight row from the input. i.e. iweight = 0,1,0,0,0 uses the second * row of input data as weights to be applied to input samples. The iweight field should be 0 in mask. - dim1: Dimension of the positive definite factor - dim2: Dimension of the negative definite factor - strictFM: the exact FM model zeros the diagonal terms of the factorization. 
As mentioned above, this probably isn't needed * in our version of the model, but its available. * * Inherited from Regression Model: - rmask: FMat, optional, 0-1-valued. Used to ignore certain input rows (which are targets or weights). * Zero value in an element will ignore the corresponding row. - targets: FMat, optional, 0-1-valued. ntargs x nfeats. Used to specify which input features corresponding to targets. - targmap: FMat, optional, 0-1-valued. nntargs x ntargs. Used to replicate actual targets, e.g. to train multiple models * (usually with different parameters) for the same target. * * Some convenience functions for training: * {{{ * val (mm, opts) = FM.learner(a, d) // On an input matrix a including targets (set opts.targets to specify them), * // learns an FM model of type d. * // returns the model (nn) and the options class (opts). * val (mm, opts) = FM.learner(a, c, d) // On an input matrix a and target matrix c, learns an FM model of type d. * // returns the model (nn) and the options class (opts). * val (nn, nopts) = FM.predictor(model, ta, pc, d) // constructs a prediction learner from an existing model. returns the learner and options. * // pc should be the same dims as the test label matrix, and will contain results after nn.predict * val (mm, mopts, nn, nopts) = FM.learner(a, c, ta, pc, d) // a = training data, c = training labels, ta = test data, pc = prediction matrix, d = type. * // returns a training learner mm, with options mopts. Also returns a prediction model nn with its own options. * // typically set options, then do mm.train; nn.predict with results in pc. * val (mm, opts) = learner(ds) // Build a learner for a general datasource ds (e.g. a files data source). 
* }}} * */ class FM(override val opts:FM.Opts = new FM.Options) extends RegressionModel(opts) { var mylinks:Mat = null; var iweight:Mat = null; val linkArray = GLM.linkArray var totflops = 0L var mv:Mat = null var mm1:Mat = null var mm2:Mat = null var uv:Mat = null var um1:Mat = null var um2:Mat = null var xs:Mat = null var ulim:Mat = null var llim:Mat = null override def copyTo(mod:Model) = { super.copyTo(mod); val rmod = mod.asInstanceOf[FM]; rmod.mylinks = mylinks; rmod.iweight = iweight; rmod.mv = mv; rmod.mm1 = mm1; rmod.mm2 = mm2; rmod.uv = uv; rmod.um1 = um2; rmod.um2 = um2; } override def init() = { super.init() mylinks = if (useGPU) GIMat(opts.links) else opts.links iweight = if (opts.iweight.asInstanceOf[AnyRef] != null) convertMat(opts.iweight) else null if (refresh) { mv = modelmats(0); mm1 = convertMat(normrnd(0, opts.initscale/math.sqrt(opts.dim1).toFloat, opts.dim1, mv.ncols)); mm2 = convertMat(normrnd(0, opts.initscale/math.sqrt(opts.dim2).toFloat, opts.dim2, mv.ncols)); ulim = convertMat(row(opts.lim)) llim = convertMat(row(-opts.lim)) setmodelmats(Array(mv, mm1, mm2)); if (mask.asInstanceOf[AnyRef] != null) { mv ~ mv ∘ mask; mm1 ~ mm1 ∘ mask; mm2 ~ mm2 ∘ mask; } } (0 until 3).map((i) => modelmats(i) = convertMat(modelmats(i))) uv = updatemats(0) um1 = uv.zeros(opts.dim1, uv.ncols) um2 = uv.zeros(opts.dim2, uv.ncols) updatemats = Array(uv, um1, um2) totflops = 0L for (i <- 0 until opts.links.length) { totflops += linkArray(opts.links(i)).fnflops } } def mupdate(in:Mat, ipass:Int, pos:Long) = { val targs = targets * in min(targs, 1f, targs) val alltargs = if (targmap.asInstanceOf[AnyRef] != null) targmap * targs else targs val dweights = if (iweight.asInstanceOf[AnyRef] != null) iweight * in else null mupdate3(in, alltargs, dweights) } def mupdate2(in:Mat, targ:Mat, ipass:Int, pos:Long) = mupdate3(in, targ, null); // Update the positive/negative factorizations def mupdate3(in:Mat, targ:Mat, dweights:Mat) = { val ftarg = full(targ); val vt1 = mm1 * 
in val vt2 = mm2 * in val eta = mv * in + (vt1 ∙ vt1) - (vt2 ∙ vt2) if (opts.strictFM) { // Strictly follow the FM formula (remove diag terms) vs. let linear predictor cancel them. xs = in.copy (xs.contents ~ xs.contents) ∘ xs.contents // xs is the element-wise square of in. eta ~ eta - (((mm1 ∘ mm1) - (mm2 ∘ mm2)) * xs) } if (opts.lim > 0) { max(eta, llim, eta); min(eta, ulim, eta); } GLM.preds(eta, eta, mylinks, totflops) GLM.derivs(eta, ftarg, eta, mylinks, totflops) if (dweights.asInstanceOf[AnyRef] != null) eta ~ eta ∘ dweights uv ~ eta *^ in um1 ~ ((eta * 2f) ∘ vt1) *^ in um2 ~ ((eta * -2f) ∘ vt2) *^ in if (opts.strictFM) { val xeta = (eta * 2f) *^ xs um1 ~ um1 - (mm1 ∘ xeta); um2 ~ um2 + (mm2 ∘ xeta); } if (mask.asInstanceOf[AnyRef] != null) { uv ~ uv ∘ mask; um1 ~ um1 ∘ mask; um2 ~ um2 ∘ mask; } } // Update a simple factorization A*B for the second order terms. def mupdate4(in:Mat, targ:Mat, dweights:Mat) = { val ftarg = full(targ); val vt1 = mm1 * in val vt2 = mm2 * in val eta = mv * in + (vt1 ∙ vt2) GLM.preds(eta, eta, mylinks, totflops) GLM.derivs(eta, ftarg, eta, mylinks, totflops) if (dweights.asInstanceOf[AnyRef] != null) eta ~ eta ∘ dweights uv ~ eta *^ in um1 ~ (eta ∘ vt2) *^ in um2 ~ (eta ∘ vt1) *^ in if (mask.asInstanceOf[AnyRef] != null) { uv ~ uv ∘ mask; um1 ~ um1 ∘ mask; um2 ~ um2 ∘ mask; } } def meval(in:Mat):FMat = { val targs = targets * in min(targs, 1f, targs) val alltargs = if (targmap.asInstanceOf[AnyRef] != null) targmap * targs else targs val dweights = if (iweight.asInstanceOf[AnyRef] != null) iweight * in else null meval3(in, alltargs, dweights) } def meval2(in:Mat, targ:Mat):FMat = meval3(in, targ, null) // Evaluate the positive/negative factorizations def meval3(in:Mat, targ:Mat, dweights:Mat):FMat = { val ftarg = full(targ) val vt1 = mm1 * in val vt2 = mm2 * in val eta = mv * in + (vt1 dot vt1) - (vt2 dot vt2) if (opts.strictFM) { in.contents ~ in.contents ∘ in.contents; eta ~ eta - ((mm1 ∘ mm1) * in); eta ~ eta + ((mm2 ∘ mm2) * 
in); } if (opts.lim > 0) { max(eta, - opts.lim, eta) min(eta, opts.lim, eta) } GLM.preds(eta, eta, mylinks, totflops) val v = GLM.llfun(eta, ftarg, mylinks, totflops) if (putBack >= 0) {targ <-- eta} if (dweights.asInstanceOf[AnyRef] != null) { FMat(sum(v ∘ dweights, 2) / sum(dweights)) } else { FMat(mean(v, 2)) } } // evaluate a simple A*B factorization of the interactions. def meval4(in:Mat, targ:Mat, dweights:Mat):FMat = { val ftarg = full(targ) val vt1 = mm1 * in val vt2 = mm2 * in val eta = mv * in + (vt1 dot vt2) GLM.preds(eta, eta, mylinks, totflops) val v = GLM.llfun(eta, ftarg, mylinks, totflops) if (putBack >= 0) {targ <-- eta} if (dweights.asInstanceOf[AnyRef] != null) { FMat(sum(v ∘ dweights, 2) / sum(dweights)) } else { FMat(mean(v, 2)) } } } object FM { trait Opts extends GLM.Opts { var strictFM = false; var dim1 = 32 var dim2 = 32 var initscale = 0.1f } class Options extends Opts {} def mkFMModel(fopts:Model.Opts) = { new FM(fopts.asInstanceOf[FM.Opts]) } def mkUpdater(nopts:Updater.Opts) = { new ADAGrad(nopts.asInstanceOf[ADAGrad.Opts]) } def mkRegularizer(nopts:Mixin.Opts):Array[Mixin] = { Array(new L1Regularizer(nopts.asInstanceOf[L1Regularizer.Opts])) } class LearnOptions extends Learner.Options with FM.Opts with MatDS.Opts with ADAGrad.Opts with L1Regularizer.Opts def learner(mat0:Mat, d:Int = 0) = { val opts = new LearnOptions opts.batchSize = math.min(10000, mat0.ncols/30 + 1) val nn = new Learner( new MatDS(Array(mat0:Mat), opts), new FM(opts), mkRegularizer(opts), new ADAGrad(opts), opts) (nn, opts) } def learner(mat0:Mat):(Learner, LearnOptions) = learner(mat0, 0) def learner(mat0:Mat, targ:Mat, d:Int) = { val opts = new LearnOptions opts.batchSize = math.min(10000, mat0.ncols/30 + 1) if (opts.links == null) opts.links = izeros(targ.nrows,1) opts.links.set(d) val nn = new Learner( new MatDS(Array(mat0, targ), opts), new FM(opts), mkRegularizer(opts), new ADAGrad(opts), opts) (nn, opts) } def learner(mat0:Mat, targ:Mat):(Learner, 
LearnOptions) = learner(mat0, targ, 0) // This function constructs a learner and a predictor. def learner(mat0:Mat, targ:Mat, mat1:Mat, preds:Mat, d:Int):(Learner, LearnOptions, Learner, LearnOptions) = { val mopts = new LearnOptions; val nopts = new LearnOptions; mopts.lrate = row(1f, 0.1f, 0.1f) mopts.batchSize = math.min(10000, mat0.ncols/30 + 1) mopts.autoReset = false if (mopts.links == null) mopts.links = izeros(targ.nrows,1) nopts.links = mopts.links mopts.links.set(d) nopts.batchSize = mopts.batchSize nopts.putBack = 1 val model = new FM(mopts) val mm = new Learner( new MatDS(Array(mat0, targ), mopts), model, mkRegularizer(mopts), new ADAGrad(mopts), mopts) val nn = new Learner( new MatDS(Array(mat1, preds), nopts), model, null, null, nopts) (mm, mopts, nn, nopts) } // This function constructs a predictor from an existing model def predictor(model:Model, mat1:Mat, preds:Mat):(Learner, LearnOptions) = { val mod = model.asInstanceOf[FM]; val mopts = mod.opts; val nopts = new LearnOptions; nopts.batchSize = math.min(10000, mat1.ncols/30 + 1) nopts.links = mopts.links.copy; nopts.putBack = 1; val newmod = new FM(nopts); newmod.refresh = false model.copyTo(newmod) val nn = new Learner( new MatDS(Array(mat1, preds), nopts), newmod, null, null, nopts) (nn, nopts) } class FMOptions extends Learner.Options with FM.Opts with ADAGrad.Opts with L1Regularizer.Opts // A learner that uses a general data source (e.g. a files data source). // The datasource options (like batchSize) need to be set externally. 
def learner(ds:DataSource):(Learner, FMOptions) = { val mopts = new FMOptions; mopts.lrate = row(0.01f, 0.001f, 0.001f) mopts.autoReset = false val model = new FM(mopts) val mm = new Learner( ds, model, mkRegularizer(mopts), new ADAGrad(mopts), mopts) (mm, mopts) } def learnBatch(mat0:Mat, d:Int) = { val opts = new LearnOptions opts.batchSize = math.min(100000, mat0.ncols/30 + 1) opts.links.set(d) val nn = new Learner( new MatDS(Array(mat0), opts), new FM(opts), mkRegularizer(opts), new ADAGrad(opts), opts) (nn, opts) } class LearnParOptions extends ParLearner.Options with FM.Opts with MatDS.Opts with ADAGrad.Opts with L1Regularizer.Opts def learnPar(mat0:Mat, d:Int) = { val opts = new LearnParOptions opts.batchSize = math.min(100000, mat0.ncols/30 + 1) opts.links.set(d) val nn = new ParLearnerF( new MatDS(Array(mat0), opts), opts, mkFMModel _, opts, mkRegularizer _, opts, mkUpdater _, opts) (nn, opts) } def learnPar(mat0:Mat):(ParLearnerF, LearnParOptions) = learnPar(mat0, 0) def learnPar(mat0:Mat, targ:Mat, d:Int) = { val opts = new LearnParOptions opts.batchSize = math.min(100000, mat0.ncols/30 + 1) if (opts.links == null) opts.links = izeros(targ.nrows,1) opts.links.set(d) val nn = new ParLearnerF( new MatDS(Array(mat0, targ), opts), opts, mkFMModel _, opts, mkRegularizer _, opts, mkUpdater _, opts) (nn, opts) } def learnPar(mat0:Mat, targ:Mat):(ParLearnerF, LearnParOptions) = learnPar(mat0, targ, 0) class LearnFParOptions extends ParLearner.Options with FM.Opts with SFilesDS.Opts with ADAGrad.Opts with L1Regularizer.Opts def learnFParx( nstart:Int=FilesDS.encodeDate(2012,3,1,0), nend:Int=FilesDS.encodeDate(2012,12,1,0), d:Int = 0 ) = { val opts = new LearnFParOptions val nn = new ParLearnerxF( null, (dopts:DataSource.Opts, i:Int) => Experiments.Twitter.twitterWords(nstart, nend, opts.nthreads, i), opts, mkFMModel _, opts, mkRegularizer _, opts, mkUpdater _, opts ) (nn, opts) } def learnFPar( nstart:Int=FilesDS.encodeDate(2012,3,1,0), 
nend:Int=FilesDS.encodeDate(2012,12,1,0), d:Int = 0 ) = { val opts = new LearnFParOptions val nn = new ParLearnerF( Experiments.Twitter.twitterWords(nstart, nend), opts, mkFMModel _, opts, mkRegularizer _, opts, mkUpdater _, opts ) (nn, opts) } }
ypkang/BIDMach
src/main/scala/BIDMach/models/FM.scala
Scala
bsd-3-clause
15,892
package bad.robot.temperature.server import java.net.InetAddress import java.time.Clock import bad.robot.temperature.IpAddress._ import bad.robot.temperature._ import cats.data.NonEmptyList import cats.effect.IO import cats.implicits._ import org.http4s._ import org.http4s.dsl.io._ object ConnectionsEndpoint { private implicit val encoder = jsonEncoder[List[Connection]] def apply(connections: Connections, ipAddresses: => NonEmptyList[Option[InetAddress]] = currentIpAddress)(implicit clock: Clock) = HttpService[IO] { case GET -> Root / "connections" => { Ok(connections.all).map(_.putHeaders(xForwardedHost(ipAddresses))) } case GET -> Root / "connections" / "active" / "within" / LongVar(period) / "mins" => { Ok(connections.allWithin(period)).map(_.putHeaders(xForwardedHost(ipAddresses))) } } private def xForwardedHost(ipAddresses: NonEmptyList[Option[InetAddress]]): Header = { Header("X-Forwarded-Host", ipAddresses.map(_.fold("unknown")(_.getHostAddress)).mkString_("", ", ", "")) } }
tobyweston/temperature-machine
src/main/scala/bad/robot/temperature/server/ConnectionsEndpoint.scala
Scala
apache-2.0
1,046
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources import java.util.{Date, UUID} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.hadoop.mapreduce._ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl import org.apache.spark._ import org.apache.spark.internal.Logging import org.apache.spark.internal.io.{FileCommitProtocol, SparkHadoopWriterUtils} import org.apache.spark.shuffle.FetchFailedException import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.catalog.BucketSpec import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.BindReferences.bindReferences import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils} import org.apache.spark.sql.execution.{SortExec, SparkPlan, SQLExecution} import org.apache.spark.util.{SerializableConfiguration, Utils} /** A helper object for writing FileFormat data out to a 
location. */ object FileFormatWriter extends Logging { /** Describes how output files should be placed in the filesystem. */ case class OutputSpec( outputPath: String, customPartitionLocations: Map[TablePartitionSpec, String], outputColumns: Seq[Attribute]) /** * Basic work flow of this command is: * 1. Driver side setup, including output committer initialization and data source specific * preparation work for the write job to be issued. * 2. Issues a write job consists of one or more executor side tasks, each of which writes all * rows within an RDD partition. * 3. If no exception is thrown in a task, commits that task, otherwise aborts that task; If any * exception is thrown during task commitment, also aborts that task. * 4. If all tasks are committed, commit the job, otherwise aborts the job; If any exception is * thrown during job commitment, also aborts the job. * 5. If the job is successfully committed, perform post-commit operations such as * processing statistics. * @return The set of all partition paths that were updated during this write job. 
*/ def write( sparkSession: SparkSession, plan: SparkPlan, fileFormat: FileFormat, committer: FileCommitProtocol, outputSpec: OutputSpec, hadoopConf: Configuration, partitionColumns: Seq[Attribute], bucketSpec: Option[BucketSpec], statsTrackers: Seq[WriteJobStatsTracker], options: Map[String, String]) : Set[String] = { val job = Job.getInstance(hadoopConf) job.setOutputKeyClass(classOf[Void]) job.setOutputValueClass(classOf[InternalRow]) FileOutputFormat.setOutputPath(job, new Path(outputSpec.outputPath)) val partitionSet = AttributeSet(partitionColumns) val dataColumns = outputSpec.outputColumns.filterNot(partitionSet.contains) val bucketIdExpression = bucketSpec.map { spec => val bucketColumns = spec.bucketColumnNames.map(c => dataColumns.find(_.name == c).get) // Use `HashPartitioning.partitionIdExpression` as our bucket id expression, so that we can // guarantee the data distribution is same between shuffle and bucketed data source, which // enables us to only shuffle one side when join a bucketed table and a normal one. HashPartitioning(bucketColumns, spec.numBuckets).partitionIdExpression } val sortColumns = bucketSpec.toSeq.flatMap { spec => spec.sortColumnNames.map(c => dataColumns.find(_.name == c).get) } val caseInsensitiveOptions = CaseInsensitiveMap(options) val dataSchema = dataColumns.toStructType DataSourceUtils.verifyWriteSchema(fileFormat, dataSchema) // Note: prepareWrite has side effect. It sets "job". 
val outputWriterFactory = fileFormat.prepareWrite(sparkSession, job, caseInsensitiveOptions, dataSchema) val description = new WriteJobDescription( uuid = UUID.randomUUID().toString, serializableHadoopConf = new SerializableConfiguration(job.getConfiguration), outputWriterFactory = outputWriterFactory, allColumns = outputSpec.outputColumns, dataColumns = dataColumns, partitionColumns = partitionColumns, bucketIdExpression = bucketIdExpression, path = outputSpec.outputPath, customPartitionLocations = outputSpec.customPartitionLocations, maxRecordsPerFile = caseInsensitiveOptions.get("maxRecordsPerFile").map(_.toLong) .getOrElse(sparkSession.sessionState.conf.maxRecordsPerFile), timeZoneId = caseInsensitiveOptions.get(DateTimeUtils.TIMEZONE_OPTION) .getOrElse(sparkSession.sessionState.conf.sessionLocalTimeZone), statsTrackers = statsTrackers ) // We should first sort by partition columns, then bucket id, and finally sorting columns. val requiredOrdering = partitionColumns ++ bucketIdExpression ++ sortColumns // the sort order doesn't matter val actualOrdering = plan.outputOrdering.map(_.child) val orderingMatched = if (requiredOrdering.length > actualOrdering.length) { false } else { requiredOrdering.zip(actualOrdering).forall { case (requiredOrder, childOutputOrder) => requiredOrder.semanticEquals(childOutputOrder) } } SQLExecution.checkSQLExecutionId(sparkSession) // This call shouldn't be put into the `try` block below because it only initializes and // prepares the job, any exception thrown from here shouldn't cause abortJob() to be called. committer.setupJob(job) try { val rdd = if (orderingMatched) { plan.execute() } else { // SPARK-21165: the `requiredOrdering` is based on the attributes from analyzed plan, and // the physical plan may have different attribute ids due to optimizer removing some // aliases. Here we bind the expression ahead to avoid potential attribute ids mismatch. 
val orderingExpr = bindReferences( requiredOrdering.map(SortOrder(_, Ascending)), outputSpec.outputColumns) SortExec( orderingExpr, global = false, child = plan).execute() } // SPARK-23271 If we are attempting to write a zero partition rdd, create a dummy single // partition rdd to make sure we at least set up one write task to write the metadata. val rddWithNonEmptyPartitions = if (rdd.partitions.length == 0) { sparkSession.sparkContext.parallelize(Array.empty[InternalRow], 1) } else { rdd } val ret = new Array[WriteTaskResult](rddWithNonEmptyPartitions.partitions.length) sparkSession.sparkContext.runJob( rddWithNonEmptyPartitions, (taskContext: TaskContext, iter: Iterator[InternalRow]) => { executeTask( description = description, sparkStageId = taskContext.stageId(), sparkPartitionId = taskContext.partitionId(), sparkAttemptNumber = taskContext.taskAttemptId().toInt & Integer.MAX_VALUE, committer, iterator = iter) }, rddWithNonEmptyPartitions.partitions.indices, (index, res: WriteTaskResult) => { committer.onTaskCommit(res.commitMsg) ret(index) = res }) val commitMsgs = ret.map(_.commitMsg) committer.commitJob(job, commitMsgs) logInfo(s"Write Job ${description.uuid} committed.") processStats(description.statsTrackers, ret.map(_.summary.stats)) logInfo(s"Finished processing stats for write job ${description.uuid}.") // return a set of all the partition paths that were updated during this job ret.map(_.summary.updatedPartitions).reduceOption(_ ++ _).getOrElse(Set.empty) } catch { case cause: Throwable => logError(s"Aborting job ${description.uuid}.", cause) committer.abortJob(job) throw new SparkException("Job aborted.", cause) } } /** Writes data out in a single Spark task. 
*/ private def executeTask( description: WriteJobDescription, sparkStageId: Int, sparkPartitionId: Int, sparkAttemptNumber: Int, committer: FileCommitProtocol, iterator: Iterator[InternalRow]): WriteTaskResult = { val jobId = SparkHadoopWriterUtils.createJobID(new Date, sparkStageId) val taskId = new TaskID(jobId, TaskType.MAP, sparkPartitionId) val taskAttemptId = new TaskAttemptID(taskId, sparkAttemptNumber) // Set up the attempt context required to use in the output committer. val taskAttemptContext: TaskAttemptContext = { // Set up the configuration object val hadoopConf = description.serializableHadoopConf.value hadoopConf.set("mapreduce.job.id", jobId.toString) hadoopConf.set("mapreduce.task.id", taskAttemptId.getTaskID.toString) hadoopConf.set("mapreduce.task.attempt.id", taskAttemptId.toString) hadoopConf.setBoolean("mapreduce.task.ismap", true) hadoopConf.setInt("mapreduce.task.partition", 0) new TaskAttemptContextImpl(hadoopConf, taskAttemptId) } committer.setupTask(taskAttemptContext) val dataWriter = if (sparkPartitionId != 0 && !iterator.hasNext) { // In case of empty job, leave first partition to save meta for file format like parquet. new EmptyDirectoryDataWriter(description, taskAttemptContext, committer) } else if (description.partitionColumns.isEmpty && description.bucketIdExpression.isEmpty) { new SingleDirectoryDataWriter(description, taskAttemptContext, committer) } else { new DynamicPartitionDataWriter(description, taskAttemptContext, committer) } try { Utils.tryWithSafeFinallyAndFailureCallbacks(block = { // Execute the task to write rows out and commit the task. 
while (iterator.hasNext) { dataWriter.write(iterator.next()) } dataWriter.commit() })(catchBlock = { // If there is an error, abort the task dataWriter.abort() logError(s"Job $jobId aborted.") }) } catch { case e: FetchFailedException => throw e case t: Throwable => throw new SparkException("Task failed while writing rows.", t) } } /** * For every registered [[WriteJobStatsTracker]], call `processStats()` on it, passing it * the corresponding [[WriteTaskStats]] from all executors. */ private def processStats( statsTrackers: Seq[WriteJobStatsTracker], statsPerTask: Seq[Seq[WriteTaskStats]]) : Unit = { val numStatsTrackers = statsTrackers.length assert(statsPerTask.forall(_.length == numStatsTrackers), s"""Every WriteTask should have produced one `WriteTaskStats` object for every tracker. |There are $numStatsTrackers statsTrackers, but some task returned |${statsPerTask.find(_.length != numStatsTrackers).get.length} results instead. """.stripMargin) val statsPerTracker = if (statsPerTask.nonEmpty) { statsPerTask.transpose } else { statsTrackers.map(_ => Seq.empty) } statsTrackers.zip(statsPerTracker).foreach { case (statsTracker, stats) => statsTracker.processStats(stats) } } }
hhbyyh/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala
Scala
apache-2.0
12,219
package tmeval.data import java.io._ import org.apache.commons.vfs2.{VFS,FileObject} import tmeval.Constants.TMEVAL_DIR import CorpusUtil.writeString /** * Convert the Gutenberg books in the supplied bzip2 archive into "documents" of * a specified length (default = 1000 tokens). * * The documents come from those collected for Kumar, Baldridge, Lease, and Ghosh (2012) * "Dating Texts without Explicit Temporal Cues", available here: * * http://arxiv.org/abs/1211.2290 * * @author jasonbaldridge */ object GutenbergPreparer { val tmevalDir = new File(TMEVAL_DIR) val outputDir = new File(tmevalDir, "data/extracted/gutenberg") def main(args: Array[String]) { extract() } def extract(length: Int = 1000, minLength: Int = 100) { println("Extracting Gutenberg data.") val fileDescr = "tbz2:file:/" + tmevalDir.getAbsolutePath + "/data/orig/gutenberg.tar.bz2" val topDir = VFS.getManager.resolveFile(fileDescr).getChild("gutenberg") process(topDir.getChild("train"), new File(outputDir, "train"), length, minLength) process(topDir.getChild("eval"), new File(outputDir, "eval"), length, minLength) } def process(raw: FileObject, outputDir: File, targetLength: Int, minLength: Int) { println("Processing files in " + raw) outputDir.mkdirs for (gutenbergFile <- raw.getChildren if gutenbergFile.getName.getBaseName.endsWith("_clean.txt")) { val name = gutenbergFile.getName.getBaseName val outputName = name.replaceAll(" ","-") println("** " + name) var currentDocument = new StringBuffer var numWordsThisDoc = 0 var subdocumentId = 0 // Use latin1 so that we don't get invalid character exception. 
// See: http://stackoverflow.com/questions/7280956/how-to-skip-invalid-characters-in-stream-in-java-scala val gutfileInputStream = gutenbergFile.getContent.getInputStream io.Source.fromInputStream(gutfileInputStream, "latin1").getLines.foreach { line => { numWordsThisDoc += line.replaceAll("""\\s+"""," ").split(" ").length currentDocument.append(line).append('\\n') if (numWordsThisDoc > targetLength) { writeString(outputDir, outputName+"-"+subdocumentId, currentDocument.toString) currentDocument = new StringBuffer numWordsThisDoc = 0 subdocumentId += 1 } }} // Only write the last document if it has at least minLength words. if (numWordsThisDoc > minLength) writeString(outputDir, outputName+"-"+subdocumentId, currentDocument.toString) } } }
utcompling/topicmodel-eval
src/main/scala/tmeval/data/Gutenberg.scala
Scala
apache-2.0
2,597
package com.teambytes.inflatable.raft

import com.teambytes.inflatable.raft.model.{RaftSnapshotMetadata, RaftSnapshot}

import scala.concurrent.Future

/**
 * The main API trait for RaftActors.
 * By implementing these methods you're implementing the replicated state machine.
 *
 * Messages sent to `apply` are guaranteed by Raft to have been '''committed''', so they're safe to apply to your STM.
 *
 * In order to start using replicated log snapshotting, you just need to override the `prepareSnapshot` method,
 * which will be called each time Raft decides to take a snapshot of the replicated journal (how often that happens is configurable).
 */
private[inflatable] trait ReplicatedStateMachine {

  // Handler type for committed messages; the value it returns is what the
  // Leader sends back to the originating client.
  type ReplicatedStateMachineApply = PartialFunction[Any, Any]

  /**
   * Use this method to change the actor's internal state.
   * It will be called whenever a message is committed by the raft cluster.
   *
   * Please note that this is different than a plain `receive`, because the returned value from application
   * will be sent back _by the leader_ to the client that originally sent the message.
   *
   * All other __Followers__ will also apply this message to their internal state machines when it's committed,
   * although the result of those applications will ''not'' be sent back to the client who originally sent the message -
   * only the __Leader__ responds to the client (`1 message <-> 1 response`). Although you're free to use `!` inside an
   * apply (resulting in possibly `1 message <-> n messages`).
   *
   * You can treat this as a raft equivalent of receive, with the difference that apply is guaranteed to be called,
   * only after the message has been propagated to the majority of members.
   *
   * '''Log compaction and snapshots''':
   * Match for [[com.teambytes.inflatable.raft.protocol.RaftProtocol.InstallSnapshot]] in order to install snapshots
   * to your internal state machine if you're using log compaction (see `prepareSnapshot`)
   *
   * @return the returned value will be sent back to the client issuing the command.
   *         The reply is only sent once, by the current raft leader.
   */
  private[inflatable] def apply: ReplicatedStateMachineApply

  /**
   * Called whenever compaction is performed on the raft replicated log.
   *
   * Log compaction is required in order to maintain long running raft clusters.
   *
   * The produced snapshot MUST include the [[com.teambytes.inflatable.raft.model.RaftSnapshotMetadata]]
   * obtained as the parameter in this call. The simplest snapshotting example would be to complete with the current state:
   *
   * {{{
   *   class SummingActor extends RaftActor {
   *     var sum: Int = 0
   *
   *     def receive = { case i: Int => sum += i }
   *
   *     // compaction is simple, we can just store the current state
   *     def prepareSnapshot(meta: RaftSnapshotMetadata) =
   *       Future(Some(RaftSnapshot(meta, sum)))
   *   }
   * }}}
   *
   * @return if you don't want to store a snapshot (default impl), you can just complete the Future with None,
   *         otherwise, return the snapshot data you want to store
   */
  def prepareSnapshot(snapshotMetadata: RaftSnapshotMetadata): Future[Option[RaftSnapshot]] = Future.successful(None)
}
grahamar/inflatable
src/main/scala/com/teambytes/inflatable/raft/ReplicatedStateMachine.scala
Scala
apache-2.0
3,266
/* * Copyright 2013 agwlvssainokuni * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package views.asynctask import java.text.DecimalFormat import java.text.SimpleDateFormat import java.util.Date import models.AsyncTask object fmt { def patternDtm = "yyyy-MM-dd HH:mm:ss" def patternCount = "#,##0 '件'" def status(item: AsyncTask) = item.status match { case AsyncTask.New => "登録" case AsyncTask.Started => "軌道中" case AsyncTask.Running => "実行中" case AsyncTask.OkEnd => "正常終了" case AsyncTask.NgEnd => "異常終了" case _ => "状態不定" } def startDtm(item: AsyncTask) = dtm(item.startDtm) def endDtm(item: AsyncTask) = dtm(item.endDtm) def totalCount(item: AsyncTask) = count(item.totalCount) def okCount(item: AsyncTask) = count(item.okCount) def ngCount(item: AsyncTask) = count(item.ngCount) private def dtm(dtm: Option[Date]) = (dtm.map { (new SimpleDateFormat(patternDtm)).format(_) }).getOrElse("-") private def count(count: Option[Long]) = (count.map { (new DecimalFormat(patternCount)).format(_) }).getOrElse("-") }
agwlvssainokuni/lifelog
lifelog-website/app/views/asynctask/fmt.scala
Scala
apache-2.0
1,635
/* This file is part of Intake24. Copyright 2015, 2016 Newcastle University. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package security class DatabaseAccessException(msg: String) extends RuntimeException(msg)
digitalinteraction/intake24
ApiPlayServer/app/security/DatabaseAccessException.scala
Scala
apache-2.0
697
package org.jetbrains.plugins.scala.lang.completion.postfix.templates import com.intellij.codeInsight.template.postfix.templates.PostfixTemplateWithExpressionSelector import com.intellij.openapi.editor.Editor import com.intellij.psi.PsiElement import org.jetbrains.plugins.scala.lang.completion.postfix.templates.selector.{SelectorConditions, AncestorSelector} import org.jetbrains.plugins.scala.lang.completion.postfix.templates.selector.SelectorType._ import org.jetbrains.plugins.scala.lang.refactoring.introduceField.ScalaIntroduceFieldFromExpressionHandler /** * @author Roman.Shein * @since 10.09.2015. */ class ScalaIntroduceFieldPostfixTemplate extends PostfixTemplateWithExpressionSelector("field", "field = expr", new AncestorSelector(SelectorConditions.ANY_EXPR, All)) { override def expandForChooseExpression(expression: PsiElement, editor: Editor): Unit = { val range = expression.getTextRange editor.getSelectionModel.setSelection(range.getStartOffset, range.getEndOffset) new ScalaIntroduceFieldFromExpressionHandler().invoke(expression.getProject, editor, expression.getContainingFile, expression.getTextRange.getStartOffset, expression.getTextRange.getEndOffset) } }
ilinum/intellij-scala
src/org/jetbrains/plugins/scala/lang/completion/postfix/templates/ScalaIntroduceFieldPostfixTemplate.scala
Scala
apache-2.0
1,213
package scalanlp.optimize.linear /* Copyright 2010 David Hall, Daniel Ramage Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import org.scalatest._; import org.scalatest.junit._; import org.scalatest.prop._; import org.scalacheck._; import org.junit.runner.RunWith import scalala.tensor.dense._; /** * * @author dlwh */ @RunWith(classOf[JUnitRunner]) class AffineScalingTest extends FunSuite { // from: http://en.wikipedia.org/wiki/Karmarkar's_algorithm test("Small example") { val x0 = DenseVector(0.0,0.0); val c = DenseVector(1.,1.); val A = DenseMatrix.zeros[Double](11,2); val b = DenseVector.zeros[Double](11); for(i <- 0 to 10) { val p = i / 10.0 A(i,0) = 2 * p; A(i,1) = 1; b(i) = p * p + 1; } val x = AffineScaling.maximize(A=A,b=b,c=c,x0=x0); assert( (A * x - b).nonzero.values.iterator.forall(_ < 0), (A * x)); assert( (x(0) - 0.5).abs < 1E-3, x(0)); assert( (x(1) - 0.75).abs < 1E-3, x(1)); } }
MLnick/scalanlp-core
learn/src/test/scala/scalanlp/optimize/linear/AffineScalingTest.scala
Scala
apache-2.0
1,472
// Databricks notebook source exported at Sun, 19 Jun 2016 03:06:55 UTC // MAGIC %md // MAGIC // MAGIC # [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/) // MAGIC // MAGIC // MAGIC ### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand) // MAGIC // MAGIC *supported by* [![](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/databricks_logoTM_200px.png)](https://databricks.com/) // MAGIC and // MAGIC [![](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/AWS_logoTM_200px.png)](https://www.awseducate.com/microsite/CommunitiesEngageHome) // COMMAND ---------- // MAGIC %md // MAGIC This is an elaboration of the [Apache Spark 1.6 mllib-progamming-guide on mllib-data-types](http://spark.apache.org/docs/latest/mllib-data-types.html). // MAGIC // MAGIC # [Overview](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/000_MLlibProgGuide) // MAGIC // MAGIC ## [Data Types - MLlib Programming Guide](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/000_dataTypesProgGuide) // MAGIC // MAGIC - [Local vector](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/001_LocalVector) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#local-vector) // MAGIC - [Labeled point](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/002_LabeledPoint) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#labeled-point) // MAGIC - [Local matrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/003_LocalMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#local-matrix) // MAGIC - [Distributed 
matrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/004_DistributedMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#distributed-matrix) // MAGIC - [RowMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/005_RowMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#rowmatrix) // MAGIC - [IndexedRowMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/006_IndexedRowMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#indexedrowmatrix) // MAGIC - [CoordinateMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/007_CoordinateMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#coordinatematrix) // MAGIC - [BlockMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/008_BlockMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#blockmatrix) // MAGIC // MAGIC MLlib supports local vectors and matrices stored on a single machine, as // MAGIC well as distributed matrices backed by one or more RDDs. Local vectors // MAGIC and local matrices are simple data models that serve as public // MAGIC interfaces. The underlying linear algebra operations are provided by // MAGIC [Breeze](http://www.scalanlp.org/) and [jblas](http://jblas.org/). A // MAGIC training example used in supervised learning is called a “labeled point” // MAGIC in MLlib. 
// COMMAND ----------

// MAGIC %md
// MAGIC ### BlockMatrix in Scala
// MAGIC
// MAGIC A `BlockMatrix` is a distributed matrix backed by an RDD of
// MAGIC `MatrixBlock`s, where a `MatrixBlock` is a tuple of
// MAGIC `((Int, Int), Matrix)`, where the `(Int, Int)` is the index of the
// MAGIC block, and `Matrix` is the sub-matrix at the given index with size
// MAGIC `rowsPerBlock` x `colsPerBlock`. `BlockMatrix` supports methods such as
// MAGIC `add` and `multiply` with another `BlockMatrix`. `BlockMatrix` also has
// MAGIC a helper function `validate` which can be used to check whether the
// MAGIC `BlockMatrix` is set up properly.
// MAGIC
// MAGIC A [`BlockMatrix`](http://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.mllib.linalg.distributed.BlockMatrix)
// MAGIC can be most easily created from an `IndexedRowMatrix` or
// MAGIC `CoordinateMatrix` by calling `toBlockMatrix`. `toBlockMatrix` creates
// MAGIC blocks of size 1024 x 1024 by default. Users may change the block size
// MAGIC by supplying the values through
// MAGIC `toBlockMatrix(rowsPerBlock, colsPerBlock)`.
// MAGIC
// MAGIC Refer to the [`BlockMatrix` Scala docs](http://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.mllib.linalg.distributed.BlockMatrix)
// MAGIC for details on the API.

// COMMAND ----------

//import org.apache.spark.mllib.linalg.{Matrix, Matrices}
// Explicit import so the RDD[MatrixEntry] type annotation below is resolvable;
// some shells auto-import RDD, but the notebook should not rely on that.
import org.apache.spark.rdd.RDD
import org.apache.spark.mllib.linalg.distributed.{BlockMatrix, CoordinateMatrix, MatrixEntry}

// COMMAND ----------

val entries: RDD[MatrixEntry] = sc.parallelize(Array(MatrixEntry(0, 0, 1.2), MatrixEntry(1, 0, 2.1), MatrixEntry(6, 1, 3.7))) // an RDD of matrix entries

// COMMAND ----------

// Create a CoordinateMatrix from an RDD[MatrixEntry].
val coordMat: CoordinateMatrix = new CoordinateMatrix(entries)

// COMMAND ----------

// Transform the CoordinateMatrix to a BlockMatrix
val matA: BlockMatrix = coordMat.toBlockMatrix().cache()

// COMMAND ----------

// Validate whether the BlockMatrix is set up properly. Throws an Exception when it is not valid.
// Nothing happens if it is valid.
matA.validate()

// COMMAND ----------

// Calculate A^T A.
val ata = matA.transpose.multiply(matA)

// COMMAND ----------

ata.blocks.collect()

// COMMAND ----------

ata.toLocalMatrix()

// COMMAND ----------

// MAGIC %md
// MAGIC ### BlockMatrix in Python
// MAGIC A [`BlockMatrix`](http://spark.apache.org/docs/latest/api/python/pyspark.mllib.html#pyspark.mllib.linalg.distributed.BlockMatrix)
// MAGIC can be created from an `RDD` of sub-matrix blocks, where a sub-matrix
// MAGIC block is a `((blockRowIndex, blockColIndex), sub-matrix)` tuple.
// MAGIC
// MAGIC Refer to the [`BlockMatrix` Python docs](http://spark.apache.org/docs/latest/api/python/pyspark.mllib.html#pyspark.mllib.linalg.distributed.BlockMatrix)
// MAGIC for more details on the API.

// COMMAND ----------

// MAGIC %py
// MAGIC from pyspark.mllib.linalg import Matrices
// MAGIC from pyspark.mllib.linalg.distributed import BlockMatrix
// MAGIC
// MAGIC # Create an RDD of sub-matrix blocks.
// MAGIC blocks = sc.parallelize([((0, 0), Matrices.dense(3, 2, [1, 2, 3, 4, 5, 6])),
// MAGIC                          ((1, 0), Matrices.dense(3, 2, [7, 8, 9, 10, 11, 12]))])
// MAGIC
// MAGIC # Create a BlockMatrix from an RDD of sub-matrix blocks.
// MAGIC mat = BlockMatrix(blocks, 3, 2)
// MAGIC
// MAGIC # Get its size.
// MAGIC m = mat.numRows() # 6
// MAGIC n = mat.numCols() # 2
// MAGIC print (m,n)
// MAGIC
// MAGIC # Get the blocks as an RDD of sub-matrix blocks.
// MAGIC blocksRDD = mat.blocks
// MAGIC
// MAGIC # Convert to a LocalMatrix.
// MAGIC localMat = mat.toLocalMatrix()
// MAGIC
// MAGIC # Convert to an IndexedRowMatrix.
// MAGIC indexedRowMat = mat.toIndexedRowMatrix()
// MAGIC
// MAGIC # Convert to a CoordinateMatrix.
// MAGIC coordinateMat = mat.toCoordinateMatrix()

// COMMAND ----------

// MAGIC %md
// MAGIC
// MAGIC # [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/)
// MAGIC
// MAGIC
// MAGIC ### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand)
// MAGIC
// MAGIC *supported by* [![](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/databricks_logoTM_200px.png)](https://databricks.com/)
// MAGIC and
// MAGIC [![](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/AWS_logoTM_200px.png)](https://www.awseducate.com/microsite/CommunitiesEngageHome)
raazesh-sainudiin/scalable-data-science
db/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/008_BlockMatrix.scala
Scala
unlicense
8,216
/* * Copyright 2017 PayPal * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.squbs.pattern.orchestration import scala.collection.compat._ import scala.collection.mutable.ArrayBuffer import scala.reflect.ClassTag import scala.util.control.NonFatal import scala.util.{Failure, Success, Try} import scala.language.higherKinds /** The trait that represents futures like scala.concurrent.Future BUT works in a single threaded environment. * It does not have blocking calls to the future and a future cannot be created from passing a closure. * The future is obtained from the corresponding Promise or translated from one or more other Futures. * * Asynchronous computations that yield futures are created with the `future` call: * * {{{ * val s = "Hello" * val f: Future[String] = future { * s + " future!" * } * f onSuccess { * case msg => println(msg) * } * }}} * * @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang * * @define multipleCallbacks * Multiple callbacks may be registered; there is no guarantee that they will be * executed in a particular order. * * @define caughtThrowables * The future may contain a throwable object and this means that the future failed. * Futures obtained through combinators have the same exception as the future they were obtained from. 
* The following throwable objects are not contained in the future: * - `Error` - errors are not contained within futures * - `InterruptedException` - not contained within futures * - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures * * Instead, the future is completed with a ExecutionException with one of the exceptions above * as the cause. * If a future is failed with a `scala.runtime.NonLocalReturnControl`, * it is completed with a value from that throwable instead. * * @define nonDeterministic * Note: using this method yields nondeterministic dataflow programs. * * @define forComprehensionExamples * Example: * * {{{ * val f = future { 5 } * val g = future { 3 } * val h = for { * x: Int <- f // returns Future(5) * y: Int <- g // returns Future(5) * } yield x + y * }}} * * is translated to: * * {{{ * f flatMap { (x: Int) => g map { (y: Int) => x + y } } * }}} * * @define callbackInContext * The provided callback always runs in the provided implicit *`ExecutionContext`, though there is no guarantee that the * `execute()` method on the `ExecutionContext` will be called once * per callback or that `execute()` will be called in the current * thread. That is, the implementation may run multiple callbacks * in a batch within a single `execute()` and it may run * `execute()` either immediately or asynchronously. */ @deprecated("The Orchestration module is deprecated. Please use Akka streams for safer orchestration instead.", since = "0.15.0") trait OFuture[+T] { /* Callbacks */ /** When this future is completed successfully (i.e. with a value), * apply the provided partial function to the value if the partial function * is defined at that value. * * If the future has already been completed with a value, * this will either be applied immediately or be scheduled asynchronously. 
* * $multipleCallbacks * $callbackInContext */ def onSuccess[U](pf: PartialFunction[T, U]): Unit = onComplete { case Success(v) if pf isDefinedAt v => pf(v) case _ => } /** When this future is completed with a failure (i.e. with a throwable), * apply the provided callback to the throwable. * * $caughtThrowables * * If the future has already been completed with a failure, * this will either be applied immediately or be scheduled asynchronously. * * Will not be called in case that the future is completed with a value. * * $multipleCallbacks * $callbackInContext */ def onFailure[U](callback: PartialFunction[Throwable, U]): Unit = onComplete { case Failure(t) if NonFatal(t) && callback.isDefinedAt(t) => callback(t) case _ => } /** When this future is completed, either through an exception, or a value, * apply the provided function. * * If the future has already been completed, * this will either be applied immediately or be scheduled asynchronously. * * $multipleCallbacks * $callbackInContext */ def onComplete[U](func: Try[T] => U): Unit /* Miscellaneous */ /** Returns whether the future has already been completed with * a value or an exception. * * $nonDeterministic * * @return `true` if the future is already completed, `false` otherwise */ def isCompleted: Boolean /** The value of this `Future`. * * If the future is not completed the returned value will be `None`. * If the future is completed the value will be `Some(Success(t))` * if it contains a valid result, or `Some(Failure(error))` if it contains * an exception. */ def value: Option[Try[T]] /* Projections */ /** Returns a failed projection of this future. * * The failed projection is a future holding a value of type `Throwable`. * * It is completed with a value which is the throwable of the original future * in case the original future is failed. * * It is failed with a `NoSuchElementException` if the original future is completed successfully. 
* * Blocking on this future returns a value if the original future is completed with an exception * and throws a corresponding exception if the original future fails. */ def failed: OFuture[Throwable] = { val p = OPromise[Throwable]() onComplete { case Failure(t) => p success t case Success(v) => p failure new NoSuchElementException("Future.failed not completed with a throwable.") } p.future } /** Returns the successful projection of this future. * * If the future has not been completed, a NoSuchElementException is thrown. * If the future failed, the exception causing the failure is thrown. * @return The value of this future, provided it is completed. */ def apply(): T = { value match { case Some(Failure(t)) => throw t case Some(Success(v)) => v case None => throw new NoSuchElementException("Future not completed.") } } /* Monadic operations */ /** Asynchronously processes the value in the future once the value becomes available. * * Will not be called if the future fails. */ def foreach[U](f: T => U): Unit = onComplete { case Success(r) => f(r) case _ => // do nothing } /** Creates a new future by applying the 's' function to the successful result of * this future, or the 'f' function to the failed result. If there is any non-fatal * exception thrown when 's' or 'f' is applied, that exception will be propagated * to the resulting future. 
* * @param s function that transforms a successful result of the receiver into a * successful result of the returned future * @param f function that transforms a failure of the receiver into a failure of * the returned future * @return a future that will be completed with the transformed value */ def transform[S](s: T => S, f: Throwable => Throwable): OFuture[S] = { val p = OPromise[S]() onComplete { case result => try { result match { case Failure(t) => p failure f(t) case Success(r) => p success s(r) } } catch { case NonFatal(t) => p failure t } } p.future } /** Creates a new future by applying a function to the successful result of * this future. If this future is completed with an exception then the new * future will also contain this exception. * * $forComprehensionExamples */ def map[S](f: T => S): OFuture[S] = { // transform(f, identity) val p = OPromise[S]() onComplete { case result => try { result match { case Success(r) => p success f(r) case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] } } catch { case NonFatal(t) => p failure t } } p.future } /** Creates a new future by applying a function to the successful result of * this future, and returns the result of the function as the new future. * If this future is completed with an exception then the new future will * also contain this exception. * * $forComprehensionExamples */ def flatMap[S](f: T => OFuture[S]): OFuture[S] = { val p = OPromise[S]() onComplete { case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] case Success(v) => try { f(v).onComplete({ case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] case Success(v0) => p success v0 }) } catch { case NonFatal(t) => p failure t } } p.future } /** Creates a new future by filtering the value of the current future with a predicate. * * If the current future contains a value which satisfies the predicate, the new future will also hold that value. * Otherwise, the resulting future will fail with a `NoSuchElementException`. 
* * If the current future fails, then the resulting future also fails. * * Example: * {{{ * val f = future { 5 } * val g = f filter { _ % 2 == 1 } * val h = f filter { _ % 2 == 0 } * Await.result(g, Duration.Zero) // evaluates to 5 * Await.result(h, Duration.Zero) // throw a NoSuchElementException * }}} */ def filter(pred: T => Boolean): OFuture[T] = { val p = OPromise[T]() onComplete { case f: Failure[_] => p complete f.asInstanceOf[Failure[T]] case Success(v) => try { if (pred(v)) p success v else p failure new NoSuchElementException("Future.filter predicate is not satisfied") } catch { case NonFatal(t) => p failure t } } p.future } /** Used by for-comprehensions. */ final def withFilter(p: T => Boolean): OFuture[T] = filter(p) // final def withFilter(p: T => Boolean) = new FutureWithFilter[T](this, p) // final class FutureWithFilter[+S](self: Future[S], p: S => Boolean) { // def foreach(f: S => Unit): Unit = self filter p foreach f // def map[R](f: S => R) = self filter p map f // def flatMap[R](f: S => Future[R]) = self filter p flatMap f // def withFilter(q: S => Boolean): FutureWithFilter[S] = new FutureWithFilter[S](self, x => p(x) && q(x)) // } /** Creates a new future by mapping the value of the current future, if the given partial function is defined at that value. * * If the current future contains a value for which the partial function is defined, the new future will also hold that value. * Otherwise, the resulting future will fail with a `NoSuchElementException`. * * If the current future fails, then the resulting future also fails. 
* * Example: * {{{ * val f = future { -5 } * val g = f collect { * case x if x < 0 => -x * } * val h = f collect { * case x if x > 0 => x * 2 * } * Await.result(g, Duration.Zero) // evaluates to 5 * Await.result(h, Duration.Zero) // throw a NoSuchElementException * }}} */ def collect[S](pf: PartialFunction[T, S]): OFuture[S] = { val p = OPromise[S]() onComplete { case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] case Success(v) => try { if (pf.isDefinedAt(v)) p success pf(v) else p failure new NoSuchElementException("Future.collect partial function is not defined at: " + v) } catch { case NonFatal(t) => p failure t } } p.future } /** Creates a new future that will handle any matching throwable that this * future might contain. If there is no match, or if this future contains * a valid result then the new future will contain the same. * * Example: * * {{{ * future (6 / 0) recover { case e: ArithmeticException => 0 } // result: 0 * future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception * future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3 * }}} */ def recover[U >: T](pf: PartialFunction[Throwable, U]): OFuture[U] = { val p = OPromise[U]() onComplete { case tr => p.complete(tr recover pf) } p.future } /** Creates a new future that will handle any matching throwable that this * future might contain by assigning it a value of another future. * * If there is no match, or if this future contains * a valid result then the new future will contain the same result. 
* * Example: * * {{{ * val f = future { Int.MaxValue } * future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue * }}} */ def recoverWith[U >: T](pf: PartialFunction[Throwable, OFuture[U]]): OFuture[U] = { val p = OPromise[U]() onComplete { case Failure(t) if pf isDefinedAt t => try { p completeWith pf(t) } catch { case NonFatal(t0) => p failure t0 } case otherwise => p complete otherwise } p.future } /** Zips the values of `this` and `that` future, and creates * a new future holding the tuple of their results. * * If `this` future fails, the resulting future is failed * with the throwable stored in `this`. * Otherwise, if `that` future fails, the resulting future is failed * with the throwable stored in `that`. */ def zip[U](that: OFuture[U]): OFuture[(T, U)] = { val p = OPromise[(T, U)]() this onComplete { case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]] case Success(r) => that onSuccess { case r2 => p success ((r, r2)) } that onFailure { case f => p failure f } } p.future } /** Creates a new future which holds the result of this future if it was completed successfully, or, if not, * the result of the `that` future if `that` is completed successfully. * If both futures are failed, the resulting future holds the throwable object of the first future. * * Using this method will not cause concurrent programs to become nondeterministic. * * Example: * {{{ * val f = future { sys.error("failed") } * val g = future { 5 } * val h = f fallbackTo g * Await.result(h, Duration.Zero) // evaluates to 5 * }}} */ def fallbackTo[U >: T](that: OFuture[U]): OFuture[U] = { val p = OPromise[U]() onComplete { case s @ Success(_) => p complete s case _ => p completeWith that } p.future } /** Creates a new `Future[S]` which is completed with this `Future`'s result if * that conforms to `S`'s erased type or a `ClassCastException` otherwise. 
*/ def mapTo[S](implicit tag: ClassTag[S]): OFuture[S] = { def boxedType(c: Class[_]): Class[_] = { if (c.isPrimitive) OFuture.toBoxed(c) else c } val p = OPromise[S]() onComplete { case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] case Success(t) => p complete (try { Success(boxedType(tag.runtimeClass).cast(t).asInstanceOf[S]) } catch { case e: ClassCastException => Failure(e) }) } p.future } /** Applies the side-effecting function to the result of this future, and returns * a new future with the result of this future. * * This method allows one to enforce that the callbacks are executed in a * specified order. * * Note that if one of the chained `andThen` callbacks throws * an exception, that exception is not propagated to the subsequent `andThen` * callbacks. Instead, the subsequent `andThen` callbacks are given the original * value of this future. * * The following example prints out `5`: * * {{{ * val f = future { 5 } * f andThen { * case r => sys.error("runtime exception") * } andThen { * case Failure(t) => println(t) * case Success(v) => println(v) * } * }}} */ def andThen[U](pf: PartialFunction[Try[T], U]): OFuture[T] = { val p = OPromise[T]() onComplete { case r => try if (pf isDefinedAt r) pf(r) finally p complete r } p.future } /** * Converts this orchestration future to a scala.concurrent.Future. * @return A scala.concurrent.Future representing this future. */ def toFuture: scala.concurrent.Future[T] = { import scala.concurrent.{Promise => CPromise} val cPromise = CPromise[T]() onComplete { case Success(v) => cPromise success v case Failure(t) => cPromise failure t } cPromise.future } } /** Future companion object. * * @define nonDeterministic * Note: using this method yields nondeterministic dataflow programs. */ @deprecated("The Orchestration module is deprecated. 
Please use Akka streams for safer orchestration instead.", since = "0.15.0") object OFuture { private[orchestration] val toBoxed = Map[Class[_], Class[_]]( classOf[Boolean] -> classOf[java.lang.Boolean], classOf[Byte] -> classOf[java.lang.Byte], classOf[Char] -> classOf[java.lang.Character], classOf[Short] -> classOf[java.lang.Short], classOf[Int] -> classOf[java.lang.Integer], classOf[Long] -> classOf[java.lang.Long], classOf[Float] -> classOf[java.lang.Float], classOf[Double] -> classOf[java.lang.Double], classOf[Unit] -> classOf[scala.runtime.BoxedUnit] ) /** Creates an already completed Future with the specified exception. * * @tparam T the type of the value in the future * @return the newly created `Future` object */ def failed[T](exception: Throwable): OFuture[T] = OPromise.failed(exception).future /** Creates an already completed Future with the specified result. * * @tparam T the type of the value in the future * @return the newly created `Future` object */ def successful[T](result: T): OFuture[T] = OPromise.successful(result).future /** Simple version of `Futures.traverse`. Transforms a `IterableOnce[Future[A]]` into a `Future[IterableOnce[A]]`. * Useful for reducing many `Future`s into a single `Future`. */ def sequence[A, M[X] <: IterableOnce[X]](in: M[OFuture[A]]) (implicit bf: BuildFrom[M[OFuture[A]], A, M[A]]): OFuture[M[A]] = { in.iterator.foldLeft(OPromise.successful(bf.newBuilder(in)).future) { (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[OFuture[A]]) yield r += a } map (_.result()) } /** Returns a `Future` to the result of the first future in the list that is completed. */ def firstCompletedOf[T](futures: IterableOnce[OFuture[T]]): OFuture[T] = { val p = OPromise[T]() val completeFirst: Try[T] => Unit = p tryComplete _ futures.iterator.foreach(_ onComplete completeFirst) p.future } /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate. 
*/ def find[T](futurestravonce: IterableOnce[OFuture[T]])(predicate: T => Boolean): OFuture[Option[T]] = { val futures = futurestravonce.iterator.to(ArrayBuffer) if (futures.isEmpty) OPromise.successful[Option[T]](None).future else { val result = OPromise[Option[T]]() var ref = futures.size val search: Try[T] => Unit = v => try { v match { case Success(r) => if (predicate(r)) result tryComplete Success(Some(r)) case _ => } } finally { ref -= 1 if (ref == 0) { result tryComplete Success(None) } } futures.foreach(_ onComplete search) result.future } } /** A non-blocking fold over the specified futures, with the start value of the given zero. * The fold is performed on the thread where the last future is completed, * the result will be the first failure of any of the futures, or any failure in the actual fold, * or the result of the fold. * * Example: * {{{ * val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds) * }}} */ def fold[T, R](futures: IterableOnce[OFuture[T]])(zero: R)(foldFun: (R, T) => R): OFuture[R] = { if (futures.iterator.isEmpty) OPromise.successful(zero).future else sequence(futures)(ArrayBuffer).map(_.iterator.foldLeft(zero)(foldFun)) } /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first. * * Example: * {{{ * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds) * }}} */ def reduce[T, R >: T](futures: IterableOnce[OFuture[T]])(op: (R, T) => R): OFuture[R] = { if (futures.iterator.isEmpty) OPromise[R]().failure(new NoSuchElementException("reduce attempted on empty collection")).future else sequence(futures)(ArrayBuffer).map(a => a.iterator.reduceLeft(op)) } /** Transforms a `IterableOnce[A]` into a `Future[IterableOnce[B]]` using the provided function `A => Future[B]`. * This is useful for performing a parallel map. 
For example, to apply a function to all items of a list * in parallel: * * {{{ * val myFutureList = Future.traverse(myList)(x => Future(myFunc(x))) * }}} */ def traverse[A, B, M[_] <: IterableOnce[_]](in: M[A])(fn: A => OFuture[B]) (implicit bf: BuildFrom[M[A], B, M[B]]): OFuture[M[B]] = in.foldLeft(OPromise.successful(bf.newBuilder(in)).future) { (fr, a) => val fb = fn(a.asInstanceOf[A]) for (r <- fr; b <- fb) yield r += b }.map(_.result()) } /** A marker indicating that a `java.lang.Runnable` provided to `scala.concurrent.ExecutionContext` * wraps a callback provided to `Future.onComplete`. * All callbacks provided to a `Future` end up going through `onComplete`, so this allows an * `ExecutionContext` to special-case callbacks that were executed by `Future` if desired. */ @deprecated("The Orchestration module is deprecated. Please use Akka streams for safer orchestration instead.", since = "0.15.0") trait OnCompleteRunnable { self: Runnable => }
paypal/squbs
squbs-pattern/src/main/scala/org/squbs/pattern/orchestration/OFuture.scala
Scala
apache-2.0
23,360
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.cep.scala

import org.apache.flink.api.common.functions.util.ListCollector
import org.apache.flink.cep.scala.pattern.Pattern
import org.apache.flink.streaming.api.operators.{StreamFlatMap, StreamMap}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.transformations.OneInputTransformation
import org.apache.flink.util.{Collector, TestLogger}
import org.apache.flink.types.{Either => FEither}
import org.apache.flink.api.java.tuple.{Tuple2 => FTuple2}
import java.lang.{Long => JLong}
import java.util.{Map => JMap}
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.junit.Assert._
import org.junit.Test

/**
 * Verifies that the Scala CEP API correctly forwards parameters to and from the
 * underlying Java CEP operators. Each test builds a dummy pattern stream, extracts
 * the generated Java user function from the resulting transformation, invokes it
 * directly with a hand-built pattern map, and checks the produced output.
 */
class PatternStreamScalaJavaAPIInteroperabilityTest extends TestLogger {

  // select(): the Scala lambda must receive the Java pattern map unchanged and
  // its return value must be forwarded as the operator output.
  @Test
  @throws[Exception]
  def testScalaJavaAPISelectFunForwarding {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val dummyDataStream: DataStream[(Int, Int)] = env.fromElements()
    val pattern: Pattern[(Int, Int), _] = Pattern.begin[(Int, Int)]("dummy")
    val pStream: PatternStream[(Int, Int)] = CEP.pattern(dummyDataStream, pattern)
    val param = mutable.Map("begin" ->(1, 2)).asJava
    val result: DataStream[(Int, Int)] = pStream
      .select((pattern: mutable.Map[String, (Int, Int)]) => {
        //verifies input parameter forwarding
        assertEquals(param, pattern.asJava)
        param.get("begin")
      })
    // Pull the generated StreamMap operator out of the transformation and call
    // its user function directly with the same pattern map.
    val out = extractUserFunction[StreamMap[java.util.Map[String, (Int, Int)], (Int, Int)]](result)
      .getUserFunction.map(param)
    //verifies output parameter forwarding
    assertEquals(param.get("begin"), out)
  }

  // flatSelect(): same forwarding check but through a Collector-based function.
  @Test
  @throws[Exception]
  def testScalaJavaAPIFlatSelectFunForwarding {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val dummyDataStream: DataStream[List[Int]] = env.fromElements()
    val pattern: Pattern[List[Int], _] = Pattern.begin[List[Int]]("dummy")
    val pStream: PatternStream[List[Int]] = CEP.pattern(dummyDataStream, pattern)
    val inList = List(1, 2, 3)
    val inParam = mutable.Map("begin" -> inList).asJava
    val outList = new java.util.ArrayList[List[Int]]
    val outParam = new ListCollector[List[Int]](outList)
    val result: DataStream[List[Int]] = pStream
      .flatSelect((pattern: mutable.Map[String, List[Int]], out: Collector[List[Int]]) => {
        //verifies input parameter forwarding
        assertEquals(inParam, pattern.asJava)
        out.collect(pattern.get("begin").get)
      })
    extractUserFunction[StreamFlatMap[java.util.Map[String, List[Int]], List[Int]]](result).
      getUserFunction.flatMap(inParam, outParam)
    //verify output parameter forwarding and that flatMap function was actually called
    assertEquals(inList, outList.get(0))
  }

  // flatSelect with timeout handler: the generated function receives
  // FEither.Left for timed-out partial matches and FEither.Right for full
  // matches; both branches must be forwarded to the right Scala lambda.
  @Test
  @throws[Exception]
  def testTimeoutHandling: Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val dummyDataStream: DataStream[String] = env.fromElements()
    val pattern: Pattern[String, _] = Pattern.begin[String]("dummy")
    val pStream: PatternStream[String] = CEP.pattern(dummyDataStream, pattern)
    val inParam = mutable.Map("begin" -> "barfoo").asJava
    val outList = new java.util.ArrayList[Either[String, String]]
    val output = new ListCollector[Either[String, String]](outList)
    val expectedOutput = List(Right("match"), Right("barfoo"), Left("timeout"), Left("barfoo"))
      .asJava

    val result: DataStream[Either[String, String]] = pStream.flatSelect {
      (pattern: mutable.Map[String, String], timestamp: Long, out: Collector[String]) =>
        out.collect("timeout")
        out.collect(pattern("begin"))
    } {
      (pattern: mutable.Map[String, String], out: Collector[String]) =>
        //verifies input parameter forwarding
        assertEquals(inParam, pattern.asJava)
        out.collect("match")
        out.collect(pattern("begin"))
    }

    val fun = extractUserFunction[
      StreamFlatMap[
        FEither[
          FTuple2[JMap[String, String], JLong],
          JMap[String, String]],
        Either[String, String]]](result)

    // Right = completed match, Left = (partial match, timeout timestamp).
    fun.getUserFunction.flatMap(FEither.Right(inParam), output)
    fun.getUserFunction.flatMap(FEither.Left(FTuple2.of(inParam, 42L)), output)

    assertEquals(expectedOutput, outList)
  }

  /** Unwraps the Java operator (and casts it to `T`) from a DataStream's transformation. */
  def extractUserFunction[T](dataStream: DataStream[_]) = {
    dataStream.javaStream
      .getTransformation
      .asInstanceOf[OneInputTransformation[_, _]]
      .getOperator
      .asInstanceOf[T]
  }
}
DieBauer/flink
flink-libraries/flink-cep-scala/src/test/scala/org/apache/flink/cep/scala/PatternStreamScalaJavaAPIInteroperabilityTest.scala
Scala
apache-2.0
5,413
package com.twitter.finagle.netty3.ssl.server

import com.twitter.finagle.ssl.server.{SslServerConfiguration, SslServerSessionVerifier}
import java.net.SocketAddress
import java.security.cert.Certificate
import javax.net.ssl.{SSLEngine, SSLSession}
import org.jboss.netty.channel._
import org.jboss.netty.handler.ssl.SslHandler
import org.mockito.Matchers._
import org.mockito.Mockito.{times, verify, when}
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar

/**
 * Unit tests for `SslServerConnectHandler`: the handler must hold back the
 * CONNECTED upstream event until the TLS handshake completes and the session
 * verifier accepts the session, and must invoke its shutdown callback when the
 * channel closes.
 */
class SslServerConnectHandlerTest extends FunSuite with MockitoSugar {

  /** Wires up a mocked Netty 3 channel/pipeline around a mocked SslHandler. */
  class SslConnectHandlerHelper {
    val ctx = mock[ChannelHandlerContext]
    val sslHandler = mock[SslHandler]
    val sslSession = mock[SSLSession]
    when(sslSession.getPeerCertificates) thenReturn Array.empty[Certificate]
    val engine = mock[SSLEngine]
    when(engine.getSession) thenReturn sslSession
    when(sslHandler.getEngine) thenReturn engine

    val channel = mock[Channel]
    when(ctx.getChannel) thenReturn channel
    val pipeline = mock[ChannelPipeline]
    when(channel.getPipeline) thenReturn pipeline
    val closeFuture = Channels.future(channel)
    when(channel.getCloseFuture) thenReturn closeFuture
    val remoteAddress = mock[SocketAddress]
    when(channel.getRemoteAddress) thenReturn remoteAddress
    // Completed manually by each test to simulate the handshake finishing.
    val handshakeFuture = Channels.future(channel)
    when(sslHandler.handshake()) thenReturn handshakeFuture
  }

  /**
   * Adds the handler under test and immediately fires a CONNECTED event
   * through it, so each test only needs to resolve the handshake future.
   */
  class SslServerConnectHandlerHelper extends SslConnectHandlerHelper {
    var shutdownCount = 0
    def onShutdown() = shutdownCount += 1

    val config = mock[SslServerConfiguration]
    val verifier = mock[SslServerSessionVerifier]
    val connectHandler = new SslServerConnectHandler(sslHandler, config, verifier, onShutdown)
    val event = new UpstreamChannelStateEvent(
      channel, ChannelState.CONNECTED, remoteAddress)
    connectHandler.handleUpstream(ctx, event)
  }

  test("SslServerConnectHandler should call the shutdown callback on channel shutdown") {
    val h = new SslServerConnectHandlerHelper
    import h._

    val event = new UpstreamChannelStateEvent(channel, ChannelState.OPEN, null)
    connectHandler.channelClosed(mock[ChannelHandlerContext], event)
    assert(shutdownCount == 1)
  }

  test("SslServerConnectHandler should delay connection until the handshake is complete") {
    val h = new SslServerConnectHandlerHelper
    import h._

    when(verifier.apply(config, sslSession)) thenReturn true
    verify(sslHandler, times(1)).handshake()
    // No upstream event may propagate before the handshake resolves...
    verify(ctx, times(0)).sendUpstream(any[ChannelEvent])
    handshakeFuture.setSuccess()
    // ...and exactly one must propagate after success with a passing verifier.
    verify(ctx, times(1)).sendUpstream(any[ChannelEvent])
  }

  test("SslServerConnectHandler should not connect when verification fails") {
    val h = new SslServerConnectHandlerHelper
    import h._

    when(verifier.apply(config, sslSession)) thenReturn false
    verify(sslHandler, times(1)).handshake()
    verify(ctx, times(0)).sendUpstream(any[ChannelEvent])
    handshakeFuture.setSuccess()
    // Verifier rejected the session: the CONNECTED event is never forwarded.
    verify(ctx, times(0)).sendUpstream(any[ChannelEvent])
  }

  test("SslServerConnectHandler should not connect when verification throws") {
    val h = new SslServerConnectHandlerHelper
    import h._

    when(verifier.apply(config, sslSession)) thenThrow new RuntimeException("Failed verification")
    verify(sslHandler, times(1)).handshake()
    verify(ctx, times(0)).sendUpstream(any[ChannelEvent])
    handshakeFuture.setSuccess()
    // A throwing verifier is treated the same as a rejection.
    verify(ctx, times(0)).sendUpstream(any[ChannelEvent])
  }
}
koshelev/finagle
finagle-netty3/src/test/scala/com/twitter/finagle/netty3/ssl/server/SslServerConnectHandlerTest.scala
Scala
apache-2.0
3,484
import auth.UserAuthenticator import org.scalatestplus.play.PlaySpec import org.scalatestplus.play.guice.GuiceOneAppPerSuite import play.api.test.Helpers._ import play.api.test._ class WithAuthenticationSpec extends PlaySpec with GuiceOneAppPerSuite with AuthenticationHelper { val userAuthenticator = app.injector.instanceOf[UserAuthenticator] "WithAuthentication controller" should { val userAuthenticator = app.injector.instanceOf[UserAuthenticator] "redirect to the login page on accessing a page that requires authentication without being authenticated" in { val result = route(app, FakeRequest(controllers.routes.WithAuthentication.authenticated)).get status(result) mustBe SEE_OTHER } "successfully return a page that requires authentication while being authenticated" in { val sessionCookie = getSessionCookie(userLoginData(false)) val result = route(app, FakeRequest(controllers.routes.WithAuthentication.authenticated).withCookies(sessionCookie.get)).get status(result) mustBe OK } } }
jasperdenkers/play-auth
integration/test/WithAuthenticationSpec.scala
Scala
mit
1,070
package im.rore.countem.utils

/**
 * Scala front-end to the SLF4J logging API.
 */

import org.slf4j.{Logger => SLF4JLogger}

/**
 * Thin Scala wrapper around an SLF4J logger. All message parameters are
 * by-name, so messages are only built when the corresponding level is enabled.
 */
class Logger(val logger: SLF4JLogger) {

  /** The name associated with this logger. */
  def name = logger.getName

  /** Whether TRACE logging is enabled. */
  def isTraceEnabled = logger.isTraceEnabled

  /** Logs `msg` (via `toString`) at TRACE level. */
  def trace(msg: => AnyRef): Unit =
    if (isTraceEnabled) logger.trace(msg.toString)

  /** Logs `msg` with exception `t` at TRACE level. */
  def trace(msg: => AnyRef, t: => Throwable): Unit =
    if (isTraceEnabled) logger.trace(msg.toString, t)

  /** Whether DEBUG logging is enabled. */
  def isDebugEnabled = logger.isDebugEnabled

  /** Logs `msg` (via `toString`) at DEBUG level. */
  def debug(msg: => AnyRef): Unit =
    if (isDebugEnabled) logger.debug(msg.toString)

  /** Logs `msg` with exception `t` at DEBUG level. */
  def debug(msg: => AnyRef, t: => Throwable): Unit =
    if (isDebugEnabled) logger.debug(msg.toString, t)

  /** Whether ERROR logging is enabled. */
  def isErrorEnabled = logger.isErrorEnabled

  /** Logs `msg` (via `toString`) at ERROR level. */
  def error(msg: => AnyRef): Unit =
    if (isErrorEnabled) logger.error(msg.toString)

  /** Logs `msg` with exception `t` at ERROR level. */
  def error(msg: => AnyRef, t: => Throwable): Unit =
    if (isErrorEnabled) logger.error(msg.toString, t)

  /** Whether INFO logging is enabled. */
  def isInfoEnabled = logger.isInfoEnabled

  /** Logs `msg` (via `toString`) at INFO level. */
  def info(msg: => AnyRef): Unit =
    if (isInfoEnabled) logger.info(msg.toString)

  /** Logs `msg` with exception `t` at INFO level. */
  def info(msg: => AnyRef, t: => Throwable): Unit =
    if (isInfoEnabled) logger.info(msg.toString, t)

  /** Whether WARN logging is enabled. */
  def isWarnEnabled = logger.isWarnEnabled

  /** Logs `msg` (via `toString`) at WARN level. */
  def warn(msg: => AnyRef): Unit =
    if (isWarnEnabled) logger.warn(msg.toString)

  /** Logs `msg` with exception `t` at WARN level. */
  def warn(msg: => AnyRef, t: => Throwable): Unit =
    if (isWarnEnabled) logger.warn(msg.toString, t)
}

/**
 * Factory for `Logger` instances plus convenience methods that delegate to a
 * shared root logger (named by `RootLoggerName`, not SLF4J's actual root).
 */
object Logger {

  /**
   * The name associated with the root logger.
   */
  //val RootLoggerName = SLF4JLogger.ROOT_LOGGER_NAME
  val RootLoggerName = "serendip";

  /** Get the logger with the specified name. */
  def apply(name: String): Logger = new Logger(org.slf4j.LoggerFactory.getLogger(name))

  /** Get the logger for a class, keyed by its fully qualified name. */
  def apply(cls: Class[_]): Logger = apply(cls.getName)

  /** Get the logger for an object's runtime class. */
  def apply(obj: AnyRef): Logger = apply(obj.getClass)

  /** The shared root logger. */
  val rootLogger = apply(RootLoggerName)

  // NOTE(review): not referenced anywhere in this file — confirm external
  // callers before removing.
  var reportToErrorLog = true;

  // Convenience delegates to the root logger (messages are strict here, not by-name).
  def trace(msg:String)= rootLogger.trace(msg)
  def trace(msg:String, t:Throwable) = rootLogger.trace(msg,t)
  def debug(msg:String) = rootLogger.debug(msg)
  def debug(msg:String, t:Throwable) = rootLogger.debug(msg,t)
  def error(msg:String) = {
    rootLogger.error(msg)
  }
  def error(msg:String, t:Throwable) = {
    rootLogger.error(msg,t)
  }
  def info(msg:String) = rootLogger.info(msg)
  def info(msg:String, t:Throwable) = rootLogger.info(msg,t)
  def warn(msg:String) = rootLogger.warn(msg)
  def warn(msg:String, t:Throwable) = rootLogger.warn(msg,t)

  // Java interface: overload-free names callable from Java code.
  def logError(msg:String, t:Throwable=null){
    rootLogger.error(msg,t);
  }
  def logInfo(msg:String) = rootLogger.info(msg)
  def logWarn(msg:String) = rootLogger.warn(msg)
  def logDebug(msg:String) = rootLogger.debug(msg)
}

/**
 * Mix-in that gives the implementing class logging methods bound to a logger
 * named after its runtime class.
 */
trait Logging {
  private val logger = Logger(getClass)

  // NOTE(review): not referenced in this file — presumably overridden by
  // subclasses; confirm before removing.
  protected def reportToErrorLog = true;

  def trace(msg: => AnyRef)= logger.trace(msg)
  def trace(msg: => AnyRef, t: => Throwable) = logger.trace(msg,t)
  def debug(msg: => AnyRef) = logger.debug(msg)
  def debug(msg: => AnyRef, t: => Throwable) = logger.debug(msg,t)
  def error(msg:String) = {
    val s = msg.toString;
    logger.error(s)
  }
  def error(t: => Throwable) = {
    val s = "exception";
    logger.error(s, t)
  }
  def error(msg: => AnyRef, t: => Throwable) = {
    // NOTE(review): `s` is computed but never used — `msg` is passed through
    // directly. Harmless (same string), but worth cleaning up.
    val s = msg.toString;
    logger.error(msg,t)
  }
  def info(msg: => AnyRef) = logger.info(msg)
  def info(msg: => AnyRef, t: => Throwable) = logger.info(msg,t)
  def warn(msg: => AnyRef) = logger.warn(msg)
  def warn(msg: => AnyRef, t: => Throwable) =
    logger.warn(msg,t)
}
rore/countem
countem-core/src/main/scala/im/rore/countem/utils/Logger.scala
Scala
apache-2.0
6,831
/*
 * Optimus — the mathematical programming library for Scala.
 * (Decorative ASCII-art banner replaced with this plain header.)
 */

import sbt._

/** Centralised third-party dependency declarations for the sbt build. */
object Dependencies {

  /** Pinned versions for all managed artifacts. */
  object v {
    final val Logback = "1.2.3"
    final val ScalaLogging = "3.9.3"
    final val ScalaTest = "3.2.7"
    final val ScalaCheck = "1.15.3"
    final val ScalaTestPlus = "3.2.7.0"
    final val LpSolve = "5.5.2.0"
    final val ojAlgorithms = "48.3.1"
    final val Trove = "3.1.0"
    final val ScalaXML = "1.3.0"
    final val Enums = "1.6.1"
  }

  // Logging using slf4j and logback
  lazy val Logging: Seq[ModuleID] = Seq(
    "ch.qos.logback" % "logback-classic" % v.Logback,
    "com.typesafe.scala-logging" %% "scala-logging" % v.ScalaLogging
  )

  // ScalaTest and ScalaCheck for UNIT testing (test scope only)
  lazy val ScalaTest: Seq[ModuleID] = Seq(
    "org.scalatest" %% "scalatest" % v.ScalaTest % "test",
    "org.scalatestplus" %% "scalacheck-1-15" % v.ScalaTestPlus % "test",
    "org.scalacheck" %% "scalacheck" % v.ScalaCheck % "test"
  )

  // GNU Trove collections and other tools
  lazy val Tools: Seq[ModuleID] = Seq(
    "org.scala-lang.modules" %% "scala-xml" % v.ScalaXML,
    "net.sf.trove4j" % "core" % v.Trove,
    "com.beachape" %% "enumeratum" % v.Enums
  )

  // LpSolve library for linear programming
  lazy val LpSolve: ModuleID = "com.datumbox" % "lpsolve" % v.LpSolve

  // oj! Algorithms library for linear and quadratic programming
  lazy val ojAlgorithms: ModuleID = "org.ojalgo" % "ojalgo" % v.ojAlgorithms
}
vagmcs/Optimus
project/Dependencies.scala
Scala
lgpl-3.0
2,142
import akka.actor.{ActorRef, ActorRefFactory} import scavlink.connection.udp.UdpBridge import scavlink.link.Vehicle import scavlink.link.fence.FenceActor import scavlink.task.TaskInitializer package object scavlink { type EventMatcher[T] = T => Boolean type PartialEventMatcher[T] = PartialFunction[T, Boolean] type ScavlinkInitializer = (ActorRef, ScavlinkContext, ActorRefFactory) => Seq[ActorRef] type VehicleInitializer = (Vehicle, ActorRefFactory) => Seq[ActorRef] type KeyAuthorizer = String => Boolean val DefaultScavlinkInitializers = Seq( UdpBridge.initializer, TaskInitializer // TrafficControlActor.initializer(ProximityMonitor(20, 1)) ) val DefaultVehicleInitializers = Seq( FenceActor.initializer ) }
nickolasrossi/scavlink
src/main/scala/scavlink/package.scala
Scala
mit
753
def safeHead[A]: List[A] => Option[A] = { case Nil => None case x :: xs => Some(x) }
hmemcpy/milewski-ctfp-pdf
src/content/1.10/code/scala/snippet04.scala
Scala
gpl-3.0
88
package controllers import akka.pattern.ask import play.api.data._, Forms._ import play.api.libs.concurrent.Akka import play.api.libs.iteratee._ import play.api.libs.json._ import play.api.mvc._, Results._ import lila.app._ import lila.hub.actorApi.captcha.ValidCaptcha import makeTimeout.large import views._ object Main extends LilaController { private lazy val blindForm = Form(tuple( "enable" -> nonEmptyText, "redirect" -> nonEmptyText )) def toggleBlindMode = OpenBody { implicit ctx => implicit val req = ctx.body fuccess { blindForm.bindFromRequest.fold( err => BadRequest, { case (enable, redirect) => Redirect(redirect) withCookies lila.common.LilaCookie.cookie( Env.api.Accessibility.blindCookieName, if (enable == "0") "" else Env.api.Accessibility.hash, maxAge = Env.api.Accessibility.blindCookieMaxAge.some, httpOnly = true.some) }) } } def websocket = SocketOption { implicit ctx => get("sri") ?? { uid => Env.site.socketHandler(uid, ctx.userId, get("flag")) map some } } def captchaCheck(id: String) = Open { implicit ctx => Env.hub.actor.captcher ? ValidCaptcha(id, ~get("solution")) map { case valid: Boolean => Ok(valid fold (1, 0)) } } def embed = Action { req => Ok { s"""document.write("<iframe src='${Env.api.Net.BaseUrl}?embed=" + document.domain + "' class='lichess-iframe' allowtransparency='true' frameBorder='0' style='width: ${getInt("w", req) | 820}px; height: ${getInt("h", req) | 650}px;' title='Lichess free online chess'></iframe>");""" } as JAVASCRIPT withHeaders (CACHE_CONTROL -> "max-age=86400") } def developers = Open { implicit ctx => fuccess { html.site.developers() } } def irc = Open { implicit ctx => ctx.me ?? Env.team.api.mine map { html.site.irc(_) } } def themepicker = Open { implicit ctx => fuccess { html.base.themepicker() } } def mobile = Open { implicit ctx => OptionOk(Prismic oneShotBookmark "mobile-apk") { case (doc, resolver) => html.site.mobile(doc, resolver) } } }
Happy0/lila
app/controllers/Main.scala
Scala
mit
2,193
package xyz.hyperreal.avr8 object Main extends App { SRecord( io.Source fromString """S00F000068656C6C6F202020202000003C S11F00007C0802A6900100049421FFF07C6C1B787C8C23783C6000003863000026 S11F001C4BFFFFE5398000007D83637880010014382100107C0803A64E800020E9 S111003848656C6C6F20776F726C642E0A0042 S5030003F9 S9030000FC""", v => println( "header: " + new String(v.toArray) ), (address, data) => println( "data: " + address.toHexString + ": " + data.map("%02x".format(_)).mkString ), c => println( "count: " + c ), address => println( "start: " + address.toHexString ) ) }
edadma/avr8
src/main/scala/Main.scala
Scala
mit
580
package db import io.flow.delta.v0.models.{Build, State, StateForm, Version} import io.flow.postgresql.Authorization import io.flow.test.utils.FlowPlaySpec class BuildStatesDaoSpec extends FlowPlaySpec with Helpers { def upsertBuildDesiredState( build: Build = upsertBuild(), form: StateForm = createStateForm() ): State = { rightOrErrors(buildDesiredStatesDao.create(systemUser, build, form)) } def createStateForm(): StateForm = { StateForm( versions = Seq( Version(name = "0.0.1", instances = 3), Version(name = "0.0.2", instances = 2) ) ) } "create desired" in { val build = upsertBuild() val state = rightOrErrors(buildDesiredStatesDao.create(systemUser, build, createStateForm())) state.versions.map(_.name) must be(Seq("0.0.1", "0.0.2")) state.versions.map(_.instances) must be(Seq(3, 2)) } "create actual" in { val build = upsertBuild() val state = rightOrErrors(buildLastStatesDao.create(systemUser, build, createStateForm())) state.versions.map(_.name) must be(Seq("0.0.1", "0.0.2")) state.versions.map(_.instances) must be(Seq(3, 2)) } "upsert" in { val build = upsertBuild() val state = rightOrErrors(buildDesiredStatesDao.upsert(systemUser, build, createStateForm())) val second = rightOrErrors(buildDesiredStatesDao.upsert(systemUser, build, createStateForm())) second.versions.map(_.name) must be(Seq("0.0.1", "0.0.2")) state.versions.map(_.instances) must be(Seq(3, 2)) } "delete" in { val build = upsertBuild() upsertBuildDesiredState(build) buildDesiredStatesDao.delete(systemUser, build) buildDesiredStatesDao.findByBuildId(Authorization.All, build.id) must be(None) } "saving prunes records w/ zero instances" in { val form = StateForm( versions = Seq( Version(name = "0.0.1", instances = 0), Version(name = "0.0.2", instances = 2) ) ) val build = upsertBuild() val state = rightOrErrors(buildDesiredStatesDao.create(systemUser, build, form)) state.versions.map(_.name) must be(Seq("0.0.2")) state.versions.map(_.instances) must be(Seq(2)) } }
flowcommerce/delta
api/test/db/BuildStatesDaoSpec.scala
Scala
mit
2,177
package ca.genovese.scalding.accumulo import cascading.tap.Tap import cascading.tuple.Fields import com.talk3.cascading.accumulo.{AccumuloScheme, AccumuloTap} import com.twitter.scalding.{AccessMode, Mode, Source} case class AccumuloSource(connectionString: String, scheme: String) extends Source { private val accumuloFields = new Fields("rowID", "colF", "colQ", "colVis", "colTimestamp", "colVal") protected def createAccumuloTap: AccumuloTap = { new AccumuloTap(connectionString, new AccumuloScheme(scheme)) } override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_, _, _] = mode match { case _ => createAccumuloTap.asInstanceOf[Tap[_, _, _]] } }
agenovese/scalding-accumulo
src/main/scala/ca/genovese/scalding/accumulo/AccumuloSource.scala
Scala
apache-2.0
708
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package controllers.registration.returns

import config.{AuthClientConnector, BaseControllerComponents, FrontendAppConfig}
import controllers.BaseController
import forms.ZeroRatedSuppliesForm
import play.api.mvc.{Action, AnyContent}
import services.{ReturnsService, SessionProfile, SessionService, VatRegistrationService}
import uk.gov.hmrc.http.InternalServerException
import views.html.returns.zero_rated_supplies

import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}

/**
 * Controller for the "zero-rated supplies" page of the VAT registration
 * returns journey. The form's validation bounds depend on the user's stored
 * turnover estimates, so both actions fail fast (500) when no estimates exist.
 */
@Singleton
class ZeroRatedSuppliesController @Inject()(val sessionService: SessionService,
                                            val authConnector: AuthClientConnector,
                                            returnsService: ReturnsService,
                                            vatRegistrationService: VatRegistrationService,
                                            zeroRatesSuppliesView: zero_rated_supplies
                                           )(implicit val executionContext: ExecutionContext,
                                             appConfig: FrontendAppConfig,
                                             baseControllerComponents: BaseControllerComponents)
  extends BaseController with SessionProfile {

  // GET: renders the form, pre-filled when a zero-rated-supplies answer was
  // previously saved; requires turnover estimates to build the form.
  val show: Action[AnyContent] = isAuthenticatedWithProfile() {
    implicit request =>
      implicit profile =>
        returnsService.getReturns flatMap { returns =>
          vatRegistrationService.fetchTurnoverEstimates map { optEstimates =>
            (returns.zeroRatedSupplies, optEstimates) match {
              case (Some(zeroRatedSupplies), Some(estimates)) => Ok(zeroRatesSuppliesView(
                routes.ZeroRatedSuppliesController.submit,
                ZeroRatedSuppliesForm.form(estimates).fill(zeroRatedSupplies)
              ))
              case (None, Some(estimates)) => Ok(zeroRatesSuppliesView(
                routes.ZeroRatedSuppliesController.submit,
                ZeroRatedSuppliesForm.form(estimates)
              ))
              // No estimates stored: the form cannot be constructed.
              case (_, None) => throw new InternalServerException("[ZeroRatedSuppliesController][show] Did not find user's turnover estimates")
            }
          }
        }
  }

  // POST: validates against the estimate-bounded form; on success saves the
  // answer and moves on to the claim-refunds page.
  val submit: Action[AnyContent] = isAuthenticatedWithProfile() {
    implicit request =>
      implicit profile =>
        vatRegistrationService.fetchTurnoverEstimates flatMap {
          case Some(estimates) => ZeroRatedSuppliesForm.form(estimates).bindFromRequest.fold(
            errors => Future.successful(
              BadRequest(zeroRatesSuppliesView(
                routes.ZeroRatedSuppliesController.submit,
                errors
              ))),
            success => returnsService.saveZeroRatesSupplies(success) map { _ =>
              Redirect(controllers.registration.returns.routes.ClaimRefundsController.show)
            }
          )
          case None => throw new InternalServerException("[ZeroRatedSuppliesController][submit] Did not find user's turnover estimates")
        }
  }

}
hmrc/vat-registration-frontend
app/controllers/registration/returns/ZeroRatedSuppliesController.scala
Scala
apache-2.0
3,659
package breeze.linalg

import breeze.generic.UFunc
import breeze.linalg.operators.OpDiv

/**
 * Scales its argument so that its n-norm becomes 1.0.
 *
 * A zero-norm value is returned unchanged (there is no direction to
 * preserve, and dividing by zero would be meaningless).
 */
object normalize extends UFunc {

  /** Normalizes with respect to an arbitrary Double-valued norm `n`. */
  implicit def normalizeDoubleImpl[T, U >: T](implicit div: OpDiv.Impl2[T, Double, U],
                                              canNorm: norm.Impl2[T, Double, Double]): Impl2[T, Double, U] =
    new Impl2[T, Double, U] {
      def apply(value: T, n: Double): U = {
        // Named `magnitude` to avoid shadowing the `norm` UFunc object.
        val magnitude = canNorm(value, n)
        if (magnitude == 0) value else div(value, magnitude)
      }
    }

  /** Default form: normalize with respect to the 2-norm. */
  implicit def normalizeImpl[T, U >: T](implicit impl: Impl2[T, Double, U]): Impl[T, U] =
    new Impl[T, U] {
      def apply(v: T): U = impl(v, 2.0)
    }

  /** Accepts an Int-valued norm order by widening it to Double. */
  implicit def normalizeIntImpl[T, U >: T](implicit impl: Impl2[T, Double, U]): Impl2[T, Int, U] =
    new Impl2[T, Int, U] {
      def apply(v: T, n: Int): U = impl(v, n)
    }
}
wavelets/breeze
src/main/scala/breeze/linalg/normalize.scala
Scala
apache-2.0
923
package com.twitter.finagle.http2.transport

import com.twitter.finagle.Stack
import com.twitter.finagle.http2.{Http2PipelineInitializer, MultiplexHandlerBuilder}
import com.twitter.finagle.netty4.http._
import com.twitter.finagle.param.Stats
import io.netty.channel.{Channel, ChannelHandlerContext, ChannelInitializer}
import io.netty.handler.ssl.{ApplicationProtocolNames, ApplicationProtocolNegotiationHandler}

/**
 * Netty channel handler that finishes server pipeline construction once TLS
 * protocol negotiation (NPN or ALPN) has completed. HTTP/1.1 is passed to the
 * superclass as the fallback protocol when the peer negotiates nothing else.
 *
 * @param init   initializer applied to each HTTP/2 stream channel
 * @param params stack params; `Stats` is read here for upgrade/stream metrics
 */
final private[http2] class ServerNpnOrAlpnHandler(
  init: ChannelInitializer[Channel],
  params: Stack.Params)
    extends ApplicationProtocolNegotiationHandler(ApplicationProtocolNames.HTTP_1_1) {

  private[this] val Stats(statsReceiver) = params[Stats]

  // Incremented once per connection that successfully negotiates HTTP/2.
  private[this] val upgradeCounter = statsReceiver.scope("upgrade").counter("success")

  @throws(classOf[Exception])
  protected def configurePipeline(ctx: ChannelHandlerContext, protocol: String): Unit =
    protocol match {
      case ApplicationProtocolNames.HTTP_2 =>
        // Http2 has been negotiated, replace the HttpCodec with an Http2Codec
        upgradeCounter.incr()
        // NOTE(review): auto-read is forced on here — presumably required by the
        // multiplex codec's stream setup; confirm the backpressure implications.
        ctx.channel.config.setAutoRead(true)
        val initializer = H2StreamChannelInit.initServer(init, params)
        val (codec, handler) = MultiplexHandlerBuilder.serverFrameCodec(params, initializer)
        MultiplexHandlerBuilder.addStreamsGauge(statsReceiver, codec, ctx.channel)
        // Order matters: the frame codec replaces the HTTP/1.1 codec in place,
        // then the multiplex handler is inserted immediately after it.
        ctx.pipeline.replace(HttpCodecName, Http2CodecName, codec)
        ctx.pipeline.addAfter(Http2CodecName, Http2MultiplexHandlerName, handler)
        Http2PipelineInitializer.setup(ctx, params)

      case ApplicationProtocolNames.HTTP_1_1 =>
        // The Http codec is already in the pipeline, so we are good!

      case _ =>
        throw new IllegalStateException("unknown protocol: " + protocol)
    }
}
luciferous/finagle
finagle-http2/src/main/scala/com/twitter/finagle/http2/transport/ServerNpnOrAlpnHandler.scala
Scala
apache-2.0
1,762
/*
 * Copyright (c) 2017 Richard Hull
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

package byok3.annonation

import scala.annotation.StaticAnnotation

/**
 * Marker annotation with no members. Presumably tags word definitions that
 * should behave as Forth IMMEDIATE words (executed at compile time rather
 * than being compiled) — NOTE(review): confirm against the annotation's
 * usage sites elsewhere in byok3.
 */
class Immediate extends StaticAnnotation
rm-hull/byok3
core/src/main/scala/byok3/annonation/Immediate.scala
Scala
mit
1,224
/**
 * (c) Copyright 2013 WibiData, Inc.
 *
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kiji.express.flow.framework.serialization

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.Serializer
import com.esotericsoftware.kryo.io.Input
import com.esotericsoftware.kryo.io.Output
import org.apache.avro.Schema
import org.apache.avro.generic.GenericContainer
import org.apache.avro.generic.GenericDatumReader
import org.apache.avro.generic.GenericDatumWriter
import org.apache.avro.io.DecoderFactory
import org.apache.avro.io.EncoderFactory
import org.apache.avro.specific.SpecificDatumReader
import org.apache.avro.specific.SpecificDatumWriter
import org.apache.avro.specific.SpecificRecord

import org.kiji.annotations.ApiAudience
import org.kiji.annotations.ApiStability
import org.kiji.annotations.Inheritance

/**
 * Provides serialization for Avro schemas while using Kryo serialization.
 *
 * The schema is written as its JSON string form and re-parsed on read, so
 * this serializer is stateless and safe to share between callers.
 */
@ApiAudience.Private
@ApiStability.Stable
final class AvroSchemaSerializer extends Serializer[Schema] {
  // A schema is always required to perform serialization.
  setAcceptsNull(false)

  /** Writes the schema as its (compact) JSON string encoding. */
  override def write(
      kryo: Kryo,
      output: Output,
      schema: Schema
  ): Unit = {
    val encodedSchema = schema.toString(false)
    output.writeString(encodedSchema)
  }

  /** Reads a JSON-encoded schema string and parses it back into a Schema. */
  override def read(
      kryo: Kryo,
      input: Input,
      klazz: Class[Schema]
  ): Schema = {
    val encodedSchema = input.readString()
    new Schema.Parser().parse(encodedSchema)
  }
}

/**
 * Provides serialization for Avro generic records while using Kryo serialization. Record schemas
 * are prepended to the encoded generic record data.
 */
@ApiAudience.Private
@ApiStability.Stable
final class AvroGenericSerializer extends Serializer[GenericContainer] {
  // TODO(EXP-269): Cache encoders per schema.
  // We at least need an avro schema to perform serialization.
  setAcceptsNull(false)

  // Hoisted out of write/read: AvroSchemaSerializer is stateless, so one
  // shared instance avoids an allocation per record (partial fix for EXP-269).
  private[this] val schemaSerializer = new AvroSchemaSerializer

  /** Writes the record's schema followed by the binary-encoded record data. */
  override def write(
      kryo: Kryo,
      output: Output,
      avroObject: GenericContainer
  ): Unit = {
    // Serialize the schema.
    schemaSerializer.write(kryo, output, avroObject.getSchema)

    // Serialize the data.
    val writer = new GenericDatumWriter[GenericContainer](avroObject.getSchema)
    val encoder = EncoderFactory
        .get()
        .directBinaryEncoder(output, null)
    writer.write(avroObject, encoder)
  }

  /** Reads the prepended schema, then decodes the record data with it. */
  override def read(
      kryo: Kryo,
      input: Input,
      klazz: Class[GenericContainer]
  ): GenericContainer = {
    // Deserialize the schema.
    val schema = schemaSerializer.read(kryo, input, null)

    // Deserialize the data.
    val reader = new GenericDatumReader[GenericContainer](schema)
    val decoder = DecoderFactory
        .get()
        .directBinaryDecoder(input, null)
    reader.read(null.asInstanceOf[GenericContainer], decoder)
  }
}

/**
 * Provides serialization for Avro specific records while using Kryo serialization. Record schemas
 * are not serialized as all clients interacting with this data are assumed to have the correct
 * specific record class on their classpath.
 */
@ApiAudience.Private
@ApiStability.Stable
final class AvroSpecificSerializer extends Serializer[SpecificRecord] {
  // TODO(EXP-269) Cache encoders per class/schema.
  setAcceptsNull(false)

  /** Binary-encodes the record using its own class's writer schema. */
  override def write(
      kryo: Kryo,
      output: Output,
      record: SpecificRecord
  ): Unit = {
    val writer =
        new SpecificDatumWriter[SpecificRecord](record.getClass.asInstanceOf[Class[SpecificRecord]])
    val encoder = EncoderFactory
        .get()
        .directBinaryEncoder(output, null)
    writer.write(record, encoder)
  }

  /** Decodes a record using the schema derived from the requested class. */
  override def read(
      kryo: Kryo,
      input: Input,
      klazz: Class[SpecificRecord]
  ): SpecificRecord = {
    val reader = new SpecificDatumReader[SpecificRecord](klazz)
    val decoder = DecoderFactory
        .get()
        .directBinaryDecoder(input, null)
    reader.read(null.asInstanceOf[SpecificRecord], decoder)
  }
}
kijiproject/kiji-express
kiji-express/src/main/scala/org/kiji/express/flow/framework/serialization/AvroSerializer.scala
Scala
apache-2.0
4,530
package com.peterparameter.ecm.common

import com.peterparameter.ecm.common.Alias.Num
import spire.math.SafeLong.one

/** A single factor of a number. */
case class Factor(n: Num)

/**
 * A factorization represented as a map from base to its exponent
 * (multiplicity).
 */
case class Factors(factors: Map[Num, Int]) {

  /**
   * Multiplies the factorization back together: the product of
   * base^exponent over all entries.
   *
   * Bug fix: the previous body `acc * b ^ e` parsed as `(acc * b) ^ e`
   * because `^` binds more loosely than `*` in Scala, and `^` on SafeLong
   * is bitwise XOR rather than exponentiation — so the result was never
   * the folded product. An explicit `pow` call removes both problems.
   * (Assumes Num is spire's SafeLong, consistent with the `one` import.)
   */
  def folded: Num = factors.foldLeft(one) { case (acc, (base, exp)) => acc * base.pow(exp) }
}

object Factors {
  /** Convenience constructor for a single factor with multiplicity 1. */
  def apply(f: Num): Factors = Factors(Map(f -> 1))
}
pnosko/spire-ecm
src/main/scala/com/peterparameter/ecm/common/Factors.scala
Scala
mit
342
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest

/**
 * Trait whose instances provide a <a href="run$.html"><code>run</code></a> method and
 * configuration fields that implement the <em>ScalaTest shell</em>: its DSL for the
 * Scala interpreter.
 *
 * <p>
 * The main command of the ScalaTest shell is <code>run</code>, which runs a suite of
 * tests. The shell also provides commands for configuring a call to <code>run</code>:
 * </p>
 *
 * <ul>
 * <li><code>color</code> (the default) - display results in color (green for success;
 *     red for failure; yellow for warning; blue for statistics)</li>
 * <li><code>nocolor</code> - display results without color</li>
 * <li><code>durations</code> - display how long it took to run each test and suite</li>
 * <li><code>nodurations</code> (the default) - do not display durations</li>
 * <li><code>shortstacks</code> - display truncated stack traces for all exceptions</li>
 * <li><code>fullstacks</code> - display full stack traces for all exceptions</li>
 * <li><code>nostacks</code> (the default) - display no stack trace for
 *     <code>StackDepth</code> exceptions and a short one for other exceptions</li>
 * <li><code>stats</code> - display statistics before and after the run (expected test
 *     count, succeeded/failed/pending counts, <em>etc.</em>)</li>
 * <li><code>nostats</code> (the default) - do not display statistics</li>
 * </ul>
 *
 * <p>
 * Each configuration command is a field referring to another <code>Shell</code> instance
 * configured identically except for the one parameter being changed, so commands may be
 * chained (in any order) ahead of the final <code>run</code>. The package object of
 * <code>org.scalatest</code> exposes the same members with all parameters at their
 * defaults, so after <code>import org.scalatest._</code> you can write, for example:
 * </p>
 *
 * <pre>
 * scala&gt; run(new ArithmeticSuite)                 // default configuration
 * scala&gt; durations.run(new ArithmeticSuite)       // durations for this run only
 * scala&gt; durations.stats.run(new ArithmeticSuite) // durations and statistics
 * scala&gt; nocolor.run(new ArithmeticSuite)         // plain output
 * </pre>
 *
 * <p>
 * To change the defaults for subsequent runs, import the members of your preferred
 * configuration, <em>e.g.</em> <code>import stats.durations._</code>.
 * </p>
 *
 * <p>
 * To run several suites at once, compose them with the factory method of the
 * <a href="Suites$.html"><code>Suites</code></a> singleton object, which can be nested
 * to any depth:
 * </p>
 *
 * <pre>
 * scala&gt; run(Suites(new SuiteA, new SuiteB, Suites(new SuiteC, new SuiteD)))
 * </pre>
 *
 * <p>
 * <code>run</code> also accepts an optional test name and config map, so a single test
 * can be executed: <code>run(new ArithmeticSuite, "addition works")</code>.
 * </p>
 */
sealed trait Shell {

  /**
   * A <code>Shell</code> that passes <code>true</code> for <code>execute</code>'s
   * <code>color</code> parameter and is otherwise configured like this one.
   */
  val color: Shell

  /**
   * A <code>Shell</code> that passes <code>true</code> for <code>execute</code>'s
   * <code>durations</code> parameter and is otherwise configured like this one.
   */
  val durations: Shell

  /**
   * A <code>Shell</code> that passes <code>true</code> for <code>execute</code>'s
   * <code>shortstacks</code> parameter (and <code>false</code> for
   * <code>fullstacks</code>) and is otherwise configured like this one.
   */
  val shortstacks: Shell

  /**
   * A <code>Shell</code> that passes <code>true</code> for <code>execute</code>'s
   * <code>fullstacks</code> parameter (and <code>false</code> for
   * <code>shortstacks</code>) and is otherwise configured like this one.
   */
  val fullstacks: Shell

  /**
   * A <code>Shell</code> that passes <code>true</code> for <code>execute</code>'s
   * <code>stats</code> parameter and is otherwise configured like this one.
   */
  val stats: Shell

  /** A copy of this <code>Shell</code> with <code>colorPassed</code> set to <code>false</code>. */
  val nocolor: Shell

  /** A copy of this <code>Shell</code> with <code>durationsPassed</code> set to <code>false</code>. */
  val nodurations: Shell

  /** A copy of this <code>Shell</code> with <code>shortStacksPassed</code> set to <code>false</code>. */
  val nostacks: Shell

  /** A copy of this <code>Shell</code> with <code>statsPassed</code> set to <code>false</code>. */
  val nostats: Shell

  /**
   * Run the passed suite, optionally passing in a test name and config map.
   *
   * <p>
   * Invokes <code>execute</code> on <code>suite</code> with the given (or default)
   * <code>testName</code> and <code>configMap</code>, plus this instance's fixed
   * configuration values (<code>color</code>, <code>durations</code>,
   * <code>shortstacks</code>, <code>fullstacks</code>, <code>stats</code>).
   * </p>
   */
  def run(suite: Suite, testName: String = null, configMap: ConfigMap = ConfigMap.empty): Unit
}

// ShellImpl's parameters are private[scalatest] rather than private so that tests can
// exercise them without exposing copy() through the package object.
/**
 * Default [[Shell]] implementation. Each configuration command returns a copy of this
 * instance with the corresponding flag flipped; `run` hands the flags straight to
 * `Suite.execute`.
 */
private[scalatest] final case class ShellImpl(
  colorPassed: Boolean = true,
  durationsPassed: Boolean = false,
  shortstacksPassed: Boolean = false,
  fullstacksPassed: Boolean = false,
  statsPassed: Boolean = false
) extends Shell {

  // Each command is a lazy val so repeated access reuses the same configured copy.
  lazy val color: Shell = copy(colorPassed = true)
  lazy val durations: Shell = copy(durationsPassed = true)
  // Short and full stack traces are mutually exclusive.
  lazy val shortstacks: Shell = copy(shortstacksPassed = true, fullstacksPassed = false)
  lazy val fullstacks: Shell = copy(fullstacksPassed = true, shortstacksPassed = false)
  lazy val stats: Shell = copy(statsPassed = true)
  lazy val nocolor: Shell = copy(colorPassed = false)
  lazy val nodurations: Shell = copy(durationsPassed = false)
  lazy val nostacks: Shell = copy(shortstacksPassed = false, fullstacksPassed = false)
  lazy val nostats: Shell = copy(statsPassed = false)

  /**
   * Runs the given suite with this shell's configuration.
   * (Deprecated procedure syntax replaced with an explicit `: Unit =` result type.)
   */
  def run(suite: Suite, testName: String = null, configMap: ConfigMap = ConfigMap.empty): Unit = {
    suite.execute(testName, configMap, colorPassed, durationsPassed, shortstacksPassed,
      fullstacksPassed, statsPassed)
  }
}
travisbrown/scalatest
src/main/scala/org/scalatest/Shell.scala
Scala
apache-2.0
18,929
/*
 * This file is part of eCobertura.
 *
 * Copyright (c) 2009, 2010 Joachim Hofer
 * All rights reserved.
 *
 * This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 */
package ecobertura.ui.views.session.commands

import org.eclipse.jface.action.IContributionItem
import java.util
import org.eclipse.ui.PlatformUI
import org.eclipse.ui.menus._
import org.eclipse.ui.actions.CompoundContributionItem
import ecobertura.core.data.CoverageSession
import ecobertura.ui.views.session.CoverageSessionModel
import scala.collection.JavaConversions._

/**
 * Dynamic menu contribution that lists the recently recorded coverage
 * sessions as radio-style command items, one per session.
 */
class SessionHistoryContributionItem extends CompoundContributionItem {

  // One item per recent session; a single disabled "<empty>" placeholder
  // when there is no history yet.
  override def getContributionItems = {
    val contributionsFromRecentSessions = retrieveContributionsFromRecentSessions
    if (contributionsFromRecentSessions.isEmpty) {
      Array(createDisabledEmptyContribution)
    } else {
      contributionsFromRecentSessions
    }
  }

  // NOTE(review): if coverageSessionHistory is a Java collection, this
  // for-comprehension relies on the deprecated implicit JavaConversions
  // wrappers imported above — confirm the collection type before migrating
  // to scala.collection.JavaConverters.
  private def retrieveContributionsFromRecentSessions = {
    (for {
      coverageSession <- CoverageSessionModel.get.coverageSessionHistory
    } yield createCommandContributionItem(coverageSession)).toArray
  }

  // Builds one radio menu item; the session's display name doubles as both
  // the label and the command/radio-state parameter value.
  private def createCommandContributionItem(coverageSession: CoverageSession)
      : IContributionItem = {
    val cciParam = basicCommandContributionItemParameter
    cciParam.label = coverageSession.displayName
    cciParam.parameters = createSessionParameter(coverageSession.displayName)
    new CommandContributionItem(cciParam)
  }

  // Command parameters: the session to select plus the radio-state value.
  private def createSessionParameter(sessionName: String) = {
    val params = new util.HashMap[String, String]()
    params.put(
      "ecobertura.ui.views.session.commands.selectRecentCoverageSession.session",
      sessionName)
    params.put("org.eclipse.ui.commands.radioStateParameter", sessionName)
    params
  }

  // Placeholder item shown while the session history is empty; anonymous
  // subclass disables it so it is not selectable.
  private def createDisabledEmptyContribution : IContributionItem = {
    val cciParam = basicCommandContributionItemParameter
    cciParam.label = "<empty>"
    new CommandContributionItem(cciParam) {
      override def isEnabled = false
    }
  }

  // Common parameter object: same command id and radio style for every item.
  private def basicCommandContributionItemParameter =
    new CommandContributionItemParameter(
      PlatformUI.getWorkbench().getActiveWorkbenchWindow(), null,
      "ecobertura.ui.views.session.commands.selectRecentCoverageSession",
      CommandContributionItem.STYLE_RADIO)
}
jmhofer/eCobertura
ecobertura.ui/src/main/scala/ecobertura/ui/views/session/commands/SessionHistoryContributionItem.scala
Scala
epl-1.0
2,420
package com.sksamuel.scapegoat.inspections import com.sksamuel.scapegoat.{ Inspection, InspectionContext, Inspector, Levels } import scala.collection.mutable /** @author Stephen Samuel */ class LonelySealedTrait extends Inspection { override def inspector(context: InspectionContext): Inspector = new Inspector(context) { import context.global._ private val sealedClasses = mutable.HashMap[String, ClassDef]() private val implementedClasses = mutable.HashSet[String]() override def postInspection(): Unit = { for ((name, cdef) <- sealedClasses) { if (!implementedClasses.contains(name)) { context.warn("Lonely sealed trait", cdef.pos, Levels.Error, s"Sealed trait ${cdef.name} has no implementing classes", LonelySealedTrait.this) } } } private def inspectParents(parents: List[Tree]): Unit = { parents.foreach { case parent => for (c <- parent.tpe.baseClasses) implementedClasses.add(c.name.toString) } } override def postTyperTraverser = Some apply new context.Traverser { override def inspect(tree: Tree): Unit = { tree match { case cdef @ ClassDef(mods, name, _, _) if mods.isSealed => sealedClasses.put(cdef.name.toString, cdef) case ClassDef(_, name, _, Template(parents, _, _)) => inspectParents(parents) case ModuleDef(_, name, Template(parents, _, _)) => inspectParents(parents) case _ => } continue(tree) } } } }
pwwpche/scalac-scapegoat-plugin
src/main/scala/com/sksamuel/scapegoat/inspections/LonelySealedTrait.scala
Scala
apache-2.0
1,589
/*
 * Copyright 2015 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.atlas.core.model

import java.math.BigInteger
import java.time.Duration

import com.netflix.atlas.core.util.Math

/**
 * Marker for expressions that carry per-series state between evaluations
 * (state is threaded through ResultSet.state keyed by the expression itself).
 */
trait StatefulExpr extends TimeSeriesExpr {
}

object StatefulExpr {

  // Older stateless sketch of RollingCount, kept commented out for reference.
  /*case class RollingCount(expr: TimeSeriesExpr, n: Int) extends StatefulExpr {
    def dataExprs: List[DataExpr] = expr.dataExprs
    override def toString: String = s"$expr,$n,:rolling-count"
    def isGrouped: Boolean = expr.isGrouped
    def groupByKey(tags: Map[String, String]): Option[String] = expr.groupByKey(tags)
    def eval(context: EvalContext, data: List[TimeSeries]): ResultSet = {
      ResultSet(this, Nil)
    }
  }*/

  /**
   * Rolling count over a window of the last `n` datapoints: each output value
   * is the sum of Math.toBooleanDouble applied to the window's inputs (i.e. a
   * count of "true" values). Mutates the bounded buffer in place.
   */
  case class RollingCount(expr: TimeSeriesExpr, n: Int) extends StatefulExpr {
    import com.netflix.atlas.core.model.StatefulExpr.RollingCount._
    def dataExprs: List[DataExpr] = expr.dataExprs
    override def toString: String = s"$expr,$n,:rolling-count"
    def isGrouped: Boolean = expr.isGrouped
    def groupByKey(tags: Map[String, String]): Option[String] = expr.groupByKey(tags)

    // Advances the circular buffer `buf` over the new datapoints, maintaining
    // the running count `value`; returns the state to carry forward.
    private def eval(ts: ArrayTimeSeq, s: State): State = {
      val data = ts.data
      var pos = s.pos
      var value = s.value
      var buf = s.buf
      var i = 0
      while (i < data.length) {
        if (pos < n) {
          // Window not yet full: accumulate without evicting.
          buf(pos % n) = data(i)
          value = value + Math.toBooleanDouble(data(i))
          data(i) = value
        } else {
          // Evict the oldest slot, then add the new datapoint.
          val p = pos % n
          value = value - Math.toBooleanDouble(buf(p))
          value = value + Math.toBooleanDouble(data(i))
          buf(p) = data(i)
          data(i) = value
        }
        pos += 1
        i += 1
      }
      State(pos, value, buf)
    }

    private def newState: State = {
      State(0, 0.0, new Array[Double](n))
    }

    def eval(context: EvalContext, data: Map[DataExpr, List[TimeSeries]]): ResultSet = {
      val rs = expr.eval(context, data)
      // Window of 1 or less is a no-op; return the input unchanged.
      if (n <= 1) rs else {
        val state = rs.state.getOrElse(this, new StateMap).asInstanceOf[StateMap]
        val newData = rs.data.map { t =>
          val bounded = t.data.bounded(context.start, context.end)
          val s = state.getOrElse(t.id, newState)
          state(t.id) = eval(bounded, s)
          TimeSeries(t.tags, s"rolling-count(${t.label}, $n)", bounded)
        }
        ResultSet(this, newData, rs.state + (this -> state))
      }
    }
  }

  object RollingCount {
    // pos: datapoints consumed so far; value: current count; buf: circular window.
    case class State(pos: Int, value: Double, buf: Array[Double])
    type StateMap = scala.collection.mutable.AnyRefMap[BigInteger, State]
  }

  /**
   * Double exponential smoothing (`:des`): the formulas for sn/bn below are the
   * standard DES recurrences with level factor `alpha` and trend factor `beta`.
   * Output is NaN until `trainingSize` non-NaN samples have been consumed.
   */
  case class Des(
      expr: TimeSeriesExpr,
      trainingSize: Int,
      alpha: Double,
      beta: Double) extends StatefulExpr {
    import com.netflix.atlas.core.model.StatefulExpr.Des._
    def dataExprs: List[DataExpr] = expr.dataExprs
    override def toString: String = s"$expr,$trainingSize,$alpha,$beta,:des"
    def isGrouped: Boolean = expr.isGrouped
    def groupByKey(tags: Map[String, String]): Option[String] = expr.groupByKey(tags)

    // Runs the smoothing recurrence over the buffer in place. Note the output
    // written at each position is the *prediction* (sp) from before observing
    // the current sample. NaN inputs do not advance the sample count.
    private def eval(ts: ArrayTimeSeq, s: State): State = {
      var pos = s.pos
      var currentSample = s.currentSample
      var sp = s.sp
      var bp = s.bp
      val data = ts.data
      while (pos < data.length) {
        val yn = data(pos)
        data(pos) = if (currentSample > trainingSize) sp else Double.NaN
        if (!yn.isNaN) {
          if (currentSample == 0) {
            sp = yn; bp = 0.0
          } else {
            val sn = alpha * yn + (1 - alpha) * (sp + bp)
            val bn = beta * (sn - sp) + (1 - beta) * bp
            sp = sn; bp = bn
          }
          currentSample += 1
        }
        pos += 1
      }
      State(pos, currentSample, sp, bp)
    }

    private def newState: State = State(0, 0, 0.0, 0.0)

    def eval(context: EvalContext, data: Map[DataExpr, List[TimeSeries]]): ResultSet = {
      val rs = expr.eval(context, data)
      val state = rs.state.getOrElse(this, new StateMap).asInstanceOf[StateMap]
      val newData = rs.data.map { t =>
        val bounded = t.data.bounded(context.start, context.end)
        val s = state.getOrElse(t.id, newState)
        state(t.id) = eval(bounded, s)
        TimeSeries(t.tags, s"des(${t.label})", bounded)
      }
      ResultSet(this, newData, rs.state + (this -> state))
    }
  }

  object Des {
    case class State(pos: Int, currentSample: Int, sp: Double, bp: Double)
    type StateMap = scala.collection.mutable.AnyRefMap[BigInteger, State]
  }

  /**
   * Rolling mean (`:trend`) over a time window: sums the last `period`
   * datapoints (period = window / step) and divides by `period`. Emits NaN
   * until the window is full or when any slot in the window is NaN.
   */
  case class Trend(expr: TimeSeriesExpr, window: Duration) extends StatefulExpr {
    import com.netflix.atlas.core.model.StatefulExpr.Trend._
    def dataExprs: List[DataExpr] = expr.dataExprs
    override def toString: String = s"$expr,$window,:trend"
    def isGrouped: Boolean = expr.isGrouped
    def groupByKey(tags: Map[String, String]): Option[String] = expr.groupByKey(tags)

    // Maintains a circular window and NaN-count so the mean is only emitted
    // when the window contains fewer NaNs than `period`.
    private def eval(period: Int, ts: ArrayTimeSeq, s: State): State = {
      val data = ts.data
      var nanCount = s.nanCount
      var pos = s.pos
      var value = s.value
      var buf = s.buf
      var i = 0
      while (i < data.length) {
        if (data(i).isNaN) nanCount += 1
        if (pos < period - 1) {
          // Still filling the first window: output NaN.
          buf(pos % period) = data(i)
          value = Math.addNaN(value, data(i))
          data(i) = Double.NaN
        } else {
          val p = pos % period
          if (buf(p).isNaN) nanCount -= 1
          value = Math.addNaN(Math.subtractNaN(value, buf(p)), data(i))
          buf(p) = data(i)
          data(i) = if (nanCount < period) value / period else Double.NaN
        }
        pos += 1
        i += 1
      }
      State(nanCount, pos, value, buf)
    }

    private def newState(period: Int): State = {
      State(0, 0, 0.0, new Array[Double](period))
    }

    def eval(context: EvalContext, data: Map[DataExpr, List[TimeSeries]]): ResultSet = {
      val period = (window.toMillis / context.step).toInt
      val rs = expr.eval(context, data)
      // Windows smaller than 2 steps are a no-op.
      if (period <= 1) rs else {
        val state = rs.state.getOrElse(this, new StateMap).asInstanceOf[StateMap]
        val newData = rs.data.map { t =>
          val bounded = t.data.bounded(context.start, context.end)
          val s = state.getOrElse(t.id, newState(period))
          state(t.id) = eval(period, bounded, s)
          TimeSeries(t.tags, s"trend(${t.label}, $window)", bounded)
        }
        ResultSet(this, newData, rs.state + (this -> state))
      }
    }
  }

  object Trend {
    case class State(nanCount: Int, pos: Int, value: Double, buf: Array[Double])
    type StateMap = scala.collection.mutable.AnyRefMap[BigInteger, State]
  }

  /**
   * Running sum (`:integral`) across evaluations: each datapoint becomes the
   * cumulative sum so far; the last value is carried over as the next offset.
   * NOTE(review): bounded.data(0) is indexed unconditionally — assumes at
   * least one datapoint in the bounded range; confirm upstream guarantees.
   */
  case class Integral(expr: TimeSeriesExpr) extends StatefulExpr {
    type StateMap = scala.collection.mutable.AnyRefMap[BigInteger, Double]
    def dataExprs: List[DataExpr] = expr.dataExprs
    override def toString: String = s"$expr,:integral"
    def isGrouped: Boolean = expr.isGrouped
    def groupByKey(tags: Map[String, String]): Option[String] = expr.groupByKey(tags)

    def eval(context: EvalContext, data: Map[DataExpr, List[TimeSeries]]): ResultSet = {
      val rs = expr.eval(context, data)
      val state = rs.state.getOrElse(this, new StateMap).asInstanceOf[StateMap]
      val newData = rs.data.map { t =>
        val bounded = t.data.bounded(context.start, context.end)
        val length = bounded.data.length
        var i = 1
        // Seed with the carried-over sum from the previous evaluation.
        bounded.data(0) += state.getOrElse(t.id, 0.0)
        while (i < length) {
          bounded.data(i) += bounded.data(i - 1)
          i += 1
        }
        state(t.id) = bounded.data(i - 1)
        TimeSeries(t.tags, s"integral(${t.label})", bounded)
      }
      ResultSet(this, newData, rs.state + (this -> state))
    }
  }

  /**
   * Successive differences (`:derivative`): each datapoint becomes the delta
   * from the previous one; the last raw value is carried over so the first
   * datapoint of the next evaluation is differenced against it.
   */
  case class Derivative(expr: TimeSeriesExpr) extends StatefulExpr {
    type StateMap = scala.collection.mutable.AnyRefMap[BigInteger, Double]
    def dataExprs: List[DataExpr] = expr.dataExprs
    override def toString: String = s"$expr,:derivative"
    def isGrouped: Boolean = expr.isGrouped
    def groupByKey(tags: Map[String, String]): Option[String] = expr.groupByKey(tags)

    def eval(context: EvalContext, data: Map[DataExpr, List[TimeSeries]]): ResultSet = {
      val rs = expr.eval(context, data)
      val state = rs.state.getOrElse(this, new StateMap).asInstanceOf[StateMap]
      val newData = rs.data.map { t =>
        val bounded = t.data.bounded(context.start, context.end)
        val length = bounded.data.length
        var i = 1
        var prev = bounded.data(0)
        bounded.data(0) -= state.getOrElse(t.id, 0.0)
        while (i < length) {
          val tmp = prev
          prev = bounded.data(i)
          bounded.data(i) -= tmp
          i += 1
        }
        state(t.id) = prev
        TimeSeries(t.tags, s"derivative(${t.label})", bounded)
      }
      ResultSet(this, newData, rs.state + (this -> state))
    }
  }
}
gorcz/atlas
atlas-core/src/main/scala/com/netflix/atlas/core/model/StatefulExpr.scala
Scala
apache-2.0
9,443
/*
 *  Copyright 2015 the original author or authors.
 *  @https://github.com/scouter-project/scouter
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package scouter.server.netio.service.handle;

// NOTE(review): several of these imports (Pack, PackEnum, ListValue, CacheOut,
// IOException, ForwardBuilder, Value, CastUtil) are not referenced below; the
// sun.security ForwardBuilder import in particular looks accidental.
import scouter.util.StringUtil
import scouter.lang.TextTypes
import scouter.lang.pack.MapPack
import scouter.lang.pack.Pack
import scouter.lang.pack.PackEnum
import scouter.lang.pack.XLogPack
import scouter.lang.pack.XLogProfilePack
import scouter.lang.value.DecimalValue
import scouter.lang.value.ListValue
import scouter.io.DataInputX
import scouter.io.DataOutputX
import scouter.net.RequestCmd
import scouter.net.TcpFlag
import scouter.server.Configure
import scouter.server.Logger
import scouter.server.core.cache.CacheOut
import scouter.server.core.cache.TextCache
import scouter.server.core.cache.XLogCache
import scouter.server.db.XLogProfileRD
import scouter.server.db.XLogRD
import scouter.server.netio.service.anotation.ServiceHandler
import scouter.util.DateUtil
import scouter.util.IPUtil
import scouter.util.IntSet
import scouter.util.StrMatch
import java.io.IOException
import scouter.server.db.TextRD
import scouter.server.util.EnumerScala
import sun.security.provider.certpath.ForwardBuilder
import scouter.lang.value.Value
import scouter.util.CastUtil

/**
 * TCP service handlers for XLog (transaction log) queries. Each method reads a
 * MapPack of request parameters from `din` and streams result packs back over
 * `dout`, prefixing each chunk with TcpFlag.HasNEXT.
 */
class XLogService {

    /** Streams the (possibly truncated to `max` steps) profile for one transaction. */
    @ServiceHandler(RequestCmd.TRANX_PROFILE)
    def getProfile(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        var date = param.getText("date");
        val txid = param.getLong("txid");
        val max = param.getInt("max");
        // Default to today when no date was supplied.
        if (StringUtil.isEmpty(date)) {
            date = DateUtil.yyyymmdd(System.currentTimeMillis());
        }
        try {
            val profilePacket = XLogProfileRD.getProfile(date, txid, max);
            if (profilePacket != null) {
                dout.writeByte(TcpFlag.HasNEXT);
                val p = new XLogProfilePack();
                p.profile = profilePacket;
                dout.writePack(p); // ProfilePacket
            }
        } catch {
            case e: Exception => e.printStackTrace();
        }
    }

    /** Streams the full (untruncated) profile blob for one transaction. */
    @ServiceHandler(RequestCmd.TRANX_PROFILE_FULL)
    def getFullProfile(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        var date = param.getText("date");
        val txid = param.getLong("txid");
        val max = -1; // -1: no step limit
        if (StringUtil.isEmpty(date)) {
            date = DateUtil.yyyymmdd();
        }
        XLogProfileRD.getFullProfile(date, txid, max, (data: Array[Byte]) => {
            dout.writeByte(TcpFlag.HasNEXT);
            dout.writeBlob(data);
        })
    }

    /**
     * Streams recent XLogs from the in-memory cache, optionally filtered by a
     * set of object hashes and an elapsed-time lower bound.
     */
    @ServiceHandler(RequestCmd.TRANX_REAL_TIME_GROUP)
    def getRealtimePerfGroup(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val index = param.getInt("index");
        val loop = param.getLong("loop");
        var limit = param.getInt("limit");
        // Enforce the server-configured lower bound on the elapsed-time filter.
        limit = Math.max(Configure.getInstance().xlog_realtime_lower_bound_ms, limit);
        val objHashLv = param.getList("objHash");
        val intSet = if(objHashLv == null || objHashLv.size() < 1) null else new IntSet(objHashLv.size(), 1.0f)
        EnumerScala.foreach(objHashLv, (obj: DecimalValue) => {
            intSet.add(obj.intValue());
        })
        val d = if(intSet != null) XLogCache.get(intSet, loop, index, limit) else XLogCache.get(loop, index, limit)
        if (d == null) return ;
        // Send the cursor info (loop/index) in the first packet.
        val outparam = new MapPack();
        outparam.put("loop", new DecimalValue(d.loop));
        outparam.put("index", new DecimalValue(d.index));
        dout.writeByte(TcpFlag.HasNEXT);
        dout.writePack(outparam);
        EnumerScala.forward(d.data, (p: Array[Byte]) => {
            dout.writeByte(TcpFlag.HasNEXT);
            dout.write(p);
        })
    }

    /** Like getRealtimePerfGroup but returns at most the latest `count` entries. */
    @ServiceHandler(RequestCmd.TRANX_REAL_TIME_GROUP_LATEST)
    def getRealtimePerfGroupLatestCount(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val index = param.getInt("index");
        val loop = param.getLong("loop");
        var count = param.getInt("count");
        val objHashLv = param.getList("objHash");
        val objHashSet = if(objHashLv == null || objHashLv.size() < 1) null else new IntSet(objHashLv.size(), 1.0f)
        EnumerScala.foreach(objHashLv, (obj: DecimalValue) => {
            objHashSet.add(obj.intValue());
        })
        val d = if(objHashSet != null) XLogCache.getWithinCount(objHashSet, loop, index, count) else XLogCache.getWithinCount(loop, index, count)
        if (d == null) return ;
        // Send the cursor info (loop/index) in the first packet.
        val outparam = new MapPack();
        outparam.put("loop", new DecimalValue(d.loop));
        outparam.put("index", new DecimalValue(d.index));
        dout.writeByte(TcpFlag.HasNEXT);
        dout.writePack(outparam);
        EnumerScala.forward(d.data, (p: Array[Byte]) => {
            dout.writeByte(TcpFlag.HasNEXT);
            dout.write(p);
        })
    }

    /**
     * Streams historical XLogs from disk for a time range, filtered by object
     * hash and elapsed-time lower bound, optionally reading newest-first.
     */
    @ServiceHandler(RequestCmd.TRANX_LOAD_TIME_GROUP)
    def getHistoryPerfGroup(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val date = param.getText("date");
        val stime = param.getLong("stime");
        val etime = param.getLong("etime");
        val limitTime = param.getInt("limit");
        val limit = Math.max(Configure.getInstance().xlog_pasttime_lower_bound_ms, limitTime);
        val max = param.getInt("max");
        val rev = param.getBoolean("reverse");
        val objHashLv = param.getList("objHash");
        if (objHashLv == null || objHashLv.size() < 1) {
            return ;
        }
        val objHashSet = new IntSet();
        EnumerScala.foreach(objHashLv, (obj: DecimalValue) => {
            objHashSet.add(obj.intValue());
        })
        var cnt = 0;
        val handler = (time: Long, data: Array[Byte]) => {
            val x = new DataInputX(data).readPack().asInstanceOf[XLogPack];
            if (objHashSet.contains(x.objHash) && x.elapsed > limit) {
                dout.writeByte(TcpFlag.HasNEXT);
                dout.write(data);
                dout.flush();
                cnt += 1;
            }
            // NOTE: nonlocal return from inside the lambda — aborts the whole
            // method (and the underlying DB scan) once `max` entries were sent.
            if (max > 0 && cnt >= max) {
                return ;
            }
        }
        if (rev) {
            XLogRD.readFromEndTime(date, stime, etime, handler)
        } else {
            XLogRD.readByTime(date, stime, etime, handler);
        }
    }

    /** Streams all XLogs belonging to one global transaction id on one day. */
    @ServiceHandler(RequestCmd.XLOG_READ_BY_GXID)
    def readByGxId(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val date = param.getText("date");
        val gxid = param.getLong("gxid");
        try {
            val list = XLogRD.getByGxid(date, gxid);
            EnumerScala.forward(list, (xlog: Array[Byte]) => {
                dout.writeByte(TcpFlag.HasNEXT);
                dout.write(xlog);
                dout.flush();
            })
        } catch {
            // NOTE(review): failures are silently swallowed here (and below);
            // the client simply receives no data.
            case e: Exception => {}
        }
    }

    /** Streams the single XLog for one transaction id on one day. */
    @ServiceHandler(RequestCmd.XLOG_READ_BY_TXID)
    def readByTxId(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val date = param.getText("date");
        val txid = param.getLong("txid");
        try {
            val xbytes = XLogRD.getByTxid(date, txid);
            if (xbytes != null) {
                dout.writeByte(TcpFlag.HasNEXT);
                dout.write(xbytes);
                dout.flush();
            }
        } catch {
            case e: Exception => {}
        }
    }

    /**
     * Streams XLogs for a global transaction id, checking both the start-day
     * and end-day partitions when the time range spans midnight.
     */
    @ServiceHandler(RequestCmd.XLOG_LOAD_BY_GXID)
    def loadByGxId(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val stime = param.getLong("stime");
        val etime = param.getLong("etime");
        val gxid = param.getLong("gxid");
        val date = DateUtil.yyyymmdd(stime);
        val date2 = DateUtil.yyyymmdd(etime);
        try {
            val list = XLogRD.getByGxid(date, gxid);
            EnumerScala.forward(list, (xlog: Array[Byte]) => {
                dout.writeByte(TcpFlag.HasNEXT);
                dout.write(xlog);
                dout.flush();
            })
        } catch {
            case e: Exception => {}
        }
        // Time range crosses a day boundary: also read the second day's partition.
        if (date.equals(date2) == false) {
            try {
                val list = XLogRD.getByGxid(date2, gxid);
                EnumerScala.forward(list, (xlog: Array[Byte]) => {
                    dout.writeByte(TcpFlag.HasNEXT);
                    dout.write(xlog);
                    dout.flush();
                });
            } catch {
                case e: Exception => {}
            }
        }
    }

    /** Quick lookup by txid and/or gxid; streams whatever is found for either id. */
    @ServiceHandler(RequestCmd.QUICKSEARCH_XLOG_LIST)
    def quickSearchXlogList(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val date = param.getText("date");
        val txid = param.getLong("txid");
        val gxid = param.getLong("gxid");
        if (txid != 0) {
            try {
                val xbytes = XLogRD.getByTxid(date, txid);
                if (xbytes != null) {
                    dout.writeByte(TcpFlag.HasNEXT);
                    dout.write(xbytes);
                    dout.flush();
                }
            } catch {
                case e: Exception => {}
            }
        }
        if (gxid != 0) {
            try {
                val list = XLogRD.getByGxid(date, gxid);
                if (list == null) return ;
                for (i <- 0 to list.size() - 1) {
                    val xlog = list.get(i);
                    dout.writeByte(TcpFlag.HasNEXT);
                    dout.write(xlog);
                    dout.flush();
                }
            } catch {
                case e: Exception => {}
            }
        }
    }

    /**
     * Searches XLogs in a time range filtered by service-name pattern, object
     * hash and client-ip pattern; capped at 500 results.
     */
    @ServiceHandler(RequestCmd.SEARCH_XLOG_LIST)
    def searchXlogList(din: DataInputX, dout: DataOutputX, login: Boolean) {
        val param = din.readMapPack();
        val stime = param.getLong("stime");
        val etime = param.getLong("etime");
        val service = param.getText("service");
        val objHash = param.getInt("objHash");
        val ip = param.getText("ip");
        val serverMatch = if (service == null) null else new StrMatch(service);
        val ipMatch = if (ip == null) null else new StrMatch(ip);
        val date = DateUtil.yyyymmdd(stime);
        val date2 = DateUtil.yyyymmdd(etime);
        var mtime = 0L;
        var twoDays = false;
        var loadCount = 0;
        if (date.equals(date2) == false) {
            // Range spans midnight: mtime is the start-of-day millis of date2.
            mtime = DateUtil.yyyymmdd(date2);
            twoDays = true;
        }
        val handler = (time: Long, data: Array[Byte]) => {
            // NOTE: nonlocal return — stops the scan after 500 matches.
            if (loadCount >= 500) {
                return ;
            }
            val x = new DataInputX(data).readPack().asInstanceOf[XLogPack];
            var ok = true
            if (ipMatch != null) {
                if (x.ipaddr == null) {
                    ok = false;
                }
                if (ipMatch.include(IPUtil.toString(x.ipaddr)) == false) {
                    ok = false;
                }
            }
            if (objHash != 0 && x.objHash != objHash) {
                ok = false;
            }
            if (serverMatch != null) {
                // Resolve the service-name hash to its text before matching.
                var serviceName = TextRD.getString(DateUtil.yyyymmdd(time), TextTypes.SERVICE, x.service);
                if (serverMatch.include(serviceName) == false) {
                    ok = false;
                }
            }
            if (ok) {
                dout.writeByte(TcpFlag.HasNEXT);
                dout.write(data);
                dout.flush();
                loadCount += 1;
            }
        }
        if (twoDays) {
            XLogRD.readByTime(date, stime, mtime - 1, handler);
            XLogRD.readByTime(date2, mtime, etime, handler);
        } else {
            XLogRD.readByTime(date, stime, etime, handler);
        }
    }
}
yuyupapa/OpenSource
scouter.server/src/scouter/server/netio/service/handle/XLogService.scala
Scala
apache-2.0
12,841
/* * Copyright (C) 2017. envisia GmbH * All Rights Reserved. */ package de.envisia.postgresql.parsers import java.nio.ByteBuffer import akka.util.ByteString import de.envisia.postgresql.message.backend.{ DataRowMessage, ServerMessage } object DataRowParser extends MessageParser { def parseMessage(buffer: ByteBuffer): ServerMessage = { val row = new Array[ByteString](buffer.getShort()) row.indices.foreach { column => val length = buffer.getInt() if (length != -1) { val data = new Array[Byte](length) buffer.get(data) row(column) = ByteString(data) } else { row(column) = null } } DataRowMessage(row) } }
schmitch/akka-pg
src/main/scala/de/envisia/postgresql/parsers/DataRowParser.scala
Scala
apache-2.0
694
/*
 * Copyright (C) 2015 Noorq, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package controllers.account

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

import play.api.mvc.Action
import scaldi.Injector
import play.api.data.Form
import play.api.data.Forms._
import services.AccountService
import play.api.libs.json.Json
import play.api.libs.json.Writes
import play.api.libs.json.JsValue
import utils.ScalaHelper
import com.mailrest.maildal.model.DomainVerificationEvent
import com.mailrest.maildal.model.Domain

/**
 * REST endpoints for managing the domains attached to an account:
 * list, create, fetch and delete, guarded by read/write account actions.
 */
class AccountDomainController(implicit inj: Injector) extends AbstractAccountController {

  val accountService = inject [AccountService]

  // JSON serializer for a single domain-verification event.
  implicit val domainVerificationEventWrites = new Writes[DomainVerificationEvent] {
    override def writes(dv: DomainVerificationEvent): JsValue = {
      Json.obj(
        "eventAt" -> dv.eventAt,
        "status" -> dv.status.name,
        "message" -> dv.message
      )
    }
  }

  // JSON serializer for a domain, including its verification events.
  implicit val accountDomainWrites = new Writes[Domain] {
    override def writes(ad: Domain): JsValue = {
      Json.obj(
        "accountId" -> ad.accountId,
        "domainId" -> ad.domainId,
        "createdAt" -> ad.createdAt,
        "domainIdn" -> ad.domainIdn,
        // NOTE(review): Json.arr(seq) nests the whole sequence as ONE array
        // element ([[...]]) — verify events are meant to serialize this way.
        "events" -> Json.arr(ScalaHelper.asSeq(ad.events))
      )
    }
  }

  // Form binding for creating a domain; requires a non-empty "domainIdn" field.
  val newDomainForm = Form(
    mapping (
      "domainIdn" -> nonEmptyText
    )(NewDomainForm.apply)(NewDomainForm.unapply)
  )

  /** GET: all domains of the account as a JSON array. */
  def findAll(accId: String) = readAction(accId).async {
    accountService.findDomains(accId).map(x => Ok(Json.toJson(x)))
  }

  /** POST: create a domain from the submitted form. */
  def create(accId: String) = writeAction(accId).async {
    implicit request => {
      // NOTE(review): `.get` on the bound form throws when validation fails,
      // yielding a 500 instead of a 400 BadRequest — confirm intended.
      val form = newDomainForm.bindFromRequest.get
      accountService.addDomain(accId, form.domainIdn).map(x => Ok)
    }
  }

  /** GET: one domain by its IDN, 404 when absent. */
  def find(accId: String, domIdn: String) = readAction(accId).async {
    accountService.findDomain(accId, domIdn).map { x => {
      x match {
        case Some(ad) => Ok(Json.toJson(ad))
        case None => NotFound
      }
    }}
  }

  /** DELETE: remove a domain; always returns Ok on completion. */
  def delete(accId: String, domIdn: String) = writeAction(accId).async {
    accountService.deleteDomain(accId, domIdn).map(x => Ok)
  }
}

// Payload of the create-domain form.
case class NewDomainForm(domainIdn: String)
mailrest/mailrest
app/controllers/account/AccountDomainController.scala
Scala
apache-2.0
2,971
package org.jetbrains.plugins.scala package lang package psi package impl package base import com.intellij.lang.ASTNode import com.intellij.psi.stubs.StubElement import com.intellij.psi.tree.IElementType import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes import org.jetbrains.plugins.scala.lang.psi.api.base._ import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._ import org.jetbrains.plugins.scala.lang.psi.stubs.ScPatternListStub /** * @author Alexander Podkhalyuzin * Date: 22.02.2008 */ class ScPatternListImpl private (stub: StubElement[ScPatternList], nodeType: IElementType, node: ASTNode) extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScPatternList{ def this(node: ASTNode) = {this(null, null, node)} def this(stub: ScPatternListStub) = {this(stub, ScalaElementTypes.PATTERN_LIST, null)} override def toString: String = "ListOfPatterns" def patterns: Seq[ScPattern] = { val stub = getStub if (stub != null && allPatternsSimple) { return stub.getChildrenByType(ScalaElementTypes.REFERENCE_PATTERN, JavaArrayFactoryUtil.ScReferencePatternFactory) } findChildrenByClass[ScPattern](classOf[ScPattern]) } def allPatternsSimple: Boolean = { val stub = getStub if (stub != null) { return stub.asInstanceOf[ScPatternListStub].allPatternsSimple } !patterns.exists(p => !(p.isInstanceOf[ScReferencePattern])) } }
double-y/translation-idea-plugin
src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScPatternListImpl.scala
Scala
apache-2.0
1,415
package djinni

import djinni.ast._
import djinni.generatorTools._
import djinni.meta._

/**
 * Marshaling rules for the JNI bridge: maps IDL types to mangled JVM type
 * signatures, native `j*` types, and the generated helper classes that
 * implement toCpp/fromCpp conversions.
 */
class JNIMarshal(spec: Spec) extends Marshal(spec) {

  // For JNI typename() is always fully qualified and describes the mangled Java type to be used in field/method signatures
  override def typename(tm: MExpr): String = javaTypeSignature(tm)
  def typename(name: String, ty: TypeDef) = s"L${undecoratedTypename(name, ty)};"

  override def fqTypename(tm: MExpr): String = typename(tm)
  def fqTypename(name: String, ty: TypeDef): String = typename(name, ty)

  // Parameters/fields use the native JNI type (jint, jstring, jobject, ...).
  override def paramType(tm: MExpr): String = toJniType(tm, false)
  override def fqParamType(tm: MExpr): String = paramType(tm)

  override def returnType(ret: Option[TypeRef]): String = ret.fold("void")(toJniType)
  override def fqReturnType(ret: Option[TypeRef]): String = returnType(ret)

  override def fieldType(tm: MExpr): String = paramType(tm)
  override def fqFieldType(tm: MExpr): String = fqParamType(tm)

  // Conversion expressions delegate to the generated helper class for the type.
  override def toCpp(tm: MExpr, expr: String): String = {
    s"${helperClass(tm)}::toCpp(jniEnv, $expr)"
  }
  override def fromCpp(tm: MExpr, expr: String): String = {
    s"${helperClass(tm)}::fromCpp(jniEnv, $expr)"
  }

  // Name for the autogenerated class containing field/method IDs and toJava()/fromJava() methods
  def helperClass(name: String) = spec.jniClassIdentStyle(name)
  private def helperClass(tm: MExpr) = helperName(tm) + helperTemplates(tm)

  /** C++ includes needed to use the helper for the given meta type. */
  def references(m: Meta, exclude: String = ""): Seq[SymbolReference] = m match {
    case o: MOpaque =>
      List(ImportRef(q(spec.jniBaseLibIncludePrefix + "Marshal.hpp")))
    case d: MDef =>
      List(ImportRef(include(d.name)))
    case e: MExtern =>
      List(ImportRef(e.jni.header))
    case _ => List()
  }

  def include(ident: String) = q(spec.jniIncludePrefix + spec.jniFileIdentStyle(ident) + "." + spec.cppHeaderExt)

  def toJniType(ty: TypeRef): String = toJniType(ty.resolved, false)
  // needRef forces the boxed/object representation (e.g. inside an optional).
  def toJniType(m: MExpr, needRef: Boolean): String = m.base match {
    case p: MPrimitive => if (needRef) "jobject" else p.jniName
    case MString => "jstring"
    case MOptional => toJniType(m.args.head, true)
    case MBinary => "jbyteArray"
    case tp: MParam => helperClass(tp.name) + "::JniType"
    case e: MExtern => helperClass(m) + (if(needRef) "::Boxed" else "") + "::JniType"
    case _ => "jobject"
  }

  // The mangled Java typename without the "L...;" decoration useful only for class reflection on our own type
  def undecoratedTypename(name: String, ty: TypeDef): String = {
    val javaClassName = idJava.ty(name)
    spec.javaPackage.fold(javaClassName)(p => p.replaceAllLiterally(".", "/") + "/" + javaClassName)
  }

  // JVM descriptor-style signature (e.g. "I", "Ljava/lang/String;", "[B").
  private def javaTypeSignature(tm: MExpr): String = tm.base match {
    case o: MOpaque => o match {
      case p: MPrimitive => p.jSig
      case MString => "Ljava/lang/String;"
      case MDate => "Ljava/util/Date;"
      case MBinary => "[B"
      case MOptional => tm.args.head.base match {
        // Optionals of primitives map to the boxed class; type erasure means
        // other optionals use the payload's own signature.
        case p: MPrimitive => s"Ljava/lang/${p.jBoxed};"
        case MOptional => throw new AssertionError("nested optional?")
        case m => javaTypeSignature(tm.args.head)
      }
      case MList => "Ljava/util/ArrayList;"
      case MSet => "Ljava/util/HashSet;"
      case MMap => "Ljava/util/HashMap;"
    }
    case e: MExtern => e.jni.typeSignature
    case MParam(_) => "Ljava/lang/Object;"
    case d: MDef => s"L${undecoratedTypename(d.name, d.body)};"
  }

  /** JNI method descriptor, e.g. "(ILjava/lang/String;)V". */
  def javaMethodSignature(params: Iterable[Field], ret: Option[TypeRef]) = {
    params.map(f => typename(f.ty)).mkString("(", "", ")") + ret.fold("V")(typename)
  }

  // Fully namespaced helper class name for a resolved type expression.
  def helperName(tm: MExpr): String = tm.base match {
    case d: MDef => withNs(Some(spec.jniNamespace), helperClass(d.name))
    case e: MExtern => e.jni.translator
    case o => withNs(Some("djinni"), o match {
      case p: MPrimitive => p.idlName match {
        case "i8" => "I8"
        case "i16" => "I16"
        case "i32" => "I32"
        case "i64" => "I64"
        case "f32" => "F32"
        case "f64" => "F64"
        case "bool" => "Bool"
      }
      case MOptional => "Optional"
      case MBinary => "Binary"
      case MString => "String"
      case MDate => "Date"
      case MList => "List"
      case MSet => "Set"
      case MMap => "Map"
      case d: MDef => throw new AssertionError("unreachable")
      case e: MExtern => throw new AssertionError("unreachable")
      case p: MParam => throw new AssertionError("not applicable")
    })
  }

  // Template-argument suffix ("<A, B>") for generic helpers; empty otherwise.
  private def helperTemplates(tm: MExpr): String = {
    def f() = if(tm.args.isEmpty) "" else tm.args.map(helperClass).mkString("<", ", ", ">")
    tm.base match {
      case MOptional =>
        assert(tm.args.size == 1)
        val argHelperClass = helperClass(tm.args.head)
        s"<${spec.cppOptionalTemplate}, $argHelperClass>"
      case MList | MSet =>
        assert(tm.args.size == 1)
        f
      case MMap =>
        assert(tm.args.size == 2)
        f
      case _ => f
    }
  }

  // Everything except primitives is a heap-allocated Java object.
  def isJavaHeapObject(ty: TypeRef): Boolean = isJavaHeapObject(ty.resolved.base)
  def isJavaHeapObject(m: Meta): Boolean = m match {
    case _: MPrimitive => false
    case _ => true
  }
}
eastonhou/djinni_with_cx
src/source/JNIMarshal.scala
Scala
apache-2.0
5,161
package org.denigma.threejs.extensions.animations

import java.util.Date
import org.scalajs.dom
import Animation.{Started, AniState}

import scala.concurrent.duration.Duration
import scala.scalajs.js

/**
 * Drives registered animations from the browser's requestAnimationFrame loop.
 * NOTE(review): finished animations are never removed from `animations`, so
 * the list only grows — confirm whether pruning is intended elsewhere.
 */
class Scheduler {

  // Current time in epoch milliseconds (JS Date.now).
  def current = js.Date.now

  var animations = List.empty[Animation]

  /** Registers an animation and immediately marks it as running from now. */
  def add(ani: Animation) = {
    this.animations = ani :: animations
    ani.state = Animation.Running(current)
  }

  /** Advances every currently running animation to the present timestamp. */
  def tick() = {
    val now = current
    animations.foreach { ani =>
      ani.state match {
        case Animation.Running(start) => ani(now)
        case other => // not running: do nothing
      }
    }
  }

  // Frame callback: tick, then re-register for the next frame.
  protected def onEnterFrameFunction(double: Double): Unit = {
    this.tick()
    start()
  }

  /** Schedules the next animation frame; returns this scheduler for chaining. */
  def start() = {
    dom.requestAnimationFrame( onEnterFrameFunction _ )
    this
  }
}

object Easings {
  // Identity easing: progress maps straight to output.
  val linear: Double => Double = i => i
}

/**
 * contains states of animations and some other useful stuff
 */
object Animation {
  trait AniState
  // Any state that carries the start timestamp (epoch millis).
  trait Started extends AniState {
    def start: Double
  }
  case object Stopped extends AniState
  case class Finished(start: Double) extends Started
  case class Paused(start: Double) extends Started
  case class Running(start: Double) extends Started
  case class Backward(start: Double, finished: Double) extends Started
}

/**
 * A single animation of the given duration. `fun` receives the eased progress
 * in [0, 1] each tick; the animation flips itself to Finished at progress 1.
 */
class Animation(val length: Duration, easing: Double => Double = Easings.linear)(fun: (Double => Unit)) {

  lazy val lengthMillis: Long = length.toMillis

  var state: AniState = Animation.Stopped

  /** Advances to time `current` (epoch millis), invoking `fun` with eased progress. */
  def apply(current: Double) = state match {
    case st: Started =>
      val finish: Double = st.start + this.lengthMillis
      easing( 1.0 - (finish - current) / length.toMillis) match {
        case p if p >= 1.0 =>
          // Clamp the final callback to exactly 1.0 and finish.
          fun(1.0)
          this.state = Animation.Finished(current)
        case p if p < 0.0 =>
          dom.console.error(s"animation percent is $p that is below zero!\n " +
            s"Current time is $current, start is ${st.start} and length is $lengthMillis")
          this.state = Animation.Finished(current)
        case p =>
          fun(p)
      }
    case _ => dom.console.error("trying to run an operation that has not started")
  }

  /** Registers this animation with the scheduler (which starts it immediately). */
  def go(implicit scheduler: Scheduler) = {
    scheduler.add(this)
  }
}
waman/threejs-facade
facade/src/main/scala/org/denigma/threejs/extensions/animations/Animation.scala
Scala
mpl-2.0
2,399
package com.imaginea.activegrid.core.models

import org.neo4j.graphdb.Node
import org.slf4j.LoggerFactory

/**
 * Created by nagulmeeras on 27/10/16.
 *
 * One IP permission rule (port range, protocol, peer groups and CIDR ranges),
 * persisted as a Neo4j node labelled "IpPermissionInfo".
 */
case class IpPermissionInfo(override val id: Option[Long],
                            fromPort: Int,
                            toPort: Int,
                            ipProtocol: IpProtocol,
                            groupIds: Set[String],
                            ipRanges: List[String]) extends BaseEntity

object IpPermissionInfo {
  val ipPermissionLabel = "IpPermissionInfo"
  val logger = LoggerFactory.getLogger(getClass)

  /**
   * Rehydrates an IpPermissionInfo from the node with the given id.
   * Yields None when the node is absent or does not carry the expected label.
   */
  def fromNeo4jGraph(nodeId: Long): Option[IpPermissionInfo] =
    Neo4jRepository.findNodeById(nodeId) match {
      case Some(node) if Neo4jRepository.hasLabel(node, ipPermissionLabel) =>
        val props =
          Neo4jRepository.getProperties(node, "fromPort", "toPort", "ipProtocol", "groupIds", "ipRanges")
        Some(
          IpPermissionInfo(
            Some(nodeId),
            props("fromPort").asInstanceOf[Int],
            props("toPort").asInstanceOf[Int],
            IpProtocol.toProtocol(props("ipProtocol").asInstanceOf[String]),
            props("groupIds").asInstanceOf[Array[String]].toSet,
            props("ipRanges").asInstanceOf[Array[String]].toList))
      case _ =>
        None
    }

  implicit class IpPermissionInfoImpl(ipPermissionInfo: IpPermissionInfo) extends Neo4jRep[IpPermissionInfo] {

    /** Persists the entity; collections are stored as Neo4j array properties. */
    override def toNeo4jGraph(entity: IpPermissionInfo): Node = {
      val props = Map(
        "fromPort"   -> entity.fromPort,
        "toPort"     -> entity.toPort,
        "ipProtocol" -> entity.ipProtocol.value,
        "groupIds"   -> entity.groupIds.toArray,
        "ipRanges"   -> entity.ipRanges.toArray)
      Neo4jRepository.saveEntity[IpPermissionInfo](ipPermissionLabel, entity.id, props)
    }

    override def fromNeo4jGraph(nodeId: Long): Option[IpPermissionInfo] =
      IpPermissionInfo.fromNeo4jGraph(nodeId)
  }
}
eklavya/activeGrid
src/main/scala/com/imaginea/activegrid/core/models/IpPermissionInfo.scala
Scala
apache-2.0
1,964
package monocle.bench

import scala.collection.immutable.SortedMap
import scala.util.Random

/** Shared model types and sample values used by the Monocle benchmarks. */
object BenchModel {

  /** Integer division that reports division by zero as None instead of throwing. */
  def safeDivide(a: Int, b: Int): Option[Int] = if (b == 0) None else Some(a / b)

  // Chain of nested case classes, used to benchmark deep lens composition.
  case class Nested0(s: String, i: Int, n: Nested1, l: Long)
  case class Nested1(s: String, i: Int, n: Nested2, l: Long)
  case class Nested2(s: String, i: Int, n: Nested3, l: Long)
  case class Nested3(s: String, i: Int, n: Nested4, l: Long)
  case class Nested4(s: String, i: Int, n: Nested5, l: Long)
  case class Nested5(s: String, i: Int, n: Nested6, l: Long)
  case class Nested6(s: String, i: Int)

  // Random generators for benchmark fixtures (not reproducible across runs).
  val r = new Random

  def genInt(): Int     = r.nextInt()
  def genLong(): Long   = r.nextLong()
  def genStr(): String  = r.nextString(r.nextInt(100))

  /** Tiny recursive ADT used for prism benchmarks. */
  sealed trait ADT
  case class I(i: Int) extends ADT
  case class R(r: ADT) extends ADT

  def getIOption(adt: ADT): Option[Int] = adt match { case I(i) => Some(i); case _ => None }
  def getROption(adt: ADT): Option[ADT] = adt match { case R(r) => Some(r); case _ => None }

  def mkI(i: Int): ADT = I(i)
  def mkR(r: ADT): ADT = R(r)

  case class Point3(x: Int, y: Int, z: Int)

  val p = Point3(2, 10, 24)

  val map = SortedMap(1.to(200).map(_ -> 5): _*)

  // Iso/wrapper chain: wN values mark composition depths 0, 3 and 6.
  case class IntWrapper0(i: Int)
  case class IntWrapper1(i: Int)
  case class IntWrapper2(i: Int)
  case class IntWrapper3(i: Int)
  case class IntWrapper4(i: Int)
  case class IntWrapper5(i: Int)
  case class IntWrapper6(i: Int)

  val i = genInt()

  val w0 = IntWrapper0(i)
  // Fixed: was `IntWrapper0(i)` — a copy-paste slip; w3 should wrap at depth 3
  // to match the w0/w6 pattern above.
  val w3 = IntWrapper3(i)
  val w6 = IntWrapper6(i)
}
julien-truffaut/Monocle
bench/src/main/scala/monocle/bench/BenchModel.scala
Scala
mit
1,548
package com.aughma.dataflow

/**
 * A processing node in the dataflow graph.
 */
trait Block {
  // Port on which implementations publish errors.
  // NOTE(review): OutputPort is declared elsewhere in this project.
  val errorPort: OutputPort

  // Executes the block. No result type is annotated, so it is Unit.
  def trigger
}
fahdrafi/Aughma-Dataflow-Service
src/com/aughma/dataflow/Block.scala
Scala
apache-2.0
84
/*
 * Copyright 2010 WorldWide Conferencing, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.liftweb
package squerylrecord

import common.{Box, Full}
import record.{BaseField, MetaRecord, Record, TypedField}
import record.field._
import org.squeryl.internals.{FieldMetaData, PosoMetaData, FieldMetaDataFactory}
import org.squeryl.annotations.Column
import java.lang.reflect.{Method, Field}
import java.lang.annotation.Annotation
import java.sql.{ResultSet, Timestamp}
import java.util.{Calendar, Date}
import scala.collection.immutable.Map

/** FieldMetaDataFactory that allows Squeryl to use Records as model objects.
 *
 * Bridges Squeryl's reflection-based field metadata onto Lift Record's
 * TypedField machinery: values are read and written through the Record
 * fields rather than plain JVM fields.
 */
class RecordMetaDataFactory extends FieldMetaDataFactory {

  /** Cache MetaRecords by the model object class (Record class) */
  // NOTE(review): mutable map without synchronization — presumably only
  // touched during single-threaded schema setup; confirm before reuse.
  private var metaRecordsByClass: Map[Class[_], MetaRecord[_]] = Map.empty

  /** Given a model object class (Record class) and field name, return the BaseField from the meta record */
  private def findMetaField(clasz: Class[_], name: String): BaseField = {
    // Resolve the named field on the meta record, failing loudly on mismatch.
    def fieldFrom(mr: MetaRecord[_]): BaseField =
      mr.asInstanceOf[Record[_]].fieldByName(name) match {
        case Full(f: BaseField) => f
        case Full(_) => error("field " + name + " in Record metadata for " + clasz + " is not a TypedField")
        case _ => error("failed to find field " + name + " in Record metadata for " + clasz)
      }

    metaRecordsByClass get clasz match {
      case Some(mr) => fieldFrom(mr)
      case None =>
        try {
          // First encounter: instantiate the Record to reach its meta object,
          // then memoize it for subsequent lookups.
          val rec = clasz.newInstance.asInstanceOf[Record[_]]
          val mr = rec.meta
          metaRecordsByClass = metaRecordsByClass updated (clasz, mr)
          fieldFrom(mr)
        } catch {
          // NOTE(review): bare `case ex` also catches fatal Throwables;
          // kept as-is to preserve legacy behavior.
          case ex => error("failed to find MetaRecord for " + clasz + " due to exception " + ex.toString)
        }
    }
  }

  /** Build a Squeryl FieldMetaData for a particular field in a Record */
  def build(parentMetaData: PosoMetaData[_], name: String,
            property: (Option[Field], Option[Method], Option[Method], Set[Annotation]),
            sampleInstance4OptionTypeDeduction: AnyRef,
            isOptimisticCounter: Boolean): FieldMetaData = {
    if (!isRecord(parentMetaData.clasz)) {
      // No Record class, treat it as a normal class in primitive type mode.
      // This is needed for ManyToMany association classes, for example
      return SquerylRecord.posoMetaDataFactory.build(parentMetaData, name, property,
        sampleInstance4OptionTypeDeduction, isOptimisticCounter)
    }

    val metaField = findMetaField(parentMetaData.clasz, name)

    val (field, getter, setter, annotations) = property
    val colAnnotation = annotations.find(a => a.isInstanceOf[Column]).map(a => a.asInstanceOf[Column])

    // Map each Record field flavor onto the JDBC-level class Squeryl expects.
    val fieldsValueType = metaField match {
      case (f: SquerylRecordField) => f.classOfPersistentField
      case (_: BooleanTypedField) => classOf[Boolean]
      case (_: DateTimeTypedField) => classOf[Timestamp]
      case (_: DoubleTypedField) => classOf[Double]
      case (_: IntTypedField) => classOf[java.lang.Integer]
      case (_: LongTypedField) => classOf[java.lang.Long]
      case (_: DecimalTypedField) => classOf[BigDecimal]
      case (_: TimeZoneTypedField) => classOf[String]
      case (_: StringTypedField) => classOf[String]
      case (_: PasswordTypedField) => classOf[String]
      case (_: BinaryTypedField) => classOf[Array[Byte]]
      case (_: LocaleTypedField) => classOf[String]
      case (_: EnumTypedField[_]) => classOf[Enumeration#Value]
      case (_: EnumNameTypedField[_]) => classOf[Enumeration#Value]
      case _ => error("Unsupported field type. Consider implementing " +
        "SquerylRecordField for defining the persistent class." +
        "Field: " + metaField)
    }

    // String fields carry their own maximum length; use it for the column.
    val overrideColLength = metaField match {
      case (stringTypedField: StringTypedField) => Some(stringTypedField.maxLength)
      case _ => None
    }

    new FieldMetaData(
      parentMetaData,
      name,
      fieldsValueType, // if isOption, this fieldType is the type param of Option, i.e. the T in Option[T]
      fieldsValueType, //in primitive type mode fieldType == wrappedFieldType, in custom type mode wrappedFieldType is the 'real' type, i.e. the (primitive) type that jdbc understands
      None, //val customTypeFactory: Option[AnyRef=>Product1[Any]],
      metaField.optional_?,
      getter,
      setter,
      field,
      colAnnotation,
      isOptimisticCounter,
      metaField) {

      // Prefer the Record-declared string length over Squeryl's default.
      override def length = overrideColLength getOrElse super.length

      // All reads/writes go through the Record's TypedField, not the JVM field.
      private def fieldFor(o: AnyRef) = getter.get.invoke(o).asInstanceOf[TypedField[AnyRef]]

      // `Box !! x` wraps a possibly-null JDBC value into Empty/Full.
      override def setFromResultSet(target: AnyRef, rs: ResultSet, index: Int) =
        fieldFor(target).setFromAny(Box !! resultSetHandler(rs, index))

      override def get(o: AnyRef) = fieldFor(o).valueBox match {
        // Calendars are persisted as SQL timestamps.
        case Full(c: Calendar) => new Timestamp(c.getTime.getTime)
        case Full(other) => other
        case _ => null
      }
    }
  }

  /**
   * Checks if the given class is a subclass of Record. A special handling is only
   * needed for such subtypes. For other classes, use the standard squeryl methods.
   */
  private def isRecord(clasz: Class[_]) = {
    classOf[Record[_]].isAssignableFrom(clasz)
  }

  /**
   * For records, the constructor must not be used directly when
   * constructing Objects. Instead, the createRecord method must be called.
   */
  def createPosoFactory(posoMetaData: PosoMetaData[_]): () => AnyRef = {
    if (!isRecord(posoMetaData.clasz)) {
      // No record class - use standard poso meta data factory
      return SquerylRecord.posoMetaDataFactory.createPosoFactory(posoMetaData);
    }

    // Extract the MetaRecord for the companion object. This
    // is done only once for each class.
    val metaRecord = Class.forName(posoMetaData.clasz.getName + "$").getField("MODULE$").get(null).asInstanceOf[MetaRecord[_]]
    () => metaRecord.createRecord.asInstanceOf[AnyRef]
  }
}
wsaccaco/lift
framework/lift-persistence/lift-squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordMetaDataFactory.scala
Scala
apache-2.0
6,497
// Positive compilation test: a case class and an explicitly written companion
// object may coexist inside a method body, in either declaration order, and a
// method-local case class may carry a default argument.
// NOTE(review): only compilation matters here; the returned Ints are dummies.
object t {
  def f = { object C; case class C(); 1 }
  def g = { case class D(x: Int); object D; 2 }
  def h = { case class E(y: Int = 10); 3 }
}
yusuke2255/dotty
tests/pos/caseClassInMethod.scala
Scala
bsd-3-clause
146
/* Stackable traits demo: each logging trait decorates `log` and forwards via
   `super`; the mixin linearization is processed from right to left, so the
   rightmost trait in the `with` clause runs first. */

class Account {
  var balance = 100.0
}

trait Logged {
  /** Base behaviour: print the message unchanged. */
  def log(msg: String) { println(msg) }
}

/** Prefixes every message with the current date before delegating. */
trait TimestamLogger extends Logged {
  override def log(msg: String) {
    // Had Logged.log been abstract, this would require `abstract override`.
    super.log(new java.util.Date() + " " + msg)
  }
}

/** Shortens overly long messages before delegating. */
trait ShortLogger extends Logged {
  val maxLength = 4 // concrete field contributed by the trait

  override def log(msg: String) {
    val shortened =
      if (msg.length <= maxLength) msg
      else msg.substring(0, maxLength - 3) + "......."
    super.log(shortened)
  }
}

class SavingsAccount extends Account with TimestamLogger {
  def draw(account: Double) {
    if (account > balance) log("Error") else println("True")
  }
}

class ShortAccount extends Account with ShortLogger {
  def draw(account: Double) {
    if (account > balance) log("Error") else println("True")
  }
}

object TraitTest3 {
  def main(args: Array[String]) {
    val timestamped = new SavingsAccount
    timestamped.draw(105)

    /* Traits apply right-to-left: ShortLogger truncates first,
       then TimestamLogger prepends the date. */
    val shortThenStamped = new SavingsAccount with ShortLogger
    shortThenStamped.draw(107)

    // Reversed mixin order: the date is prepended first, then truncated.
    val stampedThenShort = new ShortAccount with TimestamLogger
    stampedThenShort.draw(108)
  }
}
PengLiangWang/Scala
Class/traitTest/TraitTest3.scala
Scala
gpl-3.0
1,478
package org.jetbrains.sbt
package annotator

import java.io.File

import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.{CharsetToolkit, LocalFileSystem}
import com.intellij.psi.PsiFileFactory
import com.intellij.testFramework.PlatformTestCase
import org.jetbrains.plugins.scala.util.TestUtils

import org.jetbrains.sbt.language.{SbtFileImpl, SbtFileType}

/**
 * Base class for sbt annotator tests: loads the `.sbt` fixture named after
 * the running test from the shared test-data directory and parses it into
 * a PSI file.
 *
 * @author Nikolay Obedin
 * @since 8/4/14.
 */
abstract class AnnotatorTestBase extends PlatformTestCase {

  // Root directory that holds the sbt annotator fixtures.
  def testdataPath = TestUtils.getTestDataPath + "/annotator/Sbt/"

  /** Loads `<TestName>.sbt` from the test-data directory as an SbtFileImpl. */
  def loadTestFile() = {
    // getTestName(false) keeps the original capitalization of the test name.
    val fileName = getTestName(false) + ".sbt"
    val filePath = testdataPath + fileName
    // Normalize separators so the VFS lookup works on every OS.
    val file = LocalFileSystem.getInstance.findFileByPath(filePath.replace(File.separatorChar, '/'))
    assert(file != null, "file " + filePath + " not found")
    // Read as UTF-8 and normalize line endings before building the PSI tree.
    val fileText = StringUtil.convertLineSeparators(FileUtil.loadFile(new File(file.getCanonicalPath), CharsetToolkit.UTF8))
    PsiFileFactory.getInstance(getProject).createFileFromText(fileName , SbtFileType, fileText).asInstanceOf[SbtFileImpl]
  }
}
katejim/intellij-scala
test/org/jetbrains/sbt/annotator/AnnotatorTestBase.scala
Scala
apache-2.0
1,139
package org.workcraft.dom.visual.connections

import java.awt.geom.Point2D

/** Immutable geometry snapshot of a visual connection between two nodes. */
sealed trait StaticVisualConnectionData

/** Cubic Bezier curve described by its two control points.
 *  NOTE(review): RelativePoint is declared elsewhere; presumably the points
 *  are expressed relative to the connection endpoints — confirm.
 */
case class Bezier(cp1: RelativePoint, cp2: RelativePoint) extends StaticVisualConnectionData

/** Polyline through the given intermediate control points. */
case class Polyline(cps: List[Point2D.Double]) extends StaticVisualConnectionData
tuura/workcraft-2.2
ScalaGraphEditorUtil/src/main/scala/org/workcraft/dom/visual/connections/StaticVisualConnectionData.scala
Scala
gpl-3.0
295
/*
 * Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package es.tid.cosmos.api.controllers.pages

import org.scalatest.FlatSpec
import org.scalatest.matchers.MustMatchers
import play.api.mvc.Session

import es.tid.cosmos.api.auth.oauth2.OAuthUserProfile
import es.tid.cosmos.api.controllers.pages.CosmosSession._
import es.tid.cosmos.api.profile.UserId

/** Tests for the CosmosSession enrichment of Play's Session. */
class CosmosSessionTest extends FlatSpec with MustMatchers {

  // Sample OAuth profile reused across the specs below.
  val profile = OAuthUserProfile(id=UserId("db-1234"), name = Some("John Smith"))

  // Fixture: a session with nothing stored in it.
  trait WithEmptySession {
    def session: CosmosSession = new Session
  }

  // Fixture: a session already carrying the sample user profile.
  trait WithUserSession {
    def session: CosmosSession = Session().setUserProfile(profile)
  }

  "An empty cosmos session" must "be not authenticated" in new WithEmptySession {
    session must not be 'authenticated
  }

  it must "store and retrieve the OAuth token" in new WithEmptySession {
    session.setToken("token").token must be (Some("token"))
  }

  it must "store and retrieve the user profile" in new WithEmptySession {
    session.setUserProfile(profile).userProfile must be (Some(profile))
  }

  "A cosmos session with a user id but no cosmos id" must "be authenticated" in new WithUserSession {
    session must be ('authenticated)
  }
}
telefonicaid/fiware-cosmos-platform
cosmos-api/test/es/tid/cosmos/api/controllers/pages/CosmosSessionTest.scala
Scala
apache-2.0
1,828
/*
 * Copyright (c) 2016 dawid.melewski
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
package actyxpoweruseralert

import java.time.{ ZoneOffset, ZonedDateTime }

import actyxpoweruseralert.model.MachineInfo

trait Helpers {

  /** True when the sample's timestamp lies within the last five minutes (UTC clock). */
  def isFromLast5minutes(machine: MachineInfo): Boolean = {
    val utc = ZonedDateTime.now(ZoneOffset.UTC)
    machine.timestamp.isAfter(utc.toLocalDateTime.minusMinutes(5))
  }

  /**
   * Average `current` over the samples from the last five minutes.
   *
   * Returns None when no sample falls inside the window.
   *
   * Fix: the previous implementation summed only the recent samples but
   * divided by the size of the WHOLE list, skewing the average downwards
   * whenever stale samples were present, and produced a meaningless
   * Some(0/size) when no sample was recent.
   */
  def calculateAverage(list: List[MachineInfo]): Option[Double] = {
    val recent = list.filter(isFromLast5minutes)
    if (recent.isEmpty) None
    else Some(recent.map(_.current).sum / recent.size)
  }
}
meloniasty/ActyxPowerUserAlert
src/main/scala/actyxpoweruseralert/Helpers.scala
Scala
mit
1,641
/*
 * Copyright 2018 Analytics Zoo Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.zoo.pipeline.api.keras.objectives

import com.intel.analytics.bigdl.nn.{LogSoftMax, SoftMax}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.zoo.pipeline.api.keras.layers.{KerasRunner, Loss}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric.NumericFloat
import com.intel.analytics.zoo.pipeline.api.keras.layers.KerasBaseSpec

import scala.math.abs

/**
 * Tests for SparseCategoricalCrossEntropy: parity with Keras, exact
 * forward/backward values with and without class weights, the
 * sizeAverage/1-based-label variant, and probability vs log-probability input.
 * NOTE(review): expected numeric literals below come from reference runs;
 * they are compared with a 1e-6 (or 1e-5) tolerance.
 */
class SparseCategoricalCrossEntropySpec extends KerasBaseSpec {

  "SparseCategoricalCrossEntropy" should "be the same as Keras" in {
    // Python snippet executed by KerasRunner; the loss is averaged over the batch.
    val kerasCode =
      """
        |input_tensor = Input(shape=[3, ])
        |target_tensor = Input(batch_shape=[3, ])
        |loss = sparse_categorical_crossentropy(target_tensor, input_tensor)
        |input = input = np.array([[0.6, 0.3, 0.1], [0.2, 0.5, 0.3], [0.1, 0.1, 0.8]])
        |Y = np.array([0.0, 1.0, 2.0])
      """.stripMargin
    val loss = SparseCategoricalCrossEntropy[Float](logProbAsInput = false)
    val (gradInput, gradWeight, weights, input, target, output) = KerasRunner.run(kerasCode, Loss)
    val boutput = loss.forward(input, target)
    val koutput = output.mean()
    NumericFloat.nearlyEqual(boutput, koutput, 1e-5) should be (true)
  }

  "SparseCategoricalCrossEntropy" should "generate correct output and grad" in {
    // Inputs here are already log-probabilities (logProbAsInput = true).
    val criterion = SparseCategoricalCrossEntropy[Double](logProbAsInput = true)
    val input = Tensor[Double](3, 3)
    input(Array(1, 1)) = -1.0262627674932
    input(Array(1, 2)) = -1.2412600935171
    input(Array(1, 3)) = -1.0423174168648
    input(Array(2, 1)) = -0.90330565804228
    input(Array(2, 2)) = -1.3686840144413
    input(Array(2, 3)) = -1.0778380454479
    input(Array(3, 1)) = -0.99131220658219
    input(Array(3, 2)) = -1.0559142847536
    input(Array(3, 3)) = -1.2692712660404
    val target = Tensor[Double](3)
    target(Array(1)) = 0
    target(Array(2)) = 1
    target(Array(3)) = 2
    val expectedOutput = 1.2214060159916
    // Gradient is -1/batchSize at each target class, zero elsewhere.
    val expectedGrad = Tensor[Double](3, 3)
    expectedGrad(Array(1, 1)) = -0.33333333333333
    expectedGrad(Array(1, 2)) = 0
    expectedGrad(Array(1, 3)) = 0
    expectedGrad(Array(2, 1)) = 0
    expectedGrad(Array(2, 2)) = -0.33333333333333
    expectedGrad(Array(2, 3)) = 0
    expectedGrad(Array(3, 1)) = 0
    expectedGrad(Array(3, 2)) = 0
    expectedGrad(Array(3, 3)) = -0.33333333333333
    val output = criterion.forward(input, target)
    val gradInput = criterion.backward(input, target)
    assert(abs(expectedOutput - output) < 1e-6)
    expectedGrad.map(gradInput, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
  }

  "SparseCategoricalCrossEntropy with weight" should "generate correct output and grad" in {
    // Per-class weights rescale both the loss and the gradients.
    val weight = Tensor[Double](3)
    weight(Array(1)) = 0.539598016534
    weight(Array(2)) = 0.20644677849486
    weight(Array(3)) = 0.67927200254053
    val criterion = SparseCategoricalCrossEntropy[Double](
      weights = weight, logProbAsInput = true)
    val input = Tensor[Double](3, 3)
    input(Array(1, 1)) = -1.2412808758149
    input(Array(1, 2)) = -1.4300331461186
    input(Array(1, 3)) = -0.75144359487463
    input(Array(2, 1)) = -1.2200775853117
    input(Array(2, 2)) = -1.1747087276299
    input(Array(2, 3)) = -0.92663456371434
    input(Array(3, 1)) = -1.1718541533533
    input(Array(3, 2)) = -1.0983546295516
    input(Array(3, 3)) = -1.0306113735619
    val target = Tensor[Double](3)
    target(Array(1)) = 0
    target(Array(2)) = 1
    target(Array(3)) = 2
    val expectedOutput = 1.1312383221403
    val expectedGrad = Tensor[Double](3, 3)
    expectedGrad(Array(1, 1)) = -0.37858111084791
    expectedGrad(Array(1, 2)) = 0
    expectedGrad(Array(1, 3)) = 0
    expectedGrad(Array(2, 1)) = 0
    expectedGrad(Array(2, 2)) = -0.14484273169791
    expectedGrad(Array(2, 3)) = 0
    expectedGrad(Array(3, 1)) = 0
    expectedGrad(Array(3, 2)) = 0
    expectedGrad(Array(3, 3)) = -0.47657615745419
    val output = criterion.forward(input, target)
    val gradInput = criterion.backward(input, target)
    assert(abs(expectedOutput - output) < 1e-6)
    expectedGrad.map(gradInput, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
  }

  "SparseCategoricalCrossEntropy with sizeAverage false and 1-based label" should
    "generate correct output and grad" in {
    // Summed (not averaged) loss with labels starting at 1 instead of 0.
    val criterion = SparseCategoricalCrossEntropy[Double](
      zeroBasedLabel = false, sizeAverage = false, logProbAsInput = true)
    val input = Tensor[Double](3, 3)
    input(Array(1, 1)) = -1.10821131127
    input(Array(1, 2)) = -0.92179085988591
    input(Array(1, 3)) = -1.3017876357682
    input(Array(2, 1)) = -0.72992115377362
    input(Array(2, 2)) = -1.2817109257719
    input(Array(2, 3)) = -1.4250730090114
    input(Array(3, 1)) = -1.1074577039332
    input(Array(3, 2)) = -1.0506933510994
    input(Array(3, 3)) = -1.1397251596433
    val target = Tensor[Double](3)
    target(Array(1)) = 1
    target(Array(2)) = 2
    target(Array(3)) = 3
    val expectedOutput = 3.5296473966852
    // Without size-averaging the gradient at each target class is exactly -1.
    val expectedGrad = Tensor[Double](3, 3)
    expectedGrad(Array(1, 1)) = -1
    expectedGrad(Array(1, 2)) = 0
    expectedGrad(Array(1, 3)) = 0
    expectedGrad(Array(2, 1)) = 0
    expectedGrad(Array(2, 2)) = -1
    expectedGrad(Array(2, 3)) = 0
    expectedGrad(Array(3, 1)) = 0
    expectedGrad(Array(3, 2)) = 0
    expectedGrad(Array(3, 3)) = -1
    val output = criterion.forward(input, target)
    val gradInput = criterion.backward(input, target)
    assert(abs(expectedOutput - output) < 1e-6)
    expectedGrad.map(gradInput, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
  }

  "SparseCategoricalCrossEntropy with probabilities input" should
    "generate correct output and grad" in {
    // Feeding SoftMax probabilities (default mode) must agree with feeding
    // LogSoftMax outputs to the logProbAsInput variant.
    val input = Tensor[Float](Array(4, 4)).rand()
    val target = Tensor[Float](Array[Float](0, 1, 2, 3), Array(4))
    val logSoftMax = LogSoftMax[Float]()
    val softMax = SoftMax[Float]()
    val logProb = logSoftMax.forward(input)
    val prob = softMax.forward(input)
    val referenceLayer = SparseCategoricalCrossEntropy[Float](logProbAsInput = true)
    val testedLayer = SparseCategoricalCrossEntropy[Float]()
    val expectedLoss = referenceLayer.forward(logProb, target)
    val loss = testedLayer.forward(prob, target)
    val expectedGradInput = logSoftMax.backward(input, referenceLayer.backward(logProb, target))
    val gradInput = softMax.backward(input, testedLayer.backward(prob, target))
    math.abs(expectedLoss - loss) < 1e-5 should be (true)
    expectedGradInput.almostEqual(gradInput, 1e-5) should be (true)
  }
}
intel-analytics/analytics-zoo
zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/objectives/SparseCategoricalCrossEntropySpec.scala
Scala
apache-2.0
7,190
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package wvlet.airframe.control

import java.util.UUID
import java.util.concurrent._
import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong, AtomicReference}

import wvlet.log.LogSupport

import scala.reflect.ClassTag
import scala.jdk.CollectionConverters._
import scala.util.control.{ControlThrowable, NonFatal}

/**
 * Utilities for parallel execution.
 *
 * A fixed pool of Worker objects circulates through `requestQueue`: a worker
 * is taken out, loaded with the next input, executed on the thread pool, and
 * puts itself back when done. Completion is detected when all workers are
 * back in the queue.
 */
object Parallel extends LogSupport {

  // Control-flow marker thrown by `break`; caught in Worker.run to set the
  // shared `interrupted` flag.
  private class BreakException extends ControlThrowable

  // Parallel execution can be stopped by calling this method.
  def break: Unit = throw new BreakException()

  // Global counters exposed for monitoring.
  val stats = new ParallelExecutionStats()

  class ParallelExecutionStats(
      val totalThreads: AtomicLong = new AtomicLong(0),
      val runningWorkers: AtomicLong = new AtomicLong(0),
      val startedTasks: AtomicLong = new AtomicLong(0),
      val finishedTasks: AtomicLong = new AtomicLong(0)
  )

  // Blocking iterator over the result queue; a `None` element marks the end
  // of the stream (pushed by `iterate` when the source is exhausted).
  private class ResultIterator[R](queue: LinkedBlockingQueue[Option[R]]) extends Iterator[R] {

    private var nextMessage: Option[R] = None

    override def hasNext: Boolean = {
      nextMessage = queue.take()
      nextMessage.isDefined
    }

    override def next(): R = {
      nextMessage.get
    }
  }

  /**
   * Process all elements of the source by the given function then wait for the completion.
   *
   * @param source
   *   Source collection
   * @param parallelism
   *   Parallelism, the default value is a number of available processors
   * @param f
   *   Function which processes each element of the source collection
   * @return
   *   Collection of the results
   */
  def run[T, R: ClassTag](source: Seq[T], parallelism: Int = Runtime.getRuntime.availableProcessors())(
      f: T => R
  ): Seq[R] = {

    val executionId = UUID.randomUUID.toString
    trace(s"$executionId - Begin Parallel.run (parallelism = ${parallelism})")

    val requestQueue = new LinkedBlockingQueue[Worker[T, R]](parallelism)
    val resultQueue  = new LinkedBlockingQueue[Option[R]]()
    val interrupted  = new AtomicBoolean(false)

    // Pre-create one reusable worker per slot.
    Range(0, parallelism).foreach { i =>
      val worker = new Worker[T, R](executionId, i.toString, requestQueue, resultQueue, interrupted, f)
      requestQueue.put(worker)
    }

    val executor = Executors.newFixedThreadPool(parallelism)
    stats.totalThreads.addAndGet(parallelism)

    try {
      // Process all elements of source
      val it = source.iterator
      while (it.hasNext && !interrupted.get()) {
        val worker = requestQueue.take()
        if (!interrupted.get()) {
          worker.message.set(it.next())
          executor.execute(worker)
        } else {
          requestQueue.put(worker)
        }
      }

      // Wait for completion: poll until every worker has returned itself
      // to the request queue (10 ms busy-wait).
      while (requestQueue.size() != parallelism) {
        try {
          Thread.sleep(10)
        } catch {
          case _: InterruptedException => ()
        }
      }

      // NOTE(review): result order follows completion order, not source order.
      resultQueue.asScala.flatten.toSeq
    } catch {
      case _: InterruptedException => throw new TimeoutException()
    } finally {
      // Cleanup
      executor.shutdown()
      requestQueue.clear()
      stats.totalThreads.addAndGet(parallelism * -1)
    }
  }

  /**
   * Process all elements of the source by the given function then don't wait completion. The result is an iterator
   * which is likely a stream which elements are pushed continuously.
   *
   * @param source
   *   the source iterator
   * @param parallelism
   *   Parallelism, the default value is a number of available processors
   * @param f
   *   Function which processes each element of the source collection
   * @return
   *   Iterator of the results
   */
  def iterate[T, R](
      source: Iterator[T],
      parallelism: Int = Runtime.getRuntime.availableProcessors()
  )(f: T => R): Iterator[R] = {

    val executionId = UUID.randomUUID.toString
    trace(s"$executionId - Begin Parallel.iterate (parallelism = ${parallelism})")

    val requestQueue = new LinkedBlockingQueue[Worker[T, R]](parallelism)
    val resultQueue  = new LinkedBlockingQueue[Option[R]]()
    val interrupted  = new AtomicBoolean(false)

    Range(0, parallelism).foreach { i =>
      val worker = new Worker[T, R](executionId, i.toString, requestQueue, resultQueue, interrupted, f)
      requestQueue.put(worker)
    }

    // Feeding happens on a dedicated thread so this method can return the
    // iterator immediately; results stream into resultQueue as they finish.
    new Thread {
      override def run(): Unit = {
        val executor = Executors.newFixedThreadPool(parallelism)
        stats.totalThreads.addAndGet(parallelism)
        try {
          // Process all elements of source
          while (source.hasNext && !interrupted.get()) {
            val worker = requestQueue.take()
            if (!interrupted.get()) {
              worker.message.set(source.next())
              executor.execute(worker)
            } else {
              requestQueue.put(worker)
            }
          }

          // Wait for completion
          while (requestQueue.size() != parallelism) {
            try {
              Thread.sleep(10)
            } catch {
              case _: InterruptedException => ()
            }
          }
        } catch {
          case _: InterruptedException => throw new TimeoutException()
        } finally {
          // Terminate: the None sentinel ends the consumer's iteration.
          resultQueue.put(None)

          // Cleanup
          executor.shutdown()
          requestQueue.clear()
          stats.totalThreads.addAndGet(parallelism * -1)
        }
      }
    }.start()

    new ResultIterator[R](resultQueue)
  }

  // Reusable task wrapper: carries the current input in `message`, pushes the
  // result into resultQueue, and re-enqueues itself when finished.
  private[control] class Worker[T, R](
      executionId: String,
      workerId: String,
      requestQueue: BlockingQueue[Worker[T, R]],
      resultQueue: BlockingQueue[Option[R]],
      interrupted: AtomicBoolean,
      f: T => R
  ) extends Runnable
      with LogSupport {

    val message: AtomicReference[T] = new AtomicReference[T]()

    override def run: Unit = {
      trace(s"$executionId - Begin worker-$workerId")
      Parallel.stats.runningWorkers.incrementAndGet()
      stats.startedTasks.incrementAndGet()
      try {
        resultQueue.put(Some(f(message.get())))
      } catch {
        // `Parallel.break` inside f stops the whole run via the shared flag.
        case _: BreakException => interrupted.set(true)
        case NonFatal(e) =>
          warn(s"$executionId - Error worker-$workerId", e)
          throw e
      } finally {
        trace(s"$executionId - End worker-$workerId")
        stats.finishedTasks.incrementAndGet()
        Parallel.stats.runningWorkers.decrementAndGet()
        requestQueue.put(this)
      }
    }
  }
}
wvlet/airframe
airframe-control/.jvm/src/main/scala/wvlet/airframe/control/Parallel.scala
Scala
apache-2.0
6,990
package com.twitter.finagle.thrift

import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Service
import com.twitter.util.{Await, Future}
import org.mockito.Matchers
import org.mockito.Mockito.{times, verify, when}
import org.scalatest.FunSuite
import org.scalatestplus.mockito.MockitoSugar

/** Tests that ClientIdRequiredFilter admits requests only when a ClientId is set. */
class ClientIdRequiredFilterTest extends FunSuite with MockitoSugar {

  // Fixture: the filter composed in front of a mocked downstream service.
  case class ClientIdRequiredFilterContext(underlying: Service[String, String]) {
    lazy val service = new ClientIdRequiredFilter andThen underlying
  }

  val request = "request"
  val response = Future.value("response")
  val clientId = ClientId("test")

  test("ClientIdRequiredFilter passes through when ClientId exists") {
    val c = ClientIdRequiredFilterContext(mock[Service[String, String]])
    import c._
    when(underlying(request)).thenReturn(response)
    // Run the call with the ClientId installed in the request context.
    clientId.asCurrent {
      val result = service(request)
      assert(Await.result(result, 10.seconds) == Await.result(response, 10.seconds))
      result
    }
  }

  test("ClientIdRequiredFilter throws NoClientIdSpecifiedException when ClientId does not exist") {
    val c = ClientIdRequiredFilterContext(mock[Service[String, String]])
    import c._
    // With no ClientId in scope, the filter must fail fast and never
    // touch the downstream service.
    ClientId.let(None) {
      intercept[NoClientIdSpecifiedException] {
        Await.result(service(request), 10.seconds)
      }
      verify(underlying, times(0)).apply(Matchers.anyString())
    }
  }
}
luciferous/finagle
finagle-thrift/src/test/scala/com/twitter/finagle/thrift/ClientIdRequiredFilterTest.scala
Scala
apache-2.0
1,417
/**
 * Taken from https://github.com/scalatra/rl/blob/v0.4.10/core/src/main/scala/rl/UrlCodingUtils.scala
 * Copyright (c) 2011 Mojolly Ltd.
 */
package org.http4s.util

import java.nio.charset.Charset
import java.nio.charset.StandardCharsets.UTF_8

import org.http4s.internal.parboiled2.CharPredicate

// Deprecated shim: every encode/decode member here forwards to its
// replacement on org.http4s.Uri and is kept only for binary/source
// compatibility with pre-0.20.13 callers.
private[http4s] object UrlCodingUtils {

  @deprecated("Moved to org.http4s.Uri.Unreserved", "0.20.13")
  val Unreserved = org.http4s.Uri.Unreserved

  // RFC 3986 general and sub delimiters.
  val GenDelims = CharPredicate.from(":/?#[]@".toSet)
  val SubDelims = CharPredicate.from("!$&'()*+,;=".toSet)

  // Default skip set for urlEncode: unreserved characters plus all delimiters.
  private val toSkip = org.http4s.Uri.Unreserved ++ "!$&'()*+,;=:/?@"

  /**
   * Percent-encodes a string. Depending on the parameters, this method is
   * appropriate for URI or URL form encoding. Any resulting percent-encodings
   * are normalized to uppercase.
   *
   * @param toEncode the string to encode
   * @param charset the charset to use for characters that are percent encoded
   * @param spaceIsPlus if space is not skipped, determines whether it will be
   * rendered as a `"+"` or a percent-encoding according to `charset`.
   * @param toSkip a predicate of characters exempt from encoding. In typical
   * use, this is composed of all Unreserved URI characters and sometimes a
   * subset of Reserved URI characters.
   */
  @deprecated("Moved to org.http4s.Uri.encode", "0.20.13")
  def urlEncode(
      toEncode: String,
      charset: Charset = UTF_8,
      spaceIsPlus: Boolean = false,
      toSkip: Char => Boolean = toSkip): String =
    org.http4s.Uri.encode(toEncode, charset, spaceIsPlus, toSkip)

  @deprecated("Moved to org.http4s.Uri.pathEncode", "0.20.13")
  def pathEncode(s: String, charset: Charset = UTF_8): String =
    org.http4s.Uri.pathEncode(s, charset)

  /**
   * Percent-decodes a string.
   *
   * @param toDecode the string to decode
   * @param charset the charset of percent-encoded characters
   * @param plusIsSpace true if `'+'` is to be interpreted as a `' '`
   * @param toSkip a predicate of characters whose percent-encoded form
   * is left percent-encoded. Almost certainly should be left empty.
   */
  @deprecated("Moved to org.http4s.Uri.decode", "0.20.13")
  def urlDecode(
      toDecode: String,
      charset: Charset = UTF_8,
      plusIsSpace: Boolean = false,
      toSkip: Char => Boolean = Function.const(false)): String =
    org.http4s.Uri.decode(toDecode, charset, plusIsSpace, toSkip)
}
ChristopherDavenport/http4s
core/src/main/scala/org/http4s/util/UrlCoding.scala
Scala
apache-2.0
2,469
package clifton.graph.exceptions /** * Created by #GrowinScala */ class CollectException(msg: String) extends Exception(msg)
exocute/Toolkit
src/main/scala/clifton/graph/exceptions/CollectException.scala
Scala
bsd-2-clause
129
/* * Licensed to Cloudera, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Cloudera, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cloudera.hue.livy.server.interactive import com.cloudera.hue.livy.sessions.Kind case class CreateInteractiveRequest( kind: Kind, proxyUser: Option[String] = None, jars: List[String] = List(), pyFiles: List[String] = List(), files: List[String] = List(), driverMemory: Option[String] = None, driverCores: Option[Int] = None, executorMemory: Option[String] = None, executorCores: Option[Int] = None, numExecutors: Option[Int] = None, archives: List[String] = List(), queue: Option[String] = None, name: Option[String] = None)
hdinsight/hue
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/CreateInteractiveRequest.scala
Scala
apache-2.0
1,383
/** * Licensed to Big Data Genomics (BDG) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The BDG licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bdgenomics.adam.cli import org.bdgenomics.adam.rdd.ADAMContext._ import org.bdgenomics.adam.util.ADAMFunSuite import org.bdgenomics.utils.cli.Args4j class Reads2CoverageSuite extends ADAMFunSuite { sparkTest("correctly calculates coverage from small sam file") { val inputPath = copyResource("artificial.sam") val outputPath = tmpFile("coverage.adam") val args: Array[String] = Array(inputPath, outputPath) new Reads2Coverage(Args4j[Reads2CoverageArgs](args)).run(sc) val coverage = sc.loadCoverage(outputPath) val pointCoverage = coverage.flatten.rdd.filter(_.start == 30).first assert(pointCoverage.count == 5) } }
massie/adam
adam-cli/src/test/scala/org/bdgenomics/adam/cli/Reads2CoverageSuite.scala
Scala
apache-2.0
1,461
package com.twitter.finagle.tracing import com.twitter.finagle._ import com.twitter.finagle.client.Transporter import com.twitter.util.Future import java.net.InetSocketAddress /** * [[com.twitter.finagle.ServiceFactoryProxy]] used to trace the local addr and * server addr. */ private[finagle] class ServerDestTracingProxy[Req, Rep](self: ServiceFactory[Req, Rep]) extends ServiceFactoryProxy[Req, Rep](self) { override def apply(conn: ClientConnection): Future[Service[Req, Rep]] = self(conn).map(sf => new ServerDestTracingFilter[Req, Rep](conn).andThen(sf)) } private final class ServerDestTracingFilter[Req, Rep](conn: ClientConnection) extends SimpleFilter[Req, Rep] { def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = { val trace = Trace() if (trace.isActivelyTracing) { conn.localAddress match { case ia: InetSocketAddress => trace.recordLocalAddr(ia) trace.recordServerAddr(ia) case _ => // do nothing for non-ip address } conn.remoteAddress match { case ia: InetSocketAddress => trace.recordClientAddr(ia) case _ => // do nothing for non-ip address } } service(request) } } private[finagle] object ClientDestTracingFilter { val role = Stack.Role("EndpointTracing") /** * $module [[com.twitter.finagle.tracing.ClientDestTracingFilter]]. */ def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] = new Stack.Module2[Transporter.EndpointAddr, param.Tracer, ServiceFactory[Req, Rep]] { val role = ClientDestTracingFilter.role val description = "Record remote address of server" def make( _addr: Transporter.EndpointAddr, _tracer: param.Tracer, next: ServiceFactory[Req, Rep] ) = { val param.Tracer(tracer) = _tracer if (tracer.isNull) next else { val Transporter.EndpointAddr(addr) = _addr new ClientDestTracingFilter(addr) andThen next } } } } /** * [[com.twitter.finagle.Filter]] for clients to record the remote address of the server. * We don't log the local addr here because it's already done in the client Dispatcher. 
*/ class ClientDestTracingFilter[Req, Rep](addr: Address) extends SimpleFilter[Req, Rep] { def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = { val trace = Trace() val rep = service(request) if (trace.isActivelyTracing) { addr match { case Address.Inet(ia, _) => trace.recordServerAddr(ia) case _ => // do nothing for non-ip address } } rep } }
luciferous/finagle
finagle-core/src/main/scala/com/twitter/finagle/tracing/DestinationTracing.scala
Scala
apache-2.0
2,636
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.k8s private[spark] sealed trait KubernetesVolumeSpecificConf private[spark] case class KubernetesHostPathVolumeConf( hostPath: String) extends KubernetesVolumeSpecificConf private[spark] case class KubernetesPVCVolumeConf( claimName: String) extends KubernetesVolumeSpecificConf private[spark] case class KubernetesEmptyDirVolumeConf( medium: Option[String], sizeLimit: Option[String]) extends KubernetesVolumeSpecificConf private[spark] case class KubernetesVolumeSpec[T <: KubernetesVolumeSpecificConf]( volumeName: String, mountPath: String, mountReadOnly: Boolean, volumeConf: T)
tejasapatil/spark
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesVolumeSpec.scala
Scala
apache-2.0
1,460
package org.jetbrains.plugins.scala package lang package psi package api package toplevel import org.jetbrains.plugins.scala.lang.psi.api.statements.params._ import com.intellij.psi._ import parser.ScalaElementTypes import com.intellij.openapi.progress.ProgressManager trait ScTypeParametersOwner extends ScalaPsiElement { @volatile private var res: Seq[ScTypeParam] = null @volatile private var modCount: Long = 0L def typeParameters: Seq[ScTypeParam] = { def inner(): Seq[ScTypeParam] = { typeParametersClause match { case Some(clause) => clause.typeParameters case _ => Seq.empty } } val curModCount = getManager.getModificationTracker.getModificationCount if (res != null && curModCount == modCount) return res res = inner() modCount = curModCount res } def typeParametersClause: Option[ScTypeParamClause] = { this match { case st: ScalaStubBasedElementImpl[_] => { val stub = st.getStub if (stub != null) { val array = stub.getChildrenByType(ScalaElementTypes.TYPE_PARAM_CLAUSE, JavaArrayFactoryUtil.ScTypeParamClauseFactory) if (array.length == 0) { return None } else { return Some(array.apply(0)) } } } case _ => } findChild(classOf[ScTypeParamClause]) } import com.intellij.psi.scope.PsiScopeProcessor override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = { if (lastParent != null) { var i = 0 while (i < typeParameters.length) { ProgressManager.checkCanceled() if (!processor.execute(typeParameters.apply(i), state)) return false i = i + 1 } } true } }
consulo/consulo-scala
src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScTypeParametersOwner.scala
Scala
apache-2.0
1,928
package org.scalameta package build import java.lang.ProcessBuilder._ import java.nio.file._ import java.nio.file.Files._ import scala.collection.JavaConverters._ import sbt._ import sbt.Keys._ import sbt.plugins._ object Build extends AutoPlugin { override def requires: Plugins = JvmPlugin override def trigger: PluginTrigger = allRequirements import autoImport._ object autoImport { trait BenchSuite { def initCommands: List[String] = List( "bench/clean", "wow " + Versions.LatestScala212 ) def metacpBenches: List[String] def metacpCommands: List[String] = { if (metacpBenches.isEmpty) Nil else List("bench/jmh:run " + metacpBenches.mkString(" ")) } def scalacBenches: List[String] def scalacCommands: List[String] = { if (scalacBenches.isEmpty) Nil else List("bench/jmh:run " + scalacBenches.mkString(" ")) } def scalametaBenches: List[String] def scalametaCommands: List[String] = { if (scalametaBenches.isEmpty) Nil else List("bench/jmh:run " + scalametaBenches.mkString(" ")) } final def command: String = { val benchCommands = metacpCommands ++ scalacCommands ++ scalametaCommands (initCommands ++ benchCommands).map(c => s";$c ").mkString("") } } object benchLSP extends BenchSuite { def metacpBenches = List("Metacp") def scalacBenches = List("ScalacBaseline") def scalametaBenches = List("ScalametaBaseline") } object benchAll extends BenchSuite { def metacpBenches = List("Metacp") def scalacBenches = List("Scalac") def scalametaBenches = List("Scalameta") } object benchQuick extends BenchSuite { def metacpBenches = List("Metacp") def scalacBenches = Nil def scalametaBenches = List("ScalametaBaseline") } // https://stackoverflow.com/questions/41229451/how-to-disable-slow-tagged-scalatests-by-default-allow-execution-with-option lazy val Fast = config("fast").extend(Test) lazy val Slow = config("slow").extend(Test) } }
MasseGuillaume/scalameta
project/Build.scala
Scala
bsd-3-clause
2,116
package regolic.asts.fol import regolic.asts.core.Trees._ object Trees { object ConstantSymbol { def apply(name: String, sort: Sort) = FunctionSymbol(name, Nil, sort) def unapply(symbol: FunctionSymbol): Option[(String, Sort)] = symbol match { case FunctionSymbol(n, Nil, s) => Some((n, s)) case _ => None } } object Constant { def apply(name: String, sort: Sort) = FunctionApplication(ConstantSymbol(name, sort), Nil) def unapply(apply: FunctionApplication): Option[(String, Sort)] = apply match { case FunctionApplication(ConstantSymbol(n, s), Nil) => Some((n, s)) case _ => None } } def freshConstant(prefix: String, sort: Sort): FunctionApplication = FunctionApplication(freshFunctionSymbol(prefix, List(), sort), List()) def freshConstant(prefix: FunctionApplication, sort: Sort): FunctionApplication = { require(prefix.terms == List()) freshConstant(prefix.fSymbol.name, prefix.sort) } object PropositionalVariableSymbol { def apply(name: String) = PredicateSymbol(name, Nil) def unapply(symbol: PredicateSymbol): Option[String] = symbol match { case PredicateSymbol(n, Nil) => Some(n) case _ => None } } object PropositionalVariable { def apply(name: String) = PredicateApplication(PropositionalVariableSymbol(name), Nil) def unapply(apply: PredicateApplication): Option[String] = apply match { case PredicateApplication(PropositionalVariableSymbol(n), Nil) => Some(n) case _ => None } } def freshPropositionalVariable(prefix: String): PredicateApplication = PredicateApplication(freshPredicateSymbol(prefix, List()), List()) def freshPropositionalVariable(prefix: PredicateApplication): PredicateApplication = { require(prefix.terms == List()) freshPropositionalVariable(prefix.symbol.name) } object EqualsSymbol { def apply(sort: Sort): PredicateSymbol = PredicateSymbol("=", List(sort, sort)) def unapply(s: PredicateSymbol): Option[Sort] = s match { case PredicateSymbol("=", List(s1, s2)) if s1 == s2 => Some(s1) case _ => None } } //TODO: maybe this Equals Object could be used in every 
theories object Equals { def apply(t1: Term, t2: Term): PredicateApplication = PredicateApplication(EqualsSymbol(t1.sort), List(t1, t2)) def unapply(pApply: PredicateApplication): Option[(Term, Term)] = pApply match { case PredicateApplication(EqualsSymbol(_), List(t1, t2)) => Some((t1, t2)) case _ => None } } object TrueSymbol { def apply() = ConnectiveSymbol("true", 0) def unapply(symbol: ConnectiveSymbol): Boolean = symbol match { case ConnectiveSymbol("true", 0) => true case _ => false } } object True { def apply() = ConnectiveApplication(TrueSymbol(), Nil) def unapply(appli: ConnectiveApplication): Boolean = appli match { case ConnectiveApplication(TrueSymbol(), Nil) => true case _ => false } } object FalseSymbol { def apply() = ConnectiveSymbol("false", 0) def unapply(symbol: ConnectiveSymbol): Boolean = symbol match { case ConnectiveSymbol("false", 0) => true case _ => false } } object False { def apply() = ConnectiveApplication(FalseSymbol(), Nil) def unapply(appli: ConnectiveApplication): Boolean = appli match { case ConnectiveApplication(FalseSymbol(), Nil) => true case _ => false } } object NotSymbol { def apply() = ConnectiveSymbol("not", 1) def unapply(symbol: ConnectiveSymbol): Boolean = symbol match { case ConnectiveSymbol("not", 1) => true case _ => false } } object Not { def apply(formula: Formula) = ConnectiveApplication(NotSymbol(), List(formula)) def unapply(appli: ConnectiveApplication): Option[Formula] = appli match { case ConnectiveApplication(NotSymbol(), formula :: Nil) => Some(formula) case _ => None } } object AndSymbol { def apply(arity: Int) = ConnectiveSymbol("and", arity) def unapply(symbol: ConnectiveSymbol): Option[Int] = symbol match { case ConnectiveSymbol("and", n) => Some(n) case _ => None } } object And { def apply(formulas: List[Formula]) = ConnectiveApplication(AndSymbol(formulas.size), formulas) def apply(formulas: Formula*) = ConnectiveApplication(AndSymbol(formulas.size), formulas.toList) def unapply(appli: 
ConnectiveApplication): Option[List[Formula]] = appli match { case ConnectiveApplication(AndSymbol(_), formulas) => Some(formulas) case _ => None } } object OrSymbol { def apply(arity: Int) = ConnectiveSymbol("or", arity) def unapply(symbol: ConnectiveSymbol): Option[Int] = symbol match { case ConnectiveSymbol("or", n) => Some(n) case _ => None } } object Or { def apply(formulas: List[Formula]) = ConnectiveApplication(OrSymbol(formulas.size), formulas) def apply(formulas: Formula*) = ConnectiveApplication(OrSymbol(formulas.size), formulas.toList) def unapply(appli: ConnectiveApplication): Option[List[Formula]] = appli match { case ConnectiveApplication(OrSymbol(_), formulas) => Some(formulas) case _ => None } } object IffSymbol { def apply() = ConnectiveSymbol("iff", 2) def unapply(symbol: ConnectiveSymbol): Boolean = symbol match { case ConnectiveSymbol("iff", 2) => true case _ => false } } object Iff { def apply(f1: Formula, f2: Formula) = ConnectiveApplication(IffSymbol(), List(f1, f2)) def unapply(appli: ConnectiveApplication): Option[(Formula, Formula)] = appli match { case ConnectiveApplication(IffSymbol(), List(f1, f2)) => Some((f1, f2)) case _ => None } } object ImpliesSymbol { def apply() = ConnectiveSymbol("implies", 2) def unapply(symbol: ConnectiveSymbol): Boolean = symbol match { case ConnectiveSymbol("implies", 2) => true case _ => false } } object Implies { def apply(f1: Formula, f2: Formula) = ConnectiveApplication(ImpliesSymbol(), List(f1, f2)) def unapply(appli: ConnectiveApplication): Option[(Formula, Formula)] = appli match { case ConnectiveApplication(ImpliesSymbol(), List(f1, f2)) => Some((f1, f2)) case _ => None } } object IfThenElseSymbol { def apply() = ConnectiveSymbol("if_then_else", 3) def unapply(symbol: ConnectiveSymbol): Boolean = symbol match { case ConnectiveSymbol("if_then_else", 3) => true case _ => false } } object IfThenElse { def apply(f1: Formula, f2: Formula, f3: Formula) = ConnectiveApplication(IfThenElseSymbol(), List(f1, 
f2, f3)) def unapply(appli: ConnectiveApplication): Option[(Formula, Formula, Formula)] = appli match { case ConnectiveApplication(IfThenElseSymbol(), List(f1, f2, f3)) => Some((f1, f2, f3)) case _ => None } } object ForallSymbol { def apply() = QuantifierSymbol("forall") def unapply(symbol: QuantifierSymbol): Boolean = symbol match { case QuantifierSymbol("forall") => true case _ => false } } object Forall { def apply(v: Variable, f: Formula) = QuantifierApplication(ForallSymbol(), v, f) def unapply(appli: QuantifierApplication): Option[(Variable, Formula)] = appli match { case QuantifierApplication(ForallSymbol(), v, f) => Some((v, f)) case _ => None } } object ExistsSymbol { def apply() = QuantifierSymbol("exists") def unapply(symbol: QuantifierSymbol): Boolean = symbol match { case QuantifierSymbol("exists") => true case _ => false } } object Exists { def apply(v: Variable, f: Formula) = QuantifierApplication(ExistsSymbol(), v, f) def unapply(appli: QuantifierApplication): Option[(Variable, Formula)] = appli match { case QuantifierApplication(ExistsSymbol(), v, f) => Some((v, f)) case _ => None } } }
regb/scabolic
src/main/scala/regolic/asts/fol/Trees.scala
Scala
mit
7,896
package euranova import org.scalatest.FunSuite import org.scalatest.FlatSpec class ThingTest extends FlatSpec { "Thing.prop" should " have the right value" in { val thing = new Thing() assert("wrong value" != thing.prop) } "Thing" should "always return that it's ready" in { val thing = new Thing() assert(true === thing.isReady) } }
euranova/code_retreat
scaffolds/scala/src/test/scala/euranova/ThingTest.scala
Scala
mit
363
package org.jetbrains.sbt.project.settings import com.intellij.openapi.externalSystem.settings.{DelegatingExternalSystemSettingsListener, ExternalSystemSettingsListener} /** * Stub to satisfy scaffolding of ExternalSystem * @author Pavel Fatin */ class SbtProjectSettingsListenerAdapter(listener: ExternalSystemSettingsListener[SbtProjectSettings]) extends DelegatingExternalSystemSettingsListener[SbtProjectSettings](listener) with SbtProjectSettingsListener
ilinum/intellij-scala
src/org/jetbrains/sbt/project/settings/SbtProjectSettingsListenerAdapter.scala
Scala
apache-2.0
467
/* * Copyright (C) Lightbend Inc. <https://www.lightbend.com> */ package com.lightbend.lagom.scaladsl.api.broker.kafka import com.lightbend.lagom.scaladsl.api.Descriptor /** * Provides a set of Kafka specific properties that can be used when creating a topic descriptor. */ object KafkaProperties { /** * A PartitionKeyStrategy produces a key for each message published to a Kafka topic. * * The key is used to determine on which topic's partition a message is published. It is guaranteed that messages * with the same key will arrive to the same partition, in the order they are published. */ def partitionKeyStrategy[Message]: Descriptor.Property[Message, PartitionKeyStrategy[Message]] = Descriptor.Property[Message, PartitionKeyStrategy[Message]]("kafkaPartitionKeyStrategy") }
lagom/lagom
service/scaladsl/api/src/main/scala/com/lightbend/lagom/scaladsl/api/broker/kafka/KafkaProperties.scala
Scala
apache-2.0
813
package org.json4s package reflect import java.lang.reflect.{Field, TypeVariable} sealed abstract class Descriptor extends Product with Serializable object ScalaType { private val types = new Memo[Manifest[_], ScalaType] private val singletonFieldName = "MODULE$" def apply[T](mf: Manifest[T]): ScalaType = { /* optimization */ if (mf.runtimeClass == classOf[Int] || mf.runtimeClass == classOf[java.lang.Integer]) ScalaType.IntType else if (mf.runtimeClass == classOf[Long] || mf.runtimeClass == classOf[java.lang.Long]) ScalaType.LongType else if (mf.runtimeClass == classOf[Byte] || mf.runtimeClass == classOf[java.lang.Byte]) ScalaType.ByteType else if (mf.runtimeClass == classOf[Short] || mf.runtimeClass == classOf[java.lang.Short]) ScalaType.ShortType else if (mf.runtimeClass == classOf[Float] || mf.runtimeClass == classOf[java.lang.Float]) ScalaType.FloatType else if (mf.runtimeClass == classOf[Double] || mf.runtimeClass == classOf[java.lang.Double]) ScalaType.DoubleType else if (mf.runtimeClass == classOf[BigInt] || mf.runtimeClass == classOf[java.math.BigInteger]) ScalaType.BigIntType else if (mf.runtimeClass == classOf[BigDecimal] || mf.runtimeClass == classOf[java.math.BigDecimal]) ScalaType.BigDecimalType else if (mf.runtimeClass == classOf[Boolean] || mf.runtimeClass == classOf[java.lang.Boolean]) ScalaType.BooleanType else if (mf.runtimeClass == classOf[String] || mf.runtimeClass == classOf[java.lang.String]) ScalaType.StringType else if (mf.runtimeClass == classOf[java.util.Date]) ScalaType.DateType else if (mf.runtimeClass == classOf[java.sql.Timestamp]) ScalaType.TimestampType else if (mf.runtimeClass == classOf[Symbol]) ScalaType.SymbolType else if (mf.runtimeClass == classOf[Number]) ScalaType.NumberType else if (mf.runtimeClass == classOf[JObject]) ScalaType.JObjectType else if (mf.runtimeClass == classOf[JArray]) ScalaType.JArrayType else if (mf.runtimeClass == classOf[JValue]) ScalaType.JValueType /* end optimization */ else { if 
(mf.typeArguments.isEmpty) types(mf, new ScalaType(_)) else new ScalaType(mf) } } def apply(erasure: Class[_], typeArgs: Seq[ScalaType] = Seq.empty): ScalaType = { val mf = ManifestFactory.manifestOf(erasure, typeArgs.map(_.manifest)) ScalaType(mf) } def apply(target: TypeInfo): ScalaType = { target match { case t: TypeInfo with SourceType => t.scalaType case t => val tArgs = t.parameterizedType.map(_.getActualTypeArguments.toList.map(Reflector.scalaTypeOf(_))).getOrElse(Nil) ScalaType(target.clazz, tArgs) } } // Deal with the most common cases as an optimization /* optimization */ private val IntType: ScalaType = new PrimitiveScalaType(Manifest.Int) private val NumberType: ScalaType = new PrimitiveScalaType(manifest[Number]) private val LongType: ScalaType = new PrimitiveScalaType(Manifest.Long) private val ByteType: ScalaType = new PrimitiveScalaType(Manifest.Byte) private val ShortType: ScalaType = new PrimitiveScalaType(Manifest.Short) private val BooleanType: ScalaType = new PrimitiveScalaType(Manifest.Boolean) private val FloatType: ScalaType = new PrimitiveScalaType(Manifest.Float) private val DoubleType: ScalaType = new PrimitiveScalaType(Manifest.Double) private val StringType: ScalaType = new PrimitiveScalaType(manifest[java.lang.String]) private val SymbolType: ScalaType = new PrimitiveScalaType(manifest[Symbol]) private val BigDecimalType: ScalaType = new PrimitiveScalaType(manifest[BigDecimal]) private val BigIntType: ScalaType = new PrimitiveScalaType(manifest[BigInt]) private val JValueType: ScalaType = new PrimitiveScalaType(manifest[JValue]) private val JObjectType: ScalaType = new PrimitiveScalaType(manifest[JObject]) private val JArrayType: ScalaType = new PrimitiveScalaType(manifest[JArray]) private val DateType: ScalaType = new PrimitiveScalaType(manifest[java.util.Date]) private val TimestampType: ScalaType = new PrimitiveScalaType(manifest[java.sql.Timestamp]) private class PrimitiveScalaType(mf: Manifest[_]) extends ScalaType(mf) { override 
val isPrimitive = true } private class CopiedScalaType( mf: Manifest[_], private[this] var _typeVars: Map[TypeVariable[_], ScalaType], override val isPrimitive: Boolean) extends ScalaType(mf) { override def typeVars: Map[TypeVariable[_], ScalaType] = { if (_typeVars == null) _typeVars = Map.empty ++ erasure.getTypeParameters.map(_.asInstanceOf[TypeVariable[_]]).zip(typeArgs) _typeVars } } /* end optimization */ } class ScalaType(private val manifest: Manifest[_]) extends Equals { import ScalaType.{ types, CopiedScalaType } val erasure: Class[_] = manifest.runtimeClass val typeArgs: Seq[ScalaType] = manifest.typeArguments.map(ta => Reflector.scalaTypeOf(ta)) ++ ( if (erasure.isArray) List(Reflector.scalaTypeOf(erasure.getComponentType)) else Nil ) private[this] var _typeVars: Map[TypeVariable[_], ScalaType] = null def typeVars: Map[TypeVariable[_], ScalaType] = { if (_typeVars == null) _typeVars = Map.empty ++ erasure.getTypeParameters.map(_.asInstanceOf[TypeVariable[_]]).zip(typeArgs) _typeVars } val isArray: Boolean = erasure.isArray private[this] var _rawFullName: String = null def rawFullName: String = { if (_rawFullName == null) _rawFullName = erasure.getName _rawFullName } private[this] var _rawSimpleName: String = null def rawSimpleName: String = { if (_rawSimpleName == null) { _rawSimpleName = safeSimpleName(erasure) } _rawSimpleName } lazy val simpleName: String = rawSimpleName + (if (typeArgs.nonEmpty) typeArgs.map(_.simpleName).mkString("[", ", ", "]") else (if (typeVars.nonEmpty) typeVars.map(_._2.simpleName).mkString("[", ", ", "]") else "")) lazy val fullName: String = rawFullName + (if (typeArgs.nonEmpty) typeArgs.map(_.fullName).mkString("[", ", ", "]") else "") lazy val typeInfo: TypeInfo = new TypeInfo( erasure, if (typeArgs.nonEmpty) Some(Reflector.mkParameterizedType(erasure, typeArgs.map(_.erasure).toSeq)) else None ) with SourceType { val scalaType: ScalaType = ScalaType.this } val isPrimitive = false def isMap = 
classOf[collection.immutable.Map[_, _]].isAssignableFrom(erasure) || classOf[collection.Map[_, _]].isAssignableFrom(erasure) def isCollection = erasure.isArray || classOf[Iterable[_]].isAssignableFrom(erasure) || classOf[java.util.Collection[_]].isAssignableFrom(erasure) def isOption = classOf[Option[_]].isAssignableFrom(erasure) def isEither = classOf[Either[_, _]].isAssignableFrom(erasure) def <:<(that: ScalaType): Boolean = manifest <:< that.manifest def >:>(that: ScalaType): Boolean = manifest >:> that.manifest private def singletonField = erasure.getFields.find(_.getName.equals(ScalaType.singletonFieldName)) def isSingleton = singletonField.isDefined def singletonInstance = singletonField.map(_.get(null)) override def hashCode(): Int = manifest.## override def equals(obj: Any): Boolean = obj match { case a: ScalaType => manifest == a.manifest case _ => false } def canEqual(that: Any): Boolean = that match { case s: ScalaType => manifest.canEqual(s.manifest) case _ => false } def copy(erasure: Class[_] = erasure, typeArgs: Seq[ScalaType] = typeArgs, typeVars: Map[TypeVariable[_], ScalaType] = _typeVars): ScalaType = { /* optimization */ if (erasure == classOf[Int] || erasure == classOf[java.lang.Integer]) ScalaType.IntType else if (erasure == classOf[Long] || erasure == classOf[java.lang.Long]) ScalaType.LongType else if (erasure == classOf[Byte] || erasure == classOf[java.lang.Byte]) ScalaType.ByteType else if (erasure == classOf[Short] || erasure == classOf[java.lang.Short]) ScalaType.ShortType else if (erasure == classOf[Float] || erasure == classOf[java.lang.Float]) ScalaType.FloatType else if (erasure == classOf[Double] || erasure == classOf[java.lang.Double]) ScalaType.DoubleType else if (erasure == classOf[BigInt] || erasure == classOf[java.math.BigInteger]) ScalaType.BigIntType else if (erasure == classOf[BigDecimal] || erasure == classOf[java.math.BigDecimal]) ScalaType.BigDecimalType else if (erasure == classOf[Boolean] || erasure == 
classOf[java.lang.Boolean]) ScalaType.BooleanType else if (erasure == classOf[String] || erasure == classOf[java.lang.String]) ScalaType.StringType else if (erasure == classOf[java.util.Date]) ScalaType.DateType else if (erasure == classOf[java.sql.Timestamp]) ScalaType.TimestampType else if (erasure == classOf[Symbol]) ScalaType.SymbolType else if (erasure == classOf[Number]) ScalaType.NumberType else if (erasure == classOf[JObject]) ScalaType.JObjectType else if (erasure == classOf[JArray]) ScalaType.JArrayType else if (erasure == classOf[JValue]) ScalaType.JValueType /* end optimization */ else { val mf = ManifestFactory.manifestOf(erasure, typeArgs.map(_.manifest)) val st = new CopiedScalaType(mf, typeVars, isPrimitive) if (typeArgs.isEmpty) types.replace(mf, st) else st } } override def toString: String = simpleName } case class PropertyDescriptor(name: String, mangledName: String, returnType: ScalaType, field: Field) extends Descriptor { def set(receiver: Any, value: Any) = field.set(receiver, value) def get(receiver: AnyRef) = field.get(receiver) } case class ConstructorParamDescriptor(name: String, mangledName: String, argIndex: Int, argType: ScalaType, defaultValue: Option[() => Any]) extends Descriptor { lazy val isOptional = defaultValue.isDefined || argType.isOption } case class ConstructorDescriptor(params: Seq[ConstructorParamDescriptor], constructor: Executable, isPrimary: Boolean) extends Descriptor case class SingletonDescriptor(simpleName: String, fullName: String, erasure: ScalaType, instance: AnyRef, properties: Seq[PropertyDescriptor]) extends Descriptor sealed abstract class ObjectDescriptor extends Descriptor case class ClassDescriptor(simpleName: String, fullName: String, erasure: ScalaType, companion: Option[SingletonDescriptor], constructors: Seq[ConstructorDescriptor], properties: Seq[PropertyDescriptor]) extends ObjectDescriptor { def bestMatching(argNames: List[String]): Option[ConstructorDescriptor] = { val names = Set(argNames: _*) 
def countOptionals(args: List[ConstructorParamDescriptor]) = args.foldLeft(0)((n, x) => { if (x.isOptional) n+1 else n }) def score(args: List[ConstructorParamDescriptor]) = args.foldLeft(0)((s, arg) => if (names.contains(arg.name)) s+1 else if (arg.isOptional) s else -100 ) if (constructors.isEmpty) None else { val best = constructors.tail.foldLeft((constructors.head, score(constructors.head.params.toList))) { (best, c) => val newScore = score(c.params.toList) val newIsBetter = { (newScore == best._2 && countOptionals(c.params.toList) < countOptionals(best._1.params.toList)) || newScore > best._2 } if (newIsBetter) (c, newScore) else best } Some(best._1) } } private[this] var _mostComprehensive: Seq[ConstructorParamDescriptor] = null def mostComprehensive: Seq[ConstructorParamDescriptor] = { if (_mostComprehensive == null) _mostComprehensive = if (constructors.nonEmpty) constructors.sortBy(-_.params.size).headOption.map(_.params).getOrElse(Nil) else Nil _mostComprehensive } } case class PrimitiveDescriptor(erasure: ScalaType, default: Option[() => Any] = None) extends ObjectDescriptor
dacr/json4s
core/src/main/scala/org/json4s/reflect/descriptors.scala
Scala
apache-2.0
11,848
package breeze.linalg.operators

/*
 Copyright 2012 David Hall

 Licensed under the Apache License, Version 2.0 (the "License")
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
*/
import breeze.generic.{MMRegistry3, UFunc, MMRegistry2}
import breeze.generic.UFunc.InPlaceImpl3
import scala.reflect.ClassTag

/**
 * A ternary in-place operation (`Op` applied to `(A, B, C)`) that supports runtime
 * registration of specialized implementations for subtypes of the argument types.
 *
 * Dispatch is keyed on the runtime classes of the three arguments. Lookups
 * (including misses) are memoized in `cache`, so the comparatively expensive
 * `resolve` / `selectBestOption` path runs at most once per class triple.
 *
 * @author dlwh
 */
// This trait could reuse code from Multimethod2, but not doing so allows us to reduce code size a lot
// because we don't need BinaryOp's to inherit from Function2, which has a lot of @specialzied cruft.
trait TernaryUpdateRegistry[A, B, C, Op] extends UFunc.InPlaceImpl3[Op, A, B, C] with MMRegistry3[UFunc.InPlaceImpl3[Op, _ <: A, _ <: B, _ <: C]] {

  /**
   * Invoked when no implementation is registered for the arguments' runtime types.
   * Subclasses may override this to provide a fallback instead of failing.
   */
  protected def bindingMissing(a: A, b: B, c: C): Unit =
    // Bug fix: the original message concatenated `a` and `b` with no separator
    // and omitted `c` entirely ("Types not found!" + a + b + " " + ops).
    throw new UnsupportedOperationException(s"Types not found! $a $b $c; registered ops: $ops")

  /** Invoked when several registered bindings are equally specific for the arguments. */
  protected def multipleOptions(a: A, b: B, c: C, m: Map[(Class[_], Class[_], Class[_]), UFunc.InPlaceImpl3[Op, _ <: A, _ <: B, _ <: C]]): Unit = {
    throw new RuntimeException("Multiple bindings for method: " + m)
  }

  /**
   * Applies the most specific registered implementation to `(a, b, c)` in place.
   * The resolved binding (or the fact that none exists) is cached per class triple.
   */
  def apply(a: A, b: B, c: C): Unit = {
    val ac = a.asInstanceOf[AnyRef].getClass
    val bc = b.asInstanceOf[AnyRef].getClass
    val cc = c.asInstanceOf[AnyRef].getClass

    val cached = cache.get((ac, bc, cc))
    if (cached != null) {
      cached match {
        case None => bindingMissing(a, b, c) // memoized miss
        case Some(m) =>
          m.asInstanceOf[InPlaceImpl3[Op, A, B, C]].apply(a, b, c)
      }
    } else {
      val options = resolve(ac, bc, cc)
      options.size match {
        case 0 =>
          cache.put((ac, bc, cc), None)
          bindingMissing(a, b, c)
        case 1 =>
          val method = options.values.head
          cache.put((ac, bc, cc), Some(method))
          method.asInstanceOf[InPlaceImpl3[Op, A, B, C]].apply(a, b, c)
        case _ =>
          // Several candidates: accept only if one is strictly most specific.
          val selected = selectBestOption(options)
          if (selected.size != 1) {
            multipleOptions(a, b, c, options)
          } else {
            val method = selected.values.head
            cache.put((ac, bc, cc), Some(method))
            method.asInstanceOf[InPlaceImpl3[Op, A, B, C]].apply(a, b, c)
          }
      }
    }
  }

  /** Registers `op` for the concrete argument classes captured by the ClassTags. */
  def register[AA <: A, BB <: B, CC <: C](op: InPlaceImpl3[Op, AA, BB, CC])(implicit manA: ClassTag[AA], manB: ClassTag[BB], manC: ClassTag[CC]): Unit = {
    super.register(manA.runtimeClass, manB.runtimeClass, manC.runtimeClass, op)
  }
}
chen0031/breeze
math/src/main/scala/breeze/linalg/operators/TernaryUpdateRegistry.scala
Scala
apache-2.0
2,936
package yang.flexmapping.process import java.text.ParseException import java.util import com.nsn.oss.nbi.flexmapping.EventField import org.apache.log4j.Logger import yang.flexmapping.datas.DataHolderCreatorFactory import yang.flexmapping.getval.GetValue import yang.flexmapping.StructuredEventWarpper import scala.xml.NodeSeq /** * Created by y28yang on 4/9/2016. */ class AttributeProcesser(node: NodeSeq) extends FlexMappingProcesser { val log = Logger.getLogger(classOf[AttributeProcesser]) val id = node \\@ "id" val dataHolderCreator = initTypeField(node) val valueGeter = GetValue.parse(node) //TODO //val isFilterable???? override def process(event: util.Map[EventField, String], structuredEvent: StructuredEventWarpper) = { var value:String="" try { value=valueGeter.getValue(event) val dataHolder = dataHolderCreator.create(id, value) dataHolder.init structuredEvent.filterData += dataHolder } catch { case nfm: NumberFormatException => log.error(s"can not parse,the id:$id,type:${dataHolderCreator.typeCode},value:$value", nfm) case parse: ParseException => log.error(s"can not parse,the id:$id,type:${dataHolderCreator.typeCode},value:$value", parse) case nup: NullPointerException => log.error(s"can not parse,the id:$id,type:${dataHolderCreator.typeCode},value:$value", nup) } } def initTypeField(node: NodeSeq) = { val theType = node \\@ "type".trim val creator=DataHolderCreatorFactory.getOrWithDefault(theType) creator } def initGetValue() = { } }
wjingyao2008/firsttry
NextGenAct/src/main/scala/yang/flexmapping/process/AttributeProcesser.scala
Scala
apache-2.0
1,576
package repository.postgres.silhouette

import com.google.inject.Inject
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.api.util.PasswordInfo
import net.scalytica.symbiotic.postgres.SymbioticDb
import play.api.{Configuration, Logger}
import repository.PasswordAuthRepository
import repository.postgres.ExtraColumnMappers

import scala.concurrent.{ExecutionContext, Future}

/**
 * Postgres (Slick) backed persistence for Silhouette [[PasswordInfo]], keyed
 * by the provider id / provider key pair of a [[LoginInfo]].
 */
class PostgresPasswordAuthRepository @Inject()(
    configuration: Configuration,
    ec: ExecutionContext
) extends PasswordAuthRepository
    with SymbioticDb
    with ExtraColumnMappers {

  import profile.api._

  private[this] val log = Logger(getClass)

  private[this] implicit val exec = ec

  override lazy val config = configuration.underlying

  val passwordInfoTable = TableQuery[PasswordInfoTable]

  /** Query selecting the row(s) matching both provider id and key of `loginInfo`. */
  private[this] def findQuery(loginInfo: LoginInfo) = {
    passwordInfoTable.filter { p =>
      p.providerId === loginInfo.providerID &&
      p.providerKey === loginInfo.providerKey
    }
  }

  /** Looks up the stored password info for `loginInfo`, if any. */
  override def find(loginInfo: LoginInfo): Future[Option[PasswordInfo]] = {
    db.run(findQuery(loginInfo).result.headOption).map(_.map(_.asPasswordInfo))
  }

  /** Inserts a new password info row and returns the given `authInfo`. */
  override def add(
      loginInfo: LoginInfo,
      authInfo: PasswordInfo
  ): Future[PasswordInfo] = {
    val pir    = PasswordInfoRow.init(loginInfo, authInfo)
    val action = passwordInfoTable returning passwordInfoTable.map(_.id) += pir
    db.run(action.transactionally).map(_ => authInfo)
  }

  /**
   * Updates hasher, password and salt for an existing row.
   *
   * If no row was updated, fall back to a lookup: either the value is already
   * present (return it), or the row genuinely does not exist, which is a
   * broken precondition and surfaces as an IllegalStateException.
   */
  override def update(
      loginInfo: LoginInfo,
      authInfo: PasswordInfo
  ): Future[PasswordInfo] = {
    val updAction = findQuery(loginInfo)
      .map(row => (row.hasher, row.password, row.salt))
      .update((authInfo.hasher, authInfo.password, authInfo.salt))

    db.run(updAction.transactionally).flatMap { num =>
      if (0 < num) Future.successful(authInfo)
      else
        find(loginInfo).map(_.getOrElse {
          throw new IllegalStateException("Expected to find a PasswordInfo.")
        })
    }
  }

  /** Upsert: updates the row when it exists, otherwise inserts a new one. */
  override def save(
      loginInfo: LoginInfo,
      authInfo: PasswordInfo
  ): Future[PasswordInfo] = {
    find(loginInfo).flatMap {
      case Some(_) => update(loginInfo, authInfo)
      case None    => add(loginInfo, authInfo)
    }
  }

  /** Deletes all rows matching `loginInfo`. */
  override def remove(loginInfo: LoginInfo): Future[Unit] = {
    db.run(findQuery(loginInfo).delete.transactionally).map { res =>
      // Bug fix: the two debug messages were swapped — "No rows ... were
      // removed" was logged when rows WERE deleted, and vice versa.
      if (0 < res) log.debug(s"Removed $res rows matching $loginInfo")
      else log.debug(s"No rows matching $loginInfo were removed")
    }
  }

  /** Database row representation of a [[PasswordInfo]]. */
  case class PasswordInfoRow(
      id: Option[Int],
      providerId: String,
      providerKey: String,
      hasher: String,
      password: String,
      salt: Option[String]
  ) {

    def asPasswordInfo: PasswordInfo = PasswordInfo(hasher, password, salt)

  }

  object PasswordInfoRow {

    /** Builds an unsaved row (id = None) from the Silhouette types. */
    def init(li: LoginInfo, ai: PasswordInfo): PasswordInfoRow = {
      PasswordInfoRow(
        None,
        li.providerID,
        li.providerKey,
        ai.hasher,
        ai.password,
        ai.salt
      )
    }
  }

  /** Slick table definition for the `password_info` table. */
  class PasswordInfoTable(val tag: Tag)
      extends Table[PasswordInfoRow](tag, Some(dbSchema), "password_info") {

    val id          = column[Int]("id", O.PrimaryKey, O.AutoInc)
    val providerId  = column[String]("provider_id")
    val providerKey = column[String]("provider_key")
    val hasher      = column[String]("hasher")
    val password    = column[String]("password")
    val salt        = column[Option[String]]("salt")

    // scalastyle:off method.name
    override def * =
      (
        id.?,
        providerId,
        providerKey,
        hasher,
        password,
        salt
      ) <> ((PasswordInfoRow.apply _).tupled, PasswordInfoRow.unapply _)
    // scalastyle:on method.name

  }

}
kpmeen/symbiotic
examples/symbiotic-server/app/repository/postgres/silhouette/PostgresPasswordAuthRepository.scala
Scala
apache-2.0
3,785
/*
 * Copyright (c) 2014-2015 by its authors. Some rights reserved.
 * See the project homepage at: http://www.monifu.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package monifu.concurrent.schedulers

import java.util.concurrent.{ScheduledExecutorService, TimeUnit}
import monifu.concurrent.{Cancelable, UncaughtExceptionReporter}
import monifu.concurrent.Scheduler._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

/**
 * An `AsyncScheduler` uses the given `ScheduledExecutorService` to delay
 * tasks into the future, while the tasks themselves are executed on the
 * given `ExecutionContext`.
 */
final class AsyncScheduler private (
  scheduledExecutor: ScheduledExecutorService,
  executionContext: ExecutionContext,
  reporter: UncaughtExceptionReporter)
  extends ReferenceScheduler {

  def scheduleOnce(initialDelay: FiniteDuration, r: Runnable): Cancelable =
    scheduleOnce(initialDelay.length, initialDelay.unit, r)

  def scheduleOnce(initialDelay: Long, unit: TimeUnit, r: Runnable): Cancelable =
    if (initialDelay > 0) {
      // One-shot tasks are cancelled with interruption (mayInterruptIfRunning = true).
      val scheduled = scheduledExecutor.schedule(r, initialDelay, unit)
      Cancelable(scheduled.cancel(true))
    } else {
      // A non-positive delay means: run right away on the execution context.
      execute(r)
      Cancelable()
    }

  override def scheduleWithFixedDelay(initialDelay: FiniteDuration, delay: FiniteDuration, r: Runnable): Cancelable = {
    val scheduled = scheduledExecutor.scheduleWithFixedDelay(
      r, initialDelay.toMillis, delay.toMillis, TimeUnit.MILLISECONDS)
    Cancelable(scheduled.cancel(false))
  }

  override def scheduleWithFixedDelay(initialDelay: Long, delay: Long, unit: TimeUnit, r: Runnable): Cancelable = {
    val scheduled = scheduledExecutor.scheduleWithFixedDelay(r, initialDelay, delay, unit)
    Cancelable(scheduled.cancel(false))
  }

  override def scheduleAtFixedRate(initialDelay: FiniteDuration, period: FiniteDuration, r: Runnable): Cancelable = {
    val scheduled = scheduledExecutor.scheduleAtFixedRate(
      r, initialDelay.toMillis, period.toMillis, TimeUnit.MILLISECONDS)
    Cancelable(scheduled.cancel(false))
  }

  override def scheduleAtFixedRate(initialDelay: Long, period: Long, unit: TimeUnit, r: Runnable): Cancelable = {
    val scheduled = scheduledExecutor.scheduleAtFixedRate(r, initialDelay, period, unit)
    Cancelable(scheduled.cancel(false))
  }

  /** Runs `runnable` immediately on the wrapped execution context. */
  def execute(runnable: Runnable): Unit =
    executionContext.execute(runnable)

  /** Forwards uncaught errors to the configured reporter. */
  def reportFailure(t: Throwable): Unit =
    reporter.reportFailure(t)

  val env = Environment(512, Platform.JVM)
}

object AsyncScheduler {
  /** Builds an [[AsyncScheduler]] from its three collaborators. */
  def apply(schedulerService: ScheduledExecutorService, ec: ExecutionContext,
    reporter: UncaughtExceptionReporter): AsyncScheduler =
    new AsyncScheduler(schedulerService, ec, reporter)
}
virtualirfan/monifu
core/jvm/src/main/scala/monifu/concurrent/schedulers/AsyncScheduler.scala
Scala
apache-2.0
3,086
package com.cloudray.scalapress.plugin.listings

import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import com.cloudray.scalapress.plugin.ecommerce._
import org.mockito.{ArgumentCaptor, Mockito}
import com.cloudray.scalapress.settings.{Installation, InstallationDao}
import com.cloudray.scalapress.plugin.ecommerce.domain.Order
import com.cloudray.scalapress.plugin.listings.email.ListingCustomerNotificationService
import com.cloudray.scalapress.plugin.listings.domain.{ListingsPlugin, ListingPackage}
import com.cloudray.scalapress.item.{ItemDao, Item}
import com.cloudray.scalapress.payments.{Transaction, TransactionDao}
import com.cloudray.scalapress.framework.ScalapressContext

/** @author Stephen Samuel */
class ListingCallbackProcessorTest extends FunSuite with OneInstancePerTest with MockitoSugar {

  // Fixtures are rebuilt for every test (OneInstancePerTest), so the tests
  // that mutate `listing` below cannot leak state into each other.

  // A payment transaction with a known id.
  val tx = new Transaction
  tx.id = 12

  // A disabled listing whose package charges a 15-unit fee.
  val listing = new Item
  listing.id = 96
  listing.status = Item.STATUS_DISABLED
  listing.name = "big horse for sale"
  listing.listingPackage = new ListingPackage
  listing.listingPackage.id = 5472
  listing.listingPackage.fee = 15

  val installation = new Installation
  val plugin = new ListingsPlugin

  // The processor under test, wired with mocked DAOs and services.
  val callback = new ListingCallbackProcessor
  callback.context = new ScalapressContext
  callback.context.transactionDao = mock[TransactionDao]
  callback.listingsPluginDao = mock[ListingsPluginDao]
  Mockito.when(callback.listingsPluginDao.get).thenReturn(plugin)
  callback.context.installationDao = mock[InstallationDao]
  Mockito.when(callback.context.installationDao.get).thenReturn(installation)
  callback.context.itemDao = mock[ItemDao]
  callback.orderDao = mock[OrderDao]
  callback.listingCustomerNotificationService = mock[ListingCustomerNotificationService]

  test("invoking with a string looks up the listing by id") {
    Mockito.when(callback.context.itemDao.find(123456)).thenReturn(listing)
    callback.callback(tx, "123456")
    Mockito.verify(callback.context.itemDao).find(123456)
  }

  test("given a tx then it is added to order") {
    callback.callback(Some(tx), listing)
    // Capture the Order passed to the DAO and inspect its payments.
    val captor = ArgumentCaptor.forClass(classOf[Order])
    Mockito.verify(callback.orderDao, Mockito.atLeastOnce).save(captor.capture)
    assert(captor.getValue.payments.contains(tx))
  }

  test("if a listing package is free then verify no transaction is saved") {
    listing.listingPackage.fee = 0
    callback.callback(Some(tx), listing)
    Mockito.verify(callback.context.transactionDao, Mockito.never).save(tx)
  }

  test("if the callback specifies no tx then verify no transaction is saved") {
    callback.callback(None, listing)
    Mockito.verify(callback.context.transactionDao, Mockito.never).save(tx)
  }

  test("if listing package is auto publish then change listing status to live") {
    listing.listingPackage.autoPublish = true
    callback.callback(None, listing)
    assert(listing.status == Item.STATUS_LIVE)
  }

  test("if listing package is not auto publish then do not change listing status") {
    listing.listingPackage.autoPublish = false
    callback.callback(None, listing)
    assert(listing.status == Item.STATUS_DISABLED)
  }

  // The _order helper builds the order directly, without the full callback flow.
  test("order status is updated to paid") {
    val order = callback._order(listing)
    assert(Order.STATUS_PAID === order.status)
  }

  test("order uses internal ip") {
    val order = callback._order(listing)
    assert("127.0.0.1" === order.ipAddress)
  }

  test("order line is added from the listing details") {
    val order = callback._order(listing)
    val line = order.sortedLines(0)
    assert(line.description.contains("#" + listing.id))
    assert(listing.listingPackage.fee === line.price)
  }

  test("emails are sent using the listing") {
    callback.callback(Some(tx), listing)
    Mockito.verify(callback.listingCustomerNotificationService).send(listing)
  }
}
vidyacraghav/scalapress
src/test/scala/com/cloudray/scalapress/plugin/listings/ListingCallbackProcessorTest.scala
Scala
apache-2.0
4,049
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalactic private[scalactic] trait EquiSets[T] { thisEquiSets => val equivalence: Equivalence[T] class EquiSet[T] private (underlying: Vector[T]) { def isEmpty: Boolean = underlying.isEmpty def size: Int = underlying.size // def union[E](that: thisEquiSets.EquiSet[T]) = 1 } object EquiSet { def empty: EquiSet[T] = new EquiSet(Vector.empty) def apply(elems: T*): EquiSet[T] = new EquiSet(Vector(elems: _*)) } }
travisbrown/scalatest
src/main/scala/org/scalactic/EquiSets.scala
Scala
apache-2.0
1,066
package net.walend.intro2scala

import net.walend.present.{TextLine, CodeBlock, LinkTextLine, SimpleSlide, Style}
import net.walend.present.Shortcuts._

/**
 * Slide definitions for the Spray.io section of the presentation.
 *
 * @author dwalend
 * @since v0.1.2
 */
object Spray {

  // Overview: what spray is and where it is heading (akka-http).
  val SprayIntro = SimpleSlide("SprayIntro",
    t("Spray.io","http://spray.io/introduction/what-is-spray/"),
    l1("A Library For Web Micro Services and Clients"),
    l2("Asyncrhonous, Actor-Based, NIO-Based, Fast, Lightweight, Modular, and Testable","http://spray.io/introduction/what-is-spray/"),
    l2("A Spray HttpService Builds an HttpResponse for an HttpRequest Using a Route"),
    l2("A Spray Service Might Use All IO or All Cores With a Gentle Degradation"),
    l2("Timeout 500 Status Without Systemic Failure"),
    l1("Spray Provides Domain-Specific Languages"),
    l2("Make HttpRequests in Clients"),
    l2("Respond to HttpRequests in Micro Servers"),
    l1("Next Version to be Renamed Akka-Http, Built on Akka Streams","http://typesafe.com/blog/typesafe-gets-sprayed"),
    l2("Play2's Internals to be Rewritten to Use Akka-Http","http://typesafe.com/blog/typesafe-gets-sprayed")
  )

  val SprayRoute = SimpleSlide("SprayRoute",
    t("Spray Route"),
    TextLine("A Spray Route is a Function That Takes a RequestContext",Style.HeadLine),
    TextLine("A RequestConext is an HttpRequest Plus Odd Bits",Style.SupportLine),
    TextLine("Route Does Not Return Anything, But Can Send an HttpResponse to an Akka Actor",Style.SupportLine),
    TextLine("Routes Are Made From Directives",Style.TertiaryLine),
    CodeBlock("""abstract class Directive[L <: shapeless.HList] { self =>
                | def happly(f: L => Route): Route""".stripMargin),
    TextLine("A Shapeless HList is ...",Style.TertiaryLine),
    TextLine("... Try the Route DSL and See What Happens",Style.TertiaryLine)
  )

  val SprayRouteDsl = SimpleSlide("SprayRouteDsl",
    t("Spray Routing Provides a DSL"),
    TextLine("(Conceptually) Converts an HttpRequest Into an HttpResponse",Style.SupportLine),
    CodeBlock("https://datasteward.example.edu/researcher/topics?skip=10&limit=5&sortBy=name&sortDirection=ascending"),
    CodeBlock(""" lazy val route:Route =
                | staticResources ~
                | logRequestResponse("route",Logging.DebugLevel) { authenticatedUser }
                | }
                |
                | lazy val authenticatedUser:Route = authenticate(UserAuthenticator.basicUserAuthenticator) { user =>
                | pathPrefix("qep"){qepRoute(user)} ~
                | pathPrefix("researcher"){researcherRoute(user)} ~
                | pathPrefix("steward"){stewardRoute(user)}
                | }
                |
                | def researcherRoute(user:User):Route = authorize(UserAuthenticator.authorizeResearcher(user)) {
                | path("topics") {getUserTopics(user)} ~
                | path("queryHistory") {getUserQueryHistory(Some(user))} ~
                | path("requestTopicAccess") {requestTopicAccess(user) }
                | }
                |
                | def getUserTopics(user:User):Route = get {
                | matchQueryParameters(Some(user)) {queryParameters:QueryParameters =>
                | val researchersTopics =
                | StewardDatabase.db.selectTopicsForResearcher(queryParameters)
                |
                | complete(researchersTopics)
                | }
                | }
                |""".stripMargin)
  )

  val SprayDirective = SimpleSlide("SprayDirective",
    t("A Spray Directive"),
    TextLine("Tried the parameters() Directive to Create New QueryParameters",Style.TertiaryLine),
    CodeBlock(""" def getUserTopics(userId:UserId):Route = get {
                | parameters(('userName.?,'state.?,'skip.as[Int].?,'limit.as[Int].?,'sortBy.as[String].?,'sortDirection.as[String].?).as[QueryParameters]) {
                | queryParameters =>
                | val researchersTopics = StewardDatabase.db.selectTopicsForResearcher(queryParameters)
                | complete(researchersTopics)
                | }
                | }
                |""".stripMargin),
    TextLine("But It Didn't Quite Fit",Style.TertiaryLine),
    TextLine("User Name Can Come In Through Authentication (Researchers) or Prefix (Steward)",Style.TertiaryLine),
    TextLine("TopicState is an Enumeration, Needs Error Checking",Style.TertiaryLine),
    TextLine("Didn't Want to Cut-Paste That Much Code",Style.TertiaryLine),
    CodeBlock(""" import shapeless.{:: => shapelessConcat}
                | case class matchQueryParameters(researcherId:Option[UserId] = None) extends Directive[QueryParameters shapelessConcat HNil] {
                | import spray.routing.directives.ParameterDirectives._
                | import spray.routing.directives.RouteDirectives.complete
                | import spray.routing.directives.RespondWithDirectives.respondWithStatus
                |
                | override def happly(f: (shapelessConcat[QueryParameters, HNil]) => Route): Route = {
                | parameters('state.?,'skip.as[Int].?,'limit.as[Int].?,'sortBy.as[String].?,'sortDirection.as[String].?) { (stateStringOption,skipOption,limitOption,sortByOption,sortOption) =>
                |
                | val stateTry = TopicState.stateForStringOption(stateStringOption)
                | stateTry match {
                | case Success(stateOption) => {
                | val qp = QueryParameters(researcherId,
                | stateOption,
                | skipOption,
                | limitOption,
                | sortByOption,
                | SortOrder.sortOrderForStringOption(sortOption))
                | f(shapelessConcat(qp,HNil))
                | }
                | case Failure(ex) => badStateRoute(stateStringOption)
                | }
                | }
                | }
                |
                | def badStateRoute(stateStringOption:Option[String]):Route = {
                | respondWithStatus(StatusCodes.UnprocessableEntity) {
                | complete(s"State ${stateStringOption.getOrElse(s"$stateStringOption (stateStringOption should never be None at this point)")} unknown. Please specify one of ${TopicState.namesToStates.keySet}")
                | }
                | }
                | }""".stripMargin)
  )

  val SprayNoDirective = SimpleSlide("SprayNoDirective",
    t("Better With a Higher Order Function"),
    CodeBlock(""" def matchQueryParameters(userName: Option[UserName])(parameterRoute:QueryParameters => Route): Route = {
                |
                | parameters('state.?,'skip.as[Int].?,'limit.as[Int].?,'sortBy.as[String].?,'sortDirection.as[String].?,'minDate.as[Date].?,'maxDate.as[Date].?) {
                | (stateStringOption,skipOption,limitOption,sortByOption,sortOption,minDate,maxDate) =>
                |
                | val stateTry = TopicState.stateForStringOption(stateStringOption)
                | stateTry match {
                | case Success(stateOption) =>
                | val qp = QueryParameters(userName,
                | stateOption,
                | skipOption,
                | limitOption,
                | sortByOption,
                | SortOrder.sortOrderForStringOption(sortOption),
                | minDate,
                | maxDate
                | )
                |
                | parameterRoute(qp)
                |
                | case Failure(ex) => badStateRoute(stateStringOption)
                | }
                | }
                | }
                |""".stripMargin),
    CodeBlock(""" def getUserTopics(researcherId:UserName):Route = get {
                | //lookup topics for this user in the db
                | matchQueryParameters(Some(researcherId)){queryParameters:QueryParameters =>
                | val researchersTopics = StewardDatabase.db.selectTopicsForResearcher(queryParameters)
                | complete(researchersTopics)
                | }
                | }
                |""".stripMargin)
  )

  val ToAkkaHttp = SimpleSlide("ToAkkaHttp",
    t("Transition to Akka-Http"),
    l1("A Route Really Does Convert an HttpRequestContext Into a Promised HttpResponse."),
    l1("Akka-Http doesn't use Shapeless in directives anymore")
    //todo code clip of the new type of directive
  )

  // All slides of this section, in presentation order.
  val slides = Seq(SprayIntro,SprayRoute,SprayRouteDsl,SprayDirective,SprayNoDirective,ToAkkaHttp)
}
dwalend/ScalaJSPresent
src/main/scala/net/walend/intro2scala/Spray.scala
Scala
apache-2.0
8,986
package org.jetbrains.sbt.project import java.io.File import com.intellij.openapi.util.io.FileUtil import org.jetbrains.sbt.project.structure.{JvmOpts, SbtOpts} import org.junit.Test import org.junit.Assert._ class JvmOptsTest { private val input = """ |# My jvm options |-Xmx2G |-Dhoodlump=bloom """.stripMargin private val expected = Seq( "-Xmx2G", "-Dhoodlump=bloom" ) @Test def testLoad(): Unit = { val optsDir = FileUtil.createTempDirectory("jvmOptsTest","",true) val optsFile = new File(optsDir,".jvmopts") FileUtil.writeToFile(optsFile, input) val opts = JvmOpts.loadFrom(optsDir) assertEquals(expected, opts) } }
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/sbt/project/JvmOptsTest.scala
Scala
apache-2.0
693
package simplez import simplez._ import simplez.std.future._ import simplez.std.option._ import simplez.std.list._ import org.specs2.mutable._ import scala.concurrent._ import scala.concurrent.ExecutionContext.Implicits._ class FunctorSpec extends Specification { val C = Functor[Future].compose[Option] val P = Functor[List].product[Option] "A functor" should { "be composable" in { C.map(Future.successful { Option(2) })(_ * 3) must beSome(6).await C.map(Future.successful { None: Option[Int] })(_ * 3) must beNone.await } """be "productable"""" in { P.map((1 :: 2 :: 3 :: Nil, Some(3)))(_ * 3) must beEqualTo((3 :: 6 :: 9 :: Nil, Some(9))) } } }
inoio/simplez
main/src/test/scala/simplez/FunctorSpec.scala
Scala
bsd-2-clause
695
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy

import java.util.concurrent.CountDownLatch

import scala.collection.JavaConverters._

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.internal.Logging
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.network.TransportContext
import org.apache.spark.network.netty.SparkTransportConf
import org.apache.spark.network.sasl.SaslServerBootstrap
import org.apache.spark.network.server.{TransportServer, TransportServerBootstrap}
import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler
import org.apache.spark.network.util.TransportConf
import org.apache.spark.util.{ShutdownHookManager, Utils}

/**
 * Provides a server from which Executors can read shuffle files (rather than reading directly from
 * each other), to provide uninterrupted access to the files in the face of executors being turned
 * off or killed.
 *
 * Optionally requires SASL authentication in order to read. See [[SecurityManager]].
 */
private[deploy]
class ExternalShuffleService(sparkConf: SparkConf, securityManager: SecurityManager)
  extends Logging {

  // Metrics for this service; registered and started in start().
  protected val masterMetricsSystem =
    MetricsSystem.createMetricsSystem("shuffleService", sparkConf, securityManager)

  // Whether the service should run at all; consulted by startIfEnabled().
  private val enabled = sparkConf.getBoolean("spark.shuffle.service.enabled", false)
  // TCP port the transport server binds to.
  private val port = sparkConf.getInt("spark.shuffle.service.port", 7337)

  // SASL is required for reads exactly when authentication is enabled.
  private val useSasl: Boolean = securityManager.isAuthenticationEnabled()

  private val transportConf =
    SparkTransportConf.fromSparkConf(sparkConf, "shuffle", numUsableCores = 0)
  private val blockHandler = newShuffleBlockHandler(transportConf)
  private val transportContext: TransportContext =
    new TransportContext(transportConf, blockHandler, true)

  // The running server; null until start() succeeds, reset to null by stop().
  private var server: TransportServer = _

  private val shuffleServiceSource = new ExternalShuffleServiceSource(blockHandler)

  /** Create a new shuffle block handler. Factored out for subclasses to override. */
  protected def newShuffleBlockHandler(conf: TransportConf): ExternalShuffleBlockHandler = {
    new ExternalShuffleBlockHandler(conf, null)
  }

  /** Starts the external shuffle service if the user has configured us to. */
  def startIfEnabled() {
    if (enabled) {
      start()
    }
  }

  /** Start the external shuffle service */
  def start() {
    require(server == null, "Shuffle server already started")
    logInfo(s"Starting shuffle service on port $port with useSasl = $useSasl")
    // Add a SASL bootstrap only when authentication is on.
    val bootstraps: Seq[TransportServerBootstrap] =
      if (useSasl) {
        Seq(new SaslServerBootstrap(transportConf, securityManager))
      } else {
        Nil
      }
    server = transportContext.createServer(port, bootstraps.asJava)

    masterMetricsSystem.registerSource(shuffleServiceSource)
    masterMetricsSystem.start()
  }

  /** Clean up all shuffle files associated with an application that has exited. */
  def applicationRemoved(appId: String): Unit = {
    blockHandler.applicationRemoved(appId, true /* cleanupLocalDirs */)
  }

  // Stops the transport server, if running; safe to call when already stopped.
  def stop() {
    if (server != null) {
      server.close()
      server = null
    }
  }
}

/**
 * A main class for running the external shuffle service.
 */
object ExternalShuffleService extends Logging {
  @volatile
  private var server: ExternalShuffleService = _

  // Released by the shutdown hook so main() can return.
  private val barrier = new CountDownLatch(1)

  def main(args: Array[String]): Unit = {
    main(args, (conf: SparkConf, sm: SecurityManager) => new ExternalShuffleService(conf, sm))
  }

  /** A helper main method that allows the caller to call this with a custom shuffle service. */
  private[spark] def main(
      args: Array[String],
      newShuffleService: (SparkConf, SecurityManager) => ExternalShuffleService): Unit = {
    Utils.initDaemon(log)
    val sparkConf = new SparkConf
    Utils.loadDefaultSparkProperties(sparkConf)
    val securityManager = new SecurityManager(sparkConf)

    // we override this value since this service is started from the command line
    // and we assume the user really wants it to be running
    sparkConf.set("spark.shuffle.service.enabled", "true")
    server = newShuffleService(sparkConf, securityManager)
    server.start()

    logDebug("Adding shutdown hook") // force eager creation of logger
    ShutdownHookManager.addShutdownHook { () =>
      logInfo("Shutting down shuffle service.")
      server.stop()
      barrier.countDown()
    }

    // keep running until the process is terminated
    barrier.await()
  }
}
sh-cho/cshSpark
deploy/ExternalShuffleService.scala
Scala
apache-2.0
5,276
/*
 * Copyright 2015 Webtrends (http://www.webtrends.com)
 *
 * See the LICENCE.txt file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.webtrends.harness.component.zookeeper

import akka.actor.{ActorSystem, Identify}
import akka.pattern.ask
import akka.testkit.TestKit
import akka.util.Timeout
import com.webtrends.harness.component.zookeeper.config.ZookeeperSettings
import com.webtrends.harness.component.zookeeper.discoverable.DiscoverableService
import org.apache.curator.test.TestingServer
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.curator.x.discovery.UriSpec
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.time.NoTimeConversions

import scala.concurrent.Await
import scala.concurrent.duration._

// Integration-style spec: registers and resolves a service against an
// in-process ZooKeeper (curator TestingServer).
class DiscoverableServiceSpec
  extends SpecificationWithJUnit with NoTimeConversions {

  val path = "/discovery/test"
  val testServiceName = "TestService"
  // Embedded ZooKeeper instance; closed in the final step below.
  val zkServer = new TestingServer()
  implicit val system = ActorSystem("test", loadConfig)

  lazy val zkActor = system.actorOf(ZookeeperActor.props(
    ZookeeperSettings(system.settings.config.getConfig("wookiee-zookeeper"))))

  implicit val to = Timeout(2 seconds)
  // Block until the actor answers an Identify, i.e. it has been created.
  Await.result(zkActor ? Identify("xyz123"), 2 seconds)

  lazy val service = DiscoverableService()

  // NOTE(review): fixed sleep, presumably waiting for the ZooKeeper connection
  // to settle — inherently flaky; consider polling for readiness instead.
  Thread.sleep(5000)

  // Examples must run in declaration order: registration precedes lookup.
  sequential

  "The discoverable service" should {

    " make a service discoverable " in {
      val res = Await.result(
        service.makeDiscoverable(path, "d3f2248f-2652-4ce5-9caa-5ea2ed28b1a5", testServiceName,
          None, 2552, new UriSpec(s"akka.tcp://server@[Server]:2552/$testServiceName")),
        1000 milliseconds)
      res must be equalTo true
    }

    " get an instance of a discoverable service" in {
      val res2 = Await.result(service.getInstance(path, testServiceName), 1000 milliseconds)
      res2.getName must be equalTo testServiceName
    }
  }

  // Tear down the actor system and the embedded ZooKeeper after all examples.
  step {
    TestKit.shutdownActorSystem(system)
    zkServer.close
  }

  // Test configuration pointing wookiee-zookeeper at the embedded server.
  def loadConfig: Config = {
    ConfigFactory.parseString("""
      wookiee-zookeeper {
        quorum = "%s"
      }
      """.format(zkServer.getConnectString)
    ).withFallback(ConfigFactory.load()).resolve
  }
}
mjwallin1/wookiee-zookeeper
src/test/scala/com/webtrends/harness/component/zookeeper/DiscoverableServiceSpec.scala
Scala
apache-2.0
2,796
package com.emotioncity.soriento.testmodels import com.emotioncity.soriento.ODocumentReader import com.emotioncity.soriento.annotations.Embedded import com.orientechnologies.orient.core.record.impl.ODocument import com.emotioncity.soriento.RichODocumentImpl._ /** * Created by stream on 31.03.15. */ case class Home(name: String, @Embedded family: Family)
dimparf/Soriento
src/test/scala/com/emotioncity/soriento/testmodels/Home.scala
Scala
apache-2.0
362
class A { private val x: List[Int] = List(1) def foo = x.head // foo inferred type is this.x.scala$collection$immutable$List$$A } class B extends A { foo }
som-snytt/dotty
tests/pos/i1723.scala
Scala
apache-2.0
163
package com.twitter.scrooge.java_generator import apache_java_thrift.{ConstructorRequiredStruct, DeepValidationStruct, DeepValidationUnion} import com.github.mustachejava.Mustache import com.twitter.scrooge.ast._ import com.twitter.scrooge.frontend.{ResolvedDocument, TypeResolver, _} import com.twitter.scrooge.testutil.Spec import com.twitter.scrooge.testutil.Utils.getFileContents import com.twitter.util.Try import java.util.{EnumSet, List => JList, Map => JMap, Set => JSet} import org.apache.thrift.protocol.{TBinaryProtocol, TField, TProtocolException, TStruct, TType} import org.apache.thrift.transport.TMemoryBuffer import scala.collection.JavaConverters._ import scala.collection.concurrent.TrieMap import thrift.apache_java_test._ class ApacheJavaGeneratorSpec extends Spec { object TestThriftResourceImporter extends Importer { val canonicalPaths: Seq[String] = Nil def lastModified(filename: String): Option[Long] = None def apply(filename: String): Option[FileContents] = Try(getFileContents("test_thrift/" + filename)) .map(data => FileContents(this, data, Some(filename))) .toOption } def generateDoc(str: String): Document = { val importer = TestThriftResourceImporter val parser = new ThriftParser(importer, true) val doc = parser.parse(str, parser.document) TypeResolver()(doc).document } val templateCache = new TrieMap[String, Mustache] def getGenerator(doc0: Document): ApacheJavaGenerator = new ApacheJavaGenerator( ResolvedDocument(doc0, TypeResolver()), "thrift", templateCache ) "Generator" should { System.setProperty("mustache.debug", "true") "populate enum controller" in { val doc = generateDoc(getFileContents("test_thrift/enum.thrift")) val controller = new EnumController(doc.enums(0), Set.empty, getGenerator(doc), doc.namespace("java")) controller.name must be("test") controller.constants(0).last must be(false) controller.constants(1).last must be(true) controller.constants(0).name must be("foo") controller.namespace must be("com.twitter.thrift") } "use an 
EnumSet for a set of enums" in { val obj = new StructWithEnumSet() obj.getCodes must be(null) obj.setCodes(java.util.EnumSet.of(ReturnCode.Good)) obj.getCodes.isInstanceOf[EnumSet[ReturnCode]] must be(true) obj.getCodes.size must be(1) obj.getCodesWithDefault.isInstanceOf[EnumSet[ReturnCode]] must be(true) obj.getCodesWithDefault.size must be(1) val copy = new StructWithEnumSet(obj) copy.getCodes.isInstanceOf[EnumSet[ReturnCode]] must be(true) copy.getCodes.size must be(1) copy.getCodesWithDefault.isInstanceOf[EnumSet[ReturnCode]] must be(true) copy.getCodesWithDefault.size must be(1) val prot = new TBinaryProtocol(new TMemoryBuffer(64)) obj.write(prot) val decoded = new StructWithEnumSet() decoded.read(prot) decoded.getCodes.isInstanceOf[EnumSet[ReturnCode]] must be(true) } "constructor required fields" should { "not be optional in the contructor with args" in { val struct = new ConstructorRequiredStruct( "test", 3, 4 ) struct.getConstructionRequiredField must be(3) struct.isSetConstructionRequiredField must be(true) } } "validate" should { "throw an exception when missing a field" in { val struct = new ConstructorRequiredStruct( null, 3, 4 ) val exception = intercept[TProtocolException](struct.validate()) val expected = "Required field 'requiredField' was not present! 
Struct: " + struct.toString exception.getMessage must be(expected) } "not throw when required fields are present" in { val struct = new ConstructorRequiredStruct( "present", 3, 4 ) struct.validate() } } "validateNewInstance" should { val validInstance = new ConstructorRequiredStruct() .setOptionalField(1) .setRequiredField("test") .setConstructionRequiredField(3) .setDefaultRequirednessField(4) val missingRequiredFieldInstance = new ConstructorRequiredStruct() .setOptionalField(1) .setConstructionRequiredField(3) .setDefaultRequirednessField(4) val missingConstructionRequiredFieldInstance = new ConstructorRequiredStruct() .setOptionalField(1) .setRequiredField("test") .setDefaultRequirednessField(4) val constructionRequiredErrorMessage = "Construction required field 'constructionRequiredField' in type 'ConstructorRequiredStruct' was not present." val requiredErrorMessage = "Required field 'requiredField' in type 'ConstructorRequiredStruct' was not present." def validateMissingConstructionRequiredField( struct: DeepValidationStruct, number: Int = 1 ): Unit = { val result = DeepValidationStruct.validateNewInstance(struct).asScala result must have size number result.foreach { errorMessage => errorMessage must be(constructionRequiredErrorMessage) } } def buildDeepValidationStruct( listField: JList[ConstructorRequiredStruct] = Seq(validInstance).asJava, setField: JSet[ConstructorRequiredStruct] = Set(validInstance).asJava, requiredField: ConstructorRequiredStruct = validInstance, inMapKey: JMap[ConstructorRequiredStruct, String] = Map(validInstance -> "value").asJava, inMapValue: JMap[String, ConstructorRequiredStruct] = Map("value" -> validInstance).asJava, crazyEmbedding: JMap[JSet[JList[ConstructorRequiredStruct]], JSet[ JList[ConstructorRequiredStruct] ]] = Map(Set(Seq(validInstance).asJava).asJava -> Set(Seq(validInstance).asJava).asJava).asJava, optionalField: ConstructorRequiredStruct = validInstance ): DeepValidationStruct = { new DeepValidationStruct( 
listField, setField, requiredField, inMapKey, inMapValue, crazyEmbedding ).setOptionalConstructorRequiredStruct(optionalField) } "return an empty list when required fields are present" in { val result = ConstructorRequiredStruct.validateNewInstance(validInstance).asScala result must be(List.empty) } "return a MissingRequiredField when missing a required field" in { val result = ConstructorRequiredStruct.validateNewInstance(missingRequiredFieldInstance).asScala result must be(List(requiredErrorMessage)) } "return a MissingConstructionRequiredField when missing a required field" in { val result = ConstructorRequiredStruct .validateNewInstance(missingConstructionRequiredFieldInstance).asScala result must be(List(constructionRequiredErrorMessage)) } "return an empty list when DeepValidationStruct is completely valid" in { val struct = buildDeepValidationStruct() val result = DeepValidationStruct.validateNewInstance(struct).asScala result must be(List.empty) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid requiredConstructorRequiredStruct" in { val struct = buildDeepValidationStruct(requiredField = missingConstructionRequiredFieldInstance) validateMissingConstructionRequiredField(struct) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid optionalConstructorRequiredStruct" in { val struct = buildDeepValidationStruct( optionalField = missingConstructionRequiredFieldInstance ) validateMissingConstructionRequiredField(struct) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid inList" in { val struct = buildDeepValidationStruct( listField = List(missingConstructionRequiredFieldInstance).asJava ) validateMissingConstructionRequiredField(struct) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid inSet" in { val struct = buildDeepValidationStruct( setField = Set(missingConstructionRequiredFieldInstance).asJava ) 
validateMissingConstructionRequiredField(struct) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid inMapKey" in { val struct = buildDeepValidationStruct( inMapKey = Map(missingConstructionRequiredFieldInstance -> "value").asJava ) validateMissingConstructionRequiredField(struct) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid inMapValue" in { val struct = buildDeepValidationStruct( inMapValue = Map("key" -> missingConstructionRequiredFieldInstance).asJava ) validateMissingConstructionRequiredField(struct) } "return a MissingConstructionRequiredField when DeepValidationStruct has invalid crazyEmbedding" in { val struct = buildDeepValidationStruct( crazyEmbedding = Map( Set(Seq(missingConstructionRequiredFieldInstance).asJava).asJava -> Set(Seq(missingConstructionRequiredFieldInstance).asJava).asJava ).asJava ) validateMissingConstructionRequiredField(struct, 2) } "return an empty list when DeepValidationUnion has valid constructorRequiredStruct" in { val struct = new DeepValidationUnion() struct.setConstructorRequiredStruct(validInstance) val result = DeepValidationUnion.validateNewInstance(struct).asScala result must be(List.empty) } "return a MissingConstructionRequiredField when DeepValidationUnion has invalid constructorRequiredStruct" in { val struct = new DeepValidationUnion() struct.setConstructorRequiredStruct(missingConstructionRequiredFieldInstance) val result = DeepValidationUnion.validateNewInstance(struct).asScala result must be(List(constructionRequiredErrorMessage)) } "return an empty list when DeepValidationUnion has an unrelated field" in { val struct = new DeepValidationUnion() struct.setOtherField(1L) val result = DeepValidationUnion.validateNewInstance(struct).asScala result must be(List.empty) } "return an error when DeepValidationUnion has no field set" in { val struct = new DeepValidationUnion() val result = DeepValidationUnion.validateNewInstance(struct).asScala result must 
be(List("No fields set for union type 'DeepValidationUnion'.")) } } "passthrough fields" should { "passthroughStruct" in { val pt2 = new PassThrough2(1, new PassThroughStruct(), new PassThroughStruct()) val pt1 = { val protocol = new TBinaryProtocol(new TMemoryBuffer(256)) pt2.write(protocol) val temp = new PassThrough() temp.read(protocol) temp } val pt2roundTripped = { val protocol = new TBinaryProtocol(new TMemoryBuffer(256)) pt1.write(protocol) val temp = new PassThrough2() temp.read(protocol) temp } pt2roundTripped must be(pt2) } "passthroughEnum" in { val pt8 = new PassThrough8(TwoEnum.Two) val pt7 = { val protocol = new TBinaryProtocol(new TMemoryBuffer(256)) pt8.write(protocol) val temp = new PassThrough7() temp.read(protocol) temp } val pt8roundTripped = { val protocol = new TBinaryProtocol(new TMemoryBuffer(256)) pt7.write(protocol) val temp = new PassThrough8() temp.read(protocol) temp } pt8roundTripped must be(pt8) } "passthroughUnion" in { val pthu3 = { val temp = new PassThroughUnion3() temp.setF2(new PassThrough2(1, new PassThroughStruct(), new PassThroughStruct())) temp } val pthu2 = { val protocol = new TBinaryProtocol(new TMemoryBuffer(256)) pthu3.write(protocol) val temp = new PassThroughUnion2() temp.read(protocol) temp } val pthu3roundTripped = { val protocol = new TBinaryProtocol(new TMemoryBuffer(256)) pthu2.write(protocol) val temp = new PassThroughUnion3() temp.read(protocol) temp } pthu3roundTripped must be(pthu3) } } "generate correctly escaped field annotation" in { val doc = generateDoc(getFileContents("test_thrift/annotations.thrift")) val gen = getGenerator(doc) val ctrl = new StructController(doc.structs(0), Set(), false, gen, doc.namespace("java")) gen.renderMustache("struct.mustache", ctrl).trim must include( "tmpFieldMap.put(\\"tag1\\", \\"\\\\\\"value\\\\\\"\\")") } "generate enum with fully-qualified class name" in { val doc = generateDoc(getFileContents("test_thrift/struct_field_with_same_name_and_type.thrift")) val gen = 
getGenerator(doc) val ctrl = new StructController(doc.structs(0), Set(), false, gen, doc.namespace("java")) gen.renderMustache("struct.mustache", ctrl).trim must include( "test_enum.OrganizationType.findByValue(iprot.readI32())") } } "Structs" should { "throw an error when the read field type is incorrect" in { val prot = new TBinaryProtocol(new TMemoryBuffer(10000)) prot.writeStructBegin(new TStruct("ConstructorRequiredStruct")) prot.writeFieldBegin(new TField("requiredField", TType.I64, 2)) prot.writeI64(4000) prot.writeFieldEnd() prot.writeFieldStop() prot.writeStructEnd() val ex = intercept[TProtocolException] { val readStruct = new ConstructorRequiredStruct() readStruct.read(prot) } ex.toString.contains("requiredField") must be(true) ex.toString.contains("actual=I64") must be(true) ex.toString.contains("expected=STRING") must be(true) } } }
twitter/scrooge
scrooge-generator-tests/src/test/scala/com/twitter/scrooge/java_generator/ApacheJavaGeneratorSpec.scala
Scala
apache-2.0
14,408
package example import org.scalatest.FunSuite import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner /** * This class implements a ScalaTest test suite for the methods in object * `Lists` that need to be implemented as part of this assignment. A test * suite is simply a collection of individual tests for some specific * component of a program. * * A test suite is created by defining a class which extends the type * `org.scalatest.FunSuite`. When running ScalaTest, it will automatically * find this class and execute all of its tests. * * Adding the `@RunWith` annotation enables the test suite to be executed * inside eclipse using the built-in JUnit test runner. * * You have two options for running this test suite: * * - Start the sbt console and run the "test" command * - Right-click this file in eclipse and chose "Run As" - "JUnit Test" */ @RunWith(classOf[JUnitRunner]) class ListsSuite extends FunSuite { /** * Tests are written using the `test` operator which takes two arguments: * * - A description of the test. This description has to be unique, no two * tests can have the same description. * - The test body, a piece of Scala code that implements the test * * The most common way to implement a test body is using the method `assert` * which tests that its argument evaluates to `true`. So one of the simplest * successful tests is the following: */ test("one plus one is two")(assert(1 + 1 == 2)) /** * In Scala, it is allowed to pass an argument to a method using the block * syntax, i.e. `{ argument }` instead of parentheses `(argument)`. * * This allows tests to be written in a more readable manner: */ test("one plus one is three?") { assert(1 + 1 == 2) // This assertion fails! Go ahead and fix it. } /** * One problem with the previous (failing) test is that ScalaTest will * only tell you that a test failed, but it will not tell you what was * the reason for the failure. The output looks like this: * * {{{ * [info] - one plus one is three? 
*** FAILED *** * }}} * * This situation can be improved by using a special equality operator * `===` instead of `==` (this is only possible in ScalaTest). So if you * run the next test, ScalaTest will show the following output: * * {{{ * [info] - details why one plus one is not three *** FAILED *** * [info] 2 did not equal 3 (ListsSuite.scala:67) * }}} * * We recommend to always use the `===` equality operator when writing tests. */ test("details why one plus one is not three") { assert(1 + 1 !== 3) // Fix me, please! } /** * In order to test the exceptional behavior of a methods, ScalaTest offers * the `intercept` operation. * * In the following example, we test the fact that the method `intNotZero` * throws an `IllegalArgumentException` if its argument is `0`. */ test("intNotZero throws an exception if its argument is 0") { intercept[IllegalArgumentException] { intNotZero(0) } } def intNotZero(x: Int): Int = { if (x == 0) throw new IllegalArgumentException("zero is not allowed") else x } /** * Now we finally write some tests for the list functions that have to be * implemented for this assignment. We fist import all members of the * `List` object. */ import Lists._ /** * We only provide two very basic tests for you. Write more tests to make * sure your `sum` and `max` methods work as expected. * * In particular, write tests for corner cases: negative numbers, zeros, * empty lists, lists with repeated elements, etc. * * It is allowed to have multiple `assert` statements inside one test, * however it is recommended to write an individual `test` statement for * every tested aspect of a method. 
*/ test("sum of a few numbers") { assert(sum(List(1,2,0)) === 3) } test("sum of a list with a single number") { assert(sum(List(5)) === 5) } test("sum of a empty list is zero") { assert(sum(List.empty) === 0) } test("max of a few numbers") { assert(max(List(3, 7, 2)) === 7) } test("max of a single number is itself") { assert(max(List(7)) === 7) } test("max of a empty list throws NoSuchElementException") { intercept[NoSuchElementException] { max(List.empty) } } }
mateusduboli/coursera-progfun
example/src/test/scala/example/ListsSuite.scala
Scala
unlicense
4,414
package edu.cmu.cs.oak.nodes object UndefNode extends DNode { override def toXml = <Undef /> override def ifdefy() = List("") override def toString() = "" override def isEmpty() = true override def getChildren(): Seq[DNode] = null }
smba/oak
edu.cmu.cs.oak/src/main/scala/edu/cmu/cs/oak/nodes/UndefNode.scala
Scala
lgpl-3.0
246
package models import reactivemongo.bson.BSONObjectID import play.api.libs.json.{JsObject, Json} import play.modules.reactivemongo.json.BSONFormats case class Group( _id: Option[BSONObjectID] = None, responded: Option[Boolean] = None, invitees: Seq[JsObject] = Seq.empty, comments: Option[String] = None ) object Group { private implicit val objectIdFormat = BSONFormats.BSONObjectIDFormat implicit val format = Json.format[Group] }
michaelahlers/team-awesome-wedding
app/models/Group.scala
Scala
mit
449
package chat.tox.antox.av import java.util.concurrent.TimeUnit import chat.tox.antox.av.CallEndReason.CallEndReason import chat.tox.antox.av.CameraFacing.CameraFacing import chat.tox.antox.tox.ToxSingleton import chat.tox.antox.utils.AntoxLog import chat.tox.antox.wrapper.{CallNumber, ContactKey} import im.tox.tox4j.av.data._ import im.tox.tox4j.av.enums.{ToxavCallControl, ToxavFriendCallState} import im.tox.tox4j.exceptions.ToxException import org.apache.commons.collections4.queue.CircularFifoQueue import rx.lang.scala.JavaConversions._ import rx.lang.scala.schedulers.NewThreadScheduler import rx.lang.scala.subjects.BehaviorSubject import rx.lang.scala.{Observable, Subject} import scala.collection.mutable.ArrayBuffer import scala.concurrent.duration.Duration import scala.util.Try final case class Call(callNumber: CallNumber, contactKey: ContactKey, incoming: Boolean) { private val friendStateSubject = BehaviorSubject[Set[ToxavFriendCallState]](Set.empty[ToxavFriendCallState]) private def friendState: Set[ToxavFriendCallState] = friendStateSubject.getValue // only describes self state private val selfStateSubject = BehaviorSubject[SelfCallState](SelfCallState.DEFAULT) def selfStateObservable: Observable[SelfCallState] = selfStateSubject.asJavaObservable def selfState = selfStateSubject.getValue // is video enabled in any way val videoEnabledObservable = selfStateObservable.map(state => state.sendingVideo || state.receivingVideo) private val endedSubject = Subject[CallEndReason]() // called only once, when the call ends with the reason it ended def endedObservable: Observable[CallEndReason] = endedSubject.asJavaObservable //only for outgoing audio private val samplingRate = SamplingRate.Rate48k //in Hz private val audioLength = AudioLength.Length20 //in milliseconds private val channels = AudioChannels.Stereo val audioBufferLength = 3 // in frames val videoBufferLength = 3 // in frames val defaultRingTime = Duration(30, TimeUnit.SECONDS) // ringing by default (call 
should only be created if it is ringing) private val ringingSubject = BehaviorSubject[Boolean](true) def ringingObservable: Observable[Boolean] = ringingSubject.asJavaObservable def ringing = ringingSubject.getValue var started = false var startTime: Duration = Duration(0, TimeUnit.MILLISECONDS) def duration: Duration = Duration(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - startTime //in milliseconds val enhancements: ArrayBuffer[CallEnhancement] = new ArrayBuffer() /** * Describes a state in which the call is not FINISHED or ERROR. * When the call is on hold or ringing (not yet answered) this will return true. */ def active: Boolean = isActive(friendState) && !selfState.ended private def isActive(state: Set[ToxavFriendCallState]): Boolean = { !state.contains(ToxavFriendCallState.FINISHED) && !state.contains(ToxavFriendCallState.ERROR) } def onHold: Boolean = friendState.isEmpty val audioCapture: AudioCapture = new AudioCapture(samplingRate.value, channels.value) val audioPlayer = new AudioPlayer(samplingRate.value, channels.value, audioBufferLength) private val videoFrameSubject = Subject[StridedYuvFrame]() def videoFrameObservable: Observable[StridedYuvFrame] = videoFrameSubject.onBackpressureDrop(_ => AntoxLog.debug("Dropped a video frame due to back-pressure.")) var cameraFrameBuffer: Option[CircularFifoQueue[NV21Frame]] = None // default value, not checked based on device capabilities private val cameraFacingSubject = BehaviorSubject[CameraFacing](CameraFacing.Front) def cameraFacingObservable: Observable[CameraFacing] = cameraFacingSubject.asJavaObservable private def frameSize = SampleCount(audioLength, samplingRate) private def logCallEvent(event: String): Unit = AntoxLog.debug(s"Call $callNumber belonging to $contactKey $event") // make sure the call ends eventually if it's still ringing endAfterTime(defaultRingTime) def startCall(sendingAudio: Boolean, sendingVideo: Boolean): Unit = { logCallEvent(s"started sending audio:$sendingAudio and 
video:$sendingVideo") ToxSingleton.toxAv.call( callNumber, if (sendingAudio) selfState.audioBitRate else BitRate.Disabled, if (sendingVideo) selfState.videoBitRate else BitRate.Disabled) selfStateSubject.onNext(selfState.copy(audioMuted = !sendingAudio, videoHidden = !sendingVideo)) } def answerCall(): Unit = { val sendingAudio: Boolean = true //always send audio val sendingVideo: Boolean = selfState.receivingVideo // only send video if we're receiving it logCallEvent(s"answered sending audio:$sendingAudio and video:$sendingVideo") ToxSingleton.toxAv.answer(callNumber, selfState.audioBitRate, selfState.videoBitRate) showFriendVideo() selfStateSubject.onNext(selfState.copy(audioMuted = !sendingAudio, videoHidden = !sendingVideo)) startCall() ringingSubject.onNext(false) } def onIncoming(audioEnabled: Boolean, videoEnabled: Boolean): Unit = { logCallEvent(s"incoming receiving audio:$audioEnabled and video:$videoEnabled") selfStateSubject.onNext(selfState.copy(receivingAudio = audioEnabled, receivingVideo = videoEnabled)) } //end the call after `ringTime` private def endAfterTime(ringTime: Duration): Unit = { Observable .timer(defaultRingTime) .subscribeOn(NewThreadScheduler()) .foreach(_ => { if (active && ringing) { val reason = if (incoming) { // call was missed CallEndReason.Missed } else { // call was unanswered CallEndReason.Unanswered } end(reason) } }) } def updateFriendState(state: Set[ToxavFriendCallState]): Unit = { logCallEvent(s"friend call state updated to $state") val answered: Boolean = friendState.isEmpty && isActive(state) && !incoming val ended: Boolean = !isActive(state) val newSelfState = selfState.copy( receivingAudio = state.contains(ToxavFriendCallState.SENDING_A), receivingVideo = state.contains(ToxavFriendCallState.SENDING_V) ) if (answered && !started) { startCall() ringingSubject.onNext(false) } else if (ended) { end() } else { if (newSelfState != selfState) selfStateSubject.onNext(newSelfState) } friendStateSubject.onNext(friendState) } 
private def startCall(): Unit = { assert(!started) started = true startTime = Duration(System.currentTimeMillis(), TimeUnit.MILLISECONDS) logCallEvent(event = s"started at time $startTime") new Thread(new Runnable { override def run(): Unit = { android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO) logCallEvent("audio send thread started") audioCapture.start() while (active) { val start = System.currentTimeMillis() if (selfState.sendingAudio) { try { ToxSingleton.toxAv.audioSendFrame(callNumber, audioCapture.readAudio(frameSize.value, channels.value), frameSize, channels, samplingRate) } catch { case e: ToxException[_] => AntoxLog.debug("Ignoring audio send frame exception.") } } val timeTaken = System.currentTimeMillis() - start if (timeTaken < audioLength.value.toMillis) Thread.sleep(audioLength.value.toMillis - timeTaken) } logCallEvent("audio send thread stopped") } }, "AudioSendThread").start() new Thread(new Runnable { override def run(): Unit = { logCallEvent("video send thread started ") while (active) { if (cameraFrameBuffer.isEmpty) Thread.sleep(100) val maybeCameraFrame = cameraFrameBuffer.flatMap(buffer => Option(buffer.poll())) maybeCameraFrame.foreach(cameraFrame => { if (active && selfState.sendingVideo && !ringing) { val startSendTime = System.currentTimeMillis() val yuvFrame = FormatConversions.nv21toYuv420(cameraFrame) //To log the delta time taken for encoding and sending in native libtoxcore val yuvConvertedTime = System.currentTimeMillis() try { ToxSingleton.toxAv.videoSendFrame(callNumber, yuvFrame.width, yuvFrame.height, yuvFrame.y, yuvFrame.u, yuvFrame.v) } catch { case e: ToxException[_] => AntoxLog.debug("Ignoring video send frame exception.") } println(s"sending frame took ${System.currentTimeMillis() - startSendTime}, libtoxcore took ${System.currentTimeMillis() - yuvConvertedTime}") } }) } } }, "VideoSendThread").start() audioPlayer.start() } def onAudioFrame(pcm: Array[Short], channels: AudioChannels, 
samplingRate: SamplingRate): Unit = { audioPlayer.bufferAudioFrame(pcm, channels.value, samplingRate.value) } def onVideoFrame(videoFrame: StridedYuvFrame): Unit = { videoFrameSubject.onNext(videoFrame) } def muteSelfAudio(): Unit = { selfStateSubject.onNext(selfState.copy(audioMuted = true)) ToxSingleton.toxAv.setAudioBitRate(callNumber, BitRate.Disabled) audioCapture.stop() } def unmuteSelfAudio(): Unit = { selfStateSubject.onNext(selfState.copy(audioMuted = false)) ToxSingleton.toxAv.setAudioBitRate(callNumber, selfState.audioBitRate) audioCapture.start() } def enableLoudspeaker(): Unit = { selfStateSubject.onNext(selfState.copy(loudspeakerEnabled = true)) } def disableLoudspeaker(): Unit = { selfStateSubject.onNext(selfState.copy(loudspeakerEnabled = false)) } def hideSelfVideo(): Unit = { selfStateSubject.onNext(selfState.copy(videoHidden = true)) } def showSelfVideo(): Unit = { selfStateSubject.onNext(selfState.copy(videoHidden = false)) } def muteFriendAudio(): Unit = { ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.MUTE_AUDIO) } def unmuteFriendAudio(): Unit = { ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.UNMUTE_AUDIO) } def hideFriendVideo(): Unit = { ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.HIDE_VIDEO) } def showFriendVideo(): Unit = { ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.SHOW_VIDEO) } def rotateCamera(): Unit = cameraFacingSubject.onNext(CameraFacing.swap(cameraFacingSubject.getValue)) def end(reason: CallEndReason = CallEndReason.Normal): Unit = { logCallEvent(s"ended reason:$reason") // only send a call control if the call wasn't ended unexpectedly if (reason != CallEndReason.Error) { Try(ToxSingleton.toxAv.callControl(callNumber, ToxavCallControl.CANCEL)) } selfStateSubject.onNext(selfState.copy(ended = true)) endedSubject.onNext(reason) endedSubject.onCompleted() onCallEnded() } private def onCallEnded(): Unit = { audioCapture.stop() cleanUp() } private def cleanUp(): Unit = { 
audioPlayer.cleanUp() audioCapture.cleanUp() } }
subliun/Antox
app/src/main/scala/chat/tox/antox/av/Call.scala
Scala
gpl-3.0
11,271
/*
 *
 *  o                        o     o   o         o
 *  |             o          |     |\ /|         | /
 *  |    o-o o--o    o-o  oo |     | O |  oo o-o OO   o-o o o
 *  |    | | |  | | |    | | |     |   | | | |   | \  | |  \ /
 *  O---oo-o o--O |  o-o o-o-o     o   o o-o-o   o  o o-o   o
 *              |
 *           o--o
 *  o--o              o               o--o       o    o
 *  |   |             |               |    o     |    |
 *  O-Oo   oo o-o   o-O o-o o-O-o     O-o    o-o |  o-O o-o
 *  |  \  | | |  | |  | | | | | |     |    | |-' | |  |  \
 *  o   o o-o-o  o  o-o o-o o o o     o    | o-o o  o-o o-o
 *
 *  Logical Markov Random Fields (LoMRF).
 *
 */

package lomrf.mln.inference

import lomrf.logic.TRUE
import lomrf.mln.model.{ AtomIdentityFunctionOps, EvidenceDB, MLN }
import lomrf.mln.model.mrf._
import lomrf.util.time._
import lomrf.util.logging.Implicits._
import AtomIdentityFunctionOps._
import lomrf.logic.AtomSignatureOps._
import gnu.trove.map.hash.TIntObjectHashMap
import optimus.algebra._
import optimus.optimization._
import optimus.algebra.AlgebraOps._
import scala.language.postfixOps
import lomrf.util.collection.trove.TroveConversions._
import lomrf.mln.inference.RoundingScheme.RoundUp
import optimus.optimization.enums.SolverLib.LpSolve
import optimus.optimization.enums.{ PreSolve, SolverLib }
import optimus.optimization.model.{ MPFloatVar, ModelSpec }
import spire.syntax.cfor._

/**
  * This is an implementation of an approximate MAP inference algorithm for MLNs using Integer Linear Programming.
  * The original implementation of the algorithm can be found in: [[http://alchemy.cs.washington.edu/code/]].
  * Details about the ILP algorithm can be found in: Tuyen N. Huynh and Raymond J. Mooney. Max-Margin Weight Learning for
  * Markov Logic Networks. In Proceedings of the European Conference on Machine Learning and Principles and Practice of
  * Knowledge Discovery in Databases (ECML-PKDD 2011), Vol. 2, pp. 81-96, 2011.
  *
  * @param mrf The ground Markov network
  * @param annotationDB Annotation database holding the ground truth values for non evidence
  *                     atoms. Required when performing loss augmented inference.
  * @param ilpRounding Rounding algorithm selection option (default is RoundUp)
  * @param ilpSolver Solver type selection option (default is LPSolve)
  */
final case class ILP(
    mrf: MRF,
    ilpSolver: SolverLib = LpSolve,
    ilpRounding: RoundingScheme = RoundUp,
    annotationDB: Option[EvidenceDB] = None) extends ModelSpec(ilpSolver) with MAPSolver {

  implicit val mln: MLN = mrf.mln

  /**
    * Translates the ground Markov network into a (relaxed) linear program, solves it,
    * and rounds any fractional solutions into a Boolean truth assignment.
    *
    * @return an [[lomrf.mln.model.mrf.MRFState]] holding the inferred MAP assignment
    */
  def infer: MRFState = {

    val sTranslation = System.currentTimeMillis()

    /* Hash maps containing pairs of unique literal keys to LP variables [y]
     * and unique clause ids to LP variables [z].
     */
    val literalLPVars = new TIntObjectHashMap[MPFloatVar]()
    val clauseLPVars = new TIntObjectHashMap[MPFloatVar]()

    /**
      * A collection of expressions of the equation that we aim to maximize.
      * Each expression has the following form:
      *
      * {{{ weight * LP variable}}}
      */
    var expressions = List[Expression]()

    val atomsIterator = mrf.atoms.iterator()

    // Step 1: Introduce variables for each ground atom
    while (atomsIterator.hasNext) {
      atomsIterator.advance()
      // atom ids may be negative (negated literals); the LP variable is keyed by magnitude
      val atomID = math.abs(atomsIterator.key())
      literalLPVars.put(atomID, MPFloatVar("y" + atomID, 0, 1))

      /* In case of loss augmented inference, Hamming distance is used which
       * is equivalent to adding 1 to the coefficient of ground atom y if the
       * true (annotated) value of y is FALSE and subtracting 1 from the
       * coefficient of y if the true value of y is TRUE.
       */
      if (annotationDB.isDefined) {
        logger.info("Running loss augmented inference...")
        val annotation = annotationDB.get(atomID.signature(mrf.mln))
        val loss = if (annotation(atomID) == TRUE) -1.0 else 1.0
        expressions ::= loss * literalLPVars.get(atomID)
      }
    }

    val constraintsIterator = mrf.constraints.iterator()

    while (constraintsIterator.hasNext) {
      constraintsIterator.advance()

      var constraints: List[Expression] = Nil

      // fetch the current constraint, i.e., current weighted ground clause or clique
      val constraint = constraintsIterator.value()

      logger.whenDebugEnabled {
        val decodedConstraint = constraint.decodeFeature(mrf.weightHard).getOrElse(logger.fatal(s"Cannot decode constraint $constraint"))
        logger.debug(s"Ground Clause: ${constraint.getWeight} $decodedConstraint")
      }

      // Step 1: Introduce variables for each ground atom and create possible constraints
      // Positive-weight (or hard) clauses keep positive literals as-is and complement
      // negated ones; negative-weight clauses are encoded with the opposite polarity.
      for (literal <- constraint.literals) {
        val atomID = math.abs(literal)
        val floatVar = literalLPVars.get(atomID)

        if ((constraint.getWeight > 0 || constraint.getWeight.isInfinite ||
          constraint.getWeight.isNaN || constraint.getWeight == mrf.weightHard) && literal > 0)
          constraints ::= floatVar
        else if ((constraint.getWeight > 0 || constraint.getWeight.isInfinite ||
          constraint.getWeight.isNaN || constraint.getWeight == mrf.weightHard) && literal < 0)
          constraints ::= (1 - floatVar)
        else if (constraint.getWeight < 0 && literal < 0)
          constraints ::= floatVar
        else
          constraints ::= (1 - floatVar)
      }

      logger.debug("Possible Constraints: [" + constraints.mkString(", ") + "]")

      val cid = constraint.id

      // Step 2: Create expressions for objective function (only for soft constraints)
      if (!constraint.getWeight.isInfinite && !constraint.getWeight.isNaN && constraint.getWeight != mrf.weightHard && constraint.getWeight != 0.0) {
        if (constraint.isUnit) {
          // a unit clause contributes its (signed) weight directly on its single atom variable
          expressions ::= {
            if (constraint.literals(0) > 0) constraint.getWeight * literalLPVars.get(math.abs(constraint.literals(0)))
            else (-constraint.getWeight) * literalLPVars.get(math.abs(constraint.literals(0)))
          }
        } else {
          // there is no case where the same clause is going to create another z variable, so use put not putIfAbsent
          clauseLPVars.put(cid, MPFloatVar("z" + cid, 0, 1))
          expressions ::= math.abs(constraint.getWeight) * clauseLPVars.get(cid)
        }
      }

      logger.debug("Expressions: [" + expressions.mkString(", ") + "]")

      // Step 3: Add constraints to the solver (don't introduce constraint for zero weighted constraints)
      if (constraint.isHardConstraint) {
        // hard clause: at least one literal must be satisfied
        add(sum(constraints) >:= 1)
        logger.debug(constraints.mkString(" + ") + " >= 1")
      } else if (!constraint.isUnit && constraint.getWeight != 0.0) {
        val clauseVar = clauseLPVars.get(cid)
        if (constraint.getWeight > 0) {
          add(sum(constraints) >:= clauseVar)
          logger.debug(constraints.mkString(" + ") + " >= " + clauseVar.symbol)
        } else {
          // negative weight: the clause variable is bounded by every literal individually
          for (c <- constraints) {
            add(c >:= clauseVar)
            logger.debug(c + " >= " + clauseVar.symbol)
          }
        }
      }
    }

    val eTranslation = System.currentTimeMillis()
    logger.info(msecTimeToText("Translation time: ", eTranslation - sTranslation))

    logger.info(
      "\nGround Atoms: " + mrf.numberOfAtoms +
        "\nAtom Variables: " + literalLPVars.size + " + Clauses Variables: " + clauseLPVars.size +
        " = " + (literalLPVars.size + clauseLPVars.size))

    val sSolver = System.currentTimeMillis()

    // Step 4: Optimize function subject to the constraints introduced
    maximize(sum(expressions))
    start(PreSolve.CONSERVATIVE)
    release()

    val eSolver = System.currentTimeMillis()
    logger.info(msecTimeToText("Solver time: ", eSolver - sSolver))

    logger.info(
      "\n=========================== Solution ===========================" +
        "\nAre constraints satisfied: " + checkConstraints() +
        "\nSolution status: " + status.toString +
        "\nObjective = " + objectiveValue)

    logger.whenDebugEnabled {
      literalLPVars.iterator.foreach {
        case (_: Int, v: MPFloatVar) =>
          logger.debug(v.symbol + " = " + v.value.getOrElse("Value does not exist for this ground atom variable!"))
      }
      clauseLPVars.iterator.foreach {
        case (_: Int, v: MPFloatVar) =>
          logger.debug(v.symbol + " = " + v.value.getOrElse("Value does not exist for this constraint variable"))
      }
    }

    // Create MRF state and assume every constraint to be unsatisfied
    val state = MRFState(mrf)
    state.unfixAll()

    // Search for fractional solutions and fix atom values of non fractional solutions
    var nonIntegralSolutionsCounter = 0
    var fractionalSolutions = Vector[Int]()

    for ((id, lpVar) <- literalLPVars.iterator()) {
      val value = lpVar.value.getOrElse(logger.fatal(s"There is no solution for variable '${lpVar.symbol}'"))

      /*
       * Round values very close to 0 and 1 in using this naive approach because they
       * probably arise from rounding error of the solver.
       */
      val normalisedValue = if (value > 0.99) 1.0 else value

      if (normalisedValue != 0.0 && normalisedValue != 1.0) {
        nonIntegralSolutionsCounter += 1
        fractionalSolutions +:= id
      } else {
        val currentAtom = mrf.atoms.get(id)
        currentAtom.fixedValue = if (normalisedValue == 0.0) -1 else 1
        currentAtom.state = if (normalisedValue == 0.0) false else true
        state.refineState(id)
      }
    }

    logger.info("Number of non-integral solutions: " + nonIntegralSolutionsCounter)
    assert(state.countUnfixAtoms() == nonIntegralSolutionsCounter, "Variables introduced are less than actual ground atoms!")

    val sRoundUp = System.currentTimeMillis()

    if (nonIntegralSolutionsCounter > 0) {
      /*
       * RoundUp algorithm:
       *
       * Used for rounding non integral solutions produced by an LP relaxed
       * solution. It can have different results from original alchemy implementation
       * for several key reasons.
       *
       * 1. The solver return a solution before rounding takes place if there are more
       * than one global optimus points in the objective function. In this case this
       * points should yield equivalent solution in terms of quality
       *
       * 2. Loss of significance in alchemy during subtraction of doubles and long doubles
       * (which have different precision) results in the phenomenon of catastrophic cancelation
       * effect. Therefore delta can be significantly larger than zero.
       *
       * Note: Better to keep delta >= 0 for true values and < for false.
       */
      if (ilpRounding == RoundUp) {
        // iterate in reverse insertion order over the fractional atoms
        cfor(fractionalSolutions.size - 1)(_ >= 0, _ - 1) { i: Int =>
          val id = fractionalSolutions(i)
          val currentAtom = mrf.atoms.get(id)
          if (state.computeDelta(id) >= 0) {
            currentAtom.fixedValue = 1
            currentAtom.state = true
          } else {
            currentAtom.fixedValue = -1
            currentAtom.state = false
          }
          state.refineState(id)
        }
      } // MaxWalkSAT algorithm
      else MaxWalkSAT(mrf).infer(state)
    }

    logger.debug("Unfixed atoms: " + state.countUnfixAtoms())

    val eRoundUp = System.currentTimeMillis()
    logger.info(msecTimeToText("Roundup time: ", eRoundUp - sRoundUp))

    state.printStatistics()
    logger.info(msecTimeToText(
      "Total ILP time: ",
      (eTranslation - sTranslation) + (eSolver - sSolver) + (eRoundUp - sRoundUp)))

    state
  }
}
anskarl/LoMRF
src/main/scala/lomrf/mln/inference/ILP.scala
Scala
apache-2.0
11,640
/*
 * Copyright (c) 2015 Stojan Dimitrovski
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

package me.stojan.siddhartha.test

import org.scalatest.{FlatSpec, Matchers}

/**
 * Common base class for the project's unit tests: combines ScalaTest's
 * `FlatSpec` style ("X should Y" specifications) with the `Matchers` DSL
 * (`shouldBe`, `should contain`, ...), so concrete test suites only need
 * to extend this one class.
 */
abstract class UnitSpec extends FlatSpec with Matchers
hf/siddhartha
src/test/scala/me/stojan/siddhartha/test/UnitSpec.scala
Scala
mit
1,256
package org.squeryl.framework

import org.squeryl.{AbstractSession, Session}

/**
 * Mixin for test suites that need a database session.
 *
 * Implementors supply `sessionCreator`, returning `None` when the backing
 * database is unavailable (so the suite can be skipped).
 */
trait DBConnector {

  /** Factory for a fresh session, or `None` if no database is configured. */
  def sessionCreator(): Option[() => AbstractSession]

  // lazy so the config file is only read (and validated) when a suite actually needs it
  lazy val config = {
    new FileConfigReader("org.squeryl.tests.cfg")
  }
}

/**
 * Reads a `java.util.Properties`-format configuration file from `fileName`,
 * failing fast with a descriptive error if the file does not exist.
 *
 * @param fileName path of the properties file, relative to the working directory
 */
class FileConfigReader(fileName: String) {

  val file = new java.io.File(fileName)

  if (!file.exists)
    throw new Exception("No config file at: " + file.getAbsolutePath)

  val fis = new java.io.FileInputStream(file)

  val props = new java.util.Properties

  // close the stream even if parsing fails (the original leaked it on a load error)
  try props.load(fis)
  finally fis.close()

  /**
   * Returns the value for `key`, or the sentinel string `"missing key: <key>"`
   * when absent (kept for backward compatibility — callers compare on content).
   */
  def getProp(key: String): String =
    Option(props.getProperty(key)).getOrElse("missing key: " + key)

  /** True iff every given key is present in the loaded properties. */
  def hasProps(keys: String*): Boolean =
    keys.forall(key => props.getProperty(key) != null)
}
Krasnyanskiy/squeryl
src/test/scala/org/squeryl/framework/DBConnector.scala
Scala
apache-2.0
776
package no.nrk.samnorsk.wikiextractor

import java.io.File

import com.typesafe.scalalogging.slf4j.StrictLogging
import scopt.RenderingMode.TwoColumns

import scala.collection.parallel.ForkJoinTaskSupport
import scala.concurrent.forkjoin.ForkJoinPool
import scala.io.Source

/**
 * Command-line options for the dictionary builder.
 *
 * @param from     source language code (default "nno")
 * @param to       target language code (default "nob")
 * @param limit    maximum number of Wikipedia articles to process, if any
 * @param topN     number of translation candidates to keep per source word
 * @param procs    parallelism level for translation, if overridden
 * @param input    Wikipedia dump to read (required on the command line)
 * @param output   dictionary file to write (required on the command line)
 * @param sourceTF minimum term frequency for source words
 * @param sourceIDF maximum document frequency for source words
 * @param transTF  minimum term frequency for translated words
 * @param transIDF maximum document frequency for translated words
 */
case class Config(from: String = "nno", to: String = "nob", limit: Option[Int] = None, topN: Int = 1,
                  procs: Option[Int] = None, input: Option[File] = None, output: Option[File] = None,
                  sourceTF: Int = 5, sourceIDF: Double = .5, transTF: Int = 5, transIDF: Double = .5)

/**
 * Builds a translation dictionary by machine-translating Wikipedia text with
 * Apertium and collecting word pairs where source and translation disagree.
 */
object DictionaryBuilder extends StrictLogging {

  /**
   * Splits `text` into sentences, translates each sentence, and yields the
   * token discrepancies (source word, translated word) between the two.
   */
  def textToPairs(text: String, translator: ApertiumRunner): Traversable[(String, String)] = {
    for (sent <- SentenceSegmenter.segment(text);
         translation = translator.translate(sent);
         pair <- SimpleTextAligner.tokenDiscrepancy(sent, translation))
      yield pair
  }

  /**
   * Batch variant: segments all `texts`, translates the sentences in one call,
   * and yields the token discrepancies per sentence/translation pair.
   */
  def textToPairs(texts: Traversable[String], translator: ApertiumRunner): Traversable[(String, String)] = {
    val sents = texts.flatMap(SentenceSegmenter.segment)
    val translations = translator.translate(sents)

    for (combined <- sents.toSeq zip translations.toSeq;
         pair <- SimpleTextAligner.tokenDiscrepancy(combined._1, combined._2))
      yield pair
  }

  /**
   * Streams articles from `it` (skipping very long ones, >= 5000 chars),
   * translating them in parallel batches and folding the resulting word pairs
   * into `counter`.
   *
   * @param procs optional override of the parallel collection's thread pool size
   * @return the same `counter`, updated in place
   */
  def wikiToCounts(it: WikiIterator, translator: ApertiumRunner,
                   counter: TranslationCounter[String, String],
                   procs: Option[Int] = None): TranslationCounter[String, String] = {
    it.filter(_.length < 5000)
      .grouped(10000)
      .foreach(articles => {
        // 100-article batches, translated in parallel within each 10000-article chunk
        val par = articles.grouped(100).toSeq.par

        procs match {
          case Some(p) => par.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(p))
          case _ => ;
        }

        par.map(textToPairs(_, translator)).seq.foreach(counter.update)
      })

    counter
  }

  /** Entry point: parses options, runs the pipeline, writes the dictionary. */
  def main(args: Array[String]): Unit = {
    val parser = new scopt.OptionParser[Config]("DictionaryBuilder") {
      head("DictionaryBuilder", "0.1.0")

      opt[String]('d', "direction")
        .action((x, c) => x.split("-") match { case Array(from, to) => c.copy(from = from, to = to) })
        .text("Translation direction (ex. nno-nob).")

      opt[Int]('l', "limit")
        .action((x, c) => c.copy(limit = Some(x)))
        .text("Maximum number of articles to process.")

      opt[String]('i', "input-file")
        .action((x, c) => c.copy(input = Some(new File(x))))
        .text("Input wikipedia dump")
        .required()

      opt[String]('o', "output-file")
        .action((x, c) => c.copy(output = Some(new File(x))))
        .text("Output dictionary file")
        .required()

      opt[Int]('S', "source-tf-filter")
        .action((x, c) => c.copy(sourceTF = x))
        .text("Minimum term frequency for source words")

      opt[Double]('s', "source-df-filter")
        .action((x, c) => c.copy(sourceIDF = x))
        .text("Maximum doc frequency for source words")

      opt[Int]('T', "trans-tf-filter")
        .action((x, c) => c.copy(transTF = x))
        .text("Minimum term frequency for translated words")

      opt[Double]('t', "trans-df-filter")
        .action((x, c) => c.copy(transIDF = x))
        .text("Maximum doc frequency for translated words")

      opt[Int]('n', "top-n")
        .action((x, c) => c.copy(topN = x))
        .text("Number of translations to keep")

      opt[Int]('p', "procs").action((x, c) => c.copy(procs = Some(x))).text("Number of processors to use")
    }

    parser.parse(args, Config()) match {
      case Some(config) =>
        config.limit match {
          case Some(l) => logger.info(s"Reading $l articles from ${config.input.get.getAbsolutePath}")
          case _ => logger.info(s"Reading all articles from ${config.input.get.getAbsolutePath}")
        }

        val translator = new LocalApertiumRunner(fromLanguage = config.from, toLanguage = config.to)
        val source = Source.fromFile(config.input.get)
        val it = new WikiIterator(source, limit = config.limit)
        val counter = wikiToCounts(it, translator,
          new TranslationCounter[String, String](
            sourceTfFilter = config.sourceTF,
            sourceDfFilter = config.sourceIDF,
            transTfFilter = config.transTF,
            transDfFilter = config.transIDF,
            topN = Some(config.topN)), procs = config.procs)

        counter.write(config.output.get)
        source.close()
      case None =>
        parser.renderUsage(TwoColumns)
    }
  }
}
nrkno/samnorsk
SynonymCreator/src/main/scala/no/nrk/samnorsk/wikiextractor/DictionaryBuilder.scala
Scala
mit
4,514
package ru.yandex.mysqlDiff package vendor package mysql import java.sql._ import jdbc._ import diff._ import model._ import script._ import util._ object MysqlTestDataSourceParameters extends TestDataSourceParameters { override def defaultTestDsUrl = "jdbc:mysql://localhost:3306/mysql_diff_tests" override def testDsUser = "test" override def testDsPassword = "test" override val connectedContext = new MysqlConnectedContext(ds) } object MysqlOnlineTests extends OnlineTestsSupport(MysqlTestDataSourceParameters.connectedContext) { import connectedContext._ import context._ override def scriptPreamble = "SET storage_engine = InnoDB" "CAP-101" in { val t2 = checkTwoTables( "CREATE TABLE a (kk INT)", "CREATE TABLE a (id INT PRIMARY KEY AUTO_INCREMENT, kk INT)") // extra checks t2.primaryKey.get.columnNames.toList must_== List("id") t2.column("id").properties.find(MysqlAutoIncrementPropertyType) must_== Some(MysqlAutoIncrement(true)) } "diff unspecified default to script with default 0" in { val t2 = checkTwoTables( "CREATE TABLE b (x INT NOT NULL)", "CREATE TABLE b (x INT NOT NULL DEFAULT 0)") t2.column("x").properties.find(DefaultValuePropertyType) must_== Some(DefaultValue(NumberValue(0))) } "foreign keys with full params specification" in { ddlTemplate.dropTableWithExportedKeysIfExists("servers") ddlTemplate.dropTableWithExportedKeysIfExists("datacenters") execute("CREATE TABLE datacenters (id INT PRIMARY KEY)") val t2 = checkTwoTables( "CREATE TABLE servers (id INT, dc_id INT)", "CREATE TABLE servers (id INT, dc_id INT, " + "CONSTRAINT dc_fk FOREIGN KEY dc_idx (dc_id) REFERENCES datacenters(id))") t2.foreignKeys must haveSize(1) } "FOREIGN KEY with overlapping INDEX" in { ddlTemplate.dropTableWithExportedKeysIfExists("yyyy") ddlTemplate.dropTableWithExportedKeysIfExists("zzzz") recreateTable("CREATE TABLE zzzz (id INT PRIMARY KEY)") checkTwoTables( "CREATE TABLE yyyy (id INT, zzzz_id INT, CONSTRAINT zzzz_c FOREIGN KEY zzzz_i (zzzz_id) REFERENCES zzzz(id), 
INDEX(zzzz_id, id))", "CREATE TABLE yyyy (id INT, zzzz_id INT, CONSTRAINT zzzz_c FOREIGN KEY zzzz_i (zzzz_id) REFERENCES zzzz(id))" ) } "FOREIGN KEY with overlapping UNIQUE" in { ddlTemplate.dropTableWithExportedKeysIfExists("uuuu") ddlTemplate.dropTableWithExportedKeysIfExists("qqqq") recreateTable("CREATE TABLE qqqq (id INT PRIMARY KEY)") checkTwoTables( "CREATE TABLE uuuu (id INT, qqqq_id INT, CONSTRAINT qqqq_c FOREIGN KEY qqqq_i (qqqq_id) REFERENCES qqqq(id), UNIQUE(qqqq_id, id))", "CREATE TABLE uuuu (id INT, qqqq_id INT, CONSTRAINT qqqq_c FOREIGN KEY qqqq_i (qqqq_id) REFERENCES qqqq(id))" ) } "FOREIGN KEY with overlapping PRIMARY KEY" in { ddlTemplate.dropTableWithExportedKeysIfExists("mmmm") ddlTemplate.dropTableWithExportedKeysIfExists("nnnn") recreateTable("CREATE TABLE mmmm (id INT PRIMARY KEY)") checkTwoTables( "CREATE TABLE nnnn (id INT, mmmm_id INT, CONSTRAINT mmmm_c FOREIGN KEY mmmm_i (mmmm_id) REFERENCES mmmm(id), PRIMARY KEY(mmmm_id, id))", "CREATE TABLE nnnn (id INT, mmmm_id INT, CONSTRAINT mmmm_c FOREIGN KEY mmmm_i (mmmm_id) REFERENCES mmmm(id))" ) } "FOREIGN KEY ... ON UPDATE ... 
ON DELETE" in { ddlTemplate.dropTableWithExportedKeysIfExists("o_u_o_d") ddlTemplate.dropTableWithExportedKeysIfExists("o_u_o_d_pri") execute("CREATE TABLE o_u_o_d_pri (id INT PRIMARY KEY)") checkTwoTables( "CREATE TABLE o_u_o_d (a_id INT, id INT, " + "FOREIGN KEY (a_id) REFERENCES o_u_o_d_pri(id) ON UPDATE SET NULL)", "CREATE TABLE o_u_o_d (a_id INT, id INT, " + "FOREIGN KEY (a_id) REFERENCES o_u_o_d_pri(id) ON UPDATE CASCADE ON DELETE SET NULL)" ) } "diff, apply collate" in { jt.execute("DROP TABLE IF EXISTS collate_test") execute("CREATE TABLE collate_test (id VARCHAR(10)) COLLATE=cp1251_bin") val oldModel = jdbcModelExtractor.extractTable("collate_test") // checking properly extracted oldModel.options.properties must contain(MysqlCollateTableOption("cp1251_bin")) val newModel = modelParser.parseCreateTableScript( "CREATE TABLE collate_test (id VARCHAR(10)) COLLATE=cp866_bin") // checking properly parsed newModel.options.properties must contain(MysqlCollateTableOption("cp866_bin")) val diff = diffMaker.compareTables(oldModel, newModel) diff must beSomething val script = new Script(diffSerializer.alterScript(diff.get, newModel)) script.statements must haveSize(1) for (s <- script.statements) { jt.execute(scriptSerializer.serialize(s)) } val resultModel = jdbcModelExtractor.extractTable("collate_test") // checking patch properly applied resultModel.options.properties must contain(MysqlCollateTableOption("cp866_bin")) } "same engine" in { checkTable( "CREATE TABLE yaru_events (user_id BIGINT) ENGINE=InnoDB") } "change engine" in { jt.execute("DROP TABLE IF exists change_engine") jt.execute("CREATE TABLE change_engine (id INT) ENGINE=MyISAM") val d = jdbcModelExtractor.extractTable("change_engine") d.options.properties must contain(MysqlEngineTableOption("MyISAM")) val t = modelParser.parseCreateTableScript("CREATE TABLE change_engine (id INT) ENGINE=InnoDB") val script = new Script(diffMaker.compareTablesScript(d, t)) script.statements must notBeEmpty for (s <- 
script.statements) { jt.execute(scriptSerializer.serialize(s)) } val resultModel = jdbcModelExtractor.extractTable("change_engine") resultModel.options.properties must contain(MysqlEngineTableOption("InnoDB")) } "ENUM" in { checkTable("CREATE TABLE users (id INT, department ENUM('dev', 'mngr', 'adm') DEFAULT 'mngr')") } "bug with character set implies collation" in { jt.execute("DROP TABLE IF EXISTS moderated_tags") execute("CREATE TABLE moderated_tags (tag VARCHAR(255) CHARACTER SET utf8 NOT NULL, type INT(11) NOT NULL) DEFAULT CHARSET=utf8 COLLATE=utf8_bin") val s2 = "CREATE TABLE moderated_tags (tag VARCHAR(255) NOT NULL, type INT(11) NOT NULL) DEFAULT CHARSET=utf8 COLLATE=utf8_bin" val dbModel = jdbcModelExtractor.extractTable("moderated_tags") val localModel = modelParser.parseCreateTableScript(s2) // column collation is changed from utf8_bin to utf8_general_ci // (character set implies collation) diffMaker.compareTables(dbModel, localModel) must beLike { case Some(_) => true } } "bug with data type options equivalence" in { checkTable( "CREATE TABLE alive_apps (hostname VARCHAR(100) NOT NULL) DEFAULT CHARSET=utf8") } "bug with COLLATE before CHARACTER SET" in { checkTable( "CREATE TABLE IF NOT EXISTS tag_cloud_global (tag varchar(64) collate utf8_bin) DEFAULT CHARSET=utf8 COLLATE=utf8_bin") } "complex bug with charset and collation" in { checkTable( """ CREATE TABLE charset_collation (zz VARCHAR(3) CHARACTER SET latin1) ENGINE=InnoDB DEFAULT CHARSET=utf8 """) } "PRIMARY KEY not declared as NOT NULL" in { checkTable( "CREATE TABLE pk_null (id VARCHAR(10), PRIMARY KEY(id))") } "BOOLEAN with DEFAULT TRUE" in { checkTable( "CREATE TABLE boolean_with_default_true (available BOOLEAN NOT NULL default TRUE)") } "BOOLEAN with DEFAULT 1" in { checkTable( "CREATE TABLE boolean_with_default_true (available BOOLEAN NOT NULL default 1)") } "BIT" in { checkTable( """CREATE TABLE stud (aud BIT NOT NULL DEFAULT '\\0')""") } "TINYINT(1) DEFAULT '1'" in { checkTable( """CREATE 
TABLE tinyint_1 (a TINYINT(1) NOT NULL DEFAULT '1')""") } // http://bitbucket.org/stepancheg/mysql-diff/issue/24/default-value-for-tinyint "TINYINT(4)" in { checkTwoTables( "CREATE TABLE tinyint_4 (xx TINYINT(4) NOT NULL DEFAULT '0')", "CREATE TABLE tinyint_4 (xx TINYINT(4) NOT NULL DEFAULT '3')" ) } "DECIMAL DEFAULT VALUE" in { checkTable("CREATE TABLE decimal_dv (id INT, deci DECIMAL(10, 6) DEFAULT '0.000000')") } "MEDIUMINT UNSIGNED" in { checkTable( "CREATE TABLE service_with_mediumint_unsigned (bg_color MEDIUMINT UNSIGNED)") } "DEFAULT DATE 0000-00-00" in { checkTable( "CREATE TABLE date_0000000 (d DATE DEFAULT '0000-00-00')") } "DEFAULT DATE 0" in { checkTable( "CREATE TABLE date_0 (d DATE DEFAULT 0)") } "DEFAULT TIME 0" in { checkTable( "CREATE TABLE time_0 (d TIME DEFAULT 0)") } "DEFAULT DATETIME 0" in { checkTable( "CREATE TABLE datetime_0 (d DATETIME DEFAULT 0)") } "DEFAULT TIMESTAMP 0" in { checkTable( "CREATE TABLE timestamp_0 (d TIMESTAMP DEFAULT 0)") } "TIMESTAMP is NOT NULL by default" in { // TIMESTAMP columns are NOT NULL by default // http://dev.mysql.com/doc/refman/5.1/en/timestamp.html checkTable( "CREATE TABLE timestamp_nn (d TIMESTAMP DEFAULT '2010-09-23 17:18:19')") } "bug with PRIMARY KEY added" in { checkTwoTables( "CREATE TABLE im_cs1 (checksum VARCHAR(255), added INT, PRIMARY KEY(checksum), KEY(added))", "CREATE TABLE im_cs1 (checksum VARCHAR(255), added INT, KEY(added))") } "bug with two PRIMARY KEYs added" in { checkTwoTables( "CREATE TABLE im_cs (c1 INT, c2 INT, added INT, PRIMARY KEY(c1, c2))", "CREATE TABLE im_cs (c1 INT, c2 INT, added INT)") } "bug with non-unique CONSTRAINT names" in { val version = jt.query("SELECT @@version").single(rs => MysqlServerVersion.parse(rs.getString(1))) if (version < MysqlServerVersion(5, 1, 0)) { checkTwoDatabases( """ CREATE TABLE nu_a (id INT PRIMARY KEY); CREATE TABLE nu_b (a_id INT); CREATE TABLE nu_c (a_id INT, CONSTRAINT c866 FOREIGN KEY zc (a_id) REFERENCES nu_a(id)) """, """ CREATE TABLE nu_a 
(id INT PRIMARY KEY); CREATE TABLE nu_b (a_id INT, CONSTRAINT c866 FOREIGN KEY zb (a_id) REFERENCES nu_a(id)); CREATE TABLE nu_c (a_id INT) """) } } "MySQL 5.1 weird CONSTRAINT name FOREIGN KEY name behaviour" in { // MySQL 5.1 seems to use constraint name for foreign key name checkDatabase( """ CREATE TABLE nu_a (id INT PRIMARY KEY); CREATE TABLE nu_b (a_id INT, CONSTRAINT c866 FOREIGN KEY zb (a_id) REFERENCES nu_a(id)); CREATE TABLE nu_c (a_id INT) """ ) } "bug with ADD FOREIGN KEY referenced to newly created table" in { checkTwoDatabases( """ CREATE TABLE cities (id INT PRIMARY KEY) """, """ CREATE TABLE countries (id INT PRIMARY KEY); CREATE TABLE cities (id INT PRIMARY KEY, cid INT, CONSTRAINT ccid FOREIGN KEY icid (cid) REFERENCES countries(id)) """) } "with FOREIGN KEY unchanged" in { dropTable("coins") dropTable("collectors") execute("CREATE TABLE collectors (id INT PRIMARY KEY)") checkTable( "CREATE TABLE coins (coll_id INT," + " CONSTRAINT coin_fk FOREIGN KEY (coll_id) REFERENCES collectors(id))") } "UNIQUE unchanged" in { checkTable( "CREATE TABLE bears (id INT, name VARCHAR(10), CONSTRAINT name_uniq UNIQUE(name))") } "CREATE INDEX" in { checkTwoTables( """ CREATE TABLE movies (id INT, name VARCHAR(100)) """, """ CREATE TABLE movies (id INT, name VARCHAR(100)); CREATE INDEX movies_name_idx ON movies (name) """ ) } // http://bitbucket.org/stepancheg/mysql-diff/issue/21/support-indexes-with-leading-part "INDEX with leading part" in { checkTwoTables( """ CREATE TABLE index_lp (name VARCHAR(100), INDEX(name)) """, """ CREATE TABLE index_lp (name VARCHAR(100), INDEX(name(20))) """ ) } "PRIMARY KEY indexing leading part" in { checkTwoTables( """ CREATE TABLE pk_lp (name VARCHAR(100), PRIMARY KEY (name(9))) """, """ CREATE TABLE pk_lp (name VARCHAR(100), PRIMARY KEY (name)) """ ) } "TABLE AUTO_INCREMENT" in { checkTable( """ CREATE TABLE table_auto_inc (id INT NOT NULL PRIMARY KEY AUTO_INCREMENT) ENGINE=MyISAM AUTO_INCREMENT=11 """ ) } "TABLE AUTO_INCREMENT 
with increment" in { dropTable("aiwi") dropTable("aiwi2") val script = "CREATE TABLE aiwi (ID INT PRIMARY KEY AUTO_INCREMENT, v INT) ENGINE=MyISAM AUTO_INCREMENT=122" val script2 = script.replaceFirst("aiwi", "aiwi2") execute(script) execute(script2) execute("INSERT INTO aiwi (v) VALUES (1)") val dbModel = jdbcModelExtractor.extractTable("aiwi") val localModel = modelParser.parseCreateTableScript(script) diffMaker.compareTables(dbModel, localModel) must beLike { case None => true } diffMaker.compareTables(localModel, dbModel) must beLike { case None => true } val dbModel2 = jdbcModelExtractor.extractTable("aiwi2").copy(name="aiwi") diffMaker.compareTables(dbModel, dbModel2) must beLike { case None => true } diffMaker.compareTables(dbModel2, dbModel) must beLike { case None => true } } "TABLE change COMMENT" in { checkTwoTables( "CREATE TABLE table_comment_1 (id INT) COMMENT 'paper'", "CREATE TABLE table_comment_1 (id INT) COMMENT 'scissors'" ) } "TABLE add COMMENT" in { checkTwoTables( "CREATE TABLE table_comment_2 (id INT)", "CREATE TABLE table_comment_2 (id INT) COMMENT 'scissors'" ) } "COLUMN COMMENT" in { checkTwoTables( "CREATE TABLE bottles (height INT COMMENT 'in cm')", "CREATE TABLE bottles (height INT)" ) } "ENGINE copied in CREATE TABLE LIKE" in { // test repeated twice because I don't know what default engine is checkDatabase( """ CREATE TABLE engine_copied_template (id INT) ENGINE=MyISAM; CREATE TABLE engine_copied LIKE engine_copied_template """ ) checkDatabase( """ CREATE TABLE engine_copied_template (id INT) ENGINE=InnoDB; CREATE TABLE engine_copied LIKE engine_copied_template """ ) } } object MysqlDdlTemplateTests extends DdlTemplateTests(MysqlTestDataSourceParameters.connectedContext) { import MysqlTestDataSourceParameters.connectedContext._ import MysqlTestDataSourceParameters.connectedContext.context._ import ddlTemplate._ import jt.execute "dropTableWithExportedKeys" in { dropTableWithExportedKeysIfExists("dt_a") 
dropTableWithExportedKeysIfExists("dt_b") execute("CREATE TABLE dt_a (id INT PRIMARY KEY, b_id INT) ENGINE=InnoDB") execute("CREATE TABLE dt_b (id INT PRIMARY KEY, a_id INT) ENGINE=InnoDB") execute("ALTER TABLE dt_a ADD FOREIGN KEY (b_id) REFERENCES dt_b(id)") execute("ALTER TABLE dt_b ADD FOREIGN KEY (a_id) REFERENCES dt_a(id)") dropTableWithExportedKeys("dt_a") tableExists("dt_a") must beFalse tableExists("dt_b") must beTrue jdbcModelExtractor.extractTable("dt_b").foreignKeys must beEmpty } } // vim: set ts=4 sw=4 et:
hkerem/mysql-diff
src/main/scala/ru/yandex/mysqlDiff/vendor/mysql/mysql-tests.scala
Scala
bsd-3-clause
16,931
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package org.apache.toree.kernel.interpreter.scala import java.io.ByteArrayOutputStream import java.util.concurrent.{ExecutionException, TimeoutException, TimeUnit} import com.typesafe.config.{Config, ConfigFactory} import jupyter.Displayers import org.apache.spark.SparkContext import org.apache.spark.sql.SparkSession import org.apache.spark.repl.Main import org.apache.toree.interpreter._ import org.apache.toree.kernel.api.{KernelLike, KernelOptions} import org.apache.toree.utils.TaskManager import org.slf4j.LoggerFactory import org.apache.toree.kernel.BuildInfo import org.apache.toree.kernel.protocol.v5.MIMEType import scala.annotation.tailrec import scala.collection.JavaConverters._ import scala.concurrent.{Await, Future} import scala.language.reflectiveCalls import scala.tools.nsc.Settings import scala.tools.nsc.interpreter.{IR, OutputStream} import scala.tools.nsc.util.ClassPath import scala.util.matching.Regex import scala.concurrent.duration.Duration class ScalaInterpreter(private val config:Config = ConfigFactory.load) extends Interpreter with ScalaInterpreterSpecific { import ScalaInterpreter._ ScalaDisplayers.ensureLoaded() protected var _kernel: KernelLike = _ protected def kernel: 
KernelLike = _kernel protected val logger = LoggerFactory.getLogger(this.getClass.getName) protected val _thisClassloader = this.getClass.getClassLoader protected val lastResultOut = new ByteArrayOutputStream() private[scala] var taskManager: TaskManager = _ /** Since the ScalaInterpreter can be started without a kernel, we need to ensure that we can compile things. Adding in the default classpaths as needed. */ def appendClassPath(settings: Settings): Settings = { settings.classpath.value = buildClasspath(_thisClassloader) settings.embeddedDefaults(_runtimeClassloader) settings } protected var settings: Settings = newSettings(List()) settings = appendClassPath(settings) private val maxInterpreterThreads: Int = { if(config.hasPath("max_interpreter_threads")) config.getInt("max_interpreter_threads") else TaskManager.DefaultMaximumWorkers } protected def newTaskManager(): TaskManager = new TaskManager(maximumWorkers = maxInterpreterThreads) /** * This has to be called first to initialize all the settings. 
* * @return The newly initialized interpreter */ override def init(kernel: KernelLike): Interpreter = { this._kernel = kernel val args = interpreterArgs(kernel) settings = newSettings(args) settings = appendClassPath(settings) // https://issues.apache.org/jira/browse/TOREE-132?focusedCommentId=15104495&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#comment-15104495 settings.usejavacp.value = true start() // ensure bindings are defined before allowing user code to run bindVariables() this } protected def bindVariables(): Unit = { bindKernelVariable(kernel) bindSparkSession() bindSparkContext() defineImplicits() } protected[scala] def buildClasspath(classLoader: ClassLoader): String = { def toClassLoaderList( classLoader: ClassLoader ): Seq[ClassLoader] = { @tailrec def toClassLoaderListHelper( aClassLoader: ClassLoader, theList: Seq[ClassLoader]):Seq[ClassLoader] = { if( aClassLoader == null ) return theList toClassLoaderListHelper( aClassLoader.getParent, aClassLoader +: theList ) } toClassLoaderListHelper(classLoader, Seq()) } val urls = toClassLoaderList(classLoader).flatMap{ case cl: java.net.URLClassLoader => cl.getURLs.toList case a => List() } urls.foldLeft("")((l, r) => ClassPath.join(l, r.toString)) } protected def interpreterArgs(kernel: KernelLike): List[String] = { import scala.collection.JavaConverters._ if (kernel == null || kernel.config == null) { List() } else { kernel.config.getStringList("interpreter_args").asScala.toList } } protected def maxInterpreterThreads(kernel: KernelLike): Int = { kernel.config.getInt("max_interpreter_threads") } protected def bindKernelVariable(kernel: KernelLike): Unit = { logger.warn(s"kernel variable: ${kernel}") // InterpreterHelper.kernelLike = kernel // interpret("import org.apache.toree.kernel.interpreter.scala.InterpreterHelper") // interpret("import org.apache.toree.kernel.api.Kernel") // // interpret(s"val kernel = 
InterpreterHelper.kernelLike.asInstanceOf[org.apache.toree.kernel.api.Kernel]") doQuietly { bind( "kernel", "org.apache.toree.kernel.api.Kernel", kernel, List( """@transient implicit""") ) } } override def interrupt(): Interpreter = { require(taskManager != null) // TODO: use SparkContext.setJobGroup to avoid killing all jobs kernel.sparkContext.cancelAllJobs() // give the task 100ms to complete before restarting the task manager import scala.concurrent.ExecutionContext.Implicits.global val finishedFuture = Future { while (taskManager.isExecutingTask) { Thread.sleep(10) } } try { Await.result(finishedFuture, Duration(100, TimeUnit.MILLISECONDS)) // Await returned, no need to interrupt tasks. } catch { case timeout: TimeoutException => // Force dumping of current task (begin processing new tasks) taskManager.restart() } this } override def interpret(code: String, silent: Boolean = false, output: Option[OutputStream]): (Results.Result, Either[ExecuteOutput, ExecuteFailure]) = { interpretBlock(code, silent) } def prepareResult(interpreterOutput: String, showType: Boolean = KernelOptions.showTypes, // false noTruncate: Boolean = KernelOptions.noTruncation, // false showOutput: Boolean = KernelOptions.showOutput // true ): (Option[Any], Option[String], Option[String]) = { if (interpreterOutput.isEmpty) { return (None, None, None) } var lastResult = Option.empty[Any] var lastResultAsString = "" val definitions = new StringBuilder val text = new StringBuilder interpreterOutput.split("\\n").foreach { case HigherOrderFunction(name, func, funcType) => definitions.append(s"$name: $func$funcType").append("\\n") case NamedResult(name, vtype, value) if read(name).nonEmpty => val result = read(name) lastResultAsString = result.map(String.valueOf(_)).getOrElse("") lastResult = result // magicOutput should be handled as result to properly // display based on MimeType. 
if(vtype != "org.apache.toree.magic.MagicOutput") { // default noTruncate = False // %truncation on ==> noTruncate = false -> display Value // %truncation off ==> noTruncate = true -> display lastResultAsString val defLine = (showType, noTruncate) match { case (true, true) => s"$name: $vtype = $lastResultAsString\\n" case (true, false) => lastResultAsString = value lastResult = Some(value) s"$name: $vtype = $value\\n" case (false, true) => s"$name = $lastResultAsString\\n" case (false, false) => lastResultAsString = value lastResult = Some(value) s"$name = $value\\n" } // suppress interpreter-defined values if ( defLine.matches("res\\\\d+(.*)[\\\\S\\\\s]") == false ) { definitions.append(defLine) } if(showType) { if(defLine.startsWith("res")) { val v = defLine.split("^res\\\\d+(:|=)\\\\s+")(1) lastResultAsString = v lastResult = Some(v) } else { lastResultAsString = defLine lastResult = Some(defLine) } } } case Definition(defType, name) => lastResultAsString = "" definitions.append(s"defined $defType $name\\n") case Import(name) => // do nothing with the line case line if lastResultAsString.contains(line) => // do nothing with the line case line => text.append(line).append("\\n") } (lastResult, if (definitions.nonEmpty && showOutput) Some(definitions.toString) else None, if (text.nonEmpty && showOutput) Some(text.toString) else None) } protected def interpretBlock(code: String, silent: Boolean = false): (Results.Result, Either[ExecuteOutput, ExecuteFailure]) = { logger.trace(s"Interpreting line: $code") val futureResult = interpretAddTask(code, silent) // Map the old result types to our new types val mappedFutureResult = interpretMapToCustomResult(futureResult) // Determine whether to provide an error or output val futureResultAndExecuteInfo = interpretMapToResultAndOutput(mappedFutureResult) // Block indefinitely until our result has arrived import scala.concurrent.duration._ Await.result(futureResultAndExecuteInfo, Duration.Inf) } protected def 
interpretMapToCustomResult(future: Future[IR.Result]): Future[Results.Result] = { import scala.concurrent.ExecutionContext.Implicits.global future map { case IR.Success => Results.Success case IR.Error => Results.Error case IR.Incomplete => Results.Incomplete } recover { case ex: ExecutionException => Results.Aborted } } protected def interpretMapToResultAndOutput(future: Future[Results.Result]): Future[(Results.Result, Either[Map[String, String], ExecuteError])] = { import scala.concurrent.ExecutionContext.Implicits.global future map { case result @ (Results.Success | Results.Incomplete) => val lastOutput = lastResultOut.toString("UTF-8").trim lastResultOut.reset() val (obj, defStr, text) = prepareResult(lastOutput, KernelOptions.showTypes, KernelOptions.noTruncation, KernelOptions.showOutput ) defStr.foreach(kernel.display.content(MIMEType.PlainText, _)) text.foreach(kernel.display.content(MIMEType.PlainText, _)) val output = obj.map(Displayers.display(_).asScala.toMap).getOrElse(Map.empty) (result, Left(output)) case Results.Error => val lastOutput = lastResultOut.toString("UTF-8").trim lastResultOut.reset() val (obj, defStr, text) = prepareResult(lastOutput) defStr.foreach(kernel.display.content(MIMEType.PlainText, _)) val output = interpretConstructExecuteError(text.get) (Results.Error, Right(output)) case Results.Aborted => (Results.Aborted, Right(null)) } } def bindSparkContext() = { val bindName = "sc" doQuietly { logger.info(s"Binding SparkContext into interpreter as $bindName") interpret(s"""def ${bindName}: ${classOf[SparkContext].getName} = kernel.sparkContext""") // NOTE: This is needed because interpreter blows up after adding // dependencies to SparkContext and Interpreter before the // cluster has been used... 
not exactly sure why this is the case // TODO: Investigate why the cluster has to be initialized in the kernel // to avoid the kernel's interpreter blowing up (must be done // inside the interpreter) logger.debug("Initializing Spark cluster in interpreter") // doQuietly { // interpret(Seq( // "val $toBeNulled = {", // " var $toBeNulled = sc.emptyRDD.collect()", // " $toBeNulled = null", // "}" // ).mkString("\\n").trim()) // } } } def bindSparkSession(): Unit = { val bindName = "spark" doQuietly { // TODO: This only adds the context to the main interpreter AND // is limited to the Scala interpreter interface logger.debug(s"Binding SparkSession into interpreter as $bindName") interpret(s"""def ${bindName}: ${classOf[SparkSession].getName} = kernel.sparkSession""") // interpret( // s""" // |def $bindName: ${classOf[SparkSession].getName} = { // | if (org.apache.toree.kernel.interpreter.scala.InterpreterHelper.sparkSession != null) { // | org.apache.toree.kernel.interpreter.scala.InterpreterHelper.sparkSession // | } else { // | val s = org.apache.spark.repl.Main.createSparkSession() // | org.apache.toree.kernel.interpreter.scala.InterpreterHelper.sparkSession = s // | s // | } // |} // """.stripMargin) } } def defineImplicits(): Unit = { val code = """ |import org.apache.spark.sql.SparkSession |import org.apache.spark.sql.SQLContext |import org.apache.spark.sql.SQLImplicits | |object implicits extends SQLImplicits with Serializable { | protected override def _sqlContext: SQLContext = SparkSession.builder.getOrCreate.sqlContext |} | |import implicits._ """.stripMargin doQuietly(interpret(code)) } override def classLoader: ClassLoader = _runtimeClassloader /** * Returns the language metadata for syntax highlighting */ override def languageInfo = LanguageInfo( "scala", BuildInfo.scalaVersion, fileExtension = Some(".scala"), pygmentsLexer = Some("scala"), mimeType = Some("text/x-scala"), codemirrorMode = Some("text/x-scala")) } object ScalaInterpreter { val 
HigherOrderFunction: Regex = """(\\w+):\\s+(\\(\\s*.*=>\\s*\\w+\\))(\\w+)\\s*.*""".r val NamedResult: Regex = """(\\w+):\\s+([^=]+)\\s+=\\s*(.*)""".r val Definition: Regex = """defined\\s+(\\w+)\\s+(.+)""".r val Import: Regex = """import\\s+([\\w\\.,\\{\\}\\s]+)""".r /** * Utility method to ensure that a temporary directory for the REPL exists for testing purposes. */ def ensureTemporaryFolder(): String = { val outputDir = Option(System.getProperty("spark.repl.class.outputDir")).getOrElse({ val execUri = System.getenv("SPARK_EXECUTOR_URI") val outputDir: String = Main.outputDir.getAbsolutePath System.setProperty("spark.repl.class.outputDir", outputDir) if (execUri != null) { System.setProperty("spark.executor.uri", execUri) } outputDir }) outputDir } }
apache/incubator-toree
scala-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/scala/ScalaInterpreter.scala
Scala
apache-2.0
15,472
package scala.meta package internal package tql import scala.collection.immutable.Seq import scala.collection.mutable.ListBuffer import scala.reflect._ import org.scalameta.algebra._ import scala.meta.tql._ object TraverserHelper { def traverseSeq[U, T <: U with AnyRef : ClassTag, A : Monoid]( f: Traverser[U]#Matcher[A], seq: Seq[T]): Option[(Seq[T], A)] = { val m = implicitly[Monoid[A]] var buffer = new ListBuffer[T]() var hasChanged = false var acc = m.zero for {t <- seq (a1 : T, a2) <- f(t) if classTag[T].runtimeClass.isInstance(a1) } { buffer.append(a1) acc += a2 hasChanged |= !(a1 eq t) } hasChanged |= seq.size != buffer.size //TODO this should be an error Some((if (hasChanged) collection.immutable.Seq(buffer: _*) else seq, acc)) } def traverseSeqOfSeq[U, T <: U with AnyRef : ClassTag, A : Monoid]( f: Traverser[U]#Matcher[A], seq: Seq[Seq[T]]): Option[(Seq[Seq[T]], A)] = { val m = implicitly[Monoid[A]] var buffer = new ListBuffer[Seq[T]]() var hasChanged = false var acc = m.zero for {t <- seq (a1, a2) <- traverseSeq(f, t) }{ buffer.append(a1) acc += a2 hasChanged |= !(a1 eq t) } hasChanged |= seq.size != buffer.size //TODO this should be an error Some((if (hasChanged) collection.immutable.Seq(buffer: _*) else seq, acc)) } def traverseOptionalSeq[U, T <: U with AnyRef: ClassTag, A: Monoid]( f: Traverser[U]#Matcher[A], a: Option[Seq[T]]): Option[(Option[Seq[T]], A)] = Some( a.flatMap (traverseSeq(f, _)) .collect{case (x: Seq[T], y) => (Some(x), y)} .getOrElse(None, implicitly[Monoid[A]].zero)) def optional[U, T <: U with AnyRef : ClassTag, A: Monoid]( f: Traverser[U]#Matcher[A], a: Option[T]): Option[(Option[T], A)] = Some(a .flatMap(f(_)) .collect{case (x: T, y) if classTag[T].runtimeClass.isInstance(x) => (Some(x), y)} .getOrElse((None, implicitly[Monoid[A]].zero))) }
beni55/scalameta
scalameta/tql/src/main/scala/scala/meta/internal/tql/TraverserHelper.scala
Scala
bsd-3-clause
2,130
package org.openmole.plugin.sampling import org.openmole.core.workflow.sampling.Factor package onefactor { import org.openmole.core.workflow.domain.DiscreteFromContextDomain trait OneFactorDSL { implicit class SamplingIsNominalFactor[D, T](f: Factor[D, T])(implicit domain: DiscreteFromContextDomain[D, T]) { def nominal(t: T) = NominalFactor(f, t, domain) } } } package object onefactor extends OneFactorDSL
openmole/openmole
openmole/plugins/org.openmole.plugin.sampling.onefactor/src/main/scala/org/openmole/plugin/sampling/onefactor/package.scala
Scala
agpl-3.0
435
package spire import spire.algebra._ import spire.algebra.partial._ import spire.laws.arb._ import spire.laws.Perm import spire.optional.partialIterable._ import spire.optional.mapIntIntPermutation._ import spire.std.boolean._ import spire.std.int._ import spire.syntax.eq._ import org.scalatest.{FunSuite, Matchers, NonImplicitAssertions} import org.scalatest.prop.Checkers import org.scalacheck.{Arbitrary, Gen} import org.scalacheck.Arbitrary._ import org.scalacheck.Prop._ class PartialSyntaxTest extends FunSuite with Checkers with BaseSyntaxTest with NonImplicitAssertions { implicit val IntGroup: Group[Int] = implicitly[AdditiveGroup[Int]].additive implicit val SeqIntEq: Eq[Seq[Int]] = spire.optional.genericEq.generic[Seq[Int]] test("Semigroupoid syntax")(check(forAll { (a: Seq[Int], b: Seq[Int]) => testSemigroupoidSyntax(a, b) })) test("Groupoid syntax")(check(forAll { (a: Seq[Int], b: Seq[Int]) => testGroupoidSyntax(a, b) })) test("Partial action syntax")(check(forAll { (seq: Seq[Int], perm: Perm) => testPartialActionSyntax(seq, perm.map) })) def testSemigroupoidSyntax[A: Semigroupoid: Eq](a: A, b: A) = { import spire.syntax.semigroupoid._ ((a |+|? b) === Semigroupoid[A].partialOp(a, b)) && ((a |+|?? b) === Semigroupoid[A].opIsDefined(a, b)) } def testGroupoidSyntax[A: Groupoid: Eq](a: A, b: A) = { import spire.syntax.groupoid._ (a.isId === Groupoid[A].isId(a)) && (a.leftId === Groupoid[A].leftId(a)) && (a.rightId === Groupoid[A].rightId(a)) && ((a |+|? b) === Groupoid[A].partialOp(a, b)) && ((a |+|?? b) === Groupoid[A].opIsDefined(a, b)) ((a |-|? b) === Groupoid[A].partialOpInverse(a, b)) && ((a |-|?? b) === Groupoid[A].opInverseIsDefined(a, b)) } def testPartialActionSyntax(seq: Seq[Int], perm: Map[Int, Int]) = { import spire.syntax.partialAction._ ((perm ?|+|> seq) === PartialAction[Seq[Int], Map[Int, Int]].partialActl(perm, seq)) && ((seq <|+|? 
perm) === PartialAction[Seq[Int], Map[Int, Int]].partialActr(seq, perm)) && ((perm ??|+|> seq) === PartialAction[Seq[Int], Map[Int, Int]].actlIsDefined(perm, seq)) && ((seq <|+|?? perm) === PartialAction[Seq[Int], Map[Int, Int]].actrIsDefined(seq, perm)) } }
guersam/spire
tests/src/test/scala/spire/PartialSyntaxTest.scala
Scala
mit
2,237