code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package config
import javax.inject.Named
import com.google.inject.{AbstractModule, Provides, Singleton}
import software.amazon.awssdk.regions.internal.util.EC2MetadataUtils
import software.amazon.awssdk.services.cloudformation.CloudFormationClient
import software.amazon.awssdk.services.cloudformation.model.{
DescribeStackResourceRequest,
DescribeStacksRequest
}
import software.amazon.awssdk.services.ec2.Ec2Client
import software.amazon.awssdk.services.ec2.model.DescribeTagsRequest
import software.amazon.awssdk.services.ec2.model.Filter
class StackConfig extends AbstractModule {
  override def configure() = {}

  /**
   * Provides the CloudFormation stack id of the stack that launched this EC2 instance.
   *
   * Reads the instance id from the EC2 instance metadata service, then scans the
   * instance's tags for the `aws:cloudformation:stack-id` tag that CloudFormation
   * attaches to every resource it creates. Fails fast via `sys.error` when the tag
   * is missing, since no other binding in this module can be derived without it.
   */
  @Provides
  @Named("cfnStackName")
  @Singleton
  def cfnStackName(ec2Client: Ec2Client): String = {
    import scala.collection.JavaConverters._
    val instanceId = EC2MetadataUtils.getInstanceId
    // consistency fix: use builder() with parentheses like the other builders below
    val resourceTypeFilter =
      Filter.builder().name("resource-type").values("instance").build()
    val resourceIdFilter =
      Filter.builder().name("resource-id").values(instanceId).build()
    val stackIdFilter =
      Filter.builder().name("key").values("aws:cloudformation:stack-id").build()
    val describeTagsRequest = DescribeTagsRequest
      .builder()
      .filters(resourceTypeFilter, resourceIdFilter, stackIdFilter)
      .build()
    val tags = ec2Client.describeTags(describeTagsRequest).tags().asScala
    tags
      .find(_.key() == "aws:cloudformation:stack-id")
      .getOrElse(sys.error("aws:cloudformation:stack-id tag is compulsory"))
      .value()
  }

  /**
   * Provides the physical S3 bucket name backing the logical resource `S3Bucket`
   * declared in the CloudFormation stack.
   */
  @Provides
  @Named("s3Bucket")
  @Singleton
  def s3Bucket(cfnClient: CloudFormationClient,
               @Named("cfnStackName") cfnStackName: String): String = {
    val describeStackRequest = DescribeStackResourceRequest
      .builder()
      .stackName(cfnStackName)
      .logicalResourceId("S3Bucket")
      .build()
    cfnClient
      .describeStackResource(describeStackRequest)
      .stackResourceDetail()
      .physicalResourceId()
  }

  /**
   * Provides the application's public URL, read from the `WebAddress` output of the
   * CloudFormation stack.
   */
  @Provides
  @Named("sundialUrl")
  @Singleton
  def sundialUrl(cfnClient: CloudFormationClient,
                 @Named("cfnStackName") cfnStackName: String) = {
    import scala.collection.JavaConverters._
    val describeStackRequest =
      DescribeStacksRequest.builder().stackName(cfnStackName).build()
    val stack =
      cfnClient.describeStacks(describeStackRequest).stacks().get(0)
    stack
      .outputs()
      .asScala
      .find(_.outputKey() == "WebAddress")
      // fail with a descriptive message (consistent with cfnStackName) instead of a
      // bare NoSuchElementException from Option.get
      .getOrElse(sys.error("WebAddress stack output is compulsory"))
      .outputValue()
  }
}
| gilt/sundial | app/config/StackConfig.scala | Scala | mit | 2,481 |
package org.dbpedia.spotlight.io
/**
* Copyright 2011 Pablo Mendes, Max Jakob
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.dbpedia.extraction.wikiparser.impl.simple.SimpleWikiParser
import org.dbpedia.spotlight.string.WikiMarkupStripper
import org.dbpedia.spotlight.model._
import org.dbpedia.extraction.wikiparser._
import org.dbpedia.extraction.sources.{MemorySource, WikiPage, Source, XMLSource}
import org.dbpedia.spotlight.log.SpotlightLog
import java.io.File
import xml.{XML, Elem}
import org.dbpedia.extraction.util.Language
/**
 * Loads Occurrences from a wiki dump: every internal wiki link in the Main
 * namespace becomes one DBpediaResourceOccurrence (resource URI + surface form +
 * character offset within its paragraph).
 */
object WikiOccurrenceSource
{
    // split at paragraphs (newlines and <br/> variants, possibly wrapping a stray tag)
    val splitDocumentRegex = """(\\n|(<br\\s?/?>))(</?\\w+?\\s?/?>)?(\\n|(<br\\s?/?>))+"""

    /**
     * Creates an DBpediaResourceOccurrence Source from a dump file.
     */
    def fromXMLDumpFile(dumpFile : File, language: Language) : OccurrenceSource =
    {
        new WikiOccurrenceSource(XMLSource.fromFile(dumpFile, language, _.namespace == Namespace.Main))
    }

    /**
     * Creates an DBpediaResourceOccurrence Source from an XML root element.
     */
    def fromXML(xml : Elem, language: Language) : OccurrenceSource =
    {
        new WikiOccurrenceSource(XMLSource.fromXML(xml, language))
    }

    /**
     * Creates an DBpediaResourceOccurrence Source from an XML root element string.
     */
    def fromXML(xmlString : String, language: Language) : OccurrenceSource =
    {
        val xml : Elem = XML.loadString("<dummy>" + xmlString + "</dummy>") // dummy necessary: when a string "<page><b>text</b></page>" is given, <page> is the root tag and can't be found with the command xml \\ "page"
        new WikiOccurrenceSource(XMLSource.fromXML(xml, language))
    }

    /**
     * Creates a DBpediaResourceOccurrence Source from a Wikipedia heldout paragraph file.
     *
     * @see WikipediaHeldoutCorpus in the eval module
     *
     * @param testFile Iterator of lines containing single MediaWiki paragraphs that
     *                 were extracted as heldout data from the MediaWiki dump.
     * @return an OccurrenceSource over the heldout paragraphs
     */
    def fromPigHeldoutFile(testFile: Iterator[String]): OccurrenceSource = {
        new WikiOccurrenceSource(
            new MemorySource(
                testFile.map{ line =>
                    new WikiPage(new WikiTitle("Test Paragraph", Namespace.Main, Language.English), line.trim())
                }.toTraversable.asInstanceOf[scala.collection.immutable.Traversable[org.dbpedia.extraction.sources.WikiPage]]
            )
        )
    }

    /**
     * DBpediaResourceOccurrence Source which reads from a wiki pages source.
     */
    private class WikiOccurrenceSource(wikiPages : Source) extends OccurrenceSource
    {
        val wikiParser = new SimpleWikiParser()

        override def foreach[U](f : DBpediaResourceOccurrence => U) : Unit =
        {
            var pageCount = 0
            var occCount = 0

            for (wikiPage <- wikiPages)
            {
                // clean the wiki markup from everything but links
                val cleanSource = WikiMarkupStripper.stripEverything(wikiPage.source)

                // parse the (clean) wiki page
                val pageNode = wikiParser( WikiPageUtil.copyWikiPage(wikiPage, cleanSource) )

                // exclude redirect and disambiguation pages
                if (!pageNode.isRedirect && !pageNode.isDisambiguation) {

                    // split the page node into paragraphs
                    val paragraphs = NodeUtil.splitNodes(pageNode.children, splitDocumentRegex)
                    var paragraphCount = 0
                    for (paragraph <- paragraphs)
                    {
                        paragraphCount += 1
                        val idBase = pageNode.title.encoded+"-p"+paragraphCount
                        getOccurrences(paragraph, idBase).foreach{occ => occCount += 1
                                                                        f(occ)}
                    }

                    pageCount += 1
                    // typo fix: "avarage" -> "average" in the progress log messages
                    if (pageCount %5000 == 0) {
                        SpotlightLog.debug(this.getClass, "Processed %d Wikipedia definition pages (average %.2f links per page)", pageCount, occCount/pageCount.toDouble)
                    }
                    if (pageCount %100000 == 0) {
                        SpotlightLog.info(this.getClass, "Processed %d Wikipedia definition pages (average %.2f links per page)", pageCount, occCount/pageCount.toDouble)
                    }
                }
            }
        }
    }

    /**
     * Extracts all occurrences from one paragraph: walks the parsed nodes, rebuilding
     * the paragraph's plain text and recording (URI, surface form, offset) for each
     * Main-namespace internal link, then materializes them as occurrences sharing one
     * Text instance.
     */
    def getOccurrences(paragraph : List[Node], occurrenceIdBase : String) : List[DBpediaResourceOccurrence] =
    {
        var paragraphText = ""

        // collect URIs, surface forms and their offset in this paragraph
        var occurrenceTriples = List[(String, String, Int)]()

        for (node <- paragraph) {
            node match {
                // for text nodes, collect the paragraph text
                case textNode : TextNode => paragraphText += textNode.text

                // for wiki page link nodes collect URI, surface form and offset
                // if the link points to a page in the Main namespace
                case internalLink : InternalLinkNode => {
                    val surfaceFormOffset = paragraphText.length

                    var surfaceForm = internalLink.children.collect { case TextNode(text, _) => WikiMarkupStripper.stripMultiPipe(text) }.mkString("")
                    surfaceForm = surfaceForm.trim.replaceAll(""" \\(.+?\\)$""", "").replaceAll("""^(The|A) """, "") //TODO should be a filter/transformer instead of hardcoded?

                    paragraphText += surfaceForm

                    if (internalLink.destination.namespace == Namespace.Main && surfaceForm.nonEmpty) {
                        occurrenceTriples ::= new Tuple3(internalLink.destination.encoded, surfaceForm, surfaceFormOffset)
                    }
                }
                case _ =>
            }
        }

        // make a Text instance and check if it is valid
        val textInstance = new Text(paragraphText.replaceAll("""\\s""", " "))

        var occurrenceCount = 0
        // make an DBpediaResourceOccurrences
        occurrenceTriples.map{ case (uri : String, sf : String, offset : Int) => {
            occurrenceCount += 1
            val id = occurrenceIdBase + "l" + occurrenceCount
            new DBpediaResourceOccurrence(id, new DBpediaResource(uri), new SurfaceForm(sf), textInstance, offset, Provenance.Wikipedia) }
        }
    }
}
} | Skunnyk/dbpedia-spotlight-model | index/src/main/scala/org/dbpedia/spotlight/io/WikiOccurrenceSource.scala | Scala | apache-2.0 | 7,028 |
/*******************************************************************************
* Copyright (c) 2013 Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/gpl.html
*
* Contributors:
* Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr> - initial API and implementation
******************************************************************************/
package list.P04
import util.ExerciseTemplate
trait P04 extends ExerciseTemplate {

  /*
   * P04 (*) Find the number of elements of a list.
   *
   * Example:
   *   scala> length(List(1, 1, 2, 3, 5, 8))
   *   res0: Int = 6
   */
  val name = "P04 (Find the number of elements of a list)"

  /** Returns the number of elements contained in `l`. */
  def length[T](l: List[T]): Int

  test("Invoking length on a list should return its length") {
    assert(length(Nil) == 0)
    assert(length(List(2, 1)) == 2)
    assert(length(List(1, 1, 2, 3, 5, 8)) == 6)
  }
}
| GuillaumeDD/scala99problems | src/main/scala/list/P04/P04.scala | Scala | gpl-3.0 | 1,130 |
package org.mlflow.spark.autologging
import org.scalatest.FunSuite
/** Singleton used by the suite to exercise reflective lookup of Scala objects. */
object TestObject {
  def myMethod: String = {
    "hi"
  }
}
/** Base class exposing protected members that the reflection tests must reach. */
abstract class TestAbstractClass {
  // Protected on purpose: the suite verifies reflection can still invoke it.
  protected def addNumbers(x: Int, y: Int): Int = y + x
  // Protected on purpose: the suite verifies reflection can still read it.
  protected val myProtectedVal: Int = 5
}
/** Concrete subclass with a private field, used as a reflection target in the suite. */
class RealClass extends TestAbstractClass {
  // Private on purpose: the suite reads it reflectively by name ("myField").
  private val myField: String = "myCoolVal"

  /** Squares its argument; invoked reflectively by name in the tests. */
  def subclassMethod(x: Int): Int = {
    val squared = x * x
    squared
  }
}
class ReflectionUtilsSuite extends FunSuite {

  test("Can get private & protected fields of an object via reflection") {
    val target = new RealClass()
    // private field declared on RealClass itself
    val privateValue = ReflectionUtils.getField(target, "myField").asInstanceOf[String]
    assert(privateValue == "myCoolVal")
    // protected field inherited from TestAbstractClass
    val protectedValue = ReflectionUtils.getField(target, "myProtectedVal").asInstanceOf[Int]
    assert(protectedValue == 5)
  }

  test("Can call methods via reflection") {
    val target = new RealClass()
    // method declared on the subclass
    val squareArgs: Seq[Object] = Seq[Integer](3)
    val squared = ReflectionUtils.callMethod(target, "subclassMethod", squareArgs).asInstanceOf[Int]
    assert(squared == 9)
    // protected method inherited from the abstract base class
    val sumArgs: Seq[Object] = Seq[Integer](5, 6)
    val summed = ReflectionUtils.callMethod(target, "addNumbers", sumArgs).asInstanceOf[Int]
    assert(summed == 11)
  }

  test("Can get Scala object and call methods via reflection") {
    val singleton = ReflectionUtils.getScalaObjectByName("org.mlflow.spark.autologging.TestObject")
    val greeting = ReflectionUtils.callMethod(singleton, "myMethod", Seq.empty).asInstanceOf[String]
    assert(greeting == "hi")
  }
}
| mlflow/mlflow | mlflow/java/spark/src/test/scala/org/mlflow/spark/autologging/ReflectionUtilsSuite.scala | Scala | apache-2.0 | 1,438 |
package com.wavesplatform.api.http.requests
import play.api.libs.json.{Format, Json}
/** Request payload for the v2 asset re-issue HTTP endpoint. */
case class ReissueV2Request(
    sender: String,
    assetId: String,
    quantity: Long,
    reissuable: Boolean,
    fee: Long,
    timestamp: Option[Long] = None
)

object ReissueV2Request {
  // JSON (de)serializer derived from the case-class shape.
  implicit val reissueFormat: Format[ReissueV2Request] = Json.format[ReissueV2Request]
}
| wavesplatform/Waves | node/src/test/scala/com/wavesplatform/api/http/requests/ReissueV2Request.scala | Scala | mit | 326 |
package org.orbeon.dom.tree
/** Mixin allowing an arbitrary user payload to be attached to the implementor. */
trait WithData {

  // Backing slot for the payload; stays null until setData is first called.
  private var attached: AnyRef = _

  /** Attaches `data`, replacing any previously stored value. */
  def setData(data: AnyRef): Unit = {
    attached = data
  }

  /** Returns the attached value, or null when none has been set. */
  def getData: AnyRef = attached
}
| orbeon/orbeon-forms | dom/src/main/scala/org/orbeon/dom/tree/WithData.scala | Scala | lgpl-2.1 | 159 |
/**
* Copyright 2018 ZuInnoTe (Jรถrn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.flink.office.example.excel
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.api.scala._
import org.apache.flink.core.fs.FileSystem
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.text.DecimalFormat
import java.text.NumberFormat
import java.util.Locale
import org.apache.flink.core.fs.Path
import org.zuinnote.flink.office.excel.SimpleExcelFlinkFileInputFormat
import org.zuinnote.flink.office.excel.SimpleExcelFlinkFileOutputFormat
import org.zuinnote.hadoop.office.format.common._
import org.zuinnote.hadoop.office.format.common.dao._
import org.apache.flink.api.scala._
import org.zuinnote.hadoop.office.format.common.HadoopOfficeReadConfiguration
import org.zuinnote.hadoop.office.format.common.HadoopOfficeWriteConfiguration
/**
* Author: Jรถrn Franke <zuinnote@gmail.com>
*
*/
/**
 * Demonstrates the Flink DataSource / DataSink of the HadoopOffice library.
 * Reads an Excel file skipping the header line (first line) via the Flink DataSource
 * and writes it back via the Flink DataSink (without writing the header line).
 * Datatypes in the Excel are detected automatically, so the resulting Flink dataset
 * uses Flink basic types (string, byte, int, decimal, ...). The detection requires
 * the Excel to be iterated twice (configurable, e.g. only the first 10 lines).
 */
object FlinkDSScalaExcelSimple {

  // MIME types of the two Excel container formats supported for output.
  val MIMETYPE_XLSX = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
  val MIMETYPE_XLS = "application/vnd.ms-excel";

  /** Entry point: parses --input/--output arguments and runs the round trip. */
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    val params: ParameterTool = ParameterTool.fromArgs(args)
    readwriteExcelDS(env, params.get("input"), params.get("output"))
    env.execute("Flink Scala DataSource/DataSink HadoopOffice read/write Excel files using Simple format (converts Excel to basic data types)")
  }

  /**
   * Reads `inputFile` (header row skipped, schema inferred over the whole file),
   * converts each row into basic datatypes, and writes the rows to `outputFile`
   * as XLSX without a header line.
   */
  def readwriteExcelDS(env: ExecutionEnvironment, inputFile: String, outputFile: String): Unit = {
    val readConfig = new HadoopOfficeReadConfiguration()
    readConfig.setLocale(new Locale.Builder().setLanguageTag("de").build())
    // important: even for non-US excel files, US must usually be used for dates,
    // because that is how Excel stores them internally
    val dateFormat: SimpleDateFormat = DateFormat.getDateInstance(DateFormat.SHORT, Locale.US).asInstanceOf[SimpleDateFormat]
    val decimalFormat: DecimalFormat = NumberFormat.getInstance(Locale.GERMAN).asInstanceOf[DecimalFormat]
    readConfig.setSimpleDateFormat(dateFormat)
    readConfig.setSimpleDecimalFormat(decimalFormat)
    readConfig.setReadHeader(true) // the Excel file contains the header in its first line
    // -1 means: scan the full Excel file when inferring the underlying schema
    val maxInferRows = -1
    val inputFormat = new SimpleExcelFlinkFileInputFormat(readConfig, maxInferRows)
    val excelInData = env.readFile(inputFormat, inputFile)
    // Each row is an Array[AnyRef] of simple datatypes (int, long, string, ...).
    // The identity copy below is purely illustrative and could be skipped.
    val excelData = excelInData.map { row =>
      val copied = new Array[AnyRef](row.length)
      Array.copy(row, 0, copied, 0, row.length)
      copied
    }
    // write the converted rows back out as an XLSX file
    val writeConfig = new HadoopOfficeWriteConfiguration(new Path(outputFile).getName())
    writeConfig.setMimeType(MIMETYPE_XLSX)
    writeConfig.setLocale(new Locale.Builder().setLanguageTag("de").build())
    writeConfig.setSimpleDateFormat(dateFormat)
    writeConfig.setSimpleDecimalFormat(decimalFormat)
    val defaultSheetName = "Sheet2"
    val header = null // would be an Array of Strings; null means no header line is written
    val outputFormat = new SimpleExcelFlinkFileOutputFormat(writeConfig, header, defaultSheetName)
    excelData.write(outputFormat, outputFile)
  }
}
| ZuInnoTe/hadoopoffice | examples/scala-flinkds-excel-simple/src/main/scala/org/zuinnote/flink/office/example/excel/FlinkDSScalaExcelSimple.scala | Scala | apache-2.0 | 4,795 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xml
import org.orbeon.saxon.om.NodeInfo
import org.orbeon.scaxon.NodeConversions._
import org.orbeon.scaxon.SimplePath._
import org.scalatest.funspec.AnyFunSpec
/** Unit tests for SaxonUtils: NCName sanitization and node-path construction. */
class SaxonUtilsTest extends AnyFunSpec {

  describe("The `makeNCName` function") {

    // Empty / blank inputs are rejected outright.
    it("must not allow an empty name") {
      intercept[IllegalArgumentException] {
        SaxonUtils.makeNCName("", keepFirstIfPossible = true)
      }
    }

    it("must not allow a blank name") {
      intercept[IllegalArgumentException] {
        SaxonUtils.makeNCName(" ", keepFirstIfPossible = true)
      }
    }

    // (expected output, input, keepFirstIfPossible) triples
    val expected =
      List(
        ("foo" , "foo", true),
        ("_foo_" , " foo ", true),
        ("_42foos" , "42foos", true),
        ("_2foos" , "42foos", false),
        ("foo_bar_", "foo(bar)", true)
      )

    for ((out, in, keepFirstIfPossible) <- expected)
      it(s"must convert `$in` with `keepFirstIfPossible = $keepFirstIfPossible`") {
        assert(out === SaxonUtils.makeNCName(in, keepFirstIfPossible))
      }
  }

  describe("The `buildNodePath` function") {

    // Fixture document: a Form Builder-style XHTML/XForms fragment.
    val doc: NodeInfo =
      <xh:html
        xmlns:fr="http://orbeon.org/oxf/xml/form-runner"
        xmlns:xf="http://www.w3.org/2002/xforms"
        xmlns:xh="http://www.w3.org/1999/xhtml">
        <xh:head>
          <xf:model>
            <xf:instance>
              <fr:databound-select1 id="control-3-control" appearance="minimal" resource="" bind="control-3-bind">
                <xf:label ref="$form-resources/control-3/label"/>
                <xf:hint ref="$form-resources/control-3/hint"/>
                <xf:alert ref="$fr-resources/detail/labels/alert"/>
                <xf:itemset ref="item">
                  <xf:label ref="label"/>
                  <xf:value ref="value"/>
                </xf:itemset>
              </fr:databound-select1>
            </xf:instance>
          </xf:model>
        </xh:head>
        <xh:body>
          <xf:input ref="@resource">
            <xf:label lang="en">Resource URL</xf:label>
            <xf:hint lang="en">HTTP URL returning data used to populate the dropdown</xf:hint>
          </xf:input>
          <xf:input ref="xf:itemset/@ref">
            <xf:label>Items</xf:label>
            <xf:hint>XPath expression returning one node for each item</xf:hint>
          </xf:input>
          <xf:input ref="xf:itemset/xf:label/@ref">
            <xf:label>Label</xf:label>
            <xf:hint>XPath expression relative to an item node</xf:hint>
          </xf:input>
          <xf:input ref="xf:itemset/xf:value/@ref">
            <xf:label>Value</xf:label>
            <xf:hint>XPath expression relative to an item node</xf:hint>
          </xf:input>
        </xh:body>
      </xh:html>

    // (test description, node under test, expected path steps from the root)
    val expected = List[(String, NodeInfo, List[String])](
      (
        "root node",
        doc.root,
        Nil
      ),
      (
        "root element",
        doc.rootElement,
        List(
          "*:html[namespace-uri() = 'http://www.w3.org/1999/xhtml']"
        )
      ),
      (
        "first `*:label` element",
        doc descendant "*:label" head,
        List(
          "*:html[namespace-uri() = 'http://www.w3.org/1999/xhtml']",
          "*:head[namespace-uri() = 'http://www.w3.org/1999/xhtml'][1]",
          "*:model[namespace-uri() = 'http://www.w3.org/2002/xforms'][1]",
          "*:instance[namespace-uri() = 'http://www.w3.org/2002/xforms'][1]",
          "*:databound-select1[namespace-uri() = 'http://orbeon.org/oxf/xml/form-runner'][1]",
          "*:label[namespace-uri() = 'http://www.w3.org/2002/xforms'][1]"
        )
      ),
      (
        "first `appearance` attribute",
        doc descendant * att "appearance" head,
        List(
          "*:html[namespace-uri() = 'http://www.w3.org/1999/xhtml']",
          "*:head[namespace-uri() = 'http://www.w3.org/1999/xhtml'][1]",
          "*:model[namespace-uri() = 'http://www.w3.org/2002/xforms'][1]",
          "*:instance[namespace-uri() = 'http://www.w3.org/2002/xforms'][1]",
          "*:databound-select1[namespace-uri() = 'http://orbeon.org/oxf/xml/form-runner'][1]",
          "@appearance"
        )
      ),
      (
        "`*:input` element at index 3",
        doc descendant "*:input" apply 3,
        List(
          "*:html[namespace-uri() = 'http://www.w3.org/1999/xhtml']",
          "*:body[namespace-uri() = 'http://www.w3.org/1999/xhtml'][1]",
          "*:input[namespace-uri() = 'http://www.w3.org/2002/xforms'][4]"
        )
      )
    )

    for ((description, node, path) <- expected)
      it(description) {
        assert(path === SaxonUtils.buildNodePath(node))
      }
  }
} | orbeon/orbeon-forms | src/test/scala/org/orbeon/oxf/xml/SaxonUtilsTest.scala | Scala | lgpl-2.1 | 5,432 |
package com.twitter.finagle.memcached.integration
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Memcached.Server
import com.twitter.finagle.client.StackClient
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.memcached.integration.MemcachedSslTest.{
chainCert,
clientCert,
clientKey,
serverCert,
serverKey
}
import com.twitter.finagle.memcached.integration.external.InProcessMemcached
import com.twitter.finagle.memcached.partitioning.MemcachedPartitioningService
import com.twitter.finagle.memcached.protocol.{Command, Response}
import com.twitter.finagle.memcached.util.AtomicMap
import com.twitter.finagle.memcached.{Interpreter, InterpreterService, TwemcacheClient}
import com.twitter.finagle.naming.BindingFactory
import com.twitter.finagle.partitioning.param
import com.twitter.finagle.service.TimeoutFilter
import com.twitter.finagle.ssl.client.SslClientConfiguration
import com.twitter.finagle.ssl.server.SslServerConfiguration
import com.twitter.finagle.ssl.{KeyCredentials, TrustCredentials}
import com.twitter.finagle.{Address, ListeningServer, Memcached, Name, ServiceFactory, Stack}
import com.twitter.hashing.KeyHasher
import com.twitter.io.{Buf, TempFile}
import com.twitter.util.{Await, Awaitable}
import java.net.{InetAddress, InetSocketAddress}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
/** Test-only X.509 material loaded from resources, shared by the SSL configurations below. */
object MemcachedSslTest {
  // CA chain trusted by both sides of the TLS handshake.
  private val chainCert = TempFile.fromResourcePath("/ssl/certs/svc-test-chain.cert.pem")
  // deleteOnExit is handled by TempFile
  // Server certificate and its PKCS#8 private key.
  private val serverCert = TempFile.fromResourcePath("/ssl/certs/svc-test-server.cert.pem")
  // deleteOnExit is handled by TempFile
  private val serverKey = TempFile.fromResourcePath("/ssl/keys/svc-test-server-pkcs8.key.pem")
  // deleteOnExit is handled by TempFile
  // Client certificate and its PKCS#8 private key.
  private val clientCert = TempFile.fromResourcePath("/ssl/certs/svc-test-client.cert.pem")
  // deleteOnExit is handled by TempFile
  private val clientKey = TempFile.fromResourcePath("/ssl/keys/svc-test-client-pkcs8.key.pem")
  // deleteOnExit is handled by TempFile
}
/**
 * End-to-end test: a TLS-enabled in-process memcached server talked to by a
 * TLS-enabled partition-aware memcached client, for two server key-material styles.
 */
class MemcachedSslTest extends AnyFunSuite with BeforeAndAfterAll {

  // Blocks on `awaitable` with a generous timeout so slow CI does not flake.
  protected[this] def await[T](awaitable: Awaitable[T]): T = Await.result(awaitable, 15.seconds)

  /** In-process memcached server wrapper that serves over TLS with `serverConfig`. */
  class SslMemcachedServer(serverConfig: SslServerConfiguration) {

    // Interpreter backed by in-memory maps: a fake memcached implementation.
    private[this] val service: InterpreterService = {
      val interpreter = new Interpreter(new AtomicMap(InProcessMemcached.initialMaps))
      new InterpreterService(interpreter)
    }

    private[this] val serverSpec: Server =
      Memcached.server.withTransport.tls(serverConfig).withLabel("finagle")

    // Set while the server is listening; None otherwise.
    private[this] var server: Option[ListeningServer] = None

    /** Binds to an ephemeral loopback port and starts serving. */
    def start(): ListeningServer = {
      val address = new InetSocketAddress(InetAddress.getLoopbackAddress, 0)
      server = Some(serverSpec.serve(address, service))
      server.get
    }

    /** Stops the server if running; `blocking = true` waits for the close to finish. */
    def stop(blocking: Boolean = false): Unit = {
      server.foreach { server =>
        if (blocking) Await.result(server.close())
        else server.close()
        this.server = None
      }
    }
  }

  private[this] def newClientStack(): Stack[ServiceFactory[Command, Response]] = {
    // create a partitioning aware finagle client by inserting the PartitioningService appropriately
    StackClient
      .newStack[Command, Response]
      .insertAfter(
        BindingFactory.role,
        MemcachedPartitioningService.module
      )
  }

  /** Builds a TLS memcached client with ketama hashing against `dest`. */
  def createClient(dest: Name, label: String, clientConfig: SslClientConfiguration) = {
    TwemcacheClient(
      Memcached.client
        .configured(param.KeyHasher(KeyHasher.KETAMA))
        .configured(TimeoutFilter.Param(10000.milliseconds))
        .configured(param.EjectFailedHost(false))
        .configured(LoadBalancerFactory.ReplicateAddresses(2))
        .withStack(newClientStack())
        .withTransport.tls(clientConfig)
        .newService(dest, label)
    )
  }

  // Server 1: certificate and key supplied separately, trust from a CA collection.
  private val serverConfig1 = SslServerConfiguration(
    keyCredentials = KeyCredentials.CertAndKey(serverCert, serverKey),
    trustCredentials = TrustCredentials.CertCollection(chainCert)
  )
  val server1 = new SslMemcachedServer(serverConfig1)

  // Server 2: certificate, key and chain supplied together (no explicit trust config).
  private val serverConfig2 = SslServerConfiguration(
    keyCredentials = KeyCredentials.CertKeyAndChain(serverCert, serverKey, chainCert)
  )
  val server2 = new SslMemcachedServer(serverConfig2)

  // Tear down both servers once all tests have run.
  override def afterAll(): Unit = {
    server1.stop()
    server2.stop()
  }

  test("server configured with 3rdparty credentials") {
    val clientConfig = SslClientConfiguration(
      keyCredentials = KeyCredentials.CertsAndKey(clientCert, clientKey),
      trustCredentials = TrustCredentials.CertCollection(chainCert)
    )
    val client = createClient(
      Name.bound(Address(server1.start().boundAddress.asInstanceOf[InetSocketAddress])),
      "test_ssl_client",
      clientConfig)
    // Round-trip one key/value pair over the TLS connection.
    await(client.set("foo", Buf.Utf8("bar")))
    assert(await(client.get("foo")).get == Buf.Utf8("bar"))
    await(client.close())
  }

  test("server configured with self provided credentials") {
    val clientConfig = SslClientConfiguration(
      keyCredentials = KeyCredentials.CertsAndKey(clientCert, clientKey),
      trustCredentials = TrustCredentials.CertCollection(chainCert)
    )
    val client = createClient(
      Name.bound(Address(server2.start().boundAddress.asInstanceOf[InetSocketAddress])),
      "test_ssl_client",
      clientConfig)
    // Round-trip one key/value pair over the TLS connection.
    await(client.set("foo", Buf.Utf8("bar")))
    assert(await(client.get("foo")).get == Buf.Utf8("bar"))
    await(client.close())
  }
}
| twitter/finagle | finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/MemcachedSslTest.scala | Scala | apache-2.0 | 5,599 |
/*
* Copyright 2014 Claude Mamo
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package controllers
import play.api.mvc._
object Application extends Controller {

  /** Serves the application's landing page template. */
  def index = Action { implicit request =>
    val page = views.html.index()
    Ok(page)
  }
} | hivefans/kafka-web-console | app/controllers/Application.scala | Scala | apache-2.0 | 758 |
package controllers
import play.api._
import play.api.mvc._
import play.api.data._
import play.api.data.Forms._
import models._
import services.UserService
object UserController extends Controller {

  /** Sign-up form definition: non-empty name, valid email address, non-empty password. */
  val userForm = Form(
    tuple(
      "name" -> nonEmptyText,
      "email" -> email,
      "password" -> nonEmptyText))

  /** Renders the initial sign-up screen, pre-filled with placeholder values. */
  def entryInit = Action { implicit request =>
    val filledForm = userForm.fill("user name", "email address", "password")
    // flash "result" carries the outcome message of a previous failed submission
    Ok(views.html.user.entry(flash.get("result").getOrElse(""), filledForm))
  }

  /** Handles the user-registration form submission. */
  def entrySubmit = Action { implicit request =>
    userForm.bindFromRequest.fold(
      errors => {
        // validation failed: re-render the form with error markers
        BadRequest(views.html.user.entry("error", errors))
      },
      success => {
        val (name, email, password) = success
        UserService.entry(name,email,password) match {
          case Some(id) => {
            // registration succeeded: re-load the user and show the confirmation page
            UserService.findByPk(id) match {
              case Some(u) => Ok(views.html.user.entrySubmit(u))
              case None => Redirect("/user/entry").flashing("result" -> "user not found")
            }
          }
          case None => Redirect("/user/entry").flashing("result" -> "entry failure")
        }
      })
  }
}
| khonda/playframeworkPractice | app/controllers/UserController.scala | Scala | mit | 1,266 |
package ul
import collection.mutable.{ListBuffer, ArrayBuffer, HashMap, Queue}
import xml.{Node}
/** Basic class representing introspectable attribute */
class IAttr(
var __tag: String,
var __name: String = "",
var __descr: String = "",
var __value: Any = "",
var __values: Seq[Any] = Nil,
var __show: Boolean = true,
var __ro: Boolean = false,
var __fix: Boolean = false,
var __opts: HashMap[String,Any] = new HashMap[String,Any],
var __units: String = "",
var __min: Option[Any] = None,
var __max: Option[Any] = None,
var __regex: Option[String] = None,
var __getCb: ArrayBuffer[(IAttr)=>Unit] =
new ArrayBuffer[(IAttr)=>Unit],
var __setCb: ArrayBuffer[(IAttr,Any)=>Unit] =
new ArrayBuffer[(IAttr,Any)=>Unit],
var __descrs:Seq[String] = Nil
) {
def copy: IAttr = new IAttr(
__tag, __name, __descr, __value, __values,
__show, __ro, __fix,
new HashMap[String,Any] ++ opts,
__units, __min, __max, __regex,
__getCb, __setCb, __descrs)
def copyTo( a: IAttr ) = {
if (this.__tag == a.__tag) {
a.__name = __name;
a.__descr = __descr;
a.__value = __value;
a.__values = __values;
a.__show = __show;
a.__ro = __ro;
a.__fix = __fix;
a.__opts.clear;
a.__opts ++= __opts;
a.__units = __units;
a.__min = __min;
a.__max = __max;
a.__regex = __regex;
a.__getCb = __getCb;
a.__setCb = __setCb;
a.__descrs = __descrs;
}
}
def value: Any = {
if (__getCb != null) {for (cb <- __getCb) cb(this)}
__value
}
def value_=(newValue: Any) = {
if (__setCb != null) {for (cb <- __setCb) cb(this, newValue)}
__value = newValue
}
def get = value
def set(newValue: Any) = {value = newValue}
def i:Int = __value match {
case v:Int => v
case v:Long => v.toInt
case v:Double => v.toInt
case v:String => v.toInt
case v:Boolean => if (v) 1 else 0
case _ => 0
}
def i_=(newValue: Int) = { value = newValue }
def l:Long = __value match {
case v:Int => v.toLong
case v:Long => v
case v:Double => v.toLong
case v:String => v.toLong
case v:Boolean => if (v) 1L else 0L
case _ => 0L
}
def l_=(newValue: Long) = { value = newValue }
def f:Float = __value match {
case v:Int => v.toFloat
case v:Long => v.toFloat
case v:Float => v
case v:Double => v.toFloat
case v:String => v.toFloat
case v:Boolean => if (v) 1.0f else 0.0f
case _ => 0.0f
}
def d_=(newValue: Double) = { value = newValue }
def d:Double = __value match {
case v:Int => v.toDouble
case v:Long => v.toDouble
case v:Float => v.toDouble
case v:Double => v
case v:String => v.toDouble
case v:Boolean => if (v) 1.0 else 0.0
case _ => 0.0
}
def f_=(newValue: Double) = { value = newValue }
def s:String = if (__value.isInstanceOf[String]) __value.asInstanceOf[String] else __value.toString
def s_=(newValue: String) = { value = newValue }
def b:Boolean = __value match {
case v:Boolean => v
case v:Int => v != 0
case v:Long => v != 0L
case v:Double => v != 0.0
case v:String => v.toBoolean
case _ => false
}
def b_=(newValue: Boolean) = { value = newValue }
def o:IObjT = __value.asInstanceOf[IObjT]
def o_=(newValue: IObjT) = { value = newValue }
var __valueDef = __value
def valueDef = this.__valueDef
def valueDef_=(newValueDef: Any) = { __valueDef = newValueDef }
def values = this.__values
def values_=(newValues: Seq[Any]) = { __values = newValues }
/// string used for attribute type identification
def typeStr: String = {
__value match {
case v:Int => "int"
case v:Long => "long"
case v:Double => "float"
case v:String => "str"
case v:Boolean => "bool"
case v:IObjT => "iobj"
case null => "null"
case _ => "none"
}
}
// Metadata accessors: each pair exposes a private backing field (declared
// outside this chunk) as a plain getter plus an assignment-style setter.
def tag = __tag
def tag_=(newTag: String) = { __tag = newTag }
def name = __name
def name_=(newName: String) = { __name = newName }
def descr = __descr
def descr_=(newDescr: String) = { __descr = newDescr }
def descrs = __descrs;
def descrs_=(newDescrs:Seq[String]) = { __descrs = newDescrs }
// Whether the attribute should be shown in UIs.
def show = __show
def show_=(newShow: Boolean) = { __show = newShow }
// Read-only flag.
def ro = __ro
def ro_=(newRO: Boolean) = { __ro = newRO }
// Fixed (non-editable) flag.
def fix = __fix
def fix_=(newFix: Boolean) = { __fix = newFix }
// Free-form option map.
def opts = __opts
def opts_=(newOpts: HashMap[String, Any]) = { __opts = newOpts }
// Unit label (e.g. for display).
def units = __units
def units_=(newUnits: String) = { __units = newUnits }
// Optional lower bound; assigning null clears it, any other value wraps in Option.
def min = __min
def min_=(newValue: Any) = {
  __min = newValue match {
    case null => None
    //case v:Option[Any] => v
    case v => Option(v)
  }
}
// Optional upper bound; same null-clears convention as `min`.
def max = __max
def max_=(newValue: Any) = {
  __max = newValue match {
    case null => None
    //case v:Option[Any] => v
    case v => Option(v)
  }
}
// Optional validation regex; non-string arguments are stringified.
def regex = __regex
def regex_=(newValue: Any) = {
  __regex = newValue match {
    case null => None
    //case v:Option[String] => v
    case v:String => Option(v)
    case v => Option(v.toString)
  }
}
// Callback lists fired on get/set of the attribute value.
def getCb: ArrayBuffer[(IAttr)=>Unit] = __getCb
def getCb_= (newGetCb: ArrayBuffer[(IAttr)=>Unit]) = { __getCb = newGetCb }
def setCb: ArrayBuffer[(IAttr,Any)=>Unit] = __setCb
def setCb_= (newSetCb: ArrayBuffer[(IAttr,Any)=>Unit]) = { __setCb = newSetCb }
// NOTE(review): throws NullPointerException when the stored value is null.
override def toString = value.toString
/** Parse `newVal` into this attribute's value, keeping the current runtime
 *  type of `__value` (Int stays Int, Long stays Long, ...).
 *
 *  @return true when parsing and assignment succeeded; false when parsing
 *          failed or the current type is not parseable (IObjT / null).
 */
def fromString( newVal: String ): Boolean = {
  // scala.util.Try catches only non-fatal exceptions, so fatal errors
  // (OutOfMemoryError, InterruptedException, ...) now propagate instead of
  // being swallowed by the previous `catch { case _: Throwable => }`.
  def attempt(assign: => Unit): Boolean = scala.util.Try(assign).isSuccess
  __value match {
    case v:Int => attempt { value = newVal.toInt }
    case v:Long => attempt { value = newVal.toLong }
    case v:Double => attempt { value = newVal.toDouble }
    case v:String => value = newVal; true
    case v:Boolean => attempt { value = newVal.toBoolean }
    case v:IObjT => false
    case null => false
    case _ => false
  }
}
}
/// Trait representing object with attributes.
/// Attributes are kept in insertion order in `__attrs`; `__attrsIdx` maps
/// tag -> index for O(1) lookup and must be rebuilt (attrsReindex) after any
/// structural change.
trait IObjT {
  val __attrs = new ArrayBuffer[IAttr]
  val __tag: String = ""
  val __attrsIdx = HashMap[String, Int]()
  attrsReindex
  /// Rebuild the tag -> index map from scratch.
  def attrsReindex = {
    __attrsIdx.clear()
    for (i <- 0 to __attrs.length-1) {
      __attrsIdx(__attrs(i).tag) = i
    }
  }
  /// Append one attribute; reindexing is optional so bulk adds can defer it.
  def attrAdd(newAttr: IAttr, reindex: Boolean=false) = { __attrs += newAttr; if (reindex) attrsReindex }
  def attrsAdd(newAttrs:Seq[IAttr]):IObjT = { __attrs ++= newAttrs; attrsReindex; this }
  def attrsAdd(newAttrs:IObjT):IObjT = { __attrs ++= newAttrs.attrs; attrsReindex; this }
  def attrs = __attrs
  // NOTE(review): replaces contents but does not call attrsReindex — the
  // index map is stale until the caller reindexes; confirm intended.
  def attrs_= (newAttrs: ArrayBuffer[IAttr]) = { __attrs.clear(); __attrs ++= newAttrs }
  def attrsNum = __attrs.length
  def attrsTags: Seq[String] = for (a <- __attrs) yield a.tag
  def attrsHaveTag( tag: String): Boolean = __attrsIdx.contains(tag)
  /// Lookup by tag; returns null when absent.
  def attr( tag: String ): IAttr = {
    if ( attrsHaveTag(tag) ) __attrs( attrIndex(tag) )
    else null
  }
  /// Value lookup by tag; returns null when absent.
  def attrVal( tag: String ): Any = {
    attrIndex(tag) match {
      case -1 => null
      case i => __attrs( i ).value
    }
  }
  /// Index lookup by tag; -1 when absent.
  def attrIndex( tag: String ): Int = {
    if ( attrsHaveTag(tag) ) __attrsIdx(tag)
    else -1
  }
  /// Remove a single attribute (no-op for unknown tags).
  def attrDel( tag: String ) = {
    if ( attrsHaveTag(tag) ) { __attrs.remove( attrIndex(tag) ); attrsReindex }
  }
  /// Remove several attributes; reindexes after each removal so subsequent
  /// index lookups stay valid.
  def attrsDel( tags: Seq[String] ) = {
    for (tag <- tags) {
      if ( attrsHaveTag(tag) ) { __attrs.remove( attrIndex(tag) ); attrsReindex }
    }
  }
  /// Snapshot of tag -> value pairs.
  def attrsToMap: Map[String, Any] = {
    (for (a <- __attrs) yield (a.tag, a.value)).toMap
  }
  /// Assign values for every known tag in the map; unknown tags are ignored.
  def attrsFromMap( map: Map[String, Any] ) = {
    for ((t,v) <- map if attrsHaveTag(t)) attr(t).value = v
  }
  /// Deep-ish copy: new IObj with copies of each attribute.
  def attrsCopy:IObj = {
    val o = new IObj;
    for (a <- attrs) o.attrAdd(a.copy)
    o.attrsReindex
    o
  }
  /// Copy matching attributes (by tag) from another object into this one.
  def attrsFromIObj( o: IObjT ) = {
    for (a <- o.attrs if (attrsHaveTag(a.tag))) a.copyTo(attr(a.tag))
  }
  /// Serialize to a simple "tag = value" per-line config string.
  def attrsToConf: String = {
    var conf = new StringBuilder
    for (a <- __attrs) {
      conf ++= a.tag; conf ++= " = "; conf ++= a.toString(); conf ++= "\\n"
    }
    return conf.toString
  }
  /// Parse a "tag = value" per-line config string; unknown tags and
  /// unparseable lines are silently skipped.
  def attrsFromConf( conf: String ) = {
    for ( s <- conf.split("\\n") ) {
      "(\\\\w+) = (.*)".r.findFirstMatchIn( s ) match {
        case Some(m) =>
          if ((m.groupCount == 2) && (attrsHaveTag(m.group(1)))) {
            attr(m.group(1)).fromString(m.group(2))
          }
        case _ =>
      }
    }
  }
  /// Serialize attributes to an <iobj> XML node.
  def attrsToXML: Node = {
    var nodes = new Queue[Node]()
    for (a <- __attrs) {
      nodes += <attr tag={a.tag} value={a.toString} />
    }
    return <iobj tag={__tag}>{nodes}</iobj>
  }
  /// Populate attributes from an <iobj> XML node.
  /// NOTE(review): the match is not exhaustive — any node that is not an
  /// <iobj> element throws a MatchError at runtime; confirm intended.
  def attrsFromXML( node:scala.xml.Node ) = {
    node match {
      case <iobj>{attrs @ _*}</iobj> =>
        for (a <- attrs) {
          if ( attrsHaveTag((a \\ "@tag").text) )
            attr((a \\ "@tag").text).fromString((a \\ "@value").text)
        }
    }
  }
}
/// Class representing object with attributes; adds map-like apply/update sugar.
class IObj(__attrsInit:Seq[IAttr] = Nil) extends IObjT {
  attrsAdd(__attrsInit);
  /** Get attribute by its tag; returns null for unknown tags. */
  def apply( tag: String ) = {
    if (attrsHaveTag(tag)) attr(tag)
    else null
  }
  /** Set attribute by its tag, coercing `newValue` to suit the attribute's
   *  current runtime type. Unknown tags and unconvertible values are silently
   *  ignored.
   *
   *  NOTE(review): writes `a.__value` directly, bypassing the `value` setter
   *  (and therefore any set-callbacks). Also, several numeric branches store
   *  the incoming value without narrowing (e.g. an Int assigned into a
   *  Byte-typed attribute is stored as Int), which changes the attribute's
   *  runtime type — confirm intended.
   */
  def update( tag:String, newValue:Any ) = {
    if (attrsHaveTag(tag)) {
      val a = attr(tag);
      a.__value match {
        case v:Boolean =>
          // Booleans: numbers are true when non-zero; strings accept "true"/"1".
          newValue match {
            case nv:Boolean => a.__value = nv
            case nv:Byte => a.__value = nv != 0
            case nv:Short => a.__value = nv != 0
            case nv:Int => a.__value = nv != 0
            case nv:Long => a.__value = nv != 0L
            case nv:Float => a.__value = nv != 0.0F
            case nv:Double => a.__value = nv != 0.0
            case nv:String => a.__value = (nv.toLowerCase == "true") || (nv == "1")
            case _ =>
          }
        case v:Byte =>
          newValue match {
            case nv:Boolean => a.__value = if (nv) 1 else 0
            case nv:Byte => a.__value = nv
            case nv:Short => a.__value = nv
            case nv:Int => a.__value = nv
            case nv:Long => a.__value = nv
            case nv:Float => a.__value = nv
            case nv:Double => a.__value = nv
            // Unparseable strings are silently ignored.
            case nv:String => try { a.__value = java.lang.Byte.parseByte(nv) } catch { case _:Throwable => }
            case _ =>
          }
        case v:Short =>
          newValue match {
            case nv:Boolean => a.__value = if (nv) 1 else 0
            case nv:Byte => a.__value = nv
            case nv:Short => a.__value = nv
            case nv:Int => a.__value = nv
            case nv:Long => a.__value = nv
            case nv:Float => a.__value = nv
            case nv:Double => a.__value = nv
            case nv:String => try { a.__value = java.lang.Short.parseShort(nv) } catch { case _:Throwable => }
            case _ =>
          }
        case v:Int =>
          newValue match {
            case nv:Boolean => a.__value = if (nv) 1 else 0
            case nv:Byte => a.__value = nv
            case nv:Short => a.__value = nv
            case nv:Int => a.__value = nv
            case nv:Long => a.__value = nv
            case nv:Float => a.__value = nv
            case nv:Double => a.__value = nv
            case nv:String => try { a.__value = java.lang.Integer.parseInt(nv) } catch { case _:Throwable => }
            case _ =>
          }
        case v:Long =>
          newValue match {
            case nv:Boolean => a.__value = if (nv) 1 else 0
            case nv:Byte => a.__value = nv
            case nv:Short => a.__value = nv
            case nv:Int => a.__value = nv
            case nv:Long => a.__value = nv
            case nv:Float => a.__value = nv
            case nv:Double => a.__value = nv
            case nv:String => try { a.__value = java.lang.Long.parseLong(nv) } catch { case _:Throwable => }
            case _ =>
          }
        case v:Double =>
          newValue match {
            case nv:Boolean => a.__value = if (nv) 1 else 0
            case nv:Byte => a.__value = nv
            case nv:Short => a.__value = nv
            case nv:Int => a.__value = nv
            case nv:Long => a.__value = nv
            case nv:Float => a.__value = nv
            case nv:Double => a.__value = nv
            case nv:String => try { a.__value = java.lang.Double.parseDouble(nv) } catch { case _:Throwable => }
            case _ =>
          }
        case v:String =>
          // Any value can become a string.
          a.__value = newValue.toString;
        case _ =>
      }
    }
  }
}
| edartuz/muterm2 | repo/src/ul/IObj.scala | Scala | mit | 14,893 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.sql.{Connection, ResultSet}
import scala.reflect.ClassTag
import org.apache.spark.{Partition, SparkContext, TaskContext}
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
import org.apache.spark.api.java.function.{Function => JFunction}
import org.apache.spark.internal.Logging
import org.apache.spark.util.NextIterator
/** Partition of a JdbcRDD: holds the inclusive [lower, upper] values bound to
 *  the query's two `?` placeholders for this split. */
private[spark] class JdbcPartition(idx: Int, val lower: Long, val upper: Long) extends Partition {
  override def index: Int = idx
}
// TODO: Expose a jdbcRDD function in SparkContext and mark this as semi-private
/**
* An RDD that executes a SQL query on a JDBC connection and reads results.
* For usage example, see test case JdbcRDDSuite.
*
* @param getConnection a function that returns an open Connection.
* The RDD takes care of closing the connection.
* @param sql the text of the query.
* The query must contain two ? placeholders for parameters used to partition the results.
* E.g. "select title, author from books where ? <= id and id <= ?"
* @param lowerBound the minimum value of the first placeholder
* @param upperBound the maximum value of the second placeholder
* The lower and upper bounds are inclusive.
* @param numPartitions the number of partitions.
* Given a lowerBound of 1, an upperBound of 20, and a numPartitions of 2,
* the query would be executed twice, once with (1, 10) and once with (11, 20)
* @param mapRow a function from a ResultSet to a single row of the desired result type(s).
* This should only call getInt, getString, etc; the RDD takes care of calling next.
* The default maps a ResultSet to an array of Object.
*/
class JdbcRDD[T: ClassTag](
    sc: SparkContext,
    getConnection: () => Connection,
    sql: String,
    lowerBound: Long,
    upperBound: Long,
    numPartitions: Int,
    mapRow: (ResultSet) => T = JdbcRDD.resultSetToObjectArray _)
  extends RDD[T](sc, Nil) with Logging {

  /** Split [lowerBound, upperBound] into `numPartitions` contiguous ranges. */
  override def getPartitions: Array[Partition] = {
    // bounds are inclusive, hence the + 1 here and - 1 on end
    // BigInt avoids Long overflow when the bound span is near Long.MaxValue.
    val length = BigInt(1) + upperBound - lowerBound
    (0 until numPartitions).map { i =>
      val start = lowerBound + ((i * length) / numPartitions)
      val end = lowerBound + (((i + 1) * length) / numPartitions) - 1
      new JdbcPartition(i, start.toLong, end.toLong)
    }.toArray
  }

  /** Run the query for one partition and stream rows through `mapRow`. */
  override def compute(thePart: Partition, context: TaskContext): Iterator[T] = new NextIterator[T]
  {
    // Ensure JDBC resources are released even if the task finishes early or fails.
    context.addTaskCompletionListener{ context => closeIfNeeded() }
    val part = thePart.asInstanceOf[JdbcPartition]
    val conn = getConnection()
    val stmt = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)

    // setFetchSize(Integer.MIN_VALUE) is a mysql driver specific way to force streaming results,
    // rather than pulling entire resultset into memory.
    // see http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-implementation-notes.html
    if (conn.getMetaData.getURL.matches("jdbc:mysql:.*")) {
      stmt.setFetchSize(Integer.MIN_VALUE)
      logInfo("statement fetch size set to: " + stmt.getFetchSize + " to force MySQL streaming ")
    }

    // Bind this partition's inclusive bounds to the two `?` placeholders.
    stmt.setLong(1, part.lower)
    stmt.setLong(2, part.upper)
    val rs = stmt.executeQuery()

    override def getNext(): T = {
      if (rs.next()) {
        mapRow(rs)
      } else {
        finished = true
        null.asInstanceOf[T]
      }
    }

    override def close() {
      // Close in reverse order of acquisition; each step is guarded so one
      // failure does not prevent the later resources from closing.
      try {
        if (null != rs) {
          rs.close()
        }
      } catch {
        case e: Exception => logWarning("Exception closing resultset", e)
      }
      try {
        if (null != stmt) {
          stmt.close()
        }
      } catch {
        case e: Exception => logWarning("Exception closing statement", e)
      }
      try {
        if (null != conn) {
          conn.close()
        }
        logInfo("closed connection")
      } catch {
        case e: Exception => logWarning("Exception closing connection", e)
      }
    }
  }
}
object JdbcRDD {
  /** Default row mapper: materializes every column as an Object.
   *  JDBC column indices are 1-based, hence the `i + 1`. */
  def resultSetToObjectArray(rs: ResultSet): Array[Object] = {
    Array.tabulate[Object](rs.getMetaData.getColumnCount)(i => rs.getObject(i + 1))
  }

  /** Serializable connection provider used by the Java-friendly `create` overloads. */
  trait ConnectionFactory extends Serializable {
    @throws[Exception]
    def getConnection: Connection
  }

  /**
   * Create an RDD that executes a SQL query on a JDBC connection and reads results.
   * For usage example, see test case JavaAPISuite.testJavaJdbcRDD.
   *
   * @param connectionFactory a factory that returns an open Connection.
   *   The RDD takes care of closing the connection.
   * @param sql the text of the query.
   *   The query must contain two ? placeholders for parameters used to partition the results.
   *   E.g. "select title, author from books where ? <= id and id <= ?"
   * @param lowerBound the minimum value of the first placeholder
   * @param upperBound the maximum value of the second placeholder
   *   The lower and upper bounds are inclusive.
   * @param numPartitions the number of partitions.
   *   Given a lowerBound of 1, an upperBound of 20, and a numPartitions of 2,
   *   the query would be executed twice, once with (1, 10) and once with (11, 20)
   * @param mapRow a function from a ResultSet to a single row of the desired result type(s).
   *   This should only call getInt, getString, etc; the RDD takes care of calling next.
   *   The default maps a ResultSet to an array of Object.
   */
  def create[T](
      sc: JavaSparkContext,
      connectionFactory: ConnectionFactory,
      sql: String,
      lowerBound: Long,
      upperBound: Long,
      numPartitions: Int,
      mapRow: JFunction[ResultSet, T]): JavaRDD[T] = {
    // fakeClassTag: the Java API cannot supply a ClassTag; an erasure-based
    // tag is sufficient since the RDD never needs the concrete element class.
    val jdbcRDD = new JdbcRDD[T](
      sc.sc,
      () => connectionFactory.getConnection,
      sql,
      lowerBound,
      upperBound,
      numPartitions,
      (resultSet: ResultSet) => mapRow.call(resultSet))(fakeClassTag)
    new JavaRDD[T](jdbcRDD)(fakeClassTag)
  }

  /**
   * Create an RDD that executes a SQL query on a JDBC connection and reads results. Each row is
   * converted into a `Object` array. For usage example, see test case JavaAPISuite.testJavaJdbcRDD.
   *
   * @param connectionFactory a factory that returns an open Connection.
   *   The RDD takes care of closing the connection.
   * @param sql the text of the query.
   *   The query must contain two ? placeholders for parameters used to partition the results.
   *   E.g. "select title, author from books where ? <= id and id <= ?"
   * @param lowerBound the minimum value of the first placeholder
   * @param upperBound the maximum value of the second placeholder
   *   The lower and upper bounds are inclusive.
   * @param numPartitions the number of partitions.
   *   Given a lowerBound of 1, an upperBound of 20, and a numPartitions of 2,
   *   the query would be executed twice, once with (1, 10) and once with (11, 20)
   */
  def create(
      sc: JavaSparkContext,
      connectionFactory: ConnectionFactory,
      sql: String,
      lowerBound: Long,
      upperBound: Long,
      numPartitions: Int): JavaRDD[Array[Object]] = {
    // Delegate to the generic overload with the default Object-array mapper.
    val mapRow = new JFunction[ResultSet, Array[Object]] {
      override def call(resultSet: ResultSet): Array[Object] = {
        resultSetToObjectArray(resultSet)
      }
    }
    create(sc, connectionFactory, sql, lowerBound, upperBound, numPartitions, mapRow)
  }
}
| gioenn/xSpark | core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala | Scala | apache-2.0 | 8,240 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package viper.carbon.modules.components
import viper.silver.{ast => sil}
import viper.carbon.boogie.{Statements, Stmt}
import viper.silver.ast.LocalVar
/**
 * Contributes to the translation of one or several statements.
 */
trait StmtComponent extends Component {
  /**
   * Potentially contributes to the translation of a statement. If no contribution
   * is desired, then [[viper.carbon.boogie.Statements.EmptyStmt]] can be used as a
   * return value.
   *
   * The pair (a,b) is used as follows: a is used at the beginning of the translation so
   * far, and b at the end.
   */
  def handleStmt(s: sil.Stmt): (Stmt, Stmt)

  /**
   * This method is called when translating a "fresh" statement, and by default does nothing.
   */
  def freshReads(fb: Seq[LocalVar]): Stmt = Statements.EmptyStmt

  /**
   * This method is called at the beginning of translating a constraining read permission block,
   * and by default does nothing.
   */
  def enterConstrainingBlock(fb: sil.Constraining): Stmt = Statements.EmptyStmt

  /**
   * This method is called at the end of translating a constraining read permission block,
   * and by default does nothing.
   */
  def leaveConstrainingBlock(fb: sil.Constraining): Stmt = Statements.EmptyStmt
}
| sccblom/vercors | viper/carbon/src/main/scala/viper/carbon/modules/components/StmtComponent.scala | Scala | mpl-2.0 | 1,456 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.kafka
import org.junit.{After, Before, Test}
import org.junit.Assert._
import java.util
import scala.collection.JavaConversions._
import java.io.{ByteArrayOutputStream, PrintStream}
import Util.Period
class CliTest extends MesosTestCase {
// Captures everything the Cli writes so tests can assert on its output.
val out: ByteArrayOutputStream = new ByteArrayOutputStream()
@Before
override def before {
  super.before
  startHttpServer()
  // Point the Cli at the test HTTP API and redirect its output into `out`.
  Cli.api = Config.api
  Cli.out = new PrintStream(out, true)
  startZkServer()
}
@After
override def after {
  // Restore stdout before shutting the test servers down.
  Cli.out = System.out
  stopHttpServer()
  super.after
  stopZkServer()
}
@Test
def help {
exec("help")
assertOutContains("Usage:")
assertOutContains("scheduler")
assertOutContains("broker")
assertOutContains("topic")
// command help
for (command <- "help scheduler broker topic".split(" ")) {
exec("help " + command)
assertOutContains("Usage: " + command)
}
}
@Test
def broker_list{
Scheduler.cluster.addBroker(new Broker("0"))
Scheduler.cluster.addBroker(new Broker("1"))
Scheduler.cluster.addBroker(new Broker("2"))
exec("broker list")
assertOutContains("brokers:")
assertOutContains("id: 0")
assertOutContains("id: 1")
assertOutContains("id: 2")
}
@Test
def broker_add {
exec("broker add 0 --cpus=0.1 --mem=128")
assertOutContains("broker added:")
assertOutContains("id: 0")
assertOutContains("cpus:0.10, mem:128")
assertEquals(1, Scheduler.cluster.getBrokers.size())
val broker = Scheduler.cluster.getBroker("0")
assertEquals(0.1, broker.cpus, 0.001)
assertEquals(128, broker.mem)
}
@Test
def broker_update {
val broker = Scheduler.cluster.addBroker(new Broker("0"))
exec("broker update 0 --failover-delay=10s --failover-max-delay=20s --options=log.dirs=/tmp/kafka-logs")
assertOutContains("broker updated:")
assertOutContains("delay:10s, max-delay:20s")
assertOutContains("options: log.dirs=/tmp/kafka-logs")
assertEquals(new Period("10s"), broker.failover.delay)
assertEquals(new Period("20s"), broker.failover.maxDelay)
assertEquals(Util.parseMap("log.dirs=/tmp/kafka-logs"), broker.options)
}
@Test
def broker_remove {
Scheduler.cluster.addBroker(new Broker("0"))
exec("broker remove 0")
assertOutContains("broker 0 removed")
assertNull(Scheduler.cluster.getBroker("0"))
}
@Test
def broker_start_stop {
val broker0 = Scheduler.cluster.addBroker(new Broker("0"))
val broker1 = Scheduler.cluster.addBroker(new Broker("1"))
exec("broker start * --timeout=0")
assertOutContains("brokers scheduled to start:")
assertOutContains("id: 0")
assertOutContains("id: 1")
assertTrue(broker0.active)
assertTrue(broker1.active)
exec("broker stop 0 --timeout=0")
assertOutContains("broker scheduled to stop:")
assertOutContains("id: 0")
assertFalse(broker0.active)
assertTrue(broker1.active)
exec("broker stop 1 --timeout=0")
assertOutContains("broker scheduled to stop:")
assertOutContains("id: 1")
assertFalse(broker0.active)
assertFalse(broker1.active)
}
@Test
def broker_start_stop_timeout {
val broker = Scheduler.cluster.addBroker(new Broker("0"))
try { exec("broker start 0 --timeout=1ms"); fail() }
catch { case e: Cli.Error => assertTrue(e.getMessage, e.getMessage.contains("broker start timeout")) }
assertTrue(broker.active)
broker.task = new Broker.Task("id", "slave", "executor", "host", _state = Broker.State.RUNNING)
try { exec("broker stop 0 --timeout=1ms"); fail() }
catch { case e: Cli.Error => assertTrue(e.getMessage, e.getMessage.contains("broker stop timeout")) }
assertFalse(broker.active)
}
@Test
def topic_list {
exec("topic list")
assertOutContains("no topics")
Scheduler.cluster.topics.addTopic("t0")
Scheduler.cluster.topics.addTopic("t1")
Scheduler.cluster.topics.addTopic("x")
// list all
exec("topic list")
assertOutContains("topics:")
assertOutContains("t0")
assertOutContains("t1")
assertOutContains("x")
// name filtering
exec("topic list t*")
assertOutContains("t0")
assertOutContains("t1")
assertOutNotContains("x")
}
@Test
def topic_add {
exec("topic add t0")
assertOutContains("topic added:")
assertOutContains("name: t0")
exec("topic list")
assertOutContains("topic:")
assertOutContains("name: t0")
assertOutContains("partitions: 0:[0]")
exec("topic add t1 --partition 2")
exec("topic list t1")
assertOutContains("topic:")
assertOutContains("name: t1")
assertOutContains("partitions: 0:[0], 1:[0]")
}
@Test
def topic_update {
Scheduler.cluster.topics.addTopic("t0")
exec("topic update t0 --options=flush.ms=5000")
assertOutContains("topic updated:")
assertOutContains("name: t0")
exec("topic list")
assertOutContains("topic:")
assertOutContains("t0")
assertOutContains("flush.ms=5000")
}
@Test
def topic_rebalance {
val cluster: Cluster = Scheduler.cluster
val rebalancer: Rebalancer = cluster.rebalancer
cluster.addBroker(new Broker("0"))
cluster.addBroker(new Broker("1"))
assertFalse(rebalancer.running)
cluster.topics.addTopic("t")
exec("topic rebalance *")
assertTrue(rebalancer.running)
assertOutContains("Rebalance started")
}
@Test
def usage_errors {
// no command
try { exec(""); fail() }
catch { case e: Cli.Error => assertTrue(e.getMessage, e.getMessage.contains("command required")) }
// no id
try { exec("broker add"); fail() }
catch { case e: Cli.Error => assertTrue(e.getMessage, e.getMessage.contains("argument required")) }
// invalid command
try { exec("unsupported 0"); fail() }
catch { case e: Cli.Error => assertTrue(e.getMessage, e.getMessage.contains("unsupported command")) }
}
@Test
def connection_refused {
HttpServer.stop()
try {
try { exec("broker add 0"); fail() }
catch { case e: Cli.Error => assertTrue(e.getMessage, e.getMessage.contains("Connection refused")) }
} finally {
HttpServer.start()
}
}
/** Assert that the captured Cli output contains `s`. */
private def assertOutContains(s: String): Unit = assertTrue("" + out, out.toString.contains(s))
/** Assert that the captured Cli output does not contain `s`. */
private def assertOutNotContains(s: String): Unit = assertFalse("" + out, out.toString.contains(s))
/** Reset captured output, split `cmd` on spaces and run it through the Cli.
 *  Empty tokens (e.g. from doubled spaces or an empty command) are dropped. */
private def exec(cmd: String): Unit = {
  out.reset()
  val args = new util.ArrayList[String]()
  for (arg <- cmd.split(" "))
    if (!arg.isEmpty) args.add(arg) // fixed: was `!cmd.isEmpty`, which added empty tokens whenever cmd itself was non-empty
  Cli.exec(args.toArray(new Array[String](args.length)))
}
}
| yonglehou/kafka-1 | src/test/ly/stealth/mesos/kafka/CliTest.scala | Scala | apache-2.0 | 7,468 |
package hex
package format
package pgn
//Object model of a pgn
//TODO waiting to decide which format we take for hex. Maybe standard sgf instead
import scala._
/** Object model of a parsed PGN game: its header tags plus the list of turns.
 *  The helpers below are kept commented out from the chess implementation,
 *  pending a decision on the notation format for hex (possibly SGF instead). */
case class Pgn(
  tags: List[Tag],
  turns: List[Turn]) {
  /*
  def updateTurn(fullMove: Int, f: Turn => Turn) = {
    val index = fullMove - 1
    (turns lift index).fold(this) { turn =>
      copy(turns = turns.updated(index, f(turn)))
    }
  }
  def updatePly(ply: Int, f: Move => Move) = {
    val fullMove = (ply + 1) / 2
    val color = Color(ply % 2 == 1)
    updateTurn(fullMove, _.update(color, f))
  }
  def updateLastPly(f: Move => Move) = updatePly(nbPlies, f)
  def nbPlies = turns.foldLeft(0)(_ + _.count)
  def moves = turns.flatMap { t =>
    List(t.white, t.black).flatten
  }
  override def toString = "%s\\n\\n%s %s".format(
    tags mkString "\\n",
    turns mkString " ",
    tags find (_.name == Tag.Result) map (_.value) getOrElse ""
  ).trim
  */
}
/** One numbered turn: the (optional) move by white and/or black.
 *  Helper methods are commented out pending the hex notation decision. */
case class Turn(
  number: Int,
  white: Option[Move],
  black: Option[Move]) {
  /*
  def update(color: Color, f: Move => Move) = color.fold(
    copy(white = white map f),
    copy(black = black map f)
  )
  def updateLast(f: Move => Move) = {
    black.map(m => copy(black = f(m).some)) orElse
    white.map(m => copy(white = f(m).some))
  } | this
  def isEmpty = white.isEmpty && black.isEmpty
  def plyOf(color: Color) = number * 2 - color.fold(1, 0)
  def count = List(white, black) count (_.isDefined)
  override def toString = {
    val text = (white, black) match {
      case (Some(w), Some(b)) if w.isLong => s" $w $number... $b"
      case (Some(w), Some(b)) => s" $w $b"
      case (Some(w), None) => s" $w"
      case (None, Some(b)) => s".. $b"
      case _ => ""
    }
    s"$number.$text"
  }
  */
}
/** Companion: construction of turns from a flat move list (currently disabled,
 *  kept for reference from the chess implementation). */
object Turn {
  /*
  def fromMoves(moves: List[Move], ply: Int): List[Turn] = {
    moves.foldLeft((List[Turn](), ply)) {
      case ((turns, p), move) if p % 2 == 1 =>
        (Turn((p + 1) / 2, move.some, none) :: turns) -> (p + 1)
      case ((Nil, p), move) =>
        (Turn((p + 1) / 2, none, move.some) :: Nil) -> (p + 1)
      case ((t :: tt, p), move) =>
        (t.copy(black = move.some) :: tt) -> (p + 1)
    }
  }._1.reverse
  */
}
/** A single move with its annotations (NAG code, comment, opening name,
 *  result marker, variations, and remaining clock time).
 *  Rendering helpers are commented out pending the hex notation decision. */
case class Move(
  san: String,
  nag: Option[Int] = None,
  comment: Option[String] = None,
  opening: Option[String] = None,
  result: Option[String] = None,
  variation: List[Turn] = Nil,
  // time left for the user who made the move, after he made it
  timeLeft: Option[Int] = None) {
  /*
  def isLong = comment.isDefined || variation.nonEmpty
  def timeString(time: Int) = Clock.timeString(time)
  private def clockString: Option[String] =
    timeLeft.map(time => "[%clk " + timeString(time) + "]")
  override def toString = {
    val nagSymbol = nag.fold("") { code => Nag(code).fold(" $" + code)(_.symbol) }
    val commentOrTime =
      if (comment.isDefined || timeLeft.isDefined || opening.isDefined || result.isDefined)
        List(clockString, opening, result, comment).flatten.mkString(" { ", " ", " }")
      else ""
    val variationString = if (variation.isEmpty) "" else variation.mkString(" (", " ", ")")
    s"$san$nagSymbol$commentOrTime$variationString"
  }
  */
}
| ThomasCabaret/scalahex | src/main/scala/format/pgn/model.scala | Scala | mit | 3,288 |
package org.bfn.ninetynineprobs
object P09 {
  /** Pack consecutive duplicates of list elements into sublists.
   *
   *  Example: pack(List(1, 1, 2)) == List(List(1, 1), List(2)).
   *
   *  Rewritten to use the standard library (`span`, `reverse`) instead of the
   *  cross-module dependency on P05.reverse, and to be a single explicitly
   *  tail-recursive loop (the old match also had complementary guards that
   *  made exhaustiveness hard to see).
   */
  def pack[T](ls : List[T]) : List[List[T]] = packAcc(ls, Nil)

  // Tail-recursive worker: peel the leading run of elements equal to the head,
  // then continue on the remainder, accumulating runs in reverse order.
  @scala.annotation.tailrec
  private def packAcc[T](remaining: List[T], acc: List[List[T]]): List[List[T]] =
    remaining match {
      case Nil => acc.reverse
      case h :: _ =>
        val (run, rest) = remaining.span(_ == h)
        packAcc(rest, run :: acc)
    }
}
| bfontaine/99Scala | src/main/scala/P09.scala | Scala | mit | 654 |
package NFNcore.Lambda
import Logging.{DEBUGMSG, Debuglevel}
import NFNcore.NFNNode
import scala.collection.mutable.Map
// Function inside the Krivine machine: compiled instruction body plus its arity.
case class Func(expr: Vector[KrivineInstruction], numOfParams: Int)
case class Krivine(nfnNode: NFNNode, krivineThread: KrivineThread){
  // Function environment: user-defined lambda functions registered by FUNCTIONINST.
  var funcEnv: Map[NFNName, Func] = Map()
  var instructions : Vector[KrivineInstruction] = Vector()

  /** Entry point: execute a compiled instruction sequence with an empty
   *  stack and environment; returns the residual/result instructions. */
  def apply(instructions: Vector[KrivineInstruction]): Vector[KrivineInstruction] = {
    this.instructions = instructions;
    execute(instructions, Vector(), Map(), 0)
  }

  /** One big dispatch loop of the Krivine machine.
   *  `stack` holds pending (unevaluated) argument thunks, `env` maps variable
   *  numbers to their bound thunks, `varoffset` shifts variable numbers when
   *  executing linked function bodies. */
  def execute(instructions: Vector[KrivineInstruction], stack: Vector[Vector[KrivineInstruction]],
              env: Map[Int, Vector[KrivineInstruction]], varoffset: Int) : Vector[KrivineInstruction] = {
    if(instructions.isEmpty) {
      return Vector(RESULT(""))
    }
    DEBUGMSG(Debuglevel.DEBUG, "Executing Krivine Instruction: " + instructions.head.toString() + " varoffset: " + varoffset)
    instructions.head match {
      case NOP() => {
        // NOTE(review): recurses with the SAME instruction vector (head not
        // consumed) — a NOP at the front loops forever; `instructions.tail`
        // looks intended. Confirm before changing.
        return (execute(instructions, stack, env, varoffset))
      }
      case ACCESS(varname, varnum) => {
        // Look the variable up in the environment; unbound variables are
        // deferred by moving a VARIABLE marker to the end of the program.
        val offset = 0 //if(varname.startsWith("_")) varoffset else 0 //use varoffset only for function parameters //TODO problem with recursion, since old vars start also with _1
        val inst = env.getOrElse(varnum + offset, Nil)
        if(inst != Nil){
          return execute((inst ++ instructions.tail).toVector, stack, env, varoffset)
        }
        else{
          return execute(instructions.tail ++ List(VARIABLE(varname, varnum)), stack, env, varoffset)
        }
      }
      case GRAB(varname, varnum) => {
        // Bind the top-of-stack thunk to this variable (lambda abstraction).
        return execute(instructions.tail, stack.tail, env += varnum -> stack.head, varoffset)
      }
      case PUSH(elm) => {
        // Push an unevaluated argument thunk (application).
        return execute(instructions.tail, Vector(elm) ++ stack, env, varoffset)
      }
      case VARIABLE(name, varnum) => {
        // A free variable survives as part of the result; continue with any
        // remaining stack thunks.
        if(!stack.isEmpty){ //TODO this is also required for other datatypes, do it generic? appliable?
          return Vector(VARIABLE(name, varnum)) ++ execute(stack.head, stack.tail, env, varoffset)
        }
        else {
          return Vector(VARIABLE(name, varnum))
        }
      }
      // Literals evaluate to themselves.
      case NUMBER(v) => {
        return Vector(NUMBER(v))
      }
      case STRING(s) => {
        return Vector(STRING(s))
      }
      case NFNName(comps) => {
        return Vector(NFNName(comps))
      }
      case LISTINST(l) => {
        // Evaluate each list element in the current machine state.
        return Vector(LISTINST(l.map(e => execute(e, stack, env, varoffset))))
      }
      case CALLINST(fname, num, params) => {
        // User-defined functions take precedence over built-ins.
        if(funcEnv.contains(fname)){
          return lambdafunction(fname, params, env, this)
        }
        else{
          var buildin = new KrivineBuildIn(nfnNode)
          return buildin(fname, params, env, varoffset, this)
        }
      }
      case IFELSEINST(condition, fulfilled, notfulfilled) => {
        val res = execute(condition, stack, env, varoffset)
        var s = 0; // NOTE(review): unused — shadowed by the pattern binding below.
        // NOTE(review): inner match only handles NUMBER — any other condition
        // result throws a MatchError at runtime; confirm intended.
        res.head match{
          case NUMBER(s) => {
            if(s == 0){
              return execute(notfulfilled, stack, env, varoffset) //false
            }
            else{
              return execute(fulfilled, stack, env, varoffset) //true
            }
          }
        }
      }
      case FUNCTIONINST(name, numOfParams, startVarNum, expr, prog) => {
        // Register the function, then continue with the remaining program.
        funcEnv += name -> Func(expr, numOfParams)
        return execute(prog, stack, env, varoffset)
      }
      // Network primitives evaluate to themselves.
      case NFNInterestInst(i) =>{
        return Vector(NFNInterestInst(i))
      }
      case NFNContentInst(c) => {
        return Vector(NFNContentInst(c))
      }
      case WAIT() => {
        DEBUGMSG(Debuglevel.DEBUG, "Thread waiting is ")
        // NOTE(review): Object.wait() requires holding the monitor of the
        // waited-on object; calling it unsynchronized throws
        // IllegalMonitorStateException at runtime. Confirm intended.
        Thread.currentThread().wait()
        return execute(instructions.tail, stack, env, varoffset)
      }
      case _ => {
        return Vector(RESULT("Error"))
      }
    }
  }

  /** Apply a user-defined function: bind its parameters (at fresh variable
   *  numbers above the current maximum), link its body, and run it. */
  def lambdafunction(fname: NFNName, params: Vector[Vector[KrivineInstruction]], env: Map[Int, Vector[KrivineInstruction]], krivine: Krivine): Vector[KrivineInstruction] = {
    val func = funcEnv(fname)
    val numOfParams = func.numOfParams
    var fenv: Map[Int, Vector[KrivineInstruction]] = Map()
    fenv = fenv ++ env
    var i = 0
    val offset = if(fenv.isEmpty) 0 else fenv.keys.max
    for(i <- 0 to numOfParams - 1){ //TODO not functional, a bit "hacky"
      fenv += i + offset + 1 -> params(i) //TODO how to do secure recursion with this numbers?
    }
    //link and prepare function code!
    val code = linkFunctionCode(func.expr, offset)
    return krivine.execute(code, Vector(), fenv, offset)
  }

  /** Shift all variable references in a function body by `offset` so they hit
   *  the freshly-bound parameter slots; recurses into nested thunks. */
  def linkFunctionCode(function: Vector[KrivineInstruction], offset: Int): Vector[KrivineInstruction] = {
    return function.map {
      inst => inst match{
        case ACCESS(varname, varnum) => ACCESS(varname, (varnum + offset))
        //case GRAB(varname, varnum) => GRAB(varname, varnum+offset) //offset here required (may necessary to add this line to use abstraction in functions)?
        case PUSH(k) => PUSH(linkFunctionCode(k, offset))
        case CALLINST(fname, num, params) => CALLINST(fname, num, params.map { p => linkFunctionCode(p, offset) })
        case IFELSEINST(cond, fulfilled, notfulfilled) => IFELSEINST(linkFunctionCode(cond, offset), linkFunctionCode(fulfilled, offset), linkFunctionCode(notfulfilled, offset))
        //case FUNCTIONINST(fname, numOfParam, startParam, expr, prog) =>
        case _ => inst
      }
    }
  }
}
| blacksheeep/NFN | src/main/scala/NFNcore/Lambda/Krivine.scala | Scala | mit | 5,529 |
package com.twitter.finagle.http.filter
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Method, Request, Response, Version}
import com.twitter.logging.{BareFormatter, Logger, StringHandler}
import com.twitter.util.{Await, Future, Time}
import java.time.ZonedDateTime
import org.scalatest.FunSuite
class LoggingFilterTest extends FunSuite {
test("log") {
  val logger = Logger.get("access")
  logger.setLevel(Logger.INFO)
  val stringHandler = new StringHandler(BareFormatter, Some(Logger.INFO))
  logger.addHandler(stringHandler)
  logger.setUseParentHandlers(false)

  val request = Request("/search.json")
  request.method = Method.Get
  request.xForwardedFor = "10.0.0.1"
  request.referer = "http://www.example.com/"
  request.userAgent = "User Agent"
  request.version = Version.Http11

  val formatter = new CommonLogFormatter

  val service = new Service[Request, Response] {
    def apply(request: Request): Future[Response] = {
      val response = Response()
      response.statusCode = 123
      response.write("hello")
      Future.value(response)
    }
  }

  val filter = (new LoggingFilter(logger, formatter)).andThen(service)
  Time.withTimeAt(Time.fromSeconds(1302121932)) { _ =>
    Await.result(filter(request), 1.second)
  }

  // Fixed: the pattern comparison was previously computed with `==` and its
  // result discarded, so the test asserted nothing. The right-hand side is a
  // regex pattern (note the `[0-9]+`), so match it, don't compare for equality.
  assert(stringHandler.get.matches("""127\\.0\\.0\\.1 - - \\[06/Apr/2011:20:32:12 \\+0000\\] "GET /search\\.json HTTP/1\\.1" 123 5 [0-9]+ "User Agent"""" + "\\n"))
}
val UnescapedEscaped =
Seq(
// boundaries
("", ""),
("hello\\n", "hello\\\\n"),
("\\nhello", "\\\\nhello"),
// low ascii and special characters
("\\u0000", "\\\\x00"),
("\\u0001", "\\\\x01"),
("\\u0002", "\\\\x02"),
("\\u0003", "\\\\x03"),
("\\u0004", "\\\\x04"),
("\\u0005", "\\\\x05"),
("\\u0006", "\\\\x06"),
("\\u0007", "\\\\x07"),
("\\u0008", "\\\\b"),
("\\u0009", "\\\\t"),
("\\u000a", "\\\\n"),
("\\u000b", "\\\\v"),
("\\u000c", "\\\\x0c"),
("\\u000d", "\\\\r"),
("\\u000e", "\\\\x0e"),
("\\u000f", "\\\\x0f"),
("\\u0010", "\\\\x10"),
("\\u0011", "\\\\x11"),
("\\u0012", "\\\\x12"),
("\\u0013", "\\\\x13"),
("\\u0014", "\\\\x14"),
("\\u0015", "\\\\x15"),
("\\u0016", "\\\\x16"),
("\\u0017", "\\\\x17"),
("\\u0018", "\\\\x18"),
("\\u0019", "\\\\x19"),
("\\u001a", "\\\\x1a"),
("\\u001b", "\\\\x1b"),
("\\u001c", "\\\\x1c"),
("\\u001d", "\\\\x1d"),
("\\u001e", "\\\\x1e"),
("\\u001f", "\\\\x1f"),
("\\u0020", " "),
("\\u0021", "!"),
("\\"", "\\\\\\""),
("\\u0023", "#"),
("\\u0024", "$"),
("\\u0025", "%"),
("\\u0026", "&"),
("\\u0027", "'"),
("\\u0028", "("),
("\\u0029", ")"),
("\\u002a", "*"),
("\\u002b", "+"),
("\\u002c", ","),
("\\u002d", "-"),
("\\u002e", "."),
("\\u002f", "/"),
("\\u0030", "0"),
("\\u0031", "1"),
("\\u0032", "2"),
("\\u0033", "3"),
("\\u0034", "4"),
("\\u0035", "5"),
("\\u0036", "6"),
("\\u0037", "7"),
("\\u0038", "8"),
("\\u0039", "9"),
("\\u003a", ":"),
("\\u003b", ";"),
("\\u003c", "<"),
("\\u003d", "="),
("\\u003e", ">"),
("\\u003f", "?"),
("\\u0040", "@"),
("\\u0041", "A"),
("\\u0042", "B"),
("\\u0043", "C"),
("\\u0044", "D"),
("\\u0045", "E"),
("\\u0046", "F"),
("\\u0047", "G"),
("\\u0048", "H"),
("\\u0049", "I"),
("\\u004a", "J"),
("\\u004b", "K"),
("\\u004c", "L"),
("\\u004d", "M"),
("\\u004e", "N"),
("\\u004f", "O"),
("\\u0050", "P"),
("\\u0051", "Q"),
("\\u0052", "R"),
("\\u0053", "S"),
("\\u0054", "T"),
("\\u0055", "U"),
("\\u0056", "V"),
("\\u0057", "W"),
("\\u0058", "X"),
("\\u0059", "Y"),
("\\u005a", "Z"),
("\\u005b", "["),
("\\\\", "\\\\\\\\"),
("\\u005d", "]"),
("\\u005e", "^"),
("\\u005f", "_"),
("\\u0060", "`"),
("\\u0061", "a"),
("\\u0062", "b"),
("\\u0063", "c"),
("\\u0064", "d"),
("\\u0065", "e"),
("\\u0066", "f"),
("\\u0067", "g"),
("\\u0068", "h"),
("\\u0069", "i"),
("\\u006a", "j"),
("\\u006b", "k"),
("\\u006c", "l"),
("\\u006d", "m"),
("\\u006e", "n"),
("\\u006f", "o"),
("\\u0070", "p"),
("\\u0071", "q"),
("\\u0072", "r"),
("\\u0073", "s"),
("\\u0074", "t"),
("\\u0075", "u"),
("\\u0076", "v"),
("\\u0077", "w"),
("\\u0078", "x"),
("\\u0079", "y"),
("\\u007a", "z"),
("\\u007b", "{"),
("\\u007c", "|"),
("\\u007d", "}"),
("\\u007e", "~"),
("\\u007f", "\\\\x7f"),
("\\u0080", "\\\\xc2\\\\x80"),
("\\u00e9", "\\\\xc3\\\\xa9"), // รฉ
("\\u2603", "\\\\xe2\\\\x98\\\\x83") // snowman
)
test("escape() escapes non-printable, non-ASCII") {
UnescapedEscaped.foreach {
case (input, escaped) =>
assert(LogFormatter.escape(input) == escaped)
}
}
test("DateFormat keeps consistent") {
val logFormatter = new CommonLogFormatter
val timeGMT: ZonedDateTime = ZonedDateTime.parse("2012-06-30T12:30:40Z[GMT]")
assert(timeGMT.format(logFormatter.DateFormat) == "30/Jun/2012:12:30:40 +0000")
}
}
| luciferous/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/filter/LoggingFilterTest.scala | Scala | apache-2.0 | 5,315 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package types
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.parsing.expressions._
/**
* @author Alexander Podkhalyuzin
* Date: 06.02.2008
*/
/*
* AnnotType ::= {Annotation} SimpleType
*/
/** Default concrete parser instance, wired to the standard sub-parsers. */
object AnnotType extends AnnotType {
  override protected val annotation = Annotation
  override protected val simpleType = SimpleType
}
trait AnnotType {
  // Sub-parsers supplied by the concrete implementation (see companion object).
  protected val annotation: Annotation
  protected val simpleType: SimpleType

  /**
   * Parses `AnnotType ::= {Annotation} SimpleType`.
   *
   * A marker is opened before parsing; if a SimpleType is present, any trailing
   * annotations on the same line are consumed (a newline before the current token
   * terminates the type). The outer marker is completed as ANNOT_TYPE only when at
   * least one annotation was parsed; otherwise both markers are dropped so the
   * tree contains just the SimpleType. On failure the marker is rolled back.
   *
   * @return true when a type was parsed at the current position
   */
  def parse(builder: ScalaPsiBuilder, isPattern: Boolean, multipleSQBrackets: Boolean = true): Boolean = {
    val annotMarker = builder.mark
    var isAnnotation = false
    //parse Simple type
    if (simpleType.parse(builder, isPattern, multipleSQBrackets)) {
      val annotationsMarker = builder.mark
      // Consume annotations following the type on the same line only.
      while (!builder.newlineBeforeCurrentToken && annotation.parse(builder,
        countLinesAfterAnnotation = false)) {isAnnotation = true}
      if (isAnnotation) annotationsMarker.done(ScalaElementTypes.ANNOTATIONS) else annotationsMarker.drop()
      if (isAnnotation) annotMarker.done(ScalaElementTypes.ANNOT_TYPE) else annotMarker.drop()
      true
    } else {
      // No SimpleType here: undo the marker and report failure to the caller.
      annotMarker.rollbackTo()
      false
    }
  }
}
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import org.ensime.util.EnsimeSpec
import DescriptorParser.{ parse, parseType }
import ClassName._
import scala.util.Try
class DescriptorParserSpec extends EnsimeSpec {

  // Short aliases for frequently used class names and descriptor constructors.
  // NOTE(review): "ฮท" below looks like mojibake of the original "η" (eta) in the
  // scalaz anonfun name — confirm against the upstream file before relying on it.
  private val SZ = ClassName(PackageName(List("scalaz", "syntax")), "ToApplicativeOps$ApplicativeIdV$$anonfun$ฮท$1")
  private val S = ClassName(PackageName(List("java", "lang")), "String")
  private val A = ArrayDescriptor
  private val D = Descriptor
  private val I = PrimitiveInt
  private val V = PrimitiveVoid
  private val Z = PrimitiveBoolean
  private val root = PackageName(Nil)

  "DescriptorParser" should "fail to parse the empty string" in {
    intercept[Exception](parse(""))
  }

  it should "fail to parse a bad string" in {
    intercept[Exception](parse("not valid"))
  }

  it should "parse descriptors without parameters" in {
    // Return types: void, object, and arrays of increasing dimension.
    parse("()V") should ===(D(Nil, PrimitiveVoid))
    parse("()Ljava/lang/String;") should ===(D(Nil, S))
    parse("()[Ljava/lang/String;") should ===(D(Nil, A(S)))
    parse("()[[Ljava/lang/String;") should ===(D(Nil, A(A(S))))
    parse("()[[[Ljava/lang/String;") should ===(D(Nil, A(A(A(S)))))
  }

  it should "handle multiple object parameters" in {
    parse("(I[IILjava/lang/String;Z)V") should ===(D(List(I, A(I), I, S, Z), V))
  }

  it should "be invertible" in {
    // Parsing then re-rendering a descriptor must reproduce the input exactly.
    def invert(desc: String) =
      parse(desc).descriptorString shouldBe desc
    invert("(I[IILjava/lang/String;Z)V")
  }

  "DescriptorParser's JVM internal mode" should "fail to parse the empty string" in {
    intercept[Exception](parseType(""))
  }

  it should "fail to parse a bad string" in {
    intercept[Exception](parseType("not valid"))
  }

  it should "handle $_- in package names" in {
    parseType("Lcom/-$random_/Foo;") should ===(ClassName(PackageName(List("com", "-$random_")), "Foo"))
  }

  it should "handle examples" in {
    parseType("Lscalaz/syntax/ToApplicativeOps$ApplicativeIdV$$anonfun$ฮท$1;") should ===(SZ)
    parseType("Ljava/lang/String;") should ===(S)
    parseType("[Ljava/lang/String;") should ===(A(S))
    parseType("[[Ljava/lang/String;") should ===(A(A(S)))
    parseType("V") should ===(V)
    parseType("LMyAnnotation;") should ===(ClassName(root, "MyAnnotation"))
    // of course, SUN break their own rules for package names (capitals)
    Try(parseType("Lcom/sun/tools/corba/se/idl/toJavaPortable/NameModifierImpl;")).success
    // hmmm, apache, what???? dashes in package names????
    Try(parseType("Lorg/spark-project/guava/annotations/VisibleForTesting;")).success
  }

  it should "be invertible" in {
    // Round trip for the internal (type-only) representation.
    def invert(desc: String) =
      parseType(desc).internalString shouldBe desc
    invert("Ljava/lang/String;")
    invert("[[Ljava/lang/String;")
  }
}
| espinhogr/ensime-server | core/src/test/scala/org/ensime/indexer/DescriptorParserSpec.scala | Scala | gpl-3.0 | 2,872 |
package cucumber.runtime.scala
import _root_.java.util.{List => JList}
import _root_.gherkin.formatter.model.Step
import _root_.java.lang.reflect.Modifier
import _root_.cucumber.runtime.snippets.SnippetGenerator
import _root_.cucumber.api.scala.ScalaDsl
import _root_.cucumber.runtime.io.ResourceLoader
import _root_.cucumber.runtime.io.ClasspathResourceLoader
import _root_.cucumber.runtime.Backend
import _root_.cucumber.runtime.UnreportedStepExecutor
import _root_.cucumber.runtime.Glue
import collection.JavaConversions._
/**
 * Cucumber backend that discovers and instantiates ScalaDsl glue code
 * (step definitions and hooks) from the configured glue packages.
 */
class ScalaBackend(ignore: ResourceLoader) extends Backend {

  // Never reassigned, so a val (was a var). Produces snippet suggestions for
  // undefined steps.
  private val snippetGenerator = new SnippetGenerator(new ScalaSnippetGenerator())

  // ScalaDsl instances discovered by the most recent loadGlue call.
  private var instances: Seq[ScalaDsl] = Nil

  def getStepDefinitions = instances.flatMap(_.stepDefinitions)

  def getBeforeHooks = instances.flatMap(_.beforeHooks)

  def getAfterHooks = instances.flatMap(_.afterHooks)

  def disposeWorld() {
    instances = Nil
  }

  def getSnippet(step: Step) = snippetGenerator.getSnippet(step)

  def buildWorld() {
    //I don't believe scala has to do anything to clean out it's world
  }

  /** Scans the glue packages for ScalaDsl implementations and registers their
    * step definitions and hooks with the given Glue. */
  def loadGlue(glue: Glue, gluePaths: JList[String]) {
    val cl = new ClasspathResourceLoader(Thread.currentThread().getContextClassLoader)
    val packages = gluePaths map { cucumber.runtime.io.MultiLoader.packageName(_) }
    // Keep only classes with a no-argument constructor. Catch only
    // NoSuchMethodException (what getDeclaredConstructor throws when absent);
    // the previous bare `case e =>` also swallowed fatal errors.
    val dslClasses = packages flatMap { cl.getDescendants(classOf[ScalaDsl], _) } filter { cls =>
      try {
        cls.getDeclaredConstructor()
        true
      } catch {
        case _: NoSuchMethodException => false
      }
    }
    // Classes with a public ctor are instantiated directly; the rest are assumed
    // to be Scala objects, whose singleton lives in the static MODULE$ field.
    val (clsClasses, objClasses) = dslClasses partition { cls =>
      try {
        Modifier.isPublic(cls.getConstructor().getModifiers)
      } catch {
        case _: NoSuchMethodException => false
      }
    }
    val objInstances = objClasses map { cls =>
      val instField = cls.getDeclaredField("MODULE$")
      instField.setAccessible(true)
      instField.get(null).asInstanceOf[ScalaDsl]
    }
    val clsInstances = clsClasses map { _.newInstance() }
    instances = objInstances ++ clsInstances
    getStepDefinitions map { glue.addStepDefinition(_) }
    getBeforeHooks map { glue.addBeforeHook(_) }
    getAfterHooks map { glue.addAfterHook(_) }
  }

  def setUnreportedStepExecutor(executor: UnreportedStepExecutor) {}
}
| nilswloka/cucumber-jvm | scala/src/main/scala/cucumber/runtime/scala/ScalaBackend.scala | Scala | mit | 2,256 |
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import play.api.mvc._
import service._
import service.KeystoreService._
import scala.concurrent.Future
import config.AppSettings
import config.Settings
import play.api.i18n.Messages.Implicits._
import play.api.Play.current
/** Production instance wired to the real keystore and application settings. */
object TaxYearSelectionController extends TaxYearSelectionController with AppSettings {
  def keystore: KeystoreService = KeystoreService
}
/** Controller for the tax-year selection page: rendering, submission and back
  * navigation. Selected years are kept in the session as a CSV string. */
trait TaxYearSelectionController extends RedirectController {
  settings: Settings =>

  /**
   * Handles form submission. Selected years arrive as form keys prefixed with
   * "TaxYear" (7 characters); the prefix is stripped and the years joined into
   * a CSV string. An empty selection redisplays the page with the error flag set.
   */
  def onYearSelected = withWriteSession { implicit request =>
    val kkey = request.form.filterKeys(_.contains("TaxYear")).map { case (k, v) => k.drop(7) }.mkString(",")
    if (kkey.nonEmpty) {
      val data = request.data ++ sessionData(kkey, request.form("previous"))
      TaxYearSelection() go Forward.using(data)
    } else {
      Future.successful(Ok(views.html.taxyearselection(Array[String](), true, settings.NUMBER_OF_YEARS)))
    }
  }

  /** Renders the selection page with any previously chosen years pre-selected. */
  val onPageLoad = withReadSession { implicit request =>
    Future.successful(Ok(views.html.taxyearselection(request.data.getOrElse(SELECTED_INPUT_YEARS_KEY, "").split(","), false, settings.NUMBER_OF_YEARS)))
  }

  def onBack() = withWriteSession { implicit request =>
    TaxYearSelection() go Backward
  }

  /**
   * Builds the session updates for a new selection: keystore entries for years
   * that were previously selected but are now removed are blanked, then the new
   * selection is stored and the current-year pointer reset to the start page.
   *
   * @param kkey     CSV of the newly selected years
   * @param previous CSV of the previously selected years (may be empty)
   */
  protected def sessionData(kkey: String, previous: String): Map[String, String] = {
    val cleared =
      if (previous.nonEmpty) {
        val deletedYears = previous.split(",").diff(kkey.split(","))
        deletedYears.flatMap { year =>
          // The key set to clear depends on the pension-scheme rules for the year.
          if (year == "2015") // to do 2016+
            List(P1_DB_KEY, P1_DC_KEY, P2_DB_KEY, P2_DC_KEY, P1_TRIGGER_DC_KEY, P2_TRIGGER_DC_KEY, TRIGGER_DATE_KEY, TE_YES_NO_KEY, FIRST_DC_YEAR_KEY, DB_FLAG_PREFIX + year, DC_FLAG_PREFIX + year)
          else if (year.toInt >= 2016)
            List(TRIGGER_DATE_KEY, TRIGGER_DC_KEY, FIRST_DC_YEAR_KEY, DB_PREFIX + year, DC_PREFIX + year, DB_FLAG_PREFIX + year, DC_FLAG_PREFIX + year, TH_PREFIX + year, AI_PREFIX + year, TA_PREFIX + year, TI_YES_NO_KEY_PREFIX)
          else
            List(DB_PREFIX + year, DC_PREFIX + year)
        }.map(k => (k, "")).toMap
      } else {
        Map.empty[String, String]
      }
    cleared ++ Map(SELECTED_INPUT_YEARS_KEY -> kkey, CURRENT_INPUT_YEAR_KEY -> PageLocation.START.toString)
  }
}
| hmrc/paac-frontend | app/controllers/TaxYearSelectionController.scala | Scala | apache-2.0 | 2,807 |
package looty
package views
import cgta.serland.SerBuilder
import japgolly.scalajs.react.BackendScope
import japgolly.scalajs.react.React
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.SyntheticEvent
import looty.model.StashTabIdx
import looty.model.parsers.ItemParser
import looty.poeapi.PoeCacher
import looty.poeapi.PoeTypes.Leagues.League
import looty.poeapi.PoeTypes.StashTab
import looty.poeapi.PoeTypes.StashTabInfo
import looty.views.widgets.Select2Widget
import looty.views.widgets.SelectCharacterWidget
import looty.views.widgets.SelectLeagueWidget
import looty.views.widgets.SelectStashWidget
import org.scalajs.dom
import org.scalajs.dom.html.Input
import org.scalajs.dom.KeyboardEvent
import org.scalajs.dom.ext.KeyCode
import org.scalajs.jquery.JQuery
import org.scalajs.jquery.JQueryEventObject
import scala.concurrent.Future
import scala.scalajs.js
import scala.scalajs.js.Dynamic
//////////////////////////////////////////////////////////////
// Copyright (c) 2015 Ben Jackman, Jeff Gomberg
// All Rights Reserved
// please contact ben@jackman.biz or jeff@cgtanalytics.com
// for licensing inquiries
// Created by bjackman @ 7/17/15 1:45 AM
//////////////////////////////////////////////////////////////
object VisualStashTabWidget {
  // React component: renders a fixed-position black panel that will host the
  // visual representation of a stash tab. Size/position are hard-coded for now.
  val component = {
    import japgolly.scalajs.react.vdom.prefix_<^._
    val O = Dynamic.literal
    ReactComponentB[VisualStashTabWidget]("VisualStashTabWidget")
      .render((props) =>
        <.div(
          ^.position := "absolute",
          ^.backgroundColor := "black",
          ^.top := "100px",
          ^.left := "100px",
          ^.height := "800px",
          ^.width := "800px"
        )
      )
      .build
  }
}
/** Props for [[VisualStashTabWidget.component]]; apply() mounts the component. */
case class VisualStashTabWidget(tab: StashTab, tabInfo: StashTabInfo) {
  def apply() = VisualStashTabWidget.component(this)
}
object UnderlayViewWidget {

  // Component factory parameterised by the PoeCacher used to fetch league and
  // stash-tab data.
  class Component(pc: PoeCacher) {

    // Currently selected league and stash tab; both empty until the user chooses.
    case class State(
      league: Option[League],
      stashTabInfo: Option[StashTabInfo]
    )

    case class Backend(T: BackendScope[_, State]) {
      def setLeague(league: League) { T.modState(_.copy(league = Some(league))) }
      def setStashTabInfo(stashTabInfo: StashTabInfo) { T.modState(_.copy(stashTabInfo = Some(stashTabInfo))) }
    }

    val component = {
      import japgolly.scalajs.react.vdom.prefix_<^._
      val O = Dynamic.literal
      ReactComponentB[UnderlayViewWidget]("UnderlayViewWidget")
        .initialState(State(None, None))
        .backend(Backend)
        .render { (p, s, b) =>
          <.div(
            <.div(
              SelectLeagueWidget(s.league, b.setLeague)(),
              // The stash selector is shown only after a league has been chosen.
              <.div(^.display := "inline-block")(s.league.map { l => SelectStashWidget(l, s.stashTabInfo, () => pc.getStashTabInfos(l.rpcName).map(_.toSeq), b.setStashTabInfo)()}),
              <.a(^.href := "javascript:void(0)", ^.className := "ctrl-btn", "resize")
            ),
            <.div(
              // Tab panel appears once a stash tab is selected.
              s.stashTabInfo.map(sti => VisualStashTabWidget(null, sti)())
            )
            // ,
            // VisualStashTabWidget(null, null)()
          )
        }
        .build
    }
  }
}
/** Props holder; apply() builds a fresh Component for the given cacher and mounts it. */
case class UnderlayViewWidget() {
  def apply(pc: PoeCacher) = new UnderlayViewWidget.Component(pc).component(this)
}
/** View that draws a stash tab as an absolutely positioned overlay and shows an
  * item-detail popup on hover. Contains both the React mount path (start) and a
  * legacy jQuery setup path (old). */
class UnderlayView(implicit val pc: PoeCacher) extends View {

  // Serializable screen rectangle describing where the tab panel is drawn.
  object Rect {implicit val ser = SerBuilder.forCase(this.apply _)}
  case class Rect(var top: Double, var left: Double, var w: Double, var h: Double)

  val rect = Rect(100, 100, 800, 800)
  val itemDetailHover = new ItemDetailHover()

  // jQuery handles for the view root and tab panel; assigned in old().
  var el: JQuery = _
  var tabEl: JQuery = _
  // Index of the stash tab currently displayed; changed via arrow keys.
  var tabIdx = 1
  val league = "Standard"

  // Mounts the React-based widget into the view's DOM element.
  override def start(el: JQuery): Unit = {
    val mel = el.get(0).asInstanceOf[dom.Element]
    val root = UnderlayViewWidget()(pc)
    React.render(root, mel)
  }

  // Legacy (pre-React) setup: builds the tab panel with jQuery and wires the
  // keyboard handler.
  def old(el: JQuery): Unit = {
    this.el = el
    val r = rect
    this.tabEl = jq(s"<div style='position:absolute;top:${r.top}px;left:${r.left}px; height:${r.h}px;width:${r.w}px;background-color:black'></div>")
    this.el.append(tabEl)
    val ti = tabIdx
    jq(window).on("keydown", f)
    el.append(itemDetailHover.el)
    getAndRenderTab(tabIdx)
  }

  // Fetches tab i (contents and metadata) and renders it into the panel.
  def getAndRenderTab(i: Int) {
    pc.getStashTab(league, i).foreach { tab =>
      pc.getStashTabInfos(league).foreach { infos =>
        val info = infos(i)
        renderTab(tab, info)
      }
    }
  }

  // Keyboard handler: left/right arrow keys page through stash tabs.
  // NOTE(review): the right-arrow path does not clamp to the number of tabs —
  // confirm whether out-of-range indices are handled by the cacher.
  val f: js.Function1[KeyboardEvent, _] = (e: KeyboardEvent) => {
    e.keyCode match {
      case KeyCode.left =>
        tabIdx = Math.max(0, tabIdx - 1)
        getAndRenderTab(tabIdx)
      case KeyCode.right =>
        tabIdx = tabIdx + 1
        getAndRenderTab(tabIdx)
      case _ =>
    }
  }

  override def stop(): Unit = {
    jq(window).off("keydown", f)
  }

  def calibrate() {
  }

  // Draws every non-socketed item of the tab as an <img> on a 12x12 grid, wiring
  // mouse enter/leave to show/hide the item-detail hover popup.
  def renderTab(tab: StashTab, tabInfo: StashTabInfo) {
    tabEl.empty()
    // Cell size: the stash grid is 12x12 cells.
    val dx = rect.w / 12
    val dy = rect.h / 12
    for {
      item <- tab.allItems(None)
      if !item.isInSocket
      x <- item.x.toOption
      y <- item.y.toOption
      w = item.w
      h = item.h
    } {
      val t = y * dy
      val l = x * dx
      val iw = w * dx
      val ih = h * dy
      val itemEl = jq(s"<img width='${iw}px' height='${ih}px' src='${item.getIconUrl}' style='border:2px solid white;position:absolute;top:${t}px;left:${l}px;height:${ih};width:${iw}'></img>")
      tabEl.append(itemEl)
      def on(e: JQueryEventObject): js.Any = {
        val ci = ItemParser.parseItem(item, StashTabIdx(tabIdx), tabInfo.n)
        itemDetailHover.setFirstItem(Some(ci))
        val de = e.asInstanceOf[js.Dynamic]
        itemDetailHover.show(
          x = de.clientX.asInstanceOf[Double],
          y = de.clientY.asInstanceOf[Double],
          compare = false
        )
      }
      def off(e: JQueryEventObject): js.Any = {
        itemDetailHover.setFirstItem(None)
        itemDetailHover.hide()
      }
      itemEl.mouseenter(on _)
      itemEl.mouseleave(off _)
    }
  }
}
package com.github.gdefacci.bdd
/** A function A => B that carries a human-readable description (used as its
  * toString) and, optionally, the source position where it was defined. */
class SelfDescribeF1[A,B](description:String, function:A => B, filePosition:Option[FilePosition]) extends (A => B) {
  def apply(a:A):B = function(a)
  override def toString = description
}
| gdefacci/bdd | core/src/main/scala/com/github/gdefacci/bdd/SelfDescribeF1.scala | Scala | mit | 223 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.math
import breeze.linalg._
import edu.latrobe._
import scala.collection._
/*
object SVec {
/*
@inline
final def derive(length: Int,
indices: TraversableOnce[Int],
values: TraversableOnce[Real])
: SVec = apply(length, indices.toArray, values.toArray)
*/
/*
final def tabulate(length: Int)(fn: Int => Real): SVec = {
val result = zeros(length)
var i = 0
while (i < length) {
result.update(i, fn(i))
i += 1
}
result
}
*/
/*
@inline
final def derive(value0: Real)
: DenseMatrix[Real] = DenseMatrix.create(1, 1, Array(value0))
@inline
final def derive(rows: Int,value0: Real, valuesN: Real*)
: DenseMatrix[Real] = derive(rows, value0 :: valuesN.toList)
@inline
final def derive[T](rows: Int, values: TraversableOnce[T])
: DenseMatrix[T] = {
val data = values.toArray
val cols = data.length / rows
require(data.length == rows * cols)
DenseMatrix.create(rows, cols, data)
}
final def empty[T]: DenseMatrix[T] = DenseMatrix.zeros[T](0, 0)
*/
}
*/ | bashimao/ltudl | base/src/main/scala/edu/latrobe/math/SVec.scala | Scala | apache-2.0 | 1,778 |
package models
import scala.slick.driver.MySQLDriver.simple._
/** Data-access object for the missed-calls application (Slick 2.x, MySQL). */
object DAO {

  // TableQuery handles, one per mapped table.
  val missedCalls = TableQuery[MissedCalls]
  val users = TableQuery[Users]
  val admins = TableQuery[Admins]
  val appRegs = TableQuery[AppRegs]
  val userEntries = TableQuery[UserEntries]
  val whitelistedItems = TableQuery[WhitelistedItems]
  val signinStatuses = TableQuery[SigninStatuses]
  val signingupUserEntries = TableQuery[SigningupUserEntries]

  // Database handle.
  // NOTE(review): credentials are hard-coded; they should come from configuration.
  lazy val db: Database = Database.forURL(url = "jdbc:mysql://localhost:3306/missed_calls_db", driver = "com.mysql.jdbc.Driver", user = "root", password = "root")

  /** Current time as an SQL timestamp. */
  private def now(): java.sql.Timestamp =
    new java.sql.Timestamp(System.currentTimeMillis())

  /** Timestamp for `minutes` minutes before now. Replaces the previous
    * deprecated java.util.Date.getMinutes/setMinutes arithmetic. */
  private def minutesAgo(minutes: Int): java.sql.Timestamp =
    new java.sql.Timestamp(System.currentTimeMillis() - minutes * 60000L)

  /** Creates all tables. */
  def createTables = db.withSession(implicit session => {
    missedCalls.ddl.create
    users.ddl.create
    admins.ddl.create
    appRegs.ddl.create
    whitelistedItems.ddl.create
    userEntries.ddl.create
    signinStatuses.ddl.create
    signingupUserEntries.ddl.create
  })

  /** Persists an admin row. */
  def saveAdmin(admin: Admin) = db.withTransaction(implicit session => {
    admins += admin
  })

  /** Persists a missed-call record. */
  def saveMissedCall(missedCall: MissedCall) = db.withSession(implicit session => {
    missedCalls += missedCall
  })

  /** Looks up a user by email. */
  def findOneByEmail(email: String) = db.withTransaction(implicit tx => {
    val userQuery = for (user <- users.filter(_.email === email)) yield user
    userQuery.firstOption
  })

  /** Looks up an admin by email. */
  def findOneAdminByEmail(email: String) = db.withTransaction(implicit tx => {
    val adminQuery = for (admin <- admins.filter(_.email === email)) yield admin
    adminQuery.firstOption
  })

  /** True when an admin exists with the given email and password.
    * NOTE(review): passwords appear to be compared in plain text — confirm. */
  def checkAdmin(email: String, password: String): Boolean = db.withTransaction(implicit tx => {
    val adminQuery = for (admin <- admins.filter(_.email === email).filter(_.password === password)) yield admin
    adminQuery.exists.run
  })

  /** True when the app is already registered for this SIM or phone number. */
  def isAppReg(simId: String, phno: String): Boolean = db.withTransaction(implicit tx => {
    val appRegSimQuery = for (appReg <- appRegs.filter(_.simId === simId)) yield appReg
    val appRegPhnoQuery = for (appReg <- appRegs.filter(_.phoneNumber === phno)) yield appReg
    (appRegSimQuery.exists.run || appRegPhnoQuery.exists.run)
  })

  /** Registers the app. */
  def regApp(appReg: AppReg) = db.withSession(implicit session => {
    appRegs += appReg
  })

  /** Unregisters the app for the given phone number. */
  def appUnReg(phno: String) = db.withTransaction(implicit tx => {
    val appUnRegQuery = for (appReg <- appRegs.filter(_.phoneNumber === phno)) yield appReg
    appUnRegQuery.delete
  })

  /** True when a registration exists for this SIM (input is trimmed). */
  def isSimExists(simId: String): Boolean = db.withTransaction(implicit tx => {
    val simRegQuery = for (appReg <- appRegs.filter(_.simId === simId.trim())) yield appReg
    simRegQuery.exists.run
  })

  /** True when the SIM's registration is flagged as whitelisted ('Y'). */
  def isWhitelisted(simId: String): Boolean = db.withTransaction(implicit tx => {
    val whitelistQuery = for (appReg <- appRegs.filter(_.simId === simId).filter(_.whitelisted === 'Y')) yield appReg
    whitelistQuery.exists.run
  })

  /** Whitelists the SIM: sets the flag and mirrors the entry into whitelistedItems. */
  def whitelist(simId: String) = db.withTransaction(implicit tx => {
    val appRegQ = for (appReg <- appRegs.filter(_.simId === simId)) yield appReg.whitelisted
    appRegQ.update('Y')
    val appRegQ2 = for (appReg <- appRegs.filter(_.simId === simId)) yield appReg
    appRegQ2.firstOption.foreach { appReg =>
      whitelistedItems += WhitelistedItem(appReg.simId, appReg.androidPhoneNumber, now())
    }
  })

  /** Removes the SIM from the whitelist (flag and mirror table). */
  def blacklist(simId: String) = db.withTransaction(implicit tx => {
    val appRegQ = for (appReg <- appRegs.filter(_.simId === simId)) yield appReg.whitelisted
    appRegQ.update('N')
    val blacklistQuery = for (whitelistedItem <- whitelistedItems.filter(_.simId === simId)) yield whitelistedItem
    blacklistQuery.delete
  })

  /** True when a user already exists with this email. */
  def checkUserEmailAvailability(email: String): Boolean = db.withTransaction(implicit tx => {
    val emailQ = for (user <- users.filter(_.email === email)) yield user
    emailQ.exists.run
  })

  /** True when a user already exists with this phone number. */
  def checkUserPhnoAvailability(phno: String): Boolean = db.withTransaction(implicit tx => {
    val phnoQ = for (user <- users.filter(_.phno === phno)) yield user
    phnoQ.exists.run
  })

  /** Inserts a user and returns the generated id. */
  def saveUser(user: User): Long = db.withTransaction(implicit tx => {
    def userAutoId = users returning users.map(_.id) into {
      case (_, id) => id
    }
    userAutoId.insert(user)
  })

  /** True when the user has an entry AND a missed call from `phno`, both within
    * the last `minutes` minutes. */
  def isMissedCallInInterval(userId: Long, phno: String, minutes: Int): Boolean = db.withTransaction(implicit tx => {
    val t = minutesAgo(minutes)
    val timeQ = for (
      userEntry <- userEntries.filter(_.userId === userId).filter(_.timestamp >= t);
      missedCall <- missedCalls
        .filter(_.phno === phno)
        .filter(_.timestamp >= t)
    ) yield missedCall
    timeQ.exists.run
  })

  /** Same check as isMissedCallInInterval, but for a user still signing up
    * (matched by email and phone number instead of user id). */
  def isMissedCallInIntervalForSigningupUser(su: SigningupUser, phno: String, minutes: Int): Boolean = db.withTransaction(implicit tx => {
    val t = minutesAgo(minutes)
    val timeQ = for (
      signingupUserEntry <- signingupUserEntries.filter(_.email === su.email).filter(_.phno === su.phno).filter(_.timestamp >= t);
      missedCall <- missedCalls
        .filter(_.phno === phno)
        .filter(_.timestamp >= t)
    ) yield missedCall
    timeQ.exists.run
  })

  /** Phone number for the given email, if the user exists. */
  def getPhno(email: String) = db.withTransaction(implicit tx => {
    val phnoQ = for (user <- users.filter(_.email === email)) yield user.phno
    phnoQ.firstOption
  })

  /** Picks a random phone number from the whitelist. Returns None when the
    * whitelist is empty (the original indexed with Random.nextInt(0), which
    * throws IllegalArgumentException). */
  def getPhnoFromWhiteList() = db.withTransaction(implicit tx => {
    val whiteQ = for (whiteList <- whitelistedItems) yield whiteList.androidPhoneNumber
    val list = whiteQ.list()
    if (list.isEmpty) None
    else Some(list(scala.util.Random.nextInt(list.length)))
  })

  /** Records a user entry with the current timestamp. */
  def insertUserEntry(userId: Long) = db.withSession(implicit session => {
    userEntries += UserEntry(userId, now())
  })

  /** Records a signing-up attempt with the current timestamp. */
  def insertSigningupUserEntry(su: SigningupUser) = db.withTransaction(implicit tx => {
    signingupUserEntries += SigningupUserEntry(su.email, su.phno, now())
  })

  /** The user's sign-in status flag, if the user exists. */
  def isTryingToSignin(email: String) = db.withTransaction(implicit tx => {
    val signinStatusQ = for ((user, signinStatus) <- users.filter(_.email === email).innerJoin(signinStatuses).on(_.id === _.userId)) yield signinStatus.status
    signinStatusQ.firstOption
  })

  /** Sets the user's sign-in status flag to 1; no-op if the user is unknown. */
  def signinOn(email: String) = db.withTransaction(implicit tx => {
    val userQ = for (user <- users.filter(_.email === email)) yield user
    userQ.firstOption.foreach { user =>
      val signinQ = for (signin <- signinStatuses.filter(_.userId === user.id)) yield signin.status
      signinQ.update(1)
    }
  })

  /** Sets the user's sign-in status flag to 0; no-op if the user is unknown. */
  def signinOff(email: String) = db.withTransaction(implicit tx => {
    val userQ = for (user <- users.filter(_.email === email)) yield user
    userQ.firstOption.foreach { user =>
      val signinQ = for (signin <- signinStatuses.filter(_.userId === user.id)) yield signin.status
      signinQ.update(0)
    }
  })

  /** Creates the initial (signed-out) status row for a user. */
  def saveSigninStatusEntry(userId: Long) = db.withSession(implicit session => {
    signinStatuses += SigninStatus(userId, 0)
  })
}
package svstm.transactions
import svstm.exceptions.CommitException
import scala.concurrent.stm.svstm.SVSTMTxnExecutor
/** Top-level read-write transaction: validates its read set under the global
  * commit lock and publishes its write set under a fresh transaction number. */
class TopLevelReadWriteTransaction(number: Int, parent: ReadWriteTransaction = null) extends ReadWriteTransaction(number, parent) {

  /** A transaction is a write transaction iff it buffered at least one write. */
  def isWriteTransaction = boxesWritten.nonEmpty

  /**
   * Attempts to commit. Read-only transactions commit trivially. Write
   * transactions take the global commit lock, validate the read set, and on
   * success publish their writes under the next transaction number before
   * advancing the global counter.
   *
   * @throws CommitException when a concurrent commit invalidated this one
   */
  def tryCommit() = {
    if (isWriteTransaction) {
      SVSTMTxnExecutor.commitLock.lock()
      try {
        if (isValidCommit()) {
          val newTxNumber = SVSTMTxnExecutor.mostRecentNumber.get() + 1
          doCommit(newTxNumber)
          // Advance the counter only after the writes are visible.
          SVSTMTxnExecutor.mostRecentNumber.incrementAndGet()
        } else {
          throw CommitException
        }
      } finally {
        SVSTMTxnExecutor.commitLock.unlock()
      }
    }
  }

  /**
   * Validation: every box read must still hold the body observed during the
   * transaction. Rewritten with forall instead of a non-local `return` from a
   * for-loop (non-local returns are implemented by throwing and are deprecated).
   */
  def isValidCommit(): Boolean =
    bodiesRead.forall { case (vBox, vBoxBody) => vBox.body == vBoxBody }

  /** Publishes each buffered value into its box, versioned with newTxNumber. */
  def doCommit(newTxNumber: Int) = {
    for ((vBox, newValue) <- boxesWritten)
      yield vBox.commit(newValue, newTxNumber)
  }
}
package cromwell.webservice
/** A single key/value pair from a request query string. */
case class QueryParameter(key: String, value: String)
/** Compiled regexes for validating workflow and call identifiers. Both patterns
  * use (?x) extended mode, so whitespace and inline # comments in the pattern
  * text are ignored by the regex engine. */
object Patterns {
  // Matches a bare WDL workflow identifier, captured as group 1.
  val WorkflowName = """
(?x) # Turn on comments and whitespace insensitivity.
( # Begin capture.
[a-zA-Z][a-zA-Z0-9_]* # WDL identifier naming pattern of an initial alpha character followed by zero
# or more alphanumeric or underscore characters.
) # End capture.
""".trim.r
  // Matches "workflow.call" (group 1) with an optional ".shard" suffix (group 2).
  val CallFullyQualifiedName = """
(?x) # Turn on comments and whitespace insensitivity.
( # Begin outer capturing group for FQN.
(?:[a-zA-Z][a-zA-Z0-9_]*) # Inner noncapturing group for top-level workflow name. This is the WDL
# identifier naming pattern of an initial alpha character followed by zero
# or more alphanumeric or underscore characters.
(?:\\.[a-zA-Z][a-zA-Z0-9_]*){1} # Inner noncapturing group for call name, a literal dot followed by a WDL
# identifier. Currently this is quantified to {1} since the call name is
# mandatory and nested workflows are not supported. This could be changed
# to + or a different quantifier if these assumptions change.
) # End outer capturing group for FQN.
(?: # Begin outer noncapturing group for shard.
\\. # Literal dot.
(\\d+) # Captured shard digits.
)? # End outer optional noncapturing group for shard.
""".trim.r // The trim is necessary as (?x) must be at the beginning of the regex.
}
| ohsu-comp-bio/cromwell | engine/src/main/scala/cromwell/webservice/webservice_.scala | Scala | bsd-3-clause | 2,091 |
package com.twitter.util
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.util.Locale
import java.util.TimeZone
import java.util.concurrent.TimeUnit
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.IntegrationPatience
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import com.twitter.conversions.DurationOps._
import java.time.Instant
import java.time.OffsetDateTime
import java.time.ZoneOffset
import java.time.ZonedDateTime
import org.scalatest.wordspec.AnyWordSpec
/**
 * Shared specification for any [[TimeLike]] implementation. Concrete suites mix this
 * in and supply `ops` (the companion object providing the factories `fromNanoseconds`,
 * `fromSeconds`, etc. and the sentinels `Top`, `Bottom`, `Undefined`, `Zero`).
 * All assertions below exercise only members reached through `ops`.
 */
trait TimeLikeSpec[T <: TimeLike[T]] extends AnyWordSpec with ScalaCheckDrivenPropertyChecks {
  val ops: TimeLikeOps[T]
  import ops._
  "Top, Bottom, Undefined, Nanoseconds(_), Finite(_)" should {
    // `easyVs` deliberately excludes near-extreme finite values whose two's-complement
    // arithmetic breaks symmetric properties (see "complementary diff" below);
    // `vs` adds them back for the properties that do hold at the extremes.
    val easyVs = Seq(Zero, Top, Bottom, Undefined, fromNanoseconds(1), fromNanoseconds(-1))
    val vs = easyVs ++ Seq(fromNanoseconds(Long.MaxValue - 1), fromNanoseconds(Long.MinValue + 1))
    "behave like boxed doubles" in {
      // Ordering of sentinels mirrors IEEE-754 boxed doubles: Bottom < finite < Top < Undefined (NaN-like).
      assert((Top compare Undefined) < 0)
      assert((Bottom compare Top) < 0)
      assert((Undefined compare Undefined) == 0)
      assert((Top compare Top) == 0)
      assert((Bottom compare Bottom) == 0)
      assert(Top + Duration.Top == Top)
      assert(Bottom - Duration.Bottom == Undefined)
      assert(Top - Duration.Top == Undefined)
      assert(Bottom + Duration.Bottom == Bottom)
    }
    "complementary diff" in {
      // Note that this doesn't always hold because of two's
      // complement arithmetic.
      for (a <- easyVs; b <- easyVs)
        assert((a diff b) == -(b diff a))
    }
    "complementary compare" in {
      // Antisymmetry of compare: both zero, or strictly opposite signs.
      for (a <- vs; b <- vs) {
        val x = a compare b
        val y = b compare a
        assert(((x == 0 && y == 0) || (x < 0 != y < 0)) == true)
      }
    }
    "commutative max" in {
      for (a <- vs; b <- vs)
        assert((a max b) == (b max a))
    }
    "commutative min" in {
      for (a <- vs; b <- vs)
        assert((a min b) == (b min a))
    }
    // Arithmetic past the representable range must saturate to Bottom/Top, not wrap.
    "handle underflows" in {
      assert(fromNanoseconds(Long.MinValue) - 1.nanosecond == Bottom)
      assert(fromMicroseconds(Long.MinValue) - 1.nanosecond == Bottom)
    }
    "handle overflows" in {
      assert(fromNanoseconds(Long.MaxValue) + 1.nanosecond == Top)
      assert(fromMicroseconds(Long.MaxValue) + 1.nanosecond == Top)
    }
    "Nanoseconds(_) extracts only finite values, in nanoseconds" in {
      for (t <- Seq(Top, Bottom, Undefined))
        assert(t match {
          case Nanoseconds(_) => false
          case _ => true
        })
      for (ns <- Seq(Long.MinValue, -1, 0, 1, Long.MaxValue); t = fromNanoseconds(ns))
        assert(t match {
          case Nanoseconds(`ns`) => true
          case _ => false
        })
    }
    "Finite(_) extracts only finite values" in {
      for (t <- Seq(Top, Bottom, Undefined))
        assert(t match {
          case Finite(_) => false
          case _ => true
        })
      for (ns <- Seq(Long.MinValue, -1, 0, 1, Long.MaxValue); t = fromNanoseconds(ns))
        assert(t match {
          case Finite(`t`) => true
          case _ => false
        })
    }
    "roundtrip through serialization" in {
      // Java serialization round-trip must preserve equality for sentinels and extremes alike.
      for (v <- vs) {
        val bytes = new ByteArrayOutputStream
        val out = new ObjectOutputStream(bytes)
        out.writeObject(v)
        val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
        assert(in.readObject() == v)
      }
    }
    "has correct isZero behaviour" in {
      for (t <- Seq(Top, Bottom, Undefined, fromNanoseconds(1L))) {
        assert(t.isZero == false)
      }
      // Zero constructed through every factory must compare as zero.
      for (z <- Seq(
          Zero,
          fromNanoseconds(0),
          fromMicroseconds(0),
          fromFractionalSeconds(0),
          fromMilliseconds(0),
          fromSeconds(0),
          fromMinutes(0)
        )) {
        assert(z.isZero == true)
      }
    }
  }
  "Top" should {
    "be impermeable to finite arithmetic" in {
      assert(Top - 0.seconds == Top)
      assert(Top - 100.seconds == Top)
      assert(Top - Duration.fromNanoseconds(Long.MaxValue) == Top)
    }
    "become undefined when subtracted from itself, or added to bottom" in {
      assert(Top - Duration.Top == Undefined)
      assert(Top + Duration.Bottom == Undefined)
    }
    "not be equal to the maximum value" in {
      assert(fromNanoseconds(Long.MaxValue) != Top)
    }
    "always be max" in {
      assert((Top max fromSeconds(1)) == Top)
      assert((Top max fromFractionalSeconds(1.0)) == Top)
      assert((Top max fromNanoseconds(Long.MaxValue)) == Top)
      assert((Top max Bottom) == Top)
    }
    "greater than everything else" in {
      assert(fromSeconds(0) < Top)
      assert(fromFractionalSeconds(Double.MaxValue) < Top)
      assert(fromNanoseconds(Long.MaxValue) < Top)
    }
    "equal to itself" in {
      assert(Top == Top)
    }
    "more or less equals only to itself" in {
      assert(Top.moreOrLessEquals(Top, Duration.Top) == true)
      assert(Top.moreOrLessEquals(Top, Duration.Zero) == true)
      assert(Top.moreOrLessEquals(Bottom, Duration.Top) == true)
      assert(Top.moreOrLessEquals(Bottom, Duration.Zero) == false)
      assert(Top.moreOrLessEquals(fromSeconds(0), Duration.Top) == true)
      assert(Top.moreOrLessEquals(fromSeconds(0), Duration.Bottom) == false)
    }
    "Undefined diff to Top" in {
      assert((Top diff Top) == Duration.Undefined)
    }
  }
  "Bottom" should {
    "be impermeable to finite arithmetic" in {
      assert(Bottom + 0.seconds == Bottom)
      assert(Bottom + 100.seconds == Bottom)
      assert(Bottom + Duration.fromNanoseconds(Long.MaxValue) == Bottom)
    }
    "become undefined when added with Top or subtracted by bottom" in {
      assert(Bottom + Duration.Top == Undefined)
      assert(Bottom - Duration.Bottom == Undefined)
    }
    "always be min" in {
      assert((Bottom min Top) == Bottom)
      assert((Bottom min fromNanoseconds(0)) == Bottom)
    }
    "less than everything else" in {
      assert(Bottom < fromSeconds(0))
      assert(Bottom < fromNanoseconds(Long.MaxValue))
      assert(Bottom < fromNanoseconds(Long.MinValue))
    }
    "less than Top" in {
      assert(Bottom < Top)
    }
    "equal to itself" in {
      assert(Bottom == Bottom)
    }
    "more or less equals only to itself" in {
      assert(Bottom.moreOrLessEquals(Bottom, Duration.Top) == true)
      assert(Bottom.moreOrLessEquals(Bottom, Duration.Zero) == true)
      assert(Bottom.moreOrLessEquals(Top, Duration.Bottom) == false)
      assert(Bottom.moreOrLessEquals(Top, Duration.Zero) == false)
      assert(Bottom.moreOrLessEquals(fromSeconds(0), Duration.Top) == true)
      assert(Bottom.moreOrLessEquals(fromSeconds(0), Duration.Bottom) == false)
    }
    "Undefined diff to Bottom" in {
      assert((Bottom diff Bottom) == Duration.Undefined)
    }
  }
  "Undefined" should {
    "be impermeable to any arithmetic" in {
      assert(Undefined + 0.seconds == Undefined)
      assert(Undefined + 100.seconds == Undefined)
      assert(Undefined + Duration.fromNanoseconds(Long.MaxValue) == Undefined)
    }
    "become undefined when added with Top or subtracted by bottom" in {
      assert(Undefined + Duration.Top == Undefined)
      assert(Undefined - Duration.Undefined == Undefined)
    }
    "always be max" in {
      assert((Undefined max Top) == Undefined)
      assert((Undefined max fromNanoseconds(0)) == Undefined)
    }
    "greater than everything else" in {
      assert(fromSeconds(0) < Undefined)
      assert(Top < Undefined)
      assert(fromNanoseconds(Long.MaxValue) < Undefined)
    }
    "equal to itself" in {
      assert(Undefined == Undefined)
    }
    "not more or less equal to anything" in {
      // NaN-like semantics, except that an Undefined tolerance makes everything "equal".
      assert(Undefined.moreOrLessEquals(Undefined, Duration.Top) == false)
      assert(Undefined.moreOrLessEquals(Undefined, Duration.Zero) == false)
      assert(Undefined.moreOrLessEquals(Top, Duration.Undefined) == true)
      assert(Undefined.moreOrLessEquals(Top, Duration.Zero) == false)
      assert(Undefined.moreOrLessEquals(fromSeconds(0), Duration.Top) == false)
      assert(Undefined.moreOrLessEquals(fromSeconds(0), Duration.Undefined) == true)
    }
    "Undefined on diff" in {
      assert((Undefined diff Top) == Duration.Undefined)
      assert((Undefined diff Bottom) == Duration.Undefined)
      assert((Undefined diff fromNanoseconds(123)) == Duration.Undefined)
    }
  }
  "values" should {
    "reflect their underlying value" in {
      val nss = Seq(
        2592000000000000000L, // 30000.days
        1040403005001003L, // 12.days+1.hour+3.seconds+5.milliseconds+1.microsecond+3.nanoseconds
        123000000000L, // 123.seconds
        1L
      )
      // Each in* accessor is integer division of the nanosecond count by the unit size.
      for (ns <- nss) {
        val t = fromNanoseconds(ns)
        assert(t.inNanoseconds == ns)
        assert(t.inMicroseconds == ns / 1000L)
        assert(t.inMilliseconds == ns / 1000000L)
        assert(t.inLongSeconds == ns / 1000000000L)
        assert(t.inMinutes == ns / 60000000000L)
        assert(t.inHours == ns / 3600000000000L)
        assert(t.inDays == ns / 86400000000000L)
      }
    }
  }
  "inSeconds" should {
    "equal inLongSeconds when in 32-bit range" in {
      val nss = Seq(
        315370851000000000L, // 3650.days+3.hours+51.seconds
        1040403005001003L, // 12.days+1.hour+3.seconds+5.milliseconds+1.microsecond+3.nanoseconds
        1L
      )
      for (ns <- nss) {
        val t = fromNanoseconds(ns)
        assert(t.inLongSeconds == t.inSeconds)
      }
    }
    "clamp value to Int.MinValue or MaxValue when out of range" in {
      val longNs = 2160000000000000000L // 25000.days
      assert(fromNanoseconds(longNs).inSeconds == Int.MaxValue)
      assert(fromNanoseconds(-longNs).inSeconds == Int.MinValue)
    }
  }
  "rounding" should {
    "maintain top and bottom" in {
      assert(Top.floor(1.hour) == Top)
      assert(Bottom.floor(1.hour) == Bottom)
    }
    "divide by zero" in {
      // Flooring by a zero-length unit behaves like division by zero: sign decides the sentinel.
      assert(Zero.floor(Duration.Zero) == Undefined)
      assert(fromSeconds(1).floor(Duration.Zero) == Top)
      assert(fromSeconds(-1).floor(Duration.Zero) == Bottom)
    }
    "deal with undefineds" in {
      assert(Undefined.floor(0.seconds) == Undefined)
      assert(Undefined.floor(Duration.Top) == Undefined)
      assert(Undefined.floor(Duration.Bottom) == Undefined)
      assert(Undefined.floor(Duration.Undefined) == Undefined)
    }
    "round to itself" in {
      for (s <- Seq(Long.MinValue, -1, 1, Long.MaxValue); t = s.nanoseconds)
        assert(t.floor(t.inNanoseconds.nanoseconds) == t)
    }
  }
  "floor" should {
    "round down" in {
      assert(60.seconds.floor(1.minute) == 60.seconds)
      assert(100.seconds.floor(1.minute) == 60.seconds)
      assert(119.seconds.floor(1.minute) == 60.seconds)
      assert(120.seconds.floor(1.minute) == 120.seconds)
    }
  }
  "ceiling" should {
    "round up" in {
      assert(60.seconds.ceil(1.minute) == 60.seconds)
      assert(100.seconds.ceil(1.minute) == 120.seconds)
      assert(119.seconds.ceil(1.minute) == 120.seconds)
      assert(120.seconds.ceil(1.minute) == 120.seconds)
    }
  }
  "from*" should {
    "never over/under flow nanos" in {
      for (v <- Seq(Long.MinValue, Long.MaxValue)) {
        fromNanoseconds(v) match {
          case Nanoseconds(ns) => assert(ns == v)
        }
      }
    }
    "overflow millis" in {
      val millis = TimeUnit.NANOSECONDS.toMillis(Long.MaxValue)
      // NOTE(review): `ns == millis * 1e6` compares Long to Double via widening — exact here
      // because the value fits in a Double's 53-bit mantissa, but worth confirming.
      fromMilliseconds(millis) match {
        case Nanoseconds(ns) => assert(ns == millis * 1e6)
      }
      assert(fromMilliseconds(millis + 1) == Top)
    }
    "underflow millis" in {
      val millis = TimeUnit.NANOSECONDS.toMillis(Long.MinValue)
      fromMilliseconds(millis) match {
        case Nanoseconds(ns) => assert(ns == millis * 1e6)
      }
      assert(fromMilliseconds(millis - 1) == Bottom)
    }
  }
}
/** Tests for `TimeFormat` parsing and formatting across locales and time zones. */
class TimeFormatTest extends AnyWordSpec {
  "TimeFormat" should {
    "format correctly with non US locale" in {
      val germanLocale = Locale.GERMAN
      val dayOfWeekPattern = "EEEE"
      val day = "Donnerstag"
      val parser = new TimeFormat(dayOfWeekPattern, Some(germanLocale))
      // Round trip: parsing a German day name and re-formatting yields the same text.
      assert(parser.parse(day).format(dayOfWeekPattern, germanLocale) == day)
    }
    "set UTC timezone as default" in {
      val hourMinutePattern = "HH:mm"
      val parser = new TimeFormat(hourMinutePattern)
      // No explicit zone: parse and format both use UTC, so the text is unchanged.
      assert(parser.parse("16:04").format(hourMinutePattern) == "16:04")
    }
    "set non-UTC timezone correctly" in {
      val hourMinutePattern = "HH:mm"
      val parser = new TimeFormat(hourMinutePattern, TimeZone.getTimeZone("EST"))
      // 16:04 EST corresponds to 21:04 UTC, so the default-UTC format shifts by 5 hours.
      assert(parser.parse("16:04").format(hourMinutePattern) == "21:04")
    }
  }
}
// Binds TimeLikeSpec's abstract `ops` member to the `Time` companion object, so that
// TimeTest runs the shared TimeLike specification against Time's factories.
trait TimeOps { val ops: Time.type = Time }
/**
 * Tests for [[Time]]: equality/ordering, arithmetic with [[Duration]], factory
 * saturation at Top/Bottom, time-control helpers (`withTimeAt`,
 * `withCurrentTimeFrozen`, `withTimeFunction`) and java.time conversions.
 * Inherits the generic TimeLike properties via [[TimeLikeSpec]].
 */
class TimeTest
    extends AnyWordSpec
    with TimeOps
    with TimeLikeSpec[Time]
    with Eventually
    with IntegrationPatience {
  "Time" should {
    "work in collections" in {
      val t0 = Time.fromSeconds(100)
      val t1 = Time.fromSeconds(100)
      assert(t0 == t1)
      assert(t0.hashCode == t1.hashCode)
      val pairs = List((t0, "foo"), (t1, "bar"))
      assert(pairs.groupBy { case (time: Time, value: String) => time } == Map(t0 -> pairs))
    }
    "now should be now" in {
      // 20ms slack between Time.now and the system clock; generous enough for CI jitter.
      assert((Time.now.inMillis - System.currentTimeMillis).abs < 20L)
    }
    "withTimeAt" in {
      val t0 = new Time(123456789L)
      // Inside the block Time.now is pinned to t0 regardless of real elapsed time...
      Time.withTimeAt(t0) { _ =>
        assert(Time.now == t0)
        Thread.sleep(50)
        assert(Time.now == t0)
      }
      // ...and reverts to the real clock once the block exits.
      assert((Time.now.inMillis - System.currentTimeMillis).abs < 20L)
    }
    "withTimeAt nested" in {
      val t0 = new Time(123456789L)
      val t1 = t0 + 10.minutes
      Time.withTimeAt(t0) { _ =>
        assert(Time.now == t0)
        Time.withTimeAt(t1) { _ => assert(Time.now == t1) }
        assert(Time.now == t0)
      }
      assert((Time.now.inMillis - System.currentTimeMillis).abs < 20L)
    }
    "withTimeAt threaded" in {
      val t0 = new Time(314159L)
      val t1 = new Time(314160L)
      Time.withTimeAt(t0) { tc =>
        assert(Time.now == t0)
        Thread.sleep(50)
        assert(Time.now == t0)
        tc.advance(Duration.fromNanoseconds(1))
        assert(Time.now == t1)
        tc.set(t0)
        assert(Time.now == t0)
        @volatile var threadTime: Option[Time] = None
        val thread = new Thread {
          override def run(): Unit = {
            threadTime = Some(Time.now)
          }
        }
        thread.start()
        thread.join()
        // The freshly started thread does not inherit the time control: it sees the real clock.
        assert(threadTime.get != t0)
      }
      assert((Time.now.inMillis - System.currentTimeMillis).abs < 20L)
    }
    "withTimeFunction" in {
      val t0 = Time.now
      var t = t0
      // Time.now tracks the mutable `t` captured by the time function.
      Time.withTimeFunction(t) { _ =>
        assert(Time.now == t0)
        Thread.sleep(50)
        assert(Time.now == t0)
        val delta = 100.milliseconds
        t += delta
        assert(Time.now == t0 + delta)
      }
    }
    "withCurrentTimeFrozen" in {
      // NB: the inner t0 deliberately shadows the outer one — only the frozen "now" is asserted.
      val t0 = new Time(123456789L)
      Time.withCurrentTimeFrozen { _ =>
        val t0 = Time.now
        Thread.sleep(50)
        assert(Time.now == t0)
      }
      assert((Time.now.inMillis - System.currentTimeMillis).abs < 20L)
    }
    "advance" in {
      val t0 = new Time(123456789L)
      val delta = 5.seconds
      Time.withTimeAt(t0) { tc =>
        assert(Time.now == t0)
        tc.advance(delta)
        assert(Time.now == (t0 + delta))
      }
      assert((Time.now.inMillis - System.currentTimeMillis).abs < 20L)
    }
    "sleep" in {
      Time.withCurrentTimeFrozen { ctl =>
        // Save/restore the Local context so the spawned thread shares the mocked clock
        // (unlike "withTimeAt threaded" above, where the thread sees the real clock).
        val ctx = Local.save()
        val r = new Runnable {
          def run(): Unit = {
            Local.restore(ctx)
            Time.sleep(5.seconds)
          }
        }
        @volatile var x = 0
        val t = new Thread(r)
        t.start()
        assert(t.isAlive == true)
        eventually {
          assert(t.getState == Thread.State.TIMED_WAITING)
        }
        // Advancing the controlled clock wakes the mocked sleep.
        ctl.advance(5.seconds)
        t.join()
        assert(t.isAlive == false)
      }
    }
    "compare" in {
      assert(10.seconds.afterEpoch < 11.seconds.afterEpoch)
      assert(10.seconds.afterEpoch == 10.seconds.afterEpoch)
      assert(11.seconds.afterEpoch > 10.seconds.afterEpoch)
      assert(Time.fromMilliseconds(Long.MaxValue) > Time.now)
    }
    "equals" in {
      assert(Time.Top == Time.Top)
      assert(Time.Top != Time.Bottom)
      assert(Time.Top != Time.Undefined)
      assert(Time.Bottom != Time.Top)
      assert(Time.Bottom == Time.Bottom)
      assert(Time.Bottom != Time.Undefined)
      assert(Time.Undefined != Time.Top)
      assert(Time.Undefined != Time.Bottom)
      assert(Time.Undefined == Time.Undefined)
      val now = Time.now
      assert(now == now)
      assert(now == Time.fromNanoseconds(now.inNanoseconds))
      assert(now != now + 1.nanosecond)
    }
    "+ delta" in {
      assert(10.seconds.afterEpoch + 5.seconds == 15.seconds.afterEpoch)
    }
    "- delta" in {
      assert(10.seconds.afterEpoch - 5.seconds == 5.seconds.afterEpoch)
    }
    "- time" in {
      assert(10.seconds.afterEpoch - 5.seconds.afterEpoch == 5.seconds)
    }
    "max" in {
      assert((10.seconds.afterEpoch max 5.seconds.afterEpoch) == 10.seconds.afterEpoch)
      assert((5.seconds.afterEpoch max 10.seconds.afterEpoch) == 10.seconds.afterEpoch)
    }
    "min" in {
      assert((10.seconds.afterEpoch min 5.seconds.afterEpoch) == 5.seconds.afterEpoch)
      assert((5.seconds.afterEpoch min 10.seconds.afterEpoch) == 5.seconds.afterEpoch)
    }
    "moreOrLessEquals" in {
      val now = Time.now
      // Tolerance is inclusive: exactly 1 second apart with a 1-second tolerance passes.
      assert(now.moreOrLessEquals(now + 1.second, 1.second) == true)
      assert(now.moreOrLessEquals(now - 1.seconds, 1.second) == true)
      assert(now.moreOrLessEquals(now + 2.seconds, 1.second) == false)
      assert(now.moreOrLessEquals(now - 2.seconds, 1.second) == false)
    }
    "floor" in {
      val format = new TimeFormat("yyyy-MM-dd HH:mm:ss.SSS")
      val t0 = format.parse("2010-12-24 11:04:07.567")
      assert(t0.floor(1.millisecond) == t0)
      assert(t0.floor(10.milliseconds) == format.parse("2010-12-24 11:04:07.560"))
      assert(t0.floor(1.second) == format.parse("2010-12-24 11:04:07.000"))
      assert(t0.floor(5.second) == format.parse("2010-12-24 11:04:05.000"))
      assert(t0.floor(1.minute) == format.parse("2010-12-24 11:04:00.000"))
      assert(t0.floor(1.hour) == format.parse("2010-12-24 11:00:00.000"))
    }
    "since" in {
      val t0 = Time.now
      val t1 = t0 + 10.seconds
      assert(t1.since(t0) == 10.seconds)
      assert(t0.since(t1) == (-10).seconds)
    }
    "sinceEpoch" in {
      val t0 = Time.epoch + 100.hours
      assert(t0.sinceEpoch == 100.hours)
    }
    "sinceNow" in {
      Time.withCurrentTimeFrozen { _ =>
        val t0 = Time.now + 100.hours
        assert(t0.sinceNow == 100.hours)
      }
    }
    "fromFractionalSeconds" in {
      // Double round-tripping loses sub-microsecond precision, hence the slop.
      val tolerance = 2.microseconds // we permit 1us slop
      forAll { (i: Int) =>
        assert(
          Time.fromSeconds(i).moreOrLessEquals(Time.fromFractionalSeconds(i.toDouble), tolerance)
        )
      }
      forAll { (d: Double) =>
        val magic = 9223372036854775L // cribbed from Time.fromMicroseconds
        val microseconds = d * 1.second.inMicroseconds
        whenever(microseconds > -magic && microseconds < magic) {
          assert(
            Time
              .fromMicroseconds(microseconds.toLong)
              .moreOrLessEquals(Time.fromFractionalSeconds(d), tolerance)
          )
        }
      }
      forAll { (l: Long) =>
        val seconds: Double = l.toDouble / 1.second.inNanoseconds
        assert(
          Time.fromFractionalSeconds(seconds).moreOrLessEquals(Time.fromNanoseconds(l), tolerance)
        )
      }
    }
    "fromMicroseconds" in {
      // Values near Long min/max saturate to Bottom/Top when converted to nanoseconds.
      assert(Time.fromMicroseconds(0).inNanoseconds == 0L)
      assert(Time.fromMicroseconds(-1).inNanoseconds == -1L * 1000L)
      assert(Time.fromMicroseconds(Long.MaxValue).inNanoseconds == Long.MaxValue)
      assert(Time.fromMicroseconds(Long.MaxValue - 1) == Time.Top)
      assert(Time.fromMicroseconds(Long.MinValue) == Time.Bottom)
      assert(Time.fromMicroseconds(Long.MinValue + 1) == Time.Bottom)
      val currentTimeMicros = System.currentTimeMillis() * 1000
      assert(
        Time
          .fromMicroseconds(currentTimeMicros)
          .inNanoseconds == currentTimeMicros.microseconds.inNanoseconds
      )
    }
    "fromMillis" in {
      assert(Time.fromMilliseconds(0).inNanoseconds == 0L)
      assert(Time.fromMilliseconds(-1).inNanoseconds == -1L * 1000000L)
      assert(Time.fromMilliseconds(Long.MaxValue).inNanoseconds == Long.MaxValue)
      assert(Time.fromMilliseconds(Long.MaxValue - 1) == Time.Top)
      assert(Time.fromMilliseconds(Long.MinValue) == Time.Bottom)
      assert(Time.fromMilliseconds(Long.MinValue + 1) == Time.Bottom)
      val currentTimeMs = System.currentTimeMillis
      assert(Time.fromMilliseconds(currentTimeMs).inNanoseconds == currentTimeMs * 1000000L)
    }
    "fromMinutes" in {
      assert(Time.fromMinutes(0).inNanoseconds == 0L)
      assert(Time.fromMinutes(-1).inNanoseconds == -60L * 1000000000L)
      assert(Time.fromMinutes(Int.MaxValue).inNanoseconds == Long.MaxValue)
      assert(Time.fromMinutes(Int.MaxValue) == Time.Top)
      assert(Time.fromMinutes(Int.MinValue) == Time.Bottom)
    }
    "fromHours" in {
      assert(Time.fromHours(1).inNanoseconds == Time.fromMinutes(60).inNanoseconds)
      assert(Time.fromHours(0).inNanoseconds == 0L)
      assert(Time.fromHours(-1).inNanoseconds == -3600L * 1000000000L)
      assert(Time.fromHours(Int.MaxValue).inNanoseconds == Long.MaxValue)
      assert(Time.fromHours(Int.MaxValue) == Time.Top)
      assert(Time.fromHours(Int.MinValue) == Time.Bottom)
    }
    "fromDays" in {
      assert(Time.fromDays(1).inNanoseconds == Time.fromHours(24).inNanoseconds)
      assert(Time.fromDays(0).inNanoseconds == 0L)
      assert(Time.fromDays(-1).inNanoseconds == -3600L * 24L * 1000000000L)
      assert(Time.fromDays(Int.MaxValue).inNanoseconds == Long.MaxValue)
      assert(Time.fromDays(Int.MaxValue) == Time.Top)
      assert(Time.fromDays(Int.MinValue) == Time.Bottom)
    }
    "until" in {
      val t0 = Time.now
      val t1 = t0 + 10.seconds
      assert(t0.until(t1) == 10.seconds)
      assert(t1.until(t0) == (-10).seconds)
    }
    "untilEpoch" in {
      val t0 = Time.epoch - 100.hours
      assert(t0.untilEpoch == 100.hours)
    }
    "untilNow" in {
      Time.withCurrentTimeFrozen { _ =>
        val t0 = Time.now - 100.hours
        assert(t0.untilNow == 100.hours)
      }
    }
    "toInstant" in {
      Time.withCurrentTimeFrozen { _ =>
        val instant = Instant.ofEpochMilli(Time.now.inMilliseconds)
        assert(instant.toEpochMilli == Time.now.inMillis)
        // java.time.Instant:getNano returns the nanoseconds of the second
        assert(instant.getNano == Time.now.inNanoseconds % Duration.NanosPerSecond)
      }
    }
    "toZonedDateTime" in {
      Time.withCurrentTimeFrozen { _ =>
        val zonedDateTime =
          ZonedDateTime.ofInstant(Instant.ofEpochMilli(Time.now.inMilliseconds), ZoneOffset.UTC)
        assert(Time.now.toZonedDateTime == zonedDateTime)
        // java.time.Instant:getNano returns the nanoseconds of the second
        assert(zonedDateTime.getNano == Time.now.inNanoseconds % Duration.NanosPerSecond)
      }
    }
    "toOffsetDateTime" in {
      Time.withCurrentTimeFrozen { _ =>
        val offsetDateTime =
          OffsetDateTime.ofInstant(Instant.ofEpochMilli(Time.now.inMilliseconds), ZoneOffset.UTC)
        assert(Time.now.toOffsetDateTime == offsetDateTime)
        // java.time.Instant:getNano returns the nanoseconds of the second
        assert(offsetDateTime.getNano == Time.now.inNanoseconds % Duration.NanosPerSecond)
      }
    }
  }
}
| twitter/util | util-core/src/test/scala/com/twitter/util/TimeTest.scala | Scala | apache-2.0 | 23,910 |
package org.tmoerman.plongeur.tda.cluster
import org.tmoerman.plongeur.tda.Distances._
import org.tmoerman.plongeur.tda.Model._
import org.tmoerman.plongeur.tda.cluster.Clustering._
import org.tmoerman.plongeur.tda.cluster.SmileClustering.createLocalClustering
import smile.clustering.HierarchicalClustering
import smile.clustering.linkage._
import scala.util.Try
/**
 * Builds a [[LocalClustering]] backed by smile's hierarchical clustering over a
 * precomputed distance matrix, cut at a scale chosen by a [[ScaleSelection]].
 */
object SmileClustering extends Serializable {
  /**
   * @param localDataPoints the points being clustered (only their count/identity is used here)
   * @param distances       symmetric pairwise distance matrix for `localDataPoints`
   * @param clusteringMethod linkage name, see `createLinkage` for accepted values
   */
  def createLocalClustering(localDataPoints: Seq[DataPoint],
                            distances: Array[Array[Double]],
                            clusteringMethod: String) = new LocalClustering {
    val linkage = createLinkage(clusteringMethod, distances)
    val hierarchicalClustering = new HierarchicalClustering(linkage)
    // Merge heights of the dendrogram; optionally appends the data set's diameter
    // (the largest pairwise distance) as the topmost height.
    override def heights(includeDiameter: Boolean = true): Seq[Double] =
      if (includeDiameter)
        hierarchicalClustering.getHeight :+ distances.flatten.max
      else
        hierarchicalClustering.getHeight
    // NOTE(review): the `Nil` / `_ :: Nil` patterns only match List instances; a
    // non-List Seq (e.g. Vector) falls through to the general case — confirm callers
    // always pass Lists, or match on `Seq()` / `Seq(_)` instead.
    override def labels(scaleSelection: ScaleSelection): Seq[Any] =
      localDataPoints match {
        case Nil => Nil
        case _ :: Nil => 0 :: Nil
        case _ =>
          val cutoff = scaleSelection(heights(true))
          // `partition` can throw for degenerate cutoffs; fall back to a single cluster
          // (all-zero labels). Both branches are lazy so only the needed one is evaluated.
          lazy val attempt = hierarchicalClustering.partition(cutoff).toSeq
          lazy val backup = Stream.fill(localDataPoints.size)(0)
          Try(attempt).getOrElse(backup)
      }
    override def debug = hierarchicalClustering.getTree.map(_.mkString(", ")).mkString("\\n")
  }
  // Maps a (case-insensitive) linkage name onto the corresponding smile Linkage.
  private def createLinkage(method: String, distanceMatrix: Array[Array[Double]]) =
    method.toLowerCase match {
      case "complete" => new CompleteLinkage(distanceMatrix)
      case "single" => new SingleLinkage(distanceMatrix)
      case "ward" => new WardLinkage(distanceMatrix)
      case "upgma" => new UPGMALinkage(distanceMatrix)
      case "upgmc" => new UPGMCLinkage(distanceMatrix)
      case "wpgma" => new WPGMALinkage(distanceMatrix)
      case "wpgmc" => new WPGMCLinkage(distanceMatrix)
      case _ => throw new IllegalArgumentException(s"Unknown linkage method: $method")
    }
}
/**
* Recycled a few methods from smile-scala, which is not released as a Maven artifact.
*
* @author Thomas Moerman
*/
object SimpleSmileClusteringProvider extends LocalClusteringProvider with Serializable {

  /**
   * Computes the pairwise distance matrix for the points and delegates to
   * [[SmileClustering.createLocalClustering]] with the configured linkage method.
   *
   * @see LocalClusteringProvider
   */
  def apply(localDataPoints: Seq[DataPoint], params: ClusteringParams = ClusteringParams()): LocalClustering =
    createLocalClustering(
      localDataPoints,
      distanceMatrix(localDataPoints, params.distance),
      params.clusteringMethod)
}
package com.electronwill.niol
/**
* Thrown when there isn't enough space to write the data. It is thrown before any data is written.
*
* @param msg the message
*/
class NotEnoughSpaceException(msg: String) extends Exception(msg) {

  /** Reports how many values were being written, alongside the byte counts. */
  def this(nValues: Int, required: Int, avail: Int) =
    this(s"Cannot write $nValues value(s) ($required bytes): writableBytes = $avail")

  /** Reports only the byte shortfall. */
  def this(required: Int, avail: Int) =
    this(s"Cannot write $required bytes: writableBytes = $avail")
}
/**
* Thrown when there isn't enough data to read. It is thrown before any data is read.
*
* @param msg the message
*/
class NotEnoughDataException(msg: String) extends Exception(msg) {

  /** Reports how many values were being read, alongside the byte counts. */
  def this(nValues: Int, required: Int, avail: Int) =
    this(s"Cannot read $nValues value(s) ($required bytes): readableBytes = $avail")

  /** Reports only the byte shortfall. */
  def this(required: Int, avail: Int) =
    this(s"Cannot read $required bytes: readableBytes = $avail")
}
/**
* Thrown when a `write` operation couldn't complete as expected. It is thrown when there isn't
* enough space and some data has already been written.
*
* @param msg the message
*/
class IncompleteWriteException(msg: String) extends Exception(msg) {

  /** Reports how many values/units could not be fully written; `v` names the unit. */
  def this(nValues: Int, v: String = "value") =
    this(s"Tried to write $nValues $v(s) but couldn't finish")

  /** Reports an expected vs. actual write count for units named by `v`. */
  def this(expected: Int, actual: Int, v: String) =
    this(s"Tried to write $expected ${v}s, actually wrote $actual")
}
/**
* Thrown when a `read` operation couldn't complete as expected. It is thrown when there isn't
 * enough data to retrieve and some data has already been read.
*
* @param msg the message
*/
class IncompleteReadException(msg: String) extends Exception(msg) {

  /** Reports how many values/units could not be fully read; `v` names the unit. */
  def this(nValues: Int, v: String = "value") =
    this(s"Tried to read $nValues $v(s) but couldn't finish")

  /** Reports an expected vs. actual read count for units named by `v`. */
  def this(expected: Int, actual: Int, v: String) =
    this(s"Tried to read $expected ${v}s, actually got $actual")
}
| TheElectronWill/Niol | main/src/com/electronwill/niol/exceptions.scala | Scala | lgpl-3.0 | 1,889 |
package akkaviz.protocol
import org.scalatest.{Matchers, FunSuite}
/** Verifies that protocol messages survive a write/read round trip in both directions. */
class ProtocolIOTest extends FunSuite with Matchers {

  test("Roundtrip Server") {
    val original = Spawned("ref")
    val decoded = IO.readServer(IO.write(original))
    decoded shouldBe original
  }

  test("Roundtrip Client") {
    val original = SetEnabled(true)
    val decoded = IO.readClient(IO.write(original))
    decoded shouldBe original
  }
}
| blstream/akka-viz | monitoring/src/test/scala/akkaviz/protocol/ProtocolIOTest.scala | Scala | mit | 347 |
package com.github.tomwadeson.scalafp.data
import com.github.tomwadeson.scalafp.data.Option.{None, Some}
import com.github.tomwadeson.scalafp.typeclasses.Applicative
import org.scalatest.{FlatSpec, Matchers}
/**
 * Tests for the project's own `Option` ADT and its type-class instances
 * (Functor, Applicative, Monad, MonadPlus). Each test imports only the ops
 * syntax it needs, so a test fails to compile if the instance it exercises
 * disappears.
 */
class OptionSpec extends FlatSpec with Matchers {
  // Shared fixtures: a negative value, a positive value and an empty Option.
  val w: Option[Int] = Some(-1)
  val x: Option[Int] = Some(1)
  val y: Option[Int] = None
  "Option" should "define getOrElse" in {
    x.getOrElse(42) should be(1)
    y.getOrElse(42) should be(42)
  }
  it should "provide a Functor instance" in {
    import com.github.tomwadeson.scalafp.typeclasses.Functor.ops._
    x.map(_ + 1) should be(Some(2))
    y.map(_ + 1) should be(None)
  }
  it should "provide an Applicative instance" in {
    import com.github.tomwadeson.scalafp.typeclasses.Applicative.ops._
    // <*> applies a lifted function; None on either side short-circuits to None.
    val f: Option[Int => Int] = Applicative[Option].pure(_ + 10)
    x <*> f should be(Some(11))
    y <*> f should be(None)
  }
  it should "provide a Monad instance" in {
    import com.github.tomwadeson.scalafp.typeclasses.Monad.ops._
    val f: Int => Option[Boolean] = (x => if (x > 0) Some(true) else Some(false))
    (w >>= f) should be(Some(false))
    (x >>= f) should be(Some(true))
    (y >>= f) should be(None)
  }
  it should "provide a MonadPlus instance" in {
    import com.github.tomwadeson.scalafp.typeclasses.MonadPlus.ops._
    val p: Int => Boolean = (_ > 0)
    w.filter(p) should be(None)
    x.filter(p) should be(x)
    y.filter(p) should be(None)
  }
  it should "support for-comprehensions" in {
    // MonadPlus supplies map/flatMap/withFilter, enabling guards in `for`.
    import com.github.tomwadeson.scalafp.typeclasses.MonadPlus.ops._
    val expr1 = for {
      w <- w
      if w == -1
      x <- x
    } yield x
    expr1 should be(Some(1))
    val expr2 = for {
      y <- y
      z <- expr1
    } yield z
    expr2 should be(None)
  }
}
| tomwadeson/scala-fp | src/test/scala/com/github/tomwadeson/scalafp/data/OptionSpec.scala | Scala | mit | 1,786 |
package es.weso.shacl.converter
import cats._
import cats.implicits._
import es.weso._
import es.weso.rdf.jena.RDFAsJenaModel
import es.weso.shex.implicits.eqShEx._
import es.weso.shex.implicits.showShEx._
import es.weso.utils.IOUtils
import munit.CatsEffectSuite
/**
 * Round-trip tests for the SHACL -> ShEx converter: each case pairs a SHACL
 * shapes graph (Turtle) with the ShExC schema it is expected to convert to.
 * The fixture strings below are data — they must be kept byte-exact.
 */
class shacl2ShExTest extends CatsEffectSuite {
  // sh:nodeKind sh:IRI maps to the ShEx IRI node constraint.
  shouldConvertSHACLShEx(
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |:S a sh:NodeShape ;
       |   sh:nodeKind sh:IRI .
    """.stripMargin,
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |:S IRI
    """.stripMargin)
  // sh:datatype maps to a ShEx datatype constraint.
  shouldConvertSHACLShEx(
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |prefix xsd: <http://www.w3.org/2001/XMLSchema#>
       |
       |:S a sh:NodeShape ;
       |   sh:datatype xsd:string .
    """.stripMargin,
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |prefix xsd: <http://www.w3.org/2001/XMLSchema#>
       |:S xsd:string
    """.stripMargin)
  /* The following test is commented.
     It fails because ShExC doesn't allow several components in node constraints
     See: https://github.com/shexSpec/shex/issues/106
  shouldConvertSHACLShEx(
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |prefix xsd: <http://www.w3.org/2001/XMLSchema#>
       |
       |:S a sh:NodeShape ;
       |   sh:datatype xsd:string ;
       |   sh:nodeKind sh:Literal .
    """.stripMargin,
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |prefix xsd: <http://www.w3.org/2001/XMLSchema#>
       |:S Literal xsd:string
    """.stripMargin)
  */
  // sh:in value lists map to ShEx value sets.
  shouldConvertSHACLShEx(
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |prefix xsd: <http://www.w3.org/2001/XMLSchema#>
       |
       |:S a sh:NodeShape ;
       |   sh:in ("hi" 2) ;
    """.stripMargin,
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |prefix xsd: <http://www.w3.org/2001/XMLSchema#>
       |:S ["hi" 2]
    """.stripMargin)
  // A standalone PropertyShape maps to a shape with one triple constraint.
  shouldConvertSHACLShEx(
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |:PS a sh:PropertyShape ;
       |    sh:path :p ;
       |    sh:nodeKind sh:IRI .
    """.stripMargin,
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |:PS { :p IRI }
    """.stripMargin)
  // A NodeShape referencing a PropertyShape maps to a shape reference (&/$ labels).
  shouldConvertSHACLShEx(
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |:S a sh:NodeShape ;
       |   sh:property :PS .
       |:PS
       |   sh:path :p ;
       |   sh:nodeKind sh:IRI .
    """.stripMargin,
    """|prefix : <http://example.org/>
       |prefix sh: <http://www.w3.org/ns/shacl#>
       |:S { &:PS }
       |
       |_:1 {
       | $:PS :p IRI
       |}
    """.stripMargin)
  /**
   * Registers one test case: parses `strSHACL` as Turtle, converts the SHACL
   * schema to ShEx, parses `expected` as ShExC, and asserts semantic equality
   * of the two ShEx schemas (via the cats `Eq` instance, not string equality).
   */
  def shouldConvertSHACLShEx(strSHACL: String, expected: String): Unit = {
    test(s"Should convert: $strSHACL to ShEx and obtain: $expected") {
      val cmp = RDFAsJenaModel.fromString(strSHACL, "TURTLE", None).flatMap(_.use(shaclRDF => for {
        shacl <- RDF2Shacl.getShacl(shaclRDF)
        shexConverted <- IOUtils.fromES(Shacl2ShEx.shacl2ShEx(shacl).leftMap(e => s"Error in conversion: $e"))
        expectedSchema <- shex.Schema.fromString(expected, "ShExC")
      } yield (shexConverted, expectedSchema, shacl)))
      cmp.map(values => {
        val (converted, expected, shacl) = values
        val (schema,shapeMap) = converted
        if (Eq[shex.Schema].eqv(schema,expected)) {
          assertEquals(true,true)
        } else {
          // On mismatch, dump all three artifacts to ease debugging before failing.
          pprint.log(shacl)
          pprint.log(schema)
          pprint.log(expected)
          fail(s"SHACL2ShEx schemas are not equal: SHACL:\\n${shacl}\\n---\\nSHACL converted to ShEx:\\n${schema.show}\\nExpected:\\n${expected.show}")
        }
      }
      )
    }
  }
}
| labra/shaclex | modules/converter/src/test/scala/es/weso/shacl/converter/shacl2ShExTest.scala | Scala | mit | 4,486 |
package com.nulabinc.backlog.migration.common.services
import java.nio.file.Path
import cats.Foldable.ops._
import cats.Monad
import cats.Monad.ops._
import cats.data.Validated.{Invalid, Valid}
import com.nulabinc.backlog.migration.common.codec.{StatusMappingDecoder, StatusMappingEncoder}
import com.nulabinc.backlog.migration.common.domain.BacklogStatuses
import com.nulabinc.backlog.migration.common.domain.mappings._
import com.nulabinc.backlog.migration.common.dsl.{ConsoleDSL, StorageDSL}
import com.nulabinc.backlog.migration.common.errors.{
MappingFileError,
MappingFileNotFound,
MappingValidationError,
ValidationError
}
import com.nulabinc.backlog.migration.common.formatters.Formatter
import com.nulabinc.backlog.migration.common.validators.MappingValidatorNec
/**
 * Result of merging an existing status-mapping file with newly discovered source items.
 *
 * @param mergeList the complete, merged list of mappings to write back
 * @param addedList only the mappings that were newly added by the merge
 */
private case class MergedStatusMapping[A](mergeList: Seq[StatusMapping[A]], addedList: Seq[StatusMapping[A]])
private object MergedStatusMapping {
  /** A merge result containing no mappings at all. */
  def empty[A]: MergedStatusMapping[A] = MergedStatusMapping(Seq.empty, Seq.empty)
}
/** Reads, writes, merges and validates the status mapping files that link
  * source-tool statuses to Backlog statuses during a migration.
  */
object StatusMappingFileService {

  import com.nulabinc.backlog.migration.common.messages.ConsoleMessages.{
    Mappings => MappingMessages
  }
  import com.nulabinc.backlog.migration.common.shared.syntax._

  /**
   * Create mapping files.
   * - statuses.csv Link the source and destination states. User must edit to link it.
   * - statuses_list.csv List of items that can be specified in statuses.csv
   *
   * If statuses.csv already exists it is merged with the current source items:
   * the file is rewritten (and the user notified) only when new items appeared,
   * otherwise a "no changes" message is printed. statuses_list.csv is always
   * regenerated from the destination statuses.
   */
  def init[A, F[_]: Monad: StorageDSL: ConsoleDSL](
      mappingFilePath: Path,
      mappingListPath: Path,
      srcItems: Seq[A],
      dstItems: BacklogStatuses
  )(implicit
      formatter: Formatter[StatusMapping[A]],
      encoder: StatusMappingEncoder[A],
      decoder: StatusMappingDecoder[A],
      header: MappingHeader[StatusMapping[_]]
  ): F[Unit] =
    for {
      exists <- StorageDSL[F].exists(mappingFilePath)
      _ <-
        if (exists) {
          // Mapping file already present: merge the user's previous edits with
          // the current source items so manual links are preserved.
          for {
            records <- StorageDSL[F].read(mappingFilePath, MappingFileService.readLine)
            mappings = MappingDecoder.status(records)
            result = merge(mappings, srcItems)
            _ <-
              if (result.addedList.nonEmpty)
                // New source items appeared: rewrite the file and tell the
                // user which entries were added.
                for {
                  _ <- StorageDSL[F].writeNewFile(
                    mappingFilePath,
                    MappingEncoder.status(result.mergeList)
                  )
                  _ <- ConsoleDSL[F].println(
                    MappingMessages.statusMappingMerged(mappingFilePath, result.addedList)
                  )
                } yield ()
              else
                ConsoleDSL[F].println(MappingMessages.statusMappingNoChanges)
          } yield ()
        } else {
          // First run: create the mapping file from scratch.
          val result = merge(Seq(), srcItems)
          for {
            _ <- StorageDSL[F].writeNewFile(
              mappingFilePath,
              MappingEncoder.status(result.mergeList)
            )
            _ <- ConsoleDSL[F].println(
              MappingMessages.statusMappingCreated(mappingFilePath)
            )
          } yield ()
        }
      // The list of valid destination statuses is rewritten unconditionally.
      _ <- StorageDSL[F].writeNewFile(
        mappingListPath,
        MappingEncoder.statusList(dstItems)
      )
    } yield ()

  /**
   * Read and validate the status mapping file.
   *
   * Fails with MappingFileNotFound when the file is missing; otherwise decodes
   * it and validates each row against the destination statuses.
   *
   * @param path location of statuses.csv
   * @param dstItems statuses available in the destination Backlog project
   * @param decoder decoder for one mapping row
   * @tparam A source status type
   * @tparam F effect type
   * @return either a mapping-file error or the validated mappings
   */
  def execute[A, F[_]: Monad: StorageDSL: ConsoleDSL](
      path: Path,
      dstItems: BacklogStatuses
  )(implicit
      decoder: StatusMappingDecoder[A]
  ): F[Either[MappingFileError, Seq[ValidatedStatusMapping[A]]]] = {
    val result = for {
      _ <- StorageDSL[F].exists(path).orError(MappingFileNotFound("status", path)).handleError
      unvalidated <- getMappings(path).handleError
      validated <- validateMappings(unvalidated, dstItems).lift.handleError
    } yield validated
    result.value
  }

  /**
   * Deserialize a mapping file.
   *
   * NOTE(review): this always returns Right in the current implementation —
   * decode failures are not surfaced here.
   *
   * @param path location of the mapping file
   * @param decoder decoder for one mapping row
   * @tparam A source status type
   * @tparam F effect type
   * @return the decoded mappings, always wrapped in Right
   */
  def getMappings[A, F[_]: Monad: ConsoleDSL: StorageDSL](path: Path)(implicit
      decoder: StatusMappingDecoder[A]
  ): F[Either[MappingFileError, Seq[StatusMapping[A]]]] =
    for {
      records <- StorageDSL[F].read(path, MappingFileService.readLine)
      mappings = MappingDecoder.status(records)
    } yield Right(mappings)

  /**
   * Validate mappings against the destination statuses.
   *
   * Every mapping is validated; if any fail, all collected errors are reported
   * together inside a single MappingValidationError.
   *
   * @param mappings mappings to validate
   * @param dstItems statuses available in the destination
   * @tparam A source status type
   * @return the validated mappings, or the accumulated validation errors
   */
  def validateMappings[A](
      mappings: Seq[StatusMapping[A]],
      dstItems: BacklogStatuses
  ): Either[MappingFileError, Seq[ValidatedStatusMapping[A]]] = {
    val results = mappings
      .map(MappingValidatorNec.validateStatusMapping(_, dstItems))
      .foldLeft(ValidationResults.empty[A]) { (acc, item) =>
        item match {
          case Valid(value) => acc.copy(values = acc.values :+ value)
          case Invalid(error) => acc.copy(errors = acc.errors ++ error.toList)
        }
      }
    results.toResult
  }

  /**
   * Merge old mappings and new items.
   *
   * Keeps the existing mapping for every item that was already mapped and
   * creates a fresh mapping for each new item, remembering the newly created
   * ones separately in addedList.
   *
   * @param mappings mappings read from the existing file
   * @param srcItems current source items
   * @tparam A source status type
   * @return the merged mappings plus the subset that is new
   */
  private def merge[A](
      mappings: Seq[StatusMapping[A]],
      srcItems: Seq[A]
  ): MergedStatusMapping[A] =
    srcItems.foldLeft(MergedStatusMapping.empty[A]) { (acc, item) =>
      mappings.find(_.src == item) match {
        case Some(value) =>
          acc.copy(mergeList = acc.mergeList :+ value)
        case None =>
          val mapping = StatusMapping.create(item)
          acc.copy(
            mergeList = acc.mergeList :+ mapping,
            addedList = acc.addedList :+ mapping
          )
      }
    }

  // Accumulator used by validateMappings: successes and errors side by side.
  private case class ValidationResults[A](
      values: Seq[ValidatedStatusMapping[A]] = Seq(),
      errors: List[ValidationError] = List()
  ) {
    // Errors win: any accumulated error turns the whole result into a Left.
    def toResult: Either[MappingFileError, Seq[ValidatedStatusMapping[A]]] =
      if (errors.nonEmpty) Left(MappingValidationError(MappingType.Status, values, errors))
      else Right(values)
  }

  private object ValidationResults {
    def empty[A]: ValidationResults[A] = ValidationResults[A]()
  }
}
| nulab/backlog-migration-common | core/src/main/scala/com/nulabinc/backlog/migration/common/services/StatusMappingFileService.scala | Scala | mit | 6,104 |
package pl.touk.nussknacker.engine.avro.schema
/** Avro test schema used to exercise schema-evolution scenarios.
  *
  * NOTE(review): the name and the exampleData (PaymentV2's data plus
  * "attributes" and "date") suggest this schema differs from PaymentV2 by the
  * extra fields — in particular "date" is a required union with no default —
  * presumably making it incompatible; confirm against PaymentV2's definition.
  */
object PaymentNotCompatible extends TestSchemaWithRecord {
  // Raw Avro record schema; nested record schemas (Currency, Company,
  // Product) are spliced in via string interpolation.
  val stringSchema: String =
    s"""
       |{
       | "type": "record",
       | "name": "Payment",
       | "fields": [
       | {
       | "name": "id",
       | "type": "string"
       | },
       | {
       | "name": "amount",
       | "type": "double"
       | },
       | {
       | "name": "currency",
       | "type": ${Currency.stringSchema}
       | },
       | {
       | "name": "company",
       | "type": ${Company.stringSchema}
       | },
       | {
       | "name": "products",
       | "type": {
       | "type": "array",
       | "items": ${Product.stringSchema}
       | }
       | },
       | {
       | "name": "vat",
       | "type": ["int", "null"]
       | },
       | {
       | "name": "cnt",
       | "type": ["int", "null"],
       | "default": 0
       | },
       | {
       | "name": "attributes",
       | "type":[{
       | "type": "map",
       | "values": "string"
       | }, "null"],
       | "default": {}
       | },
       | {
       | "name": "date",
       | "type": ["int"]
       | }
       | ]
       |}
    """.stripMargin

  // Example record matching the schema above: PaymentV2's sample data plus
  // values for the two extra fields.
  val exampleData = PaymentV2.exampleData ++ Map("attributes" -> Map(), "date" -> 189123)
}
| TouK/nussknacker | engine/flink/avro-components-utils/src/test/scala/pl/touk/nussknacker/engine/avro/schema/PaymentNotCompatible.scala | Scala | apache-2.0 | 1,470 |
package org.au9ustine.gloin.model
import org.scalatest.FlatSpec
/**
* Created by shaotch on 12/22/13.
*/
/** Spec for [[InfoHash]] digest creation with default and named algorithms. */
class InfoHashTest extends FlatSpec {

  "InfoHashTest" should "be produce SHA-1" in {
    // Default algorithm produces a SHA-1 hex digest.
    assertResult("907d14fb3af2b0d4f18c2d46abe8aedce17367bd")(InfoHash.create("Hello, World"))
  }

  "InfoHashTest" should "be produce other digests, e.g. SHA-256, SHA-512" in {
    // Algorithm names are matched case-insensitively ("Sha256", "sHA-512").
    val sha256 = InfoHash.create("Hello, World", "Sha256")
    assertResult("03675ac53ff9cd1535ccc7dfcdfa2c458c5218371f418dc136f2d19ac1fbe8a5")(sha256)

    val sha512 = InfoHash.create("Hello, World", "sHA-512")
    assertResult("45546d4d71407e82ecda31eba5bf74b65bc092b0436a2409a6b615c1f78fdb2d3da371758f07a65b5d2b3ee8fa9ea0c772dd1eff884c4c77d4290177b002ccdc")(sha512)

    // An unrecognised algorithm name yields the SHA-1 digest — assumption
    // based on the expected value matching the SHA-1 case; confirm in InfoHash.
    val unknownAlgorithm = InfoHash.create("Hello, World", "jfdksl")
    assertResult("907d14fb3af2b0d4f18c2d46abe8aedce17367bd")(unknownAlgorithm)
  }
}
| au9ustine/org.au9ustine.gloin | src/test/scala/org/au9ustine/gloin/model/InfoHashTest.scala | Scala | apache-2.0 | 930 |
package com.topper.plugin
import tools.nsc.Global
import scala.collection.SortedSet
object PluginArgs {
  // Name of the project currently being compiled. Written by
  // RuntimePlugin.processOptions from the `projectName:` plugin option and
  // read back when logging; empty until the option has been processed.
  // NOTE(review): mutable global state shared across the plugin.
  var projectName: String = ""
}
/** Scalac plugin entry point: registers the extraction phase and consumes the
  * mandatory `projectName:` plugin option, recording it in [[PluginArgs]].
  */
class RuntimePlugin(val global: Global) extends tools.nsc.plugins.Plugin {

  val name = "canve"
  val description = "extracts type relationships and call graph during compilation"

  val components = List[tools.nsc.plugins.PluginComponent](
    new PluginPhase(this.global) // TODO: is the `this` really required here?
  )

  /*
   * overriding a callback function called by scalac for handling scalac arguments
   */
  override def processOptions(opts: List[String], error: String => Unit) {
    val projNameArgPrefix = "projectName:"

    // Every option must be a projectName argument; anything else is reported
    // back to scalac through the supplied error callback.
    opts.foreach { opt =>
      if (opt.startsWith(projNameArgPrefix)) {
        PluginArgs.projectName = opt.substring(projNameArgPrefix.length)
        Log("instrumenting project " + PluginArgs.projectName + "...")
      } else {
        error("Unknown invocation parameter passed to the CANVE compiler plugin: " + opt)
      }
    }

    // The project name option is mandatory; abort compilation if absent.
    if (!opts.exists(_.startsWith("projectName")))
      throw new RuntimeException("canve compiler plugin invoked without a project name argument")
  }
}
| gtopper/extractor | src/main/scala/com/topper/plugin/Plugin.scala | Scala | mit | 1,175 |
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.persistence.chronicle
import akka.actor.ActorSystem
import akka.persistence.SnapshotMetadata
import akka.serialization.SerializationExtension
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import com.typesafe.config.ConfigFactory
import ExtensionSerializer.SnapshotMetadataSerializer
import ExtensionSerializer.SnapshotPayloadSerializer
import ReplicationProtocol.ConsumeCommand
import ReplicationProtocol.JournalAppendPayload
import ReplicationProtocol.JournalClearPayload
import ReplicationProtocol.JournalDeletePayload
import ReplicationProtocol.JournalRotatePayload
import ReplicationProtocol.PublishCommand
import ReplicationProtocol.SnapshotClearPayload
import ReplicationProtocol.SnapshotCreatePayload
import ReplicationProtocol.SnapshotRemovePayload
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class SerializerSpec extends WordSpecLike with Matchers {

  import ReplicationProtocol._

  // Minimal actor-system config: swap the chronicle plugins for the built-in
  // in-memory/local ones so this spec does not touch journal storage.
  val config = ConfigFactory.parseString(s"""
    # Disable plugin.
    akka.persistence {
      journal {
        plugin = "akka.persistence.journal.inmem"
      }
      snapshot-store {
        plugin = "akka.persistence.snapshot-store.local"
      }
    }
  """).withFallback(ConfigFactory.load)

  val system = ActorSystem("default", config)

  // Akka serialization extension used to resolve a serializer per message.
  implicit val extension = SerializationExtension(system)

  // Representative sample of every replication payload/command shape,
  // including empty sequences and commands wrapping each payload type.
  val messageList = Seq(
    JournalClearPayload,
    SnapshotClearPayload,
    JournalClearPayload("meta-0"),
    JournalRotatePayload("meta-0"),
    JournalAppendPayload(Seq.empty),
    JournalAppendPayload(Array(Array[Byte](1, 2, 3))),
    JournalAppendPayload(Seq(Array[Byte](1, 2, 3), Array[Byte](4, 5, 6))),
    JournalDeletePayload("persistence-id", 123),
    SnapshotClearPayload("meta-0"),
    SnapshotCreatePayload("meta-1".getBytes, Array[Byte](1, 2, 3, 4, 5, 6, 7)),
    SnapshotRemovePayload("meta-2".getBytes),
    PublishCommand(JournalClearPayload),
    ConsumeCommand(SnapshotClearPayload),
    PublishCommand(JournalAppendPayload(Seq.empty)),
    ConsumeCommand(JournalAppendPayload(Seq.empty)),
    PublishCommand(JournalAppendPayload(Array(Array[Byte](1, 2, 3)))),
    ConsumeCommand(JournalAppendPayload(Array(Array[Byte](1, 2, 3)))),
    PublishCommand(JournalAppendPayload(Seq(Array[Byte](1, 2, 3), Array[Byte](4, 5, 6)))),
    ConsumeCommand(JournalAppendPayload(Seq(Array[Byte](1, 2, 3), Array[Byte](4, 5, 6)))),
    PublishCommand(JournalDeletePayload("meta-1", 123)),
    ConsumeCommand(JournalDeletePayload("meta-2", 123)),
    PublishCommand(SnapshotCreatePayload("meta-3".getBytes, Array[Byte](1, 2, 3, 4, 5, 6, 7))),
    ConsumeCommand(SnapshotCreatePayload("meta-4".getBytes, Array[Byte](1, 2, 3, 4, 5, 6, 7))),
    PublishCommand(SnapshotRemovePayload("meta-5".getBytes)),
    ConsumeCommand(SnapshotRemovePayload("meta-6".getBytes)),
    ConsumeCommand(JournalClearPayload)
  )

  // Round-trips one message through its resolved serializer and checks that
  // deserialization reproduces an equal value.
  def verify(source: AnyRef) = {
    val serializer = extension.findSerializerFor(source)
    val content = serializer.toBinary(source)
    val target = serializer.fromBinary(content)
    source should be(target)
  }

  "serializer" must {
    "verify command equality" in {
      // Equality must be content-based even though payloads carry arrays.
      PublishCommand(JournalAppendPayload(Seq(Array[Byte](1, 2, 3)))) should be(PublishCommand(JournalAppendPayload(List(Array[Byte](1, 2, 3)))))
      PublishCommand(JournalAppendPayload(Seq(Array[Byte](1, 2, 3)))) should not be (PublishCommand(JournalAppendPayload(List(Array[Byte](1, 4, 3)))))
      PublishCommand(SnapshotCreatePayload("meta-1".getBytes, Array[Byte](1, 2, 3, 4, 5, 6, 7))) should be(PublishCommand(SnapshotCreatePayload("meta-1".getBytes, Array[Byte](1, 2, 3, 4, 5, 6, 7))))
      PublishCommand(SnapshotCreatePayload("meta-2".getBytes, Array[Byte](1, 2, 3, 8, 5, 6, 7))) should not be (PublishCommand(SnapshotCreatePayload("meta-2".getBytes, Array[Byte](1, 2, 3, 4, 5, 6, 7))))
      PublishCommand(SnapshotRemovePayload("meta-1".getBytes)) should be(PublishCommand(SnapshotRemovePayload("meta-1".getBytes)))
      PublishCommand(SnapshotRemovePayload("meta-1".getBytes)) should not be (PublishCommand(SnapshotRemovePayload("meta-2".getBytes)))
    }

    "provide round trip serialization" in {
      messageList foreach { verify }
    }

    "verify SnapshotMetadataSerializer" in {
      val sourceList = Seq(
        SnapshotMetadata("", 0, 0),
        SnapshotMetadata("persistence id", 123, 456)
      )
      // Local round-trip helper; shadows the class-level verify on purpose
      // since it targets the dedicated metadata codec, not akka serialization.
      def verify(source: SnapshotMetadata) = {
        val content = SnapshotMetadataSerializer.encode(source)
        val target = SnapshotMetadataSerializer.decode(content)
        source should be(target)
      }
      sourceList foreach (verify(_))
    }

    "verify SnapshotPayloadSerializer" in {
      val source = "snapshot paylaod"
      val content = SnapshotPayloadSerializer.encode(source)
      val target = SnapshotPayloadSerializer.decode[String](content)
      source should be(target)
    }
  }
}
| carrot-garden/akka-persistence-chronicle | src/test/scala/akka/persistence/chronicle/SerializationSuite.scala | Scala | apache-2.0 | 4,979 |
package biology
import com.github.nscala_time.time.Imports._
/** Holds the candidate spawning locations for a simulation and answers
  * time-based queries about them.
  *
  * @param spawningLocations all locations where fish may spawn
  */
class Spawn(val spawningLocations: List[SpawningLocation]) {

  /** Convenience constructor: a Spawn with no locations. */
  def this() = this(List.empty[SpawningLocation])

  /** Locations whose spawning schedule matches `date`.
    *
    * Spawning is only evaluated at hour zero (midnight); at any other hour
    * the result is empty, exactly as before.
    *
    * @param date the simulation timestamp to test
    * @return locations due to spawn at `date`, or Nil outside hour zero
    */
  def getSitesWhereFishAreSpawning(date: LocalDateTime): List[SpawningLocation] =
    if (date.getHourOfDay == 0) spawningLocations.filter(_.timeToSpawn(date))
    else List.empty[SpawningLocation]

  /** Whether at least one location can spawn at `date`.
    *
    * Uses `exists`, which short-circuits on the first match, instead of the
    * original `count(...) > 0` which always traversed every location.
    *
    * @param date the simulation timestamp to test
    */
  def isItSpawningSeason(date: LocalDateTime): Boolean =
    spawningLocations.exists(_.canSpawn(date))
}
| shawes/zissou | src/main/scala/biology/Spawn.scala | Scala | mit | 532 |
package main.scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.sum
import org.apache.spark.sql.functions.udf
/**
* TPC-H Query 10
* Savvas Savvides <savvas@purdue.edu>
*
*/
class Q10 extends TpchQuery {

  /** Builds the TPC-H Q10 ("returned item reporting") result: the top 20
    * customers ranked by revenue lost to returned items for orders placed
    * between 1993-10-01 (inclusive) and 1994-01-01 (exclusive).
    */
  override def execute(sc: SparkContext, schemaProvider: TpchSchemaProvider): DataFrame = {

    // this is used to implicitly convert an RDD to a DataFrame.
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    import sqlContext.implicits._
    import schemaProvider._

    // Revenue contribution of a line item: extended price less its discount.
    val decrease = udf { (x: Double, y: Double) => x * (1 - y) }

    // Only returned items (return flag "R") count as lost revenue.
    val flineitem = lineitem.filter($"l_returnflag" === "R")

    // Orders in the target quarter joined to customer, nation and returned
    // line items; revenue is aggregated per customer and the top 20 returned.
    order.filter($"o_orderdate" < "1994-01-01" && $"o_orderdate" >= "1993-10-01")
      .join(customer, $"o_custkey" === customer("c_custkey"))
      .join(nation, $"c_nationkey" === nation("n_nationkey"))
      .join(flineitem, $"o_orderkey" === flineitem("l_orderkey"))
      .select($"c_custkey", $"c_name",
        decrease($"l_extendedprice", $"l_discount").as("volume"),
        $"c_acctbal", $"n_name", $"c_address", $"c_phone", $"c_comment")
      .groupBy($"c_custkey", $"c_name", $"c_acctbal", $"c_phone", $"n_name", $"c_address", $"c_comment")
      .agg(sum($"volume").as("revenue"))
      .sort($"revenue".desc)
      .limit(20)
  }
}
| ssavvides/tpch-spark | src/main/scala/Q10.scala | Scala | mit | 1,338 |
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.codecs.resource
import kantan.codecs.collection.Factory
import scala.annotation.unchecked.{uncheckedVariance => uV}
import scala.collection.mutable.Buffer
import scala.reflect.ClassTag
@SuppressWarnings(
  Array(
    "org.wartremover.warts.Var",
    "org.wartremover.warts.Throw",
    "org.wartremover.warts.While",
    "org.wartremover.warts.Null"
  )
)
// Scala 2.12-specific collection conversions for ResourceIterator. All of the
// strict conversions drain the iterator; toStream/toIterator are lazy views.
trait VersionSpecificResourceIterator[+A] { self: ResourceIterator[A] =>

  // Drains this iterator into any collection F for which a Factory exists.
  def to[F[_]](implicit factory: Factory[A @uV, F[A @uV]]): F[A @uV] = foldLeft(factory.newBuilder)(_ += _).result

  def toList: List[A] = to[List]
  def toArray[AA >: A](implicit ct: ClassTag[AA]): Array[AA] = toIterator.toArray
  def toBuffer[AA >: A]: Buffer[AA] = toIterator.toBuffer
  def toIndexedSeq: IndexedSeq[A] = to[IndexedSeq]
  def toIterable: Iterable[A] = to[Iterable]
  def toSeq: Seq[A] = to[Seq]
  def seq: Seq[A] = to[Seq]
  def toSet[AA >: A]: Set[AA] = toIterator.toSet
  def toVector: Vector[A] = to[Vector]
  def toTraversable: Traversable[A] = to[Traversable]

  // Lazy: each Stream cell forces exactly one element off this iterator.
  def toStream: Stream[A] = if(hasNext) Stream.cons(next(), toStream) else Stream.empty

  // Adapter view of this resource iterator as a plain scala Iterator.
  def toIterator: Iterator[A] = new Iterator[A] {
    override def hasNext: Boolean = self.hasNext
    override def next(): A = self.next()
  }
}
| nrinaudo/kantan.codecs | core/shared/src/main/scala-2.12/kantan/codecs/resource/VersionSpecificResourceIterator.scala | Scala | apache-2.0 | 2,204 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.csrf
import java.util.Optional
import javax.inject.{ Inject, Provider, Singleton }
import akka.stream.Materializer
import com.typesafe.config.ConfigMemorySize
import play.api._
import play.api.http.{ HttpConfiguration, HttpErrorHandler }
import play.api.inject.{ Binding, Module, bind }
import play.api.libs.crypto.{ CSRFTokenSigner, CSRFTokenSignerProvider }
import play.api.libs.typedmap.TypedKey
import play.api.mvc.Results._
import play.api.mvc._
import play.core.j.{ JavaContextComponents, JavaHelpers }
import play.filters.csrf.CSRF.{ CSRFHttpErrorHandler, _ }
import play.mvc.Http
import play.utils.Reflect
import scala.compat.java8.FutureConverters
import scala.concurrent.Future
/**
* CSRF configuration.
*
* @param tokenName The name of the token.
* @param cookieName If defined, the name of the cookie to read the token from/write the token to.
* @param secureCookie If using a cookie, whether it should be secure.
* @param httpOnlyCookie If using a cookie, whether it should have the HTTP only flag.
* @param postBodyBuffer How much of the POST body should be buffered if checking the body for a token.
* @param signTokens Whether tokens should be signed.
* @param checkMethod Returns true if a request for that method should be checked.
* @param checkContentType Returns true if a request for that content type should be checked.
* @param headerName The name of the HTTP header to check for tokens from.
* @param shouldProtect A function that decides based on the headers of the request if a check is needed.
* @param bypassCorsTrustedOrigins Whether to bypass the CSRF check if the CORS filter trusts this origin
*/
case class CSRFConfig(
    tokenName: String = "csrfToken",
    cookieName: Option[String] = None,
    secureCookie: Boolean = false,
    httpOnlyCookie: Boolean = false,
    createIfNotFound: RequestHeader => Boolean = CSRFConfig.defaultCreateIfNotFound,
    postBodyBuffer: Long = 102400,
    signTokens: Boolean = true,
    checkMethod: String => Boolean = !CSRFConfig.SafeMethods.contains(_),
    checkContentType: Option[String] => Boolean = _ => true,
    headerName: String = "Csrf-Token",
    shouldProtect: RequestHeader => Boolean = _ => false,
    bypassCorsTrustedOrigins: Boolean = true) {

  // Java builder methods
  //
  // Each `withXxx` below is a copy-based fluent setter exposed to Java code.
  // Java functional interfaces are adapted to Scala functions, and Java
  // request headers are wrapped into Scala ones before the predicate runs.
  def this() = this(cookieName = None)

  import java.{ util => ju }
  import play.core.j.{ RequestHeaderImpl => JRequestHeaderImpl }
  import play.mvc.Http.{ RequestHeader => JRequestHeader }
  import scala.compat.java8.FunctionConverters._
  import scala.compat.java8.OptionConverters._

  def withTokenName(tokenName: String) = copy(tokenName = tokenName)
  def withHeaderName(headerName: String) = copy(headerName = headerName)
  def withCookieName(cookieName: ju.Optional[String]) = copy(cookieName = cookieName.asScala)
  def withSecureCookie(isSecure: Boolean) = copy(secureCookie = isSecure)
  def withHttpOnlyCookie(isHttpOnly: Boolean) = copy(httpOnlyCookie = isHttpOnly)
  def withCreateIfNotFound(pred: ju.function.Predicate[JRequestHeader]) =
    copy(createIfNotFound = pred.asScala.compose(new JRequestHeaderImpl(_)))
  def withPostBodyBuffer(bufsize: Long) = copy(postBodyBuffer = bufsize)
  def withSignTokens(signTokens: Boolean) = copy(signTokens = signTokens)
  def withMethods(checkMethod: ju.function.Predicate[String]) = copy(checkMethod = checkMethod.asScala)
  def withContentTypes(checkContentType: ju.function.Predicate[Optional[String]]) =
    copy(checkContentType = checkContentType.asScala.compose(_.asJava))
  def withShouldProtect(shouldProtect: ju.function.Predicate[JRequestHeader]) =
    copy(shouldProtect = shouldProtect.asScala.compose(new JRequestHeaderImpl(_)))
  def withBypassCorsTrustedOrigins(bypass: Boolean) = copy(bypassCorsTrustedOrigins = bypass)
}
object CSRFConfig {

  // HTTP methods that never mutate state and are therefore not checked by default.
  private val SafeMethods = Set("GET", "HEAD", "OPTIONS")

  private def defaultCreateIfNotFound(request: RequestHeader) = {
    // If the request isn't accepting HTML, then it won't be rendering a form, so there's no point in generating a
    // CSRF token for it.
    import play.api.http.MimeTypes._
    (request.method == "GET" || request.method == "HEAD") && (request.accepts(HTML) || request.accepts(XHTML))
  }

  // Sentinel header value meaning "skip the CSRF check for this request".
  private[play] val HeaderNoCheck = "nocheck"

  /** Builds a [[CSRFConfig]] from the `play.filters.csrf` configuration block
    * (falling back to the deprecated `csrf` block).
    */
  def fromConfiguration(conf: Configuration): CSRFConfig = {
    val config = conf.getDeprecatedWithFallback("play.filters.csrf", "csrf")

    val methodWhiteList = config.get[Seq[String]]("method.whiteList").toSet
    val methodBlackList = config.get[Seq[String]]("method.blackList").toSet

    // Precedence: a non-empty white list wins (check everything outside it);
    // otherwise the black list decides; with both empty, every method is checked.
    val checkMethod: String => Boolean = if (methodWhiteList.nonEmpty) {
      !methodWhiteList.contains(_)
    } else {
      if (methodBlackList.isEmpty) {
        _ => true
      } else {
        methodBlackList.contains
      }
    }

    val contentTypeWhiteList = config.get[Seq[String]]("contentType.whiteList").toSet
    val contentTypeBlackList = config.get[Seq[String]]("contentType.blackList").toSet

    // Same precedence rules as for methods. Note that an absent content type
    // is checked under a white list (forall on None is true).
    val checkContentType: Option[String] => Boolean = if (contentTypeWhiteList.nonEmpty) {
      _.forall(!contentTypeWhiteList.contains(_))
    } else {
      if (contentTypeBlackList.isEmpty) {
        _ => true
      } else {
        _.exists(contentTypeBlackList.contains)
      }
    }

    val protectHeaders = config.get[Option[Map[String, String]]]("header.protectHeaders").getOrElse(Map.empty)
    val bypassHeaders = config.get[Option[Map[String, String]]]("header.bypassHeaders").getOrElse(Map.empty)

    // A request is protected when it carries one of the protectHeaders (or
    // protectHeaders is empty) AND carries none of the bypassHeaders.
    // The value "*" matches any header value.
    val shouldProtect: RequestHeader => Boolean = { rh =>
      def foundHeaderValues(headersToCheck: Map[String, String]) = {
        headersToCheck.exists {
          case (name, "*") => rh.headers.get(name).isDefined
          case (name, value) => rh.headers.get(name).contains(value)
        }
      }
      (protectHeaders.isEmpty || foundHeaderValues(protectHeaders)) && !foundHeaderValues(bypassHeaders)
    }

    CSRFConfig(
      tokenName = config.get[String]("token.name"),
      cookieName = config.get[Option[String]]("cookie.name"),
      secureCookie = config.get[Boolean]("cookie.secure"),
      httpOnlyCookie = config.get[Boolean]("cookie.httpOnly"),
      postBodyBuffer = config.get[ConfigMemorySize]("body.bufferSize").toBytes,
      signTokens = config.get[Boolean]("token.sign"),
      checkMethod = checkMethod,
      checkContentType = checkContentType,
      headerName = config.get[String]("header.name"),
      shouldProtect = shouldProtect,
      bypassCorsTrustedOrigins = config.get[Boolean]("bypassCorsTrustedOrigins")
    )
  }
}
@Singleton
class CSRFConfigProvider @Inject() (config: Configuration) extends Provider[CSRFConfig] {
  // Parsed lazily, once, from the application configuration.
  lazy val get = CSRFConfig.fromConfiguration(config)
}
object CSRF {

  private[csrf] val filterLogger = play.api.Logger("play.filters.CSRF")

  /**
   * A CSRF token
   */
  case class Token(name: String, value: String)

  /**
   * INTERNAL API: used for storing tokens on the request
   */
  case class TokenInfo(name: String, value: String, reSignedValue: Option[String] = None) {
    def toToken = {
      // Try to get the re-signed token first, then get the "new" token.
      Token(name, reSignedValue getOrElse value)
    }
  }

  object TokenInfo {
    def apply(token: Token): TokenInfo = {
      val Token(name, value) = token
      TokenInfo(name, value)
    }
    def apply(token: Token, reSignedToken: String): TokenInfo = apply(token).copy(reSignedValue = Some(reSignedToken))
  }

  object Token {
    // Request attribute under which the token info travels with the request.
    val InfoAttr = TypedKey[TokenInfo]("TOKEN_INFO")
  }

  /**
   * Extract token from current request
   */
  def getToken(implicit request: RequestHeader): Option[Token] = {
    request.attrs.get(Token.InfoAttr).map(_.toToken)
  }

  /**
   * Extract token from current Java request
   *
   * @param requestHeader The request to extract the token from
   * @return The token, if found.
   */
  def getToken(requestHeader: play.mvc.Http.RequestHeader): Optional[Token] = {
    Optional.ofNullable(getToken(requestHeader.asScala()).orNull)
  }

  /**
   * A token provider, for generating and comparing tokens.
   *
   * This abstraction allows the use of randomised tokens.
   */
  trait TokenProvider {
    /** Generate a token */
    def generateToken: String
    /** Compare two tokens */
    def compareTokens(tokenA: String, tokenB: String): Boolean
  }

  // Selects the signed or unsigned provider based on configuration.
  class TokenProviderProvider @Inject() (config: CSRFConfig, tokenSigner: CSRFTokenSigner) extends Provider[TokenProvider] {
    override val get = config.signTokens match {
      case true => new SignedTokenProvider(tokenSigner)
      case false => new UnsignedTokenProvider(tokenSigner)
    }
  }

  // Like TokenProviderProvider but takes the config by name, deferring its
  // evaluation until the provider is first used.
  class ConfigTokenProvider(config: => CSRFConfig, tokenSigner: CSRFTokenSigner) extends TokenProvider {
    lazy val underlying = new TokenProviderProvider(config, tokenSigner).get
    def generateToken = underlying.generateToken
    override def compareTokens(tokenA: String, tokenB: String) = underlying.compareTokens(tokenA, tokenB)
  }

  class SignedTokenProvider(tokenSigner: CSRFTokenSigner) extends TokenProvider {
    def generateToken = tokenSigner.generateSignedToken
    def compareTokens(tokenA: String, tokenB: String) = tokenSigner.compareSignedTokens(tokenA, tokenB)
  }

  class UnsignedTokenProvider(tokenSigner: CSRFTokenSigner) extends TokenProvider {
    def generateToken = tokenSigner.generateToken
    // MessageDigest.isEqual performs the comparison without early exit,
    // avoiding timing side channels.
    override def compareTokens(tokenA: String, tokenB: String) = {
      java.security.MessageDigest.isEqual(tokenA.getBytes("utf-8"), tokenB.getBytes("utf-8"))
    }
  }

  /**
   * This trait handles the CSRF error.
   */
  trait ErrorHandler {
    /** Handle a result */
    def handle(req: RequestHeader, msg: String): Future[Result]
  }

  // Delegates CSRF failures to the application's HttpErrorHandler as a 403.
  class CSRFHttpErrorHandler @Inject() (httpErrorHandler: HttpErrorHandler) extends ErrorHandler {
    import play.api.http.Status.FORBIDDEN
    def handle(req: RequestHeader, msg: String) = httpErrorHandler.onClientError(req, FORBIDDEN, msg)
  }

  // Fallback handler: a plain 403 with the message as body.
  object DefaultErrorHandler extends ErrorHandler {
    def handle(req: RequestHeader, msg: String) = Future.successful(Forbidden(msg))
  }

  // Adapts a Java CSRFErrorHandler to the Scala ErrorHandler interface,
  // invoking it inside a Java HTTP context.
  class JavaCSRFErrorHandlerAdapter @Inject() (underlying: CSRFErrorHandler, contextComponents: JavaContextComponents) extends ErrorHandler {
    def handle(request: RequestHeader, msg: String) =
      JavaHelpers.invokeWithContext(request, contextComponents, req => underlying.handle(req, msg))
  }

  // The inverse adapter: exposes a Scala ErrorHandler to Java code.
  class JavaCSRFErrorHandlerDelegate @Inject() (delegate: ErrorHandler) extends CSRFErrorHandler {
    import play.core.Execution.Implicits.trampoline
    def handle(requestHeader: Http.RequestHeader, msg: String) =
      FutureConverters.toJava(delegate.handle(requestHeader.asScala(), msg).map(_.asJava))
  }

  object ErrorHandler {
    // Resolves the configured (Scala or Java) error handler class reflectively,
    // defaulting to CSRFHttpErrorHandler.
    def bindingsFromConfiguration(environment: Environment, configuration: Configuration): Seq[Binding[_]] = {
      Reflect.bindingsFromConfiguration[ErrorHandler, CSRFErrorHandler, JavaCSRFErrorHandlerAdapter, JavaCSRFErrorHandlerDelegate, CSRFHttpErrorHandler](environment, configuration,
        "play.filters.csrf.errorHandler", "CSRFErrorHandler")
    }
  }
}
/**
* The CSRF module.
*/
class CSRFModule extends Module {
  // Runtime DI bindings for the CSRF filter and its collaborators, plus the
  // configured error handler bindings.
  def bindings(environment: Environment, configuration: Configuration) = Seq(
    bind[play.libs.crypto.CSRFTokenSigner].to(classOf[play.libs.crypto.DefaultCSRFTokenSigner]),
    bind[CSRFTokenSigner].toProvider[CSRFTokenSignerProvider],
    bind[CSRFConfig].toProvider[CSRFConfigProvider],
    bind[CSRF.TokenProvider].toProvider[CSRF.TokenProviderProvider],
    bind[CSRFFilter].toSelf
  ) ++ ErrorHandler.bindingsFromConfiguration(environment, configuration)
}
/**
* The CSRF components.
*/
trait CSRFComponents {

  // Dependencies supplied by the application's compile-time DI cake.
  def configuration: Configuration
  def csrfTokenSigner: CSRFTokenSigner
  def httpErrorHandler: HttpErrorHandler
  def httpConfiguration: HttpConfiguration
  implicit def materializer: Materializer

  // Lazy wiring of the CSRF filter and its collaborators.
  lazy val csrfConfig: CSRFConfig = CSRFConfig.fromConfiguration(configuration)
  lazy val csrfTokenProvider: CSRF.TokenProvider = new CSRF.TokenProviderProvider(csrfConfig, csrfTokenSigner).get
  lazy val csrfErrorHandler: CSRF.ErrorHandler = new CSRFHttpErrorHandler(httpErrorHandler)
  lazy val csrfFilter: CSRFFilter = new CSRFFilter(csrfConfig, csrfTokenSigner, httpConfiguration.session, csrfTokenProvider, csrfErrorHandler)
  lazy val csrfCheck: CSRFCheck = CSRFCheck(csrfConfig, csrfTokenSigner, httpConfiguration.session)
  lazy val csrfAddToken: CSRFAddToken = CSRFAddToken(csrfConfig, csrfTokenSigner, httpConfiguration.session)
}
| hagl/playframework | framework/src/play-filters-helpers/src/main/scala/play/filters/csrf/csrf.scala | Scala | apache-2.0 | 12,613 |
package dpla.ingestion3.entries.reports
import dpla.ingestion3.premappingreports._
import org.apache.log4j.{LogManager, Logger}
import scala.util.Failure
/**
* PreMappingReporterMain and PreMappingReporter, for generating QA reports
* on harvested documents.
*
* Example invocation:
*
* $ sbt "run-main dpla.ingestion3.PreMappingReporterMain \\
* /path/to/harvested-data.avro /path/to/pre-mapping-report local[2] xml
*/
/**
* PreMappingReporter, the driver class.
*
* The design patters for PreMappingReporter are the same as those for Reporter.
*
* @param inputURI Input URI or file path
* @param outputURI Output URI or file path
* @param sparkMasterName Spark master name, e.g. "local[1]"
* @param inputDataType The data type of the input data ("xml", "json")
*/
/**
* Entry point for running a pre-mapping report.
*/
/** Command-line entry point that runs all pre-mapping QA reports.
  *
  * Expects four arguments: input URI, output URI, Spark master, and the
  * input data type ("xml" or "json").
  */
object PreMappingReporterMain {

  /** Prints invocation instructions to stdout. */
  def usage(): Unit = {
    println(
      """
        |Usage:
        |
        |PreMappingReporterMain <input> <output> <spark master> <input data type>
      """.stripMargin)
  }

  val logger: Logger = LogManager.getLogger("PreMappingReporter")

  def main(args: Array[String]): Unit = {
    // All four positional arguments are mandatory.
    if (args.length < 4) {
      usage()
      System.err.println("Incorrect invocation arguments")
      sys.exit(1)
    }

    val input = args(0)
    val output = args(1)
    val sparkMasterName = args(2)
    val inputDataType = args(3)

    val result = new PreMappingReporter(input, output, sparkMasterName, inputDataType)
      .writeAllReports

    result match {
      case Failure(e) => logger.error(e.toString)
      // Fix: the original wrote `case _ => Unit`, which evaluates to the
      // Unit companion object rather than the unit value; use () instead.
      case _ => ()
    }
  }
}
| dpla/ingestion3 | src/main/scala/dpla/ingestion3/entries/reports/PreMappingReporterMain.scala | Scala | mit | 1,667 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.linker.frontend.optimizer
import scala.language.implicitConversions
import scala.annotation.{switch, tailrec}
import scala.collection.mutable
import scala.util.control.{NonFatal, ControlThrowable, TailCalls}
import scala.util.control.TailCalls.{done => _, _} // done is a too generic term
import org.scalajs.ir._
import org.scalajs.ir.Names._
import org.scalajs.ir.OriginalName.NoOriginalName
import org.scalajs.ir.Trees._
import org.scalajs.ir.Types._
import org.scalajs.logging._
import org.scalajs.linker.interface._
import org.scalajs.linker.interface.unstable.RuntimeClassNameMapperImpl
import org.scalajs.linker.standard._
import org.scalajs.linker.backend.emitter.LongImpl
import org.scalajs.linker.backend.emitter.Transients._
/** Optimizer core.
* Designed to be "mixed in" [[IncOptimizer#MethodImpl#Optimizer]].
* This is the core of the optimizer. It contains all the smart things the
* optimizer does. To perform inlining, it relies on abstract protected
* methods to identify the target of calls.
*/
private[optimizer] abstract class OptimizerCore(config: CommonPhaseConfig) {
import OptimizerCore._
type MethodID <: AbstractMethodID
val myself: MethodID
// Uncomment and adapt to print debug messages only during one method
//lazy val debugThisMethod: Boolean =
// myself.toString() == "java.lang.FloatingPointBits$.numberHashCode;D;I"
/** Returns the body of a method. */
protected def getMethodBody(method: MethodID): MethodDef
/** Returns the list of possible targets for a dynamically linked call. */
protected def dynamicCall(intfName: ClassName,
methodName: MethodName): List[MethodID]
/** Returns the target of a static call. */
protected def staticCall(className: ClassName, namespace: MemberNamespace,
methodName: MethodName): MethodID
/** Returns the list of ancestors of a class or interface. */
protected def getAncestorsOf(className: ClassName): List[ClassName]
/** Tests whether the given module class has an elidable accessor.
* In other words, whether it is safe to discard a LoadModule of that
* module class which is not used.
*/
protected def hasElidableModuleAccessor(moduleClassName: ClassName): Boolean
/** Tests whether the given class is inlineable.
*
* @return
* `None` if the class is not inlineable, `Some(structure)` if it is.
*/
protected def tryNewInlineableClass(
className: ClassName): Option[InlineableClassStructure]
private val localNameAllocator = new FreshNameAllocator.Local
/** An allocated local variable name is mutable iff it belongs to this set. */
private var mutableLocalNames: Set[LocalName] = Set.empty
private val labelNameAllocator = new FreshNameAllocator.Label
/** A list of backups for all updates done to States so far (excluding
* those done in rolled back optimistic branches).
*
* This list grows (from the head) every time the value of a `State` changes.
* Each time, a `StateBackup` is prepended with the previous value.
*
* When starting an optimistic branch in `tryOrRollback`, we take a snapshot
* of the current chain of backups. When doing a rollback, we restore all
* the backups that have been added to the chain since the snapshot. We can
* do this by comparing the nodes of the chain with `eq`.
*
* Manipulations of this list are amortized O(1). The act of modifying the
* value of a `State` "pays for" a) making the backup and b) restoring the
* backup. Indeed, a backup is restored at most once.
*/
private var stateBackupChain: List[StateBackup] = Nil
private var disableOptimisticOptimizations: Boolean = false
private var rollbacksCount: Int = 0
private val attemptedInlining = mutable.ListBuffer.empty[MethodID]
private var curTrampolineId = 0
private val useRuntimeLong = !config.coreSpec.esFeatures.allowBigIntsForLongs
/** The record type for inlined `RuntimeLong`. */
private lazy val inlinedRTLongStructure =
  tryNewInlineableClass(LongImpl.RuntimeLongClass).get

/** The name of the `lo` field of the record type of `RuntimeLong`. */
private lazy val inlinedRTLongLoField =
  inlinedRTLongStructure.recordType.fields(0).name

/** The name of the `hi` field of the record type of `RuntimeLong`. */
private lazy val inlinedRTLongHiField =
  inlinedRTLongStructure.recordType.fields(1).name

// Intrinsics recognized by this optimizer; depends on the enabled ES features.
private val intrinsics =
  Intrinsics.buildIntrinsics(config.coreSpec.esFeatures)
/** Optimizes a concrete method and returns the rewritten `MethodDef`.
 *
 *  The body is transformed via `transformIsolatedBody`. If that attempt
 *  triggers `TooManyRollbacksException`, all per-method state is reset and
 *  the transformation is retried with optimistic optimizations disabled.
 *
 *  Non-fatal failures are wrapped in an `OptimizeException` carrying the
 *  inlining attempts for diagnosis; fatal ones are printed and rethrown.
 */
def optimize(thisType: Type, originalDef: MethodDef): MethodDef = {
  try {
    val MethodDef(static, name, originalName, params, resultType, optBody) =
      originalDef
    val body = optBody getOrElse {
      throw new AssertionError("Methods to optimize must be concrete")
    }

    val (newParams, newBody1) = try {
      transformIsolatedBody(Some(myself), thisType, params, resultType, body,
          Set.empty)
    } catch {
      case _: TooManyRollbacksException =>
        // Reset all per-method state, then retry pessimistically.
        localNameAllocator.clear()
        mutableLocalNames = Set.empty
        labelNameAllocator.clear()
        stateBackupChain = Nil
        disableOptimisticOptimizations = true
        transformIsolatedBody(Some(myself), thisType, params, resultType,
            body, Set.empty)
    }
    // Only constructors may contain a StoreModule worth eliding.
    val newBody =
      if (originalDef.methodName == NoArgConstructorName) tryElimStoreModule(newBody1)
      else newBody1
    MethodDef(static, name, originalName, newParams, resultType,
        Some(newBody))(originalDef.optimizerHints, None)(originalDef.pos)
  } catch {
    case NonFatal(cause) =>
      throw new OptimizeException(myself, attemptedInlining.distinct.toList, cause)
    case e: Throwable =>
      // This is a fatal exception. Don't wrap, just output debug info error
      Console.err.println(exceptionMsg(
          myself, attemptedInlining.distinct.toList, e))
      throw e
  }
}
/** Try and eliminate a StoreModule followed only by trivial statements.
 *
 *  A lone `StoreModule` becomes a `Skip`. Inside a `Block`, the
 *  `StoreModule` is dropped only if everything after it is trivial
 *  (assignments of literals or var refs to fields), since such statements
 *  cannot observe the module registration.
 */
private def tryElimStoreModule(body: Tree): Tree = {
  implicit val pos = body.pos
  body match {
    case StoreModule(_, _) =>
      Skip()
    case Block(stats) =>
      // Split the block at the first StoreModule, if any.
      val (before, from) = stats.span(!_.isInstanceOf[StoreModule])
      if (from.isEmpty) {
        body
      } else {
        val after = from.tail
        val afterIsTrivial = after.forall {
          case Assign(Select(This(), _, _), _:Literal | _:VarRef) =>
            true
          case Assign(SelectStatic(_, _), _:Literal | _:VarRef) =>
            true
          case _ =>
            false
        }
        if (afterIsTrivial) Block(before ::: after)
        else body
      }
    case _ =>
      body
  }
}
// Creates a SimpleState registered with this optimizer, so that updates to
// it participate in the rollback mechanism (via addStateBackup).
private def newSimpleState[A](initialValue: A): SimpleState[A] =
  new SimpleState[A](this, initialValue)

// Records a backup for a State mutation; see `stateBackupChain` for the
// amortized-O(1) rollback scheme this supports.
private def addStateBackup(backup: StateBackup): Unit =
  stateBackupChain ::= backup
/** Allocates a fresh local variable name based on `base`.
 *
 *  When `mutable` is true, the allocated name is also recorded in
 *  `mutableLocalNames`, so that `localIsMutable` later reports it.
 */
private def freshLocalNameWithoutOriginalName(base: LocalName,
    mutable: Boolean): LocalName = {
  val allocated = localNameAllocator.freshName(base)
  if (mutable)
    mutableLocalNames = mutableLocalNames + allocated
  allocated
}
/** Allocates a fresh local name and derives its original name.
 *
 *  Returns the fresh name together with the original name to record for it
 *  (computed by `originalNameForFresh` from `base` and `originalName`).
 */
private def freshLocalName(base: LocalName, originalName: OriginalName,
    mutable: Boolean): (LocalName, OriginalName) = {
  val allocated = freshLocalNameWithoutOriginalName(base, mutable)
  (allocated, originalNameForFresh(base, originalName, allocated))
}
/** Allocates a fresh local name for a binding target.
 *
 *  `Binding.This` is given the dedicated base name for captured `this`;
 *  a `Binding.Local` reuses its own name and original name.
 */
private def freshLocalName(base: Binding.Name,
    mutable: Boolean): (LocalName, OriginalName) = {
  val (baseName, baseOriginalName) = base match {
    case Binding.This =>
      (LocalThisNameForFresh, thisOriginalName)
    case Binding.Local(name, originalName) =>
      (name, originalName)
  }
  freshLocalName(baseName, baseOriginalName, mutable)
}
// Allocates a fresh label name based on `base`.
private def freshLabelName(base: LabelName): LabelName =
  labelNameAllocator.freshName(base)

// Just a helper to make the callsites more understandable
private def localIsMutable(name: LocalName): Boolean = mutableLocalNames(name)
/** Runs `body` with a cancel function that rolls back to the current state.
 *
 *  Snapshots the name allocators, the mutable-name set and the state backup
 *  chain before running `body`. The cancel function passed to `body` throws
 *  a `RollbackException` carrying those snapshots; the enclosing trampoline
 *  (identified by `curTrampolineId`) catches it, restores the snapshots and
 *  runs `fallbackFun` instead.
 *
 *  When optimistic optimizations are disabled (after too many rollbacks),
 *  `fallbackFun` is used directly without attempting `body`.
 */
private def tryOrRollback(body: CancelFun => TailRec[Tree])(
    fallbackFun: () => TailRec[Tree]): TailRec[Tree] = {
  if (disableOptimisticOptimizations) {
    fallbackFun()
  } else {
    val trampolineId = curTrampolineId
    val localNameAllocatorSnapshot = localNameAllocator.snapshot()
    val savedMutableLocalNames = mutableLocalNames
    val labelNameAllocatorSnapshot = labelNameAllocator.snapshot()
    val savedStateBackupChain = stateBackupChain

    body { () =>
      throw new RollbackException(trampolineId, localNameAllocatorSnapshot,
          savedMutableLocalNames, labelNameAllocatorSnapshot,
          savedStateBackupChain, fallbackFun)
    }
  }
}
// Whether `lhs` is `rhs` or a subclass of it, per the class hierarchy
// known to the optimizer (ancestors include the class itself).
private def isSubclass(lhs: ClassName, rhs: ClassName): Boolean =
  getAncestorsOf(lhs).contains(rhs)

// Eta-expanded once so the function value is not re-allocated per call.
private val isSubclassFun = isSubclass _
/** Tests whether `lhs` is a subtype of `rhs` for the purposes of the
 *  optimizer.
 *
 *  In addition to the IR subtyping rules, `RuntimeLong` is considered
 *  interchangeable with the primitive `long` type and with `j.l.Long`,
 *  since it is the user-space implementation of Longs.
 */
private def isSubtype(lhs: Type, rhs: Type): Boolean = {
  assert(lhs != NoType)
  assert(rhs != NoType)

  // Extra rules bridging RuntimeLong with long / java.lang.Long.
  def isRuntimeLongSpecialCase: Boolean = (lhs, rhs) match {
    case (LongType | ClassType(BoxedLongClass),
        ClassType(LongImpl.RuntimeLongClass)) =>
      true
    case (ClassType(LongImpl.RuntimeLongClass),
        ClassType(BoxedLongClass)) =>
      true
    case _ =>
      false
  }

  Types.isSubtype(lhs, rhs)(isSubclassFun) || isRuntimeLongSpecialCase
}
/** Transforms a statement.
 *
 *  For valid expression trees, it is always the case that
 *  {{{
 *  transformStat(tree)
 *  ===
 *  pretransformExpr(tree)(finishTransformStat)
 *  }}}
 */
private def transformStat(tree: Tree)(implicit scope: Scope): Tree =
  transform(tree, isStat = true)

/** Transforms an expression.
 *
 *  It is always the case that
 *  {{{
 *  transformExpr(tree)
 *  ===
 *  pretransformExpr(tree)(finishTransformExpr)
 *  }}}
 */
private def transformExpr(tree: Tree)(implicit scope: Scope): Tree =
  transform(tree, isStat = false)
/** Transforms a tree.
 *
 *  Main dispatch of the optimizer's direct (non-pretransform) path. Trees
 *  whose optimization benefits from delayed materialization are routed
 *  through `trampoline { pretransform...(...)(finishTransform(isStat)) }`;
 *  the others are rebuilt with their children transformed in place.
 *  When `isStat` is true, the result is post-processed with
 *  `keepOnlySideEffects`.
 */
private def transform(tree: Tree, isStat: Boolean)(
    implicit scope: Scope): Tree = {

  @inline implicit def pos = tree.pos
  val result = tree match {
    // Definitions

    case VarDef(_, _, _, _, rhs) =>
      /* A local var that is last (or alone) in its block is not terribly
       * useful. Get rid of it.
       * (Non-last VarDefs in blocks are handled in transformBlock.)
       */
      transformStat(rhs)

    // Control flow constructs

    case tree: Block =>
      transformBlock(tree, isStat)

    case Labeled(ident @ LabelIdent(label), tpe, body) =>
      trampoline {
        pretransformLabeled(label, if (isStat) NoType else tpe, body, isStat,
            usePreTransform = false)(finishTransform(isStat))
      }

    case Assign(lhs, rhs) =>
      val cont = { (preTransLhs: PreTransform) =>
        resolveLocalDef(preTransLhs) match {
          case PreTransRecordTree(lhsTree, lhsOrigType, lhsCancelFun) =>
            // The lhs is lowered to a record; the rhs must match its type.
            val recordType = lhsTree.tpe.asInstanceOf[RecordType]

            def buildInner(trhs: PreTransform): TailRec[Tree] = {
              resolveLocalDef(trhs) match {
                case PreTransRecordTree(rhsTree, rhsOrigType, rhsCancelFun) =>
                  if (rhsTree.tpe != recordType || rhsOrigType != lhsOrigType)
                    lhsCancelFun()
                  TailCalls.done(Assign(lhsTree.asInstanceOf[AssignLhs], rhsTree))
                case _ =>
                  lhsCancelFun()
              }
            }

            pretransformExpr(rhs) { trhs =>
              (trhs.tpe.base, lhsOrigType) match {
                case (LongType, RefinedType(
                    ClassType(LongImpl.RuntimeLongClass), true, false)) =>
                  /* The lhs is a stack-allocated RuntimeLong, but the rhs is
                   * a primitive Long. We expand the primitive Long into a
                   * new stack-allocated RuntimeLong so that we do not need
                   * to cancel.
                   */
                  expandLongValue(trhs) { expandedRhs =>
                    buildInner(expandedRhs)
                  }

                case _ =>
                  buildInner(trhs)
              }
            }

          case PreTransTree(lhsTree, _) =>
            TailCalls.done(Assign(lhsTree.asInstanceOf[AssignLhs], transformExpr(rhs)))
        }
      }
      trampoline {
        lhs match {
          case lhs: Select =>
            pretransformSelectCommon(lhs, isLhsOfAssign = true)(cont)
          case lhs: JSSelect =>
            pretransformJSSelect(lhs, isLhsOfAssign = true)(cont)
          case _ =>
            pretransformExpr(lhs)(cont)
        }
      }

    case Return(expr, label) =>
      // Record the returned type on the target label so that the enclosing
      // Labeled can compute a refined result type.
      val info = scope.env.labelInfos(label.name)
      val newLabel = LabelIdent(info.newName)
      if (!info.acceptRecords) {
        val newExpr = transformExpr(expr)
        info.returnedTypes.value ::= (newExpr.tpe, RefinedType(newExpr.tpe))
        Return(newExpr, newLabel)
      } else trampoline {
        pretransformNoLocalDef(expr) { texpr =>
          texpr match {
            case PreTransRecordTree(newExpr, origType, cancelFun) =>
              info.returnedTypes.value ::= (newExpr.tpe, origType)
              TailCalls.done(Return(newExpr, newLabel))
            case PreTransTree(newExpr, tpe) =>
              info.returnedTypes.value ::= (newExpr.tpe, tpe)
              TailCalls.done(Return(newExpr, newLabel))
          }
        }
      }

    case If(cond, thenp, elsep) =>
      val newCond = transformExpr(cond)
      newCond match {
        case BooleanLiteral(condValue) =>
          // Constant-fold the branch selection.
          if (condValue) transform(thenp, isStat)
          else transform(elsep, isStat)
        case _ =>
          val newThenp = transform(thenp, isStat)
          val newElsep = transform(elsep, isStat)
          val refinedType =
            constrainedLub(newThenp.tpe, newElsep.tpe, tree.tpe)
          foldIf(newCond, newThenp, newElsep)(refinedType)
      }

    case While(cond, body) =>
      val newCond = transformExpr(cond)
      newCond match {
        case BooleanLiteral(false) => Skip() // loop never runs
        case _                     => While(newCond, transformStat(body))
      }

    case DoWhile(body, cond) =>
      val newBody = transformStat(body)
      val newCond = transformExpr(cond)
      newCond match {
        case BooleanLiteral(false) => newBody // body runs exactly once
        case _                     => DoWhile(newBody, newCond)
      }

    case ForIn(obj, keyVar @ LocalIdent(name), originalName, body) =>
      val newObj = transformExpr(obj)
      val (newName, newOriginalName) =
        freshLocalName(name, originalName, mutable = false)
      val localDef = LocalDef(RefinedType(AnyType), mutable = false,
          ReplaceWithVarRef(newName, newSimpleState(true), None))
      val newBody = {
        val bodyScope = scope.withEnv(scope.env.withLocalDef(name, localDef))
        transformStat(body)(bodyScope)
      }
      ForIn(newObj, LocalIdent(newName)(keyVar.pos), newOriginalName, newBody)

    case TryCatch(block, errVar @ LocalIdent(name), originalName, handler) =>
      val newBlock = transform(block, isStat)

      val (newName, newOriginalName) =
        freshLocalName(name, originalName, mutable = false)
      val localDef = LocalDef(RefinedType(AnyType), true,
          ReplaceWithVarRef(newName, newSimpleState(true), None))
      val newHandler = {
        val handlerScope = scope.withEnv(scope.env.withLocalDef(name, localDef))
        transform(handler, isStat)(handlerScope)
      }

      val refinedType = constrainedLub(newBlock.tpe, newHandler.tpe, tree.tpe)
      TryCatch(newBlock, LocalIdent(newName)(errVar.pos), newOriginalName,
          newHandler)(refinedType)

    case TryFinally(block, finalizer) =>
      val newBlock = transform(block, isStat)
      val newFinalizer = transformStat(finalizer)
      TryFinally(newBlock, newFinalizer)

    case Throw(expr) =>
      Throw(transformExpr(expr))

    case Match(selector, cases, default) =>
      val newSelector = transformExpr(selector)
      newSelector match {
        case selectorValue: MatchableLiteral =>
          // Constant-fold the match on a literal selector.
          val body = cases.collectFirst {
            case (alts, body) if alts.exists(matchableLiteral_===(_, selectorValue)) => body
          }.getOrElse(default)
          transform(body, isStat)
        case _ =>
          Match(newSelector,
              cases map (c => (c._1, transform(c._2, isStat))),
              transform(default, isStat))(tree.tpe)
      }

    // Scala expressions

    case New(className, ctor, args) =>
      New(className, ctor, args map transformExpr)

    case StoreModule(className, value) =>
      StoreModule(className, transformExpr(value))

    case tree: Select =>
      trampoline {
        pretransformSelectCommon(tree, isLhsOfAssign = false)(
            finishTransform(isStat = false))
      }

    case tree: Apply =>
      trampoline {
        pretransformApply(tree, isStat, usePreTransform = false)(
            finishTransform(isStat))
      }

    case tree: ApplyStatically =>
      trampoline {
        pretransformStaticApply(tree, isStat, usePreTransform = false)(
            finishTransform(isStat))
      }

    case tree: ApplyStatic =>
      trampoline {
        pretransformApplyStatic(tree, isStat, usePreTransform = false)(
            finishTransform(isStat))
      }

    case ApplyDynamicImport(flags, className, method, args) =>
      ApplyDynamicImport(flags, className, method, args.map(transformExpr(_)))

    case tree: UnaryOp =>
      trampoline {
        pretransformUnaryOp(tree)(finishTransform(isStat))
      }

    case tree: BinaryOp =>
      trampoline {
        pretransformBinaryOp(tree)(finishTransform(isStat))
      }

    case NewArray(tpe, lengths) =>
      NewArray(tpe, lengths map transformExpr)

    case ArrayValue(tpe, elems) =>
      ArrayValue(tpe, elems map transformExpr)

    case ArrayLength(array) =>
      ArrayLength(transformExpr(array))

    case ArraySelect(array, index) =>
      ArraySelect(transformExpr(array), transformExpr(index))(tree.tpe)

    case RecordValue(tpe, elems) =>
      RecordValue(tpe, elems map transformExpr)

    case IsInstanceOf(expr, testType) =>
      trampoline {
        pretransformExpr(expr) { texpr =>
          val result = {
            if (isSubtype(texpr.tpe.base, testType)) {
              // Statically true up to nullability.
              if (texpr.tpe.isNullable)
                JSBinaryOp(JSBinaryOp.!==, finishTransformExpr(texpr), Null())
              else
                Block(finishTransformStat(texpr), BooleanLiteral(true))
            } else {
              // Statically false only if the refined type is exact.
              if (texpr.tpe.isExact)
                Block(finishTransformStat(texpr), BooleanLiteral(false))
              else
                IsInstanceOf(finishTransformExpr(texpr), testType)
            }
          }
          TailCalls.done(result)
        }
      }

    case AsInstanceOf(arg, tpe) =>
      trampoline {
        pretransformExpr(arg) { targ =>
          foldAsInstanceOf(targ, tpe)(finishTransform(isStat))
        }
      }

    case GetClass(expr) =>
      trampoline {
        pretransformExpr(expr) { texpr =>
          // For an exact, non-null refined type, fold to a ClassOf constant.
          def constant(typeRef: TypeRef): TailRec[Tree] =
            TailCalls.done(Block(finishTransformStat(texpr), ClassOf(typeRef)))

          texpr.tpe match {
            case RefinedType(ClassType(LongImpl.RuntimeLongClass), true, false) =>
              constant(ClassRef(BoxedLongClass))
            case RefinedType(ClassType(className), true, false) =>
              constant(ClassRef(className))
            case RefinedType(ArrayType(arrayTypeRef), true, false) =>
              constant(arrayTypeRef)
            case _ =>
              TailCalls.done(GetClass(finishTransformExpr(texpr)))
          }
        }
      }

    case Clone(expr) =>
      Clone(transformExpr(expr))

    case IdentityHashCode(expr) =>
      IdentityHashCode(transformExpr(expr))

    // JavaScript expressions

    case JSNew(ctor, args) =>
      JSNew(transformExpr(ctor), transformExprsOrSpreads(args))

    case JSPrivateSelect(qualifier, className, field) =>
      JSPrivateSelect(transformExpr(qualifier), className, field)

    case tree: JSSelect =>
      trampoline {
        pretransformJSSelect(tree, isLhsOfAssign = false)(
            finishTransform(isStat))
      }

    case tree: JSFunctionApply =>
      trampoline {
        pretransformJSFunctionApply(tree, isStat, usePreTransform = false)(
            finishTransform(isStat))
      }

    case JSMethodApply(receiver, method, args) =>
      JSMethodApply(transformExpr(receiver), transformExpr(method),
          transformExprsOrSpreads(args))

    case JSSuperSelect(superClass, qualifier, item) =>
      JSSuperSelect(transformExpr(superClass), transformExpr(qualifier),
          transformExpr(item))

    case JSSuperMethodCall(superClass, receiver, method, args) =>
      JSSuperMethodCall(transformExpr(superClass), transformExpr(receiver),
          transformExpr(method), transformExprsOrSpreads(args))

    case JSSuperConstructorCall(args) =>
      JSSuperConstructorCall(transformExprsOrSpreads(args))

    case JSImportCall(arg) =>
      JSImportCall(transformExpr(arg))

    case JSDelete(qualifier, item) =>
      JSDelete(transformExpr(qualifier), transformExpr(item))

    case JSUnaryOp(op, lhs) =>
      JSUnaryOp(op, transformExpr(lhs))

    case JSBinaryOp(op, lhs, rhs) =>
      JSBinaryOp(op, transformExpr(lhs), transformExpr(rhs))

    case JSArrayConstr(items) =>
      JSArrayConstr(transformExprsOrSpreads(items))

    case JSObjectConstr(fields) =>
      JSObjectConstr(fields.map { field =>
        (transformExpr(field._1), transformExpr(field._2))
      })

    // Atomic expressions

    case _:VarRef | _:This =>
      trampoline {
        pretransformExpr(tree)(finishTransform(isStat))
      }

    case Closure(arrow, captureParams, params, restParam, body, captureValues) =>
      transformClosureCommon(arrow, captureParams, params, restParam, body,
          captureValues.map(transformExpr))

    case CreateJSClass(className, captureValues) =>
      CreateJSClass(className, captureValues.map(transformExpr))

    // Trees that need not be transformed

    case _:Skip | _:Debugger | _:LoadModule | _:SelectStatic |
        _:SelectJSNativeMember | _:JSNewTarget | _:JSImportMeta |
        _:LoadJSConstructor | _:LoadJSModule | _:JSLinkingInfo |
        _:JSGlobalRef | _:JSTypeOfGlobalRef | _:Literal =>
      tree

    case _ =>
      throw new IllegalArgumentException(
          s"Invalid tree in transform of class ${tree.getClass.getName}: $tree")
  }

  if (isStat) keepOnlySideEffects(result)
  else result
}
/** Transforms a Closure, given already-transformed capture values.
 *
 *  Capture params, regular params and the optional rest param are all
 *  passed through `transformIsolatedBody` as a single parameter list, then
 *  split back according to the original arities. Arrow closures have no
 *  `this` binding, hence `NoType`; non-arrow ones see `this: any`.
 */
private def transformClosureCommon(arrow: Boolean,
    captureParams: List[ParamDef], params: List[ParamDef],
    restParam: Option[ParamDef], body: Tree,
    newCaptureValues: List[Tree])(
    implicit scope: Scope, pos: Position): Closure = {

  val thisType = if (arrow) NoType else AnyType

  val (allNewParams, newBody) = transformIsolatedBody(None, thisType,
      captureParams ++ params ++ restParam, AnyType, body, scope.implsBeingInlined)

  val (newCaptureParams, newParams, newRestParam) = {
    val (c, t) = allNewParams.splitAt(captureParams.size)
    if (restParam.isDefined) (c, t.init, Some(t.last))
    else (c, t, None)
  }

  Closure(arrow, newCaptureParams, newParams, newRestParam, newBody, newCaptureValues)
}
/** Transforms a Block, binding intermediate VarDefs into the scope.
 *
 *  Each non-last `VarDef` becomes a `Binding` visible to the rest of the
 *  statements, enabling aliasing and inlining of its right-hand side. A
 *  statement of type `nothing` truncates the block, since the remainder is
 *  unreachable.
 */
private def transformBlock(tree: Block, isStat: Boolean)(
    implicit scope: Scope): Tree = {
  def transformList(stats: List[Tree])(
      implicit scope: Scope): Tree = stats match {
    case last :: Nil =>
      transform(last, isStat)

    case (VarDef(nameIdent, originalName, vtpe, mutable, rhs)) :: rest =>
      trampoline {
        pretransformExpr(rhs) { trhs =>
          withBinding(Binding(nameIdent, originalName, vtpe, mutable, trhs)) {
            (restScope, cont1) =>
              val newRest = transformList(rest)(restScope)
              cont1(PreTransTree(newRest, RefinedType(newRest.tpe)))
          } (finishTransform(isStat))
        }
      }

    case stat :: rest =>
      val transformedStat = transformStat(stat)
      if (transformedStat.tpe == NothingType) transformedStat
      else Block(transformedStat, transformList(rest))(stat.pos)

    case Nil => // silence the exhaustivity warning in a sensible way
      Skip()(tree.pos)
  }
  transformList(tree.stats)(scope)
}
/** Pretransforms a list of trees as a list of [[PreTransform]]s.
 *
 *  Convenience wrapper over `pretransformExpr`: each tree is pretransformed
 *  in order, and `cont` receives the results in the same order.
 */
private def pretransformExprs(trees: List[Tree])(
    cont: List[PreTransform] => TailRec[Tree])(
    implicit scope: Scope): TailRec[Tree] = {
  trees match {
    case Nil =>
      cont(Nil)
    case head :: tail =>
      pretransformExpr(head) { thead =>
        pretransformExprs(tail)(ttail => cont(thead :: ttail))
      }
  }
}
/** Pretransforms two trees as a pair of [[PreTransform]]s.
 *
 *  Convenience wrapper over `pretransformExpr`: `tree1` is pretransformed
 *  first, then `tree2`, and `cont` receives both results.
 */
private def pretransformExprs(tree1: Tree, tree2: Tree)(
    cont: (PreTransform, PreTransform) => TailRec[Tree])(
    implicit scope: Scope): TailRec[Tree] = {
  pretransformExpr(tree1) { t1 =>
    pretransformExpr(tree2)(t2 => cont(t1, t2))
  }
}
/** Pretransforms a tree and a list of trees as [[PreTransform]]s.
 *
 *  Convenience wrapper over `pretransformExpr`: `first` is pretransformed
 *  before the elements of `rest`, and `cont` receives both results.
 */
private def pretransformExprs(first: Tree, rest: List[Tree])(
    cont: (PreTransform, List[PreTransform]) => TailRec[Tree])(
    implicit scope: Scope): TailRec[Tree] = {
  pretransformExpr(first) { tfirst =>
    pretransformExprs(rest)(trest => cont(tfirst, trest))
  }
}
/** Pretransforms a tree to get a refined type while avoiding to force
 *  things we might be able to optimize by folding and aliasing.
 *
 *  This is the CPS counterpart of `transform`: instead of producing a
 *  [[Tree]], it feeds a [[PreTransform]] to `cont`, which keeps local defs,
 *  literals and tentative inline replacements symbolic for as long as
 *  possible. Trees with no dedicated pretransform fall through to
 *  `transformExpr` and are wrapped as-is.
 */
private def pretransformExpr(tree: Tree)(cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = tailcall {
  @inline implicit def pos = tree.pos

  tree match {
    case tree: Block =>
      pretransformBlock(tree)(cont)

    case VarRef(LocalIdent(name)) =>
      // Must resolve to a LocalDef in the current environment.
      val localDef = scope.env.localDefs.getOrElse(name, {
        throw new AssertionError(
            s"Cannot find local def '$name' at $pos\\n" +
            s"While optimizing $myself\\n" +
            s"Env is ${scope.env}\\n" +
            s"Inlining ${scope.implsBeingInlined}")
      })
      cont(localDef.toPreTransform)

    case This() =>
      val localDef = scope.env.thisLocalDef.getOrElse {
        throw new AssertionError(
            s"Found invalid 'this' at $pos\\n" +
            s"While optimizing $myself\\n" +
            s"Env is ${scope.env}\\n" +
            s"Inlining ${scope.implsBeingInlined}")
      }
      cont(localDef.toPreTransform)

    case tree: If =>
      pretransformIf(tree)(cont)

    case Match(selector, cases, default) =>
      val newSelector = transformExpr(selector)
      newSelector match {
        case selectorValue: MatchableLiteral =>
          // Constant-fold the match on a literal selector.
          val body = cases.collectFirst {
            case (alts, body) if alts.exists(matchableLiteral_===(_, selectorValue)) => body
          }.getOrElse(default)
          pretransformExpr(body)(cont)
        case _ =>
          cont(Match(newSelector,
              cases map (c => (c._1, transformExpr(c._2))),
              transformExpr(default))(tree.tpe).toPreTransform)
      }

    case Labeled(ident @ LabelIdent(label), tpe, body) =>
      pretransformLabeled(label, tpe, body, isStat = false,
          usePreTransform = true)(cont)

    case New(className, ctor, args) =>
      pretransformExprs(args) { targs =>
        pretransformNew(AllocationSite.Tree(tree), className, ctor, targs)(cont)
      }

    case tree: Select =>
      pretransformSelectCommon(tree, isLhsOfAssign = false)(cont)

    case tree: Apply =>
      pretransformApply(tree, isStat = false,
          usePreTransform = true)(cont)

    case tree: ApplyStatically =>
      pretransformStaticApply(tree, isStat = false,
          usePreTransform = true)(cont)

    case tree: ApplyStatic =>
      pretransformApplyStatic(tree, isStat = false,
          usePreTransform = true)(cont)

    case tree: UnaryOp =>
      pretransformUnaryOp(tree)(cont)

    case tree: BinaryOp =>
      pretransformBinaryOp(tree)(cont)

    case tree: JSSelect =>
      pretransformJSSelect(tree, isLhsOfAssign = false)(cont)

    case tree: JSFunctionApply =>
      pretransformJSFunctionApply(tree, isStat = false,
          usePreTransform = true)(cont)

    case JSArrayConstr(items) =>
      /* Trying to virtualize more than 64 items in a JS array is probably
       * a bad idea, and will slow down the optimizer for no good reason.
       * See for example #2943.
       */
      if (items.size > 64 || items.exists(_.isInstanceOf[JSSpread])) {
        /* TODO This means spread in array constr does not compose under
         * this optimization. We could improve this with a
         * pretransformExprsOrSpreads() or something like that.
         */
        cont(JSArrayConstr(transformExprsOrSpreads(items)).toPreTransform)
      } else {
        val itemsNoSpread = items.asInstanceOf[List[Tree]]

        pretransformExprs(itemsNoSpread) { titems =>
          // Optimistically virtualize the array; roll back on failure.
          tryOrRollback { cancelFun =>
            val itemBindings = for {
              (titem, index) <- titems.zipWithIndex
            } yield {
              Binding.temp(LocalName("x" + index), AnyType, mutable = false, titem)
            }
            withNewLocalDefs(itemBindings) { (itemLocalDefs, cont1) =>
              val replacement = InlineJSArrayReplacement(
                  itemLocalDefs.toVector, cancelFun)
              val localDef = LocalDef(
                  RefinedType(AnyType, isExact = false, isNullable = false),
                  mutable = false,
                  replacement)
              cont1(localDef.toPreTransform)
            } (cont)
          } { () =>
            cont(PreTransTree(JSArrayConstr(titems.map(finishTransformExpr)),
                RefinedType(AnyType, isExact = false, isNullable = false)))
          }
        }
      }

    case AsInstanceOf(expr, tpe) =>
      pretransformExpr(expr) { texpr =>
        foldAsInstanceOf(texpr, tpe)(cont)
      }

    case Closure(arrow, captureParams, params, restParam, body, captureValues) =>
      pretransformExprs(captureValues) { tcaptureValues =>
        // Used when the closure cannot (or fails to) be kept symbolic.
        def default(): TailRec[Tree] = {
          val newClosure = transformClosureCommon(arrow, captureParams,
              params, restParam, body, tcaptureValues.map(finishTransformExpr))
          cont(PreTransTree(
              newClosure,
              RefinedType(AnyType, isExact = false, isNullable = false)))
        }

        if (!arrow || restParam.isDefined) {
          /* TentativeClosureReplacement assumes there are no rest
           * parameters, because that would not be inlineable anyway.
           * Likewise, it assumes that there is no binding for `this` nor for
           * `new.target`, which is only true for arrow functions.
           * So we never try to inline non-arrow Closures, nor Closures with
           * a rest parameter. There are few use cases for either anyway.
           */
          default()
        } else {
          tryOrRollback { cancelFun =>
            val captureBindings = for {
              (ParamDef(nameIdent, originalName, tpe, mutable), value) <-
                captureParams zip tcaptureValues
            } yield {
              Binding(nameIdent, originalName, tpe, mutable, value)
            }
            withNewLocalDefs(captureBindings) { (captureLocalDefs, cont1) =>
              val replacement = TentativeClosureReplacement(
                  captureParams, params, body, captureLocalDefs,
                  alreadyUsed = newSimpleState(false), cancelFun)
              val localDef = LocalDef(
                  RefinedType(AnyType, isExact = false, isNullable = false),
                  mutable = false,
                  replacement)
              cont1(localDef.toPreTransform)
            } (cont)
          } { () =>
            default()
          }
        }
      }

    case _ =>
      // No dedicated pretransform: fall back to the direct transformation.
      cont(transformExpr(tree).toPreTransform)
  }
}
/** Pretransforms a Block, threading VarDef bindings through the scope.
 *
 *  The last statement's pretransform is fed to `cont`; intermediate
 *  `VarDef`s become `Binding`s, `Skip`s are dropped, and a statement of
 *  type `nothing` short-circuits the rest of the block.
 */
private def pretransformBlock(tree: Block)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  def pretransformList(stats: List[Tree])(
      cont: PreTransCont)(
      implicit scope: Scope): TailRec[Tree] = stats match {
    case last :: Nil =>
      pretransformExpr(last)(cont)

    case (VarDef(nameIdent, originalName, vtpe, mutable, rhs)) :: rest =>
      pretransformExpr(rhs) { trhs =>
        withBinding(Binding(nameIdent, originalName, vtpe, mutable, trhs)) {
          (restScope, cont1) =>
            pretransformList(rest)(cont1)(restScope)
        } (cont)
      }

    case stat :: rest =>
      implicit val pos = tree.pos
      val transformedStat = transformStat(stat)
      transformedStat match {
        case Skip() =>
          pretransformList(rest)(cont)
        case _ =>
          if (transformedStat.tpe == NothingType)
            cont(PreTransTree(transformedStat, RefinedType.Nothing))
          else {
            pretransformList(rest) { trest =>
              cont(PreTransBlock(transformedStat, trest))
            }
          }
      }

    case Nil => // silence the exhaustivity warning in a sensible way
      TailCalls.done(Skip()(tree.pos))
  }
  pretransformList(tree.stats)(cont)(scope)
}
/** Pretransforms an If.
 *
 *  A literal condition folds to the taken branch. Otherwise, both branches
 *  are optimistically pretransformed: a `nothing`-typed branch is hoisted
 *  as a statement before the other branch's pretransform; two record-typed
 *  branches are merged when their record types agree (canceling otherwise).
 *  The fallback path transforms both branches directly.
 */
private def pretransformIf(tree: If)(cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  implicit val pos = tree.pos
  val If(cond, thenp, elsep) = tree

  val newCond = transformExpr(cond)
  newCond match {
    case BooleanLiteral(condValue) =>
      // Constant-fold the branch selection.
      if (condValue)
        pretransformExpr(thenp)(cont)
      else
        pretransformExpr(elsep)(cont)

    case _ =>
      tryOrRollback { cancelFun =>
        pretransformExprs(thenp, elsep) { (tthenp, telsep) =>
          if (tthenp.tpe.isNothingType) {
            // The then branch never completes: keep only its effects.
            cont(PreTransBlock(
                If(newCond, finishTransformStat(tthenp), Skip())(NoType),
                telsep))
          } else if (telsep.tpe.isNothingType) {
            // Symmetric case, with the condition negated.
            val negCond = finishTransformExpr(
                foldUnaryOp(UnaryOp.Boolean_!, newCond.toPreTransform))
            cont(PreTransBlock(
                If(negCond, finishTransformStat(telsep), Skip())(NoType),
                tthenp))
          } else {
            (resolveLocalDef(tthenp), resolveLocalDef(telsep)) match {
              case (PreTransRecordTree(thenTree, thenOrigType, thenCancelFun),
                  PreTransRecordTree(elseTree, elseOrigType, elseCancelFun)) =>
                // Both branches lowered to records: merge only if they agree.
                val commonType = {
                  if (thenTree.tpe == elseTree.tpe && thenOrigType == elseOrigType)
                    thenTree.tpe
                  else
                    cancelFun()
                }
                val refinedOrigType =
                  constrainedLub(thenOrigType, elseOrigType, tree.tpe)
                cont(PreTransRecordTree(
                    If(newCond, thenTree, elseTree)(commonType),
                    refinedOrigType,
                    cancelFun))

              case (tthenpNoLocalDef, telsepNoLocalDef) =>
                val newThenp = finishTransformExpr(tthenpNoLocalDef)
                val newElsep = finishTransformExpr(telsepNoLocalDef)
                val refinedType =
                  constrainedLub(newThenp.tpe, newElsep.tpe, tree.tpe)
                cont(foldIf(newCond, newThenp, newElsep)(
                    refinedType).toPreTransform)
            }
          }
        }
      } { () =>
        val newThenp = transformExpr(thenp)
        val newElsep = transformExpr(elsep)
        val refinedType =
          constrainedLub(newThenp.tpe, newElsep.tpe, tree.tpe)
        cont(foldIf(newCond, newThenp, newElsep)(
            refinedType).toPreTransform)
      }
  }
}
/** Pretransforms a `Select` tree.
 *
 *  Pretransforms the qualifier, then delegates to the overload taking the
 *  already-pretransformed qualifier.
 */
private def pretransformSelectCommon(tree: Select, isLhsOfAssign: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  val Select(qualifier, className, field) = tree
  pretransformExpr(qualifier) { tqualifier =>
    pretransformSelectCommon(tree.tpe, tqualifier, className, field,
        isLhsOfAssign)(cont)(scope, tree.pos)
  }
}
/** Pretransforms a field selection given a pretransformed qualifier.
 *
 *  If the qualifier is an inlined (being-constructed or constructed)
 *  instance, the selection resolves to the field's own LocalDef; assigning
 *  to an immutable inlined field cancels. A Long literal qualifier folds
 *  its `lo`/`hi` selection to an IntLiteral. Otherwise the selection is
 *  rebuilt, as a RecordSelect when the qualifier lowered to a record.
 */
private def pretransformSelectCommon(expectedType: Type,
    preTransQual: PreTransform, className: ClassName, field: FieldIdent,
    isLhsOfAssign: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = {
  preTransQual match {
    case PreTransLocalDef(LocalDef(_, _,
        InlineClassBeingConstructedReplacement(_, fieldLocalDefs, cancelFun))) =>
      val fieldLocalDef = fieldLocalDefs(FieldID(className, field))
      if (!isLhsOfAssign || fieldLocalDef.mutable) {
        cont(fieldLocalDef.toPreTransform)
      } else {
        /* This is an assignment to an immutable field of a inlineable class
         * being constructed, but that does not appear at the "top-level" of
         * one of its constructors. We cannot handle those, so we cancel.
         * (Assignments at the top-level are normal initializations of these
         * fields, and are transformed as vals in inlineClassConstructor.)
         */
        cancelFun()
      }

    case PreTransLocalDef(LocalDef(_, _,
        InlineClassInstanceReplacement(_, fieldLocalDefs, cancelFun))) =>
      val fieldLocalDef = fieldLocalDefs(FieldID(className, field))
      if (!isLhsOfAssign || fieldLocalDef.mutable) {
        cont(fieldLocalDef.toPreTransform)
      } else {
        /* In an ideal world, this should not happen (assigning to an
         * immutable field of an already constructed object). However, since
         * we cannot IR-check that this does not happen (see #1021), this is
         * effectively allowed by the IR spec. We are therefore not allowed
         * to crash. We cancel instead. This will become an actual field
         * (rather than an optimized local val) which is not considered pure
         * (for that same reason).
         */
        cancelFun()
      }

    // Select the lo or hi "field" of a Long literal
    case PreTransLit(LongLiteral(value)) if useRuntimeLong =>
      val itemName = field.name
      assert(itemName == inlinedRTLongLoField ||
          itemName == inlinedRTLongHiField)
      assert(expectedType == IntType)
      val resultValue =
        if (itemName == inlinedRTLongLoField) value.toInt
        else (value >>> 32).toInt
      cont(PreTransLit(IntLiteral(resultValue)))

    case _ =>
      resolveLocalDef(preTransQual) match {
        case PreTransRecordTree(newQual, origType, cancelFun) =>
          // The qualifier was lowered to a record: select the record field.
          val recordType = newQual.tpe.asInstanceOf[RecordType]
          val recordField = recordType.findField(field.name)
          val sel = RecordSelect(newQual, field)(recordField.tpe)
          sel.tpe match {
            case _: RecordType =>
              cont(PreTransRecordTree(sel, RefinedType(expectedType), cancelFun))
            case _ =>
              cont(PreTransTree(sel, RefinedType(sel.tpe)))
          }

        case PreTransTree(newQual, _) =>
          cont(PreTransTree(Select(newQual, className, field)(expectedType),
              RefinedType(expectedType)))
      }
  }
}
/** Pretransforms a `New` with already-pretransformed arguments.
 *
 *  If the class is inlineable, optimistically inline its constructor
 *  (stack allocation), rolling back to an actual `New` on cancellation.
 *  Non-inlineable classes are materialized directly.
 */
private def pretransformNew(allocationSite: AllocationSite,
    className: ClassName, ctor: MethodIdent, targs: List[PreTransform])(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = {

  // Materializes the allocation as an actual `New` tree. The result is an
  // exact, non-null instance of `className`.
  def materializedNew(): TailRec[Tree] = {
    cont(PreTransTree(
        New(className, ctor, targs.map(finishTransformExpr)),
        RefinedType(ClassType(className), isExact = true, isNullable = false)))
  }

  tryNewInlineableClass(className) match {
    case None =>
      materializedNew()
    case Some(structure) =>
      tryOrRollback { cancelFun =>
        inlineClassConstructor(allocationSite, className, structure,
            ctor, targs, cancelFun)(cont)
      } { () =>
        materializedNew()
      }
  }
}
/** Resolves any LocalDef in a [[PreTransform]].
 *
 *  The result is always a [[PreTransGenTree]]: record-backed replacements
 *  (record var refs, inlined class instances) become `PreTransRecordTree`s,
 *  everything else becomes a `PreTransTree`. Inlined instances whose record
 *  type is not immutable cancel, since materializing them as a RecordValue
 *  would lose subsequent mutations.
 */
private def resolveLocalDef(preTrans: PreTransform): PreTransGenTree = {
  implicit val pos = preTrans.pos
  preTrans match {
    case PreTransBlock(bindingsAndStats, result) =>
      resolveLocalDef(result) match {
        case PreTransRecordTree(tree, tpe, cancelFun) =>
          PreTransRecordTree(finishTransformBindings(bindingsAndStats, tree),
              tpe, cancelFun)
        case PreTransTree(tree, tpe) =>
          PreTransTree(finishTransformBindings(bindingsAndStats, tree), tpe)
      }

    case _:PreTransUnaryOp | _:PreTransBinaryOp | _:PreTransJSBinaryOp =>
      PreTransTree(finishTransformExpr(preTrans))

    case PreTransLocalDef(localDef @ LocalDef(tpe, _, replacement)) =>
      replacement match {
        case ReplaceWithRecordVarRef(name, recordType, used, cancelFun) =>
          used.value = true
          PreTransRecordTree(
              VarRef(LocalIdent(name))(recordType), tpe, cancelFun)

        case InlineClassInstanceReplacement(structure, fieldLocalDefs, cancelFun) =>
          val recordType = structure.recordType
          if (!isImmutableType(recordType))
            cancelFun()
          PreTransRecordTree(
              RecordValue(recordType, structure.fieldIDs.map(
                  id => fieldLocalDefs(id).newReplacement)),
              tpe, cancelFun)

        case _ =>
          PreTransTree(localDef.newReplacement, localDef.tpe)
      }

    case preTrans: PreTransGenTree =>
      preTrans
  }
}
/** Resolves any [[RecordType]] in a [[PreTransform]].
 *
 *  If `preTrans` would resolve to a `PreTransRecordTree`, returns a `Some`
 *  of its (lowered) [[RecordType]] and its `cancelFun`. Otherwise, returns
 *  `None`.
 *
 *  Note that the record type is not the same as `preTrans.tpe.base`, which
 *  is the *original* type of the tree (not lowered to a record type).
 */
private def resolveRecordType(
    preTrans: PreTransform): Option[(RecordType, CancelFun)] = {
  preTrans match {
    case PreTransBlock(_, result) =>
      // The record type of a block is that of its result.
      resolveRecordType(result)

    case _:PreTransUnaryOp | _:PreTransBinaryOp | _:PreTransJSBinaryOp =>
      None

    case PreTransLocalDef(LocalDef(_, _, replacement)) =>
      replacement match {
        case ReplaceWithRecordVarRef(_, recordType, _, cancelFun) =>
          Some((recordType, cancelFun))
        case InlineClassInstanceReplacement(structure, _, cancelFun) =>
          Some((structure.recordType, cancelFun))
        case _ =>
          None
      }

    case PreTransRecordTree(tree, _, cancelFun) =>
      Some((tree.tpe.asInstanceOf[RecordType], cancelFun))

    case PreTransTree(_, _) =>
      None
  }
}
/** Combines pretransformExpr and resolveLocalDef in one convenience method.
 *
 *  `cont` is guaranteed to receive a [[PreTransGenTree]], never a symbolic
 *  local-def pretransform.
 */
private def pretransformNoLocalDef(tree: Tree)(
    cont: PreTransGenTree => TailRec[Tree])(
    implicit scope: Scope): TailRec[Tree] = {
  pretransformExpr(tree)(ttree => cont(resolveLocalDef(ttree)))
}
/** Finishes a pretransform, either a statement or an expression.
 *
 *  Returns a continuation suitable for the `cont` parameter of the
 *  pretransform methods, delegating to `finishTransformStat` or
 *  `finishTransformExpr` according to `isStat`.
 */
private def finishTransform(isStat: Boolean): PreTransCont = { preTrans =>
  TailCalls.done {
    if (isStat) finishTransformStat(preTrans)
    else finishTransformExpr(preTrans)
  }
}
/** Finishes an expression pretransform to get a normal [[Tree]].
 *  This method (together with finishTransformStat) must not be called more
 *  than once per pretransform and per translation.
 *  By "per translation", we mean in an alternative path through
 *  `tryOrRollback`. It could still be called several times as long as
 *  it is once in the 'try' part and once in the 'fallback' part.
 */
private def finishTransformExpr(preTrans: PreTransform): Tree = {
  implicit val pos = preTrans.pos
  preTrans match {
    case PreTransBlock(bindingsAndStats, result) =>
      // Materialize the pending bindings/statements around the finished result
      finishTransformBindings(bindingsAndStats, finishTransformExpr(result))
    case PreTransUnaryOp(op, lhs) =>
      UnaryOp(op, finishTransformExpr(lhs))
    case PreTransBinaryOp(op, lhs, rhs) =>
      BinaryOp(op, finishTransformExpr(lhs), finishTransformExpr(rhs))
    case PreTransJSBinaryOp(op, lhs, rhs) =>
      JSBinaryOp(op, finishTransformExpr(lhs), finishTransformExpr(rhs))
    case PreTransLocalDef(localDef) =>
      localDef.newReplacement

    /* In general, it is not OK to allocate a new instance of an inlined
     * class from its record value, because that can break object identity
     * (not to mention we have no idea what the primary constructor does).
     * However, for RuntimeLong specifically, it is OK. It is useful to do
     * so because it allows us not to cancel the original stack allocation
     * of the Long value, which means that all previous usages of it can
     * stay on stack.
     *
     * We do something similar in LocalDef.newReplacement.
     */
    case PreTransRecordTree(tree, tpe, _)
        if tpe.base == ClassType(LongImpl.RuntimeLongClass) =>
      tree match {
        case RecordValue(_, List(lo, hi)) =>
          // Direct record value: rebuild a RuntimeLong from the two halves
          createNewLong(lo, hi)
        case recordVarRef: VarRef =>
          createNewLong(recordVarRef)
        case _ =>
          /* Arbitrary record tree: evaluate it exactly once into a fresh
           * local, then build the RuntimeLong from that var ref.
           */
          val varRefIdent = LocalIdent(
              freshLocalNameWithoutOriginalName(LocalName("x"), mutable = false))
          val recordVarDef =
            VarDef(varRefIdent, NoOriginalName, tree.tpe, mutable = false, tree)
          Block(recordVarDef, createNewLong(recordVarDef.ref))
      }

    case PreTransRecordTree(_, _, cancelFun) =>
      // Any other record cannot be materialized as an expression: roll back
      cancelFun()
    case PreTransTree(tree, _) =>
      tree
  }
}
/** Finishes a statement pretransform to get a normal [[Tree]].
 *  This method (together with finishTransformExpr) must not be called more
 *  than once per pretransform and per translation.
 *  By "per translation", we mean in an alternative path through
 *  `tryOrRollback`. It could still be called several times as long as
 *  it is once in the 'try' part and once in the 'fallback' part.
 */
private def finishTransformStat(stat: PreTransform): Tree = stat match {
  case PreTransBlock(bindingsAndStats, result) =>
    finishTransformBindings(bindingsAndStats, finishTransformStat(result))
  case PreTransUnaryOp(_, lhs) =>
    // A unary op has no side effect of its own; keep only the operand's
    finishTransformStat(lhs)
  case PreTransBinaryOp(op, lhs, rhs) =>
    // Here we need to preserve the side-effects of integer division/modulo
    import BinaryOp._
    val newLhs = finishTransformStat(lhs)
    def finishNoSideEffects: Tree =
      Block(newLhs, finishTransformStat(rhs))(stat.pos)
    op match {
      case Int_/ | Int_% =>
        rhs match {
          case PreTransLit(IntLiteral(r)) if r != 0 =>
            // Divisor is statically non-zero: the op itself cannot trap
            finishNoSideEffects
          case _ =>
            // Keep `0 / rhs` so a potential division-by-zero still occurs
            Block(newLhs, BinaryOp(op, IntLiteral(0)(stat.pos),
                finishTransformExpr(rhs))(stat.pos))(stat.pos)
        }
      case Long_/ | Long_% =>
        rhs match {
          case PreTransLit(LongLiteral(r)) if r != 0L =>
            finishNoSideEffects
          case _ =>
            Block(newLhs, BinaryOp(op, LongLiteral(0L)(stat.pos),
                finishTransformExpr(rhs))(stat.pos))(stat.pos)
        }
      case _ =>
        finishNoSideEffects
    }
  case PreTransJSBinaryOp(op, lhs, rhs) =>
    if (op == JSBinaryOp.=== || op == JSBinaryOp.!==)
      // === and !== are side-effect-free; keep only the operands' effects
      Block(finishTransformStat(lhs), finishTransformStat(rhs))(stat.pos)
    else // other operators can have side effects that we must preserve
      finishTransformExpr(stat)
  case PreTransLocalDef(_) =>
    // A bare local reference has no effect in statement position
    Skip()(stat.pos)
  case PreTransRecordTree(tree, _, _) =>
    keepOnlySideEffects(tree)
  case PreTransTree(tree, _) =>
    keepOnlySideEffects(tree)
}
/** Finishes the bindings and statements followed by a result to get a
 *  normal [[Tree]].
 *  This method must not be called more than once per `BindingOrStat` and
 *  per translation.
 *  By "per translation", we mean in an alternative path through
 *  `tryOrRollback`. It could still be called several times as long as
 *  it is once in the 'try' part and once in the 'fallback' part.
 */
private def finishTransformBindings(bindingsAndStats: List[BindingOrStat],
    result: Tree): Tree = {
  // Fold right so the innermost body is `result` and bindings nest outward
  bindingsAndStats.foldRight(result) {
    case (Left(PreTransBinding(originalName, localDef, value)), innerBody) =>
      implicit val pos = value.pos
      val LocalDef(tpe, mutable, replacement) = localDef
      /* Only var-ref replacements can appear in a binding here; other
       * replacement kinds are excluded by construction (hence @unchecked).
       */
      val (name, used) = (replacement: @unchecked) match {
        case ReplaceWithVarRef(name, used, _) =>
          (name, used)
        case ReplaceWithRecordVarRef(name, _, used, _) =>
          (name, used)
      }
      if (used.value) {
        // The binding was actually referenced: emit a real VarDef
        val ident = LocalIdent(name)
        val varDef = resolveLocalDef(value) match {
          case PreTransRecordTree(valueTree, valueTpe, cancelFun) =>
            val recordType = valueTree.tpe.asInstanceOf[RecordType]
            // A mutable record type cannot be soundly bound here: roll back
            if (!isImmutableType(recordType))
              cancelFun()
            VarDef(ident, originalName, recordType, mutable, valueTree)
          case PreTransTree(valueTree, valueTpe) =>
            VarDef(ident, originalName, tpe.base, mutable, valueTree)
        }
        Block(varDef, innerBody)
      } else {
        // Unused binding: keep only the side effects of its value
        val valueSideEffects = finishTransformStat(value)
        Block(valueSideEffects, innerBody)
      }
    case (Right(stat), innerBody) =>
      Block(stat, innerBody)(innerBody.pos)
  }
}
/** Keeps only the side effects of a Tree (overapproximation).
 *
 *  Returns a tree with the same side effects as `stat` but whose result
 *  value is discarded. Trees that are not recognized are conservatively
 *  kept as-is (the overapproximation).
 */
private def keepOnlySideEffects(stat: Tree): Tree = stat match {
  case _:VarRef | _:This | _:Literal | _:SelectStatic =>
    // Pure reads and literals: no effect at all
    Skip()(stat.pos)
  case VarDef(_, _, _, _, rhs) =>
    // The binding itself is dropped; only the rhs may have effects
    keepOnlySideEffects(rhs)
  case Block(init :+ last) =>
    keepOnlySideEffects(last) match {
      case Skip() => keepOnlySideEffects(Block(init)(stat.pos))
      case lastEffects => Block(init, lastEffects)(stat.pos)
    }
  case LoadModule(moduleClassName) =>
    // Module loads are effectful unless the module accessor is elidable
    if (hasElidableModuleAccessor(moduleClassName)) Skip()(stat.pos)
    else stat
  case NewArray(_, lengths) =>
    // Only the evaluation of the length expressions can have effects
    // NOTE(review): this elides any negative-size check the allocation
    // itself might perform — presumably intended; confirm with semantics
    Block(lengths.map(keepOnlySideEffects))(stat.pos)
  case Select(qualifier, _, _) =>
    keepOnlySideEffects(qualifier)
  case Closure(_, _, _, _, _, captureValues) =>
    // Creating a closure is pure; only capture evaluation can have effects
    Block(captureValues.map(keepOnlySideEffects))(stat.pos)
  case UnaryOp(_, arg) =>
    keepOnlySideEffects(arg)
  case If(cond, thenp, elsep) =>
    (keepOnlySideEffects(thenp), keepOnlySideEffects(elsep)) match {
      case (Skip(), Skip()) => keepOnlySideEffects(cond)
      case (newThenp, newElsep) => If(cond, newThenp, newElsep)(NoType)(stat.pos)
    }
  case BinaryOp(op, lhs, rhs) =>
    // Here we need to preserve the side-effects of integer division/modulo
    import BinaryOp._
    implicit val pos = stat.pos
    val newLhs = keepOnlySideEffects(lhs)
    def finishNoSideEffects: Tree =
      Block(newLhs, keepOnlySideEffects(rhs))
    op match {
      case Int_/ | Int_% =>
        rhs match {
          case IntLiteral(r) if r != 0 =>
            // Statically non-zero divisor: the op cannot trap
            finishNoSideEffects
          case _ =>
            // Keep `0 / rhs` so a potential division-by-zero still occurs
            Block(newLhs, BinaryOp(op, IntLiteral(0), rhs))
        }
      case Long_/ | Long_% =>
        rhs match {
          case LongLiteral(r) if r != 0L =>
            finishNoSideEffects
          case _ =>
            Block(newLhs, BinaryOp(op, LongLiteral(0L), rhs))
        }
      case _ =>
        finishNoSideEffects
    }
  case RecordValue(_, elems) =>
    Block(elems.map(keepOnlySideEffects))(stat.pos)
  case RecordSelect(record, _) =>
    keepOnlySideEffects(record)
  case _ =>
    // Unknown shape: conservatively keep the whole tree
    stat
}
/** Pretransforms an `Apply` tree by pretransforming its receiver and
 *  arguments, then delegating to the flags-based overload.
 */
private def pretransformApply(tree: Apply, isStat: Boolean,
    usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  implicit val pos = tree.pos
  val Apply(flags, receiver, methodIdent, args) = tree

  pretransformExprs(receiver, args) { (preTransReceiver, preTransArgs) =>
    pretransformApply(flags, preTransReceiver, methodIdent, preTransArgs,
        tree.tpe, isStat, usePreTransform)(cont)
  }
}
/** Pretransforms a (possibly dynamic) method call, deciding whether to
 *  inline it, call an intrinsic, or emit a plain `Apply`.
 *
 *  Resolution strategy: exact receiver types (and arrays) use static
 *  resolution; otherwise all dynamic targets are collected. A single
 *  target may be inlined directly; multiple targets only via
 *  `canMultiInline` (identical forwarders).
 */
private def pretransformApply(flags: ApplyFlags, treceiver: PreTransform,
    methodIdent: MethodIdent, targs: List[PreTransform], resultType: Type,
    isStat: Boolean, usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = {
  val methodName = methodIdent.name

  // Fallback: emit the call as-is, finishing receiver and arguments
  def treeNotInlined = {
    cont(PreTransTree(Apply(flags, finishTransformExpr(treceiver), methodIdent,
        targs.map(finishTransformExpr))(resultType), RefinedType(resultType)))
  }

  treceiver.tpe.base match {
    case NothingType =>
      // The receiver never completes; the call is unreachable
      cont(treceiver)
    case NullType =>
      // Apply on null is UB, just create a well-typed tree.
      cont(Block(finishTransformStat(treceiver), Throw(Null())).toPreTransform)
    case _ =>
      if (methodName.isReflectiveProxy) {
        // Never inline reflective proxies
        treeNotInlined
      } else {
        val className = boxedClassForType(treceiver.tpe.base)
        val namespace = MemberNamespace.forNonStaticCall(flags)

        /* When the receiver has an exact type, we can use static resolution
         * even for a dynamic call.
         * Otherwise, if the receiver has an ArrayType, we should perform
         * dynamic resolution in the Array[T] class. However, we don't model
         * the Array[T] class family, so we cannot do that. We emulate the
         * result by using static resolution in the representative class
         * (which is j.l.Object) instead. (addMethodCalled in Infos.scala
         * does the same thing.)
         */
        val useStaticResolution =
          treceiver.tpe.isExact || treceiver.tpe.base.isInstanceOf[ArrayType]

        val impls =
          if (useStaticResolution) List(staticCall(className, namespace, methodName))
          else dynamicCall(className, methodName)
        val allocationSites =
          (treceiver :: targs).map(_.tpe.allocationSite)
        if (impls.isEmpty || impls.exists(impl =>
            scope.implsBeingInlined((allocationSites, impl)))) {
          // isEmpty could happen, have to leave it as is for the TypeError
          treeNotInlined
        } else if (impls.size == 1) {
          val target = impls.head
          val intrinsicCode = intrinsics(flags, target)
          if (intrinsicCode >= 0) {
            callIntrinsic(intrinsicCode, flags, Some(treceiver), methodName,
                targs, isStat, usePreTransform)(cont)
          } else if (target.inlineable && (
              target.shouldInline ||
              shouldInlineBecauseOfArgs(target, treceiver :: targs))) {
            /* When inlining a single method, the declared type of the `this`
             * value is its enclosing class.
             */
            val receiverType = ClassType(target.enclosingClassName)
            inline(allocationSites, Some((receiverType, treceiver)), targs,
                target, isStat, usePreTransform)(cont)
          } else {
            treeNotInlined
          }
        } else {
          if (canMultiInline(impls)) {
            /* When multi-inlining, we cannot use the enclosing class of the
             * target method as the declared type of the receiver, since we
             * have no guarantee that the receiver is in fact of that
             * particular class. It could be of any of the classes that the
             * targets belong to. Therefore, we have to keep the receiver's
             * static type as a declared type, which is our only safe choice.
             */
            val receiverType = treceiver.tpe.base
            inline(allocationSites, Some((receiverType, treceiver)), targs,
                impls.head, isStat, usePreTransform)(cont)
          } else {
            treeNotInlined
          }
        }
      }
  }
}
/** Tests whether a set of candidate targets can be multi-inlined.
 *
 *  This is the case when all targets are inlineable forwarders with the
 *  same shape: they all forward to the same static method, the same
 *  default-method implementation, or the same bridged method (with
 *  matching unboxing of the arguments). In that case, inlining any one of
 *  them (the head) is equivalent to inlining the actual target.
 */
private def canMultiInline(impls: List[MethodID]): Boolean = {
  // TODO? Inline multiple non-forwarders with the exact same body?
  impls.forall(impl => impl.isForwarder && impl.inlineable) &&
  (getMethodBody(impls.head).body.get match {
    // Trait impl forwarder
    case ApplyStatic(flags, staticCls, MethodIdent(methodName), _) =>
      impls.tail.forall(getMethodBody(_).body.get match {
        case ApplyStatic(`flags`, `staticCls`, MethodIdent(`methodName`), _) =>
          true
        case _ =>
          false
      })

    // Shape of forwards to default methods
    case ApplyStatically(flags, This(), className, MethodIdent(methodName), args) =>
      impls.tail.forall(getMethodBody(_).body.get match {
        case ApplyStatically(`flags`, This(), `className`, MethodIdent(`methodName`), _) =>
          true
        case _ =>
          false
      })

    // Bridge method
    case Apply(flags, This(), MethodIdent(methodName), referenceArgs) =>
      impls.tail.forall(getMethodBody(_).body.get match {
        case Apply(`flags`, This(), MethodIdent(`methodName`), implArgs) =>
          // Arguments must be unboxed in exactly the same way
          referenceArgs.zip(implArgs) forall {
            case (MaybeUnbox(_, unboxID1), MaybeUnbox(_, unboxID2)) =>
              unboxID1 == unboxID2
          }
        case _ =>
          false
      })

    case body =>
      // isForwarder guarantees one of the shapes above
      throw new AssertionError("Invalid forwarder shape: " + body)
  })
}
/** The representative (boxed) class for a given receiver type, used for
 *  method resolution. Longs map to RuntimeLong when that implementation is
 *  in use.
 */
private def boxedClassForType(tpe: Type): ClassName = {
  // Both LongType and ClassType(BoxedLongClass) box to the same class
  def longBoxClass: ClassName =
    if (useRuntimeLong) LongImpl.RuntimeLongClass
    else BoxedLongClass

  (tpe: @unchecked) match {
    case ClassType(BoxedLongClass) => longBoxClass
    case ClassType(className)      => className
    case AnyType                   => ObjectClass
    case UndefType                 => BoxedUnitClass
    case BooleanType               => BoxedBooleanClass
    case CharType                  => BoxedCharacterClass
    case ByteType                  => BoxedByteClass
    case ShortType                 => BoxedShortClass
    case IntType                   => BoxedIntegerClass
    case LongType                  => longBoxClass
    case FloatType                 => BoxedFloatClass
    case DoubleType                => BoxedDoubleClass
    case StringType                => BoxedStringClass
    case ArrayType(_)              => ObjectClass
  }
}
/** Pretransforms a statically-resolved instance call (`ApplyStatically`),
 *  trying intrinsics first, then inlining, and finally emitting the call
 *  as-is.
 */
private def pretransformStaticApply(tree: ApplyStatically, isStat: Boolean,
    usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  val ApplyStatically(flags, receiver, className,
      methodIdent @ MethodIdent(methodName), args) = tree
  implicit val pos = tree.pos

  // Fallback from already-transformed receiver/args
  def treeNotInlined0(transformedReceiver: Tree, transformedArgs: List[Tree]) =
    cont(PreTransTree(ApplyStatically(flags, transformedReceiver, className,
        methodIdent, transformedArgs)(tree.tpe), RefinedType(tree.tpe)))

  def treeNotInlined =
    treeNotInlined0(transformExpr(receiver), args.map(transformExpr))

  if (methodName.isReflectiveProxy) {
    // Never inline reflective proxies
    treeNotInlined
  } else {
    val target = staticCall(className, MemberNamespace.forNonStaticCall(flags),
        methodName)
    pretransformExprs(receiver, args) { (treceiver, targs) =>
      val intrinsicCode = intrinsics(flags, target)
      if (intrinsicCode >= 0) {
        callIntrinsic(intrinsicCode, flags, Some(treceiver), methodName,
            targs, isStat, usePreTransform)(cont)
      } else {
        val shouldInline = target.inlineable && (
            target.shouldInline ||
            shouldInlineBecauseOfArgs(target, treceiver :: targs))
        val allocationSites =
          (treceiver :: targs).map(_.tpe.allocationSite)
        // Avoid infinite inlining of (mutually) recursive calls
        val beingInlined =
          scope.implsBeingInlined((allocationSites, target))

        if (shouldInline && !beingInlined) {
          val receiverType = ClassType(target.enclosingClassName)
          inline(allocationSites, Some((receiverType, treceiver)), targs,
              target, isStat, usePreTransform)(cont)
        } else {
          treeNotInlined0(finishTransformExpr(treceiver),
              targs.map(finishTransformExpr))
        }
      }
    }
  }
}
/** Pretransforms a static method call (`ApplyStatic`), trying intrinsics
 *  first, then inlining, and finally emitting the call as-is.
 */
private def pretransformApplyStatic(tree: ApplyStatic, isStat: Boolean,
    usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  val ApplyStatic(flags, className,
      methodIdent @ MethodIdent(methodName), args) = tree
  implicit val pos = tree.pos

  // Fallback from already-transformed arguments
  def treeNotInlined0(transformedArgs: List[Tree]) =
    cont(PreTransTree(ApplyStatic(flags, className, methodIdent,
        transformedArgs)(tree.tpe), RefinedType(tree.tpe)))

  def treeNotInlined = treeNotInlined0(args.map(transformExpr))

  val target = staticCall(className, MemberNamespace.forStaticCall(flags),
      methodName)
  pretransformExprs(args) { targs =>
    val intrinsicCode = intrinsics(flags, target)
    if (intrinsicCode >= 0) {
      callIntrinsic(intrinsicCode, flags, None, methodName, targs,
          isStat, usePreTransform)(cont)
    } else {
      val shouldInline = target.inlineable && (
          target.shouldInline || shouldInlineBecauseOfArgs(target, targs))
      val allocationSites = targs.map(_.tpe.allocationSite)
      // Avoid infinite inlining of (mutually) recursive calls
      val beingInlined =
        scope.implsBeingInlined((allocationSites, target))

      if (shouldInline && !beingInlined) {
        inline(allocationSites, None, targs, target,
            isStat, usePreTransform)(cont)
      } else {
        treeNotInlined0(targs.map(finishTransformExpr))
      }
    }
  }
}
/** Pretransforms a JS bracket select (`qual[item]`).
 *
 *  When the qualifier is an inlined JS array and the item is a constant
 *  index or `"length"`, the selection is resolved at compile time.
 *  Selections used as assignment targets are never optimized, since they
 *  must remain valid lhs trees.
 */
private def pretransformJSSelect(tree: JSSelect, isLhsOfAssign: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  val JSSelect(qual, item) = tree
  implicit val pos = tree.pos

  pretransformExprs(qual, item) { (tqual, titem0) =>
    // Canonicalize string items that are really integer indices
    val titem = optimizeJSBracketSelectItem(titem0)

    def default: TailRec[Tree] = {
      cont(PreTransTree(foldJSSelect(finishTransformExpr(tqual),
          finishTransformExpr(titem))))
    }

    titem match {
      case _ if isLhsOfAssign =>
        default

      case PreTransLit(itemLit) =>
        itemLit match {
          case IntLiteral(itemInt) =>
            tqual match {
              case PreTransLocalDef(LocalDef(_, false,
                  InlineJSArrayReplacement(itemLocalDefs, _))) =>
                if (itemInt >= 0 && itemInt < itemLocalDefs.size)
                  cont(itemLocalDefs(itemInt).toPreTransform)
                else
                  // Out-of-bounds access on a JS array yields undefined
                  cont(PreTransLit(Undefined()))

              case _ =>
                default
            }

          case StringLiteral("length") =>
            tqual match {
              case PreTransLocalDef(LocalDef(_, false,
                  InlineJSArrayReplacement(itemLocalDefs, _))) =>
                cont(PreTransLit(IntLiteral(itemLocalDefs.size)))

              case _ =>
                default
            }

          case _ =>
            default
        }

      case _ =>
        default
    }
  }
}
/** Canonicalizes a JS bracket-select item: a string literal that is the
 *  exact decimal representation of an Int becomes the corresponding
 *  IntLiteral; anything else is returned unchanged.
 */
private def optimizeJSBracketSelectItem(item: PreTransform): PreTransform = {
  item match {
    case PreTransLit(StringLiteral(s)) =>
      // Only accept canonical forms: round-tripping through toString must
      // reproduce the original string (rejects "+1", "01", "-0", etc.)
      val canonicalInt = scala.util.Try(s.toInt).toOption.filter(_.toString == s)
      canonicalInt match {
        case Some(intValue) => PreTransLit(IntLiteral(intValue)(item.pos))
        case None           => item
      }
    case _ =>
      item
  }
}
/** Pretransforms a JS function application.
 *
 *  If the callee is a tentatively-inlined closure that has not been used
 *  yet and is applied with no spreads and at most its arity, the closure
 *  body is inlined (missing arguments are filled with `undefined`, as JS
 *  semantics dictate). Otherwise the application is emitted as-is.
 */
private def pretransformJSFunctionApply(tree: JSFunctionApply,
    isStat: Boolean, usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  val JSFunctionApply(fun, args) = tree
  implicit val pos = tree.pos

  if (args.exists(_.isInstanceOf[JSSpread])) {
    // Spread arguments prevent inlining; transform the call as-is
    cont(JSFunctionApply(transformExpr(fun),
        transformExprsOrSpreads(args)).toPreTransform)
  } else {
    // No spread present, so every argument is a plain Tree
    val argsNoSpread = args.asInstanceOf[List[Tree]]

    pretransformExpr(fun) { tfun =>
      tfun match {
        case PreTransLocalDef(LocalDef(_, false,
            closure @ TentativeClosureReplacement(
                captureParams, params, body, captureLocalDefs,
                alreadyUsed, cancelFun)))
            if !alreadyUsed.value && argsNoSpread.size <= params.size =>
          // Mark the closure as consumed so it is not inlined twice
          alreadyUsed.value = true
          val missingArgCount = params.size - argsNoSpread.size
          val expandedArgs =
            if (missingArgCount == 0) argsNoSpread
            else argsNoSpread ::: List.fill(missingArgCount)(Undefined())
          pretransformExprs(expandedArgs) { targs =>
            /* In a JS function, the *declared* type of the `this` value is
             * always `AnyType`, like all the other parameters. In a
             * `JSFunctionApply`, its *actual* value is always `undefined`,
             * by spec of what `JSFunctionApply` does.
             */
            inlineBody(
                Some((AnyType, PreTransLit(Undefined()))),
                captureParams ++ params, AnyType, body,
                captureLocalDefs.map(_.toPreTransform) ++ targs, isStat,
                usePreTransform)(cont)
          }

        case _ =>
          cont(JSFunctionApply(finishTransformExpr(tfun),
              argsNoSpread.map(transformExpr)).toPreTransform)
      }
    }
  }
}
/** Transforms a list of trees that may contain `JSSpread`s.
 *
 *  Spread items over inlined JS arrays (or literal `JSArrayConstr`s) are
 *  flattened into individual elements; other spreads are kept as spreads.
 */
private def transformExprsOrSpreads(trees: List[TreeOrJSSpread])(
    implicit scope: Scope): List[TreeOrJSSpread] = {
  /* This is basically a flatMap, but we do it manually because flatMap would
   * generate many garbage intermediate lists, when in fact the case JSSpread
   * should be fairly rare. In general, we avoid flatMaps over collections in
   * OptimizerCore.
   */
  val builder = List.newBuilder[TreeOrJSSpread]

  trees.foreach {
    case spread: JSSpread =>
      implicit val pos = spread.pos

      // Run a nested pretransform to see whether the spread items resolve
      // to a known JS array that we can flatten
      val newSpreadItems = trampoline {
        pretransformExpr(spread.items) { tspreadItems =>
          TailCalls.done {
            tspreadItems match {
              case PreTransLocalDef(LocalDef(_, false,
                  InlineJSArrayReplacement(itemLocalDefs, _))) =>
                JSArrayConstr(
                    itemLocalDefs.toList.map(_.newReplacement(spread.pos)))
              case _ =>
                finishTransformExpr(tspreadItems)
            }
          }
        }
      }

      newSpreadItems match {
        // A literal array constructor can be spliced element-wise
        case JSArrayConstr(newFirsts) => builder ++= newFirsts
        case _                        => builder += JSSpread(newSpreadItems)
      }

    case tree: Tree =>
      builder += transformExpr(tree)
  }

  builder.result()
}
/** Encoded names of classes whose instances, when passed as arguments,
 *  make a call worth inlining (see `shouldInlineBecauseOfArgs`).
 */
private val ClassNamesThatShouldBeInlined: Set[ClassName] = {
  val encodedNames = List(
      "scala.Predef$$less$colon$less",
      "scala.Predef$$eq$colon$eq",

      "scala.reflect.ManifestFactory$ByteManifest$",
      "scala.reflect.ManifestFactory$ShortManifest$",
      "scala.reflect.ManifestFactory$CharManifest$",
      "scala.reflect.ManifestFactory$IntManifest$",
      "scala.reflect.ManifestFactory$LongManifest$",
      "scala.reflect.ManifestFactory$FloatManifest$",
      "scala.reflect.ManifestFactory$DoubleManifest$",
      "scala.reflect.ManifestFactory$BooleanManifest$",
      "scala.reflect.ManifestFactory$UnitManifest$",
      "scala.reflect.ManifestFactory$AnyManifest$",
      "scala.reflect.ManifestFactory$ObjectManifest$",
      "scala.reflect.ManifestFactory$AnyValManifest$",
      "scala.reflect.ManifestFactory$NullManifest$",
      "scala.reflect.ManifestFactory$NothingManifest$"
  )
  encodedNames.map(ClassName(_)).toSet
}
/** Tests whether a call should be inlined because of the values of its
 *  receiver and arguments.
 *
 *  This is a heuristic: the call is inlined when at least one argument is
 *  likely to unlock further optimizations inside the body (a tentative
 *  closure, a record-backed value, an inlined class instance, or an
 *  instance of a class we know benefits from inlining), or for the special
 *  case of `ClassTag.apply` with a literal `classOf`.
 */
private def shouldInlineBecauseOfArgs(target: MethodID,
    receiverAndArgs: List[PreTransform]): Boolean = {
  def isTypeLikelyOptimizable(tpe: RefinedType): Boolean = tpe.base match {
    case ClassType(className) =>
      ClassNamesThatShouldBeInlined.contains(className)
    case _ =>
      false
  }

  def isLocalOnlyInlineType(tpe: RefinedType): Boolean = {
    /* RuntimeLong is @inline so that *local* box/unbox pairs and instances
     * can be eliminated. But we don't want that to force inlining of a
     * method only because we pass it an instance of RuntimeLong.
     */
    tpe.base match {
      case ClassType(LongImpl.RuntimeLongClass) => true
      case _                                    => false
    }
  }

  def isLikelyOptimizable(arg: PreTransform): Boolean = arg match {
    case PreTransBlock(_, result) =>
      isLikelyOptimizable(result)

    case PreTransLocalDef(localDef) =>
      // Values whose replacement carries structure the inliner can exploit
      (localDef.replacement match {
        case TentativeClosureReplacement(_, _, _, _, _, _) => true
        case ReplaceWithRecordVarRef(_, _, _, _)           => true
        case InlineClassBeingConstructedReplacement(_, _, _) => true
        case InlineClassInstanceReplacement(_, _, _)       => true
        case _ =>
          isTypeLikelyOptimizable(localDef.tpe)
      }) && !isLocalOnlyInlineType(localDef.tpe)

    case PreTransRecordTree(_, _, _) =>
      !isLocalOnlyInlineType(arg.tpe)

    case _ =>
      isTypeLikelyOptimizable(arg.tpe)
  }

  receiverAndArgs.exists(isLikelyOptimizable) || {
    // Special case: ClassTag.apply(classOf[X]) folds to a constant ClassTag
    target.is(ClassTagModuleClass, ClassTagApplyMethodName) &&
    (receiverAndArgs.tail.head match {
      case PreTransTree(ClassOf(_), _) => true
      case _                           => false
    })
  }
}
/** Inlines a call to `target`, which must be inlineable.
 *
 *  A few trivial body shapes (`Skip`, a literal, bare `this`, a simple
 *  field getter, a simple field setter) are expanded directly, preserving
 *  the side effects of the receiver and arguments. Any other body goes
 *  through the general `inlineBody`, with the target recorded in the scope
 *  so recursive inlining is detected.
 *
 *  Fix: corrected the assertion message typo "conrete" -> "concrete".
 *
 *  @param allocationSites allocation sites of receiver and arguments, used
 *         as part of the inlining-cycle key
 *  @param optReceiver declared type and pretransformed value of the
 *         receiver; present iff the method is non-static
 */
private def inline(allocationSites: List[AllocationSite],
    optReceiver: Option[(Type, PreTransform)],
    args: List[PreTransform], target: MethodID, isStat: Boolean,
    usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = {
  require(target.inlineable)

  attemptedInlining += target

  val MethodDef(flags, _, _, formals, resultType, optBody) = getMethodBody(target)
  assert(flags.namespace.isStatic == optReceiver.isEmpty,
      "There must be receiver if and only if the method is not static")
  val body = optBody.getOrElse {
    throw new AssertionError("A method to inline must be concrete")
  }

  // Evaluates the receiver and all arguments for their side effects only
  def finishTransformArgsAsStat(): Tree = {
    val newOptReceiver =
      optReceiver.fold[Tree](Skip())(r => finishTransformStat(r._2))
    val newArgs = args.map(finishTransformStat(_))
    Block(newOptReceiver :: newArgs)
  }

  body match {
    case Skip() =>
      // Empty body: only the receiver/argument effects remain
      assert(isStat, "Found Skip() in expression position")
      cont(PreTransTree(
          finishTransformArgsAsStat(),
          RefinedType.NoRefinedType))

    case _: Literal =>
      // Constant body: effects of receiver/args, then the constant
      cont(PreTransTree(
          Block(finishTransformArgsAsStat(), body),
          RefinedType(body.tpe)))

    case This() if args.isEmpty =>
      // Identity method: the call is just the receiver
      assert(optReceiver.isDefined,
          "There was a This(), there should be a receiver")
      cont(optReceiver.get._2)

    case Select(This(), className, field) if formals.isEmpty =>
      // Simple field getter
      assert(optReceiver.isDefined,
          "There was a This(), there should be a receiver")
      pretransformSelectCommon(body.tpe, optReceiver.get._2, className, field,
          isLhsOfAssign = false)(cont)

    case Assign(lhs @ Select(This(), className, field), VarRef(LocalIdent(rhsName)))
        if formals.size == 1 && formals.head.name.name == rhsName =>
      // Simple field setter: assigns its unique parameter to a field
      assert(isStat, "Found Assign in expression position")
      assert(optReceiver.isDefined,
          "There was a This(), there should be a receiver")
      pretransformSelectCommon(lhs.tpe, optReceiver.get._2, className, field,
          isLhsOfAssign = true) { preTransLhs =>
        // TODO Support assignment of record
        cont(PreTransTree(
            Assign(finishTransformExpr(preTransLhs).asInstanceOf[AssignLhs],
                finishTransformExpr(args.head)),
            RefinedType.NoRefinedType))
      }

    case _ =>
      // General case: record this target as being inlined to break cycles
      val targetID = (allocationSites, target)
      inlineBody(optReceiver, formals, resultType, body, args, isStat,
          usePreTransform)(cont)(scope.inlining(targetID), pos)
  }
}
/** Inlines a method body with the given receiver and argument bindings.
 *
 *  Builds one binding per formal (plus an optional `this` binding) and
 *  transforms the body in a fresh environment. When `usePreTransform` is
 *  set, the body is pretransformed (expression position only); otherwise
 *  it is transformed normally.
 */
private def inlineBody(optReceiver: Option[(Type, PreTransform)],
    formals: List[ParamDef], resultType: Type, body: Tree,
    args: List[PreTransform], isStat: Boolean,
    usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = tailcall {
  val optReceiverBinding = optReceiver map { receiver =>
    // `this` is never mutable, hence the literal `false`
    Binding(Binding.This, receiver._1, false, receiver._2)
  }

  assert(formals.size == args.size,
      "argument count mismatch: " +
      s"inlineBody was called with formals $formals but args $args")

  val argsBindings = for {
    (ParamDef(nameIdent, originalName, tpe, mutable), arg) <- formals zip args
  } yield {
    Binding(nameIdent, originalName, tpe, mutable, arg)
  }

  withBindings(optReceiverBinding ++: argsBindings) { (bodyScope, cont1) =>
    implicit val scope = bodyScope
    if (usePreTransform) {
      assert(!isStat, "Cannot use pretransform in statement position")
      pretransformExpr(body)(cont1)
    } else {
      cont1(PreTransTree(transform(body, isStat)))
    }
  } (cont) (scope.withEnv(OptEnv.Empty))
}
/** Expands a call recognized as an intrinsic (`code` from [[Intrinsics]]).
 *
 *  Each intrinsic either folds to a more primitive IR construct (array
 *  selects, Transient nodes, constants) or redirects to an equivalent
 *  method call. When the preconditions for an intrinsic expansion do not
 *  hold (e.g. unknown array type), `defaultApply` re-emits the original
 *  call unchanged.
 */
private def callIntrinsic(code: Int, flags: ApplyFlags,
    optTReceiver: Option[PreTransform], methodName: MethodName,
    targs: List[PreTransform], isStat: Boolean, usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = {
  import Intrinsics._

  // lazy: receiver/args are only finished by intrinsics that need Trees
  // (finishing must happen at most once per pretransform)
  lazy val newReceiver = finishTransformExpr(optTReceiver.get)
  lazy val newArgs = targs.map(finishTransformExpr)

  @inline def contTree(result: Tree) = cont(result.toPreTransform)

  @inline def StringClassType = ClassType(BoxedStringClass)

  // Fallback: emit the original (non-intrinsic) call
  def defaultApply(resultType: Type): TailRec[Tree] =
    contTree(Apply(flags, newReceiver, MethodIdent(methodName), newArgs)(resultType))

  // Best-effort element type for a 1-dimensional array; AnyType otherwise
  def cursoryArrayElemType(tpe: ArrayType): Type = {
    if (tpe.arrayTypeRef.dimensions != 1) AnyType
    else (tpe.arrayTypeRef.base match {
      case PrimRef(elemType) => elemType
      case ClassRef(_)       => AnyType
    })
  }

  (code: @switch) match {
    // java.lang.System

    case ArrayCopy =>
      assert(isStat, "System.arraycopy must be used in statement position")
      val List(src, srcPos, dest, destPos, length) = newArgs
      contTree(Transient(SystemArrayCopy(src, srcPos, dest, destPos, length)))

    // scala.runtime.ScalaRunTime object

    case ArrayApply =>
      val List(array, index) = newArgs
      array.tpe match {
        case arrayTpe @ ArrayType(ArrayTypeRef(base, _)) =>
          val elemType = cursoryArrayElemType(arrayTpe)
          contTree(ArraySelect(array, index)(elemType))

        case _ =>
          defaultApply(AnyType)
      }

    case ArrayUpdate =>
      val List(tarray, tindex, tvalue) = targs
      tarray.tpe.base match {
        case arrayTpe @ ArrayType(ArrayTypeRef(base, depth)) =>
          val array = finishTransformExpr(tarray)
          val index = finishTransformExpr(tindex)
          val elemType = cursoryArrayElemType(arrayTpe)
          val select = ArraySelect(array, index)(elemType)
          // The stored value must first be cast to the element type
          foldAsInstanceOf(tvalue, elemType) { tunboxedValue =>
            contTree(Assign(select, finishTransformExpr(tunboxedValue)))
          }

        case _ =>
          defaultApply(AnyType)
      }

    case ArrayLength =>
      targs.head.tpe.base match {
        case _: ArrayType =>
          contTree(Trees.ArrayLength(newArgs.head))
        case _ =>
          defaultApply(IntType)
      }

    // java.lang.Integer

    case IntegerNLZ =>
      contTree(newArgs.head match {
        // Constant-fold when the argument is a literal
        case IntLiteral(value) => IntLiteral(Integer.numberOfLeadingZeros(value))
        case newArg            => Transient(NumberOfLeadingZeroes(newArg))
      })

    // java.lang.Long
    // These redirect to the corresponding RuntimeLong methods

    case LongToString =>
      pretransformApply(ApplyFlags.empty, targs.head,
          MethodIdent(LongImpl.toString_), Nil, StringClassType,
          isStat, usePreTransform)(
          cont)

    case LongCompare =>
      pretransformApply(ApplyFlags.empty, targs.head,
          MethodIdent(LongImpl.compareToRTLong), targs.tail, IntType,
          isStat, usePreTransform)(
          cont)

    case LongDivideUnsigned =>
      pretransformApply(ApplyFlags.empty, targs.head,
          MethodIdent(LongImpl.divideUnsigned), targs.tail,
          ClassType(LongImpl.RuntimeLongClass), isStat, usePreTransform)(
          cont)

    case LongRemainderUnsigned =>
      pretransformApply(ApplyFlags.empty, targs.head,
          MethodIdent(LongImpl.remainderUnsigned), targs.tail,
          ClassType(LongImpl.RuntimeLongClass), isStat, usePreTransform)(
          cont)

    // scala.collection.mutable.ArrayBuilder

    case GenericArrayBuilderResult =>
      val List(runtimeClass, array) = newArgs
      // A literal classOf gives us an exact array result type
      val (resultType, isExact) = runtimeClass match {
        case ClassOf(elemTypeRef) => (ArrayType(ArrayTypeRef.of(elemTypeRef)), true)
        case _                    => (AnyType, false)
      }
      cont(PreTransTree(
          Transient(NativeArrayWrapper(runtimeClass, array)(resultType)),
          RefinedType(resultType, isExact = isExact, isNullable = false)))

    case ArrayBuilderZeroOf =>
      contTree(finishTransformExpr(targs.head) match {
        case ClassOf(PrimRef(tpe)) =>
          /* Note that for CharType we produce a literal int instead of char.
           * This ensures that we fill up the JS array with numbers 0 rather
           * than boxed '\\0'. We need to do this because the result() method
           * (see intrinsic right above) will directly feed that JS array to
           * `makeNativeArrayWrapper`, which expects an array of numbers when
           * building an `Array[Char]`.
           */
          tpe match {
            case CharType => IntLiteral(0)
            case NoType   => Undefined()
            case _        => zeroOf(tpe)
          }
        case ClassOf(_) =>
          // Reference types zero out to null
          Null()
        case runtimeClass =>
          Transient(ZeroOf(runtimeClass))
      })

    // java.lang.Class

    case ClassGetComponentType =>
      newReceiver match {
        case ClassOf(ArrayTypeRef(base, depth)) =>
          // Peel one array dimension off the type ref
          contTree(ClassOf(
              if (depth == 1) base
              else ArrayTypeRef(base, depth - 1)))
        case ClassOf(ClassRef(_)) =>
          // Non-array classes have no component type
          contTree(Null())
        case receiver =>
          defaultApply(ClassType(ClassClass))
      }

    case ClassGetName =>
      newReceiver match {
        case BlockOrAlone(stats, ClassOf(typeRef)) =>
          def mappedClassName(className: ClassName): String = {
            RuntimeClassNameMapperImpl.map(
                config.coreSpec.semantics.runtimeClassNameMapper,
                className.nameString)
          }

          // Build the runtime class name string at compile time
          val nameString = typeRef match {
            case primRef: PrimRef =>
              primRef.displayName
            case ClassRef(className) =>
              mappedClassName(className)
            case ArrayTypeRef(primRef: PrimRef, dimensions) =>
              "[" * dimensions + primRef.charCode
            case ArrayTypeRef(ClassRef(className), dimensions) =>
              "[" * dimensions + "L" + mappedClassName(className) + ";"
          }
          contTree(Block(stats, StringLiteral(nameString)))

        case BlockOrAlone(stats, GetClass(expr)) =>
          // x.getClass().getName() folds to the object's class name
          contTree(Block(stats,
              Transient(ObjectClassName(expr))))

        case _ =>
          defaultApply(StringClassType)
      }

    // java.lang.reflect.Array

    case ArrayNewInstance =>
      newArgs.head match {
        case ClassOf(elementTypeRef) =>
          val arrayTypeRef = ArrayTypeRef.of(elementTypeRef)
          contTree(NewArray(arrayTypeRef, List(newArgs.tail.head)))
        case _ =>
          defaultApply(AnyType)
      }

    // js.special

    case ObjectLiteral =>
      def default =
        defaultApply(AnyType)

      val List(tprops) = targs
      tprops match {
        // A statically known wrapped array of tuples becomes a literal
        // JS object constructor
        case PreTransMaybeBlock(bindingsAndStats,
            PreTransLocalDef(LocalDef(
                RefinedType(ClassType(JSWrappedArrayClass), _, _),
                false,
                InlineClassInstanceReplacement(_, wrappedArrayFields, _)))) =>
          assert(wrappedArrayFields.size == 1)
          val jsArray = wrappedArrayFields.head._2
          jsArray.replacement match {
            case InlineJSArrayReplacement(elemLocalDefs, _)
                if elemLocalDefs.forall(e => isSubtype(e.tpe.base, ClassType(Tuple2Class))) =>
              val fields: List[(Tree, Tree)] = for {
                (elemLocalDef, idx) <- elemLocalDefs.toList.zipWithIndex
              } yield {
                elemLocalDef match {
                  case LocalDef(RefinedType(ClassType(Tuple2Class), _, _), false,
                      InlineClassInstanceReplacement(structure, tupleFields, _)) =>
                    // Inlined Tuple2: read _1/_2 directly from its fields
                    val List(key, value) = structure.fieldIDs.map(tupleFields)
                    (key.newReplacement, value.newReplacement)

                  case _ =>
                    // Opaque tuple: call _1/_2 accessors
                    val flags = ApplyFlags.empty
                    val key = Apply(flags, elemLocalDef.newReplacement,
                        MethodIdent(TupleFirstMethodName), Nil)(AnyType)
                    val value = Apply(flags, elemLocalDef.newReplacement,
                        MethodIdent(TupleSecondMethodName), Nil)(AnyType)
                    (key, value)
                }
              }
              val resultTree = JSObjectConstr(fields)
              contTree(Block(finishTransformStat(optTReceiver.get),
                  finishTransformBindings(bindingsAndStats, resultTree)))

            case _ =>
              default
          }

        case _ =>
          tprops.tpe match {
            // An empty (Nil) property list gives an empty object literal
            case RefinedType(ClassType(NilClass), _, false) =>
              contTree(Block(finishTransformStat(tprops), JSObjectConstr(Nil)))
            case _ =>
              default
          }
      }

    // TypedArray conversions

    case ByteArrayToInt8Array =>
      contTree(Transient(ArrayToTypedArray(newArgs.head, ByteRef)))
    case ShortArrayToInt16Array =>
      contTree(Transient(ArrayToTypedArray(newArgs.head, ShortRef)))
    case CharArrayToUint16Array =>
      contTree(Transient(ArrayToTypedArray(newArgs.head, CharRef)))
    case IntArrayToInt32Array =>
      contTree(Transient(ArrayToTypedArray(newArgs.head, IntRef)))
    case FloatArrayToFloat32Array =>
      contTree(Transient(ArrayToTypedArray(newArgs.head, FloatRef)))
    case DoubleArrayToFloat64Array =>
      contTree(Transient(ArrayToTypedArray(newArgs.head, DoubleRef)))

    case Int8ArrayToByteArray =>
      contTree(Transient(TypedArrayToArray(newArgs.head, ByteRef)))
    case Int16ArrayToShortArray =>
      contTree(Transient(TypedArrayToArray(newArgs.head, ShortRef)))
    case Uint16ArrayToCharArray =>
      contTree(Transient(TypedArrayToArray(newArgs.head, CharRef)))
    case Int32ArrayToIntArray =>
      contTree(Transient(TypedArrayToArray(newArgs.head, IntRef)))
    case Float32ArrayToFloatArray =>
      contTree(Transient(TypedArrayToArray(newArgs.head, FloatRef)))
    case Float64ArrayToDoubleArray =>
      contTree(Transient(TypedArrayToArray(newArgs.head, DoubleRef)))
  }
}
  /** Inlines the allocation of an inlineable class together with the given
   *  constructor call.
   *
   *  The fields start out bound to the zero value of their respective types,
   *  as they are right after allocation; the constructor body is then
   *  symbolically executed by `inlineClassConstructorBody`. On success, `cont`
   *  receives an `InlineClassInstanceReplacement` local def standing for the
   *  allocated instance, with a refined type that is exact and non-nullable,
   *  keyed by `allocationSite`.
   *
   *  `cancelFun` aborts the whole inlining attempt.
   */
  private def inlineClassConstructor(allocationSite: AllocationSite,
      className: ClassName, structure: InlineableClassStructure,
      ctor: MethodIdent, args: List[PreTransform], cancelFun: CancelFun)(
      cont: PreTransCont)(
      implicit scope: Scope, pos: Position): TailRec[Tree] = {
    // One binding per record field, initialized to the zero of its type.
    val initialFieldBindings = for {
      RecordType.Field(name, originalName, tpe, mutable) <- structure.recordType.fields
    } yield {
      Binding(Binding.Local(name.toLocalName, originalName), tpe, mutable,
          PreTransTree(zeroOf(tpe)))
    }
    withNewLocalDefs(initialFieldBindings) { (initialFieldLocalDefList, cont1) =>
      val initialFieldLocalDefs =
        structure.fieldIDs.zip(initialFieldLocalDefList).toMap
      inlineClassConstructorBody(allocationSite, structure, initialFieldLocalDefs,
          className, className, ctor, args, cancelFun) { (finalFieldLocalDefs, cont2) =>
        // The constructor completed: materialize the instance replacement
        // carrying the final state of all the fields.
        cont2(LocalDef(
            RefinedType(ClassType(className), isExact = true,
                isNullable = false, allocationSite = allocationSite),
            mutable = false,
            InlineClassInstanceReplacement(structure, finalFieldLocalDefs,
                cancelFun)).toPreTransform)
      } (cont1)
    } (cont)
  }
  /** Symbolically executes one constructor body during class-constructor
   *  inlining.
   *
   *  Resolves `ctor` in `ctorClass`, binds its formal parameters to the
   *  pre-transformed `args`, installs an
   *  `InlineClassBeingConstructedReplacement` as the `this` of the body, and
   *  delegates statement-by-statement processing to
   *  `inlineClassConstructorBodyList`.
   *
   *  `buildInner` receives the map of field local defs as it stands after the
   *  body has been fully processed.
   */
  private def inlineClassConstructorBody(
      allocationSite: AllocationSite, structure: InlineableClassStructure,
      inputFieldsLocalDefs: Map[FieldID, LocalDef], className: ClassName,
      ctorClass: ClassName, ctor: MethodIdent, args: List[PreTransform],
      cancelFun: CancelFun)(
      buildInner: (Map[FieldID, LocalDef], PreTransCont) => TailRec[Tree])(
      cont: PreTransCont)(
      implicit scope: Scope): TailRec[Tree] = tailcall {
    val target = staticCall(ctorClass, MemberNamespace.Constructor, ctor.name)
    val targetID = (allocationSite :: args.map(_.tpe.allocationSite), target)
    // Recursion guard: do not inline a constructor that is already being
    // inlined for the same chain of allocation sites.
    if (scope.implsBeingInlined.contains(targetID))
      cancelFun()
    val targetMethodDef = getMethodBody(target)
    val formals = targetMethodDef.args
    // Normalize the body into a flat list of statements.
    val stats = targetMethodDef.body.get match {
      case Block(stats) => stats
      case singleStat => List(singleStat)
    }
    // Bind each formal parameter to the corresponding (pre-transformed) arg.
    val argsBindings = for {
      (ParamDef(nameIdent, originalName, tpe, mutable), arg) <- formals zip args
    } yield {
      Binding(nameIdent, originalName, tpe, mutable, arg)
    }
    withBindings(argsBindings) { (bodyScope, cont1) =>
      // `this` inside the body refers to the object under construction,
      // represented by the current state of the field local defs.
      val thisLocalDef = LocalDef(
          RefinedType(ClassType(className), isExact = true, isNullable = false),
          false,
          InlineClassBeingConstructedReplacement(structure, inputFieldsLocalDefs, cancelFun))
      val statsScope = bodyScope.inlining(targetID).withEnv(
          bodyScope.env.withThisLocalDef(thisLocalDef))
      inlineClassConstructorBodyList(allocationSite, structure, thisLocalDef,
          inputFieldsLocalDefs, className, stats, cancelFun)(
          buildInner)(cont1)(statsScope)
    } (cont) (scope.withEnv(OptEnv.Empty))
  }
  /** Processes the statements of a constructor body one by one during
   *  class-constructor inlining, threading the map of field local defs
   *  through the recursion.
   *
   *  `buildInner` is invoked with the final field state once all statements
   *  have been consumed. `cancelFun` aborts the attempt, notably when a
   *  captured field value would leak the under-construction `this`.
   */
  private def inlineClassConstructorBodyList(
      allocationSite: AllocationSite, structure: InlineableClassStructure,
      thisLocalDef: LocalDef, inputFieldsLocalDefs: Map[FieldID, LocalDef],
      className: ClassName, stats: List[Tree], cancelFun: CancelFun)(
      buildInner: (Map[FieldID, LocalDef], PreTransCont) => TailRec[Tree])(
      cont: PreTransCont)(
      implicit scope: Scope): TailRec[Tree] = {
    stats match {
      // A bare `this` statement has no effect; drop it.
      case This() :: rest =>
        inlineClassConstructorBodyList(allocationSite, structure, thisLocalDef,
            inputFieldsLocalDefs, className, rest, cancelFun)(buildInner)(cont)
      // `this.field = value` on an immutable field: capture the value in a
      // fresh local def and continue with an updated field map and a new
      // `this` replacement that reflects it.
      case Assign(s @ Select(ths: This, className, field), value) :: rest
          if !inputFieldsLocalDefs(FieldID(className, field)).mutable =>
        pretransformExpr(value) { tvalue =>
          val fieldID = FieldID(className, field)
          val originalName = structure.fieldOriginalName(fieldID)
          val binding = Binding(
              Binding.Local(field.name.toLocalName, originalName),
              s.tpe, false, tvalue)
          withNewLocalDef(binding) { (localDef, cont1) =>
            if (localDef.contains(thisLocalDef)) {
              /* Uh oh, there is a `val x = ...this...`. We can't keep it,
               * because this field will not be updated with `newThisLocalDef`.
               */
              cancelFun()
            }
            val newFieldsLocalDefs =
              inputFieldsLocalDefs.updated(fieldID, localDef)
            val newThisLocalDef = LocalDef(thisLocalDef.tpe, false,
                InlineClassBeingConstructedReplacement(structure, newFieldsLocalDefs, cancelFun))
            val restScope =
              scope.withEnv(scope.env.withThisLocalDef(newThisLocalDef))
            inlineClassConstructorBodyList(allocationSite, structure,
                newThisLocalDef, newFieldsLocalDefs, className, rest, cancelFun)(
                buildInner)(cont1)(restScope)
          } (cont)
        }
      /* if (cond)
       *   throw e
       * else
       *   this.outer = value
       *
       * becomes
       *
       * this.outer =
       *   if (cond) throw e
       *   else value
       *
       * Typical shape of initialization of outer pointer of inner classes.
       */
      case If(cond, th: Throw, Assign(Select(This(), _, _), value)) :: rest =>
        // work around a bug of the compiler (these should be @-bindings)
        val stat = stats.head.asInstanceOf[If]
        val ass = stat.elsep.asInstanceOf[Assign]
        val lhs = ass.lhs
        // Rewrite and retry: the rewritten Assign hits the case above.
        inlineClassConstructorBodyList(allocationSite, structure, thisLocalDef,
            inputFieldsLocalDefs, className,
            Assign(lhs, If(cond, th, value)(lhs.tpe)(stat.pos))(ass.pos) :: rest,
            cancelFun)(buildInner)(cont)
      // A super/delegate constructor call: symbolically execute it too, then
      // continue with the field state it produced.
      case ApplyStatically(flags, ths: This, superClass, superCtor, args) :: rest
          if flags.isConstructor =>
        pretransformExprs(args) { targs =>
          inlineClassConstructorBody(allocationSite, structure,
              inputFieldsLocalDefs, className, superClass, superCtor, targs,
              cancelFun) { (outputFieldsLocalDefs, cont1) =>
            val newThisLocalDef = LocalDef(thisLocalDef.tpe, false,
                InlineClassBeingConstructedReplacement(structure, outputFieldsLocalDefs, cancelFun))
            val restScope =
              scope.withEnv(scope.env.withThisLocalDef(newThisLocalDef))
            inlineClassConstructorBodyList(allocationSite, structure,
                newThisLocalDef, outputFieldsLocalDefs,
                className, rest, cancelFun)(buildInner)(cont1)(restScope)
          } (cont)
        }
      // Local variable definition: extend the environment for the rest.
      case VarDef(nameIdent, originalName, tpe, mutable, rhs) :: rest =>
        pretransformExpr(rhs) { trhs =>
          withBinding(Binding(nameIdent, originalName, tpe, mutable, trhs)) { (restScope, cont1) =>
            inlineClassConstructorBodyList(allocationSite, structure,
                thisLocalDef, inputFieldsLocalDefs,
                className, rest, cancelFun)(buildInner)(cont1)(restScope)
          } (cont)
        }
      // Any other statement: transform it normally and keep it in the output,
      // unless it is a no-op (Skip) or provably diverges (NothingType).
      case stat :: rest =>
        val transformedStat = transformStat(stat)
        transformedStat match {
          case Skip() =>
            inlineClassConstructorBodyList(allocationSite, structure,
                thisLocalDef, inputFieldsLocalDefs,
                className, rest, cancelFun)(buildInner)(cont)
          case _ =>
            if (transformedStat.tpe == NothingType)
              cont(PreTransTree(transformedStat, RefinedType.Nothing))
            else {
              inlineClassConstructorBodyList(allocationSite, structure,
                  thisLocalDef, inputFieldsLocalDefs,
                  className, rest, cancelFun)(buildInner) { tinner =>
                cont(PreTransBlock(transformedStat, tinner))
              }
            }
        }
      // End of body: hand the final field state to the caller.
      case Nil =>
        buildInner(inputFieldsLocalDefs, cont)
    }
  }
  /** Constant-folds and simplifies an `If` tree.
   *
   *  Handles a literal condition, boolean-typed branches (canonicalizing
   *  `&&`/`||` shapes, merging inlined reference-equality tests, and fusing
   *  pairs of int comparisons into a 3-way comparison), and statement-typed
   *  branches where one side is a `Skip()`.
   */
  private def foldIf(cond: Tree, thenp: Tree, elsep: Tree)(tpe: Type)(
      implicit pos: Position): Tree = {
    import BinaryOp._
    @inline def default = If(cond, thenp, elsep)(tpe)
    cond match {
      // Literal condition: keep only the taken branch.
      case BooleanLiteral(v) =>
        if (v) thenp
        else elsep
      case _ =>
        // The negation of `cond`, folded (used by several rewrites below).
        @inline def negCond =
          finishTransformExpr(foldUnaryOp(UnaryOp.Boolean_!, cond.toPreTransform))
        if (thenp.tpe == BooleanType && elsep.tpe == BooleanType) {
          (cond, thenp, elsep) match {
            // Both branches literal: the result depends only on `cond` (or not at all).
            case (_, BooleanLiteral(t), BooleanLiteral(e)) =>
              if (t == e) Block(keepOnlySideEffects(cond), thenp)
              else if (t) cond
              else negCond
            case (_, BooleanLiteral(false), _) =>
              foldIf(negCond, elsep, BooleanLiteral(false))(tpe) // canonical && form
            case (_, _, BooleanLiteral(true)) =>
              foldIf(negCond, BooleanLiteral(true), thenp)(tpe) // canonical || form
            /* if (lhs === null) rhs === null else lhs === rhs
             * -> lhs === rhs
             * This is the typical shape of a lhs == rhs test where
             * the equals() method has been inlined as a reference
             * equality test.
             */
            case (JSBinaryOp(JSBinaryOp.===, VarRef(lhsIdent), Null()),
                JSBinaryOp(JSBinaryOp.===, VarRef(rhsIdent), Null()),
                JSBinaryOp(JSBinaryOp.===, VarRef(lhsIdent2), VarRef(rhsIdent2)))
                if lhsIdent2 == lhsIdent && rhsIdent2 == rhsIdent =>
              elsep
            // Example: (x > y) || (x == y)  ->  (x >= y)
            case (BinaryOp(op1 @ (Int_== | Int_!= | Int_< | Int_<= | Int_> | Int_>=), l1, r1),
                BooleanLiteral(true),
                BinaryOp(op2 @ (Int_== | Int_!= | Int_< | Int_<= | Int_> | Int_>=), l2, r2))
                if ((l1.isInstanceOf[Literal] || l1.isInstanceOf[VarRef]) &&
                    (r1.isInstanceOf[Literal] || r1.isInstanceOf[VarRef]) &&
                    (l1 == l2 && r1 == r2)) =>
              // Union of the outcomes allowed by either comparison.
              val canBeEqual =
                ((op1 == Int_==) || (op1 == Int_<=) || (op1 == Int_>=)) ||
                ((op2 == Int_==) || (op2 == Int_<=) || (op2 == Int_>=))
              val canBeLessThan =
                ((op1 == Int_!=) || (op1 == Int_<) || (op1 == Int_<=)) ||
                ((op2 == Int_!=) || (op2 == Int_<) || (op2 == Int_<=))
              val canBeGreaterThan =
                ((op1 == Int_!=) || (op1 == Int_>) || (op1 == Int_>=)) ||
                ((op2 == Int_!=) || (op2 == Int_>) || (op2 == Int_>=))
              finishTransformExpr(
                  fold3WayIntComparison(canBeEqual, canBeLessThan,
                      canBeGreaterThan, l1.toPreTransform, r1.toPreTransform))
            // Example: (x >= y) && (x <= y)  ->  (x == y)
            case (BinaryOp(op1 @ (Int_== | Int_!= | Int_< | Int_<= | Int_> | Int_>=), l1, r1),
                BinaryOp(op2 @ (Int_== | Int_!= | Int_< | Int_<= | Int_> | Int_>=), l2, r2),
                BooleanLiteral(false))
                if ((l1.isInstanceOf[Literal] || l1.isInstanceOf[VarRef]) &&
                    (r1.isInstanceOf[Literal] || r1.isInstanceOf[VarRef]) &&
                    (l1 == l2 && r1 == r2)) =>
              // Intersection of the outcomes allowed by both comparisons.
              val canBeEqual =
                ((op1 == Int_==) || (op1 == Int_<=) || (op1 == Int_>=)) &&
                ((op2 == Int_==) || (op2 == Int_<=) || (op2 == Int_>=))
              val canBeLessThan =
                ((op1 == Int_!=) || (op1 == Int_<) || (op1 == Int_<=)) &&
                ((op2 == Int_!=) || (op2 == Int_<) || (op2 == Int_<=))
              val canBeGreaterThan =
                ((op1 == Int_!=) || (op1 == Int_>) || (op1 == Int_>=)) &&
                ((op2 == Int_!=) || (op2 == Int_>) || (op2 == Int_>=))
              finishTransformExpr(
                  fold3WayIntComparison(canBeEqual, canBeLessThan,
                      canBeGreaterThan, l1.toPreTransform, r1.toPreTransform))
            case _ => default
          }
        } else {
          (thenp, elsep) match {
            case (Skip(), Skip()) => keepOnlySideEffects(cond)
            case (Skip(), _)      => foldIf(negCond, elsep, thenp)(tpe)
            case _                => default
          }
        }
    }
  }
private def pretransformUnaryOp(tree: UnaryOp)(cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
implicit val pos = tree.pos
val UnaryOp(op, arg) = tree
pretransformExpr(arg) { tlhs =>
expandLongOps(foldUnaryOp(op, tlhs))(cont)
}
}
private def pretransformBinaryOp(tree: BinaryOp)(cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
implicit val pos = tree.pos
val BinaryOp(op, lhs, rhs) = tree
pretransformExprs(lhs, rhs) { (tlhs, trhs) =>
expandLongOps(foldBinaryOp(op, tlhs, trhs))(cont)
}
}
private def expandLongValue(value: PreTransform)(cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
assert(useRuntimeLong)
/* To force the expansion, we first store the `value` in a temporary
* variable of type `RuntimeLong` (not `Long`, otherwise we would go into
* infinite recursion), then we create a `new RuntimeLong` with its lo and
* hi part. Basically, we're doing:
*
* val t: RuntimeLong = value
* new RuntimeLong(t.lo__I(), t.hi__I())
*/
val tName = LocalName("t")
val rtLongClassType = ClassType(LongImpl.RuntimeLongClass)
val rtLongBinding = Binding.temp(tName, rtLongClassType, mutable = false,
value)
withBinding(rtLongBinding) { (scope1, cont1) =>
implicit val scope = scope1
val tRef = VarRef(LocalIdent(tName))(rtLongClassType)
val newTree = New(LongImpl.RuntimeLongClass,
MethodIdent(LongImpl.initFromParts),
List(Apply(ApplyFlags.empty, tRef, MethodIdent(LongImpl.lo), Nil)(IntType),
Apply(ApplyFlags.empty, tRef, MethodIdent(LongImpl.hi), Nil)(IntType)))
pretransformExpr(newTree)(cont1)
} (cont)
}
  /** Expands primitive `Long` operations into calls to the user-space
   *  `RuntimeLong` implementation, when `useRuntimeLong` is true.
   *
   *  Pre-transforms that are not Long unary/binary ops (or when
   *  `useRuntimeLong` is false) are passed to `cont` unchanged.
   */
  private def expandLongOps(pretrans: PreTransform)(cont: PreTransCont)(
      implicit scope: Scope): TailRec[Tree] = {
    implicit val pos = pretrans.pos
    def rtLongClassType = ClassType(LongImpl.RuntimeLongClass)
    // Call a static-module method RuntimeLong$.<methodName>(arg).
    def expandLongModuleOp(methodName: MethodName,
        arg: PreTransform): TailRec[Tree] = {
      val receiver = LoadModule(LongImpl.RuntimeLongModuleClass).toPreTransform
      pretransformApply(ApplyFlags.empty, receiver, MethodIdent(methodName),
          arg :: Nil, rtLongClassType, isStat = false,
          usePreTransform = true)(
          cont)
    }
    // Call arg.<methodName>() on the RuntimeLong instance.
    def expandUnaryOp(methodName: MethodName, arg: PreTransform,
        resultType: Type = rtLongClassType): TailRec[Tree] = {
      pretransformApply(ApplyFlags.empty, arg, MethodIdent(methodName), Nil,
          resultType, isStat = false, usePreTransform = true)(
          cont)
    }
    // Call lhs.<methodName>(rhs) on the RuntimeLong instance.
    def expandBinaryOp(methodName: MethodName, lhs: PreTransform,
        rhs: PreTransform,
        resultType: Type = rtLongClassType): TailRec[Tree] = {
      pretransformApply(ApplyFlags.empty, lhs, MethodIdent(methodName), rhs :: Nil,
          resultType, isStat = false, usePreTransform = true)(
          cont)
    }
    pretrans match {
      case PreTransUnaryOp(op, arg) if useRuntimeLong =>
        import UnaryOp._
        (op: @switch) match {
          case IntToLong =>
            expandLongModuleOp(LongImpl.fromInt, arg)
          case LongToInt =>
            expandUnaryOp(LongImpl.toInt, arg, IntType)
          case LongToDouble =>
            expandUnaryOp(LongImpl.toDouble, arg, DoubleType)
          case DoubleToLong =>
            expandLongModuleOp(LongImpl.fromDouble, arg)
          case LongToFloat =>
            expandUnaryOp(LongImpl.toFloat, arg, FloatType)
          case _ =>
            cont(pretrans)
        }
      case PreTransBinaryOp(op, lhs, rhs) if useRuntimeLong =>
        import BinaryOp._
        (op: @switch) match {
          case Long_+ => expandBinaryOp(LongImpl.+, lhs, rhs)
          case Long_- =>
            // `0L - x` is expanded to the dedicated unary negation method.
            lhs match {
              case PreTransLit(LongLiteral(0L)) =>
                expandUnaryOp(LongImpl.UNARY_-, rhs)
              case _ =>
                expandBinaryOp(LongImpl.-, lhs, rhs)
            }
          case Long_* => expandBinaryOp(LongImpl.*, lhs, rhs)
          case Long_/ => expandBinaryOp(LongImpl./, lhs, rhs)
          case Long_% => expandBinaryOp(LongImpl.%, lhs, rhs)
          case Long_& => expandBinaryOp(LongImpl.&, lhs, rhs)
          case Long_| => expandBinaryOp(LongImpl.|, lhs, rhs)
          case Long_^ => expandBinaryOp(LongImpl.^, lhs, rhs)
          case Long_<< => expandBinaryOp(LongImpl.<<, lhs, rhs)
          case Long_>>> => expandBinaryOp(LongImpl.>>>, lhs, rhs)
          case Long_>> => expandBinaryOp(LongImpl.>>, lhs, rhs)
          case Long_== => expandBinaryOp(LongImpl.===, lhs, rhs)
          case Long_!= => expandBinaryOp(LongImpl.!==, lhs, rhs)
          case Long_< => expandBinaryOp(LongImpl.<, lhs, rhs)
          case Long_<= => expandBinaryOp(LongImpl.<=, lhs, rhs)
          case Long_> => expandBinaryOp(LongImpl.>, lhs, rhs)
          case Long_>= => expandBinaryOp(LongImpl.>=, lhs, rhs)
          case _ =>
            cont(pretrans)
        }
      case _ =>
        cont(pretrans)
    }
  }
  /** Constant-folds and simplifies a unary operation on a pre-transform.
   *
   *  Handles literal folding for all conversions, boolean-negation pushing
   *  into comparison operators, cancellation of widening/narrowing pairs
   *  (e.g. `IntToChar(CharToInt(x)) -> x`), and distribution of `LongToInt`
   *  over Long addition/subtraction. Falls back to the unchanged
   *  `PreTransUnaryOp` when no rule applies.
   */
  private def foldUnaryOp(op: UnaryOp.Code, arg: PreTransform)(
      implicit pos: Position): PreTransform = {
    import UnaryOp._
    @inline def default = PreTransUnaryOp(op, arg)
    (op: @switch) match {
      case Boolean_! =>
        arg match {
          case PreTransLit(BooleanLiteral(v)) =>
            PreTransLit(BooleanLiteral(!v))
          // Double negation elimination.
          case PreTransUnaryOp(Boolean_!, x) => x
          // Push the negation into a comparison by flipping its operator.
          case PreTransBinaryOp(innerOp, l, r) =>
            val newOp = (innerOp: @switch) match {
              case BinaryOp.=== => BinaryOp.!==
              case BinaryOp.!== => BinaryOp.===
              case BinaryOp.Int_== => BinaryOp.Int_!=
              case BinaryOp.Int_!= => BinaryOp.Int_==
              case BinaryOp.Int_< => BinaryOp.Int_>=
              case BinaryOp.Int_<= => BinaryOp.Int_>
              case BinaryOp.Int_> => BinaryOp.Int_<=
              case BinaryOp.Int_>= => BinaryOp.Int_<
              case BinaryOp.Long_== => BinaryOp.Long_!=
              case BinaryOp.Long_!= => BinaryOp.Long_==
              case BinaryOp.Long_< => BinaryOp.Long_>=
              case BinaryOp.Long_<= => BinaryOp.Long_>
              case BinaryOp.Long_> => BinaryOp.Long_<=
              case BinaryOp.Long_>= => BinaryOp.Long_<
              /* NB: no flipping for Double_< etc.; only == and != are safe to
               * negate for floating point, because of NaN.
               */
              case BinaryOp.Double_== => BinaryOp.Double_!=
              case BinaryOp.Double_!= => BinaryOp.Double_==
              case BinaryOp.Boolean_== => BinaryOp.Boolean_!=
              case BinaryOp.Boolean_!= => BinaryOp.Boolean_==
              case _ => -1
            }
            if (newOp == -1) default
            else PreTransBinaryOp(newOp, l, r)
          case PreTransJSBinaryOp(innerOp, l, r) =>
            val newOp = innerOp match {
              case JSBinaryOp.=== => JSBinaryOp.!==
              case JSBinaryOp.!== => JSBinaryOp.===
              case _ => -1
            }
            if (newOp == -1) default
            else PreTransJSBinaryOp(newOp, l, r)
          case _ =>
            default
        }
      // Widening conversions
      case CharToInt =>
        arg match {
          case PreTransLit(CharLiteral(v)) =>
            PreTransLit(IntLiteral(v.toInt))
          case _ =>
            default
        }
      case ByteToInt =>
        arg match {
          case PreTransLit(ByteLiteral(v)) =>
            PreTransLit(IntLiteral(v.toInt))
          case _ =>
            default
        }
      case ShortToInt =>
        arg match {
          case PreTransLit(ShortLiteral(v)) =>
            PreTransLit(IntLiteral(v.toInt))
          case _ =>
            default
        }
      case IntToLong =>
        arg match {
          case PreTransLit(IntLiteral(v)) =>
            PreTransLit(LongLiteral(v.toLong))
          case _ =>
            default
        }
      case IntToDouble =>
        arg match {
          case PreTransLit(IntLiteral(v)) =>
            PreTransLit(DoubleLiteral(v.toDouble))
          case _ =>
            default
        }
      case FloatToDouble =>
        arg match {
          case PreTransLit(FloatLiteral(v)) =>
            PreTransLit(DoubleLiteral(v.toDouble))
          case _ =>
            default
        }
      // Narrowing conversions: a narrowing applied to the matching widening
      // of the same small type is the identity.
      case IntToChar =>
        arg match {
          case PreTransLit(IntLiteral(v)) =>
            PreTransLit(CharLiteral(v.toChar))
          case PreTransUnaryOp(CharToInt, x) =>
            x
          case _ =>
            default
        }
      case IntToByte =>
        arg match {
          case PreTransLit(IntLiteral(v)) =>
            PreTransLit(ByteLiteral(v.toByte))
          case PreTransUnaryOp(ByteToInt, x) =>
            x
          case _ =>
            default
        }
      case IntToShort =>
        arg match {
          case PreTransLit(IntLiteral(v)) =>
            PreTransLit(ShortLiteral(v.toShort))
          case PreTransUnaryOp(ShortToInt, x) =>
            x
          case _ =>
            default
        }
      case LongToInt =>
        arg match {
          case PreTransLit(LongLiteral(v)) =>
            PreTransLit(IntLiteral(v.toInt))
          case PreTransUnaryOp(IntToLong, x) =>
            x
          // LongToInt distributes over Long +/- : fold as Int +/- of the
          // truncated operands (valid in two's complement arithmetic).
          case PreTransBinaryOp(BinaryOp.Long_+, x, y) =>
            foldBinaryOp(BinaryOp.Int_+,
                foldUnaryOp(LongToInt, x),
                foldUnaryOp(LongToInt, y))
          case PreTransBinaryOp(BinaryOp.Long_-, x, y) =>
            foldBinaryOp(BinaryOp.Int_-,
                foldUnaryOp(LongToInt, x),
                foldUnaryOp(LongToInt, y))
          case _ =>
            default
        }
      case DoubleToInt =>
        arg match {
          case PreTransLit(DoubleLiteral(v)) =>
            PreTransLit(IntLiteral(v.toInt))
          case PreTransUnaryOp(IntToDouble, x) =>
            x
          case _ =>
            default
        }
      case DoubleToFloat =>
        arg match {
          case PreTransLit(DoubleLiteral(v)) =>
            PreTransLit(FloatLiteral(v.toFloat))
          case PreTransUnaryOp(FloatToDouble, x) =>
            x
          case _ =>
            default
        }
      // Long <-> Double
      case LongToDouble =>
        arg match {
          case PreTransLit(LongLiteral(v)) =>
            PreTransLit(DoubleLiteral(v.toDouble))
          case PreTransUnaryOp(IntToLong, x) =>
            foldUnaryOp(IntToDouble, x)
          case _ =>
            default
        }
      case DoubleToLong =>
        arg match {
          case PreTransLit(DoubleLiteral(v)) =>
            PreTransLit(LongLiteral(v.toLong))
          case PreTransUnaryOp(IntToDouble, x) =>
            foldUnaryOp(IntToLong, x)
          case _ =>
            default
        }
      // Long -> Float
      case LongToFloat =>
        arg match {
          case PreTransLit(LongLiteral(v)) =>
            PreTransLit(FloatLiteral(v.toFloat))
          case PreTransUnaryOp(IntToLong, x) =>
            foldUnaryOp(DoubleToFloat, foldUnaryOp(IntToDouble, x))
          case _ =>
            default
        }
      case _ =>
        default
    }
  }
/** Performs === for two literals.
* The result is always known statically.
*
* Bytes, Shorts, Ints, Floats and Doubles all live in the same "space" for
* `===` comparison, since they all upcast as primitive numbers. If
* `isJSStrictEq` is false, they are compared with `equals()` instead of
* `==` so that `NaN === NaN` and `+0.0 !== -0.0`.
*
* Chars and Longs, however, never compare as `===`, since they are boxed
* chars and instances of `RuntimeLong`, respectively---unless we are using
* `BigInt`s for `Long`s, in which case those can be `===`.
*/
private def literal_===(lhs: Literal, rhs: Literal,
isJSStrictEq: Boolean): Boolean = {
object AnyNumLiteral {
def unapply(tree: Literal): Option[Double] = tree match {
case ByteLiteral(v) => Some(v.toDouble)
case ShortLiteral(v) => Some(v.toDouble)
case IntLiteral(v) => Some(v.toDouble)
case FloatLiteral(v) => Some(v.toDouble)
case DoubleLiteral(v) => Some(v)
case _ => None
}
}
(lhs, rhs) match {
case (BooleanLiteral(l), BooleanLiteral(r)) => l == r
case (StringLiteral(l), StringLiteral(r)) => l == r
case (ClassOf(l), ClassOf(r)) => l == r
case (AnyNumLiteral(l), AnyNumLiteral(r)) => if (isJSStrictEq) l == r else l.equals(r)
case (LongLiteral(l), LongLiteral(r)) => l == r && !useRuntimeLong
case (Undefined(), Undefined()) => true
case (Null(), Null()) => true
case _ => false
}
}
/** Performs `===` for two matchable literals.
*
* This corresponds to the test used by a `Match` at run-time, to decide
* which case is selected.
*
* The result is always known statically.
*/
private def matchableLiteral_===(lhs: MatchableLiteral,
rhs: MatchableLiteral): Boolean = {
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => l == r
case (StringLiteral(l), StringLiteral(r)) => l == r
case (Null(), Null()) => true
case _ => false
}
}
  /** Constant-folds a binary operation between two literals, following JVM
   *  semantics for the primitive arithmetic.
   *
   *  Must not be called for `String_+` (throws `IllegalArgumentException`),
   *  and callers must guard against `Int_/`, `Int_%`, `Long_/`, `Long_%`
   *  with a zero right-hand side, which would throw `ArithmeticException`
   *  here at optimization time.
   */
  private def constantFoldBinaryOp_except_String_+(op: BinaryOp.Code,
      lhs: Literal, rhs: Literal)(implicit pos: Position): Literal = {
    import BinaryOp._
    // Extractors for the operand payloads. The `@unchecked` is safe because
    // the `op` fully determines the literal kinds of well-typed operands.
    @inline def int(lit: Literal): Int = (lit: @unchecked) match {
      case IntLiteral(value) => value
    }
    @inline def long(lit: Literal): Long = (lit: @unchecked) match {
      case LongLiteral(value) => value
    }
    @inline def float(lit: Literal): Float = (lit: @unchecked) match {
      case FloatLiteral(value) => value
    }
    @inline def double(lit: Literal): Double = (lit: @unchecked) match {
      case DoubleLiteral(value) => value
    }
    @inline def boolean(lit: Literal): Boolean = (lit: @unchecked) match {
      case BooleanLiteral(value) => value
    }
    (op: @switch) match {
      case === => BooleanLiteral(literal_===(lhs, rhs, isJSStrictEq = false))
      case !== => BooleanLiteral(!literal_===(lhs, rhs, isJSStrictEq = false))
      case String_+ =>
        throw new IllegalArgumentException(
            "constFoldBinaryOp_except_String_+ must not be called for String_+")
      case Int_+ => IntLiteral(int(lhs) + int(rhs))
      case Int_- => IntLiteral(int(lhs) - int(rhs))
      case Int_* => IntLiteral(int(lhs) * int(rhs))
      case Int_/ => IntLiteral(int(lhs) / int(rhs))
      case Int_% => IntLiteral(int(lhs) % int(rhs))
      case Int_| => IntLiteral(int(lhs) | int(rhs))
      case Int_& => IntLiteral(int(lhs) & int(rhs))
      case Int_^ => IntLiteral(int(lhs) ^ int(rhs))
      case Int_<< => IntLiteral(int(lhs) << int(rhs))
      case Int_>>> => IntLiteral(int(lhs) >>> int(rhs))
      case Int_>> => IntLiteral(int(lhs) >> int(rhs))
      case Int_== => BooleanLiteral(int(lhs) == int(rhs))
      case Int_!= => BooleanLiteral(int(lhs) != int(rhs))
      case Int_< => BooleanLiteral(int(lhs) < int(rhs))
      case Int_<= => BooleanLiteral(int(lhs) <= int(rhs))
      case Int_> => BooleanLiteral(int(lhs) > int(rhs))
      case Int_>= => BooleanLiteral(int(lhs) >= int(rhs))
      case Long_+ => LongLiteral(long(lhs) + long(rhs))
      case Long_- => LongLiteral(long(lhs) - long(rhs))
      case Long_* => LongLiteral(long(lhs) * long(rhs))
      case Long_/ => LongLiteral(long(lhs) / long(rhs))
      case Long_% => LongLiteral(long(lhs) % long(rhs))
      case Long_| => LongLiteral(long(lhs) | long(rhs))
      case Long_& => LongLiteral(long(lhs) & long(rhs))
      case Long_^ => LongLiteral(long(lhs) ^ long(rhs))
      // Note: the shift count of Long shifts is an Int, not a Long.
      case Long_<< => LongLiteral(long(lhs) << int(rhs))
      case Long_>>> => LongLiteral(long(lhs) >>> int(rhs))
      case Long_>> => LongLiteral(long(lhs) >> int(rhs))
      case Long_== => BooleanLiteral(long(lhs) == long(rhs))
      case Long_!= => BooleanLiteral(long(lhs) != long(rhs))
      case Long_< => BooleanLiteral(long(lhs) < long(rhs))
      case Long_<= => BooleanLiteral(long(lhs) <= long(rhs))
      case Long_> => BooleanLiteral(long(lhs) > long(rhs))
      case Long_>= => BooleanLiteral(long(lhs) >= long(rhs))
      case Float_+ => FloatLiteral(float(lhs) + float(rhs))
      case Float_- => FloatLiteral(float(lhs) - float(rhs))
      case Float_* => FloatLiteral(float(lhs) * float(rhs))
      case Float_/ => FloatLiteral(float(lhs) / float(rhs))
      case Float_% => FloatLiteral(float(lhs) % float(rhs))
      case Double_+ => DoubleLiteral(double(lhs) + double(rhs))
      case Double_- => DoubleLiteral(double(lhs) - double(rhs))
      case Double_* => DoubleLiteral(double(lhs) * double(rhs))
      case Double_/ => DoubleLiteral(double(lhs) / double(rhs))
      case Double_% => DoubleLiteral(double(lhs) % double(rhs))
      case Double_== => BooleanLiteral(double(lhs) == double(rhs))
      case Double_!= => BooleanLiteral(double(lhs) != double(rhs))
      case Double_< => BooleanLiteral(double(lhs) < double(rhs))
      case Double_<= => BooleanLiteral(double(lhs) <= double(rhs))
      case Double_> => BooleanLiteral(double(lhs) > double(rhs))
      case Double_>= => BooleanLiteral(double(lhs) >= double(rhs))
      case Boolean_== => BooleanLiteral(boolean(lhs) == boolean(rhs))
      case Boolean_!= => BooleanLiteral(boolean(lhs) != boolean(rhs))
      case Boolean_| => BooleanLiteral(boolean(lhs) | boolean(rhs))
      case Boolean_& => BooleanLiteral(boolean(lhs) & boolean(rhs))
    }
  }
/** Translate literals to their Scala.js String representation. */
private def foldToStringForString_+(preTrans: PreTransform)(
implicit pos: Position): PreTransform = preTrans match {
case PreTransLit(literal) =>
def constant(s: String): PreTransform =
PreTransLit(StringLiteral(s))
def forFloatingPoint(value: Double): PreTransform =
jsNumberToString(value).fold(preTrans)(s => constant(s))
literal match {
case CharLiteral(value) => constant(value.toString)
case ByteLiteral(value) => constant(value.toString)
case ShortLiteral(value) => constant(value.toString)
case IntLiteral(value) => constant(value.toString)
case LongLiteral(value) => constant(value.toString)
case FloatLiteral(value) => forFloatingPoint(value)
case DoubleLiteral(value) => forFloatingPoint(value)
case BooleanLiteral(value) => constant(value.toString)
case Null() => constant("null")
case Undefined() => constant("undefined")
case _ => preTrans
}
case _ =>
preTrans
}
/* Following the ECMAScript 6 specification */
private def jsNumberToString(value: Double): Option[String] = {
if (1.0.toString == "1") {
// We are in a JS environment, so the host .toString() is the correct one.
Some(value.toString)
} else {
value match {
case _ if value.isNaN => Some("NaN")
case 0 => Some("0")
case _ if value < 0 => jsNumberToString(-value).map("-" + _)
case _ if value.isInfinity => Some("Infinity")
case _ if value.isValidInt => Some(value.toInt.toString)
case _ => None
}
}
}
private def foldBinaryOp(op: BinaryOp.Code, lhs: PreTransform,
rhs: PreTransform)(
implicit pos: Position): PreTransform = {
import BinaryOp._
def constant(lhsLit: Literal, rhsLit: Literal): PreTransform =
PreTransLit(constantFoldBinaryOp_except_String_+(op, lhsLit, rhsLit))
def nonConstant(): PreTransform = foldBinaryOpNonConstant(op, lhs, rhs)
(lhs, rhs) match {
case (PreTransLit(lhsLit), PreTransLit(rhsLit)) =>
op match {
case String_+ =>
nonConstant()
case Int_/ | Int_% =>
rhsLit match {
case IntLiteral(0) => nonConstant()
case _ => constant(lhsLit, rhsLit)
}
case Long_/ | Long_% =>
rhsLit match {
case LongLiteral(0) => nonConstant()
case _ => constant(lhsLit, rhsLit)
}
case _ =>
constant(lhsLit, rhsLit)
}
case _ =>
nonConstant()
}
}
  /** Static types under which a value may be a boxed primitive number at
   *  run-time (boxed number classes plus their common ancestors), used by the
   *  `===`/`!==` folding to decide whether a JS strict equality is safe.
   */
  private val MaybeHijackedPrimNumberClasses = {
    /* In theory, we could figure out the ancestors from the global knowledge,
     * but that would be overkill.
     */
    Set(BoxedByteClass, BoxedShortClass, BoxedIntegerClass, BoxedFloatClass,
        BoxedDoubleClass, ObjectClass, ClassName("java.lang.CharSequence"),
        ClassName("java.io.Serializable"), ClassName("java.lang.Comparable"),
        ClassName("java.lang.Number"))
  }
private def foldBinaryOpNonConstant(op: BinaryOp.Code, lhs: PreTransform,
rhs: PreTransform)(
implicit pos: Position): PreTransform = {
import BinaryOp._
@inline def default =
PreTransBinaryOp(op, lhs, rhs)
(op: @switch) match {
case === | !== =>
// Try to optimize as a primitive JS strict equality
def canBePrimitiveNum(tpe: RefinedType): Boolean = tpe.base match {
case AnyType | ByteType | ShortType | IntType | FloatType | DoubleType =>
true
case ClassType(className) =>
/* If `className` is a concrete superclass of a boxed number class,
* then it can be exact, and in that case we know that it cannot be
* a primitive number. In practice this happens only for
* `java.lang.Object`, and especially for code generated for
* non-local returns in Scala.
*/
!tpe.isExact && MaybeHijackedPrimNumberClasses.contains(className)
case _ =>
false
}
def isWhole(tpe: RefinedType): Boolean = tpe.base match {
case ByteType | ShortType | IntType =>
true
case ClassType(className) =>
className == BoxedByteClass ||
className == BoxedShortClass ||
className == BoxedIntegerClass
case _ =>
false
}
val lhsTpe = lhs.tpe
val rhsTpe = rhs.tpe
val canOptimizeAsJSStrictEq = (
!canBePrimitiveNum(lhsTpe) ||
!canBePrimitiveNum(rhsTpe) ||
(isWhole(lhsTpe) && isWhole(rhsTpe))
)
if (canOptimizeAsJSStrictEq) {
foldJSBinaryOp(
if (op == ===) JSBinaryOp.=== else JSBinaryOp.!==,
lhs, rhs)
} else {
default
}
case String_+ =>
// Here things can be constant!
val lhs1 = foldToStringForString_+(lhs)
val rhs1 = foldToStringForString_+(rhs)
@inline def stringDefault = PreTransBinaryOp(String_+, lhs1, rhs1)
(lhs1, rhs1) match {
case (PreTransLit(StringLiteral(s1)), PreTransLit(StringLiteral(s2))) =>
PreTransLit(StringLiteral(s1 + s2))
case (_, PreTransLit(StringLiteral(""))) =>
foldBinaryOp(op, rhs1, lhs1)
case (PreTransLit(StringLiteral("")), _) if rhs1.tpe.base == StringType =>
rhs1
case (_, PreTransBinaryOp(String_+, rl, rr)) =>
foldBinaryOp(String_+, PreTransBinaryOp(String_+, lhs1, rl), rr)
case (PreTransBinaryOp(String_+, ll, PreTransLit(StringLiteral(lr))),
PreTransLit(StringLiteral(r))) =>
PreTransBinaryOp(String_+, ll, PreTransLit(StringLiteral(lr + r)))
case (PreTransBinaryOp(String_+, PreTransLit(StringLiteral("")), lr), _) =>
PreTransBinaryOp(String_+, lr, rhs1)
case _ =>
stringDefault
}
case Int_+ =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(_))) =>
foldBinaryOp(Int_+, rhs, lhs)
case (PreTransLit(IntLiteral(0)), _) =>
rhs
case (PreTransLit(IntLiteral(x)),
PreTransBinaryOp(innerOp @ (Int_+ | Int_-),
PreTransLit(IntLiteral(y)), z)) =>
foldBinaryOp(innerOp, PreTransLit(IntLiteral(x + y)), z)
case _ => default
}
case Int_- =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(r))) =>
foldBinaryOp(Int_+, lhs, PreTransLit(IntLiteral(-r)))
case (PreTransLit(IntLiteral(x)),
PreTransBinaryOp(Int_+, PreTransLit(IntLiteral(y)), z)) =>
foldBinaryOp(Int_-,
PreTransLit(IntLiteral(x - y)), z)
case (PreTransLit(IntLiteral(x)),
PreTransBinaryOp(Int_-, PreTransLit(IntLiteral(y)), z)) =>
foldBinaryOp(Int_+, PreTransLit(IntLiteral(x - y)), z)
case (_, PreTransBinaryOp(Int_-, PreTransLit(IntLiteral(0)), x)) =>
foldBinaryOp(Int_+, lhs, x)
case _ => default
}
case Int_* =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(_))) =>
foldBinaryOp(Int_*, rhs, lhs)
case (PreTransLit(IntLiteral(x)), _) =>
x match {
case -1 => foldBinaryOp(Int_-, PreTransLit(IntLiteral(0)), rhs)
case 0 => PreTransBlock(finishTransformStat(rhs), lhs)
case 1 => rhs
// Exact power of 2
case _ if (x & (x - 1)) == 0 =>
/* Note that this would match 0, but 0 is handled above.
* It will also match Int.MinValue, but that is not a problem
* as the optimization also works (if you need convincing,
* simply interpret the multiplication as unsigned).
*/
foldBinaryOp(Int_<<, rhs,
PreTransLit(IntLiteral(Integer.numberOfTrailingZeros(x))))
case _ => default
}
case _ => default
}
case Int_/ =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(1))) =>
lhs
case (_, PreTransLit(IntLiteral(-1))) =>
foldBinaryOp(Int_-, PreTransLit(IntLiteral(0)), lhs)
case _ => default
}
case Int_% =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(1 | -1))) =>
Block(finishTransformStat(lhs), IntLiteral(0)).toPreTransform
case _ => default
}
case Int_| =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(_))) => foldBinaryOp(Int_|, rhs, lhs)
case (PreTransLit(IntLiteral(0)), _) => rhs
case (PreTransLit(IntLiteral(-1)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransLit(IntLiteral(x)),
PreTransBinaryOp(Int_|, PreTransLit(IntLiteral(y)), z)) =>
foldBinaryOp(Int_|, PreTransLit(IntLiteral(x | y)), z)
case _ => default
}
case Int_& =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(_))) => foldBinaryOp(Int_&, rhs, lhs)
case (PreTransLit(IntLiteral(-1)), _) => rhs
case (PreTransLit(IntLiteral(0)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransLit(IntLiteral(x)),
PreTransBinaryOp(Int_&, PreTransLit(IntLiteral(y)), z)) =>
foldBinaryOp(Int_&, PreTransLit(IntLiteral(x & y)), z)
case _ => default
}
case Int_^ =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(_))) => foldBinaryOp(Int_^, rhs, lhs)
case (PreTransLit(IntLiteral(0)), _) => rhs
case (PreTransLit(IntLiteral(x)),
PreTransBinaryOp(Int_^, PreTransLit(IntLiteral(y)), z)) =>
foldBinaryOp(Int_^, PreTransLit(IntLiteral(x ^ y)), z)
case _ => default
}
case Int_<< =>
(lhs, rhs) match {
case (PreTransLit(IntLiteral(0)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransBinaryOp(Int_<<, x, PreTransLit(IntLiteral(y))),
PreTransLit(IntLiteral(z))) =>
val dist = (y & 31) + (z & 31)
if (dist >= 32)
PreTransTree(Block(finishTransformStat(x), IntLiteral(0)))
else
PreTransBinaryOp(Int_<<, x, PreTransLit(IntLiteral(dist)))
case (_, PreTransLit(IntLiteral(y))) =>
val dist = y & 31
if (dist == 0)
lhs
else
PreTransBinaryOp(Int_<<, lhs, PreTransLit(IntLiteral(dist)))
case _ => default
}
case Int_>>> =>
(lhs, rhs) match {
case (PreTransLit(IntLiteral(0)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransBinaryOp(Int_>>>, x, PreTransLit(IntLiteral(y))),
PreTransLit(IntLiteral(z))) =>
val dist = (y & 31) + (z & 31)
if (dist >= 32)
PreTransTree(Block(finishTransformStat(x), IntLiteral(0)))
else
PreTransBinaryOp(Int_>>>, x, PreTransLit(IntLiteral(dist)))
case (PreTransBinaryOp(op @ (Int_| | Int_& | Int_^),
PreTransLit(IntLiteral(x)), y),
z @ PreTransLit(IntLiteral(zValue))) =>
foldBinaryOp(
op,
PreTransLit(IntLiteral(x >>> zValue)),
foldBinaryOp(Int_>>>, y, z))
case (_, PreTransLit(IntLiteral(y))) =>
val dist = y & 31
if (dist == 0)
lhs
else
PreTransBinaryOp(Int_>>>, lhs, PreTransLit(IntLiteral(dist)))
case _ => default
}
case Int_>> =>
(lhs, rhs) match {
case (PreTransLit(IntLiteral(0 | -1)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransBinaryOp(Int_>>, x, PreTransLit(IntLiteral(y))),
PreTransLit(IntLiteral(z))) =>
val dist = Math.min((y & 31) + (z & 31), 31)
PreTransBinaryOp(Int_>>, x, PreTransLit(IntLiteral(dist)))
case (PreTransBinaryOp(Int_>>>, x, PreTransLit(IntLiteral(y))),
PreTransLit(IntLiteral(_))) if (y & 31) != 0 =>
foldBinaryOp(Int_>>>, lhs, rhs)
case (_, PreTransLit(IntLiteral(y))) =>
val dist = y & 31
if (dist == 0)
lhs
else
PreTransBinaryOp(Int_>>, lhs, PreTransLit(IntLiteral(dist)))
case _ => default
}
case Long_+ =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(_))) => foldBinaryOp(Long_+, rhs, lhs)
case (PreTransLit(LongLiteral(0)), _) => rhs
case (PreTransLit(LongLiteral(x)),
PreTransBinaryOp(innerOp @ (Long_+ | Long_-),
PreTransLit(LongLiteral(y)), z)) =>
foldBinaryOp(innerOp, PreTransLit(LongLiteral(x + y)), z)
case _ => default
}
case Long_- =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(r))) =>
foldBinaryOp(Long_+, PreTransLit(LongLiteral(-r)), lhs)
case (PreTransLit(LongLiteral(x)),
PreTransBinaryOp(Long_+, PreTransLit(LongLiteral(y)), z)) =>
foldBinaryOp(Long_-, PreTransLit(LongLiteral(x - y)), z)
case (PreTransLit(LongLiteral(x)),
PreTransBinaryOp(Long_-, PreTransLit(LongLiteral(y)), z)) =>
foldBinaryOp(Long_+, PreTransLit(LongLiteral(x - y)), z)
case (_, PreTransBinaryOp(BinaryOp.Long_-,
PreTransLit(LongLiteral(0L)), x)) =>
foldBinaryOp(Long_+, lhs, x)
case _ => default
}
case Long_* =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(_))) =>
foldBinaryOp(Long_*, rhs, lhs)
case (PreTransLit(LongLiteral(x)), _) =>
x match {
case -1L => foldBinaryOp(Long_-, PreTransLit(LongLiteral(0)), rhs)
case 0L => PreTransBlock(finishTransformStat(rhs), lhs)
case 1L => rhs
// Exact power of 2
case _ if (x & (x - 1L)) == 0L =>
/* Note that this would match 0L, but 0L is handled above.
* It will also match Long.MinValue, but that is not a problem
* as the optimization also works (if you need convincing,
* simply interpret the multiplication as unsigned).
*/
foldBinaryOp(Long_<<, rhs, PreTransLit(
IntLiteral(java.lang.Long.numberOfTrailingZeros(x))))
case _ => default
}
case _ => default
}
case Long_/ =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(1))) =>
lhs
case (_, PreTransLit(LongLiteral(-1))) =>
foldBinaryOp(Long_-, PreTransLit(LongLiteral(0)), lhs)
case (LongFromInt(x), LongFromInt(PreTransLit(y: IntLiteral)))
if y.value != -1 =>
LongFromInt(foldBinaryOp(Int_/, x, PreTransLit(y)))
case _ => default
}
case Long_% =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(1L | -1L))) =>
Block(finishTransformStat(lhs), LongLiteral(0L)).toPreTransform
case (LongFromInt(x), LongFromInt(y)) =>
LongFromInt(foldBinaryOp(Int_%, x, y))
case _ => default
}
case Long_| =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(_))) =>
foldBinaryOp(Long_|, rhs, lhs)
case (PreTransLit(LongLiteral(0)), _) =>
rhs
case (PreTransLit(LongLiteral(-1)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransLit(LongLiteral(x)),
PreTransBinaryOp(Long_|, PreTransLit(LongLiteral(y)), z)) =>
foldBinaryOp(Long_|, PreTransLit(LongLiteral(x | y)), z)
case _ => default
}
case Long_& =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(_))) =>
foldBinaryOp(Long_&, rhs, lhs)
case (PreTransLit(LongLiteral(-1)), _) =>
rhs
case (PreTransLit(LongLiteral(0)), _) =>
PreTransBlock(finishTransformStat(rhs), lhs)
case (PreTransLit(LongLiteral(x)),
PreTransBinaryOp(Long_&, PreTransLit(LongLiteral(y)), z)) =>
foldBinaryOp(Long_&, PreTransLit(LongLiteral(x & y)), z)
case _ => default
}
case Long_^ =>
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(_))) =>
foldBinaryOp(Long_^, rhs, lhs)
case (PreTransLit(LongLiteral(0)), _) =>
rhs
case (PreTransLit(LongLiteral(x)),
PreTransBinaryOp(Long_^, PreTransLit(LongLiteral(y)), z)) =>
foldBinaryOp(Long_^, PreTransLit(LongLiteral(x ^ y)), z)
case _ => default
}
case Long_<< =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(x))) if x % 64 == 0 => lhs
case _ => default
}
case Long_>>> =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(x))) if x % 64 == 0 => lhs
case _ => default
}
case Long_>> =>
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(x))) if x % 64 == 0 => lhs
case _ => default
}
case Long_== | Long_!= =>
val positive = (op == Long_==)
(lhs, rhs) match {
case (LongFromInt(x), LongFromInt(y)) =>
foldBinaryOp(if (positive) Int_== else Int_!=, x, y)
case (LongFromInt(x), PreTransLit(LongLiteral(y))) =>
assert(y > Int.MaxValue || y < Int.MinValue)
Block(finishTransformStat(x),
BooleanLiteral(!positive)).toPreTransform
case (PreTransBinaryOp(Long_+, PreTransLit(LongLiteral(x)), y),
PreTransLit(LongLiteral(z))) =>
foldBinaryOp(op, y, PreTransLit(LongLiteral(z - x)))
case (PreTransBinaryOp(Long_-, PreTransLit(LongLiteral(x)), y),
PreTransLit(LongLiteral(z))) =>
foldBinaryOp(op, y, PreTransLit(LongLiteral(x - z)))
case (PreTransBinaryOp(Long_^, PreTransLit(LongLiteral(x)), y),
PreTransLit(LongLiteral(z))) =>
foldBinaryOp(op, y, PreTransLit(LongLiteral(x ^ z)))
case (PreTransLit(LongLiteral(_)), _) => foldBinaryOp(op, rhs, lhs)
case _ => default
}
case Long_< | Long_<= | Long_> | Long_>= =>
def flippedOp = (op: @switch) match {
case Long_< => Long_>
case Long_<= => Long_>=
case Long_> => Long_<
case Long_>= => Long_<=
}
def intOp = (op: @switch) match {
case Long_< => Int_<
case Long_<= => Int_<=
case Long_> => Int_>
case Long_>= => Int_>=
}
(lhs, rhs) match {
case (_, PreTransLit(LongLiteral(Long.MinValue))) =>
if (op == Long_< || op == Long_>=) {
Block(finishTransformStat(lhs),
BooleanLiteral(op == Long_>=)).toPreTransform
} else {
foldBinaryOp(if (op == Long_<=) Long_== else Long_!=, lhs, rhs)
}
case (_, PreTransLit(LongLiteral(Long.MaxValue))) =>
if (op == Long_> || op == Long_<=) {
Block(finishTransformStat(lhs),
BooleanLiteral(op == Long_<=)).toPreTransform
} else {
foldBinaryOp(if (op == Long_>=) Long_== else Long_!=, lhs, rhs)
}
case (LongFromInt(x), LongFromInt(y)) =>
foldBinaryOp(intOp, x, y)
case (LongFromInt(x), PreTransLit(LongLiteral(y))) =>
assert(y > Int.MaxValue || y < Int.MinValue)
val result =
if (y > Int.MaxValue) op == Long_< || op == Long_<=
else op == Long_> || op == Long_>=
Block(finishTransformStat(x), BooleanLiteral(result)).toPreTransform
/* x + y.toLong > z
* -x on both sides
* requires x + y.toLong not to overflow, and z - x likewise
* y.toLong > z - x
*/
case (PreTransBinaryOp(Long_+, PreTransLit(LongLiteral(x)), y @ LongFromInt(_)),
PreTransLit(LongLiteral(z)))
if canAddLongs(x, Int.MinValue) &&
canAddLongs(x, Int.MaxValue) &&
canSubtractLongs(z, x) =>
foldBinaryOp(op, y, PreTransLit(LongLiteral(z-x)))
/* x - y.toLong > z
* -x on both sides
* requires x - y.toLong not to overflow, and z - x likewise
* -(y.toLong) > z - x
*/
case (PreTransBinaryOp(Long_-, PreTransLit(LongLiteral(x)), y @ LongFromInt(_)),
PreTransLit(LongLiteral(z)))
if canSubtractLongs(x, Int.MinValue) &&
canSubtractLongs(x, Int.MaxValue) &&
canSubtractLongs(z, x) =>
if (z-x != Long.MinValue) {
// Since -(y.toLong) does not overflow, we can negate both sides
foldBinaryOp(flippedOp, y, PreTransLit(LongLiteral(-(z-x))))
} else {
/* -(y.toLong) > Long.MinValue
* Depending on the operator, this is either always true or
* always false.
*/
val result = (op == Long_>) || (op == Long_>=)
Block(finishTransformStat(y),
BooleanLiteral(result)).toPreTransform
}
/* x.toLong + y.toLong > Int.MaxValue.toLong
*
* This is basically testing whether x+y overflows in positive.
* If x <= 0 or y <= 0, this cannot happen -> false.
* If x > 0 and y > 0, this can be detected with x+y < 0.
* Therefore, we rewrite as:
*
* x > 0 && y > 0 && x+y < 0.
*
* This requires to evaluate x and y once.
*/
case (PreTransBinaryOp(Long_+, LongFromInt(x), LongFromInt(y)),
PreTransLit(LongLiteral(Int.MaxValue))) =>
trampoline {
/* HACK: We use an empty scope here for `withNewLocalDefs`.
* It's OKish to do that because we're only defining Ints, and
* we know withNewLocalDefs is not going to try and inline things
* when defining ints, so we cannot go into infinite inlining.
*/
val emptyScope = Scope.Empty
withNewLocalDefs(List(
Binding.temp(LocalName("x"), IntType, false, x),
Binding.temp(LocalName("y"), IntType, false, y))) {
(tempsLocalDefs, cont) =>
val List(tempXDef, tempYDef) = tempsLocalDefs
val tempX = tempXDef.newReplacement
val tempY = tempYDef.newReplacement
cont(AndThen(AndThen(
BinaryOp(Int_>, tempX, IntLiteral(0)),
BinaryOp(Int_>, tempY, IntLiteral(0))),
BinaryOp(Int_<, BinaryOp(Int_+, tempX, tempY), IntLiteral(0))
).toPreTransform)
} (finishTransform(isStat = false))(emptyScope)
}.toPreTransform
case (PreTransLit(LongLiteral(_)), _) =>
foldBinaryOp(flippedOp, rhs, lhs)
case _ => default
}
case Float_* =>
(lhs, rhs) match {
case (_, PreTransLit(FloatLiteral(_))) =>
foldBinaryOp(Float_*, rhs, lhs)
case (PreTransLit(FloatLiteral(1)), _) =>
rhs
case (PreTransLit(FloatLiteral(-1)),
PreTransBinaryOp(Float_*, PreTransLit(FloatLiteral(-1)), z)) =>
z
case _ => default
}
case Float_/ =>
(lhs, rhs) match {
case (_, PreTransLit(FloatLiteral(1))) =>
lhs
case (_, PreTransLit(FloatLiteral(-1))) =>
foldBinaryOp(Float_*, PreTransLit(FloatLiteral(-1)), lhs)
case _ => default
}
case Float_% =>
(lhs, rhs) match {
case _ => default
}
case Double_* =>
(lhs, rhs) match {
case (_, PreTransLit(DoubleLiteral(_))) =>
foldBinaryOp(Double_*, rhs, lhs)
case (PreTransLit(DoubleLiteral(1)), _) =>
rhs
case (PreTransLit(DoubleLiteral(-1)),
PreTransBinaryOp(Double_*, PreTransLit(DoubleLiteral(-1)), z)) =>
z
case _ => default
}
case Double_/ =>
(lhs, rhs) match {
case (_, PreTransLit(DoubleLiteral(1))) =>
lhs
case (_, PreTransLit(DoubleLiteral(-1))) =>
foldBinaryOp(Double_*, PreTransLit(DoubleLiteral(-1)), lhs)
case _ => default
}
case Double_% =>
(lhs, rhs) match {
case _ => default
}
case Boolean_== | Boolean_!= =>
val positive = (op == Boolean_==)
(lhs, rhs) match {
case (PreTransLit(_), _) =>
foldBinaryOp(op, rhs, lhs)
case (PreTransLit(BooleanLiteral(l)), _) =>
if (l == positive) rhs
else foldUnaryOp(UnaryOp.Boolean_!, rhs)
case _ =>
default
}
case Boolean_| =>
(lhs, rhs) match {
case (_, PreTransLit(BooleanLiteral(false))) => lhs
case (PreTransLit(BooleanLiteral(false)), _) => rhs
case _ => default
}
case Boolean_& =>
(lhs, rhs) match {
case (_, PreTransLit(BooleanLiteral(true))) => lhs
case (PreTransLit(BooleanLiteral(true)), _) => rhs
case _ => default
}
case Int_== | Int_!= =>
(lhs, rhs) match {
case (PreTransBinaryOp(Int_+, PreTransLit(IntLiteral(x)), y),
PreTransLit(IntLiteral(z))) =>
foldBinaryOp(op, y, PreTransLit(IntLiteral(z - x)))
case (PreTransBinaryOp(Int_-, PreTransLit(IntLiteral(x)), y),
PreTransLit(IntLiteral(z))) =>
foldBinaryOp(op, y, PreTransLit(IntLiteral(x - z)))
case (PreTransBinaryOp(Int_^, PreTransLit(IntLiteral(x)), y),
PreTransLit(IntLiteral(z))) =>
foldBinaryOp(op, y, PreTransLit(IntLiteral(x ^ z)))
case (PreTransLit(_), _) => foldBinaryOp(op, rhs, lhs)
case _ => default
}
case Int_< | Int_<= | Int_> | Int_>= =>
def flippedOp = (op: @switch) match {
case Int_< => Int_>
case Int_<= => Int_>=
case Int_> => Int_<
case Int_>= => Int_<=
}
(lhs, rhs) match {
case (_, PreTransLit(IntLiteral(y))) =>
y match {
case Int.MinValue =>
if (op == Int_< || op == Int_>=) {
Block(finishTransformStat(lhs),
BooleanLiteral(op == Int_>=)).toPreTransform
} else {
foldBinaryOp(if (op == Int_<=) Int_== else Int_!=, lhs, rhs)
}
case Int.MaxValue =>
if (op == Int_> || op == Int_<=) {
Block(finishTransformStat(lhs),
BooleanLiteral(op == Int_<=)).toPreTransform
} else {
foldBinaryOp(if (op == Int_>=) Int_== else Int_!=, lhs, rhs)
}
case _ if y == Int.MinValue + 1 && (op == Int_< || op == Int_>=) =>
foldBinaryOp(if (op == Int_<) Int_== else Int_!=, lhs,
PreTransLit(IntLiteral(Int.MinValue)))
case _ if y == Int.MaxValue - 1 && (op == Int_> || op == Int_<=) =>
foldBinaryOp(if (op == Int_>) Int_== else Int_!=, lhs,
PreTransLit(IntLiteral(Int.MaxValue)))
case _ => default
}
case (PreTransLit(IntLiteral(_)), _) =>
foldBinaryOp(flippedOp, rhs, lhs)
case _ => default
}
case _ =>
default
}
}
/** Folds a three-way Int comparison whose possible outcomes are constrained.
 *
 *  Given which of the three outcomes (equal, less-than, greater-than) are
 *  still possible, emits the cheapest equivalent: a constant (when all or
 *  none are possible) or a single binary comparison otherwise.
 */
private def fold3WayIntComparison(canBeEqual: Boolean, canBeLessThan: Boolean,
    canBeGreaterThan: Boolean, lhs: PreTransform, rhs: PreTransform)(
    implicit pos: Position): PreTransform = {
  import BinaryOp._

  // Evaluate both sides for their side effects only, then yield a constant.
  def constantResult(value: Boolean): PreTransform = {
    Block(
        finishTransformStat(lhs),
        finishTransformStat(rhs),
        BooleanLiteral(value)).toPreTransform
  }

  (canBeEqual, canBeLessThan, canBeGreaterThan) match {
    case (true, true, true)    => constantResult(true)
    case (true, true, false)   => foldBinaryOp(Int_<=, lhs, rhs)
    case (true, false, true)   => foldBinaryOp(Int_>=, lhs, rhs)
    case (true, false, false)  => foldBinaryOp(Int_==, lhs, rhs)
    case (false, true, true)   => foldBinaryOp(Int_!=, lhs, rhs)
    case (false, true, false)  => foldBinaryOp(Int_<, lhs, rhs)
    case (false, false, true)  => foldBinaryOp(Int_>, lhs, rhs)
    case (false, false, false) => constantResult(false)
  }
}
/** Constant-folds a JS binary operation.
 *
 *  Only `===`/`!==` are folded: two literals are decided at compile time
 *  with JS strict-equality semantics, and comparing a non-nullable value to
 *  the `null` literal is decided statically as well.
 */
private def foldJSBinaryOp(op: JSBinaryOp.Code, lhs: PreTransform,
    rhs: PreTransform)(
    implicit pos: Position): PreTransform = {
  import JSBinaryOp._

  def unfolded: PreTransform =
    PreTransJSBinaryOp(op, lhs, rhs)

  op match {
    case JSBinaryOp.=== | JSBinaryOp.!== =>
      val isPositive = (op == JSBinaryOp.===)
      (lhs, rhs) match {
        case (PreTransLit(leftLit), PreTransLit(rightLit)) =>
          // Both sides are literals: decide the comparison now.
          val areSame = literal_===(leftLit, rightLit, isJSStrictEq = true)
          PreTransLit(BooleanLiteral(if (isPositive) areSame else !areSame))
        case (_, PreTransLit(Null())) if !lhs.tpe.isNullable =>
          // A non-nullable value can never be `null`.
          Block(
              finishTransformStat(lhs),
              BooleanLiteral(!isPositive)).toPreTransform
        case (PreTransLit(_), _) =>
          /* Normalize the literal to the right-hand side.
           * NOTE(review): this delegates to `foldBinaryOp`, not
           * `foldJSBinaryOp` — confirm this cross-delegation is intended.
           */
          foldBinaryOp(op, rhs, lhs)
        case _ =>
          unfolded
      }
    case _ =>
      unfolded
  }
}
/** Folds an `asInstanceOf`, eliding the cast when `arg` is statically known
 *  to already conform to `tpe`.
 */
private def foldAsInstanceOf(arg: PreTransform, tpe: Type)(
    cont: PreTransCont): TailRec[Tree] = {
  val folded =
    if (isSubtype(arg.tpe.base, tpe)) arg
    else AsInstanceOf(finishTransformExpr(arg), tpe)(arg.pos).toPreTransform
  cont(folded)
}
/** Folds a JS field selection, resolving well-known linking-info fields to
 *  the constants configured for this linking run.
 */
private def foldJSSelect(qualifier: Tree, item: Tree)(
    implicit pos: Position): Tree = {
  // !!! Must be in sync with scala.scalajs.runtime.LinkingInfo
  import config.coreSpec._

  (qualifier, item) match {
    case (JSLinkingInfo(), StringLiteral(fieldName)) =>
      fieldName match {
        case "productionMode" =>
          BooleanLiteral(semantics.productionMode)
        case "esVersion" =>
          IntLiteral(esFeatures.esVersion.edition)
        case "assumingES6" =>
          BooleanLiteral(esFeatures.useECMAScript2015Semantics)
        case "version" =>
          StringLiteral(ScalaJSVersions.current)
        case _ =>
          JSSelect(qualifier, item)
      }
    case _ =>
      JSSelect(qualifier, item)
  }
}
/** Transforms a method body as an isolated optimization unit.
 *
 *  All parameter names are freshened, and the body is transformed in a
 *  scope that contains only those params (plus `this` when `thisType` is
 *  not `NoType`). When `optTarget` is defined, it is added to the inlining
 *  set threaded into the scope — presumably to stop recursive inlining of
 *  the same target (TODO confirm against the `Scope.inlining` contract).
 *
 *  @return the freshened param defs together with the transformed body
 */
def transformIsolatedBody(optTarget: Option[MethodID],
    thisType: Type, params: List[ParamDef], resultType: Type,
    body: Tree,
    alreadyInlining: Set[Scope.InliningID]): (List[ParamDef], Tree) = {
  // Freshen each param; build both its LocalDef mapping and its new ParamDef.
  val (paramLocalDefs, newParamDefs) = (for {
    p @ ParamDef(ident @ LocalIdent(name), originalName, ptpe, mutable) <- params
  } yield {
    val (newName, newOriginalName) = freshLocalName(name, originalName, mutable)
    val localDef = LocalDef(RefinedType(ptpe), mutable,
        ReplaceWithVarRef(newName, newSimpleState(true), None))
    val newParamDef = ParamDef(LocalIdent(newName)(ident.pos),
        newOriginalName, ptpe, mutable)(p.pos)
    ((name -> localDef), newParamDef)
  }).unzip

  // `this` is known non-null (but not exact) when a this-type is given.
  val thisLocalDef =
    if (thisType == NoType) None
    else {
      Some(LocalDef(
          RefinedType(thisType, isExact = false, isNullable = false),
          false, ReplaceWithThis()))
    }

  /* Register the target in the inlining set with one anonymous allocation
   * site per binding (params + optional `this`).
   */
  val inlining = optTarget.fold(alreadyInlining) { target =>
    val allocationSiteCount =
      paramLocalDefs.size + (if (thisLocalDef.isDefined) 1 else 0)
    val allocationSites =
      List.fill(allocationSiteCount)(AllocationSite.Anonymous)
    alreadyInlining + ((allocationSites, target))
  }

  // Environment = optional `this` + the freshened params.
  val env = {
    val envWithThis =
      thisLocalDef.fold(OptEnv.Empty)(OptEnv.Empty.withThisLocalDef(_))
    envWithThis.withLocalDefs(paramLocalDefs)
  }

  val scope = Scope.Empty.inlining(inlining).withEnv(env)
  // A NoType result means the body is in statement position.
  val newBody = transform(body, resultType == NoType)(scope)

  (newParamDefs, newBody)
}
/** Pretransforms a `Labeled` block, freshening its label.
 *
 *  The types of all `Return`s to the label (recorded through `LabelInfo`)
 *  are lubbed with the body's own type to refine the type of the resulting
 *  `Labeled` tree, and labels that are never returned to are eliminated.
 *
 *  When `usePreTransform` is true, the body is pretransformed so that
 *  record (virtualized) results can flow through the label; if the returned
 *  types turn out inconsistent, the attempt is rolled back and we retry
 *  with `usePreTransform = false`.
 */
private def pretransformLabeled(oldLabelName: LabelName, resultType: Type,
    body: Tree, isStat: Boolean, usePreTransform: Boolean)(
    cont: PreTransCont)(
    implicit scope: Scope, pos: Position): TailRec[Tree] = tailcall {
  val newLabel = freshLabelName(oldLabelName)

  /* Builds the final tree, first giving the pattern-match optimization a
   * chance to remove the label altogether.
   */
  def doMakeTree(newBody: Tree, returnedTypes: List[Type]): Tree = {
    val refinedType =
      returnedTypes.reduce(constrainedLub(_, _, resultType))
    // returnedTypes also contains the body's own type, hence the -1.
    val returnCount = returnedTypes.size - 1

    tryOptimizePatternMatch(oldLabelName, newLabel, refinedType,
        returnCount, newBody) getOrElse {
      Labeled(LabelIdent(newLabel), refinedType, newBody)
    }
  }

  val info = new LabelInfo(newLabel, acceptRecords = usePreTransform,
      returnedTypes = newSimpleState(Nil))
  val bodyScope = scope.withEnv(scope.env.withLabelInfo(oldLabelName, info))

  if (usePreTransform) {
    assert(!isStat, "Cannot use pretransform in statement position")

    tryOrRollback { cancelFun =>
      pretransformExpr(body) { tbody0 =>
        val returnedTypes0 = info.returnedTypes.value
        if (returnedTypes0.isEmpty) {
          // no return to that label, we can eliminate it
          cont(tbody0)
        } else {
          val tbody = resolveLocalDef(tbody0)
          // Pair up (actual tree type, original/refined type) for the body.
          val (newBody, returnedTypes) = tbody match {
            case PreTransRecordTree(bodyTree, origType, _) =>
              (bodyTree, (bodyTree.tpe, origType) :: returnedTypes0)
            case PreTransTree(bodyTree, tpe) =>
              (bodyTree, (bodyTree.tpe, tpe) :: returnedTypes0)
          }
          val (actualTypes, origTypes) = returnedTypes.unzip
          val refinedOrigType =
            origTypes.reduce(constrainedLub(_, _, resultType))
          actualTypes.collectFirst {
            case actualType: RecordType => actualType
          }.fold[TailRec[Tree]] {
            // None of the returned types are records
            cont(PreTransTree(
                doMakeTree(newBody, actualTypes), refinedOrigType))
          } { recordType =>
            // Every returned record must be the *same* record type
            // (NothingType branches excepted), otherwise roll back.
            if (actualTypes.exists(t => t != recordType && t != NothingType))
              cancelFun()

            val resultTree = doMakeTree(newBody, actualTypes)

            // Likewise for the original types.
            if (origTypes.exists(t => t != refinedOrigType && !t.isNothingType))
              cancelFun()

            cont(PreTransRecordTree(resultTree, refinedOrigType, cancelFun))
          }
        }
      } (bodyScope)
    } { () =>
      // Rollback path: retry without records flowing through the label.
      pretransformLabeled(oldLabelName, resultType, body, isStat,
          usePreTransform = false)(cont)
    }
  } else {
    val newBody = transform(body, isStat)(bodyScope)
    val returnedTypes0 = info.returnedTypes.value.map(_._1)
    if (returnedTypes0.isEmpty) {
      // no return to that label, we can eliminate it
      cont(PreTransTree(newBody, RefinedType(newBody.tpe)))
    } else {
      val returnedTypes = newBody.tpe :: returnedTypes0
      val tree = doMakeTree(newBody, returnedTypes)
      cont(PreTransTree(tree, RefinedType(tree.tpe)))
    }
  }
}
/** Tries to optimize the remains of a pattern match as if/elses.
 *
 * !!! There is quite a bit of code duplication with
 * GenJSCode.genOptimizedMatchEndLabeled.
 */
/** Attempts to rewrite a labeled pattern-match body into nested if/elses,
 *  dropping the label and the `Return`s to it.
 *
 *  Succeeds only when every alternative ends in a `Return` to
 *  `newLabelName` and the number of alternatives matches `returnCount`;
 *  returns `None` otherwise, leaving the caller to emit a regular
 *  `Labeled` tree.
 */
def tryOptimizePatternMatch(oldLabelName: LabelName, newLabelName: LabelName,
    refinedType: Type, returnCount: Int, body: Tree): Option[Tree] = {
  // Heuristic for speed: only try to optimize labels likely named 'matchEnd...'
  val isMaybeMatchEndLabel = {
    val oldEncodedName = oldLabelName.encoded
    oldEncodedName.length >= 8 && oldEncodedName(0) == 'm' &&
    oldEncodedName(1) == 'a' && oldEncodedName(2) == 't' // stop here
  }
  if (!isMaybeMatchEndLabel) {
    None
  } else {
    body match {
      case Block(stats) =>
        /* Collect the leading run of else-less `if`s as (cond, body)
         * alternatives (in reverse order), and the rest as the final else.
         */
        @tailrec
        def createRevAlts(xs: List[Tree],
            acc: List[(Tree, Tree)]): (List[(Tree, Tree)], Tree) = xs match {
          case If(cond, body, Skip()) :: xr =>
            createRevAlts(xr, (cond, body) :: acc)
          case remaining =>
            (acc, Block(remaining)(remaining.head.pos))
        }
        val (revAlts, elsep) = createRevAlts(stats, Nil)

        // Proceed only if every Return to the label is accounted for.
        if (revAlts.size == returnCount - 1) {
          // Strips a trailing `Return` to our label, if present.
          def tryDropReturn(body: Tree): Option[Tree] = body match {
            case BlockOrAlone(prep, Return(result, LabelIdent(`newLabelName`))) =>
              // In statement position, keep only the result's side effects.
              val result1 =
                if (refinedType == NoType) keepOnlySideEffects(result)
                else result
              Some(Block(prep, result1)(body.pos))
            case _ =>
              None
          }

          // Folds the reversed alternatives back into nested if/elses.
          @tailrec
          def constructOptimized(revAlts: List[(Tree, Tree)],
              elsep: Tree): Option[Tree] = {
            revAlts match {
              case (cond, body) :: revAltsRest =>
                // cannot use flatMap due to tailrec
                tryDropReturn(body) match {
                  case Some(newBody) =>
                    constructOptimized(revAltsRest,
                        foldIf(cond, newBody, elsep)(refinedType)(cond.pos))
                  case None =>
                    None
                }
              case Nil =>
                Some(elsep)
            }
          }

          tryDropReturn(elsep).flatMap(constructOptimized(revAlts, _))
        } else {
          None
        }
      case _ =>
        None
    }
  }
}
/** Introduces all `bindings` as local defs, then runs `buildInner` in a
 *  scope whose environment maps each binding's name to its local def.
 */
private def withBindings(bindings: List[Binding])(
    buildInner: (Scope, PreTransCont) => TailRec[Tree])(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  withNewLocalDefs(bindings) { (localDefs, innerCont) =>
    val extendedEnv = bindings.zip(localDefs).foldLeft(scope.env) {
      case (env, (binding, localDef)) =>
        env.withLocalDef(binding.name, localDef)
    }
    buildInner(scope.withEnv(extendedEnv), innerCont)
  } (cont)
}
/** Introduces a single `binding` as a local def, then runs `buildInner` in
 *  a scope whose environment includes it.
 */
private def withBinding(binding: Binding)(
    buildInner: (Scope, PreTransCont) => TailRec[Tree])(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  withNewLocalDef(binding) { (localDef, innerCont) =>
    val extendedEnv = scope.env.withLocalDef(binding.name, localDef)
    buildInner(scope.withEnv(extendedEnv), innerCont)
  } (cont)
}
/** Introduces each of `bindings` as a local def (in order), accumulating
 *  the resulting local defs and passing them to `buildInner`.
 */
private def withNewLocalDefs(bindings: List[Binding])(
    buildInner: (List[LocalDef], PreTransCont) => TailRec[Tree])(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = {
  bindings match {
    case Nil =>
      buildInner(Nil, cont)
    case headBinding :: tailBindings =>
      withNewLocalDef(headBinding) { (headLocalDef, afterHead) =>
        withNewLocalDefs(tailBindings) { (tailLocalDefs, afterTail) =>
          buildInner(headLocalDef :: tailLocalDefs, afterTail)
        } (afterHead)
      } (cont)
  }
}
/** Tests whether `tpe` is deeply immutable: a record type is immutable iff
 *  none of its fields is mutable and all field types are immutable; every
 *  non-record type is considered immutable.
 */
private def isImmutableType(tpe: Type): Boolean = tpe match {
  case RecordType(fields) =>
    !fields.exists(field => field.mutable || !isImmutableType(field.tpe))
  case _ =>
    true
}
/** Introduces one `binding` as a local def and continues with `buildInner`.
 *
 *  Depending on the binding's value, the local def may alias an existing
 *  immutable local, become a compile-time constant, or be materialized as
 *  a dedicated variable (always, for mutable bindings). Primitive Long
 *  bindings may first be expanded into a RuntimeLong record.
 */
private def withNewLocalDef(binding: Binding)(
    buildInner: (LocalDef, PreTransCont) => TailRec[Tree])(
    cont: PreTransCont)(
    implicit scope: Scope): TailRec[Tree] = tailcall {
  val Binding(bindingName, declaredType, mutable, value) = binding
  implicit val pos = value.pos

  // Fallback: materialize the binding as an actual variable.
  def withDedicatedVar(tpe: RefinedType): TailRec[Tree] = {
    val rtLongClassType = ClassType(LongImpl.RuntimeLongClass)

    if (tpe.base == LongType && declaredType != rtLongClassType &&
        useRuntimeLong) {
      /* If the value's type is a primitive Long, and the declared type is
       * not RuntimeLong, we want to force the expansion of the primitive
       * Long (which we know is in fact a RuntimeLong) into a local variable,
       * and then its two components into a Record. This makes sure that all
       * Longs are stack-allocated when they are put in a var/val, even if
       * they came from a method call or other opaque sources, and also if a
       * var is initialized with a literal long.
       *
       * We only do all that if the library contains a inlineable version of
       * RuntimeLong.
       */
      expandLongValue(value) { expandedValue =>
        val expandedBinding = Binding(bindingName, rtLongClassType,
            mutable, expandedValue)
        withNewLocalDef(expandedBinding)(buildInner)(cont)
      }
    } else {
      // Otherwise, we effectively declare a new binding
      val (newName, newOriginalName) = freshLocalName(bindingName, mutable)

      val used = newSimpleState(false)

      // Record values keep their var virtualized as a record var ref.
      val (replacement, refinedType) = resolveRecordType(value) match {
        case Some((recordType, cancelFun)) =>
          (ReplaceWithRecordVarRef(newName, recordType, used, cancelFun), value.tpe)
        case None =>
          (ReplaceWithVarRef(newName, used, None), tpe)
      }

      val localDef = LocalDef(refinedType, mutable, replacement)
      val preTransBinding = PreTransBinding(newOriginalName, localDef, value)

      // Prepend the binding to whatever the inner continuation produces.
      buildInner(localDef, { tinner =>
        cont(addPreTransBinding(preTransBinding, tinner))
      })
    }
  }

  if (value.tpe.isNothingType) {
    // The value never completes normally; the binding itself is dead.
    cont(value)
  } else if (mutable) {
    // Mutable bindings always get a real variable.
    withDedicatedVar(RefinedType(declaredType))
  } else {
    // Refines the binding's type from the value type and the declared type.
    def computeRefinedType(): RefinedType = bindingName match {
      case _ if value.tpe.isExact || declaredType == AnyType =>
        /* If the value's type is exact, or if the declared type is `AnyType`,
         * the declared type cannot have any new information to give us, so
         * we directly return `value.tpe`. This avoids a useless `isSubtype`
         * call, which creates dependencies for incremental optimization.
         *
         * In addition, for the case `declaredType == AnyType` there is a
         * stronger reason: we don't actually know that `this` is non-null in
         * that case, since it could be the `this` value of a JavaScript
         * function, which can accept `null`. (As of this writing, this is
         * theoretical, because the only place where we use a declared type
         * of `AnyType` is in `JSFunctionApply`, where the actual value for
         * `this` is always `undefined`.)
         */
        value.tpe
      case _: Binding.Local =>
        /* When binding a something else than `this`, we do not receive the
         * non-null information. Moreover, there is no situation where the
         * declared type would bring any new information, since that would
         * not be valid IR in the first place. Therefore, to avoid a useless
         * call to `isSubtype`, we directly return `value.tpe`.
         */
        value.tpe
      case Binding.This =>
        /* When binding to `this`, if the declared type is not `AnyType`,
         * we are in a situation where
         * a) we know the value must be non-null, and
         * b) the declaredType may bring more precise information than
         *    value.tpe.base (typically when inlining a polymorphic method
         *    that ends up having only one target in a subclass).
         * We can refine the type here based on that knowledge.
         */
        val improvedBaseType =
          if (isSubtype(value.tpe.base, declaredType)) value.tpe.base
          else declaredType
        val isExact = false // We catch the case value.tpe.isExact earlier
        RefinedType(improvedBaseType, isExact, isNullable = false)
    }

    value match {
      case PreTransBlock(bindingsAndStats, result) =>
        // Bind only the block's result; hoist its bindings/stats outward.
        withNewLocalDef(binding.copy(value = result))(buildInner) { tresult =>
          cont(addPreTransBindings(bindingsAndStats, tresult))
        }

      case PreTransLocalDef(localDef) if !localDef.mutable =>
        // Alias an existing immutable local instead of copying it.
        val refinedType = computeRefinedType()
        val newLocalDef = if (refinedType == value.tpe) {
          localDef
        } else {
          /* Only adjust if the replacement if ReplaceWithThis or
           * ReplaceWithVarRef, because other types have nothing to gain
           * (e.g., ReplaceWithConstant) or we want to keep them unwrapped
           * because they are examined in optimizations (notably all the
           * types with virtualized objects).
           */
          localDef.replacement match {
            case _:ReplaceWithThis | _:ReplaceWithVarRef =>
              LocalDef(refinedType, mutable = false,
                  ReplaceWithOtherLocalDef(localDef))
            case _ =>
              localDef
          }
        }
        buildInner(newLocalDef, cont)

      case PreTransTree(literal: Literal, _) =>
        /* A `Literal` always has the most precise type it could ever have.
         * There is no point using `computeRefinedType()`.
         */
        buildInner(LocalDef(value.tpe, false,
            ReplaceWithConstant(literal)), cont)

      case PreTransTree(VarRef(LocalIdent(refName)), _)
          if !localIsMutable(refName) =>
        // Alias a plain immutable var ref directly.
        buildInner(LocalDef(computeRefinedType(), false,
            ReplaceWithVarRef(refName, newSimpleState(true), None)), cont)

      case _ =>
        withDedicatedVar(computeRefinedType())
    }
  }
}
/** Adds a [[PreTransBinding]] in front of a result [[PreTransform]].
 *
 *  This can force the binding if the result is a [[PreTransGenTree]].
 */
private def addPreTransBinding(binding: PreTransBinding,
    result: PreTransform): PreTransform = {
  /* This is not the same as
   *   addPreTransBindings(Left(binding) :: Nil, result)
   * because this function is able to optimize the case
   *   result: PreTransLocalDef
   * if `!result.contains(binding) && !binding.isAlreadyUsed`.
   */
  result match {
    case simpleResult: PreTransResult
        if !simpleResult.contains(binding.localDef) && !binding.isAlreadyUsed =>
      /* Eager dce of the binding to avoid unnecessary nesting in
       * PreTransBlock, for better optimization.
       */
      PreTransBlock(finishTransformStat(binding.value), simpleResult)
    case _ =>
      addPreTransBindings(Left(binding) :: Nil, result)
  }
}
/** Adds a sequence of [[PreTransBinding]]s and statements in front of a
 *  result [[PreTransform]].
 *
 *  This can force the bindings if the result is a [[PreTransGenTree]].
 */
private def addPreTransBindings(bindingsAndStats: List[BindingOrStat],
    result: PreTransform): PreTransform = {
  result match {
    case blockResult: PreTransBlock =>
      // Still pre-transformed: keep the bindings symbolic.
      PreTransBlock(bindingsAndStats, blockResult)
    case simpleResult: PreTransResult =>
      PreTransBlock(bindingsAndStats, simpleResult)
    case PreTransRecordTree(tree, tpe, cancelFun) =>
      // Already a concrete tree: materialize the bindings in front of it.
      PreTransRecordTree(
          finishTransformBindings(bindingsAndStats, tree),
          tpe, cancelFun)
    case PreTransTree(tree, tpe) =>
      PreTransTree(
          finishTransformBindings(bindingsAndStats, tree),
          tpe)
  }
}
/** Finds a type as precise as possible which is a supertype of lhs and rhs
 *  but still a subtype of upperBound.
 *  Requires that lhs and rhs be subtypes of upperBound, obviously.
 */
private def constrainedLub(lhs: RefinedType, rhs: RefinedType,
    upperBound: Type): RefinedType = {
  if (upperBound == NoType) {
    RefinedType(upperBound)
  } else if (lhs == rhs) {
    lhs
  } else if (lhs.isNothingType) {
    rhs
  } else if (rhs.isNothingType) {
    lhs
  } else {
    // Lub the base types; exactness is lost, nullability is the union.
    val baseLub = constrainedLub(lhs.base, rhs.base, upperBound)
    RefinedType(baseLub, isExact = false,
        isNullable = lhs.isNullable || rhs.isNullable)
  }
}
/** Finds a type as precise as possible which is a supertype of lhs and rhs
 *  but still a subtype of upperBound.
 *  Requires that lhs and rhs be subtypes of upperBound, obviously.
 */
private def constrainedLub(lhs: Type, rhs: Type, upperBound: Type): Type = {
  // TODO Improve this
  if (upperBound == NoType) upperBound
  else (lhs, rhs) match {
    case _ if lhs == rhs => lhs
    case (NothingType, _) => rhs
    case (_, NothingType) => lhs
    case _ => upperBound
  }
}
  /** Trampolines a pretransform.
   *
   *  Evaluates `tailrec` to completion. When a [[RollbackException]] raised
   *  by *this* trampoline instance is caught, all optimizer state (name
   *  allocators, mutable-name set, state-backup chain) is restored to the
   *  snapshots carried by the exception, and execution restarts from the
   *  exception's continuation. A hard cap (`MaxRollbacksPerMethod`) bounds
   *  the number of retries so that optimization always terminates.
   */
  private def trampoline(tailrec: => TailRec[Tree]): Tree = {
    // scalastyle:off return
    curTrampolineId += 1
    val myTrampolineId = curTrampolineId
    try {
      var rec = () => tailrec
      while (true) {
        try {
          return rec().result
        } catch {
          // Only handle rollbacks addressed to this trampoline; nested
          // trampolines deal with their own.
          case e: RollbackException if e.trampolineId == myTrampolineId =>
            rollbacksCount += 1
            if (rollbacksCount > MaxRollbacksPerMethod)
              throw new TooManyRollbacksException
            // Restore all mutable optimizer state from the snapshots.
            localNameAllocator.restore(e.localNameAllocatorSnapshot)
            mutableLocalNames = e.savedMutableLocalNames
            labelNameAllocator.restore(e.labelNameAllocatorSnapshot)
            // Undo state backups recorded since the rollback point, newest
            // first, until we reach the saved chain.
            val savedStateBackupChain = e.savedStateBackupChain
            var stateBackupsToRestore = stateBackupChain
            while (stateBackupsToRestore ne savedStateBackupChain) {
              stateBackupsToRestore.head.restore()
              stateBackupsToRestore = stateBackupsToRestore.tail
            }
            stateBackupChain = savedStateBackupChain
            rec = e.cont
        }
      }
      throw new AssertionError("Reached end of infinite loop")
    } finally {
      curTrampolineId -= 1
    }
    // scalastyle:on return
  }
}
private[optimizer] object OptimizerCore {
  /** When creating a `freshName` based on a `Binding.This`, use this name as
   *  base.
   */
  private val LocalThisNameForFresh = LocalName("this")
  private val thisOriginalName: OriginalName = OriginalName("this")
  // Classes and methods the optimizer recognizes by name for special
  // treatment (tuples, Nil, js.WrappedArray varargs, ClassTag array
  // creation, Object.clone).
  private val Tuple2Class = ClassName("scala.Tuple2")
  private val NilClass = ClassName("scala.collection.immutable.Nil$")
  private val JSWrappedArrayClass = ClassName("scala.scalajs.js.WrappedArray")
  private val ClassTagModuleClass = ClassName("scala.reflect.ClassTag$")
  private val ObjectCloneName = MethodName("clone", Nil, ClassRef(ObjectClass))
  private val TupleFirstMethodName = MethodName("_1", Nil, ClassRef(ObjectClass))
  private val TupleSecondMethodName = MethodName("_2", Nil, ClassRef(ObjectClass))
  private val ClassTagApplyMethodName =
    MethodName("apply", List(ClassRef(ClassClass)), ClassRef(ClassName("scala.reflect.ClassTag")))
  /** Field structure of a class that the optimizer may inline
   *  (stack-allocate) as a record.
   *
   *  Holds the full list of instance fields (with their declaring class) and
   *  derives a [[RecordType]] with freshened field names, plus lookup tables
   *  keyed by [[FieldID]].
   */
  final class InlineableClassStructure(
      /** `List[ownerClassName -> fieldDef]`. */
      private val allFields: List[(ClassName, FieldDef)]) {
    // Stable identifiers for the fields, in declaration order.
    private[OptimizerCore] val fieldIDs: List[FieldID] =
      allFields.map(field => FieldID(field._1, field._2))
    private[OptimizerCore] val recordType: RecordType = {
      // Freshen field names so that same-named fields of different classes
      // in the hierarchy do not collide in the flattened record.
      val allocator = new FreshNameAllocator.Field
      val recordFields = for {
        (className, f @ FieldDef(flags, FieldIdent(name), originalName, ftpe)) <- allFields
      } yield {
        assert(!flags.namespace.isStatic,
            s"unexpected static field in InlineableClassStructure at ${f.pos}")
        RecordType.Field(allocator.freshName(name), originalName, ftpe,
            flags.isMutable)
      }
      RecordType(recordFields)
    }
    // Maps each FieldID to its record field; relies on recordType.fields
    // being in the same order as allFields.
    private val recordFieldNames: Map[FieldID, RecordType.Field] = {
      val elems = for (((className, fieldDef), recordField) <- allFields.zip(recordType.fields))
        yield FieldID(className, fieldDef) -> recordField
      elems.toMap
    }
    private[OptimizerCore] def fieldOriginalName(fieldID: FieldID): OriginalName =
      recordFieldNames(fieldID).originalName
    override def equals(that: Any): Boolean = that match {
      case that: InlineableClassStructure =>
        this.allFields == that.allFields
      case _ =>
        false
    }
    override def hashCode(): Int = allFields.##
    override def toString(): String = {
      allFields
        .map(f => s"${f._1.nameString}::${f._2.name.name.nameString}: ${f._2.ftpe}")
        .mkString("InlineableClassStructure(", ", ", ")")
    }
  }
  // Hard cap on rollbacks within a single method; exceeding it aborts
  // optimization of that method (see `trampoline`) to guarantee termination.
  private final val MaxRollbacksPerMethod = 256
  private final class TooManyRollbacksException
      extends scala.util.control.ControlThrowable
  // Name prefix of synthesized anonymous-function classes.
  private val AnonFunctionClassPrefix = "sjsr_AnonFunction"
  // Invoked to cancel an optimistic optimization; never returns (it throws
  // a RollbackException).
  private type CancelFun = () => Nothing
  // Continuation taking the pretransformed value of a subexpression.
  private type PreTransCont = PreTransform => TailRec[Tree]
  /** A type refined with more knowledge than the IR type system carries.
   *
   *  @param base the underlying IR type
   *  @param isExact whether values are known to be exactly of `base` (not a
   *    subtype)
   *  @param isNullable whether `null` is a possible value
   *  @param allocationSite identity of the allocation, for inlining decisions
   */
  private final case class RefinedType private (base: Type, isExact: Boolean,
      isNullable: Boolean)(val allocationSite: AllocationSite, dummy: Int = 0) {
    def isNothingType: Boolean = base == NothingType
  }
  private object RefinedType {
    def apply(base: Type, isExact: Boolean, isNullable: Boolean,
        allocationSite: AllocationSite): RefinedType =
      new RefinedType(base, isExact, isNullable)(allocationSite)
    def apply(base: Type, isExact: Boolean, isNullable: Boolean): RefinedType =
      RefinedType(base, isExact, isNullable, AllocationSite.Anonymous)
    /** Derives the most precise refinement knowable from the IR type alone. */
    def apply(tpe: Type): RefinedType = tpe match {
      case AnyType | ClassType(_) | ArrayType(_) =>
        RefinedType(tpe, isExact = false, isNullable = true)
      case NullType =>
        RefinedType(tpe, isExact = true, isNullable = true)
      case NothingType | UndefType | BooleanType | CharType | LongType |
          StringType | NoType =>
        RefinedType(tpe, isExact = true, isNullable = false)
      case ByteType | ShortType | IntType | FloatType | DoubleType |
          RecordType(_) =>
        /* At run-time, a byte will answer true to `x.isInstanceOf[Int]`,
         * therefore `byte`s must be non-exact. The same reasoning applies to
         * other primitive numeric types.
         */
        RefinedType(tpe, isExact = false, isNullable = false)
    }
    val NoRefinedType = RefinedType(NoType)
    val Nothing = RefinedType(NothingType)
  }
  /**
   * Global, lexical identity of an inlined object, given by the source
   * location of its allocation.
   *
   * A crucial property of AllocationSite is that there is a finite amount of
   * them, function of the program source. It is not permitted to create
   * AllocationSites out of trees generated by the optimizer, as it would
   * potentially grow the supply to an infinite amount.
   */
  private sealed abstract class AllocationSite
  private object AllocationSite {
    /** Allocation site for values with no identifiable source allocation. */
    object Anonymous extends AllocationSite {
      override def toString(): String = "AllocationSite(<anonymous>)"
    }
    def Tree(tree: Tree): AllocationSite = new TreeAllocationSite(tree)
    // Identified by reference equality of the source tree, so two sites are
    // equal iff they wrap the very same Tree node.
    private class TreeAllocationSite(
        private val node: Tree) extends AllocationSite {
      override def equals(that: Any): Boolean = that match {
        case that: TreeAllocationSite => this.node eq that.node
        case _                        => false
      }
      override def hashCode(): Int =
        System.identityHashCode(node)
      override def toString(): String =
        s"AllocationSite($node)"
    }
  }
  /** The optimizer's view of a local variable (or of `this`).
   *
   *  @param tpe refined type of the variable's value
   *  @param mutable whether the variable can be reassigned
   *  @param replacement how occurrences of the variable are materialized
   */
  private final case class LocalDef(
      tpe: RefinedType,
      mutable: Boolean,
      replacement: LocalDefReplacement) {
    def toPreTransform(implicit pos: Position): PreTransform = {
      replacement match {
        case ReplaceWithConstant(value) => PreTransTree(value)
        case _ => PreTransLocalDef(this)
      }
    }
    /** Materializes a reference to this local as an actual Tree. */
    def newReplacement(implicit pos: Position): Tree =
      newReplacementInternal(replacement)
    // tailrec because ReplaceWithOtherLocalDef chains delegate to the
    // underlying LocalDef's replacement.
    @tailrec
    private def newReplacementInternal(replacement: LocalDefReplacement)(
        implicit pos: Position): Tree = replacement match {
      case ReplaceWithVarRef(name, used, _) =>
        used.value = true
        VarRef(LocalIdent(name))(tpe.base)
      /* Allocate an instance of RuntimeLong on the fly.
       * See the comment in finishTransformExpr about why it is desirable and
       * safe to do so.
       */
      case ReplaceWithRecordVarRef(name, recordType, used, _)
          if tpe.base == ClassType(LongImpl.RuntimeLongClass) =>
        used.value = true
        createNewLong(VarRef(LocalIdent(name))(recordType))
      case ReplaceWithRecordVarRef(_, _, _, cancelFun) =>
        // Stack-allocated records cannot be materialized: roll back.
        cancelFun()
      case ReplaceWithThis() =>
        This()(tpe.base)
      case ReplaceWithOtherLocalDef(localDef) =>
        newReplacementInternal(localDef.replacement)
      case ReplaceWithConstant(value) =>
        value
      case TentativeClosureReplacement(_, _, _, _, _, cancelFun) =>
        cancelFun()
      case InlineClassBeingConstructedReplacement(_, _, cancelFun) =>
        cancelFun()
      /* Allocate an instance of RuntimeLong on the fly.
       * See the comment in finishTransformExpr about why it is desirable and
       * safe to do so.
       */
      case InlineClassInstanceReplacement(structure, fieldLocalDefs, _)
          if tpe.base == ClassType(LongImpl.RuntimeLongClass) =>
        val List(loField, hiField) = structure.fieldIDs
        val lo = fieldLocalDefs(loField).newReplacement
        val hi = fieldLocalDefs(hiField).newReplacement
        createNewLong(lo, hi)
      case InlineClassInstanceReplacement(_, _, cancelFun) =>
        cancelFun()
      case InlineJSArrayReplacement(_, cancelFun) =>
        cancelFun()
    }
    /** Tests whether this local transitively refers to `that` through its
     *  replacement (captures, inlined fields, inlined array elements, …).
     */
    def contains(that: LocalDef): Boolean = {
      (this eq that) || (replacement match {
        case ReplaceWithOtherLocalDef(localDef) =>
          localDef.contains(that)
        case TentativeClosureReplacement(_, _, _, captureLocalDefs, _, _) =>
          captureLocalDefs.exists(_.contains(that))
        case InlineClassBeingConstructedReplacement(_, fieldLocalDefs, _) =>
          fieldLocalDefs.valuesIterator.exists(_.contains(that))
        case InlineClassInstanceReplacement(_, fieldLocalDefs, _) =>
          fieldLocalDefs.valuesIterator.exists(_.contains(that))
        case InlineJSArrayReplacement(elemLocalDefs, _) =>
          elemLocalDefs.exists(_.contains(that))
        case _:ReplaceWithVarRef | _:ReplaceWithRecordVarRef |
            _:ReplaceWithThis | _:ReplaceWithConstant =>
          false
      })
    }
  }
  /** How occurrences of a [[LocalDef]] are materialized when needed. */
  private sealed abstract class LocalDefReplacement
  /** Emit a plain variable reference; `used` tracks whether the VarDef must
   *  actually be emitted, `longOpTree` optionally rebuilds a Long operation.
   */
  private final case class ReplaceWithVarRef(name: LocalName,
      used: SimpleState[Boolean],
      longOpTree: Option[() => Tree]) extends LocalDefReplacement
  /** Emit a reference to a record-typed variable (stack-allocated object);
   *  `cancelFun` rolls back if the record must escape.
   */
  private final case class ReplaceWithRecordVarRef(name: LocalName,
      recordType: RecordType,
      used: SimpleState[Boolean],
      cancelFun: CancelFun) extends LocalDefReplacement
  /** Emit a reference to `this`. */
  private final case class ReplaceWithThis() extends LocalDefReplacement
  /** An alias to another `LocalDef`, used only to refine the type of that
   *  `LocalDef` in a specific scope.
   *
   *  This happens when refining the type of a `this` binding in an inlined
   *  method body.
   */
  private final case class ReplaceWithOtherLocalDef(localDef: LocalDef)
      extends LocalDefReplacement
  /** Emit the constant value directly. */
  private final case class ReplaceWithConstant(
      value: Tree) extends LocalDefReplacement
  /** A closure kept virtual so it can be inlined at call sites; `cancelFun`
   *  rolls back if it must be materialized after being used.
   */
  private final case class TentativeClosureReplacement(
      captureParams: List[ParamDef], params: List[ParamDef], body: Tree,
      captureValues: List[LocalDef],
      alreadyUsed: SimpleState[Boolean],
      cancelFun: CancelFun) extends LocalDefReplacement
  /** A virtual object whose constructor is still running; its fields live in
   *  `fieldLocalDefs`.
   */
  private final case class InlineClassBeingConstructedReplacement(
      structure: InlineableClassStructure,
      fieldLocalDefs: Map[FieldID, LocalDef],
      cancelFun: CancelFun) extends LocalDefReplacement
  /** A fully constructed virtual (stack-allocated) object. */
  private final case class InlineClassInstanceReplacement(
      structure: InlineableClassStructure,
      fieldLocalDefs: Map[FieldID, LocalDef],
      cancelFun: CancelFun) extends LocalDefReplacement
  /** A virtual JS array literal whose elements are tracked individually. */
  private final case class InlineJSArrayReplacement(
      elemLocalDefs: Vector[LocalDef],
      cancelFun: CancelFun) extends LocalDefReplacement
  /** Information about a labeled block being transformed.
   *
   *  @param newName the freshened label name
   *  @param acceptRecords whether returns of record type are allowed
   *  @param returnedTypes types returned so far, accumulated mutably
   */
  private final class LabelInfo(
      val newName: LabelName,
      val acceptRecords: Boolean,
      /** (actualType, originalType), actualType can be a RecordType. */
      val returnedTypes: SimpleState[List[(Type, RefinedType)]])
  /** Immutable optimization environment: bindings for `this`, local
   *  variables, and labels. All `with*` methods return updated copies.
   */
  private class OptEnv(
      val thisLocalDef: Option[LocalDef],
      val localDefs: Map[LocalName, LocalDef],
      val labelInfos: Map[LabelName, LabelInfo]) {
    def withThisLocalDef(rep: LocalDef): OptEnv =
      new OptEnv(Some(rep), localDefs, labelInfos)
    def withLocalDef(oldName: LocalName, rep: LocalDef): OptEnv =
      new OptEnv(thisLocalDef, localDefs + (oldName -> rep), labelInfos)
    /** Binds either `this` or a named local, depending on `oldName`. */
    def withLocalDef(oldName: Binding.Name, rep: LocalDef): OptEnv = {
      oldName match {
        case Binding.This => withThisLocalDef(rep)
        case Binding.Local(name, _) => withLocalDef(name, rep)
      }
    }
    def withLocalDefs(reps: List[(LocalName, LocalDef)]): OptEnv =
      new OptEnv(thisLocalDef, localDefs ++ reps, labelInfos)
    def withLabelInfo(oldName: LabelName, info: LabelInfo): OptEnv =
      new OptEnv(thisLocalDef, localDefs, labelInfos + (oldName -> info))
    override def toString(): String = {
      "thisLocalDef:\\n " + thisLocalDef.fold("<none>")(_.toString()) + "\\n" +
      "localDefs:" + localDefs.mkString("\\n ", "\\n ", "\\n") +
      "labelInfos:" + labelInfos.mkString("\\n ", "\\n ", "")
    }
  }
  private object OptEnv {
    /** Environment with no `this`, no locals, and no labels. */
    val Empty: OptEnv = new OptEnv(None, Map.empty, Map.empty)
  }
  /** An [[OptEnv]] plus the set of method implementations currently being
   *  inlined, used to detect (and assert against) circular inlining.
   */
  private class Scope(val env: OptEnv,
      val implsBeingInlined: Set[Scope.InliningID]) {
    def withEnv(env: OptEnv): Scope =
      new Scope(env, implsBeingInlined)
    def inlining(impl: Scope.InliningID): Scope = {
      assert(!implsBeingInlined(impl), s"Circular inlining of $impl")
      new Scope(env, implsBeingInlined + impl)
    }
    def inlining(impls: Set[Scope.InliningID]): Scope = {
      val intersection = implsBeingInlined.intersect(impls)
      assert(intersection.isEmpty, s"Circular inlining of $intersection")
      new Scope(env, implsBeingInlined ++ impls)
    }
  }
  private object Scope {
    /** A method together with the allocation sites of its receiver/args,
     *  identifying one inlining in progress.
     */
    type InliningID = (List[AllocationSite], AbstractMethodID)
    val Empty: Scope = new Scope(OptEnv.Empty, Set.empty)
  }
  /** The result of pretransformExpr().
   *
   * A `PreTransform` is a virtualized representation of an expression. It
   * serves two major purposes:
   * - Holding references to virtual objects that are being partially
   *   evaluated (see notably `PreTransLocalDef`) or stack-allocated as
   *   records (see notably `PreTransRecordTree`).
   * - Keep arguments of nodes that are potentially side-effect-free as
   *   virtual as possible, so that, should their value not be used, the
   *   variables that are referenced can also be dead-code-eliminated.
   *
   * A `PreTransform` has a `tpe` as precisely refined as if a full
   * `transformExpr()` had been performed.
   * It is also not dependent on the environment anymore. In some sense, it
   * has "captured" its environment at definition site.
   */
  private sealed abstract class PreTransform {
    def pos: Position
    val tpe: RefinedType
    /** Tests whether this pretransform transitively refers to `localDef`. */
    def contains(localDef: LocalDef): Boolean = this match {
      case PreTransBlock(bindingsAndStats, result) =>
        result.contains(localDef) || bindingsAndStats.exists {
          case Left(PreTransBinding(_, _, value)) => value.contains(localDef)
          case Right(_) => false
        }
      case PreTransUnaryOp(_, lhs) =>
        lhs.contains(localDef)
      case PreTransBinaryOp(_, lhs, rhs) =>
        lhs.contains(localDef) || rhs.contains(localDef)
      case PreTransJSBinaryOp(_, lhs, rhs) =>
        lhs.contains(localDef) || rhs.contains(localDef)
      case PreTransLocalDef(thisLocalDef) =>
        thisLocalDef.contains(localDef)
      case _: PreTransGenTree =>
        // Fully materialized trees do not keep LocalDef references.
        false
    }
  }
  /** A pretransformed binding, part of a [[PreTransBlock]].
   *
   * Even though it is not encoded in the type system, `localDef.replacement`
   * must be a [[ReplaceWithVarRef]] or a [[ReplaceWithRecordVarRef]].
   */
  private final case class PreTransBinding(originalName: OriginalName,
      localDef: LocalDef, value: PreTransform) {
    assert(
        localDef.replacement.isInstanceOf[ReplaceWithVarRef] ||
        localDef.replacement.isInstanceOf[ReplaceWithRecordVarRef],
        "Cannot create a PreTransBinding with non-var-ref replacement " +
        localDef.replacement)
    /** Whether the bound variable has already been referenced (and therefore
     *  its VarDef must be emitted).
     */
    def isAlreadyUsed: Boolean = (localDef.replacement: @unchecked) match {
      case ReplaceWithVarRef(_, used, _) => used.value
      case ReplaceWithRecordVarRef(_, _, used, _) => used.value
    }
  }
  // An element of a PreTransBlock: either a virtual binding or an already
  // transformed statement.
  private type BindingOrStat = Either[PreTransBinding, Tree]
  /** A pretransformed block, with bindings and statements followed by a result.
   *
   * Statements in `bindingsAndStats` should have been stripped of their
   * side-effect-free parts. They should not be `VarDef`s.
   *
   * Variable definitions should be `PreTransBinding`s instead, so that they
   * can eventually be dead-code-eliminated should their value never be used.
   */
  private final class PreTransBlock private (
      val bindingsAndStats: List[BindingOrStat],
      val result: PreTransResult) extends PreTransform {
    def pos: Position = result.pos
    val tpe = result.tpe
    // An empty PreTransBlock would just be its result; the companion's
    // factories enforce this by returning `result` directly in that case.
    assert(bindingsAndStats.nonEmpty)
    override def toString(): String =
      s"PreTransBlock($bindingsAndStats,$result)"
  }
private object PreTransBlock {
def apply(bindingsAndStats: List[BindingOrStat],
result: PreTransResult): PreTransform = {
if (bindingsAndStats.isEmpty) result
else new PreTransBlock(bindingsAndStats, result)
}
def apply(bindingsAndStats: List[BindingOrStat],
result: PreTransBlock): PreTransform = {
new PreTransBlock(bindingsAndStats ::: result.bindingsAndStats,
result.result)
}
def apply(binding: PreTransBinding, result: PreTransBlock): PreTransform = {
new PreTransBlock(Left(binding) :: result.bindingsAndStats, result.result)
}
@deprecated(
"You shouldn't be trying to create a PreTransBlock from a Tree stat " +
"when the static type of the result is already a PreTransRecordTree. " +
"Prefer directly creating the relevant PreTransRecordTree",
"forever")
def apply(stat: Tree, result: PreTransRecordTree): PreTransRecordTree = {
PreTransRecordTree(Block(stat, result.tree)(result.pos), result.tpe,
result.cancelFun)
}
@deprecated(
"You shouldn't be trying to create a PreTransBlock from a Tree stat " +
"when the static type of the result is already a PreTransTree. " +
"Prefer directly creating the relevant PreTransTree",
"forever")
def apply(stat: Tree, result: PreTransTree): PreTransTree =
PreTransTree(Block(stat, result.tree)(result.pos), result.tpe)
def apply(stat: Tree, result: PreTransform): PreTransform = {
if (stat.isInstanceOf[Skip]) result
else {
result match {
case PreTransBlock(innerBindingsAndStats, innerResult) =>
new PreTransBlock(Right(stat) :: innerBindingsAndStats, innerResult)
case result: PreTransResult =>
new PreTransBlock(Right(stat) :: Nil, result)
case PreTransRecordTree(tree, tpe, cancelFun) =>
PreTransRecordTree(Block(stat, tree)(tree.pos), tpe, cancelFun)
case PreTransTree(tree, tpe) =>
PreTransTree(Block(stat, tree)(tree.pos), tpe)
}
}
}
def unapply(preTrans: PreTransBlock): Some[(List[BindingOrStat], PreTransResult)] =
Some(preTrans.bindingsAndStats, preTrans.result)
}
  /** Extractor that views any [[PreTransform]] as a (bindings, result) pair,
   *  using an empty bindings list when it is not a [[PreTransBlock]].
   */
  private object PreTransMaybeBlock {
    def unapply(preTrans: PreTransform): Some[(List[BindingOrStat], PreTransform)] = {
      preTrans match {
        case PreTransBlock(bindingsAndStats, result) =>
          Some((bindingsAndStats, result))
        case _ =>
          Some((Nil, preTrans))
      }
    }
  }
  /** A `PreTransform` that can be the result of a `PreTransBlock`.
   *
   * This is basically any `PreTransform` except:
   * - `PreTransBlock` itself (as `PreTransBlock`s flatten out)
   * - `PreTransGenTree` subclasses, as they would force the `PreTransBlock`
   *   to become a `PreTransGenTree` itself.
   */
  private sealed abstract class PreTransResult extends PreTransform
  /** A `PreTransform` for a `UnaryOp`. */
  private final case class PreTransUnaryOp(op: UnaryOp.Code,
      lhs: PreTransform)(implicit val pos: Position)
      extends PreTransResult {
    val tpe: RefinedType = RefinedType(UnaryOp.resultTypeOf(op))
  }
  /** A `PreTransform` for a `BinaryOp`. */
  private final case class PreTransBinaryOp(op: BinaryOp.Code,
      lhs: PreTransform, rhs: PreTransform)(implicit val pos: Position)
      extends PreTransResult {
    val tpe: RefinedType = RefinedType(BinaryOp.resultTypeOf(op))
  }
  /** A `PreTransform` for a `JSBinaryOp`. */
  private final case class PreTransJSBinaryOp(op: JSBinaryOp.Code,
      lhs: PreTransform, rhs: PreTransform)(implicit val pos: Position)
      extends PreTransResult {
    val tpe: RefinedType = RefinedType(JSBinaryOp.resultTypeOf(op))
  }
  private object PreTransJSBinaryOp {
    // Only reference (in)equality benefits from staying virtual.
    def isWorthPreTransforming(op: JSBinaryOp.Code): Boolean =
      op == JSBinaryOp.=== || op == JSBinaryOp.!==
  }
  /** A virtual reference to a `LocalDef`. */
  private final case class PreTransLocalDef(localDef: LocalDef)(
      implicit val pos: Position) extends PreTransResult {
    val tpe: RefinedType = localDef.tpe
  }
  /** Either a `PreTransTree` or a `PreTransRecordTree`.
   *
   * This is the result type of `resolveLocalDef`.
   */
  private sealed abstract class PreTransGenTree extends PreTransform
  /** A completely transformed `Tree` with a `RecordType` wrapped in
   * `PreTransform`.
   *
   * The `tpe` of a `PreTransRecordTree` is the refined *original* type of
   * the expression (such as a `ClassType` for a stack-allocated object),
   * whereas `tree.tpe` is always the lowered `RecordType`.
   */
  private final case class PreTransRecordTree(tree: Tree,
      tpe: RefinedType, cancelFun: CancelFun) extends PreTransGenTree {
    def pos: Position = tree.pos
    assert(tree.tpe.isInstanceOf[RecordType],
        s"Cannot create a PreTransRecordTree with non-record type ${tree.tpe}")
  }
  /** A completely transformed `Tree` wrapped in `PreTransform`.
   *
   * The `Tree` cannot have a `RecordType`. If it had, it should/would be a
   * `PreTransRecordTree` instead.
   */
  private final case class PreTransTree(tree: Tree,
      tpe: RefinedType) extends PreTransGenTree {
    def pos: Position = tree.pos
    assert(!tree.tpe.isInstanceOf[RecordType],
        s"Cannot create a Tree with record type ${tree.tpe}")
  }
  private object PreTransTree {
    /** Wraps `tree`, refining its type as exact and non-nullable when the
     *  tree shape guarantees it (module loads, array/class-of allocations).
     */
    def apply(tree: Tree): PreTransTree = {
      val refinedTpe: RefinedType = tree match {
        case BlockOrAlone(_,
            _:LoadModule | _:NewArray | _:ArrayValue | _:GetClass |
            _:ClassOf) =>
          RefinedType(tree.tpe, isExact = true, isNullable = false)
        case _ =>
          RefinedType(tree.tpe)
      }
      PreTransTree(tree, refinedTpe)
    }
  }
  /** Extension methods on [[Tree]] for the optimizer. */
  private implicit class OptimizerTreeOps private[OptimizerCore] (
      private val self: Tree)
      extends AnyVal {
    /** Re-virtualizes a transformed tree, recursively turning unary/binary
     *  ops back into `PreTransResult`s so they can keep being optimized.
     */
    def toPreTransform: PreTransform = {
      self match {
        case UnaryOp(op, lhs) =>
          PreTransUnaryOp(op, lhs.toPreTransform)(self.pos)
        case BinaryOp(op, lhs, rhs) =>
          PreTransBinaryOp(op, lhs.toPreTransform, rhs.toPreTransform)(self.pos)
        case JSBinaryOp(op, lhs, rhs) if PreTransJSBinaryOp.isWorthPreTransforming(op) =>
          PreTransJSBinaryOp(op, lhs.toPreTransform, rhs.toPreTransform)(self.pos)
        case _ =>
          PreTransTree(self)
      }
    }
  }
  /** Extractor for a `PreTransTree` that contains a `Literal`. */
  private object PreTransLit {
    def apply(tree: Literal): PreTransTree =
      PreTransTree(tree)
    def unapply(preTrans: PreTransTree): Option[Literal] = preTrans.tree match {
      case tree: Literal => Some(tree)
      case _ => None
    }
  }
  /** A binding to introduce during pretransformation: either `this` or a
   *  named local, with its declared type, mutability, and virtual value.
   */
  private final case class Binding(name: Binding.Name, declaredType: Type,
      mutable: Boolean, value: PreTransform)
  private object Binding {
    sealed abstract class Name
    case object This extends Name
    final case class Local(name: LocalName, originalName: OriginalName)
        extends Name
    def apply(localIdent: LocalIdent, originalName: OriginalName,
        declaredType: Type, mutable: Boolean, value: PreTransform): Binding = {
      apply(Local(localIdent.name, originalName), declaredType,
          mutable, value)
    }
    /** Creates a binding with a fresh synthetic name based on `baseName`. */
    def temp(baseName: LocalName, declaredType: Type, mutable: Boolean,
        value: PreTransform): Binding = {
      apply(Local(baseName, NoOriginalName), declaredType, mutable, value)
    }
  }
  /** Constructor/extractor for a Long value known to come from an Int.
   *
   *  `apply` constant-folds int literals; `unapply` also matches long
   *  literals whose value fits in an Int.
   */
  private object LongFromInt {
    def apply(x: PreTransform)(implicit pos: Position): PreTransform = x match {
      case PreTransLit(IntLiteral(v)) =>
        PreTransLit(LongLiteral(v))
      case _ =>
        PreTransUnaryOp(UnaryOp.IntToLong, x)
    }
    def unapply(tree: PreTransform): Option[PreTransform] = tree match {
      case PreTransLit(LongLiteral(v)) if v.toInt == v =>
        Some(PreTransLit(IntLiteral(v.toInt)(tree.pos)))
      case PreTransUnaryOp(UnaryOp.IntToLong, x) =>
        Some(x)
      case _ =>
        None
    }
  }
  /** Builds the short-circuiting boolean `lhs && rhs` as an `If` node. */
  private object AndThen {
    def apply(lhs: Tree, rhs: Tree)(implicit pos: Position): Tree =
      If(lhs, rhs, BooleanLiteral(false))(BooleanType)
  }
  /** Creates a new instance of `RuntimeLong` from a record of its `lo` and
   * `hi` parts.
   */
  private def createNewLong(recordVarRef: VarRef)(
      implicit pos: Position): Tree = {
    // The record of a RuntimeLong has exactly two fields: lo then hi.
    val RecordType(List(loField, hiField)) = recordVarRef.tpe
    createNewLong(
        RecordSelect(recordVarRef, FieldIdent(loField.name))(IntType),
        RecordSelect(recordVarRef, FieldIdent(hiField.name))(IntType))
  }
  /** Creates a new instance of `RuntimeLong` from its `lo` and `hi` parts. */
  private def createNewLong(lo: Tree, hi: Tree)(
      implicit pos: Position): Tree = {
    New(LongImpl.RuntimeLongClass, MethodIdent(LongImpl.initFromParts),
        List(lo, hi))
  }
/** Tests whether `x + y` is valid without falling out of range. */
private def canAddLongs(x: Long, y: Long): Boolean =
if (y >= 0) x+y >= x
else x+y < x
/** Tests whether `x - y` is valid without falling out of range. */
private def canSubtractLongs(x: Long, y: Long): Boolean =
if (y >= 0) x-y <= x
else x-y > x
/** Tests whether `-x` is valid without falling out of range. */
private def canNegateLong(x: Long): Boolean =
x != Long.MinValue
private final class Intrinsics(intrinsicsMap: Map[(ClassName, MethodName), Int]) {
def apply(flags: ApplyFlags, target: AbstractMethodID): Int = {
if (flags.isPrivate || flags.isConstructor) {
-1
} else {
val key = (target.enclosingClassName, target.methodName)
intrinsicsMap.getOrElse(key, -1)
}
}
}
  private object Intrinsics {
    // Intrinsic codes, assigned sequentially starting from 1 (0 is unused;
    // -1 means "not an intrinsic").
    final val ArrayCopy = 1
    final val ArrayApply = ArrayCopy + 1
    final val ArrayUpdate = ArrayApply + 1
    final val ArrayLength = ArrayUpdate + 1
    final val IntegerNLZ = ArrayLength + 1
    final val LongToString = IntegerNLZ + 1
    final val LongCompare = LongToString + 1
    final val LongDivideUnsigned = LongCompare + 1
    final val LongRemainderUnsigned = LongDivideUnsigned + 1
    final val ArrayBuilderZeroOf = LongRemainderUnsigned + 1
    final val GenericArrayBuilderResult = ArrayBuilderZeroOf + 1
    final val ClassGetComponentType = GenericArrayBuilderResult + 1
    final val ClassGetName = ClassGetComponentType + 1
    final val ArrayNewInstance = ClassGetName + 1
    final val ObjectLiteral = ArrayNewInstance + 1
    final val ByteArrayToInt8Array = ObjectLiteral + 1
    final val ShortArrayToInt16Array = ByteArrayToInt8Array + 1
    final val CharArrayToUint16Array = ShortArrayToInt16Array + 1
    final val IntArrayToInt32Array = CharArrayToUint16Array + 1
    final val FloatArrayToFloat32Array = IntArrayToInt32Array + 1
    final val DoubleArrayToFloat64Array = FloatArrayToFloat32Array + 1
    final val Int8ArrayToByteArray = DoubleArrayToFloat64Array + 1
    final val Int16ArrayToShortArray = Int8ArrayToByteArray + 1
    final val Uint16ArrayToCharArray = Int16ArrayToShortArray + 1
    final val Int32ArrayToIntArray = Uint16ArrayToCharArray + 1
    final val Float32ArrayToFloatArray = Int32ArrayToIntArray + 1
    final val Float64ArrayToDoubleArray = Float32ArrayToFloatArray + 1
    // Shorthand to build a MethodName for the tables below.
    private def m(name: String, paramTypeRefs: List[TypeRef],
        resultTypeRef: TypeRef): MethodName = {
      MethodName(name, paramTypeRefs, resultTypeRef)
    }
    // Abbreviated type refs used in the method signatures below.
    private val V = VoidRef
    private val I = IntRef
    private val J = LongRef
    private val O = ClassRef(ObjectClass)
    private val ClassClassRef = ClassRef(ClassClass)
    private val StringClassRef = ClassRef(BoxedStringClass)
    private val SeqClassRef = ClassRef(ClassName("scala.collection.Seq"))
    private val JSObjectClassRef = ClassRef(ClassName("scala.scalajs.js.Object"))
    private val JSArrayClassRef = ClassRef(ClassName("scala.scalajs.js.Array"))
    private def a(base: NonArrayTypeRef): ArrayTypeRef = ArrayTypeRef(base, 1)
    private def typedarrayClassRef(baseName: String): ClassRef =
      ClassRef(ClassName(s"scala.scalajs.js.typedarray.${baseName}Array"))
    // scalastyle:off line.size.limit
    // Intrinsics available regardless of how Longs are implemented.
    private val baseIntrinsics: List[(ClassName, List[(MethodName, Int)])] = List(
        ClassName("java.lang.System$") -> List(
            m("arraycopy", List(O, I, O, I, I), V) -> ArrayCopy
        ),
        ClassName("scala.runtime.ScalaRunTime$") -> List(
            m("array_apply", List(O, I), O) -> ArrayApply,
            m("array_update", List(O, I, O), V) -> ArrayUpdate,
            m("array_length", List(O), I) -> ArrayLength
        ),
        ClassName("java.lang.Integer$") -> List(
            m("numberOfLeadingZeros", List(I), I) -> IntegerNLZ
        ),
        ClassName("scala.collection.mutable.ArrayBuilder$") -> List(
            m("scala$collection$mutable$ArrayBuilder$$zeroOf", List(ClassClassRef), O) -> ArrayBuilderZeroOf,
            m("scala$collection$mutable$ArrayBuilder$$genericArrayBuilderResult", List(ClassClassRef, JSArrayClassRef), O) -> GenericArrayBuilderResult
        ),
        ClassName("java.lang.Class") -> List(
            m("getComponentType", Nil, ClassClassRef) -> ClassGetComponentType,
            m("getName", Nil, StringClassRef) -> ClassGetName
        ),
        ClassName("java.lang.reflect.Array$") -> List(
            m("newInstance", List(ClassClassRef, I), O) -> ArrayNewInstance
        ),
        ClassName("scala.scalajs.js.special.package$") -> List(
            m("objectLiteral", List(SeqClassRef), JSObjectClassRef) -> ObjectLiteral
        ),
        ClassName("scala.scalajs.js.typedarray.package$") -> List(
            m("byteArray2Int8Array", List(a(ByteRef)), typedarrayClassRef("Int8")) -> ByteArrayToInt8Array,
            m("shortArray2Int16Array", List(a(ShortRef)), typedarrayClassRef("Int16")) -> ShortArrayToInt16Array,
            m("charArray2Uint16Array", List(a(CharRef)), typedarrayClassRef("Uint16")) -> CharArrayToUint16Array,
            m("intArray2Int32Array", List(a(IntRef)), typedarrayClassRef("Int32")) -> IntArrayToInt32Array,
            m("floatArray2Float32Array", List(a(FloatRef)), typedarrayClassRef("Float32")) -> FloatArrayToFloat32Array,
            m("doubleArray2Float64Array", List(a(DoubleRef)), typedarrayClassRef("Float64")) -> DoubleArrayToFloat64Array,
            m("int8Array2ByteArray", List(typedarrayClassRef("Int8")), a(ByteRef)) -> Int8ArrayToByteArray,
            m("int16Array2ShortArray", List(typedarrayClassRef("Int16")), a(ShortRef)) -> Int16ArrayToShortArray,
            m("uint16Array2CharArray", List(typedarrayClassRef("Uint16")), a(CharRef)) -> Uint16ArrayToCharArray,
            m("int32Array2IntArray", List(typedarrayClassRef("Int32")), a(IntRef)) -> Int32ArrayToIntArray,
            m("float32Array2FloatArray", List(typedarrayClassRef("Float32")), a(FloatRef)) -> Float32ArrayToFloatArray,
            m("float64Array2DoubleArray", List(typedarrayClassRef("Float64")), a(DoubleRef)) -> Float64ArrayToDoubleArray
        )
    )
    // Long intrinsics only used when Longs are user-space RuntimeLongs
    // (i.e., when BigInt-based Longs are not allowed).
    private val runtimeLongIntrinsics: List[(ClassName, List[(MethodName, Int)])] = List(
        ClassName("java.lang.Long$") -> List(
            m("toString", List(J), ClassRef(BoxedStringClass)) -> LongToString,
            m("compare", List(J, J), I) -> LongCompare,
            m("divideUnsigned", List(J, J), J) -> LongDivideUnsigned,
            m("remainderUnsigned", List(J, J), J) -> LongRemainderUnsigned
        )
    )
    // scalastyle:on line.size.limit
    /** Builds the intrinsics table appropriate for the target ES features. */
    def buildIntrinsics(esFeatures: ESFeatures): Intrinsics = {
      val allIntrinsics =
        if (esFeatures.allowBigIntsForLongs) baseIntrinsics
        else baseIntrinsics ++ runtimeLongIntrinsics
      val intrinsicsMap = (for {
        (className, methodsAndCodes) <- allIntrinsics
        (methodName, code) <- methodsAndCodes
      } yield {
        (className, methodName) -> code
      }).toMap
      new Intrinsics(intrinsicsMap)
    }
  }
  /** A recorded piece of mutable optimizer state that can be restored when a
   *  rollback occurs (see `trampoline`).
   */
  private trait StateBackup {
    def restore(): Unit
  }
  /** A mutable cell whose writes are automatically recorded with its owner
   *  so they can be undone on rollback.
   */
  private class SimpleState[A](owner: OptimizerCore, private var _value: A) {
    def value: A = _value
    def value_=(v: A): Unit = {
      // Only record a backup when the reference actually changes, to keep
      // the backup chain short.
      if (v.asInstanceOf[AnyRef] ne _value.asInstanceOf[AnyRef]) {
        owner.addStateBackup(new Backup(_value))
        _value = v
      }
    }
    private class Backup(savedValue: A) extends StateBackup {
      override def restore(): Unit = value = savedValue
    }
  }
  /** Identity of a method as seen by the optimizer, with its inlining
   *  attributes.
   */
  trait AbstractMethodID {
    def enclosingClassName: ClassName
    def methodName: MethodName
    def inlineable: Boolean
    def shouldInline: Boolean
    def isForwarder: Boolean
    /** Tests whether this is the method `methodName` of `className`. */
    final def is(className: ClassName, methodName: MethodName): Boolean =
      this.enclosingClassName == className && this.methodName == methodName
  }
  /** Parts of [[GenIncOptimizer#MethodImpl]] with decisions about optimizations. */
  abstract class MethodImpl {
    def methodName: MethodName
    def optimizerHints: OptimizerHints
    def originalDef: MethodDef
    def thisType: Type
    protected type Attributes = MethodImpl.Attributes
    protected def attributes: Attributes
    final def inlineable: Boolean = attributes.inlineable
    final def shouldInline: Boolean = attributes.shouldInline
    final def isForwarder: Boolean = attributes.isForwarder
    /** Recomputes inlining attributes by recognizing the shape of the body. */
    protected def computeNewAttributes(): Attributes = {
      val MethodDef(_, MethodIdent(methodName), _, params, _, optBody) = originalDef
      val body = optBody getOrElse {
        throw new AssertionError("Methods in optimizer must be concrete")
      }
      val isForwarder = body match {
        // Shape of forwarders to trait impls
        case ApplyStatic(_, impl, method, args) =>
          ((args.size == params.size + 1) &&
              (args.head.isInstanceOf[This]) &&
              (args.tail.zip(params).forall {
                case (VarRef(LocalIdent(aname)),
                    ParamDef(LocalIdent(pname), _, _, _)) => aname == pname
                case _ => false
              }))
        // Shape of forwards to default methods
        case ApplyStatically(_, This(), className, method, args) =>
          args.size == params.size &&
          args.zip(params).forall {
            case (VarRef(LocalIdent(aname)), ParamDef(LocalIdent(pname), _, _, _)) =>
              aname == pname
            case _ =>
              false
          }
        // Shape of bridges for generic methods
        case Apply(_, This(), method, args) =>
          (args.size == params.size) &&
          args.zip(params).forall {
            case (MaybeUnbox(VarRef(LocalIdent(aname)), _),
                ParamDef(LocalIdent(pname), _, _, _)) => aname == pname
            case _ => false
          }
        case _ => false
      }
      val inlineable = !optimizerHints.noinline
      val shouldInline = inlineable && {
        optimizerHints.inline || isForwarder || {
          body match {
            case _:Skip | _:This | _:Literal =>
              true
            // Shape of accessors
            case Select(This(), _, _) if params.isEmpty =>
              true
            case Assign(Select(This(), _, _), VarRef(_)) if params.size == 1 =>
              true
            // Shape of trivial call-super constructors
            case Block(stats)
                if params.isEmpty && methodName.isConstructor &&
                    stats.forall(isTrivialConstructorStat) =>
              true
            // Simple method
            case SimpleMethodBody() =>
              true
            case _ =>
              false
          }
        }
      }
      MethodImpl.Attributes(inlineable, shouldInline, isForwarder)
    }
  }
  object MethodImpl {
    /** Inlining attributes of a method, derived from its body's shape. */
    final case class Attributes(
        inlineable: Boolean,
        shouldInline: Boolean,
        isForwarder: Boolean
    )
  }
/** Extractor that looks through at most one `AsInstanceOf` cast.
 *
 *  Always matches: yields the unwrapped tree plus the cast's target type,
 *  or the tree itself paired with `()` when no cast is present.
 */
private object MaybeUnbox {
  def unapply(tree: Tree): Some[(Tree, Any)] = tree match {
    case AsInstanceOf(unwrapped, targetTpe) => Some((unwrapped, targetTpe))
    case other                              => Some((other, ()))
  }
}
// Simple name of the synthetic trait initializer method ("$init$").
private val TraitInitSimpleMethodName = SimpleMethodName("$init$")

/** Tests whether a constructor statement is trivial enough to allow the
 *  whole constructor to be inlined: a bare `this` reference, a super/this
 *  constructor call without arguments, or a static call to a trait's
 *  `$init$` with `this` as its only argument.
 */
private def isTrivialConstructorStat(stat: Tree): Boolean = stat match {
  case This() =>
    true
  case ApplyStatically(_, This(), _, _, Nil) =>
    true
  case ApplyStatic(_, _, MethodIdent(methodName), This() :: Nil) =>
    methodName.simpleName == TraitInitSimpleMethodName
  case _ =>
    false
}
/** Extractor recognizing method bodies simple enough to inline eagerly:
 *  essentially a single call (or field/array access) whose arguments are
 *  themselves "simple" — at most one level of zero-argument call on
 *  trivial receivers.
 */
private object SimpleMethodBody {
  @tailrec
  final def unapply(body: Tree): Boolean = body match {
    case New(_, _, args)                          => areSimpleArgs(args)
    case Apply(_, receiver, _, args)              => areSimpleArgs(receiver :: args)
    case ApplyStatically(_, receiver, _, _, args) => areSimpleArgs(receiver :: args)
    case ApplyStatic(_, _, _, args)               => areSimpleArgs(args)
    case Select(qual, _, _)                       => isSimpleArg(qual)
    case IsInstanceOf(inner, _)                   => isSimpleArg(inner)

    // Block(expr, Undefined()) is the shape of a Unit-returning body;
    // look through it at the underlying expression.
    case Block(List(inner, Undefined())) =>
      unapply(inner)

    case AsInstanceOf(inner, _) => unapply(inner)
    case _                      => isSimpleArg(body)
  }

  private def areSimpleArgs(args: List[Tree]): Boolean =
    args.forall(isSimpleArg)

  // "Simple": trivial, or one zero-argument call / array access on
  // trivial parts (possibly behind casts or a unary op).
  @tailrec
  private def isSimpleArg(arg: Tree): Boolean = arg match {
    case New(_, _, Nil)                          => true
    case Apply(_, receiver, _, Nil)              => isTrivialArg(receiver)
    case ApplyStatically(_, receiver, _, _, Nil) => isTrivialArg(receiver)
    case ApplyStatic(_, _, _, Nil)               => true

    case ArrayLength(array)        => isTrivialArg(array)
    case ArraySelect(array, index) => isTrivialArg(array) && isTrivialArg(index)

    case AsInstanceOf(inner, _) => isSimpleArg(inner)
    case UnaryOp(_, inner)      => isSimpleArg(inner)

    case _ =>
      isTrivialArg(arg)
  }

  // "Trivial": evaluating it requires no computation at all.
  private def isTrivialArg(arg: Tree): Boolean = arg match {
    case _:VarRef | _:This | _:Literal | _:LoadModule =>
      true
    case _ =>
      false
  }
}
/** Decomposes a tree into (leading statements, result expression).
 *
 *  Always matches: a `Block` is split into its init statements and final
 *  expression; any other tree stands alone with no leading statements.
 */
private object BlockOrAlone {
  def unapply(tree: Tree): Some[(List[Tree], Tree)] = tree match {
    case Block(stats :+ result) => Some((stats, result))
    case alone                  => Some((Nil, alone))
  }
}
/** Builds the message for an [[OptimizeException]]: which method the
 *  optimizer crashed on, the cause, and the chain of methods it had
 *  attempted to inline.
 */
private def exceptionMsg(myself: AbstractMethodID,
    attemptedInlining: List[AbstractMethodID], cause: Throwable) = {
  val buf = new StringBuilder()

  buf.append("The Scala.js optimizer crashed while optimizing " + myself +
      ": " + cause.toString)

  // Fixed: the newlines were double-escaped ("\\n" / '\\n'), which printed
  // a literal backslash-n instead of a line break — and '\\n' is not even
  // a valid Char literal.
  buf.append("\nMethods attempted to inline:\n")

  for (m <- attemptedInlining) {
    buf.append("* ")
    buf.append(m)
    buf.append('\n')
  }

  buf.toString
}
// Control-flow exception used to roll an optimization attempt back to a
// trampoline checkpoint. It carries the snapshots needed to restore the
// name allocators, the mutable-local set and the state backup chain before
// resuming with `cont`. Extends ControlThrowable so it is not caught by
// NonFatal handlers.
private class RollbackException(val trampolineId: Int,
    val localNameAllocatorSnapshot: FreshNameAllocator.Snapshot[LocalName],
    val savedMutableLocalNames: Set[LocalName],
    val labelNameAllocatorSnapshot: FreshNameAllocator.Snapshot[LabelName],
    val savedStateBackupChain: List[StateBackup],
    val cont: () => TailRec[Tree]) extends ControlThrowable
// Raised when the optimizer crashes while processing `myself`; wraps the
// original cause and records the attempted inlining chain for diagnosis
// (see exceptionMsg for the generated message).
class OptimizeException(val myself: AbstractMethodID,
    val attemptedInlining: List[AbstractMethodID], cause: Throwable
) extends Exception(exceptionMsg(myself, attemptedInlining, cause), cause)
/** Allocates names that are guaranteed fresh within this allocator's
 *  lifetime (until `clear()`), starting from a set of pre-reserved names.
 *
 *  Supports snapshot/restore so that speculative optimization attempts can
 *  be rolled back without leaking allocated names.
 */
private abstract class FreshNameAllocator[N <: Name] private (
    initialMap: Map[N, Int]) {

  import FreshNameAllocator._

  // Names handed out (or reserved) so far, mapped to the next suffix
  // counter to try for that base name.
  private var usedNamesToNextCounter: Map[N, Int] = initialMap

  /** Forgets all allocations, reverting to the initial reserved set. */
  def clear(): Unit = usedNamesToNextCounter = initialMap

  /** Returns `base` itself if still unused, otherwise `base$i` for the
   *  first free counter `i`, and records the result as used.
   */
  def freshName(base: N): N = usedNamesToNextCounter.get(base) match {
    case None =>
      usedNamesToNextCounter = usedNamesToNextCounter.updated(base, 1)
      base

    case Some(firstCounter) =>
      var counter = firstCounter
      var candidate = nameWithSuffix(base, "$" + counter)
      while (usedNamesToNextCounter.contains(candidate)) {
        counter += 1
        candidate = nameWithSuffix(base, "$" + counter)
      }
      usedNamesToNextCounter = usedNamesToNextCounter
        .updated(base, counter + 1)
        .updated(candidate, 1)
      candidate
  }

  /** How to append a suffix to a name of this kind. */
  protected def nameWithSuffix(name: N, suffix: String): N

  /** Captures the current allocation state for later rollback. */
  def snapshot(): Snapshot[N] = new Snapshot(usedNamesToNextCounter)

  /** Restores a previously captured allocation state. */
  def restore(snapshot: Snapshot[N]): Unit =
    usedNamesToNextCounter = snapshot.usedNamesToNextCounter
}
private object FreshNameAllocator {

  /** List of local and label names that the emitter will avoid in JS
   *  identifiers, and therefore will rewrite with non-ASCII characters.
   *
   *  Since we're renaming all local and label symbols through fresh
   *  allocators anyway, we take the opportunity to rename them in a nice
   *  way (with ASCII characters only).
   */
  private val EmitterReservedJSIdentifiers = List(
    "arguments", "break", "case", "catch", "class", "const", "continue",
    "debugger", "default", "delete", "do", "else", "enum", "eval",
    "export", "extends", "false", "finally", "for", "function", "if",
    "implements", "import", "in", "instanceof", "interface", "let", "new",
    "null", "package", "private", "protected", "public", "return",
    "static", "super", "switch", "this", "throw", "true", "try", "typeof",
    "undefined", "var", "void", "while", "with", "yield"
  )

  // Pre-reserve the JS keywords so freshly allocated locals never collide.
  private val InitialLocalMap: Map[LocalName, Int] =
    EmitterReservedJSIdentifiers.map(i => LocalName(i) -> 1).toMap

  /** Allocator for local variable names. */
  final class Local extends FreshNameAllocator[LocalName](InitialLocalMap) {
    protected def nameWithSuffix(name: LocalName, suffix: String): LocalName =
      name.withSuffix(suffix)
  }

  // Same reservation applies to labels, which also become JS identifiers.
  private val InitialLabelMap: Map[LabelName, Int] =
    EmitterReservedJSIdentifiers.map(i => LabelName(i) -> 1).toMap

  /** Allocator for label names. */
  final class Label extends FreshNameAllocator[LabelName](InitialLabelMap) {
    protected def nameWithSuffix(name: LabelName, suffix: String): LabelName =
      name.withSuffix(suffix)
  }

  // Field names are not emitted as bare JS identifiers, so nothing needs
  // to be reserved up front.
  private val InitialFieldMap: Map[FieldName, Int] =
    Map.empty

  /** Allocator for field names. */
  final class Field extends FreshNameAllocator[FieldName](InitialFieldMap) {
    protected def nameWithSuffix(name: FieldName, suffix: String): FieldName =
      name.withSuffix(suffix)
  }

  /** Opaque capture of an allocator's state; only FreshNameAllocator can
   *  create or read it.
   */
  final class Snapshot[N <: Name] private[FreshNameAllocator] (
      private[FreshNameAllocator] val usedNamesToNextCounter: Map[N, Int])
}
/** Chooses the original name to record for a freshly allocated name:
 *  when there is no explicit original name and the allocator actually had
 *  to rename `base`, record `base` as the original; otherwise keep
 *  whatever original name was already there (possibly undefined).
 */
def originalNameForFresh(base: Name, originalName: OriginalName,
    freshName: Name): OriginalName = {
  if (!originalName.isDefined && (freshName ne base)) OriginalName(base)
  else originalName
}
/** Identifies a field by its declaring class plus its name.
 *  Value-based equality/hashCode so instances can serve as map keys.
 */
final class FieldID private (val ownerClassName: ClassName, val name: FieldName) {
  override def equals(that: Any): Boolean = that match {
    case other: FieldID =>
      ownerClassName == other.ownerClassName && name == other.name
    case _ =>
      false
  }

  override def hashCode(): Int = ownerClassName.## ^ name.##

  override def toString(): String = s"FieldID($ownerClassName, $name)"
}
object FieldID {
  // Builds a FieldID from a field reference (identifier) ...
  def apply(ownerClassName: ClassName, field: FieldIdent): FieldID =
    new FieldID(ownerClassName, field.name)

  // ... or from a field definition.
  def apply(ownerClassName: ClassName, fieldDef: FieldDef): FieldID =
    new FieldID(ownerClassName, fieldDef.name.name)
}
}
| scala-js/scala-js | linker/shared/src/main/scala/org/scalajs/linker/frontend/optimizer/OptimizerCore.scala | Scala | apache-2.0 | 210,370 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.kafka.interface.impl
import kafka.utils.{ZkUtils => KafkaZkUtils}
import kafka.admin.{AdminUtils => KafkaAdminUtils}
import java.util.Properties
import ly.stealth.mesos.kafka.interface.{AdminUtilsProxy, FeatureSupport}
import scala.collection.Map
/** Kafka-version-specific implementation of AdminUtilsProxy that delegates
 *  every operation to kafka.admin.AdminUtils through a shared ZkUtils
 *  connection.
 *
 *  @param zkUrl ZooKeeper connection string of the Kafka cluster
 */
class AdminUtils(zkUrl: String) extends AdminUtilsProxy {
  // Session and connection timeout (milliseconds) for the ZooKeeper client.
  private val DEFAULT_TIMEOUT_MS = 30000

  // NOTE(review): this ZkUtils handle is never closed; presumably the
  // instance lives for the whole process lifetime — confirm.
  private val zkUtils = KafkaZkUtils(zkUrl, DEFAULT_TIMEOUT_MS, DEFAULT_TIMEOUT_MS, isZkSecurityEnabled = false)

  // Topic name -> its per-topic config overrides.
  override def fetchAllTopicConfigs(): Map[String, Properties] = KafkaAdminUtils.fetchAllTopicConfigs(zkUtils)

  // Creates (or, with update = true, updates) the partition -> replica
  // assignment path for a topic in ZooKeeper.
  override def createOrUpdateTopicPartitionAssignmentPathInZK(
    topic: String,
    partitionReplicaAssignment: Map[Int, Seq[Int]],
    config: Properties,
    update: Boolean
  ): Unit = KafkaAdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkUtils, topic, partitionReplicaAssignment, config, update)

  override def changeTopicConfig(
    topic: String,
    configs: Properties
  ): Unit = KafkaAdminUtils.changeTopicConfig(zkUtils, topic, configs)

  override def fetchEntityConfig(
    entityType: String,
    entity: String
  ): Properties = KafkaAdminUtils.fetchEntityConfig(zkUtils, entityType, entity)

  override def changeClientIdConfig(
    clientId: String,
    configs: Properties
  ): Unit = KafkaAdminUtils.changeClientIdConfig(zkUtils, clientId, configs)

  override def fetchAllEntityConfigs(entityType: String): Map[String, Properties]
    = KafkaAdminUtils.fetchAllEntityConfigs(zkUtils, entityType)

  // Pure computation (no ZK access): spreads replicas over the given brokers.
  override def assignReplicasToBrokers(
    ids: Seq[Int],
    nPartitions: Int,
    replicationFactor: Int,
    fixedStartIndex: Int,
    startPartitionId: Int
  ): Map[Int, Seq[Int]] = {
    KafkaAdminUtils.assignReplicasToBrokers(ids, nPartitions, replicationFactor, fixedStartIndex, startPartitionId)
  }

  // This Kafka version supports quotas and generic entity configs.
  override val features: FeatureSupport = FeatureSupport(quotas = true, genericEntityConfigs = true)
}
| tc-dc/kafka-mesos | src/scala/iface/0_9/ly/stealth/mesos/kafka/interface/impl/AdminUtils.scala | Scala | apache-2.0 | 2,744 |
package com.tierline.scala.activemodel
import com.tierline.scala.activemodel.singletenant.domain._
import com.typesafe.config.ConfigFactory
import org.squeryl.PrimitiveTypeMode._
import org.squeryl.Session
/** Base Squeryl test schema: configures an H2 adapter from the
 *  `configName` section of database.conf and declares the test tables.
 */
abstract class Schema(configName: String) extends ActiveModelSchema {
  val config = ConfigFactory.load("database.conf")
  val dbConf = config.getConfig(configName)
  databaseAdapter = H2(dbConf)

  val goods = Table[Goods]
  val cart = Table[Cart]
  // One Cart owns many Goods, joined on Goods.cartId.
  val cartToGoods = oneToManyRelation(cart, goods).via((c, g) => c.id === g.cartId)
  val keyValue = Table[KeyValue]
}
/** Concrete test schema bound to the "database" block of database.conf. */
object TestSchema extends Schema("database") {
  // Creates a fresh Squeryl session per call, on a new connection from the
  // H2 data source configured by the parent Schema.
  override def sessionFactory = { () =>
    Session.create(
      this.databaseAdapter.dataSource.getConnection,
      this.databaseAdapter.adapter)
  }
}
/** Schema variant that relies on the databaseAdapter being configured from
 *  the environment rather than from database.conf.
 *
 *  NOTE(review): the table declarations and sessionFactory duplicate those
 *  of Schema/TestSchema above — consider sharing; to be confirmed.
 */
object EnvironmentConfigTestSchema extends ActiveModelSchema {
  val goods = Table[Goods]
  val cart = Table[Cart]
  // One Cart owns many Goods, joined on Goods.cartId.
  val cartToGoods = oneToManyRelation(cart, goods).via((c, g) => c.id === g.cartId)
  val keyValue = Table[KeyValue]

  override def sessionFactory = { () =>
    Session.create(
      this.databaseAdapter.dataSource.getConnection,
      this.databaseAdapter.adapter)
  }
}
| tierline/scala-activemodel | src/test/scala/com/tierline/scala/activemodel/TestSchema.scala | Scala | mit | 1,181 |
package controllers.loan
import com.google.inject.Inject
import models.loan.event.SaveCommands.CreateLoanCommand
import no.uio.musit.MusitResults.{MusitError, MusitSuccess}
import no.uio.musit.models.MuseumId
import no.uio.musit.security.Authenticator
import no.uio.musit.service.MusitController
import services.loan.LoanService
import controllers._
import play.api.mvc.ControllerComponents
/** HTTP endpoints for registering and listing museum loans. */
class LoanController @Inject()(
    val controllerComponents: ControllerComponents,
    val authService: Authenticator,
    val loanService: LoanService
) extends MusitController {

  /** Registers a new loan for the given museum. Expects a JSON body that
   *  validates as a CreateLoanCommand and responds with the persisted
   *  loan's numeric identifier.
   */
  def createLoan(mid: MuseumId) =
    MusitSecureAction().async(parse.json) { implicit request =>
      // Fixed: the original wrote `implicitly(request.user)`, which is a
      // no-op — passing an explicit argument to `implicitly` just returns
      // it. Expose the authenticated user directly.
      implicit val currUser = request.user
      val jsr = request.body.validate[CreateLoanCommand]
      saveRequest[CreateLoanCommand, Long](jsr) { cmd =>
        loanService.createLoan(mid, cmd.toDomain).map(_.map(_.underlying))
      }
    }

  /** Lists the currently active loans for the given museum, or an internal
   *  error response when the lookup fails.
   */
  def findActiveLoan(mid: MuseumId) =
    MusitSecureAction().async { implicit request =>
      loanService.findActiveLoans(mid).map {
        case MusitSuccess(res) => listAsPlayResult(res)
        case err: MusitError   => internalErr(err)
      }
    }
}
| MUSIT-Norway/musit | service_backend/app/controllers/loan/LoanController.scala | Scala | gpl-2.0 | 1,205 |
package edu.eckerd.google.api.language
import com.google.api.services.admin.{directory => jDirectory}
import org.scalatest.{FlatSpec, Matchers}
import edu.eckerd.google.api.services.{directory => sDirectory}
import edu.eckerd.google.api.services.Scopes._
/** Round-trip tests for the Scala <-> Java Directory model conversions.
 *
 *  Fixed: the original spec compared values with bare `x === y`
 *  statements. In ScalaTest, `===` returns a Boolean; used as a statement
 *  its result is silently discarded, so none of those expectations could
 *  ever fail. They have been replaced with enforced `shouldBe` matchers.
 */
class javaConversionsSpec extends FlatSpec with Matchers {

  val ListScopes = CALENDAR :: ADMIN_DIRECTORY ::: DRIVE

  "scalaGroupAsJavaGroupConversion" should "convert a scala group to java group" in {
    val group = sDirectory.models.Group("TestGroup", "test@test.com")
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup shouldBe a [jDirectory.model.Group]
  }

  it should "maintain the group name through conversion" in {
    val name = "TestGroup"
    val group = sDirectory.models.Group(name, "test@test.com")
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getName shouldBe name
  }

  it should "maintain the email through conversion" in {
    val email = "test@test.com"
    val group = sDirectory.models.Group("TestGroup", email)
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getEmail shouldBe email
  }

  it should "convert a id of None to null" in {
    val group = sDirectory.models.Group("TestGroup", "test@test.com")
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getId shouldBe null
  }

  it should "convert an id of Some(string) to string" in {
    val string = "115242516671582"
    val group = sDirectory.models.Group("TestGroup", "test@test.com", id = Some(string))
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getId shouldBe string
  }

  it should "convert a description of None to null" in {
    val group = sDirectory.models.Group("TestGroup", "test@test.com")
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getDescription shouldBe null
  }

  it should "convert a description of Some(string) to string" in {
    val string = "Best Group Ever"
    val group = sDirectory.models.Group("TestGroup", "test@test.com", description = Some(string))
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getDescription shouldBe string
  }

  it should "convert a directMembersCount of None to null" in {
    val group = sDirectory.models.Group("TestGroup", "test@test.com")
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getDirectMembersCount shouldBe null
  }

  it should "convert a directMembersCount of Some(long) to long" in {
    val long = 3L
    val group = sDirectory.models.Group("TestGroup", "test@test.com", directMemberCount = Some(long))
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getDirectMembersCount shouldBe long
  }

  it should "convert adminCreated of None to null" in {
    val group = sDirectory.models.Group("TestGroup", "test@test.com")
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getAdminCreated shouldBe null
  }

  it should "convert adminCreated of Some(bool) to bool" in {
    val bool = true
    val group = sDirectory.models.Group("TestGroup", "test@test.com", adminCreated = Some(bool))
    val jGroup = JavaConversions.scalaGroupAsJavaGroupConversion(group)
    jGroup.getAdminCreated shouldBe bool
  }

  "javaGroupAsScalaGroupConversion" should "return a Scala Group" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup shouldBe a [sDirectory.models.Group]
  }

  it should "maintain the name through conversion" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.name shouldBe name
  }

  it should "throw an error if the name is blank" in {
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setEmail(email)
    intercept[Throwable]{
      JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    }
  }

  it should "maintain the email through conversion" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.email shouldBe email
  }

  it should "throw an error if the email is blank" in {
    val name = "TestGroup"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
    intercept[Throwable]{
      JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    }
  }

  it should "return a None if the id is null" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.id shouldBe None
  }

  it should "return Some(id) if the id is set" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val id = "11514754716654q1a"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
      .setId(id)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.id shouldBe Some(id)
  }

  it should "return a None if the description is null" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.description shouldBe None
  }

  it should "return Some(description) if the description is set" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val description = "Best Group Ever"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
      .setDescription(description)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.description shouldBe Some(description)
  }

  it should "return a None if the members count is null" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.directMemberCount shouldBe None
  }

  it should "return Some(long) if the members count is set" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val count = 3L
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
      .setDirectMembersCount(count)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.directMemberCount shouldBe Some(count)
  }

  it should "return a None for Members" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.members shouldBe None
  }

  it should "return a None if the Admin Created is null" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.adminCreated shouldBe None
  }

  it should "return Some(bool) if the Admin Created is set" in {
    val name = "TestGroup"
    val email = "test@test.com"
    val bool = true
    val javaGroup = new jDirectory.model.Group()
      .setName(name)
      .setEmail(email)
      .setAdminCreated(bool)
    val sGroup = JavaConversions.javaGroupAsScalaGroupConversion(javaGroup)
    sGroup.adminCreated shouldBe Some(bool)
  }

  "scalaGroupsAsJavaGroupsconversion" should "return a Java Groups" in {
    val listGroups = List[sDirectory.models.Group](
      sDirectory.models.Group("TestName1", "test01@test.com"),
      sDirectory.models.Group("TestName2", "test02@test.com")
    )
    val pageToken = "pageTokenTest"
    val sGroups = sDirectory.models.Groups(
      Some(listGroups),
      Some(pageToken)
    )
    val jGroups = JavaConversions.scalaGroupsAsJavaGroupsConversion(sGroups)
    jGroups shouldBe a [jDirectory.model.Groups]
  }

  it should "have the same pagetoken after conversion" in {
    val listGroups = List[sDirectory.models.Group](
      sDirectory.models.Group("TestName1", "test01@test.com"),
      sDirectory.models.Group("TestName2", "test02@test.com")
    )
    val pageToken = "pageTokenTest"
    val sGroups = sDirectory.models.Groups(
      Some(listGroups),
      Some(pageToken)
    )
    val jGroups = JavaConversions.scalaGroupsAsJavaGroupsConversion(sGroups)
    jGroups.getNextPageToken shouldBe pageToken
  }

  it should "return a null page token if the pageToken is None" in {
    val listGroups = List[sDirectory.models.Group](
      sDirectory.models.Group("TestName1", "test01@test.com"),
      sDirectory.models.Group("TestName2", "test02@test.com")
    )
    val sGroups = sDirectory.models.Groups(
      Some(listGroups),
      None
    )
    val jGroups = JavaConversions.scalaGroupsAsJavaGroupsConversion(sGroups)
    jGroups.getNextPageToken shouldBe null
  }

  it should "have converted a list of Groups to Java Format" in {
    val listGroups = List[sDirectory.models.Group](
      sDirectory.models.Group("TestName1", "test01@test.com"),
      sDirectory.models.Group("TestName2", "test02@test.com")
    )
    val pageToken = "pageTokenTest"
    val sGroups = sDirectory.models.Groups(
      Some(listGroups),
      Some(pageToken)
    )
    val jGroups = JavaConversions.scalaGroupsAsJavaGroupsConversion(sGroups)
    jGroups.getGroups.get(0) shouldBe JavaConversions.scalaGroupAsJavaGroupConversion(listGroups.head)
    jGroups.getGroups.get(1) shouldBe JavaConversions.scalaGroupAsJavaGroupConversion(listGroups.drop(1).head)
  }

  it should "have a null groups if groups was None" in {
    val pageToken = "pageTokenTest"
    val sGroups = sDirectory.models.Groups(
      None,
      Some(pageToken)
    )
    val jGroups = JavaConversions.scalaGroupsAsJavaGroupsConversion(sGroups)
    jGroups.getGroups shouldBe null
  }

  // Shared fixture for the Java -> Scala Groups tests below.
  object javaGroupsAsScalaGroupsSetup {
    import collection.JavaConverters._
    val PageToken = "Next"
    val Group1Name = "Test1"
    val Group1Email = s"$Group1Name@test.com"
    val Group2Name = "Test2"
    val Group2Email = s"$Group2Name@test.com"
    val Group1 = new jDirectory.model.Group()
      .setName(Group1Name)
      .setEmail(Group1Email)
    val Group2 = new jDirectory.model.Group()
      .setName(Group2Name)
      .setEmail(Group2Email)
    val Groups = List(Group1, Group2).asJava
    val javaGroups = new jDirectory.model.Groups()
      .setGroups(Groups)
      .setNextPageToken(PageToken)
  }

  "javaGroupsAsScalaGroupsConversion" should "return a Scala Groups" in {
    import javaGroupsAsScalaGroupsSetup.javaGroups
    val scalaGroups = JavaConversions.javaGroupsAsScalaGroupsConversion(javaGroups)
    scalaGroups shouldBe a [sDirectory.models.Groups]
  }

  it should "have the same page token after conversion" in {
    import javaGroupsAsScalaGroupsSetup.javaGroups
    val scalaGroups = JavaConversions.javaGroupsAsScalaGroupsConversion(javaGroups)
    import javaGroupsAsScalaGroupsSetup.PageToken
    scalaGroups.nextPageToken shouldBe Some(PageToken)
  }

  it should "have A List of Groups" in {
    import javaGroupsAsScalaGroupsSetup.javaGroups
    val scalaGroups = JavaConversions.javaGroupsAsScalaGroupsConversion(javaGroups)
    scalaGroups.groups.get shouldBe a [List[_]]
  }
}
| EckerdCollege/google-api-scala | src/test/scala/edu/eckerd/google/api/language/javaConversionsSpec.scala | Scala | apache-2.0 | 11,853 |
/*
* Copyright 2017 Mediative
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mediative.amadou
import org.scalatest.{WordSpec, Matchers}
import com.typesafe.config.ConfigFactory
import java.util.Properties
object ConfigLoaderSpec {
  // Target case class for config extraction: `properties` is populated by
  // loading the java .properties file whose path appears in the config.
  case class Database(url: String, properties: Properties)
}
/** Verifies ConfigLoader's Properties value reader: a config entry may
 *  reference a .properties file by path, which is loaded into the
 *  java.util.Properties field of the target case class.
 */
class ConfigLoaderSpec extends WordSpec with Matchers with ConfigLoader {
  import ConfigLoaderSpec.Database

  "propertiesValueReader" should {
    "load from given path" in {
      val config =
        ConfigFactory.parseString("""
        database {
          url = "jdbc:postgresql:testdb"
          properties = src/test/resources/config-reader-spec.properties
        }
        """)
      val db = config.as[Database]("database")
      // The fixture file defines exactly two entries.
      db.properties.size should be(2)
      db.properties.getProperty("user") should be("john")
      db.properties.getProperty("pass") should be("secret")
    }

    "be empty when no path is given" in {
      val config = ConfigFactory.parseString("""
        database.url = "jdbc:postgresql:testdb"
      """)
      val db = config.as[Database]("database")
      db.properties.isEmpty should be(true)
    }

    "fail when given path does not exist" in {
      val config =
        ConfigFactory.parseString("""
        database {
          url = "jdbc:postgresql:testdb"
          properties = src/test/resources/doesn-not-exists.properties
        }
        """)
      // A missing file surfaces as the underlying FileNotFoundException.
      the[java.io.FileNotFoundException] thrownBy {
        config.as[Database]("database")
      } should have message "src/test/resources/doesn-not-exists.properties (No such file or directory)"
    }
  }
}
| mediative/amadou | core/src/test/scala/com.mediative.amadou/ConfigLoaderSpec.scala | Scala | apache-2.0 | 2,138 |
package actors
import akka.actor.ActorRef
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.event.LoggingReceive
import akka.actor.ActorSystem
import akka.actor.Props
/** Per-connection actor bridging one client (`out`) with the shared chat
 *  room (`room`). Client input arrives as raw "name:text" strings;
 *  room broadcasts arrive as ChatMessage and are serialized back.
 */
class ChatUserActor(room: ActorRef, out: ActorRef) extends Actor with ActorLogging {

  // Join the chat room as soon as this connection actor starts.
  override def preStart() = {
    room ! JoinChatRoom
  }

  def receive = LoggingReceive {
    // Broadcast from the room: serialize as "name:text" for the client.
    case ChatMessage(name, text) if sender == room =>
      out ! s"$name:$text"

    // Raw client input, expected as "name:text". Split on the FIRST colon
    // only, so the message body may itself contain colons. (The previous
    // `split(":")(1)` dropped everything after a second colon and threw
    // ArrayIndexOutOfBoundsException when no colon was present.)
    case text: String =>
      text.split(":", 2) match {
        case Array(name, message) => room ! ChatMessage(name, message)
        case _ => log.warning("malformed chat input (expected name:text): {}", text)
      }

    case other =>
      log.error("issue - not expected: " + other)
  }
}
object ChatUserActor {
  // Props factory: each websocket connection gets its own ChatUserActor,
  // wired to the shared chat room actor and the connection's `out` actor.
  def props(system:ActorSystem)(out:ActorRef) = Props(new ChatUserActor(ChatRoomActor(system), out))
}
| tnddn/iv-web | portal/rest-portal/app/actors/ChatUserActor.scala | Scala | apache-2.0 | 771 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import config.TestFixtureBase
import controllers.reference.SkillTypeController
import model.exchange.AssessorSkill
import play.api.libs.json.Json
import play.api.mvc._
import play.api.test.FakeRequest
import play.api.test.Helpers._
import testkit.UnitWithAppSpec
import scala.concurrent.Future
/** Unit test for SkillTypeController's allSkills endpoint. */
class SkillTypeControllerSpec extends UnitWithAppSpec {

  "all skills" must {
    "return all skills" in new TestFixture {
      // The endpoint must return 200 with the full static list of
      // assessor skills (with labels), round-tripped through JSON.
      val res: Future[Result] = controller.allSkills.apply(FakeRequest())
      status(res) mustBe OK
      Json.fromJson[List[AssessorSkill]](Json.parse(contentAsString(res))).get mustBe AssessorSkill.AllSkillsWithLabels
    }
  }

  trait TestFixture extends TestFixtureBase {
    // Controller under test with stubbed Play components (no real HTTP).
    val controller = new SkillTypeController(stubControllerComponents(playBodyParsers = stubPlayBodyParsers(materializer)))
  }
}
| hmrc/fset-faststream | test/controllers/SkillTypeControllerSpec.scala | Scala | apache-2.0 | 1,455 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.web.model
import net.liftweb.util.Helpers
import com.normation.rudder.domain.policies.DirectiveId
import net.liftweb.common.Box
import scala.collection.mutable.Buffer
import net.liftweb.util.BaseField
import bootstrap.liftweb.LiftSpringApplicationContext.inject
import net.liftweb.common._
import org.joda.time.{ DateTime, LocalDate, LocalTime, Duration, Period }
import org.joda.time.format._
import com.normation.utils.Utils._
import java.util.Locale
import org.slf4j.LoggerFactory
import scala.xml._
import net.liftweb.http._
import js._
import JsCmds._
import JE._
import net.liftweb.util.Helpers._
import com.normation.cfclerk.domain.{ VariableSpec, TechniqueId, Technique }
import com.normation.exceptions.TechnicalException
import org.slf4j.LoggerFactory
import com.normation.utils.HashcodeCaching
/**
* A displayable field has 2 methods :
* -> toHtmlNodeSeq and toFormNodeSeq : to display the form
*/
/** Something renderable both as a plain HTML fragment and as an editable
 *  form fragment. (The original `trait ... extends { body }` spelling is
 *  equivalent to a plain template body; the redundant `extends` is gone.)
 */
trait DisplayableField {
  def toHtmlNodeSeq: NodeSeq
  def toFormNodeSeq: NodeSeq
}
/** Common interface of everything that can appear inside a section:
 *  either a leaf DirectiveField or a nested SectionField.
 */
sealed trait SectionChildField extends DisplayableField with Loggable {
  // retrieve the value as a client string
  def toClient: String

  // get current sections and variables in sub section (leaf fields
  // contribute nothing; sections contribute themselves plus descendants)
  def getAllSectionFields: List[SectionField] = this match {
    case variable: DirectiveField => Nil
    case section: SectionField =>
      section :: section.childFields.flatMap(_.getAllSectionFields).toList
  }

  /*
   * Convention: displayHtml is a "read only"
   * version of toFormNodeSeq
   */
  def displayHtml: Text

  /**
   * Remove duplicate section of that section.
   * Only multivalued sections actually have something to do here; the
   * recursion stops at a multivalued section because multivalued sections
   * cannot be nested inside each other.
   */
  final def removeDuplicateSections : Unit = this match {
    case m:MultivaluedSectionField => m.doRemoveDuplicateSections //stop recursion here: no multivalued section on multivalued section
    case other : DirectiveField => ()
    case other : SectionField => other.childFields.foreach { _.removeDuplicateSections }
  }
}
/** A single editable directive parameter: a lift BaseField plus the
 *  metadata (label, tooltip, optionality, section, read-only flag) needed
 *  to render it inside a directive edit form.
 *
 *  Contract: `this.is` should be invariant under `parseClient(toClient)`.
 */
trait DirectiveField extends BaseField with SectionChildField {
  // Stable identifier of the field; enforced non-empty at construction.
  val id: String
  require(nonEmpty(id), "A field ID can not be null nor empty")

  def manifest: Manifest[ValueType]

  override def required_? = true

  // deprecated but has to be defined
  def is = get

  /* parseClient / toClient : get and set value from/to
   * web ui.
   *
   * this.is should be invariant with
   * parseClient(toClient).
   */

  // Set value from a client value.
  // update list error accordingly
  def parseClient(s: String): Unit

  // Short human-readable label, exposed through lift's displayName.
  private var description: String = ""
  override def displayName = description
  def displayName_=(s: String): Unit = description = s

  // long description, rendered as a tooltip next to the label
  private var longDescription: String = ""
  def tooltip = longDescription
  def tooltip_=(s: String): Unit = longDescription = s

  // Whether the field may be left empty in the form.
  private var mayBeEmpty: Boolean = false
  def optional = mayBeEmpty
  def optional_=(b: Boolean): Unit = mayBeEmpty = b

  // Optional name of the section ("zone") this field belongs to.
  private var zone: Option[String] = None
  def section = zone
  def section_=(s: Option[String]): Unit = zone = s

  /**
   * Get possible values for that fields, if supported.
   * If not supported, return none.
   * filter may be given
   */
  def getPossibleValues(filters: (ValueType => Boolean)*): Option[Set[ValueType]]

  def getDefaultValue: ValueType

  /**
   * Define if the field is readonly or not
   */
  private var readOnly: Boolean = false
  def isReadOnly = readOnly
  def isReadOnly_=(s:Boolean):Unit = readOnly = s

  // Read-only rendering: just the client-side string value.
  override def displayHtml = Text(toClient)

  // Tooltip markup (icon + hidden content div) when a long description is
  // set; empty otherwise. The id ties the icon to its content div.
  def tooltipElem = {
    if (tooltip == "") {
      NodeSeq.Empty
    } else {
      val tooltipid = Helpers.nextFuncName
      <span class="tw-bs" ><span tooltipid={tooltipid} class="ruddericon tooltipable glyphicon glyphicon-question-sign" title=""></span></span>
      <div class="tooltipContent" id={tooltipid}>{tooltip}</div>
    }
  }

  // One table row: label (with optional marker and tooltip) + given value.
  def display(value: NodeSeq) = {
    <tr>
      <td class="directiveVarLabel">
        { displayName + { if (optional) " (optional)" else "" } }: {tooltipElem}
      </td>
      <td class="directiveVarValue">{ value }</td>
    </tr>
  }

  // Editable rendering: delegates to lift's toForm; failures and empty
  // forms are logged and rendered as nothing rather than crashing the page.
  override def toFormNodeSeq = {
    toForm match {
      case Failure(m, _, _) =>
        val errorMess = "Can not map field %s to an input, error message: %s"
        logger.error(errorMess.format(displayName, m))
        NodeSeq.Empty
      case Empty =>
        val errorMess = "Can not map field %s to an input, " +
          "form representation of the field was empty"
        logger.error(errorMess.format(displayName))
        NodeSeq.Empty
      case Full(form) =>
        display( form)
    }
  }

  def toHtmlNodeSeq = display( displayValue )

  // This is only used when showing a PT, hence the values are the default values
  def displayValue: NodeSeq = {
    displayHtml match {
      case value: NodeSeq if value.toString.trim == "" => <span></span>
      case value: NodeSeq => Text("(defaults to ") ++ value ++ Text(")")
    }
  }
}
/** Companion object holding the shared SLF4J logger for all DirectiveField instances. */
object DirectiveField {
  val logger = LoggerFactory.getLogger(classOf[DirectiveField])
}
/**
 * A section of a Directive: a named group of child fields (and possibly
 * sub-sections) whose visibility can be toggled by the user in the web UI.
 */
trait SectionField extends SectionChildField {
  def name: String
  def childFields: Seq[SectionChildField]
  def values: Map[String, () => String]
  def mapValueSeq: Map[String, Seq[String]]
  def displayHtml = Text(toClient)
  // A Section may be displayed or not-displayed by default
  // for the current user (we need to figure out
  // how to specify it per user)
  def displayedByDefault : Boolean
  // this is the user selected value :
  // - None: the user has not chosen anything, so the default applies
  // - Some(true): the user wants the section displayed
  // - Some(false): the user wants the section hidden
  var displayed : Option[Boolean] = Option.empty[Boolean]
  /** True when this section is the multivalued (repeatable) variant. */
  def isMultivalued = this match {
    case _: MultivaluedSectionField => true
    case _ => false
  }
  /**
   * Ajax method to define the visibility status of a section
   * Takes sectionId : the id of the section as a parameter
   * Caution : it mutates the current field
   */
  def visibilityCallBack(sectionId: String) : net.liftweb.http.GUIDJsExp = {
    SHtml.ajaxCall(
      JsRaw("")
      , (v:String) => {
        displayed = Some(!displayed.getOrElse(displayedByDefault))
        JsRaw("""
      $('#%s').toggleClass("foldedSection").toggleClass("unfoldedSection"); """.format(sectionId))
      }
    )
  }
  /**
   * Based on the default visibility and user selected visibility,
   * returns the proper display classes
   */
  def visibilityClasses : String = {
    // plain if/else rather than matching on a Boolean
    if (displayed.getOrElse(displayedByDefault)) "unfoldedSection" else "foldedSection"
  }
}
/**
 * Default implementation of a (single valued) section: a named, ordered
 * group of child fields / sub-sections.
 */
case class SectionFieldImp(
  val name : String,
  val childFields : Seq[SectionChildField],
  val displayedByDefault : Boolean,
  // Only variables of the current section have entries in the values map
  // the key of type String is the id (variable name),
  // the value is a function which should be called at validation time
  val values : Map[String, () => String]) extends SectionField with HashcodeCaching {
  // case-class copy is deliberately forbidden: the child fields are mutable
  def copy(): Nothing = throw new TechnicalException("Can't copy DirectiveFieldGroup, it contains mutable datas")
  def toClient = childFields.mkString
  // each variable of this section with its (single) current value
  def mapValueSeq: Map[String, Seq[String]] = values.map { case (k, v) => (k, Seq(v())) }
  // If a section is empty, we want to hide it.
  override def toFormNodeSeq: NodeSeq = {
    val childrenXml = childFields map (f => f.toFormNodeSeq)
    val sectionId = Helpers.nextFuncName
    val changeVisibility = visibilityCallBack(sectionId)
    // set the method name for the ajax call back (the guid plus the mandatory () to define/call it
    val methodName = changeVisibility.guid + "()"
    val classes = "sectionFieldset foldableSection " + visibilityClasses
    if(childrenXml.isEmpty) NodeSeq.Empty
    else
      <tr><td colspan="2">
        <div id={sectionId} class={classes}>
          <div class="section-title" onClick={methodName}>Section: { name }</div>
          <table class="directiveSectionDef">
            { childrenXml }
          </table>
        </div>
      </td></tr> ++ Script(JsRaw(""" function %s { %s } """.format(methodName, changeVisibility.toJsCmd)))
  }
  // static (read only) rendering of the section, without fold/unfold behaviour
  override def toHtmlNodeSeq = {
    val childrenXml = childFields map (f => f.toHtmlNodeSeq)
    if(childrenXml.isEmpty) NodeSeq.Empty
    else
      <tr><td colspan="2">
        <div>
          <div class="section-title">Section: { name }</div>
          <table class="directiveSectionDisplay">
            <tbody>
              { childrenXml }
            </tbody>
          </table>
        </div>
      </td></tr>
  }
}
/**
 * A multivalued section: several iterations ("groups") of the same section
 * pattern, which the user can add to / remove from in the UI.
 *
 * Caution: this class is stateful — the current list of iterations is kept
 * in a mutable buffer, guarded by `synchronized` on the mutating paths.
 */
case class MultivaluedSectionField(
    val sections : Seq[SectionField]
  , private val newSection: () => SectionField
  , val displayedByDefault: Boolean
  , val readOnlySection : Boolean
) extends SectionField with HashcodeCaching {
  require(!sections.isEmpty)
  // all iterations share the name of the section pattern
  val name: String = sections.head.name
  def childFields: Seq[SectionChildField] = allSections.foldLeft(Seq[SectionChildField]())((seq, section) => seq ++ section.childFields)
  def values: Map[String, () => String] = allSections.foldLeft(Map[String, () => String]())((map, child) => map ++ child.values)
  // stable HTML id of the container so ajax callbacks can refresh it
  private val htmlId = Helpers.nextFuncName
  // log (and swallow) failures coming back from delete()
  private def logError(box: Box[_]): Unit = box match {
    case Failure(m, _, _) => logger.error(m)
    case Empty => logger.error("Empty value was returned")
    case _ => //ok
  }
  // mutable list of the current iterations; mutations must be synchronized
  private val allSections = sections.toBuffer
  def toClient: String = childFields.mkString
  /**
   * Append a new iteration (a freshly built section by default) and return
   * its index.
   * NOTE(review): when the section is read-only nothing is added, but the
   * index of the last *existing* iteration is returned — the same value a
   * successful add would have produced, so callers can not tell the
   * difference. Confirm this is intended.
   */
  def add(section: SectionField = newSection()): Int = {
    if (!readOnlySection) {
      synchronized {
        allSections += section
        allSections.size - 1
      }
    } else {
      allSections.size-1
    }
  }
  /**
   * Remove list group with index "index"
   * @param index
   * @return the new size of the otherSections, or an error
   */
  def delete(index: Int): Box[Int] = {
    if (!readOnlySection) {
      synchronized {
        if (index < 0) {
          Failure("Index must be a positive integer")
        } else if (index >= allSections.size) {
          Failure("Index (%s) must be lesser than number of sections (%s)".format(index, allSections.size))
        } else {
          allSections remove index
          Full(allSections.size)
        }
      }
    } else {
      Failure("Cannot modify read only parameters")
    }
  }
  /**
   * Remove iterations whose variable values are identical to an earlier
   * iteration, keeping the first occurrence of each distinct value set.
   */
  def doRemoveDuplicateSections : Unit = {
    val sects = allSections.map{ sect => sect.getAllSectionFields.map { _.mapValueSeq } }.zipWithIndex
    //find duplicates: set of ids to remove
    val toRemove = sects.map { case (s, i) =>
      sects.collect { case(s2, i2) if i2 > i && s == s2 => i2 }
    }.flatten.toSet
    //section to keep
    val toKeep = sects.collect { case (_,i) if(!toRemove.contains(i)) => allSections(i) }
    //ok, remove duplicate: swap current section with toKeep
    synchronized {
      allSections.clear
      allSections ++= toKeep
    }
    ()
  }
  def size = synchronized { allSections.size }
  def iterator = synchronized { allSections.iterator }
  /**
   * Return the Map of (variable name -> seq of values)
   * with values ordered by listname index:
   * for each variable name "key", values(key)(i) belongs
   * to the same iteration of listname.
   */
  def mapValueSeq: Map[String, Seq[String]] = {
    import scala.collection.mutable.{ Buffer, Map }
    val map = Map[String, Buffer[String]]()
    for {
      sect <- allSections
      (name, values) <- sect.mapValueSeq
    } {
      if (!map.isDefinedAt(name))
        map(name) = Buffer[String]()
      map(name) ++= values
    }
    map.toMap
  }
  /**
   * Simple form presentation: each section is iterated, and a
   * delete button is added to them.
   * A add button is added at the bottom.
   * @return
   */
  def toFormNodeSeq: NodeSeq = {
    <tr id={ htmlId }>{ content }</tr>
  }
  // full rendering of all iterations; re-evaluated on each ajax refresh
  private def content: NodeSeq = {
    <td colspan="2">
      <div class="directiveGroup">{
        (allSections.zipWithIndex.map {
          case (section, i) =>
            val sectionId = Helpers.nextFuncName
            val changeVisibility = section.visibilityCallBack(sectionId)
            // set the method name for the ajax call back (the guid plus the mandatory () to define/call it
            val methodName = changeVisibility.guid + "()"
            val classes = "groupFieldset foldableSection " + section.visibilityClasses
            <div id={sectionId} class={classes}>
              <div class="section-title" onClick={methodName}>{ "%s #%s".format(name, i + 1) }</div>
              { showFormEntry(section, i) }
              { // showAddAnother under the last element
                if ((i + 1) == size) {
                  showAddAnother()
                } else {
                  NodeSeq.Empty
                }
              }
              <hr class="spacer"/>
            </div> ++ Script(JsRaw(""" function %s { %s } """.format(methodName, changeVisibility.toJsCmd)))
        })
      }</div>
    </td> ++ Script(OnLoad(JsVar("""
      $("input").not("#treeSearch").keydown( function(event) {
        processKey(event , 'policyConfigurationSave')
      } ); """)
    ))
  }
  // "Add another" button; hidden for read-only sections
  private def showAddAnother(): NodeSeq = {
    if (!readOnlySection) {
      <div class="directiveAddGroup">{
        SHtml.ajaxSubmit("Add another", { () =>
          add()
          //refresh UI - all item of that group
          SetHtml(htmlId, this.content) & postModificationJS()
        })
      }</div>
    } else {
      NodeSeq.Empty
    }
  }
  // one iteration: its fields plus a delete button (disabled when it is the last one)
  private def showFormEntry(section: SectionField, i: Int): NodeSeq = {
    <table class="directiveGroupDef">
      <tbody>
        { section.childFields map (f => f.toFormNodeSeq) }
      </tbody>
    </table>
    <div class="textright directiveDeleteGroup">{
      if (!readOnlySection) {
        val attr = if (size > 1) ("" -> "") else ("disabled" -> "true")
        SHtml.ajaxSubmit("Delete", { () =>
          logError(delete(i))
          //refresh UI - all item of that group
          SetHtml(htmlId, this.content) & postModificationJS()
        },
        attr)
      }
    }</div>
  }
  /**
   * Command to correct display and behaviour after modifying sections
   */
  private[this] def postModificationJS() : JsExp = {
    JsRaw("""correctButtons(); createTooltip(); """)
  }
  // static (read only) rendering of all iterations
  override def toHtmlNodeSeq = {
    <tr><td colspan="2">
      <div class="directiveGroup">{
        (allSections.map { sect =>
          <div class="groupFieldset">
            <div class="section-title">{ "%s".format(name) }</div>
            <table class="directiveGroupDisplay">
              <tbody>
                { sect.toHtmlNodeSeq }
              </tbody>
            </table>
          </div>
        })
      }</div>
    </td></tr>
  }
}
/**
 * A stateful class that maintains information about
 * a Directive and every things needed in the web part to
 * configure it (fields, etc).
 *
 * If the Technique provides expected reports, we don't show anything (for the moment)
 *
 * @param techniqueId   the Technique from which the Directive is derived
 * @param directiveId   the Directive for which this editor is built
 * @param name          human readable name of the Directive
 * @param description   short description of the Directive
 * @param sectionField  root of the (mutable) field tree used to edit variables
 * @param variableSpecs specification of each variable, indexed by variable name
 * @param providesExpectedReports true when the Technique defines its own
 *                                expected reports (meta-technique case)
 */
case class DirectiveEditor(
  // techniqueId / directiveId here.
  val techniqueId : TechniqueId
  , val directiveId : DirectiveId
  , val name : String
  , val description : String
  , val sectionField : SectionField
  , val variableSpecs : Map[String, VariableSpec]
  , val providesExpectedReports: Boolean
) extends HashcodeCaching {
  // We do not remove duplicate in case of meta-technique.
  // (was `case true => Unit`, which yielded the Unit *companion object*,
  // not the unit value — a plain if/else is both correct and clearer)
  def removeDuplicateSections : Unit = {
    if (!providesExpectedReports) {
      sectionField.removeDuplicateSections
    }
  }
  /**
   * Get the map of (varname, list(values)),
   * as awaited by LDAPRuleID
   */
  def mapValueSeq: Map[String, Seq[String]] = {
    // merge two multimaps by concatenating value sequences key by key
    def mergeMap(m1:Map[String, Seq[String]], m2:Map[String, Seq[String]]) = {
      val res = scala.collection.mutable.Map[String,Seq[String]]() ++ m1
      for {
        (k,v) <- m2
      } {
        res(k) = res.getOrElse(k, Seq()) ++ v
      }
      res.toMap
    }
    sectionField.getAllSectionFields.foldLeft(Map[String, Seq[String]]()) { (map, sect) =>
      mergeMap(map, sect.mapValueSeq)
    }
  }
  def toFormNodeSeq: NodeSeq = {
    // explicit else branch: without it the embedded expression evaluates to
    // unit and a literal "()" text node ends up in the rendered table
    <div class="variableDefinition">
      <table class="directiveVarDef">
        { if (!providesExpectedReports) sectionField.childFields.flatMap(_.toFormNodeSeq) else NodeSeq.Empty }
      </table>
    </div>
  }
  def toHtmlNodeSeq: NodeSeq = {
    <div class="policyDisplay">
      <div class="variableDefinition">
        <br/>
        <div>Variables to be defined for this Technique</div>
        <table class="directiveVarDisplay">
          { sectionField.childFields.flatMap(_.toHtmlNodeSeq) }
        </table>
      </div>
    </div>
  }
}
| Kegeruneku/rudder | rudder-web/src/main/scala/com/normation/rudder/web/model/DirectiveEditor.scala | Scala | agpl-3.0 | 18,687 |
package info.mornlight.oneopus
import java.io.File
import javafx.beans.property.ReadOnlyObjectWrapper
import javafx.beans.value.ObservableValue
import javafx.collections.FXCollections
import javafx.event.EventHandler
import javafx.scene.control.TableColumn.CellDataFeatures
import javafx.scene.control._
import javafx.scene.input._
import javafx.scene.layout.VBox
import javafx.util.Callback
import info.mornlight.oneopus.action.{Action, KeyActionMapper, Keys}
import info.mornlight.oneopus.fsutils.FsUtils
import info.mornlight.oneopus.input.ClipboardUtils
import info.mornlight.oneopus.ui._
import info.mornlight.oneopus.utils.Shell
import org.apache.commons.lang3.StringEscapeUtils
import info.mornlight.oneopus.utils.Formatter
import scala.collection.JavaConversions._
/**
 * UI controller for a single file lister panel: a path header plus a table
 * showing the files of the current directory.
 *
 * Handles keyboard driven actions (copy/cut/paste, navigation) and the
 * modal bottom bars (find / filter / command).
 */
class ListerPane(val lister: Lister, fmt: Formatter) extends VBox with Controller {
  private val pathPane = new PathPane
  private val view = new TableView[File]
  // currently displayed modal bar (find/filter/command); null in normal mode
  private var bar: ListerBar = null
  // backing list of the table: content of the current directory
  val items = FXCollections.observableArrayList[File]
  val keyMapper = new KeyActionMapper
  getChildren.addAll(pathPane, view)
  initKeyMapper()
  initUi()
  updateUi()
  // declare which key strokes trigger which high level action
  def initKeyMapper() {
    keyMapper.register(Keys.Copy, Action.Copy)
    keyMapper.register(Keys.Cut, Action.Cut)
    keyMapper.register(Keys.Paste, Action.Paste)
    keyMapper.register(Keys.Backspace, Action.GoUp)
    keyMapper.register(Keys.Enter, Action.Open)
    keyMapper.register(Keys.Escape, Action.Escape)
    keyMapper.register(new KeyCodeCombination(KeyCode.STAR), Action.FilterMode)
    keyMapper.register(new KeyCodeCombination(KeyCode.SLASH, KeyCombination.SHIFT_DOWN), Action.CommandMode)
    // NOTE(review): Keys.Escape is registered a second time here (already
    // bound above) — most likely a copy/paste leftover; confirm.
    keyMapper.register(Keys.Escape, Action.Escape)
  }
  //Nautilus expects files to be supplied with a x-special/gnome-copied-files MIME type with data beginning with the cut/copy action, a newline character, and the URL of the file.
  // dispatch a high level action to its handler
  def executeAction(action: Action.Value) {
    action match {
      case Action.GoUp => goUp()
      case Action.Open => enterDir()
      case Action.Copy => doActionCopy()
      case Action.Cut => doActionCut()
      case Action.Paste => doActionPaste()
      case Action.Escape => enterNormalMode()
      case Action.FilterMode => enterFilterMode()
      case Action.CommandMode => enterCommandMode()
    }
  }
  // leave any modal mode and remove the bottom bar, if present
  def enterNormalMode() {
    lister.mode = ListerMode.Normal
    if (bar != null) {
      getChildren.remove(bar)
      bar = null
    }
  }
  // switch to find mode, pre-filling the bar with the typed character(s)
  def enterFindMode(input: String) {
    lister.mode = ListerMode.Find
    bar = new FindBar
    getChildren.add(bar)
    bar.focusInput()
    bar.asInstanceOf[FindBar].setInput(input)
  }
  def enterFilterMode() {
    lister.mode = ListerMode.Filter
    bar = new FilterBar
    getChildren.add(bar)
    bar.focusInput()
  }
  def enterCommandMode() {
    lister.mode = ListerMode.Command
    bar = new CommandBar
    getChildren.add(bar)
    bar.focusInput()
  }
  // put the selected files on the system clipboard, marked for copy/link
  def doActionCopy() {
    val files = view.getSelectionModel.getSelectedItems
    val clipboard = Clipboard.getSystemClipboard
    val content = new ClipboardContent
    content.putFiles(files.toList)
    clipboard.setContent(content)
    ClipboardUtils.setTransferModes(clipboard, Array(TransferMode.COPY, TransferMode.LINK))
  }
  // put the selected files on the system clipboard, marked for move
  def doActionCut() {
    val files = view.getSelectionModel.getSelectedItems
    val clipboard = Clipboard.getSystemClipboard
    val content = new ClipboardContent
    content.putFiles(files.toList)
    clipboard.setContent(content)
    ClipboardUtils.setTransferModes(clipboard, Array(TransferMode.MOVE))
  }
  // paste clipboard content into the current directory; images/html/text are
  // written to a freshly named file
  def doActionPaste() {
    val clipboard = Clipboard.getSystemClipboard
    if (clipboard.hasFiles) {
      // NOTE(review): `modes` is computed but never used and no copy/move is
      // performed — the file-paste branch looks unimplemented; confirm.
      val modes = ClipboardUtils.getTransferModes(clipboard)
    } else if (clipboard.hasImage) {
      val file = FsUtils.getAvailableName(lister.path, "image file", "png")
      FsUtils.createFile(file, clipboard.getImage, false)
    } else if (clipboard.hasHtml) {
      val file = FsUtils.getAvailableName(lister.path, "html file", "html")
      FsUtils.createFile(file, clipboard.getHtml, false)
    } else if (clipboard.hasString) {
      val file = FsUtils.getAvailableName(lister.path, "text file", "txt")
      FsUtils.createFile(file, clipboard.getString, false)
    }
  }
  // build the table (columns + key handling)
  def initUi() {
    view.getSelectionModel.setSelectionMode(SelectionMode.MULTIPLE)
    view.setItems(items)
    view.setOnKeyPressed(new EventHandler[KeyEvent] {
      def handle(e: KeyEvent) {
        if (lister.mode != ListerMode.Normal) return
        val action = keyMapper.convert(e)
        if (action != null) {
          e.consume()
          executeAction(action)
        }
        //
        // NOTE(review): KeyEvent.getCharacter is only meaningful for
        // KEY_TYPED events; for KEY_PRESSED it is usually CHAR_UNDEFINED,
        // so this find-mode trigger may not behave as intended — confirm.
        val str = StringEscapeUtils.unescapeJava(e.getCharacter)
        if (str != null && !str.charAt(0).isControl) {
          enterFindMode(e.getCharacter)
        }
      }
    })
    val colName = new TableColumn[File, String]("Name")
    colName.setMinWidth(20)
    colName.setCellValueFactory(new Callback[TableColumn.CellDataFeatures[File, String], ObservableValue[String]] {
      def call(features: CellDataFeatures[File, String]): ObservableValue[String] = new ReadOnlyObjectWrapper(features.getValue.getName)
    })
    //colName.setCellFactory(cellFactory);
    val colSize = new TableColumn[File, String]("Size")
    colSize.setMinWidth(20)
    colSize.setCellValueFactory(new Callback[TableColumn.CellDataFeatures[File, String], ObservableValue[String]] {
      def call(features: CellDataFeatures[File, String]): ObservableValue[String] = {
        val size = features.getValue.length()
        val str = fmt.formatFileSize(size)
        new ReadOnlyObjectWrapper(str)
      }
    })
    val colType = new TableColumn[File, String]("Type")
    colType.setMinWidth(20)
    colType.setCellValueFactory(new Callback[TableColumn.CellDataFeatures[File, String], ObservableValue[String]] {
      def call(features: CellDataFeatures[File, String]): ObservableValue[String] = {
        val str = if(features.getValue.isDirectory) "Directory" else "File"
        new ReadOnlyObjectWrapper(str)
      }
    })
    val colModified = new TableColumn[File, String]("Modified")
    colModified.setMinWidth(20)
    colModified.setCellValueFactory(new Callback[TableColumn.CellDataFeatures[File, String], ObservableValue[String]] {
      def call(features: CellDataFeatures[File, String]): ObservableValue[String] = {
        val file = features.getValue
        val str = fmt.formatFileDate(file.lastModified())
        new ReadOnlyObjectWrapper(str)
      }
    })
    view.getColumns.addAll(colName, colSize, colType, colModified)
  }
  // navigate to the parent directory
  // NOTE(review): File.getParent returns null at a filesystem root, which
  // would make `new File(null)` throw — confirm root is unreachable here.
  def goUp() {
    lister.path = new File(lister.path.getParent)
    updateUi()
  }
  // open the selection: descend into a directory, or open a file externally
  def enterDir() {
    val file = view.getSelectionModel.getSelectedItem
    if(file.isDirectory) {
      lister.path = file
      updateUi()
    } else {
      Shell.open(file)
    }
  }
  // re-read the current directory and reset the selection to the first row
  // NOTE(review): File.list() returns null on I/O error or when the path is
  // not a directory, which would make the for-loop throw — confirm.
  def updateUi() {
    pathPane.path = lister.path.toString
    items.clear()
    for(name <- lister.path.list()) {
      items.add(new File(lister.path, name))
    }
    view.getSelectionModel.clearSelection()
    view.getSelectionModel.select(0)
    view.getFocusModel.focus(0)
    //list.requestFocus()
  }
}
| xiaodongw/oneopus | app/src/main/scala/info/mornlight/oneopus/ListerPane.scala | Scala | apache-2.0 | 7,194 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.blob.api
import org.locationtech.geomesa.utils.geotools.{SftBuilder, SimpleFeatureTypes}
/**
 * Definition of the SimpleFeatureType used by the GeoMesa blob store to
 * index stored blobs (one feature per blob).
 */
object GeoMesaBlobStoreSFT {
  // name of the feature type
  val BlobFeatureTypeName = "blob"
  // attribute names of the feature type
  val IdFieldName = "storeId"
  val GeomFieldName = "geom"
  val FilenameFieldName = "filename"
  val DtgFieldName = "dtg"
  val ThumbnailFieldName = "thumbnail"
  // TODO: Add metadata hashmap?
  // TODO GEOMESA-1186 allow for configurable geometry types
  // storeId is indexed so blobs can be looked up by id; geom and dtg are the
  // default geometry and date attributes. NB: the builder call order defines
  // the attribute order of the resulting type — do not reorder.
  val sft = new SftBuilder()
    .stringType(FilenameFieldName)
    .stringType(IdFieldName, index = true)
    .geometry(GeomFieldName, default = true)
    .date(DtgFieldName, default = true)
    .stringType(ThumbnailFieldName)
    .userData(SimpleFeatureTypes.MIXED_GEOMETRIES, "true")
    .build(BlobFeatureTypeName)
}
| mdzimmerman/geomesa | geomesa-blobstore/geomesa-blobstore-api/src/main/scala/org/locationtech/geomesa/blob/api/GeoMesaBlobStoreSFT.scala | Scala | apache-2.0 | 1,252 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.cfclerk.services
import com.normation.cfclerk.domain._
import net.liftweb.common.Box
/**
 * A do-nothing Cf3PromisesFileWriterService used as a test double: every
 * operation returns null (or does nothing).
 *
 * NOTE: returning null is only acceptable because this is a dummy for
 * tests — none of these values are expected to be dereferenced.
 */
class DummyPolicyTranslator extends Cf3PromisesFileWriterService {
  def readTemplateFromFileSystem(techniques:Set[TechniqueId]) : Box[Map[Cf3PromisesFileTemplateId, Cf3PromisesFileTemplateCopyInfo]] = null
  def prepareCf3PromisesFileTemplate(policyContainer : Cf3PolicyDraftContainer, extraVariables : Map[String, Variable]) : PreparedTemplates = {
    null
  }
  def movePromisesToFinalPosition(folders : Seq[PromisesFinalMoveInfo]) : Seq[PromisesFinalMoveInfo]= {
    null
  }
  // intentionally a no-op: the dummy never writes anything to disk
  def writePromisesFiles(fileSet: Set[Cf3PromisesFileTemplateCopyInfo], variableSet: Seq[STVariable], outPath: String, csvLines: Seq[String]): Unit = {
  }
  /**
   * Concatenate all the variables for each policy Instances.
   * @param policyContainer
   * @return
   */
  def prepareAllCf3PolicyDraftVariables(policyContainer: Cf3PolicyDraftContainer) = null
}
| VinceMacBuche/cf-clerk | src/test/scala/com/normation/cfclerk/services/DummyPolicyTranslator.scala | Scala | agpl-3.0 | 2,632 |
package org.eso.ias.asce
import java.util.concurrent.ThreadFactory
/**
 * Thread factory for the threads spawned by the ComputingElement: every
 * thread it creates is a daemon thread named after the ASCE running ID.
 *
 * @param runningID the running ID of the owning computing element,
 *                  used as the name of each created thread
 */
class CompEleThreadFactory(val runningID: String) extends ThreadFactory {
  override def newThread(r: Runnable): Thread = {
    val worker = new Thread(r, runningID)
    // daemon threads do not prevent JVM shutdown
    worker.setDaemon(true)
    worker
  }
}
package com.example.akka
import scala.reflect.ClassTag
import spray.json._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.model.{HttpEntity, StatusCode}
/**
 * Exception carrying a ready-made HTTP error response: the status code to
 * answer with and an optional response body.
 *
 * @param responseStatus the status code
 * @param response the optional body
 */
case class ErrorResponseException(responseStatus: StatusCode, response: Option[HttpEntity]) extends Exception
/**
 * Mix-in providing reusable spray-json formats on top of the default
 * protocol and the Akka HTTP spray-json (un)marshalling support.
 */
trait DefaultJsonFormats extends DefaultJsonProtocol with SprayJsonSupport {

  /**
   * Computes ``RootJsonFormat`` for type ``A`` if ``A`` is an object-like
   * class with a public no-argument constructor.
   *
   * Serialization writes only the simple class name; deserialization
   * ignores the JSON payload and reflectively creates a fresh ``A``.
   */
  def jsonObjectFormat[A : ClassTag]: RootJsonFormat[A] = new RootJsonFormat[A] {
    val ct: ClassTag[A] = implicitly[ClassTag[A]]
    def write(obj: A): JsValue = JsObject("value" -> JsString(ct.runtimeClass.getSimpleName))
    // Class#newInstance is deprecated since Java 9 (it silently rethrows
    // checked exceptions); go through the no-arg constructor explicitly.
    def read(json: JsValue): A = ct.runtimeClass.getDeclaredConstructor().newInstance().asInstanceOf[A]
  }
}
| pjfanning/swagger-akka-http-sample | src/main/scala/com/example/akka/DefaultJsonFormats.scala | Scala | apache-2.0 | 942 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
/** Serialization round-trip test for the Bottle container module. */
class BottleSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    // random 1-D input of length 10, matching the Linear layer's input size
    val sample = Tensor[Float](10).apply1(_ => Random.nextFloat())
    // Bottle wrapping a 10 -> 2 Linear layer, named for serialization lookup
    val wrapped = new Bottle[Float](
      Linear[Float](10, 2).asInstanceOf[Module[Float]], 2, 2).setName("bottle")
    runSerializationTest(wrapped, sample)
  }
}
| wzhongyuan/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/BottleSpec.scala | Scala | apache-2.0 | 1,146 |
import sampler.distribution.DistributionImplicits
import sampler.samplable.SamplableSyntax
import sampler.samplable.WithoutReplacementImplicits
import sampler.empirical.EmpiricalImplicits
import sampler.io.{Meta, Rounding}
import sampler.maths.RangeCheckImplicits
/**
 * Package object aggregating the public API of the sampler library:
 * `import sampler._` brings the sampling, distribution, empirical and
 * maths syntax/implicits into scope in one go.
 *
 * Note: the mixin order participates in trait linearization and implicit
 * resolution — do not reorder casually.
 */
package object sampler
    extends SamplableSyntax
    with DistributionImplicits
    with WithoutReplacementImplicits
    with EmpiricalImplicits
    with RangeCheckImplicits
    with Rounding
    with Meta
package in.suhj.eridown.elements.block
import in.suhj.eridown.core._
import in.suhj.eridown.option.Option.blocks
import scala.collection.mutable.ListBuffer
import xml.Utility.escape
case class Code(content: String, language: String) extends Element {
def render = {
val lang = if (language.nonEmpty) s""" class="language-$language"""" else ""
s"""<pre$lang><code>${escape(content)}</code></pre>"""
}
}
case class CodeFence(fence: String, language: String) extends Element {
def render = fence
override def integrate(targets: List[Element]): IntegrateResult = {
val content = new ListBuffer[String]
var canIntegrate = true
var index = 0
while (canIntegrate && index < targets.length) {
val target = targets(index)
target match {
case CodeFence(text, "") => canIntegrate = false
case item => content += item.rawText
}
index += 1
}
(Code(content.mkString("\\n"), language), index + 1)
}
}
object CodeFenceGenerator extends Generator {
def generate(text: String): Option[GenerateResult] = {
val scanner = Scanner(text)
var indent = 0
while (scanner.atWhitespace) {
scanner.skip(1)
indent += 1
}
val fence =
if (scanner.reads("```")) {
scanner.mark()
scanner.skip(3)
while (scanner.currentChar == '`') {
scanner.skip(1)
}
scanner.extract
} else if (scanner.reads("~~~")) {
scanner.mark()
scanner.skip(3)
while (scanner.currentChar == '~') {
scanner.skip(1)
}
scanner.extract
} else ""
if (fence.isEmpty) return None
scanner.mark()
scanner.findAny(List(' ', '\\n'))
val lang = scanner.extract.trim
scanner.skipToNextLine()
Some(CodeFence(fence, lang), scanner.position)
}
} | raon0211/eridown | src/main/scala/in/suhj/eridown/elements/block/Code.scala | Scala | mit | 2,101 |
package no.edh.pawnstars.pieces
/** Marker type for the two chess piece colours. */
trait Color
// NOTE(review): zero-argument case classes are discouraged (every White()
// call allocates a fresh instance); `case object White` / `case object Black`
// would be idiomatic, but switching would break callers that write White().
case class White() extends Color
case class Black() extends Color
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system
import org.apache.samza.checkpoint.TestCheckpointTool.MockCheckpointManagerFactory
import org.apache.samza.config._
import org.apache.samza.coordinator.MockSystemFactory
import org.apache.samza.job.local.ThreadJobFactory
import org.apache.samza.{Partition, SamzaException}
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable
import scala.collection.JavaConverters._
import scala.collection.JavaConversions._
class TestRegexSystemStreamPartitionMatcher {
val sspSet = mutable.Set(new SystemStreamPartition("test", "stream1", new Partition(0)))
sspSet.add(new SystemStreamPartition("test", "stream1", new Partition(1)))
sspSet.add(new SystemStreamPartition("test", "stream1", new Partition(2)))
@Test
def testFilterWithMatcherConfigRegex() {
val config = getConfig("[1-2]")
val expectedSspSet = mutable.Set(new SystemStreamPartition("test", "stream1", new Partition(1)))
expectedSspSet.add(new SystemStreamPartition("test", "stream1", new Partition(2)))
val filteredSet = new RegexSystemStreamPartitionMatcher().filter(sspSet.asJava, config)
assertEquals(2, filteredSet.size)
assertEquals(expectedSspSet.asJava, filteredSet)
}
private def getConfig(regex: String): MapConfig = {
new MapConfig(mutable.Map(
TaskConfig.CHECKPOINT_MANAGER_FACTORY -> classOf[MockCheckpointManagerFactory].getCanonicalName,
TaskConfig.INPUT_STREAMS -> "test.stream1",
JobConfig.STREAM_JOB_FACTORY_CLASS -> classOf[ThreadJobFactory].getCanonicalName,
JobConfig.SSP_MATCHER_CLASS -> JobConfig.SSP_MATCHER_CLASS_REGEX,
JobConfig.SSP_MATCHER_CONFIG_REGEX -> regex,
(SystemConfig.SYSTEM_FACTORY format "test") -> classOf[MockSystemFactory].getCanonicalName))
}
@Test
def testFilterWithMatcherConfigRegexWithNomatches() {
val config = getConfig("--")
val filteredSet = new RegexSystemStreamPartitionMatcher().filter(sspSet, config)
assertEquals(0, filteredSet.size)
}
@Test(expected = classOf[SamzaException])
def testFilterWithNoMatcherConfigRegex() {
val config = new MapConfig(mutable.Map(
TaskConfig.CHECKPOINT_MANAGER_FACTORY -> classOf[MockCheckpointManagerFactory].getCanonicalName,
TaskConfig.INPUT_STREAMS -> "test.stream1",
JobConfig.STREAM_JOB_FACTORY_CLASS -> classOf[ThreadJobFactory].getCanonicalName,
JobConfig.SSP_MATCHER_CLASS -> JobConfig.SSP_MATCHER_CONFIG_REGEX,
(SystemConfig.SYSTEM_FACTORY format "test") -> classOf[MockSystemFactory].getCanonicalName))
new RegexSystemStreamPartitionMatcher().filter(sspSet, config)
}
} | InnovaCo/samza | samza-core/src/test/scala/org/apache/samza/system/TestRegexSystemStreamPartitionMatcher.scala | Scala | apache-2.0 | 3,434 |
/** This will run a loooong time if Set's builder copies a
 *  complete new Set for every element.
 */
object Test {
  def main(args: Array[String]): Unit = {
    // a million longs, where indices 1..10000 hold their own value
    val longs = new Array[Long](1000000)
    for (i <- 1 to 10000) longs(i) = i
    // feed the whole array to the Set builder in one varargs call
    val wrapped = collection.immutable.ArraySeq.unsafeWrapArray(longs)
    val set = collection.mutable.Set(wrapped: _*)
    assert(set.sum > 0)
  }
}
| scala/scala | test/files/run/adding-growing-set.scala | Scala | apache-2.0 | 354 |
package com.twitter.finagle.zipkin.thrift
import org.scalatest.FunSuite
import com.twitter.util.Time
import com.twitter.finagle.tracing.{Flags, SpanId, TraceId}
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class SpanTest extends FunSuite {
test("Span should serialize properly") {
val ann = ZipkinAnnotation(Time.now, "value", Endpoint(1, 2))
val traceId = TraceId(Some(SpanId(123)), Some(SpanId(123)), SpanId(123), None, Flags().setDebug)
val span = Span(traceId, Some("service"), Some("name"), Seq(ann), Seq(), Endpoint(123, 123))
val tspan = span.toThrift
assert(tspan.isSetAnnotations)
val host = tspan.getAnnotations.get(0).getHost
assert(host.getService_name == "service")
assert(tspan.isSetName)
assert(tspan.getName == "name")
!tspan.isSetBinary_annotations
assert(tspan.isSetId)
assert(tspan.getId == 123)
assert(tspan.isSetParent_id)
assert(tspan.getParent_id == 123)
assert(tspan.isSetTrace_id)
assert(tspan.getTrace_id == 123)
assert(tspan.isDebug)
}
}
| lukiano/finagle | finagle-zipkin/src/test/scala/com/twitter/finagle/zipkin/thrift/SpanTest.scala | Scala | apache-2.0 | 1,097 |
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.io
import java.nio.ByteBuffer
/**
 * A pool of reusable [[java.nio.ByteBuffer]]s.
 *
 * Implementations hand out a buffer via `acquire` and take it back via
 * `release`; callers must not use a buffer after releasing it.
 */
trait BufferPool {
  def acquire(): ByteBuffer
  def release(buf: ByteBuffer)
}
/**
* INTERNAL API
*
* A buffer pool which keeps a free list of direct buffers of a specified default
* size in a simple fixed size stack.
*
* If the stack is full the buffer is de-referenced and available to be
* freed by normal garbage collection.
*
* Using a direct ByteBuffer when dealing with NIO operations has been proven
* to be faster than wrapping on-heap Arrays. There is ultimately no performance
* benefit to wrapping in-heap JVM data when writing with NIO.
*/
/**
 * Fixed-capacity free list of direct `ByteBuffer`s.
 *
 * Up to `maxPoolEntries` released buffers are retained for reuse; any surplus
 * buffer is simply dropped and reclaimed by normal garbage collection.
 * The array itself serves as the lock guarding the free-list state.
 */
private[akka] class DirectByteBufferPool(defaultBufferSize: Int, maxPoolEntries: Int) extends BufferPool {
  // Slots [0, available) hold pooled buffers; slots above are unused.
  private[this] val freeList: Array[ByteBuffer] = new Array[ByteBuffer](maxPoolEntries)
  private[this] var available: Int = 0

  def acquire(): ByteBuffer = takeBufferFromPool()

  def release(buf: ByteBuffer): Unit = offerBufferToPool(buf)

  private def allocate(size: Int): ByteBuffer = ByteBuffer.allocateDirect(size)

  private final def takeBufferFromPool(): ByteBuffer = {
    val pooled = freeList.synchronized {
      if (available == 0) null
      else {
        available -= 1
        freeList(available)
      }
    }
    // Allocating a fresh buffer (or clearing a recycled one) is done outside
    // the lock to keep the critical section minimal.
    if (pooled eq null) allocate(defaultBufferSize)
    else {
      pooled.clear()
      pooled
    }
  }

  private final def offerBufferToPool(buf: ByteBuffer): Unit =
    freeList.synchronized {
      if (available < maxPoolEntries) {
        freeList(available) = buf
        available += 1
      } // when the free list is full, drop the buffer and let it be gc'd
    }
}
| jmnarloch/akka.js | akka-js-actor/jvm/src/main/scala/akka/io/DirectByteBufferPool.scala | Scala | bsd-3-clause | 1,736 |
package database
import java.util.concurrent.TimeUnit
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import org.mongodb.scala._
/**
 * Convenience conversions for working synchronously with the MongoDB Scala
 * driver's `Observable`s in small tools and tests: block for results, print
 * them, or fetch just the first element.
 */
object Helpers {

  /** Renders `Document` results as JSON rather than via `toString`. */
  implicit class DocumentObservable[C](val observable: Observable[Document])
      extends ImplicitObservable[Document] {
    override val converter: (Document) => String = _.toJson
  }

  /** Fallback rendering for any other element type. */
  implicit class GenericObservable[C](val observable: Observable[C])
      extends ImplicitObservable[C] {
    override val converter: (C) => String = _.toString
  }

  trait ImplicitObservable[C] {
    val observable: Observable[C]
    val converter: (C) => String

    /** Blocks (up to 20s) until the observable completes and returns all emitted elements. */
    def results(): Seq[C] =
      Await.result(observable.toFuture(), Duration(20, TimeUnit.SECONDS))

    /** Blocks (up to 10s) for the first emitted element. */
    def headResult() =
      Await.result(observable.head(), Duration(10, TimeUnit.SECONDS))

    /** Prints every result on its own line, optionally preceded by `initial` (no newline after it). */
    def printResults(initial: String = "") = {
      if (initial.length > 0) print(initial)
      for (item <- results()) println(converter(item))
    }

    /** Prints `initial` immediately followed by the first result. */
    def printHeadResult(initial: String = ""): Unit =
      println(s"${initial}${converter(headResult())}")
  }
}
| jpdias/kugsha | src/main/scala/database/Helpers.scala | Scala | mit | 1,115 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.eval.Task
import monix.execution.Cancelable
import monix.reactive.{BaseConcurrencySuite, Observable}
import scala.concurrent.duration._
import scala.concurrent.{Await, Future, Promise}
/**
 * Concurrency stress tests for `Observable.scanEval` (a.k.a. scanTask):
 * checks correct accumulation for both sync and async inner tasks, and that
 * cancellation of an in-flight inner task is propagated (regression for
 * issue #468).
 */
object ScanTaskConcurrencySuite extends BaseConcurrencySuite {
  // Generous timeout / iteration count: the cancellation test deliberately
  // creates races, so it is repeated many times to make failures likely.
  val cancelTimeout = 30.seconds
  val cancelIterations = 1000

  test("scanTask should work for synchronous children") { implicit s =>
    val count = 10000L
    // Each element x contributes 3*x, hence 3 * sum(0 .. count-1).
    val expected = 3L * count * (count - 1) / 2
    for (_ <- 0 until 100) {
      val sum = Observable.range(0, count)
        .scanEval(Task.now(0L))((_, x) => Task.now(x * 3))
        .sumL
        .runToFuture

      val result = Await.result(sum, 30.seconds)
      assertEquals(result, expected)
    }
  }

  test("scanTask should work for asynchronous children") { implicit s =>
    val count = 10000L
    val expected = 3L * count * (count - 1) / 2
    for (_ <- 0 until 100) {
      val sum = Observable.range(0, count)
        .scanEval(Task.now(0L))((_, x) => Task.evalAsync(x * 3))
        .sumL
        .runToFuture

      val result = Await.result(sum, 30.seconds)
      assertEquals(result, expected)
    }
  }

  test(s"scanTask should be cancellable, test 1, count $cancelIterations (issue #468)") { implicit s =>
    // A task that never completes; its Cancelable fulfils the promise when
    // the subscription is cancelled, letting the test observe propagation.
    def never(): (Future[Unit], Task[Int]) = {
      val isCancelled = Promise[Unit]()
      val ref = Task.create[Int]((_, _) => Cancelable(() => isCancelled.success(())))
      (isCancelled.future, ref)
    }

    for (i <- 0 until cancelIterations) {
      val (isCancelled, ref) = never()
      val c = Observable(1).scanEval(Task.now(0))((_, _) => ref).subscribe()

      // Creating race condition: alternate between cancelling from another
      // thread and cancelling synchronously.
      if (i % 2 == 0) {
        s.executeAsync(() => c.cancel())
      } else {
        c.cancel()
      }

      // Must complete within the timeout, proving the cancel reached the task.
      Await.result(isCancelled, cancelTimeout)
    }
  }
}
| Wogan/monix | monix-reactive/jvm/src/test/scala/monix/reactive/internal/operators/ScanTaskConcurrencySuite.scala | Scala | apache-2.0 | 2,542 |
package com.xhachi.gae4s.json
import java.text.SimpleDateFormat
import org.json4s._
import org.json4s.ext.EnumNameSerializer
import org.json4s.native.JsonMethods.{parse => parseByJson4s}
import org.json4s.native.Serialization.write
import scala.collection.mutable
object Json extends Json(None)

/**
 * Thin JSON (de)serialization facade over json4s.
 *
 * Serialized class names are written to / read from a `"class"` type-hint
 * field for all registered target classes, and registered enumerations are
 * serialized by value name.
 *
 * TODO: redesign so this can be reused more generically (translated from the
 * original Japanese note).
 *
 * @param dateFormat optional `SimpleDateFormat` pattern; when absent the
 *                   json4s lossless date format is used.
 */
class Json(dateFormat: Option[String]) {

  // Enumerations whose values are serialized by name via EnumNameSerializer.
  protected val enumSeq = new mutable.HashSet[Enumeration]

  // Classes serialized with a full type hint stored in the "class" field.
  protected val typeHintTargetSeq = new mutable.HashSet[Class[_]]

  // Rebuilt by buildFormats() whenever new enums / targets are registered.
  // NOTE(review): registration methods are synchronized, but reads of this
  // var are not — confirm whether concurrent read-during-registration matters.
  implicit protected var _formats: Formats = DefaultFormats

  def formats: Formats = _formats

  /** Registers enumerations to be serialized by value name; rebuilds formats only when something new was added. */
  def addEnum[E <: Enumeration](enums: E*): Unit = synchronized {
    // val, not var: the collection is never reassigned (was a spurious `var`).
    val unregistered = enums.filter(!enumSeq.contains(_))
    if (unregistered.nonEmpty) {
      enumSeq ++= unregistered
      buildFormats()
    }
  }

  /** Registers classes to be (de)serialized with a full type hint; rebuilds formats only on change. */
  def addTypeHintTarget(targetClass: Class[_]*): Unit = synchronized {
    val unregistered = targetClass.filter(!typeHintTargetSeq.contains(_))
    if (unregistered.nonEmpty) {
      typeHintTargetSeq ++= unregistered
      buildFormats()
    }
  }

  /** Recomputes `_formats` from the current date format, type-hint targets and enums. */
  private def buildFormats(): Unit = {
    _formats = dateFormat match {
      case Some(f) =>
        new DefaultFormats {
          override def dateFormatter = new SimpleDateFormat(f)
          override val typeHintFieldName: String = "class"
          override val typeHints: TypeHints = FullTypeHints(typeHintTargetSeq.toList)
        } ++ enumSeq.map(e => new EnumNameSerializer(e))
      case None =>
        new DefaultFormats {
          override def dateFormatter = DefaultFormats.losslessDate()
          override val typeHintFieldName: String = "class"
          override val typeHints: TypeHints = FullTypeHints(typeHintTargetSeq.toList)
        } ++ enumSeq.map(e => new EnumNameSerializer(e))
    }
  }

  buildFormats()

  /**
   * Convert JSON String to case class.
   *
   * @param value JSON string
   * @tparam E Class of case class
   * @return Converted case class instance
   */
  def parseAs[E: Manifest](value: String): E = parse(value).extract[E]

  /**
   * Convert JValue to case class.
   *
   * @param value Instance of JValue
   * @tparam E Class of case class
   * @return Converted case class instance
   */
  def extractAs[E: Manifest](value: JValue): E = value.extract[E]

  /**
   * Convert JSON String to JValue.
   *
   * @param value String as JSON
   * @return Converted JValue
   */
  def parse(value: String): JValue = parseByJson4s(value)

  /**
   * Convert case class to JSON String.
   *
   * @param value Instance of case class
   * @tparam E Class of case class
   * @return Converted json string
   */
  def stringify[E <: AnyRef : Manifest](value: E): String = write[E](value)

  /**
   * Convert case class to JValue.
   *
   * @param value Instance of case class
   * @tparam E Class of case class
   * @return Converted JValue
   */
  def decompose[E <: AnyRef](value: E): JValue = Extraction.decompose(value)

  /**
   * Convert JValue to JSON String.
   *
   * @param value Instance of JValue
   * @return Converted JSON String
   */
  def stringify(value: JValue): String = write(value)
}
| thachi/gae4s | core/src/main/scala/com/xhachi/gae4s/json/Json.scala | Scala | apache-2.0 | 3,156 |
/**
* FILE: HarvestResource.scala
* PERCORSO /Codice/sgad/servertier/src/main/scala/sgad/servertier/businesslogic/operations
* DATA CREAZIONE: 24 Febbraio 2014
* AUTORE: ProTech
* EMAIL: protech.unipd@gmail.com
*
* Questo file รจ proprietร del gruppo ProTech, viene rilasciato sotto licenza Apache v2.
*
* DIARIO DELLE MODIFICHE:
* 2014-02-24 - Creazione della classe - Segantin Fabio
*/
package sgad.servertier.businesslogic.operations
import sgad.servertier.dataaccess.data.userdata.UserData
/**
 * Handles the resource-harvest operation on a building.
 * (Comments translated to English from the original Italian.)
 */
class HarvestResource extends Operation {
  /**
   * Executes the harvest.
   *
   * @param userData Data of the user the operation is performed on.
   * @param data Payload accompanying the operation request.
   * @param loginAuthorization Authorization to handle login requests. Defaults to false.
   * @param registrationAuthorization Authorization to handle registration requests. Defaults to false.
   * @param userAuthorization Authorization to handle user requests. Defaults to false.
   * @param internalAuthorization Authorization to handle internal requests. Defaults to false.
   * @return Response string for the harvest-resources-from-a-building operation.
   */
  def execute(userData: UserData, data: String, loginAuthorization: Boolean, registrationAuthorization: Boolean,
              userAuthorization: Boolean, internalAuthorization: Boolean): String = {
    // Harvest timestamp in whole seconds.
    val harvestTime = System.currentTimeMillis / 1000L
    var answer = "data:false, authorization:false"
    if (userAuthorization) {
      // authorization check
      try {
        OperationFactory.getOperation("UpdateUserData").execute(userData, "", internalAuthorization = true)
        val dataMap = decodeData(data)
        if (userData.getAuthenticationData.getAuthenticationString == dataMap("authentication")) {
          val key = dataMap("key") // check that the data was parsed correctly, otherwise return false (via the catch below)
          val building = userData.getOwnedBuilding(key)
          val resource = building.getBuilding.getProductedResource // fetch the resource produced by the building
          if (resource != null && building.getIsFinished) {
            // the building produces something
            val productionTime = resource.getRelativeTime // time needed for one production cycle
            val productedResources = Math.min(Math.floor((harvestTime - building.getTime) / productionTime).toInt * resource.getQuantity, resource.getMaxQuantity) // quantity produced since last harvest, capped at the maximum
            val currentOwnedResources = userData.getOwnedResource(resource.getResource.getKey)
            currentOwnedResources.setQuantity(currentOwnedResources.getQuantity + productedResources) // update the owned quantity and report success
            building.setTime(
              if (productedResources == resource.getMaxQuantity)
                harvestTime
              else
                harvestTime - harvestTime % productionTime
            )
            answer = "data:{result:true,quantity:" + productedResources + "}, messages:" + parsePiggy(userData.getPiggy)
          } else {
            answer = "data:false, precondition:false, messages:" + parsePiggy(userData.getPiggy)
          }
        } else {
          answer = "data:false, authentication:false"
        }
      } catch {
        case c: Exception => answer = "data:false, exception:true"
      }
    }
    "{" + answer + "}"
  }
}
| protechunipd/SGAD | Codice/sgad/servertier/src/main/scala/sgad/servertier/businesslogic/operations/HarvestResource.scala | Scala | apache-2.0 | 3,397 |
/*
* Copyright 2015 University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.registration
import java.io.File
import java.net.URLDecoder
import breeze.linalg.DenseVector
import scalismo.common.interpolation.{
BSplineImageInterpolator,
BSplineImageInterpolator2D,
BSplineImageInterpolator3D,
NearestNeighborInterpolator
}
import scalismo.{numerics, ScalismoTestSuite}
import scalismo.common.{EuclideanSpace2D, Field, PointId, RealSpace}
import scalismo.geometry._
import scalismo.image.{DiscreteImageDomain2D, DiscreteImageDomain3D}
import scalismo.io.{ImageIO, MeshIO}
import scalismo.kernels.{DiagonalKernel, GaussianKernel}
import scalismo.numerics.{GridSampler, LBFGSOptimizer, UniformSampler}
import scalismo.transformations.{
ProductTransformationSpace,
RigidTransformation,
Rotation2D,
Rotation3D,
RotationSpace2D,
Scaling2D,
Scaling3D,
Translation2D,
Translation3D,
TranslationAfterRotation2D,
TranslationAfterRotation3D,
TranslationAfterScalingAfterRotation2D,
TranslationAfterScalingAfterRotation3D,
TranslationSpace2D,
TranslationSpace3D
}
import scalismo.statisticalmodel.{GaussianProcess, LowRankGaussianProcess}
import scalismo.utils.Random
import scala.language.implicitConversions
/**
 * Regression tests for landmark-based (rigid / similarity, 2D and 3D) and
 * image-based (translation, rotation, GP-deformation) registration.
 * Tolerances are loose (0.01 - 0.1) because the optimizers are iterative.
 */
class RegistrationTests extends ScalismoTestSuite {

  // Fixed seed so stochastic parts (samplers, low-rank GP approximations) are reproducible.
  implicit val random = Random(42)

  implicit def doubleToFloat(d: Double): Float = d.toFloat

  describe("A 2D rigid landmark based registration") {
    it("can retrieve correct parameters") {
      val points: IndexedSeq[Point[_2D]] = IndexedSeq(Point(0.0, 0.0), Point(1.0, 4.0), Point(2.0, 0.0))

      val c = Point(1.0, 4 / 3.0)
      // Sweep a range of rotation angles; recovered transform must reproduce the target points.
      for (angle <- (1 until 16).map(i => math.Pi / i)) {
        val rotation = Rotation2D(-angle, Point2D(0, 0))
        val translation = Translation2D(EuclideanVector2D(1.0, 1.5))
        val compositeTransformation = TranslationAfterRotation2D(translation, rotation)

        val transformedPoints =
          points.map((pt: Point[_2D]) => compositeTransformation(pt))

        val regResult =
          LandmarkRegistration.rigid2DLandmarkRegistration(points.zip(transformedPoints), center = Point2D(0, 0))

        val alignedPoints = points.map((pt: Point[_2D]) => regResult(pt))

        transformedPoints(0)(0) should be(alignedPoints(0)(0) +- 0.0001)
        transformedPoints(0)(1) should be(alignedPoints(0)(1) +- 0.0001)
        transformedPoints(1)(0) should be(alignedPoints(1)(0) +- 0.0001)
        transformedPoints(1)(1) should be(alignedPoints(1)(1) +- 0.0001)
        transformedPoints(2)(0) should be(alignedPoints(2)(0) +- 0.0001)
        transformedPoints(2)(1) should be(alignedPoints(2)(1) +- 0.0001)
      }
    }
  }

  describe("A 3D rigid landmark based registration") {

    val path = getClass.getResource("/facemesh.stl").getPath
    val mesh = MeshIO.readMesh(new File(URLDecoder.decode(path, "UTF-8"))).get

    val translation = Translation3D(EuclideanVector3D(1.5, 1.0, 3.5))
    val rotation = Rotation3D(Math.PI, -Math.PI / 2.0, -Math.PI, center = Point3D(0, 0, 0))
    val trans = TranslationAfterRotation3D(translation, rotation)

    val rigidTransformed = mesh transform trans

    val regResult = LandmarkRegistration.rigid3DLandmarkRegistration(
      mesh.pointSet.points.zip(rigidTransformed.pointSet.points).toIndexedSeq,
      Point(0, 0, 0)
    )

    //should not test on parameters here since many euler angles can lead to the same rotation matrix
    val rigidRegTransformed = mesh transform regResult

    it("can retrieve correct parameters") {

      for ((p, i) <- rigidRegTransformed.pointSet.points.zipWithIndex) {
        val id = PointId(i)
        p(0) should be(rigidTransformed.pointSet.point(id)(0) +- 0.0001)
        p(1) should be(rigidTransformed.pointSet.point(id)(1) +- 0.0001)
        p(2) should be(rigidTransformed.pointSet.point(id)(2) +- 0.0001)
      }
    }

    it("Rigid Transformation forth and back of a mesh gives the same points") {
      val inverseTrans = regResult.inverse
      val transformed = mesh.transform(regResult).transform(inverseTrans)

      for ((p, i) <- transformed.pointSet.points.zipWithIndex) {
        val id = PointId(i)
        p(0) should be(mesh.pointSet.point(id)(0) +- 0.0001)
        p(1) should be(mesh.pointSet.point(id)(1) +- 0.0001)
        p(2) should be(mesh.pointSet.point(id)(2) +- 0.0001)
      }
    }

    it("can retrieve correct transformations when requested with a different center") {

      // pick any center
      val anyCenter = Point(1254, 488, 78)
      val newCenterRegResult = LandmarkRegistration.rigid3DLandmarkRegistration(
        mesh.pointSet.points.zip(rigidTransformed.pointSet.points).toIndexedSeq,
        anyCenter
      )

      val rigidRegNewCenterTransformed = mesh transform newCenterRegResult
      for ((p, i) <- rigidRegNewCenterTransformed.pointSet.points.zipWithIndex) {
        val id = PointId(i)
        p(0) should be(rigidTransformed.pointSet.point(id)(0) +- 0.0001)
        p(1) should be(rigidTransformed.pointSet.point(id)(1) +- 0.0001)
        p(2) should be(rigidTransformed.pointSet.point(id)(2) +- 0.0001)
      }
    }
  }

  describe("A 2D similarity landmark based registration") {
    it("can transform the points appropriately") {
      val points: IndexedSeq[Point[_2D]] = IndexedSeq(Point(0.0, 0.0), Point(1.0, 4.0), Point(2.0, 0.0))

      val c = Point(1.0, 4 / 3.0)
      for (angle <- (1 until 16).map(i => math.Pi / i)) {
        val rotation = Rotation2D(-angle, c)
        val translation = Translation2D(EuclideanVector2D(1.0, 1.5))
        val scalingFactor = 2.0 //scala.util.Random.nextDouble()

        val similarityTransformation =
          TranslationAfterScalingAfterRotation2D(translation, Scaling2D(scalingFactor), rotation)

        val transformedPoints =
          points.map((pt: Point[_2D]) => similarityTransformation(pt))

        val regResult =
          LandmarkRegistration.similarity2DLandmarkRegistration(points.zip(transformedPoints), center = Point2D(0, 0))

        val alignedPoints = points.map(regResult)

        transformedPoints(0)(0) should be(alignedPoints(0)(0) +- 0.0001)
        transformedPoints(0)(1) should be(alignedPoints(0)(1) +- 0.0001)
        transformedPoints(1)(0) should be(alignedPoints(1)(0) +- 0.0001)
        transformedPoints(1)(1) should be(alignedPoints(1)(1) +- 0.0001)
        transformedPoints(2)(0) should be(alignedPoints(2)(0) +- 0.0001)
        transformedPoints(2)(1) should be(alignedPoints(2)(1) +- 0.0001)
      }
    }
  }

  describe("A 3D similarity landmark based registration") {
    it("can transform the mesh appropriately") {

      val path = getClass.getResource("/facemesh.stl").getPath
      val mesh = MeshIO.readMesh(new File(URLDecoder.decode(path, "UTF-8"))).get

      val translation = Translation3D(EuclideanVector3D(1.5, 1.0, 3.5))
      val rotation = Rotation3D(Math.PI, -Math.PI / 2.0, -Math.PI, Point3D(0, 0, 0))
      val scaling = Scaling3D(2.0)
      val trans = TranslationAfterScalingAfterRotation3D(translation, scaling, rotation)

      val translatedRotatedScaled = mesh transform trans

      val regResult = LandmarkRegistration.similarity3DLandmarkRegistration(
        mesh.pointSet.points.zip(translatedRotatedScaled.pointSet.points).toIndexedSeq,
        Point(0, 0, 0)
      )

      //should not test on parameters here since many euler angles can lead to the same rotation matrix
      val regSim = mesh transform regResult

      // Only a subsample of points is checked for speed.
      for ((p, i) <- regSim.pointSet.points.zipWithIndex.take(100)) {
        val id = PointId(i)
        p(0) should be(translatedRotatedScaled.pointSet.point(id)(0) +- 0.0001)
        p(1) should be(translatedRotatedScaled.pointSet.point(id)(1) +- 0.0001)
        p(2) should be(translatedRotatedScaled.pointSet.point(id)(2) +- 0.0001)
      }
    }
  }

  describe("A 2D image registration") {
    it("Recovers the correct parameters for a translation transform") {
      val testImgUrl = getClass.getResource("/dm128.vtk").getPath

      val discreteFixedImage = ImageIO.read2DScalarImage[Float](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get
      val fixedImage = discreteFixedImage.interpolateDifferentiable(BSplineImageInterpolator2D[Float](2))

      val transformationSpace = TranslationSpace2D
      val translationParams = DenseVector[Double](-10.0, 5.0)
      val translationTransform = transformationSpace.transformationForParameters(translationParams)
      val transformedLena = fixedImage compose translationTransform

      val domain = discreteFixedImage.domain
      val metricSampler = GridSampler(DiscreteImageDomain2D(domain.boundingBox, IntVector(20, 20)))

      val regIt = Registration(
        MeanSquaresMetric(fixedImage, transformedLena, transformationSpace, metricSampler),
        L2Regularizer[_2D](transformationSpace),
        regularizationWeight = 0.0,
        LBFGSOptimizer(maxNumberOfIterations = 300)
      ).iterator(DenseVector.zeros[Double](transformationSpace.numberOfParameters))

      val regResult = regIt.toSeq.last

      // Recovered parameters are the negation of the applied translation
      // (moving image was the translated one).
      -regResult.parameters(0) should be(translationParams(0) +- 0.01)
      -regResult.parameters(1) should be(translationParams(1) +- 0.01)
    }

    it("Recovers the correct parameters for a rotation transform") {
      val testImgUrl = getClass.getResource("/dm128.vtk").getPath
      val discreteFixedImage = ImageIO.read2DScalarImage[Float](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get
      val fixedImage = discreteFixedImage.interpolateDifferentiable(BSplineImageInterpolator2D[Float](3))

      val domain = discreteFixedImage.domain
      val center = ((domain.boundingBox.oppositeCorner - domain.origin) * 0.5).toPoint
      val transformationSpace = RotationSpace2D(center)
      val rotationParams = DenseVector[Double](math.Pi / 8.0)
      val transform = transformationSpace.transformationForParameters(rotationParams)
      val transformedLena = fixedImage compose transform

      val metricSampler = GridSampler(DiscreteImageDomain2D(domain.boundingBox, IntVector(20, 20)))
      val metric = MeanSquaresMetric(transformedLena, fixedImage, transformationSpace, metricSampler)
      val regIter = Registration(
        metric,
        L2Regularizer(transformationSpace),
        0.0,
        numerics.LBFGSOptimizer(maxNumberOfIterations = 100)
      ).iterator(DenseVector.zeros[Double](transformationSpace.numberOfParameters))

      val regResult = regIter.toSeq.last

      regResult.parameters(0) should be(rotationParams(0) +- 0.01)
    }

    it("Recovers the correct parameters for a gp transform") {
      val testImgUrl = getClass.getResource("/dm128.vtk").getPath

      val discreteFixedImage = ImageIO.read2DScalarImage[Float](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get
      val fixedImage = discreteFixedImage.interpolateDifferentiable(BSplineImageInterpolator2D[Float](3))

      val domain = discreteFixedImage.domain

      val gp = GaussianProcess(Field(RealSpace[_2D], (_: Point[_2D]) => EuclideanVector.zeros[_2D]),
                               DiagonalKernel(GaussianKernel[_2D](50.0) * 50.0, 2))
      val sampler = UniformSampler(domain.boundingBox, numberOfPoints = 200)
      val lowRankGp = LowRankGaussianProcess.approximateGPNystrom(gp, sampler, numBasisFunctions = 3)

      val gpParams = DenseVector.ones[Double](lowRankGp.rank)
      val transformationSpace = GaussianProcessTransformationSpace(lowRankGp)
      val groundTruthTransform = transformationSpace.transformationForParameters(gpParams)

      val transformedLena = fixedImage compose groundTruthTransform

      val metricSampler = GridSampler(DiscreteImageDomain2D(domain.boundingBox, IntVector(20, 20)))
      val metric = MeanSquaresMetric(transformedLena, fixedImage, transformationSpace, metricSampler)

      val regIt =
        Registration(metric, L2Regularizer(transformationSpace), 0.0, LBFGSOptimizer(maxNumberOfIterations = 300))
          .iterator(DenseVector.zeros[Double](transformationSpace.numberOfParameters))

      val regResult = regIt.toSeq.last

      for (i <- 0 until regResult.parameters.size) {
        regResult.parameters(i) should be(gpParams(0) +- 0.1)
      }
    }

    it("Recovers the correct parameters for a gp transform with a nn interpolated gp") {
      val testImgUrl = getClass.getResource("/dm128.vtk").getPath

      val discreteFixedImage = ImageIO.read2DScalarImage[Float](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get
      val fixedImage = discreteFixedImage.interpolateDifferentiable(BSplineImageInterpolator2D[Float](3))

      val domain = discreteFixedImage.domain

      val gp = GaussianProcess(Field(RealSpace[_2D], (_: Point[_2D]) => EuclideanVector.zeros[_2D]),
                               DiagonalKernel(GaussianKernel[_2D](50.0) * 50.0, 2))
      val sampler = UniformSampler(domain.boundingBox, numberOfPoints = 200)
      val lowRankGp = LowRankGaussianProcess.approximateGPNystrom(gp, sampler, numBasisFunctions = 3)
      // Same as above, but the GP is discretized and nearest-neighbor
      // interpolated before being used as a transformation space.
      val nnInterpolatedGp = lowRankGp.discretize(domain).interpolate(NearestNeighborInterpolator())

      val transformationSpace = GaussianProcessTransformationSpace(nnInterpolatedGp)
      val gpParams = DenseVector.ones[Double](lowRankGp.rank)
      val groundTruthTransform = transformationSpace.transformationForParameters(gpParams)
      val transformedLena = fixedImage compose groundTruthTransform

      val metricSampler = GridSampler(DiscreteImageDomain2D(domain.boundingBox, IntVector(20, 20)))
      val metric = MeanSquaresMetric(transformedLena, fixedImage, transformationSpace, metricSampler)

      val regIt = Registration(
        metric,
        L2Regularizer(transformationSpace),
        regularizationWeight = 0.0,
        LBFGSOptimizer(maxNumberOfIterations = 300)
      ).iterator(DenseVector.zeros[Double](transformationSpace.numberOfParameters))

      val regResult = regIt.toSeq.last

      for (i <- 0 until regResult.parameters.size) {
        regResult.parameters(i) should be(gpParams(0) +- 0.1)
      }
    }

    it("Recovers the correct parameters for a composed rigid and gp transform") {
      val testImgUrl = getClass.getResource("/dm128.vtk").getPath

      val discreteFixedImage = ImageIO.read2DScalarImage[Float](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get
      val fixedImage = discreteFixedImage.interpolateDifferentiable(BSplineImageInterpolator(3))

      val domain = discreteFixedImage.domain

      val gp = GaussianProcess(Field(EuclideanSpace2D, (_: Point[_2D]) => EuclideanVector.zeros[_2D]),
                               DiagonalKernel(GaussianKernel[_2D](20.0) * 50.0, 2))

      val lowRankGp =
        LowRankGaussianProcess.approximateGPCholesky(domain, gp, 0.1, NearestNeighborInterpolator()).truncate(5)

      val translationSpace = TranslationSpace2D
      val gpTransformationSpace = GaussianProcessTransformationSpace2D(lowRankGp)
      val transformationSpace =
        ProductTransformationSpace(
          translationSpace,
          gpTransformationSpace
        )

      val gtParams = DenseVector.vertcat(DenseVector.ones[Double](translationSpace.numberOfParameters) * 10.0,
                                         DenseVector.ones[Double](gpTransformationSpace.numberOfParameters) * 1.0)
      val groundTruthTransform = transformationSpace.transformationForParameters(gtParams)
      val transformedLena = fixedImage compose groundTruthTransform

      val metricSampler = GridSampler(DiscreteImageDomain2D(domain.boundingBox, IntVector(20, 20)))
      val metric = MeanSquaresMetric(transformedLena, fixedImage, transformationSpace, metricSampler)

      val regIt = Registration(
        metric,
        L2Regularizer(transformationSpace),
        regularizationWeight = 0.0,
        LBFGSOptimizer(maxNumberOfIterations = 300)
      ).iterator(DenseVector.zeros[Double](transformationSpace.numberOfParameters))

      // Debug printing of intermediate optimizer state while iterating.
      val regItPrinting = for (it <- regIt) yield {
        println(it.value)
        println(it.parameters)
        it
      }

      val regResult = regItPrinting.toSeq.last

      for (i <- 0 until regResult.parameters.size) {
        regResult.parameters(i) should be(gtParams(i) +- 0.1)
      }
    }
  }

  describe("A 3D image registration") {
    val testImgUrl = getClass.getResource("/3ddm.nii").getPath

    val discreteFixedImage = ImageIO.read3DScalarImage[Float](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get
    val fixedImage = discreteFixedImage.interpolateDifferentiable(BSplineImageInterpolator3D[Float](3))

    val transformationSpace = TranslationSpace3D
    val domain = discreteFixedImage.domain

    it("Recovers the correct parameters for a translation transform") {

      val translationParams = DenseVector[Double](-10.0, 0.0, 0.0)
      val translationTransform = TranslationSpace3D.transformationForParameters(translationParams)
      val transformed = fixedImage compose translationTransform

      val metricSampler = GridSampler(DiscreteImageDomain3D(domain.boundingBox, IntVector(20, 20, 20)))

      val metric = MeanSquaresMetric(fixedImage, transformed, transformationSpace, metricSampler)

      val regIt = Registration(
        metric,
        L2Regularizer(transformationSpace),
        regularizationWeight = 0.0,
        LBFGSOptimizer(maxNumberOfIterations = 300)
      ).iterator(DenseVector.zeros[Double](transformationSpace.numberOfParameters))

      val regResult = regIt.toSeq.last

      -regResult.parameters(0) should be(translationParams(0) +- 0.01)
      -regResult.parameters(1) should be(translationParams(1) +- 0.01)
      -regResult.parameters(2) should be(translationParams(2) +- 0.01)
    }
  }
}
| unibas-gravis/scalismo | src/test/scala/scalismo/registration/RegistrationTests.scala | Scala | apache-2.0 | 18,185 |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package phase
import akka.actor.ActorRef
/**
 * Tracks progress of a multi-phase computation. Concrete subclasses decide
 * how begin/end/finish events are reported.
 *
 * Note: the deprecated Scala procedure syntax (`def f(...) { ... }`) was
 * replaced with explicit `: Unit =` result types; behavior is unchanged.
 */
abstract class MultiPhasedProgress {

  /** Marks each of the given phases as begun and immediately ended, in order. */
  private[phase] def skipPhases(phases: Seq[PhaseDetails]): Unit = {
    phases.foreach(skipPhase)
  }

  /** Reports a single phase as started and finished without running any work in it. */
  protected def skipPhase(phaseDetails: PhaseDetails): Unit = {
    beginPhase(phaseDetails)
    endPhase(phaseDetails)
  }

  /** Runs `action` bracketed by begin/end notifications for `phaseDetails` and returns its result. */
  private[phase] def inPhase[T](phaseDetails: PhaseDetails)(action: => T): T = {
    beginPhase(phaseDetails)
    val result = action
    endPhase(phaseDetails)
    result
  }

  /** Reports that the given phase has started. */
  protected def beginPhase(phaseDetails: PhaseDetails): Unit

  /** Reports that the given phase has completed. */
  protected def endPhase(phaseDetails: PhaseDetails): Unit

  /** Reports that the whole chain finished with `result`. */
  private[phase] def finish(result: Any): Unit
}
/**
 * A [[MultiPhasedProgress]] that forwards every progress event as a message
 * to `progressActor`. The full phase list is announced with an `Init` message
 * as soon as the instance is constructed.
 *
 * Note: deprecated procedure syntax replaced with explicit `: Unit =`;
 * behavior is unchanged.
 */
private[phase] class NotifyingActorProgress(progressActor: ActorRef, phasesDetails: Seq[PhaseDetails])
  extends MultiPhasedProgress {

  progressActor ! Init(phasesDetails)

  protected def beginPhase(phaseDetails: PhaseDetails): Unit = {
    progressActor ! BeginPhase(phaseDetails)
  }

  protected def endPhase(phaseDetails: PhaseDetails): Unit = {
    progressActor ! EndPhase(phaseDetails)
  }

  private[phase] def finish(result: Any): Unit = {
    progressActor ! Finish(result)
  }
}
/**
 * Executes a [[PhasesChain]] while reporting progress through the given
 * [[MultiPhasedProgress]].
 */
class ChainRunner[-In, +Out](chain: PhasesChain[In, Out], progress: MultiPhasedProgress) {

  /** Runs the chain on `in`, notifies the progress tracker of the outcome, and returns it. */
  def run(in: In): Out = {
    val outcome = chain.run(progress)(in)
    progress.finish(outcome)
    outcome
  }
}
object ChainRunner {
  /**
   * Creates a runner that reports progress to `progressActor` via a
   * [[NotifyingActorProgress]] initialized with the chain's phase details.
   */
  def actorBased[In, Out](chain: PhasesChain[In, Out], progressActor: ActorRef): ChainRunner[In, Out] = {
    val progress = new NotifyingActorProgress(progressActor, chain.phasesDetails)
    new ChainRunner(chain, progress)
  }
}
package pl.newicom.dddd.aggregate
import pl.newicom.dddd.messaging.MetaData
import pl.newicom.dddd.office.CaseRef
case class CommandHandlerContext[C <: Config](caseRef: CaseRef, config: C, commandMetaData: MetaData)
| AndreyLadniy/akka-ddd | akka-ddd-core/src/main/scala/pl/newicom/dddd/aggregate/CommandHandlerContext.scala | Scala | mit | 218 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.routing.sird
import java.net.{ URI, URL }
import play.api.mvc.RequestHeader
/** Matches only when `paramName` has at least one value, yielding the first one. */
class RequiredQueryStringParameter(paramName: String) extends QueryStringParameterExtractor[String] {
  def unapply(qs: QueryString): Option[String] =
    for {
      values <- qs.get(paramName)
      first <- values.headOption
    } yield first
}
/** Always matches, yielding `Some(firstValue)` when `paramName` is present and non-empty, `None` otherwise. */
class OptionalQueryStringParameter(paramName: String) extends QueryStringParameterExtractor[Option[String]] {
  def unapply(qs: QueryString): Option[Option[String]] =
    Some(qs.getOrElse(paramName, Nil).headOption)
}
/** Always matches, yielding every value of `paramName` (empty when the parameter is absent). */
class SeqQueryStringParameter(paramName: String) extends QueryStringParameterExtractor[Seq[String]] {
  def unapply(qs: QueryString): Option[Seq[String]] =
    Some(qs.get(paramName).getOrElse(Nil))
}
/**
 * Base extractor for query-string parameters. Besides a parsed `QueryString`,
 * it can be applied directly to a `RequestHeader`, a `URI`, or a `URL`, whose
 * raw query strings are parsed on the fly.
 */
trait QueryStringParameterExtractor[T] {
  import QueryStringParameterExtractor._
  def unapply(qs: QueryString): Option[T]
  def unapply(req: RequestHeader): Option[T] = unapply(req.queryString)
  def unapply(uri: URI): Option[T] = unapply(parse(uri.getRawQuery))
  def unapply(uri: URL): Option[T] = unapply(parse(uri.getQuery))
}
object QueryStringParameterExtractor {

  /**
   * Parses a raw query string into a multi-valued map. A `null` query yields
   * an empty map. Values are NOT URL-decoded; a parameter without '=' maps to
   * the empty string; only the first '=' separates key from value.
   */
  private def parse(query: String): QueryString =
    Option(query) match {
      case None => Map.empty[String, Seq[String]]
      case Some(q) =>
        val pairs = q.split("&").map { param =>
          val eq = param.indexOf('=')
          if (eq < 0) param -> ""
          else param.substring(0, eq) -> param.substring(eq + 1)
        }
        pairs.groupBy(_._1).mapValues(_.toSeq.map(_._2))
    }

  /** Extractor that fails unless the parameter is present with a value. */
  def required(name: String) = new RequiredQueryStringParameter(name)

  /** Extractor that always succeeds with an optional first value. */
  def optional(name: String) = new OptionalQueryStringParameter(name)

  /** Extractor that always succeeds with all values of the parameter. */
  def seq(name: String) = new SeqQueryStringParameter(name)
}
| Shruti9520/playframework | framework/src/play/src/main/scala/play/api/routing/sird/QueryStringExtractors.scala | Scala | apache-2.0 | 1,644 |
package net.databinder.dispatch.times
/**
 * Base trait for New York Times API services.
 *
 * Concrete services provide the API key, the service path segment and the
 * API version; `exec` assembles the request path and attaches the key.
 */
trait Times {
  // Shared HTTP client bound to the NYT API host.
  lazy val http = new Http("api.nytimes.com")

  // Supplied by concrete service implementations.
  val api_key: String
  val service: String
  val version: Int

  // Builds "/svc/<service>/v<version><action>" and sends `params` plus the
  // mandatory api-key.
  // NOTE(review): `?<` presumably attaches the parameters to the request —
  // confirm its semantics against this project's Http client.
  def exec(action: String, params: Map[String, Any]) =
    http(
      ("/svc" :: service :: "v" + version :: action :: Nil).mkString("/")
    ) ?< (params + ("api-key" -> api_key))

  // Convenience overload with no extra parameters.
  def exec(action: String): Http#Request = exec(action, Map[String, Any]())
}
case class People(api_key: String) extends Times {
  val service = "timespeople/api"
  val version = 1

  /** Fetches the TimesPeople profile document for the given user id. */
  def profile(user_id: Int) = exec(s"/user/$user_id/profile.js")
}
case class Search(api_key: String) extends Times {
  val service = "search"
  val version = 1

  /** Runs an article search for the given query string. */
  def search(query: String) = exec("/article", Map(("query", query)))
}
case class Community(api_key: String) extends Times {
  val service = "community"
  val version = 2

  /** Lists the most recent reader comments. */
  def recent = exec("comments/recent.json")
}
| n8han/Databinder-for-Wicket | databinder-dispatch/src/main/scala/net/databinder/dispatch/Times.scala | Scala | lgpl-2.1 | 929 |
package io.vamp.common.akka
import akka.actor.{ Actor, ActorSystem, DiagnosticActorLogging }
import akka.event.Logging.MDC
import io.vamp.common.{ Namespace, NamespaceProvider }
/**
 * Adds the current namespace to the logging MDC so every log line emitted by
 * a mixed-in actor can be correlated with the namespace it runs in.
 *
 * Fix: the self-type arrow and the map arrow were mangled by a broken
 * character encoding (`โ`); restored to the intended `⇒` / `→`, without
 * which this trait does not compile.
 */
trait CommonActorLogging extends DiagnosticActorLogging {
  this: NamespaceProvider ⇒

  override def mdc(currentMessage: Any): MDC = Map("namespace" → namespace.name)
}
/**
 * Convenience stack of traits for Vamp actors: reply handling, common
 * providers, namespace-aware logging and an execution context.
 *
 * Fix: the match-case arrows were mangled by a broken character encoding
 * (`โ`); restored to the intended `⇒`, without which this trait does not
 * compile.
 */
trait CommonSupportForActors
  extends Actor
    with ReplyActor
    with CommonProvider
    with CommonActorLogging
    with ActorExecutionContextProvider {

  implicit lazy val actorSystem: ActorSystem = context.system

  // The namespace is encoded as the first path element under /user, i.e. the
  // actor is expected to live at /user/<namespace>/...
  // NOTE(review): the `::` pattern relies on ActorPath.elements being a List
  // at runtime — confirm against the akka version in use.
  implicit lazy val namespace: Namespace = context.parent.path.elements match {
    case "user" :: ns :: _ ⇒ Namespace(ns)
    case other             ⇒ throw new RuntimeException(s"No namespace for: $other")
  }
}
| dragoslav/vamp | common/src/main/scala/io/vamp/common/akka/CommonSupportForActors.scala | Scala | apache-2.0 | 796 |
import sbt._
object Dependencies {

  // Scala versions covered by the cross build.
  final val Scala_2_11 = "2.11.12"
  final val Scala_2_12 = "2.12.6"
  final val Scala_2_13 = "2.13.1"

  // Play versions covered by the cross build.
  final val Play_2_5 = "2.5.19"
  final val Play_2_6 = "2.6.19"
  final val Play_2_7 = "2.7.4"
  final val Play_2_8 = "2.8.3"

  /** The Play server artifact for the given Play version. */
  def playServer(includePlayVersion: String): ModuleID =
    "com.typesafe.play" %% "play" % includePlayVersion

  /** The Play test artifact for the given Play version. */
  def playTest(includePlayVersion: String): ModuleID =
    "com.typesafe.play" %% "play-test" % includePlayVersion

  /** ScalaTest, shared across all cross-built modules. */
  val scalaTest: ModuleID = "org.scalatest" %% "scalatest" % "3.1.1"
}
| jeffmay/play-test-ops | project/Dependencies.scala | Scala | apache-2.0 | 590 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api
import org.apache.flink.api.common.ExecutionConfig
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.common.typeutils.TypeSerializer
import org.apache.flink.api.java.typeutils.ResultTypeQueryable
import org.apache.flink.api.java.{DataSet => JavaDataSet}
import org.apache.flink.api.scala.typeutils.{CaseClassSerializer, CaseClassTypeInfo, ScalaNothingTypeInfo, TypeUtils}
import _root_.scala.reflect.ClassTag
import language.experimental.macros
/**
* The Flink Scala API. [[org.apache.flink.api.scala.ExecutionEnvironment]] is the starting-point
* of any Flink program. It can be used to read from local files, HDFS, or other sources.
* [[org.apache.flink.api.scala.DataSet]] is the main abstraction of data in Flink. It provides
* operations that create new DataSets via transformations.
* [[org.apache.flink.api.scala.GroupedDataSet]] provides operations on grouped data that results
* from [[org.apache.flink.api.scala.DataSet.groupBy()]].
*
* Use [[org.apache.flink.api.scala.ExecutionEnvironment.getExecutionEnvironment]] to obtain
* an execution environment. This will either create a local environment or a remote environment,
* depending on the context where your program is executing.
*/
package object scala {

  // We have this here so that we always have generated TypeInformationS when
  // using the Scala API
  implicit def createTypeInformation[T]: TypeInformation[T] = macro TypeUtils.createTypeInfo[T]

  // createTypeInformation does not fire for Nothing in some situations, which is probably
  // a compiler bug. The following line is a workaround for this.
  // (See TypeInformationGenTest.testNothingTypeInfoIsAvailableImplicitly)
  implicit val scalaNothingTypeInfo: TypeInformation[Nothing] = new ScalaNothingTypeInfo()

  // We need to wrap Java DataSet because we need the scala operations
  private[flink] def wrap[R: ClassTag](set: JavaDataSet[R]) = new DataSet[R](set)

  // Checks if object has explicit type information using ResultTypeQueryable;
  // if so, that takes precedence over the supplied `typeInfo`.
  private[flink] def explicitFirst[T](
      funcOrInputFormat: AnyRef,
      typeInfo: TypeInformation[T]): TypeInformation[T] = funcOrInputFormat match {
    case rtq: ResultTypeQueryable[T] => rtq.getProducedType
    case _ => typeInfo
  }

  /**
   * Translates case-class field names into field indices.
   *
   * @throws IllegalArgumentException if any name does not exist on the type
   * @throws UnsupportedOperationException if `typeInfo` is not a case class
   */
  private[flink] def fieldNames2Indices(
      typeInfo: TypeInformation[_],
      fields: Array[String]): Array[Int] = {
    typeInfo match {
      case ti: CaseClassTypeInfo[_] =>
        val result = ti.getFieldIndices(fields)

        if (result.contains(-1)) {
          throw new IllegalArgumentException("Fields '" + fields.mkString(", ") +
            "' are not valid for '" + ti.toString + "'.")
        }

        result
      case _ =>
        throw new UnsupportedOperationException("Specifying fields by name is only" +
          "supported on Case Classes (for now).")
    }
  }

  /**
   * Returns a printable description of the caller's stack frame, or
   * "<unknown>" when the stack trace is too shallow.
   */
  def getCallLocationName(depth: Int = 3) : String = {
    val st = Thread.currentThread().getStackTrace()
    // BUGFIX: the guard must be `<=`, not `<` — st(depth) requires
    // st.length > depth, so st.length == depth previously threw
    // ArrayIndexOutOfBoundsException instead of returning "<unknown>".
    if (st.length <= depth) {
      "<unknown>"
    } else {
      st(depth).toString
    }
  }

  /** Builds a CaseClassTypeInfo for Tuple2 from the two element type infos. */
  def createTuple2TypeInformation[T1, T2](
      t1: TypeInformation[T1],
      t2: TypeInformation[T2])
    : TypeInformation[(T1, T2)] =
    new CaseClassTypeInfo[(T1, T2)](
      classOf[(T1, T2)],
      Array(t1, t2),
      Seq(t1, t2),
      Array("_1", "_2")) {

      override def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[(T1, T2)] = {
        val fieldSerializers: Array[TypeSerializer[_]] = new Array[TypeSerializer[_]](getArity)
        for (i <- 0 until getArity) {
          fieldSerializers(i) = types(i).createSerializer(executionConfig)
        }

        new CaseClassSerializer[(T1, T2)](classOf[(T1, T2)], fieldSerializers) {
          override def createInstance(fields: Array[AnyRef]) = {
            (fields(0).asInstanceOf[T1], fields(1).asInstanceOf[T2])
          }
        }
      }
    }
}
| DieBauer/flink | flink-scala/src/main/scala/org/apache/flink/api/scala/package.scala | Scala | apache-2.0 | 4,749 |
/*
* Copyright 2014โ2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.analysis
import slamdata.Predef._
import quasar.fp.ski.ฮบ
import quasar.contrib.pathy.{AFile, ADir, APath}
import quasar.qscript._
import quasar.common.{JoinType, SortDir}
import matryoshka.data.Fix
import pathy.Path._
import org.specs2.scalaz.DisjunctionMatchers
import scalaz._, Scalaz._
/** Unit tests for the static cardinality estimates of each QScript node type. */
class CardinalitySpec extends quasar.Qspec with QScriptHelpers with DisjunctionMatchers {
  sequential
  // Path-cardinality function that reports 0 for every path.
  // NOTE(review): `ฮบ` appears to be UTF-8 mojibake for `κ` (the constant
  // combinator imported from quasar.fp.ski above) — verify the file encoding.
  val empty: APath => Id[Int] = ฮบ(0)
  import qstdsl._
  "Cardinality" should {
    "Read" should {
      "always returns 1 for any file" in {
        val compile = Cardinality.read[AFile].calculate(empty)
        val afile = rootDir </> dir("path") </> dir("to") </> file("file")
        compile(Const[Read[AFile], Int](Read(afile))) must_== 1
      }
      "always returns 1 for any dir" in {
        val compile = Cardinality.read[ADir].calculate(empty)
        val adir = rootDir </> dir("path") </> dir("to") </> dir("dir")
        compile(Const[Read[ADir], Int](Read(adir))) must_== 1
      }
    }
    "ShiftedRead" should {
      "returns what 'pathCard' is returning for given file" in {
        val fileCardinality = 50
        val pathCard = ฮบ(fileCardinality)
        val compile = Cardinality.shiftedRead[AFile].calculate[Id](pathCard)
        val afile = rootDir </> dir("path") </> dir("to") </> file("file")
        compile(Const[ShiftedRead[AFile], Int](ShiftedRead(afile, ExcludeId))) must_== fileCardinality
      }
      "returns what 'pathCard' is returning for given dir" in {
        val dirCardinality = 55
        val pathCard = ฮบ(dirCardinality)
        val compile = Cardinality.shiftedRead[ADir].calculate[Id](pathCard)
        val adir = rootDir </> dir("path") </> dir("to") </> dir("dir")
        compile(Const[ShiftedRead[ADir], Int](ShiftedRead(adir, ExcludeId))) must_== dirCardinality
      }
    }
    "QScriptCore" should {
      val compile = Cardinality.qscriptCore[Fix].calculate(empty)
      "Map" should {
        "returns cardinality of already processed part of qscript" in {
          val cardinality = 40
          val map = quasar.qscript.Map(cardinality, func.ProjectKeyS(func.Hole, "key"))
          compile(map) must_== cardinality
        }
      }
      /**
       * Cardinality depends on how many buckets are created. If the bucket is
       * constant the cardinality is 1; otherwise it is somewhere in the range
       * [0, card]. The middle value is chosen; as with Filter, we might
       * consider returning a Tuple2[Int, Int] range instead of a single Int.
       */
      "Reduce" should {
        "returns cardinality of 1 when bucket is Const" in {
          val bucket: List[FreeMap] = Nil
          val repair: FreeMapA[ReduceIndex] = func.ReduceIndex(0.right)
          val reduce = Reduce(100, bucket, List.empty, repair)
          compile(reduce) must_== 1
        }
        "returns cardinality of half fo already processed part of qscript" in {
          val cardinality = 100
          val bucket: List[FreeMap] = List(func.ProjectKeyS(func.Hole, "country"))
          val repair: FreeMapA[ReduceIndex] = func.ReduceIndex(0.right)
          val reduce = Reduce(cardinality, bucket, List.empty, repair)
          compile(reduce) must_== cardinality / 2
        }
      }
      "Sort" should {
        "returns cardinality of already processed part of qscript" in {
          val cardinality = 60
          def bucket = List(func.ProjectKeyS(func.Hole, "key"))
          def order = (func.ProjectKeyS(func.Hole, "key"), SortDir.asc).wrapNel
          val sort = quasar.qscript.Sort(cardinality, bucket, order)
          compile(sort) must_== cardinality
        }
      }
      "Filter" should {
        /**
         * Since a filter can produce any cardinality in the range [0, card],
         * the middle value — card / 2 — was chosen.
         * It is worth considering changing the signature of the Cardinality
         * typeclass to return a Tuple2[Int, Int] representing a range; the
         * result would then be the range (0, card).
         */
        "returns half of cardinality of already processed part of qscript" in {
          val cardinality = 50
          def fun: FreeMap = func.Lt(func.ProjectKeyS(func.Hole, "age"), func.Constant(json.int(24)))
          val filter = quasar.qscript.Filter(cardinality, fun)
          compile(filter) must_== cardinality / 2
        }
      }
      "Subset" should {
        "returns cardinality equal to count if sel is Take & count is constant" in {
          val count = 20
          val cardinality = 50
          def fromQS: FreeQS = free.Hole
          def countQS: FreeQS = constFreeQS(count)
          val take = quasar.qscript.Subset(cardinality, fromQS, Take, countQS)
          compile(take) must_== count
        }
        "returns cardinality equal to count if sel is Sample & count is constant" in {
          val count = 20
          val cardinality = 50
          def fromQS: FreeQS = free.Hole
          def countQS: FreeQS = constFreeQS(count)
          val take = quasar.qscript.Subset(cardinality, fromQS, Sample, countQS)
          compile(take) must_== count
        }
        "returns cardinality equal to (card - count) if sel is Drop & count is constant" in {
          val count = 20
          val cardinality = 50
          def fromQS: FreeQS = free.Hole
          def countQS: FreeQS = constFreeQS(count)
          val take = quasar.qscript.Subset(cardinality, fromQS, Drop, countQS)
          compile(take) must_== cardinality - count
        }
        "returns cardinality equal to card / 2 regardles of sel if count is NOT constant" in {
          val cardinality = 50
          def fromQS: FreeQS = free.Hole
          def countQS: FreeQS = free.Hole
          compile(quasar.qscript.Subset(cardinality, fromQS, Take, countQS)) must_== cardinality / 2
          compile(quasar.qscript.Subset(cardinality, fromQS, Sample, countQS)) must_== cardinality / 2
          compile(quasar.qscript.Subset(cardinality, fromQS, Drop, countQS)) must_== cardinality / 2
        }
      }
      "LeftShift" should {
        /**
         * Why 10x and not 5x or 1000x?
         * LeftShift flattens the structure, so the result's potential size
         * ranges from [cardinality, infinity]. It is very hard to determine a
         * concrete value just by inspecting static information; to get more
         * accurate data we will most probably need statistics.
         * Another approach is to change the Cardinality typeclass to return
         * Option[Int], with every occurrence of LeftShift returning None.
         * For now the 10x approach was chosen as the value.
         */
        "returns cardinality of 10 x cardinality of already processed part of qscript" in {
          val cardinality = 60
          val fun = recFunc.Eq(recFunc.ProjectKeyS(recFunc.Hole, "key"), recFunc.Constant(json.str("value")))
          val joinFunc: JoinFunc = func.LeftSide
          val leftShift = LeftShift(cardinality, fun, IdOnly, ShiftType.Array, OnUndefined.Omit, joinFunc)
          compile(leftShift) must_== cardinality * 10
        }
      }
      "Union" should {
        "returns cardinality of sum lBranch + rBranch" in {
          val cardinality = 100
          def fun(country: String): FreeMap =
            func.Eq(func.ProjectKeyS(func.Hole, "country"), func.Constant(json.str(country)))
          def left: FreeQS = free.Map(free.Hole, func.ProjectKeyS(func.Hole, "key"))
          def right: FreeQS = free.Filter(free.Hole, fun("US"))
          val union = quasar.qscript.Union(cardinality, left, right)
          compile(union) must_== cardinality + (cardinality / 2)
        }
      }
      "Unreferenced" should {
        "returns cardinality of 1" in {
          compile(Unreferenced()) must_== 1
        }
      }
    }
    "ProjectBucket" should {
      "returns cardinality of already processed part of qscript" in {
        val compile = Cardinality.projectBucket[Fix].calculate(empty)
        val cardinality = 45
        def fun: FreeMap = func.Lt(func.ProjectKeyS(func.Hole, "age"), func.Constant(json.int(24)))
        val bucket = BucketKey(cardinality, fun, fun)
        compile(bucket) must_== cardinality
      }
    }
    "EquiJoin" should {
      "returns cardinality of multiplication lBranch * rBranch" in {
        val compile = Cardinality.equiJoin[Fix].calculate(empty)
        val cardinality = 100
        val fun: FreeMap =
          func.Eq(func.ProjectKeyS(func.Hole, "key"), func.Constant(json.str("val")))
        def left: FreeQS = free.Map(free.Hole, func.ProjectKeyS(func.Hole, "key"))
        def right: FreeQS = free.Filter(free.Hole, fun)
        val joinFunc: JoinFunc = func.LeftSide
        val join = quasar.qscript.EquiJoin(cardinality, left, right, List((fun, fun)), JoinType.Inner, joinFunc)
        compile(join) must_== cardinality * (cardinality / 2)
      }
    }
    "ThetaJoin" should {
      "return cardinality of multiplication lBranch * rBranch" in {
        val compile = Cardinality.thetaJoin[Fix].calculate(empty)
        val cardinality = 100
        val fun: FreeMap =
          func.Eq(func.ProjectKeyS(func.Hole, "key"), func.Constant(json.str("val")))
        def left: FreeQS = free.Map(free.Hole, func.ProjectKeyS(func.Hole, "key"))
        def right: FreeQS = free.Filter(free.Hole, fun)
        val joinFunc: JoinFunc = func.LeftSide
        val join = quasar.qscript.ThetaJoin(cardinality, left, right, joinFunc, JoinType.Inner, joinFunc)
        compile(join) must_== cardinality * (cardinality / 2)
      }
    }
    "DeadEnd" should {
      "return cardinality of 1" in {
        val compile = Cardinality.deadEnd.calculate(empty)
        compile(Const(Root)) must_== 1
      }
    }
  }
  // Helper: a QScript branch that maps everything to the constant `v`.
  private def constFreeQS(v: Int): FreeQS =
    free.Map(free.Unreferenced, func.Constant(json.int(v)))
}
| jedesah/Quasar | connector/src/test/scala/quasar/qscript/analysis/CardinalitySpec.scala | Scala | apache-2.0 | 10,479 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.util.concurrent.{ExecutorService, TimeUnit}
import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration._
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{mock, spy, verify, when}
import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics}
import org.apache.spark.internal.config.DYN_ALLOCATION_TESTING
import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEndpointRef, RpcEnv}
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util.{ManualClock, ThreadUtils}
/**
* A test suite for the heartbeating behavior between the driver and the executors.
*/
class HeartbeatReceiverSuite
  extends SparkFunSuite
  with BeforeAndAfterEach
  with PrivateMethodTester
  with LocalSparkContext {
  private val executorId1 = "1"
  private val executorId2 = "2"
  // Shared state that must be reset before and after each test
  private var scheduler: TaskSchedulerImpl = null
  private var heartbeatReceiver: HeartbeatReceiver = null
  private var heartbeatReceiverRef: RpcEndpointRef = null
  private var heartbeatReceiverClock: ManualClock = null
  // Helper private method accessors for HeartbeatReceiver
  private val _executorLastSeen = PrivateMethod[collection.Map[String, Long]]('executorLastSeen)
  private val _executorTimeoutMs = PrivateMethod[Long]('executorTimeoutMs)
  private val _killExecutorThread = PrivateMethod[ExecutorService]('killExecutorThread)
  /**
   * Before each test, set up the SparkContext and a custom [[HeartbeatReceiver]]
   * that uses a manual clock.
   */
  override def beforeEach(): Unit = {
    super.beforeEach()
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("test")
      .set(DYN_ALLOCATION_TESTING, true)
    // The SparkContext is spied so that the task scheduler (and later the
    // scheduler backend) can be swapped out for mocks.
    sc = spy(new SparkContext(conf))
    scheduler = mock(classOf[TaskSchedulerImpl])
    when(sc.taskScheduler).thenReturn(scheduler)
    when(scheduler.nodeBlacklist).thenReturn(Predef.Set[String]())
    when(scheduler.sc).thenReturn(sc)
    // Manual clock lets tests advance time explicitly to trigger timeouts.
    heartbeatReceiverClock = new ManualClock
    heartbeatReceiver = new HeartbeatReceiver(sc, heartbeatReceiverClock)
    heartbeatReceiverRef = sc.env.rpcEnv.setupEndpoint("heartbeat", heartbeatReceiver)
    when(scheduler.executorHeartbeatReceived(any(), any(), any(), any())).thenReturn(true)
  }
  /**
   * After each test, clean up all state and stop the [[SparkContext]].
   */
  override def afterEach(): Unit = {
    super.afterEach()
    scheduler = null
    heartbeatReceiver = null
    heartbeatReceiverRef = null
    heartbeatReceiverClock = null
  }
  test("task scheduler is set correctly") {
    assert(heartbeatReceiver.scheduler === null)
    heartbeatReceiverRef.askSync[Boolean](TaskSchedulerIsSet)
    assert(heartbeatReceiver.scheduler !== null)
  }
  test("normal heartbeat") {
    heartbeatReceiverRef.askSync[Boolean](TaskSchedulerIsSet)
    addExecutorAndVerify(executorId1)
    addExecutorAndVerify(executorId2)
    triggerHeartbeat(executorId1, executorShouldReregister = false)
    triggerHeartbeat(executorId2, executorShouldReregister = false)
    val trackedExecutors = getTrackedExecutors
    assert(trackedExecutors.size === 2)
    assert(trackedExecutors.contains(executorId1))
    assert(trackedExecutors.contains(executorId2))
  }
  test("reregister if scheduler is not ready yet") {
    addExecutorAndVerify(executorId1)
    // Task scheduler is not set yet in HeartbeatReceiver, so executors should reregister
    triggerHeartbeat(executorId1, executorShouldReregister = true)
  }
  test("reregister if heartbeat from unregistered executor") {
    heartbeatReceiverRef.askSync[Boolean](TaskSchedulerIsSet)
    // Received heartbeat from unknown executor, so we ask it to re-register
    triggerHeartbeat(executorId1, executorShouldReregister = true)
    assert(getTrackedExecutors.isEmpty)
  }
  test("reregister if heartbeat from removed executor") {
    heartbeatReceiverRef.askSync[Boolean](TaskSchedulerIsSet)
    addExecutorAndVerify(executorId1)
    addExecutorAndVerify(executorId2)
    // Remove the second executor but not the first
    removeExecutorAndVerify(executorId2)
    // Now trigger the heartbeats
    // A heartbeat from the second executor should require reregistering
    triggerHeartbeat(executorId1, executorShouldReregister = false)
    triggerHeartbeat(executorId2, executorShouldReregister = true)
    val trackedExecutors = getTrackedExecutors
    assert(trackedExecutors.size === 1)
    assert(trackedExecutors.contains(executorId1))
    assert(!trackedExecutors.contains(executorId2))
  }
  test("expire dead hosts") {
    val executorTimeout = heartbeatReceiver.invokePrivate(_executorTimeoutMs())
    heartbeatReceiverRef.askSync[Boolean](TaskSchedulerIsSet)
    addExecutorAndVerify(executorId1)
    addExecutorAndVerify(executorId2)
    triggerHeartbeat(executorId1, executorShouldReregister = false)
    triggerHeartbeat(executorId2, executorShouldReregister = false)
    // Advance the clock and only trigger a heartbeat for the first executor
    heartbeatReceiverClock.advance(executorTimeout / 2)
    triggerHeartbeat(executorId1, executorShouldReregister = false)
    heartbeatReceiverClock.advance(executorTimeout)
    heartbeatReceiverRef.askSync[Boolean](ExpireDeadHosts)
    // Only the second executor should be expired as a dead host
    verify(scheduler).executorLost(meq(executorId2), any())
    val trackedExecutors = getTrackedExecutors
    assert(trackedExecutors.size === 1)
    assert(trackedExecutors.contains(executorId1))
    assert(!trackedExecutors.contains(executorId2))
  }
  test("expire dead hosts should kill executors with replacement (SPARK-8119)") {
    // Set up a fake backend and cluster manager to simulate killing executors
    val rpcEnv = sc.env.rpcEnv
    val fakeClusterManager = new FakeClusterManager(rpcEnv)
    val fakeClusterManagerRef = rpcEnv.setupEndpoint("fake-cm", fakeClusterManager)
    val fakeSchedulerBackend = new FakeSchedulerBackend(scheduler, rpcEnv, fakeClusterManagerRef)
    when(sc.schedulerBackend).thenReturn(fakeSchedulerBackend)
    // Register fake executors with our fake scheduler backend
    // This is necessary because the backend refuses to kill executors it does not know about
    fakeSchedulerBackend.start()
    val dummyExecutorEndpoint1 = new FakeExecutorEndpoint(rpcEnv)
    val dummyExecutorEndpoint2 = new FakeExecutorEndpoint(rpcEnv)
    val dummyExecutorEndpointRef1 = rpcEnv.setupEndpoint("fake-executor-1", dummyExecutorEndpoint1)
    val dummyExecutorEndpointRef2 = rpcEnv.setupEndpoint("fake-executor-2", dummyExecutorEndpoint2)
    fakeSchedulerBackend.driverEndpoint.askSync[Boolean](
      RegisterExecutor(executorId1, dummyExecutorEndpointRef1, "1.2.3.4", 0, Map.empty, Map.empty))
    fakeSchedulerBackend.driverEndpoint.askSync[Boolean](
      RegisterExecutor(executorId2, dummyExecutorEndpointRef2, "1.2.3.5", 0, Map.empty, Map.empty))
    heartbeatReceiverRef.askSync[Boolean](TaskSchedulerIsSet)
    addExecutorAndVerify(executorId1)
    addExecutorAndVerify(executorId2)
    triggerHeartbeat(executorId1, executorShouldReregister = false)
    triggerHeartbeat(executorId2, executorShouldReregister = false)
    // Adjust the target number of executors on the cluster manager side
    assert(fakeClusterManager.getTargetNumExecutors === 0)
    sc.requestTotalExecutors(2, 0, Map.empty)
    assert(fakeClusterManager.getTargetNumExecutors === 2)
    assert(fakeClusterManager.getExecutorIdsToKill.isEmpty)
    // Expire the executors. This should trigger our fake backend to kill the executors.
    // Since the kill request is sent to the cluster manager asynchronously, we need to block
    // on the kill thread to ensure that the cluster manager actually received our requests.
    // Here we use a timeout of O(seconds), but in practice this whole test takes O(10ms).
    val executorTimeout = heartbeatReceiver.invokePrivate(_executorTimeoutMs())
    heartbeatReceiverClock.advance(executorTimeout * 2)
    heartbeatReceiverRef.askSync[Boolean](ExpireDeadHosts)
    val killThread = heartbeatReceiver.invokePrivate(_killExecutorThread())
    killThread.shutdown() // needed for awaitTermination
    killThread.awaitTermination(10L, TimeUnit.SECONDS)
    // The target number of executors should not change! Otherwise, having an expired
    // executor means we permanently adjust the target number downwards until we
    // explicitly request new executors. For more detail, see SPARK-8119.
    assert(fakeClusterManager.getTargetNumExecutors === 2)
    assert(fakeClusterManager.getExecutorIdsToKill === Set(executorId1, executorId2))
  }
  /** Manually send a heartbeat and return the response. */
  private def triggerHeartbeat(
      executorId: String,
      executorShouldReregister: Boolean): Unit = {
    val metrics = TaskMetrics.empty
    val blockManagerId = BlockManagerId(executorId, "localhost", 12345)
    val executorUpdates = new ExecutorMetrics(Array(123456L, 543L, 12345L, 1234L, 123L,
      12L, 432L, 321L, 654L, 765L))
    val response = heartbeatReceiverRef.askSync[HeartbeatResponse](
      Heartbeat(executorId, Array(1L -> metrics.accumulators()), blockManagerId, executorUpdates))
    if (executorShouldReregister) {
      assert(response.reregisterBlockManager)
    } else {
      assert(!response.reregisterBlockManager)
      // Additionally verify that the scheduler callback is called with the correct parameters
      verify(scheduler).executorHeartbeatReceived(
        meq(executorId),
        meq(Array(1L -> metrics.accumulators())),
        meq(blockManagerId),
        meq(executorUpdates))
    }
  }
  // Registers the executor with the receiver and waits for the async ack.
  private def addExecutorAndVerify(executorId: String): Unit = {
    assert(
      heartbeatReceiver.addExecutor(executorId).map { f =>
        ThreadUtils.awaitResult(f, 10.seconds)
      } === Some(true))
  }
  // Unregisters the executor from the receiver and waits for the async ack.
  private def removeExecutorAndVerify(executorId: String): Unit = {
    assert(
      heartbeatReceiver.removeExecutor(executorId).map { f =>
        ThreadUtils.awaitResult(f, 10.seconds)
      } === Some(true))
  }
  private def getTrackedExecutors: collection.Map[String, Long] = {
    // We may receive undesired SparkListenerExecutorAdded from LocalSchedulerBackend,
    // so exclude it from the map. See SPARK-10800.
    heartbeatReceiver.invokePrivate(_executorLastSeen()).
      filterKeys(_ != SparkContext.DRIVER_IDENTIFIER)
  }
}
// TODO: use these classes to add end-to-end tests for dynamic allocation!
/**
* Dummy RPC endpoint to simulate executors.
*/
private class FakeExecutorEndpoint(override val rpcEnv: RpcEnv) extends RpcEndpoint {
  // Swallows every message; the tests only need the endpoint to exist so it
  // can be registered with the scheduler backend.
  override def receive: PartialFunction[Any, Unit] = {
    case _ => ()
  }
}
/**
* Dummy scheduler backend to simulate executor allocation requests to the cluster manager.
*/
private class FakeSchedulerBackend(
    scheduler: TaskSchedulerImpl,
    rpcEnv: RpcEnv,
    clusterManagerEndpoint: RpcEndpointRef)
  extends CoarseGrainedSchedulerBackend(scheduler, rpcEnv) {

  // Forwards the requested executor total to the fake cluster manager
  // endpoint instead of a real cluster manager.
  protected override def doRequestTotalExecutors(requestedTotal: Int): Future[Boolean] = {
    clusterManagerEndpoint.ask[Boolean](
      RequestExecutors(requestedTotal, localityAwareTasks, hostToLocalTaskCount, Set.empty))
  }

  // Forwards kill requests to the fake cluster manager endpoint.
  protected override def doKillExecutors(executorIds: Seq[String]): Future[Boolean] = {
    clusterManagerEndpoint.ask[Boolean](KillExecutors(executorIds))
  }
}
/**
* Dummy cluster manager to simulate responses to executor allocation requests.
*/
private class FakeClusterManager(override val rpcEnv: RpcEnv) extends RpcEndpoint {
  // Records the most recent executor total requested by the backend.
  private var targetNumExecutors = 0
  // Records every executor id the backend has asked to kill.
  private val executorIdsToKill = new mutable.HashSet[String]

  def getTargetNumExecutors: Int = targetNumExecutors
  def getExecutorIdsToKill: Set[String] = executorIdsToKill.toSet

  override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
    case RequestExecutors(newTotal, _, _, _) =>
      targetNumExecutors = newTotal
      context.reply(true)
    case KillExecutors(ids) =>
      ids.foreach(executorIdsToKill += _)
      context.reply(true)
  }
}
| Aegeaner/spark | core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala | Scala | apache-2.0 | 13,184 |
package org.littlewings.hbase.example
// A book record used by the HBase example tests.
// NOTE(review): `authors` is an Array, so the generated case-class
// equals/hashCode compare that field by reference, not by content —
// confirm callers do not rely on structural equality of Book instances.
case class Book(isbn13: String, title: String, authors: Array[String], price: Int, summary: String)
| kazuhira-r/hbase-examples | hbase-getting-started/src/test/scala/org/littlewings/hbase/example/Book.scala | Scala | mit | 139 |
package co.theasi.plotly.writer
import org.json4s._
import org.json4s.native.JsonMethods._
import org.scalatest._
import co.theasi.plotly._
/** Verifies the JSON serialization of figures containing 3D surface plots. */
class FigureWriterSpec extends FlatSpec with Matchers with Inside {
  // Two small z-value grids used as surface data throughout the spec.
  val testZData = Vector(Vector(1, 2, 3), Vector(4, 5, 6))
  val testZData2 = Vector(Vector(1, 2, 4), Vector(5, 8, 9))
  "plotAsJson" should "serialize grid layouts with 3D plots" in {
    val figure = GridFigure(2, 1)
      .plot(0, 0) { ThreeDPlot().withSurface(testZData) }
      .plot(1, 0) { ThreeDPlot().withSurface(testZData2)}
    // Maps the "z-<plot>-<row>" grid columns to fake Plotly column uids.
    val columnUidMap = Map(
      "z-0-0" -> "uid1",
      "z-0-1" -> "uid2",
      "z-0-2" -> "uid3",
      "z-1-0" -> "uid4",
      "z-1-1" -> "uid5",
      "z-1-2" -> "uid6"
    )
    val drawnGrid = GridFile("file-id", "file-name", columnUidMap)
    val jobj = FigureWriter.plotAsJson(figure, drawnGrid, "test-file")
    val figobj = jobj \\ "figure"
    // Check that the surfaces get plotted on the correct scenes
    val JArray(data) = figobj \\ "data"
    data(0) \\ "zsrc" shouldEqual JString("file-id:uid1,uid2,uid3")
    data(1) \\ "zsrc" shouldEqual JString("file-id:uid4,uid5,uid6")
    data(0) \\ "scene" shouldEqual JString("scene")
    data(1) \\ "scene" shouldEqual JString("scene2")
    // Check that the scenes are in the correct place
    val layout = figobj \\ "layout"
    // Both scenes span the full horizontal domain [0, 1].
    List("scene", "scene2").foreach { scene =>
      inside(layout \\ scene \\ "domain" \\ "x") {
        case JArray(List(JDouble(start), JDouble(finish))) =>
          start shouldEqual 0.0
          finish shouldEqual 1.0
      }
    }
    // Vertically, scene occupies the upper half and scene2 the lower half.
    inside(layout \\ "scene" \\ "domain" \\ "y") {
      case JArray(List(JDouble(start), JDouble(finish))) =>
        start should be >= 0.5
        finish shouldEqual 1.0 +- 1e-5
    }
    inside(layout \\ "scene2" \\ "domain" \\ "y") {
      case JArray(List(JDouble(start), JDouble(finish))) =>
        start shouldEqual 0.0 +- 1e-5
        finish should be <= 0.5
    }
  }
  it should "serialize simple 3D layouts with multiple surfaces" in {
    val figure = SinglePlotFigure().plot {
      ThreeDPlot().withSurface(testZData).withSurface(testZData2)
    }
    val columnUidMap = Map(
      "z-0-0" -> "uid1",
      "z-0-1" -> "uid2",
      "z-0-2" -> "uid3",
      "z-1-0" -> "uid4",
      "z-1-1" -> "uid5",
      "z-1-2" -> "uid6"
    )
    val drawnGrid = GridFile("file-id", "file-name", columnUidMap)
    val jobj = FigureWriter.plotAsJson(figure, drawnGrid, "test-file")
    val figobj = jobj \\ "figure"
    // Check that the surfaces get plotted on the correct scenes
    // (both surfaces share the single scene in a single-plot figure)
    val JArray(data) = figobj \\ "data"
    data(0) \\ "zsrc" shouldEqual JString("file-id:uid1,uid2,uid3")
    data(1) \\ "zsrc" shouldEqual JString("file-id:uid4,uid5,uid6")
    data(0) \\ "scene" shouldEqual JString("scene")
    data(1) \\ "scene" shouldEqual JString("scene")
  }
}
| ASIDataScience/scala-plotly-client | src/test/scala/co/theasi/plotly/writer/FigureWriterSpec.scala | Scala | mit | 2,842 |
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {

  // Basic project coordinates.
  val appName = "simplest"
  val appVersion = "1.0-SNAPSHOT"

  // Resolvers hosting the play-actor-room artifacts.
  val mandubianRepo = Seq(
    "Mandubian repository snapshots" at "https://github.com/mandubian/mandubian-mvn/raw/master/snapshots/",
    "Mandubian repository releases" at "https://github.com/mandubian/mandubian-mvn/raw/master/releases/"
  )

  val appDependencies = Seq(
    "org.mandubian" %% "play-actor-room" % "0.2"
  )

  val main = play.Project(appName, appVersion, appDependencies)
    .settings(resolvers ++= mandubianRepo)
}
| mandubian/play-actor-room | samples/simplest/project/Build.scala | Scala | apache-2.0 | 606 |
package cz.jenda.pidifrky.data
import java.text.Collator
import java.util.Locale
import android.location.Location
import cz.jenda.pidifrky.data.pojo.{Card, Merchant}
/**
* @author Jenda Kolena, jendakolena@gmail.com
*/
/** Orderings used to sort [[Card]] instances in list views. */
object CardOrdering {

  object Implicits {
    implicit final val ByName = CardOrdering.ByName
  }

  /** Alphabetical ordering backed by a Czech-locale collator.
    * Null-safe: if either card is `null` the two compare as equal. */
  case object ByName extends Ordering[Card] {
    private val c = Collator.getInstance(new Locale("cs", "CZ"))

    override def compare(x: Card, y: Card): Int =
      (Option(x), Option(y)) match {
        case (Some(first), Some(second)) => c.compare(first.name, second.name)
        case _                           => 0
      }
  }

  /** Orders cards by their distance from `currentLocation`.
    * Falls back to [[ByName]] whenever either card is `null` or has no known distance. */
  case class ByDistance(currentLocation: Location) extends Ordering[Card] {
    override def compare(x: Card, y: Card): Int = {
      val byDistance = for {
        first          <- Option(x)
        second         <- Option(y)
        firstDistance  <- first.getDistance(currentLocation)
        secondDistance <- second.getDistance(currentLocation)
      } yield firstDistance.compareTo(secondDistance)
      byDistance.getOrElse(ByName.compare(x, y))
    }
  }
}
/** Orderings used to sort [[Merchant]] instances in list views. */
object MerchantOrdering {

  object Implicits {
    implicit final val ByName = MerchantOrdering.ByName
  }

  /** Alphabetical ordering backed by a Czech-locale collator.
    * Null-safe: if either merchant is `null` the two compare as equal. */
  case object ByName extends Ordering[Merchant] {
    private val c = Collator.getInstance(new Locale("cs", "CZ"))

    override def compare(x: Merchant, y: Merchant): Int =
      (Option(x), Option(y)) match {
        case (Some(first), Some(second)) => c.compare(first.name, second.name)
        case _                           => 0
      }
  }

  /** Orders merchants by their distance from `currentLocation`.
    * Falls back to [[ByName]] whenever either merchant is `null` or has no known distance. */
  case class ByDistance(currentLocation: Location) extends Ordering[Merchant] {
    override def compare(x: Merchant, y: Merchant): Int = {
      val byDistance = for {
        first          <- Option(x)
        second         <- Option(y)
        firstDistance  <- first.getDistance(currentLocation)
        secondDistance <- second.getDistance(currentLocation)
      } yield firstDistance.compareTo(secondDistance)
      byDistance.getOrElse(ByName.compare(x, y))
    }
  }
}
| jendakol/pidifrky | client/src/main/scala/cz/jenda/pidifrky/data/orderings.scala | Scala | apache-2.0 | 2,074 |
// Based on ucar.ma2.Index, portions of which were developed by the Unidata Program at the University Corporation for Atmospheric Research.
package nasa.nccs.cdapi.tensors
import java.util.Formatter
import nasa.nccs.cdapi.cdm.CDSVariable
import scala.collection.mutable.ListBuffer
import ucar.ma2
import ucar.nc2.constants.AxisType
import ucar.nc2.dataset.{CoordinateAxis1D, CoordinateAxis1DTime}
import ucar.nc2.time.CalendarPeriod.Field._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/** Factory helpers for [[CDCoordIndex]]. */
object CDCoordIndex {
  /** Duplicates an existing coordinate index (same shape, strides and offset). */
  def factory(index: CDCoordIndex): CDCoordIndex =
    new CDCoordIndex(index)

  /** Builds an index from a shape; strides default to row-major order and the offset to zero. */
  def factory(shape: Array[Int], stride: Array[Int] = Array.emptyIntArray, offset: Int = 0): CDCoordIndex =
    new CDCoordIndex(shape, stride, offset)
}
/** Maps between multi-dimensional coordinate indices and a flat (1-D) storage index,
  * described by a shape, per-dimension strides and a base offset.
  * Modelled on ucar.ma2.Index. Conventions used throughout:
  *   - a stride of 0 marks a broadcast dimension (logical size > physical size 1);
  *   - a negative shape entry marks a dimension excluded from flat addressing.
  * Instances are immutable: every transformation returns a new index.
  */
class CDCoordIndex( protected val shape: Array[Int], _stride: Array[Int]=Array.emptyIntArray, protected val offset: Int = 0 ) {
  protected val rank: Int = shape.length
  // When no strides are supplied, derive canonical row-major strides from the shape.
  protected val stride = if( _stride.isEmpty ) computeStrides(shape) else _stride

  /** Copy constructor. */
  def this( index: CDCoordIndex ) = this( index.shape, index.stride, index.offset )

  def getRank: Int = rank
  def getShape: Array[Int] = shape.clone
  def getStride: Array[Int] = stride.clone
  def getShape(index: Int): Int = shape(index)
  /** Number of addressable elements; negative (excluded) dimensions are ignored. */
  def getSize: Int = shape.filter( _ > 0 ).product
  def getOffset: Int = offset
  /** Shape with every broadcast dimension (stride == 0) collapsed back to physical size 1. */
  def getReducedShape: Array[Int] = { ( for( idim <- ( 0 until rank) ) yield if( stride(idim) == 0 ) 1 else shape( idim ) ).toArray }
  override def toString: String = "{ Shape: " + shape.mkString("[ ",", "," ], Stride: " + stride.mkString("[ ",", "," ]") + " Offset: " + offset + " } ")

  /** True if any dimension has been broadcast (stride 0 with logical size > 1). */
  def broadcasted: Boolean = {
    for( i <- (0 until rank) ) if( (stride(i) == 0) && (shape(i) > 1) ) return true
    false
  }

  /** Converts a flat storage index back into per-dimension coordinates;
    * excluded dimensions (shape < 0) yield -1.
    */
  def getCoordIndices( flatIndex: Int ): IndexedSeq[Int] = {
    var currElement = flatIndex
    currElement -= offset
    // NOTE(review): a broadcast dimension (stride == 0) would divide by zero here;
    // callers presumably only invoke this on non-broadcast indices -- confirm.
    for( ii <-(0 until rank ) ) yield if (shape(ii) < 0) { -1 } else {
      val coordIndex = currElement / stride(ii)
      currElement -= coordIndex * stride(ii)
      coordIndex
    }
  }

  /** Converts per-dimension coordinates into the flat storage index. */
  def getStorageIndex( coordIndices: Array[Int] ): Int = {
    assert( coordIndices.length == rank, "Wrong number of coordinates in getStorageIndex for Array of rank %d: %d".format( rank, coordIndices.length) )
    var value: Int = offset
    for( ii <-(0 until rank ); if (shape(ii) >= 0) ) {
      value += coordIndices(ii) * stride(ii)
    }
    value
  }

  /** Row-major strides for `shape`; excluded dimensions (negative size) get stride 0. */
  def computeStrides( shape: Array[Int] ): Array[Int] = {
    var product: Int = 1
    var strides = for (ii <- (shape.length - 1 to 0 by -1); thisDim = shape(ii) ) yield
      if (thisDim >= 0) {
        val curr_stride = product
        product *= thisDim
        curr_stride
      } else { 0 }
    return strides.reverse.toArray
  }

  /** Reverses traversal order along one dimension (negated stride, shifted offset). */
  def flip(index: Int): CDCoordIndex = {
    assert ( (index >= 0) && (index < rank), "Illegal rank index: " + index )
    val new_index = if (shape(index) >= 0) {
      val _offset = offset + stride(index) * (shape(index) - 1)
      val _stride = stride.clone
      _stride(index) = -stride(index)
      new CDCoordIndex( shape, _stride, _offset )
    } else new CDCoordIndex( this )
    return new_index
  }

  /** Restricts the index to a sub-section; a null range keeps a dimension unchanged. */
  def section( ranges: List[ma2.Range] ): CDCoordIndex = {
    assert(ranges.size == rank, "Bad ranges [] length")
    for( ii <-(0 until rank); r = ranges(ii); if ((r != null) && (r != ma2.Range.VLEN)) ) {
      assert ((r.first >= 0) && (r.first < shape(ii)), "Bad range starting value at index " + ii + " == " + r.first)
      assert ((r.last >= 0) && (r.last < shape(ii)), "Bad range ending value at index " + ii + " == " + r.last)
    }
    var _offset: Int = offset
    val _shape: Array[Int] = Array.fill[Int](rank)(0)
    val _stride: Array[Int] = Array.fill[Int](rank)(0)
    for( ii <-(0 until rank); r = ranges(ii) ) {
      if (r == null) {
        _shape(ii) = shape(ii)
        _stride(ii) = stride(ii)
      }
      else {
        _shape(ii) = r.length
        _stride(ii) = stride(ii) * r.stride
        _offset += stride(ii) * r.first
      }
    }
    CDCoordIndex.factory( _shape, _stride, _offset )
  }

  /** Drops every length-1 dimension (recursively, one at a time). */
  def reduce: CDCoordIndex = {
    val c: CDCoordIndex = this
    for( ii <-(0 until rank); if (shape(ii) == 1) ) {
      val newc: CDCoordIndex = c.reduce(ii)
      return newc.reduce
    }
    return c
  }

  /** Drops one dimension, which must have length 1. */
  def reduce(dim: Int): CDCoordIndex = {
    assert((dim >= 0) && (dim < rank), "illegal reduce dim " + dim )
    assert( (shape(dim) == 1), "illegal reduce dim " + dim + " : length != 1" )
    val _shape = ListBuffer[Int]()
    val _stride = ListBuffer[Int]()
    for( ii <-(0 until rank); if (ii != dim) ) {
      _shape.append( shape(ii) )
      _stride.append( stride(ii) )
    }
    CDCoordIndex.factory( _shape.toArray, _stride.toArray, offset )
  }

  /** Swaps two dimensions (shape and stride) without moving data. */
  def transpose(index1: Int, index2: Int): CDCoordIndex = {
    assert((index1 >= 0) && (index1 < rank), "illegal index in transpose " + index1 )
    // Fixed: the message previously reported index1 when index2 was out of range.
    assert((index2 >= 0) && (index2 < rank), "illegal index in transpose " + index2 )
    val _shape = shape.clone()
    val _stride = stride.clone()
    _stride(index1) = stride(index2)
    _stride(index2) = stride(index1)
    _shape(index1) = shape(index2)
    _shape(index2) = shape(index1)
    CDCoordIndex.factory( _shape, _stride, offset )
  }

  /** Reorders dimensions according to `dims` (a permutation of 0 until rank). */
  def permute(dims: Array[Int]): CDCoordIndex = {
    // Fixed: messages previously concatenated the Array reference itself ("[I@..."),
    // now they show the actual permutation contents.
    assert( (dims.length == shape.length), "illegal shape in permute " + dims.mkString("(", ", ", ")") )
    for (dim <- dims) if ((dim < 0) || (dim >= rank)) throw new Exception( "illegal shape in permute " + dims.mkString("(", ", ", ")") )
    val _shape = ListBuffer[Int]()
    val _stride = ListBuffer[Int]()
    for( i <-(0 until dims.length) ) {
      _stride.append( stride(dims(i) ) )
      _shape.append( shape(dims(i)) )
    }
    CDCoordIndex.factory( _shape.toArray, _stride.toArray, offset )
  }

  /** Broadcasts a length-1 dimension to `size` by setting its stride to 0. */
  def broadcast( dim: Int, size: Int ): CDCoordIndex = {
    assert( shape(dim) == 1, "Can't broadcast a dimension with size > 1" )
    val _shape = shape.clone()
    val _stride = stride.clone()
    _shape(dim) = size
    _stride(dim) = 0
    CDCoordIndex.factory( _shape, _stride, offset )
  }

  /** Broadcasts every length-1 dimension as needed to match `bcast_shape`. */
  def broadcast( bcast_shape: Array[Int] ): CDCoordIndex = {
    assert ( bcast_shape.length == rank, "Can't broadcast shape (%s) to (%s)".format( shape.mkString(","), bcast_shape.mkString(",") ) )
    val _shape = shape.clone()
    val _stride = stride.clone()
    for( idim <- (0 until rank ); bsize = bcast_shape(idim); size0 = shape(idim); if( bsize != size0 ) ) {
      assert((size0 == 1) || (bsize == size0), "Can't broadcast shape (%s) to (%s)".format(shape.mkString(","), bcast_shape.mkString(",")))
      _shape(idim) = bsize
      _stride(idim) = 0
    }
    CDCoordIndex.factory( _shape, _stride, offset )
  }
}
/** Maps coordinates along one dimension into a fixed number of bins.
  * `dimIndex` is the rebinned dimension, `nBins` the bin count along it.
  */
trait CDCoordMap {
  def dimIndex: Int
  val nBins: Int
  /** Returns the coordinates with the `dimIndex` component replaced by its bin index. */
  def map( coordIndices: Array[Int] ): Array[Int]
  /** Returns `shape` with the size along `dimIndex` replaced by the bin count. */
  def mapShape( shape: Array[Int] ): Array[Int] = {
    val result = shape.clone()
    result(dimIndex) = nBins
    result
  }
}
/** A [[CDCoordMap]] backed by an explicit lookup table: coordinate `i` along
  * dimension `dimIndex` is rebinned to `mapArray(i)`; all other dimensions pass through.
  */
class CDCoordArrayMap( val dimIndex: Int, val nBins: Int, val mapArray: Array[Int] ) extends CDCoordMap {
  def map( coordIndices: Array[Int] ): Array[Int] =
    Array.tabulate(coordIndices.length) { i =>
      if (i == dimIndex) mapArray(coordIndices(i)) else coordIndices(i)
    }
}
/** Builds coordinate maps that rebin a variable's time axis (e.g. per-month, per-year). */
object CDTimeCoordMap {
  val logger = org.slf4j.LoggerFactory.getLogger(this.getClass)
  /** Returns a [[CDCoordArrayMap]] assigning each time coordinate of `variable` to a bin.
    * `step` selects the bin granularity ("month" or "year"); `cycle` == "year" makes
    * monthly bins wrap around the calendar year (climatology-style, 12 bins).
    * Throws for non-time axes and unsupported step values.
    */
  def getTimeCycleMap( step: String, cycle: String, variable: CDSVariable ): CDCoordArrayMap = {
    val dimIndex: Int = variable.getAxisIndex( 't' )
    val coordinateAxis: CoordinateAxis1D = variable.dataset.getCoordinateAxis( 't' ) match {
      case caxis: CoordinateAxis1D => caxis;
      case x => throw new Exception("Coordinate Axis type %s can't currently be binned".format(x.getClass.getName))
    }
    // NOTE(review): `units` is never used below -- retained for now; confirm before removing.
    val units = coordinateAxis.getUnitsString
    coordinateAxis.getAxisType match {
      case AxisType.Time =>
        // Lazy: only materialize the calendar view when a supported step is requested.
        lazy val timeAxis: CoordinateAxis1DTime = CoordinateAxis1DTime.factory(variable.dataset.ncDataset, coordinateAxis, new Formatter())
        step match {
          case "month" =>
            if (cycle == "year") {
              // 12 climatology bins: every timestamp maps to its month-of-year (0-11).
              new CDCoordArrayMap( dimIndex, 12, timeAxis.getCalendarDates.map( _.getFieldValue(Month)-1 ).toArray )
            } else {
              val year_offset = timeAxis.getCalendarDate(0).getFieldValue(Year)
              // NOTE(review): this adds the raw year delta to the month index without
              // multiplying by 12; an absolute month bin would normally be
              // (year - year_offset) * 12 + (month - 1). Likewise nBins below is the
              // year count (n/12), not the month count. Confirm this is intended.
              val binIndices: Array[Int] = timeAxis.getCalendarDates.map( cdate => cdate.getFieldValue(Month)-1 + cdate.getFieldValue(Year) - year_offset ).toArray
              new CDCoordArrayMap( dimIndex, Math.ceil(coordinateAxis.getShape(0)/12.0).toInt, binIndices )
            }
          case "year" =>
            // One bin per calendar year, relative to the first timestamp's year.
            val year_offset = timeAxis.getCalendarDate(0).getFieldValue(Year)
            val binIndices: Array[Int] = timeAxis.getCalendarDates.map( cdate => cdate.getFieldValue(Year) - year_offset ).toArray
            new CDCoordArrayMap( dimIndex, Math.ceil(coordinateAxis.getShape(0)/12.0).toInt, binIndices )
          case x => throw new Exception("Binning not yet implemented for this step type: %s".format(step))
        }
      case x => throw new Exception("Binning not yet implemented for this axis type: %s".format(x.getClass.getName))
    }
  }
}
| nasa-nccs-cds/CDAPI | src/main/scala/nasa/nccs/cdapi/tensors/CDIndex.scala | Scala | gpl-2.0 | 9,203 |
package org.jetbrains.plugins.scala
package javaHighlighting
import org.jetbrains.plugins.scala.annotator._
/**
* Author: Svyatoslav Ilinskiy
* Date: 7/8/15
*/
// Regression tests for Scala/Java interop highlighting: each test feeds paired Scala
// and Java snippets to the annotator and asserts either no errors or one exact error.
// (Base class name "JavaHighltightingTestBase" is misspelled upstream -- defined elsewhere.)
class JavaHighlightingTest extends JavaHighltightingTestBase {
  // Scala subclass instance may call a protected Java method: expect no errors.
  def testProtected() = {
    val scala =
      """
        |class MeaningOfLifeSpec {
        |  val c = new UltimateQuestion {}
        |  def meaningOfLifeScala() {
        |    c.meaningOfLife()
        |  }
        |}
      """.stripMargin
    val java =
      """
        |public class UltimateQuestion {
        |  protected int meaningOfLife() {
        |    return 42; //Answer to the Ultimate Question of Life, the Universe, and Everything
        |  }
        |}
      """.stripMargin
    assertNoErrors(messagesFromScalaCode(scala, java))
  }
  // Instantiating a Scala trait from Java must produce exactly one "cannot be instantiated" error.
  def testTraitIsAbstract(): Unit = {
    val scalaCode = "trait MooSCL4289"
    val javaCode =
      """
        |public class TestSCL4289 {
        |  public static void main(String[] args) {
        |    new MooSCL4289();
        |  }
        |}
      """.stripMargin
    assertMatches(messagesFromJavaCode(scalaCode, javaCode, "TestSCL4289")) {
      case Error("new MooSCL4289()", CannotBeInstantianted()) :: Nil =>
    }
  }
  // Java may satisfy a Scala by-name parameter via AbstractFunction0: expect no errors.
  def testCallByNameParameterNoPrimitives(): Unit = {
    val scala =
      """
        |object MooSCL8823 {
        |  def ensure(f: => Unit): Unit = ???
        |}
      """.stripMargin
    val java =
      """
        |import scala.runtime.AbstractFunction0;
        |import scala.runtime.BoxedUnit;
        |
        |public class SCL8823 {
        |  public static void main( String[] args ) {
        |    MooSCL8823.ensure(new AbstractFunction0<BoxedUnit>() {
        |      public BoxedUnit apply() {
        |        System.out.println("foo");
        |        return BoxedUnit.UNIT;
        |      }
        |    });
        |  }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, "SCL8823"))
  }
  // AnyVal value classes used from Java: only the raw-literal call (42.0) must fail to apply.
  def testValueTypes(): Unit = {
    val scala =
      """
        |class Order(val limitPrice: Price, val qty: Quantity)
        |class Prices(val prices: java.util.List[Price])
        |
        |class Price(val doubleVal: Double) extends AnyVal
        |class Quantity(val doubleVal: Double) extends AnyVal
        |class Bar
        |class BarWrapper(val s: Bar) extends AnyVal
        |class BarWrappers(val bars: java.util.List[BarWrapper])
        |
      """.stripMargin
    val java =
      """
        |import java.util.ArrayList;
        |
        |public class JavaHighlightingValueTypes {
        |
        |  public static void main(String[] args) {
        |    Order o = new Order(19.0, 10);
        |    System.out.println("Hello World! " + o.limitPrice());
        |    Price p = new Price(10);
        |
        |    Prices pr = new Prices(new ArrayList<Price>());
        |    BarWrappers barWrappers = new BarWrappers(new ArrayList<Bar>());
        |
        |    doublePrice(new Price(10.0));
        |    doublePrice(42.0);
        |  }
        |
        |  public static void doublePrice(Price p) {
        |    System.out.println(p.doubleVal() * 2);
        |  }
        |
        |}
      """.stripMargin
    assertMatches(messagesFromJavaCode(scala, java, javaClassName = "JavaHighlightingValueTypes")) {
      case Error("(42.0)", CannotBeApplied()) :: Nil =>
    }
  }
  // Java calling scala.Option.apply resolves fine: expect no errors (no Scala side needed).
  def testOptionApply(): Unit = {
    val java =
      """
        |import scala.Option;
        |
        |public abstract class OptionApply {
        |
        |    public OptionApply() {
        |        setAction(Option.apply("importVCardFile"));
        |    }
        |
        |    public abstract void setAction(Option<String> bar);
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaFileText = "", java, javaClassName = "OptionApply"))
  }
  // Backtick-named member: accessor and bean getter resolve; the mangled name must not.
  def testAccessBacktick(): Unit = {
    val scala =
      """
        |import scala.beans.BeanProperty
        |
        |case class TestAccessBacktick(@BeanProperty `type`:String)
      """.stripMargin
    val java =
      """
        |public class TestJavaAAA {
        |  public static void main(String[] args) {
        |    TestAccessBacktick t = new TestAccessBacktick("42");
        |    t.type();
        |    t.getType();
        |    t.get$u0060type$u0060();
        |  }
        |}
      """.stripMargin
    assertMatches(messagesFromJavaCode(scala, java, javaClassName = "TestJavaAAA")) {
      case Error("get$u0060type$u0060", CannotResolveMethod()) :: Nil =>
    }
  }
  // Catching both checked exceptions thrown by Await.ready: expect no errors.
  def testMultipleThrowStatements(): Unit = {
    val scala = ""
    val java =
      """
        |import scala.concurrent.Await;
        |import scala.concurrent.Future;
        |import scala.concurrent.duration.Duration;
        |
        |import java.util.concurrent.TimeoutException;
        |
        |public class ThrowsJava {
        |  public void bar(Future<Integer> scalaFuture) {
        |    try {
        |      Await.ready(scalaFuture, Duration.Inf());
        |    } catch (InterruptedException e) {
        |      e.printStackTrace();
        |    } catch (TimeoutException e) {
        |      e.printStackTrace();
        |    }
        |  }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, javaClassName = "ThrowsJava"))
  }
  // Java override of scala.concurrent.Future#withFilter: expect no errors.
  def testOverrideFinal(): Unit = {
    val scala = ""
    val java =
      """
        |import scala.Function1;
        |import scala.concurrent.ExecutionContext;
        |
        |public abstract class Future<T> implements scala.concurrent.Future<T> {
        |
        |    @Override
        |    public scala.concurrent.Future<T> withFilter(Function1<T, Object> pred, ExecutionContext executor) {
        |        return null;
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, "Future"))
  }
  // Heavy use of scala.Option/Function1 from Java (SCL-5617): expect no errors.
  def testSCL5617Option(): Unit = {
    val scala = ""
    val java =
      """
        |import scala.Function1;
        |import scala.Option;
        |import scala.runtime.BoxedUnit;
        |import java.util.concurrent.atomic.AtomicReference;
        |import scala.runtime.AbstractFunction1;
        |
        |public class SCL5617 {
        |  public static void main(String[] args) {
        |    AtomicReference<Function1<Object, BoxedUnit>> f = new AtomicReference<Function1<Object, BoxedUnit>>(new AbstractFunction1<Object, BoxedUnit>() {
        |      public BoxedUnit apply(Object o) {
        |        Option<String> option = Option.empty();
        |        return BoxedUnit.UNIT;
        |      }
        |    });
        |
        |    Option<Function1<Object, BoxedUnit>> o = Option.apply(f.get());
        |  }
        |}
        |
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, "SCL5617"))
  }
  // Java class extending a Scala case class: expect no errors.
  def testCaseClassImplement() = {
    val scala = "case class CaseClass()"
    val java =
      """
        |public class CaseClassExtended extends CaseClass {
        |
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, javaClassName = "CaseClassExtended"))
  }
  // Default-arg method defined in both trait and companion (SCL-8861): expect no errors.
  def testOverrideDefaultWithStaticSCL8861(): Unit = {
    def scala =
      """
        |class TestKit2SCL8861 extends TestKitBase2SCL8861
        |
        |object TestKit2SCL8861 {
        |  def awaitCond(interval: String = ???): Boolean = {
        |    ???
        |  }
        |}
        |trait TestKitBase2SCL8861 {
        |  def awaitCond(interval: String = ???) = ???
        |}
      """.stripMargin
    val java =
      """
        |public class SCL8861 extends TestKit2SCL8861 {
        |
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, "SCL8861"))
  }
  // Java subclass passing constructor args to a Scala class with `var` params: no errors either way.
  def testClassParameter(): Unit = {
    val scala =
      """
        |class ScalaClass (var name: String, var surname: String)
        |
        |object Start {
        |  def main(args: Array[String]) {
        |    val scalaClassObj = new ScalaClass("Dom", "Sien")
        |    println(scalaClassObj.name)
        |    println(scalaClassObj.surname)
        |
        |    val javaClassObj = new JavaClass("Dom2", "Sien2", 31)
        |    println(javaClassObj.name)
        |    println(javaClassObj.surname)
        |    println(javaClassObj.getAge)
        |  }
        |}
      """.stripMargin
    val java =
      """
        |public class JavaClass extends ScalaClass {
        |    private int age;
        |
        |    public JavaClass(String name, String surname, int age) {
        |        super(name, surname);
        |        this.age = age;
        |    }
        |
        |    public int getAge() {
        |        return age;
        |    }
        |
        |    public void setAge(int age) {
        |        this.age = age;
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scala, java, "JavaClass"))
    assertNoErrors(messagesFromScalaCode(scala, java))
  }
  // Accessing an inherited `val` param accessor from Java and Scala (SCL-3390): no errors.
  def testSCL3390ParamAccessor(): Unit = {
    val scalaCode =
      """
        |object ScalaClient {
        |  def main(args: Array[String]) {
        |    new Sub(1).x
        |  }
        |}
        |
        |class Super(val x: Int)
        |
        |class Sub(x: Int) extends Super(x)
      """.stripMargin
    val javaCode =
      """
        |public class JavaClientSCL3390 {
        |  public static void main(String[] args) {
        |    new Sub(1).x();
        |  }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "JavaClientSCL3390"))
    assertNoErrors(messagesFromScalaCode(scalaCode, javaCode))
  }
  // Java wildcard array in annotation vs Scala Array(classOf[...]) (SCL-3498): no errors.
  def testSCL3498ExistentialTypesFromJava(): Unit = {
    val javaCode =
      """
        |public @interface Transactional {
        |    Class<? extends Throwable>[] noRollbackFor() default {};
        |}
      """.stripMargin
    val scalaCode =
      """
        |@Transactional(noRollbackFor = Array(classOf[RuntimeException])) // expected Array[Class[_ <: Throwable] found Array[Class[RuntimeException]]
        |class A
      """.stripMargin
    assertNoErrors(messagesFromScalaCode(scalaCode, javaCode))
  }
  // Java field and method share a name; Scala call must resolve the method (SCL-3679): no errors.
  def testResolvePublicJavaFieldSameNameAsMethod(): Unit = {
    val scalaCode =
      """
        |package SCL3679
        |
        |object ResolvePublicJavaFieldSameNameAsMethod {
        |  def main(args: Array[String]) {
        |    println("foo")
        |    new ResolvePublicJavaFieldSameNameAsMethodJavaClass().hasIsCompressed
        |  }
        |}
      """.stripMargin
    val javaCode =
      """
        |package SCL3679;
        |
        |public class ResolvePublicJavaFieldSameNameAsMethodJavaClass {
        |    public boolean hasIsCompressed;
        |    public boolean hasIsCompressed() {
        |        System.out.println("In the method!");
        |        return hasIsCompressed;
        |    }
        |
        |}
      """.stripMargin
    assertNoErrors(messagesFromScalaCode(scalaCode, javaCode))
  }
  // Java extending a generic Scala class containing a plain inner class (SCL-8866): no errors.
  def testGenericsPlainInnerClass(): Unit = {
    val scalaCode =
      """
        |trait FSM[S, D] {
        |  final class TransformHelper {}
        |  final def transform(): TransformHelper = ???
        |}
        |
        |
        |abstract class Base[S, D] extends FSM[S, D]
      """.stripMargin
    val javaCode =
      """
        |public class SCL8866A extends Base<String, String> {}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, javaClassName = "SCL8866A"))
  }
  // Java override with an upper-bounded type param from a Scala trait (SCL-5852): no errors.
  def testOverrideScalaFromJavaUpperBound(): Unit = {
    val scalaCode =
      """
        |trait SCL5852WrapsSomething[T] {
        |  def wrap[A <: T](toWrap: A): A
        |}
      """.stripMargin
    val javaCode =
      """
        |public class SCL5852WrapsFoo implements SCL5852WrapsSomething<String> {
        | @Override
        | public <A extends String> A wrap(A toWrap) {
        |  return null;
        | }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, javaClassName = "SCL5852WrapsFoo"))
  }
  // Java extending a Scala class that overrides a method returning a parameterized inner class: no errors.
  def testGenericsParameterizedInnerClass(): Unit = {
    val scalaCode =
      """
        |abstract class FSM[S, D] {
        |  class TransformHelper[T]
        |  def transform(): TransformHelper[Int] = ???
        |}
        |
        |abstract class Base extends FSM[Int, String] {
        |  override def transform(): TransformHelper[Int] = ???
        |}
      """.stripMargin
    val javaCode =
      """
        |public class SCL8866B extends Base {
        |
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "SCL8866B"))
  }
  // Java calling the synthetic $lessinit$greater$default$2 default-arg accessor (SCL-7582): no errors.
  def testDefaultConstructorArguments(): Unit = {
    val scalaCode =
      """
        |class MooSCL7582(j: Int)(d: Int = j)
      """.stripMargin
    val javaCode =
      """
        |public class TestSCL7582 {
        |    public static void main(String[] args) {
        |        MooSCL7582 m = new MooSCL7582(1, MooSCL7582.$lessinit$greater$default$2(1));
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "TestSCL7582"))
  }
  // Java subclass reading a @specialized field through super(...): no errors.
  def testSpecializedFields(): Unit = {
    val scalaCode = "class SpecClass[@specialized(Int) T](val t: T, val s: String)"
    val javaCode =
      """
        |public class Pair extends SpecClass<Integer> {
        |    public Pair(SpecClass<Integer> i) {
        |        super(i.t, "");
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "Pair"))
  }
  // Constructor reference (Scala::new) on a Scala auxiliary constructor (SCL-9412): no errors.
  def testConstructorReturnTypeNull(): Unit = {
    val scalaCode =
      """
        |class Scala(val s: String) {
        |  def this(i: Integer) = this(i.toString)
        |}
      """.stripMargin
    val javaCode =
      """
        |import java.util.stream.Stream;
        |
        |public class SCL9412 {
        |    Stream<Scala> testScala() {
        |        return Stream.of(1).map(Scala::new);
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "SCL9412"))
  }
  // Java instantiating a higher-kinded Scala class with a raw type argument (SCL-9661A): no errors.
  def testHigherKinded(): Unit = {
    val scalaCode =
      """
        |class BarSCL9661A[F, T[F]]() extends scala.AnyRef {
        |  def foo(t: T[F]): T[F] = t
        |}
      """.stripMargin
    val javaCode =
      """
        |import java.util.*;
        |
        |public class SCL9661A {
        |    public void create() {
        |        BarSCL9661A<String, List> bar = new BarSCL9661A<>();
        |        bar.foo(new ArrayList<Integer>());
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "SCL9661A"))
  }
  // Bounded type param inference through a builder chain from Java (SCL-9661): no errors.
  def testSCL9661(): Unit = {
    val scalaCode =
      """
        |object Moo extends scala.AnyRef {
        |  def builder[M]() : Builder[M] = ???
        |
        |  class Builder[+Mat] {
        |    def graph[S <: Shape](graph : Graph[S, _]) : S = { ??? }
        |  }
        |}
        |
        |class UniformFanOutShape[I, O] extends Shape
        |abstract class Shape
        |trait Graph[+S <: Shape, +M]
      """.stripMargin
    val javaCode =
      """
        |public class SCL9661 {
        |    public void create() {
        |        UniformFanOutShape<String, String> ass = Moo.builder().graph(null);
        |    }
        |}
      """.stripMargin
    assertNoErrors(messagesFromJavaCode(scalaCode, javaCode, "SCL9661"))
  }
}
| jeantil/intellij-scala | test/org/jetbrains/plugins/scala/javaHighlighting/JavaHighlightingTest.scala | Scala | apache-2.0 | 15,568 |
/*
* Masking2D.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape.graph
import de.sciss.fscape.Graph.{ProductReader, RefMapIn}
import de.sciss.fscape.UGenSource.unwrap
import de.sciss.fscape.stream.{StreamIn, StreamOut}
import de.sciss.fscape.{GE, UGen, UGenGraph, UGenIn, UGenInLike, UGenSource, stream}
import scala.collection.immutable.{IndexedSeq => Vec}
/** Deserializer for [[Masking2D]] graph elements. */
object Masking2D extends ProductReader[Masking2D] {
  override def read(in: RefMapIn, key: String, arity: Int): Masking2D = {
    require (arity == 8)
    // Fields were serialized in declaration order; Scala evaluates constructor
    // arguments left-to-right, so reading inline preserves the stream order.
    new Masking2D(
      fg          = in.readGE(),
      bg          = in.readGE(),
      rows        = in.readGE(),
      columns     = in.readGE(),
      threshNoise = in.readGE(),
      threshMask  = in.readGE(),
      blurRows    = in.readGE(),
      blurColumns = in.readGE()
    )
  }
}
/** A single-output UGen that masks a foreground signal against a background
  * over 2-D frames of `rows` x `columns`, with noise/mask thresholds and
  * separate blur radii per axis. All parameters are graph elements (GE).
  */
final case class Masking2D(fg         : GE,
                           bg         : GE,
                           rows       : GE,
                           columns    : GE,
                           threshNoise: GE,
                           threshMask : GE,
                           blurRows   : GE,
                           blurColumns: GE,
                          )
  extends UGenSource.SingleOut {

  // Expand every input graph element; order must match the destructure in makeStream.
  protected def makeUGens(implicit b: UGenGraph.Builder): UGenInLike =
    unwrap(this, Vector(fg.expand, bg.expand, rows.expand, columns.expand,
      threshNoise.expand, threshMask.expand, blurRows.expand, blurColumns.expand))

  protected def makeUGen(args: Vec[UGenIn])(implicit b: UGenGraph.Builder): UGen.SingleOut =
    UGen.SingleOut(this, inputs = args)

  // Converts the generic stream inputs to their concrete types and launches the stream stage.
  private[fscape] def makeStream(args: Vec[StreamIn])(implicit b: stream.Builder): StreamOut = {
    val Vec(fg, bg, rows, columns, threshNoise, threshMask, blurRows, blurColumns) = args: @unchecked
    stream.Masking2D(fg = fg.toDouble, bg = bg.toDouble, rows = rows.toInt, columns = columns.toInt,
      threshNoise = threshNoise.toDouble, threshMask = threshMask.toDouble,
      blurRows = blurRows.toInt, blurColumns = blurColumns.toInt)
  }
}
| Sciss/FScape-next | core/shared/src/main/scala/de/sciss/fscape/graph/Masking2D.scala | Scala | agpl-3.0 | 2,366 |
//
// Copyright (c) 2015 IronCore Labs
//
package com.ironcorelabs.davenport
package codec
import argonaut._
import scodec.bits.ByteVector
import org.scalacheck.Arbitrary
import Arbitrary.arbitrary
import scalaz._, Scalaz._
// Exercises ByteVectorDecoder: identity/UTF-8/JSON decoding, failure modes on
// wrong encodings or malformed JSON, and the scalaz Functor laws.
class ByteVectorDecoderSpec extends TestBase {
  // Non-ASCII payload (Greek lambda) to make encoding mismatches observable.
  val jsonValue = "\u03BB4life"
  val stringWithUnicode = s"""{"key":"$jsonValue"}"""
  val utf8Bytes = ByteVector.encodeUtf8(stringWithUnicode).right.get
  // Same JSON encoded as UTF-16 -- invalid input for the UTF-8-based decoders below.
  val utf16Bytes = ByteVector.view(stringWithUnicode.getBytes("UTF16"))
  // Arbitrary decoders for the ScalaCheck law checks: lift a pure function into a decoder.
  implicit def arbByteDecoder[A: Arbitrary] = Arbitrary {
    arbitrary[ByteVector => A].map(f => ByteVectorDecoder(f.map(_.right)))
  }
  "ByteVectorDecoder" should {
    "decode bytes as identity" in { ByteVectorDecoder.IdDecoder(utf16Bytes).value shouldBe utf16Bytes }
    "decode string as utf8" in { ByteVectorDecoder.StringDecoder(utf8Bytes).value shouldBe stringWithUnicode }
    "decode json utf8bytes" in {
      val result = ByteVectorDecoder.fromDecodeJson(DecodeJson.of[Map[String, String]])(utf8Bytes).value
      result.get("key").value shouldBe jsonValue
    }
    // UTF-16 bytes are not valid UTF-8: the charset failure must surface as the cause.
    "fail decode json utf16Bytes" in {
      val failure = ByteVectorDecoder.fromDecodeJson(DecodeJson.of[Map[String, String]])(utf16Bytes).leftValue
      failure.cause.value shouldBe a[java.nio.charset.MalformedInputException]
      failure.message should include("Couldn't decode")
    }
    // Dropping the first byte corrupts the JSON itself (valid UTF-8, invalid syntax).
    "fail decode invalid json bytes" in {
      val failure = ByteVectorDecoder.fromDecodeJson(DecodeJson.of[Map[String, String]])(utf8Bytes.drop(1)).leftValue
      failure.cause shouldBe None
      failure.message should include("Json parse failed with")
    }
    // Valid JSON, but an Int decoder cannot decode an object: decode-level failure.
    "fail decode valid json but invalid decoder" in {
      val failure = ByteVectorDecoder.fromDecodeJson(DecodeJson.of[Int])(utf8Bytes).leftValue
      failure.cause shouldBe None
      failure.message should include("Failed to decode json giving excuse")
    }
    "have lawful scalaz typeclasses" in {
      //Function equality isn't possible so I chose to evaluate it at a single point and prove that the functor laws hold given that point.
      implicit def equal[A: Equal] = Equal.equalBy[ByteVectorDecoder[A], A](_.apply(utf8Bytes).getOrElse(throw new Exception("Decoding bytes failed")))
      import scalaz.scalacheck.ScalazProperties
      check(ScalazProperties.functor.laws[ByteVectorDecoder])
    }
  }
}
| BobWall23/davenport | src/test/scala/com/ironcorelabs/davenport/codec/ByteVectorDecoderSpec.scala | Scala | mit | 2,354 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.tables
import com.outworkers.phantom.dsl._
// Row type for PrimaryCollectionTable: collection columns participate in the primary key.
case class PrimaryCollectionRecord(
  index: List[String],
  set: Set[String],
  map: Map[String, String],
  name: String,
  value: Int
)
// Cassandra table whose partition key is a list column and whose clustering/primary
// components include set, map and string columns; `value` is the only payload column.
abstract class PrimaryCollectionTable extends Table[PrimaryCollectionTable, PrimaryCollectionRecord] {
  object listIndex extends ListColumn[String] with PartitionKey
  object setCol extends SetColumn[String] with PrimaryKey
  object mapCol extends MapColumn[String, String] with PrimaryKey
  object name extends StringColumn with PrimaryKey
  object value extends IntColumn
}
// Row type exercising nested collection columns (lists of lists/sets, maps of collections).
case class NestedCollections(
  id: UUID,
  text: String,
  nestedList: List[List[String]],
  nestedListSet: List[Set[String]],
  props: Map[String, List[String]],
  doubleProps: Map[Set[String], List[String]]
)
// Cassandra table keyed by UUID; every other column stores a nested collection,
// mirroring the fields of NestedCollections one-to-one.
abstract class NestedCollectionTable extends Table[
  NestedCollectionTable,
  NestedCollections
] {
  object id extends UUIDColumn with PartitionKey
  object text extends StringColumn
  object nestedList extends ListColumn[List[String]]
  object nestedListSet extends ListColumn[Set[String]]
  object props extends MapColumn[String, List[String]]
  object doubleProps extends MapColumn[Set[String], List[String]]
}
| outworkers/phantom | phantom-dsl/src/test/scala/com/outworkers/phantom/tables/PrimaryCollectionTable.scala | Scala | apache-2.0 | 1,828 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.driver.writer
import akka.event.slf4j.SLF4JLogging
import org.apache.spark.sql.DataFrame
import com.stratio.sparta.driver.exception.DriverException
import com.stratio.sparta.driver.helper.SchemaHelper
import com.stratio.sparta.driver.step.Trigger
import com.stratio.sparta.sdk.pipeline.output.Output
import org.apache.spark.sql.types.StructType
import scala.util.{Failure, Success, Try}
/** Mixes in the ability to run SQL "trigger" queries against a streaming DataFrame
  * and persist their results to the configured outputs.
  */
trait TriggerWriter extends DataFrameModifier with SLF4JLogging {

  //scalastyle:off
  /** Registers `dataFrame` as temp view `inputTableName`, executes each trigger's SQL,
    * writes non-empty results to every output named by the trigger, registers each
    * result as a temp view (so later triggers can query it), and finally drops all
    * temp views created here. Analysis/SQL failures are rethrown as DriverException.
    */
  def writeTriggers(dataFrame: DataFrame,
                    triggers: Seq[Trigger],
                    inputTableName: String,
                    outputs: Seq[Output]): Unit = {
    val sparkSession = dataFrame.sparkSession
    if (triggers.nonEmpty && isCorrectTableName(inputTableName)) {
      if (!sparkSession.catalog.tableExists(inputTableName)) {
        dataFrame.createOrReplaceTempView(inputTableName)
        log.debug(s"Registering temporal table in Spark with name: $inputTableName")
      }
      val tempTables = triggers.flatMap(trigger => {
        log.debug(s"Executing query in Spark: ${trigger.sql}")
        val queryDf = Try(sparkSession.sql(trigger.sql)) match {
          case Success(sqlResult) => sqlResult
          case Failure(exception: org.apache.spark.sql.AnalysisException) =>
            // Fixed: the message was a plain string literal, so ${trigger.sql} and
            // ${trigger.name} were logged verbatim instead of being interpolated.
            log.warn(s"Warning running analysis in Catalyst in the query ${trigger.sql} in trigger ${trigger.name}",
              exception.message)
            throw DriverException(exception.getMessage, exception)
          case Failure(exception) =>
            log.warn(s"Warning running sql in the query ${trigger.sql} in trigger ${trigger.name}", exception.getMessage)
            throw DriverException(exception.getMessage, exception)
        }
        val outputTableName = trigger.triggerWriterOptions.tableName.getOrElse(trigger.name)
        // Per-output save options: destination table plus optional partitioning and primary key.
        val saveOptions = Map(Output.TableNameKey -> outputTableName) ++
          trigger.triggerWriterOptions.partitionBy.fold(Map.empty[String, String]) {partition =>
            Map(Output.PartitionByKey -> partition)} ++
          trigger.triggerWriterOptions.primaryKey.fold(Map.empty[String, String]) {key =>
            Map(Output.PrimaryKey -> key)}
        if (!queryDf.rdd.isEmpty()) {
          val autoCalculatedFieldsDf =
            applyAutoCalculateFields(queryDf,
              trigger.triggerWriterOptions.autoCalculateFields,
              StructType(queryDf.schema.fields ++
                SchemaHelper.getStreamWriterPkFieldsMetadata(trigger.triggerWriterOptions.primaryKey)))
          // Expose the trigger result as a temp view so subsequent triggers can reference it.
          if (isCorrectTableName(trigger.name) && !sparkSession.catalog.tableExists(trigger.name)) {
            autoCalculatedFieldsDf.createOrReplaceTempView(trigger.name)
            log.debug(s"Registering temporal table in Spark with name: ${trigger.name}")
          }
          else log.warn(s"The trigger ${trigger.name} have incorrect name, is impossible to register as temporal table")
          trigger.triggerWriterOptions.outputs.foreach(outputName =>
            outputs.find(output => output.name == outputName) match {
              case Some(outputWriter) => Try {
                outputWriter.save(autoCalculatedFieldsDf, trigger.triggerWriterOptions.saveMode, saveOptions)
              } match {
                case Success(_) =>
                  log.debug(s"Trigger data stored in $outputTableName")
                case Failure(e) =>
                  log.error(s"Something goes wrong. Table: $outputTableName")
                  log.error(s"Schema. ${autoCalculatedFieldsDf.schema}")
                  log.error(s"Head element. ${autoCalculatedFieldsDf.head}")
                  log.error(s"Error message : ${e.getMessage}")
              }
              case None => log.error(s"The output in the trigger : $outputName not match in the outputs")
            })
          Option(trigger.name)
        } else None
      })
      // Clean up: drop the trigger result views first, then the input view.
      tempTables.foreach(tableName =>
        if (isCorrectTableName(tableName) && sparkSession.catalog.tableExists(tableName)) {
          sparkSession.catalog.dropTempView(tableName)
          log.debug(s"Dropping temporal table in Spark with name: $tableName")
        } else log.debug(s"Impossible to drop table in Spark with name: $tableName"))
      if (isCorrectTableName(inputTableName) && sparkSession.catalog.tableExists(inputTableName)) {
        sparkSession.catalog.dropTempView(inputTableName)
        log.debug(s"Dropping temporal table in Spark with name: $inputTableName")
      } else log.debug(s"Impossible to drop table in Spark: $inputTableName")
    } else {
      if (triggers.nonEmpty && !isCorrectTableName(inputTableName))
        log.warn(s"Incorrect table name $inputTableName and the triggers could have errors and not have been " +
          s"executed")
    }
  }
  //scalastyle:on

  /** Rejects names that are empty, SQL keywords, or contain characters invalid for temp views. */
  private def isCorrectTableName(tableName: String): Boolean =
    tableName.nonEmpty && tableName != "" &&
      tableName.toLowerCase != "select" &&
      tableName.toLowerCase != "project" &&
      !tableName.contains("-") && !tableName.contains("*") && !tableName.contains("/")
}
| diegohurtado/sparta | driver/src/main/scala/com/stratio/sparta/driver/writer/TriggerWriter.scala | Scala | apache-2.0 | 5,738 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.common.engine.core.entities
import com.bwsw.sj.common.utils.StreamLiterals
import com.fasterxml.jackson.annotation.JsonIgnore
/**
* Provides a wrapper for t-stream transaction that is formed
* by [[com.bwsw.sj.common.utils.EngineLiterals.inputStreamingType]] engine.
*
* @param key a key for check on duplicate
* @param outputMetadata information (stream -> partition) - where data should be placed
* @param duplicateCheck whether a message should be checked on duplicate or not. If it is None than a default value is used
* ([[com.bwsw.sj.common.si.model.instance.InputInstance.duplicateCheck]])
* @param data message data
* @tparam T type of data containing in a message
*/
case class InputEnvelope[T <: AnyRef](key: String,
                                      outputMetadata: Seq[(String, Int)],
                                      data: T,
                                      duplicateCheck: Option[Boolean] = None) extends Envelope {
  streamType = StreamLiterals.inputDummy

  @JsonIgnore
  override def equals(obj: Any): Boolean = obj match {
    case that: InputEnvelope[_] => sameOwnFields(that) && sameInheritedFields(that)
    case _ => super.equals(obj)
  }

  // Compares the fields declared on this wrapper itself.
  // NOTE(review): `hashCode` remains the case-class-generated one (constructor
  // fields only), so the inherited fields below do not participate in hashing —
  // confirm that is intended before using instances as hash keys.
  private def sameOwnFields(that: InputEnvelope[_]): Boolean =
    key == that.key &&
      outputMetadata.toList == that.outputMetadata.toList &&
      data == that.data &&
      duplicateCheck == that.duplicateCheck

  // Compares the fields inherited from Envelope.
  private def sameInheritedFields(that: InputEnvelope[_]): Boolean =
    streamType == that.streamType &&
      id == that.id &&
      stream == that.stream &&
      (tags sameElements that.tags) &&
      partition == that.partition
}
package me.frmr.stripe
import net.liftweb.common._
import net.liftweb.json._
import net.liftweb.util.Helpers._
import scala.reflect.macros.Context
import scala.reflect.runtime.universe._
import scala.collection.mutable.ListBuffer
/**
* The common ancestor of any class that represents a Stripe
* data structure.
**/
abstract class StripeObject {
  implicit val formats = DefaultFormats

  /**
   * The raw JSON AST representation of the Stripe data structure this
   * class wraps, if one was retained.
   *
   * Fall back to this only when the typed accessors on the concrete
   * subclass do not expose the field you need.
   **/
  def raw: Option[JValue]

  /**
   * Transforms the raw JSON with `transformer`, then extracts an instance
   * of `T` from the transformed fragment. This is the generic mechanism for
   * pulling values out of Stripe's JSON API responses.
   *
   * Use it only for response fields that lack a typed accessor — and open a
   * pull request if you find one!
   *
   * @param transformer rewrites the original AST into the fragment holding the wanted value
   * @return a Full[T] on successful extraction, otherwise an empty/failed Box
   **/
  def valueFor[T](transformer: (JValue)=>JValue)(implicit mf: Manifest[T]) = {
    // Treat an absent raw payload as JNothing so extraction fails cleanly.
    val source = raw.getOrElse(JNothing)
    tryo(transformer(source).extract[T](formats, mf)).filterNot(_ == null)
  }

  /**
   * Builds a copy of this StripeObject carrying `raw` as its attached
   * JSON representation.
   *
   * @param raw the raw JValue representation to attach
   **/
  def withRaw(raw: JValue): StripeObject
}
| farmdawgnation/streifen | src/main/scala/me/frmr/stripe/StripeObject.scala | Scala | apache-2.0 | 1,779 |
package ssh
import org.apache.sshd.server.PublickeyAuthenticator
import org.apache.sshd.server.session.ServerSession
import java.security.PublicKey
import service.SshKeyService
import servlet.Database
import javax.servlet.ServletContext
/** sshd authenticator that accepts a connection when the offered public key
  * equals one of the user's registered SSH keys. */
class PublicKeyAuthenticator(context: ServletContext) extends PublickeyAuthenticator with SshKeyService {

  override def authenticate(username: String, key: PublicKey, session: ServerSession): Boolean =
    Database(context) withTransaction {
      // A stored key counts as a match only if it parses and equals the offered key;
      // unparseable stored keys are simply skipped.
      getPublicKeys(username).exists { registered =>
        SshUtil.str2PublicKey(registered.publicKey).exists(key.equals)
      }
    }
}
| Muscipular/gitbucket | src/main/scala/ssh/PublicKeyAuthenticator.scala | Scala | apache-2.0 | 703 |
/*
*
* o o o o o
* | o | |\\ /| | /
* | o-o o--o o-o oo | | O | oo o-o OO o-o o o
* | | | | | | | | | | | | | | | | \\ | | \\ /
* O---oo-o o--O | o-o o-o-o o o o-o-o o o o-o o
* |
* o--o
* o--o o o--o o o
* | | | | o | |
* O-Oo oo o-o o-O o-o o-O-o O-o o-o | o-O o-o
* | \\ | | | | | | | | | | | | | |-' | | | \\
* o o o-o-o o o-o o-o o o o o | o-o o o-o o-o
*
* Logical Markov Random Fields (LoMRF).
*
*
*/
package lomrf.mln.learning.supervision.metric
import lomrf.logic.EvidenceAtom
/**
* A metric is defined by a distance function over individual evidence atoms and
* distance over sequences of evidence atoms by specifying a matcher function.
*/
trait Metric {

  /**
   * Distance between two ground evidence atoms. Implementations must satisfy
   * the metric axioms:
   *
   * {{{
   * 1. d(x, y) >= 0 for all x, y and d(x, y) = 0 if and only if x = y
   * 2. d(x, y) = d(y, x) for all x, y
   * 3. d(x, y) + d(y, z) >= d(x, z) for all x, y, z (triangle inequality)
   * }}}
   *
   * @see [[lomrf.logic.EvidenceAtom]]
   * @param xAtom an evidence atom
   * @param yAtom another evidence atom
   * @return the distance between the two atoms
   */
  def distance(xAtom: EvidenceAtom, yAtom: EvidenceAtom): Double

  /**
   * Distance between two sequences of evidence atoms, computed by handing the
   * full pairwise cost matrix to a matcher that solves the assignment problem.
   *
   * @see [[lomrf.mln.learning.supervision.metric.Matcher]]
   * @param xAtomSeq a sequence of evidence atoms
   * @param yAtomSeq another sequence of evidence atoms
   * @param matcher a matcher function for the assignment problem
   * @return the matcher's aggregate distance over the two sequences
   */
  final def distance(
      xAtomSeq: IndexedSeq[EvidenceAtom],
      yAtomSeq: IndexedSeq[EvidenceAtom],
      matcher: Matcher[Double]): Double = {
    // Row i holds the distances from xAtomSeq(i) to every atom in yAtomSeq.
    val costMatrix = xAtomSeq.map(x => yAtomSeq.map(y => distance(x, y)))
    matcher(costMatrix)
  }
}
| anskarl/LoMRF | src/main/scala/lomrf/mln/learning/supervision/metric/Metric.scala | Scala | apache-2.0 | 2,231 |
trait T {
  // Compiler regression scenario (presumably SI-4565, per the filename):
  // a method-local object referenced both as a value argument and from a
  // nested method's body. Do not "simplify" — the construct itself is the test.
  def foo = {
    object A
    def a(x: Any) = x == A
    a(A) // always true: A is compared with itself
  }
}
object Test {
  /** Entry point: prints the result of `foo` on an anonymous T instance ("true"). */
  def main(args: Array[String]): Unit = {
    val instance = new T {}
    println(instance.foo)
  }
}
package com.github.mdr.ascii.layout
import org.scalatest.{ Matchers, FlatSpec }
import com.github.mdr.ascii.graph.Graph
import com.github.mdr.ascii.layout.RoundTripSpecification._
class RoundTripTest extends FlatSpec with Matchers {

  // Round-trip property: parse the diagram into a Graph, lay it out again, and
  // check the rendered result parses back to an equal graph.
  // NOTE(review): the box-drawing characters in the fixture below appear
  // mojibake-garbled (e.g. "โญ" where "╭" is expected) — likely an encoding
  // artifact of this copy; verify against the upstream file before editing
  // the diagram, as its exact bytes are the test input.
  "Round trip" should ("not overwrite an arrow") in {
    checkRoundTrip(Graph.fromDiagram("""
 โญโโโโโฎ โญโโโโโโโฎ
 โaaffโ โafcfadโ
 โ db โ โ ced โ
 โฐโโโโฌโฏ โฐโโโฌโโฌโโฏ
 โ โ โ
 โ โ โฐโโโโโโฎ
 โ โฐโโโฎ โ
 โ โ โ
 v v โ
 โญโโโโโโโโโโโโฎ โญโโโโโโฎ โ
 โddabcfabcbeโ โeeed โ โ
 โ ccfda โ โ cb โ โ
 โฐโโโโโฌโโโโโโโฏ โฐโโโฌโโโฏ โ
 โ โ ^ โ <-- was a bug where the edge segment below would be elevated into the arrow
 โ โญโโโโโโโโผโโผโโโฏ
 โ โ โ โ
 v v โ โ
 โญโโโโโโโโโโโโโฎ v โ
 โ d โ โญโโโโดโโฎ
 โ d โ โ c โ
 โbaecababeedcโ โฐโโโโโโฏ
 โฐโโโโโโโโโโโโโฏ
 """), unicodeLayoutPrefs) should be(true)
  }
} | jlmauduy/ascii-graphs | src/test/scala/com/github/mdr/ascii/layout/RoundTripTest.scala | Scala | mit | 1,488 |
package org.dele.text.maen.extracts
import org.dele.text.maen.test.TestAtom._
import org.dele.text.maen.{AtomPropMatcherLib, TInput, TMatchResultPool}
import org.dele.text.maen.matchers.SubMatchCheckerLib
import org.dele.text.maen.matchers.TMatcher._
import org.dele.text.maen.{TInput, TMatchResultPool}
import org.scalatest.ShouldMatchers
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.Test
/**
* Created by jiaji on 2016-02-12.
*/
class ExtractTest extends TestNGSuite with ShouldMatchers {
  import org.dele.text.maen.AtomPropMatcherLib._
  import org.dele.text.maen.TestHelper._
  import Extract._
  import org.dele.text.maen.test.TestAtom._
  import org.dele.text.maen.test.TestInput._
  import org.dele.text.maen.matchers.SubMatchCheckerLib._
  import org.dele.text.maen.AtomPropMatcherLib._
  // Checker library picked up implicitly by the matcher factory calls below.
  implicit val subMatchCheckerLib = EmptySubMatchCheckerLib
  // Basic extraction over an "<entity> and <entity>" pattern: checks how many
  // instances each of three extract definitions (one entity type, two entity
  // types, whole match span) produces from the same match.
  @Test
  def t1 = {
    val entityMatcher = fromAtomMatcher(E(EmptyRegexDict, Array("Company", "Organization")))
    val andMatcher = fromAtomMatcher(F(EmptyRegexDict, "and"))
    val matcherId:MId = "mid"
    val matchers = matchersOrderedAllPositive(Seq(entityMatcher, andMatcher, entityMatcher), EmptyCheckerIds, Option(matcherId))
    val input:TInput = fromAtomArrayEng(IndexedSeq(
      textAtom("Now"),
      entityAtom("FBI", "Organization", "OrgEntity"),
      textAtom("and"),
      entityAtom("Microsoft", "Company", "OrgEntity")
    ))
    // Only "Microsoft" is tagged Company, so a Company-only extract yields one instance.
    var extractDef = extractEntities("_entities", matcherId, "Company")
    var matches = matchers.m(DummyResultPool(input))
    var extracts = matches.flatMap(m => extractDef.process(m).instances)
    extracts.size shouldBe(1)
    // Widening to Company|Organization also picks up "FBI".
    extractDef = extractEntities("_entities", matcherId, "Company", "Organization")
    extracts = matches.flatMap(m => extractDef.process(m).instances)
    extracts.size shouldBe(2)
    // Whole-match extraction yields a single instance per match.
    extractDef = extractWhole("_entities", Option(matcherId))
    extracts = matches.flatMap(m => extractDef.process(m).instances)
    extracts.size shouldBe(1)
  }
  // Extract-definition fixture shared by the tests below; parsed once into
  // `extractDefs` further down. Defines the "CyberAttack" domain with related
  // entities, a blocking rule, and several named extract definitions.
  private val _extractDef =
    """
      |{
      |  "extractDefSets": [
      |    {
      |      "domain": "CyberAttack",
      |      "relEntExtracts": {
      |        "name": "_related_entities",
      |        "atomMatcherDefs": [ "E(Country)" ]
      |      },
      |      "extractBlocks": [
      |        "_testblock_entities:_blocking_entities"
      |      ],
      |      "extractDefs": [
      |        {
      |          "extractName": "_entities",
      |          "matcherId": "CyberAttack.org-cmp",
      |          "atomMatcherDef": "E(Company)"
      |        },
      |        {
      |          "extractName": "_entities",
      |          "matcherId": "CyberAttack.org-list",
      |          "atomMatcherDef": "E(Company | Organization)"
      |        },
      |        {
      |          "extractName": "_testblock_entities",
      |          "matcherId": "CyberAttack.org-list",
      |          "atomMatcherDef": "E(Company | Organization)"
      |        },
      |        {
      |          "extractName": "_blocking_entities",
      |          "matcherId": "CyberAttack.org-list",
      |          "atomMatcherDef": "E(Company | Organization)"
      |        },
      |        {
      |          "extractName": "_indicator",
      |          "matcherId": "CyberAttack.org-list"
      |        }
      |      ]
      |    }
      |  ]
      |}
    """.stripMargin
  // Runs the "CyberAttack" extract-definition set, exercising the
  // "_testblock_entities:_blocking_entities" blocking rule.
  // NOTE(review): `ex.mkString(" ")` is discarded and nothing is asserted —
  // this only verifies extraction runs without throwing; consider adding
  // assertions on the extracted values.
  @Test
  def testBlocking = {
    val input:TInput = fromAtomArrayEng(IndexedSeq(
      textAtom("Now"),
      entityAtom("Anonymous", "Organization", "OrgEntity"),
      textAtom("launch"),
      textAtom("attack"),
      textAtom("against"),
      entityAtom("FBI", "Organization", "OrgEntity"),
      textAtom("and"),
      entityAtom("Microsoft", "Company", "OrgEntity")
    ))
    val orgCompanyMatcher = fromAtomMatcher(E(EmptyRegexDict, Array("Company", "Organization")), EmptyCheckerIds, Option("CyberAttack.org-cmp"))
    val orgListMatcher = matchersOrderedAllPositive(Seq(orgCompanyMatcher, fromAtomMatcher(FExact("and")), orgCompanyMatcher), EmptyCheckerIds, Option("CyberAttack.org-list"))
    val resultPools = DummyResultPool(input)
    val matches = orgListMatcher.m(resultPools)
    val taggedExtractDefSet = extractDefs.getExtractDefSet("CyberAttack")
    matches.foreach(
      ma => {
        val ex = taggedExtractDefSet.run(ma, EmptyRelatedEntityCheckerIds)
        ex.mkString(" ")
      }
    )
  }
  // Same pipeline but with Country entities present in the input and a
  // non-empty related-entity checker id list ("Lng"), exercising the
  // "relEntExtracts" part of the fixture.
  // NOTE(review): as in testBlocking, the result is discarded — no assertions.
  @Test
  def testRelEnt = {
    val input:TInput = fromAtomArrayEng(IndexedSeq(
      entityAtom("US", "Country", "GeoEntity"),
      entityAtom("Anonymous", "Organization", "OrgEntity"),
      textAtom("launch"),
      textAtom("attack"),
      textAtom("against"),
      entityAtom("FBI", "Organization", "OrgEntity"),
      textAtom("and"),
      entityAtom("Microsoft", "Company", "OrgEntity"),
      textAtom("x"),
      entityAtom("US", "Country", "GeoEntity")
    ))
    val orgCompanyMatcher = fromAtomMatcher(E(EmptyRegexDict, Array("Company", "Organization")), EmptyCheckerIds, Option("CyberAttack.org-cmp"))
    val orgListMatcher = matchersOrderedAllPositive(Seq(orgCompanyMatcher, fromAtomMatcher(FExact("and")), orgCompanyMatcher), EmptyCheckerIds, Option("CyberAttack.org-list"))
    val resultPools = TMatchResultPool.create(input, StaticSubMatchCheckerLib)
    val matches = orgListMatcher.m(resultPools)
    val taggedExtractDefSet = extractDefs.getExtractDefSet("CyberAttack")
    matches.foreach(
      ma => {
        val ex = taggedExtractDefSet.run(ma, List("Lng"))
        ex.mkString(" ")
      }
    )
  }
//  {
//    "extractName": "_entities",
//    "matcherId": "mid3",
//    "atomMatcherDef": "EA(category, ThreatActor|Government)"
//  },
  // Parsed form of the JSON fixture above, shared by the tests.
  val extractDefs = Extract.fromJson(_extractDef)
  // Same scenario as testBlocking but using DummyResultPool and the default
  // (empty) related-entity checker ids.
  // NOTE(review): result discarded here too — no assertions.
  @Test
  def t2 = {
    val input:TInput = fromAtomArrayEng(IndexedSeq(
      textAtom("Now"),
      entityAtom("Anonymous", "Organization", "OrgEntity"),
      textAtom("launch"),
      textAtom("attack"),
      textAtom("against"),
      entityAtom("FBI", "Organization", "OrgEntity"),
      textAtom("and"),
      entityAtom("Microsoft", "Company", "OrgEntity")
    ))
    val orgCompanyMatcher = fromAtomMatcher(E(EmptyRegexDict, Array("Company", "Organization")), EmptyCheckerIds, Option("CyberAttack.org-cmp"))
    val orgListMatcher = matchersOrderedAllPositive(Seq(orgCompanyMatcher, fromAtomMatcher(FExact("and")), orgCompanyMatcher), EmptyCheckerIds, Option("CyberAttack.org-list"))
    val matches = orgListMatcher.m(DummyResultPool(input))
    val taggedExtractDefSet = extractDefs.getExtractDefSet("CyberAttack")
    matches.foreach(
      ma => {
        val ex = taggedExtractDefSet.run(ma, EmptyRelatedEntityCheckerIds)
        ex.mkString(" ")
      }
    )
  }
}
| new2scala/text-util | maen/src/test/scala/org/dele/text/maen/extracts/ExtractTest.scala | Scala | apache-2.0 | 6,609 |
package rta.model.actions
import android.content.Context
import android.content.pm.PackageManager
import rta.common.Root
import rta.logging.Logging
import scala.collection.convert.decorateAsScala._
// Base action that targets a single application, identified either by its
// human-readable label or by its package name; `resolver` decides which.
sealed abstract class AlterApplication extends Action with Logging {
  // Application label or package name, interpreted by `resolver`.
  def appOrPackage: String
  // Strategy turning `appOrPackage` into a concrete package name.
  def resolver: AlterApplication.Resolver

  override def kind: ActionKind =
    classOf[AlterApplication] -> None // TODO differentiate
}
object AlterApplication {
  /** Maps a user-supplied string plus the package manager to a concrete package name. */
  type Resolver = (String, PackageManager) => String

  object Resolver {
    /** The supplied string already is a package name; use it verbatim. */
    lazy val fromPackage: Resolver = (pkg, _) => pkg

    /** Treats the supplied string as an application label and resolves it to the
      * unique installed package carrying that label; errors when the label is
      * missing or ambiguous. */
    lazy val fromAppName: Resolver = (label, pm) => {
      val candidates = pm.getInstalledApplications(0).iterator().asScala
        .filter(info => pm.getApplicationLabel(info) == label)
        .toList
      candidates match {
        case single :: Nil => single.packageName
        case Nil => sys.error(s"No application with name $label found")
        case _ => sys.error(s"Ambiguous application name $label")
      }
    }
  }

  /** Starts the launch activity of the resolved application. */
  final case class Launch(appOrPackage: String, resolver: Resolver) extends AlterApplication {
    def execute()(implicit ctx: Context): Unit = {
      val packageManager = ctx.getPackageManager
      val intent = packageManager.getLaunchIntentForPackage(resolver(appOrPackage, packageManager))
      ctx.startActivity(intent)
    }
  }

  /** Force-stops the resolved application via a root shell command. */
  final case class Kill(appOrPackage: String, resolver: Resolver) extends AlterApplication {
    import Root._

    def execute()(implicit ctx: Context): Unit =
      run(s"am force-stop ${resolver(appOrPackage, ctx.getPackageManager)}")

    // Grants itself the permission needed by `am force-stop` before first use.
    override def prepare()(implicit ctx: Context): Unit =
      run(grantPermission(ctx, "android.permission.FORCE_STOP_PACKAGES"))
  }
}
| kjanosz/RuleThemAll | app/src/main/scala/rta/model/actions/AlterApplication.scala | Scala | apache-2.0 | 1,693 |
/*
Deduction Tactics
Copyright (C) 2012-2015 Raymond Dodge
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.rayrobdod.deductionTactics
package consoleView
import com.rayrobdod.boardGame.RectangularSpace
/**
* @author Raymond Dodge
* @since 2012 Dec 20
* @version a.6.0
*/
object SpaceInfoPrinter {
  // Deliberately a def, not a val: re-reads System.out on every call so that
  // later System.setOut redirection is honored.
  private def out = System.out

  /** Prints a one-line, human-readable description of the passability class
    * of the given space to standard output. */
  def apply(space: RectangularSpace[SpaceClass]): Unit = {
    val description = space.typeOfSpace match {
      case FreePassageSpaceClass() => "Passible"
      case AllyPassageSpaceClass() => "Passible if not occupied by enemy"
      case UniPassageSpaceClass() => "Passible if not occupied"
      case ImpassibleSpaceClass() => "Impassible"
      case AttackOnlySpaceClass() => "Impassable, but attackable"
      case FlyingPassageSpaceClass() => "Passible if Flying"
      case FirePassageSpaceClass() => "Passible if Fire"
      case SlowPassageSpaceClass() => "Passible, slowly"
      case _ => "Unknown"
    }
    out.print("Type of space: ")
    out.println(description)
  }
}
| rayrobdod/deductionTactics | src/main/scala/com/rayrobdod/deductionTactics/consoleView/SpacePrinter.scala | Scala | gpl-3.0 | 1,613 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.