Dataset schema (one record per source file; records list the columns in this order):

  code        string   5 chars – 1M        full source text of the file
  repo_name   string   5 – 109 chars       GitHub repository, e.g. "owner/repo"
  path        string   6 – 208 chars       file path within the repository
  language    string   1 distinct value    always "Scala" in this split
  license     string   15 distinct values  repository license identifier
  size        int64    5 – 1M              file size in bytes
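One way to read the records that follow: each is a tuple of these six columns. A minimal Scala sketch of a row model (the class and field names are illustrative, not part of the dataset):

// Hypothetical row model for this dataset; field names mirror the columns above.
final case class CodeRow(
  code:     String, // full source text, 5 chars to 1 MB
  repoName: String, // e.g. "AlexanderRay/lagom-on-kube"
  path:     String, // file path within the repository
  language: String, // always "Scala" in this split
  license:  String, // one of 15 identifiers, e.g. "apache-2.0"
  size:     Long    // file size in bytes
)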
/*
 * Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
 *
 * based on https://github.com/lagom/lagom/blob/1.3.3/dev/service-registry/service-locator/src/main/java/com/lightbend/lagom/discovery/impl/ServiceRegistryImpl.java
 */
package me.alexray.lagom.kube.discovery.impl

import java.net.URI
import java.util.concurrent.TimeUnit
import javax.inject.{Inject, Named}

import akka.NotUsed
import akka.actor.ActorRef
import akka.pattern.ask
import akka.util.Timeout
import com.lightbend.lagom.internal.javadsl.registry.{RegisteredService, ServiceRegistry, ServiceRegistryService}
import com.lightbend.lagom.javadsl.api.ServiceCall
import com.lightbend.lagom.javadsl.api.transport.NotFound
import me.alexray.lagom.kube.discovery.KubeServiceRegistryActor
import org.pcollections.PSequence
import play.api.Logger

import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scala.language.implicitConversions

class KubeServiceRegistryImpl @Inject() (@Named(KubeServiceRegistryModule.KUBE_SERVICE_REGISTRY_ACTOR) registry: ActorRef)
  extends ServiceRegistry {

  import me.alexray.lagom.converters.ServiceCallConverter._
  import scala.concurrent.ExecutionContext.Implicits.global

  private val logger: Logger = Logger(this.getClass)

  implicit val timeout: Timeout = Timeout(Duration.create(5, TimeUnit.SECONDS))

  override def register(name: String): ServiceCall[ServiceRegistryService, NotUsed] =
    (service: ServiceRegistryService) => {
      logger.debug("register invoked, name=[" + name + "], request=[" + service + "]")
      (registry ? KubeServiceRegistryActor.Register(name, service)).map(_ => NotUsed)
    }

  override def unregister(name: String): ServiceCall[NotUsed, NotUsed] =
    (request: NotUsed) => {
      logger.debug("unregister invoked, name=[" + name + "], request=[" + request + "]")
      registry ! KubeServiceRegistryActor.Remove(name)
      Future.successful(NotUsed)
    }

  override def lookup(name: String): ServiceCall[NotUsed, URI] =
    (request: NotUsed) => {
      logger.debug("locate invoked, name=[" + name + "], request=[" + request + "]")
      (registry ? KubeServiceRegistryActor.Lookup(name)).mapTo[Option[URI]].map {
        case Some(uri) =>
          logger.debug("Location of service name=[" + name + "] is " + uri)
          uri
        case None =>
          logger.debug("Service name=[" + name + "] has not been registered")
          throw new NotFound(name)
      }
    }

  override def registeredServices(): ServiceCall[NotUsed, PSequence[RegisteredService]] =
    (request: NotUsed) =>
      (registry ? KubeServiceRegistryActor.GetRegisteredServices)
        .mapTo[KubeServiceRegistryActor.RegisteredServices]
        .map(_.services)
}
AlexanderRay/lagom-on-kube
lagomKubeServiceRegister/src/main/scala/me/alexray/lagom/kube/discovery/impl/KubeServiceRegistryImpl.scala
Scala
apache-2.0
2,675
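The class above relies on me.alexray.lagom.converters.ServiceCallConverter, whose source is not part of this record, to turn each `Request => Future[Response]` lambda into a Lagom ServiceCall. A minimal sketch of what such a converter might look like (the object and method names are assumptions; only the shape is implied by the file):

import java.util.concurrent.CompletionStage
import com.lightbend.lagom.javadsl.api.ServiceCall
import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import scala.language.implicitConversions

object ServiceCallConverter {
  // Lagom's javadsl ServiceCall.invoke returns a CompletionStage, so the Scala
  // Future produced by the ask pattern is converted with scala-java8-compat.
  implicit def liftToServiceCall[Request, Response](
      f: Request => Future[Response]): ServiceCall[Request, Response] =
    new ServiceCall[Request, Response] {
      override def invoke(request: Request): CompletionStage[Response] = f(request).toJava
    }
}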
package org.jetbrains.plugins.scala.failed.resolve

import org.jetbrains.plugins.scala.PerfCycleTests
import org.junit.experimental.categories.Category

/**
 * Created by kate on 3/25/16.
 */
@Category(Array(classOf[PerfCycleTests]))
class OverloadedResolutionTest extends FailedResolveTest("overloadedResolution") {

  def testSCL2911(): Unit = doTest()

  def testSCL7890(): Unit = doTest()
}
whorbowicz/intellij-scala
test/org/jetbrains/plugins/scala/failed/resolve/OverloadedResolutionTest.scala
Scala
apache-2.0
399
/**
 * Copyright (C) 2013 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.oxf.fr

import java.util.{List => JList}

import org.orbeon.oxf.externalcontext.ExternalContext.Request
import org.orbeon.oxf.fr.FormRunnerCommon._
import org.orbeon.oxf.fr.Names._
import org.orbeon.oxf.util.CoreUtils._
import org.orbeon.oxf.util.CoreCrossPlatformSupport
import org.orbeon.oxf.util.CoreCrossPlatformSupport.properties
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.xforms.action.XFormsAPI._
import org.orbeon.oxf.xml.SaxonUtils
import org.orbeon.saxon.om.{Item, NodeInfo}
import org.orbeon.scaxon.SimplePath._

import scala.jdk.CollectionConverters._

// NOTE: Language is currently assumed to be only the plain language part, e.g. "en", "it", "zh".
trait FormRunnerLang { // TEMP: Picked a different name or `fullOptJS` fails!

  import FormRunnerLangPrivate._

  // The client passes "*" or blank to indicate that there is no current app/form name
  def hasAppForm(app: String, form: String) = app != "*" && app.nonEmpty && form != "*" && form.nonEmpty
  def getAppForm(app: String, form: String) = hasAppForm(app, form) option AppForm(app, form)

  def currentLang        = topLevelModel(ResourcesModel).get.unsafeGetVariableAsNodeInfo("lang").stringValue
  def currentFRLang      = topLevelModel(ResourcesModel).get.unsafeGetVariableAsNodeInfo("fr-lang").stringValue
  def currentFRResources = topLevelModel(ResourcesModel).get.unsafeGetVariableAsNodeInfo("fr-fr-resources")

  //@XPathFunction
  def currentFormResources = topLevelModel(ResourcesModel).get.unsafeGetVariableAsNodeInfo("fr-form-resources")

  def formResourcesInLang(lang: String): NodeInfo =
    formResourcesInLang(topLevelModel(FormModel).get.getInstance(FormResources).documentInfo.rootElement, lang)

  def formResourcesInLang(formResourcesRootElem: NodeInfo, lang: String): NodeInfo =
    (formResourcesRootElem / *).find(_.attValue("*:lang") == lang).getOrElse(currentFormResources)

  // List of available languages for the given form
  // Empty if the form doesn't have resources
  // If all of the form's resources are filtered via property, return the first language of the form, if any.
  //@XPathFunction
  def getFormLangSelection(app: String, form: String, formLanguages: JList[String]): List[String] = {

    val appForm = getAppForm(app, form)

    val allowedFormLanguages = formLanguages.asScala.toList filter isAllowedLang(appForm)
    val defaultLanguage      = getDefaultLang(appForm)

    // Reorder to put default language first if it is allowed
    if (allowedFormLanguages contains defaultLanguage)
      defaultLanguage :: (allowedFormLanguages filterNot (_ == defaultLanguage))
    else
      allowedFormLanguages
  }

  // Find the best match for the current form language
  // Can be null (empty sequence) if there are no resources (or no allowed resources) in the form
  //@XPathFunction
  def selectFormLang(app: String, form: String, requestedLang: String, formLangs: JList[String]): String = {

    val appForm = getAppForm(app, form)

    val availableFormLangs  = getFormLangSelection(app, form, formLangs)
    val actualRequestedLang = findRequestedLang(appForm, requestedLang) filter isAllowedLang(appForm)

    selectLangUseDefault(appForm, actualRequestedLang, availableFormLangs).orNull
  }

  // Get the Form Runner language
  // If possible, try to match the form language, otherwise pick the default or first available language
  //@XPathFunction
  def selectFormRunnerLang(app: String, form: String, requestedLang: String, formRunnerLangs: JList[String]): String = {

    val appForm = getAppForm(app, form)

    val actualRequestedLang = findRequestedLang(appForm, requestedLang) filter isAllowedLang(appForm)

    selectLangUseDefault(appForm, actualRequestedLang, formRunnerLangs.asScala.toList).orNull
  }

  // Get the default language for the given app/form
  // If none is configured, return the global default "en"
  // Public for unit tests
  def getDefaultLang(appForm: Option[AppForm]): String = {
    val suffix = appForm.toList flatMap (_.toList)
    properties.getNonBlankString("oxf.fr.default-language" :: suffix mkString ".") map cleanLanguage getOrElse "en"
  }

  // Return a predicate telling whether a language is allowed based on properties. If app/form are specified, then the
  // result applies to that app/form, otherwise it is valid globally for Form Runner.
  // Public for unit tests
  def isAllowedLang(appForm: Option[AppForm]): String => Boolean = {
    val suffix = appForm.toList flatMap (_.toList)
    val set    = stringOptionToSet(properties.getNonBlankString("oxf.fr.available-languages" :: suffix mkString ".")) map cleanLanguage
    // If none specified via property or property contains a wildcard, all languages are considered available
    if (set.isEmpty || set("*")) _ => true else set
  }

  // The requested language, trying a few things in order (given parameter, request, session, default)
  // Public for unit tests
  def findRequestedLang(appForm: Option[AppForm], requestedLang: String): Option[String] = {

    val request = CoreCrossPlatformSupport.externalContext.getRequest

    def fromHeader  = request.getFirstHeader(frc.LiferayLanguageHeader) map cleanLanguage
    def fromRequest = request.getFirstParamAsString(frc.LanguageParam) map cleanLanguage
    def fromSession = stringFromSession(request, frc.LanguageParam)

    requestedLang.trimAllToOpt orElse
      fromHeader                orElse
      fromRequest               orElse
      fromSession               orElse
      Some(getDefaultLang(appForm))
  }

  // Whether there is a Saxon XPath numberer for the given language
  //@XPathFunction
  def hasXPathNumberer(lang: String): Boolean =
    SaxonUtils.hasXPathNumberer(lang)

  private object FormRunnerLangPrivate {

    private val OldLocaleRe = "([a-zA-Z]{2,3})(?:_.*)?".r

    // We support incoming languages of the form `en_US` (not in the IETF BCP 47 format) for backward compatibility reasons
    // and because for historical reasons Java's `Locale.toString` produces that kind of strings containing an underscore.
    //
    // The proxy portlet passes the result of `LanguageUtil.getLanguageId`, which is in `Locale.toString` format. Since
    // Liferay 6.2, there is a `LanguageUtil.getBCP47LanguageId` method which we should use if possible.
    //
    // The only language codes currently in use in `languages.xml` which have an associated country are `zh_CN` and `zh_TW`.
    // So we explicitly map those to `zh-Hans` and `zh-Hant` even though Java 1.7 doesn't do this:
    //
    //     Locale.CHINA.getScript == ""
    //
    // References:
    //
    // - https://github.com/orbeon/orbeon-forms/issues/2688
    // - https://github.com/orbeon/orbeon-forms/issues/2700
    // - https://docs.oracle.com/javase/7/docs/api/java/util/Locale.html
    // - https://docs.liferay.com/portal/6.1/javadocs/com/liferay/portal/kernel/language/LanguageUtil.html#getLanguageId(javax.servlet.http.HttpServletRequest)
    // - https://docs.liferay.com/portal/6.2/javadocs/com/liferay/portal/kernel/language/LanguageUtil.html#getBCP47LanguageId(javax.servlet.http.HttpServletRequest)
    //
    def cleanLanguage(lang: String): String =
      lang.trimAllToEmpty match {
        case "zh_CN"              => "zh-Hans" // match the raw string: the regex group below can never capture an underscore
        case "zh_TW"              => "zh-Hant"
        case OldLocaleRe(oldLang) => oldLang
        case newLang              => newLang
      }

    def selectLangUseDefault(
      appForm        : Option[AppForm],
      requestedLang  : Option[String],
      availableLangs : List[String]
    ): Option[String] = {

      def matchingLanguage = (availableLangs intersect requestedLang.toList).headOption
      def defaultLanguage  = (availableLangs intersect List(getDefaultLang(appForm))).headOption
      def firstLanguage    = availableLangs.headOption

      matchingLanguage orElse defaultLanguage orElse firstLanguage
    }

    def stringFromSession(request: Request, name: String) =
      request.sessionOpt flatMap (_.getAttribute(name)) map {
        case item: Item => item.getStringValue
        case other      => other.toString
      }
  }
}

object FormRunnerLang extends FormRunnerLang
orbeon/orbeon-forms
form-runner/shared/src/main/scala/org/orbeon/oxf/fr/FormRunnerLang.scala
Scala
lgpl-2.1
8,804
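The cleanLanguage rule above is easy to check in isolation. A standalone sketch (trimAllToEmpty replaced by plain trim; everything else mirrors the code):

// Runnable outside Orbeon: demonstrates the locale-cleaning rule.
object CleanLanguageDemo extends App {
  private val OldLocaleRe = "([a-zA-Z]{2,3})(?:_.*)?".r

  def cleanLanguage(lang: String): String =
    lang.trim match {
      case "zh_CN"              => "zh-Hans" // explicit script mapping
      case "zh_TW"              => "zh-Hant"
      case OldLocaleRe(oldLang) => oldLang   // "en_US" -> "en"
      case newLang              => newLang   // already-clean values pass through
    }

  assert(cleanLanguage("en_US") == "en")
  assert(cleanLanguage("zh_CN") == "zh-Hans")
  assert(cleanLanguage("it")    == "it")
}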
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.planner.plan.batch.sql.agg

import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{TableException, Types}
import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.{VarSum1AggFunction, VarSum2AggFunction}
import org.apache.flink.table.planner.utils.{BatchTableTestUtil, TableTestBase}
import org.apache.flink.table.runtime.typeutils.DecimalTypeInfo

import org.junit.Test

abstract class AggregateTestBase extends TableTestBase {

  protected val util: BatchTableTestUtil = batchTestUtil()

  util.addTableSource("MyTable",
    Array[TypeInformation[_]](
      Types.BYTE, Types.SHORT, Types.INT, Types.LONG, Types.FLOAT, Types.DOUBLE,
      Types.BOOLEAN, Types.STRING, Types.LOCAL_DATE, Types.LOCAL_TIME, Types.LOCAL_DATE_TIME,
      DecimalTypeInfo.of(30, 20), DecimalTypeInfo.of(10, 5)),
    Array("byte", "short", "int", "long", "float", "double",
      "boolean", "string", "date", "time", "timestamp", "decimal3020", "decimal105"))

  util.addTableSource[(Int, Long, String)]("MyTable1", 'a, 'b, 'c)

  @Test
  def testAvg(): Unit = {
    util.verifyPlanWithType(
      """
        |SELECT AVG(`byte`),
        |       AVG(`short`),
        |       AVG(`int`),
        |       AVG(`long`),
        |       AVG(`float`),
        |       AVG(`double`),
        |       AVG(`decimal3020`),
        |       AVG(`decimal105`)
        |FROM MyTable
      """.stripMargin)
  }

  @Test
  def testSum(): Unit = {
    util.verifyPlanWithType(
      """
        |SELECT SUM(`byte`),
        |       SUM(`short`),
        |       SUM(`int`),
        |       SUM(`long`),
        |       SUM(`float`),
        |       SUM(`double`),
        |       SUM(`decimal3020`),
        |       SUM(`decimal105`)
        |FROM MyTable
      """.stripMargin)
  }

  @Test
  def testCount(): Unit = {
    util.verifyPlanWithType(
      """
        |SELECT COUNT(`byte`),
        |       COUNT(`short`),
        |       COUNT(`int`),
        |       COUNT(`long`),
        |       COUNT(`float`),
        |       COUNT(`double`),
        |       COUNT(`decimal3020`),
        |       COUNT(`decimal105`),
        |       COUNT(`boolean`),
        |       COUNT(`date`),
        |       COUNT(`time`),
        |       COUNT(`timestamp`),
        |       COUNT(`string`)
        |FROM MyTable
      """.stripMargin)
  }

  @Test
  def testCountStar(): Unit = {
    util.verifyPlanWithType("SELECT COUNT(*) FROM MyTable")
  }

  @Test
  def testCannotCountOnMultiFields(): Unit = {
    val sql = "SELECT b, COUNT(a, c) FROM MyTable1 GROUP BY b"
    thrown.expect(classOf[TableException])
    thrown.expectMessage("We now only support the count of one field")
    util.verifyPlan(sql)
  }

  @Test
  def testMinWithFixLengthType(): Unit = {
    util.verifyPlanWithType(
      """
        |SELECT MIN(`byte`),
        |       MIN(`short`),
        |       MIN(`int`),
        |       MIN(`long`),
        |       MIN(`float`),
        |       MIN(`double`),
        |       MIN(`decimal3020`),
        |       MIN(`decimal105`),
        |       MIN(`boolean`),
        |       MIN(`date`),
        |       MIN(`time`),
        |       MIN(`timestamp`)
        |FROM MyTable
      """.stripMargin)
  }

  @Test
  def testMinWithVariableLengthType(): Unit = {
    util.verifyPlanWithType("SELECT MIN(`string`) FROM MyTable")
  }

  @Test
  def testMaxWithFixLengthType(): Unit = {
    util.verifyPlanWithType(
      """
        |SELECT MAX(`byte`),
        |       MAX(`short`),
        |       MAX(`int`),
        |       MAX(`long`),
        |       MAX(`float`),
        |       MAX(`double`),
        |       MAX(`decimal3020`),
        |       MAX(`decimal105`),
        |       MAX(`boolean`),
        |       MAX(`date`),
        |       MAX(`time`),
        |       MAX(`timestamp`)
        |FROM MyTable
      """.stripMargin)
  }

  @Test
  def testMaxWithVariableLengthType(): Unit = {
    util.verifyPlanWithType("SELECT MAX(`string`) FROM MyTable")
  }

  @Test
  def testAggregateWithoutFunction(): Unit = {
    util.verifyPlan("SELECT a, b FROM MyTable1 GROUP BY a, b")
  }

  @Test
  def testAggregateWithoutGroupBy(): Unit = {
    util.verifyPlan("SELECT AVG(a), SUM(b), COUNT(c) FROM MyTable1")
  }

  @Test
  def testAggregateWithFilter(): Unit = {
    util.verifyPlan("SELECT AVG(a), SUM(b), COUNT(c) FROM MyTable1 WHERE a = 1")
  }

  @Test
  def testAggregateWithFilterOnNestedFields(): Unit = {
    util.addTableSource[(Int, Long, (Int, Long))]("MyTable2", 'a, 'b, 'c)
    util.verifyPlan("SELECT AVG(a), SUM(b), COUNT(c), SUM(c._1) FROM MyTable2 WHERE a = 1")
  }

  @Test
  def testGroupAggregate(): Unit = {
    util.verifyPlan("SELECT a, SUM(b), COUNT(c) FROM MyTable1 GROUP BY a")
  }

  @Test
  def testGroupAggregateWithFilter(): Unit = {
    util.verifyPlan("SELECT a, SUM(b), count(c) FROM MyTable1 WHERE a = 1 GROUP BY a")
  }

  @Test
  def testAggNotSupportMerge(): Unit = {
    util.addFunction("var_sum", new VarSum2AggFunction)
    util.verifyPlan("SELECT b, var_sum(a) FROM MyTable1 GROUP BY b")
  }

  @Test
  def testPojoAccumulator(): Unit = {
    util.addFunction("var_sum", new VarSum1AggFunction)
    util.verifyPlan("SELECT b, var_sum(a) FROM MyTable1 GROUP BY b")
  }

  @Test
  def testGroupByWithConstantKey(): Unit = {
    val sql =
      """
        |SELECT a, MAX(b), c FROM (SELECT a, 'test' AS c, b FROM MyTable1) t GROUP BY a, c
      """.stripMargin
    util.verifyPlan(sql)
  }

  // TODO supports group sets
}
bowenli86/flink
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/agg/AggregateTestBase.scala
Scala
apache-2.0
6,417
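testAggNotSupportMerge above exercises VarSum2AggFunction, a user-defined aggregate without a merge method, so the planner cannot split it into local/global phases. A minimal sketch of what such a non-mergeable function might look like (class and field names are invented; the base class and required methods follow the old Table API contract):

import org.apache.flink.table.functions.AggregateFunction

// Accumulator as a simple mutable class (POJO-style, zero-arg constructor).
class SumAcc {
  var sum: Long = 0L
}

class NonMergeableSum extends AggregateFunction[Long, SumAcc] {
  override def createAccumulator(): SumAcc = new SumAcc
  override def getValue(acc: SumAcc): Long = acc.sum

  // Contract method the code generator looks up by name.
  def accumulate(acc: SumAcc, value: Long): Unit = acc.sum += value

  // No merge(acc, it) defined: plans using this function cannot apply
  // two-phase (partial/final) aggregation.
}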
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.datasources

import java.util.{Locale, ServiceConfigurationError, ServiceLoader}

import scala.collection.JavaConverters._
import scala.language.{existentials, implicitConversions}
import scala.util.{Failure, Success, Try}

import org.apache.hadoop.fs.Path

import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogUtils}
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.command.DataWritingCommand
import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
import org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider
import org.apache.spark.sql.execution.datasources.json.JsonFileFormat
import org.apache.spark.sql.execution.datasources.orc.OrcFileFormat
import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.execution.streaming.sources.{RateStreamProvider, TextSocketSourceProvider}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources._
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.{CalendarIntervalType, StructField, StructType}
import org.apache.spark.sql.util.SchemaUtils
import org.apache.spark.util.Utils

/**
 * The main class responsible for representing a pluggable Data Source in Spark SQL. In addition to
 * acting as the canonical set of parameters that can describe a Data Source, this class is used to
 * resolve a description to a concrete implementation that can be used in a query plan
 * (either batch or streaming) or to write out data using an external library.
 *
 * From an end user's perspective a DataSource description can be created explicitly using
 * [[org.apache.spark.sql.DataFrameReader]] or CREATE TABLE USING DDL.  Additionally, this class is
 * used when resolving a description from a metastore to a concrete implementation.
 *
 * Many of the arguments to this class are optional, though depending on the specific API being used
 * these optional arguments might be filled in during resolution using either inference or external
 * metadata.  For example, when reading a partitioned table from a file system, partition columns
 * will be inferred from the directory layout even if they are not specified.
 *
 * @param paths A list of file system paths that hold data. These will be globbed before and
 *              qualified. This option only works when reading from a [[FileFormat]].
 * @param userSpecifiedSchema An optional specification of the schema of the data. When present
 *                            we skip attempting to infer the schema.
 * @param partitionColumns A list of column names that the relation is partitioned by. This list is
 *                         generally empty during the read path, unless this DataSource is managed
 *                         by Hive. In these cases, during `resolveRelation`, we will call
 *                         `getOrInferFileFormatSchema` for file based DataSources to infer the
 *                         partitioning. In other cases, if this list is empty, then this table
 *                         is unpartitioned.
 * @param bucketSpec An optional specification for bucketing (hash-partitioning) of the data.
 * @param catalogTable Optional catalog table reference that can be used to push down operations
 *                     over the datasource to the catalog service.
 */
case class DataSource(
    sparkSession: SparkSession,
    className: String,
    paths: Seq[String] = Nil,
    userSpecifiedSchema: Option[StructType] = None,
    partitionColumns: Seq[String] = Seq.empty,
    bucketSpec: Option[BucketSpec] = None,
    options: Map[String, String] = Map.empty,
    catalogTable: Option[CatalogTable] = None) extends Logging {

  case class SourceInfo(name: String, schema: StructType, partitionColumns: Seq[String])

  lazy val providingClass: Class[_] =
    DataSource.lookupDataSource(className, sparkSession.sessionState.conf)
  lazy val sourceInfo: SourceInfo = sourceSchema()
  private val caseInsensitiveOptions = CaseInsensitiveMap(options)
  private val equality = sparkSession.sessionState.conf.resolver

  bucketSpec.map { bucket =>
    SchemaUtils.checkColumnNameDuplication(
      bucket.bucketColumnNames, "in the bucket definition", equality)
    SchemaUtils.checkColumnNameDuplication(
      bucket.sortColumnNames, "in the sort definition", equality)
  }

  /**
   * Get the schema of the given FileFormat, if provided by `userSpecifiedSchema`, or try to infer
   * it. In the read path, only managed tables by Hive provide the partition columns properly when
   * initializing this class. All other file based data sources will try to infer the partitioning,
   * and then cast the inferred types to user specified dataTypes if the partition columns exist
   * inside `userSpecifiedSchema`, otherwise we can hit data corruption bugs like SPARK-18510.
   * This method will try to skip file scanning whether `userSpecifiedSchema` and
   * `partitionColumns` are provided. Here are some code paths that use this method:
   *   1. `spark.read` (no schema): Most amount of work. Infer both schema and partitioning columns
   *   2. `spark.read.schema(userSpecifiedSchema)`: Parse partitioning columns, cast them to the
   *      dataTypes provided in `userSpecifiedSchema` if they exist or fallback to inferred
   *      dataType if they don't.
   *   3. `spark.readStream.schema(userSpecifiedSchema)`: For streaming use cases, users have to
   *      provide the schema. Here, we also perform partition inference like 2, and try to use
   *      dataTypes in `userSpecifiedSchema`. All subsequent triggers for this stream will re-use
   *      this information, therefore calls to this method should be very cheap, i.e. there won't
   *      be any further inference in any triggers.
   *
   * @param format the file format object for this DataSource
   * @param getFileIndex [[InMemoryFileIndex]] for getting partition schema and file list
   * @return A pair of the data schema (excluding partition columns) and the schema of the partition
   *         columns.
   */
  private def getOrInferFileFormatSchema(
      format: FileFormat,
      getFileIndex: () => InMemoryFileIndex): (StructType, StructType) = {
    lazy val tempFileIndex = getFileIndex()

    val partitionSchema = if (partitionColumns.isEmpty) {
      // Try to infer partitioning, because no DataSource in the read path provides the partitioning
      // columns properly unless it is a Hive DataSource
      tempFileIndex.partitionSchema
    } else {
      // maintain old behavior before SPARK-18510. If userSpecifiedSchema is empty used inferred
      // partitioning
      if (userSpecifiedSchema.isEmpty) {
        val inferredPartitions = tempFileIndex.partitionSchema
        inferredPartitions
      } else {
        val partitionFields = partitionColumns.map { partitionColumn =>
          userSpecifiedSchema.flatMap(_.find(c => equality(c.name, partitionColumn))).orElse {
            val inferredPartitions = tempFileIndex.partitionSchema
            val inferredOpt = inferredPartitions.find(p => equality(p.name, partitionColumn))
            if (inferredOpt.isDefined) {
              logDebug(
                s"""Type of partition column: $partitionColumn not found in specified schema
                   |for $format.
                   |User Specified Schema
                   |=====================
                   |${userSpecifiedSchema.orNull}
                   |
                   |Falling back to inferred dataType if it exists.
                 """.stripMargin)
            }
            inferredOpt
          }.getOrElse {
            throw new AnalysisException(s"Failed to resolve the schema for $format for " +
              s"the partition column: $partitionColumn. It must be specified manually.")
          }
        }
        StructType(partitionFields)
      }
    }

    val dataSchema = userSpecifiedSchema.map { schema =>
      StructType(schema.filterNot(f => partitionSchema.exists(p => equality(p.name, f.name))))
    }.orElse {
      format.inferSchema(
        sparkSession,
        caseInsensitiveOptions,
        tempFileIndex.allFiles())
    }.getOrElse {
      throw new AnalysisException(
        s"Unable to infer schema for $format. It must be specified manually.")
    }

    // We just print a warning message if the data schema and partition schema have the duplicate
    // columns. This is because we allow users to do so in the previous Spark releases and
    // we have the existing tests for the cases (e.g., `ParquetHadoopFsRelationSuite`).
    // See SPARK-18108 and SPARK-21144 for related discussions.
    try {
      SchemaUtils.checkColumnNameDuplication(
        (dataSchema ++ partitionSchema).map(_.name),
        "in the data schema and the partition schema",
        equality)
    } catch {
      case e: AnalysisException => logWarning(e.getMessage)
    }

    (dataSchema, partitionSchema)
  }

  /** Returns the name and schema of the source that can be used to continually read data. */
  private def sourceSchema(): SourceInfo = {
    providingClass.getConstructor().newInstance() match {
      case s: StreamSourceProvider =>
        val (name, schema) = s.sourceSchema(
          sparkSession.sqlContext, userSpecifiedSchema, className, caseInsensitiveOptions)
        SourceInfo(name, schema, Nil)

      case format: FileFormat =>
        val path = caseInsensitiveOptions.getOrElse("path", {
          throw new IllegalArgumentException("'path' is not specified")
        })

        // Check whether the path exists if it is not a glob pattern.
        // For glob pattern, we do not check it because the glob pattern might only make sense
        // once the streaming job starts and some upstream source starts dropping data.
        val hdfsPath = new Path(path)
        if (!SparkHadoopUtil.get.isGlobPath(hdfsPath)) {
          val fs = hdfsPath.getFileSystem(sparkSession.sessionState.newHadoopConf())
          if (!fs.exists(hdfsPath)) {
            throw new AnalysisException(s"Path does not exist: $path")
          }
        }

        val isSchemaInferenceEnabled = sparkSession.sessionState.conf.streamingSchemaInference
        val isTextSource = providingClass == classOf[text.TextFileFormat]
        // If the schema inference is disabled, only text sources require schema to be specified
        if (!isSchemaInferenceEnabled && !isTextSource && userSpecifiedSchema.isEmpty) {
          throw new IllegalArgumentException(
            "Schema must be specified when creating a streaming source DataFrame. " +
              "If some files already exist in the directory, then depending on the file format " +
              "you may be able to create a static DataFrame on that directory with " +
              "'spark.read.load(directory)' and infer schema from it.")
        }

        val (dataSchema, partitionSchema) = getOrInferFileFormatSchema(format, () => {
          // The operations below are expensive therefore try not to do them if we don't need to,
          // e.g., in streaming mode, we have already inferred and registered partition columns,
          // we will never have to materialize the lazy val below
          val globbedPaths =
            checkAndGlobPathIfNecessary(checkEmptyGlobPath = false, checkFilesExist = false)
          createInMemoryFileIndex(globbedPaths)
        })
        SourceInfo(
          s"FileSource[$path]",
          StructType(dataSchema ++ partitionSchema),
          partitionSchema.fieldNames)

      case _ =>
        throw new UnsupportedOperationException(
          s"Data source $className does not support streamed reading")
    }
  }

  /** Returns a source that can be used to continually read data. */
  def createSource(metadataPath: String): Source = {
    providingClass.getConstructor().newInstance() match {
      case s: StreamSourceProvider =>
        s.createSource(
          sparkSession.sqlContext,
          metadataPath,
          userSpecifiedSchema,
          className,
          caseInsensitiveOptions)

      case format: FileFormat =>
        val path = caseInsensitiveOptions.getOrElse("path", {
          throw new IllegalArgumentException("'path' is not specified")
        })
        new FileStreamSource(
          sparkSession = sparkSession,
          path = path,
          fileFormatClassName = className,
          schema = sourceInfo.schema,
          partitionColumns = sourceInfo.partitionColumns,
          metadataPath = metadataPath,
          options = caseInsensitiveOptions)

      case _ =>
        throw new UnsupportedOperationException(
          s"Data source $className does not support streamed reading")
    }
  }

  /** Returns a sink that can be used to continually write data. */
  def createSink(outputMode: OutputMode): Sink = {
    providingClass.getConstructor().newInstance() match {
      case s: StreamSinkProvider =>
        s.createSink(sparkSession.sqlContext, caseInsensitiveOptions, partitionColumns, outputMode)

      case fileFormat: FileFormat =>
        val path = caseInsensitiveOptions.getOrElse("path", {
          throw new IllegalArgumentException("'path' is not specified")
        })
        if (outputMode != OutputMode.Append) {
          throw new AnalysisException(
            s"Data source $className does not support $outputMode output mode")
        }
        new FileStreamSink(sparkSession, path, fileFormat, partitionColumns, caseInsensitiveOptions)

      case _ =>
        throw new UnsupportedOperationException(
          s"Data source $className does not support streamed writing")
    }
  }

  /**
   * Create a resolved [[BaseRelation]] that can be used to read data from or write data into this
   * [[DataSource]]
   *
   * @param checkFilesExist Whether to confirm that the files exist when generating the
   *                        non-streaming file based datasource. StructuredStreaming jobs already
   *                        list file existence, and when generating incremental jobs, the batch
   *                        is considered as a non-streaming file based data source. Since we know
   *                        that files already exist, we don't need to check them again.
   */
  def resolveRelation(checkFilesExist: Boolean = true): BaseRelation = {
    val relation = (providingClass.getConstructor().newInstance(), userSpecifiedSchema) match {
      // TODO: Throw when too much is given.
      case (dataSource: SchemaRelationProvider, Some(schema)) =>
        dataSource.createRelation(sparkSession.sqlContext, caseInsensitiveOptions, schema)
      case (dataSource: RelationProvider, None) =>
        dataSource.createRelation(sparkSession.sqlContext, caseInsensitiveOptions)
      case (_: SchemaRelationProvider, None) =>
        throw new AnalysisException(s"A schema needs to be specified when using $className.")
      case (dataSource: RelationProvider, Some(schema)) =>
        val baseRelation =
          dataSource.createRelation(sparkSession.sqlContext, caseInsensitiveOptions)
        if (baseRelation.schema != schema) {
          throw new AnalysisException(s"$className does not allow user-specified schemas.")
        }
        baseRelation

      // We are reading from the results of a streaming query. Load files from the metadata log
      // instead of listing them using HDFS APIs.
      case (format: FileFormat, _)
          if FileStreamSink.hasMetadata(
            caseInsensitiveOptions.get("path").toSeq ++ paths,
            sparkSession.sessionState.newHadoopConf()) =>
        val basePath = new Path((caseInsensitiveOptions.get("path").toSeq ++ paths).head)
        val fileCatalog = new MetadataLogFileIndex(sparkSession, basePath, userSpecifiedSchema)
        val dataSchema = userSpecifiedSchema.orElse {
          format.inferSchema(
            sparkSession,
            caseInsensitiveOptions,
            fileCatalog.allFiles())
        }.getOrElse {
          throw new AnalysisException(
            s"Unable to infer schema for $format at ${fileCatalog.allFiles().mkString(",")}. " +
              "It must be specified manually")
        }

        HadoopFsRelation(
          fileCatalog,
          partitionSchema = fileCatalog.partitionSchema,
          dataSchema = dataSchema,
          bucketSpec = None,
          format,
          caseInsensitiveOptions)(sparkSession)

      // This is a non-streaming file based datasource.
      case (format: FileFormat, _) =>
        val globbedPaths =
          checkAndGlobPathIfNecessary(checkEmptyGlobPath = true, checkFilesExist = checkFilesExist)
        val useCatalogFileIndex = sparkSession.sqlContext.conf.manageFilesourcePartitions &&
          catalogTable.isDefined && catalogTable.get.tracksPartitionsInCatalog &&
          catalogTable.get.partitionColumnNames.nonEmpty
        val (fileCatalog, dataSchema, partitionSchema) = if (useCatalogFileIndex) {
          val defaultTableSize = sparkSession.sessionState.conf.defaultSizeInBytes
          val index = new CatalogFileIndex(
            sparkSession,
            catalogTable.get,
            catalogTable.get.stats.map(_.sizeInBytes.toLong).getOrElse(defaultTableSize))
          (index, catalogTable.get.dataSchema, catalogTable.get.partitionSchema)
        } else {
          val index = createInMemoryFileIndex(globbedPaths)
          val (resultDataSchema, resultPartitionSchema) =
            getOrInferFileFormatSchema(format, () => index)
          (index, resultDataSchema, resultPartitionSchema)
        }

        HadoopFsRelation(
          fileCatalog,
          partitionSchema = partitionSchema,
          dataSchema = dataSchema.asNullable,
          bucketSpec = bucketSpec,
          format,
          caseInsensitiveOptions)(sparkSession)

      case _ =>
        throw new AnalysisException(
          s"$className is not a valid Spark SQL Data Source.")
    }

    relation match {
      case hs: HadoopFsRelation =>
        SchemaUtils.checkColumnNameDuplication(
          hs.dataSchema.map(_.name),
          "in the data schema",
          equality)
        SchemaUtils.checkColumnNameDuplication(
          hs.partitionSchema.map(_.name),
          "in the partition schema",
          equality)
        DataSourceUtils.verifyReadSchema(hs.fileFormat, hs.dataSchema)
      case _ =>
        SchemaUtils.checkColumnNameDuplication(
          relation.schema.map(_.name),
          "in the data schema",
          equality)
    }

    relation
  }

  /**
   * Creates a command node to write the given [[LogicalPlan]] out to the given [[FileFormat]].
   * The returned command is unresolved and need to be analyzed.
   */
  private def planForWritingFileFormat(
      format: FileFormat, mode: SaveMode, data: LogicalPlan): InsertIntoHadoopFsRelationCommand = {
    // Don't glob path for the write path.  The contracts here are:
    //  1. Only one output path can be specified on the write path;
    //  2. Output path must be a legal HDFS style file system path;
    //  3. It's OK that the output path doesn't exist yet;
    val allPaths = paths ++ caseInsensitiveOptions.get("path")
    val outputPath = if (allPaths.length == 1) {
      val path = new Path(allPaths.head)
      val fs = path.getFileSystem(sparkSession.sessionState.newHadoopConf())
      path.makeQualified(fs.getUri, fs.getWorkingDirectory)
    } else {
      throw new IllegalArgumentException("Expected exactly one path to be specified, but " +
        s"got: ${allPaths.mkString(", ")}")
    }

    val caseSensitive = sparkSession.sessionState.conf.caseSensitiveAnalysis
    PartitioningUtils.validatePartitionColumn(data.schema, partitionColumns, caseSensitive)

    val fileIndex = catalogTable.map(_.identifier).map { tableIdent =>
      sparkSession.table(tableIdent).queryExecution.analyzed.collect {
        case LogicalRelation(t: HadoopFsRelation, _, _, _) => t.location
      }.head
    }
    // For partitioned relation r, r.schema's column ordering can be different from the column
    // ordering of data.logicalPlan (partition columns are all moved after data column).  This
    // will be adjusted within InsertIntoHadoopFsRelation.
    InsertIntoHadoopFsRelationCommand(
      outputPath = outputPath,
      staticPartitions = Map.empty,
      ifPartitionNotExists = false,
      partitionColumns = partitionColumns.map(UnresolvedAttribute.quoted),
      bucketSpec = bucketSpec,
      fileFormat = format,
      options = options,
      query = data,
      mode = mode,
      catalogTable = catalogTable,
      fileIndex = fileIndex,
      outputColumnNames = data.output.map(_.name))
  }

  /**
   * Writes the given [[LogicalPlan]] out to this [[DataSource]] and returns a [[BaseRelation]] for
   * the following reading.
   *
   * @param mode The save mode for this writing.
   * @param data The input query plan that produces the data to be written. Note that this plan
   *             is analyzed and optimized.
   * @param outputColumnNames The original output column names of the input query plan. The
   *                          optimizer may not preserve the output column's names' case, so we need
   *                          this parameter instead of `data.output`.
   * @param physicalPlan The physical plan of the input query plan. We should run the writing
   *                     command with this physical plan instead of creating a new physical plan,
   *                     so that the metrics can be correctly linked to the given physical plan and
   *                     shown in the web UI.
   */
  def writeAndRead(
      mode: SaveMode,
      data: LogicalPlan,
      outputColumnNames: Seq[String],
      physicalPlan: SparkPlan): BaseRelation = {
    val outputColumns = DataWritingCommand.logicalPlanOutputWithNames(data, outputColumnNames)
    if (outputColumns.map(_.dataType).exists(_.isInstanceOf[CalendarIntervalType])) {
      throw new AnalysisException("Cannot save interval data type into external storage.")
    }

    providingClass.getConstructor().newInstance() match {
      case dataSource: CreatableRelationProvider =>
        dataSource.createRelation(
          sparkSession.sqlContext, mode, caseInsensitiveOptions, Dataset.ofRows(sparkSession, data))
      case format: FileFormat =>
        val cmd = planForWritingFileFormat(format, mode, data)
        val resolvedPartCols = cmd.partitionColumns.map { col =>
          // The partition columns created in `planForWritingFileFormat` should always be
          // `UnresolvedAttribute` with a single name part.
          assert(col.isInstanceOf[UnresolvedAttribute])
          val unresolved = col.asInstanceOf[UnresolvedAttribute]
          assert(unresolved.nameParts.length == 1)
          val name = unresolved.nameParts.head
          outputColumns.find(a => equality(a.name, name)).getOrElse {
            throw new AnalysisException(
              s"Unable to resolve $name given [${data.output.map(_.name).mkString(", ")}]")
          }
        }
        val resolved = cmd.copy(
          partitionColumns = resolvedPartCols,
          outputColumnNames = outputColumnNames)
        resolved.run(sparkSession, physicalPlan)
        // Replace the schema with that of the DataFrame we just wrote out to avoid re-inferring
        copy(userSpecifiedSchema = Some(outputColumns.toStructType.asNullable)).resolveRelation()
      case _ =>
        sys.error(s"${providingClass.getCanonicalName} does not allow create table as select.")
    }
  }

  /**
   * Returns a logical plan to write the given [[LogicalPlan]] out to this [[DataSource]].
   */
  def planForWriting(mode: SaveMode, data: LogicalPlan): LogicalPlan = {
    if (data.schema.map(_.dataType).exists(_.isInstanceOf[CalendarIntervalType])) {
      throw new AnalysisException("Cannot save interval data type into external storage.")
    }

    providingClass.getConstructor().newInstance() match {
      case dataSource: CreatableRelationProvider =>
        SaveIntoDataSourceCommand(data, dataSource, caseInsensitiveOptions, mode)
      case format: FileFormat =>
        DataSource.validateSchema(data.schema)
        planForWritingFileFormat(format, mode, data)
      case _ =>
        sys.error(s"${providingClass.getCanonicalName} does not allow create table as select.")
    }
  }

  /** Returns an [[InMemoryFileIndex]] that can be used to get partition schema and file list. */
  private def createInMemoryFileIndex(globbedPaths: Seq[Path]): InMemoryFileIndex = {
    val fileStatusCache = FileStatusCache.getOrCreate(sparkSession)
    new InMemoryFileIndex(
      sparkSession, globbedPaths, options, userSpecifiedSchema, fileStatusCache)
  }

  /**
   * Checks and returns files in all the paths.
   */
  private def checkAndGlobPathIfNecessary(
      checkEmptyGlobPath: Boolean,
      checkFilesExist: Boolean): Seq[Path] = {
    val allPaths = caseInsensitiveOptions.get("path") ++ paths
    val hadoopConf = sparkSession.sessionState.newHadoopConf()
    val allGlobPath = allPaths.flatMap { path =>
      val hdfsPath = new Path(path)
      val fs = hdfsPath.getFileSystem(hadoopConf)
      val qualified = hdfsPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
      val globPath = SparkHadoopUtil.get.globPathIfNecessary(fs, qualified)

      if (checkEmptyGlobPath && globPath.isEmpty) {
        throw new AnalysisException(s"Path does not exist: $qualified")
      }

      // Sufficient to check head of the globPath seq for non-glob scenario
      // Don't need to check once again if files exist in streaming mode
      if (checkFilesExist && !fs.exists(globPath.head)) {
        throw new AnalysisException(s"Path does not exist: ${globPath.head}")
      }
      globPath
    }.toSeq

    if (checkFilesExist) {
      val (filteredOut, filteredIn) = allGlobPath.partition { path =>
        InMemoryFileIndex.shouldFilterOut(path.getName)
      }
      if (filteredOut.nonEmpty) {
        if (filteredIn.isEmpty) {
          throw new AnalysisException(
            s"All paths were ignored:\n${filteredOut.mkString("\n  ")}")
        } else {
          logDebug(
            s"Some paths were ignored:\n${filteredOut.mkString("\n  ")}")
        }
      }
    }

    allGlobPath
  }
}

object DataSource extends Logging {

  /** A map to maintain backward compatibility in case we move data sources around. */
  private val backwardCompatibilityMap: Map[String, String] = {
    val jdbc = classOf[JdbcRelationProvider].getCanonicalName
    val json = classOf[JsonFileFormat].getCanonicalName
    val parquet = classOf[ParquetFileFormat].getCanonicalName
    val csv = classOf[CSVFileFormat].getCanonicalName
    val libsvm = "org.apache.spark.ml.source.libsvm.LibSVMFileFormat"
    val orc = "org.apache.spark.sql.hive.orc.OrcFileFormat"
    val nativeOrc = classOf[OrcFileFormat].getCanonicalName
    val socket = classOf[TextSocketSourceProvider].getCanonicalName
    val rate = classOf[RateStreamProvider].getCanonicalName

    Map(
      "org.apache.spark.sql.jdbc" -> jdbc,
      "org.apache.spark.sql.jdbc.DefaultSource" -> jdbc,
      "org.apache.spark.sql.execution.datasources.jdbc.DefaultSource" -> jdbc,
      "org.apache.spark.sql.execution.datasources.jdbc" -> jdbc,
      "org.apache.spark.sql.json" -> json,
      "org.apache.spark.sql.json.DefaultSource" -> json,
      "org.apache.spark.sql.execution.datasources.json" -> json,
      "org.apache.spark.sql.execution.datasources.json.DefaultSource" -> json,
      "org.apache.spark.sql.parquet" -> parquet,
      "org.apache.spark.sql.parquet.DefaultSource" -> parquet,
      "org.apache.spark.sql.execution.datasources.parquet" -> parquet,
      "org.apache.spark.sql.execution.datasources.parquet.DefaultSource" -> parquet,
      "org.apache.spark.sql.hive.orc.DefaultSource" -> orc,
      "org.apache.spark.sql.hive.orc" -> orc,
      "org.apache.spark.sql.execution.datasources.orc.DefaultSource" -> nativeOrc,
      "org.apache.spark.sql.execution.datasources.orc" -> nativeOrc,
      "org.apache.spark.ml.source.libsvm.DefaultSource" -> libsvm,
      "org.apache.spark.ml.source.libsvm" -> libsvm,
      "com.databricks.spark.csv" -> csv,
      "org.apache.spark.sql.execution.streaming.TextSocketSourceProvider" -> socket,
      "org.apache.spark.sql.execution.streaming.RateSourceProvider" -> rate
    )
  }

  /**
   * Classes that were removed in Spark 2.0. Used to detect incompatible libraries for Spark 2.0.
   */
  private val spark2RemovedClasses = Set(
    "org.apache.spark.sql.DataFrame",
    "org.apache.spark.sql.sources.HadoopFsRelationProvider",
    "org.apache.spark.Logging")

  /** Given a provider name, look up the data source class definition. */
  def lookupDataSource(provider: String, conf: SQLConf): Class[_] = {
    val provider1 = backwardCompatibilityMap.getOrElse(provider, provider) match {
      case name if name.equalsIgnoreCase("orc") &&
          conf.getConf(SQLConf.ORC_IMPLEMENTATION) == "native" =>
        classOf[OrcFileFormat].getCanonicalName
      case name if name.equalsIgnoreCase("orc") &&
          conf.getConf(SQLConf.ORC_IMPLEMENTATION) == "hive" =>
        "org.apache.spark.sql.hive.orc.OrcFileFormat"
      case "com.databricks.spark.avro" if conf.replaceDatabricksSparkAvroEnabled =>
        "org.apache.spark.sql.avro.AvroFileFormat"
      case name => name
    }
    val provider2 = s"$provider1.DefaultSource"
    val loader = Utils.getContextOrSparkClassLoader
    val serviceLoader = ServiceLoader.load(classOf[DataSourceRegister], loader)

    try {
      serviceLoader.asScala.filter(_.shortName().equalsIgnoreCase(provider1)).toList match {
        // the provider format did not match any given registered aliases
        case Nil =>
          try {
            Try(loader.loadClass(provider1)).orElse(Try(loader.loadClass(provider2))) match {
              case Success(dataSource) =>
                // Found the data source using fully qualified path
                dataSource
              case Failure(error) =>
                if (provider1.startsWith("org.apache.spark.sql.hive.orc")) {
                  throw new AnalysisException(
                    "Hive built-in ORC data source must be used with Hive support enabled. " +
                      "Please use the native ORC data source by setting 'spark.sql.orc.impl' to " +
                      "'native'")
                } else if (provider1.toLowerCase(Locale.ROOT) == "avro" ||
                    provider1 == "com.databricks.spark.avro" ||
                    provider1 == "org.apache.spark.sql.avro") {
                  throw new AnalysisException(
                    s"Failed to find data source: $provider1. Avro is built-in but external data " +
                      "source module since Spark 2.4. Please deploy the application as per " +
                      "the deployment section of \"Apache Avro Data Source Guide\".")
                } else if (provider1.toLowerCase(Locale.ROOT) == "kafka") {
                  throw new AnalysisException(
                    s"Failed to find data source: $provider1. Please deploy the application as " +
                      "per the deployment section of " +
                      "\"Structured Streaming + Kafka Integration Guide\".")
                } else {
                  throw new ClassNotFoundException(
                    s"Failed to find data source: $provider1. Please find packages at " +
                      "http://spark.apache.org/third-party-projects.html",
                    error)
                }
            }
          } catch {
            case e: NoClassDefFoundError => // This one won't be caught by Scala NonFatal
              // NoClassDefFoundError's class name uses "/" rather than "." for packages
              val className = e.getMessage.replaceAll("/", ".")
              if (spark2RemovedClasses.contains(className)) {
                throw new ClassNotFoundException(s"$className was removed in Spark 2.0. " +
                  "Please check if your library is compatible with Spark 2.0", e)
              } else {
                throw e
              }
          }
        case head :: Nil =>
          // there is exactly one registered alias
          head.getClass
        case sources =>
          // There are multiple registered aliases for the input. If there is single datasource
          // that has "org.apache.spark" package in the prefix, we use it considering it is an
          // internal datasource within Spark.
          val sourceNames = sources.map(_.getClass.getName)
          val internalSources = sources.filter(_.getClass.getName.startsWith("org.apache.spark"))
          if (internalSources.size == 1) {
            logWarning(s"Multiple sources found for $provider1 (${sourceNames.mkString(", ")}), " +
              s"defaulting to the internal datasource (${internalSources.head.getClass.getName}).")
            internalSources.head.getClass
          } else {
            throw new AnalysisException(s"Multiple sources found for $provider1 " +
              s"(${sourceNames.mkString(", ")}), please specify the fully qualified class name.")
          }
      }
    } catch {
      case e: ServiceConfigurationError if e.getCause.isInstanceOf[NoClassDefFoundError] =>
        // NoClassDefFoundError's class name uses "/" rather than "." for packages
        val className = e.getCause.getMessage.replaceAll("/", ".")
        if (spark2RemovedClasses.contains(className)) {
          throw new ClassNotFoundException(s"Detected an incompatible DataSourceRegister. " +
            "Please remove the incompatible library from classpath or upgrade it. " +
            s"Error: ${e.getMessage}", e)
        } else {
          throw e
        }
    }
  }

  /**
   * When creating a data source table, the `path` option has a special meaning: the table location.
   * This method extracts the `path` option and treat it as table location to build a
   * [[CatalogStorageFormat]]. Note that, the `path` option is removed from options after this.
   */
  def buildStorageFormatFromOptions(options: Map[String, String]): CatalogStorageFormat = {
    val path = CaseInsensitiveMap(options).get("path")
    val optionsWithoutPath = options.filterKeys(_.toLowerCase(Locale.ROOT) != "path")
    CatalogStorageFormat.empty.copy(
      locationUri = path.map(CatalogUtils.stringToURI), properties = optionsWithoutPath)
  }

  /**
   * Called before writing into a FileFormat based data source to make sure the
   * supplied schema is not empty.
   * @param schema
   */
  private def validateSchema(schema: StructType): Unit = {
    def hasEmptySchema(schema: StructType): Boolean = {
      schema.size == 0 ||
        schema.find {
          case StructField(_, b: StructType, _, _) => hasEmptySchema(b)
          case _ => false
        }.isDefined
    }

    if (hasEmptySchema(schema)) {
      throw new AnalysisException(
        s"""
           |Datasource does not support writing empty or nested empty schemas.
           |Please make sure the data schema has at least one or more column(s).
         """.stripMargin)
    }
  }
}
guoxiaolongzte/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
Scala
apache-2.0
36,370
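lookupDataSource above resolves a short or legacy provider name in two steps: rewrite it through backwardCompatibilityMap, then try the rewritten name itself and the name plus ".DefaultSource" as class names. A minimal standalone sketch of that ordering (toy map and object name, not Spark's API):

object ProviderResolutionSketch {
  // Toy compatibility map: legacy aliases point at current implementations.
  private val compat = Map(
    "org.apache.spark.sql.json" -> "org.apache.spark.sql.execution.datasources.json.JsonFileFormat",
    "com.databricks.spark.csv"  -> "org.apache.spark.sql.execution.datasources.csv.CSVFileFormat"
  )

  // Returns the class-name candidates to try loading, in order.
  def candidates(provider: String): Seq[String] = {
    val provider1 = compat.getOrElse(provider, provider)
    Seq(provider1, s"$provider1.DefaultSource")
  }

  def main(args: Array[String]): Unit = {
    // A legacy alias is rewritten first, then the ".DefaultSource" fallback is added.
    println(candidates("org.apache.spark.sql.json"))
    // An unknown name passes through unchanged, with the same fallback.
    println(candidates("com.example.MySource"))
  }
}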
/**
 * Copyright 2011-2017 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.http.request.builder.ws2

import io.gatling.core.session._
import io.gatling.http.action.ws2._
import io.gatling.http.request.builder.CommonAttributes
import io.gatling.http.request.builder.ws.Ws

object Ws2 {

  def apply(requestName: Expression[String], wsName: String = Ws.DefaultWebSocketName): Ws2 =
    new Ws2(requestName, wsName)

  def checkTextMessage(name: String) = WsCheck(name, Nil, Nil)
}

/**
 * @param requestName The name of this request
 * @param wsName      The name of the session attribute used to store the websocket
 */
class Ws2(requestName: Expression[String], wsName: String) {

  def wsName(wsName: String) = new Ws(requestName, wsName)

  /**
   * Opens a web socket and stores it in the session.
   *
   * @param url The socket URL
   */
  def connect(url: Expression[String]) =
    new WsConnectRequestBuilder(CommonAttributes(requestName, "GET", Left(url)), wsName)

  /**
   * Sends a text message on the given websocket.
   *
   * @param text The message
   */
  def sendText(text: Expression[String]) = new WsSendBuilder(requestName, wsName, text, Nil)

  /**
   * Closes a websocket.
   */
  def close = new WsCloseBuilder(requestName, wsName)
}
timve/gatling
gatling-http/src/main/scala/io/gatling/http/request/builder/ws2/Ws2.scala
Scala
apache-2.0
1,811
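A hypothetical simulation using the DSL above. This is a sketch, not a verified Gatling scenario: it assumes the standard Gatling Predef implicits that lift String into Expression[String] and lift request builders into exec-able steps; the class name and URL are invented.

import io.gatling.core.Predef._
import io.gatling.http.Predef._
import io.gatling.http.request.builder.ws2.Ws2

class WsExampleSimulation extends Simulation {
  // Connect, send one text frame, then close the socket.
  val scn = scenario("WebSocket example")
    .exec(Ws2("Connect WS").connect("ws://localhost:9000/room/chat")) // hypothetical endpoint
    .exec(Ws2("Say hello").sendText("""{"text": "Hello"}"""))
    .exec(Ws2("Close WS").close)
}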
package object editors {

  type ??? = Nothing

  // For mocking purpose only
  case class User(name: String, age: Int)
}
julienrf/editors
library/src/main/scala/editors/package.scala
Scala
mit
123
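The `???` type alias above plays the same role for types that Predef's `???` value plays for method bodies: it lets a signature compile before its result type is designed. A hypothetical illustration (the object, trait, and member names are invented):

object EditorsSketch {
  import editors._

  // A member whose result type is deliberately undecided still type-checks.
  trait Editor[A] { def render: ??? }

  // User doubles as a simple fixture value.
  val sample: User = User("Alice", 42)
}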
/* * Copyright 2001-2019 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.wordspec /** * A sister class to <code>org.scalatest.wordspec.AsyncWordSpec</code> that can pass a fixture object into its tests. * * <table><tr><td class="usage"> * <strong>Recommended Usage</strong>: * Use class <code>FixtureAsyncWordSpec</code> in situations for which <a href="AsyncWordSpec.html"><code>AsyncWordSpec</code></a> * would be a good choice, when all or most tests need the same fixture objects * that must be cleaned up afterwards. <em>Note: <code>FixtureAsyncWordSpec</code> is intended for use in special situations, with class <code>AsyncWordSpec</code> used for general needs. For * more insight into where <code>FixtureAsyncWordSpec</code> fits in the big picture, see the <a href="AsyncWordSpec.html#withFixtureOneArgAsyncTest"><code>withFixture(OneArgAsyncTest)</code></a> subsection of the <a href="AsyncWordSpec.html#sharedFixtures">Shared fixtures</a> section in the documentation for class <code>AsyncWordSpec</code>.</em> * </td></tr></table> * * <p> * Class <code>FixtureAsyncWordSpec</code> behaves similarly to class <code>org.scalatest.wordspec.AsyncWordSpec</code>, except that tests may have a * fixture parameter. The type of the * fixture parameter is defined by the abstract <code>FixtureParam</code> type, which is a member of this class. * This class also contains an abstract <code>withFixture</code> method. This <code>withFixture</code> method * takes a <code>OneArgAsyncTest</code>, which is a nested trait defined as a member of this class. * <code>OneArgAsyncTest</code> has an <code>apply</code> method that takes a <code>FixtureParam</code>. * This <code>apply</code> method is responsible for running a test. * This class's <code>runTest</code> method delegates the actual running of each test to <code>withFixture(OneArgAsyncTest)</code>, passing * in the test code to run via the <code>OneArgAsyncTest</code> argument. The <code>withFixture(OneArgAsyncTest)</code> method (abstract in this class) is responsible * for creating the fixture argument and passing it to the test function. * </p> * * <p> * Subclasses of this class must, therefore, do three things differently from a plain old <code>org.scalatest.wordspec.AsyncWordSpec</code>: * </p> * * <ol> * <li>define the type of the fixture parameter by specifying type <code>FixtureParam</code></li> * <li>define the <code>withFixture(OneArgAsyncTest)</code> method</li> * <li>write tests that take a fixture parameter</li> * <li>(You can also define tests that don't take a fixture parameter.)</li> * </ol> * * <p> * If the fixture you want to pass into your tests consists of multiple objects, you will need to combine * them into one object to use this class. One good approach to passing multiple fixture objects is * to encapsulate them in a case class. 
Here's an example: * </p> * * <pre class="stHighlight"> * case class FixtureParam(file: File, writer: FileWriter) * </pre> * * <p> * To enable the stacking of traits that define <code>withFixture(NoArgAsyncTest)</code>, it is a good idea to let * <code>withFixture(NoArgAsyncTest)</code> invoke the test function instead of invoking the test * function directly. To do so, you'll need to convert the <code>OneArgAsyncTest</code> to a <code>NoArgAsyncTest</code>. You can do that by passing * the fixture object to the <code>toNoArgAsyncTest</code> method of <code>OneArgAsyncTest</code>. In other words, instead of * writing &ldquo;<code>test(theFixture)</code>&rdquo;, you'd delegate responsibility for * invoking the test function to the <code>withFixture(NoArgAsyncTest)</code> method of the same instance by writing: * </p> * * <pre> * withFixture(test.toNoArgAsyncTest(theFixture)) * </pre> * * <p> * Here's a complete example: * </p> * * <pre class="stHighlight"> * package org.scalatest.examples.asyncwordspec.oneargasynctest * * import org.scalatest._ * import scala.concurrent.Future * import scala.concurrent.ExecutionContext * * // Defining actor messages * sealed abstract class StringOp * case object Clear extends StringOp * case class Append(value: String) extends StringOp * case object GetValue * * class StringActor { // Simulating an actor * private final val sb = new StringBuilder * def !(op: StringOp): Unit = * synchronized { * op match { * case Append(value) => sb.append(value) * case Clear => sb.clear() * } * } * def ?(get: GetValue.type)(implicit c: ExecutionContext): Future[String] = * Future { * synchronized { sb.toString } * } * } * * class ExampleSpec extends wordspec.FixtureAsyncWordSpec { * * type FixtureParam = StringActor * * def withFixture(test: OneArgAsyncTest): FutureOutcome = { * * val actor = new StringActor * complete { * actor ! Append("ScalaTest is ") // set up the fixture * withFixture(test.toNoArgAsyncTest(actor)) * } lastly { * actor ! Clear // ensure the fixture will be cleaned up * } * } * * "Testing" should { * "be easy" in { actor =&gt; * actor ! Append("easy!") * val futureString = actor ? GetValue * futureString map { s =&gt; * assert(s == "ScalaTest is easy!") * } * } * * "be fun" in { actor => * actor ! Append("fun!") * val futureString = actor ? GetValue * futureString map { s => * assert(s == "ScalaTest is fun!") * } * } * } * } * </pre> * * <p> * If a test fails, the future returned by the <code>OneArgAsyncTest</code> function will result in * an [[org.scalatest.Failed org.scalatest.Failed]] wrapping the exception describing * the failure. To ensure clean up happens even if a test fails, you should invoke the test function and do the cleanup using * <code>complete</code>-<code>lastly</code>, as shown in the previous example. The <code>complete</code>-<code>lastly</code> syntax, defined in <code>CompleteLastly</code>, which is extended by <code>AsyncTestSuite</code>, ensures * the second, cleanup block of code is executed, whether the the first block throws an exception or returns a future. If it returns a * future, the cleanup will be executed when the future completes. * </p> * * <a name="sharingFixturesAcrossClasses"></a><h2>Sharing fixtures across classes</h2> * * <p> * If multiple test classes need the same fixture, you can define the <code>FixtureParam</code> and <code>withFixture(OneArgAsyncTest)</code> * implementations in a trait, then mix that trait into the test classes that need it. 
For example, if your application requires a database and your
 * integration tests use that database, you will likely have many test classes that need a database fixture. You can create a "database fixture" trait
 * that creates a database with a unique name, passes the connector into the test, then removes the database once the test completes. This is shown in
 * the following example:
 * </p>
 *
 * <pre class="stHighlight">
 * package org.scalatest.examples.fixture.asyncwordspec.sharing
 *
 * import java.util.concurrent.ConcurrentHashMap
 * import org.scalatest._
 * import DbServer._
 * import java.util.UUID.randomUUID
 * import scala.concurrent.Future
 *
 * object DbServer { // Simulating a database server
 *   type Db = StringBuffer
 *   private val databases = new ConcurrentHashMap[String, Db]
 *   def createDb(name: String): Db = {
 *     val db = new StringBuffer
 *     databases.put(name, db)
 *     db
 *   }
 *   def removeDb(name: String) {
 *     databases.remove(name)
 *   }
 * }
 *
 * trait DbFixture { this: FixtureAsyncTestSuite =&gt;
 *
 *   type FixtureParam = Db
 *
 *   // Allow clients to populate the database after
 *   // it is created
 *   def populateDb(db: Db) {}
 *
 *   def withFixture(test: OneArgAsyncTest): FutureOutcome = {
 *     val dbName = randomUUID.toString
 *     val db = createDb(dbName) // create the fixture
 *     complete {
 *       populateDb(db) // setup the fixture
 *       withFixture(test.toNoArgAsyncTest(db)) // "loan" the fixture to the test
 *     } lastly {
 *       removeDb(dbName) // ensure the fixture will be cleaned up
 *     }
 *   }
 * }
 *
 * class ExampleSpec extends wordspec.FixtureAsyncWordSpec with DbFixture {
 *
 *   override def populateDb(db: Db) { // setup the fixture
 *     db.append("ScalaTest is ")
 *   }
 *
 *   "Testing" should {
 *     "be easy" in { db =&gt;
 *       Future {
 *         db.append("easy!")
 *         assert(db.toString === "ScalaTest is easy!")
 *       }
 *     }
 *
 *     "be fun" in { db =&gt;
 *       Future {
 *         db.append("fun!")
 *         assert(db.toString === "ScalaTest is fun!")
 *       }
 *     }
 *   }
 *
 *   "Testing code" should {
 *     // This test doesn't need a Db
 *     "be clear" in { () =&gt;
 *       Future {
 *         val buf = new StringBuffer
 *         buf.append("ScalaTest code is ")
 *         buf.append("clear!")
 *         assert(buf.toString === "ScalaTest code is clear!")
 *       }
 *     }
 *   }
 * }
 * </pre>
 *
 * <p>
 * Often when you create fixtures in a trait like <code>DbFixture</code>, you'll still need to enable individual test classes
 * to "setup" a newly created fixture before it gets passed into the tests. A good way to accomplish this is to pass the newly
 * created fixture into a setup method, like <code>populateDb</code> in the previous example, before passing it to the test
 * function. Classes that need to perform such setup can override the method, as does <code>ExampleSpec</code>.
 * </p>
 *
 * <p>
 * If a test doesn't need the fixture, you can indicate that by providing a no-arg instead of a one-arg function, as is done in the
 * third test in the previous example, &ldquo;<code>Testing code should be clear</code>&rdquo;. In other words, instead of starting your function literal
 * with something like &ldquo;<code>db =&gt;</code>&rdquo;, you'd start it with &ldquo;<code>() =&gt;</code>&rdquo;. For such tests, <code>runTest</code>
 * will not invoke <code>withFixture(OneArgAsyncTest)</code>. It will instead directly invoke <code>withFixture(NoArgAsyncTest)</code>.
 * </p>
 *
 *
 * <p>
 * Both examples shown above demonstrate the technique of giving each test its own "fixture sandbox" to play in.
When your fixtures
 * involve external side-effects, like creating files or databases, it is a good idea to give each file or database a unique name as is
 * done in these examples. This keeps tests completely isolated, allowing you to run them in parallel if desired. You could mix
 * <a href="../ParallelTestExecution.html"><code>ParallelTestExecution</code></a> into either of these <code>ExampleSpec</code> classes, and the tests would run in parallel just fine.
 * </p>
 *
 * @author Bill Venners
 */
abstract class FixtureAsyncWordSpec extends org.scalatest.wordspec.FixtureAsyncWordSpecLike {

  /**
   * Returns a user-friendly string for this suite, composed of the
   * simple name of the class (possibly simplified further by removing dollar signs if added by the Scala interpreter) and, if this suite
   * contains nested suites, the result of invoking <code>toString</code> on each
   * of the nested suites, separated by commas and surrounded by parentheses.
   *
   * @return a user-friendly string for this suite
   */
  override def toString: String = org.scalatest.Suite.suiteToString(None, this)
}
dotty-staging/scalatest
scalatest/src/main/scala/org/scalatest/wordspec/FixtureAsyncWordSpec.scala
Scala
apache-2.0
12,020
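The scaladoc above already carries full examples; as a compact variant, the following is a minimal sketch (suite name and fixture invented for illustration) of a one-arg fixture suite where nothing needs cleaning up, so complete-lastly is omitted:

import org.scalatest._
import scala.concurrent.Future

// Illustrative suite: the fixture is a scratch StringBuilder loaned to each test.
class ScratchSpec extends wordspec.FixtureAsyncWordSpec {

  type FixtureParam = StringBuilder

  def withFixture(test: OneArgAsyncTest): FutureOutcome =
    // No external resource to release, so complete-lastly is unnecessary here.
    withFixture(test.toNoArgAsyncTest(new StringBuilder("seed-")))

  "A scratch buffer" should {
    "start from the seeded prefix" in { buf =>
      buf.append("data")
      Future.successful(assert(buf.toString == "seed-data"))
    }
  }
}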
/*
 * This file is part of Kiama.
 *
 * Copyright (C) 2008-2015 Anthony M Sloane, Macquarie University.
 *
 * Kiama is free software: you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the
 * Free Software Foundation, either version 3 of the License, or (at your
 * option) any later version.
 *
 * Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
 * more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Kiama.  (See files COPYING and COPYING.LESSER.)  If not, see
 * <http://www.gnu.org/licenses/>.
 */

package org.kiama
package util

import scala.util.parsing.combinator.RegexParsers

/**
 * General support for applications that implement read-eval-print loops (REPLs).
 */
trait REPLBase[C <: REPLConfig] extends Profiler {

    import scala.annotation.tailrec
    import scala.io.Source

    /**
     * Banner message that is printed before the REPL starts.
     */
    def banner : String

    /**
     * The entry point for this REPL.
     */
    def main (args : Array[String]) {
        driver (args)
    }

    /**
     * Create the configuration for a particular run of the REPL. If supplied,
     * use `output` and `error` instead of the standard output and error emitters.
     */
    def createConfig (args : Seq[String],
                      output : Emitter = new OutputEmitter,
                      error : Emitter = new ErrorEmitter) : C

    /**
     * Create and initialise the configuration for a particular run of the REPL.
     * If supplied, use `output` and `error` instead of the standard output and
     * error emitters. Default: call `createConfig` and then initialise the
     * resulting configuration.
     */
    def createAndInitConfig (args : Seq[String],
                             output : Emitter = new OutputEmitter,
                             error : Emitter = new ErrorEmitter) : C = {
        val config = createConfig (args, output, error)
        config.afterInit ()
        config
    }

    /**
     * Driver for this REPL. First, use the argument list to create a
     * configuration for this execution. If the arguments parse ok, then
     * print the REPL banner. Read lines from the console and pass non-null ones
     * to `processline`. If `processWhitespaceLines` is false, do not pass lines
     * that contain just whitespace, otherwise do. Continue until `processline`
     * returns `None`. Call `prompt` each time input is about to be read.
     */
    def driver (args : Seq[String]) {
        // Set up the configuration
        val config = createAndInitConfig (args)

        // Process any filename arguments
        processfiles (config)

        // Enter interactive phase
        config.output.emitln (banner)
        if (config.profile.get != None) {
            val dimensions = parseProfileOption (config.profile ())
            profile (processlines (config), dimensions, config.logging ())
        } else if (config.time ())
            time (processlines (config))
        else
            processlines (config)

        config.output.emitln
    }

    /**
     * Define the prompt (default: `"> "`).
     */
    def prompt : String = "> "

    /**
     * Process interactively entered lines, one by one, until end of file.
     * Prompt with the given prompt.
     */
    def processlines (config : C) {
        processconsole (config.console (), prompt, config)
    }

    /**
     * Process the files one by one, allowing config to be updated each time
     * and updated config to be used by the next file.
     */
    final def processfiles (config : C) : C = {

        @tailrec
        def loop (filenames : List[String], config : C) : C =
            filenames match {
                case filename +: rest =>
                    loop (rest, processfile (filename, config))
                case _ =>
                    config
            }

        loop (config.filenames (), config)
    }

    /**
     * Process a file argument by passing its contents line-by-line to
     * `processline`.
*/ def processfile (filename : String, config : C) : C = processconsole (new FileConsole (filename), "", config) /** * Process interactively entered lines, one by one, until end of file. */ @tailrec final def processconsole (console : Console, prompt : String, config : C) : C = { val line = console.readLine (prompt) if (line == null) config else { processline (line, console, config) match { case Some (newConfig) => processconsole (console, prompt, newConfig) case _ => config } } } /** * Process a user input line. The return value allows the processing to * optionally return a new configuration that will be used in subsequent * processing. A return value of `None` indicates that no more lines * from the current console should be processed. */ def processline (line : String, console : Console, config : C) : Option[C] } /** * General support for applications that implement read-eval-print loops (REPLs). */ trait REPL extends REPLBase[REPLConfig] { def createConfig (args : Seq[String], out : Emitter = new OutputEmitter, err : Emitter = new ErrorEmitter) : REPLConfig = new REPLConfig (args) { lazy val output = out lazy val error = err } } /** * A REPL that parses its input lines into a value (such as an abstract syntax * tree), then processes them. Output is emitted using a configurable emitter. */ trait ParsingREPLBase[T, C <: REPLConfig] extends REPLBase[C] with RegexParsers { /** * Process a user input line by parsing it to get a value of type `T`, * then passing it to the `process` method. Returns the configuration * unchanged. */ def processline (line : String, console : Console, config : C) : Option[C] = { if (config.processWhitespaceLines () || (line.trim.length != 0)) { parseAll (parser, line) match { case Success (e, in) if in.atEnd => process (e, config) case Success (_, in) => config.error.emitln (s"extraneous input at ${in.pos}") case f => config.error.emitln (f) } } Some (config) } /** * The parser to use to convert user input lines into values. */ def parser : Parser[T] /** * Process a user input value in the given configuration. */ def process (t : T, config : C) } /** * A REPL that parses its input lines into a value (such as an abstract syntax * tree), then processes them. `C` is the type of the configuration. */ trait ParsingREPLWithConfig[T, C <: REPLConfig] extends ParsingREPLBase[T,C] /** * A REPL that parses its input lines into a value (such as an abstract syntax * tree), then processes them. Output is emitted to standard output. */ trait ParsingREPL[T ] extends ParsingREPLWithConfig[T,REPLConfig] { def createConfig (args : Seq[String], out : Emitter = new OutputEmitter, err : Emitter = new ErrorEmitter) : REPLConfig = new REPLConfig (args) { lazy val output = out lazy val error = err } }
joaoraf/kiama
library/src/org/kiama/util/REPL.scala
Scala
gpl-3.0
7,630
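As a sketch of how the hooks above compose, a hypothetical minimal REPL built on `ParsingREPL` might look like the following; the grammar and object name are invented for illustration, and only members declared in the traits above are used:

// Hypothetical example: sums two integers per input line, e.g. "2 + 3".
object AdderREPL extends ParsingREPL[Int] {

    val banner = "adder REPL: enter <int> + <int>"

    // RegexParsers is mixed in via ParsingREPLBase, so regex and literal
    // parsers are available directly.
    lazy val parser : Parser[Int] =
        "[0-9]+".r ~ ("+" ~> "[0-9]+".r) ^^ {
            case l ~ r => l.toInt + r.toInt
        }

    def process (i : Int, config : REPLConfig) {
        config.output.emitln (i.toString)
    }
}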
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.scheduler import org.apache.spark.executor.ExecutorExitCode /** * Represents an explanation for an executor or whole process failing or exiting. */ private[spark] class ExecutorLossReason(val message: String) extends Serializable { override def toString: String = message } private[spark] case class ExecutorExited(exitCode: Int, exitCausedByApp: Boolean, reason: String) extends ExecutorLossReason(reason) private[spark] object ExecutorExited { def apply(exitCode: Int, exitCausedByApp: Boolean): ExecutorExited = { ExecutorExited( exitCode, exitCausedByApp, ExecutorExitCode.explainExitCode(exitCode)) } } private[spark] object ExecutorKilled extends ExecutorLossReason("Executor killed by driver.") /** * A loss reason that means we don't yet know why the executor exited. * * This is used by the task scheduler to remove state associated with the executor, but * not yet fail any tasks that were running in the executor before the real loss reason * is known. */ private [spark] object LossReasonPending extends ExecutorLossReason("Pending loss reason.") /** * @param _message human readable loss reason * @param workerHost it's defined when the host is confirmed lost too (i.e. including * shuffle service) * @param causedByApp whether the loss of the executor is the fault of the running app. * (assumed true by default unless known explicitly otherwise) */ private[spark] case class ExecutorProcessLost( _message: String = "Executor Process Lost", workerHost: Option[String] = None, causedByApp: Boolean = true) extends ExecutorLossReason(_message) /** * A loss reason that means the executor is marked for decommissioning. * * This is used by the task scheduler to remove state associated with the executor, but * not yet fail any tasks that were running in the executor before the executor is "fully" lost. * If you update this code make sure to re-run the K8s integration tests. * * @param workerHost it is defined when the worker is decommissioned too */ private [spark] case class ExecutorDecommission(workerHost: Option[String] = None) extends ExecutorLossReason("Executor decommission.")
witgo/spark
core/src/main/scala/org/apache/spark/scheduler/ExecutorLossReason.scala
Scala
apache-2.0
3,041
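A small illustrative helper (not part of Spark; object and method names invented) showing how a caller can branch on these reasons. It is placed in this package only so the private[spark] types are visible:

package org.apache.spark.scheduler

// Hypothetical helper: render a loss reason for logging.
private[spark] object LossReasonFormat {
  def describe(reason: ExecutorLossReason): String = reason match {
    case ExecutorExited(code, byApp, msg)    => s"exited with code $code (causedByApp=$byApp): $msg"
    case ExecutorKilled                      => "killed by driver"
    case LossReasonPending                   => "loss reason not yet known"
    case ExecutorDecommission(host)          => s"decommissioned (workerHost=$host)"
    case ExecutorProcessLost(msg, host, app) => s"process lost (workerHost=$host, causedByApp=$app): $msg"
    case other                               => other.message  // ExecutorLossReason is not sealed
  }
}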
package gh3.events import gh3._ import gh3.models.{GH3Sender, GH3Repository, GH3Organization} import net.liftweb.json.JsonAST.JValue case class RepositoryEvent( action: String, repository: GH3Repository, organization: GH3Organization, sender: GH3Sender ) extends GH3Event object RepositoryEvent extends GH3EventParser { def apply(json: JValue): Option[RepositoryEvent] = { val action = node2String(json)("action") val repository = GH3Repository(json \\ "repository") val organization = GH3Organization(json \\ "organization") val sender = GH3Sender(json \\ "sender") val params = Seq(action, repository, organization, sender) if(params.forall(_.isDefined)) Some(new RepositoryEvent(action.get, repository.get, organization.get, sender.get)) else None } }
mgoeminne/github_etl
src/main/scala/gh3/events/RepositoryEvent.scala
Scala
mit
961
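A hedged usage sketch: because `RepositoryEvent.apply` above sequences its sub-parsers with `forall(_.isDefined)`, an incomplete payload collapses to `None`. The JSON below is deliberately minimal to show exactly that behaviour; a full GitHub webhook payload would be needed for a `Some`:

import net.liftweb.json._
import gh3.events.RepositoryEvent

object RepositoryEventDemo extends App {
  // "repository", "organization" and "sender" are missing, so the
  // all-or-nothing Option sequencing in RepositoryEvent.apply yields None.
  val payload: JValue = parse("""{ "action": "created" }""")
  RepositoryEvent(payload) match {
    case Some(ev) => println(s"repository event: ${ev.action}")
    case None     => println("payload did not contain a full repository event")
  }
}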
package net.liftmodules

import _root_.net.liftweb._
import http._

/**
 * ==FoBo / FoBo API Module==
 *
 * This FoBo / FoBo API module provides FoBo/Lift API components for the FoBo Module,
 * but can also be used as-is.
 *
 * If you are using this module via the FoBo/FoBo module see also [[net.liftmodules.fobo]] for setup information.
 */
package object foboapi {

  //override def toString() = FoBoAPI.API.toString()

  /**
   * Initiate FoBo's FoBo API in your bootstrap liftweb Boot.
   *
   * '''Example:'''
   * {{{
   *   import net.liftmodules.{foboapi => fobo}
   *    :
   *   fobo.API.init=fobo.API.[API Object]
   * }}}
   * '''Note:''' To see available objects click on the round trait icon in the header of this page.
   */
  sealed trait API

  object API extends API {

    //we don't actually need to store the objects (for now) so let's just save
    //the object name, we can easily change this if we need to
    private type Store = List[String] //List[API]
    private var store: Store = List()

    def init: Store = store

    def init_=(t: API): Store = {
      store = if (store contains t.toString) store else t.toString :: store
      store
    }

    override def toString() = "foboapi.API = " + store.toString()

    /**
     * Enable usage of FoBo's FoBo API version 1&#8228;X&#8228;X in your bootstrap liftweb Boot.
     * @version 1.X.X
     *
     * '''Example:'''
     *
     * {{{
     *   import net.liftmodules.{foboapi => fobo}
     *    :
     *   fobo.API.init=fobo.API.FoBo1
     * }}}
     *
     */
    case object FoBo1 extends API {
      FoBoAPI.init
    }

  }

  private object FoBoAPI {
    lazy val init: Unit = {
      LiftRules.addToPackages("net.liftmodules.fobo")
    }
  }
}
karma4u101/FoBo
FoBo/FoBo-API/src/main/scala/net/liftmodules/fobo/foboapi.scala
Scala
apache-2.0
1,776
package pb.frontend import org.scalajs.dom import org.scalajs.dom._ import org.scalajs.dom.ext._ import org.scalajs.dom.raw.HTMLFormElement import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue import scala.scalajs.js import scala.scalajs.js.Dynamic.global import scala.scalajs.js.annotation.JSExport import scala.util.{Failure, Success} @JSExport object PairingBuddyApp extends js.JSApp { override def main(): Unit = document.getElementById("start-session").asInstanceOf[HTMLFormElement].onsubmit = { e => e.preventDefault() Ajax.get("/start-session").onComplete { case Success(xhr) => dom.window.location.href = xhr.responseText case Failure(_) => global.alert("oh nooo") } } }
cakper/pairing-buddy
frontend/src/main/scala/pb/frontend/PairingBuddyApp.scala
Scala
mit
742
package autolift.test.scalaz import scalaz._ import Scalaz._ import autolift.Scalaz._ class LiftMaximumByTest extends BaseSpec{ "liftMaximumBy on a List with identity" should "work" in{ val in = List(1, 2, 3) val out = in.liftMaxBy(identity[Int]) same[Option[Int]](out, Option(3)) } "liftMaximumBy on a Option[List]" should "work" in{ val in = Option(List("1", "2", "3")) val out = in.liftMaxBy(s2i) same[Option[Option[String]]](out, Option(Option("3"))) } "liftMaximumBy on a List[List]" should "work with functions" in{ val in = List(List("1"), List("2"), List("3")) val out = in.liftMaxBy(anyF) same[Option[List[String]]](out, Option(List("3"))) } "liftMaximumBy on a List[Option]" should "work" in{ val in = List(None, None, Some("1")) val out = in.liftMaxBy(s2i) same[List[Option[String]]](out, List(None, None, Some("1"))) } "LiftedMaximumBy" should "work" in{ val fn = liftMaxBy(s2i) val out = fn(List(List("1", "2"))) same[List[Option[String]]](out, List(Option("2"))) } "LiftedMaximumBy" should "map" in{ val lf = liftMaxBy(s2i) val fn = lf map {_ + 1} val out = fn(List(List("1", "2"))) same[List[Option[String]]](out, List(Option("2"))) } }
wheaties/AutoLifts
autolift-scalaz/src/test/scala/autolift/scalaz/LiftMaximumByTest.scala
Scala
apache-2.0
1,264
package memnets.fx.app import java.io.File import javafx.fxml.FXMLLoader import memnets.core.BuiltModel import memnets.fx._ import memnets.fx.utils._ import memnets.model._ import memnets.utils._ import org.fxmisc.flowless.VirtualizedScrollPane import scalafx.Includes._ import scalafx.beans.property._ import scalafx.scene.input._ import scalafx.stage.Stage import scalafx.util.Duration trait EditorDef { def title: String def fileDescription: String def fileTypes: Iterable[String] def codeStyles: CodeAreaStyles } /** subclass for testing w/o FX app */ trait EditorOwner { def model: BuiltModel def open( ext: Iterable[String], desc: String = "", title: String = "", initFile: Option[File] = None ): Option[File] def save( ext: Iterable[String], desc: String = "", title: String = "", initFile: Option[File] = None ): Option[File] def fadeDuration(): Duration = (500 ms) def hideEditor(fade: Duration = fadeDuration()): Unit = {} def openUrl(url: String): Unit def showMsg(txt: String): Unit } class CodeEditorFX(owner: EditorOwner, editDef: EditorDef) extends Logging { protected val _saveFileProp = ObjectProperty[Option[File]](None) val editorOpacity = DoubleProperty(1.0) private val editLoader = new FXMLLoader("code-editor.fxml".asURL) val fx: JBorderPane = editLoader.load() val codeArea = new CodeAreaP(editDef.codeStyles) val scroll = new VirtualizedScrollPane(codeArea) scroll.setId("codeScroll") fx.setCenter(scroll) val title = StringProperty(editDef.title) // editor actions val newAction = ActionP("New", '\uf15b') { text = "" _saveFileProp.value = None } val openAction = ActionP("Open", '\uf07c') { owner.open( editDef.fileTypes, editDef.fileDescription, initFile = _saveFileProp.value ) match { case Some(f) if f.isFile => try { for (inScript <- f.read()) { text = inScript _saveFileProp.value = Some(f) } } catch { case th: Throwable => _saveFileProp.value = None val msg = s"couldn't read file: $f" logger.error(msg, th) owner.showMsg(msg) } case Some(f) => // bad file _saveFileProp.value = None case None => // user cancelled, do nothing } } val saveAction = ActionP("Save", '\uf0c7') { if (_saveFileProp.value.isDefined) doSave(_saveFileProp.value) else saveAsAction.forceFire(true) } val saveAsAction = ActionP("Save As...") { val file = owner.save(editDef.fileTypes, editDef.fileDescription, initFile = _saveFileProp.value) doSave(file) } val closeEditorAction = ActionP("Close Editor", '\uf00d') { owner.hideEditor() } val cutAction = ActionP("Cut", '\uf0c4') { codeArea.cut() } val copyAction = ActionP("Copy", '\uf0c5') { codeArea.copy() } val pasteAction = ActionP("Paste", '\uf0ea') { codeArea.paste() } val undoAction = ActionP("Undo", '\uf0e2') { codeArea.undo() } val redoAction = ActionP("Redo", '\uf01e') { codeArea.redo() } val selectAllAction = ActionP("Select All") { codeArea.selectAll() } val selectNoneAction = ActionP("Select None") { codeArea.deselect() } val deleteAction = ActionP("Delete") { val selection = codeArea.getSelection codeArea.deleteText(selection.getStart, selection.getEnd) } val wordWrapAction = ActionP.toggle("Word Wrap", initSelect = true) { sel => codeArea.wrapTextProperty.set(sel.isSelected) } val transparentAction = ActionP.toggle("Transparent", initSelect = false) { sel => editorOpacity.value = if (sel.isSelected) 0.85 else 1.0 } val projectAction = ActionP("Project") { owner.openUrl("https://github.com/MemoryNetworks/memnets") } val helpAction = ActionP("Help", '\uf128') { owner.openUrl("https://github.com/MemoryNetworks/memnets/wiki") } val licenseAction = 
ActionP("License") { owner.openUrl("https://github.com/MemoryNetworks/memnets/blob/master/LICENSE") } newAction.setAccelerator(KeyCombination("CTRL+N")) openAction.setAccelerator(KeyCombination("CTRL+O")) saveAction.setAccelerator(KeyCombination("CTRL+S")) saveAsAction.setAccelerator(KeyCombination("CTRL+SHIFT+S")) cutAction.setAccelerator(KeyCombination("CTRL+X")) copyAction.setAccelerator(KeyCombination("CTRL+C")) pasteAction.setAccelerator(KeyCombination("CTRL+V")) undoAction.setAccelerator(KeyCombination("CTRL+Z")) redoAction.setAccelerator(KeyCombination("CTRL+Y")) selectAllAction.setAccelerator(KeyCombination("CTRL+A")) selectNoneAction.setAccelerator(KeyCombination("CTRL+SHIFT+A")) deleteAction.setAccelerator(new KeyCodeCombination(KeyCode.Delete)) newAction.disabledProperty <== codeArea.hasText.not() saveAction.disabledProperty <== codeArea.hasText.not() saveAsAction.disabledProperty <== codeArea.hasText.not() cutAction.disabledProperty <== codeArea.selectionEmpty copyAction.disabledProperty <== codeArea.selectionEmpty deleteAction.disabledProperty <== codeArea.selectionEmpty selectNoneAction.disabledProperty <== codeArea.selectionEmpty // paste can come from outside app and don't know how to tell as of now... // pasteAction.disabledProperty <== _hasPaste.not() undoAction.disabledProperty.bind(codeArea.undoAvailableProperty().map(x => !x)) redoAction.disabledProperty.bind(codeArea.redoAvailableProperty().map(x => !x)) def text = codeArea.getText() def text_=(code: String): Unit = { codeArea.setCode(code) } def bindActions(): Unit = { ActionP.lookupActions(editLoader) } protected def doSave(file: Option[File]): Unit = { file match { case Some(f) if f.isFile => try { val inScript = text f.printTo { op => op.print(inScript) } owner.showMsg(s"saved to file: ${f.getAbsoluteFile}") _saveFileProp.value = file } catch { case th: Throwable => _saveFileProp.value = None val msg = s"couldn't save to file: $f" logger.error(msg, th) owner.showMsg(msg) } case None => } } _saveFileProp.onChange { (_, _, fileOpt) => title.value = fileOpt match { case Some(file) => editDef.title + ": " + file case None => editDef.title } } } /** default impl needs stage */ trait EditorOwnerBase extends EditorOwner { def open( ext: Iterable[String], desc: String = "", title: String = "", initFile: Option[File] = None ): Option[File] = memnets.fx.open(stage, ext, desc, title, initFile) def save( ext: Iterable[String], desc: String = "", title: String = "", initFile: Option[File] = None ): Option[File] = memnets.fx.save(stage, ext, desc, title, initFile) def showMsg(txt: String): Unit def stage: Stage def openUrl(url: String): Unit }
MemoryNetworks/memnets
fx/src/main/scala/memnets/fx/app/CodeEditorFX.scala
Scala
apache-2.0
7,001
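The `EditorOwner` scaladoc above anticipates a non-FX test subclass ("subclass for testing w/o FX app"); a minimal sketch could record user-visible effects instead of opening dialogs. The class name is invented, and `model` stays abstract because `BuiltModel` construction is application-specific:

import java.io.File
import memnets.fx.app.EditorOwner

// Hypothetical test double: records effects, never touches JavaFX.
abstract class RecordingEditorOwner extends EditorOwner {
  var messages: List[String] = Nil
  def open(ext: Iterable[String], desc: String, title: String, initFile: Option[File]): Option[File] = None
  def save(ext: Iterable[String], desc: String, title: String, initFile: Option[File]): Option[File] = None
  def openUrl(url: String): Unit = messages ::= s"openUrl: $url"
  def showMsg(txt: String): Unit = messages ::= txt
}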
/* * Copyright 2018 CJWW Development * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package helpers.services import com.cjwwdev.auth.models.CurrentUser import helpers.other.Fixtures import org.mockito.ArgumentMatchers import org.mockito.Mockito.{reset, when} import org.mockito.stubbing.OngoingStubbing import org.scalatest.BeforeAndAfterEach import org.scalatest.mockito.MockitoSugar import org.scalatestplus.play.PlaySpec import services.LoginService import scala.concurrent.Future trait MockLoginService extends BeforeAndAfterEach with MockitoSugar with Fixtures { self: PlaySpec => val mockLoginService = mock[LoginService] override protected def beforeEach(): Unit = { super.beforeEach() reset(mockLoginService) } def mockLogin(loggedIn: Boolean): OngoingStubbing[Future[Option[CurrentUser]]] = { when(mockLoginService.login(ArgumentMatchers.any())(ArgumentMatchers.any())) .thenReturn(Future.successful(if(loggedIn) Some(testCurrentUser) else None)) } def mockGetContext(fetched: Boolean): OngoingStubbing[Future[Option[CurrentUser]]] = { when(mockLoginService.getContext(ArgumentMatchers.any())(ArgumentMatchers.any())) .thenReturn(Future.successful(if(fetched) Some(testCurrentUser) else None)) } }
cjww-development/auth-microservice
test/helpers/services/MockLoginService.scala
Scala
apache-2.0
1,775
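A sketch of a spec using these helpers; the consumer under test is omitted (and its wiring is only indicated in a comment), so this shows just the stubbing side:

import org.scalatestplus.play.PlaySpec
import helpers.services.MockLoginService

class LoginServiceMockingSpec extends PlaySpec with MockLoginService {
  "a consumer of LoginService" should {
    "see the stubbed login and context results" in {
      mockLogin(loggedIn = true)        // login(...) now yields Some(testCurrentUser)
      mockGetContext(fetched = false)   // getContext(...) now yields None
      // construct the controller/service under test with mockLoginService here
    }
  }
}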
package io.github.ptitjes.scott.api /** * @author Didier Villevalois */ trait IterationCallback[X, Y <: X] { def iterationDone(iteration: Int, hmm: HiddenMarkovModel[X, Y], elapsedTime: Long): Unit }
ptitjes/scott
scott-core/src/main/scala/io/github/ptitjes/scott/api/IterationCallback.scala
Scala
gpl-3.0
205
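Since the trait is a single hook, a sketch implementation is enough to show the intended use. The class name is invented, and the unit of `elapsedTime` is not documented above; milliseconds is an assumption:

import io.github.ptitjes.scott.api._

// Hypothetical callback: report progress after each training iteration.
class PrintingCallback[X, Y <: X] extends IterationCallback[X, Y] {
  override def iterationDone(iteration: Int, hmm: HiddenMarkovModel[X, Y], elapsedTime: Long): Unit =
    println(s"iteration $iteration done in $elapsedTime ms") // ms is assumed, not documented
}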
package com.thangiee.lolhangouts.data.usecases.entities import com.thangiee.lolchat.region.Region case class User( loginName: String, inGameName: String, region: Region, statusMsg: String, currentFriendChat: Option[String] = None, groupNames: Seq[String] )
Thangiee/LoL-Hangouts
src/com/thangiee/lolhangouts/data/usecases/entities/User.scala
Scala
apache-2.0
273
package sample.model.account

import scala.util.{ Failure, Success, Try }

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import sample._
import sample.model.DataFixtures

@RunWith(classOf[JUnitRunner])
class AccountSpec extends UnitSpecSupport {

  behavior of "account management"

  it should "load a normal account" in { implicit session =>
    DataFixtures.saveAcc("normal", AccountStatusType.Normal)
    val acc = Account.loadActive("normal")
    acc.id should be ("normal")
    acc.statusType should be (AccountStatusType.Normal)
  }

  it should "raise an exception for a withdrawn account" in { implicit session =>
    DataFixtures.saveAcc("withdrawal", AccountStatusType.Withdrawal)
    Try(Account.loadActive("withdrawal")) match {
      case Success(v) => fail()
      case Failure(e) => e.getMessage should be ("error.Account.loadActive")
    }
  }

  it should "register an account" in { implicit session =>
    val p = RegAccount("regsample", "sample user", "sample@example.com", "passwd")
    val id = Account.register(encoder, p)
    val acc = Account.findById(id)
    acc.isDefined should be (true)
    acc.get.id should be ("regsample")
    acc.get.name should be ("sample user")
    acc.get.mail should be ("sample@example.com")
    val login = Login.findById(id)
    login.isDefined should be (true)
    login.get.id should be ("regsample")
    login.get.loginId should be ("regsample")
    encoder.matches("passwd", login.get.password) should be (true)
  }

  it should "change an account" in { implicit session =>
    DataFixtures.saveAcc("normal", AccountStatusType.Normal)
    val p = ChgAccount("sample user", "sample@example.com")
    Account.change("normal", p)
    val acc = Account.findById("normal")
    acc.isDefined should be (true)
    acc.get.name should be ("sample user")
    acc.get.mail should be ("sample@example.com")
  }
}
jkazama/sample-boot-scala
src/test/scala/sample/model/account/AccountSpec.scala
Scala
mit
1,897
/* * La Trobe University - Distributed Deep Learning System * Copyright 2016 Matthias Langer (t3l@threelights.de) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.latrobe.blaze.modules.jvm import edu.latrobe._ import edu.latrobe.blaze._ import edu.latrobe.blaze.modules._ final class MultiplyValues_JVM_Baseline(override val builder: MultiplyValuesBuilder, override val inputHints: BuildHints, override val seed: InstanceSeed, override val weightBufferBuilder: ValueTensorBufferBuilder) extends MultiplyValues_JVM { // --------------------------------------------------------------------------- // Forward propagation related. // --------------------------------------------------------------------------- override protected def doPredictPerValue(output: RealArrayTensor) : Unit = { // input: RGB RGB RGB | RGB RGB RGB // values: RGB RGB RGB | RGB RGB RGB output.foreachSample((off, length) => { ArrayEx.multiply( output.values, off, 1, values, off, 1, length ) }) } override protected def doPredictPerUnit(output: RealArrayTensor) : Unit = { // input: RGB RGB RGB | RGB RGB RGB // values: RGB RGB RGB output.foreachSample((off, length) => { ArrayEx.multiply( output.values, off, 1, values, 0, 1, length ) }) } override protected def doPredictPerChannel(output: RealArrayTensor) : Unit = { // input: RGB RGB RGB | RGB RGB RGB // values: RGB output.foreachChannel((off, stride, length) => { ArrayEx.multiply( output.values, off, stride, values(off), length ) }) /* val sampleSize = output.layout.size.noValues output.foreachSample(offset0 => { var off0 = offset0 val end0 = offset0 + sampleSize while (off0 < end0) { ArrayEx.multiply( output.values, off0, 1, values, 0, 1, values.length ) off0 += values.length } }) */ } override protected def doPredictPerSample(output: RealArrayTensor) : Unit = { // input: RGB RGB RGB | RGB RGB RGB // values: R | R output.foreachSamplePair((i, off, length) => { ArrayEx.multiply( output.values, off, 1, values(i), length ) }) } override protected def doPredictPerBatch(output: RealArrayTensor) : Unit = { // input: RGB RGB RGB | RGB RGB RGB // values: R output *= values(0) } override protected def doPredictInvPerValue(input: RealArrayTensor) : Unit = { // output: RGB RGB RGB | RGB RGB RGB // values: RGB RGB RGB | RGB RGB RGB input.foreachSample((off, length) => { ArrayEx.divide( input.values, off, 1, values, off, 1, length ) }) } override protected def doPredictInvPerUnit(input: RealArrayTensor) : Unit = { // output: RGB RGB RGB | RGB RGB RGB // values: RGB RGB RGB input.foreachSample((off, length) => { ArrayEx.divide( input.values, off, 1, values, 0, 1, length ) }) } override protected def doPredictInvPerChannel(input: RealArrayTensor) : Unit = { // output: RGB RGB RGB | RGB RGB RGB // values: RGB input.foreachChannel((off, stride, length) => { ArrayEx.multiply( input.values, off, stride, Real.one / values(off), length ) }) /* val sampleSize = input.layout.size.noValues input.foreachSample(offset0 => { var off0 = offset0 val end0 = offset0 + sampleSize while (off0 < end0) { 
ArrayEx.divide( input.values, off0, 1, values, 0, 1, values.length ) off0 += values.length } }) */ } override protected def doPredictInvPerSample(input: RealArrayTensor) : Unit = { // output: RGB RGB RGB | RGB RGB RGB // values: R | R input.foreachSamplePair((i, off, length) => { ArrayEx.multiply( input.values, off, 1, Real.one / values(i), length ) }) } override protected def doPredictInvPerBatch(input: RealArrayTensor) : Unit = { // output: RGB RGB RGB | RGB RGB RGB // bias: R input *= Real.one / values(0) } // --------------------------------------------------------------------------- // Back propagation related. // --------------------------------------------------------------------------- override protected def doDeriveInputErrorPerValue(error: RealArrayTensor) : Unit = { // error: RGB RGB RGB | RGB RGB RGB // values: RGB RGB RGB | RGB RGB RGB error.foreachChannel((off, stride, length) => { ArrayEx.multiply( error.values, off, stride, values, off, stride, length ) }) } override protected def doDeriveInputErrorPerUnit(error: RealArrayTensor) : Unit = { // error: RGB RGB RGB | RGB RGB RGB // values: RGB RGB RGB error.foreachSample((off, length) => { ArrayEx.multiply( error.values, off, 1, values, 0, 1, length ) }) } override protected def doDeriveInputErrorPerChannel(error: RealArrayTensor) : Unit = { // error: RGB RGB RGB | RGB RGB RGB // values: RGB error.foreachChannel((off, stride, length) => { ArrayEx.multiply( error.values, off, stride, values(off), length ) }) /* val sampleSize = error.layout.size.noValues error.foreachSample(offset0 => { var off0 = offset0 val end0 = offset0 + sampleSize while (off0 < end0) { ArrayEx.multiply( error.values, off0, 1, values, 0, 1, values.length ) off0 += values.length } }) */ } override protected def doDeriveInputErrorPerSample(error: RealArrayTensor) : Unit = { // error: RGB RGB RGB | RGB RGB RGB // values: R | R error.foreachSamplePair((i, off, length) => { ArrayEx.multiply( error.values, off, 1, values(i), length ) }) } override protected def doDeriveInputErrorPerBatch(error: RealArrayTensor) : Unit = { // error: RGB RGB RGB | RGB RGB RGB // values: R error *= values(0) } } object MultiplyValues_JVM_Baseline_Description extends ModuleVariant_JVM_Description[MultiplyValuesBuilder] { override def build(builder: MultiplyValuesBuilder, hints: BuildHints, seed: InstanceSeed, weightsBuilder: ValueTensorBufferBuilder) : MultiplyValues_JVM_Baseline = new MultiplyValues_JVM_Baseline( builder, hints, seed, weightsBuilder ) }
bashimao/ltudl
blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/MultiplyValues_JVM_Baseline.scala
Scala
apache-2.0
7,498
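Stripped of the `RealArrayTensor`/`ArrayEx` machinery, the per-channel broadcast in `doPredictPerChannel` above reduces to a strided in-place multiply; a standalone sketch in plain Scala (values here are illustrative):

// Standalone sketch: scale channel c of an interleaved RGB buffer by values(c).
object PerChannelDemo extends App {
  val values      = Array(0.5f, 1.0f, 2.0f)        // one factor per channel (RGB)
  val interleaved = Array(1f, 1f, 1f, 2f, 2f, 2f)  // two pixels: RGB RGB
  val channels    = values.length
  for (c <- 0 until channels; i <- c until interleaved.length by channels)
    interleaved(i) *= values(c)
  println(interleaved.mkString(", "))              // 0.5, 1.0, 2.0, 1.0, 2.0, 4.0
}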
package com.datastax.spark.connector.writer import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.{Callable, Executors} import com.google.common.util.concurrent.MoreExecutors import org.junit.Assert._ import org.junit.Test class AsyncExecutorTest { @Test def test() { val taskCount = 20 val maxParallel = 5 val currentlyRunningCounter = new AtomicInteger(0) val maxParallelCounter = new AtomicInteger(0) val totalFinishedExecutionsCounter = new AtomicInteger(0) val task = new Callable[String] { override def call() = { val c = currentlyRunningCounter.incrementAndGet() var m = maxParallelCounter.get() while (m < c && !maxParallelCounter.compareAndSet(m, c)) m = maxParallelCounter.get() Thread.sleep(100) currentlyRunningCounter.decrementAndGet() totalFinishedExecutionsCounter.incrementAndGet() "ok" } } val underlyingExecutor = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool()) val asyncExecutor = new AsyncExecutor[Callable[String], String](underlyingExecutor.submit(_: Callable[String]), maxParallel, None, None) for (i <- 1 to taskCount) asyncExecutor.executeAsync(task) asyncExecutor.waitForCurrentlyExecutingTasks() assertEquals(maxParallel, maxParallelCounter.get()) assertEquals(taskCount, totalFinishedExecutionsCounter.get()) assertEquals(true, asyncExecutor.successful) } }
clakech/spark-cassandra-connector
spark-cassandra-connector/src/test/scala/com/datastax/spark/connector/writer/AsyncExecutorTest.scala
Scala
apache-2.0
1,486
/*
 * Copyright (c) 2015-2017 EpiData, Inc.
*/

package models

import java.util
import cassandra.DB
import com.datastax.driver.core.querybuilder.{ Clause, QueryBuilder }
import com.epidata.lib.models.{ Measurement => Model, MeasurementsKeys, MeasurementSummary }
import com.epidata.lib.models.util.{ JsonHelpers, Binary }
import java.nio.ByteBuffer
import java.util.{ Date, LinkedHashMap => JLinkedHashMap, LinkedList => JLinkedList }
import service.Configs
import scala.collection.convert.WrapAsScala
import _root_.util.{ EpidataMetrics, Ordering }
import com.datastax.driver.core._

object MeasurementService {

  import com.epidata.lib.models.Measurement._

  private var prepareStatementMap: Map[String, PreparedStatement] = Map.empty

  def getPrepareStatement(statementSpec: String): PreparedStatement = {
    if (!prepareStatementMap.contains(statementSpec)) {
      val stm = DB.prepare(statementSpec)
      prepareStatementMap = prepareStatementMap + (statementSpec -> stm)
      stm
    } else {
      prepareStatementMap.get(statementSpec).get
    }
  }

  def reset = {
    prepareStatementMap = Map.empty
  }

  def epochForTs(ts: Date): Int =
    // Divide the timeline into epochs approximately 12 days in duration.
    (ts.getTime() / (1000L * 1000L * 1000L)).toInt

  /**
   * Insert a measurement into the database.
   * @param measurement The Measurement to insert.
   */
  def insert(measurement: Model): Unit = {
    val statements = getInsertStatements(measurement)
    statements.foreach(statement => DB.execute(statement))
  }

  /**
   * Insert a bulk of measurements into the database.
   * @param measurements The Measurement to insert.
   */
  def bulkInsert(measurements: List[Model]): Unit = {
    val statements = measurements.flatMap(measurement => getInsertStatements(measurement))
    DB.batchExecute(statements)
  }

  def getInsertStatements(measurement: Model): List[Statement] = {

    // Insert the measurement itself.
    val measurementInsertStatement = measurement.meas_value match {
      case _: Double | _: Long => getMeasurementInsertStatement(measurement)
      case _: String | _: Binary => getMeasurementInsertStatement(measurement)
      case _ => getMeasurementInsertStatementForNullMeasValue(measurement)
    }

    if (Configs.ingestionKeyCreation) {
      // Insert the measurement partition key into the partition key store. This
      // write is not batched with the write above, for improved performance. If
      // the below write fails we could miss a key in the key table, but that is
      // expected to be rare because the same partition keys will be written
      // repeatedly during normal ingestion. (The possibility, and risk level,
      // of inconsistency is considered acceptable.) The real world performance
      // impact of this write could be eliminated in the future by caching
      // previously written keys in the app server.
      val statement2 = getPrepareStatement(insertKeysStatement).bind(
        measurement.customer,
        measurement.customer_site,
        measurement.collection,
        measurement.dataset
      )
      List(measurementInsertStatement, statement2)
    } else {
      List(measurementInsertStatement)
    }
  }

  /**
   * Find measurements in the database matching the specified parameters.
   * @param customer
   * @param customer_site
   * @param collection
   * @param dataset
   * @param beginTime Beginning of query time interval, inclusive
   * @param endTime End of query time interval, exclusive
   * @param ordering Timestamp ordering of results, if specified.
*/ @Deprecated def find( customer: String, customer_site: String, collection: String, dataset: String, beginTime: Date, endTime: Date, ordering: Ordering.Value = Ordering.Unspecified, tableName: String = com.epidata.lib.models.Measurement.DBTableName ): List[Model] = { import WrapAsScala.iterableAsScalaIterable // Find the epochs from which measurements are required, in timestamp // sorted order. In practice queries will commonly access only one epoch. val orderedEpochs = ordering match { case Ordering.Descending => epochForTs(endTime) to epochForTs(beginTime) by -1 case _ => epochForTs(beginTime) to epochForTs(endTime) } // Define the database query to execute for a single epoch. def queryForEpoch(epoch: Int) = { val query = QueryBuilder.select().all().from(tableName).where() .and(QueryBuilder.eq("customer", customer)) .and(QueryBuilder.eq("customer_site", customer_site)) .and(QueryBuilder.eq("collection", collection)) .and(QueryBuilder.eq("dataset", dataset)) .and(QueryBuilder.eq("epoch", epoch)) .and(QueryBuilder.gte("ts", beginTime)) .and(QueryBuilder.lt("ts", endTime)) // Apply an orderBy parameter if ordering is required. ordering match { case Ordering.Ascending => query.orderBy(QueryBuilder.asc("ts")) case Ordering.Descending => query.orderBy(QueryBuilder.desc("ts")) case _ => } query } // Execute the queries, concatenating results across epochs. orderedEpochs .map(queryForEpoch) .flatMap(DB.execute) .map(rowToMeasurement) .toList } def query( company: String, site: String, station: String, sensor: String, beginTime: Date, endTime: Date, size: Int, batch: String, ordering: Ordering.Value, tableName: String, modelName: String ): String = { // Get the data from Cassandra val rs: ResultSet = MeasurementService.query(company, site, station, sensor, beginTime, endTime, ordering, tableName, size, batch) // Get the next page info val nextPage = rs.getExecutionInfo().getPagingState() val nextBatch = if (nextPage == null) "" else nextPage.toString // only return the available ones by not fetching. val rows = 1.to(rs.getAvailableWithoutFetching()).map(_ => rs.one()) val records = new JLinkedList[JLinkedHashMap[String, Object]]() rows .map(Model.rowToJLinkedHashMap(_, tableName, modelName)) .foreach(m => records.add(m)) // Return the json object JsonHelpers.toJson(records, nextBatch) } def query( customer: String, customer_site: String, collection: String, dataset: String, beginTime: Date, endTime: Date, ordering: Ordering.Value = Ordering.Unspecified, tableName: String = com.epidata.lib.models.Measurement.DBTableName, size: Int = 10000, batch: String = "" ): ResultSet = { // Define the database query to execute for a single epoch. def queryForEpoch = { // Find the epochs from which measurements are required, in timestamp // sorted order. In practice queries will commonly access only one epoch. val orderedEpochs = ordering match { case Ordering.Descending => epochForTs(endTime) to epochForTs(beginTime) by -1 case _ => epochForTs(beginTime) to epochForTs(endTime) } val epochs = new util.ArrayList[Integer]() orderedEpochs.toList.foreach(e => epochs.add(e)) val query = QueryBuilder.select().all().from(tableName).where() .and(QueryBuilder.eq("customer", customer)) .and(QueryBuilder.eq("customer_site", customer_site)) .and(QueryBuilder.eq("collection", collection)) .and(QueryBuilder.eq("dataset", dataset)) .and(QueryBuilder.in("epoch", epochs)) .and(QueryBuilder.gte("ts", beginTime)) .and(QueryBuilder.lt("ts", endTime)) // Apply an orderBy parameter if ordering is required. 
ordering match { case Ordering.Ascending => query.orderBy(QueryBuilder.asc("ts")) case Ordering.Descending => query.orderBy(QueryBuilder.desc("ts")) case _ => } if (batch != null && !batch.isEmpty) { val pagingState = PagingState.fromString(batch); query.setPagingState(pagingState) } query.setFetchSize(size) query } def queryForMeasurementSummary = { val query = QueryBuilder.select().all().from(tableName).where() .and(QueryBuilder.eq("customer", customer)) .and(QueryBuilder.eq("customer_site", customer_site)) .and(QueryBuilder.eq("collection", collection)) .and(QueryBuilder.eq("dataset", dataset)) .and(QueryBuilder.gte("start_time", beginTime)) .and(QueryBuilder.lt("start_time", endTime)) // Apply an orderBy parameter if ordering is required. ordering match { case Ordering.Ascending => query.orderBy(QueryBuilder.asc("start_time")) case Ordering.Descending => query.orderBy(QueryBuilder.desc("start_time")) case _ => } if (batch != null && !batch.isEmpty) { val pagingState = PagingState.fromString(batch); query.setPagingState(pagingState) } query.setFetchSize(size) query } // Execute the query tableName match { case MeasurementSummary.DBTableName => DB.execute(queryForMeasurementSummary) case _ => DB.execute(queryForEpoch) } } private def getMeasurementInsertStatement(measurement: Model): Statement = { val insertStatementsStr = measurement.meas_value match { case _: Double => insertDoubleStatements case _: Long => insertLongStatements case _: String => insertStringStatement case _: Binary => insertBlobStatement } val meas_value = measurement.meas_value match { case value: Binary => ByteBuffer.wrap(value.backing) case _ => measurement.meas_value.asInstanceOf[AnyRef] } val insertStatements = insertStatementsStr.map(getPrepareStatement(_)) (measurement.meas_lower_limit, measurement.meas_upper_limit) match { case (None, None) => // Insert with neither a lower nor upper limit. insertStatements(0).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), meas_value, measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) case (_, None) => // Insert with a lower limit only. insertStatements(1).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), meas_value, measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_lower_limit.get.asInstanceOf[AnyRef], measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) case (None, _) => // Insert with an upper limit only. 
insertStatements(2).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), meas_value, measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_upper_limit.get.asInstanceOf[AnyRef], measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) case _ => // Insert with both a lower and an upper limit. insertStatements(3).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), meas_value, measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_lower_limit.get.asInstanceOf[AnyRef], measurement.meas_upper_limit.get.asInstanceOf[AnyRef], measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) } } private def getMeasurementInsertStatementForNullMeasValue(measurement: Model): Statement = { val insertStatementsStr = insertNullDoubleValueStatement val insertStatements = insertStatementsStr.map(getPrepareStatement(_)) (measurement.meas_lower_limit, measurement.meas_upper_limit) match { case (None, None) => // Insert with neither a lower nor upper limit. insertStatements(0).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) case (_, None) => // Insert with a lower limit only. insertStatements(1).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_lower_limit.get.asInstanceOf[AnyRef], measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) case (None, _) => // Insert with an upper limit only. insertStatements(2).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_upper_limit.get.asInstanceOf[AnyRef], measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) case _ => // Insert with both a lower and an upper limit. 
insertStatements(3).bind( measurement.customer, measurement.customer_site, measurement.collection, measurement.dataset, measurement.epoch: java.lang.Integer, measurement.ts, measurement.key1.getOrElse(""), measurement.key2.getOrElse(""), measurement.key3.getOrElse(""), measurement.meas_datatype.getOrElse(""), measurement.meas_unit.getOrElse(""), measurement.meas_status.getOrElse(""), measurement.meas_lower_limit.get.asInstanceOf[AnyRef], measurement.meas_upper_limit.get.asInstanceOf[AnyRef], measurement.meas_description.getOrElse(""), measurement.val1.getOrElse(""), measurement.val2.getOrElse("") ) } } // Prepared statements for inserting different types of measurements. private lazy val insertDoubleStatements = prepareInserts("", "") private lazy val insertLongStatements = prepareInserts("_l", "_l") private lazy val insertStringStatement = prepareInserts("_s", "") private lazy val insertBlobStatement = prepareInserts("_b", "") private lazy val insertKeysStatement = prepareKeysInsert private lazy val insertNullDoubleValueStatement = prepareNullValueInserts("") private lazy val insertNullLongValueStatement = prepareNullValueInserts("_l") private def prepareInserts(typeSuffix: String, limitTypeSuffix: String) = List( s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_value${typeSuffix}, #meas_unit, #meas_status, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#'), s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_value${typeSuffix}, #meas_unit, #meas_status, #meas_lower_limit${limitTypeSuffix}, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#'), s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_value${typeSuffix}, #meas_unit, #meas_status, #meas_upper_limit${limitTypeSuffix}, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#'), s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_value${typeSuffix}, #meas_unit, #meas_status, #meas_lower_limit${limitTypeSuffix}, #meas_upper_limit${limitTypeSuffix}, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#') ) private def prepareNullValueInserts(typeSuffix: String) = List( s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_unit, #meas_status, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#'), s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_unit, #meas_status, #meas_lower_limit${typeSuffix}, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#'), s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_unit, #meas_status, #meas_upper_limit${typeSuffix}, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 
?)""".stripMargin('#'), s"""#INSERT INTO ${Model.DBTableName} ( #customer, #customer_site, #collection, #dataset, #epoch, #ts, #key1, #key2, #key3, #meas_datatype, #meas_unit, #meas_status, #meas_lower_limit${typeSuffix}, #meas_upper_limit${typeSuffix}, #meas_description, #val1, #val2) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""".stripMargin('#') ) private def prepareKeysInsert = s"""#INSERT INTO ${MeasurementsKeys.DBTableName} ( #customer, #customer_site, #collection, #dataset) VALUES (?, ?, ?, ?)""".stripMargin('#') }
epidataio/epidata-community
play/app/models/MeasurementService.scala
Scala
apache-2.0
21,642
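A worked sketch of the epoch arithmetic used by `epochForTs` and the query methods above: one epoch spans 10^9 ms, roughly 11.6 days, so a query window maps to a small contiguous range of partition keys. The dates below are illustrative:

import java.util.Date

object EpochDemo extends App {
  def epochForTs(ts: Date): Int = (ts.getTime / (1000L * 1000L * 1000L)).toInt

  val begin = new Date(1500000000000L)           // 1500000000000 / 1e9 = epoch 1500
  val end   = new Date(1502500000000L)           // 1502500000000 / 1e9 = epoch 1502
  val ascending = epochForTs(begin) to epochForTs(end)
  println(ascending.toList)                      // List(1500, 1501, 1502): three partitions queried
}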
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** @author John Miller * @version 1.2 * @date Sun Nov 1 13:59:40 EST 2015 * @see LICENSE (MIT style license file). * * @see vlsicad.eecs.umich.edu/BK/Slots/cache/www.cise.ufl.edu/~davis/Morgan/ * @see www.optimization-online.org/DB_FILE/2013/05/3897.pdf * @see www.maths.ed.ac.uk/hall/HuHa12/ERGO-13-001.pdf * @see www.era.lib.ed.ac.uk/bitstream/handle/1842/7952/Huangfu2013.pdf?sequence=2&isAllowed=y */ // U N D E R D E V E L O P M E N T package scalation.minima import scala.collection.mutable.{ArrayBuffer, ArrayStack} import scala.util.control.Breaks.{breakable, break} import scalation.linalgebra.{MatriD, VectoD, VectorD, VectorI} import scalation.random.Randi //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** The `Ftran` object ... */ object Ftran { val m = 10 val x = new VectorD (m+1) val Xindex = new VectorI (m+1) val Lstart = new VectorI (m+1) val Lend = new VectorI (m+1) val Lindex = new VectorI (m+1) val Lvalue = new VectorD (m+1) val Lpiv_i = new VectorI (m+1) val Ustart = new VectorI (m+1) val Uend = new VectorI (m+1) val Uindex = new VectorI (m+1) val Uvalue = new VectorD (m+1) val Upiv_i = new VectorI (m+1) val Upiv_x = new VectorD (m+1) //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** * Figure 2.4: Standard 'ftran' with permuted LU factors */ def ftran () { // Solve with the lower factor for (i <- 1 to m) { val pivot = x(Lpiv_i(i)) if (pivot != 0) for (k <- Lstart(i) until Lend(i)) x(Lindex(k)) += pivot * Lvalue(k) } // for // 2. Solve with the upper factor for (i <- m to 1 by -1) { var pivot = x(Upiv_i(i)) if (pivot != 0) { pivot = pivot / Upiv_x(i) x(Upiv_i(i)) = pivot for (k <- Ustart(i) until Uend(i)) x(Uindex(k)) += pivot * Uvalue(k) } // if } // for } // ftran val Ulookup = new VectorI (m+1) val URstart = new VectorI (m+1) val URindex = new VectorI (m+1) val URcount = new VectorI (m+1) val URvalue = new VectorD (m+1) //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** * Figure 2.5: Form row-wise representation for a permuted factor. */ def permute () { // 1. Counting non-zero entries for each UR eta matrix j for (i <- 1 to m) { for (k <- Ustart(i) until Uend(i)) { val iRow = Ulookup(Uindex(k)) // index in the triangular factor URcount(iRow) += 1 } // for } // for // 2. Constructing the URstart pointer by accumulation URstart(1) = 1 for (i <- 2 to m) URstart(i) = URstart(i-1) + URcount(i-1) // 3. 
Filling UR element, URend becomes ready afterwards val URend = URstart for (i <- 1 to m) { for (k <- Ustart(i) until Uend(i)) { val iRow = Ulookup(Uindex(k)) val iPut = URend(iRow) URend(iRow) += 1 URindex(iPut) = Upiv_i(i) // index in the permuted factor URvalue(iPut) = Uvalue(k) } // for } // for } // permute val Hlookup = new VectorI (m+1) val Hstart = new VectorI (m+1) val Hend = new VectorI (m+1) val Hindex = new VectorI (m+1) val stack = new ArrayStack [Tuple2 [Int, Int]] () val list = ArrayBuffer [Int] () val visited = Array.ofDim [Int] (m+1) //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** * Figure 2.6: DFS based hyper-sparse 'ftran': search stage */ def dfs_search (Xcount: Int) { for (t <- 1 to Xcount) { var (i, k) = (0, 0) // ith eta matrix of H, the next non-zero position to visit i = Hlookup(Xindex(t)) k = Hstart(i) if (visited(i) == 0) { visited(i) = 1 var go = true while (go) { // keep searching current ETA until finish if (k < Hend(i)) { val child = Hlookup(Hindex(k)) // move to a child if it is not yet been visited k += 1 if (visited(child) == 0) { visited(child) = 1 stack.push ((i, k)) // store current eta (the father) to stack i = child k = Hstart(child) // start to search the child } // if } else { list += i // put current eta to the FTRAN to-do list if (stack.isEmpty) go = false // get another eta (the father) from the stack or quit else { val ik = stack.pop (); i = ik._1; k = ik._2 } } // if } // while } // if } // for } // dfs_search } // Ftran object //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** The `SimplexFT` class solves Linear Programming (LP) problems using the Forrest-Tomlin * (FT) Simplex Algorithm. Given a constraint matrix 'a', constant vector 'b' * and cost vector 'c', find values for the solution/decision vector 'x' that * minimize the objective function 'f(x)', while satisfying all of the constraints, * i.e., * * minimize f(x) = c x * subject to a x <= b, x >= 0 * * The FT Simplex Algorithm performs LU Factorization/Decomposition of the * basis-matrix ('ba' = 'B') rather than computing inverses ('b_inv'). It has * benefits over the (Revised) Simplex Algorithm (less run-time, less memory, * and much reduced chance of round off errors). 
* * @param a the constraint matrix * @param b the constant/limit vector * @param c the cost/revenue vector * @param x_B the initial basis (set of indices where x_i is in the basis) */ class SimplexFT (a: MatriD, b: VectoD, c: VectoD, var x_B: Array [Int] = null) extends MinimizerLP { private val DEBUG = false // debug flag private val CHECK = true // CHECK mode => check feasibility for each pivot private val M = a.dim1 // number of constraints (rows in a) private val N = a.dim2 // number of original variables (columns in a) private val MAX_ITER = 200 * N // maximum number of iterations if (b.dim != M) flaw ("constructor", "b.dim = " + b.dim + " != " + M) if (c.dim != N) flaw ("constructor", "c.dim = " + c.dim + " != " + N) if (x_B == null) x_B = setBasis () private val ba: MatriD = a.selectCols (x_B) // basis-matrix (selected columns from matrix-a) private val lu = ba.lud // perform an LU Decomposition on the basis-matrix // private var l_inv = lu._1.inverse // L-inverted // private var u_inv = lu._2.inverse // U-inverted (b_inv = u_inv * l_inv) private val c_B = c.select (x_B) // cost for basic variables // private val c_ = c_B * (u_inv * l_inv) // adjusted cost via inverse private val c_ : VectoD = c_B // adjusted cost via back-substitution - FIX // private val b_ = (u_inv * l_inv) * b // adjusted constants via inverse private val b_ = ba.solve (lu, b) // adjusted constants via back-substitution private var u: VectoD = null // vector used for leaving private var z: VectoD = null // vector used for entering val checker = new CheckLP (a, b, c) //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** There are M+N variables, N decision and M slack variables, of which, * for each iteration, M are chosen for a Basic Feasible Solution (BFS). * The the variables not in the basis are set to zero. Setting j to N * will start with the slack variables in the basis (only works if b >= 0). * @param j the offset to start the basis * @param l the size of the basis */ def setBasis (j: Int = N-M, l: Int = M): Array [Int] = { val idx = Array.ofDim [Int] (l) for (i <- 0 until l) idx(i) = i + j idx } // setBasis //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Find the best variable x_l to enter the basis. Use Dantiz's Rule: index of * max positive (cycling possible) z value. Return -1 to indicate no such column. */ def entering (): Int = { z = c_ *: a - c z.argmaxPos () } // entering //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Find the best variable x_k to leave the basis given that x_l is entering. * Determine the index of the leaving variable corresponding to ROW k using * the Min-Ratio Rule. Return -1 to indicate no such row. * @param l the variable chosen to enter the basis */ def leaving (l: Int): Int = { // u = (u_inv * l_inv) * a.col(l) u = ba.solve (lu._1, lu._2, a.col(l)) if (unbounded (u)) return -1 var k = 0 var r_min = Double.PositiveInfinity for (i <- 0 until M if u(i) > 0) { val r = b_(i) / u(i) if (r < r_min) { r_min = r; k = i} } // for k } // leaving //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Check if u <= 0., the solution is unbounded. 
* @param u the vector for leaving */ def unbounded (u: VectoD): Boolean = { for (i <- 0 until u.dim if u(i) > 0.0) return false flaw ("unbounded", "the solution is UNBOUNDED") true } // unbounded //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Pivot by replacing 'x_k' with 'x_l' in the basis. Update 'b_inv' (actually 'lu'), * 'b_' and 'c_'. * @param k the leaving variable * @param l the entering variable */ def pivot (k: Int, l: Int) { println ("pivot: entering = " + l + " leaving = " + k) x_B(k) = l // update basis (l replaces k) // b_inv(k) /= u(k) // FIX b_(k) /= u(k) for (i <- 0 until M if i != k) { // b_inv(i) -= b_inv(k) * u(i) // FIX b_ (i) -= b_(k) * u(i) } // for // c_ -= b_inv(k) * z(l) // FIX } // pivot //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Solve a Linear Programming (LP) problem using the FT Simplex Algorithm. * Iteratively pivot until there an optimal solution is found or it is * determined that the solution is unbounded. Return the optimal vector 'x'. */ def solve (): VectoD = { if (DEBUG) showTableau (0) // for iter = 0 var k = -1 // the leaving variable (row) var l = -1 // the entering variable (column) breakable { for (it <- 1 to MAX_ITER) { l = entering (); if (l == -1) break // -1 => optimal solution found k = leaving (l); if (k == -1) break // -1 => solution is unbounded pivot (k, l) // pivot: k leaves and l enters if (CHECK && infeasible) break // quit if infeasible if (DEBUG) showTableau (it) }} // for primal // return the optimal vector x } // solve //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Determine whether the current solution 'x = primal' is still primal feasible. */ def infeasible: Boolean = { if ( ! checker.isPrimalFeasible (primal)) { flaw ("infeasible", "solution x is no longer PRIMAL FEASIBLE") true } else { false } // if } // infeasible //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Return the primal (basis only) solution vector 'x'. */ def primal: VectoD = ba.solve (lu, b) // (u_inv * l_inv) * b //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Return the full primal solution vector 'xx'. */ def primalFull (x: VectoD): VectorD = { val xx = new VectorD (N) for (i <- 0 until x_B.length) xx(x_B(i)) = x(i) xx } // primalFull //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Return the dual solution vector 'y'. */ def dual: VectoD = z.slice (N - M, N).asInstanceOf [VectoD] // FIX //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Return the optimal objective function value 'f(x) = c x'. * @param x the primal solution vector */ def objF (x: VectoD): Double = c.select (x_B) dot x //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Show the current FT tableau. * @param iter the number of iterations do far */ def showTableau (iter: Int) { println ("showTableau: --------------------------------------------------------") println (this) println ("showTableau: after " + iter + " iterations, with limit of " + MAX_ITER) } // showTableau //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Convert the current FT tableau 'basis', b_inv', b_', and c_' to a string. 
*/ override def toString: String = { val b_inv = a.selectCols (x_B).inverse // compute b_inv to show tableau var s = new StringBuilder () for (i <- 0 until M) { s ++= "x" + x_B(i) + " | " + b_inv(i) + " | " + b_(i) + "\n" } // for s ++= "c_ | " + c_ + "\n" s.toString } // toString } // SimplexFT class //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** The `SimplexFT` object is used to test the `SimplexFT` class. */ object SimplexFTTest extends App { //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Test the FT Simplex Algorithm for solving Linear Programming problems. * @param a the constraint matrix * @param b the limit/RHS vector * @param c the cost vector * @param x_B the indices of the initial basis */ def test (a: MatriD, b: VectoD, c: VectoD, x_B: Array [Int] = null) { // val lp = new SimplexFT (a, b, c, x_B) // test with user specified basis val lp = new SimplexFT (a, b, c) // test with default basis val x = lp.solve () // the primal solution vector x val xx = lp.primalFull (x) // the full primal solution vector xx val y = lp.dual // the dual solution vector y val f = lp.objF (x) // the minimum value of the objective function println ("primal x = " + x) println ("dual y = " + y) println ("objF f = " + f) println ("optimal? = " + lp.check (xx, y, f)) } // test import scalation.linalgebra.MatrixD //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Test case 1: Initialize matrix 'a', vectors 'b' and 'c', and optionally * the basis 'x_B'. For FT Simplex, matrix 'a' must be augmented with * an identity matrix and vector 'c' augmented with zeros. *------------------------------------------------------------------------- * Minimize z = -1x_0 - 2x_1 + 1x_2 - 1x_3 - 4x_4 + 2x_5 * Subject to 1x_0 + 1x_1 + 1x_2 + 1y_3 + 1y_4 + 1x_5 <= 6 * 2x_0 - 1x_1 - 2x_2 + 1y_3 + 0y_4 + 0x_5 <= 4 * 0x_0 + 0x_1 + 1x_2 + 1y_3 + 2y_4 + 1x_5 <= 4 * where z is the objective variable and x is the decision vector. 
*------------------------------------------------------------------------- * Solution: primal x_1 = 4, x_7 = 8, x_4 = 2 * dual y_1 = -2, y_2 = 0, y_3 = -1 * objF f = -16 * i.e., x = (4, 8, 2), x_B = (1, 7, 4), y = (-2, 0, -1), f = -16 * @see Linear Programming and Network Flows, Example 5.1 */ def test1 () { val a = new MatrixD ((3, 9), 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, // constraint matrix 2.0, -1.0, -2.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 2.0, 1.0, 0.0, 0.0, 1.0) val c = VectorD (-1.0, -2.0, 1.0, -1.0, -4.0, 2.0, 0.0, 0.0, 0.0) // cost vector val b = VectorD (6.0, 4.0, 4.0) // constant vector val x_B = Array (6, 7, 8) // starting basis test (a, b, c) // x_B is optional } // test1 //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Test case 2: * Solution: x = (2/3, 10/3, 0), x_B = (0, 1, 5), f = -22/3 * @see Linear Programming and Network Flows, Example 5.2 */ def test2 () { val a = new MatrixD ((3, 6), 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, // constraint matrix -1.0, 2.0, -2.0, 0.0, 1.0, 0.0, 2.0, 1.0, 0.0, 0.0, 0.0, 1.0) val c = VectorD (-1.0, -2.0, 1.0, 0.0, 0.0, 0.0) // cost vector val b = VectorD (4.0, 6.0, 5.0) // constant vector val x_B = Array (3, 4, 5) // starting basis test (a, b, c) } // test2 //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Test case 3: * Solution: x = (1/3, 0, 13/3), x_B = (0, 2, 4), f = -17 * @see Linear Programming and Network Flows, Example 3.9 */ def test3 () { val a = new MatrixD ((3, 6), 1.0, 1.0, 2.0, 1.0, 0.0, 0.0, // constraint matrix 1.0, 1.0, -1.0, 0.0, 1.0, 0.0, -1.0, 1.0, 1.0, 0.0, 0.0, 1.0) val c = VectorD (1.0, 1.0, -4.0, 0.0, 0.0, 0.0) // cost vector val b = VectorD (9.0, 2.0, 4.0) // constant vector val x_B = Array (3, 4, 5) // starting basis test (a, b, c, x_B) } // test3 //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Test case 4: randomly generated LP problem. */ def test4 () { val rn = Randi (0, 8) val (m, n) = (10, 10) val a = new MatrixD (m, m+n) val b = new VectorD (m) val c = new VectorD (m+n) for (i <- 0 until m) { for (j <- 0 until n) a(i, j) = rn.igen for (j <- n until m+n) a(i, j) = if (j-n == i) 1.0 else 0.0 } // for for (i <- 0 until m) b(i) = 100.0 * (rn.igen + 1) for (j <- 0 until n) c(j) = -10.0 * (rn.igen + 1) test (a, b, c) } // test4 println ("\ntest1 ========================================================") test1 () println ("\ntest2 ========================================================") test2 () println ("\ntest3 ========================================================") test3 () println ("\ntest4 ========================================================") test4 () } // SimplexFTTest object
NBKlepp/fda
scalation_1.2/src/main/scala/scalation/minima/SimplexFT.scala
Scala
mit
20,667
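A minimal usage sketch (not part of the repository above) showing how a small LP is augmented with identity slack columns before being handed to SimplexFT, mirroring the convention of test1; the MatrixD/VectorD constructors and the SimplexFT API are taken from the file, while the concrete numbers are illustrative only.

import scalation.linalgebra.{MatrixD, VectorD}
import scalation.minima.SimplexFT

object SimplexFTSketch extends App {
  // minimize    -3x_0 - 5x_1
  // subject to   x_0 + 2x_1 <= 8
  //             3x_0 +  x_1 <= 9,   x >= 0
  val a = new MatrixD ((2, 4), 1.0, 2.0, 1.0, 0.0,   // original columns + 2x2 identity (slacks)
                               3.0, 1.0, 0.0, 1.0)
  val b = VectorD (8.0, 9.0)                         // right-hand-side limits
  val c = VectorD (-3.0, -5.0, 0.0, 0.0)             // costs, zero-padded for the slacks

  val lp = new SimplexFT (a, b, c)                   // default basis = the two slack columns
  val x  = lp.solve ()                               // basis-only primal solution
  println ("x = " + lp.primalFull (x) + ", f = " + lp.objF (x))
}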
package slick

/** The `dbio` package contains the Database I/O Action implementation.
  * See [[DBIOAction]] for details. */
package object dbio {
  /** Simplified type for a streaming [[DBIOAction]] without effect tracking */
  type StreamingDBIO[+R, +T] = DBIOAction[R, Streaming[T], Effect.All]

  /** Simplified type for a [[DBIOAction]] without streaming or effect tracking */
  type DBIO[+R] = DBIOAction[R, NoStream, Effect.All]
  val DBIO = DBIOAction
}
jkutner/slick
slick/src/main/scala/slick/dbio/package.scala
Scala
bsd-2-clause
463
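A short sketch (assuming Slick's public DBIOAction API) of how the two aliases above are consumed; `DBIO.successful` and `DBIO.seq` come from the DBIOAction companion that the `val DBIO = DBIOAction` line re-exports.

import slick.dbio.DBIO
import scala.concurrent.ExecutionContext.Implicits.global

object DbioSketch {
  val constant:  DBIO[Int]  = DBIO.successful(42)          // lift a plain value into an action
  val doubled:   DBIO[Int]  = constant.map(_ * 2)          // actions compose monadically
  val sequenced: DBIO[Unit] = DBIO.seq(constant, doubled)  // run several actions, discard results
}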
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka.network

import java.net._
import java.io._
import org.junit._
import org.scalatest.junit.JUnitSuite
import java.util.Random
import junit.framework.Assert._
import kafka.producer.SyncProducerConfig
import kafka.api.ProducerRequest
import java.nio.ByteBuffer
import kafka.common.TopicAndPartition
import kafka.message.ByteBufferMessageSet
import java.nio.channels.SelectionKey

class SocketServerTest extends JUnitSuite {

  val server: SocketServer = new SocketServer(0,
                                              host = null,
                                              port = kafka.utils.TestUtils.choosePort,
                                              numProcessorThreads = 1,
                                              maxQueuedRequests = 50,
                                              maxRequestSize = 50)
  server.startup()

  def sendRequest(socket: Socket, id: Short, request: Array[Byte]) {
    val outgoing = new DataOutputStream(socket.getOutputStream)
    outgoing.writeInt(request.length + 2)
    outgoing.writeShort(id)
    outgoing.write(request)
    outgoing.flush()
  }

  def receiveResponse(socket: Socket): Array[Byte] = {
    val incoming = new DataInputStream(socket.getInputStream)
    val len = incoming.readInt()
    val response = new Array[Byte](len)
    incoming.readFully(response)
    response
  }

  /* A simple request handler that just echoes back the response */
  def processRequest(channel: RequestChannel) {
    val request = channel.receiveRequest
    val byteBuffer = ByteBuffer.allocate(request.requestObj.sizeInBytes)
    request.requestObj.writeTo(byteBuffer)
    byteBuffer.rewind()
    val send = new BoundedByteBufferSend(byteBuffer)
    channel.sendResponse(new RequestChannel.Response(request.processor, request, send))
  }

  def connect() = new Socket("localhost", server.port)

  @After
  def cleanup() {
    server.shutdown()
  }

  @Test
  def simpleRequest() {
    val socket = connect()
    val correlationId = -1
    val clientId = SyncProducerConfig.DefaultClientId
    val ackTimeoutMs = SyncProducerConfig.DefaultAckTimeoutMs
    val ack = SyncProducerConfig.DefaultRequiredAcks
    val emptyRequest = new ProducerRequest(correlationId, clientId, ack, ackTimeoutMs,
      collection.mutable.Map[TopicAndPartition, ByteBufferMessageSet]())

    val byteBuffer = ByteBuffer.allocate(emptyRequest.sizeInBytes)
    emptyRequest.writeTo(byteBuffer)
    byteBuffer.rewind()
    val serializedBytes = new Array[Byte](byteBuffer.remaining)
    byteBuffer.get(serializedBytes)

    sendRequest(socket, 0, serializedBytes)
    processRequest(server.requestChannel)
    assertEquals(serializedBytes.toSeq, receiveResponse(socket).toSeq)
  }

  @Test(expected = classOf[IOException])
  def tooBigRequestIsRejected() {
    val tooManyBytes = new Array[Byte](server.maxRequestSize + 1)
    new Random().nextBytes(tooManyBytes)
    val socket = connect()
    sendRequest(socket, 0, tooManyBytes)
    receiveResponse(socket)
  }

  @Test
  def testPipelinedRequestOrdering() {
    val socket = connect()
    val correlationId = -1
    val clientId = SyncProducerConfig.DefaultClientId
    val ackTimeoutMs = SyncProducerConfig.DefaultAckTimeoutMs
    val ack: Short = 0
    val emptyRequest = new ProducerRequest(correlationId, clientId, ack, ackTimeoutMs,
      collection.mutable.Map[TopicAndPartition, ByteBufferMessageSet]())

    val byteBuffer = ByteBuffer.allocate(emptyRequest.sizeInBytes)
    emptyRequest.writeTo(byteBuffer)
    byteBuffer.rewind()
    val serializedBytes = new Array[Byte](byteBuffer.remaining)
    byteBuffer.get(serializedBytes)

    sendRequest(socket, 0, serializedBytes)
    sendRequest(socket, 0, serializedBytes)

    // here the socket server should've read only the first request completely and since the response is not sent yet
    // the selection key should not be readable
    val request = server.requestChannel.receiveRequest
    Assert.assertFalse((request.requestKey.asInstanceOf[SelectionKey].interestOps & SelectionKey.OP_READ) == SelectionKey.OP_READ)

    server.requestChannel.sendResponse(new RequestChannel.Response(0, request, null))
    // if everything is working correctly, until you send a response for the first request,
    // the 2nd request will not be read by the socket server
    val request2 = server.requestChannel.receiveRequest
    server.requestChannel.sendResponse(new RequestChannel.Response(0, request2, null))
    Assert.assertFalse((request.requestKey.asInstanceOf[SelectionKey].interestOps & SelectionKey.OP_READ) == SelectionKey.OP_READ)
  }
}
akosiaris/kafka
core/src/test/scala/unit/kafka/network/SocketServerTest.scala
Scala
apache-2.0
5,396
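The tests above rely on a simple wire framing: a 4-byte length that covers a 2-byte request id plus the payload. A self-contained sketch of that encode/decode pair (hypothetical helper object, not Kafka source):

import java.io._

object FramingSketch {
  def frame(id: Short, payload: Array[Byte]): Array[Byte] = {
    val buf = new ByteArrayOutputStream()
    val out = new DataOutputStream(buf)
    out.writeInt(payload.length + 2)  // the size field counts the id and the payload
    out.writeShort(id)
    out.write(payload)
    out.flush()
    buf.toByteArray
  }

  def unframe(framed: Array[Byte]): (Short, Array[Byte]) = {
    val in  = new DataInputStream(new ByteArrayInputStream(framed))
    val len = in.readInt()
    val id  = in.readShort()
    val payload = new Array[Byte](len - 2)
    in.readFully(payload)
    (id, payload)
  }
}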
package repositories

import scala.concurrent.Future

import models.Task

trait TaskRepository {
  def deleteByUser(userId: String): Future[Boolean]
  def insert(userId: String, task: Task): Future[Task]
  def getTasksByUser(userId: String): Future[Traversable[Task]]
}

class InMemoryTaskRepositoryImpl extends TaskRepository {
  var cache: Map[String, Seq[Task]] = Map()

  def deleteByUser(userId: String): Future[Boolean] = {
    cache = cache.filter(_._1 != userId)
    Future.successful(true)
  }

  def insert(userId: String, task: Task): Future[Task] = {
    val tasks = cache.get(userId).getOrElse(Seq())
    cache += (userId -> (tasks :+ task))
    Future.successful(task)
  }

  def getTasksByUser(userId: String): Future[Traversable[Task]] = {
    Future.successful(cache.get(userId).getOrElse(Seq()))
  }
}
toggm/akka-cake-pattern-testing
src/main/scala/repositories/TaskRepository.scala
Scala
apache-2.0
821
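A usage sketch for the in-memory implementation above; `models.Task` is not shown in this dump, so the `Task("write docs")` constructor call below is a guessed shape, used purely for illustration.

import scala.concurrent.Await
import scala.concurrent.duration._
import models.Task                                   // project type; constructor assumed below
import repositories.InMemoryTaskRepositoryImpl

object TaskRepositorySketch extends App {
  val repo = new InMemoryTaskRepositoryImpl
  Await.result(repo.insert("user-1", Task("write docs")), 1.second)
  println(Await.result(repo.getTasksByUser("user-1"), 1.second))  // the inserted task
  Await.result(repo.deleteByUser("user-1"), 1.second)             // clears the user's tasks
}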
/************************************************************************\
**  Project                                                             **
**       ______  ______   __    ______    ____                          **
**      / ____/ / __  /  / /   / __  /   / __/     (c) 2011-2014        **
**     / /__   / /_/ /  / /   / /_/ /   / /_                            **
**    /___  /  / ____/ / /   / __  /   / __/   Erik Osheim, Tom Switzer **
**   ____/ /  / /     / /   / / | |   / /__                             **
**  /_____/  /_/     /_/   /_/  |_|  /____/     All rights reserved.    **
**                                                                      **
**      Redistribution and use permitted under the MIT license.         **
**                                                                      **
\************************************************************************/

package spire
package random
package rng

import spire.syntax.cfor._
import spire.math.max

/**
 * This object provides helper functions used for seeding arrays of integers or longs.
 *
 * The seeding functions are an adaptation/port of code from the 32-bit and 64-bit
 * implementations of MersenneTwister (MT19937.c, MT19937-64.c).
 *
 * <p>MersenneTwister is a fast, 623-dimensionally equidistributed pseudo random number generator
 * with a <tt>2<sup>19937</sup>&nbsp;-&nbsp;1</tt> long period.
 *
 * <p><b>Reference: </b>
 * Makoto Matsumoto and Takuji Nishimura:
 * "Mersenne Twister: A 623-Dimensionally Equidistributed Uniform Pseudo-Random Number Generator",
 * <i>ACM Transactions on Modeling and Computer Simulation,</i> Vol. 8, No. 1, January 1998, pp 3--30.
 *
 * @see <a href="http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/MT2002/CODES/mt19937ar.c">MT19937.c</a>
 * @see <a href="http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/VERSIONS/C-LANG/mt19937-64.c">MT19937-64.c</a>
 * @see <a href="http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html">Mersenne Twister Home Page</a>
 * @see <a href="http://en.wikipedia.org/wiki/Mersenne_twister">Mersenne Twister @ Wikipedia</a>
 * @author <a href="mailto:dusan.kysel@gmail.com">Du&#x0161;an Kysel</a>
 */
object Utils {

  /*
  final class IntArrayWrapper(transform: Int => Int, array: Array[Int]) {
    def apply(i: Int) = array(transform(i))
    def update(i: Int, v: Int) = array(transform(i)) = v
  }

  final class LongArrayWrapper(transform: Int => Int, array: Array[Long]) {
    def apply(i: Int) = array(transform(i))
    def update(i: Int, v: Int) = array(transform(i)) = v
  }
  */

  @volatile private var seedUniquifier = 8682522807148012L

  def intFromTime(time: Long = System.nanoTime): Int = {
    longFromTime(time).toInt
  }

  def longFromTime(time: Long = System.nanoTime): Long = {
    seedUniquifier += 1
    (seedUniquifier + time)
  }

  def seedFromInt(length: Int, seed: Int = 5489): Array[Int] = {
    val a = new Array[Int](length)
    a(0) = seed
    cfor(1)(_ < length, _ + 1) { i =>
      val x = a(i - 1)
      a(i) = 1812433253 * (x ^ (x >>> 30)) + i
    }
    a
  }

  def seedFromLong(length: Int, seed: Long = 5489): Array[Long] = {
    val a = new Array[Long](length)
    a(0) = seed
    cfor(1)(_ < length, _ + 1) { i =>
      val x = a(i - 1)
      a(i) = 6364136223846793005L * (x ^ (x >>> 62)) + i
    }
    a
  }

  def seedFromArray(length: Int, seed: Array[Int]): Array[Int] = {
    val a = seedFromInt(length, 19650218)
    val length_1 = length - 1

    var i = 1
    var j = 0
    var k = max(length, seed.length)

    while (k != 0) {
      val x = a(i - 1)
      a(i) = a(i) ^ ((x ^ (x >>> 30)) * 1664525) + seed(j) + j
      i += 1
      j += 1

      if (i >= length) {
        a(0) = a(length_1)
        i = 1
      }

      if (j >= seed.length) {
        j = 0
      }

      k -= 1
    }

    k = length_1

    while (k != 0) {
      val x = a(i - 1)
      a(i) = a(i) ^ ((x ^ (x >>> 30)) * 1566083941) - i
      i += 1

      if (i >= length) {
        a(0) = a(length_1)
        i = 1
      }

      k -= 1
    }

    a(0) = 0x80000000 // MSB is 1; assuring non-zero initial array
    a
  }

  def seedFromArray(length: Int, seed: Array[Long]): Array[Long] = {
    val a = seedFromLong(length, 19650218)
    val length_1 = length - 1

    var i = 1
    var j = 0
    var k = max(length, seed.length)

    while (k != 0) {
      val x = a(i - 1)
      a(i) = a(i) ^ ((x ^ (x >>> 62)) * 3935559000370003845L) + seed(j) + j
      i += 1
      j += 1

      if (i >= length) {
        a(0) = a(length_1)
        i = 1
      }

      if (j >= seed.length) {
        j = 0
      }

      k -= 1
    }

    k = length - 1

    while (k != 0) {
      val x = a(i - 1)
      a(i) = a(i) ^ ((x ^ (x >>> 62)) * 2862933555777941757L) - i
      i += 1

      if (i >= length) {
        a(0) = a(length_1)
        i = 1
      }

      k -= 1
    }

    a(0) = 1L << 63 // MSB is 1; assuring non-zero initial array
    a
  }
}
lrytz/spire
core/src/main/scala/spire/random/rng/Utils.scala
Scala
mit
4,958
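A brief sketch of how the helpers above feed a Mersenne Twister: derive a time-based scalar seed, then expand it into the 624-word state array that MT19937 expects (624 is MT19937's state size, not something Utils fixes).

import spire.random.rng.Utils

object SeedSketch extends App {
  val seed  = Utils.intFromTime()                    // scalar seed from the current time
  val state = Utils.seedFromInt(624, seed)           // expanded MT19937 state array
  val mixed = Utils.seedFromArray(624, Array(0x123, 0x234, 0x345))  // array-seeded variant
  println(state.take(4).mkString(", "))
}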
package mesosphere.marathon package core.task.tracker.impl import akka.actor.Status import akka.testkit.TestProbe import mesosphere.AkkaUnitTest import mesosphere.marathon.test.SettableClock import mesosphere.marathon.core.instance.TestInstanceBuilder import mesosphere.marathon.core.instance.TestInstanceBuilder._ import mesosphere.marathon.core.instance.update.{ InstanceUpdateEffect, InstanceUpdateOperation } import mesosphere.marathon.core.task.Task import mesosphere.marathon.state.PathId import mesosphere.marathon.test.MarathonTestHelper import org.apache.mesos.Protos.{ TaskID, TaskStatus } class InstanceStateOpProcessorDelegateTest extends AkkaUnitTest { class Fixture { lazy val clock = new SettableClock() lazy val config = MarathonTestHelper.defaultConfig() lazy val taskTrackerProbe = TestProbe() lazy val delegate = new InstanceStateOpProcessorDelegate(clock, config, taskTrackerProbe.ref) lazy val timeoutDuration = delegate.timeout.duration def timeoutFromNow = clock.now() + timeoutDuration } "InstanceCreationHandlerAndUpdaterDelegate" should { "Launch succeeds" in { val f = new Fixture val appId: PathId = PathId("/test") val instance = TestInstanceBuilder.newBuilderWithLaunchedTask(appId).getInstance() val stateOp = InstanceUpdateOperation.LaunchEphemeral(instance) val expectedStateChange = InstanceUpdateEffect.Update(instance, None, events = Nil) When("process is called") val create = f.delegate.process(stateOp) Then("an update operation is requested") f.taskTrackerProbe.expectMsg( InstanceTrackerActor.ForwardTaskOp(f.timeoutFromNow, instance.instanceId, stateOp) ) When("the request is acknowledged") f.taskTrackerProbe.reply(expectedStateChange) Then("The reply is Unit, because task updates are deferred") create.futureValue shouldBe a[InstanceUpdateEffect.Update] } "Launch fails" in { val f = new Fixture val appId: PathId = PathId("/test") val instance = TestInstanceBuilder.newBuilderWithLaunchedTask(appId).getInstance() val stateOp = InstanceUpdateOperation.LaunchEphemeral(instance) When("process is called") val create = f.delegate.process(stateOp) Then("an update operation is requested") f.taskTrackerProbe.expectMsg( InstanceTrackerActor.ForwardTaskOp(f.timeoutFromNow, instance.instanceId, stateOp) ) When("the response is an error") val cause: RuntimeException = new scala.RuntimeException("test failure") f.taskTrackerProbe.reply(Status.Failure(cause)) Then("The reply is the value of task") val createValue = create.failed.futureValue createValue.getMessage should include(appId.toString) createValue.getMessage should include(instance.instanceId.idString) createValue.getMessage should include("Launch") createValue.getCause should be(cause) } "Expunge succeeds" in { val f = new Fixture val appId: PathId = PathId("/test") val instance = TestInstanceBuilder.newBuilderWithLaunchedTask(appId).getInstance() val stateOp = InstanceUpdateOperation.ForceExpunge(instance.instanceId) val expectedStateChange = InstanceUpdateEffect.Expunge(instance, events = Nil) When("terminated is called") val terminated = f.delegate.process(stateOp) Then("an expunge operation is requested") f.taskTrackerProbe.expectMsg( InstanceTrackerActor.ForwardTaskOp(f.timeoutFromNow, instance.instanceId, stateOp) ) When("the request is acknowledged") f.taskTrackerProbe.reply(expectedStateChange) Then("The reply is the value of the future") terminated.futureValue should be(expectedStateChange) } "Expunge fails" in { val f = new Fixture val appId: PathId = PathId("/test") val instance = 
TestInstanceBuilder.newBuilderWithLaunchedTask(appId).getInstance() val stateOp = InstanceUpdateOperation.ForceExpunge(instance.instanceId) When("process is called") val terminated = f.delegate.process(stateOp) Then("an expunge operation is requested") f.taskTrackerProbe.expectMsg( InstanceTrackerActor.ForwardTaskOp(f.timeoutFromNow, instance.instanceId, stateOp) ) When("the response is an error") val cause: RuntimeException = new scala.RuntimeException("test failure") f.taskTrackerProbe.reply(Status.Failure(cause)) Then("The reply is the value of task") val terminatedValue = terminated.failed.futureValue terminatedValue.getMessage should include(appId.toString) terminatedValue.getMessage should include(instance.instanceId.idString) terminatedValue.getMessage should include("Expunge") terminatedValue.getCause should be(cause) } "StatusUpdate succeeds" in { val f = new Fixture val appId: PathId = PathId("/test") val instance = TestInstanceBuilder.newBuilderWithLaunchedTask(appId).getInstance() val task: Task = instance.appTask val taskIdString = task.taskId.idString val now = f.clock.now() val update = TaskStatus.newBuilder().setTaskId(TaskID.newBuilder().setValue(taskIdString)).buildPartial() val stateOp = InstanceUpdateOperation.MesosUpdate(instance, update, now) When("process is called") val statusUpdate = f.delegate.process(stateOp) Then("an update operation is requested") f.taskTrackerProbe.expectMsg( InstanceTrackerActor.ForwardTaskOp(f.timeoutFromNow, instance.instanceId, stateOp) ) When("the request is acknowledged") val expectedStateChange = InstanceUpdateEffect.Update(instance, Some(instance), events = Nil) f.taskTrackerProbe.reply(expectedStateChange) Then("The reply is the value of the future") statusUpdate.futureValue should be(expectedStateChange) } "StatusUpdate fails" in { val f = new Fixture val appId: PathId = PathId("/test") val instance = TestInstanceBuilder.newBuilderWithLaunchedTask(appId).getInstance() val task: Task = instance.appTask val taskId = task.taskId val now = f.clock.now() val update = TaskStatus.newBuilder().setTaskId(taskId.mesosTaskId).buildPartial() val stateOp = InstanceUpdateOperation.MesosUpdate(instance, update, now) When("statusUpdate is called") val statusUpdate = f.delegate.process(stateOp) Then("an update operation is requested") f.taskTrackerProbe.expectMsg( InstanceTrackerActor.ForwardTaskOp(f.timeoutFromNow, instance.instanceId, stateOp) ) When("the response is an error") val cause: RuntimeException = new scala.RuntimeException("test failure") f.taskTrackerProbe.reply(Status.Failure(cause)) Then("The reply is the value of task") val updateValue = statusUpdate.failed.futureValue updateValue.getMessage should include(appId.toString) updateValue.getMessage should include(taskId.toString) updateValue.getMessage should include("MesosUpdate") updateValue.getCause should be(cause) } } }
guenter/marathon
src/test/scala/mesosphere/marathon/core/task/tracker/impl/InstanceStateOpProcessorDelegateTest.scala
Scala
apache-2.0
7,173
package controllers

import play.api.mvc._
import models.Environment

object Home extends Controller {

  def index = Action { implicit request =>
    Ok(views.html.home(Environment.apply))
  }
}
tanacasino/shanshan
app/controllers/Home.scala
Scala
mit
198
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.api.python import java.io._ import java.net._ import java.nio.charset.StandardCharsets import java.nio.charset.StandardCharsets.UTF_8 import java.util.concurrent.atomic.AtomicBoolean import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.util.control.NonFatal import org.json4s.JsonAST._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods.{compact, render} import org.apache.spark._ import org.apache.spark.internal.Logging import org.apache.spark.internal.config.{BUFFER_SIZE, EXECUTOR_CORES} import org.apache.spark.internal.config.Python._ import org.apache.spark.resource.ResourceProfile.{EXECUTOR_CORES_LOCAL_PROPERTY, PYSPARK_MEMORY_LOCAL_PROPERTY} import org.apache.spark.security.SocketAuthHelper import org.apache.spark.util._ /** * Enumerate the type of command that will be sent to the Python worker */ private[spark] object PythonEvalType { val NON_UDF = 0 val SQL_BATCHED_UDF = 100 val SQL_SCALAR_PANDAS_UDF = 200 val SQL_GROUPED_MAP_PANDAS_UDF = 201 val SQL_GROUPED_AGG_PANDAS_UDF = 202 val SQL_WINDOW_AGG_PANDAS_UDF = 203 val SQL_SCALAR_PANDAS_ITER_UDF = 204 val SQL_MAP_PANDAS_ITER_UDF = 205 val SQL_COGROUPED_MAP_PANDAS_UDF = 206 def toString(pythonEvalType: Int): String = pythonEvalType match { case NON_UDF => "NON_UDF" case SQL_BATCHED_UDF => "SQL_BATCHED_UDF" case SQL_SCALAR_PANDAS_UDF => "SQL_SCALAR_PANDAS_UDF" case SQL_GROUPED_MAP_PANDAS_UDF => "SQL_GROUPED_MAP_PANDAS_UDF" case SQL_GROUPED_AGG_PANDAS_UDF => "SQL_GROUPED_AGG_PANDAS_UDF" case SQL_WINDOW_AGG_PANDAS_UDF => "SQL_WINDOW_AGG_PANDAS_UDF" case SQL_SCALAR_PANDAS_ITER_UDF => "SQL_SCALAR_PANDAS_ITER_UDF" case SQL_MAP_PANDAS_ITER_UDF => "SQL_MAP_PANDAS_ITER_UDF" case SQL_COGROUPED_MAP_PANDAS_UDF => "SQL_COGROUPED_MAP_PANDAS_UDF" } } /** * A helper class to run Python mapPartition/UDFs in Spark. * * funcs is a list of independent Python functions, each one of them is a list of chained Python * functions (from bottom to top). */ private[spark] abstract class BasePythonRunner[IN, OUT]( funcs: Seq[ChainedPythonFunctions], evalType: Int, argOffsets: Array[Array[Int]]) extends Logging { require(funcs.length == argOffsets.length, "argOffsets should have the same length as funcs") private val conf = SparkEnv.get.conf protected val bufferSize: Int = conf.get(BUFFER_SIZE) private val reuseWorker = conf.get(PYTHON_WORKER_REUSE) // All the Python functions should have the same exec, version and envvars. 
protected val envVars: java.util.Map[String, String] = funcs.head.funcs.head.envVars protected val pythonExec: String = funcs.head.funcs.head.pythonExec protected val pythonVer: String = funcs.head.funcs.head.pythonVer // TODO: support accumulator in multiple UDF protected val accumulator: PythonAccumulatorV2 = funcs.head.funcs.head.accumulator // Python accumulator is always set in production except in tests. See SPARK-27893 private val maybeAccumulator: Option[PythonAccumulatorV2] = Option(accumulator) // Expose a ServerSocket to support method calls via socket from Python side. private[spark] var serverSocket: Option[ServerSocket] = None // Authentication helper used when serving method calls via socket from Python side. private lazy val authHelper = new SocketAuthHelper(conf) // each python worker gets an equal part of the allocation. the worker pool will grow to the // number of concurrent tasks, which is determined by the number of cores in this executor. private def getWorkerMemoryMb(mem: Option[Long], cores: Int): Option[Long] = { mem.map(_ / cores) } def compute( inputIterator: Iterator[IN], partitionIndex: Int, context: TaskContext): Iterator[OUT] = { val startTime = System.currentTimeMillis val env = SparkEnv.get // Get the executor cores and pyspark memory, they are passed via the local properties when // the user specified them in a ResourceProfile. val execCoresProp = Option(context.getLocalProperty(EXECUTOR_CORES_LOCAL_PROPERTY)) val memoryMb = Option(context.getLocalProperty(PYSPARK_MEMORY_LOCAL_PROPERTY)).map(_.toLong) val localdir = env.blockManager.diskBlockManager.localDirs.map(f => f.getPath()).mkString(",") // if OMP_NUM_THREADS is not explicitly set, override it with the number of cores if (conf.getOption("spark.executorEnv.OMP_NUM_THREADS").isEmpty) { // SPARK-28843: limit the OpenMP thread pool to the number of cores assigned to this executor // this avoids high memory consumption with pandas/numpy because of a large OpenMP thread pool // see https://github.com/numpy/numpy/issues/10455 execCoresProp.foreach(envVars.put("OMP_NUM_THREADS", _)) } envVars.put("SPARK_LOCAL_DIRS", localdir) // it's also used in monitor thread if (reuseWorker) { envVars.put("SPARK_REUSE_WORKER", "1") } // SPARK-30299 this could be wrong with standalone mode when executor // cores might not be correct because it defaults to all cores on the box. val execCores = execCoresProp.map(_.toInt).getOrElse(conf.get(EXECUTOR_CORES)) val workerMemoryMb = getWorkerMemoryMb(memoryMb, execCores) if (workerMemoryMb.isDefined) { envVars.put("PYSPARK_EXECUTOR_MEMORY_MB", workerMemoryMb.get.toString) } envVars.put("SPARK_BUFFER_SIZE", bufferSize.toString) val worker: Socket = env.createPythonWorker(pythonExec, envVars.asScala.toMap) // Whether is the worker released into idle pool or closed. When any codes try to release or // close a worker, they should use `releasedOrClosed.compareAndSet` to flip the state to make // sure there is only one winner that is going to release or close the worker. 
val releasedOrClosed = new AtomicBoolean(false) // Start a thread to feed the process input from our parent's iterator val writerThread = newWriterThread(env, worker, inputIterator, partitionIndex, context) context.addTaskCompletionListener[Unit] { _ => writerThread.shutdownOnTaskCompletion() if (!reuseWorker || releasedOrClosed.compareAndSet(false, true)) { try { worker.close() } catch { case e: Exception => logWarning("Failed to close worker socket", e) } } } writerThread.start() new MonitorThread(env, worker, context).start() // Return an iterator that read lines from the process's stdout val stream = new DataInputStream(new BufferedInputStream(worker.getInputStream, bufferSize)) val stdoutIterator = newReaderIterator( stream, writerThread, startTime, env, worker, releasedOrClosed, context) new InterruptibleIterator(context, stdoutIterator) } protected def newWriterThread( env: SparkEnv, worker: Socket, inputIterator: Iterator[IN], partitionIndex: Int, context: TaskContext): WriterThread protected def newReaderIterator( stream: DataInputStream, writerThread: WriterThread, startTime: Long, env: SparkEnv, worker: Socket, releasedOrClosed: AtomicBoolean, context: TaskContext): Iterator[OUT] /** * The thread responsible for writing the data from the PythonRDD's parent iterator to the * Python process. */ abstract class WriterThread( env: SparkEnv, worker: Socket, inputIterator: Iterator[IN], partitionIndex: Int, context: TaskContext) extends Thread(s"stdout writer for $pythonExec") { @volatile private var _exception: Throwable = null private val pythonIncludes = funcs.flatMap(_.funcs.flatMap(_.pythonIncludes.asScala)).toSet private val broadcastVars = funcs.flatMap(_.funcs.flatMap(_.broadcastVars.asScala)) setDaemon(true) /** Contains the throwable thrown while writing the parent iterator to the Python process. */ def exception: Option[Throwable] = Option(_exception) /** Terminates the writer thread, ignoring any exceptions that may occur due to cleanup. */ def shutdownOnTaskCompletion(): Unit = { assert(context.isCompleted) this.interrupt() } /** * Writes a command section to the stream connected to the Python worker. */ protected def writeCommand(dataOut: DataOutputStream): Unit /** * Writes input data to the stream connected to the Python worker. */ protected def writeIteratorToStream(dataOut: DataOutputStream): Unit override def run(): Unit = Utils.logUncaughtExceptions { try { TaskContext.setTaskContext(context) val stream = new BufferedOutputStream(worker.getOutputStream, bufferSize) val dataOut = new DataOutputStream(stream) // Partition index dataOut.writeInt(partitionIndex) // Python version of driver PythonRDD.writeUTF(pythonVer, dataOut) // Init a ServerSocket to accept method calls from Python side. val isBarrier = context.isInstanceOf[BarrierTaskContext] if (isBarrier) { serverSocket = Some(new ServerSocket(/* port */ 0, /* backlog */ 1, InetAddress.getByName("localhost"))) // A call to accept() for ServerSocket shall block infinitely. serverSocket.map(_.setSoTimeout(0)) new Thread("accept-connections") { setDaemon(true) override def run(): Unit = { while (!serverSocket.get.isClosed()) { var sock: Socket = null try { sock = serverSocket.get.accept() // Wait for function call from python side. sock.setSoTimeout(10000) authHelper.authClient(sock) val input = new DataInputStream(sock.getInputStream()) val requestMethod = input.readInt() // The BarrierTaskContext function may wait infinitely, socket shall not timeout // before the function finishes. 
sock.setSoTimeout(0) requestMethod match { case BarrierTaskContextMessageProtocol.BARRIER_FUNCTION => barrierAndServe(requestMethod, sock) case BarrierTaskContextMessageProtocol.ALL_GATHER_FUNCTION => val length = input.readInt() val message = new Array[Byte](length) input.readFully(message) barrierAndServe(requestMethod, sock, new String(message, UTF_8)) case _ => val out = new DataOutputStream(new BufferedOutputStream( sock.getOutputStream)) writeUTF(BarrierTaskContextMessageProtocol.ERROR_UNRECOGNIZED_FUNCTION, out) } } catch { case e: SocketException if e.getMessage.contains("Socket closed") => // It is possible that the ServerSocket is not closed, but the native socket // has already been closed, we shall catch and silently ignore this case. } finally { if (sock != null) { sock.close() } } } } }.start() } val secret = if (isBarrier) { authHelper.secret } else { "" } // Close ServerSocket on task completion. serverSocket.foreach { server => context.addTaskCompletionListener[Unit](_ => server.close()) } val boundPort: Int = serverSocket.map(_.getLocalPort).getOrElse(0) if (boundPort == -1) { val message = "ServerSocket failed to bind to Java side." logError(message) throw new SparkException(message) } else if (isBarrier) { logDebug(s"Started ServerSocket on port $boundPort.") } // Write out the TaskContextInfo dataOut.writeBoolean(isBarrier) dataOut.writeInt(boundPort) val secretBytes = secret.getBytes(UTF_8) dataOut.writeInt(secretBytes.length) dataOut.write(secretBytes, 0, secretBytes.length) dataOut.writeInt(context.stageId()) dataOut.writeInt(context.partitionId()) dataOut.writeInt(context.attemptNumber()) dataOut.writeLong(context.taskAttemptId()) val resources = context.resources() dataOut.writeInt(resources.size) resources.foreach { case (k, v) => PythonRDD.writeUTF(k, dataOut) PythonRDD.writeUTF(v.name, dataOut) dataOut.writeInt(v.addresses.size) v.addresses.foreach { case addr => PythonRDD.writeUTF(addr, dataOut) } } val localProps = context.getLocalProperties.asScala dataOut.writeInt(localProps.size) localProps.foreach { case (k, v) => PythonRDD.writeUTF(k, dataOut) PythonRDD.writeUTF(v, dataOut) } // sparkFilesDir PythonRDD.writeUTF(SparkFiles.getRootDirectory(), dataOut) // Python includes (*.zip and *.egg files) dataOut.writeInt(pythonIncludes.size) for (include <- pythonIncludes) { PythonRDD.writeUTF(include, dataOut) } // Broadcast variables val oldBids = PythonRDD.getWorkerBroadcasts(worker) val newBids = broadcastVars.map(_.id).toSet // number of different broadcasts val toRemove = oldBids.diff(newBids) val addedBids = newBids.diff(oldBids) val cnt = toRemove.size + addedBids.size val needsDecryptionServer = env.serializerManager.encryptionEnabled && addedBids.nonEmpty dataOut.writeBoolean(needsDecryptionServer) dataOut.writeInt(cnt) def sendBidsToRemove(): Unit = { for (bid <- toRemove) { // remove the broadcast from worker dataOut.writeLong(-bid - 1) // bid >= 0 oldBids.remove(bid) } } if (needsDecryptionServer) { // if there is encryption, we setup a server which reads the encrypted files, and sends // the decrypted data to python val idsAndFiles = broadcastVars.flatMap { broadcast => if (!oldBids.contains(broadcast.id)) { Some((broadcast.id, broadcast.value.path)) } else { None } } val server = new EncryptedPythonBroadcastServer(env, idsAndFiles) dataOut.writeInt(server.port) logTrace(s"broadcast decryption server setup on ${server.port}") PythonRDD.writeUTF(server.secret, dataOut) sendBidsToRemove() idsAndFiles.foreach { case (id, _) => // send new broadcast 
dataOut.writeLong(id) oldBids.add(id) } dataOut.flush() logTrace("waiting for python to read decrypted broadcast data from server") server.waitTillBroadcastDataSent() logTrace("done sending decrypted data to python") } else { sendBidsToRemove() for (broadcast <- broadcastVars) { if (!oldBids.contains(broadcast.id)) { // send new broadcast dataOut.writeLong(broadcast.id) PythonRDD.writeUTF(broadcast.value.path, dataOut) oldBids.add(broadcast.id) } } } dataOut.flush() dataOut.writeInt(evalType) writeCommand(dataOut) writeIteratorToStream(dataOut) dataOut.writeInt(SpecialLengths.END_OF_STREAM) dataOut.flush() } catch { case t: Throwable if (NonFatal(t) || t.isInstanceOf[Exception]) => if (context.isCompleted || context.isInterrupted) { logDebug("Exception/NonFatal Error thrown after task completion (likely due to " + "cleanup)", t) if (!worker.isClosed) { Utils.tryLog(worker.shutdownOutput()) } } else { // We must avoid throwing exceptions/NonFatals here, because the thread uncaught // exception handler will kill the whole executor (see // org.apache.spark.executor.Executor). _exception = t if (!worker.isClosed) { Utils.tryLog(worker.shutdownOutput()) } } } } /** * Gateway to call BarrierTaskContext methods. */ def barrierAndServe(requestMethod: Int, sock: Socket, message: String = ""): Unit = { require( serverSocket.isDefined, "No available ServerSocket to redirect the BarrierTaskContext method call." ) val out = new DataOutputStream(new BufferedOutputStream(sock.getOutputStream)) try { val messages = requestMethod match { case BarrierTaskContextMessageProtocol.BARRIER_FUNCTION => context.asInstanceOf[BarrierTaskContext].barrier() Array(BarrierTaskContextMessageProtocol.BARRIER_RESULT_SUCCESS) case BarrierTaskContextMessageProtocol.ALL_GATHER_FUNCTION => context.asInstanceOf[BarrierTaskContext].allGather(message) } out.writeInt(messages.length) messages.foreach(writeUTF(_, out)) } catch { case e: SparkException => writeUTF(e.getMessage, out) } finally { out.close() } } def writeUTF(str: String, dataOut: DataOutputStream): Unit = { val bytes = str.getBytes(UTF_8) dataOut.writeInt(bytes.length) dataOut.write(bytes) } } abstract class ReaderIterator( stream: DataInputStream, writerThread: WriterThread, startTime: Long, env: SparkEnv, worker: Socket, releasedOrClosed: AtomicBoolean, context: TaskContext) extends Iterator[OUT] { private var nextObj: OUT = _ private var eos = false override def hasNext: Boolean = nextObj != null || { if (!eos) { nextObj = read() hasNext } else { false } } override def next(): OUT = { if (hasNext) { val obj = nextObj nextObj = null.asInstanceOf[OUT] obj } else { Iterator.empty.next() } } /** * Reads next object from the stream. * When the stream reaches end of data, needs to process the following sections, * and then returns null. 
*/ protected def read(): OUT protected def handleTimingData(): Unit = { // Timing data from worker val bootTime = stream.readLong() val initTime = stream.readLong() val finishTime = stream.readLong() val boot = bootTime - startTime val init = initTime - bootTime val finish = finishTime - initTime val total = finishTime - startTime logInfo("Times: total = %s, boot = %s, init = %s, finish = %s".format(total, boot, init, finish)) val memoryBytesSpilled = stream.readLong() val diskBytesSpilled = stream.readLong() context.taskMetrics.incMemoryBytesSpilled(memoryBytesSpilled) context.taskMetrics.incDiskBytesSpilled(diskBytesSpilled) } protected def handlePythonException(): PythonException = { // Signals that an exception has been thrown in python val exLength = stream.readInt() val obj = new Array[Byte](exLength) stream.readFully(obj) new PythonException(new String(obj, StandardCharsets.UTF_8), writerThread.exception.getOrElse(null)) } protected def handleEndOfDataSection(): Unit = { // We've finished the data section of the output, but we can still // read some accumulator updates: val numAccumulatorUpdates = stream.readInt() (1 to numAccumulatorUpdates).foreach { _ => val updateLen = stream.readInt() val update = new Array[Byte](updateLen) stream.readFully(update) maybeAccumulator.foreach(_.add(update)) } // Check whether the worker is ready to be re-used. if (stream.readInt() == SpecialLengths.END_OF_STREAM) { if (reuseWorker && releasedOrClosed.compareAndSet(false, true)) { env.releasePythonWorker(pythonExec, envVars.asScala.toMap, worker) } } eos = true } protected val handleException: PartialFunction[Throwable, OUT] = { case e: Exception if context.isInterrupted => logDebug("Exception thrown after task interruption", e) throw new TaskKilledException(context.getKillReason().getOrElse("unknown reason")) case e: Exception if writerThread.exception.isDefined => logError("Python worker exited unexpectedly (crashed)", e) logError("This may have been caused by a prior exception:", writerThread.exception.get) throw writerThread.exception.get case eof: EOFException => throw new SparkException("Python worker exited unexpectedly (crashed)", eof) } } /** * It is necessary to have a monitor thread for python workers if the user cancels with * interrupts disabled. In that case we will need to explicitly kill the worker, otherwise the * threads can block indefinitely. */ class MonitorThread(env: SparkEnv, worker: Socket, context: TaskContext) extends Thread(s"Worker Monitor for $pythonExec") { /** How long to wait before killing the python worker if a task cannot be interrupted. */ private val taskKillTimeout = env.conf.get(PYTHON_TASK_KILL_TIMEOUT) setDaemon(true) override def run(): Unit = { // Kill the worker if it is interrupted, checking until task completion. // TODO: This has a race condition if interruption occurs, as completed may still become true. while (!context.isInterrupted && !context.isCompleted) { Thread.sleep(2000) } if (!context.isCompleted) { Thread.sleep(taskKillTimeout) if (!context.isCompleted) { try { // Mimic the task name used in `Executor` to help the user find out the task to blame. 
val taskName = s"${context.partitionId}.${context.attemptNumber} " + s"in stage ${context.stageId} (TID ${context.taskAttemptId})" logWarning(s"Incomplete task $taskName interrupted: Attempting to kill Python Worker") env.destroyPythonWorker(pythonExec, envVars.asScala.toMap, worker) } catch { case e: Exception => logError("Exception when trying to kill worker", e) } } } } } } private[spark] object PythonRunner { def apply(func: PythonFunction): PythonRunner = { new PythonRunner(Seq(ChainedPythonFunctions(Seq(func)))) } } /** * A helper class to run Python mapPartition in Spark. */ private[spark] class PythonRunner(funcs: Seq[ChainedPythonFunctions]) extends BasePythonRunner[Array[Byte], Array[Byte]]( funcs, PythonEvalType.NON_UDF, Array(Array(0))) { protected override def newWriterThread( env: SparkEnv, worker: Socket, inputIterator: Iterator[Array[Byte]], partitionIndex: Int, context: TaskContext): WriterThread = { new WriterThread(env, worker, inputIterator, partitionIndex, context) { protected override def writeCommand(dataOut: DataOutputStream): Unit = { val command = funcs.head.funcs.head.command dataOut.writeInt(command.length) dataOut.write(command) } protected override def writeIteratorToStream(dataOut: DataOutputStream): Unit = { PythonRDD.writeIteratorToStream(inputIterator, dataOut) dataOut.writeInt(SpecialLengths.END_OF_DATA_SECTION) } } } protected override def newReaderIterator( stream: DataInputStream, writerThread: WriterThread, startTime: Long, env: SparkEnv, worker: Socket, releasedOrClosed: AtomicBoolean, context: TaskContext): Iterator[Array[Byte]] = { new ReaderIterator(stream, writerThread, startTime, env, worker, releasedOrClosed, context) { protected override def read(): Array[Byte] = { if (writerThread.exception.isDefined) { throw writerThread.exception.get } try { stream.readInt() match { case length if length > 0 => val obj = new Array[Byte](length) stream.readFully(obj) obj case 0 => Array.emptyByteArray case SpecialLengths.TIMING_DATA => handleTimingData() read() case SpecialLengths.PYTHON_EXCEPTION_THROWN => throw handlePythonException() case SpecialLengths.END_OF_DATA_SECTION => handleEndOfDataSection() null } } catch handleException } } } } private[spark] object SpecialLengths { val END_OF_DATA_SECTION = -1 val PYTHON_EXCEPTION_THROWN = -2 val TIMING_DATA = -3 val END_OF_STREAM = -4 val NULL = -5 val START_ARROW_STREAM = -6 } private[spark] object BarrierTaskContextMessageProtocol { val BARRIER_FUNCTION = 1 val ALL_GATHER_FUNCTION = 2 val BARRIER_RESULT_SUCCESS = "success" val ERROR_UNRECOGNIZED_FUNCTION = "Not recognized function call from python side." }
zuotingbing/spark
core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala
Scala
apache-2.0
26,156
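A simplified reader loop (hypothetical, not Spark source) showing how the SpecialLengths sentinels above are interpreted on the worker's output stream: a positive int is a payload length, negative ints are control codes; the TIMING_DATA section carries three timing longs plus two spill counters.

import java.io.DataInputStream

object SentinelSketch {
  val TIMING_DATA = -3

  /** Next payload, or None once an end/error sentinel is seen. */
  def readFrame(in: DataInputStream): Option[Array[Byte]] =
    in.readInt() match {
      case n if n > 0 =>
        val buf = new Array[Byte](n)
        in.readFully(buf)
        Some(buf)                        // ordinary length-prefixed payload
      case 0 =>
        Some(Array.emptyByteArray)
      case TIMING_DATA =>
        in.skipBytes(5 * 8)              // 3 timing longs + 2 spill counters
        readFrame(in)
      case _ =>
        None                             // END_OF_DATA_SECTION, exception, etc.
    }
}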
class MyActivity extends Activity with TypedActivity {
  lazy val title = findView( TR.my_title )

  override def onCreate( savedInstanceState: Bundle ) = {
    super.onCreate( savedInstanceState )
    setContentView( R.layout.main )
    title.setText( "Hello Scala!" )
  }

  override def onResume() = {
    super.onResume()
    title.setText( "Welcome back, Scala!" )
  }
}
Taig/Scala-on-Android
src/main/page/typed-resources/activity3.scala
Scala
mit
408
package au.com.onegeek.nginxperf

import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._

class Nginx extends Simulation {

  val httpConf = http
    .baseURL("http://api.foo.com")
    .acceptHeader("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    .doNotTrackHeader("1")
    .acceptLanguageHeader("en-US,en;q=0.5")
    .acceptEncodingHeader("gzip, deflate")
    .userAgentHeader("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0")

  val scn = scenario("BasicSimulation")
    .repeat(10) {
      exec(http("request_1")
        .get("/header/host"))
    }

  setUp(
    scn.inject(rampUsers(10000) over (1 seconds))
  ).protocols(httpConf)
}
mefellows/nginx-docker-setup
test/gatling/user-files/simulations/nginx/Nginx.scala
Scala
mit
755
package com.yiguang.mcdb

import java.io.File

import org.iq80.leveldb._
import org.fusesource.leveldbjni.JniDBFactory._
import com.yiguang.util.StringUtils._
import org.scalatest.{Matchers, FlatSpec}

import scala.reflect.io.Path
import scala.util.Random

/**
 * Created by yigli on 14-12-4.
 */
class LeveldbSpec extends FlatSpec with Matchers {

  private var db: DB = _

  "db" should "open" in {
    val opt = new Options
    opt.createIfMissing(true)
    opt.logger()
    db = factory.open(new File("testdb"), opt)
  }

  "key value " should "save " in {
    val key = Random.nextString(5)
    val value = Random.nextString(20)
    db.put(key, value)
    val loaded = db.get(key)
    assert(fromBytes(loaded) == value)
  }

  "key " should "deleted " in {
    val key = Random.nextString(5)
    val value = Random.nextString(20)
    db.put(key, value)
    db.delete(key)
    val loaded = db.get(key)
    assert(loaded == null)
  }

  "db" should "close" in {
    db.close()
    Path("testdb").deleteRecursively()
  }
}
liyiguang/memcachedb
src/test/scala/com/yiguang/mcdb/LeveldbSpec.scala
Scala
apache-2.0
1,027
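A companion sketch using the same org.iq80.leveldb API as the spec above, adding an atomic WriteBatch; `bytes` and `asString` are the string helpers that JniDBFactory provides.

import java.io.File
import org.iq80.leveldb._
import org.fusesource.leveldbjni.JniDBFactory._

object LeveldbBatchSketch extends App {
  val db = factory.open(new File("sketchdb"), new Options().createIfMissing(true))
  val batch = db.createWriteBatch()
  try {
    batch.put(bytes("k1"), bytes("v1"))
    batch.put(bytes("k2"), bytes("v2"))
    db.write(batch)                       // both puts become visible atomically
  } finally batch.close()
  println(asString(db.get(bytes("k1")))) // "v1"
  db.close()
}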
package org.scalatra import collection.JavaConversions._ import collection.mutable.ConcurrentMap import java.util.concurrent.ConcurrentHashMap import org.scalatra.util.RicherString._ import java.util.Locale.ENGLISH import collection.{SortedMap, mutable} object ApiFormats { /** * The request attribute key in which the format is stored. */ val FormatKey = "org.scalatra.FormatKey".intern } /** * Adds support for mapping and inferring formats to content types. * * $ - Provides a request-scoped format variable * $ - Maps formats to content types and vice versa * $ - Augments the content-type inferrer to use the format */ trait ApiFormats extends ScalatraBase { /** * A map of suffixes to content types. */ val formats: ConcurrentMap[String, String] = new ConcurrentHashMap[String, String](Map( "json" -> "application/json", "xml" -> "application/xml", "atom" -> "application/atom+xml", "rss" -> "application/rss+xml", "xslt" -> "application/xslt+xml", "svg" -> "application/svg+xml", "pdf" -> "application/pdf", "swf" -> "application/x-shockwave-flash", "flv" -> "video/x-flv", "js" -> "text/javascript", "css" -> "text/stylesheet", "txt" -> "text/plain", "html" -> "text/html", "html5" -> "text/html", "xhtml" -> "application/xhtml+xml")) /** * A map of content types to suffixes. Not strictly a reverse of `formats`. */ val mimeTypes: ConcurrentMap[String, String] = new ConcurrentHashMap[String, String](Map( "application/json" -> "json", "application/xml" -> "xml", "application/atom+xml" -> "atom", "application/rss+xml" -> "rss", "application/xslt+xml" -> "xslt", "application/pdf" -> "pdf", "application/x-www-form-urlencoded" -> "html", "multipart/form-data" -> "html", "application/svg+xml" -> "svg", "application/x-shockwave-flash" -> "swf", "video/x-flv" -> "flv", "text/javascript" -> "json", "application/javascript" -> "json", "application/ecmascript" -> "json", "application/x-ecmascript" -> "json", "text/stylesheet" -> "css", "text/html" -> "html", "application/xhtml+xml" -> "html")) /** * The default format. */ def defaultFormat: Symbol = 'html /** * A list of formats accepted by default. */ def defaultAcceptedFormats: List[Symbol] = List.empty /** * The list of media types accepted by the current request. Parsed from the * `Accept` header. 
*/ def acceptHeader: List[String] = parseAcceptHeader private def getFromParams = { params.get('format).find(p ⇒ formats.contains(p.toLowerCase(ENGLISH))) } private def getFromAcceptHeader = { val hdrs = request.contentType map { contentType => (acceptHeader ::: List(contentType)).distinct } getOrElse acceptHeader formatForMimeTypes(hdrs: _*) } private def parseAcceptHeader = { request.headers.get("Accept") map { s => val fmts = s.split(",").map(_.trim) val accepted = (fmts.foldLeft(Map.empty[Int, List[String]]) { (acc, f) => val parts = f.split(";").map(_.trim) val i = if (parts.size > 1) { val pars = parts(1).split("=").map(_.trim).grouped(2).map(a => a(0) -> a(1)).toSeq val a = Map(pars:_*) (a.get("q").map(_.toDouble).getOrElse(1.0) * 10).ceil.toInt } else 10 acc + (i -> (parts(0) :: acc.get(i).getOrElse(List.empty))) }) (accepted.toList sortWith ((kv1, kv2) => kv1._1 > kv2._1) flatMap (_._2.reverse) toList) } getOrElse Nil } protected def formatForMimeTypes(mimeTypes: String*): Option[String] = { val defaultMimeType = formats(defaultFormat.name) def matchMimeType(tm: String, f: String) = { tm.toLowerCase(ENGLISH).startsWith(f) || (defaultMimeType == f && tm.contains(defaultMimeType)) } mimeTypes find { hdr => formats exists { case (k, v) => matchMimeType(hdr, v) } } flatMap { hdr => formats find { case (k, v) => matchMimeType(hdr, v) } map { _._1 } } } /** * A content type inferrer based on the `format` variable. Looks up the media * type from the `formats` map. If not found, returns * `application/octet-stream`. This inferrer is prepended to the inherited * one. */ protected def inferFromFormats: ContentTypeInferrer = { case _ if format.nonBlank => formats.get(format) getOrElse "application/octet-stream" } override protected def contentTypeInferrer: ContentTypeInferrer = inferFromFormats orElse super.contentTypeInferrer protected def acceptedFormats(accepted: Symbol*) = { val conditions = if (accepted.isEmpty) defaultAcceptedFormats else accepted.toList conditions.isEmpty || (conditions filter { s => formats.get(s.name).isDefined } contains contentType) } private def getFormat = getFromParams orElse getFromAcceptHeader getOrElse defaultFormat.name import ApiFormats.FormatKey /** * Returns the request-scoped format. If not explicitly set, the format is: * $ - the `format` request parameter, if present in `formatParams` * $ - the first match from `Accept` header, looked up in `mimeTypes` * $ - the format from the `Content-Type` header, as looked up in `mimeTypes` * $ - the default format */ def format = { request.get(FormatKey).map(_.asInstanceOf[String]) getOrElse { val fmt = getFormat request(FormatKey) = fmt fmt } } /** * Explicitly sets the request-scoped format. This takes precedence over * whatever was inferred from the request. */ def format_=(formatValue: Symbol) { request(FormatKey) = formatValue.name } /** * Explicitly sets the request-scoped format. This takes precedence over * whatever was inferred from the request. */ def format_=(formatValue: String) { request(FormatKey) = formatValue } }
louk/scalatra
core/src/main/scala/org/scalatra/ApiFormats.scala
Scala
bsd-2-clause
5,866
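A short usage sketch for the trait above, assuming a plain ScalatraServlet host; the routes and response bodies are invented for illustration and are not part of the Scalatra sources. The request-scoped format variable selects the branch, and contentTypeInferrer then maps the chosen format back to a response Content-Type.

// Hypothetical servlet showing the request-scoped `format` in action.
import org.scalatra.{ScalatraServlet, ApiFormats}

class PeopleServlet extends ScalatraServlet with ApiFormats {
  get("/people") {
    // `format` resolves from the format request parameter, then the Accept
    // header, then the request Content-Type, falling back to defaultFormat.
    format match {
      case "json" => "json body here"
      case "xml"  => "xml body here"
      case _      => "html body here"
    }
  }

  post("/people") {
    // A route can also force the format explicitly; this wins over inference.
    format = "json"
    "created"
  }
}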
package jitd.spec; import jitd.JITDRuntime import jitd.typecheck._ import jitd.codegen.policy._ case class Definition( nodes:Seq[Node], accessors:Seq[Accessor], mutators:Seq[Mutator], transforms:Seq[Transform], policies:Seq[Policy], policyImplementations: PolicyImplementation, functions:Seq[FunctionSignature], keyType: String = "int", recordType: String = "Record", includes:Seq[String] = Seq() ) { val nodesByName = nodes.map { n => n.name -> n }.toMap val functionsByName = functions.map { n => n.name -> n }.toMap val typechecker = new Typechecker(functionsByName, nodesByName) def node(name:String):Node = nodesByName(name) def accessor(name:String):Accessor = accessors.find { _.name.equals(name) }.get def mutator(name:String):Mutator = mutators.find { _.name.equals(name) }.get def transform(name:String):Transform = transforms.find { _.name.equals(name) }.get def policy(name:String):Policy = policies.find { _.name.equals(name) }.get def function(name:String):FunctionSignature = functionsByName(name) override def toString = Seq( Seq(s"key $keyType", s"record $recordType"), includes.map { include => "include \\""+include+"\\"" }, nodes.map { _.toString }, accessors.map { _.toString }, mutators.map { _.toString }, transforms.map { _.toString }, policies.map { _.toString } ).map { _.mkString("\\n") }.mkString("\\n\\n////////////////\\n\\n") } class HardcodedDefinition { var functionSignatures:List[FunctionSignature] = JITDRuntime.functions.toList var nodes = List[jitd.spec.Node]() var accessors = List[jitd.spec.Accessor]() var mutators = List[jitd.spec.Mutator]() var transforms = List[jitd.spec.Transform]() var policies = List[jitd.spec.Policy]() var policyImplementations = jitd.codegen.policy.SetPolicyImplementation def Def(ret: Type, name: String, args:Type*) { functionSignatures = jitd.typecheck.FunctionSignature(name, args, ret) :: functionSignatures } def Def(name: String, args:Type*) { functionSignatures = jitd.typecheck.FunctionSignature(name, args) :: functionSignatures } def Import(newFunctions: Seq[FunctionSignature]) { functionSignatures = functionSignatures ++ newFunctions } def Node(name:String, fields:Field*) { nodes = jitd.spec.Node(name, fields) :: nodes } def Accessor(name: String, ret:Type = TBool())(inputs:Field*)(outputs:Field*)(per_node:(String, Statement)*) { accessors = new jitd.spec.Accessor(name, inputs, outputs, per_node.toMap, ret) :: accessors } def Mutator(name:String)(args:Field*)(constructor:ConstructNode) { mutators = jitd.spec.Mutator(name, args, constructor) :: mutators } def Transform(name: String)(from: MatchNode)(to: ConstructorPattern) { transforms = jitd.spec.Transform(name, from, to) :: transforms } def InvertedTransform(name:String, as:String) { val source = transforms.find { _.name == name }.get transforms = source.invertAs(as) :: transforms } def Policy(name:String)(parameters:(String,Constant)*)(rule:PolicyRule) { policies = jitd.spec.Policy(name, parameters.map { p => Field(p._1, p._2.t) }, parameters.map { _._2 }, rule) :: policies } def Build(node:String, args:ConstructorPattern*) = ConstructNode(node, args) def Match(node:String, args:MatchPattern*) = MatchNode(node, args) def Any(name:String) = MatchAny(Some(name)) def Any = MatchAny(None) def Call(fn: String)(args:Expression*): Expression = FunctionCall(fn, args) def If(cond:Expression)(thenClause:Statement)(elseClause:Statement): Statement = IfThenElse(cond, thenClause, elseClause) def record = TRecord() def key = TKey() def int = TInt() def float = TFloat() def bool = TBool() def node = 
TNodeRef() def iterator = TIterator() def handlepref = THandleRef() def Delegate(args:Expression*) = Call("delegate")(args:_*) import scala.language.implicitConversions implicit def stringToVar(x:String) = Var(x) implicit def stringToExpressionConstructor(x:String) = ConstructExpression(Var(x)) implicit def stringToMatchAny(x:String) = MatchAny(Some(x)) implicit def stringToTransformPolicy(x:String) = TransformPolicy(x) implicit def boolToBoolConst(x:Boolean) = BoolConstant(x) implicit def intToIntConstant(x:Integer) = IntConstant(x) implicit def doubleToFloatConstant(x:Double) = FloatConstant(x) implicit class StringHelper(s:String) { def withFields(fields:MatchPattern*) = MatchNode(s, fields) def fromFields(fields:ConstructorPattern*) = ConstructNode(s, fields) def call(args: Expression*) = FunctionCall(s, args) } implicit def expressionToConstructor(x:Expression) = ConstructExpression(x) def definition = Definition( nodes = nodes, accessors = accessors, mutators = mutators, transforms = transforms, policies = policies, policyImplementations = policyImplementations, functions = functionSignatures, includes = Seq("int_record.hpp", "runtime.hpp") ) def functions = functionSignatures.map { f => f.name -> f }.toMap def node(name:String):Node = nodes.find { _.name.equals(name) }.get def accessor(name:String):Accessor = accessors.find { _.name.equals(name) }.get def mutator(name:String):Mutator = mutators.find { _.name.equals(name) }.get def transform(name:String):Transform = transforms.find { _.name.equals(name) }.get def policy(name:String):Policy = policies.find { _.name.equals(name) }.get }
UBOdin/jitd-synthesis
src/main/scala/jitd/spec/Definition.scala
Scala
apache-2.0
5,673
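For orientation, a minimal sketch of how the mutable builder above might be exercised; the node shapes and the record_lt signature are invented for illustration (they do not come from the jitd-synthesis sources), and the snippet assumes the jitd.spec package is in scope.

// Hypothetical definition built with the HardcodedDefinition DSL above.
object ToyDefinition extends HardcodedDefinition {
  // Two node types: a leaf holding a record and a binary concatenation node.
  Node("Singleton", Field("data", record))
  Node("Concat", Field("lhs", node), Field("rhs", node))

  // An external comparison function the generated code is assumed to provide.
  Def(bool, "record_lt", record, record)
}

// ToyDefinition.definition then packages the accumulated pieces into an
// immutable Definition, with the standard runtime includes wired in.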
/* * Copyright (c) 2017. Yuriy Stul */ package com.stulsoft.ysps.pforcomprehansion import scala.util.{Success, Try} /** * @see [[https://wordpress.com/read/blogs/14365184/posts/892 Scala – conversion from for-comprehension to map, flatMap]] * @author Yuriy Stul. */ object FromForToMap extends App { testWithTry() testWithOption() testWithOption2() def testWithTry(): Unit = { testWithFor() testWithMap() def getProduct(id: String): Try[String] = { Success(id + "123") } def getPrice(product: String): Try[Double] = { Success(10.0 + product.length) } def calculateProductPrice(product: String, price: Double): Try[Double] = { Success(product.length * price) } def testWithFor(): Unit = { val result = for { product <- getProduct("test") price <- getPrice(product) calculatedPrice <- calculateProductPrice(product, price) if calculatedPrice > 10 } yield (product, calculatedPrice) println(s"testWithFor: result is $result") } def testWithMap(): Unit = { val result = getProduct("test").flatMap { product => getPrice(product).flatMap { price => calculateProductPrice(product, price).filter(p => p > 10) .map { p => (product, p) } } } println(s"testWithMap: result is $result") } } def testWithOption(): Unit = { testWithFor() testWithMap() def getProduct(id: String): Option[String] = { Some(id + "123") } def getPrice(product: String): Option[Double] = { Some(10.0 + product.length) } def calculateProductPrice(product: String, price: Double): Option[Double] = { Some(product.length * price) } def testWithFor(): Unit = { val result = for { product <- getProduct("test") price <- getPrice(product) calculatedPrice <- calculateProductPrice(product, price) if calculatedPrice > 10 } yield (product, calculatedPrice) println(s"testWithFor: result is $result") } def testWithMap(): Unit = { val result = getProduct("test").flatMap { product => getPrice(product).flatMap { price => calculateProductPrice(product, price).filter(p => p > 10) .map { p => (product, p) } } } println(s"testWithMap: result is $result") } } def testWithOption2(): Unit = { println("==>testWithOption2") testWithFor() testWithMap() def getProduct(id: String): Option[String] = { None } def getPrice(product: String): Option[Double] = { Some(10.0 + product.length) } def calculateProductPrice(product: String, price: Double): Option[Double] = { Some(product.length * price) } def testWithFor(): Unit = { val result = for { product <- getProduct("test") price <- getPrice(product) calculatedPrice <- calculateProductPrice(product, price) if calculatedPrice > 10 } yield (product, calculatedPrice) println(s"testWithFor: result is $result") } def testWithMap(): Unit = { val result = getProduct("test").flatMap { product => getPrice(product).flatMap { price => calculateProductPrice(product, price).filter(p => p > 10) .map { p => (product, p) } } } println(s"testWithMap: result is $result") } println("<==testWithOption2") } }
ysden123/ysps
src/main/scala/com/stulsoft/ysps/pforcomprehansion/FromForToMap.scala
Scala
mit
3,473
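The same mechanical translation the file demonstrates for Try and Option applies to any type with flatMap/map (and withFilter for guards); a compact self-contained illustration with Either, which is not in the original file and assumes Scala 2.12+ where Either is right-biased.

object ForDesugarDemo extends App {
  val fa: Either[String, Int] = Right(2)
  def fb(a: Int): Either[String, Int] = Right(a * 10)

  // The for-comprehension...
  val sugared: Either[String, Int] =
    for { a <- fa; b <- fb(a) } yield a + b

  // ...is rewritten by the compiler into flatMap/map:
  val desugared: Either[String, Int] =
    fa.flatMap(a => fb(a).map(b => a + b))

  println(sugared)    // Right(22)
  println(desugared)  // Right(22)
}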
package org.jetbrains.plugins.scala.lang.resolve2 /** * Pavel.Fatin, 02.02.2010 */ class FunctionTypeTest extends ResolveTestBase { override def folderPath: String = { super.folderPath + "function/type/" } def testChoiceOne = doTest def testChoiceTwo = doTest def testIncompatible = doTest def testIncompatibleFirst = doTest def testIncompatibleSecond = doTest def testIncompatibleWithCount = doTest def testInheritanceChild = doTest def testInheritanceParent = doTest def testParentheses = doTest }
LPTK/intellij-scala
test/org/jetbrains/plugins/scala/lang/resolve2/FunctionTypeTest.scala
Scala
apache-2.0
531
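The suite's fixtures live in external test-data folders, but the behaviour it checks can be reproduced in isolation; a standalone sketch (all names invented) of overload resolution driven by the function type of the argument.

object FunctionTypeOverloads {
  def pick(f: Int => Int): String = "chose Int => Int"
  def pick(f: String => String): String = "chose String => String"

  def main(args: Array[String]): Unit = {
    println(pick((x: Int) => x + 1))     // resolves against the Int => Int overload
    println(pick((s: String) => s.trim)) // resolves against the String => String overload
    // pick(x => x) would not compile: with both overloads applicable,
    // the lambda's parameter type cannot be inferred.
  }
}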
import sbt._ import java.io.File object JDatPackerDef extends Build { val readmePath = TaskKey[File]("readme-path", "path to readme file") val licensePath = TaskKey[File]("license-path", "path to license file") val zipPath = TaskKey[File]("zip-path", "path to dist zip file") val dist = TaskKey[File]("dist", "creates a distributable zip file") }
memo33/JDatPacker
project/build.scala
Scala
mit
357
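A hedged sketch of how these keys might be wired into settings, using sbt 0.13-era APIs to match the Build trait above; the paths and zip layout are invented, not taken from the JDatPacker build.

import sbt._
import Keys._
import JDatPackerDef._

object DistSettings {
  val settings = Seq(
    readmePath := baseDirectory.value / "README.md",
    licensePath := baseDirectory.value / "LICENSE",
    zipPath := target.value / "jdatpacker-dist.zip",
    dist := {
      val out = zipPath.value
      // Bundle the readme and license into the distributable zip.
      IO.zip(Seq(readmePath.value -> "README.md", licensePath.value -> "LICENSE"), out)
      out
    }
  )
}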
package org.wartremover package contrib.test import org.wartremover.contrib.warts.ExposedTuples import org.wartremover.test.WartTestTraverser import org.scalatest.funsuite.AnyFunSuite class ExposedTuplesTest extends AnyFunSuite with ResultAssertions { test("can't expose a tuple from a public method") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar1(): (Int, String) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public method in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { def bar2(): (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected method in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected def bar3(): (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private method in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private def bar4(): (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private method for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] def bar5(): (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a method inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar6(): Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a method if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar7(): Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public method as a parameter") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar8(baz: (Int, String)) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public method as a parameter in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { def bar9(baz: (Int, String)) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected method as a parameter in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected def bar10(baz: (Int, String)) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private method as a parameter in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private def bar11(baz: (Int, String)) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private method as a parameter for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] def bar12(baz: (Int, String)) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a method as a parameter inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar13(baz: Seq[(Int, String)]) = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a method as a parameter if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar14(baz: Map[Int, String]) = ??? 
} assertEmpty(result) } test("can't expose a tuple from a public value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { val bar15: (Int, String) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { val bar16: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected val bar17: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private val bar18: (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private value for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] val bar19: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a value inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { val bar20: Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a value if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { val bar21: Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public variable") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { var bar22: (Int, String) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public variable in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { var bar23: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected variable in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected var bar24: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private variable in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private var bar25: (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private variable for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] var bar26: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a variable inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { var bar27: Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a variable if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { var bar28: Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public lazy value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy val bar29: (Int, String) = ??? 
} assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public lazy value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { lazy val bar30: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected lazy value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected lazy val bar31: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private lazy value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private lazy val bar32: (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private lazy value for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] lazy val bar33: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a lazy value inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy val bar34: Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a lazy value if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy val bar35: Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public implicit method") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit def bar36(): (Int, String) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public implicit method in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { implicit def bar37(): (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected implicit method in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected implicit def bar38(): (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private implicit method in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private implicit def bar39(): (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private implicit method for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] implicit def bar40(): (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from an implicit method inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit def bar41(): Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from an implicit method if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit def bar42(): Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public implicit value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit val bar43: (Int, String) = ??? 
} assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public implicit value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { implicit val bar44: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected implicit value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected implicit val bar45: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private implicit value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private implicit val bar46: (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private implicit value for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] implicit val bar47: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from an implicit value inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit val bar48: Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from an implicit value if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit val bar49: Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public implicit variable") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit var bar50: (Int, String) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public implicit variable in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { implicit var bar51: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected implicit variable in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected implicit var bar52: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private implicit variable in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private implicit var bar53: (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private implicit variable for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] implicit var bar54: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from an implicit variable inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit var bar55: Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from an implicit variable if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { implicit var bar56: Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public implicit lazy value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy implicit val bar57: (Int, String) = ??? 
} assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public implicit lazy value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { lazy implicit val bar58: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected implicit lazy value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected lazy implicit val bar59: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private implicit lazy value in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private lazy implicit val bar60: (Int, String) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private implicit lazy value for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] lazy implicit val bar61: (Int, String) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from an implicit lazy value inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy implicit val bar62: Seq[(Int, String)] = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from an implicit lazy value if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy implicit val bar63: Map[Int, String] = ??? } assertEmpty(result) } test("can't expose a tuple from a public method as an implicit parameter") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar64(implicit baz: (Int, String)) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a public method as an implicit parameter in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { def bar65(implicit baz: (Int, String)) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a protected method as an implicit parameter in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { protected def bar66(implicit baz: (Int, String)) = ??? } } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a private method as an implicit parameter in a class") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private def bar67(implicit baz: (Int, String)) = ??? } } assertEmpty(result) } test("can't expose a tuple from a private method as an implicit parameter for a scope") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { class Foo { private[test] def bar68(implicit baz: (Int, String)) = ??? } } assertError(result)(ExposedTuples.message) } test("can't expose a tuple from a method as an implicit parameter inside another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar69(implicit baz: Seq[(Int, String)]) = ??? } assertError(result)(ExposedTuples.message) } test("can expose a tuple from a method as an implicit parameter if it's the base type of another type") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def bar70(implicit baz: Map[Int, String]) = ??? 
}
    assertEmpty(result)
  }

  test("can expose a tuple from the unapply method of a case class") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      case class Foo(a: Int, b: Int)
    }
    assertEmpty(result)
  }

  test("can expose a tuple from a custom unapply method") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo(val a: Int, val b: Int)
      object Foo {
        def unapply(foo: Foo): Option[(Int, Int)] = Some((foo.a, foo.b))
      }

      // Testing to make sure unapply is properly defined
      val foo = new Foo(1, 2)
      val Foo(a, b) = foo
    }
    assertEmpty(result)
  }

  test("can't expose a tuple from an implicit class") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      object X {
        implicit class Foo(tuple: (Int, String))
      }
    }
    assertError(result.copy(errors = result.errors.distinct))(ExposedTuples.message)
  }

  test("can't expose a tuple from a public constructor") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo(tuple: (Int, String))
    }
    assertError(result)(ExposedTuples.message)
  }

  test("can't expose a tuple from a protected constructor") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo protected (tuple: (Int, String))
    }
    assertError(result)(ExposedTuples.message)
  }

  test("can expose a tuple from a private constructor") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo private (tuple: (Int, String))
    }
    assertEmpty(result)
  }

  test("can't expose a tuple from a private scoped constructor") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo private[test] (tuple: (Int, String))
    }
    assertError(result)(ExposedTuples.message)
  }

  test("can expose a tuple from a local def") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      def foo: Unit = {
        def bar71(): (String, Int) = ???
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple as a local def parameter") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      def foo: Unit = {
        def bar72(baz: (String, Int)) = ???
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple from a local def inside a class") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo {
        def foo: Unit = {
          def bar73(): (String, Int) = ???
        }
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple as a local def parameter inside a class") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      class Foo {
        def foo: Unit = {
          def bar74(baz: (String, Int)) = ???
        }
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple as a local value") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      def foo: Unit = {
        val bar75: (String, Int) = ???
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple as a local value lambda") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      def foo: Unit = {
        val bar76: Unit => (String, Int) = ???
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple as a local value lambda's parameter") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      def foo: Unit = {
        val bar77: ((String, Int)) => Unit = ???
      }
    }
    assertEmpty(result)
  }

  test("can expose a tuple as a local variable") {
    val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) {
      def foo: Unit = {
        var bar75b: (String, Int) = ???
} } assertEmpty(result) } test("can expose a tuple as a local variable lambda") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def foo: Unit = { var bar76b: Unit => (String, Int) = ??? } } assertEmpty(result) } test("can expose a tuple as a local variable lambda's parameter") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def foo: Unit = { var bar77b: ((String, Int)) => Unit = ??? } } assertEmpty(result) } test("can expose a tuple as a local lazy value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def foo: Unit = { lazy val bar75c: (String, Int) = ??? } } assertEmpty(result) } test("can expose a tuple as a local lazy value lambda") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def foo: Unit = { lazy val bar76c: Unit => (String, Int) = ??? } } assertEmpty(result) } test("can expose a tuple as a local lazy value lambda's parameter") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { def foo: Unit = { lazy val bar77c: ((String, Int)) => Unit = ??? } } assertEmpty(result) } test("can't expose a tuple as a value lambda") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { val bar78: ((String, Int)) => Unit = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple as a variable lambda") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { var bar79: ((String, Int)) => Unit = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple as a lazy value lambda") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy val bar80: ((String, Int)) => Unit = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple as a value lambda's return value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { val bar81: Unit => (String, Int) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple as a variable lambda's return value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { var bar82: Unit => (String, Int) = ??? } assertError(result)(ExposedTuples.message) } test("can't expose a tuple as a lazy value lambda's return value") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { lazy val bar83: Unit => (String, Int) = ??? } assertError(result)(ExposedTuples.message) } test("obeys SuppressWarnings") { val result: WartTestTraverser.Result = WartTestTraverser(ExposedTuples) { @SuppressWarnings(Array("org.wartremover.contrib.warts.ExposedTuples")) def bar(): (Int, String) = ??? object X { @SuppressWarnings(Array("org.wartremover.contrib.warts.ExposedTuples")) implicit class TupleOps[A, B](tuple: (A, B)) } } assertEmpty(result) } }
wartremover/wartremover-contrib
core/src/test/scala/wartremover/contrib/warts/ExposedTuplesTest.scala
Scala
apache-2.0
26,303
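The rule this suite encodes — tuples must not cross a public or protected boundary, while private members, locals, and compiler-generated unapply methods are exempt — suggests refactors like the following sketch (all names invented for illustration).

class UserRepo {
  // Would be flagged by ExposedTuples:
  // def lookup(id: Long): (String, Int) = ???

  final case class UserInfo(name: String, age: Int)

  // Preferred public shape: a named type instead of Tuple2.
  def lookup(id: Long): UserInfo = UserInfo("someone", 42)

  // Private members may still use tuples freely.
  private def rawLookup(id: Long): (String, Int) = ("someone", 42)
}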
/* * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com> */ package com.lightbend.lagom.sbt import play.dev.filewatch.LoggerProxy import sbt.Logger class SbtLoggerProxy(logger: Logger) extends LoggerProxy { override def debug(message: => String): Unit = logger.debug(message) override def info(message: => String): Unit = logger.info(message) override def warn(message: => String): Unit = logger.warn(message) override def error(message: => String): Unit = logger.error(message) override def verbose(message: => String): Unit = logger.verbose(message) override def success(message: => String): Unit = logger.success(message) override def trace(t: => Throwable): Unit = logger.trace(t) }
rstento/lagom
dev/sbt-plugin/src/main/scala/com/lightbend/lagom/sbt/SbtLoggerProxy.scala
Scala
apache-2.0
727
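The class above is a straight adapter from sbt's Logger onto Play's file-watch LoggerProxy; a hedged sketch of a call site inside a task (the task itself is invented, while streams is sbt's standard per-task streams key).

import play.dev.filewatch.LoggerProxy
import sbt.Keys.streams

val demoTask = sbt.Def.task {
  // Bridge sbt's per-task logger into the play-file-watch LoggerProxy interface.
  val proxy: LoggerProxy = new SbtLoggerProxy(streams.value.log)
  proxy.info("forwarded through the sbt logger")
}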
/* * Copyright (c) 2014 Robert Conrad - All Rights Reserved. * Unauthorized copying of this file, via any medium is strictly prohibited. * This file is proprietary and confidential. * Last modified by rconrad, 12/24/14 6:32 PM */ package base.entity /** * Container for strings used in API Documentation, specifically in the annotations used * by Swagger. Unfortunately, annotations values must be final and must be primitive - meaning * no string composition, no pulling from conf files, no ingenious language solutions. * Still better than hand-maintaining docs. * * Coverage is turned off since the cov module can't determine whether final vals have been covered (weak, I know) * * NB: ALL STRINGS IN THIS FILE ARE CONSTANT - there is no interpolation or concatenation. This rule is required * because many of these strings are used in annotations for swagger, and annotations must be constant native * strings at the pre-compile stage. * * @author rconrad */ // $COVERAGE-OFF$ // scalastyle:off line.size.limit // scalastyle:off file.size.limit object ApiStrings { /** * Common */ // headers final val headerPrefix = "X-Base-" // methods final val getMethod = "GET" final val postMethod = "POST" final val putMethod = "PUT" final val deleteMethod = "DELETE" // data types final val stringDataType = "string" final val boolDataType = "boolean" final val intDataType = "integer" final val decimalDataType = "decimal" final val longDataType = "long" final val uuidDataType = "java.lang.UUID" // param types final val formParamType = "form" final val headerParamType = "header" final val pathParamType = "path" final val queryParamType = "query" // response codes final val OKCode = 200 final val OKCodeDesc = "Request completed successfully." final val foundCode = 302 final val foundCodeDesc = "Redirect to a different URI." final val errorCode = 400 final val errorCodeDesc = "An error occurred, input may not be valid." final val authErrorCode = 401 final val authErrorCodeDesc = "Authorization headers were not accepted. If using a PUBLISHABLE API Key this may include a malformed security hash." final val notFoundCode = 404 final val notFoundCodeDesc = "Resource could not be found." final val mediaErrorCode = 415 final val mediaErrorCodeDesc = "An unexpected Content-Type was supplied with the request. This server accepts only 'Content-Type: application/json'." final val serverErrorCode = 500 final val serverErrorCodeDesc = "Internal server error occurred - please contact support and provide uniqueId." // error objects final val errorObject = "Properties of an error generated during a request." final val errorResponseObject = "Response generated when one or more errors occur during a request." // error fields final val dataDesc = "List of errors that occurred during the request." final val errorStatusDesc = "HTTP status code." final val errorDescCodeDesc = "Descriptive error code." final val errorMessageDesc = "Human-readable message providing details of the error that occurred." final val errorParamDesc = "Parameter the error relates to, useful for displaying error messages on forms." final val errorUniqueIdDesc = "Uniquely identifying string for this error. Should be referenced when obtaining support." // error values final val genericError = "An internal server error occurred." // authentication notes final val userOrSecretOrPublishableAuthNote = "Can use any of user/password, publishable key or secret key authentication." final val userOrSecretAuthNote = "Can only use user/password or secret key authentication." 
final val userOrSecretAuthProviderOnlyNote = "Can only use user/password or secret key authentication, can only be accessed by providers." final val userAuthNote = "Can only use user/password authentication." // not implemented notes final val notYetImplementedNote = "This endpoint is NOT YET IMPLEMENTED. However, this API documentation may still be relied upon as the endpoint will come online soon." /** * Auth */ object Auth { final val failedDesc = "Authorization failed." } /** * Keys */ object Keys { // common final val keysResponse = "Keys Response." // endpoint final val endpointDesc = "Retrieve API keys." // read final val readDesc = endpointDesc final val readOKDesc = "API key retrieval succeeded." // refresh final val refreshDesc = "Deactivate all existing API keys, create new ones and retrieve them. WARNING: existing integrations will fail until updated." final val refreshOKDesc = "Successfully created new API keys." final val refreshErrorDesc = "Failed to create new API keys." // objects final val keysDesc = "List of API keys for the authenticated account." final val keyDesc = "Representation of an API key." // fields final val id = "id" final val idDesc = "Unique identifier of the API key." final val key = "apiKey" final val keyValueDesc = "API Key." final val active = "active" final val activeDesc = "Whether the API key is marked active. Inactive API keys will not be able to authenticate." final val createdAt = "createdAt" final val createdAtDesc = "When the API key was created. ISO_8601 datetime format ([YYYY]-[MM]-[DD]T[hh]:[mm]:[ss])." } /** * User */ object User { // common final val userResponse = "User Response." // endpoint final val endpointDesc = "Create, update and retrieve users." // create final val createDesc = "Create a user." final val createRequestDesc = "User Creation Request Representation" final val createOKDesc = "User created successfully." final val createErrorDesc = "User creation failed." // update final val updateDesc = "Update a user." final val updateRequestDesc = "User Update Request Representation" final val updateOKDesc = "User update succeeded." final val updateErrorDesc = "User update failed." final val updateNotFoundDesc = "Specified user not found." // aliased across labels // read final val readDesc = "Retrieve a user." final val readMeDesc = "Retrieve my user." final val readOKDesc = "User retrieval succeeded." final val readNotFoundDesc = updateNotFoundDesc // reset initiate final val resetInitDesc = "Reset a user's password." // via an email containing a link that must be visited to confirm the reset. final val resetInitRequestDesc = "Reset User Password Request Representation" final val resetInitOKDesc = "Reset password initiation completed successfully." final val resetInitErrorDesc = "Reset password initiation failed." final val resetInitNotFoundDesc = updateNotFoundDesc // reset complete final val resetCompleteDesc = "Complete password reset." // process by visiting the confirmation link sent to a user's email address. final val resetCompleteFoundDesc = "Password reset was completed successfully, redirect to a login URI." final val resetCompleteErrorDesc = "Reset password completion failed." final val resetCompleteNotFoundDesc = updateNotFoundDesc // fields final val id = "id" final val idDesc = "Unique identifier of the user." final val email = "email" final val emailDesc = "Unique account identifier and email address for password recovery and notifications." 
final val emailUniqueErrorDesc = "Email must be a unique identifier and this email is already in use."
    final val password = "password"
    final val passwordDesc = "Account password will be securely stored and cannot be retrieved."
    final val active = "active"
    final val activeDesc = "Whether the user is marked active. Inactive users will not be able to log in."
    final val createdAt = "createdAt"
    final val createdAtDesc = "When the user was created. ISO_8601 datetime format ([YYYY]-[MM]-[DD]T[hh]:[mm]:[ss])."
    final val resetCode = "c"
    final val resetCodeDesc = "Code supplied as part of a link in the reset email sent to user's email address."
  }

  /**
   * Shared field type strings
   */
  object Field {
    final val identifierLengthErrorDesc = "Identifier length must be between 0 and 255 characters inclusive."
    final val nameLengthErrorDesc = "Name length must be between 3 and 80 characters inclusive."
    final val emailLengthErrorDesc = "Email address must be between 5 and 255 characters inclusive."
    final val urlLengthErrorDesc = "URL must be between 5 and 1000 characters inclusive."
    final val urlMalformedErrorDesc = "URL must be properly formed."
  }

}
robconrad/base-api
project-entity/src/main/scala/base/entity/ApiStrings.scala
Scala
mit
8,702
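Because every value above is a constant final val, it can be referenced from annotation arguments, which is the whole point the file's header makes; a hedged sketch of that intended consumption. The annotation classes shown (com.wordnik.swagger.annotations) and the trait name are assumptions about the Swagger flavour this project targets, not confirmed by the file.

import com.wordnik.swagger.annotations.{ApiResponse, ApiResponses}

@ApiResponses(Array(
  new ApiResponse(code = ApiStrings.errorCode, message = ApiStrings.errorCodeDesc),
  new ApiResponse(code = ApiStrings.serverErrorCode, message = ApiStrings.serverErrorCodeDesc)
))
trait KeysEndpointDocs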
package com.twitter.scalding.examples import com.twitter.scalding._ import com.twitter.scalding.mathematics.Matrix /* * MatrixTutorial6.scala * * Loads a document to word matrix where a[i,j] = freq of the word j in the document i * computes the Tf-Idf score of each word w.r.t. to each document and keeps the top nrWords in each document * (see http://en.wikipedia.org/wiki/Tf*idf for more info) * * ../scripts/scald.rb --local MatrixTutorial6.scala --input data/docBOW.tsv --nrWords 300 --output data/featSelectedMatrix.tsv * */ class TfIdfJob(args : Args) extends Job(args) { import Matrix._ val docWordMatrix = Tsv( args("input"), ('doc, 'word, 'count) ) .read .toMatrix[Long,String,Double]('doc, 'word, 'count) // compute the overall document frequency of each row val docFreq = docWordMatrix.sumRowVectors // compute the inverse document frequency vector val invDocFreqVct = docFreq.toMatrix(1).rowL1Normalize.mapValues( x => log2(1/x) ) // zip the row vector along the entire document - word matrix val invDocFreqMat = docWordMatrix.zip(invDocFreqVct.getRow(1)).mapValues( pair => pair._2 ) // multiply the term frequency with the inverse document frequency and keep the top nrWords docWordMatrix.hProd(invDocFreqMat).topRowElems( args("nrWords").toInt ).write(Tsv( args("output") )) def log2(x : Double) = scala.math.log(x)/scala.math.log(2.0) }
rjhall/etsy.scalding
tutorial/MatrixTutorial6.scala
Scala
apache-2.0
1,399
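A plain-Scala rerun of the arithmetic above on a toy corpus (no Scalding; the documents are invented). docFreq sums counts per word across documents, rowL1Normalize turns that into p(w) = docFreq(w) / total, and the idf becomes log2(1 / p(w)) = log2(total / docFreq(w)).

object TfIdfToy extends App {
  def log2(x: Double): Double = math.log(x) / math.log(2.0)

  // doc id -> (word -> term frequency)
  val docs: Map[Long, Map[String, Double]] = Map(
    1L -> Map("cat" -> 2.0, "dog" -> 1.0),
    2L -> Map("dog" -> 3.0))

  // Mirrors sumRowVectors: total count per word across all documents.
  val docFreq: Map[String, Double] =
    docs.values.flatten.groupBy(_._1).map { case (w, kvs) => w -> kvs.map(_._2).sum }
  val total = docFreq.values.sum

  // tf * idf, mirroring hProd of the matrix with the zipped idf row vector.
  val tfIdf = docs.map { case (d, tf) =>
    d -> tf.map { case (w, c) => w -> c * log2(total / docFreq(w)) }
  }
  tfIdf.foreach(println) // the rarer word ("cat") scores higher than the common one ("dog")
}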
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.parser import java.sql.{Date, Timestamp} import org.apache.spark.sql.catalyst.FunctionIdentifier import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, _} import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last} import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.CalendarInterval /** * Test basic expression parsing. * If the type of an expression is supported it should be tested here. * * Please note that some of the expressions test don't have to be sound expressions, only their * structure needs to be valid. Unsound expressions should be caught by the Analyzer or * CheckAnalysis classes. */ class ExpressionParserSuite extends PlanTest { import CatalystSqlParser._ import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ val defaultParser = CatalystSqlParser def assertEqual( sqlCommand: String, e: Expression, parser: ParserInterface = defaultParser): Unit = { compareExpressions(parser.parseExpression(sqlCommand), e) } def intercept(sqlCommand: String, messages: String*): Unit = { val e = intercept[ParseException](defaultParser.parseExpression(sqlCommand)) messages.foreach { message => assert(e.message.contains(message)) } } test("star expressions") { // Global Star assertEqual("*", UnresolvedStar(None)) // Targeted Star assertEqual("a.b.*", UnresolvedStar(Option(Seq("a", "b")))) } // NamedExpression (Alias/Multialias) test("named expressions") { // No Alias val r0 = 'a assertEqual("a", r0) // Single Alias. val r1 = 'a as "b" assertEqual("a as b", r1) assertEqual("a b", r1) // Multi-Alias assertEqual("a as (b, c)", MultiAlias('a, Seq("b", "c"))) assertEqual("a() (b, c)", MultiAlias('a.function(), Seq("b", "c"))) // Numeric literals without a space between the literal qualifier and the alias, should not be // interpreted as such. An unresolved reference should be returned instead. // TODO add the JIRA-ticket number. assertEqual("1SL", Symbol("1SL")) // Aliased star is allowed. 
assertEqual("a.* b", UnresolvedStar(Option(Seq("a"))) as 'b) } test("binary logical expressions") { // And assertEqual("a and b", 'a && 'b) // Or assertEqual("a or b", 'a || 'b) // Combination And/Or check precedence assertEqual("a and b or c and d", ('a && 'b) || ('c && 'd)) assertEqual("a or b or c and d", 'a || 'b || ('c && 'd)) // Multiple AND/OR get converted into a balanced tree assertEqual("a or b or c or d or e or f", (('a || 'b) || 'c) || (('d || 'e) || 'f)) assertEqual("a and b and c and d and e and f", (('a && 'b) && 'c) && (('d && 'e) && 'f)) } test("long binary logical expressions") { def testVeryBinaryExpression(op: String, clazz: Class[_]): Unit = { val sql = (1 to 1000).map(x => s"$x == $x").mkString(op) val e = defaultParser.parseExpression(sql) assert(e.collect { case _: EqualTo => true }.size === 1000) assert(e.collect { case x if clazz.isInstance(x) => true }.size === 999) } testVeryBinaryExpression(" AND ", classOf[And]) testVeryBinaryExpression(" OR ", classOf[Or]) } test("not expressions") { assertEqual("not a", !'a) assertEqual("!a", !'a) assertEqual("not true > true", Not(GreaterThan(true, true))) } test("exists expression") { assertEqual( "exists (select 1 from b where b.x = a.x)", Exists(table("b").where(Symbol("b.x") === Symbol("a.x")).select(1))) } test("comparison expressions") { assertEqual("a = b", 'a === 'b) assertEqual("a == b", 'a === 'b) assertEqual("a <=> b", 'a <=> 'b) assertEqual("a <> b", 'a =!= 'b) assertEqual("a != b", 'a =!= 'b) assertEqual("a < b", 'a < 'b) assertEqual("a <= b", 'a <= 'b) assertEqual("a !> b", 'a <= 'b) assertEqual("a > b", 'a > 'b) assertEqual("a >= b", 'a >= 'b) assertEqual("a !< b", 'a >= 'b) } test("between expressions") { assertEqual("a between b and c", 'a >= 'b && 'a <= 'c) assertEqual("a not between b and c", !('a >= 'b && 'a <= 'c)) } test("in expressions") { assertEqual("a in (b, c, d)", 'a in ('b, 'c, 'd)) assertEqual("a not in (b, c, d)", !('a in ('b, 'c, 'd))) } test("in sub-query") { assertEqual( "a in (select b from c)", In('a, Seq(ListQuery(table("c").select('b))))) } test("like expressions") { assertEqual("a like 'pattern%'", 'a like "pattern%") assertEqual("a not like 'pattern%'", !('a like "pattern%")) assertEqual("a rlike 'pattern%'", 'a rlike "pattern%") assertEqual("a not rlike 'pattern%'", !('a rlike "pattern%")) assertEqual("a regexp 'pattern%'", 'a rlike "pattern%") assertEqual("a not regexp 'pattern%'", !('a rlike "pattern%")) } test("like expressions with ESCAPED_STRING_LITERALS = true") { val conf = new SQLConf() conf.setConfString(SQLConf.ESCAPED_STRING_LITERALS.key, "true") val parser = new CatalystSqlParser(conf) assertEqual("a rlike '^\\\\x20[\\\\x20-\\\\x23]+$'", 'a rlike "^\\\\x20[\\\\x20-\\\\x23]+$", parser) assertEqual("a rlike 'pattern\\\\\\\\'", 'a rlike "pattern\\\\\\\\", parser) assertEqual("a rlike 'pattern\\\\t\\\\n'", 'a rlike "pattern\\\\t\\\\n", parser) } test("is null expressions") { assertEqual("a is null", 'a.isNull) assertEqual("a is not null", 'a.isNotNull) assertEqual("a = b is null", ('a === 'b).isNull) assertEqual("a = b is not null", ('a === 'b).isNotNull) } test("is distinct expressions") { assertEqual("a is distinct from b", !('a <=> 'b)) assertEqual("a is not distinct from b", 'a <=> 'b) } test("binary arithmetic expressions") { // Simple operations assertEqual("a * b", 'a * 'b) assertEqual("a / b", 'a / 'b) assertEqual("a DIV b", ('a / 'b).cast(LongType)) assertEqual("a % b", 'a % 'b) assertEqual("a + b", 'a + 'b) assertEqual("a - b", 'a - 'b) assertEqual("a & b", 'a & 'b) 
assertEqual("a ^ b", 'a ^ 'b) assertEqual("a | b", 'a | 'b) // Check precedences assertEqual( "a * t | b ^ c & d - e + f % g DIV h / i * k", 'a * 't | ('b ^ ('c & ('d - 'e + (('f % 'g / 'h).cast(LongType) / 'i * 'k))))) } test("unary arithmetic expressions") { assertEqual("+a", 'a) assertEqual("-a", -'a) assertEqual("~a", ~'a) assertEqual("-+~~a", -(~(~'a))) } test("cast expressions") { // Note that DataType parsing is tested elsewhere. assertEqual("cast(a as int)", 'a.cast(IntegerType)) assertEqual("cast(a as timestamp)", 'a.cast(TimestampType)) assertEqual("cast(a as array<int>)", 'a.cast(ArrayType(IntegerType))) assertEqual("cast(cast(a as int) as long)", 'a.cast(IntegerType).cast(LongType)) } test("function expressions") { assertEqual("foo()", 'foo.function()) assertEqual("foo.bar()", UnresolvedFunction(FunctionIdentifier("bar", Some("foo")), Seq.empty, isDistinct = false)) assertEqual("foo(*)", 'foo.function(star())) assertEqual("count(*)", 'count.function(1)) assertEqual("foo(a, b)", 'foo.function('a, 'b)) assertEqual("foo(all a, b)", 'foo.function('a, 'b)) assertEqual("foo(distinct a, b)", 'foo.distinctFunction('a, 'b)) assertEqual("grouping(distinct a, b)", 'grouping.distinctFunction('a, 'b)) assertEqual("`select`(all a, b)", 'select.function('a, 'b)) assertEqual("foo(a as x, b as e)", 'foo.function('a as 'x, 'b as 'e)) } test("window function expressions") { val func = 'foo.function(star()) def windowed( partitioning: Seq[Expression] = Seq.empty, ordering: Seq[SortOrder] = Seq.empty, frame: WindowFrame = UnspecifiedFrame): Expression = { WindowExpression(func, WindowSpecDefinition(partitioning, ordering, frame)) } // Basic window testing. assertEqual("foo(*) over w1", UnresolvedWindowExpression(func, WindowSpecReference("w1"))) assertEqual("foo(*) over ()", windowed()) assertEqual("foo(*) over (partition by a, b)", windowed(Seq('a, 'b))) assertEqual("foo(*) over (distribute by a, b)", windowed(Seq('a, 'b))) assertEqual("foo(*) over (cluster by a, b)", windowed(Seq('a, 'b))) assertEqual("foo(*) over (order by a desc, b asc)", windowed(Seq.empty, Seq('a.desc, 'b.asc ))) assertEqual("foo(*) over (sort by a desc, b asc)", windowed(Seq.empty, Seq('a.desc, 'b.asc ))) assertEqual("foo(*) over (partition by a, b order by c)", windowed(Seq('a, 'b), Seq('c.asc))) assertEqual("foo(*) over (distribute by a, b sort by c)", windowed(Seq('a, 'b), Seq('c.asc))) // Test use of expressions in window functions. 
assertEqual( "sum(product + 1) over (partition by ((product) + (1)) order by 2)", WindowExpression('sum.function('product + 1), WindowSpecDefinition(Seq('product + 1), Seq(Literal(2).asc), UnspecifiedFrame))) assertEqual( "sum(product + 1) over (partition by ((product / 2) + 1) order by 2)", WindowExpression('sum.function('product + 1), WindowSpecDefinition(Seq('product / 2 + 1), Seq(Literal(2).asc), UnspecifiedFrame))) // Range/Row val frameTypes = Seq(("rows", RowFrame), ("range", RangeFrame)) val boundaries = Seq( ("10 preceding", ValuePreceding(10), CurrentRow), ("3 + 1 following", ValueFollowing(4), CurrentRow), // Will fail during analysis ("unbounded preceding", UnboundedPreceding, CurrentRow), ("unbounded following", UnboundedFollowing, CurrentRow), // Will fail during analysis ("between unbounded preceding and current row", UnboundedPreceding, CurrentRow), ("between unbounded preceding and unbounded following", UnboundedPreceding, UnboundedFollowing), ("between 10 preceding and current row", ValuePreceding(10), CurrentRow), ("between current row and 5 following", CurrentRow, ValueFollowing(5)), ("between 10 preceding and 5 following", ValuePreceding(10), ValueFollowing(5)) ) frameTypes.foreach { case (frameTypeSql, frameType) => boundaries.foreach { case (boundarySql, begin, end) => val query = s"foo(*) over (partition by a order by b $frameTypeSql $boundarySql)" val expr = windowed(Seq('a), Seq('b.asc), SpecifiedWindowFrame(frameType, begin, end)) assertEqual(query, expr) } } // We cannot use non integer constants. intercept("foo(*) over (partition by a order by b rows 10.0 preceding)", "Frame bound value must be a constant integer.") // We cannot use an arbitrary expression. intercept("foo(*) over (partition by a order by b rows exp(b) preceding)", "Frame bound value must be a constant integer.") } test("row constructor") { // Note that '(a)' will be interpreted as a nested expression. assertEqual("(a, b)", CreateStruct(Seq('a, 'b))) assertEqual("(a, b, c)", CreateStruct(Seq('a, 'b, 'c))) assertEqual("(a as b, b as c)", CreateStruct(Seq('a as 'b, 'b as 'c))) } test("scalar sub-query") { assertEqual( "(select max(val) from tbl) > current", ScalarSubquery(table("tbl").select('max.function('val))) > 'current) assertEqual( "a = (select b from s)", 'a === ScalarSubquery(table("s").select('b))) } test("case when") { assertEqual("case a when 1 then b when 2 then c else d end", CaseKeyWhen('a, Seq(1, 'b, 2, 'c, 'd))) assertEqual("case (a or b) when true then c when false then d else e end", CaseKeyWhen('a || 'b, Seq(true, 'c, false, 'd, 'e))) assertEqual("case 'a'='a' when true then 1 end", CaseKeyWhen("a" === "a", Seq(true, 1))) assertEqual("case when a = 1 then b when a = 2 then c else d end", CaseWhen(Seq(('a === 1, 'b.expr), ('a === 2, 'c.expr)), 'd)) assertEqual("case when (1) + case when a > b then c else d end then f else g end", CaseWhen(Seq((Literal(1) + CaseWhen(Seq(('a > 'b, 'c.expr)), 'd.expr), 'f.expr)), 'g)) } test("dereference") { assertEqual("a.b", UnresolvedAttribute("a.b")) assertEqual("`select`.b", UnresolvedAttribute("select.b")) assertEqual("(a + b).b", ('a + 'b).getField("b")) // This will fail analysis. assertEqual("struct(a, b).b", 'struct.function('a, 'b).getField("b")) } test("reference") { // Regular assertEqual("a", 'a) // Starting with a digit. assertEqual("1a", Symbol("1a")) // Quoted using a keyword. assertEqual("`select`", 'select) // Unquoted using an unreserved keyword. 
assertEqual("columns", 'columns) } test("subscript") { assertEqual("a[b]", 'a.getItem('b)) assertEqual("a[1 + 1]", 'a.getItem(Literal(1) + 1)) assertEqual("`c`.a[b]", UnresolvedAttribute("c.a").getItem('b)) } test("parenthesis") { assertEqual("(a)", 'a) assertEqual("r * (a + b)", 'r * ('a + 'b)) } test("type constructors") { // Dates. assertEqual("dAte '2016-03-11'", Literal(Date.valueOf("2016-03-11"))) intercept("DAtE 'mar 11 2016'") // Timestamps. assertEqual("tImEstAmp '2016-03-11 20:54:00.000'", Literal(Timestamp.valueOf("2016-03-11 20:54:00.000"))) intercept("timestamP '2016-33-11 20:54:00.000'") // Binary. assertEqual("X'A'", Literal(Array(0x0a).map(_.toByte))) assertEqual("x'A10C'", Literal(Array(0xa1, 0x0c).map(_.toByte))) intercept("x'A1OC'") // Unsupported datatype. intercept("GEO '(10,-6)'", "Literals of type 'GEO' are currently not supported.") } test("literals") { def testDecimal(value: String): Unit = { assertEqual(value, Literal(BigDecimal(value).underlying)) } // NULL assertEqual("null", Literal(null)) // Boolean assertEqual("trUe", Literal(true)) assertEqual("False", Literal(false)) // Integral should have the narrowest possible type assertEqual("787324", Literal(787324)) assertEqual("7873247234798249234", Literal(7873247234798249234L)) testDecimal("78732472347982492793712334") // Decimal testDecimal("7873247234798249279371.2334") // Scientific Decimal testDecimal("9.0e1") testDecimal(".9e+2") testDecimal("0.9e+2") testDecimal("900e-1") testDecimal("900.0E-1") testDecimal("9.e+1") intercept(".e3") // Tiny Int Literal assertEqual("10Y", Literal(10.toByte)) intercept("-1000Y", s"does not fit in range [${Byte.MinValue}, ${Byte.MaxValue}]") // Small Int Literal assertEqual("10S", Literal(10.toShort)) intercept("40000S", s"does not fit in range [${Short.MinValue}, ${Short.MaxValue}]") // Long Int Literal assertEqual("10L", Literal(10L)) intercept("78732472347982492793712334L", s"does not fit in range [${Long.MinValue}, ${Long.MaxValue}]") // Double Literal assertEqual("10.0D", Literal(10.0D)) intercept("-1.8E308D", s"does not fit in range") intercept("1.8E308D", s"does not fit in range") // BigDecimal Literal assertEqual("90912830918230182310293801923652346786BD", Literal(BigDecimal("90912830918230182310293801923652346786").underlying())) assertEqual("123.0E-28BD", Literal(BigDecimal("123.0E-28").underlying())) assertEqual("123.08BD", Literal(BigDecimal("123.08").underlying())) intercept("1.20E-38BD", "DecimalType can only support precision up to 38") } test("strings") { Seq(true, false).foreach { escape => val conf = new SQLConf() conf.setConfString(SQLConf.ESCAPED_STRING_LITERALS.key, escape.toString) val parser = new CatalystSqlParser(conf) // tests that have same result whatever the conf is // Single Strings. assertEqual("\\"hello\\"", "hello", parser) assertEqual("'hello'", "hello", parser) // Multi-Strings. assertEqual("\\"hello\\" 'world'", "helloworld", parser) assertEqual("'hello' \\" \\" 'world'", "hello world", parser) // 'LIKE' string literals. Notice that an escaped '%' is the same as an escaped '\\' and a // regular '%'; to get the correct result you need to add another escaped '\\'. // TODO figure out if we shouldn't change the ParseUtils.unescapeSQLString method? assertEqual("'pattern%'", "pattern%", parser) assertEqual("'no-pattern\\\\%'", "no-pattern\\\\%", parser) // tests that have different result regarding the conf if (escape) { // When SQLConf.ESCAPED_STRING_LITERALS is enabled, string literal parsing fallbacks to // Spark 1.6 behavior. 
// 'LIKE' string literals. assertEqual("'pattern\\\\\\\\%'", "pattern\\\\\\\\%", parser) assertEqual("'pattern\\\\\\\\\\\\%'", "pattern\\\\\\\\\\\\%", parser) // Escaped characters. // Unescape string literal "'\\\\0'" for ASCII NUL (X'00') doesn't work // when ESCAPED_STRING_LITERALS is enabled. // It is parsed literally. assertEqual("'\\\\0'", "\\\\0", parser) // Note: Single quote follows 1.6 parsing behavior when ESCAPED_STRING_LITERALS is enabled. val e = intercept[ParseException](parser.parseExpression("'\\''")) assert(e.message.contains("extraneous input '''")) // The unescape special characters (e.g., "\\\\t") for 2.0+ don't work // when ESCAPED_STRING_LITERALS is enabled. They are parsed literally. assertEqual("'\\\\\\"'", "\\\\\\"", parser) // Double quote assertEqual("'\\\\b'", "\\\\b", parser) // Backspace assertEqual("'\\\\n'", "\\\\n", parser) // Newline assertEqual("'\\\\r'", "\\\\r", parser) // Carriage return assertEqual("'\\\\t'", "\\\\t", parser) // Tab character // The unescape Octals for 2.0+ don't work when ESCAPED_STRING_LITERALS is enabled. // They are parsed literally. assertEqual("'\\\\110\\\\145\\\\154\\\\154\\\\157\\\\041'", "\\\\110\\\\145\\\\154\\\\154\\\\157\\\\041", parser) // The unescape Unicode for 2.0+ doesn't work when ESCAPED_STRING_LITERALS is enabled. // They are parsed literally. assertEqual("'\\\\u0057\\\\u006F\\\\u0072\\\\u006C\\\\u0064\\\\u0020\\\\u003A\\\\u0029'", "\\\\u0057\\\\u006F\\\\u0072\\\\u006C\\\\u0064\\\\u0020\\\\u003A\\\\u0029", parser) } else { // Default behavior // 'LIKE' string literals. assertEqual("'pattern\\\\\\\\%'", "pattern\\\\%", parser) assertEqual("'pattern\\\\\\\\\\\\%'", "pattern\\\\\\\\%", parser) // Escaped characters. // See: http://dev.mysql.com/doc/refman/5.7/en/string-literals.html assertEqual("'\\\\0'", "\\u0000", parser) // ASCII NUL (X'00') assertEqual("'\\\\''", "\\'", parser) // Single quote assertEqual("'\\\\\\"'", "\\"", parser) // Double quote assertEqual("'\\\\b'", "\\b", parser) // Backspace assertEqual("'\\\\n'", "\\n", parser) // Newline assertEqual("'\\\\r'", "\\r", parser) // Carriage return assertEqual("'\\\\t'", "\\t", parser) // Tab character assertEqual("'\\\\Z'", "\\u001A", parser) // ASCII 26 - CTRL + Z (EOF on windows) // Octals assertEqual("'\\\\110\\\\145\\\\154\\\\154\\\\157\\\\041'", "Hello!", parser) // Unicode assertEqual("'\\\\u0057\\\\u006F\\\\u0072\\\\u006C\\\\u0064\\\\u0020\\\\u003A\\\\u0029'", "World :)", parser) } } } test("intervals") { def intervalLiteral(u: String, s: String): Literal = { Literal(CalendarInterval.fromSingleUnitString(u, s)) } // Empty interval statement intercept("interval", "at least one time unit should be given for interval literal") // Single Intervals. val units = Seq( "year", "month", "week", "day", "hour", "minute", "second", "millisecond", "microsecond") val forms = Seq("", "s") val values = Seq("0", "10", "-7", "21") units.foreach { unit => forms.foreach { form => values.foreach { value => val expected = intervalLiteral(unit, value) assertEqual(s"interval $value $unit$form", expected) assertEqual(s"interval '$value' $unit$form", expected) } } } // Hive nanosecond notation. assertEqual("interval 13.123456789 seconds", intervalLiteral("second", "13.123456789")) assertEqual("interval -13.123456789 second", intervalLiteral("second", "-13.123456789")) // Non Existing unit intercept("interval 10 nanoseconds", "No interval can be constructed") // Year-Month intervals. 
val yearMonthValues = Seq("123-10", "496-0", "-2-3", "-123-0")
    yearMonthValues.foreach { value =>
      val result = Literal(CalendarInterval.fromYearMonthString(value))
      assertEqual(s"interval '$value' year to month", result)
    }

    // Day-Time intervals.
    val dayTimeValues = Seq(
      "99 11:22:33.123456789",
      "-99 11:22:33.123456789",
      "10 9:8:7.123456789",
      "1 0:0:0",
      "-1 0:0:0",
      "1 0:0:1")
    dayTimeValues.foreach { value =>
      val result = Literal(CalendarInterval.fromDayTimeString(value))
      assertEqual(s"interval '$value' day to second", result)
    }

    // Unknown FROM TO intervals
    intercept("interval 10 month to second",
      "Intervals FROM month TO second are not supported.")

    // Composed intervals.
    assertEqual(
      "interval 3 months 22 seconds 1 millisecond",
      Literal(new CalendarInterval(3, 22001000L)))
    assertEqual(
      "interval 3 years '-1-10' year to month 3 weeks '1 0:0:2' day to second",
      Literal(new CalendarInterval(14,
        22 * CalendarInterval.MICROS_PER_DAY + 2 * CalendarInterval.MICROS_PER_SECOND)))
  }

  test("composed expressions") {
    assertEqual("1 + r.r As q", (Literal(1) + UnresolvedAttribute("r.r")).as("q"))
    assertEqual("1 - f('o', o(bar))", Literal(1) - 'f.function("o", 'o.function('bar)))
    intercept("1 - f('o', o(bar)) hello * world", "mismatched input '*'")
  }

  test("current date/timestamp braceless expressions") {
    assertEqual("current_date", CurrentDate())
    assertEqual("current_timestamp", CurrentTimestamp())
  }

  test("SPARK-17364, fully qualified column name which starts with number") {
    assertEqual("123_", UnresolvedAttribute("123_"))
    assertEqual("1a.123_", UnresolvedAttribute("1a.123_"))
    // ".123" should not be treated as token of type DECIMAL_VALUE
    assertEqual("a.123A", UnresolvedAttribute("a.123A"))
    // ".123E3" should not be treated as token of type SCIENTIFIC_DECIMAL_VALUE
    assertEqual("a.123E3_column", UnresolvedAttribute("a.123E3_column"))
    // ".123D" should not be treated as token of type DOUBLE_LITERAL
    assertEqual("a.123D_column", UnresolvedAttribute("a.123D_column"))
    // ".123BD" should not be treated as token of type BIGDECIMAL_LITERAL
    assertEqual("a.123BD_column", UnresolvedAttribute("a.123BD_column"))
  }

  test("SPARK-17832 function identifier contains backtick") {
    val complexName = FunctionIdentifier("`ba`r", Some("`fo`o"))
    assertEqual(complexName.quotedString, UnresolvedAttribute("`fo`o.`ba`r"))
    intercept(complexName.unquotedString, "mismatched input")
    // Function identifiers containing consecutive backticks should be treated correctly.
    val complexName2 = FunctionIdentifier("ba``r", Some("fo``o"))
    assertEqual(complexName2.quotedString, UnresolvedAttribute("fo``o.ba``r"))
  }

  test("SPARK-19526 Support ignore nulls keywords for first and last") {
    assertEqual("first(a ignore nulls)", First('a, Literal(true)).toAggregateExpression())
    assertEqual("first(a)", First('a, Literal(false)).toAggregateExpression())
    assertEqual("last(a ignore nulls)", Last('a, Literal(true)).toAggregateExpression())
    assertEqual("last(a)", Last('a, Literal(false)).toAggregateExpression())
  }
}
bOOm-X/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
Scala
apache-2.0
24,679
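// A minimal standalone sketch of the (real) Catalyst API the suite above
// exercises, assuming a Spark 2.x catalyst artifact on the classpath; the
// demo object name and the sample expression are invented for illustration.
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

object ParseExpressionDemo extends App {
  // Produces an unresolved Catalyst expression tree, roughly ('a + 1).
  val expr = CatalystSqlParser.parseExpression("a + 1")
  println(expr.sql)
}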
import stainless.lang._
import stainless.annotation._
import stainless.io.State

object GhostEffect2 {
  import stainless.util.Random

  def bar(implicit state: State) = {
    // Rejected by extraction: Random.nextBigInt reads and updates the implicit
    // State, and a @ghost binding must be effect-free so it can be erased.
    @ghost val test = Random.nextBigInt
    ()
  }
}
epfl-lara/stainless
frontends/benchmarks/extraction/invalid/GhostEffect2.scala
Scala
apache-2.0
227
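// For contrast with the invalid benchmark above, a hedged sketch of a ghost
// binding Stainless does accept: the right-hand side is pure, so erasing the
// binding loses no effect. The object and value names are invented.
import stainless.annotation._

object GhostEffectOk {
  def bar(x: BigInt): BigInt = {
    @ghost val doubled = x * 2 // pure computation: erasable, unlike Random.nextBigInt
    x
  }
}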
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.storage import java.util.UUID import org.apache.spark.SparkFunSuite class BlockIdSuite extends SparkFunSuite { def assertSame(id1: BlockId, id2: BlockId) { assert(id1.name === id2.name) assert(id1.hashCode === id2.hashCode) assert(id1 === id2) } def assertDifferent(id1: BlockId, id2: BlockId) { assert(id1.name != id2.name) assert(id1.hashCode != id2.hashCode) assert(id1 != id2) } test("test-bad-deserialization") { intercept[UnrecognizedBlockId] { BlockId("myblock") } } test("rdd") { val id = RDDBlockId(1, 2) assertSame(id, RDDBlockId(1, 2)) assertDifferent(id, RDDBlockId(1, 1)) assert(id.name === "rdd_1_2") assert(id.asRDDId.get.rddId === 1) assert(id.asRDDId.get.splitIndex === 2) assert(id.isRDD) assertSame(id, BlockId(id.toString)) } test("shuffle") { val id = ShuffleBlockId(1, 2, 3) assertSame(id, ShuffleBlockId(1, 2, 3)) assertDifferent(id, ShuffleBlockId(3, 2, 3)) assert(id.name === "shuffle_1_2_3") assert(id.asRDDId === None) assert(id.shuffleId === 1) assert(id.mapId === 2) assert(id.reduceId === 3) assert(id.isShuffle) assertSame(id, BlockId(id.toString)) } test("shuffle data") { val id = ShuffleDataBlockId(4, 5, 6) assertSame(id, ShuffleDataBlockId(4, 5, 6)) assertDifferent(id, ShuffleDataBlockId(6, 5, 6)) assert(id.name === "shuffle_4_5_6.data") assert(id.asRDDId === None) assert(id.shuffleId === 4) assert(id.mapId === 5) assert(id.reduceId === 6) assert(!id.isShuffle) assertSame(id, BlockId(id.toString)) } test("shuffle index") { val id = ShuffleIndexBlockId(7, 8, 9) assertSame(id, ShuffleIndexBlockId(7, 8, 9)) assertDifferent(id, ShuffleIndexBlockId(9, 8, 9)) assert(id.name === "shuffle_7_8_9.index") assert(id.asRDDId === None) assert(id.shuffleId === 7) assert(id.mapId === 8) assert(id.reduceId === 9) assert(!id.isShuffle) assertSame(id, BlockId(id.toString)) } test("broadcast") { val id = BroadcastBlockId(42) assertSame(id, BroadcastBlockId(42)) assertDifferent(id, BroadcastBlockId(123)) assert(id.name === "broadcast_42") assert(id.asRDDId === None) assert(id.broadcastId === 42) assert(id.isBroadcast) assertSame(id, BlockId(id.toString)) } test("taskresult") { val id = TaskResultBlockId(60) assertSame(id, TaskResultBlockId(60)) assertDifferent(id, TaskResultBlockId(61)) assert(id.name === "taskresult_60") assert(id.asRDDId === None) assert(id.taskId === 60) assert(!id.isRDD) assertSame(id, BlockId(id.toString)) } test("stream") { val id = StreamBlockId(1, 100) assertSame(id, StreamBlockId(1, 100)) assertDifferent(id, StreamBlockId(2, 101)) assert(id.name === "input-1-100") assert(id.asRDDId === None) assert(id.streamId === 1) assert(id.uniqueId === 100) assert(!id.isBroadcast) assertSame(id, BlockId(id.toString)) } test("temp local") { val id = TempLocalBlockId(new UUID(5, 2)) 
assertSame(id, TempLocalBlockId(new UUID(5, 2))) assertDifferent(id, TempLocalBlockId(new UUID(5, 3))) assert(id.name === "temp_local_00000000-0000-0005-0000-000000000002") assert(id.asRDDId === None) assert(id.isBroadcast === false) assert(id.id.getMostSignificantBits() === 5) assert(id.id.getLeastSignificantBits() === 2) assert(!id.isShuffle) assertSame(id, BlockId(id.toString)) } test("temp shuffle") { val id = TempShuffleBlockId(new UUID(1, 2)) assertSame(id, TempShuffleBlockId(new UUID(1, 2))) assertDifferent(id, TempShuffleBlockId(new UUID(1, 3))) assert(id.name === "temp_shuffle_00000000-0000-0001-0000-000000000002") assert(id.asRDDId === None) assert(id.isBroadcast === false) assert(id.id.getMostSignificantBits() === 1) assert(id.id.getLeastSignificantBits() === 2) assert(!id.isShuffle) assertSame(id, BlockId(id.toString)) } test("test") { val id = TestBlockId("abc") assertSame(id, TestBlockId("abc")) assertDifferent(id, TestBlockId("ab")) assert(id.name === "test_abc") assert(id.asRDDId === None) assert(id.id === "abc") assert(!id.isShuffle) assertSame(id, BlockId(id.toString)) } }
brad-kaiser/spark
core/src/test/scala/org/apache/spark/storage/BlockIdSuite.scala
Scala
apache-2.0
5,144
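// Each test above ends with assertSame(id, BlockId(id.toString)); that name
// round trip can be sketched standalone with the real Spark API (the demo
// object is invented).
import org.apache.spark.storage.{BlockId, RDDBlockId}

object BlockIdRoundTrip extends App {
  val id = RDDBlockId(rddId = 1, splitIndex = 2)
  // BlockId.apply parses the canonical name back into the typed id.
  assert(BlockId(id.toString) == RDDBlockId(1, 2))
  println(id.name) // rdd_1_2
}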
package org.hammerlab.bam.check.full.error /** * Bag of fields with information related to various inconsistencies in BAM-record-candidates. * @tparam T field type: [[Boolean]] for individual positions [[Flags]], [[Long]] for aggregate [[Counts]]. */ trait Error[T] { def tooFewFixedBlockBytes: T def negativeReadIdx: T def tooLargeReadIdx: T def negativeReadPos: T def tooLargeReadPos: T def negativeNextReadIdx: T def tooLargeNextReadIdx: T def negativeNextReadPos: T def tooLargeNextReadPos: T def tooFewBytesForReadName: T def nonNullTerminatedReadName: T def nonASCIIReadName: T def noReadName: T def emptyReadName: T def tooFewBytesForCigarOps: T def invalidCigarOp: T def emptyMappedCigar: T def emptyMappedSeq: T def tooFewRemainingBytesImplied: T }
ryan-williams/spark-bam
check/src/main/scala/org/hammerlab/bam/check/full/error/Error.scala
Scala
apache-2.0
800
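// A hedged sketch of the Boolean specialization the scaladoc mentions; the
// repository's real Flags class may be shaped differently. Case-class vals
// satisfy the trait's abstract defs.
import org.hammerlab.bam.check.full.error.Error

case class FlagsSketch(
  tooFewFixedBlockBytes: Boolean = false,
  negativeReadIdx: Boolean = false,
  tooLargeReadIdx: Boolean = false,
  negativeReadPos: Boolean = false,
  tooLargeReadPos: Boolean = false,
  negativeNextReadIdx: Boolean = false,
  tooLargeNextReadIdx: Boolean = false,
  negativeNextReadPos: Boolean = false,
  tooLargeNextReadPos: Boolean = false,
  tooFewBytesForReadName: Boolean = false,
  nonNullTerminatedReadName: Boolean = false,
  nonASCIIReadName: Boolean = false,
  noReadName: Boolean = false,
  emptyReadName: Boolean = false,
  tooFewBytesForCigarOps: Boolean = false,
  invalidCigarOp: Boolean = false,
  emptyMappedCigar: Boolean = false,
  emptyMappedSeq: Boolean = false,
  tooFewRemainingBytesImplied: Boolean = false
) extends Error[Boolean]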
/** * Copyright (C) 2010 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.externalcontext import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils import org.orbeon.oxf.http.{Headers, EmptyInputStream, StreamedContent} import org.orbeon.oxf.pipeline.api.ExternalContext.Request import org.orbeon.oxf.servlet.ServletExternalContext import org.orbeon.oxf.util.ScalaUtils.{appendStartingSlash, combineValues, decodeSimpleQuery} import org.orbeon.oxf.util._ import scala.collection.JavaConverters._ import scala.collection.mutable import ScalaUtils._ import java.{util ⇒ ju} // Request used for local (within Orbeon Forms) requests. // // Used by: // // - Connection, for internal quests // - LocalPortletSubmission // - RequestDispatcherSubmission // class LocalRequest( incomingRequest : Request, contextPath : String, pathQuery : String, methodUpper : String, headersMaybeCapitalized : Map[String, List[String]], content : Option[StreamedContent] ) extends Request { require(StringUtils.isAllUpperCase(methodUpper)) private val _contentLengthOpt = content flatMap (_.contentLength) private val _contentTypeOpt = content flatMap (_.contentType) private val _headersIncludingAuthBodyLowercase = { def userGroupRoleHeadersIterator = Iterator( Option(incomingRequest.getUsername) map (Headers.OrbeonUsernameLower → List(_)), Option(incomingRequest.getUserGroup) map (Headers.OrbeonGroupLower → List(_)), Option(incomingRequest.getUserRoles) filter (_.nonEmpty) map (Headers.OrbeonRolesLower → _.to[List]) ).flatten def bodyHeadersIterator = if (Connection.requiresRequestBody(methodUpper)) { (_contentLengthOpt.iterator map (value ⇒ Headers.ContentLengthLower → List(value.toString))) ++ (_contentTypeOpt.iterator map (value ⇒ Headers.ContentTypeLower → List(value))) } else Iterator.empty def allHeadersLowercaseIterator = headersMaybeCapitalized.iterator ++ userGroupRoleHeadersIterator ++ bodyHeadersIterator map { case (k, v) ⇒ k.toLowerCase → v.toArray } allHeadersLowercaseIterator.toMap.asJava } private val (_pathInfo, _queryString) = splitQuery(pathQuery) private lazy val _queryAndBodyParameters = { // Query string // SRV.4.1: "Query string data is presented before post body data." def queryParameters = Option(getQueryString) map decodeSimpleQuery getOrElse Seq() // POST body form parameters // SRV.4.1.1 When Parameters Are Available // NOTE: Remember, the servlet container does not help us decoding the body: the "other side" will just end up here // when asking for parameters. 
def bodyParameters = if (methodUpper == "POST") content collect { case StreamedContent(is, Some("application/x-www-form-urlencoded"), _, _) ⇒ useAndClose(is) { is ⇒ decodeSimpleQuery(IOUtils.toString(is, ServletExternalContext.getDefaultFormCharset)) } } else None // Make sure to keep order mutable.LinkedHashMap() ++ combineValues[String, AnyRef, Array](queryParameters ++ bodyParameters.getOrElse(Nil)) asJava } /* SUPPORTED: methods called by ExternalContextToHttpServletRequestWrapper */ def getMethod = methodUpper def getParameterMap = _queryAndBodyParameters def getQueryString = _queryString.orNull def getCharacterEncoding = null;//TODO? // not used by our code def getContentLength = _contentLengthOpt map (_.toInt) getOrElse -1 def getContentType = _contentTypeOpt.orNull def getInputStream = content map (_.inputStream) getOrElse EmptyInputStream def getReader = null;//TODO? // not used by our code def getHeaderValuesMap = _headersIncludingAuthBodyLowercase // 2013-09-10: We should start with a fresh attributes map: // // ju.Collections.synchronizedMap(new ju.HashMap[String, AnyRef]) // // However, upon forwarding via RequestDispatcher, this doesn't work. It's unclear why, as Tomcat stores special // attributes in its own ApplicationHttpRequest.specialAttributes. So for now we keep the forward, which was in // place before the 2013-09-10 refactor anyway. // lazy val getAttributesMap = { val newMap = new ju.HashMap[String, AnyRef] newMap.asScala ++= incomingRequest.getAttributesMap.asScala filter { case (k, v) ⇒ k.startsWith("javax.servlet.") } ju.Collections.synchronizedMap(newMap) } /* * NOTE: All the path methods are handled by the request dispatcher implementation in the servlet container upon * forward, but upon include we must provide them. * * NOTE: Checked 2009-02-12 that none of the methods below are called when forwarding through * spring/JSP/filter/Orbeon in Tomcat 5.5.27. HOWEVER they are called when including. * * NOTE: 2014-09-22: Checked that getServletPath and getPathInfo are called by JspServlet in tomcat-7.0.47 at least. */ def getPathInfo = _pathInfo def getServletPath = "" def getContextPath = contextPath // return the context path passed to this wrapper def getRequestPath: String = { // Get servlet path and path info val servletPath = Option(getServletPath) getOrElse "" val pathInfo = Option(getPathInfo) getOrElse "" // Concatenate servlet path and path info, avoiding a double slash val requestPath = if (servletPath.endsWith("/") && pathInfo.startsWith("/")) servletPath + pathInfo.substring(1) else servletPath + pathInfo // Add starting slash if missing appendStartingSlash(requestPath) } def getRequestURI: String = { // Must return the path including the context val contextPath = getContextPath if (contextPath == "/") getRequestPath else getContextPath + getRequestPath } // 2014-09-10: Only used by XHTMLToPDFProcessor def getRequestURL: String = { // Get absolute URL w/o query string e.g. http://foo.com/a/b/c val incomingRequestURL = incomingRequest.getRequestURL // Resolving request URI against incoming absolute URL, e.g. 
/d/e/f -> http://foo.com/d/e/f NetUtils.resolveURI(getRequestURI, incomingRequestURL) } // ==== Properties which are delegated ============================================================================= // TODO: Check against ExternalContextToHttpServletRequestWrapper // Container is preserved def getContainerType = incomingRequest.getContainerType def getContainerNamespace = incomingRequest.getContainerNamespace def getPortletMode = incomingRequest.getPortletMode // submission does not change portlet mode def getWindowState = incomingRequest.getWindowState // submission does not change window state def getNativeRequest = incomingRequest.getNativeRequest // should not have mainstream uses; see RequestDispatcherSubmission, and cookies forwarding def getPathTranslated = incomingRequest.getPathTranslated // should really not be called // Client and server are preserved, assuming all those relate to knowledge about the URL rewriting and/or def getProtocol = incomingRequest.getProtocol def getServerPort = incomingRequest.getServerPort def getScheme = incomingRequest.getScheme def getRemoteHost = incomingRequest.getRemoteHost def getRemoteAddr = incomingRequest.getRemoteAddr def isSecure = incomingRequest.isSecure def getLocale = incomingRequest.getLocale def getLocales = incomingRequest.getLocales def getServerName = incomingRequest.getServerName def getClientContextPath(urlString: String) = incomingRequest.getClientContextPath(urlString) // Session information is preserved def isRequestedSessionIdValid = incomingRequest.isRequestedSessionIdValid def sessionInvalidate() = incomingRequest.sessionInvalidate() def getSession(create: Boolean) = incomingRequest.getSession(create) def getRequestedSessionId = incomingRequest.getRequestedSessionId // User information is preserved def getUsername = incomingRequest.getUsername def getUserRoles = incomingRequest.getUserRoles def getUserGroup = incomingRequest.getUserGroup def isUserInRole(role: String) = incomingRequest.isUserInRole(role) def getUserPrincipal = incomingRequest.getUserPrincipal def getAuthType = incomingRequest.getAuthType }
wesley1001/orbeon-forms
src/main/scala/org/orbeon/oxf/externalcontext/LocalRequest.scala
Scala
lgpl-2.1
9,720
package org.jetbrains.plugins.scala package codeInsight.template import com.intellij.application.options.CodeStyle import com.intellij.lang.ASTNode import com.intellij.openapi.project.Project import com.intellij.psi.codeStyle.{CodeStyleSettingsManager, JavaCodeStyleSettings, ReferenceAdjuster} import org.jetbrains.plugins.scala.lang.psi.api.ScalaRecursiveElementVisitor import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, TypeAdjuster} import scala.collection.mutable.ArrayBuffer /** * @author Alefas * @since 03/09/14. */ class ScalaReferenceAdjuster extends ReferenceAdjuster { //todo: expression adjuster //todo: process returns element, should return element after replacement //todo: support useFqInJavadoc //todo: support useFqInCode override def process(element: ASTNode, addImports: Boolean, incompleteCode: Boolean, useFqInJavadoc: Boolean, useFqInCode: Boolean): ASTNode = { processRange(element, element.getTextRange.getStartOffset, element.getTextRange.getEndOffset, addImports, incompleteCode, useFqInJavadoc, useFqInCode) element } override def processRange(element: ASTNode, startOffset: Int, endOffset: Int, useFqInJavadoc: Boolean, useFqInCode: Boolean): Unit = { processRange(element, startOffset, endOffset, addImports = true, incompleteCode = false, useFqInJavadoc = useFqInJavadoc, useFqInCode = useFqInCode) } def processRange(element: ASTNode, startOffset: Int, endOffset: Int, addImports: Boolean, incompleteCode: Boolean, useFqInJavadoc: Boolean, useFqInCode: Boolean): Unit = { val psi = element.getPsi if (!psi.getLanguage.isKindOf(ScalaLanguage.INSTANCE)) return //do not process other languages val buffer = new ArrayBuffer[ScalaPsiElement]() val visitor = new ScalaRecursiveElementVisitor { override def visitElement(element: ScalaPsiElement): Unit = { if (element.getTextRange.getStartOffset >= startOffset && element.getTextRange.getEndOffset <= endOffset) { buffer += element } else super.visitElement(element) } } psi.accept(visitor) TypeAdjuster.adjustFor(buffer, addImports) } override def processRange(element: ASTNode, startOffset: Int, endOffset: Int, project: Project): Unit = { val settings = CodeStyle.getSettings(project).getCustomSettings(classOf[JavaCodeStyleSettings]) processRange(element, startOffset, endOffset, settings.useFqNamesInJavadocAlways, settings.USE_FQ_CLASS_NAMES) } override def process(element: ASTNode, addImports: Boolean, incompleteCode: Boolean, project: Project): ASTNode = { val settings = CodeStyle.getSettings(project).getCustomSettings(classOf[JavaCodeStyleSettings]) process(element, addImports, incompleteCode, settings.useFqNamesInJavadocAlways, settings.USE_FQ_CLASS_NAMES) } }
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInsight/template/ScalaReferenceAdjuster.scala
Scala
apache-2.0
2,906
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.ems.core.user.model import org.beangle.data.model.LongId import org.beangle.data.model.pojo.Updated import org.beangle.ems.core.config.model.Domain class Todo extends LongId with Updated { var user: User = _ var domain: Domain = _ var contents: String = _ var isDone: Boolean = _ }
beangle/ems
core/src/main/scala/org/beangle/ems/core/user/model/Todo.scala
Scala
lgpl-3.0
1,037
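// A brief usage sketch of the mutable entity above; the sample text is
// invented, and user/domain would come from the surrounding user/config modules.
import org.beangle.ems.core.user.model.Todo

object TodoDemo extends App {
  val todo = new Todo
  todo.contents = "Review pending account requests"
  todo.isDone = false
}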
package io.zophie.connection import io.zophie.api.event._ import org.json4s._ import org.json4s.native.JsonMethods._ case class JSONConversionException(msg : String) extends Exception(msg) // Implementation of JSON, to be passed to JSON converter trait EventDataJSONConverter[T <: EventData] { def toJSON (data : T) : JObject def fromJSON (data : JObject) : T } trait EventDataJSONConverterRegistry { def register[T <: EventData] (event : Event[T])(implicit converter : EventDataJSONConverter[T]) : Unit def getEvent (name : String) : Option[Event[EventData]] def getEventDataJSONConverter[T <: EventData] (event : Event[T]) : Option[EventDataJSONConverter[T]] } class JSONEventConverter(implicit edconvreg : EventDataJSONConverterRegistry) extends EventConverter[String] { def toData[T <: EventData] (event : EventDataComposite[T]) : String = { val eventDataJSONConverter = edconvreg.getEventDataJSONConverter(event.event) match { case Some(value) => value case _ => throw JSONConversionException("No event-data jsonconverter found for " + event) } val eventstring = event.event.name val data = eventDataJSONConverter.toJSON(event.data) val json = { import org.json4s.JsonDSL._ ("event" -> eventstring) ~ ("data" -> data) } return compact(render(json)) } // Parses JSON string into Event-EventData pair def fromData (data : String) : EventDataComposite[EventData] = { // TODO: Add Exception on json syntax errors, or not fitting schema val json = { parse(data) } val name : String = (json \\ "event").extract[String](DefaultFormats, implicitly) val jsondata = (json \\ "data").asInstanceOf[JObject] val event = edconvreg.getEvent(name) match { case Some(value) => value case _ => throw JSONConversionException("No event found with name " + name) } val eventDataJSONConverter = edconvreg.getEventDataJSONConverter(event) match { case Some(value) => value case _ => throw JSONConversionException("No event-data jsonconverter found for " + event) } // TODO: implement with Option val eventData = eventDataJSONConverter.fromJSON(jsondata) return EventDataComposite(event)(eventData) } } // A converter that creates JSON-data from an Event-Event data pair package object JSONConverter { def register[T <: EventData] (event : Event[T])(implicit converter : EventDataJSONConverter[T]) = mainEventJSONConverterRegistry.register[T](event)(converter) def toData[T <: EventData] (event : EventDataComposite[T]) : String = jsonc.toData(event) def fromData (data : String) : EventDataComposite[EventData] = jsonc.fromData(data) implicit object edjsonNoEventData extends EventDataJSONConverter[NoEventData.type] { override def toJSON (data : NoEventData.type) = JObject() override def fromJSON (data : JObject) = NoEventData } implicit object mainEventJSONConverterRegistry extends EventDataJSONConverterRegistry { private var eventRegistry : Map[String, Event[EventData]] = Map() private var eventDataRegistry : Map[Event[EventData], EventDataJSONConverter[_]] = Map() def register[T <: EventData] (event : Event[T])(implicit converter : EventDataJSONConverter[T]) = { eventRegistry += (event.name -> event) eventDataRegistry += (event -> converter) } override def getEvent (name : String) : Option[Event[EventData]] = eventRegistry.get(name) override def getEventDataJSONConverter[T <: EventData] (event : Event[T]) = eventDataRegistry.get(event).map(_.asInstanceOf[EventDataJSONConverter[T]]) } // Implicit version, so it doesn't need to be passed implicit case object jsonc extends JSONEventConverter { } }
torstein-vik/zophie
src/main/scala/connection/JSONConverter.scala
Scala
gpl-3.0
4,093
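// A hedged round-trip sketch using the built-in NoEventData converter. How
// Event itself is constructed is not shown in this file, so the constructor
// below is an assumption to be adapted to the real Event definition.
import io.zophie.api.event._
import io.zophie.connection._

object JsonRoundTripDemo extends App {
  val ping: Event[NoEventData.type] = new Event("ping") // hypothetical constructor
  JSONConverter.register(ping)

  val wire = JSONConverter.toData(EventDataComposite(ping)(NoEventData))
  println(wire) // {"event":"ping","data":{}}

  val decoded = JSONConverter.fromData(wire)
  assert(decoded.event.name == "ping")
}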
package org.sofi.deadman.test.view import org.sofi.deadman.component.view._ import org.sofi.deadman.messages.command._ import org.sofi.deadman.messages.query._ import org.sofi.deadman.test.TestSystem import scala.concurrent.duration._ final class KeyViewTest extends TestSystem { // View private val viewActor = system.actorOf(KeyView.props(aggregate, eventLog)) // Test TTL (min allowed value) private val ttl = 1.second.toMillis // Expected value private val taskKey = "test" "A key view" must { "Successfully receive Task events" in { // Should come back in query results taskActor ! ScheduleTask(taskKey, aggregate, "0", ttl) expectMsg(CommandResponse(ResponseType.SUCCESS)) // Should come back in query results taskActor ! ScheduleTask(taskKey, aggregate, "1", ttl) expectMsg(CommandResponse(ResponseType.SUCCESS)) // Should NOT come back in query results taskActor ! ScheduleTask("test2", aggregate, "2", ttl) expectMsg(CommandResponse(ResponseType.SUCCESS)) // Query viewActor ! GetTasks(QueryType.KEY, key = Some(taskKey)) expectMsgPF() { case result: Tasks ⇒ result.tasks.size must be(2) result.tasks.foreach(_.key must be(taskKey)) } } "Successfully clear state on a TaskExpiration event" in { // Wait for tasks to expire Thread.sleep(2.seconds.toMillis) // Query view state viewActor ! GetTasks(QueryType.KEY, key = Some(taskKey)) expectMsgPF() { case result: Tasks ⇒ result.tasks.isEmpty must be(true) } } } }
SocialFinance/deadman-switch
core/src/test/scala/org/sofi/deadman/test/view/KeyViewTest.scala
Scala
bsd-3-clause
1,624
package com.machinomy.bergae.crypto import java.security.SecureRandom import org.bouncycastle.asn1.x9.X9IntegerConverter import org.bouncycastle.crypto.digests.SHA256Digest import org.bouncycastle.crypto.ec.CustomNamedCurves import org.bouncycastle.crypto.params.{ECDomainParameters, ECPrivateKeyParameters, ECPublicKeyParameters} import org.bouncycastle.crypto.signers.{ECDSASigner, HMacDSAKCalculator} import org.bouncycastle.math.ec.custom.sec.SecP256K1Curve import org.bouncycastle.math.ec.{ECAlgorithms, ECPoint, FixedPointUtil} object EllipticCurve { val CURVE_PARAMS = CustomNamedCurves.getByName("secp256k1") FixedPointUtil.precompute(CURVE_PARAMS.getG, 12) val CURVE = new ECDomainParameters(CURVE_PARAMS.getCurve, CURVE_PARAMS.getG, CURVE_PARAMS.getN, CURVE_PARAMS.getH) val HALF_CURVE_ORDER = CURVE_PARAMS.getN.shiftRight(1) val secureRandom = new SecureRandom() def sign(message: Seq[Byte], key: ECKey)(implicit digest: Digest[Sha256Hash]): ECSignature = { val hash = digest(message) sign(hash, key) } def sign(digest: Sha256Hash, key: ECKey): ECSignature = { val signer = new ECDSASigner(new HMacDSAKCalculator(new SHA256Digest())) val privateKeyParameters = new ECPrivateKeyParameters(key.priv.bigInteger, CURVE) signer.init(true, privateKeyParameters) val Array(r, sRaw) = signer.generateSignature(digest.bytes) val s = if (sRaw.compareTo(HALF_CURVE_ORDER) > 0) CURVE.getN.subtract(sRaw) else sRaw ECSignature(r, s) } def verify(digest: Sha256Hash, signature: ECSignature, pub: ECPub): Boolean = { assert(signature.r >= 1) assert(signature.r <= CURVE.getN) assert(signature.s >= 1) assert(signature.s <= CURVE.getN) val signer = new ECDSASigner() val point = pub.point.getEncoded(true) val params = new ECPublicKeyParameters(CURVE.getCurve.decodePoint(point), CURVE) signer.init(false, params) signer.verifySignature(digest.bytes, signature.r.bigInteger, signature.s.bigInteger) } def verify(message: Seq[Byte], signature: ECSignature, pub: ECPub)(implicit digest: Digest[Sha256Hash]): Boolean = { val hash = digest(message) verify(hash, signature, pub) } def recoverFromSignature(recId: Int, signature: ECSignature, digest: Sha256Hash)(implicit toBigInt: ToBigInt[Sha256Hash]): Option[ECPub] = { def decompressKey(xBN: BigInt, yBit: Boolean): ECPoint = { val x9 = new X9IntegerConverter() val compEnc = x9.integerToBytes(xBN.bigInteger, 1 + x9.getByteLength(CURVE.getCurve)) compEnc.update(0, if (yBit) 0x03 else 0x02) CURVE.getCurve.decodePoint(compEnc) } assert(recId >= 0 && recId <= 3) assert(signature.r.signum >= 0) assert(signature.s.signum >= 0) val n = CURVE.getN val i = BigInt(recId / 2) val x = signature.r + i * n val prime = SecP256K1Curve.q if (x >= prime) { None } else { val R = decompressKey(x, (recId & 1) == 1) if (!R.multiply(n).isInfinity) { None } else { val e = toBigInt(digest) val eInv = (0 - e).mod(n) val rInv = signature.r.modInverse(n) val srInv = (rInv * signature.s).mod(n) val eInvrInv = (rInv * eInv).mod(n) val q = ECAlgorithms.sumOfTwoMultiplies(CURVE.getG, eInvrInv.bigInteger, R, srInv.bigInteger) Some(ECPub(q)) } } } }
machinomy/bergae
src/main/scala/com/machinomy/bergae/crypto/EllipticCurve.scala
Scala
apache-2.0
3,362
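// A sketch of the sign/verify round trip, using only the methods defined
// above; how ECKey, ECPub, and the implicit Digest[Sha256Hash] are obtained
// is left abstract because their definitions live elsewhere in this package.
import com.machinomy.bergae.crypto._

object SignatureRoundTrip {
  def roundTrip(key: ECKey, pub: ECPub, message: Seq[Byte])
               (implicit digest: Digest[Sha256Hash]): Boolean = {
    val sig = EllipticCurve.sign(message, key) // deterministic nonce (HMacDSAKCalculator)
    EllipticCurve.verify(message, sig, pub)    // true when pub matches key and sig is intact
  }
}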
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, EmptyBlock, ExprCode} import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.errors.QueryExecutionErrors import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ /** * Return the unscaled Long value of a Decimal, assuming it fits in a Long. * Note: this expression is internal and created only by the optimizer, * we don't need to do type check for it. */ case class UnscaledValue(child: Expression) extends UnaryExpression with NullIntolerant { override def dataType: DataType = LongType override def toString: String = s"UnscaledValue($child)" protected override def nullSafeEval(input: Any): Any = input.asInstanceOf[Decimal].toUnscaledLong override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { defineCodeGen(ctx, ev, c => s"$c.toUnscaledLong()") } override protected def withNewChildInternal(newChild: Expression): UnscaledValue = copy(child = newChild) } /** * Create a Decimal from an unscaled Long value. * Note: this expression is internal and created only by the optimizer, * we don't need to do type check for it. */ case class MakeDecimal( child: Expression, precision: Int, scale: Int, nullOnOverflow: Boolean) extends UnaryExpression with NullIntolerant { def this(child: Expression, precision: Int, scale: Int) = { this(child, precision, scale, !SQLConf.get.ansiEnabled) } override def dataType: DataType = DecimalType(precision, scale) override def nullable: Boolean = child.nullable || nullOnOverflow override def toString: String = s"MakeDecimal($child,$precision,$scale)" protected override def nullSafeEval(input: Any): Any = { val longInput = input.asInstanceOf[Long] val result = new Decimal() if (nullOnOverflow) { result.setOrNull(longInput, precision, scale) } else { result.set(longInput, precision, scale) } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { nullSafeCodeGen(ctx, ev, eval => { val setMethod = if (nullOnOverflow) { "setOrNull" } else { "set" } val setNull = if (nullable) { s"${ev.isNull} = ${ev.value} == null;" } else { "" } s""" |${ev.value} = (new Decimal()).$setMethod($eval, $precision, $scale); |$setNull |""".stripMargin }) } override protected def withNewChildInternal(newChild: Expression): MakeDecimal = copy(child = newChild) } object MakeDecimal { def apply(child: Expression, precision: Int, scale: Int): MakeDecimal = { new MakeDecimal(child, precision, scale) } } /** * An expression used to wrap the children when promote the precision of DecimalType to avoid * promote multiple times. 
*/ case class PromotePrecision(child: Expression) extends UnaryExpression { override def dataType: DataType = child.dataType override def eval(input: InternalRow): Any = child.eval(input) /** Just a simple pass-through for code generation. */ override def genCode(ctx: CodegenContext): ExprCode = child.genCode(ctx) override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = ev.copy(EmptyBlock) override def prettyName: String = "promote_precision" override def sql: String = child.sql override lazy val canonicalized: Expression = child.canonicalized override protected def withNewChildInternal(newChild: Expression): Expression = copy(child = newChild) } /** * Rounds the decimal to given scale and check whether the decimal can fit in provided precision * or not. If not, if `nullOnOverflow` is `true`, it returns `null`; otherwise an * `ArithmeticException` is thrown. */ case class CheckOverflow( child: Expression, dataType: DecimalType, nullOnOverflow: Boolean) extends UnaryExpression { override def nullable: Boolean = true override def nullSafeEval(input: Any): Any = input.asInstanceOf[Decimal].toPrecision( dataType.precision, dataType.scale, Decimal.ROUND_HALF_UP, nullOnOverflow) override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { nullSafeCodeGen(ctx, ev, eval => { s""" |${ev.value} = $eval.toPrecision( | ${dataType.precision}, ${dataType.scale}, Decimal.ROUND_HALF_UP(), $nullOnOverflow); |${ev.isNull} = ${ev.value} == null; """.stripMargin }) } override def toString: String = s"CheckOverflow($child, $dataType, $nullOnOverflow)" override def sql: String = child.sql override protected def withNewChildInternal(newChild: Expression): CheckOverflow = copy(child = newChild) } // A variant `CheckOverflow`, which treats null as overflow. This is necessary in `Sum`. case class CheckOverflowInSum( child: Expression, dataType: DecimalType, nullOnOverflow: Boolean) extends UnaryExpression { override def nullable: Boolean = true override def eval(input: InternalRow): Any = { val value = child.eval(input) if (value == null) { if (nullOnOverflow) null else throw QueryExecutionErrors.overflowInSumOfDecimalError } else { value.asInstanceOf[Decimal].toPrecision( dataType.precision, dataType.scale, Decimal.ROUND_HALF_UP, nullOnOverflow) } } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val childGen = child.genCode(ctx) val nullHandling = if (nullOnOverflow) { "" } else { s"throw QueryExecutionErrors.overflowInSumOfDecimalError();" } val code = code""" |${childGen.code} |boolean ${ev.isNull} = ${childGen.isNull}; |Decimal ${ev.value} = null; |if (${childGen.isNull}) { | $nullHandling |} else { | ${ev.value} = ${childGen.value}.toPrecision( | ${dataType.precision}, ${dataType.scale}, Decimal.ROUND_HALF_UP(), $nullOnOverflow); | ${ev.isNull} = ${ev.value} == null; |} |""".stripMargin ev.copy(code = code) } override def toString: String = s"CheckOverflowInSum($child, $dataType, $nullOnOverflow)" override def sql: String = child.sql override protected def withNewChildInternal(newChild: Expression): CheckOverflowInSum = copy(child = newChild) }
maropu/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
Scala
apache-2.0
7,289
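// The UnscaledValue/MakeDecimal pair above round-trips a decimal through its
// unscaled long. The same semantics can be sketched directly with Spark's
// Decimal (real API; the numbers are arbitrary).
import org.apache.spark.sql.types.Decimal

object UnscaledRoundTrip extends App {
  val d = Decimal(123456L, precision = 10, scale = 2) // represents 1234.56
  val unscaled = d.toUnscaledLong                     // 123456, what UnscaledValue yields
  val rebuilt = new Decimal().set(unscaled, 10, 2)    // what MakeDecimal reconstructs
  assert(rebuilt == d)
}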
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution import org.apache.spark.sql.ExperimentalMethods import org.apache.spark.sql.catalyst.catalog.SessionCatalog import org.apache.spark.sql.catalyst.optimizer.Optimizer import org.apache.spark.sql.execution.python.ExtractPythonUDFFromAggregate import org.apache.spark.sql.internal.SQLConf class SparkOptimizer( catalog: SessionCatalog, conf: SQLConf, experimentalMethods: ExperimentalMethods) extends Optimizer(catalog, conf) { override def batches: Seq[Batch] = super.batches :+ Batch("Extract Python UDF from Aggregate", Once, ExtractPythonUDFFromAggregate) :+ Batch("User Provided Optimizers", fixedPoint, experimentalMethods.extraOptimizations: _*) }
gioenn/xSpark
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkOptimizer.scala
Scala
apache-2.0
1,519
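// The "User Provided Optimizers" batch above is fed from ExperimentalMethods.
// A minimal sketch of plugging a rule in from user code; the rule here is a
// deliberate no-op placeholder and the object names are invented.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

object PassThroughRule extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan // placeholder: no rewrite
}

object RegisterRuleDemo extends App {
  val spark = SparkSession.builder().master("local[1]").appName("demo").getOrCreate()
  // Lands in the fixed-point "User Provided Optimizers" batch of SparkOptimizer.
  spark.experimental.extraOptimizations = Seq(PassThroughRule)
  spark.stop()
}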
package org.jetbrains.plugins.scala package scalai18n package codeInspection package i18n package internal import com.intellij.codeInspection.LocalInspectionTool import org.jetbrains.plugins.scala.codeInspection.{ScalaInspectionBundle, ScalaInspectionTestBase} class ScalaExtractStringToBundleInspectionTest extends ScalaInspectionTestBase{ override protected val classOfInspection: Class[_ <: LocalInspectionTool] = classOf[ScalaExtractStringToBundleInspection] override protected val description = ScalaInspectionBundle.message("internal.string.should.be.in.bundle") override protected def createTestText(text: String): String = s""" |object org { | object jetbrains { | object annotations { | class Nls extends scala.annotation.StaticAnnotation | | @Nls | class SpecificNls extends scala.annotation.StaticAnnotation | } | } |} |import org.jetbrains.annotations.Nls |import org.jetbrains.annotations.SpecificNls | |def toNls(@Nls arg: String): Unit = () | |$text |""".stripMargin def test_simple_string(): Unit = checkTextHasError(raw""" toNls($START"blub"$END) """) def test_interpolated_string(): Unit = checkTextHasError(raw"""val v = 3; toNls(${START}s"$$v blub"$END) """) def test_concated_string(): Unit = checkTextHasError(raw""" toNls($START"blub" + "abc"$END) """) def test_string_in_parenthesis(): Unit = checkTextHasError(raw""" toNls( ($START"blub"$END) ) """) def test_string_to_named_parameter(): Unit = checkTextHasError(raw""" toNls( arg = $START"blub"$END) """) def test_string_to_overriding_parameter(): Unit = checkTextHasError( raw""" |trait Base { def toNls(@Nls arg: String): Unit } |object Impl extends Base { override def toNls(arg: String): Unit = () } |Impl.toNls($START"blub"$END) |""".stripMargin) def test_string_to_overriding_named_parameter(): Unit = checkTextHasError( raw""" |trait Base { def toNls(@Nls arg: String): Unit } |object Impl extends Base { override def toNls(arg: String): Unit = () } |Impl.toNls(arg = $START"blub"$END) |""".stripMargin) def test_string_in_block(): Unit = checkTextHasError( raw""" |toNls( | { | "not to nls" | $START"blub"$END | } |) |""".stripMargin) def test_string_in_callblock(): Unit = checkTextHasError( raw""" |toNls { | "not to nls" | $START"blub"$END |} |""".stripMargin) def test_string_in_infix(): Unit = checkTextHasError( raw""" |object X { | def asInfix(@Nls str: String): Unit = () |} |X asInfix $START"blub"$END |""".stripMargin) def test_string_in_infix_2_params(): Unit = checkTextHasError( raw""" |object X { | def asInfix(@Nls str: String, int: Int): Unit = () |} |X asInfix ($START"blub"$END, 1) |""".stripMargin) def test_string_in_if(): Unit = checkTextHasError( raw""" |toNls(if ("".toBoolean) $START"in then"$END | else $START"in else"$END) |""".stripMargin ) def test_string_in_match(): Unit = checkTextHasError( raw""" |toNls("string" match { | case false => $START"in true"$END | case true => $START"in false"$END |}) |""".stripMargin) def test_string_in_typeExpr(): Unit = checkTextHasError( raw""" |toNls($START"blub"$END: String) |""".stripMargin) def test_in_def(): Unit = checkTextHasError( raw""" |@Nls |def test = $START"blub"$END |""".stripMargin) def test_in_annotated_val(): Unit = checkTextHasError( raw""" |@Nls |val test = $START"blub"$END |""".stripMargin) def test_in_overriding_val(): Unit = checkTextHasError( raw""" |trait Base { @Nls def test: String } |new Base { val test = $START"blub"$END } |""".stripMargin) def test_in_lazyval(): Unit = checkTextHasError( raw""" |@Nls |lazy val test = $START"blub"$END |""".stripMargin) def 
test_in_var(): Unit = checkTextHasError( raw""" |@Nls |var test = $START"blub"$END |""".stripMargin) def test_assign_to_var(): Unit = checkTextHasError( raw""" |@Nls |var test = null |test = $START"blub"$END |""".stripMargin) def test_assign_to_method(): Unit = checkTextHasError( raw""" |object X { | def test: String = "" | def test_=(@Nls arg: String): Unit = () |} | |X.test = $START"blub"$END |""".stripMargin) def test_case_class_with_nls(): Unit = checkTextHasError( s""" |case class Test(@Nls text: String) |Test(${START}"blub"$END) |""".stripMargin ) }
JetBrains/intellij-scala
scala/integration/properties/test/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/internal/ScalaExtractStringToBundleInspectionTest.scala
Scala
apache-2.0
5,304
package toplev /* * This trait is for representations that can be generically * printed. */ trait GenericPrintable { def prettyPrint: String }
j-c-w/mlc
src/main/scala/GenericPrintable.scala
Scala
gpl-3.0
150
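// A one-line implementation sketch; the case class is invented for illustration.
import toplev.GenericPrintable

case class Var(name: String) extends GenericPrintable {
  def prettyPrint: String = name // a variable prints as its own name
}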
package org.opensplice.mobile.dev.dadds import org.omg.dds.core.event.DataAvailableEvent import org.omg.dds.core.event.LivelinessChangedEvent import org.omg.dds.core.event.RequestedDeadlineMissedEvent import org.omg.dds.core.event.RequestedIncompatibleQosEvent import org.omg.dds.core.event.SampleLostEvent import org.omg.dds.core.event.SampleRejectedEvent import org.omg.dds.core.event.SubscriptionMatchedEvent object prelude { import org.omg.dds.core.event._ import org.omg.dds.sub.DataReader implicit class ReaderListener[T]( val fun: PartialFunction[Any, Unit] ) extends org.omg.dds.sub.DataReaderListener[T] { def onRequestedDeadlineMissed( e: RequestedDeadlineMissedEvent[T] ) {} def onRequestedIncompatibleQos( e: RequestedIncompatibleQosEvent[T] ) {} def onSampleRejected( e: SampleRejectedEvent[T] ) {} def onLivelinessChanged( e: LivelinessChangedEvent[T] ) {} def onDataAvailable( e: DataAvailableEvent[T] ) { val evt = DataAvailable( e.getSource.asInstanceOf[DataReader[T]] ) if ( fun.isDefinedAt( evt ) ) fun( evt ) } def onSubscriptionMatched( e: SubscriptionMatchedEvent[T] ) { //println("onSubscriptionMatched: " + e.getSource().toString()) val evt = SubscriptionMatched( e.getSource.asInstanceOf[DataReader[T]] ) if ( fun.isDefinedAt( evt ) ) fun( evt ) } def onSampleLost( e: SampleLostEvent[T] ) {} } }
levitha/levitha
src/main/scala/org/opensplice/mobile/dev/dadds/prelude.scala
Scala
apache-2.0
1,411
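// Because ReaderListener wraps a partial function, a listener can be written
// as a bare pattern match; events outside the function's domain are silently
// ignored. DataAvailable and SubscriptionMatched are assumed to be case
// classes defined elsewhere in this project, so the patterns below are a
// sketch, not confirmed API.
import org.opensplice.mobile.dev.dadds.prelude._

object ListenerSketch {
  def listenerFor[Msg]: ReaderListener[Msg] = new ReaderListener[Msg]({
    case DataAvailable(reader)   => println(s"data available on $reader")
    case SubscriptionMatched(_)  => println("matched a publisher")
  })
}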
import main.scala.Global
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileUtil, Path, FileSystem}

object FsUtils {

  def copyFromLocal(source: String, dest: String) = {
    val conf = new Configuration()
    conf.set("fs.default.name", "hdfs://" + Global.host + ":" + Global.fsPort)
    conf.set("mapred.job.tracker", Global.host + ":" + Global.jtPort)
    // disable caching to avoid java.io.IOException: Filesystem closed
    conf.setBoolean("fs.hdfs.impl.disable.cache", true)

    val fileSystem = FileSystem.get(conf)
    val srcPath = new Path(source)
    val dstPath = new Path(dest)

    // If the destination already exists, remove it so the copy below succeeds
    if (fileSystem.exists(dstPath)) {
      println(dstPath + " exists!")
      delFromHdfs(dest)
    }

    // Get the filename out of the file path
    val filename = source.substring(source.lastIndexOf('/') + 1, source.length())

    fileSystem.copyFromLocalFile(srcPath, dstPath)
    println("File " + filename + " (local) copied to " + dest + " (hdfs)")

    fileSystem.close()
  }

  def copyFromHdfs(source: String, dest: String) = {
    val conf = new Configuration()
    conf.set("fs.default.name", "hdfs://" + Global.host + ":" + Global.fsPort)
    conf.set("mapred.job.tracker", Global.host + ":" + Global.jtPort)

    val fileSystem = FileSystem.get(conf)
    val srcPath = new Path(source)
    val dstPath = new Path(dest)

    // Warn if the destination already exists; copyToLocalFile overwrites it
    if (fileSystem.exists(dstPath)) {
      println(dstPath + " exists!")
    }

    // Get the filename out of the file path
    val filename = source.substring(source.lastIndexOf('/') + 1, source.length())

    fileSystem.copyToLocalFile(srcPath, dstPath)
    println("File " + filename + " (hdfs) copied to " + dest + " (local)")

    fileSystem.close()
  }

  def mergeOnHdfs(source: String, dest: String) = {
    val conf = new Configuration()
    conf.set("fs.default.name", "hdfs://" + Global.host + ":" + Global.fsPort)
    conf.set("mapred.job.tracker", Global.host + ":" + Global.jtPort)

    val fileSystem = FileSystem.get(conf)
    val srcPath = new Path(source)
    val dstPath = new Path(dest)

    // Warn if the destination already exists; copyMerge cannot replace it
    if (fileSystem.exists(dstPath)) {
      println(dstPath + " exists!")
    }

    // Get the filename out of the file path
    val filename = source.substring(source.lastIndexOf('/') + 1, source.length())

    FileUtil.copyMerge(fileSystem, srcPath, fileSystem, dstPath, false, conf, null)
    println("File " + filename + " (hdfs) merged into " + dest + " (hdfs)")

    fileSystem.close()
  }

  def delFromHdfs(path: String) = {
    val conf = new Configuration()
    conf.set("fs.default.name", "hdfs://" + Global.host + ":" + Global.fsPort)
    conf.set("mapred.job.tracker", Global.host + ":" + Global.jtPort)

    val fileSystem = FileSystem.get(conf)
    fileSystem.delete(new Path(path), true)
    println("File " + path + " (hdfs) deleted")

    fileSystem.close()
  }
}
pomadchin/hadoop-dg-decomp
src/main/scala/scalding/FsUtils.scala
Scala
apache-2.0
3,058
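// A usage sketch of the helpers above; the paths are invented, and Global.host
// plus the two ports must already be configured for the target cluster.
object FsUtilsDemo extends App {
  FsUtils.copyFromLocal("/tmp/input.csv", "/user/demo/input.csv")           // local -> HDFS
  FsUtils.mergeOnHdfs("/user/demo/job-output", "/user/demo/output.csv")     // part files -> one file
  FsUtils.delFromHdfs("/user/demo/job-output")                              // clean up the parts
}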
object Test { class A class B class C class F[X] def f(implicit aa: F[A]) = println(aa) implicit def a : F[A] = new F[A]() // generalised from t2421b to verify we check enough class G[X] implicit def g[X] = new G[X]() implicit def b[X <: B](implicit mx: G[X]) = new F[X]() f }
scala/scala
test/files/pos/t2421c.scala
Scala
apache-2.0
303
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.rules.logical import org.apache.flink.api.scala._ import org.apache.flink.table.api._ import org.apache.flink.table.planner.plan.nodes.FlinkConventions import org.apache.flink.table.planner.plan.nodes.logical.{FlinkLogicalCalc, FlinkLogicalLegacyTableSourceScan} import org.apache.flink.table.planner.plan.optimize.program._ import org.apache.flink.table.planner.plan.rules.FlinkBatchRuleSets import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.NonDeterministicUdf import org.apache.flink.table.planner.utils.TableTestBase import org.apache.calcite.plan.hep.HepMatchOrder import org.apache.calcite.rel.rules._ import org.apache.calcite.tools.RuleSets import org.junit.{Before, Test} /** * Test for [[FlinkCalcMergeRule]]. */ class FlinkCalcMergeRuleTest extends TableTestBase { private val util = batchTestUtil() @Before def setup(): Unit = { val programs = new FlinkChainedProgram[BatchOptimizeContext]() programs.addLast( "table_ref", FlinkHepRuleSetProgramBuilder.newBuilder .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE) .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) .add(FlinkBatchRuleSets.TABLE_REF_RULES) .build()) programs.addLast( "logical", FlinkVolcanoProgramBuilder.newBuilder .add(RuleSets.ofList( CoreRules.FILTER_TO_CALC, CoreRules.PROJECT_TO_CALC, FlinkCalcMergeRule.INSTANCE, FlinkLogicalCalc.CONVERTER, FlinkLogicalLegacyTableSourceScan.CONVERTER )) .setRequiredOutputTraits(Array(FlinkConventions.LOGICAL)) .build()) util.replaceBatchProgram(programs) util.addTableSource[(Int, Int, String)]("MyTable", 'a, 'b, 'c) util.addFunction("random_udf", new NonDeterministicUdf) } @Test def testCalcMergeWithSameDigest(): Unit = { util.verifyRelPlan("SELECT a, b FROM (SELECT * FROM MyTable WHERE a = b) t WHERE b = a") } @Test def testCalcMergeWithNonDeterministicExpr1(): Unit = { val sqlQuery = "SELECT a, a1 FROM (SELECT a, random_udf(a) AS a1 FROM MyTable) t WHERE a1 > 10" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeWithNonDeterministicExpr2(): Unit = { val sqlQuery = "SELECT a FROM (SELECT a FROM MyTable) t WHERE random_udf(a) > 10" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeWithNestedNonDeterministicExpr(): Unit = { val sqlQuery = "SELECT random_udf(a1) as a2 FROM (SELECT random_udf(a) as" + " a1, b FROM MyTable) t WHERE b > 10" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeWithTopMultiNonDeterministicExpr(): Unit = { val sqlQuery = "SELECT random_udf(a1) as a2, random_udf(a1) as a3 FROM" + " (SELECT random_udf(a) as a1, b FROM MyTable) t WHERE b > 10" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeTopFilterHasNonDeterministicExpr(): Unit = { val sqlQuery = "SELECT a, c FROM" + " (SELECT a, random_udf(b) as b1, 
c FROM MyTable) t WHERE b1 > 10" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeWithBottomMultiNonDeterministicExpr(): Unit = { val sqlQuery = "SELECT a1, b2 FROM" + " (SELECT random_udf(a) as a1, random_udf(b) as b2, c FROM MyTable) t WHERE c > 10" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeWithBottomMultiNonDeterministicInConditionExpr(): Unit = { val sqlQuery = "SELECT c FROM" + " (SELECT random_udf(a) as a1, random_udf(b) as b2, c FROM MyTable) t WHERE a1 > b2" util.verifyRelPlan(sqlQuery) } @Test def testCalcMergeWithoutInnerNonDeterministicExpr(): Unit = { val sqlQuery = "SELECT a, c FROM (SELECT a, random_udf(a) as a1, c FROM MyTable) t WHERE c > 10" util.verifyRelPlan(sqlQuery) } }
tillrohrmann/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRuleTest.scala
Scala
apache-2.0
4,635
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/

package com.normation.rudder.web.comet

import scala.xml._
import net.liftweb.common._
import net.liftweb.util._
import Helpers._
import net.liftweb.http._
import js._
import JE._
import JsCmds._
import com.normation.rudder.batch._
import org.joda.time.Duration
import org.joda.time.format.PeriodFormatterBuilder
import com.normation.rudder.web.components.DateFormaterService
import com.normation.rudder.web.model.CurrentUser
import com.normation.rudder.services.eventlog.EventLogDeploymentService
import com.normation.eventlog.EventLog
import com.normation.rudder.domain.eventlog.ModificationWatchList
import com.normation.eventlog.UnspecializedEventLog
import com.normation.rudder.domain.eventlog.RudderEventActor
import org.joda.time.DateTime
import net.liftweb.common.EmptyBox
import com.normation.rudder.web.services.EventListDisplayer
import com.normation.eventlog.EventLogDetails
import com.normation.utils.StringUuidGenerator
import com.normation.eventlog.ModificationId
import bootstrap.liftweb.RudderConfig

class AsyncDeployment extends CometActor with CometListener with Loggable {

  private[this] val periodFormatter = new PeriodFormatterBuilder().
    appendDays().
    appendSuffix(" day ", " days ").
    appendSeparator(", ").
    appendMinutes().
    appendSuffix(" min ", " min ").
    appendSeparator(" ").
    appendSeconds().
    appendSuffix("s", "s").
    toFormatter()

  private[this] def formatPeriod(duration:Duration) : String = {
    if(duration.getMillis < 1000) "less than 1s"
    else periodFormatter.print(duration.toPeriod)
  }

  private[this] val asyncDeploymentAgent = RudderConfig.asyncDeploymentAgent
  private[this] val eventLogDeploymentService = RudderConfig.eventLogDeploymentService
  private[this] val eventList = RudderConfig.eventListDisplayer
  private[this] val uuidGen = RudderConfig.stringUuidGenerator

  //current states of the deployment
  private[this] var deploymentStatus = DeploymentStatus(NoStatus, IdleDeployer)

  // the last deployment data
  private[this] var lastSuccessfulDeployement : Box[EventLog] = eventLogDeploymentService.getLastSuccessfulDeployement()
  private[this] var lastEventSinceDeployment : Box[Seq[EventLog]] = Empty

  override def registerWith = asyncDeploymentAgent

  override def lowPriority = {
    case d:DeploymentStatus => deploymentStatus = d ; reRender()
  }

  override def render = {
    new RenderOut((
      ClearClearable &
      "#deploymentLastStatus *" #> lastStatus &
      "#deploymentProcessing *" #> currentStatus
    )(layout) , JsRaw("""$("button.deploymentButton").button(); """))
  }

  val deployementErrorMessage = """(.*)!errormessage!(.*)""".r

  private[this] def lastStatus : NodeSeq = {
    deploymentStatus.current match {
      case NoStatus => <span>Rules application status unavailable</span>
      case SuccessStatus(id,start,end,configurationNodes) =>
        <span class="deploymentSuccess">
          <img src="/images/icOK.png" alt="Error" height="16" width="16" class="iconscala" />
          Success: Rules applied at {DateFormaterService.getFormatedDate(start)} (took {formatPeriod(new Duration(start,end))})
        </span>
      case ErrorStatus(id,start,end,failure) =>
        {<span class="error deploymentError"><img src="/images/icfail.png" alt="Error" height="16" width="16" class="iconscala" />
          Error: Rules not applied at {DateFormaterService.getFormatedDate(start)} <br/>(took {formatPeriod(new Duration(start,end))} -
          <span class="errorscala" id="errorDetailsLink" onClick={
            """$('#errorDetailsDialog').modal({ minHeight:140, minWidth: 300 }); $('#simplemodal-container').css('height', 'auto'); correctButtons(); return false;"""
          }>details</span>)
        </span>} ++ {
          ("#errorDetailsMessage" #> { failure.messageChain match {
            case deployementErrorMessage(chain, error) =>
              <span>{chain.split("<-").map(x => Text("⇨ " + x) ++ {<br/>})}</span>
              <br/>
              <div class="curspoint listopen" onClick="$('#deploymentErrorMsg').toggle();$('#simplemodal-container').css('width', '80%');$('#simplemodal-container').resize();$(this).toggleClass('listopen');$(this).toggleClass('listclose');"><b>Show technical details</b></div>
              <br/>
              <fieldset id="deploymentErrorMsg" style="display:none;"><legend><b>Technical details</b></legend>
                <span>{error.split("<-").map(x => Text("⇨ " + x) ++ {<br/>})}<br/></span>
              </fieldset>
            case _ =>
              <span>{failure.messageChain.split("<-").map(x => Text("⇨ " + x) ++ {<br/>})}</span>
          } }).apply(errorPopup)
        }
    }
  }

  private[this] def currentStatus : NodeSeq = {
    deploymentStatus.processing match {
      case IdleDeployer =>
        <lift:authz role="deployment_write"> {
          SHtml.ajaxButton("Regenerate now", { () =>
            asyncDeploymentAgent ! ManualStartDeployment(ModificationId(uuidGen.newUuid), CurrentUser.getActor, "User requested a manual regeneration") //TODO: let the user fill the cause
            Noop
          }, ( "class" , "deploymentButton"))
        } </lift:authz>
      case Processing(id, start) =>
        <span>
          <img src="/images/deploying.gif" alt="Deploying..." height="16" width="16" class="iconscala" />
          Generating Rules (started at {DateFormaterService.getFormatedDate(start)})
        </span>
      case ProcessingAndPendingAuto(asked, Processing(id, start), actor, logId) =>
        <span>
          <img src="/images/deploying.gif" alt="Deploying..." height="16" width="16" class="iconscala" />
          Generating Rules (started at {DateFormaterService.getFormatedDate(start)}).
          Another generation is pending since {DateFormaterService.getFormatedDate(asked)}
        </span>
      case ProcessingAndPendingManual(asked, Processing(id, start), actor, logId, cause) =>
        <span>
          <img src="/images/deploying.gif" alt="Deploying..." height="16" width="16" class="iconscala" />
          Generating Rules (started at {DateFormaterService.getFormatedDate(start)}).
          Another generation is pending since {DateFormaterService.getFormatedDate(asked)}
        </span>
    }
  }

  private[this] def layout = {
    <div id="deploymentStatus">
      <div style="font-size: 14px; font-weight: bold; margin-bottom:7px;">Deployment status</div>
      <lift:ignore>
        Here comes the status of the last finished deployment.
        Status can be: no previous deployment, correctly deployed, warning, error.
      </lift:ignore>
      <div id="deploymentLastStatus">
        [Here comes the status of the last finished deployment]
      </div>
      <lift:ignore>
        Here comes an indication of the current deployment.
        May be: not deploying (a button is shown to start a deployment), deploying (give an idea of the time remaining?), deploying + one pending
      </lift:ignore>
      <div id="deploymentProcessing">
        [Here comes the current deployment processing]
      </div>
    </div>
  }

  private[this] def errorPopup = {
    <div id="errorDetailsDialog" class="nodisplay">
      <div class="simplemodal-title">
        <h1>Error</h1>
        <hr/>
      </div>
      <div class="simplemodal-content">
        <br />
        <div>
          <img src="/images/icfail.png" alt="Error" height="24" width="24" class="erroricon" />
          <h2>Deployment process was stopped due to an error:</h2>
        </div>
        <hr class="spacer" />
        <br />
        <p id="errorDetailsMessage">[Here comes the error message]</p>
        <hr class="spacer" />
      </div>
      <div class="simplemodal-bottom">
        <hr/>
        <div class="popupButton">
          <span>
            <button class="simplemodal-close" onClick="return false;">
              Close
            </button>
          </span>
        </div>
      </div>
    </div>
  }
}
jooooooon/rudder
rudder-web/src/main/scala/com/normation/rudder/web/comet/AsyncDeployment.scala
Scala
agpl-3.0
9,661
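The formatPeriod helper above is plain Joda-Time. A self-contained illustration of the same builder chain (the durations and printed values are examples chosen for this sketch, not taken from Rudder):

import org.joda.time.Duration
import org.joda.time.format.PeriodFormatterBuilder

object FormatPeriodDemo extends App {
  // Same shape as the comet actor's formatter: days, minutes, seconds.
  val formatter = new PeriodFormatterBuilder()
    .appendDays().appendSuffix(" day ", " days ")
    .appendSeparator(", ")
    .appendMinutes().appendSuffix(" min ")
    .appendSeparator(" ")
    .appendSeconds().appendSuffix("s")
    .toFormatter()

  def formatPeriod(duration: Duration): String =
    if (duration.getMillis < 1000) "less than 1s"
    else formatter.print(duration.toPeriod)

  println(formatPeriod(new Duration(90 * 1000L))) // prints something like "1 min 30s"
  println(formatPeriod(new Duration(500L)))       // prints "less than 1s"
}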
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.visor.commands.ping

import org.apache.ignite.visor.{VisorRuntimeBaseSpec, visor}

import org.apache.ignite.visor.commands.ping.VisorPingCommand._

/**
 * Unit test for 'ping' command.
 */
class VisorPingCommandSpec extends VisorRuntimeBaseSpec(2) {
    describe("A 'ping' visor command") {
        it("should properly execute") {
            visor.ping()
        }

        it("should print error message when not connected") {
            closeVisorQuiet()

            visor.ping()
        }
    }
}
pperalta/ignite
modules/visor-console/src/test/scala/org/apache/ignite/visor/commands/ping/VisorPingCommandSpec.scala
Scala
apache-2.0
1,332
package com.arcusys.valamis.content.storage.impl

import com.arcusys.valamis.content.model.PlainText
import com.arcusys.valamis.persistence.common.{OptionFilterSupport3, SlickProfile}
import com.arcusys.valamis.content.storage.PlainTextStorage
import com.arcusys.valamis.content.storage.impl.schema.ContentTableComponent
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend

/**
 *
 * Created by pkornilov on 23.10.15.
 */
class PlainTextStorageImpl(val db: JdbcBackend#DatabaseDef, val driver: JdbcProfile)
  extends PlainTextStorage
    with ContentTableComponent
    with OptionFilterSupport3
    with SlickProfile {

  import driver.api._

  override def getById(id: Long) = plainTexts.filter(_.id === id).result.headOption

  override def update(plainText: PlainText) =
    plainTexts.filter(_.id === plainText.id).map(_.update).update(plainText)

  override def delete(id: Long) = plainTexts.filter(_.id === id).delete

  override def create(plainText: PlainText) = {
    (plainTexts returning plainTexts.map(_.id)).into { (row, gId) =>
      row.copy(id = Some(gId))
    } += plainText
  }

  override def getByCourse(courseId: Long) = plainTexts.filter(_.courseId === courseId).result

  override def getByCategory(categoryId: Long) =
    plainTexts.filter(pt => pt.categoryId === categoryId).result

  override def getByCategory(categoryId: Option[Long], courseId: Long) =
    plainTexts.filter(pt => optionFilter(pt.categoryId, categoryId) && pt.courseId === courseId).result

  override def getCountByCourse(courseId: Long) =
    plainTexts.filter(_.courseId === courseId).length.result

  override def getCountByCategory(categoryId: Option[Long], courseId: Long) =
    plainTexts.filter(pt => optionFilter(pt.categoryId, categoryId) && pt.courseId === courseId).length.result

  override def moveToCategory(id: Long, newCategoryId: Option[Long], courseId: Long) = {
    val query = for { q <- plainTexts if q.id === id } yield (q.categoryId, q.courseId)
    query.update(newCategoryId, courseId)
  }

  override def moveToCourse(id: Long, courseId: Long, moveToRoot: Boolean) = {
    if (moveToRoot) {
      val query = for { q <- plainTexts if q.id === id } yield (q.courseId, q.categoryId)
      query.update(courseId, None)
    } else {
      val query = for { q <- plainTexts if q.id === id } yield q.courseId
      query.update(courseId)
    }
  }
}
arcusys/Valamis
valamis-questionbank/src/main/scala/com/arcusys/valamis/content/storage/impl/PlainTextStorageImpl.scala
Scala
gpl-3.0
2,413
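Each method above returns a Slick DBIO action rather than executing anything itself, so callers compose and run them against the wrapped database. A minimal usage sketch, where db, driver, and somePlainText are assumed givens from the surrounding application (the PlainText constructor is defined elsewhere in the project):

import scala.concurrent.ExecutionContext.Implicits.global

val storage = new PlainTextStorageImpl(db, driver)

// Each storage method returns an action; nothing runs until db.run.
val created  = db.run(storage.create(somePlainText)) // Future[PlainText] carrying the generated id
val inCourse = db.run(storage.getByCourse(42L))      // Future[Seq[PlainText]]

// Actions compose before execution, e.g. create-then-count:
val createThenCount = for {
  _ <- storage.create(somePlainText)
  n <- storage.getCountByCourse(42L)
} yield n
val count = db.run(createThenCount)                  // Future[Int]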
package dsmoq.maintenance.services import java.util.UUID import java.nio.file.Paths import scala.util.Failure import scala.util.Success import scala.util.Try import org.joda.time.DateTime import org.slf4j.MarkerFactory import com.typesafe.scalalogging.LazyLogging import dsmoq.maintenance.AppConfig import dsmoq.maintenance.data.SearchResult import dsmoq.maintenance.data.dataset.SearchCondition import dsmoq.maintenance.data.dataset.SearchCondition.DatasetType import dsmoq.maintenance.data.dataset.AccessLevel import dsmoq.maintenance.data.dataset.AclAddData import dsmoq.maintenance.data.dataset.AclListData import dsmoq.maintenance.data.dataset.AclUpdateData import dsmoq.maintenance.data.dataset.OwnerType import dsmoq.maintenance.data.dataset.SearchAclsParameter import dsmoq.maintenance.data.dataset.SearchAclGroupParameter import dsmoq.maintenance.data.dataset.SearchAclUserParameter import dsmoq.maintenance.data.dataset.SearchResultDataset import dsmoq.maintenance.data.dataset.SearchResultOwnership import dsmoq.maintenance.data.dataset.UpdateParameter import dsmoq.maintenance.data.dataset.AddAclGroupParameter import dsmoq.maintenance.data.dataset.AddAclUserParameter import dsmoq.maintenance.data.dataset.UpdateAclGroupParameter import dsmoq.maintenance.data.dataset.UpdateAclUserParameter import dsmoq.persistence import dsmoq.persistence.GroupType import dsmoq.persistence.UserAccessLevel import dsmoq.persistence.PostgresqlHelper.PgConditionSQLBuilder import dsmoq.persistence.PostgresqlHelper.PgSQLSyntaxType import org.scalatra.util.MultiMap import scalikejdbc.ConditionSQLBuilder import scalikejdbc.DB import scalikejdbc.DBSession import scalikejdbc.SelectSQLBuilder import scalikejdbc.SQLSyntax import scalikejdbc.convertJavaSqlTimestampToConverter import scalikejdbc.interpolation.Implicits.scalikejdbcSQLInterpolationImplicitDef import scalikejdbc.interpolation.Implicits.scalikejdbcSQLSyntaxToStringImplicitDef import scalikejdbc.select import scalikejdbc.sqls import scalikejdbc.update import scalikejdbc.delete import scalikejdbc.withSQL /** * データセット処理サービス */ object DatasetService extends LazyLogging { /** * ログマーカー */ val LOG_MARKER = MarkerFactory.getMarker("MAINTENANCE_DATASET_LOG") /** * サービス名 */ val SERVICE_NAME = "DatasetService" /** * ユーザーのアクセス権として有効なアクセスレベル */ val validUserAccessLevel = Set[AccessLevel](AccessLevel.LimitedRead, AccessLevel.FullRead, AccessLevel.Owner) /** * グループのアクセス権として有効なアクセスレベル */ val validGroupAccessLevel = Set[AccessLevel](AccessLevel.LimitedRead, AccessLevel.FullRead, AccessLevel.Provider) /** * データセットを検索する。 * * @param condition 検索条件 * @return 検索結果 */ def search(condition: SearchCondition): SearchResult[SearchResultDataset] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "search", condition)) val d = persistence.Dataset.d val o = persistence.Ownership.o val g = persistence.Group.g val m = persistence.Member.m val u = persistence.User.u def createSqlBase(topSelect: SelectSQLBuilder[Unit]): ConditionSQLBuilder[Unit] = { topSelect .from(persistence.Dataset as d) .where( sqls.toAndConditionOpt( condition.datasetType match { case DatasetType.NotDeleted => Some(sqls.isNull(d.deletedBy).and.isNull(d.deletedAt)) case DatasetType.Deleted => Some(sqls.isNotNull(d.deletedBy).and.isNotNull(d.deletedAt)) case _ => None }, if (condition.ownerId.isEmpty) { None } else { Some( sqls.exists( select .from(persistence.Ownership as o) .innerJoin(persistence.Group as g).on(g.id, o.groupId) .innerJoin(persistence.Member as m).on(m.groupId, g.id) .innerJoin(persistence.User as 
u).on(u.id, m.userId) .where .eq(o.datasetId, d.id) .and .eq(o.accessLevel, UserAccessLevel.Owner) .and .eq(g.groupType, GroupType.Personal) .and .eq(u.disabled, false) .and .upperLikeQuery(u.name, condition.ownerId) .toSQLSyntax ) ) }, if (condition.datasetName.isEmpty) { None } else { Some(upperLikeQuery(d.name, condition.datasetName)) } ) ) } val limit = AppConfig.searchLimit val offset = (condition.page - 1) * limit DB.readOnly { implicit s => val total = withSQL { createSqlBase(select(sqls.count)) }.map(_.int(1)).single.apply().getOrElse(0) val records = withSQL { createSqlBase(select(d.result.*)) .orderBy(d.createdAt, d.id) .offset(offset) .limit(limit) }.map { rs => val dataset = persistence.Dataset(d.resultName)(rs) val ownerships = searchDatasetOwnerships(dataset.id) SearchResultDataset( id = dataset.id, name = dataset.name, description = dataset.description, owners = ownerships.collect { case SearchResultOwnership(_, _, name, AccessLevel.Owner) => name }, numOfFiles = dataset.filesCount, createdAt = dataset.createdAt, updatedAt = dataset.updatedAt, deletedAt = dataset.deletedAt ) }.list.apply() SearchResult( offset, limit, total, records ) } } /** * データセットアクセス権追加画面を表示するための情報を取得する。 * * @param param 入力パラメータ * @return 取得結果 * Failure(ServiceException) データセットIDが未指定の場合 * Failure(ServiceException) 存在しないIDの場合 */ def getAclAddData(param: SearchAclsParameter): Try[AclAddData] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "getAclAddData", param)) val result = DB.readOnly { implicit s => for { id <- Util.require(param.datasetId, "データセットID") dataset <- searchDatasetById(id) } yield { AclAddData( datasetId = dataset.id, datasetName = dataset.name ) } } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * データセットアクセス権一覧画面を表示するための情報を取得する。 * * @param param 入力パラメータ * @return 取得結果 * Failure(ServiceException) データセットIDが未指定の場合 * Failure(ServiceException) 存在しないIDの場合 */ def getAclListData(param: SearchAclsParameter): Try[AclListData] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "getAclListData", param)) val result = DB.readOnly { implicit s => for { id <- Util.require(param.datasetId, "データセットID") dataset <- searchDatasetById(id) } yield { val ownerships = searchDatasetOwnerships(id) AclListData( datasetId = dataset.id, datasetName = dataset.name, ownerships = ownerships ) } } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * データセットアクセス権更新(ユーザー)画面を表示するための情報を取得する。 * * @param param 入力パラメータ * @return 取得結果 * Failure(ServiceException) データセットID、ユーザーIDが未指定の場合 * Failure(ServiceException) データセット、ユーザーが存在しない場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def getAclUpdateDataForUser(param: SearchAclUserParameter): Try[AclUpdateData] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "getAclUpdateDataForUser", param)) val result = DB.readOnly { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") userId <- Util.require(param.userId, "ユーザーID") dataset <- searchDatasetById(datasetId) user <- searchUserById(userId) ownership <- searchOwnershipForUser(datasetId, userId) } yield { AclUpdateData( datasetId = dataset.id, datasetName = dataset.name, ownership = ownership ) } } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * IDからユーザーを取得する。 * * @param userId ユーザーID * @param s DBセッション * @return 取得結果 * Failure(ServiceException) 存在しないIDの場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def searchUserById(userId: String)(implicit s: DBSession): Try[persistence.User] = { val u = persistence.User.u val result = withSQL { 
select(u.result.*) .from(persistence.User as u) .where .eq(u.id, sqls.uuid(userId)) }.map(persistence.User(u.resultName)).single.apply() result match { case Some(user) if user.disabled => Failure(new ServiceException("無効なユーザーが指定されました。")) case Some(user) => Success(user) case None => Failure(new ServiceException("存在しないユーザーが指定されました。")) } } /** * 対象のユーザーに対するアクセス権を取得する。 * * @param datasetId データセットID * @param userId ユーザーID * @param s DBセッション * @return 取得結果 * Failure(ServiceException) アクセス権が取得できなかった場合 */ def searchOwnershipForUser(datasetId: String, userId: String)(implicit s: DBSession): Try[SearchResultOwnership] = { val o = persistence.Ownership.o val g = persistence.Group.g val m = persistence.Member.m val u = persistence.User.u val result = withSQL { select(o.result.accessLevel, u.result.*) .from(persistence.Ownership as o) .innerJoin(persistence.Group as g).on(g.id, o.groupId) .innerJoin(persistence.Member as m).on(m.groupId, g.id) .innerJoin(persistence.User as u).on(u.id, m.userId) .where( sqls.toAndConditionOpt( Some(sqls.eq(o.datasetId, sqls.uuid(datasetId))), Some(sqls.ne(o.accessLevel, persistence.UserAccessLevel.Deny)), Some(sqls.isNull(g.deletedBy)), Some(sqls.isNull(g.deletedAt)), Some(sqls.eq(g.groupType, persistence.GroupType.Personal)), Some(sqls.eq(u.id, sqls.uuid(userId))), Some(sqls.eq(u.disabled, false)) ) ) }.map { rs => val accessLevel = rs.int(o.resultName.accessLevel) val userId = rs.string(u.resultName.id) val userName = rs.string(u.resultName.name) val ownerType = OwnerType(persistence.OwnerType.User) SearchResultOwnership( id = userId, ownerType = ownerType, name = userName, accessLevel = AccessLevel(ownerType, accessLevel) ) }.single.apply() result match { case Some(r) => Success(r) case None => Failure(new ServiceException("アクセス権が未設定です。")) } } /** * データセットアクセス権更新(グループ)画面を表示するための情報を取得する。 * * @param param 入力パラメータ * @return 取得結果 * Failure(ServiceException) データセットID、グループIDが未指定の場合 * Failure(ServiceException) データセット、グループが存在しない場合 * Failure(ServiceException) 指定したグループが削除されている場合 */ def getAclUpdateDataForGroup(param: SearchAclGroupParameter): Try[AclUpdateData] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "getAclUpdateDataForGroup", param)) val result = DB.readOnly { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") groupId <- Util.require(param.groupId, "グループID") dataset <- searchDatasetById(datasetId) group <- searchGroupById(groupId) ownership <- searchOwnershipForGroup(datasetId, groupId) } yield { AclUpdateData( datasetId = dataset.id, datasetName = dataset.name, ownership = ownership ) } } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * IDからグループを取得する。 * * @param groupId グループID * @param s DBセッション * @return 取得結果 * Failure(ServiceException) 存在しないIDの場合 * Failure(ServiceException) 指定したグループが論理削除されている場合 */ def searchGroupById(groupId: String)(implicit s: DBSession): Try[persistence.Group] = { val g = persistence.Group.g val result = withSQL { select(g.result.*) .from(persistence.Group as g) .where .eq(g.id, sqls.uuid(groupId)) }.map(persistence.Group(g.resultName)).single.apply() result match { case Some(group) if group.deletedAt.isDefined => Failure(new ServiceException("削除されたグループが指定されました。")) case Some(group) => Success(group) case None => Failure(new ServiceException("存在しないグループが指定されました。")) } } /** * 対象のグループに対するアクセス権を取得する。 * * @param datasetId データセットID * @param groupId グループID * @param s DBセッション * @return 取得結果 * Failure(ServiceException) アクセス権が取得できなかった場合 */ def searchOwnershipForGroup(datasetId: String, groupId: 
String)(implicit s: DBSession): Try[SearchResultOwnership] = { val o = persistence.Ownership.o val g = persistence.Group.g val result = withSQL { select(o.result.accessLevel, g.result.*) .from(persistence.Ownership as o) .innerJoin(persistence.Group as g).on(g.id, o.groupId) .where( sqls.toAndConditionOpt( Some(sqls.eq(o.datasetId, sqls.uuid(datasetId))), Some(sqls.ne(o.accessLevel, persistence.GroupAccessLevel.Deny)), Some(sqls.isNull(g.deletedBy)), Some(sqls.isNull(g.deletedAt)), Some(sqls.eq(g.groupType, persistence.GroupType.Public)) ) ) }.map { rs => val accessLevel = rs.int(o.resultName.accessLevel) val groupId = rs.string(g.resultName.id) val groupName = rs.string(g.resultName.name) val ownerType = OwnerType(persistence.OwnerType.Group) SearchResultOwnership( id = groupId, ownerType = ownerType, name = groupName, accessLevel = AccessLevel(ownerType, accessLevel) ) }.single.apply() result match { case Some(r) => Success(r) case None => Failure(new ServiceException("アクセス権が未設定です。")) } } /** * データセットを取得する。 * * @param datasetId データセットID * @param s DBセッション * @return 取得結果 * Failure(ServiceException) 存在しないIDの場合 */ def searchDatasetById(datasetId: String)(implicit s: DBSession): Try[persistence.Dataset] = { val d = persistence.Dataset.d val dataset = withSQL { select .from(persistence.Dataset as d) .where .eq(d.id, sqls.uuid(datasetId)) }.map(persistence.Dataset(d)).single.apply dataset match { case Some(d) => Success(d) case None => Failure(new ServiceException("存在しないデータセットが指定されました。")) } } /** * データセットを検索する。 * * @param condition 検索条件 * @return 検索結果 */ def upperLikeQuery(column: SQLSyntax, value: String): SQLSyntax = { sqls.upperLikeQuery(column, value) } /** * データセットの持つアクセス権を取得する。 * * @param datasetId データセットID * @param s DBセッション * @return 取得結果 */ def searchDatasetOwnerships(datasetId: String)(implicit s: DBSession): Seq[SearchResultOwnership] = { val o = persistence.Ownership.o val g = persistence.Group.g val m = persistence.Member.m val u = persistence.User.u val publics = withSQL { select(o.result.*, g.result.*) .from(persistence.Ownership as o) .innerJoin(persistence.Group as g).on(g.id, o.groupId) .where( sqls.toAndConditionOpt( Some(sqls.eq(o.datasetId, sqls.uuid(datasetId))), Some(sqls.ne(o.accessLevel, persistence.GroupAccessLevel.Deny)), Some(sqls.eq(g.groupType, persistence.GroupType.Public)), Some(sqls.isNull(g.deletedBy)), Some(sqls.isNull(g.deletedAt)) ) ) }.map { rs => val accessLevel = rs.int(o.resultName.accessLevel) val createdAt = rs.jodaDateTime(o.resultName.createdAt) val groupId = rs.string(g.resultName.id) val groupName = rs.string(g.resultName.name) val ownerType = OwnerType(persistence.OwnerType.Group) val result = SearchResultOwnership( id = groupId, ownerType = ownerType, name = groupName, accessLevel = AccessLevel(ownerType, accessLevel) ) (result, createdAt) }.list.apply() val personals = withSQL { select(o.result.*, u.result.*) .from(persistence.Ownership as o) .innerJoin(persistence.Group as g).on(g.id, o.groupId) .innerJoin(persistence.Member as m).on(m.groupId, g.id) .innerJoin(persistence.User as u).on(u.id, m.userId) .where( sqls.toAndConditionOpt( Some(sqls.eq(o.datasetId, sqls.uuid(datasetId))), Some(sqls.ne(o.accessLevel, persistence.UserAccessLevel.Deny)), Some(sqls.eq(g.groupType, persistence.GroupType.Personal)), Some(sqls.isNull(g.deletedBy)), Some(sqls.isNull(g.deletedAt)), Some(sqls.eq(u.disabled, false)) ) ) }.map { rs => val accessLevel = rs.int(o.resultName.accessLevel) val createdAt = rs.jodaDateTime(o.resultName.createdAt) val userId = 
rs.string(u.resultName.id) val userName = rs.string(u.resultName.name) val ownerType = OwnerType(persistence.OwnerType.User) val result = SearchResultOwnership( id = userId, ownerType = ownerType, name = userName, accessLevel = AccessLevel(ownerType, accessLevel) ) (result, createdAt) }.list.apply() (publics ++ personals).sortWith { case ((_, date1), (_, date2)) => date1.isBefore(date2) } .collect { case (result, _) => result } } /** * Seqが空ではないことを確認する。 * * @param seq 確認対象 * @return 処理結果、Seqが空の場合 Failure(ServiceException) */ def checkNonEmpty(seq: Seq[String]): Try[Unit] = { if (seq.isEmpty) { Failure(new ServiceException("データセットが選択されていません。")) } else { Success(()) } } /** * 論理削除を適用する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) 要素が空の場合 */ def applyLogicalDelete(param: UpdateParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyLogicalDelete", param)) DB.localTx { implicit s => for { _ <- checkNonEmpty(param.targets) _ <- execApplyLogicalDelete(param.targets) } yield { () } } } /** * データセットに論理削除を適用する。 * * @param ids 論理削除対象のデータセットID * @param s DBセッション * @return 処理結果 */ def execApplyLogicalDelete(ids: Seq[String])(implicit s: DBSession): Try[Unit] = { Try { val timestamp = DateTime.now() val systemUserId = AppConfig.systemUserId val d = persistence.Dataset.column withSQL { update(persistence.Dataset) .set( d.deletedAt -> timestamp, d.deletedBy -> sqls.uuid(systemUserId), d.updatedAt -> timestamp, d.updatedBy -> sqls.uuid(systemUserId) ) .where .inUuid(d.id, ids) .and .isNull(d.deletedAt) .and .isNull(d.deletedBy) }.update.apply() } } /** * 論理削除解除を適用する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) 要素が空の場合 */ def applyCancelLogicalDelete(param: UpdateParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyCancelLogicalDelete", param)) DB.localTx { implicit s => for { _ <- checkNonEmpty(param.targets) _ <- execApplyCancelLogicalDelete(param.targets) } yield { () } } } /** * データセットに論理削除解除を適用する。 * * @param ids 論理削除解除対象のデータセットID * @param s DBセッション * @return 処理結果 */ def execApplyCancelLogicalDelete(ids: Seq[String])(implicit s: DBSession): Try[Unit] = { Try { val timestamp = DateTime.now() val systemUserId = AppConfig.systemUserId val d = persistence.Dataset.column withSQL { update(persistence.Dataset) .set( d.deletedAt -> None, d.deletedBy -> None, d.updatedAt -> timestamp, d.updatedBy -> sqls.uuid(systemUserId) ) .where .inUuid(d.id, ids) .and .isNotNull(d.deletedAt) .and .isNotNull(d.deletedBy) }.update.apply() } } /** * 物理削除を適用する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) 要素が空の場合 * Failure(ServiceException) データセットの物理削除に失敗した場合 * Failure(ServiceException) 物理ファイルの物理削除に失敗した場合 */ def applyPhysicalDelete(param: UpdateParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyPhysicalDelete", param)) DB.localTx { implicit s => for { _ <- checkNonEmpty(param.targets) _ <- execApplyPhysicalDelete(param.targets) } yield { () } } } /** * データセット物理削除の削除対象のケースクラス * * @param apps AppID * @param images 画像ID * @param invalids 削除に失敗したデータセット * @param records 物理削除を行うデータセットオブジェクト * @param deleteFiles 削除対象の物理ファイル・ディレクトリ */ case class DeleteInfo( apps: Seq[String], images: Seq[String], invalids: Seq[DeleteUtil.DeleteFailedData], records: Seq[persistence.Dataset], deleteFiles: Seq[DeleteUtil.DeleteTarget] ) /** * データセットに物理削除を適用する。 * * @param targets 物理削除対象のデータセットID * @param s DBセッション * @return 処理結果 * Failure(ServiceException) 
データセットの物理削除に失敗した場合 * Failure(ServiceException) 物理ファイルの物理削除に失敗した場合 */ def execApplyPhysicalDelete(targets: Seq[String])(implicit s: DBSession): Try[Unit] = { val deleteResult = Try { val datasets = getDatasets(targets) val (deleteds, notDeleteds) = datasets.partition(d => isLogicalDeleted(d)) val (synchronizingOrDeletings, records) = deleteds.partition(d => isSynchronizingState(d) || isDeletingState(d)) val (appIds, apps) = pickupDeleteApps(records) val (imageIds, images) = pickupDeleteImages(records) val invalids = notDeleteds.map { dataset => DeleteUtil.DeleteFailedData("データセット", "論理削除済みではない", dataset.name) } ++ synchronizingOrDeletings.map { dataset => DeleteUtil.DeleteFailedData("データセット", "ファイルが移動中、または削除中", dataset.name) } val datasetFiles = records.flatMap(getDatasetFile) DeleteInfo( appIds, imageIds, invalids, records, apps ++ images ++ datasetFiles ) } for { DeleteInfo(apps, images, invalids, records, deleteFiles) <- deleteResult _ <- deleteDatasetDBData(records, apps, images) deleteFaileds <- DeleteUtil.deletePhysicalFiles(deleteFiles) _ <- DeleteUtil.deleteResultToTry(invalids, deleteFaileds) } yield { () } } /** * データセット関連のDBデータを物理削除する。 * * @param datasets 削除対象のデータセットオブジェクトのリスト * @param apps 削除対象のAppIDのリスト * @param images 削除対象の画像IDのリスト * @param s DBセッション * @return 処理結果 */ def deleteDatasetDBData( datasets: Seq[persistence.Dataset], apps: Seq[String], images: Seq[String] )(implicit s: DBSession): Try[Unit] = { Try { deleteFiles(datasets.map(_.id)) deleteAnnotations(datasets.map(_.id)) deleteApps(apps) deleteImages(images) deleteDatasets(datasets.map(_.id)) } } /** * 削除対象のAppの物理ファイルを取得する。 * * @param datasets 削除対象のデータセットオブジェクトのリスト * @param s DBセッション * @return 削除対象のAppの物理ファイルのリスト */ def pickupDeleteApps( datasets: Seq[persistence.Dataset] )(implicit s: DBSession): (Seq[String], Seq[DeleteUtil.DeleteTarget]) = { if (datasets.isEmpty) { return (Seq.empty, Seq.empty) } val a = persistence.App.a val appIds = withSQL { select(a.result.id) .from(persistence.App as a) .where .in(a.datasetId, datasets.map(dataset => sqls.uuid(dataset.id))) }.map(_.string(a.resultName.id)).list.apply().toSet.toSeq val deleteApps = appIds.map { id => DeleteUtil.LocalFile(Paths.get(AppConfig.appDir, "upload", id)) } (appIds, deleteApps) } /** * 削除対象のImageの物理ディレクトリを取得する。 * * @param datasets 削除対象のデータセットオブジェクトのリスト * @param s DBセッション * @return 削除対象のImageの物理ディレクトリのリスト */ def pickupDeleteImages( datasets: Seq[persistence.Dataset] )(implicit s: DBSession): (Seq[String], Seq[DeleteUtil.DeleteTarget]) = { if (datasets.isEmpty) { return (Seq.empty, Seq.empty) } val di = persistence.DatasetImage.di val imageIds = withSQL { select(sqls.distinct(di.result.imageId)) .from(persistence.DatasetImage as di) .where .in(di.datasetId, datasets.map(dataset => sqls.uuid(dataset.id))) }.map(_.string(di.resultName.imageId)).list.apply() val deletableImageIds = imageIds.filter { id => DeleteUtil.canDeleteImage(imageId = id, datasetIds = datasets.map(_.id)) } val deleteImages = deletableImageIds.map { id => DeleteUtil.LocalFile(Paths.get(AppConfig.imageDir, "upload", id)) } (deletableImageIds, deleteImages) } /** * 削除対象のデータセットの物理ディレクトリを取得する。 * * @param dataset 削除対象のデータセットオブジェクト * @return 削除対象のデータセットの物理ディレクトリのリスト */ def getDatasetFile(dataset: persistence.Dataset): Seq[DeleteUtil.DeleteTarget] = { val localDir = if (dataset.localState == SaveStatus.Saved) { val path = Paths.get(AppConfig.fileDir, dataset.id) Seq(DeleteUtil.LocalFile(path)) } else { Seq.empty } val s3Dir = if (dataset.s3State == SaveStatus.Saved) { 
Seq(DeleteUtil.S3File(AppConfig.s3UploadRoot, dataset.id)) } else { Seq.empty } localDir ++ s3Dir } /** * 論理削除済みかを判定する。 * * @param dataset データセットオブジェクト * @return 論理削除済みであればtrue、そうでなければfalse */ def isLogicalDeleted(dataset: persistence.Dataset): Boolean = { dataset.deletedAt.isDefined && dataset.deletedBy.isDefined } /** * データセットのローカル、S3の保存状態が移動中であるかを判定する。 * @param dataset データセットオブジェクト * @return 移動中であればtrue、そうでなければfalse */ def isSynchronizingState(dataset: persistence.Dataset): Boolean = { dataset.localState == SaveStatus.Synchronizing || dataset.s3State == SaveStatus.Synchronizing } /** * データセットのローカル、S3の保存状態が削除中であるかを判定する。 * @param dataset データセットオブジェクト * @return 削除中であればtrue、そうでなければfalse */ def isDeletingState(dataset: persistence.Dataset): Boolean = { dataset.localState == SaveStatus.Deleting || dataset.s3State == SaveStatus.Deleting } /** * データセットを取得する。 * * @param ids データセットID * @param s DBセッション * @return データセットオブジェクトのリスト */ def getDatasets( ids: Seq[String] )(implicit s: DBSession): Seq[persistence.Dataset] = { if (ids.isEmpty) { return Seq.empty } val d = persistence.Dataset.d withSQL { select .from(persistence.Dataset as d) .where .inUuid(d.id, ids) }.map(persistence.Dataset(d.resultName)).list.apply() } /** * File関連のDBデータを物理削除する。 * * @param datasetIds データセットIDのリスト * @param s DBセッション */ def deleteFiles(datasetIds: Seq[String])(implicit s: DBSession): Unit = { if (datasetIds.isEmpty) { return } val f = persistence.File.f val fileIds = withSQL { select(f.result.id) .from(persistence.File as f) .where .in(f.datasetId, datasetIds.map(sqls.uuid)) }.map(_.string(f.resultName.id)).list.apply() withSQL { delete .from(persistence.File) .where .in(persistence.File.column.id, fileIds.map(sqls.uuid)) }.update.apply() val fh = persistence.FileHistory.fh val fileHistoryIds = withSQL { select(fh.result.id) .from(persistence.FileHistory as fh) .where .in(fh.fileId, fileIds.map(sqls.uuid)) }.map(_.string(fh.resultName.id)).list.apply() withSQL { delete .from(persistence.FileHistory) .where .in(persistence.FileHistory.column.id, fileHistoryIds.map(sqls.uuid)) }.update.apply() withSQL { delete .from(persistence.ZipedFiles) .where .in(persistence.ZipedFiles.column.historyId, fileHistoryIds.map(sqls.uuid)) }.update.apply() } /** * Dataset関連のDBデータを物理削除する。 * * @param datasetIds データセットIDのリスト * @param s DBセッション */ def deleteDatasets(datasetIds: Seq[String])(implicit s: DBSession): Unit = { if (datasetIds.isEmpty) { return } withSQL { delete .from(persistence.Ownership) .where .inUuid(persistence.Ownership.column.datasetId, datasetIds) }.update.apply() withSQL { delete .from(persistence.DatasetImage) .where .inUuid(persistence.DatasetImage.column.datasetId, datasetIds) }.update.apply() withSQL { delete .from(persistence.App) .where .inUuid(persistence.App.column.datasetId, datasetIds) }.update.apply() withSQL { delete .from(persistence.Dataset) .where .inUuid(persistence.Dataset.column.id, datasetIds) }.update.apply() } /** * Imageを物理削除する。 * * @param imageIds 画像IDのリスト * @param s DBセッション */ def deleteImages(imageIds: Seq[String])(implicit s: DBSession): Unit = { if (imageIds.isEmpty) { return } withSQL { delete .from(persistence.Image) .where .inUuid(persistence.Image.column.id, imageIds) }.update.apply() } /** * Appを物理削除する。 * * @param appIds AppIDのリスト * @param s DBセッション */ def deleteApps(appIds: Seq[String])(implicit s: DBSession): Unit = { if (appIds.isEmpty) { return } withSQL { delete .from(persistence.App) .where .inUuid(persistence.App.column.id, appIds) }.update.apply() } /** * Annotation, 
DatasetAnnotationを物理削除する。 * Annotationは、他のデータセットから使用されていない場合のみ削除する。 * * @param datasetIds データセットIDのリスト * @param s DBセッション */ def deleteAnnotations(datasetIds: Seq[String])(implicit s: DBSession): Unit = { if (datasetIds.isEmpty) { return } val da = persistence.DatasetAnnotation.da val ac = persistence.Annotation.column withSQL { delete .from(persistence.Annotation) .where .notExists( select .from(persistence.DatasetAnnotation as da) .where .eq(da.annotationId, ac.id) .and .notIn(da.datasetId, datasetIds.map(sqls.uuid)) ) }.update.apply() val dac = persistence.DatasetAnnotation.column withSQL { delete .from(persistence.DatasetAnnotation) .where .in(dac.datasetId, datasetIds.map(sqls.uuid)) }.update.apply() } /** * アクセス権追加・更新時に適切なアクセスレベルかを確認する。 * * @param accessLevel チェック対象のアクセスレベル * @param validAccessLevels 有効なアクセスレベル * @param 確認結果 * Failure(ServiceException) チェック対象が有効なアクセスレベルに含まれなかった場合 */ def checkAccessLevel(accessLevel: AccessLevel, validAccessLevels: Set[AccessLevel]): Try[Unit] = { if (validAccessLevels.contains(accessLevel)) { Success(()) } else { Failure(new ServiceException("無効なアクセス権が指定されました。")) } } /** * ユーザーのアクセス権を更新する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) データセットID、ユーザーIDが未指定の場合 * Failure(ServiceException) アクセスレベルの指定がLimitedRead、FullRead、Owner以外の場合 * Failure(ServiceException) データセット、ユーザーが存在しない場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def applyUpdateAclUser(param: UpdateAclUserParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyUpdateAclUser", param)) DB.localTx { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") userId <- Util.require(param.userId, "ユーザーID") _ <- checkAccessLevel(param.accessLevel, validUserAccessLevel) dataset <- searchDatasetById(datasetId) _ <- searchUserById(userId) group <- searchUserGroupById(userId) ownership <- searchOwnership(datasetId, group.id) notDenyOwnership <- requireNotDenyOwnership(ownership) _ <- updateOwnership(notDenyOwnership.id, param.accessLevel) } yield { () } } } /** * ユーザーのアクセス権を削除する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) データセットIDが未指定の場合 * Failure(ServiceException) データセット、ユーザーが存在しない場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def applyDeleteAclUser(param: UpdateAclUserParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyDeleteAclUser", param)) DB.localTx { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") userId <- Util.require(param.userId, "ユーザーID") dataset <- searchDatasetById(datasetId) _ <- searchUserById(userId) group <- searchUserGroupById(userId) ownership <- searchOwnership(datasetId, group.id) notDenyOwnership <- requireNotDenyOwnership(ownership) _ <- updateOwnership(notDenyOwnership.id, AccessLevel.Deny) } yield { () } } } /** * ユーザーのOwnershipを更新する。 * * @param ownershipId OwnershipId * @param accessLevel アクセスレベル * @param s DBセッション * @return 処理結果 */ def updateOwnership( ownershipId: String, accessLevel: AccessLevel )(implicit s: DBSession): Try[Unit] = { Try { val timestamp = DateTime.now() val systemUserId = AppConfig.systemUserId val oc = persistence.Ownership.column val o = persistence.Ownership.o val g = persistence.Group.g val m = persistence.Member.m val u = persistence.User.u withSQL { update(persistence.Ownership) .set( oc.accessLevel -> accessLevel.toDBValue, oc.updatedAt -> timestamp, oc.updatedBy -> sqls.uuid(systemUserId) ) .where .eq(oc.id, sqls.uuid(ownershipId)) }.update.apply() } } /** * グループのアクセス権を更新する。 
* * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) データセットID、グループIDが未指定の場合 * Failure(ServiceException) アクセスレベルの指定がLimitedRead、FullRead、Provider以外の場合 * Failure(ServiceException) データセット、グループが存在しない場合 * Failure(ServiceException) 指定したグループが削除されている場合 */ def applyUpdateAclGroup(param: UpdateAclGroupParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyUpdateAclGroup", param)) DB.localTx { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") groupId <- Util.require(param.groupId, "グループID") _ <- checkAccessLevel(param.accessLevel, validGroupAccessLevel) dataset <- searchDatasetById(datasetId) _ <- searchGroupById(groupId) ownership <- searchOwnership(datasetId, groupId) notDenyOwnership <- requireNotDenyOwnership(ownership) _ <- updateOwnership(notDenyOwnership.id, param.accessLevel) } yield { () } } } /** * グループのアクセス権を削除する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) データセットID、グループIDが未指定の場合 * Failure(ServiceException) データセット、グループが存在しない場合 * Failure(ServiceException) 指定したグループが削除されている場合 */ def applyDeleteAclGroup(param: UpdateAclGroupParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyDeleteAclGroup", param)) DB.localTx { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") groupId <- Util.require(param.groupId, "グループID") dataset <- searchDatasetById(datasetId) _ <- searchGroupById(groupId) ownership <- searchOwnership(datasetId, groupId) notDenyOwnership <- requireNotDenyOwnership(ownership) _ <- updateOwnership(notDenyOwnership.id, AccessLevel.Deny) } yield { () } } } /** * ユーザーのアクセス権を追加する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) データセットIDが未指定の場合 * Failure(ServiceException) ユーザー名が未指定の場合 * Failure(ServiceException) アクセスレベルの指定がLimitedRead、FullRead、Owner以外の場合 * Failure(ServiceException) データセット、ユーザーが存在しない場合 * Failure(ServiceException) 既にDeny以外のアクセス権を持っている場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def applyAddAclUser(param: AddAclUserParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyAddAclUser", param)) DB.localTx { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") userName <- Util.require(param.userName, "ユーザー名") _ <- checkAccessLevel(param.accessLevel, validUserAccessLevel) dataset <- searchDatasetById(datasetId) group <- searchUserGroupByName(userName) ownership <- searchOwnership(datasetId, group.id) _ <- checkOwnershipNotExistsOrDeny(ownership, "ユーザー") _ <- upsertOwnership(datasetId, group.id, ownership, param.accessLevel) } yield { () } } } /** * OwnershipをUpsertする。 * * @param datasetId データセットID * @param groupId グループID * @param ownership オプショナルなアクセス権 * @param accessLevel アクセスレベル * @param s DBセッション * @return 処理結果 */ def upsertOwnership( datasetId: String, groupId: String, ownership: Option[persistence.Ownership], accessLevel: AccessLevel )(implicit s: DBSession): Try[Unit] = { ownership match { case Some(o) => updateOwnership(o.id, accessLevel) case None => addOwnership(datasetId, groupId, accessLevel) } } /** * ユーザーIDからユーザーのPersonalグループを取得する。 * * @param userId ユーザーID * @param s DBセッション * @return 取得結果 * Failure(ServiceException) 存在しないユーザーの場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def searchUserGroupById(userId: String)(implicit s: DBSession): Try[persistence.Group] = { val g = persistence.Group.g val m = persistence.Member.m val u = persistence.User.u val result = withSQL { select(g.result.*, u.result.disabled) 
.from(persistence.Group as g) .innerJoin(persistence.Member as m).on(m.groupId, g.id) .innerJoin(persistence.User as u).on(u.id, m.userId) .where .eq(g.groupType, persistence.GroupType.Personal) .and .eq(u.id, sqls.uuid(userId)) }.map { rs => val group = persistence.Group(g.resultName)(rs) val disabled = rs.boolean(u.resultName.disabled) (group, disabled) }.single.apply() result match { case Some((group, true)) => Failure(new ServiceException("無効なユーザーが指定されました。")) case Some((group, false)) => Success(group) case None => Failure(new ServiceException("存在しないユーザーが指定されました。")) } } /** * ユーザー名からユーザーのPersonalグループを取得する。 * * @param userName ユーザー名 * @param s DBセッション * @return 取得結果 * Failure(ServiceException) 存在しないユーザーの場合 * Failure(ServiceException) 指定したユーザーが無効化されている場合 */ def searchUserGroupByName(userName: String)(implicit s: DBSession): Try[persistence.Group] = { val g = persistence.Group.g val m = persistence.Member.m val u = persistence.User.u val result = withSQL { select(g.result.*, u.result.disabled) .from(persistence.Group as g) .innerJoin(persistence.Member as m).on(m.groupId, g.id) .innerJoin(persistence.User as u).on(u.id, m.userId) .where .eq(g.groupType, persistence.GroupType.Personal) .and .eq(u.name, userName) }.map { rs => val group = persistence.Group(g.resultName)(rs) val disabled = rs.boolean(u.resultName.disabled) (group, disabled) }.single.apply() result match { case Some((group, true)) => Failure(new ServiceException("無効なユーザーが指定されました。")) case Some((group, false)) => Success(group) case None => Failure(new ServiceException("存在しないユーザーが指定されました。")) } } /** * データセットとグループの間のアクセス権を取得する。 * * @param datasetId データセットID * @param groupId グループID * @return 確認結果 */ def searchOwnership( datasetId: String, groupId: String )(implicit s: DBSession): Try[Option[persistence.Ownership]] = { Try { val o = persistence.Ownership.o withSQL { select .from(persistence.Ownership as o) .where .eq(o.datasetId, sqls.uuid(datasetId)) .and .eq(o.groupId, sqls.uuid(groupId)) }.map(persistence.Ownership(o.resultName)).single.apply() } } /** * AccessLevelがDeny以外のアクセス権を取得する。 * * @param ownership オプショナルなアクセス権 * @return 取得結果 * Failure(ServiceException) AccessLevelがDenyか、アクセス権がない場合 */ def requireNotDenyOwnership(ownership: Option[persistence.Ownership]): Try[persistence.Ownership] = { ownership match { case Some(o) if o.accessLevel != persistence.UserAccessLevel.Deny => Success(o) case _ => Failure(new ServiceException(s"まだアクセス権の登録がありません。")) } } /** * アクセス権が存在しないか、またはAccessLevelがDenyであるかを確認する。 * * @param ownership オプショナルなアクセス権 * @param target アクセス権の対象(ユーザー/グループ) * @return 確認結果(アクセス権がない場合、AccessLevelがDenyの場合) * Failure(ServiceException) AccessLevelがDenyではない場合 */ def checkOwnershipNotExistsOrDeny( ownership: Option[persistence.Ownership], target: String ): Try[Unit] = { ownership match { case Some(o) if o.accessLevel != persistence.UserAccessLevel.Deny => Failure(new ServiceException(s"既に登録のある${target}が指定されました。")) case _ => Success(()) } } /** * データセットとグループの間にアクセス権を追加する。 * * @param datasetId データセットID * @param groupId グループID * @param accessLevel アクセスレベル * @return 処理結果 */ def addOwnership( datasetId: String, groupId: String, accessLevel: AccessLevel )(implicit s: DBSession): Try[Unit] = { Try { val id = UUID.randomUUID.toString val timestamp = DateTime.now() val systemUserId = AppConfig.systemUserId persistence.Ownership.create( id = id, datasetId = datasetId, groupId = groupId, accessLevel = accessLevel.toDBValue, createdBy = systemUserId, createdAt = timestamp, updatedBy = systemUserId, updatedAt = timestamp ) } } /** * 
グループのアクセス権を追加する。 * * @param param 入力パラメータ * @return 処理結果 * Failure(ServiceException) データセットIDが未指定の場合 * Failure(ServiceException) グループ名が未指定の場合 * Failure(ServiceException) アクセスレベルの指定がLimitedRead、FullRead、Provider以外の場合 * Failure(ServiceException) データセット、グループが存在しない場合 * Failure(ServiceException) 既にDeny以外のアクセス権を持っている場合 * Failure(ServiceException) 指定したグループが削除されている場合 */ def applyAddAclGroup(param: AddAclGroupParameter): Try[Unit] = { logger.info(LOG_MARKER, Util.formatLogMessage(SERVICE_NAME, "applyAddAclGroup", param)) DB.localTx { implicit s => for { datasetId <- Util.require(param.datasetId, "データセットID") groupName <- Util.require(param.groupName, "グループ名") _ <- checkAccessLevel(param.accessLevel, validGroupAccessLevel) dataset <- searchDatasetById(datasetId) group <- searchGroupByName(groupName) ownership <- searchOwnership(datasetId, group.id) _ <- checkOwnershipNotExistsOrDeny(ownership, "グループ") _ <- upsertOwnership(datasetId, group.id, ownership, param.accessLevel) } yield { () } } } /** * グループ名からグループを取得する。 * * @param groupName グループ名 * @param s DBセッション * @return 取得結果 * Failure(ServiceException) 存在しないグループの場合 * Failure(ServiceException) 指定したグループが論理削除されている場合 */ def searchGroupByName(groupName: String)(implicit s: DBSession): Try[persistence.Group] = { val g = persistence.Group.g val result = withSQL { select(g.result.*) .from(persistence.Group as g) .where .eq(g.name, groupName) }.map(persistence.Group(g.resultName)).single.apply() result match { case Some(group) if group.deletedAt.isDefined => Failure(new ServiceException("削除されたグループが指定されました。")) case Some(group) => Success(group) case None => Failure(new ServiceException("存在しないグループが指定されました。")) } } /** * POST /dataset/applyの更新操作を行う。 * * @param params 入力パラメータ * @param multiParams 入力パラメータ(複数取得可能) * @return 処理結果 * Failure(ServiceException) 存在しない操作の場合 */ def applyChange(params: Map[String, String], multiParams: MultiMap): Try[Unit] = { val param = UpdateParameter.fromMap(multiParams) val result = params.get("update") match { case Some("logical_delete") => applyLogicalDelete(param) case Some("cancel_logical_delete") => applyCancelLogicalDelete(param) case Some("physical_delete") => applyPhysicalDelete(param) case _ => Failure(new ServiceException("無効な操作です。")) } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * POST /dataset/acl/update/user/applyの更新操作を行う。 * * @param params 入力パラメータ * @return 処理結果 * Failure(ServiceException) 存在しない操作の場合 */ def applyChangeForAclUpdateUser(params: Map[String, String]): Try[Unit] = { val param = UpdateAclUserParameter.fromMap(params) val result = params.get("update") match { case Some("update") => applyUpdateAclUser(param) case Some("delete") => applyDeleteAclUser(param) case _ => Failure(new ServiceException("無効な操作です。")) } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * POST /dataset/acl/update/group/applyの更新操作を行う。 * * @param params 入力パラメータ * @return 処理結果 * Failure(ServiceException) 存在しない操作の場合 */ def applyChangeForAclUpdateGroup(params: Map[String, String]): Try[Unit] = { val param = UpdateAclGroupParameter.fromMap(params) val result = params.get("update") match { case Some("update") => applyUpdateAclGroup(param) case Some("delete") => applyDeleteAclGroup(param) case _ => Failure(new ServiceException("無効な操作です。")) } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * POST /dataset/acl/add/user/applyの更新操作を行う。 * * @param params 入力パラメータ * @return 処理結果 * Failure(ServiceException) 存在しない操作の場合 */ def applyChangeForAclAddUser(params: Map[String, String]): Try[Unit] = { val param = 
AddAclUserParameter.fromMap(params) val result = params.get("update") match { case Some("add") => applyAddAclUser(param) case _ => Failure(new ServiceException("無効な操作です。")) } Util.withErrorLogging(logger, LOG_MARKER, result) } /** * POST /dataset/acl/add/group/applyの更新操作を行う。 * * @param params 入力パラメータ * @return 処理結果 * Failure(ServiceException) 存在しない操作の場合 */ def applyChangeForAclAddGroup(params: Map[String, String]): Try[Unit] = { val param = AddAclGroupParameter.fromMap(params) val result = params.get("update") match { case Some("add") => applyAddAclGroup(param) case _ => Failure(new ServiceException("無効な操作です。")) } Util.withErrorLogging(logger, LOG_MARKER, result) } }
nkawa/dsmoq
server/maintenance/src/main/scala/dsmoq/maintenance/services/DatasetService.scala
Scala
apache-2.0
54,457
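A recurring pattern in DatasetService is validating request parameters as a for-comprehension over Try, so the first failed step short-circuits with a ServiceException. A stripped-down illustration of that control flow in plain Scala (the require helper, exception class, and names below are stand-ins invented for this sketch, not the project's actual Util.require):

import scala.util.{Try, Success, Failure}

final class ServiceException(msg: String) extends RuntimeException(msg)

// Analogue of Util.require: a missing or empty parameter becomes a Failure.
def require(param: Option[String], name: String): Try[String] =
  param.filter(_.nonEmpty) match {
    case Some(v) => Success(v)
    case None    => Failure(new ServiceException(s"$name is required"))
  }

def applyUpdate(datasetId: Option[String], userId: Option[String]): Try[Unit] =
  for {
    d <- require(datasetId, "datasetId")
    u <- require(userId, "userId")
    // ... lookups and the actual update would follow here,
    // each step returning Try so failures short-circuit ...
  } yield ()

applyUpdate(Some("ds-1"), None) // Failure(ServiceException: userId is required)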
package dhgarrette.typesupervisedtagging.util

import java.io.BufferedWriter
import java.io.File
import java.io.FileWriter

import scala.collection.mutable.ListBuffer
import scala.util.Random

object FileUtils {

  private val random = new Random(System.currentTimeMillis())

  def pathjoin(parts: String*): String = {
    (parts.dropRight(1).filter(_.nonEmpty).map(p => if (p.endsWith(File.separator)) p.dropRight(File.separator.length) else p) :+ parts.last).mkString(File.separator)
  }

  def openForWrite(filename: String): BufferedWriter =
    new BufferedWriter(new FileWriter(filename))

  def dumpToFile(content: String, filename: String): String = {
    writeUsing(filename) { w => w.write(content) }
  }

  def mktemp(prefix: String = "temp-", suffix: String = ""): String = {
    val f = File.createTempFile(prefix, suffix)
    f.delete()
    return f.getAbsolutePath
    // return "/tmp/" + f.getName()
  }

  def insertTempSubdir(file: String, mkDir: Boolean = false) = {
    val (origPath, hadoopGraphFilename) = getPathAndFile(file)
    val path =
      if (new File(origPath).getName != "temp") {
        val newPath = pathjoin(origPath, "temp")
        val pathObj = new File(newPath)
        if (mkDir && !pathObj.exists) pathObj.mkdirs()
        newPath
      }
      else origPath
    pathjoin(path, hadoopGraphFilename)
  }

  def getPathAndFile(pathAndFile: String) = {
    val absFile = new File(pathAndFile).getAbsoluteFile
    (absFile.getParent, absFile.getName)
  }

  def remove(filename: String) = {
    new File(filename).delete()
  }

  implicit def string2file(s: String) = new File(new File(s).getAbsolutePath)
  implicit def file2recursivelyDeletableFile(f: File) = new RecursivelyDeletableFile(f)

  class RecursivelyDeletableFile(private val f: File) {
    def recursiveDelete(): Boolean = {
      if (f.isDirectory)
        f.listFiles.filter(null !=).foreach(_.recursiveDelete())
      return f.delete()
    }
  }

  def exists(filename: String) = new File(filename).exists()

  def findBinary(name: String, binDir: Option[String] = None, envar: Option[String] = None, verbose: Boolean = false): String = {
    val checked = new ListBuffer[String]

    if (binDir.isDefined) {
      val path = FileUtils.pathjoin(binDir.get, name)
      if (FileUtils.exists(path))
        return path
      else
        checked += path
    }

    if (envar.isDefined) {
      val envpath = System.getenv(envar.get)
      if (envpath != null) {
        val path = FileUtils.pathjoin(envpath, name)
        if (FileUtils.exists(path))
          return path
        else
          checked += path
      }
    }

    try {
      return scala.sys.process.Process(List("which", name)) !!;
    }
    catch {
      case _ => { checked += "which " + name }
    }

    throw new RuntimeException("No binary found. Checked the following:\n" + checked.map((" ") * 16 + _).mkString("\n"))
  }

  /**
   * Automatic Resource Management
   *
   * using(new BufferedReader(new FileReader("file"))) { r =>
   *   var count = 0
   *   while (r.readLine != null) count += 1
   *   println(count)
   * }
   */
  def using[T <: { def close() }](resource: T)(block: T => Unit) {
    try {
      block(resource)
    }
    finally {
      if (resource != null) resource.close()
    }
  }

  def writeUsing(filename: String)(block: BufferedWriter => Unit): String = {
    using(openForWrite(filename))(block)
    filename
  }
}
dhgarrette/type-supervised-tagging-2012emnlp
src/main/scala/dhgarrette/typesupervisedtagging/util/FileUtils.scala
Scala
apache-2.0
3,459
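The utilities above center on the loan-pattern helpers using and writeUsing. A short usage sketch (file names and contents are arbitrary examples):

import dhgarrette.typesupervisedtagging.util.FileUtils._

// Write a file and get its name back; the writer is closed automatically.
val out = writeUsing(pathjoin("/tmp", "demo.txt")) { w =>
  w.write("hello\n")
  w.write("world\n")
}

// Read it back with the same loan pattern.
using(new java.io.BufferedReader(new java.io.FileReader(out))) { r =>
  var line = r.readLine()
  while (line != null) { println(line); line = r.readLine() }
}

// Clean up.
remove(out)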
package specs

import org.specs2.mutable._

import com.plasmaconduit.json._
import com.plasmaconduit.json.JsWriter._

class JsWriterSpec extends Specification {

  "The boolean json writer" should {
    "convert a boolean true to a JsBoolean(true)" in {
      JsValue.from(true) must beEqualTo(JsBoolean(true))
    }
    "convert a boolean false to a JsBoolean(false)" in {
      JsValue.from(false) must beEqualTo(JsBoolean(false))
    }
  }

  "The string json writer" should {
    "convert a string \"test\" to a JsString(\"test\")" in {
      JsValue.from("test") must beEqualTo(JsString("test"))
    }
  }

  "The int json writer" should {
    "convert an int from 42 to JsLong(42)" in {
      JsValue.from(42) must beEqualTo(JsLong(42))
    }
  }

  "The float json writer" should {
    "convert a float from 4.2 to JsFloat(4.2)" in {
      JsValue.from(4.2f) must beEqualTo(JsFloat(4.2f))
    }
  }

  "The list json writer" should {
    "convert a List(1, 2, 3) to a JsArray(List(JsLong(1), JsLong(2), JsLong(3)))" in {
      JsValue.from(List(1, 2, 3)) must beEqualTo(JsArray(List(JsLong(1), JsLong(2), JsLong(3))))
    }
  }

  "Multiple types in a list" should {
    "properly convert to js values" in {
      val sugar = JsObject(
        "key" -> List[JsValue](3, true)
        //"key" -> JsArray(3, true)
      )
      sugar must beEqualTo(JsObject(Map("key" -> JsArray(JsLong(3), JsBoolean(true)))))
    }
  }

  "The map json writer" should {
    "convert a Map(\"a\" -> 1, \"b\" -> 2) to a JsObject(Map(\"a\" -> JsLong(1), \"b\" -> JsLong(2)))" in {
      JsValue.from(Map("a" -> 1, "b" -> 2)) must beEqualTo(JsObject(Map("a" -> JsLong(1), "b" -> JsLong(2))))
    }
  }
}
plasmaconduit/json
src/test/scala/specs/JsWriterSpec.scala
Scala
mit
1,692
package edu.neu.coe.scala.numerics

import org.scalatest.{ FlatSpec, Matchers }

/**
 * @author scalaprof
 */
class LazyNumberFuzzySpec extends FlatSpec with Matchers {

  import Fuzzy._

  val fuzz1 = LazyFuzzy(1)
  val fuzz2 = LazyFuzzy(1,Product(2))
  // XXX why can't we say x*x here?
  def squ(x: Fuzzy): Fuzzy = x.times(x,x)
  val fuzzSquare = Named[Fuzzy]("square",squ)
  val fuzz4 = fuzz2 map fuzzSquare
  val fuzzy = Exact(1)
  val p = fuzzy*fuzzy

  "fuzz1" should "be 1" in {
    fuzz1.get shouldBe (Fuzzy.one)
  }
  it should "be -1 after negate" in {
    (-fuzz1).get shouldBe (Fuzzy.one * -1)
  }
  it should "be 0 after minus(1)" in {
    (fuzz1.-(fuzz1)).get shouldBe (Fuzzy.zero)
  }

  "fuzz2" should "be 2" in {
    fuzz2.get shouldBe (Fuzzy.one+Fuzzy.one)
  }
  it should "be 4 when multiplied by itself" in {
    (fuzz2 * fuzz2).get shouldBe (Exact(4))
  }
  it should "be 1 when divided by itself" in {
    (fuzz2 / fuzz2).get shouldBe (Fuzzy.one)
  }
  it should "be 3 when added to one" in {
    (fuzz2 + fuzz1).get shouldBe (Exact(3))
  }
  ignore should "be 6 when added to one and three" in {
    // (fuzz2 + fuzz1 + LazyFuzzy(Exact(3))).get shouldBe (Exact(6))
  }
  ignore should "be 3 when added to one by explicit function" in {
    // val lr = fuzz2 map Named("add Rat.1",{ x => x+Fuzzy.one })
    // lr.get shouldBe (Fuzzy.one*3)
  }

  "fuzzy for comprehension" should "give 4" in {
    val z = for (x <- fuzz2 ) yield fuzzSquare(x)
    z.get should be (Exact(4))
  }

  // "fuzzy composition" should "work" in {
  //   val p = fuzz1.map(ExpDifferentiable[Fuzzy]())
  //   println(s"p: $p")
  //   println(s"p.get: ${p.get}")
  //   p.get should be (2.718281828459045)
  // }
  // ignore should "work with fuzzy 1" in {
  //   val f = LazyFuzzy(Bounded(1,1E-3))
  //   val p = f.map(ExpDifferentiable[Fuzzy]())
  //   println(s"p: $p")
  //   println(s"p.get: ${p.get}")
  //   p.get should be (2.718281828459045)
  // }
  // it should "give 8" in {
  //   val z = for (x <- fuzz2; y <- fuzz4 ) yield x*y
  //   z.get should be (Exact(8))
  // }
}
rchillyard/Scalaprof
Numerics/src/test/scala/edu/neu/coe/scala/numerics/LazyNumberFuzzySpec.scala
Scala
gpl-2.0
2,081
package scikoro

import utest._

import scikoro.dice._

object ScikoroTests extends TestSuite {

  val pool = 4.d6

  def tests = this {
    "A six sided die would never score 7" - {
      for (i <- 0 to 1000) {
        val result = pool.roll
        assert(result.values.forall(_ < pool.face + 1))
        assert(result.total <= pool.face * pool.size)
      }
    }
    "Number keyed table" - {
      val table = Table.withKeyNumbers(1.d6)(
        1 -> "One",
        2 -> "Two",
        3 -> "Three",
        4 -> "Four",
        5 -> "Five",
        6 -> "Six"
      )
      for (_ <- 0 to 1000) {
        val entry = table.nextEntry()
        assert(entry.nonEmpty)
      }
    }
  }
}
lettenj61/scikoro
scikoro/shared/src/test/scala/ScikoroTests.scala
Scala
mit
691
package com.lkroll.ep.mapviewer.facades

import org.denigma.threejs._
import scalajs.js
import scalajs.js.annotation._
import scalajs.js.typedarray._
import org.scalajs.dom.Node

@js.native
@JSGlobal("THREE.CSS3DRenderer")
class CSS3DRenderer extends js.Any {}

@js.native
@JSGlobal("THREE.CSS3DObject")
class CSS3DObject extends Object3D {
  def this(element: Node = js.native) = this();
}

@js.native
@JSGlobal("THREE.CSS3DSprite")
class CSS3DSprite extends CSS3DObject {
  def this(element: Node = js.native) = this();
}
Bathtor/ep-explorer
src/main/scala/com/lkroll/ep/mapviewer/facades/CSS3DRenderer.scala
Scala
mit
525
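// Hedged usage sketch of the facades above: wrapping a DOM element as a CSS3D
// object. createElement comes from scalajs-dom; treating position as an
// inherited Object3D member follows the org.denigma.threejs typings, but this
// is an illustration, not code from the repo.
import org.scalajs.dom
val label = new CSS3DObject(dom.document.createElement("div"))
label.position.set(0.0, 10.0, 0.0)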
package smartchess

import board._
import move._
import square._
import piece._
import shared._

// game is responsible for representing, reporting and manipulating a chess game

object DataUtils {
  def BeginsWith(str: String, what: Character): Boolean = {
    if (str == null) return false
    if (str.length == 0) return false
    if (str(0) == what) return true
    false
  }

  def EndsWith(str: String, what: Character): Boolean = {
    if (str == null) return false
    if (str.length == 0) return false
    if (str(str.length - 1) == what) return true
    false
  }
}

object game {
  def split_pgn(fpgn: String): scala.collection.mutable.ArrayBuffer[String] = {
    var pgn = fpgn.replaceAll("\\r\\n", "\\n")
    pgn = pgn.replaceAll("\\r", "")
    val lines = pgn.split("\\n") :+ ""
    var pgn_list = scala.collection.mutable.ArrayBuffer[String]()
    val READING_HEAD = 0
    val READING_BODY = 1
    var status = READING_HEAD
    var content = ""
    var i = 0
    while (i < lines.length) {
      var line = lines(i)
      val empty = (line == "")
      line = line + "\\n"
      if (empty) {
        if (status == READING_BODY) {
          pgn_list += content
          content = ""
          status = READING_HEAD
        } else {
          content += "\\n"
        }
      } else {
        if (status == READING_BODY) {
          content += line
        } else {
          if (line(0) == '[') {
            content += line
          } else {
            status = READING_BODY
            content += line
          }
        }
      }
      i += 1
    }
    pgn_list
  }
}

// GameNode holds a game node
case class GameNode(
  genSan: String = "",
  genAlgeb: String = "",
  genTrueAlgeb: String = "",
  genFen: String = "",
  genTruncFen: String = "",
  genPriority: Int = 0,
  fen: String = "",
  parent: GameNode = null,
  fullmove_number: Int = 0,
  turn: Char = ' ',
  num_checks: Map[TColor, Int] = Map(WHITE -> 0, BLACK -> 0),
  var comment: String = ""
) {
  var priority = 0
  var childs = Map[String, GameNode]()

  def sortedSans: List[String] =
    childs.keys.toList.sortWith(childs(_).genPriority < childs(_).genPriority)

  def get_fullmove_number: Int = {
    if (parent == null) {
      return 0
    }
    return parent.fullmove_number
  }

  def get_turn: Char = {
    if (parent == null) {
      return ' '
    }
    return parent.turn
  }

  def get_move_no: String = {
    if (parent == null) {
      return ""
    }
    var move_no = ""
    if (parent.fullmove_number > 0) {
      move_no = parent.fullmove_number + "."
      if (parent.turn == 'b') move_no += "."
    }
    move_no
  }

  def commented_san: String = {
    if (comment == "") return genSan
    s"$genSan {$comment}"
  }
}

// game represents a chess game
class game(set_variant: String = "Standard") {
  var variant = set_variant

  def IS_FOUR_PLAYER = (variant == "Four Player")
  def IS_FLICK = (variant == "Flick")
  def IS_EXOTIC = (IS_FOUR_PLAYER || IS_FLICK)

  var b = new board(variant)
  var root = GameNode()
  var current_node = GameNode()
  var pgn_headers = Map[String, String]()
  var book: Book = Book()
  var nodesbyid = Map[Int, GameNode]()
  var currentnodeid: Int = 0

  reset

  def has_moves: Boolean = (root.childs.keys.toList.length > 0)

  def reset_nodesbyid {
    nodesbyid = Map[Int, GameNode]()
    currentnodeid = 0
  }

  def get_node_nodeid(gn: GameNode): Int = {
    for ((k, v) <- nodesbyid) if (v == gn) return k
    0
  }

  def get_current_node_nodeid: Int = get_node_nodeid(current_node)

  def select_node_by_nodeid(nodeid: Int) {
    if (nodesbyid.contains(nodeid)) {
      current_node = nodesbyid(nodeid)
      b.set_from_fen(current_node.fen)
    }
  }

  def set_from_fen(fen: String, clear_headers: Boolean = true) {
    ///////////////////////////////////
    reset_nodesbyid
    ///////////////////////////////////
    b.set_from_fen(fen)
    root = GameNode(
      genSan = "*",
      fen = fen,
      genPriority = 0,
      parent = null,
      fullmove_number = b.fullmove_number,
      turn = colorLetterOf(b.turn)
    )
    current_node = root
    ///////////////////////////////////
    nodesbyid += (currentnodeid -> root)
    currentnodeid += 1
    ///////////////////////////////////
    if (clear_headers) {
      pgn_headers = Map[String, String]()
    }
  }

  def set_from_fen_extended(fen: String, set_num_checks: Map[TColor, Int]) {
    b.set_from_fen(fen)
    b.num_checks = Map(WHITE -> set_num_checks(WHITE), BLACK -> set_num_checks(BLACK))
  }

  def reset {
    pgn_headers = Map[String, String]()
    b.reset
    set_from_fen(b.report_fen)
  }

  def delete {
    if (current_node.parent != null) {
      val delSan = current_node.genSan
      current_node = current_node.parent
      current_node.childs = current_node.childs - delSan
      b.set_from_fen(current_node.fen)
    }
  }

  def hasnode(gn: GameNode, cn: GameNode): Boolean = {
    if (gn == null) return false
    if (gn == cn) return true
    for ((san, tn) <- cn.childs) if (hasnode(gn, tn)) return true
    false
  }

  def tonode(gn: GameNode) {
    if (!hasnode(gn, root)) return
    current_node = gn
    set_from_fen_extended(current_node.fen, current_node.num_checks)
  }

  def tobegin {
    current_node = root
    set_from_fen_extended(current_node.fen, current_node.num_checks)
  }

  def forward_node: Boolean = {
    if (current_node.childs.size > 0) {
      val main_san = current_node.sortedSans(0)
      current_node = current_node.childs(main_san)
      return true
    }
    return false
  }

  def forward {
    if (forward_node) {
      set_from_fen_extended(current_node.fen, current_node.num_checks)
    }
  }

  def toend {
    while (forward_node) {
    }
    set_from_fen_extended(current_node.fen, current_node.num_checks)
  }

  def back {
    if (current_node.parent != null) {
      current_node = current_node.parent
      set_from_fen_extended(current_node.fen, current_node.num_checks)
    }
  }

  def report_fen: String = current_node.fen

  def report_trunc_fen: String = b.report_trunc_fen

  def report_pretty_start_fen: String = {
    if (IS_EXOTIC) return "startpos"
    root.fen
  }

  def makeMove(m: move, addcomment: String = "") {
    val genFen = report_fen
    val genTruncFen = report_trunc_fen
    val san = b.toSan(m)
    val algeb = m.toAlgeb
    val true_algeb = b.to_true_algeb(algeb)
    if (san != null) {
      val tfen = report_trunc_fen
      val bpos = book.get(tfen)
      val bm = bpos.get(san)
      b.makeMove(m)
      if (current_node.childs.contains(san)) {
        current_node = current_node.childs(san)
      } else {
        current_node.priority += 1
        val newNode = GameNode(
          genSan = san,
          genAlgeb = algeb,
          genTrueAlgeb = true_algeb,
          genFen = genFen,
          genTruncFen = genTruncFen,
          fen = b.report_fen,
          genPriority = current_node.priority,
          parent = current_node,
          fullmove_number = b.fullmove_number,
          turn = colorLetterOf(b.turn),
          num_checks = Map(WHITE -> b.num_checks(WHITE), BLACK -> b.num_checks(BLACK)),
          comment = addcomment
        )
        current_node.childs += (san -> newNode)
        current_node = newNode
        ///////////////////////////////////
        nodesbyid += (currentnodeid -> current_node)
        currentnodeid += 1
        ///////////////////////////////////
      }
    }
  }

  def makeSanMove(san: String, addcomment: String = "") {
    val m = b.sanToMove(san)
    if (m != null) {
      makeMove(m, addcomment)
    }
  }

  def makeAlgebMove(algeb: String, addcomment: String = "") {
    val m = move(fromalgeb = algeb)
    makeMove(m, addcomment)
  }

  def getsubcolor(level: Int): String = {
    if (level > 2) return "#ff5f5f"
    List("#0000ff", "#00ff00", "#dfdf00")(level)
  }

  var html_pgn_nodes = scala.collection.mutable.ArrayBuffer[GameNode]()

  def report_pgn_move_list_html(cn: GameNode): Tuple2[String, List[String]] = {
    val ids = scala.collection.mutable.ArrayBuffer[String]()
    var pgn = ""
    html_pgn_nodes = scala.collection.mutable.ArrayBuffer[GameNode]()

    def report_pgn_recursive(gn: GameNode, sub: Boolean, level: Int = 0) {
      val sortedSans = gn.sortedSans
      val numSans = sortedSans.length
      var i = 0
      for (san <- sortedSans) {
        val is_sub = (i > 0)
        val child = gn.childs(san)
        var move_no = child.get_move_no
        if (is_sub) {
          pgn += s"""
            |<font color="${getsubcolor(level + 1)}">($move_no
          """.stripMargin
        } else if (child.get_turn == 'w') {
          val color = getsubcolor(level)
          pgn += s"""
            |<font color="$color">$move_no</font>
          """.stripMargin
        }
        //val addsan=child.genSan
        val addsan = child.commented_san
        val addlen = addsan.length
        val index = html_pgn_nodes.length
        val sannode = gn.childs(san)
        html_pgn_nodes += sannode
        var style = "padding: 4px;"
        var action = ""
        val nodeid = "pgnnode_" + index
        ids += nodeid
        if (cn == sannode) {
          style = "background-color: #cfffcf; border-style: solid; border-width: 1px; border-color: #000000; border-radius: 10px; padding: 3px;"
          action = "editcomment"
        }
        pgn += s"""
          <span id="$nodeid" style="cursor: pointer; $style">$addsan</span>
        """
        if (is_sub) report_pgn_recursive(gn.childs(san), true, level + 1)
        i += 1
      }
      if (numSans > 0) {
        val main_child = gn.childs(sortedSans(0))
        val sortedSansMain = main_child.sortedSans
        val numSansMain = sortedSansMain.length
        val subcolor = getsubcolor(level)
        if ((numSansMain > 0) && (numSans > 1)) {
          val mains_main_child = main_child.childs(sortedSansMain(0))
          val moveno = mains_main_child.get_move_no
          if (main_child.turn == 'b') {
            pgn += s"""
              |<font color="$subcolor">$moveno</font>
            """.stripMargin
          }
        }
        report_pgn_recursive(main_child, sub, level)
      } else if (sub) {
        pgn += """|)</font>
        """.stripMargin
      }
    }

    val dummy = new board(variant)
    dummy.set_from_fen(root.fen)
    if (dummy.getturn == BLACK) {
      val fmn = dummy.fullmove_number
      pgn += s"""
        |<font color="#0000ff">$fmn. ...</font>
      """.stripMargin
    } else if (!has_moves) {
      val fmn = dummy.fullmove_number
      pgn += s"""
        |<font color="#0000ff">$fmn.
        </font>
      """.stripMargin
    }
    if (has_moves) {
      report_pgn_recursive(root, false)
    }
    pgn = pgn.replaceAll(" +", " ")
    pgn = pgn.replaceAll(" +\\\\)", ")")
    (pgn, ids.toList)
  }

  val preferred_pgn_headers: List[String] = List("Event", "Site", "Date", "Round", "White", "WhiteElo",
    "Black", "BlackElo", "Yellow", "YellowElo", "Red", "RedElo", "Result", "TimeControl", "Time",
    "TimeZone", "Variant", "FEN", "Termination", "Annotator", "StartPosition", "Opening", "PlyCount",
    "ECO", "NewWhiteElo", "NewBlackElo", "NewYellowElo", "NewRedElo")

  def get_result: String = {
    get_header("Result")
  }

  def get_termination: String = {
    var term = ""
    if (pgn_headers.contains("Result")) {
      term += " " + pgn_headers("Result")
    } else {
      term += " *"
    }
    if (pgn_headers.contains("Termination")) {
      term += " {" + pgn_headers("Termination") + "}"
    }
    term
  }

  def pgn_header_sort_func(a: String, b: String): Boolean = {
    val ai = preferred_pgn_headers.indexOf(a)
    val bi = preferred_pgn_headers.indexOf(b)
    if ((ai < 0) && (bi < 0)) return false
    if (ai < 0) return false
    if (bi < 0) return true
    ai < bi
  }

  def sorted_pgn_header_keys: List[String] = {
    pgn_headers.keys.toList.sortWith(pgn_header_sort_func)
  }

  var report_headers_html = ""

  def report_pgn_html(cn: GameNode, doheaders: Boolean = true): Tuple2[String, List[String]] = {
    pgn_headers += ("FEN" -> report_pretty_start_fen)
    pgn_headers += ("Variant" -> variant)
    var report_headers_html = ""
    if (doheaders) report_headers_html = (for (k <- sorted_pgn_header_keys) yield {
      val v = pgn_headers(k)
      s"""
        |<tr onmousedown="x='edit'; field='$k';">
        |<td><font color="#7f0000">$k</font></td>
        |<td>&nbsp;&nbsp;&nbsp;<font color="#00007f">$v</font></td>
        |</tr>
      """.stripMargin
    }).mkString("\\n")
    val move_list_html = report_pgn_move_list_html(cn)
    val term = get_termination
    val content = s"""
      |<script>
      |var x="";
      |var field="";
      |var action="";
      |</script>
      |<div style="font-family: monospace; font-size: 28px; font-weight: bold;">
      |<table cellpadding="0" cellspacing="0">
      |$report_headers_html
      |</table>
      |${if (doheaders) "<br>" else ""}
      |${move_list_html._1}
      |$term
      |</div>
    """.stripMargin
    (content, move_list_html._2)
  }

  def report_pgn_move_list: String = {
    var pgn = ""

    def report_pgn_recursive(gn: GameNode, sub: Boolean) {
      val sortedSans = gn.sortedSans
      val numSans = sortedSans.length
      var i = 0
      for (san <- sortedSans) {
        val is_sub = (i > 0)
        val child = gn.childs(san)
        var move_no = child.get_move_no
        if (is_sub) {
          pgn += "(" + move_no
        } else if (child.get_turn == 'w') {
          pgn += move_no
        }
        //val addsan=child.genSan
        val addsan = child.commented_san
        pgn += " " + addsan + " "
        //if(child==current_node) pgn+=" {*} "
        if (is_sub) report_pgn_recursive(gn.childs(san), true)
        i += 1
      }
      if (numSans > 0) {
        val main_child = gn.childs(sortedSans(0))
        val sortedSansMain = main_child.sortedSans
        val numSansMain = sortedSansMain.length
        if ((numSansMain > 0) && (numSans > 1)) {
          val mains_main_child = main_child.childs(sortedSansMain(0))
          if (main_child.turn == 'b') {
            pgn += " " + mains_main_child.get_move_no + " "
          }
        }
        report_pgn_recursive(main_child, sub)
      } else if (sub) {
        pgn += ") "
      }
    }

    val dummy = new board(variant)
    dummy.set_from_fen(root.fen)
    if (dummy.getturn == BLACK) {
      pgn += dummy.fullmove_number + ". ... "
    } else if (!has_moves) {
      pgn += dummy.fullmove_number + ". "
    }
    if (has_moves) {
      report_pgn_recursive(root, false)
    }
    pgn = pgn.replaceAll(" +", " ")
    pgn = pgn.replaceAll(" +\\\\)", ")")
    pgn
  }

  var report_headers = ""
  var replen = 0

  def report_pgn: String = {
    pgn_headers += ("FEN" -> report_pretty_start_fen)
    pgn_headers += ("Variant" -> variant)
    report_headers = (for (k <- sorted_pgn_header_keys) yield {
      val v = pgn_headers(k); s"""[$k "$v"]"""
    }).mkString("\\n")
    replen = report_headers.length + 2
    val move_list = report_pgn_move_list
    List(report_headers, move_list).mkString("\\n\\n") + get_termination
  }

  def get_header(key: String): String = (if (pgn_headers.contains(key)) pgn_headers(key) else "?")

  var PARSING = false

  def parse_pgn(set_pgn: String, head_only: Boolean = false) {
    PARSING = true
    val READING_HEAD = 0
    val READING_BODY = 1
    var status = READING_HEAD
    var pgn = set_pgn
    pgn = pgn.replaceAll("\\r\\n", "\\n")
    pgn = pgn.replaceAll("\\r", "")
    val lines = pgn.split("\\n")
    var move_list = ""
    reset
    var i = 0
    var done = false
    while ((i < lines.length) && (!done)) {
      val line = lines(i)
      val empty = (line == "")
      if (empty) {
        if (status == READING_BODY) {
          done = true
        }
      } else {
        if (status == READING_BODY) {
          move_list += (line + " ")
        } else {
          if (line(0) == '[') {
            val pline = line.replaceAll("\\\\[|\\\\]", "")
            val parts = pline.split(" +" + '"')
            val key = parts(0)
            val quot = '"'
            val value = parts(1).replaceAll(quot + "$", "")
            pgn_headers += (key -> value)
          } else {
            status = READING_BODY
            move_list += (line + " ")
          }
        }
      }
      i += 1
    }
    if (head_only) {
      PARSING = false
      return
    }
    //////////////////////////////////////////////////////////
    if (pgn_headers.contains("Variant")) {
      val pgn_variant = pgn_headers("Variant")
      variant = pgn_variant
      b = new board(pgn_variant)
      b.reset
    }
    var fen = b.report_fen
    if (pgn_headers.contains("FEN")) fen = pgn_headers("FEN") else pgn_headers += ("FEN" -> fen)
    set_from_fen(fen, clear_headers = false)
    val White = get_header("White")
    val Black = get_header("Black")
    // convert white spaces to space
    move_list = move_list.replaceAll("[\\r\\n\\t]", " ")
    // separate comments
    move_list = move_list.replaceAll("\\\\}", "} ")
    move_list = move_list.replaceAll("\\\\{", " {")
    // replace multiple spaces with single space
    move_list = move_list.replaceAll(" +", " ")
    // remove leading and trailing spaces
    move_list = move_list.replaceAll("^ | $", "")
    // variation opening joined with move
    move_list = move_list.replaceAll("\\\\( ", "(")
    // variation closing joined with move
    move_list = move_list.replaceAll(" \\\\)", ")")
    // separate multiple closings
    move_list = move_list.replaceAll("\\\\)\\\\)", ") )")
    // separate closing from comment end
    move_list = move_list.replaceAll("\\\\}\\\\)", "} )")
    var moves = move_list.split(" ")
    var commentbuff = ""
    var previouswasmove = false

    def parse_moves_recursive() {
      while (moves.length > 0) {
        var move = moves.head
        moves = moves.tail
        if (move.length > 0) {
          def addcomment {
            commentbuff = commentbuff.replaceAll("^\\\\{|\\\\}$", "")
            if (previouswasmove) {
              current_node.comment = commentbuff
            }
            commentbuff = ""
            previouswasmove = false
          }
          if (DataUtils.BeginsWith(move, '{')) {
            commentbuff = move
            if (DataUtils.EndsWith(move, '}')) addcomment
          } else if (DataUtils.EndsWith(move, '}')) {
            commentbuff += " " + move
            addcomment
          } else if (commentbuff != "") {
            commentbuff += " " + move
          } else {
            // remove line numbers, dots from moves
            move = move.replaceAll("^[0-9]*[\\\\.]*", "")
            var open_sub = false
            var close_sub = false
            if (DataUtils.BeginsWith(move, '(')) {
              open_sub = true
              move = move.substring(1)
            }
            if (DataUtils.EndsWith(move, ')')) {
              close_sub = true
              move = move.substring(0, move.length - 1)
            }
            if (open_sub) {
              val save_current_node = current_node
              back
              val m = b.sanToMove(move)
              if (m != null) {
                makeMove(m)
                previouswasmove = true
              } else {
                previouswasmove = false
              }
              parse_moves_recursive()
              current_node = save_current_node
              b.set_from_fen(current_node.fen)
            } else {
              if (move == "") {
                // commented moves can generate empty closings
                previouswasmove = false
              } else {
                val m = b.sanToMove(move)
                if (m != null) {
                  makeMove(m)
                  previouswasmove = true
                } else {
                  previouswasmove = false
                }
              }
            }
            if (close_sub) {
              return
            }
          }
        }
      }
    }

    parse_moves_recursive()
    tobegin
    PARSING = false
  }

  def set_from_pgn(pgn: String) {
    parse_pgn(pgn)
  }

  def current_line_pgn: String = {
    var fullmove_number = root.fullmove_number
    var turn = root.turn
    var a = scala.collection.mutable.ArrayBuffer[String]()
    var first = true
    for (san <- current_line_moves) {
      var psan = san
      if (turn == 'w') {
        psan = fullmove_number + ". " + san
      } else if (first == true) {
        psan = fullmove_number + ". ... " + san
      }
      first = false
      if (turn == 'b') fullmove_number += 1
      turn = (if (turn == 'w') 'b' else 'w')
      a += psan
    }
    a.mkString(" ")
  }

  def current_line_length: Int = {
    val cal = current_line_algeb
    if (cal == "") return 0
    cal.split(" ").length
  }

  def current_line_algeb: String = {
    var a = scala.collection.mutable.ArrayBuffer[String]()
    var first = true
    for (algeb <- current_line_moves_algeb) {
      a += algeb
    }
    a.mkString(" ")
  }

  def current_line: String = {
    current_line_moves.mkString(" ")
  }

  def current_line_moves: scala.collection.mutable.ArrayBuffer[String] = {
    var a = scala.collection.mutable.ArrayBuffer[String]()
    var cn = current_node
    while (cn != root) {
      a += cn.genSan
      cn = cn.parent
    }
    a.reverse
  }

  def current_line_moves_algeb: scala.collection.mutable.ArrayBuffer[String] = {
    var a = scala.collection.mutable.ArrayBuffer[String]()
    var cn = current_node
    while (cn != root) {
      a += cn.genAlgeb
      cn = cn.parent
    }
    a.reverse
  }

  def get_info_line: String = {
    val white = get_header("White")
    val black = get_header("Black")
    s"""$white - $black"""
  }

  def truncate_fen(fen: String): String = {
    val parts = fen.split(" ").toList
    val trunc_fen = parts(0) + " " + parts(1) + " " + parts(2) + " " + parts(3)
    trunc_fen
  }

  def is_threefold_repetition: Boolean = {
    var fen_cnt = 1
    var node_ptr = current_node
    val trunc_fen = truncate_fen(node_ptr.fen)
    while ((node_ptr.parent != null) && (fen_cnt < 3)) {
      node_ptr = node_ptr.parent
      if (truncate_fen(node_ptr.fen) == trunc_fen) fen_cnt += 1
    }
    fen_cnt >= 3
  }

  def is_fifty_move_rule: Boolean = {
    b.halfmove_clock >= 100
  }
}
serversideapps/silhmojs
shared/src/main/scala/shared/smartchess/game.scala
Scala
apache-2.0
23,066
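// Hedged usage sketch of the game class defined above, using only methods it
// declares (parse_pgn, toend, current_line_pgn, report_fen); the PGN literal
// is made up for illustration.
val g = new game()
g.parse_pgn("""[Event "?"]

1. e4 e5 2. Nf3 {developing} Nc6""")
g.toend
println(g.current_line_pgn)
println(g.report_fen)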
package com.github.mdr.mash.functions

import com.github.mdr.mash.evaluator.Suggestor

sealed trait GeneralArgument[+T] {
  val isPositionArg: Boolean
  val value: T
}

object GeneralArgument {

  case class PositionArg[T](value: T) extends GeneralArgument[T] {
    val isPositionArg = true
  }

  case class LongFlag[T](flag: String, value: T) extends GeneralArgument[T] {
    val isPositionArg = false
  }

  case class ShortFlag[T](flags: Seq[String], value: T) extends GeneralArgument[T] {
    val isPositionArg = false
  }

}

case class GeneralArgBinderResult[T](parameterToArguments: Map[Parameter, Seq[T]] = Map(),
                                     posToParam: Map[Int, Parameter])

case class ArgBindingException[T](message: String, argumentOpt: Option[T] = None) extends RuntimeException(message)

class GeneralArgBinder[T](params: ParameterModel,
                          arguments: Seq[GeneralArgument[T]],
                          forgiving: Boolean = false) {

  private var parameterToArguments: Map[Parameter, Seq[T]] = Map()

  private var posToParam: Map[Int, Parameter] = Map()

  case class ArgAndPos(arg: GeneralArgument[T], pos: Int)

  case class PositionArgAndPos(arg: GeneralArgument.PositionArg[T], pos: Int)

  case class PositionalParameterSet(initialMandatory: Seq[Parameter],
                                    initialOptional: Seq[Parameter],
                                    variadicOrAllArgsOpt: Option[Parameter],
                                    finalOptional: Seq[Parameter],
                                    finalMandatory: Seq[Parameter]) {

    def maxNumberOfPositionalArgs =
      initialMandatory.size + initialOptional.size + finalOptional.size + finalMandatory.size

  }

  private lazy val positionalParameterSet: PositionalParameterSet = {
    def isRegular(p: Parameter) = !p.isVariadic && !p.isAllArgsParam
    def isOptional(p: Parameter) = isRegular(p) && p.hasDefault
    def isMandatory(p: Parameter) = isRegular(p) && !p.hasDefault
    val (initialMandatory, rest) = params.positionalParams.filterNot(isProvidedAsNamedArg).span(isMandatory)
    val (finalMandatory, rest2) = rest.reverse.span(isMandatory)
    val (initialOptional, rest3) = rest2.reverse.span(isOptional)
    val (finalOptional, rest4) = rest3.reverse.span(isOptional)
    val variadicOpt = rest4 match {
      case Seq(p) if p.isVariadic || p.isAllArgsParam ⇒ Some(p)
      case Seq()                                      ⇒ None
      case _ if forgiving                             ⇒ None
      case _                                          ⇒ throw new ArgBindingException[T](s"Bad parameters")
    }
    PositionalParameterSet(initialMandatory, initialOptional, variadicOpt, finalOptional, finalMandatory)
  }

  private def handlePositional() {
    val args1 = positionArgAndPos
    for ((param, argAndPos) ← positionalParameterSet.initialMandatory zip args1)
      addArgToParam(param, argAndPos)

    val args2 = args1.drop(positionalParameterSet.initialMandatory.length)
    for ((param, argAndPos) ← positionalParameterSet.finalMandatory.reverse zip args2.reverse)
      addArgToParam(param, argAndPos)

    val args3 = args2.dropRight(positionalParameterSet.finalMandatory.length)
    for ((param, argAndPos) ← positionalParameterSet.initialOptional zip args3)
      addArgToParam(param, argAndPos)

    val args4 = args3.drop(positionalParameterSet.initialOptional.length)
    for ((param, argAndPos) ← positionalParameterSet.finalOptional.reverse zip args4.reverse)
      addArgToParam(param, argAndPos)

    val args5 = args4.dropRight(positionalParameterSet.finalOptional.length)
    if (args5.nonEmpty)
      positionalParameterSet.variadicOrAllArgsOpt match {
        case Some(variadicOrAllArgsParam) ⇒
          if (variadicOrAllArgsParam.isVariadic)
            for (argAndPos ← args5)
              addArgToParam(variadicOrAllArgsParam, argAndPos)
          // else: allArgs param is handled later
        case None ⇒
          handleTooManyPositionalArguments()
      }
  }

  private def handleTooManyPositionalArguments() {
    val maxPositionArgs = positionalParameterSet.maxNumberOfPositionalArgs
    val providedArgs = positionArgAndPos.size
    val firstExcessArgument = positionArgAndPos.drop(maxPositionArgs).head // should be at least one if this is called
    val wasWere = if (providedArgs == 1) "was" else "were"
    val isAre = if (maxPositionArgs == 1) "is" else "are"
    val message = s"Too many positional arguments -- $providedArgs $wasWere provided, but at most $maxPositionArgs $isAre allowed"
    if (!forgiving)
      throw new ArgBindingException(message, Some(firstExcessArgument.arg.value))
  }

  @throws[ArgBindingException[T]]
  def bind: GeneralArgBinderResult[T] = {
    handlePositional()
    handleFlagArgs()
    handleDefaultAndMandatory()
    handleNamedArgsParams()
    handleAllArgsParams()
    GeneralArgBinderResult(parameterToArguments, posToParam)
  }

  private def addArgToParam(param: Parameter, argAndPos: PositionArgAndPos) {
    addArgToParam(param, argAndPos.arg, argAndPos.pos)
  }

  private def addArgToParam(param: Parameter, arg: GeneralArgument[T], pos: Int) {
    ensureParamIsBound(param)
    parameterToArguments += param -> (parameterToArguments(param) :+ arg.value)
    posToParam += pos -> param
  }

  private def ensureParamIsBound(param: Parameter) =
    parameterToArguments += param -> parameterToArguments.getOrElse(param, Seq())

  private def handleNamedArgsParams() =
    for (param ← params.params if param.isNamedArgsParam) {
      ensureParamIsBound(param)
      for (ArgAndPos(arg, pos) ← argsAndPos if !posToParam.contains(pos) && !arg.isPositionArg)
        addArgToParam(param, arg, pos)
    }

  private def handleAllArgsParams() =
    for (param ← params.params if param.isAllArgsParam) {
      ensureParamIsBound(param)
      for (ArgAndPos(arg, pos) ← argsAndPos if !posToParam.contains(pos))
        addArgToParam(param, arg, pos)
    }

  private def handleFlagArgs() =
    for (ArgAndPos(argument, pos) ← argsAndPos)
      argument match {
        case GeneralArgument.ShortFlag(flags, _) ⇒
          for (flag ← flags)
            bindFlagParam(flag, argument, pos)
        case GeneralArgument.LongFlag(flag, _) ⇒
          bindFlagParam(flag, argument, pos)
        case posArg: GeneralArgument.PositionArg[T] ⇒ // skip
      }

  private def bindFlagParam(paramName: String, arg: GeneralArgument[T], pos: Int): Unit =
    params.paramByName.get(paramName) match {
      case Some(param) ⇒
        if (!param.isNamedArgsParam)
          if (parameterToArguments contains param) {
            if (!forgiving)
              throw new ArgBindingException(s"${describe(param).capitalize} is provided multiple times", Some(arg))
          } else
            addArgToParam(param, arg, pos)
      case None ⇒
        if (!hasNamedArgsParam && !hasAllArgsParam)
          if (!forgiving) {
            val names = params.paramByName.keys.toSeq
            val message = s"Unexpected named argument '$paramName'${Suggestor.suggestionSuffix(names, paramName)}"
            throw new ArgBindingException(message, Some(arg.value))
          }
    }

  private def handleDefaultAndMandatory() =
    for (param ← params.params if !(parameterToArguments contains param))
      param.defaultValueGeneratorOpt match {
        case Some(generator) ⇒
          ensureParamIsBound(param)
        case None ⇒
          if (param.isVariadic)
            if (param.variadicAtLeastOne) {
              if (!forgiving)
                throw new ArgBindingException(s"Missing mandatory ${describe(param)}")
            } else
              ensureParamIsBound(param)
          else if (!param.isNamedArgsParam && !param.isAllArgsParam)
            if (!forgiving)
              throw new ArgBindingException(s"Missing mandatory ${describe(param)}")
      }

  private def describe(param: Parameter) = param.nameOpt match {
    case Some(name) ⇒ s"argument '$name'"
    case None       ⇒ "anonymous argument"
  }

  private def hasNamedArgsParam = params.params.exists(_.isNamedArgsParam)

  private def hasAllArgsParam = params.params.exists(_.isAllArgsParam)

  private lazy val argsAndPos: Seq[ArgAndPos] = arguments.zipWithIndex.map((ArgAndPos.apply _).tupled)

  private lazy val positionArgAndPos: Seq[PositionArgAndPos] = argsAndPos.collect {
    case ArgAndPos(arg: GeneralArgument.PositionArg[T], pos) => PositionArgAndPos(arg, pos)
  }

  private def isProvidedAsNamedArg(param: Parameter): Boolean = arguments.exists {
    case GeneralArgument.LongFlag(name, _)   ⇒ param.nameOpt contains name
    case GeneralArgument.ShortFlag(flags, _) ⇒ param.shortFlagOpt.map(_.toString) exists flags.contains
    case _                                   ⇒ false
  }

}
mdr/mash
src/main/scala/com/github/mdr/mash/functions/GeneralArgBinder.scala
Scala
mit
8,881
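// Hedged usage sketch: only the GeneralArgument constructors defined above are
// used; how ParameterModel instances are built is not shown in this file, so
// `params` is left abstract here and the argument values are made up.
def bindExample(params: ParameterModel): GeneralArgBinderResult[String] = {
  val args: Seq[GeneralArgument[String]] = Seq(
    GeneralArgument.PositionArg("input.txt"),
    GeneralArgument.LongFlag("verbose", "true"))
  new GeneralArgBinder(params, args, forgiving = true).bind
}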
package pl.gosub.akka.online

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import akka.stream._
import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.Random

/*
 def max_subarray(A):
     max_ending_here = max_so_far = 0
     for x in A:
         max_ending_here = max(0, max_ending_here + x)
         max_so_far = max(max_so_far, max_ending_here)
     return max_so_far
 */

class KadaneFlowStage extends GraphStage[FlowShape[Int, Int]] {

  /* This stage has a Flow shape
         +-------+
         |       |
    ---> >Inlet > Logic > Outlet> --->
         |       |
         +-------+
   */

  // Shape definition
  val in: Inlet[Int] = Inlet("KadaneFlowStage.in")
  val out: Outlet[Int] = Outlet("KadaneFlowStage.out")
  override val shape: FlowShape[Int, Int] = FlowShape(in, out)

  // Logic for the stage
  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
    // state
    var maxEndingHere = 0
    var maxSoFar = 0

    // Handler(s) for the Inlet
    setHandler(in, new InHandler {
      // what to do when a new element is ready to be consumed
      override def onPush(): Unit = {
        val elem = grab(in)
        // "Business" logic
        maxEndingHere = Math.max(0, maxEndingHere + elem)
        maxSoFar = Math.max(maxSoFar, maxEndingHere)
        // this should never happen
        // we decide to not push the value, avoiding the error
        // but potentially losing the value
        if (isAvailable(out)) push(out, maxSoFar)
      }

      override def onUpstreamFinish(): Unit = {
        completeStage()
      }
    })

    // Handler for the Outlet
    setHandler(out, new OutHandler {
      override def onPull(): Unit = {
        if (!hasBeenPulled(in)) pull(in)
      }
    })
  }
}

object KadaneFlowMain extends App {
  implicit val system = ActorSystem()
  implicit val mat = ActorMaterializer()

  val kadaneFlowStage = new KadaneFlowStage

  val done = Source.repeat(1).take(100).map(_ => Random.nextInt(1100) - 1000)
    // .throttle(1, Duration(100, "millisecond"), 1, ThrottleMode.shaping)
    .via(kadaneFlowStage)
    .throttle(1, Duration(100, "millisecond"), 1, ThrottleMode.shaping)
    .runWith(Sink.foreach(println))

  import scala.concurrent.ExecutionContext.Implicits.global
  done.onComplete(_ => system.terminate())
  Await.ready(system.whenTerminated, Duration.Inf)
}
gosubpl/akka-online
src/main/scala/pl/gosub/akka/online/KadaneFlowStage.scala
Scala
apache-2.0
2,603
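// Hedged reference sketch: the same Kadane recurrence as the stage above,
// expressed as a pure fold over an in-memory sequence (no Akka involved).
def maxSubarray(xs: Seq[Int]): Int =
  xs.foldLeft((0, 0)) { case ((maxEndingHere, maxSoFar), x) =>
    val e = math.max(0, maxEndingHere + x)
    (e, math.max(maxSoFar, e))
  }._2

// maxSubarray(Seq(-2, 1, -3, 4, -1, 2, 1, -5, 4)) == 6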
package io.netflow.storage.cassandra

import java.net.{ InetAddress, InetSocketAddress }
import java.util.UUID

import com.datastax.driver.core.Row
import com.datastax.driver.core.utils.UUIDs
import com.websudos.phantom.CassandraTable
import com.websudos.phantom.Implicits._
import com.websudos.phantom.column.{ DateTimeColumn, TimeUUIDColumn }
import io.netflow.flows.cflow._
import io.netflow.lib._
import org.joda.time.DateTime

private[netflow] object NetFlowV7Packet extends FlowPacketMeta[NetFlowV7Packet] {
  def persist(fp: NetFlowV7Packet): Unit = fp.flows.foldLeft((0, new BatchStatement())) {
    case ((count, b), row) =>
      val statement = NetFlowV7Record.insert
        .value(_.id, UUIDs.startOf(fp.timestamp.getMillis + count))
        .value(_.packet, fp.id)
        .value(_.sender, row.sender.getAddress)
        .value(_.timestamp, row.timestamp)
        .value(_.uptime, row.uptime)
        .value(_.senderPort, row.senderPort)
        .value(_.length, row.length)
        .value(_.srcPort, row.srcPort)
        .value(_.dstPort, row.dstPort)
        .value(_.srcAS, row.srcAS)
        .value(_.dstAS, row.dstAS)
        .value(_.pkts, row.pkts)
        .value(_.bytes, row.bytes)
        .value(_.proto, row.proto)
        .value(_.tos, row.tos)
        .value(_.tcpflags, row.tcpflags)
        .value(_.start, row.start)
        .value(_.stop, row.stop)
        .value(_.srcAddress, row.srcAddress)
        .value(_.dstAddress, row.dstAddress)
        .value(_.nextHop, row.nextHop)
        .value(_.snmpInput, row.snmpInput)
        .value(_.snmpOutput, row.snmpOutput)
        .value(_.srcMask, row.srcMask)
        .value(_.dstMask, row.dstMask)
        .value(_.flags, row.flags)
        .value(_.routerAddress, row.routerAddress)
      (count + 1, b.add(statement))
  }._2.future()(Connection.session)
}

/**
 * NetFlow Version 7 Flow
 *
 * *-------*-----------*----------------------------------------------------------*
 * | Bytes | Contents  | Description                                              |
 * *-------*-----------*----------------------------------------------------------*
 * | 0-3   | srcaddr   | Source IP address                                        |
 * *-------*-----------*----------------------------------------------------------*
 * | 4-7   | dstaddr   | Destination IP address                                   |
 * *-------*-----------*----------------------------------------------------------*
 * | 8-11  | nexthop   | IP address of next hop senderIP                          |
 * *-------*-----------*----------------------------------------------------------*
 * | 12-13 | input     | Interface index (ifindex) of input interface             |
 * *-------*-----------*----------------------------------------------------------*
 * | 14-15 | output    | Interface index (ifindex) of output interface            |
 * *-------*-----------*----------------------------------------------------------*
 * | 16-19 | dPkts     | Packets in the flow                                      |
 * *-------*-----------*----------------------------------------------------------*
 * | 20-23 | dOctets   | Total number of Layer 3 bytes in the packets of the flow |
 * *-------*-----------*----------------------------------------------------------*
 * | 24-27 | First     | SysUptime at start of flow                               |
 * *-------*-----------*----------------------------------------------------------*
 * | 28-31 | Last      | SysUptime at the time the last packet of the flow was    |
 * |       |           | received                                                 |
 * *-------*-----------*----------------------------------------------------------*
 * | 32-33 | srcport   | TCP/UDP source port number or equivalent                 |
 * *-------*-----------*----------------------------------------------------------*
 * | 34-35 | dstport   | TCP/UDP destination port number or equivalent            |
 * *-------*-----------*----------------------------------------------------------*
 * | 36    | pad1      | Unused (zero) bytes                                      |
 * *-------*-----------*----------------------------------------------------------*
 * | 37    | tcpflags  | Cumulative OR of TCP flags                               |
 * *-------*-----------*----------------------------------------------------------*
 * | 38    | prot      | IP protocol type (for example, TCP = 6; UDP = 17)        |
 * *-------*-----------*----------------------------------------------------------*
 * | 39    | tos       | IP type of service (ToS)                                 |
 * *-------*-----------*----------------------------------------------------------*
 * | 40-41 | src_as    | AS number of the source, either origin or peer           |
 * *-------*-----------*----------------------------------------------------------*
 * | 42-43 | dst_as    | AS number of the destination, either origin or peer      |
 * *-------*-----------*----------------------------------------------------------*
 * | 44    | src_mask  | Source address prefix mask bits                          |
 * *-------*-----------*----------------------------------------------------------*
 * | 45    | dst_mask  | Destination address prefix mask bits                     |
 * *-------*-----------*----------------------------------------------------------*
 * | 46-47 | flags     | Flags indicating various things (validity)               |
 * *-------*-----------*----------------------------------------------------------*
 * | 48-51 | router_sc | IP address of the router that is bypassed by the         |
 * |       |           | Catalyst 5000 series switch. This is the same address    |
 * |       |           | the router uses when it sends NetFlow export packets.    |
 * |       |           | This IP address is propagated to all switches bypassing  |
 * |       |           | the router through the FCP protocol.                     |
 * *-------*-----------*----------------------------------------------------------*
 */
private[netflow] sealed class NetFlowV7Record extends CassandraTable[NetFlowV7Record, NetFlowV7] {
  object id extends UUIDColumn(this) with PartitionKey[UUID]
  object packet extends TimeUUIDColumn(this) with Index[UUID]
  object sender extends InetAddressColumn(this) with PrimaryKey[InetAddress]
  object timestamp extends DateTimeColumn(this) with PrimaryKey[DateTime]
  object uptime extends LongColumn(this)
  object senderPort extends IntColumn(this) with Index[Int]
  object length extends IntColumn(this)
  object srcPort extends IntColumn(this) with Index[Int]
  object dstPort extends IntColumn(this) with Index[Int]
  object srcAS extends OptionalIntColumn(this) with Index[Option[Int]]
  object dstAS extends OptionalIntColumn(this) with Index[Option[Int]]
  object pkts extends LongColumn(this)
  object bytes extends LongColumn(this)
  object proto extends IntColumn(this) with Index[Int]
  object tos extends IntColumn(this) with Index[Int]
  object tcpflags extends IntColumn(this)
  object start extends OptionalDateTimeColumn(this) with Index[Option[DateTime]]
  object stop extends OptionalDateTimeColumn(this) with Index[Option[DateTime]]
  object srcAddress extends InetAddressColumn(this) with Index[InetAddress]
  object dstAddress extends InetAddressColumn(this) with Index[InetAddress]
  object nextHop extends OptionalInetAddressColumn(this) with Index[Option[InetAddress]]
  object snmpInput extends IntColumn(this)
  object snmpOutput extends IntColumn(this)
  object srcMask extends IntColumn(this)
  object dstMask extends IntColumn(this)
  object flags extends IntColumn(this)
  object routerAddress extends InetAddressColumn(this)

  def fromRow(row: Row): NetFlowV7 = NetFlowV7(id(row), new InetSocketAddress(sender(row), senderPort(row)),
    length(row), uptime(row), timestamp(row), srcPort(row), dstPort(row), srcAS(row), dstAS(row),
    pkts(row), bytes(row), proto(row), tos(row), tcpflags(row), start(row), stop(row),
    srcAddress(row), dstAddress(row), nextHop(row), snmpInput(row), snmpOutput(row),
    srcMask(row), dstMask(row), flags(row), routerAddress(row), packet(row))
}

private[netflow] object NetFlowV7Record extends NetFlowV7Record
ayscb/netflow
netflow1/netflow-master/src/main/scala/io/netflow/storage/cassandra/NetFlowV7Record.scala
Scala
apache-2.0
8,260
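// Hedged illustration of the documented layout above: reading the first two
// fields (srcaddr at bytes 0-3, dstaddr at bytes 4-7) of a raw v7 flow record
// with the plain JDK; this is not the parser the repo actually uses.
import java.net.InetAddress

def addresses(record: Array[Byte]): (InetAddress, InetAddress) =
  (InetAddress.getByAddress(record.slice(0, 4)),
   InetAddress.getByAddress(record.slice(4, 8)))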
package pl.umk.bugclassification.scmparser.gerrit

import akka.actor.{Actor, ActorLogging}
import pl.umk.bugclassification.scmparser.invokers.InvokerOnDirectory
import pl.umk.bugclassification.scmparser.messages.{Classify, Learn}

trait ProjectInvoker extends Actor with ActorLogging with InvokerOnDirectory {

  protected def resetRepo: Unit

  protected def resetRepo(branch: String): Unit

  protected def fetch: Unit

  protected def fetchAndCheckoutFromGerrit(ref: String)

  protected def learn

  protected def classify(ref: String, sha1: String): Boolean

  protected def send(sha1: String, isCommitClassifiedBuggy: Boolean): Unit

  def receive = {
    case Learn("master") => {
      resetRepo
      learn
    }
    case Learn(branch) => {
      resetRepo(branch)
      learn
    }
    case Classify(ref, sha1) => {
      log.info("Classify " + ref + " " + sha1)
      fetchAndCheckoutFromGerrit(ref)
      val isCommitClassifiedBuggy = classify(ref, sha1)
      send(sha1, isCommitClassifiedBuggy)
    }
  }
}
mfejzer/CommitClassification
src/main/scala/pl/umk/bugclassification/scmparser/gerrit/ProjectInvoker.scala
Scala
bsd-3-clause
1,020
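// Hedged usage sketch: driving the receive protocol above; `invoker` stands
// for an ActorRef to some concrete ProjectInvoker implementation, and the
// ref/sha1 values are made up.
import akka.actor.ActorRef

def requestClassification(invoker: ActorRef): Unit = {
  invoker ! Learn("master")
  invoker ! Classify("refs/changes/45/12345/2", "a1b2c3d")
}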
package com.varwise.btc.forwarding

import java.math.BigInteger

import org.bitcoinj.core.{Base58, DumpedPrivateKey, ECKey, NetworkParameters}

object Utils {
  def addressToKey(params: NetworkParameters, sourceAddress: String): ECKey = {
    sourceAddress match {
      case _ if sourceAddress.length == 51 || sourceAddress.length == 52 =>
        new DumpedPrivateKey(params, sourceAddress).getKey //WIF
      case _ if sourceAddress.length == 64 =>
        ECKey.fromPrivate(new BigInteger(sourceAddress, 16)) //hex encoded private key
      case _ =>
        ECKey.fromPrivate(Base58.decodeToBigInteger(sourceAddress)) //base58
    }
  }
}
wlk/forwarding-service
src/main/scala/com/varwise/btc/forwarding/Utils.scala
Scala
apache-2.0
620
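// Hedged usage sketch: MainNetParams.get() is the standard bitcoinj way to get
// mainnet parameters, and the 64-character hex string is a made-up placeholder,
// so this dispatches to the hex branch above.
import org.bitcoinj.params.MainNetParams

val key = Utils.addressToKey(
  MainNetParams.get(),
  "0000000000000000000000000000000000000000000000000000000000000001")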
package cromwell.engine

import cromwell.CromwellTestKitWordSpec

class WorkflowAbortSpec extends CromwellTestKitWordSpec {

  // TODO: When re-enabled, this test also needs to check that child processes have actually been stopped.
  "A WorkflowManagerActor" should {
    "abort the triple-wait workflow" ignore {
//      withDataAccess { dataAccess =>
//        implicit val workflowManagerActor = TestActorRef(WorkflowManagerActor.props(dataAccess, CromwellSpec.BackendInstance), self, "Test the WorkflowManagerActor")
//
//        val waitThreshold = 10
//
//        // Start the workflow:
//        val workflowId = messageAndWait[WorkflowId](SubmitWorkflow(TripleSleep.workflowSource(), TripleSleep.WorkflowJson, TripleSleep.rawInputs))
//
//        def waitForStarted(currentAttempt: Int): Unit = {
//          val status = messageAndWait[Option[WorkflowState]](WorkflowStatus(workflowId))
//          status match {
//            case None | Some(WorkflowSubmitted) =>
//              if (currentAttempt > waitThreshold) { fail("Workflow took too long to start") }
//              Thread.sleep(1000)
//              waitForStarted(currentAttempt + 1)
//            case Some(_) => // We're good to continue
//          }
//        }
//
//        def waitForAborted(currentAttempt: Int): Unit = {
//          val status = messageAndWait[Option[WorkflowState]](WorkflowStatus(workflowId))
//          status match {
//            case Some(WorkflowAborted) => // All good
//            case Some(x: WorkflowState) if x.isTerminal => fail("Unexpected end state of workflow: " + x)
//            case Some(_) =>
//              if (currentAttempt > waitThreshold) { fail("Workflow took too long to complete after an abort attempt") }
//              Thread.sleep(1000)
//              waitForAborted(currentAttempt + 1)
//            case None => fail("Workflow mysteriously disappeared")
//          }
//        }
//
//        // Wait for the workflow to start:
//        waitForStarted(0)
//
//        // Abort the workflow:
//        workflowManagerActor ! WorkflowAbort(workflowId)
//
//        // Wait for the workflow to complete:
//        waitForAborted(0)
//      }
    }
  }
}
ohsu-comp-bio/cromwell
engine/src/test/scala/cromwell/engine/WorkflowAbortSpec.scala
Scala
bsd-3-clause
2,191
package aecor.runtime

import cats.Applicative
import cats.implicits._

final class NoopKeyValueStore[F[_]: Applicative, K, V] extends KeyValueStore[F, K, V] {
  override def setValue(key: K, value: V): F[Unit] = ().pure[F]
  override def getValue(key: K): F[Option[V]] = none[V].pure[F]
  override def deleteValue(key: K): F[Unit] = ().pure[F]
}

object NoopKeyValueStore {
  def apply[F[_]: Applicative, K, V]: NoopKeyValueStore[F, K, V] = new NoopKeyValueStore[F, K, V]
}
notxcain/aecor
modules/core/src/main/scala/aecor/runtime/NoopKeyValueStore.scala
Scala
mit
475
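// Hedged usage sketch: instantiating the no-op store with cats.Id, whose
// Applicative instance ships with cats, to show the contract (writes are
// swallowed, reads always come back empty).
import cats.Id

val store = NoopKeyValueStore[Id, String, Int]
store.setValue("hits", 1) // ()
store.getValue("hits")    // None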
package demo.helloworld.snippet

class HelloWorld {
  def howdy = <span>Welcome to helloworld at {new _root_.java.util.Date}</span>
}
scalatest/scalatest-maven-plugin
src/it/lift/src/main/scala/demo/helloworld/snippet/HelloWorld.scala
Scala
apache-2.0
135
package isabelle.eclipse.ui.text

import org.eclipse.jface.text.IDocument
import org.eclipse.jface.text.IRegion
import org.eclipse.jface.text.Region
import org.eclipse.jface.text.rules.IToken
import org.eclipse.jface.text.rules.ITokenScanner
import org.eclipse.jface.text.rules.Token

/** A token scanner that returns a single token for the whole range. Useful for partition ranges.
  *
  * @author Andrius Velykis
  */
class SingleTokenScanner() extends ITokenScanner {

  private var lastRegion: IRegion = _
  private var consumed = true

  def setRange(document: IDocument, offset: Int, length: Int) {
    // set the region and allow consumption
    lastRegion = new Region(offset, length)
    consumed = false
  }

  def nextToken(): IToken =
    if (consumed) {
      Token.EOF
    } else {
      // consume the region
      consumed = true
      // return default token
      getToken
    }

  def getTokenOffset() = lastRegion.getOffset()

  def getTokenLength() = lastRegion.getLength()

  /** Allow subclasses to provide the token */
  protected def getToken: IToken = Token.UNDEFINED
}
andriusvelykis/isabelle-eclipse
isabelle.eclipse.ui/src/isabelle/eclipse/ui/text/SingleTokenScanner.scala
Scala
epl-1.0
1,084
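// Hedged sketch of the intended extension point above: a subclass overriding
// getToken to label the whole partition; new Token(data) is the standard JFace
// constructor taking an arbitrary data object, and the class name is made up.
class CommentPartitionScanner extends SingleTokenScanner {
  override protected def getToken: IToken = new Token("comment")
}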
/**
 * Copyright (C) 2010 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.oxf.externalcontext

import java.{util => ju}

import org.apache.commons.io.IOUtils
import org.orbeon.io.IOUtils._
import org.orbeon.oxf.externalcontext.ExternalContext.Request
import org.orbeon.oxf.http.HttpMethod.{HttpMethodsWithRequestBody, POST}
import org.orbeon.oxf.http._
import org.orbeon.oxf.util.CollectionUtils._
import org.orbeon.oxf.util.PathUtils._
import org.orbeon.oxf.util._

import scala.jdk.CollectionConverters._
import scala.collection.mutable

// Request used for local (within Orbeon Forms) requests.
//
// Used by:
//
// - InternalHttpClient
// - RequestDispatcherSubmission
//
class LocalRequest(
  incomingRequest         : Request,
  contextPath             : String,
  pathQuery               : String,
  method                  : HttpMethod,
  headersMaybeCapitalized : Map[String, List[String]],
  content                 : Option[StreamedContent]
) extends Request {

  private val _contentLengthOpt = content flatMap (_.contentLength)
  private val _contentTypeOpt   = content flatMap (_.contentType)

  private val _headersIncludingAuthBodyLowercase = {

    def requestHeadersIt =
      headersMaybeCapitalized.iterator map { case (k, v) => k.toLowerCase -> v.toArray }

    def credentialsHeadersIt =
      incomingRequest.credentials match {
        case Some(credentials) => CredentialsSupport.toHeaders(credentials).iterator
        case None              => Iterator.empty
      }

    def bodyHeadersIt =
      if (HttpMethodsWithRequestBody(method)) {
        (_contentLengthOpt.iterator map (value => Headers.ContentLengthLower -> Array(value.toString))) ++
        (_contentTypeOpt.iterator   map (value => Headers.ContentTypeLower   -> Array(value)))
      } else
        Iterator.empty

    def allHeadersLowercaseIt =
      requestHeadersIt ++ credentialsHeadersIt ++ bodyHeadersIt

    allHeadersLowercaseIt.toMap.asJava
  }

  private val (_pathInfo, _queryString) = splitQuery(pathQuery)

  private lazy val _queryAndBodyParameters = {
    // Query string
    // SRV.4.1: "Query string data is presented before post body data."
    def queryParameters = Option(getQueryString) map decodeSimpleQuery getOrElse Seq()

    // POST body form parameters
    // SRV.4.1.1 When Parameters Are Available
    // NOTE: Remember, the servlet container does not help us decoding the body: the "other side" will just end up here
    // when asking for parameters.
    def bodyParameters =
      if (method == POST)
        content collect {
          case StreamedContent(is, Some("application/x-www-form-urlencoded"), _, _) =>
            useAndClose(is) { is =>
              decodeSimpleQuery(IOUtils.toString(is, ExternalContext.StandardFormCharacterEncoding))
            }
        }
      else
        None

    // Make sure to keep order
    mutable.LinkedHashMap() ++ combineValues[String, AnyRef, Array](queryParameters ++ bodyParameters.getOrElse(Nil)) asJava
  }

  /* SUPPORTED: methods called by ExternalContextToHttpServletRequestWrapper */

  def getMethod            = method
  def getParameterMap      = _queryAndBodyParameters
  def getQueryString       = _queryString.orNull
  def getCharacterEncoding = null;//TODO? // not used by our code
  def getContentLength     = _contentLengthOpt map (_.toInt) getOrElse -1
  def getContentType       = _contentTypeOpt.orNull
  def getInputStream       = content map (_.inputStream) getOrElse EmptyInputStream
  def getHeaderValuesMap   = _headersIncludingAuthBodyLowercase

  lazy val getAttributesMap = {
    // See https://github.com/orbeon/orbeon-forms/issues/5081
    val newMap = new ju.HashMap[String, AnyRef]
    newMap.asScala ++= incomingRequest.getAttributesMap.asScala
    ju.Collections.synchronizedMap(newMap)
  }

  /*
   * NOTE: All the path methods are handled by the request dispatcher implementation in the servlet container upon
   * forward, but upon include we must provide them.
   *
   * NOTE: Checked 2009-02-12 that none of the methods below are called when forwarding through
   * spring/JSP/filter/Orbeon in Tomcat 5.5.27. HOWEVER they are called when including.
   *
   * NOTE: 2014-09-22: Checked that getServletPath and getPathInfo are called by JspServlet in tomcat-7.0.47 at least.
   */
  def getPathInfo    = _pathInfo
  def getServletPath = ""
  def getContextPath = contextPath // return the context path passed to this wrapper

  def getRequestPath: String = {
    // Get servlet path and path info
    val servletPath = Option(getServletPath) getOrElse ""
    val pathInfo    = Option(getPathInfo) getOrElse ""

    // Concatenate servlet path and path info, avoiding a double slash
    val requestPath =
      if (servletPath.endsWith("/") && pathInfo.startsWith("/"))
        servletPath + pathInfo.substring(1)
      else
        servletPath + pathInfo

    // Add starting slash if missing
    requestPath.prependSlash
  }

  def getRequestURI: String = {
    // Must return the path including the context
    val contextPath = getContextPath
    if (contextPath == "/")
      getRequestPath
    else
      getContextPath + getRequestPath
  }

  // 2014-09-10: Only used by XHTMLToPDFProcessor
  def getRequestURL: String = {
    // Get absolute URL w/o query string e.g. http://foo.com/a/b/c
    val incomingRequestURL = incomingRequest.getRequestURL
    // Resolving request URI against incoming absolute URL, e.g. /d/e/f -> http://foo.com/d/e/f
    NetUtils.resolveURI(getRequestURI, incomingRequestURL)
  }

  // ==== Properties which are delegated =============================================================================

  // TODO: Check against ExternalContextToHttpServletRequestWrapper

  // Container is preserved
  def getContainerType      = incomingRequest.getContainerType
  def getContainerNamespace = incomingRequest.getContainerNamespace

  def getPortletMode = incomingRequest.getPortletMode // submission does not change portlet mode
  def getWindowState = incomingRequest.getWindowState // submission does not change window state

  def getNativeRequest  = incomingRequest.getNativeRequest  // should not have mainstream uses; see RequestDispatcherSubmission, and cookies forwarding
  def getPathTranslated = incomingRequest.getPathTranslated // should really not be called

  // Client and server are preserved, assuming all those relate to knowledge about URL rewriting
  def getProtocol   = incomingRequest.getProtocol
  def getServerPort = incomingRequest.getServerPort
  def getScheme     = incomingRequest.getScheme
  def getRemoteHost = incomingRequest.getRemoteHost
  def getRemoteAddr = incomingRequest.getRemoteAddr
  def isSecure      = incomingRequest.isSecure
  def getLocale     = incomingRequest.getLocale
  def getLocales    = incomingRequest.getLocales
  def getServerName = incomingRequest.getServerName

  def getClientContextPath(urlString: String) = incomingRequest.getClientContextPath(urlString)

  // Session information is preserved
  def isRequestedSessionIdValid   = incomingRequest.isRequestedSessionIdValid
  def sessionInvalidate()         = incomingRequest.sessionInvalidate()
  def getSession(create: Boolean) = incomingRequest.getSession(create)
  def getRequestedSessionId       = incomingRequest.getRequestedSessionId

  // User information is preserved
  def credentials                = incomingRequest.credentials
  def isUserInRole(role: String) = incomingRequest.isUserInRole(role)
  def getAuthType                = incomingRequest.getAuthType
}
orbeon/orbeon-forms
src/main/scala/org/orbeon/oxf/externalcontext/LocalRequest.scala
Scala
lgpl-2.1
8,587
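// Hedged sketch of the SRV.4.1 ordering implemented by _queryAndBodyParameters
// above: query-string pairs come first, body pairs after, values grouped per
// name while preserving first-seen order. Plain Scala, not the Orbeon helpers.
import scala.collection.mutable

def combineParams(
  query : Seq[(String, String)],
  body  : Seq[(String, String)]
): mutable.LinkedHashMap[String, Vector[String]] = {
  val result = mutable.LinkedHashMap[String, Vector[String]]()
  for ((k, v) <- query ++ body)
    result += k -> (result.getOrElse(k, Vector()) :+ v)
  result
}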
package org.apache.streams.examples.flink.twitter.collection

import java.io.Serializable
import java.util.Objects
import java.util.concurrent.TimeUnit

import com.fasterxml.jackson.databind.ObjectMapper
import com.google.common.util.concurrent.Uninterruptibles
import org.apache.flink.configuration.Configuration
import org.apache.flink.api.common.accumulators.IntCounter
import org.apache.flink.streaming.api.functions.source.RichSourceFunction
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.streams.config.{ComponentConfigurator, StreamsConfiguration, StreamsConfigurator}
import org.apache.streams.core.StreamsDatum
import org.apache.streams.flink.StreamsFlinkConfiguration
import org.apache.streams.jackson.StreamsJacksonMapper
import org.apache.streams.twitter.config.TwitterStreamConfiguration
import org.apache.streams.twitter.converter.TwitterDateTimeFormat
import org.apache.streams.twitter.provider.TwitterStreamProvider

import scala.collection.JavaConversions._

class SpritzerSource(
  streamsConfiguration : StreamsConfiguration,
  twitterConfiguration : TwitterStreamConfiguration = new ComponentConfigurator(classOf[TwitterStreamConfiguration]).detectConfiguration(),
  flinkConfiguration : StreamsFlinkConfiguration = new ComponentConfigurator(classOf[StreamsFlinkConfiguration]).detectConfiguration()
) extends RichSourceFunction[String] with Serializable /*with StoppableFunction*/ {

  var mapper: ObjectMapper = _
  var twitProvider: TwitterStreamProvider = _
  var items: IntCounter = new IntCounter()

  @throws[Exception]
  override def open(parameters: Configuration): Unit = {
    mapper = StreamsJacksonMapper.getInstance(TwitterDateTimeFormat.TWITTER_FORMAT)
    getRuntimeContext().addAccumulator("SpritzerSource.items", this.items)
    twitProvider = new TwitterStreamProvider(twitterConfiguration)
    twitProvider.prepare(twitProvider)
    twitProvider.startStream()
  }

  override def run(ctx: SourceFunction.SourceContext[String]): Unit = {
    do {
      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getProviderWaitMs, TimeUnit.MILLISECONDS)
      val current: List[StreamsDatum] = twitProvider.readCurrent().iterator().toList
      items.add(current.size)
      for (item <- current) {
        ctx.collect(mapper.writeValueAsString(item.getDocument))
      }
    } while (twitProvider.isRunning)
  }

  override def cancel(): Unit = {
    close()
  }
}
jfrazee/incubator-streams
streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/SpritzerSource.scala
Scala
apache-2.0
2,534
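// Hedged usage sketch: wiring the source above into a Flink job; the Scala
// streaming API import brings the implicit TypeInformation into scope, and
// `streamsConfig` stands for a StreamsConfiguration obtained elsewhere.
import org.apache.flink.streaming.api.scala._

def runJob(streamsConfig: StreamsConfiguration): Unit = {
  val env = StreamExecutionEnvironment.getExecutionEnvironment
  env.addSource(new SpritzerSource(streamsConfig)).print()
  env.execute("twitter-spritzer")
}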
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.plan.rules

import org.apache.flink.table.planner.plan.nodes.logical._
import org.apache.flink.table.planner.plan.rules.logical._
import org.apache.flink.table.planner.plan.rules.physical.FlinkExpandConversionRule
import org.apache.flink.table.planner.plan.rules.physical.stream._

import org.apache.calcite.rel.core.RelFactories
import org.apache.calcite.rel.logical.{LogicalIntersect, LogicalMinus, LogicalUnion}
import org.apache.calcite.rel.rules._
import org.apache.calcite.tools.{RuleSet, RuleSets}

import scala.collection.JavaConverters._

object FlinkStreamRuleSets {

  val SEMI_JOIN_RULES: RuleSet = RuleSets.ofList(
    SimplifyFilterConditionRule.EXTENDED,
    FlinkRewriteSubQueryRule.FILTER,
    FlinkSubQueryRemoveRule.FILTER,
    JoinConditionTypeCoerceRule.INSTANCE,
    FlinkJoinPushExpressionsRule.INSTANCE
  )

  /**
    * Convert sub-queries before query decorrelation.
    */
  val TABLE_SUBQUERY_RULES: RuleSet = RuleSets.ofList(
    CoreRules.FILTER_SUB_QUERY_TO_CORRELATE,
    CoreRules.PROJECT_SUB_QUERY_TO_CORRELATE,
    CoreRules.JOIN_SUB_QUERY_TO_CORRELATE
  )

  /**
    * Expand plan by replacing references to tables into a proper plan sub trees. Those rules
    * can create new plan nodes.
    */
  val EXPAND_PLAN_RULES: RuleSet = RuleSets.ofList(
    LogicalCorrelateToJoinFromTemporalTableRule.LOOKUP_JOIN_WITH_FILTER,
    LogicalCorrelateToJoinFromTemporalTableRule.LOOKUP_JOIN_WITHOUT_FILTER,
    LogicalCorrelateToJoinFromTemporalTableRule.WITH_FILTER,
    LogicalCorrelateToJoinFromTemporalTableRule.WITHOUT_FILTER,
    LogicalCorrelateToJoinFromTemporalTableFunctionRule.INSTANCE)

  val POST_EXPAND_CLEAN_UP_RULES: RuleSet = RuleSets.ofList(
    EnumerableToLogicalTableScan.INSTANCE)

  /**
    * Convert table references before query decorrelation.
    */
  val TABLE_REF_RULES: RuleSet = RuleSets.ofList(
    EnumerableToLogicalTableScan.INSTANCE
  )

  /**
    * Solid transformations before actual decorrelation.
    */
  val PRE_DECORRELATION_RULES: RuleSet = RuleSets.ofList(
    CorrelateSortToRankRule.INSTANCE
  )

  /**
    * RuleSet to reduce expressions
    */
  private val REDUCE_EXPRESSION_RULES: RuleSet = RuleSets.ofList(
    CoreRules.FILTER_REDUCE_EXPRESSIONS,
    CoreRules.PROJECT_REDUCE_EXPRESSIONS,
    CoreRules.CALC_REDUCE_EXPRESSIONS,
    CoreRules.JOIN_REDUCE_EXPRESSIONS
  )

  /**
    * RuleSet to simplify coalesce invocations
    */
  private val SIMPLIFY_COALESCE_RULES: RuleSet = RuleSets.ofList(
    RemoveUnreachableCoalesceArgumentsRule.PROJECT_INSTANCE,
    RemoveUnreachableCoalesceArgumentsRule.FILTER_INSTANCE,
    RemoveUnreachableCoalesceArgumentsRule.JOIN_INSTANCE,
    RemoveUnreachableCoalesceArgumentsRule.CALC_INSTANCE
  )

  /**
    * RuleSet to simplify predicate expressions in filters and joins
    */
  private val PREDICATE_SIMPLIFY_EXPRESSION_RULES: RuleSet = RuleSets.ofList(
    SimplifyFilterConditionRule.INSTANCE,
    SimplifyJoinConditionRule.INSTANCE,
    JoinConditionTypeCoerceRule.INSTANCE,
    CoreRules.JOIN_PUSH_EXPRESSIONS
  )

  /**
    * RuleSet to normalize plans for stream
    */
  val DEFAULT_REWRITE_RULES: RuleSet = RuleSets.ofList((
    PREDICATE_SIMPLIFY_EXPRESSION_RULES.asScala ++
    SIMPLIFY_COALESCE_RULES.asScala ++
    REDUCE_EXPRESSION_RULES.asScala ++
    List(
      //removes constant keys from an Agg
      CoreRules.AGGREGATE_PROJECT_PULL_UP_CONSTANTS,
      // fix: FLINK-17553 unsupported call error when constant exists in group window key
      // this rule will merge the project generated by AggregateProjectPullUpConstantsRule and
      // make sure window aggregate can be correctly rewritten by StreamLogicalWindowAggregateRule
      CoreRules.PROJECT_MERGE,
      StreamLogicalWindowAggregateRule.INSTANCE,
      // slices a project into sections which contain window agg functions
      // and sections which do not.
      CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW,
      WindowPropertiesRules.WINDOW_PROPERTIES_RULE,
      WindowPropertiesRules.WINDOW_PROPERTIES_HAVING_RULE,
      //ensure union set operator have the same row type
      new CoerceInputsRule(classOf[LogicalUnion], false),
      //ensure intersect set operator have the same row type
      new CoerceInputsRule(classOf[LogicalIntersect], false),
      //ensure except set operator have the same row type
      new CoerceInputsRule(classOf[LogicalMinus], false),
      ConvertToNotInOrInRule.INSTANCE,
      // optimize limit 0
      FlinkLimit0RemoveRule.INSTANCE,
      // unnest rule
      LogicalUnnestRule.INSTANCE,
      // rewrite constant table function scan to correlate
      JoinTableFunctionScanToCorrelateRule.INSTANCE,
      // Wrap arguments for JSON aggregate functions
      WrapJsonAggFunctionArgumentsRule.INSTANCE
    )
  ).asJava)

  /**
    * RuleSet about filter
    */
  private val FILTER_RULES: RuleSet = RuleSets.ofList(
    // push a filter into a join
    CoreRules.FILTER_INTO_JOIN,
    // push filter into the children of a join
    CoreRules.JOIN_CONDITION_PUSH,
    // push filter through an aggregation
    CoreRules.FILTER_AGGREGATE_TRANSPOSE,
    // push a filter past a project
    CoreRules.FILTER_PROJECT_TRANSPOSE,
    // push a filter past a setop
    CoreRules.FILTER_SET_OP_TRANSPOSE,
    CoreRules.FILTER_MERGE
  )

  /**
    * RuleSet to extract sub-condition which can be pushed into join inputs
    */
  val JOIN_PREDICATE_REWRITE_RULES: RuleSet = RuleSets.ofList(
    RuleSets.ofList(JoinDependentConditionDerivationRule.INSTANCE))

  /**
    * RuleSet to do predicate pushdown
    */
  val FILTER_PREPARE_RULES: RuleSet = RuleSets.ofList((
    FILTER_RULES.asScala
      // simplify predicate expressions in filters and joins
      ++ PREDICATE_SIMPLIFY_EXPRESSION_RULES.asScala
      // reduce expressions in filters and joins
      ++ REDUCE_EXPRESSION_RULES.asScala
    ).asJava)

  /**
    * RuleSet to push down partitions into table source
    */
  val PUSH_PARTITION_DOWN_RULES: RuleSet = RuleSets.ofList(
    // push partition into the table scan
    PushPartitionIntoLegacyTableSourceScanRule.INSTANCE,
    // push partition into the dynamic table scan
    PushPartitionIntoTableSourceScanRule.INSTANCE
  )

  /**
    * RuleSet to push down filters into table source
    */
  val PUSH_FILTER_DOWN_RULES: RuleSet = RuleSets.ofList(
    // push a filter down into the table scan
    PushFilterIntoTableSourceScanRule.INSTANCE,
    PushFilterIntoLegacyTableSourceScanRule.INSTANCE
  )

  /**
    * RuleSet to prune empty results rules
    */
  val PRUNE_EMPTY_RULES: RuleSet = RuleSets.ofList(
    PruneEmptyRules.AGGREGATE_INSTANCE,
    PruneEmptyRules.FILTER_INSTANCE,
    PruneEmptyRules.JOIN_LEFT_INSTANCE,
    FlinkPruneEmptyRules.JOIN_RIGHT_INSTANCE,
    PruneEmptyRules.PROJECT_INSTANCE,
    PruneEmptyRules.SORT_INSTANCE,
    PruneEmptyRules.UNION_INSTANCE
  )

  /**
    * RuleSet about project
    */
  val PROJECT_RULES: RuleSet = RuleSets.ofList(
    // push a projection past a filter
    CoreRules.PROJECT_FILTER_TRANSPOSE,
    // push a projection to the children of a non semi/anti join
    // push all expressions to handle the time indicator correctly
    new FlinkProjectJoinTransposeRule(
      PushProjector.ExprCondition.FALSE, RelFactories.LOGICAL_BUILDER),
    // push a projection to the children of a semi/anti Join
    ProjectSemiAntiJoinTransposeRule.INSTANCE,
    // merge projections
    CoreRules.PROJECT_MERGE,
    // remove identity project
    CoreRules.PROJECT_REMOVE,
    //removes constant keys from an Agg
    CoreRules.AGGREGATE_PROJECT_PULL_UP_CONSTANTS,
    // push project through a Union
    CoreRules.PROJECT_SET_OP_TRANSPOSE,
    // push a projection to the child of a WindowTableFunctionScan
    ProjectWindowTableFunctionTransposeRule.INSTANCE
  )

  val JOIN_REORDER_PREPARE_RULES: RuleSet = RuleSets.ofList(
    // merge project to MultiJoin
    CoreRules.PROJECT_MULTI_JOIN_MERGE,
    // merge filter to MultiJoin
    CoreRules.FILTER_MULTI_JOIN_MERGE,
    // merge join to MultiJoin
    CoreRules.JOIN_TO_MULTI_JOIN
  )

  val JOIN_REORDER_RULES: RuleSet = RuleSets.ofList(
    // equi-join predicates transfer
    RewriteMultiJoinConditionRule.INSTANCE,
    // join reorder
    CoreRules.MULTI_JOIN_OPTIMIZE
  )

  /**
    * RuleSet to do logical optimize.
    * This RuleSet is a sub-set of [[LOGICAL_OPT_RULES]].
    */
  private val LOGICAL_RULES: RuleSet = RuleSets.ofList(
    // scan optimization
    PushProjectIntoTableSourceScanRule.INSTANCE,
    PushProjectIntoLegacyTableSourceScanRule.INSTANCE,
    PushFilterIntoTableSourceScanRule.INSTANCE,
    PushFilterIntoLegacyTableSourceScanRule.INSTANCE,
    PushLimitIntoTableSourceScanRule.INSTANCE,
    // reorder the project and watermark assigner
    ProjectWatermarkAssignerTransposeRule.INSTANCE,
    // reorder sort and projection
    CoreRules.SORT_PROJECT_TRANSPOSE,
    // remove unnecessary sort rule
    CoreRules.SORT_REMOVE,
    // join rules
    FlinkJoinPushExpressionsRule.INSTANCE,
    SimplifyJoinConditionRule.INSTANCE,
    // remove union with only a single child
    CoreRules.UNION_REMOVE,
    // convert non-all union into all-union + distinct
    CoreRules.UNION_TO_DISTINCT,
    // aggregation and projection rules
    CoreRules.AGGREGATE_PROJECT_MERGE,
    CoreRules.AGGREGATE_PROJECT_PULL_UP_CONSTANTS,
    // remove aggregation if it does not aggregate and input is already distinct
    FlinkAggregateRemoveRule.INSTANCE,
    // push aggregate through join
    FlinkAggregateJoinTransposeRule.EXTENDED,
    // using variants of aggregate union rule
    CoreRules.AGGREGATE_UNION_AGGREGATE_FIRST,
    CoreRules.AGGREGATE_UNION_AGGREGATE_SECOND,
    // reduce aggregate functions like AVG, STDDEV_POP etc.
    CoreRules.AGGREGATE_REDUCE_FUNCTIONS,
    WindowAggregateReduceFunctionsRule.INSTANCE,
    // reduce useless aggCall
    PruneAggregateCallRule.PROJECT_ON_AGGREGATE,
    PruneAggregateCallRule.CALC_ON_AGGREGATE,
    // expand grouping sets
    DecomposeGroupingSetsRule.INSTANCE,
    // calc rules
    CoreRules.FILTER_CALC_MERGE,
    CoreRules.PROJECT_CALC_MERGE,
    CoreRules.FILTER_TO_CALC,
    CoreRules.PROJECT_TO_CALC,
    FlinkCalcMergeRule.INSTANCE,
    // semi/anti join transpose rule
    FlinkSemiAntiJoinJoinTransposeRule.INSTANCE,
    FlinkSemiAntiJoinProjectTransposeRule.INSTANCE,
    FlinkSemiAntiJoinFilterTransposeRule.INSTANCE,
    // set operators
    ReplaceIntersectWithSemiJoinRule.INSTANCE,
    RewriteIntersectAllRule.INSTANCE,
    ReplaceMinusWithAntiJoinRule.INSTANCE,
    RewriteMinusAllRule.INSTANCE
  )

  /**
    * RuleSet to translate calcite nodes to flink nodes
    */
  private val LOGICAL_CONVERTERS: RuleSet = RuleSets.ofList(
    // translate to flink logical rel nodes
    FlinkLogicalAggregate.STREAM_CONVERTER,
    FlinkLogicalTableAggregate.CONVERTER,
    FlinkLogicalOverAggregate.CONVERTER,
    FlinkLogicalCalc.CONVERTER,
    FlinkLogicalCorrelate.CONVERTER,
    FlinkLogicalJoin.CONVERTER,
    FlinkLogicalSort.STREAM_CONVERTER,
    FlinkLogicalUnion.CONVERTER,
    FlinkLogicalValues.CONVERTER,
    FlinkLogicalTableSourceScan.CONVERTER,
    FlinkLogicalLegacyTableSourceScan.CONVERTER,
    FlinkLogicalTableFunctionScan.CONVERTER,
    FlinkLogicalDataStreamTableScan.CONVERTER,
    FlinkLogicalIntermediateTableScan.CONVERTER,
    FlinkLogicalExpand.CONVERTER,
    FlinkLogicalRank.CONVERTER,
    FlinkLogicalWatermarkAssigner.CONVERTER,
    FlinkLogicalWindowAggregate.CONVERTER,
    FlinkLogicalWindowTableAggregate.CONVERTER,
    FlinkLogicalSnapshot.CONVERTER,
    FlinkLogicalMatch.CONVERTER,
    FlinkLogicalSink.CONVERTER,
    FlinkLogicalLegacySink.CONVERTER
  )

  /**
    * RuleSet to do logical optimize for stream
    */
  val LOGICAL_OPT_RULES: RuleSet = RuleSets.ofList((
    FILTER_RULES.asScala ++
      PROJECT_RULES.asScala ++
      PRUNE_EMPTY_RULES.asScala ++
      LOGICAL_RULES.asScala ++
      LOGICAL_CONVERTERS.asScala
    ).asJava)

  /**
    * RuleSet to do rewrite on FlinkLogicalRel for Stream
    */
  val LOGICAL_REWRITE: RuleSet = RuleSets.ofList(
    // watermark push down
    PushWatermarkIntoTableSourceScanAcrossCalcRule.INSTANCE,
    PushWatermarkIntoTableSourceScanRule.INSTANCE,
    // transform over window to topn node
    FlinkLogicalRankRule.INSTANCE,
    // transpose calc past rank to reduce rank input fields
    CalcRankTransposeRule.INSTANCE,
    // remove output of rank number when it is a constant
    ConstantRankNumberColumnRemoveRule.INSTANCE,
    // split distinct aggregate to reduce data skew
    SplitAggregateRule.INSTANCE,
    // transpose calc past snapshot
    CalcSnapshotTransposeRule.INSTANCE,
    // Rule that splits python ScalarFunctions from join conditions
    SplitPythonConditionFromJoinRule.INSTANCE,
    // Rule that splits python ScalarFunctions from
    // java/scala ScalarFunctions in correlate conditions
    SplitPythonConditionFromCorrelateRule.INSTANCE,
    // Rule that transpose the conditions after the Python correlate node.
    CalcPythonCorrelateTransposeRule.INSTANCE,
    // Rule that splits java calls from python TableFunction
    PythonCorrelateSplitRule.INSTANCE,
    // merge calc after calc transpose
    FlinkCalcMergeRule.INSTANCE,
    // remove output of rank number when it is not used by successor calc
    RedundantRankNumberColumnRemoveRule.INSTANCE,
    // remove the trivial calc that is produced by PushWatermarkIntoTableSourceScanAcrossCalcRule.
    // because [[PushWatermarkIntoTableSourceScanAcrossCalcRule]] will push the rowtime computed
    // column into the source. After FlinkCalcMergeRule applies, it may produce a trivial calc.
FlinkLogicalCalcRemoveRule.INSTANCE, // filter push down PushFilterInCalcIntoTableSourceScanRule.INSTANCE, //Rule that rewrites temporal join with extracted primary key TemporalJoinRewriteWithUniqueKeyRule.INSTANCE, // Rule that splits python ScalarFunctions from java/scala ScalarFunctions. PythonCalcSplitRule.SPLIT_CONDITION_REX_FIELD, PythonCalcSplitRule.SPLIT_PROJECTION_REX_FIELD, PythonCalcSplitRule.SPLIT_CONDITION, PythonCalcSplitRule.SPLIT_PROJECT, PythonCalcSplitRule.SPLIT_PANDAS_IN_PROJECT, PythonCalcSplitRule.EXPAND_PROJECT, PythonCalcSplitRule.PUSH_CONDITION, PythonCalcSplitRule.REWRITE_PROJECT, PythonMapMergeRule.INSTANCE ) /** * RuleSet to do physical optimize for stream */ val PHYSICAL_OPT_RULES: RuleSet = RuleSets.ofList( FlinkCalcMergeRule.STREAM_PHYSICAL_INSTANCE, FlinkExpandConversionRule.STREAM_INSTANCE, StreamPhysicalCalcRemoveRule.INSTANCE, // source StreamPhysicalDataStreamScanRule.INSTANCE, StreamPhysicalTableSourceScanRule.INSTANCE, StreamPhysicalLegacyTableSourceScanRule.INSTANCE, StreamPhysicalIntermediateTableScanRule.INSTANCE, StreamPhysicalWatermarkAssignerRule.INSTANCE, StreamPhysicalValuesRule.INSTANCE, // calc StreamPhysicalCalcRule.INSTANCE, StreamPhysicalPythonCalcRule.INSTANCE, // union StreamPhysicalUnionRule.INSTANCE, // sort StreamPhysicalSortRule.INSTANCE, StreamPhysicalLimitRule.INSTANCE, StreamPhysicalSortLimitRule.INSTANCE, StreamPhysicalTemporalSortRule.INSTANCE, // rank StreamPhysicalRankRule.INSTANCE, StreamPhysicalDeduplicateRule.INSTANCE, // expand StreamPhysicalExpandRule.INSTANCE, // group agg StreamPhysicalGroupAggregateRule.INSTANCE, StreamPhysicalGroupTableAggregateRule.INSTANCE, StreamPhysicalPythonGroupAggregateRule.INSTANCE, StreamPhysicalPythonGroupTableAggregateRule.INSTANCE, // over agg StreamPhysicalOverAggregateRule.INSTANCE, StreamPhysicalPythonOverAggregateRule.INSTANCE, // window agg StreamPhysicalGroupWindowAggregateRule.INSTANCE, StreamPhysicalGroupWindowTableAggregateRule.INSTANCE, StreamPhysicalPythonGroupWindowAggregateRule.INSTANCE, // window TVFs StreamPhysicalWindowTableFunctionRule.INSTANCE, StreamPhysicalWindowAggregateRule.INSTANCE, PullUpWindowTableFunctionIntoWindowAggregateRule.INSTANCE, ExpandWindowTableFunctionTransposeRule.INSTANCE, StreamPhysicalWindowRankRule.INSTANCE, StreamPhysicalWindowDeduplicateRule.INSTANCE, // join StreamPhysicalJoinRule.INSTANCE, StreamPhysicalIntervalJoinRule.INSTANCE, StreamPhysicalTemporalJoinRule.INSTANCE, StreamPhysicalLookupJoinRule.SNAPSHOT_ON_TABLESCAN, StreamPhysicalLookupJoinRule.SNAPSHOT_ON_CALC_TABLESCAN, StreamPhysicalWindowJoinRule.INSTANCE, // CEP StreamPhysicalMatchRule.INSTANCE, // correlate StreamPhysicalConstantTableFunctionScanRule.INSTANCE, StreamPhysicalCorrelateRule.INSTANCE, StreamPhysicalPythonCorrelateRule.INSTANCE, // sink StreamPhysicalSinkRule.INSTANCE, StreamPhysicalLegacySinkRule.INSTANCE ) /** * RuleSet related to transpose watermark to be close to source */ val WATERMARK_TRANSPOSE_RULES: RuleSet = RuleSets.ofList( WatermarkAssignerChangelogNormalizeTransposeRule.WITH_CALC, WatermarkAssignerChangelogNormalizeTransposeRule.WITHOUT_CALC ) /** * RuleSet related to mini-batch. */ val MINI_BATCH_RULES: RuleSet = RuleSets.ofList( // mini-batch interval infer rule MiniBatchIntervalInferRule.INSTANCE ) /** * RuleSet to optimize plans after stream exec execution. 
*/ val PHYSICAL_REWRITE: RuleSet = RuleSets.ofList( // optimize agg rule TwoStageOptimizedAggregateRule.INSTANCE, // incremental agg rule IncrementalAggregateRule.INSTANCE, // optimize window agg rule TwoStageOptimizedWindowAggregateRule.INSTANCE, // simplify window tvf SimplifyWindowTableFunctionRules.WITH_CALC_WINDOW_RANK, SimplifyWindowTableFunctionRules.WITH_WINDOW_RANK, SimplifyWindowTableFunctionRules.WITH_CALC_WINDOW_DEDUPLICATE, SimplifyWindowTableFunctionRules.WITH_WINDOW_DEDUPLICATE, SimplifyWindowTableFunctionRules.WITH_LEFT_RIGHT_CALC_WINDOW_JOIN, SimplifyWindowTableFunctionRules.WITH_LEFT_CALC_WINDOW_JOIN, SimplifyWindowTableFunctionRules.WITH_RIGHT_CALC_WINDOW_JOIN, SimplifyWindowTableFunctionRules.WITH_WINDOW_JOIN, // optimize ChangelogNormalize PushFilterPastChangelogNormalizeRule.INSTANCE ) }
StephanEwen/incubator-flink
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/FlinkStreamRuleSets.scala
Scala
apache-2.0
19,148
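The sets above are plain Calcite RuleSets, which are Iterable[RelOptRule]; a minimal sketch (not part of the original file) of inspecting one and of concatenating two sets the same way the file builds LOGICAL_OPT_RULES:

import org.apache.calcite.tools.{RuleSet, RuleSets}
import scala.collection.JavaConverters._

object RuleSetUtil {
  // List the names of the rules contained in a RuleSet.
  def names(rs: RuleSet): Seq[String] = rs.asScala.map(_.toString).toSeq

  // Concatenate two RuleSets into one, mirroring the asScala/asJava
  // round trip used for LOGICAL_OPT_RULES above.
  def concat(a: RuleSet, b: RuleSet): RuleSet =
    RuleSets.ofList((a.asScala ++ b.asScala).asJava)
}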
package net.sansa_stack.query.spark.graph.jena.expression

import net.sansa_stack.query.spark.graph.jena.util.Result

import org.apache.jena.graph.Node

class LogicalOr(left: Expression, right: Expression) extends FilterTwo(left, right) {

  private val tag = "Logical Or"

  private val leftFilter = left match {
    case e: Filter => e
    case _ => throw new TypeNotPresentException(
      "Filter", new Throwable("The input left expression is not a type of filter"))
  }

  private val rightFilter = right match {
    case e: Filter => e
    case _ => throw new TypeNotPresentException(
      "Filter", new Throwable("The input right expression is not a type of filter"))
  }

  override def evaluate(result: Map[Node, Node]): Boolean = {
    // compiler here
    throw new UnsupportedOperationException
  }

  override def evaluate(result: Result[Node]): Boolean = {
    leftFilter.evaluate(result) || rightFilter.evaluate(result)
  }

  override def getTag: String = {
    tag
  }
}
SANSA-Stack/SANSA-RDF
sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/graph/jena/expression/LogicalOr.scala
Scala
apache-2.0
991
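A standalone sketch of the guard pattern LogicalOr uses, with simplified stand-in types rather than the repo's Expression/Filter hierarchy:

sealed trait Expr
trait Filter extends Expr { def accept(x: Int): Boolean }

// Narrow an Expr to a Filter or fail fast with a descriptive cause,
// mirroring the matches in LogicalOr's constructor.
def asFilter(e: Expr, side: String): Filter = e match {
  case f: Filter => f
  case _ => throw new TypeNotPresentException(
    "Filter", new Throwable(s"The input $side expression is not a type of filter"))
}

// Short-circuit OR over the two narrowed filters, as in evaluate(Result[Node]).
def evalOr(l: Expr, r: Expr, x: Int): Boolean =
  asFilter(l, "left").accept(x) || asFilter(r, "right").accept(x)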
package com.avsystem.commons
package redis

import com.avsystem.commons.redis.config.{ConnectionConfig, NodeConfig}
import org.scalatest.Suite

/**
 * Author: ghik
 * Created: 14/04/16.
 */
trait UsesRedisNodeClient extends UsesRedisServer with UsesActorSystem with UsesSslContext { this: Suite =>

  def useTls: Boolean = false

  def connectionConfig: ConnectionConfig =
    ConnectionConfig(sslEngineCreator = if (useTls) OptArg(() => sslContext.createSSLEngine()) else OptArg.Empty)

  def nodeConfig: NodeConfig = NodeConfig(
    connectionConfigs = _ => connectionConfig,
    blockingConnectionConfigs = _ => connectionConfig
  )

  var redisClient: RedisNodeClient = _

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    redisClient = new RedisNodeClient(if (useTls) tlsAddress else address, nodeConfig)
  }

  override protected def afterAll(): Unit = {
    redisClient.close()
    super.afterAll()
  }
}
AVSystem/scala-commons
commons-redis/src/test/scala/com/avsystem/commons/redis/UsesRedisNodeClient.scala
Scala
mit
937
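A hypothetical suite wiring in the mixin (the test name and body are assumptions, and the companion traits' setup, e.g. a running Redis server, is taken as given):

import org.scalatest.FunSuite

class NodeClientSmokeTest extends FunSuite with UsesRedisNodeClient {
  override def useTls: Boolean = false // flip to true to exercise the TLS path

  test("node client is initialized against the configured address") {
    assert(redisClient != null) // assigned in beforeAll
  }
}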
package toysph import scala.collection.breakOut import scala.collection.immutable._ import scala.collection.mutable.{ArrayBuilder, Map => MMap} import scala.math._ trait Vec2D { def x: Double def y: Double def len2: Double = x * x + y * y def -(v: Vec2D): Vec2D = Vec2D(x - v.x, y - v.y) def distance2To(v: Vec2D): Double = (this - v).len2 } object Vec2D { private case class FreeVec2D(x: Double, y: Double) extends Vec2D def apply(x: Double, y: Double): Vec2D = FreeVec2D(x, y) } trait Particle2D { def mass: Double // mass def r: Vec2D // position def v: Vec2D // velocity } object Particle2D { private case class FreeParticle2D(mass: Double, r: Vec2D, v: Vec2D) extends Particle2D def apply(mass: Double, r: Vec2D, v: Vec2D): Particle2D = FreeParticle2D(mass, r, v) def apply(mass: Double, rx: Double, ry: Double, vx: Double = 0.0, vy: Double = 0.0): Particle2D = FreeParticle2D(mass, Vec2D(rx, ry), Vec2D(vx, vy)) } trait ParticleBag { def particles: Iterable[Particle2D] def neighbours(p: Particle2D, radius: Double): Iterable[Particle2D] } trait ParticleBagBuilder { def add(p: Particle2D): Unit def result: ParticleBag } object ParticleBagBuilder { private case class Cell(row: Int, col: Int) private object Cell { def fromXY(x: Double, y: Double, gridSize: Double): Cell = Cell((x / gridSize).toInt, (y / gridSize).toInt) def forParticle(p: Particle2D, gridSize: Double): Cell = fromXY(p.r.x, p.r.y, gridSize) } private final class DefaultParticleBagBuilder(gridSize: Double) extends ParticleBagBuilder { def add(p: Particle2D): Unit = addToCell(Cell.forParticle(p, gridSize), p) def result: ParticleBag = new DefaultParticleBag(immutableMap, gridSize) private val gMap: MMap[Cell, ArrayBuilder[Particle2D]] = MMap.empty[Cell, ArrayBuilder[Particle2D]] private def addToCell(c: Cell, p: Particle2D): Unit = { ensureCellExists(c); gMap(c) += p } private def ensureCellExists(c: Cell): Unit = if (!gMap.contains(c)) gMap(c) = new ArrayBuilder.ofRef[Particle2D] private def immutableMap: Map[Cell, Array[Particle2D]] = gMap.map { case (c, ab) => (c, ab.result()) }(breakOut) } private final class DefaultParticleBag(gMap: Map[Cell, Array[Particle2D]], gridSize: Double) extends ParticleBag { def particles: Iterable[Particle2D] = gMap.values.flatMap(_.toIterable)(breakOut) def neighbours(p: Particle2D, radius: Double): Iterable[Particle2D] = { val nAdjCells = ceil(radius / gridSize).toInt val pCell = Cell.forParticle(p, gridSize) val r2 = radius * radius val cells: Iterable[Cell] = for { row <- (pCell.row - nAdjCells) to (pCell.row + nAdjCells) col <- (pCell.col - nAdjCells) to (pCell.col + nAdjCells) cell = Cell(row, col) if (gMap.contains(cell)) } yield cell for { cell <- cells pb <- gMap(cell) if (pb != p) curDist2 = pb.r distance2To p.r if (curDist2 < r2) } yield pb } } def apply(gridSize: Double): ParticleBagBuilder = new DefaultParticleBagBuilder(gridSize) }
lancelet/toysph
src/main/scala/toysph/particles.scala
Scala
apache-2.0
3,119
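A usage sketch for the API above: build a bag on a grid, then query neighbours within a radius.

val builder = ParticleBagBuilder(gridSize = 1.0)
builder.add(Particle2D(mass = 1.0, rx = 0.2, ry = 0.3))
builder.add(Particle2D(mass = 1.0, rx = 0.4, ry = 0.4))
builder.add(Particle2D(mass = 1.0, rx = 5.0, ry = 5.0)) // far away, in a distant cell

val bag = builder.result
val p = bag.particles.head
val near = bag.neighbours(p, radius = 0.5) // neighbours within 0.5, excluding p itself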
/* * Copyright 2014 Commonwealth Computer Research, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.locationtech.geomesa.utils.geotools import com.typesafe.config.ConfigFactory import org.junit.runner.RunWith import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes._ import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors._ import org.locationtech.geomesa.utils.stats.{IndexCoverage, Cardinality} import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner @RunWith(classOf[JUnitRunner]) class SimpleFeatureTypesTest extends Specification { args(color = true) "SimpleFeatureTypes" should { "create an sft that" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer:index=false,dtg:Date:index=false,*geom:Point:srid=4326:index=true") "has name \\'test\\'" >> { sft.getTypeName mustEqual "testing" } "has three attributes" >> { sft.getAttributeCount must be_==(3) } "has an id attribute which is " >> { val idDescriptor = sft.getDescriptor("id") "not null" >> { (idDescriptor must not).beNull } "not indexed" >> { idDescriptor.getUserData.get("index").asInstanceOf[String] mustEqual("none") } } "has a default geom field called 'geom'" >> { val geomDescriptor = sft.getGeometryDescriptor geomDescriptor.getLocalName must be equalTo "geom" } "encode an sft properly" >> { SimpleFeatureTypes.encodeType(sft) must be equalTo s"id:Integer,dtg:Date,*geom:Point:srid=4326:index=full:$OPT_INDEX_VALUE=true" } } "handle namespaces" >> { "simple ones" >> { val sft = SimpleFeatureTypes.createType("ns:testing", "dtg:Date,*geom:Point:srid=4326") sft.getName.getLocalPart mustEqual "testing" sft.getName.getNamespaceURI mustEqual "ns" sft.getTypeName mustEqual("testing") } "complex ones" >> { val sft = SimpleFeatureTypes.createType("http://geomesa/ns:testing", "dtg:Date,*geom:Point:srid=4326") sft.getName.getLocalPart mustEqual "testing" sft.getName.getNamespaceURI mustEqual "http://geomesa/ns" sft.getTypeName mustEqual("testing") } "invalid ones" >> { val sft = SimpleFeatureTypes.createType("http://geomesa/ns:testing:", "dtg:Date,*geom:Point:srid=4326") sft.getName.getLocalPart mustEqual "http://geomesa/ns:testing:" sft.getName.getNamespaceURI must beNull sft.getTypeName mustEqual("http://geomesa/ns:testing:") } } "handle empty srid" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer:index=false,*geom:Point:index=true") (sft.getGeometryDescriptor.getCoordinateReferenceSystem must not).beNull } "handle Int vs. 
Integer lexicographical ordering" >> { val sft1 = SimpleFeatureTypes.createType("testing1", "foo:Int,*geom:Point:index=true") val sft2 = SimpleFeatureTypes.createType("testing2", "foo:Integer,*geom:Point:index=true") sft1.getAttributeCount must beEqualTo(2) sft2.getAttributeCount must beEqualTo(2) } "handle no index attribute" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer,*geom:Point:index=true") sft.getDescriptor("id").getIndexCoverage() mustEqual(IndexCoverage.NONE) } "handle no explicit geometry" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer,geom:Point:index=true,geom2:Geometry") sft.getGeometryDescriptor.getLocalName must be equalTo "geom" } "handle a namespace" >> { val sft = SimpleFeatureTypes.createType("foo:testing", "id:Integer,geom:Point:index=true,geom2:Geometry") sft.getName.getNamespaceURI must be equalTo "foo" } "return the indexed attributes (not including the default geometry)" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer:index=false,dtg:Date:index=true,*geom:Point:srid=4326:index=true") val indexed = SimpleFeatureTypes.getSecondaryIndexedAttributes(sft) indexed.map(_.getLocalName) must containTheSameElementsAs(List("dtg")) } "set index=true for a default geometry" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer:index=false,dtg:Date:index=true,*geom:Point:srid=4326:index=false") sft.getGeometryDescriptor.getUserData.get("index") mustEqual(IndexCoverage.FULL.toString) } "handle list types" >> { "with no values specified" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer,names:List,dtg:Date,*geom:Point:srid=4326") sft.getAttributeCount mustEqual(4) sft.getDescriptor("names") must not beNull sft.getDescriptor("names").getType.getBinding mustEqual(classOf[java.util.List[_]]) val spec = SimpleFeatureTypes.encodeType(sft) spec mustEqual s"id:Integer,names:List[String],dtg:Date,*geom:Point:srid=4326:index=full:$OPT_INDEX_VALUE=true" } "with defined values" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer,names:List[Double],dtg:Date,*geom:Point:srid=4326") sft.getAttributeCount mustEqual(4) sft.getDescriptor("names") must not beNull sft.getDescriptor("names").getType.getBinding mustEqual(classOf[java.util.List[_]]) val spec = SimpleFeatureTypes.encodeType(sft) spec mustEqual s"id:Integer,names:List[Double],dtg:Date,*geom:Point:srid=4326:index=full:$OPT_INDEX_VALUE=true" } "fail for illegal value format" >> { val spec = "id:Integer,names:List[Double][Double],dtg:Date,*geom:Point:srid=4326" SimpleFeatureTypes.createType("testing", spec) should throwAn[IllegalArgumentException] } "fail for illegal value classes" >> { val spec = "id:Integer,names:List[FAKE],dtg:Date,*geom:Point:srid=4326" SimpleFeatureTypes.createType("testing", spec) should throwAn[IllegalArgumentException] } } "handle map types" >> { "with no values specified" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer,metadata:Map,dtg:Date,*geom:Point:srid=4326") sft.getAttributeCount mustEqual(4) sft.getDescriptor("metadata") must not beNull sft.getDescriptor("metadata").getType.getBinding mustEqual classOf[java.util.Map[_, _]] val spec = SimpleFeatureTypes.encodeType(sft) spec mustEqual s"id:Integer,metadata:Map[String,String],dtg:Date,*geom:Point:srid=4326:index=full:$OPT_INDEX_VALUE=true" } "with defined values" >> { val sft = SimpleFeatureTypes.createType("testing", "id:Integer,metadata:Map[Double,String],dtg:Date,*geom:Point:srid=4326") sft.getAttributeCount mustEqual(4) 
sft.getDescriptor("metadata") must not beNull sft.getDescriptor("metadata").getType.getBinding mustEqual classOf[java.util.Map[_, _]] val spec = SimpleFeatureTypes.encodeType(sft) spec mustEqual s"id:Integer,metadata:Map[Double,String],dtg:Date,*geom:Point:srid=4326:index=full:$OPT_INDEX_VALUE=true" } "fail for illegal value format" >> { val spec = "id:Integer,metadata:Map[String],dtg:Date,*geom:Point:srid=4326" SimpleFeatureTypes.createType("testing", spec) should throwAn[IllegalArgumentException] } "fail for illegal value classes" >> { val spec = "id:Integer,metadata:Map[String,FAKE],dtg:Date,*geom:Point:srid=4326" SimpleFeatureTypes.createType("testing", spec) should throwAn[IllegalArgumentException] } } "handle splitter and splitter options" >> { val spec = "name:String,dtg:Date,*geom:Point:srid=4326;table.splitter.class=org.locationtech.geomesa.core.data.DigitSplitter,table.splitter.options=fmt:%02d,min:0,max:99" val sft = SimpleFeatureTypes.createType("test", spec) sft.getUserData.get(SimpleFeatureTypes.TABLE_SPLITTER) must be equalTo "org.locationtech.geomesa.core.data.DigitSplitter" val opts = sft.getUserData.get(SimpleFeatureTypes.TABLE_SPLITTER_OPTIONS).asInstanceOf[Map[String, String]] opts.size must be equalTo 3 opts("fmt") must be equalTo "%02d" opts("min") must be equalTo "0" opts("max") must be equalTo "99" } "allow specification of ST index entry values" >> { val spec = s"name:String:index=true:$OPT_INDEX_VALUE=true,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("name").isIndexValue() mustEqual(true) } "automatically set default geom in ST index entry" >> { val spec = s"name:String:index=true:$OPT_INDEX_VALUE=true,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("geom").isIndexValue() mustEqual(true) } "allow specification of attribute cardinality" >> { val spec = s"name:String:$OPT_CARDINALITY=high,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("name").getUserData.get(OPT_CARDINALITY) mustEqual("high") sft.getDescriptor("name").getCardinality() mustEqual(Cardinality.HIGH) } "allow specification of attribute cardinality regardless of case" >> { val spec = s"name:String:$OPT_CARDINALITY=LOW,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("name").getUserData.get(OPT_CARDINALITY) mustEqual("low") sft.getDescriptor("name").getCardinality() mustEqual(Cardinality.LOW) } "allow specification of index attribute coverages" >> { val spec = s"name:String:$OPT_INDEX=join,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("name").getUserData.get(OPT_INDEX) mustEqual("join") sft.getDescriptor("name").getIndexCoverage() mustEqual(IndexCoverage.JOIN) } "allow specification of index attribute coverages regardless of case" >> { val spec = s"name:String:$OPT_INDEX=FULL,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("name").getUserData.get(OPT_INDEX) mustEqual("full") sft.getDescriptor("name").getIndexCoverage() mustEqual(IndexCoverage.FULL) } "allow specification of index attribute coverages as booleans" >> { val spec = s"name:String:$OPT_INDEX=true,dtg:Date,*geom:Point:srid=4326" val sft = SimpleFeatureTypes.createType("test", spec) sft.getDescriptor("name").getUserData.get(OPT_INDEX) mustEqual("join") sft.getDescriptor("name").getIndexCoverage() 
mustEqual(IndexCoverage.JOIN) } "build from conf" >> { val conf = ConfigFactory.parseString( """ |{ | type-name = "testconf" | fields = [ | { name = "testStr", type = "string" , index = true }, | { name = "testCard", type = "string" , index = true, cardinality = high }, | { name = "testList", type = "List[String]" , index = false }, | { name = "geom", type = "Point" , srid = 4326, default = true } | ] |} """.stripMargin) val sft = SimpleFeatureTypes.createType(conf) sft.getAttributeCount must be equalTo 4 sft.getGeometryDescriptor.getName.getLocalPart must be equalTo "geom" sft.getDescriptor("testStr").getCardinality() mustEqual(Cardinality.UNKNOWN) sft.getDescriptor("testCard").getCardinality() mustEqual(Cardinality.HIGH) sft.getTypeName must be equalTo "testconf" } } }
mmatz-ccri/geomesa
geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/geotools/SimpleFeatureTypesTest.scala
Scala
apache-2.0
12,170
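A minimal round trip through the API the spec exercises (a sketch; version-dependent details such as the exact index options in the encoded spec are omitted):

import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes

val sft = SimpleFeatureTypes.createType("example", "name:String,dtg:Date,*geom:Point:srid=4326")
assert(sft.getTypeName == "example")
assert(sft.getAttributeCount == 3)
val spec = SimpleFeatureTypes.encodeType(sft) // spec string, with index options made explicit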
/* * Copyright (c) 2012-2015 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ package com.snowplowanalytics.snowplow.enrich package hadoop package bad // Scala import scala.collection.mutable.Buffer // Specs2 import org.specs2.mutable.Specification // Scalding import com.twitter.scalding._ // Cascading import cascading.tuple.TupleEntry // This project import JobSpecHelpers._ /** * Holds the input and expected data * for the test. */ object NullNumericFieldsSpec { val lines = Lines( "2014-10-11 14:01:05 - 37 172.31.38.31 GET 24.209.95.109 /i 200 http://www.myvideowebsite.com/embed/ab123456789?auto_start=e9&rf=cb Mozilla%2F5.0+%28Macintosh%3B+Intel+Mac+OS+X+10.6%3B+rv%3A32.0%29+Gecko%2F20100101+Firefox%2F32.0 e=se&se_ca=video-player%3Anewformat&se_ac=play-time&se_la=efba3ef384&se_va=&tid=" ) val expected = """{"line":"2014-10-11 14:01:05 - 37 172.31.38.31 GET 24.209.95.109 /i 200 http://www.myvideowebsite.com/embed/ab123456789?auto_start=e9&rf=cb Mozilla%2F5.0+%28Macintosh%3B+Intel+Mac+OS+X+10.6%3B+rv%3A32.0%29+Gecko%2F20100101+Firefox%2F32.0 e=se&se_ca=video-player%3Anewformat&se_ac=play-time&se_la=efba3ef384&se_va=&tid=","errors":[{"level":"error","message":"Field [se_va]: cannot convert [] to Double-like String"},{"level":"error","message":"Field [tid]: [] is not a valid integer"}]}""" } /** * Integration test for the EtlJob: * * Check that all tuples in a custom structured event * (CloudFront format) are successfully extracted. */ class NullNumericFieldsSpec extends Specification { "A job which processes a CloudFront file containing 1 event with null integer and double fields" should { EtlJobSpec("clj-tomcat", "2", true, List("geo", "organization")). source(MultipleTextLineFiles("inputFolder"), NullNumericFieldsSpec.lines). sink[String](Tsv("outputFolder")){ output => "not write any events" in { output must beEmpty } }. sink[TupleEntry](Tsv("exceptionsFolder")){ trap => "not trap any exceptions" in { trap must beEmpty } }. sink[String](Tsv("badFolder")){ buf => val json = buf.head "write a bad row JSON containing the input line and all errors" in { removeTstamp(json) must_== NullNumericFieldsSpec.expected } }. run. finish } }
mdavid/lessig-bigdata
lib/snowplow/3-enrich/scala-hadoop-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.hadoop/bad/NullNumericFieldsSpec.scala
Scala
mit
3,000
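The expected bad row above is a JSON object pairing the raw input line with its validation errors; a minimal structural check, using only the standard library:

val json = NullNumericFieldsSpec.expected
assert(json.contains("\"line\":"))
assert(json.contains("cannot convert [] to Double-like String"))
assert(json.contains("[] is not a valid integer"))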
package spire
package algebra

import spire.math.{ Rational, NumberTag }
import spire.std.int._
import spire.std.long._
import spire.std.float._
import spire.std.double._
import spire.syntax.euclideanRing._
import spire.syntax.isReal.{ eqOps => _, _ }

import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

import org.scalacheck.{Arbitrary, Gen}
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._

class GCDTest extends FunSuite with Checkers {
  implicit def ArbBigDecimal: Arbitrary[BigDecimal] = Arbitrary(for {
    value <- arbitrary[Long]
    scale <- arbitrary[Short]
  } yield BigDecimal(value, scale.toInt))

  implicit def ArbRational: Arbitrary[Rational] = Arbitrary(for {
    n <- arbitrary[Long]
    d <- arbitrary[Long] if d != 0
  } yield Rational(n, d))

  def testGcd[A: EuclideanRing: IsReal: NumberTag](x: A, y: A): Boolean = {
    (x == Ring[A].zero || y == Ring[A].zero) || {
      val den = spire.math.gcd(x, y)
      val x0 = x /~ den
      val y0 = y /~ den
      if (NumberTag[A].isFinite(x0) && NumberTag[A].isFinite(y0)) {
        x0.isWhole && y0.isWhole && (spire.math.gcd(x0, y0) == Ring[A].one)
      } else {
        // Ideally we'd filter this out at the ScalaCheck level.
        true
      }
    }
  }

  test("GCD of floats with 0 exponent in result is correct") {
    val x = -1.586002E-34f
    val y = 3.3793717E-7f
    val d = spire.math.gcd(x, y)
    assert((x / d).isWhole === true)
    assert((y / d).isWhole === true)
    assert(spire.math.gcd(x / d, y / d) === 1f)
  }

  test("Int GCD")(check(forAll { (a: Int, b: Int) => testGcd(a, b) }))
  test("Long GCD")(check(forAll { (a: Long, b: Long) => testGcd(a, b) }))
  test("Float GCD")(check(forAll { (a: Float, b: Float) => testGcd(a, b) }))
  test("Double GCD")(check(forAll { (a: Double, b: Double) => testGcd(a, b) }))

  // Disabled. Getting unexplainable OOM errors, even with isWhole commented out.
  // test("BigDecimal GCD")(check(forAll { (a: BigDecimal, b: BigDecimal) => testGcd(a, b) }))

  test("Rational GCD")(check(forAll { (a: Rational, b: Rational) => testGcd(a, b) }))
}
tixxit/spire
tests/src/test/scala/spire/algebra/GCDTest.scala
Scala
mit
2,105
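The property under test, spelled out on concrete longs:

val d = spire.math.gcd(42L, 56L) // 14
assert(42L % d == 0 && 56L % d == 0)          // d divides both inputs
assert(spire.math.gcd(42L / d, 56L / d) == 1L) // the quotients are coprime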
package org.joda.time.chrono import org.joda.time.Chronology import org.joda.time.DateTime import org.joda.time.DateTimeConstants import org.joda.time.DateTimeFieldType import org.joda.time.DateTimeZone import org.joda.time.DurationFieldType import org.joda.time.chrono.AssembledChronology.Fields import org.joda.time.field.DelegatedDateTimeField import org.joda.time.field.DividedDateTimeField import org.joda.time.field.OffsetDateTimeField import org.joda.time.field.RemainderDateTimeField import org.joda.time.field.SkipUndoDateTimeField import org.joda.time.field.UnsupportedDurationField import BuddhistChronology._ object BuddhistChronology { val BE = DateTimeConstants.CE private val ERA_FIELD = new BasicSingleEraDateTimeField("BE") private val BUDDHIST_OFFSET = 543 private val cCache = new collection.mutable.HashMap[DateTimeZone, BuddhistChronology]() private val INSTANCE_UTC = getInstance(DateTimeZone.UTC) def getInstanceUTC(): BuddhistChronology = INSTANCE_UTC def getInstance(): BuddhistChronology = getInstance(DateTimeZone.getDefault) def getInstance(zone: DateTimeZone): BuddhistChronology = { var _zone: DateTimeZone = zone if (_zone == null) { _zone = DateTimeZone.getDefault } var chrono = cCache.get(_zone).orNull if (chrono == null) { chrono = new BuddhistChronology(GJChronology.getInstance(_zone, null), null) val lowerLimit = new DateTime(1, 1, 1, 0, 0, 0, 0, chrono) chrono = new BuddhistChronology( LimitChronology.getInstance(chrono, lowerLimit, null), "") val oldChrono = if (cCache.get(_zone).isEmpty) { cCache(_zone) = chrono chrono } else { chrono } if (oldChrono != null) { chrono = oldChrono } } chrono } } @SerialVersionUID(-3474595157769370126L) class BuddhistChronology private (base: Chronology, param: AnyRef) extends AssembledChronology(base, param) { private def readResolve(): AnyRef = { val base = getBase if (base == null) getInstanceUTC else getInstance(base.getZone) } def withUTC(): Chronology = INSTANCE_UTC def withZone(zone: DateTimeZone): Chronology = { var _zone: DateTimeZone = zone if (_zone == null) { _zone = DateTimeZone.getDefault } if (_zone == getZone) { return this } getInstance(_zone) } override def equals(obj: Any): Boolean = { if (super.equals(obj)) { return true } if (obj.isInstanceOf[BuddhistChronology]) { val chrono = obj.asInstanceOf[BuddhistChronology] return getZone == chrono.getZone } false } override def hashCode(): Int = { "Buddhist".hashCode * 11 + getZone.hashCode } override def toString(): String = { var str = "BuddhistChronology" val zone = getZone if (zone != null) { str = str + '[' + zone.getID + ']' } str } protected def assemble(fields: Fields) { if (getParam == null) { fields.eras = UnsupportedDurationField.getInstance(DurationFieldType.eras()) var field = fields.year fields.year = new OffsetDateTimeField( new SkipUndoDateTimeField(this, field), BUDDHIST_OFFSET) field = fields.yearOfEra fields.yearOfEra = new DelegatedDateTimeField( fields.year, fields.eras, DateTimeFieldType.yearOfEra()) field = fields.weekyear fields.weekyear = new OffsetDateTimeField( new SkipUndoDateTimeField(this, field), BUDDHIST_OFFSET) field = new OffsetDateTimeField(fields.yearOfEra, 99) fields.centuryOfEra = new DividedDateTimeField( field, fields.eras, DateTimeFieldType.centuryOfEra(), 100) fields.centuries = fields.centuryOfEra.getDurationField field = new RemainderDateTimeField( fields.centuryOfEra.asInstanceOf[DividedDateTimeField]) fields.yearOfCentury = new OffsetDateTimeField(field, DateTimeFieldType.yearOfCentury(), 1) field = new 
RemainderDateTimeField(fields.weekyear, fields.centuries, DateTimeFieldType.weekyearOfCentury(), 100) fields.weekyearOfCentury = new OffsetDateTimeField( field, DateTimeFieldType.weekyearOfCentury(), 1) fields.era = ERA_FIELD } } }
mdedetrich/soda-time
js/src/main/scala/org/joda/time/chrono/BuddhistChronology.scala
Scala
bsd-2-clause
4,434
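A usage sketch of the offset this chronology applies (BE year = ISO year + 543), assuming the port mirrors Joda-Time's ISOChronology and DateTime constructors:

import org.joda.time.DateTime
import org.joda.time.chrono.{BuddhistChronology, ISOChronology}

val iso = new DateTime(2000, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC)
val be  = iso.withChronology(BuddhistChronology.getInstanceUTC)
be.getYear // 2543, i.e. 2000 + BUDDHIST_OFFSET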
/*
 * Copyright 2014 nidkil
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.nidkil.downloader.merger

import com.nidkil.downloader.datatypes.Download
import scala.collection.mutable.LinkedHashSet
import com.nidkil.downloader.datatypes.Chunk
import java.io.FileInputStream
import org.apache.commons.io.FileUtils
import java.io.FileOutputStream
import java.io.IOException
import org.apache.commons.io.IOUtils
import java.io.File

/**
 * The merger handles the merging of chunks.
 *
 * This is the default merger which uses standard IO to merge the chunks.
 */
class DefaultMerger extends Merger {

  import Merger._

  private var _tempFile: File = null

  def tempFile = _tempFile

  def merge(download: Download, chunks: LinkedHashSet[Chunk]): Boolean = {
    require(download != null, "Download cannot be null")
    require(chunks != null, "Chunks cannot be null")

    _tempFile = new File(download.workDir, download.id + MERGED_FILE_EXT)

    logger.debug(s"Merge chunks [chunk cnt=${chunks.size}, destination=${_tempFile.getPath}]")

    try {
      if (_tempFile.exists) FileUtils.forceDelete(_tempFile)
    } catch {
      case e: IOException => {
        val msg = s"An error occurred while deleting temp merge file [temp file=${_tempFile.getPath}, message=${e.getMessage}]"
        throw new MergerException(msg, e)
      }
    }

    var in: FileInputStream = null
    var out: FileOutputStream = null

    try {
      out = FileUtils.openOutputStream(_tempFile)
      for (chunk <- chunks) {
        logger.debug(s"Merging chunk [$chunk, actual size=${chunk.destFile.length}]")
        in = FileUtils.openInputStream(chunk.destFile)
        IOUtils.copy(in, out)
        in.close()
      }
      out.flush()
    } catch {
      case e: IOException => {
        val msg = s"Error merging chunks [${e.getMessage}]"
        throw new MergerException(msg, e)
      }
    } finally {
      IOUtils.closeQuietly(in)
      IOUtils.closeQuietly(out)
    }

    true
  }
}
nidkil/scala-downloader
src/main/scala/com/nidkil/downloader/merger/DefaultMerger.scala
Scala
apache-2.0
2,525
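A hypothetical call site for the merger (the download and chunks values are assumed here; constructing them depends on the rest of the library):

val merger = new DefaultMerger
if (merger.merge(download, chunks)) {
  // chunks were concatenated in insertion order into the temp file
  println(s"merged into ${merger.tempFile.getPath}")
}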
/* * Copyright 2014 http4s.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.http4s.server.middleware import cats.data._ import cats.effect._ import cats.syntax.all._ import org.http4s._ import org.http4s.headers.{Date => HDate, _} import org.typelevel.ci.CIString import scala.concurrent.duration._ /** Caching contains middlewares to support caching functionality. * * Helper functions to support [[Caching.cache]] can be found in * [[Caching.Helpers]] */ object Caching { /** Middleware that implies responses should NOT be cached. * This is a best attempt, many implementors of caching have done so differently. */ def `no-store`[G[_]: Temporal, F[_], A]( http: Kleisli[G, A, Response[F]] ): Kleisli[G, A, Response[F]] = Kleisli { (a: A) => for { resp <- http(a) out <- `no-store-response`[G](resp) } yield out } /** Transform a Response so that it will not be cached. */ def `no-store-response`[G[_]]: PartiallyAppliedNoStoreCache[G] = new PartiallyAppliedNoStoreCache[G] { def apply[F[_]](resp: Response[F])(implicit G: Temporal[G]): G[Response[F]] = HttpDate.current[G].map(now => resp.putHeaders(HDate(now)).putHeaders(noStoreStaticHeaders)) } // These never change, so don't recreate them each time. private val noStoreStaticHeaders: List[Header.ToRaw] = List( `Cache-Control`( NonEmptyList.of[CacheDirective]( CacheDirective.`no-store`, CacheDirective.`no-cache`(), CacheDirective.`max-age`(0.seconds), ) ), "Pragma" -> "no-cache", Expires(HttpDate.Epoch), // Expire at the epoch for no time confusion ) /** Helpers Contains the default arguments used to help construct middleware * with caching. They serve to support the default arguments for * [[publicCache]] and [[privateCache]]. */ object Helpers { def defaultStatusToSetOn(s: Status): Boolean = s match { case Status.NotModified => true case otherwise => otherwise.isSuccess } def defaultMethodsToSetOn(m: Method): Boolean = methodsToSetOn.contains(m) private lazy val methodsToSetOn: Set[Method] = Set( Method.GET, Method.HEAD, ) } /** Sets headers for response to be publicly cached for the specified duration. * * Note: If set to Duration.Inf, lifetime falls back to * 10 years for support of Http1 caches. */ def publicCache[G[_]: Temporal, F[_]](lifetime: Duration, http: Http[G, F]): Http[G, F] = cache( lifetime, Either.left(CacheDirective.public), Helpers.defaultMethodsToSetOn, Helpers.defaultStatusToSetOn, http, ) /** Publicly Cache a Response for the given lifetime. * * Note: If set to Duration.Inf, lifetime falls back to * 10 years for support of Http1 caches. */ def publicCacheResponse[G[_]](lifetime: Duration): PartiallyAppliedCache[G] = cacheResponse(lifetime, Either.left(CacheDirective.public)) /** Sets headers for response to be privately cached for the specified duration. * * Note: If set to Duration.Inf, lifetime falls back to * 10 years for support of Http1 caches. 
*/ def privateCache[G[_]: Temporal, F[_]]( lifetime: Duration, http: Http[G, F], fieldNames: List[CIString] = Nil, ): Http[G, F] = cache( lifetime, Either.right(CacheDirective.`private`(fieldNames)), Helpers.defaultMethodsToSetOn, Helpers.defaultStatusToSetOn, http, ) /** Privately Caches A Response for the given lifetime. * * Note: If set to Duration.Inf, lifetime falls back to * 10 years for support of Http1 caches. */ def privateCacheResponse[G[_]]( lifetime: Duration, fieldNames: List[CIString] = Nil, ): PartiallyAppliedCache[G] = cacheResponse(lifetime, Either.right(CacheDirective.`private`(fieldNames))) /** Construct a Middleware that will apply the appropriate caching headers. * * Helper functions for methodToSetOn and statusToSetOn can be found in [[Helpers]]. * * Note: If set to Duration.Inf, lifetime falls back to * 10 years for support of Http1 caches. */ def cache[G[_]: Temporal, F[_]]( lifetime: Duration, isPublic: Either[CacheDirective.public.type, CacheDirective.`private`], methodToSetOn: Method => Boolean, statusToSetOn: Status => Boolean, http: Http[G, F], ): Http[G, F] = Kleisli { (req: Request[F]) => for { resp <- http(req) out <- if (methodToSetOn(req.method) && statusToSetOn(resp.status)) cacheResponse[G](lifetime, isPublic)(resp) else resp.pure[G] } yield out } // Here as an optimization so we don't recreate durations // in cacheResponse #TeamStatic private val tenYearDuration: FiniteDuration = 315360000.seconds /** Method in order to turn a generated Response into one that * will be appropriately cached. * * Note: If set to Duration.Inf, lifetime falls back to * 10 years for support of Http1 caches. */ def cacheResponse[G[_]]( lifetime: Duration, isPublic: Either[CacheDirective.public.type, CacheDirective.`private`], ): PartiallyAppliedCache[G] = { val actualLifetime = lifetime match { case finite: FiniteDuration => finite case _ => tenYearDuration // Http1 caches do not respect max-age headers, so to work globally it is recommended // to explicitly set an Expire which requires some time interval to work } new PartiallyAppliedCache[G] { override def apply[F[_]](resp: Response[F])(implicit G: Temporal[G]): G[Response[F]] = for { now <- HttpDate.current[G] expires <- HttpDate .fromEpochSecond(now.epochSecond + actualLifetime.toSeconds) .liftTo[G] } yield resp.putHeaders( `Cache-Control`( NonEmptyList.of( isPublic.fold[CacheDirective](identity, identity), CacheDirective.`max-age`(actualLifetime), ) ), HDate(now), Expires(expires), ) } } trait PartiallyAppliedCache[G[_]] { def apply[F[_]](resp: Response[F])(implicit G: Temporal[G]): G[Response[F]] } trait PartiallyAppliedNoStoreCache[G[_]] { def apply[F[_]](resp: Response[F])(implicit G: Temporal[G]): G[Response[F]] } }
rossabaker/http4s
server/shared/src/main/scala/org/http4s/server/middleware/Caching.scala
Scala
apache-2.0
6,987
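Applying the middleware to a service, per the API above:

import cats.effect.IO
import org.http4s.HttpApp
import org.http4s.server.middleware.Caching
import scala.concurrent.duration._

// Cache successful GET/HEAD responses publicly for one hour.
def withCaching(app: HttpApp[IO]): HttpApp[IO] =
  Caching.publicCache(1.hour, app)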
package lensimpl

package object data {
  type Id[A] = A

  def Id[A](a: A): Id[A] = a
}
julien-truffaut/LensImpl
core/src/main/scala/lensimpl/data/package.scala
Scala
mit
90
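Id is the identity type alias; values pass through unchanged:

import lensimpl.data._

val n: Id[Int] = Id(3)
val m: Int = n // Id[Int] is just Int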
object ch8_13 {
  import ch8.Prop._

  def listOf1[A](g: ch8.Gen[A]): ch8.SGen[List[A]] =
    ch8.SGen(n => g.listOfN(n max 1))

  val smallInt = ch8.Gen.choose(-10, 10)

  val maxProp1 = forAll(listOf1(smallInt)) { l =>
    val max = l.max
    !l.exists(_ > max) // No value greater than `max` should exist in `l`
  }
}

import ch8_13._

/* from repl you can test typing:
:load src/main/scala/fpinscala/ch6/RNG.scala
:load src/main/scala/fpinscala/ch6/State.scala
:load src/main/scala/fpinscala/ch8/Gen.scala
:load src/main/scala/fpinscala/ch8/Exercise13.scala
*/
rucka/fpinscala
src/main/scala/fpinscala/ch8/Exercise13.scala
Scala
gpl-2.0
564
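Assuming the ch8.Prop companion exposes the book's run helper (an assumption; it is not shown here), the property could then be exercised like this:

// Prints something like "+ OK, passed 100 tests." on success (assumed helper).
ch8.Prop.run(maxProp1)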
package org.velvia.filo import com.google.flatbuffers.Table import java.nio.ByteBuffer import java.sql.Timestamp import org.joda.time.DateTime import org.velvia.filo.codecs._ import org.velvia.filo.vector._ import org.velvia.filo.{vectors => bv} case class UnsupportedFiloType(vectType: Int, subType: Int) extends Exception(s"Unsupported Filo vector type $vectType, subType $subType") /** * VectorReader is a type class to help create FiloVector's from the raw Filo binary byte buffers -- * mostly parsing the header bytes and ensuring the creation of the right FiloVector parsing class. * * NOTE: I KNOW there is LOTS of repetition here, but apply() method is the inner loop and must be * super fast. Traits would slow it WAY down. Instead maybe we can use macros. */ object VectorReader { import WireFormat._ import TypedBufferReader._ implicit object BoolVectorReader extends PrimitiveVectorReader[Boolean] implicit object IntVectorReader extends PrimitiveVectorReader[Int] { override def makeDiffVector(dpv: DiffPrimitiveVector): FiloVector[Int] = { new DiffPrimitiveWrapper[Int, Int](dpv) { val base = baseReader.readInt(0) final def apply(i: Int): Int = base + dataReader.read(i) } } override val otherMaker: PartialFunction[(Int, Int, ByteBuffer), FiloVector[Int]] = { case (VECTORTYPE_BINSIMPLE, SUBTYPE_INT, b) => bv.IntBinaryVector.masked(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_INT_NOMASK, b) => bv.IntBinaryVector(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_REPEATED, b) => bv.IntBinaryVector.const(b) } } implicit object LongVectorReader extends PrimitiveVectorReader[Long] { override def makeDiffVector(dpv: DiffPrimitiveVector): FiloVector[Long] = { new DiffPrimitiveWrapper[Long, Long](dpv) { val base = baseReader.readLong(0) final def apply(i: Int): Long = base + dataReader.read(i) } } override val otherMaker: PartialFunction[(Int, Int, ByteBuffer), FiloVector[Long]] = { case (VECTORTYPE_BINSIMPLE, SUBTYPE_INT, b) => bv.LongBinaryVector.fromMaskedIntBuf(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_INT_NOMASK, b) => bv.LongBinaryVector.fromIntBuf(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_REPEATED, b) => bv.LongBinaryVector.const(b) case (VECTORTYPE_DELTA2, SUBTYPE_INT_NOMASK, b) => bv.DeltaDeltaVector(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_PRIMITIVE, b) => bv.LongBinaryVector.masked(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_PRIMITIVE_NOMASK, b) => bv.LongBinaryVector(b) } } implicit object DoubleVectorReader extends PrimitiveVectorReader[Double] { override val otherMaker: PartialFunction[(Int, Int, ByteBuffer), FiloVector[Double]] = { case (VECTORTYPE_BINSIMPLE, SUBTYPE_INT, b) => bv.DoubleVector.fromMaskedIntBuf(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_INT_NOMASK, b) => bv.DoubleVector.fromIntBuf(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_REPEATED, b) => bv.DoubleVector.const(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_PRIMITIVE, b) => bv.DoubleVector.masked(b) case (VECTORTYPE_BINSIMPLE, SUBTYPE_PRIMITIVE_NOMASK, b) => bv.DoubleVector(b) } } implicit object FloatVectorReader extends PrimitiveVectorReader[Float] implicit object StringVectorReader extends VectorReader[String] { def makeVector(buf: ByteBuffer, headerBytes: Int): FiloVector[String] = { (majorVectorType(headerBytes), vectorSubType(headerBytes)) match { case (VECTORTYPE_SIMPLE, SUBTYPE_STRING) => val ssv = SimpleStringVector.getRootAsSimpleStringVector(buf) new SimpleStringWrapper(ssv) case (VECTORTYPE_CONST, SUBTYPE_STRING) => val csv = ConstStringVector.getRootAsConstStringVector(buf) new ConstStringWrapper(csv) case (VECTORTYPE_DICT, SUBTYPE_STRING) => val dsv = 
DictStringVector.getRootAsDictStringVector(buf) new DictStringWrapper(dsv) { val intReader = TypedBufferReader[Int](reader, dsv.info.nbits, dsv.info.signed) final def getCode(i: Int): Int = intReader.read(i) } case (vectType, subType) => throw UnsupportedFiloType(vectType, subType) } } } implicit object UTF8VectorReader extends VectorReader[ZeroCopyUTF8String] { def makeVector(buf: ByteBuffer, headerBytes: Int): FiloVector[ZeroCopyUTF8String] = { (majorVectorType(headerBytes), vectorSubType(headerBytes)) match { case (VECTORTYPE_BINSIMPLE, SUBTYPE_UTF8) => bv.UTF8Vector(buf) case (VECTORTYPE_BINSIMPLE, SUBTYPE_FIXEDMAXUTF8) => bv.UTF8Vector.fixedMax(buf) case (VECTORTYPE_BINDICT, SUBTYPE_UTF8) => bv.DictUTF8Vector(buf) case (VECTORTYPE_BINSIMPLE, SUBTYPE_REPEATED) => bv.UTF8Vector.const(buf) case (vectType, subType) => throw UnsupportedFiloType(vectType, subType) } } } implicit object DateTimeVectorReader extends VectorReader[DateTime] { def makeVector(buf: ByteBuffer, headerBytes: Int): FiloVector[DateTime] = { (majorVectorType(headerBytes), vectorSubType(headerBytes)) match { case (VECTORTYPE_DIFF, SUBTYPE_DATETIME) => val ddtv = DiffDateTimeVector.getRootAsDiffDateTimeVector(buf) if (ddtv.tzLength == 0) { new DiffDateTimeWrapper(ddtv) } else { new DiffDateTimeWithTZWrapper(ddtv) } case (vectType, subType) => throw UnsupportedFiloType(vectType, subType) } } } implicit object TimestampVectorReader extends VectorReader[Timestamp] { def makeVector(buf: ByteBuffer, headerBytes: Int): FiloVector[Timestamp] = { (majorVectorType(headerBytes), vectorSubType(headerBytes)) match { case (VECTORTYPE_DIFF, SUBTYPE_PRIMITIVE) => val dpv = DiffPrimitiveVector.getRootAsDiffPrimitiveVector(buf) new DiffPrimitiveWrapper[Long, Timestamp](dpv) { val base = baseReader.readLong(0) final def apply(i: Int): Timestamp = new Timestamp(base + dataReader.read(i)) } case (vectType, subType) => throw UnsupportedFiloType(vectType, subType) } } } } /** * Implemented by specific Filo column/vector types. */ trait VectorReader[A] { /** * Creates a FiloVector based on the remaining bytes. Needs to decipher * what sort of vector it is and make the appropriate choice. * @param buf a ByteBuffer of the binary vector, with the position at right after * the 4 header bytes... 
at the beginning of FlatBuffers or whatever * @param the four byte headerBytes */ def makeVector(buf: ByteBuffer, headerBytes: Int): FiloVector[A] } // NOTE: we MUST @specialize here so that the apply method below will not create boxing class PrimitiveVectorReader[@specialized A: TypedReaderProvider] extends VectorReader[A] { import VectorReader._ import WireFormat._ def makeVector(buf: ByteBuffer, headerBytes: Int): FiloVector[A] = vectMaker((majorVectorType(headerBytes), vectorSubType(headerBytes), buf)) val fbbPrimitiveMaker: PartialFunction[(Int, Int, ByteBuffer), FiloVector[A]] = { case (VECTORTYPE_SIMPLE, SUBTYPE_PRIMITIVE, buf) => val spv = SimplePrimitiveVector.getRootAsSimplePrimitiveVector(buf) new SimplePrimitiveWrapper[A](spv) { val typedReader = TypedBufferReader[A](reader, spv.info.nbits, spv.info.signed) final def apply(i: Int): A = typedReader.read(i) } case (VECTORTYPE_CONST, SUBTYPE_PRIMITIVE, buf) => val spv = SimplePrimitiveVector.getRootAsSimplePrimitiveVector(buf) new SimplePrimitiveWrapper[A](spv) { val typedReader = TypedBufferReader[A](reader, spv.info.nbits, spv.info.signed) final def apply(i: Int): A = typedReader.read(0) } case (VECTORTYPE_DIFF, SUBTYPE_PRIMITIVE, buf) => val dpv = DiffPrimitiveVector.getRootAsDiffPrimitiveVector(buf) makeDiffVector(dpv) } val defaultMaker: PartialFunction[(Int, Int, ByteBuffer), FiloVector[A]] = { case (vectType, subType, _) => throw UnsupportedFiloType(vectType, subType) } def otherMaker: PartialFunction[(Int, Int, ByteBuffer), FiloVector[A]] = Map.empty lazy val vectMaker = otherMaker orElse fbbPrimitiveMaker orElse defaultMaker def makeDiffVector(dpv: DiffPrimitiveVector): FiloVector[A] = ??? }
velvia/filo
filo-scala/src/main/scala/org.velvia.filo/VectorReader.scala
Scala
apache-2.0
8,367
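A standalone sketch of the header-dispatch idea behind vectMaker: compose PartialFunctions over (majorType, subType), with a catch-all that raises an UnsupportedFiloType-style error (simplified types, not the library's):

import java.nio.ByteBuffer

type Maker = PartialFunction[(Int, Int, ByteBuffer), String]

val primitives: Maker = { case (1, 0, _) => "simple-primitive" }
val others: Maker     = { case (2, 0, _) => "dict-string" }
val fallback: Maker   = { case (t, s, _) =>
  throw new IllegalArgumentException(s"Unsupported Filo vector type $t, subType $s")
}

// Specific makers are tried first, exactly like
// otherMaker orElse fbbPrimitiveMaker orElse defaultMaker above.
val dispatch = others orElse primitives orElse fallback
dispatch((1, 0, ByteBuffer.allocate(0))) // "simple-primitive"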
package example.herding.cats

import cats._
import cats.data._
import cats.implicits._

object Cartesian extends App {
  // the type of hs will be
  // hs: List[Int ⇒ Int] = List(<function1>, <function1>, <function1>, <function1>)
  val hs = Functor[List].map(List(1, 2, 3, 4))({ (_: Int) * (_: Int) }.curried)
  val xs = Functor[List].map(hs) { _(9) }
  println(s"xs=${xs}")

  // Cartesian
  //
  // Cartesian defines product function, which produces a pair of (A, B) wrapped in effect F[_] out
  // of F[A] and F[B]. The symbolic alias for product is |@| also known as the applicative style.
  val x: Option[Int] = 9.some
  val y: Option[Int] = none[Int]
  println(s"x=${x}, y=${y}")

  // Option forms a Cartesian
  val threeCrossFive = (3.some |@| 5.some) map { _ - _ }
  println(s"threeCrossFive=${threeCrossFive}") // expects res: Option[Int] = Some(-2)

  val threeCrossNone = (3.some |@| none[Int]) map { _ - _ } // expects: None
  println(s"threeCrossNone=${threeCrossNone}")

  // See Cartesian definition
  // https://github.com/typelevel/cats/blob/master/core/src/main/scala/cats/syntax/cartesian.scala
  //
  // List as a Cartesian
  val ys = (List("a", "b", "c") |@| List(1, 2, 3)) map { _ + _ }
  println(s"List |@| List = ${ys}")

  // *> and <* operators
  println(s"1.some <* 2.some = ${1.some <* 2.some}") // expects: Some(1)
  println(s"1.some *> 2.some = ${1.some *> 2.some}") // expects: Some(2)

  // If either side fails, we get None
  println(s"none[Int] <* 2.some = ${none[Int] <* 2.some}") // expects: None
  println(s"none[Int] *> 2.some = ${none[Int] *> 2.some}") // expects: None

  println(s"List(1, 2) <* List(a, b) = ${List(1, 2) <* List("a", "b")}") // expects: List(1, 1, 2, 2)
  println(s"List(1, 2) *> List(a, b) = ${List(1, 2) *> List("a", "b")}") // expects: List(a, b, a, b)
}
stevenchen3/feed-cats
src/main/scala/herding-cats/Cartesian.scala
Scala
mit
1,816
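For reference, later cats versions deprecated the |@| syntax in favor of tupling with mapN; the same combination reads:

import cats.implicits._

val sum = (1.some, 2.some).mapN(_ + _) // Some(3)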
package io.udash package rest import com.avsystem.commons._ import com.avsystem.commons.rpc.AsRawReal import com.avsystem.commons.serialization.json.JsonStringOutput import com.avsystem.commons.serialization.{GenCodec, HasPolyGenCodec, flatten, whenAbsent} import io.udash.rest.openapi.adjusters._ import io.udash.rest.openapi.{Header => OASHeader, _} import io.udash.rest.raw._ import monix.execution.{FutureUtils, Scheduler} import scala.concurrent.Future import scala.concurrent.duration._ @description("Entity identifier") case class RestEntityId(value: String) extends AnyVal object RestEntityId extends RestDataWrapperCompanion[String, RestEntityId] sealed trait BaseEntity object BaseEntity extends RestDataCompanion[BaseEntity] @description("Flat sealed entity with some serious cases") @flatten sealed trait FlatBaseEntity extends BaseEntity object FlatBaseEntity extends RestDataCompanion[FlatBaseEntity] @description("REST entity") case class RestEntity( @description("entity id") id: RestEntityId, @whenAbsent("anonymous") name: String = whenAbsent.value, @description("recursive optional subentity") subentity: OptArg[RestEntity] = OptArg.Empty ) extends FlatBaseEntity object RestEntity extends RestDataCompanion[RestEntity] case class RestOtherEntity(fuu: Boolean, kek: List[String]) extends FlatBaseEntity case object SingletonEntity extends FlatBaseEntity case class CustomResp(value: String) object CustomResp { implicit val asResponse: AsRawReal[RestResponse, CustomResp] = AsRawReal.create( cr => RestResponse(200, IMapping.create("X-Value" -> PlainValue(cr.value)), HttpBody.plain("Yes")), resp => CustomResp(resp.headers("X-Value").value) ) implicit val restResponses: RestResponses[CustomResp] = new RestResponses[CustomResp] { def responses(resolver: SchemaResolver, schemaTransform: RestSchema[_] => RestSchema[_]): Responses = Responses(byStatusCode = Map(200 -> RefOr(Response( description = "Custom response", headers = Map("X-Value" -> RefOr(OASHeader( schema = RefOr(Schema.String) ))), content = Map(HttpBody.PlainType -> MediaType( schema = RefOr(Schema.String) )) )))) } } @description("binary bytes") case class Bytes(bytes: Array[Byte]) extends AnyVal object Bytes extends RestDataWrapperCompanion[Array[Byte], Bytes] case class ThirdParty(thing: Int) object ThirdPartyImplicits { implicit val thirdPartyCodec: GenCodec[ThirdParty] = GenCodec.materialize[ThirdParty] implicit val thirdPartySchema: RestSchema[ThirdParty] = RestStructure.materialize[ThirdParty].standaloneSchema } case class HasThirdParty(dur: ThirdParty) object HasThirdParty extends RestDataCompanionWithDeps[ThirdPartyImplicits.type, HasThirdParty] case class ErrorWrapper[T](error: T) object ErrorWrapper extends HasPolyGenCodec[ErrorWrapper] trait RestTestApi { @GET def trivialGet: Future[Unit] @GET def failingGet: Future[Unit] @GET def jsonFailingGet: Future[Unit] @GET def moreFailingGet: Future[Unit] @GET def neverGet: Future[Unit] @GET def wait(millis: Int): Future[String] @GET def getEntity(id: RestEntityId): Future[RestEntity] @pathDescription("path with a followed by b") @description("A really complex GET operation") @GET("multi/param") def complexGet( @Path("p1") p1: Int, @description("Very serious path parameter") @title("Stri") @Path p2: String, @Header("X-H1") h1: Int, @Header("X-H2") h2: String, q1: Int, @Query("q=2") @whenAbsent("q2def") q2: String = whenAbsent.value, @OptQuery @whenAbsent(Opt(42)) q3: Opt[Int], // @whenAbsent value must be completely ignored in this case @Cookie c1: Int, @Cookie("có") c2: String ): 
Future[RestEntity] @POST("multi/param") def multiParamPost( @Path("p1") p1: Int, @Path p2: String, @Header("X-H1") h1: Int, @Header("X-H2") h2: String, @Query q1: Int, @Query("q=2") q2: String, b1: Int, @Body("b\\"2") @description("weird body field") b2: String ): Future[RestEntity] @CustomBody @bodyDescription("Serious body") @responseDescription("Serious response") @PUT("") def singleBodyPut( @description("REST entity description") entity: RestEntity ): Future[String] @FormBody @POST def formPost( @Query q1: String, p1: String, @whenAbsent(42) p2: Int = whenAbsent.value ): Future[String] @pathSummary("summary for prefix paths") def prefix( p0: String, @Header("X-H0") h0: String, @Query @example("q0example") q0: String ): RestTestSubApi @Prefix("") def transparentPrefix: RestTestSubApi def complexParams( baseEntity: BaseEntity, @whenAbsent(Opt.Empty) flatBaseEntity: Opt[FlatBaseEntity] ): Future[Unit] @PUT def complexParams( flatBaseEntity: FlatBaseEntity, @whenAbsent(Opt.Empty) baseEntity: Opt[BaseEntity] ): Future[Unit] def customResponse(@Query value: String): Future[CustomResp] @CustomBody def binaryEcho(bytes: Array[Byte]): Future[Array[Byte]] @CustomBody def wrappedBinaryEcho(bytes: Bytes): Future[Bytes] @CustomBody def wrappedBody(id: RestEntityId): Future[RestEntityId] @CustomBody def thirdPartyBody(param: HasThirdParty): Future[HasThirdParty] } object RestTestApi extends DefaultRestApiCompanion[RestTestApi] { import Scheduler.Implicits.global val Impl: RestTestApi = new RestTestApi { def trivialGet: Future[Unit] = Future.unit def failingGet: Future[Unit] = Future.failed(HttpErrorException.plain(503, "nie")) def jsonFailingGet: Future[Unit] = Future.failed(HttpErrorException(503, HttpBody.json(JsonValue(JsonStringOutput.write(ErrorWrapper("nie")))))) def moreFailingGet: Future[Unit] = throw HttpErrorException.plain(503, "nie") def neverGet: Future[Unit] = Future.never def wait(millis: Int): Future[String] = FutureUtils.delayedResult(millis.millis)(s"waited $millis ms") def getEntity(id: RestEntityId): Future[RestEntity] = Future.successful(RestEntity(id, s"${id.value}-name")) def complexGet(p1: Int, p2: String, h1: Int, h2: String, q1: Int, q2: String, q3: Opt[Int], c1: Int, c2: String): Future[RestEntity] = Future.successful(RestEntity(RestEntityId(s"$p1-$h1-$q1-$c1"), s"$p2-$h2-$q2-${q3.getOrElse(".")}-$c2")) def multiParamPost(p1: Int, p2: String, h1: Int, h2: String, q1: Int, q2: String, b1: Int, b2: String): Future[RestEntity] = Future.successful(RestEntity(RestEntityId(s"$p1-$h1-$q1-$b1"), s"$p2-$h2-$q2-$b2")) def singleBodyPut(entity: RestEntity): Future[String] = Future.successful(entity.toString) def formPost(q1: String, p1: String, p2: Int): Future[String] = Future.successful(s"$q1-$p1-$p2") def prefix(p0: String, h0: String, q0: String): RestTestSubApi = RestTestSubApi.impl(s"$p0-$h0-$q0") def transparentPrefix: RestTestSubApi = RestTestSubApi.impl("") def complexParams(baseEntity: BaseEntity, flatBaseEntity: Opt[FlatBaseEntity]): Future[Unit] = Future.unit def complexParams(flatBaseEntity: FlatBaseEntity, baseEntity: Opt[BaseEntity]): Future[Unit] = Future.unit def customResponse(value: String): Future[CustomResp] = Future.successful(CustomResp(value)) def binaryEcho(bytes: Array[Byte]): Future[Array[Byte]] = Future.successful(bytes) def wrappedBinaryEcho(bytes: Bytes): Future[Bytes] = Future.successful(bytes) def wrappedBody(id: RestEntityId): Future[RestEntityId] = Future.successful(id) def thirdPartyBody(dur: HasThirdParty): Future[HasThirdParty] = 
Future.successful(dur) } } trait RestTestSubApi { @GET def subget(@Path p1: Int, @Header("X-H1") h1: Int, q1: Int): Future[String] } object RestTestSubApi extends DefaultRestApiCompanion[RestTestSubApi] { def impl(arg: String): RestTestSubApi = new RestTestSubApi { def subget(p1: Int, h1: Int, q1: Int): Future[String] = Future.successful(s"$arg-$p1-$h1-$q1") } }
UdashFramework/udash-core
rest/src/test/scala/io/udash/rest/RestTestApi.scala
Scala
apache-2.0
7,896
package rere.driver.pool.impl import rere.driver.pool.PoolShutdownResult case class StreamPoolShutdownResult( queriesStarted: Long, connectionsTurnedOff: Long ) extends PoolShutdownResult
pbaun/rere
modules/driver/src/main/scala/rere/driver/pool/impl/StreamPoolShutdownResult.scala
Scala
apache-2.0
200
/** * Copyright 2017 Interel * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package core3.core import akka.actor.{Actor, ActorRef, Props} import akka.pattern.{ask, pipe} import akka.util.Timeout import com.typesafe.config.ConfigRenderOptions import core3.config.{DynamicConfig, RuntimeConfig, StaticConfig} import core3.core.Component.{ActionResult, ExecuteAction} import core3.utils.Time.getCurrentTimestamp import core3.utils.Timestamp import play.api.libs.json.Json import scala.concurrent.{ExecutionContext, Future} import scala.util.control.NonFatal /** * System and component manager. * * The state of the system/app can be altered by setting System Modes ([[core3.core.ComponentManagerActor.SetMode]]) and/or * by sending action to components ([[core3.core.ComponentManagerActor.ExecuteComponentAction]]). * * @param components the components in use by the system */ class ComponentManagerActor(private val components: Map[String, ActorRef])(implicit ec: ExecutionContext, timeout: Timeout) extends Actor { import ComponentManagerActor._ private val modes = Map[SystemMode, ModeState]( SystemMode.Maintenance -> ModeState(state = false), SystemMode.Trace -> ModeState(state = false), SystemMode.Metrics -> ModeState(state = false) ) private def handle_ExecuteComponentAction( component: String, action: String, params: Option[Map[String, Option[String]]] ): Future[ActionResult] = { if (components.contains(component)) { (components(component) ? 
ExecuteAction(action, params)).mapTo[ActionResult] } else { Future.failed(new IllegalStateException(s"core3.core.ComponentManagerActor::handle_ExecuteComponentAction > Component [$component] does not exist.")) } } private def handle_GetComponents(): Future[ActionResult] = { Future.successful(ActionResult(wasSuccessful = true, data = Some(Json.toJson(components.keys)))) } private def handle_GetStaticConfig(): Future[ActionResult] = { val data = Json.parse(StaticConfig.get.root().render(ConfigRenderOptions.concise())) Future.successful(ActionResult(wasSuccessful = true, data = Some(data))) } private def handle_GetDynamicConfig(): Future[ActionResult] = { val data = Json.parse(DynamicConfig.get.root().render(ConfigRenderOptions.concise())) Future.successful(ActionResult(wasSuccessful = true, data = Some(data))) } private def handle_ReloadDynamicConfig(): Future[ActionResult] = { try { DynamicConfig.reload() Future.successful(ActionResult(wasSuccessful = true)) } catch { case NonFatal(e) => Future.successful(ActionResult(wasSuccessful = false, message = Some(e.getMessage))) } } private def handle_GetModes(): Future[ActionResult] = { val data = Json.toJson( modes.map { case (k, v) => k.toString -> Json.obj("state" -> v.state, "lastUpdated" -> v.lastUpdated) } ) Future.successful(ActionResult(wasSuccessful = true, message = None, data = Some(data))) } private def handle_GetMode(mode: SystemMode): Future[Boolean] = { if (modes.contains(mode)) { Future.successful(modes(mode).state) } else { Future.failed(new IllegalStateException(s"core3.core.ComponentManagerActor::handle_GetMode > Mode [$mode] is not supported.")) } } private def handle_SetMode(mode: SystemMode, enabled: Boolean): Future[ActionResult] = { if (modes.contains(mode)) { modes(mode).state = enabled modes(mode).lastUpdated = getCurrentTimestamp mode match { case SystemMode.Maintenance => RuntimeConfig.maintenance = enabled case SystemMode.Trace => RuntimeConfig.trace = enabled case SystemMode.Metrics => RuntimeConfig.metrics = enabled } Future.successful(ActionResult(wasSuccessful = true)) } else { Future.failed(new IllegalStateException(s"core3.core.ComponentManagerActor::handle_SetMode > Mode [$mode] is not supported.")) } } override def receive: Receive = { case ExecuteComponentAction(component, action, params) => handle_ExecuteComponentAction(component, action, params) pipeTo sender case GetComponents() => handle_GetComponents() pipeTo sender case GetStaticConfig() => handle_GetStaticConfig() pipeTo sender case GetDynamicConfig() => handle_GetDynamicConfig() pipeTo sender case ReloadDynamicConfig() => handle_ReloadDynamicConfig() pipeTo sender case GetModes() => handle_GetModes() pipeTo sender case GetMode(mode) => handle_GetMode(mode) pipeTo sender case SetMode(mode, enabled) => handle_SetMode(mode, enabled) pipeTo sender } } object ComponentManagerActor { sealed trait SystemMode object SystemMode { case object Maintenance extends SystemMode case object Trace extends SystemMode case object Metrics extends SystemMode } /** * Message for executing component actions. * * @param component the component to execute the specified action * @param action the action to be executed * @param params action parameters (if any) * @return Future[ [[core3.core.Component.ActionResult]] ] - the result of the action */ case class ExecuteComponentAction(component: String, action: String, params: Option[Map[String, Option[String]]] = None) /** * Message for retrieving all registered components. 
* * @return Future[ [[core3.core.Component.ActionResult]] ] - the list of components as a JSON value */ case class GetComponents() /** * Message for retrieving the current static configuration. * * @return Future[ [[core3.core.Component.ActionResult]] ] - the current static config as a JSON value */ case class GetStaticConfig() /** * Message for retrieving the current dynamic configuration. * * @return Future[ [[core3.core.Component.ActionResult]] ] - the current dynamic config as a JSON value */ case class GetDynamicConfig() /** * Message for reloading the current dynamic configuration. * * Note: See [[core3.config.DynamicConfig.reload]] for more information. * * @return Future[ [[core3.core.Component.ActionResult]] ] - nothing; not data/message is set */ case class ReloadDynamicConfig() /** * Message for retrieving the current system modes. * * @return Future[ [[core3.core.Component.ActionResult]] ] - the current system modes as a JSON value */ case class GetModes() /** * Message for retrieving the current state of the specified system mode. * * Note: For internal system use only. * * @param mode the mode to be checked * @return Future[Boolean] - true, if the specified mode is enabled */ case class GetMode(mode: SystemMode) /** * Message for setting the state of the specified system mode. * * @param mode the mode to be updated * @param enabled set to true, if the mode is to be enabled * @return Future[ [[core3.core.Component.ActionResult]] ] - nothing; no data/message is set */ case class SetMode(mode: SystemMode, enabled: Boolean) private case class ModeState(var lastUpdated: Timestamp = getCurrentTimestamp, var state: Boolean) def props( components: Map[String, ActorRef] )(implicit ec: ExecutionContext, timeout: Timeout): Props = Props( classOf[ComponentManagerActor], components, ec, timeout ) }
Interel-Group/core3
src/main/scala/core3/core/ComponentManagerActor.scala
Scala
apache-2.0
7,916
/* __ *\ ** ________ ___ / / ___ __ ____ Scala.js Test Suite ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ ** ** /____/\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \* */ package org.scalajs.testsuite.jsinterop import scala.language.implicitConversions import scala.scalajs.runtime.RuntimeLong import org.junit.Assert._ import org.junit.Assume._ import org.junit.Test import org.scalajs.testsuite.utils.AssertThrows._ import org.scalajs.testsuite.utils.Platform._ import scala.util.Try /** * test the runtime Long implementation directly * does not depend on magic compiler Long rewriting */ class RuntimeLongTest { // Short builders def lg(lo: Int, hi: Int): RuntimeLong = new RuntimeLong(lo, hi) def lg(i: Int): RuntimeLong = new RuntimeLong(i) // Common values val MaxVal = lg(0xffffffff, 0x7fffffff) val MinVal = lg(0, 0x80000000) val IntMaxVal = lg(Int.MaxValue) val IntMinVal = lg(Int.MinValue) val IntMaxValPlus1 = lg(0x80000000, 0) val IntMinValMinus1 = lg(2147483647, -1) val MaxSafeDouble = lg(-1, 2097151) val TwoPow53 = lg(0, 2097152) val MinSafeDouble = lg(1, -2097152) val NegTwoPow53 = lg(0, -2097152) // scala.scalajs.runtime.RuntimeLong @Test def sanity_of_equality_tests(): Unit = { assertEquals(123L + (456L << 32), lg(123, 456).toLong) assertEquals(lg(123, 456), lg(123, 456)) assertEquals(lg(456, 123), lg(456, 123)) assertNotEquals(123L + (4L << 32), lg(123, 456).toLong) assertNotEquals(lg(123, 4), lg(123, 456)) assertNotEquals(1L + (456L << 32), lg(123, 456).toLong) assertNotEquals(lg(1, 456), lg(123, 456)) assertNotEquals(123L, lg(123, 456).toLong) } @Test def equals_Any(): Unit = { assertFalse(lg(0, 0).equals(0: Any)) assertFalse(lg(0, 0).equals(null: Any)) assertTrue(lg(0, 0).equals(lg(0, 0): Any)) assertTrue(lg(123, 456).equals(lg(123, 456): Any)) assertTrue(lg(-123, 456).equals(lg(-123, 456): Any)) assertTrue(lg(-123, -456).equals(lg(-123, -456): Any)) assertFalse(lg(123, 456).equals(lg(-123, 456): Any)) assertFalse(lg(123, 456).equals(lg(123, -456): Any)) assertFalse(lg(-123, -456).equals(lg(123, -456): Any)) assertFalse(lg(-123, -456).equals(lg(-123, 456): Any)) } @Test def hashCode_as_specified_in_j_l_toFloat_strict(): Unit = { assertEquals(0, lg(0).hashCode()) assertEquals(0, lg(-1).hashCode()) assertEquals(55, lg(55).hashCode()) assertEquals(11, lg(-12).hashCode()) assertEquals(10006548, lg(10006548).hashCode()) assertEquals(1098747, lg(-1098748).hashCode()) assertEquals(957662195, lg(579906195, 461662560).hashCode()) assertEquals(-1075860794, lg(-1403218312, 327367870).hashCode()) assertEquals(1425294575, lg(-1152051636, -274640221).hashCode()) assertEquals(-1863811248, lg(1026519507, -1379463549).hashCode()) assertEquals(-881942797, lg(363765329, -557842270).hashCode()) assertEquals(548587254, lg(21652572, 569942698).hashCode()) assertEquals(-1328999812, lg(55820229, -1281708615).hashCode()) assertEquals(-1756412154, lg(-1843678104, 89453422).hashCode()) assertEquals(-529144798, lg(-1928579430, 1836700344).hashCode()) assertEquals(-1163319584, lg(-181377900, 1335444084).hashCode()) assertEquals(2070477069, lg(1189983760, 1032146717).hashCode()) assertEquals(-1718642695, lg(-1982789145, 274636318).hashCode()) assertEquals(260982265, lg(-2087901827, -1945935740).hashCode()) assertEquals(-385578983, lg(-1911332808, 1729620001).hashCode()) assertEquals(-1362397169, lg(-1920965295, 592125278).hashCode()) assertEquals(1419211160, lg(2017870028, 751907156).hashCode()) assertEquals(-1851816270, 
lg(1506336851, -933796127).hashCode()) assertEquals(112959880, lg(-1747722429, -1855422773).hashCode()) assertEquals(1715333902, lg(-2139132623, -431847873).hashCode()) assertEquals(-453690224, lg(739274932, -924496860).hashCode()) assertEquals(-1503679197, lg(-1482800071, 29485338).hashCode()) assertEquals(1950154296, lg(237609240, 2048220960).hashCode()) assertEquals(2037562473, lg(-431092385, -1623412426).hashCode()) assertEquals(220707473, lg(2144172772, 1927987317).hashCode()) assertEquals(1902658020, lg(971459211, 1217334127).hashCode()) assertEquals(840583449, lg(-530209544, -763367967).hashCode()) assertEquals(2065572837, lg(-1322671605, -902331922).hashCode()) assertEquals(407536450, lg(1361976000, 1231329666).hashCode()) assertEquals(-1678479110, lg(-96547475, 1640676759).hashCode()) assertEquals(-1558558486, lg(1799144078, -936998300).hashCode()) assertEquals(-110470482, lg(221720683, -195204411).hashCode()) assertEquals(992932874, lg(2080474705, 1194291803).hashCode()) assertEquals(2035378556, lg(-1962255291, -228903623).hashCode()) assertEquals(542449527, lg(-1961045404, -1421226733).hashCode()) assertEquals(-1824846728, lg(1762001719, -96661681).hashCode()) assertEquals(-985103709, lg(568630982, -458482587).hashCode()) assertEquals(37361715, lg(-1237704639, -1275053966).hashCode()) assertEquals(-1555729529, lg(936273516, -1802824213).hashCode()) assertEquals(1534845437, lg(-870754516, -1755138351).hashCode()) assertEquals(-715250396, lg(964079858, -332884522).hashCode()) assertEquals(2003953821, lg(1769001167, 503396434).hashCode()) assertEquals(1631287431, lg(811930233, 1365142270).hashCode()) assertEquals(-1393125048, lg(-280291442, 1136496326).hashCode()) assertEquals(926193137, lg(439731659, 755060794).hashCode()) assertEquals(1141998463, lg(-561661919, -1701561506).hashCode()) assertEquals(480895538, lg(1556104387, 1080665841).hashCode()) assertEquals(-849143869, lg(1931061917, -1099252386).hashCode()) assertEquals(-1840233445, lg(2086961898, -298531087).hashCode()) assertEquals(47538111, lg(-1148008529, -1186490352).hashCode()) assertEquals(540301593, lg(807317094, 271251327).hashCode()) assertEquals(1903332829, lg(1077071399, 826295290).hashCode()) assertEquals(-1325859168, lg(781949710, -1637653074).hashCode()) assertEquals(-1476869146, lg(1778433204, -839352494).hashCode()) assertEquals(84316181, lg(-2038023199, -2088719372).hashCode()) assertEquals(524038724, lg(-1764916235, -1980649039).hashCode()) assertEquals(-794988445, lg(-1796682086, 1148567289).hashCode()) assertEquals(-1285356617, lg(-1606200144, 320886535).hashCode()) assertEquals(1441713710, lg(755146140, 2028753842).hashCode()) assertEquals(365800340, lg(-1851453861, -2073516593).hashCode()) assertEquals(2130603708, lg(-543327214, -1587342674).hashCode()) assertEquals(-1414171289, lg(506958308, -1249713021).hashCode()) assertEquals(-262714124, lg(-2097389477, 1923820719).hashCode()) assertEquals(158195454, lg(-374932306, -523558320).hashCode()) assertEquals(50128093, lg(-902905695, -925752196).hashCode()) assertEquals(-825145129, lg(-397013030, 646399757).hashCode()) assertEquals(-1344834498, lg(1764398539, -956440075).hashCode()) assertEquals(-103814738, lg(-1750710329, 1852419689).hashCode()) assertEquals(-1354282241, lg(-1664538473, 864969320).hashCode()) assertEquals(1408148925, lg(-500471847, -1312439708).hashCode()) assertEquals(1910019874, lg(14748928, 1899600418).hashCode()) assertEquals(1877620608, lg(-1985642880, -431011584).hashCode()) assertEquals(-378358620, lg(494530531, 
-200582329).hashCode()) assertEquals(492633155, lg(-2067225228, -1718331081).hashCode()) assertEquals(-1581166836, lg(-1799546135, 897340901).hashCode()) assertEquals(174532880, lg(25821759, 200092463).hashCode()) assertEquals(-629188646, lg(403690141, -1032813241).hashCode()) assertEquals(2139225425, lg(-1843541251, -308529236).hashCode()) assertEquals(200043623, lg(1643311840, 1780391559).hashCode()) assertEquals(1992690082, lg(1531597671, 764172997).hashCode()) assertEquals(754072038, lg(638938496, 182932582).hashCode()) assertEquals(-139359279, lg(309356043, -440275494).hashCode()) assertEquals(-1669264515, lg(-541225182, 1128039519).hashCode()) assertEquals(25583899, lg(-387355169, -378598204).hashCode()) assertEquals(1822592670, lg(1787244135, 103129337).hashCode()) assertEquals(1468680630, lg(-1654639624, -890602930).hashCode()) assertEquals(2103231504, lg(-1867306675, -303043235).hashCode()) assertEquals(1159389820, lg(1255224728, 265017316).hashCode()) assertEquals(776506096, lg(119985367, 695098919).hashCode()) assertEquals(-1303579924, lg(-332671386, 1583817866).hashCode()) assertEquals(1108767081, lg(1610629865, 571880320).hashCode()) assertEquals(-1101969936, lg(727577343, -1794328817).hashCode()) assertEquals(-1022615009, lg(730759795, -394092436).hashCode()) assertEquals(-1221218252, lg(-148400203, 1074931585).hashCode()) assertEquals(410005178, lg(181091802, 314250080).hashCode()) assertEquals(1180107886, lg(-1934827635, -889463837).hashCode()) assertEquals(425308062, lg(-1067099255, -650316777).hashCode()) assertEquals(1727927187, lg(1821917070, 174468125).hashCode()) assertEquals(-759140792, lg(474121453, -830281051).hashCode()) assertEquals(1698140938, lg(-402668999, -2100801229).hashCode()) assertEquals(512144461, lg(-615008378, -976157749).hashCode()) } @Test def toString()(): Unit = { assertEquals("0", lg(0).toString()) assertEquals("1", lg(1).toString()) assertEquals("-1", lg(-1).toString()) assertEquals(Int.MaxValue.toString(), IntMaxVal.toString()) assertEquals("2147483648", IntMaxValPlus1.toString()) assertEquals(Int.MinValue.toString(), IntMinVal.toString()) assertEquals("-2147483649", IntMinValMinus1.toString()) assertEquals("999999999", lg(999999999).toString()) assertEquals("1000000000", lg(1000000000).toString()) assertEquals("9007199254740991", MaxSafeDouble.toString()) assertEquals("9007199254740992", TwoPow53.toString()) assertEquals("-9007199254740991", MinSafeDouble.toString()) assertEquals("-9007199254740992", NegTwoPow53.toString()) assertEquals("-86922", lg(-86922, -1).toString()) assertEquals("0", lg(0, 0).toString()) assertEquals("-21874015", lg(-21874015, -1).toString()) assertEquals("-2098921896914", lg(1317110830, -489).toString()) assertEquals("80985205273168", lg(-698060208, 18855).toString()) assertEquals("-12451732102972849", lg(858389071, -2899145).toString()) assertEquals("3350", lg(3350, 0).toString()) assertEquals("-92511590195450", lg(2005360390, -21540).toString()) assertEquals("-2", lg(-2, -1).toString()) assertEquals("446248293253325286", lg(1492984294, 103900277).toString()) assertEquals("499596119314678396", lg(116015740, 116321286).toString()) assertEquals("-3205893", lg(-3205893, -1).toString()) assertEquals("-88762100292970", lg(1988813462, -20667).toString()) assertEquals("-1278004", lg(-1278004, -1).toString()) assertEquals("-1", lg(-1, -1).toString()) assertEquals("-305393", lg(-305393, -1).toString()) assertEquals("-2", lg(-2, -1).toString()) assertEquals("80295210784300943", lg(-1678336113, 18695185).toString()) 
assertEquals("5", lg(5, 0).toString()) assertEquals("21", lg(21, 0).toString()) assertEquals("64", lg(64, 0).toString()) assertEquals("39146094", lg(39146094, 0).toString()) assertEquals("-1725731", lg(-1725731, -1).toString()) assertEquals("-768047304243556260", lg(-874655652, -178824949).toString()) assertEquals("-2726923242838", lg(380990122, -635).toString()) assertEquals("-1781092907033", lg(1318520807, -415).toString()) assertEquals("-213275", lg(-213275, -1).toString()) assertEquals("7662405832810", lg(184176746, 1784).toString()) assertEquals("-154157877107", lg(460945549, -36).toString()) assertEquals("-929963900939521435", lg(1586508389, -216524094).toString()) assertEquals("-6872", lg(-6872, -1).toString()) assertEquals("31842553544728", lg(-333987816, 7413).toString()) assertEquals("567569520305426", lg(-1817926382, 132147).toString()) assertEquals("19649016", lg(19649016, 0).toString()) assertEquals("-1349346", lg(-1349346, -1).toString()) assertEquals("9479824673588660", lg(-1372338764, 2207193).toString()) assertEquals("3521781", lg(3521781, 0).toString()) assertEquals("1740", lg(1740, 0).toString()) assertEquals("0", lg(0, 0).toString()) assertEquals("92834698468", lg(-1654582044, 21).toString()) assertEquals("-80139798970631138", lg(100400158, -18659001).toString()) assertEquals("30058", lg(30058, 0).toString()) assertEquals("-611022189550002", lg(1332815438, -142265).toString()) assertEquals("514941281681226", lg(472694602, 119894).toString()) assertEquals("2454759250363", lg(-1962042949, 571).toString()) assertEquals("14860137468144958", lg(1595551038, 3459895).toString()) assertEquals("-79255", lg(-79255, -1).toString()) assertEquals("2290122305310796", lg(-1501556660, 533210).toString()) assertEquals("-755641947927852310", lg(-463451414, -175936602).toString()) assertEquals("-2621852156570472370", lg(-771329970, -610447526).toString()) assertEquals("-37956135735", lg(698569929, -9).toString()) assertEquals("853219", lg(853219, 0).toString()) assertEquals("901", lg(901, 0).toString()) assertEquals("4385596303898", lg(434694682, 1021).toString()) assertEquals("-972597865", lg(-972597865, -1).toString()) assertEquals("-8057379", lg(-8057379, -1).toString()) assertEquals("-14968", lg(-14968, -1).toString()) assertEquals("-98204964", lg(-98204964, -1).toString()) assertEquals("335479", lg(335479, 0).toString()) assertEquals("-429441918886", lg(54810714, -100).toString()) assertEquals("9798741", lg(9798741, 0).toString()) assertEquals("135908509698671494", lg(-896875642, 31643665).toString()) assertEquals("-141095409221912371", lg(233027789, -32851335).toString()) assertEquals("-9040837797787104", lg(-359183840, -2104985).toString()) assertEquals("-889", lg(-889, -1).toString()) assertEquals("3222082994", lg(-1072884302, 0).toString()) assertEquals("-1454853", lg(-1454853, -1).toString()) assertEquals("547641844425", lg(-2113969463, 127).toString()) assertEquals("2528132853", lg(-1766834443, 0).toString()) assertEquals("242", lg(242, 0).toString()) assertEquals("-1655763891", lg(-1655763891, -1).toString()) assertEquals("82", lg(82, 0).toString()) assertEquals("-120254181", lg(-120254181, -1).toString()) assertEquals("-210088", lg(-210088, -1).toString()) assertEquals("-2", lg(-2, -1).toString()) assertEquals("250255458324299", lg(598888267, 58267).toString()) assertEquals("-100656997", lg(-100656997, -1).toString()) assertEquals("-24097181761", lg(1672622015, -6).toString()) assertEquals("206088", lg(206088, 0).toString()) assertEquals("-593", lg(-593, -1).toString()) 
assertEquals("-99542049", lg(-99542049, -1).toString()) assertEquals("421501", lg(421501, 0).toString()) assertEquals("-2", lg(-2, -1).toString()) assertEquals("-101", lg(-101, -1).toString()) assertEquals("3", lg(3, 0).toString()) assertEquals("14967492854", lg(2082590966, 3).toString()) assertEquals("-1528445803513883", lg(-86853659, -355870).toString()) assertEquals("26760588095306", lg(-1353126070, 6230).toString()) assertEquals("12452686330472", lg(1576139368, 2899).toString()) assertEquals("-130630407827875", lg(1022479965, -30415).toString()) assertEquals("-10281777615", lg(-1691843023, -3).toString()) assertEquals("-90497242609445", lg(2013284571, -21071).toString()) assertEquals("-13935178716929", lg(1990158591, -3245).toString()) assertEquals("-11308540", lg(-11308540, -1).toString()) assertEquals("545166", lg(545166, 0).toString()) assertEquals("-1043705339124703", lg(1778574369, -243007).toString()) assertEquals("510", lg(510, 0).toString()) assertEquals("-2485453027", lg(1809514269, -1).toString()) assertEquals("-15103", lg(-15103, -1).toString()) assertEquals("-168776672025670194", lg(-779514418, -39296382).toString()) } @Test def toByte(): Unit = { assertEquals(0, lg(0).toByte) assertEquals(-1, lg(-1).toByte) assertEquals(0x98.toByte, lg(0xfedcba98, 0x76543210).toByte) assertEquals(102, lg(-1755353242, -1245269156).toByte) assertEquals(77, lg(-359135667, 1391746928).toByte) assertEquals(-47, lg(-957203503, 1516742479).toByte) assertEquals(-22, lg(-1928741654, 1162703256).toByte) assertEquals(-113, lg(-1698228849, 1497186951).toByte) assertEquals(-84, lg(-68041812, -2115448390).toByte) assertEquals(33, lg(1534301729, 1468418695).toByte) assertEquals(113, lg(1101829489, -514588123).toByte) assertEquals(12, lg(-1437577204, 1896338488).toByte) assertEquals(86, lg(-857671082, -1304076936).toByte) assertEquals(-36, lg(-292818212, -1485650549).toByte) assertEquals(88, lg(1044510040, 147719255).toByte) assertEquals(107, lg(-1166136469, 78076997).toByte) assertEquals(61, lg(500131901, 248541787).toByte) assertEquals(99, lg(1863435363, -1465266670).toByte) assertEquals(-76, lg(136483252, 1662447178).toByte) assertEquals(0, lg(1787939584, 1303926235).toByte) assertEquals(-69, lg(2105657787, 845433223).toByte) assertEquals(26, lg(-1298285542, -1826340261).toByte) assertEquals(64, lg(-766959552, -326327606).toByte) } @Test def toShort(): Unit = { assertEquals(0, lg(0).toShort) assertEquals(-1, lg(-1).toShort) assertEquals(0xba98.toShort, lg(0xfedcba98, 0x76543210).toShort) assertEquals(-670, lg(1925512546, -812328457).toShort) assertEquals(-15861, lg(2028716555, -1639243756).toShort) assertEquals(9963, lg(-1970657557, -1904990267).toShort) assertEquals(18394, lg(-1012119590, -1704668195).toShort) assertEquals(-7956, lg(848486636, -810351120).toShort) assertEquals(21453, lg(2103989197, 955793808).toShort) assertEquals(22979, lg(-237938237, -703399620).toShort) assertEquals(8452, lg(666247428, -1109641927).toShort) assertEquals(-26563, lg(1824561213, -872828437).toShort) assertEquals(-5754, lg(-10950266, -1779965318).toShort) assertEquals(11796, lg(1251814932, -491043391).toShort) assertEquals(18020, lg(-117750172, -366379322).toShort) assertEquals(3768, lg(-2095575368, 965048164).toShort) assertEquals(-4579, lg(-177410531, 1454361289).toShort) assertEquals(-29102, lg(-359035310, -790126871).toShort) assertEquals(30020, lg(1486058820, 1675509542).toShort) assertEquals(-13051, lg(268881157, -342358099).toShort) assertEquals(-2720, lg(-1089211040, 747294820).toShort) assertEquals(4726, 
lg(1163661942, 1708185440).toShort) assertEquals(-16878, lg(-1363821038, -1952481751).toShort) } @Test def toInt(): Unit = { assertEquals(0, lg(0).toInt) assertEquals(-1, lg(-1).toInt) assertEquals(0xfedcba98, lg(0xfedcba98, 0x76543210).toInt) assertEquals(-1869423218, lg(-1869423218, -5516698).toInt) assertEquals(450655357, lg(450655357, -521592408).toInt) assertEquals(-596464514, lg(-596464514, 629510497).toInt) assertEquals(1668957409, lg(1668957409, 1231040344).toInt) assertEquals(-313016061, lg(-313016061, 283507721).toInt) assertEquals(-406779255, lg(-406779255, 1389322213).toInt) assertEquals(-1125423893, lg(-1125423893, -436921025).toInt) assertEquals(1491309031, lg(1491309031, 948401259).toInt) assertEquals(360542935, lg(360542935, -1033853853).toInt) assertEquals(178673916, lg(178673916, -2045867551).toInt) assertEquals(-1167644863, lg(-1167644863, 738699232).toInt) assertEquals(-1852739075, lg(-1852739075, 950841298).toInt) assertEquals(-1965326912, lg(-1965326912, 1694989583).toInt) assertEquals(-141857741, lg(-141857741, -1197558189).toInt) assertEquals(-938893686, lg(-938893686, 1763555645).toInt) assertEquals(-1178638558, lg(-1178638558, 299067184).toInt) assertEquals(-1296424902, lg(-1296424902, -1694453755).toInt) assertEquals(204387309, lg(204387309, -240738711).toInt) assertEquals(-942136876, lg(-942136876, -527367452).toInt) assertEquals(-1703892744, lg(-1703892744, 240186844).toInt) } @Test def toLong(): Unit = { assertEquals(0L, lg(0).toLong) assertEquals(-1L, lg(-1).toLong) assertEquals(0x76543210fedcba98L, lg(0xfedcba98, 0x76543210).toLong) assertEquals(6907420169189163269L, lg(-85753595, 1608259083).toLong) assertEquals(-6558938415102325809L, lg(539593679, -1527121853).toLong) assertEquals(-7633462319206780754L, lg(-379998034, -1777303946).toLong) assertEquals(-4051533910437546682L, lg(-655641274, -943321249).toLong) assertEquals(-3890339056676572253L, lg(1727460259, -905790147).toLong) assertEquals(-3091543614186826784L, lg(1824805856, -719806090).toLong) assertEquals(2806266116723834799L, lg(948567983, 653384746).toLong) assertEquals(-1741184441450532748L, lg(-957910924, -405401095).toLong) assertEquals(3395924718030703835L, lg(-433042213, 790675337).toLong) assertEquals(-7712245542997911283L, lg(889526541, -1795647094).toLong) assertEquals(-2751064647855401745L, lg(1316066543, -640532153).toLong) assertEquals(5225909624054208018L, lg(1913378322, 1216751901).toLong) assertEquals(1334025594846136121L, lg(-434813127, 310602037).toLong) assertEquals(-1574909139329823322L, lg(1689963942, -366687109).toLong) assertEquals(-9142211941778525044L, lg(754250892, -2128587091).toLong) assertEquals(-5517402195275269807L, lg(-1817691823, -1284620305).toLong) assertEquals(7612683537409046411L, lg(-222627957, 1772466007).toLong) assertEquals(-2955859733488660001L, lg(-1282993697, -688214725).toLong) assertEquals(462084382441397543L, lg(799857959, 107587404).toLong) assertEquals(8801656334077465992L, lg(2076251528, 2049295309).toLong) } @Test def toFloat_strict(): Unit = { assumeTrue("Assumed strict floats", hasStrictFloats) assertEquals(0, lg(0).toFloat) assertEquals(-1, lg(-1).toFloat) if (!isInFullOpt) { assertEquals(9.223372E18f, MaxVal.toFloat) assertEquals(-9.223372E18f, MinVal.toFloat) } else { // Closure seems to incorrectly rewrite the constant on the right :-( assertEquals(9.223372E18f, MaxVal.toFloat, 1E4f) assertEquals(-9.223372E18f, MinVal.toFloat, 1E4f) } assertEquals(4.7971489E18f, lg(-1026388143, 1116923232).toFloat) assertEquals(-2.24047663E18f, lg(-1288678667, 
-521651607).toFloat) assertEquals(4.59211416E18f, lg(1192262605, 1069184891).toFloat) assertEquals(3.38942079E18f, lg(-180353617, 789161022).toFloat) assertEquals(-6.8076878E18f, lg(-1158443188, -1585038363).toFloat) assertEquals(7.4159717E18f, lg(906981906, 1726665521).toFloat) assertEquals(-1.85275997E18f, lg(2042933575, -431379283).toFloat) assertEquals(5.7344188E18f, lg(599900903, 1335148382).toFloat) assertEquals(3.20410168E18f, lg(1458166084, 746013039).toFloat) assertEquals(-7.2310311E18f, lg(1956524672, -1683605603).toFloat) assertEquals(7.7151362E18f, lg(478583639, 1796320118).toFloat) assertEquals(1.41365268E18f, lg(-1645816617, 329141676).toFloat) assertEquals(-3.03197918E18f, lg(184187116, -705937657).toFloat) assertEquals(-4.04287594E18f, lg(659513335, -941305424).toFloat) assertEquals(-7.8204678E18f, lg(770505156, -1820844549).toFloat) assertEquals(-5.9733025E18f, lg(929928858, -1390767911).toFloat) assertEquals(1.1261721E18f, lg(-1475096259, 262207373).toFloat) assertEquals(4.00884963E18f, lg(787691795, 933383012).toFloat) assertEquals(-1.43511611E18f, lg(1189057493, -334139018).toFloat) assertEquals(3.81415059E18f, lg(-618946450, 888051141).toFloat) } @Test def toDouble(): Unit = { assertEquals(0, lg(0).toDouble) assertEquals(-1, lg(-1).toDouble) if (!isInFullOpt) { assertEquals(9.223372036854776E18, MaxVal.toDouble) assertEquals(-9.223372036854776E18, MinVal.toDouble) } else { // Closure seems to incorrectly rewrite the constant on the right :-( assertEquals(9.223372036854776E18, MaxVal.toDouble, 1E4) assertEquals(-9.223372036854776E18, MinVal.toDouble, 1E4) } assertEquals(3.4240179834317537E18, lg(-151011088, 797216310).toDouble) assertEquals(8.5596043411285968E16, lg(-508205099, 19929381).toDouble) assertEquals(-3.1630346897289943E18, lg(1249322201, -736451403).toDouble) assertEquals(-4.4847682439933604E18, lg(483575860, -1044191477).toDouble) assertEquals(-6.4014772289576371E17, lg(-1526343930, -149046007).toDouble) assertEquals(-1.76968119148756736E18, lg(531728928, -412036011).toDouble) assertEquals(-8.5606671350959739E18, lg(-734111585, -1993185640).toDouble) assertEquals(-9.0403963253949932E18, lg(-1407864332, -2104881296).toDouble) assertEquals(-6.4988752582247977E18, lg(-1712351423, -1513137310).toDouble) assertEquals(-7.7788492399114394E17, lg(1969244733, -181115448).toDouble) assertEquals(7.6357174849871442E18, lg(-907683842, 1777829016).toDouble) assertEquals(1.25338659134517658E18, lg(-815927209, 291826806).toDouble) assertEquals(-3.1910241505692349E18, lg(463523496, -742968207).toDouble) assertEquals(7.4216510087652332E18, lg(1482622807, 1727987781).toDouble) assertEquals(-8.189046896086654E18, lg(1170040143, -1906661060).toDouble) assertEquals(6.8316272807487539E18, lg(-85609173, 1590612176).toDouble) assertEquals(-8.0611115909320561E18, lg(-1212811257, -1876873801).toDouble) assertEquals(1.7127521901359959E18, lg(-648802816, 398781194).toDouble) assertEquals(-6.4442523492577423E18, lg(-1484519186, -1500419423).toDouble) assertEquals(-1.71264450938175027E18, lg(-2016996893, -398756124).toDouble) } @Test def comparisons(): Unit = { def test(x: RuntimeLong, y: RuntimeLong, expected: Int): Unit = { assertEquals(expected, x.compareTo(y).signum) assertEquals(expected, x.compareTo(y.toLong: java.lang.Long).signum) assertEquals(expected == 0, x.equals(y)) assertEquals(expected != 0, x.notEquals(y)) assertEquals(expected < 0, x < y) assertEquals(expected <= 0, x <= y) assertEquals(expected > 0, x > y) assertEquals(expected >= 0, x >= y) } test(lg(0), lg(0), 0) 
test(lg(0), lg(1), -1) test(lg(0), lg(-1), 1) test(MaxVal, MinVal, 1) test(MinVal, MaxVal, -1) // Positive and negative numbers requiring lo to be compared via unsigned test(lg(0x87654321, 0x654789ab), lg(0x12345678, 0x654789ab), 1) test(lg(0x87654321, 0x89abcdef), lg(0x12345678, 0x89abcdef), 1) // Workaround for https://code.google.com/p/v8/issues/detail?id=3304 test(lg(-1, 0), lg(0, 0), 1) test(lg(0, 0), lg(-1, 0), -1) test(lg(173547161, -1884162399), lg(173547161, -1884162399), 0) test(lg(-1131022787, -472928681), lg(-1131022787, -472928681), 0) test(lg(-1426164191, 1230100202), lg(-1426164191, 1230100202), 0) test(lg(-865774626, 1656835920), lg(-865774626, 1656835920), 0) test(lg(323675568, -725625271), lg(323675568, -725625271), 0) test(lg(-480943595, -1454872354), lg(-480943595, -1454872354), 0) test(lg(-626788852, 1037229194), lg(-626788852, 1037229194), 0) test(lg(-717389653, 232764759), lg(-717389653, 232764759), 0) test(lg(-861190423, -1233377930), lg(-861190423, -1233377930), 0) test(lg(-424759090, 2081288998), lg(-424759090, 2081288998), 0) test(lg(-1092215366, 753517982), lg(349136582, -103427916), 1) test(lg(363609757, -1151024787), lg(472951646, -1802702403), 1) test(lg(604332601, 1869576376), lg(1642523661, 1083165388), 1) test(lg(309732766, 1349689861), lg(1287300335, 1464464808), -1) test(lg(-1309668929, -965374553), lg(-1952664258, 53355972), -1) test(lg(1881957750, 388099413), lg(1843907319, -1819358211), 1) test(lg(-969542710, 864289013), lg(-1025874755, 1102102911), -1) test(lg(-1425636748, -220185411), lg(1184140796, 40447497), -1) test(lg(242386079, 452246653), lg(435337552, -956883630), 1) test(lg(-1007383056, 344856628), lg(-195994328, 635205577), -1) test(lg(-1652098619, 2042392045), lg(819672742, -2139008380), 1) test(lg(1423590080, 1919857862), lg(918443721, 1202178673), 1) test(lg(-1726296442, 302493002), lg(314727886, 1583734481), -1) test(lg(-2124336701, 769721099), lg(461146322, -591528218), 1) test(lg(1544826993, -689540243), lg(-1107003972, -1622786326), 1) test(lg(2050227802, 951848379), lg(-774454951, 1675192386), -1) test(lg(251298779, -327163776), lg(767615943, 1531730165), -1) test(lg(1890888425, 761833495), lg(1870917399, 2027251288), -1) test(lg(594868313, 126374530), lg(-1567484882, -1199917303), 1) test(lg(-914360997, -703435655), lg(2049249771, -1581791194), 1) test(lg(-732484281, -738997306), lg(1445589646, 1910084021), -1) test(lg(340771740, 1351224018), lg(459324247, 1301544548), 1) test(lg(-940710332, 1344186742), lg(-1143672211, 1112189558), 1) test(lg(-804347876, 364046111), lg(-4317439, -1733157379), 1) test(lg(914214836, -1226397169), lg(-299522125, 1393423940), -1) test(lg(1244546642, 1821771770), lg(44151604, -1398558064), 1) test(lg(-2094640323, -1469168677), lg(-263524564, 88152070), -1) test(lg(-124567753, -93039352), lg(-200449699, -30383890), -1) test(lg(161119306, -1098626173), lg(-137189625, 1289988889), -1) test(lg(-2052616761, 846341515), lg(-150583666, 1044666783), -1) test(lg(-10359669, -1628837253), lg(165345114, 1529503183), -1) test(lg(1717988228, 1622548180), lg(834798590, -1907713185), 1) test(lg(-1416372109, -353311343), lg(-722195813, -2060788759), 1) test(lg(980620531, -300588346), lg(-889348218, 1805452697), -1) test(lg(-465681479, 556544868), lg(-684386776, 724207906), -1) test(lg(1720493596, 1118244444), lg(2048914469, -789300492), 1) test(lg(-1259678249, -1557339417), lg(-1908141376, -468055129), -1) test(lg(1374750478, 1591281700), lg(1107931774, 1073828802), 1) test(lg(1307860622, -1769647645), lg(-1521056504, 
1476896409), -1) test(lg(1870719065, -606069057), lg(1219817813, -1063559023), 1) test(lg(-526519712, 1166848880), lg(-748095992, 59925642), 1) test(lg(-1011429486, -2053277854), lg(537284118, 1714076830), -1) test(lg(-669104363, -107157886), lg(1647426475, -1784147450), 1) test(lg(-389860398, 693324889), lg(1047633230, -1757663140), 1) test(lg(-200206281, 96771163), lg(613429570, -1206384633), 1) test(lg(-1436571081, -2050819200), lg(-665572561, 644211697), -1) test(lg(620796821, -567816428), lg(-109412350, -624638338), 1) test(lg(858464866, -2104597302), lg(-987329519, 1189618105), -1) test(lg(-1342634556, -1517778924), lg(-693373055, 142499537), -1) test(lg(1839280888, -168388422), lg(-1645740821, -1967920957), 1) } @Test def bitwise_not_~(): Unit = { assertEquals(lg(1664374422, 327449892), ~lg(-1664374423, -327449893)) assertEquals(lg(-2033180390, -1179462631), ~lg(2033180389, 1179462630)) assertEquals(lg(-1134559214, 581653069), ~lg(1134559213, -581653070)) assertEquals(lg(-304074638, -795726117), ~lg(304074637, 795726116)) assertEquals(lg(-1711832787, 1153070599), ~lg(1711832786, -1153070600)) assertEquals(lg(-1526506637, 966114536), ~lg(1526506636, -966114537)) assertEquals(lg(4362923, 1155261397), ~lg(-4362924, -1155261398)) assertEquals(lg(-1976846289, -68873334), ~lg(1976846288, 68873333)) assertEquals(lg(-980717878, -1171857118), ~lg(980717877, 1171857117)) assertEquals(lg(1087568370, 543704246), ~lg(-1087568371, -543704247)) assertEquals(lg(466027718, 693030605), ~lg(-466027719, -693030606)) assertEquals(lg(457333958, 1344424074), ~lg(-457333959, -1344424075)) assertEquals(lg(-1195369388, -1211454825), ~lg(1195369387, 1211454824)) assertEquals(lg(1637646574, 618600148), ~lg(-1637646575, -618600149)) assertEquals(lg(1882417448, 81477816), ~lg(-1882417449, -81477817)) assertEquals(lg(-755550612, -520392566), ~lg(755550611, 520392565)) assertEquals(lg(-754282895, -1550447287), ~lg(754282894, 1550447286)) assertEquals(lg(949172349, -708028075), ~lg(-949172350, 708028074)) assertEquals(lg(1587810906, -1344614950), ~lg(-1587810907, 1344614949)) assertEquals(lg(-1761617639, -353615615), ~lg(1761617638, 353615614)) assertEquals(lg(-153730678, 249152220), ~lg(153730677, -249152221)) assertEquals(lg(-189227914, 2071190797), ~lg(189227913, -2071190798)) assertEquals(lg(-853867870, 445686068), ~lg(853867869, -445686069)) assertEquals(lg(-779434875, 417640992), ~lg(779434874, -417640993)) assertEquals(lg(1997707715, -1100729422), ~lg(-1997707716, 1100729421)) assertEquals(lg(1171311729, -1236578928), ~lg(-1171311730, 1236578927)) assertEquals(lg(-833922040, 1773972621), ~lg(833922039, -1773972622)) assertEquals(lg(1414648869, 1222586075), ~lg(-1414648870, -1222586076)) assertEquals(lg(1123832582, -1270176018), ~lg(-1123832583, 1270176017)) assertEquals(lg(1163066309, 237396271), ~lg(-1163066310, -237396272)) assertEquals(lg(-1826566063, 509270117), ~lg(1826566062, -509270118)) assertEquals(lg(-450318543, 1650640099), ~lg(450318542, -1650640100)) assertEquals(lg(1461907704, -27364749), ~lg(-1461907705, 27364748)) assertEquals(lg(1012261256, 1691289854), ~lg(-1012261257, -1691289855)) assertEquals(lg(-1929178874, 1804481536), ~lg(1929178873, -1804481537)) assertEquals(lg(-888719200, -1846455123), ~lg(888719199, 1846455122)) assertEquals(lg(984231682, -867292444), ~lg(-984231683, 867292443)) assertEquals(lg(2105026705, -16146223), ~lg(-2105026706, 16146222)) assertEquals(lg(1742028653, -1648876191), ~lg(-1742028654, 1648876190)) assertEquals(lg(1922039594, -60702355), ~lg(-1922039595, 
60702354)) assertEquals(lg(264728648, 275960741), ~lg(-264728649, -275960742)) assertEquals(lg(1237639032, -1761272007), ~lg(-1237639033, 1761272006)) assertEquals(lg(1118919822, 901486922), ~lg(-1118919823, -901486923)) assertEquals(lg(18001220, -1121574637), ~lg(-18001221, 1121574636)) assertEquals(lg(2122002356, -1370943785), ~lg(-2122002357, 1370943784)) assertEquals(lg(2006182035, -1422441078), ~lg(-2006182036, 1422441077)) assertEquals(lg(1314896174, 460075839), ~lg(-1314896175, -460075840)) assertEquals(lg(1829402918, -1031934892), ~lg(-1829402919, 1031934891)) assertEquals(lg(-2138673173, -107590306), ~lg(2138673172, 107590305)) assertEquals(lg(1382443514, -56307753), ~lg(-1382443515, 56307752)) } @Test def bitwise_or_|(): Unit = { assertEquals(lg(1467334397, -608514), lg(1198889513, -170491266) | lg(356560637, 1244673694)) assertEquals(lg(-1645778056, 796647391), lg(-1930990792, 627822941) | lg(-1849669008, 185716690)) assertEquals(lg(2121785322, -3735189), lg(711185578, -154795743) | lg(1446469570, -104529814)) assertEquals(lg(401988479, 1357601567), lg(356565628, 275405582) | lg(380967239, 1356925723)) assertEquals(lg(-167780425, -167778583), lg(1968397619, -447093015) | lg(-1242708043, 1353146913)) assertEquals(lg(-34603479, -565777), lg(-2121965024, -76133937) | lg(2104409609, -1365814226)) assertEquals(lg(-537280529, -10535202), lg(1496398822, -548061626) | lg(-556169301, -245689186)) assertEquals(lg(2132402169, -1093993487), lg(856203065, -1102382704) | lg(1276763344, 377524977)) assertEquals(lg(500957183, -5777537), lg(474066920, -215674305) | lg(366737695, 530830706)) assertEquals(lg(-1077937506, 1876426559), lg(-1543310820, 664058893) | lg(1002387606, 1826081595)) assertEquals(lg(-2121745, -302649859), lg(1606847457, -857707283) | lg(-82108753, 628476252)) assertEquals(lg(2113649662, -9748643), lg(703699686, -1218298019) | lg(1575693246, -565500071)) assertEquals(lg(1845274268, 1608495102), lg(1281663616, 1255777790) | lg(1708663964, 1604300502)) assertEquals(lg(-174066179, 1861146349), lg(-1315547660, 1726760037) | lg(-442781559, 235328140)) assertEquals(lg(2139059199, -40115785), lg(2014986997, -1130692301) | lg(124088654, 1637408903)) assertEquals(lg(-4195861, -679630869), lg(1653153899, 1412277603) | lg(-1615398494, -682581111)) assertEquals(lg(601802239, 1937620978), lg(551077237, 1349033186) | lg(597575118, 1662855120)) assertEquals(lg(-1383162189, -1107312899), lg(613289137, -1123701660) | lg(-1383294317, 369006329)) assertEquals(lg(-141299717, -576585865), lg(-418175046, -593383309) | lg(1468132939, 360734532)) assertEquals(lg(1998808831, -86066691), lg(1428236018, -1294026291) | lg(572735565, 1213340152)) assertEquals(lg(-1680360554, -738459673), lg(-1949058688, -1013245209) | lg(416580246, 300148007)) assertEquals(lg(-1073808964, -183288105), lg(-1746245220, 1427323605) | lg(-1185613404, -469621610)) assertEquals(lg(1475346349, 1845485055), lg(1445648649, 701317455) | lg(1407661733, 1287118327)) assertEquals(lg(-33566733, -268503975), lg(-1861500445, 764080137) | lg(-33812527, -411163560)) assertEquals(lg(-286605413, 1602191341), lg(-1408712806, 393166157) | lg(1323973395, 1580353248)) assertEquals(lg(-553947394, -2013546505), lg(-2072304578, -2142600249) | lg(-625840402, -2018265417)) assertEquals(lg(-553746946, -140321), lg(450125308, 1742298015) | lg(-999674466, -89794491)) assertEquals(lg(-16643, -68193313), lg(1239068904, -68194107) | lg(-1092247939, -639552609)) assertEquals(lg(-52733444, -1159005505), lg(-2075047684, -1706497393) | lg(-119858776, 
-1461536706)) assertEquals(lg(-121509406, 1048526839), lg(-1065293728, 1045575815) | lg(943802850, 4130803)) assertEquals(lg(1844952571, -1327497834), lg(1688647147, -1327540094) | lg(1767049400, -1609892586)) assertEquals(lg(-5046291, -1345721876), lg(-207425559, 231270892) | lg(515004644, -1349918716)) assertEquals(lg(-1075861506, -67698709), lg(781813534, 1274454635) | lg(-1814682890, -1182466103)) assertEquals(lg(2144796219, -17303617), lg(1792206347, -54265949) | lg(931436592, -625499620)) assertEquals(lg(-874545153, -1611301156), lg(-1957992337, 421859924) | lg(1138122674, -1896513908)) assertEquals(lg(-1218644010, -67141891), lg(-1220262128, 1790926509) | lg(-2107837994, -245286664)) assertEquals(lg(-2555905, 2146160604), lg(-485426246, 2122993116) | lg(-1077361187, 795578180)) assertEquals(lg(999978447, 2129346287), lg(713580935, 2059541733) | lg(957494730, 1688940106)) assertEquals(lg(-836113, 1983903423), lg(-181332639, 608154803) | lg(787627150, 1378378253)) assertEquals(lg(-273220891, -1242040457), lg(-944448827, -1528432780) | lg(-374967708, 364320051)) assertEquals(lg(-52433921, -1615929419), lg(1822361801, -1626992863) | lg(-1865553026, -1867721804)) assertEquals(lg(-1646593, -1583649), lg(-333036705, -39743141) | lg(-136127263, -404241201)) assertEquals(lg(-105959457, -50406273), lg(1342309595, 143297662) | lg(-1448137844, -50933699)) assertEquals(lg(-480707585, -87100434), lg(-514802766, 718197230) | lg(1113082335, -259890518)) assertEquals(lg(-73693249, -555903498), lg(-476348284, -1025699402) | lg(1518405435, 1545110880)) assertEquals(lg(-1646871041, -403194029), lg(-2058311589, 1135057747) | lg(-1664731675, -1535754941)) assertEquals(lg(-203423937, -34342961), lg(333362997, -34482226) | lg(-205173969, 1754490115)) assertEquals(lg(2083487743, -159909991), lg(2083354303, -2043490039) | lg(1344953817, -195725679)) assertEquals(lg(-134268937, -680984614), lg(-942983837, -683124136) | lg(909452980, -1021249590)) assertEquals(lg(-17107060, -35914117), lg(-402624124, -505696678) | lg(-688199800, 2110291577)) } @Test def bitwise_and_&(): Unit = { assertEquals(lg(-2012982272, 17896961), lg(-1973652216, 353474049) & lg(-576365513, -1546420349)) assertEquals(lg(440467456, -805024688), lg(2054268182, -735220496) & lg(-1706223071, -653894309)) assertEquals(lg(-1073741824, -2144861952), lg(-761230816, -1888512251) & lg(-988806710, -256349768)) assertEquals(lg(-1977056222, -1878455803), lg(-834874333, -101893315) & lg(-1964333382, -1877225849)) assertEquals(lg(-1069166300, 304091682), lg(-767041747, 1403541430) & lg(-320482908, 442929698)) assertEquals(lg(193986570, 67633664), lg(1538292767, 67928849) & lg(261587146, 2097883842)) assertEquals(lg(167772308, 35669040), lg(448790964, 1852174074) & lg(-284620129, 35804464)) assertEquals(lg(540801, 554500096), lg(123267521, 1965916169) & lg(-401979731, 588194498)) assertEquals(lg(-1878826824, 268436097), lg(-1725202754, 324931273) & lg(-1240211271, 948007557)) assertEquals(lg(306780164, 8388625), lg(1044995460, -1447811559) & lg(1381579300, 378161591)) assertEquals(lg(29904144, 12096051), lg(1640550232, -1980050765) & lg(-1613988461, 381206391)) assertEquals(lg(-963297278, 537741320), lg(-810205145, 832395272) & lg(-153237294, -1368559681)) assertEquals(lg(-2138566639, -1881372656), lg(-2087037677, -539042218) & lg(-1930915595, -1879201391)) assertEquals(lg(348136448, 1461360), lg(936077102, 1888906741) & lg(-590306112, 153013360)) assertEquals(lg(-2147459072, 50628864), lg(-1520343420, -480326676) & lg(-1031638712, 463833361)) 
    assertEquals(lg(-805279656, -972355264), lg(-603625122, -837874740) & lg(-266310439, -433325742))
    assertEquals(lg(1763723264, 1095287337), lg(2101242821, 1363798717) & lg(-337523686, -1007893653))
    assertEquals(lg(1296302405, 1947206722), lg(-849542331, 2084521938) & lg(1866786159, -179258269))
    assertEquals(lg(1275593362, 814484868), lg(1283984114, 1922846117) & lg(-42342754, 948944324))
    assertEquals(lg(1081520, 35397649), lg(18451376, 39592223) & lg(-300891980, 43819665))
    assertEquals(lg(539714600, -1617688304), lg(1772840110, -1611388521) & lg(876572201, -1080057992))
    assertEquals(lg(268660738, 1111507460), lg(-1792575438, 1131693597) & lg(2026108738, -691967420))
    assertEquals(lg(-1977139054, 2393104), lg(-1977130853, 1105495064) & lg(-289941322, 37545108))
    assertEquals(lg(-2145341308, -1333516032), lg(-1590955612, -1330697458) & lg(-924798828, -1177272879))
    assertEquals(lg(-1503395487, -299827136), lg(-285931035, -293654078) & lg(-1486596765, -31342500))
    assertEquals(lg(1233401994, 34091008), lg(1237743775, -1293389691) & lg(1803860874, 1175174664))
    assertEquals(lg(-932558672, 270533826), lg(-839976008, 900736195) & lg(-362132238, -668577850))
    assertEquals(lg(117477888, 473995424), lg(1202887172, 484547048) & lg(793351913, -1622877017))
    assertEquals(lg(302600257, -2030040226), lg(1393155525, -2025583778) & lg(-1164217783, -416769026))
    assertEquals(lg(145293649, 536871648), lg(-658787467, -1534848013) & lg(770509273, 861439716))
    assertEquals(lg(1546608834, 302001248), lg(1550840002, 1588870758) & lg(2084528882, 302148833))
    assertEquals(lg(201606209, -695465177), lg(481609689, -152204489) & lg(1279544421, -561242137))
    assertEquals(lg(608207492, -2112820352), lg(-1529763097, -1978531900) & lg(641783708, -2039026814))
    assertEquals(lg(270672860, -1476361723), lg(887514076, -129985897) & lg(423346174, -1364800691))
    assertEquals(lg(606102544, -503185240), lg(1736270961, -223672071) & lg(748709016, -498985816))
    assertEquals(lg(144970344, 74547586), lg(413438572, 628333003) & lg(-1964689415, -2039117914))
    assertEquals(lg(0, 33646849), lg(-1441786846, -952014445) & lg(1364118108, 582220621))
    assertEquals(lg(886489100, -1836576552), lg(-167845571, -610782244) & lg(920048140, -1832380167))
    assertEquals(lg(181408260, 8425760), lg(1070668735, 1223734716) & lg(1255200260, 310500128))
    assertEquals(lg(18633796, 1494253868), lg(565998918, 2102701486) & lg(1230790357, -651115716))
    assertEquals(lg(1242169472, 1074954242), lg(1259021457, -988117846) & lg(-95497780, 2025257730))
    assertEquals(lg(202639938, 134272082), lg(236334914, 210367602) & lg(-1388488109, 672191707))
    assertEquals(lg(955253125, 1994661641), lg(2029259749, 2012495659) & lg(-1125022313, -17866867))
    assertEquals(lg(134242336, 1377566768), lg(2078335024, -748696528) & lg(-1944488853, 1455161657))
    assertEquals(lg(883214088, 536873986), lg(1962270604, 747650594) & lg(1051641707, -1606005365))
    assertEquals(lg(203000132, 19923458), lg(504991188, 623990339) & lg(-1919047324, 331123498))
    assertEquals(lg(274893395, 1881151488), lg(409659995, 1887189252) & lg(384277491, 1973591160))
    assertEquals(lg(115235, 335685459), lg(872793907, 353626075) & lg(34859627, 1988247415))
    assertEquals(lg(538493100, 441057288), lg(-1407266644, 441386073) & lg(1635378940, -548742904))
    assertEquals(lg(839516176, 671232089), lg(844761371, 1022505085) & lg(1930384912, 688275291))
  }

  @Test def bitwise_xor_^(): Unit = {
    assertEquals(lg(1342248740, -313223199), lg(690404572, -1279287229) ^ lg(2032643064, 1592473506))
    assertEquals(lg(-1691405730, 274213753), lg(1880634009, 1433776255) ^ lg(-348716857, 1160616710))
    assertEquals(lg(882329013, -513228751), lg(-958227509, 287282926) ^ lg(-227156354, -260614433))
    assertEquals(lg(1416185065, -1664302164), lg(-266860160, 1815641996) ^ lg(-1536078487, -252396512))
    assertEquals(lg(-1268929640, 1388542260), lg(1278830943, 22194981) ^ lg(-127614265, 1402065425))
    assertEquals(lg(2107251545, -1588280474), lg(-865349911, -84319450) ^ lg(-1309551184, 1538105408))
    assertEquals(lg(-1128180942, 150893828), lg(-1973252863, -1969367363) ^ lg(916708915, -2107399239))
    assertEquals(lg(-721878765, 35051090), lg(2098389933, -3394272) ^ lg(-1444158786, -35986574))
    assertEquals(lg(-1863503396, 535478572), lg(533612062, -1712875225) ^ lg(-1893500990, -2045945845))
    assertEquals(lg(1732708730, -1611595623), lg(799833325, 2072025633) ^ lg(1223390615, -462316872))
    assertEquals(lg(-757432261, -1755342186), lg(570370215, 1665373667) ^ lg(-215635812, -199487627))
    assertEquals(lg(755676969, 926086823), lg(-1440978805, 1756956707) ^ lg(-2028544094, 1603010180))
    assertEquals(lg(1331057947, 1347408402), lg(-1788434031, -203193594) ^ lg(-634323830, -1548988140))
    assertEquals(lg(596183682, -256181831), lg(-1101798994, 1399594232) ^ lg(-1646597332, -1546197695))
    assertEquals(lg(1360009516, 182700672), lg(-1432962218, -1631098948) ^ lg(-75062662, -1809535684))
    assertEquals(lg(594798246, -124892913), lg(699430210, 902448324) ^ lg(180589540, -851178037))
    assertEquals(lg(-1331407219, 1819608371), lg(-1873118605, -20501824) ^ lg(553528574, -1833816077))
    assertEquals(lg(1679931669, 470452622), lg(-693963147, 616673404) ^ lg(-1300017312, 952842738))
    assertEquals(lg(1861159718, -1488989292), lg(1250421224, 1104113895) ^ lg(610853582, -420437133))
    assertEquals(lg(1056597675, -102857583), lg(-611286212, -1550148499) ^ lg(-445979241, 1514412284))
    assertEquals(lg(255992058, 1610836280), lg(1704771515, 1382796179) ^ lg(1792974657, 845718187))
    assertEquals(lg(315376042, 566682776), lg(1042258124, 728098489) ^ lg(752081254, 178455073))
    assertEquals(lg(-185728083, -2076881789), lg(-1887944331, 1039677246) ^ lg(2073445080, -1177715779))
    assertEquals(lg(22829354, 1511361245), lg(1986213921, -1875380784) ^ lg(2000642315, -903708915))
    assertEquals(lg(-1209040105, 1698106233), lg(365179043, -418125319) ^ lg(-1574194252, -2111511936))
    assertEquals(lg(-2034371369, -364230501), lg(-376038790, 1936322298) ^ lg(1865150125, -1725716895))
    assertEquals(lg(-324294323, -1435696355), lg(182372182, -1389399582) ^ lg(-428511717, 121795327))
    assertEquals(lg(-1632322296, 110394084), lg(408417754, -547668779) ^ lg(-2031925038, -640727503))
    assertEquals(lg(1545363539, -418308022), lg(1515701412, 860890032) ^ lg(105620727, -733936646))
    assertEquals(lg(-2124553361, 1571601224), lg(144626057, 2121098703) ^ lg(-1983696154, 599907975))
    assertEquals(lg(-508527758, 679546956), lg(1716685092, -647833300) ^ lg(-2015169962, -236730016))
    assertEquals(lg(-703803607, -1904715404), lg(-2016515438, -1674300757) ^ lg(1371710907, 306998239))
    assertEquals(lg(-1295788899, 1052686696), lg(-547404938, -860356684) ^ lg(1838979051, -234273060))
    assertEquals(lg(-1416482745, -1744821078), lg(1034397763, 1158948099) ^ lg(-1774872572, -585891415))
    assertEquals(lg(-420256974, -1759976200), lg(1755131065, -847055172) ^ lg(-1905373301, 1520046660))
    assertEquals(lg(-1978435977, -1613559541), lg(755114159, 1707687361) ^ lg(-1492035880, -98945846))
    assertEquals(lg(1517584033, -1108617107), lg(1110955283, -394871226) ^ lg(407088050, 1436378667))
    assertEquals(lg(1706214170, -555203143), lg(729918767, -1047522396) ^ lg(1311993397, 527980061))
    assertEquals(lg(-278231087, -1148948163), lg(-1533968339, 1826223468) ^ lg(1274742780, -681737135))
    assertEquals(lg(-204001370, 1220298027), lg(230297309, -219465279) ^ lg(-26402437, -1168671510))
    assertEquals(lg(-1169385448, -2039889677), lg(-1364422220, 1487677662) ^ lg(350226860, -557455315))
    assertEquals(lg(791138554, 668046473), lg(-1049451753, 1883174397) ^ lg(-296389651, 1475305844))
    assertEquals(lg(2103687665, 1121138741), lg(-895088167, 1303802204) ^ lg(-1211781080, 258296169))
    assertEquals(lg(-387978954, 908804328), lg(1409034242, -1162000487) ^ lg(-1155284684, -1936324751))
    assertEquals(lg(1265820840, 1142688859), lg(861082066, -475962819) ^ lg(2015491450, -1480757658))
    assertEquals(lg(1490973918, -277478122), lg(-288714491, 1935424926) ^ lg(-1240144421, -1674954616))
    assertEquals(lg(1839163014, 362842460), lg(-699164585, -731232280) ^ lg(-1144193327, -1043673420))
    assertEquals(lg(634920094, -2001579101), lg(683993930, 248552821) ^ lg(220002260, -2040344874))
    assertEquals(lg(-831642917, -817908795), lg(640417317, 298956382) ^ lg(-398074626, -554826341))
    assertEquals(lg(857398449, 1711937081), lg(-1493347776, 1187436882) ^ lg(-1779986703, 550293355))
  }

  @Test def shift_left_<<(): Unit = {
    assertEquals(lg(1065353216, -691528727), lg(-1875389825, 1268606893) << -73329513)
    assertEquals(lg(671088640, -1046568266), lg(869553861, -291578632) << -339545061)
    assertEquals(lg(0, 0), lg(543726956, -1753066291) << -809014658)
    assertEquals(lg(-754974720, -1479892363), lg(-895322669, 847749031) << 1030973528)
    assertEquals(lg(0, 1696595968), lg(1598039634, 819660072) << 82069876)
    assertEquals(lg(0, -763223040), lg(-151740279, -595601314) << 503039850)
    assertEquals(lg(0, -1360527360), lg(-1702267427, 1115684531) << 1171866675)
    assertEquals(lg(508125184, -784066052), lg(-807341493, 286689824) << -1938771891)
    assertEquals(lg(-551288832, 439734876), lg(-382832750, -2134078182) << 1537970769)
    assertEquals(lg(-1409069728, 1129787), lg(-580904341, 939559401) << 1856717061)
    assertEquals(lg(1711276032, 1295846454), lg(-198125160, 663832884) << 1561097110)
    assertEquals(lg(-1004724328, -940313723), lg(-1199332365, -1728151952) << 858801923)
    assertEquals(lg(-1029298112, -1523092059), lg(773140802, -181814355) << 1110910853)
    assertEquals(lg(536870912, 200145086), lg(1601160689, 869229832) << -338843811)
    assertEquals(lg(0, -1735502848), lg(-1919381932, -201750119) << -813015128)
    assertEquals(lg(-1727917056, 2104066035), lg(-52019067, -102802849) << -2122946486)
    assertEquals(lg(0, 771751936), lg(-456947922, 1170727731) << 2126487160)
    assertEquals(lg(0, -710836224), lg(1756719200, -1702547414) << -32425558)
    assertEquals(lg(0, -1073741824), lg(97072750, 409070577) << 1222452733)
    assertEquals(lg(0, -1182793728), lg(1177105779, 212324545) << -834196361)
    assertEquals(lg(0, 1543503872), lg(1395605166, -1743726419) << -1762017159)
    assertEquals(lg(0, -67108864), lg(703808254, 1939941481) << 1042647417)
    assertEquals(lg(0, 1207959552), lg(-702184622, -618243162) << -753853766)
    assertEquals(lg(-58458112, -1619174179), lg(-1368457662, 1747275710) << 1382741393)
    assertEquals(lg(0, -299542812), lg(-74885703, 1342895995) << 1929734882)
    assertEquals(lg(0, -1585446912), lg(-61401466, -496528012) << -129147274)
    assertEquals(lg(1888485376, 630678170), lg(-660169692, 1479330149) << 289081298)
    assertEquals(lg(0, -536870912), lg(-421237721, 1011668330) << 370873533)
    assertEquals(lg(0, 102137856), lg(-821818323, -2029348763) << -916638609)
    assertEquals(lg(0, -1073741824), lg(-1246065172, -1572087360) << 1493241980)
    assertEquals(lg(1156516188, -1812425640), lg(578258094, -906212820) << 2074806145)
    assertEquals(lg(0, 1370357760), lg(61151968, -1770168701) << -2062208020)
    assertEquals(lg(-402653184, 1642287002), lg(1013576541, 460756940) << -902835237)
    assertEquals(lg(-1744830464, 1690731362), lg(-1731171245, 771836652) << 868975579)
    assertEquals(lg(-417260032, 563566725), lg(1123258511, 1049676716) << 575477257)
    assertEquals(lg(411626816, -1915897795), lg(-779579692, 1222433667) << 1238257604)
    assertEquals(lg(0, -2147483648), lg(-1102469156, -543766743) << 553354173)
    assertEquals(lg(0, -1909156352), lg(843520587, -517185932) << 1899246569)
    assertEquals(lg(0, -487976960), lg(-510775647, -896837143) << 1487779500)
    assertEquals(lg(-1148788736, -847308273), lg(-1594115986, -186853391) << -119255604)
    assertEquals(lg(0, 1940424228), lg(-588635767, 1047291343) << 2089738146)
    assertEquals(lg(1726279680, 2137615428), lg(-1002017201, -986188138) << 800913356)
    assertEquals(lg(0, 1650633728), lg(1813551275, -400674286) << -1609938966)
    assertEquals(lg(-1207959552, 897838789), lg(-1333929801, 254558182) << -1518372133)
    assertEquals(lg(0, -1104224256), lg(834127324, 878312672) << -923142549)
    assertEquals(lg(-504160320, 305586753), lg(126340223, -2008491127) << -252023418)
    assertEquals(lg(0, 0), lg(510931784, -1313923431) << 1174528765)
    assertEquals(lg(-1449390900, -1602240664), lg(711394099, -400560166) << -967606846)
    assertEquals(lg(0, 1162928128), lg(1319282800, -1994311032) << 1237159401)
    assertEquals(lg(-1749421258, 1809275319), lg(-874710629, -1242845989) << 484063041)
  }

  @Test def shift_logical_right_>>>(): Unit = {
    assertEquals(lg(1982185809, 4856), lg(88517143, 1273092247) >>> 2099569298)
    assertEquals(lg(40, 0), lg(-1987462914, 1361836721) >>> -2053535175)
    assertEquals(lg(258, 0), lg(1513792977, 1085974656) >>> -303705162)
    assertEquals(lg(-1589724844, 2), lg(-2071249600, 1411897130) >>> 1015183069)
    assertEquals(lg(827423626, 419765), lg(-1560865755, 214919778) >>> 1191603401)
    assertEquals(lg(376475826, 25773988), lg(944265510, -995896821) >>> 485744647)
    assertEquals(lg(291969293, 528), lg(1131824263, -2080089658) >>> -386336938)
    assertEquals(lg(185, 0), lg(-827478170, -1185129975) >>> 2048537528)
    assertEquals(lg(45022, 0), lg(-916869993, -1344352401) >>> -791372688)
    assertEquals(lg(587, 0), lg(588931659, -1830830904) >>> -1259543946)
    assertEquals(lg(-684574597, 28915), lg(473794659, 947514265) >>> -1409717873)
    assertEquals(lg(3, 0), lg(471518489, -940479957) >>> -847604034)
    assertEquals(lg(11, 0), lg(-818287716, 1547586919) >>> -216455813)
    assertEquals(lg(266, 0), lg(-2088976011, -2057680935) >>> 787633143)
    assertEquals(lg(-800511856, 59336150), lg(306848777, -497453644) >>> 1584315654)
    assertEquals(lg(25694, 0), lg(-1689341833, -927188015) >>> 1300572337)
    assertEquals(lg(237982231, 3229829), lg(396954515, 413418119) >>> 1180537031)
    assertEquals(lg(1319611409, 10188), lg(1478732342, 1335401807) >>> -1668840943)
    assertEquals(lg(-530293557, 9), lg(-1326271298, -1643756084) >>> -2118687716)
    assertEquals(lg(26, 0), lg(1205635051, 875594107) >>> 350453433)
    assertEquals(lg(1698203097, 57089), lg(-2049358216, -553556680) >>> -1203541232)
    assertEquals(lg(-308392901, 40188), lg(1278981121, -1661145698) >>> 254766480)
    assertEquals(lg(-1667461656, 7259908), lg(1313272948, 929268302) >>> 1175504903)
    assertEquals(lg(99018, 0), lg(1982277801, -1050318135) >>> 629735727)
    assertEquals(lg(16237, 0), lg(-610510955, 1064153335) >>> 577897264)
    assertEquals(lg(689994, 0), lg(1859860682, 1413109554) >>> 243415787)
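    // Note: as on the JVM, Long shifts use only the six low-order bits of the
    // shift count, which is why the huge random shift amounts in these cases
    // are well-defined; e.g. x >>> 2099569298 is x >>> (2099569298 & 63),
    // i.e. x >>> 18, matching the first case of this test.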
    assertEquals(lg(4088, 0), lg(1757351444, -7991214) >>> -1844808396)
    assertEquals(lg(48441534, 0), lg(-1277568919, -1194709070) >>> -2102413146)
    assertEquals(lg(42961906, 0), lg(-1768551066, 1342559) >>> 365466523)
    assertEquals(lg(1946, 0), lg(1051996382, -213518283) >>> -717261067)
    assertEquals(lg(-605712863, 10), lg(451444747, -1380034334) >>> -675522340)
    assertEquals(lg(8, 0), lg(605006440, -1956088854) >>> 192236860)
    assertEquals(lg(-152492078, 258), lg(-384174131, -2122615661) >>> -1278414057)
    assertEquals(lg(-1650335224, 9146646), lg(-1579022332, -1953425763) >>> 2134440904)
    assertEquals(lg(175996054, 0), lg(-433112808, -1479030417) >>> -1873327132)
    assertEquals(lg(771890457, 0), lg(-1786180708, 385945228) >>> 1526047775)
    assertEquals(lg(868056695, -1200391723), lg(868056695, -1200391723) >>> 93595840)
    assertEquals(lg(88233, 0), lg(1335240662, -1403745666) >>> 1625850351)
    assertEquals(lg(21, 0), lg(-681452715, -1446696044) >>> -742234373)
    assertEquals(lg(200097858, 0), lg(301750839, 1600782865) >>> 1678034787)
    assertEquals(lg(1, 0), lg(-2077889650, 445749598) >>> 363036476)
    assertEquals(lg(-1160719403, 3135), lg(-1633078438, 1644025478) >>> -1297864237)
    assertEquals(lg(27660, 0), lg(1159483779, 906375175) >>> -1204888593)
    assertEquals(lg(1096217739, 131290637), lg(179807326, 1050325098) >>> -1598422013)
    assertEquals(lg(61, 0), lg(952383136, -193355640) >>> 415626042)
    assertEquals(lg(12362394, 0), lg(972435428, -1130194211) >>> -1259042456)
    assertEquals(lg(-924965860, 8483), lg(605823642, 555993310) >>> 1780437072)
    assertEquals(lg(88, 0), lg(665774635, 184915839) >>> 1729784373)
    assertEquals(lg(27109, 0), lg(-263808048, -741669613) >>> -204793551)
    assertEquals(lg(-5828381, 10), lg(-954198224, 369053217) >>> 768150041)
  }

  @Test def shift_arithmetic_right_>>(): Unit = {
    assertEquals(lg(144041519, 2813487), lg(-1780076655, 720252680) >> -1316031160)
    assertEquals(lg(1519, 0), lg(234061537, 796729805) >> 1452874739)
    assertEquals(lg(-935479627, 124), lg(1523206972, 1046748891) >> 1356453463)
    assertEquals(lg(-15335, -1), lg(1866043067, -2009962307) >> 393061105)
    assertEquals(lg(5, 0), lg(89507691, 183545611) >> -1980770119)
    assertEquals(lg(-1283367734, 14309038), lg(-1062312593, 1831556953) >> 1545082311)
    assertEquals(lg(523169438, 0), lg(-1568293714, 523169438) >> -2119005984)
    assertEquals(lg(-1704853904, -731301), lg(-2013675422, -748851607) >> 511130378)
    assertEquals(lg(345569760, -46), lg(-521585277, -770402055) >> -1176556648)
    assertEquals(lg(1777038301, 61), lg(-145701849, 257587932) >> -1512809002)
    assertEquals(lg(-51, -1), lg(-973180026, -1694110170) >> 2083093369)
    assertEquals(lg(-5, -1), lg(1761120319, -539393529) >> -207994821)
    assertEquals(lg(-587262921, -3246345), lg(-30904807, -1662128199) >> -638486135)
    assertEquals(lg(-10706, -1), lg(1812122560, -701571284) >> 611632432)
    assertEquals(lg(7484398, 100362842), lg(119750375, 1605805472) >> 244039684)
    assertEquals(lg(1, 0), lg(269986751, 1459449758) >> -439796226)
    assertEquals(lg(7, 0), lg(-1969890020, 2011804532) >> -652735044)
    assertEquals(lg(-2130588861, 98), lg(-1582649974, 826310885) >> 613066583)
    assertEquals(lg(-669931160, -697), lg(756433442, -1459944907) >> -775565931)
    assertEquals(lg(933146972, -1), lg(1678061064, -1680910162) >> -531660641)
    assertEquals(lg(1601141595, 1298147), lg(1870355258, 332325727) >> -434372344)
    assertEquals(lg(-1047936567, -129548), lg(1886551280, -2122502046) >> -763866098)
    assertEquals(lg(-72307, -1), lg(-1169141408, -592336405) >> -1841005139)
    assertEquals(lg(72262, 0), lg(686282122, 295988927) >> 69079212)
    assertEquals(lg(-1582088844, -23862710), lg(1825529126, -1527213400) >> 1371712838)
    assertEquals(lg(70395261, 0), lg(633149491, 1126324183) >> 1948323684)
    assertEquals(lg(-329, -1), lg(-363762029, -1377253181) >> -1243200330)
    assertEquals(lg(1924403917, -21), lg(-1694234908, -689608667) >> 728732313)
    assertEquals(lg(-62655, -1), lg(1319661865, -2053067582) >> -777879057)
    assertEquals(lg(-1472236443, 19900875), lg(-1472236443, 19900875) >> 373478400)
    assertEquals(lg(-1, -1), lg(-1719111010, -1766452468) >> 942391743)
    assertEquals(lg(5131, 0), lg(-624682758, 1345231635) >> -813574478)
    assertEquals(lg(9, 0), lg(1316519660, 314590421) >> -641829383)
    assertEquals(lg(-14492, -1), lg(-1380652891, -474856510) >> -920501329)
    assertEquals(lg(40, 0), lg(-2084688189, 1352268039) >> -177471111)
    assertEquals(lg(-868447412, 13901269), lg(507881044, 1779362534) >> -508943033)
    assertEquals(lg(-37529, -1), lg(1742323077, -1229747072) >> 401183471)
    assertEquals(lg(376386, 0), lg(346182810, 770838817) >> 797274667)
    assertEquals(lg(-1822, -1), lg(828281422, -477411393) >> 1298272370)
    assertEquals(lg(1021967080, -2560), lg(-341778503, -671026265) >> 532386578)
    assertEquals(lg(-1683940185, 34921), lg(-1907127360, 1144311248) >> -2131012273)
    assertEquals(lg(-121723, -1), lg(756366897, -1994294687) >> -1642432978)
    assertEquals(lg(-644688038, 9473), lg(-1363894143, 1241756453) >> 1681307793)
    assertEquals(lg(-278047, -1), lg(1708006412, -1138876437) >> 2010442220)
    assertEquals(lg(872834, 0), lg(-664430929, 446891142) >> -1707024855)
    assertEquals(lg(-1, -1), lg(-1904131429, -938887) >> -829231944)
    assertEquals(lg(-2101780246, 11998), lg(-1043053889, 1572668786) >> 309495249)
    assertEquals(lg(-11427, -1), lg(563683687, -1497656119) >> -176819791)
    assertEquals(lg(201, 0), lg(-627312011, 421917318) >> 2056663541)
    assertEquals(lg(-104838948, -3), lg(-904956287, -543423347) >> -617227620)
  }

  @Test def negate_-(): Unit = {
    assertEquals(lg(0), -lg(0))
    assertEquals(lg(1), -lg(-1))
    assertEquals(lg(-1), -lg(1))
    assertEquals(lg(1, -2147483648), -MaxVal)
    assertEquals(MinVal, -MinVal) // two's complement: -MinVal overflows back to MinVal
    assertEquals(lg(0, -1), -lg(0, 1))
    assertEquals(lg(792771844, -1518464955), -lg(-792771844, 1518464954))
    assertEquals(lg(1313283210, -1172119606), -lg(-1313283210, 1172119605))
    assertEquals(lg(-1034897743, -341494686), -lg(1034897743, 341494685))
    assertEquals(lg(-924881290, 1614058538), -lg(924881290, -1614058539))
    assertEquals(lg(-1636891236, -1405401040), -lg(1636891236, 1405401039))
    assertEquals(lg(2044349674, -477271433), -lg(-2044349674, 477271432))
    assertEquals(lg(1426086684, -1493816436), -lg(-1426086684, 1493816435))
    assertEquals(lg(-2125201680, 1667846199), -lg(2125201680, -1667846200))
    assertEquals(lg(161054645, -1272528725), -lg(-161054645, 1272528724))
    assertEquals(lg(-1013390126, -1323844683), -lg(1013390126, 1323844682))
    assertEquals(lg(-1028806094, -691441881), -lg(1028806094, 691441880))
    assertEquals(lg(1060422114, -11477649), -lg(-1060422114, 11477648))
    assertEquals(lg(1366334123, -2046238761), -lg(-1366334123, 2046238760))
    assertEquals(lg(1307711795, 940346049), -lg(-1307711795, -940346050))
    assertEquals(lg(421687960, -250174762), -lg(-421687960, 250174761))
    assertEquals(lg(379452754, -843386803), -lg(-379452754, 843386802))
    assertEquals(lg(-1251296999, 1144268297), -lg(1251296999, -1144268298))
    assertEquals(lg(-690359429, -1676679602), -lg(690359429, 1676679601))
    assertEquals(lg(1952563749, -882544420), -lg(-1952563749, 882544419))
    assertEquals(lg(-1420900897, -1865273591), -lg(1420900897, 1865273590))
    assertEquals(lg(115947827, -832851217), -lg(-115947827, 832851216))
    assertEquals(lg(-1834973959, -1423776005), -lg(1834973959, 1423776004))
    assertEquals(lg(1376766876, 1519617584), -lg(-1376766876, -1519617585))
    assertEquals(lg(-1845217535, 724725865), -lg(1845217535, -724725866))
    assertEquals(lg(-1133294381, 699400553), -lg(1133294381, -699400554))
    assertEquals(lg(113507585, 615978889), -lg(-113507585, -615978890))
    assertEquals(lg(-1839784424, 1163726652), -lg(1839784424, -1163726653))
    assertEquals(lg(1065777168, 1301742163), -lg(-1065777168, -1301742164))
    assertEquals(lg(334075220, -1058529734), -lg(-334075220, 1058529733))
    assertEquals(lg(1443112398, 1148167880), -lg(-1443112398, -1148167881))
    assertEquals(lg(1647739462, 12310882), -lg(-1647739462, -12310883))
    assertEquals(lg(1461318149, 518941731), -lg(-1461318149, -518941732))
    assertEquals(lg(56833825, -162898592), -lg(-56833825, 162898591))
    assertEquals(lg(-680096727, -1760413869), -lg(680096727, 1760413868))
    assertEquals(lg(461541717, -1103626950), -lg(-461541717, 1103626949))
    assertEquals(lg(1287248387, 1483137214), -lg(-1287248387, -1483137215))
    assertEquals(lg(-1681467124, -1197977023), -lg(1681467124, 1197977022))
    assertEquals(lg(-310946355, 885055747), -lg(310946355, -885055748))
    assertEquals(lg(-717629012, -1299204708), -lg(717629012, 1299204707))
    assertEquals(lg(800584851, 350245993), -lg(-800584851, -350245994))
    assertEquals(lg(1911014238, -441020786), -lg(-1911014238, 441020785))
    assertEquals(lg(-1647080824, -1197295589), -lg(1647080824, 1197295588))
    assertEquals(lg(-925751968, -479541400), -lg(925751968, 479541399))
    assertEquals(lg(-656919119, 1574890072), -lg(656919119, -1574890073))
    assertEquals(lg(-1833364814, 432106462), -lg(1833364814, -432106463))
    assertEquals(lg(-315730911, -1990201785), -lg(315730911, 1990201784))
    assertEquals(lg(1218524771, -572482048), -lg(-1218524771, 572482047))
    assertEquals(lg(276668811, 2002398729), -lg(-276668811, -2002398730))
    assertEquals(lg(1489416833, 834462753), -lg(-1489416833, -834462754))
    assertEquals(lg(2066446588, 688546120), -lg(-2066446588, -688546121))
  }

  @Test def plus_+(): Unit = {
    assertEquals(lg(802149732, -566689627), lg(-202981355, -566689628) + lg(1005131087, 0))
    assertEquals(lg(902769101, 1674149440), lg(1153016325, 1674149440) + lg(-250247224, -1))
    assertEquals(lg(1128646485, -1965159800), lg(1701699755, -1965159800) + lg(-573053270, -1))
    assertEquals(lg(66936416, -973893589), lg(-1183294843, -973893590) + lg(1250231259, 0))
    assertEquals(lg(-155818001, 449544496), lg(-2145882999, 449544496) + lg(1990064998, 0))
    assertEquals(lg(-1244599644, -917980205), lg(-528276750, -917980205) + lg(-716322894, -1))
    assertEquals(lg(580594010, 1794016499), lg(-1061043923, 1794016498) + lg(1641637933, 0))
    assertEquals(lg(-1874551871, 1883156001), lg(-315483661, 1883156001) + lg(-1559068210, -1))
    assertEquals(lg(-611587809, 95409025), lg(-1899047326, 95409025) + lg(1287459517, 0))
    assertEquals(lg(-1393747885, 1167571449), lg(-705065818, 1167571449) + lg(-688682067, -1))
    assertEquals(lg(1135734754, -607437553), lg(-192210545, -607437554) + lg(1327945299, 0))
    assertEquals(lg(545472170, -2007097641), lg(11453726, -2007097641) + lg(534018444, 0))
    assertEquals(lg(-1984029353, -1191350400), lg(1809973610, -1191350400) + lg(500964333, 0))
    assertEquals(lg(1031291620, 108684756), lg(972641234, 108684756) + lg(58650386, 0))
    assertEquals(lg(-1375760766, 127758048), lg(-1511325903, 127758048) + lg(135565137, 0))
    assertEquals(lg(640679472, 429508922), lg(-942832491, 429508921) + lg(1583511963, 0))
    assertEquals(lg(-820503583, -594798242), lg(1500842230, -594798242) + lg(1973621483, 0))
    assertEquals(lg(1875301895, 910473912), lg(-1088230684, 910473912) + lg(-1331434717, -1))
    assertEquals(lg(-1755864971, 378724963), lg(798219431, 378724963) + lg(1740882894, 0))
    assertEquals(lg(468052904, -683558197), lg(-1763683665, -683558197) + lg(-2063230727, -1))
    assertEquals(lg(-1488850347, -1636478025), lg(627629519, -1636478024) + lg(-2116479866, -1))
    assertEquals(lg(915882407, -338305025), lg(-526665240, -338305026) + lg(1442547647, 0))
    assertEquals(lg(-950882103, -466473801), lg(-1265295286, -466473801) + lg(314413183, 0))
    assertEquals(lg(-673278223, -1417005301), lg(-1412852606, -1417005301) + lg(739574383, 0))
    assertEquals(lg(-1565299836, -2035157269), lg(708993121, -2035157269) + lg(2020674339, 0))
    assertEquals(lg(638729196, 1182702858), lg(847269791, 1182702858) + lg(-208540595, -1))
    assertEquals(lg(-1453651445, -1902383955), lg(97084677, -1902383954) + lg(-1550736122, -1))
    assertEquals(lg(1116569659, -606967004), lg(-267181534, -606967005) + lg(1383751193, 0))
    assertEquals(lg(529048030, 1063184820), lg(-904322265, 1063184819) + lg(1433370295, 0))
    assertEquals(lg(-499260224, 101142421), lg(1841727454, 101142421) + lg(1953979618, 0))
    assertEquals(lg(1452864874, 1045175929), lg(-1716387490, 1045175929) + lg(-1125714932, -1))
    assertEquals(lg(982736721, 1506316757), lg(-1020814821, 1506316756) + lg(2003551542, 0))
    assertEquals(lg(-1478064805, 1107506955), lg(467820886, 1107506956) + lg(-1945885691, -1))
    assertEquals(lg(1436947166, -57552832), lg(-103701719, -57552833) + lg(1540648885, 0))
    assertEquals(lg(3887456, -414981457), lg(1280780483, -414981457) + lg(-1276893027, -1))
    assertEquals(lg(939083871, 606376864), lg(-1505747919, 606376864) + lg(-1850135506, -1))
    assertEquals(lg(-1161495325, -606274238), lg(-1797917239, -606274238) + lg(636421914, 0))
    assertEquals(lg(2146013782, 52949338), lg(-551974000, 52949338) + lg(-1596979514, -1))
    assertEquals(lg(-159062053, -623553409), lg(484182807, -623553408) + lg(-643244860, -1))
    assertEquals(lg(1680160313, 371486519), lg(1170065239, 371486519) + lg(510095074, 0))
    assertEquals(lg(-2071737549, -251530660), lg(553737773, -251530660) + lg(1669491974, 0))
    assertEquals(lg(793877651, -324566030), lg(1363264202, -324566030) + lg(-569386551, -1))
    assertEquals(lg(1897556965, 1255689015), lg(1461362302, 1255689015) + lg(436194663, 0))
    assertEquals(lg(-540868058, 718534179), lg(-1463314706, 718534179) + lg(922446648, 0))
    assertEquals(lg(2547531, -716998232), lg(-1684072850, -716998233) + lg(1686620381, 0))
    assertEquals(lg(-1709813271, -2086072551), lg(-183257712, -2086072551) + lg(-1526555559, -1))
    assertEquals(lg(-2134341942, -1223154956), lg(-485818523, -1223154956) + lg(-1648523419, -1))
    assertEquals(lg(1634619686, -1934382665), lg(392330048, -1934382665) + lg(1242289638, 0))
    assertEquals(lg(-1409927090, -75135322), lg(1907808353, -75135322) + lg(977231853, 0))
    assertEquals(lg(-1393001322, 1362535802), lg(88305723, 1362535803) + lg(-1481307045, -1))
  }

  @Test def minus_-(): Unit = {
    // Workaround for https://code.google.com/p/v8/issues/detail?id=3304
    assertEquals(lg(-1), lg(0) - lg(1))

    assertEquals(lg(1318078695, 462416044), lg(406229717, 462416044) - lg(-911848978, -1))
    assertEquals(lg(459412414, 466142261), lg(873646396, 466142261) - lg(414233982, 0))
    assertEquals(lg(1749422706, -573388520), lg(-2077914189, -573388520) - lg(467630401, 0))
    assertEquals(lg(855866353, -1980988131), lg(-789253983, -1980988132) - lg(-1645120336, -1))
    assertEquals(lg(1858485462, 1825277273), lg(-482388232, 1825277273) - lg(1954093602, 0))
    assertEquals(lg(1211608504, -1077757379), lg(-1616159373, -1077757379) - lg(1467199419, 0))
    assertEquals(lg(-1391411781, -1825579414), lg(-105778670, -1825579414) - lg(1285633111, 0))
    assertEquals(lg(1573921037, -2018677385), lg(1306759468, -2018677385) - lg(-267161569, -1))
    assertEquals(lg(2075838974, -289291128), lg(618139116, -289291128) - lg(-1457699858, -1))
    assertEquals(lg(600013127, -1980710784), lg(1736445522, -1980710784) - lg(1136432395, 0))
    assertEquals(lg(-558434179, 21136449), lg(-1970971750, 21136449) - lg(-1412537571, -1))
    assertEquals(lg(-343650116, 229693364), lg(-1491842755, 229693364) - lg(-1148192639, -1))
    assertEquals(lg(1686071974, -2064363005), lg(2125082313, -2064363005) - lg(439010339, 0))
    assertEquals(lg(-1587252411, -1887690341), lg(922634658, -1887690341) - lg(-1785080227, -1))
    assertEquals(lg(-992416688, 1754335328), lg(478015362, 1754335329) - lg(1470432050, 0))
    assertEquals(lg(1718268050, -845578935), lg(-1788952896, -845578935) - lg(787746350, 0))
    assertEquals(lg(1316319511, -1479013672), lg(-1177368338, -1479013672) - lg(1801279447, 0))
    assertEquals(lg(1568876561, -2147323821), lg(1761081661, -2147323821) - lg(192205100, 0))
    assertEquals(lg(-1122491731, 1604940224), lg(261772552, 1604940225) - lg(1384264283, 0))
    assertEquals(lg(1556996455, 1018615990), lg(-1441241840, 1018615990) - lg(1296729001, 0))
    assertEquals(lg(-52258673, -155632234), lg(907527568, -155632233) - lg(959786241, 0))
    assertEquals(lg(1911811399, 1534910973), lg(1509034771, 1534910973) - lg(-402776628, -1))
    assertEquals(lg(1234505303, -718856464), lg(-344668006, -718856465) - lg(-1579173309, -1))
    assertEquals(lg(1263823751, 1792314521), lg(-2096618226, 1792314521) - lg(934525319, 0))
    assertEquals(lg(-1901870284, -977488448), lg(1861956484, -977488448) - lg(-531140528, -1))
    assertEquals(lg(170060904, -1532994269), lg(-691455907, -1532994270) - lg(-861516811, -1))
    assertEquals(lg(-417244722, -946809431), lg(-693769914, -946809431) - lg(-276525192, -1))
    assertEquals(lg(1392505816, -834216711), lg(-1698674051, -834216711) - lg(1203787429, 0))
    assertEquals(lg(339105023, -930632047), lg(1453492556, -930632047) - lg(1114387533, 0))
    assertEquals(lg(1588670098, -422836102), lg(-516102112, -422836103) - lg(-2104772210, -1))
    assertEquals(lg(-1793332542, 1839759286), lg(1194707556, 1839759286) - lg(-1306927198, -1))
    assertEquals(lg(-1933743595, -1652840750), lg(1188016800, -1652840750) - lg(-1173206901, -1))
    assertEquals(lg(1172675504, 1790839027), lg(-1268512415, 1790839027) - lg(1853779377, 0))
    assertEquals(lg(-2038245078, 275932678), lg(-777434907, 275932678) - lg(1260810171, 0))
    assertEquals(lg(-640120196, 658575618), lg(607917442, 658575619) - lg(1248037638, 0))
    assertEquals(lg(-939204613, -2089057829), lg(-1490388970, -2089057829) - lg(-551184357, -1))
    assertEquals(lg(-2089897031, 992436418), lg(-1342917439, 992436418) - lg(746979592, 0))
    assertEquals(lg(-767046771, -1192540532), lg(-1045496394, -1192540532) - lg(-278449623, -1))
    assertEquals(lg(735191894, -683257085), lg(1555450000, -683257085) - lg(820258106, 0))
    assertEquals(lg(2026420598, 481753248), lg(1022728181, 481753248) - lg(-1003692417, -1))
    assertEquals(lg(-2132649422, 1411964223), lg(2028304312, 1411964223) - lg(-134013562, -1))
    assertEquals(lg(1346424260, -217374406), lg(704117341, -217374406) - lg(-642306919, -1))
    assertEquals(lg(-692878557, 278237510), lg(313351245, 278237511) - lg(1006229802, 0))
    assertEquals(lg(-1545280043, 2054685372), lg(2076724262, 2054685372) - lg(-672962991, -1))
    assertEquals(lg(1156651977, 261806288), lg(1990098163, 261806288) - lg(833446186, 0))
    assertEquals(lg(-244547539, 1626774417), lg(1425435353, 1626774418) - lg(1669982892, 0))
    assertEquals(lg(-125857115, -1714068645), lg(2084724465, -1714068645) - lg(-2084385716, -1))
    assertEquals(lg(-2124426763, -543675020), lg(-1799809279, -543675020) - lg(324617484, 0))
    assertEquals(lg(-2145169231, -602489858), lg(1972622018, -602489858) - lg(-177176047, -1))
    assertEquals(lg(408960051, 967789979), lg(883147297, 967789979) - lg(474187246, 0))
  }

  @Test def times_*(): Unit = {
    assertEquals(lg(-1056314208, 1039912134), lg(-1436299491, 1172705251) * lg(1721031968, 0))
    assertEquals(lg(15417694, -1235494072), lg(-1754547158, 1592794750) * lg(-850659149, -1))
    assertEquals(lg(-1312839754, -486483117), lg(-582562130, 1508550574) * lg(-2054981347, -1))
    assertEquals(lg(-377676239, 1969822597), lg(-517256163, 1107889737) * lg(324089381, 0))
    assertEquals(lg(-1426078720, -1379092277), lg(1862517504, -2146745095) * lg(2043533548, 0))
    assertEquals(lg(-1611894400, 514550890), lg(-1341087062, 93674761) * lg(1272468928, 0))
    assertEquals(lg(88803236, -172420721), lg(-1911825604, 1026411170) * lg(244738503, 0))
    assertEquals(lg(1486387579, 668666773), lg(2102189793, 425022510) * lg(750432219, 0))
    assertEquals(lg(913918418, 2124658288), lg(-1628887094, 2043879870) * lg(-1367964491, -1))
    assertEquals(lg(-1067082241, 864193319), lg(454909009, -1096315634) * lg(-461844145, -1))
    assertEquals(lg(949541055, 403324299), lg(-1346593793, -331776468) * lg(1495188289, 0))
    assertEquals(lg(-232871624, -1943313306), lg(39946028, -363039140) * lg(-1134101206, -1))
    assertEquals(lg(-528828160, -1884969955), lg(769959254, -432157368) * lg(-488368768, -1))
    assertEquals(lg(913322937, -2105457977), lg(1975078475, 1181124823) * lg(-1852476533, -1))
    assertEquals(lg(1594278208, 943829214), lg(-2118478876, -1521449422) * lg(-235907376, -1))
    assertEquals(lg(-50678328, 2146883835), lg(-192590815, -1552754278) * lg(990887112, 0))
    assertEquals(lg(1779498513, -1732099612), lg(-74714605, 386143916) * lg(1634792395, 0))
    assertEquals(lg(982209626, 857499597), lg(1839773441, -590412588) * lg(799604314, 0))
    assertEquals(lg(1806268816, -990479821), lg(1395571130, -1228992407) * lg(1440046952, 0))
    assertEquals(lg(1683728223, -957382628), lg(-1094818235, 1759139279) * lg(-156634285, -1))
    assertEquals(lg(-1590791694, 595489480), lg(853844787, 525523561) * lg(600761926, 0))
    assertEquals(lg(1353714367, 146465211), lg(-903115469, 793487771) * lg(1986597957, 0))
    assertEquals(lg(1421874569, -1462441210), lg(-830036223, 830164681) * lg(-1711884663, -1))
    assertEquals(lg(-962035602, -2086325336), lg(1514898873, 1802395563) * lg(1763957470, 0))
    assertEquals(lg(213232144, -1084932179), lg(-1931885288, 136587512) * lg(-241565738, -1))
    assertEquals(lg(-915935202, 1495104097), lg(571274323, 1264898114) * lg(1823828906, 0))
    assertEquals(lg(1116543789, -1473151538), lg(-15708939, -2105030313) * lg(48280153, 0))
    assertEquals(lg(-1230228445, -570579388), lg(1792017337, -1626094957) * lg(301685947, 0))
    assertEquals(lg(1335719116, 1447187791), lg(-1942632452, -691115342) * lg(-889918259, -1))
    assertEquals(lg(1398640985, -1330552693), lg(-683458011, -1409200935) * lg(-996910555, -1))
    assertEquals(lg(-402621042, 1775759707), lg(562125786, -1303526635) * lg(-1761056509, -1))
    assertEquals(lg(129149596, -78429064), lg(2115902292, -1194658096) * lg(-1549721205, -1))
    assertEquals(lg(1706925885, 1413499189), lg(1852083423, 330104035) * lg(1414822755, 0))
    assertEquals(lg(-722178384, 1850552711), lg(-1623207532, 1442771787) * lg(-948878276, -1))
    assertEquals(lg(545021767, -1389368834), lg(-898643831, 773279296) * lg(1294488911, 0))
    assertEquals(lg(1541594150, 820379725), lg(421823854, 802578424) * lg(1394107269, 0))
    assertEquals(lg(-279324848, 1175391379), lg(1589092022, 237831212) * lg(-763790472, -1))
    assertEquals(lg(2089067814, 975727054), lg(-1247207721, -370556328) * lg(1449901386, 0))
    assertEquals(lg(-1977714127, -377823390), lg(109386811, 368962517) * lg(1406834819, 0))
    assertEquals(lg(1759713497, -312922364), lg(2135299059, -798752868) * lg(-1861488893, -1))
    assertEquals(lg(1030024362, -795941843), lg(-695671854, 1917612060) * lg(2083344781, 0))
    assertEquals(lg(-704748314, 388197332), lg(250669253, -442179349) * lg(-552836178, -1))
    assertEquals(lg(758103782, -158300478), lg(1237744278, 206295616) * lg(-1547545223, -1))
    assertEquals(lg(-629736326, 810097466), lg(492775518, 1691641907) * lg(1172634963, 0))
    assertEquals(lg(610754048, 1997636055), lg(-1549380722, 49835026) * lg(-1645815552, -1))
    assertEquals(lg(1696857284, 1549588995), lg(1850430325, -1942955614) * lg(-295254732, -1))
    assertEquals(lg(-66011146, -376837532), lg(-1276671498, -1984743584) * lg(-1583554303, -1))
    assertEquals(lg(2033040344, -167450557), lg(-2127158934, -2058421178) * lg(1620104636, 0))
    assertEquals(lg(-1886196376, -31345953), lg(69958717, -772556465) * lg(21655944, 0))
    assertEquals(lg(-38147573, -1269583268), lg(406538265, -107036516) * lg(2077087683, 0))
  }

  @Test def divide_/(): Unit = {
    expectThrows(classOf[ArithmeticException], lg(0) / lg(0))
    expectThrows(classOf[ArithmeticException], lg(5, 0) / lg(0))
    expectThrows(classOf[ArithmeticException], lg(0, 5) / lg(0))
    expectThrows(classOf[ArithmeticException], lg(-1) / lg(0))
    expectThrows(classOf[ArithmeticException], lg(-1, 0) / lg(0))

    assertEquals(IntMaxValPlus1, IntMinVal / lg(-1))
    assertEquals(lg(-1), IntMinVal / IntMaxValPlus1)
    assertEquals(IntMinVal, IntMaxValPlus1 / lg(-1))
    assertEquals(lg(-1), IntMaxValPlus1 / IntMinVal)
    assertEquals(lg(1, -2147483648), MaxVal / lg(-1))
    assertEquals(MinVal, MinVal / lg(1))
    // Note: as on the JVM, Long division truncates toward zero and
    // MinVal / -1 overflows back to MinVal rather than throwing.
    assertEquals(MinVal, MinVal / lg(-1))

    // int32 / int32
    assertEquals(lg(1, 0), lg(-10426835, -1) / lg(-6243356, -1))
    assertEquals(lg(-291, -1), lg(49659080, 0) / lg(-170373, -1))
    assertEquals(lg(3, 0), lg(97420, 0) / lg(27521, 0))
    assertEquals(lg(26998, 0), lg(-9881291, -1) / lg(-366, -1))
    assertEquals(lg(0, 0), lg(-40, -1) / lg(81, 0))
    assertEquals(lg(0, 0), lg(-6007, -1) / lg(-326806, -1))
    assertEquals(lg(-1, -1), lg(202, 0) / lg(-112, -1))
    assertEquals(lg(0, 0), lg(0, 0) / lg(47, 0))
    assertEquals(lg(323816, 0), lg(22667160, 0) / lg(70, 0))
    assertEquals(lg(0, 0), lg(254, 0) / lg(-307349204, -1))
    assertEquals(lg(0, 0), lg(-17, -1) / lg(-44648, -1))
    assertEquals(lg(-40, -1), lg(39646, 0) / lg(-976, -1))
    assertEquals(lg(0, 0), lg(9, 0) / lg(315779722, 0))
    assertEquals(lg(0, 0), lg(-2674, -1) / lg(-3051991, -1))
    assertEquals(lg(0, 0), lg(-37697, -1) / lg(2015928, 0))
    assertEquals(lg(0, 0), lg(-13, -1) / lg(-31, -1))
    assertEquals(lg(0, 0), lg(6, 0) / lg(-334, -1))
    assertEquals(lg(8, 0), lg(-15989, -1) / lg(-1918, -1))
    assertEquals(lg(8746, 0), lg(-113261535, -1) / lg(-12950, -1))
    assertEquals(lg(55322, 0), lg(-6362112, -1) / lg(-115, -1))
    assertEquals(lg(0, 0), lg(455, 0) / lg(13919, 0))
    assertEquals(lg(36190, 0), lg(293468259, 0) / lg(8109, 0))
    assertEquals(lg(1, 0), lg(-48287007, -1) / lg(-27531186, -1))
    assertEquals(lg(349634, 0), lg(1048904, 0) / lg(3, 0))
    assertEquals(lg(0, 0), lg(-34, -1) / lg(3949717, 0))
    assertEquals(lg(-1, -1), lg(1449, 0) / lg(-983, -1))
    assertEquals(lg(-18537151, -1), lg(18537151, 0) / lg(-1, -1))
    assertEquals(lg(0, 0), lg(14037, 0) / lg(23645, 0))
    assertEquals(lg(-4, -1), lg(1785, 0) / lg(-398, -1))
    assertEquals(lg(0, 0), lg(346, 0) / lg(2198158, 0))
    assertEquals(lg(-802, -1), lg(-3517419, -1) / lg(4381, 0))
    assertEquals(lg(-6, -1), lg(6, 0) / lg(-1, -1))
    assertEquals(lg(39, 0), lg(-822, -1) / lg(-21, -1))
    assertEquals(lg(0, 0), lg(3629, 0) / lg(282734, 0))
    assertEquals(lg(-92367, -1), lg(-278856469, -1) / lg(3019, 0))
    assertEquals(lg(0, 0), lg(-13, -1) / lg(37, 0))
    assertEquals(lg(0, 0), lg(-4, -1) / lg(47150459, 0))
    assertEquals(lg(0, 0), lg(-26, -1) / lg(-210691, -1))
    assertEquals(lg(0, 0), lg(-21294, -1) / lg(156839456, 0))
    assertEquals(lg(0, 0), lg(-5, -1) / lg(-25644, -1))
    assertEquals(lg(0, 0), lg(-1009, -1) / lg(28100, 0))
    assertEquals(lg(-857, -1), lg(16282815, 0) / lg(-18989, -1))
    assertEquals(lg(-7, -1), lg(-2201086, -1) / lg(276963, 0))
    assertEquals(lg(-300, -1), lg(11412578, 0) / lg(-37989, -1))
    assertEquals(lg(0, 0), lg(8406900, 0) / lg(239727371, 0))
    assertEquals(lg(0, 0), lg(-1, -1) / lg(-479069, -1))
    assertEquals(lg(0, 0), lg(4, 0) / lg(-21776, -1))
    assertEquals(lg(-16812960, -1), lg(-16812960, -1) / lg(1, 0))
    assertEquals(lg(0, 0), lg(10873, 0) / lg(57145, 0))
    assertEquals(lg(0, 0), lg(-1, -1) / lg(-7, -1))

    // int32 / int53
    assertEquals(lg(0, 0), lg(-6975858, -1) / lg(42227636, 14))
    assertEquals(lg(0, 0), lg(-1, -1) / lg(370644892, 82735))
    assertEquals(lg(0, 0), lg(43, 0) / lg(-1602218381, 49))
    assertEquals(lg(0, 0), lg(4063968, 0) / lg(973173538, 23810))
    assertEquals(lg(0, 0), lg(-388987094, -1) / lg(-241988155, 1723))
    assertEquals(lg(0, 0), lg(5939808, 0) / lg(-1882484681, 12))
    assertEquals(lg(0, 0), lg(7, 0) / lg(-385609304, 1342))
    assertEquals(lg(0, 0), lg(-1175803932, -1) / lg(297649103, 2408))
    assertEquals(lg(0, 0), lg(464610492, 0) / lg(829919518, 2777))
    assertEquals(lg(0, 0), lg(214483, 0) / lg(1502817270, 8078))

    // int32 / big
    assertEquals(lg(0, 0), lg(211494165, 0) / lg(1365318534, 14804989))
    assertEquals(lg(0, 0), lg(5353, 0) / lg(-1032992082, -394605386))
    assertEquals(lg(0, 0), lg(2926, 0) / lg(26982087, -226814570))
    assertEquals(lg(0, 0), lg(-6, -1) / lg(-1339229562, -580578613))
    assertEquals(lg(0, 0), lg(-8, -1) / lg(-108570365, 4920615))
    assertEquals(lg(0, 0), lg(-585878041, -1) / lg(551925027, -1296114209))
    assertEquals(lg(0, 0), lg(-4, -1) / lg(474545806, 64068407))
    assertEquals(lg(0, 0), lg(34, 0) / lg(-137127086, -18652281))
    assertEquals(lg(0, 0), lg(785315, 0) / lg(-881374655, 29722835))
    assertEquals(lg(0, 0), lg(713146, 0) / lg(1442548271, 2727525))

    // int53 / int32
    assertEquals(lg(-578207, -1), lg(397755625, 53271) / lg(-395701427, -1))
    assertEquals(lg(-560062154, 0), lg(-1680186460, 2) / lg(3, 0))
    assertEquals(lg(-926675094, 18), lg(1514942014, 56) / lg(3, 0))
    assertEquals(lg(-162400270, -1), lg(713597492, 1154) / lg(-30524, -1))
    assertEquals(lg(-9, -1), lg(2028377478, 1) / lg(-691707459, -1))
    assertEquals(lg(135006, 0), lg(1387175556, 73) / lg(2332622, 0))
    assertEquals(lg(-200274428, -13), lg(1756997282, 1397) / lg(-116, -1))
    assertEquals(lg(1125157, 0), lg(-1655346723, 0) / lg(2346, 0))
    assertEquals(lg(997096, 0), lg(198249458, 5686) / lg(24492497, 0))
    assertEquals(lg(1369365326, -302), lg(873090497, 11162) / lg(-37, -1))
    assertEquals(lg(-2166511, -1), lg(360057887, 3519) / lg(-6976354, -1))
    assertEquals(lg(1680790298, -2), lg(1115898639, 48) / lg(-30, -1))
    assertEquals(lg(92036331, 1), lg(154624251, 955) / lg(935, 0))
    assertEquals(lg(23215066, 0), lg(806830498, 1063) / lg(196698, 0))
    assertEquals(lg(-13221428, -1), lg(-220365267, 21359) / lg(-6938757, -1))
    assertEquals(lg(-973041595, -2009), lg(759822848, 648657) / lg(-323, -1))
    assertEquals(lg(171873494, 1659), lg(-1180673754, 486098) / lg(293, 0))
    assertEquals(lg(1583541189, 785), lg(1387172319, 769661) / lg(980, 0))
    assertEquals(lg(-917576, -1), lg(-305851327, 2) / lg(-13709, -1))
    assertEquals(lg(456092, 0), lg(577374631, 17) / lg(161353, 0))
    assertEquals(lg(404991630, 376), lg(809983260, 752) / lg(2, 0))
    assertEquals(lg(495082175, 39), lg(495082175, 39) / lg(1, 0))
    assertEquals(lg(90893135, 0), lg(1455620681, 30929) / lg(1461502, 0))
    assertEquals(lg(799104733, 0), lg(1388707384, 34362) / lg(184688, 0))
    assertEquals(lg(1094556328, -70011), lg(2105854641, 140021) / lg(-2, -1))
    assertEquals(lg(-1819673734, 1), lg(1310105355, 427420) / lg(271150, 0))
    assertEquals(lg(-119338773, -6), lg(-236557650, 35455) / lg(-7052, -1))
    assertEquals(lg(32825, 0), lg(-1127581476, 0) / lg(96492, 0))
    assertEquals(lg(-57018115, -1), lg(2004387480, 7243) / lg(-545624, -1))
    assertEquals(lg(-5950946, -1), lg(381447319, 2213) / lg(-1597249, -1))
    assertEquals(lg(-811421531, -4249), lg(-1860702702, 12744) / lg(-3, -1))
    assertEquals(lg(4741011, 0), lg(-548164065, 6487) / lg(5877480, 0))
    assertEquals(lg(-1064193809, 45), lg(-476290317, 131491) / lg(2874, 0))
    assertEquals(lg(228327608, 0), lg(499912484, 1) / lg(21, 0))
    assertEquals(lg(99111506, 0), lg(-1509435894, 8467) / lg(366943, 0))
    assertEquals(lg(-1209485521, -1), lg(-1580093356, 5) / lg(-20, -1))
    assertEquals(lg(-319956618, -1), lg(1299112295, 55074) / lg(-739295, -1))
    assertEquals(lg(-62197, -1), lg(-1405948570, 43) / lg(-3015755, -1))
    assertEquals(lg(9087, 0), lg(1405130313, 57) / lg(27093454, 0))
    assertEquals(lg(345582531, 0), lg(-1804200888, 1989226) / lg(24722497, 0))
    assertEquals(lg(-1424974, -1), lg(-1642507127, 886) / lg(-2672324, -1))
    assertEquals(lg(1991351, 0), lg(-1276796892, 35) / lg(77004, 0))
    assertEquals(lg(1193137, 0), lg(-1200759296, 816) / lg(2939970, 0))
    assertEquals(lg(573585390, 0), lg(399171813, 123795) / lg(926969, 0))
    assertEquals(lg(1683063904, -942), lg(1649267984, 229752) / lg(-244, -1))
    assertEquals(lg(-6019138, -1), lg(-387146187, 7364) / lg(-5255245, -1))
    assertEquals(lg(-123416174, 28), lg(149703916, 19121) / lg(660, 0))
    assertEquals(lg(-40732946, -1), lg(-1582312743, 7920) / lg(-835168, -1))
    assertEquals(lg(715821610, 298), lg(1431643220, 596) / lg(2, 0))
    assertEquals(lg(-570078780, -1), lg(-1717918737, 8458) / lg(-63727, -1))

    // int53 / int53
    assertEquals(lg(1, 0), lg(-1232398900, 28871) / lg(13989713, 22345))
    assertEquals(lg(0, 0), lg(-916994839, 12266) / lg(1713571419, 15301))
    assertEquals(lg(32, 0), lg(1133414946, 229) / lg(256531666, 7))
    assertEquals(lg(368, 0), lg(134792921, 3907) / lg(-1656790262, 10))
    assertEquals(lg(1, 0), lg(1532393452, 52260) / lg(-701373106, 31864))
    assertEquals(lg(0, 0), lg(193990135, 1460) / lg(867607428, 6918))
    assertEquals(lg(0, 0), lg(867672590, 1) / lg(-1315044816, 987593))
    assertEquals(lg(0, 0), lg(-978844610, 2) / lg(720710523, 209))
    assertEquals(lg(0, 0), lg(-297570329, 1) / lg(-2127979750, 195738))
    assertEquals(lg(0, 0), lg(-1035330427, 5) / lg(-2091513925, 70))
    assertEquals(lg(0, 0), lg(1037142987, 15) / lg(-485498951, 30819))
    assertEquals(lg(0, 0), lg(744551901, 15) / lg(-604684037, 1587))
    assertEquals(lg(67766, 0), lg(1341710951, 232724) / lg(1864827988, 3))
    assertEquals(lg(694, 0), lg(-409318148, 157818) / lg(517165426, 227))
    assertEquals(lg(1, 0), lg(1908192460, 110512) / lg(-61974596, 95795))
    assertEquals(lg(0, 0), lg(946490654, 498) / lg(-1889366637, 1163))
    assertEquals(lg(12, 0), lg(1765257877, 34422) / lg(728455544, 2851))
    assertEquals(lg(0, 0), lg(-1725136864, 84) / lg(1122821677, 14720))
    assertEquals(lg(1, 0), lg(1854803780, 2) / lg(-302860117, 1))
    assertEquals(lg(131, 0), lg(380756581, 107) / lg(-806772264, 0))
    assertEquals(lg(0, 0), lg(1868292481, 1134) / lg(691774521, 33775))
    assertEquals(lg(0, 0), lg(-1515810361, 98) / lg(2038289788, 198))
    assertEquals(lg(315, 0), lg(-1943767475, 31777) / lg(-1513506636, 100))
    assertEquals(lg(0, 0), lg(1508904915, 18) / lg(1834666309, 976))
    assertEquals(lg(1, 0), lg(1430753947, 3772) / lg(-1853122145, 3615))
    assertEquals(lg(2340149, 0), lg(-1654852151, 1195820) / lg(-2100231332, 0))
    assertEquals(lg(0, 0), lg(1011710080, 18) / lg(-616681449, 57))
    assertEquals(lg(14, 0), lg(-495370429, 356832) / lg(-34555439, 25233))
    assertEquals(lg(131, 0), lg(744211838, 511) / lg(-475809581, 3))
    assertEquals(lg(0, 0), lg(1135128265, 67) / lg(163864249, 972))
    assertEquals(lg(1, 0), lg(954856869, 5120) / lg(1474096435, 3606))
    assertEquals(lg(0, 0), lg(1544045220, 1) / lg(85376495, 2353))
    assertEquals(lg(8, 0), lg(1367437144, 53) / lg(2010850631, 6))
    assertEquals(lg(0, 0), lg(-1398730804, 13) / lg(-2055007528, 52))
    assertEquals(lg(0, 0), lg(1598156017, 13) / lg(-1006929331, 160))
    assertEquals(lg(0, 0), lg(738323529, 41) / lg(-1508093984, 10361))
    assertEquals(lg(0, 0), lg(-1788797806, 31) / lg(588557582, 575930))
    assertEquals(lg(76, 0), lg(-913009845, 1002) / lg(204577043, 13))
    assertEquals(lg(0, 0), lg(1908599465, 6) / lg(1058868127, 3383))
    assertEquals(lg(0, 0), lg(-634312634, 75) / lg(-850292534, 332928))
    assertEquals(lg(0, 0), lg(-1679695022, 148) / lg(-1395453213, 912))
    assertEquals(lg(0, 0), lg(456310936, 71) / lg(487720864, 1590813))
    assertEquals(lg(0, 0), lg(-1724925398, 0) / lg(-273170277, 38))
    assertEquals(lg(0, 0), lg(-6742076, 15) / lg(192793866, 175))
    assertEquals(lg(50, 0), lg(337939061, 2094205) / lg(880147944, 41142))
    assertEquals(lg(0, 0), lg(-998413092, 0) / lg(-1758700885, 29))
    assertEquals(lg(0, 0), lg(1986052307, 3) / lg(-2092246422, 47))
    assertEquals(lg(0, 0), lg(-109615093, 1) / lg(-2066395387, 20016))
    assertEquals(lg(127, 0), lg(-1147373454, 901) / lg(313439710, 7))
    assertEquals(lg(0, 0), lg(-792716629, 66379) / lg(2017337246, 250513))

    // int53 / big
    assertEquals(lg(0, 0), lg(291278707, 13808) / lg(941639833, -14430466))
    assertEquals(lg(0, 0), lg(-857819626, 204588) / lg(-1909684886, -709519130))
    assertEquals(lg(0, 0), lg(-978105991, 7435) / lg(-306472275, 158306339))
    assertEquals(lg(0, 0), lg(75049741, 248171) / lg(-1574105194, 64879257))
    assertEquals(lg(0, 0), lg(136051120, 621) / lg(-1671784392, 102642869))
    assertEquals(lg(0, 0), lg(-448460356, 2858) / lg(71740423, -16715717))
    assertEquals(lg(0, 0), lg(-1266403435, 2) / lg(-1022999838, 25812014))
    assertEquals(lg(0, 0), lg(552733494, 22) / lg(241731505, -33191170))
    assertEquals(lg(0, 0), lg(1366167794, 115591) / lg(191854687, -2136953))
    assertEquals(lg(0, 0), lg(1329114439, 80951) / lg(-51187101, 1471052997))

    // big / int32
    assertEquals(lg(422668131, 6), lg(-1495113094, 168518701) / lg(27633219, 0))
    assertEquals(lg(932715295, 204683), lg(-1211847018, -609137255) / lg(-2976, -1))
    assertEquals(lg(189814434, 0), lg(-457166837, -15040808) / lg(-340331202, -1))
    assertEquals(lg(-1116045071, -1131771), lg(-104570473, -117704108) / lg(104, 0))
    assertEquals(lg(-784306379, 14408), lg(453828098, -10187034) / lg(-707, -1))
    assertEquals(lg(-284027201, 2002401), lg(1911518920, 168201762) / lg(84, 0))
    assertEquals(lg(-862273257, -2), lg(610589058, 36481453) / lg(-30381877, -1))
    assertEquals(lg(-761280647, -71), lg(410700182, 503953004) / lg(-7181145, -1))
    assertEquals(lg(-1212582262, -2538), lg(194917334, -8806907) / lg(3471, 0))
    assertEquals(lg(-1201233065, 4), lg(852311155, 9671380) / lg(2048884, 0))
    assertEquals(lg(1324107666, 0), lg(-1028681544, 4163983) / lg(13506586, 0))
    assertEquals(lg(-354367044, 6361111), lg(-708734088, 12722223) / lg(2, 0))
    assertEquals(lg(-292170842, -76359), lg(1693696214, 18402294) / lg(-241, -1))
    assertEquals(lg(2104544550, -41349584), lg(-1932788158, 206747917) / lg(-5, -1))
    assertEquals(lg(-1928473941, -17816), lg(1427262980, -60732866) / lg(3409, 0))
    assertEquals(lg(-1929237164, -681), lg(-677896940, 2512898) / lg(-3693, -1))
    assertEquals(lg(1550060300, -35), lg(-926729663, -9677195) / lg(279372, 0))
    assertEquals(lg(-1706875941, 0), lg(-405257725, -2271799) / lg(-3770075, -1))
    assertEquals(lg(1540708852, 10909), lg(-1893733008, -6491069) / lg(-595, -1))
    assertEquals(lg(-1563665409, -358), lg(-1343018634, -2584815) / lg(7233, 0))
    assertEquals(lg(278715917, -374389), lg(-1224507547, 122799570) / lg(-328, -1))
    assertEquals(lg(1421525100, 0), lg(-2082712791, -15998594) / lg(-48337828, -1))
    assertEquals(lg(1574832373, -2193811), lg(-2147318181, -32907160) / lg(15, 0))
    assertEquals(lg(-1260116915, -61610), lg(1074158039, 118905936) / lg(-1930, -1))
    assertEquals(lg(130856059, -15612), lg(1270835097, -2201288) / lg(141, 0))
    assertEquals(lg(-110248455, 2347), lg(320077861, -446108079) / lg(-189997, -1))
    assertEquals(lg(-1659387265, 122), lg(1075676628, 54005547) / lg(440453, 0))
    assertEquals(lg(-144903831, 18), lg(-1800001035, 54578889) / lg(2877683, 0))
    assertEquals(lg(-1312994937, -23952), lg(-654120591, 33364168) / lg(-1393, -1))
    assertEquals(lg(-178073210, -1), lg(302695822, -2432394) / lg(58667176, 0))
    assertEquals(lg(1316938460, 142), lg(523451067, -54366538) / lg(-382038, -1))
    assertEquals(lg(-1457978633, 17556853), lg(-78968601, 52670560) / lg(3, 0))
    assertEquals(lg(-1760960552, 505129611), lg(-773046192, -1010259224) / lg(-2, -1))
    assertEquals(lg(1210355204, 2314), lg(1515488136, -21874592) / lg(-9452, -1))
    assertEquals(lg(-1625685934, 862807773), lg(-1043595428, -1725615548) / lg(-2, -1))
    assertEquals(lg(184379181, 4), lg(-1217231978, 1516494005) / lg(375097846, 0))
    assertEquals(lg(1243945230, 0), lg(-1873413508, -236381131) / lg(-816152673, -1))
    assertEquals(lg(-1540093941, -876), lg(265593875, 26513736) / lg(-30289, -1))
    assertEquals(lg(-1304692919, 543912), lg(106204837, -839801203) / lg(-1544, -1))
    assertEquals(lg(-806250591, 23), lg(815576040, -55524975) / lg(-2331779, -1))
    assertEquals(lg(-2106907248, -3), lg(-2053929476, -1795047022) / lg(720742474, 0))
    assertEquals(lg(893100234, -124), lg(1552099699, 65024502) / lg(-525272, -1))
    assertEquals(lg(-1109915706, 1255), lg(-194253417, -12405472) / lg(-9879, -1))
    assertEquals(lg(-1177955013, 0), lg(412309016, 112344162) / lg(154800321, 0))
    assertEquals(lg(-1975688052, -51023804), lg(343591192, -102047607) / lg(2, 0))
    assertEquals(lg(-728332094, -309956), lg(1756765281, 8058834) / lg(-26, -1))
    assertEquals(lg(10173004, 1227), lg(1762668787, -960735493) / lg(-782994, -1))
    assertEquals(lg(1157067129, 5766), lg(1523935530, -109345767) / lg(-18963, -1))
    assertEquals(lg(1226263794, 42306948), lg(-1256703941, 1438436241) / lg(34, 0))
    assertEquals(lg(1502167534, -439314), lg(-444491016, -6150392) / lg(14, 0))

    // big / int53
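    // (The int32 / int53 / big section labels here appear to follow the
    // operand size classes that the Scala.js RuntimeLong division algorithm
    // distinguishes: values fitting in a signed 32-bit int, values fitting in
    // the 53-bit double-safe range, and everything larger. This reading is
    // inferred from the labels themselves, not stated in this file.)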
    assertEquals(lg(88399, 0), lg(-1883357942, 360257606) / lg(1478768728, 4075))
    assertEquals(lg(-45459, -1), lg(-1991900757, -48856999) / lg(-1087694619, 1074))
    assertEquals(lg(4395497, 0), lg(518426119, 218946975) / lg(-808940852, 49))
    assertEquals(lg(3198134, 0), lg(-946567777, 600381050) / lg(-1165957306, 187))
    assertEquals(lg(470, 0), lg(257885254, 845979705) / lg(792779187, 1798424))
    assertEquals(lg(92, 0), lg(1278680372, 6485140) / lg(1376461023, 70263))
    assertEquals(lg(167728, 0), lg(1445602310, 420550818) / lg(1397186900, 2507))
    assertEquals(lg(25700177, 0), lg(1822058703, 522114268) / lg(1355449555, 20))
    assertEquals(lg(-35822646, -1), lg(532749659, -130990067) / lg(-1474774415, 3))
    assertEquals(lg(-348, -1), lg(1329707986, -2121642) / lg(-63366094, 6086))
    assertEquals(lg(-2179, -1), lg(1028585430, -118524228) / lg(1655878874, 54392))
    assertEquals(lg(1187, 0), lg(203502475, 42252914) / lg(36519512, 35581))
    assertEquals(lg(3223, 0), lg(341088508, 35053507) / lg(917391400, 10874))
    assertEquals(lg(23608500, 0), lg(1454135412, 69933847) / lg(-162213744, 2))
    assertEquals(lg(7286803, 0), lg(1674604578, 10565585) / lg(1932570831, 1))
    assertEquals(lg(-137450, -1), lg(-1910257093, -16610962) / lg(-640594227, 120))
    assertEquals(lg(114592, 0), lg(1080864951, 17606069) / lg(-1542196664, 153))
    assertEquals(lg(61, 0), lg(-1419644278, 13937517) / lg(-919779905, 227700))
    assertEquals(lg(-247360, -1), lg(-1958380469, -855713410) / lg(1631833189, 3459))
    assertEquals(lg(-61725, -1), lg(1951473618, -4122677) / lg(-899615165, 66))
    assertEquals(lg(2226, 0), lg(1521276132, 182952467) / lg(346742782, 82171))
    assertEquals(lg(-997, -1), lg(-1003647481, -7808320) / lg(-228453385, 7826))
    assertEquals(lg(36, 0), lg(-875689390, 4467236) / lg(-590010750, 120938))
    assertEquals(lg(56005, 0), lg(1189085620, 611543209) / lg(1619962756, 10919))
    assertEquals(lg(-90057, -1), lg(-1072173311, -18503031) / lg(1971480267, 205))
    assertEquals(lg(-9, -1), lg(767303802, -3407362) / lg(-339044225, 352939))
    assertEquals(lg(62240, 0), lg(427996893, 482974074) / lg(-736462105, 7759))
    assertEquals(lg(-1774, -1), lg(842450255, -4396651) / lg(859272322, 2477))
    assertEquals(lg(-153400, -1), lg(1640433988, -2618618) / lg(302672196, 17))
    assertEquals(lg(2145, 0), lg(-361322518, 63967358) / lg(-1922353888, 29810))
    assertEquals(lg(106042, 0), lg(-1774479550, 43276853) / lg(472456506, 408))
    assertEquals(lg(-381407, -1), lg(-1756338345, -38928780) / lg(283612141, 102))
    assertEquals(lg(1217514, 0), lg(-495049835, 37161263) / lg(-2052025512, 30))
    assertEquals(lg(-17, -1), lg(1606509747, -10876159) / lg(1068727249, 635715))
    assertEquals(lg(4880327, 0), lg(-1857686692, 1918485655) / lg(454913535, 393))
    assertEquals(lg(-1023070, -1), lg(-502107392, -511268482) / lg(-1118977400, 499))
    assertEquals(lg(439, 0), lg(-909192131, 45216813) / lg(1442986382, 102923))
    assertEquals(lg(2171202, 0), lg(259184089, 14858724) / lg(-671961291, 6))
    assertEquals(lg(-5332527, -1), lg(1737846340, -614952982) / lg(1379175047, 115))
    assertEquals(lg(-435180, -1), lg(-406629212, -528407898) / lg(973577032, 1214))
    assertEquals(lg(27837, 0), lg(-597461306, 538945619) / lg(-1867966522, 19360))
    assertEquals(lg(-396, -1), lg(-1906945200, -371170760) / lg(151858506, 936902))
    assertEquals(lg(-115583279, -1), lg(-1366510, -207691415) / lg(-872314548, 1))
    assertEquals(lg(-6783543, -1), lg(-1280665444, -104856505) / lg(1964875665, 15))
    assertEquals(lg(-1464006069, -1), lg(897601097, -1352132581) / lg(-328204224, 0))
    assertEquals(lg(11599107, 0), lg(-496529216, 32992512) / lg(-668292521, 2))
    assertEquals(lg(842, 0), lg(1819966537, 311969505) / lg(-879441284, 370147))
    assertEquals(lg(43514, 0), lg(433235702, 408255734) / lg(573404298, 9382))
    assertEquals(lg(-230, -1), lg(1693350453, -4127304) / lg(-1671879801, 17931))
    assertEquals(lg(249094, 0), lg(-492682302, 64433722) / lg(-1408841594, 258))

    // big / big
    assertEquals(lg(-10, -1), lg(1450795502, -706709103) / lg(742056886, 64843937))
    assertEquals(lg(0, 0), lg(-392893244, 72026637) / lg(1419676270, 875736789))
    assertEquals(lg(-2, -1), lg(-1861146463, 8382761) / lg(-724412724, -3000735))
    assertEquals(lg(0, 0), lg(1373482238, 23344691) / lg(1835527248, -294342355))
    assertEquals(lg(-37, -1), lg(1956796392, 107480459) / lg(-560958184, -2839471))
    assertEquals(lg(3, 0), lg(422228275, 30436377) / lg(-2023395425, 8226201))
    assertEquals(lg(-3, -1), lg(1747624836, -215352612) / lg(-1349940168, 58723974))
    assertEquals(lg(2, 0), lg(-583006891, 16111063) / lg(1853686630, 5479773))
    assertEquals(lg(0, 0), lg(1498104050, 7322401) / lg(-407388940, 2141575618))
    assertEquals(lg(5, 0), lg(1943726712, 869895175) / lg(-627430826, 169278540))
    assertEquals(lg(0, 0), lg(1872895982, 98966340) / lg(1347573135, 529034148))
    assertEquals(lg(-2, -1), lg(16010610, 187913494) / lg(-848952152, -81951424))
    assertEquals(lg(0, 0), lg(830929771, -4393252) / lg(1829525088, 52659897))
    assertEquals(lg(22, 0), lg(-2093526384, 133319293) / lg(-464927151, 6049576))
    assertEquals(lg(0, 0), lg(1056318793, 13467735) / lg(1970348162, -672507521))
    assertEquals(lg(0, 0), lg(-28853693, -169722715) / lg(-83877421, 770900857))
    assertEquals(lg(-27, -1), lg(1743854071, -302158995) / lg(80117835, 11113120))
    assertEquals(lg(-6, -1), lg(635796581, -146765250) / lg(441664676, 23716738))
    assertEquals(lg(0, 0), lg(-1048312948, -37662905) / lg(1319664078, 208772026))
    assertEquals(lg(0, 0), lg(-784292680, -14102823) / lg(2037268040, 744987722))
    assertEquals(lg(176, 0), lg(-1116104092, -2073525743) / lg(1766685765, -11731135))
    assertEquals(lg(0, 0), lg(-1991687284, 19448294) / lg(-1731357606, -202272807))
    assertEquals(lg(6, 0), lg(-2042068328, -52956481) / lg(370482897, -7759903))
    assertEquals(lg(1, 0), lg(334395247, 1906338595) / lg(342095090, 1248830168))
    assertEquals(lg(0, 0), lg(-309616588, 44123460) / lg(2040055580, -476494291))
    assertEquals(lg(0, 0), lg(137178123, 36336421) / lg(-360221107, -515689970))
    assertEquals(lg(0, 0), lg(-422856762, -16760844) / lg(-334268074, -43984484))
    assertEquals(lg(0, 0), lg(-24820293, 25823996) / lg(390711705, 288223876))
    assertEquals(lg(0, 0), lg(1170265006, 2998984) / lg(-134995170, -2123267074))
    assertEquals(lg(0, 0), lg(-1501380980, -6088910) / lg(-1175861016, -56027408))
    assertEquals(lg(-56, -1), lg(307880183, 196786483) / lg(-1107761890, -3480429))
    assertEquals(lg(0, 0), lg(-588606997, -37732967) / lg(-1124435958, -77404915))
    assertEquals(lg(108, 0), lg(90560661, 990295925) / lg(731139348, 9165999))
    assertEquals(lg(0, 0), lg(46312609, -28251908) / lg(1279863155, -519028300))
    assertEquals(lg(0, 0), lg(1123427761, 55212863) / lg(-1081219733, 233090714))
    assertEquals(lg(0, 0), lg(1447869812, -3646400) / lg(-1237950546, -27122943))
    assertEquals(lg(-13, -1), lg(-1399920635, 110072031) / lg(-398678056, -8069387))
    assertEquals(lg(0, 0), lg(513704441, 14319377) / lg(-796719013, 260081997))
    assertEquals(lg(8, 0), lg(166886349, -190148673) / lg(68245235, -21656365))
    assertEquals(lg(0, 0), lg(-1594024534, -144937584) / lg(177399758, 200473672))
    assertEquals(lg(-1, -1), lg(447753993, -23591908) / lg(1399162166, 12505918))
    assertEquals(lg(0, 0), lg(1500283330, 5361180) / lg(348398676, 156400271))
    assertEquals(lg(-1, -1), lg(-216115001, 670826068) / lg(1759253954, -470062110))
    assertEquals(lg(0, 0), lg(-1251659767, 18831569) / lg(-669341445, -34474821))
    assertEquals(lg(31, 0), lg(817032953, 218701872) / lg(-176557210, 6899121))
    assertEquals(lg(-19, -1), lg(1365998269, 613319842) / lg(319204438, -30758748))
    assertEquals(lg(0, 0), lg(-428500325, 6610536) / lg(-46648893, -105360271))
    assertEquals(lg(0, 0), lg(784528299, -6958267) / lg(1370662827, -774132635))
    assertEquals(lg(-2, -1), lg(-769114167, 137614183) / lg(-929091402, -67103082))
    assertEquals(lg(8, 0), lg(1810734914, 124115952) / lg(1149563530, 15197570))
  }

  @Test def modulo_%(): Unit = {
    expectThrows(classOf[ArithmeticException], lg(0) % lg(0))
    expectThrows(classOf[ArithmeticException], lg(5, 0) % lg(0))
    expectThrows(classOf[ArithmeticException], lg(0, 5) % lg(0))
    expectThrows(classOf[ArithmeticException], lg(-1) % lg(0))
    expectThrows(classOf[ArithmeticException], lg(-1, 0) % lg(0))

    assertEquals(lg(0), IntMinVal % lg(-1))
    assertEquals(lg(0), IntMinVal % IntMaxValPlus1)
    assertEquals(lg(0), IntMaxValPlus1 % lg(-1))
    assertEquals(lg(0), IntMaxValPlus1 % IntMinVal)
    assertEquals(lg(0), MaxVal % lg(-1))
    assertEquals(lg(0), MinVal % lg(1))
    assertEquals(lg(0), MinVal % lg(-1))
    assertEquals(lg(-1, 2147483647), MaxVal % MinVal)
    assertEquals(lg(0), MaxVal % MaxVal)
    assertEquals(lg(0), MinVal % MinVal)
    assertEquals(lg(-1), MinVal % MaxVal)

    // Note: as on the JVM, the result of % takes the sign of the dividend,
    // e.g. -49125 % 98 == -27 in the second case below.
    // int32 % int32
    assertEquals(lg(880, 0), lg(880, 0) % lg(-219594, -1))
    assertEquals(lg(-27, -1), lg(-49125, -1) % lg(98, 0))
    assertEquals(lg(-1194, -1), lg(-1922504, -1) % lg(4195, 0))
    assertEquals(lg(3, 0), lg(3, 0) % lg(7963, 0))
    assertEquals(lg(-626, -1), lg(-626, -1) % lg(-484628621, -1))
    assertEquals(lg(11315, 0), lg(11315, 0) % lg(-3914076, -1))
    assertEquals(lg(26241, 0), lg(15712341, 0) % lg(-1045740, -1))
    assertEquals(lg(-507, -1), lg(-855439, -1) % lg(5213, 0))
    assertEquals(lg(-259, -1), lg(-101026259, -1) % lg(-500, -1))
    assertEquals(lg(27720977, 0), lg(27720977, 0) % lg(-42317657, -1))
    assertEquals(lg(1, 0), lg(25954, 0) % lg(-3, -1))
    assertEquals(lg(6724180, 0), lg(338447650, 0) % lg(-8505730, -1))
    assertEquals(lg(10488, 0), lg(23967, 0) % lg(-13479, -1))
    assertEquals(lg(1, 0), lg(885202, 0) % lg(-3, -1))
    assertEquals(lg(0, 0), lg(692795590, 0) % lg(-10, -1))
    assertEquals(lg(-1, -1), lg(-1, -1) % lg(156, 0))
    assertEquals(lg(388, 0), lg(388, 0) % lg(189523294, 0))
    assertEquals(lg(352, 0), lg(352, 0) % lg(-3257, -1))
    assertEquals(lg(-9, -1), lg(-9, -1) % lg(14653, 0))
    assertEquals(lg(-1, -1), lg(-258745, -1) % lg(8, 0))
    assertEquals(lg(-21023, -1), lg(-206976653, -1) % lg(34321, 0))
    assertEquals(lg(-1, -1), lg(-1, -1) % lg(-971, -1))
    assertEquals(lg(59, 0), lg(59, 0) % lg(388, 0))
    assertEquals(lg(0, 0), lg(-7, -1) % lg(1, 0))
    assertEquals(lg(12, 0), lg(77, 0) % lg(13, 0))
    assertEquals(lg(224246, 0), lg(224246, 0) % lg(719055, 0))
    assertEquals(lg(-61296, -1), lg(-61296, -1) % lg(-135723660, -1))
    assertEquals(lg(549465, 0), lg(6897809, 0) % lg(793543, 0))
    assertEquals(lg(45, 0), lg(45, 0) % lg(984210147, 0))
    assertEquals(lg(0, 0), lg(-64, -1) % lg(1, 0))
    assertEquals(lg(2, 0), lg(379611734, 0) % lg(4, 0))
    assertEquals(lg(0, 0), lg(0, 0) % lg(-263, -1))
    assertEquals(lg(29, 0), lg(29, 0) % lg(-117, -1))
    assertEquals(lg(24, 0), lg(245094, 0) % lg(-70, -1))
    assertEquals(lg(0, 0), lg(0, 0) % lg(5, 0))
    assertEquals(lg(2, 0), lg(2, 0) % lg(47787927, 0))
    assertEquals(lg(-124, -1), lg(-124, -1) % lg(-22714040, -1))
    assertEquals(lg(412, 0), lg(412, 0) % lg(-17176, -1))
    assertEquals(lg(-11860, -1), lg(-11860, -1) % lg(9506787, 0))
    assertEquals(lg(-31, -1), lg(-31, -1) % lg(-1544676, -1))
    assertEquals(lg(-3, -1), lg(-1990315281, -1) % lg(-7, -1))
    assertEquals(lg(99, 0), lg(99, 0) % lg(-277, -1))
    assertEquals(lg(-86, -1), lg(-29227, -1) % lg(-161, -1))
    assertEquals(lg(106, 0), lg(106, 0) % lg(-47032956, -1))
    assertEquals(lg(18, 0), lg(18, 0) % lg(510836179, 0))
    assertEquals(lg(2, 0), lg(3543112, 0) % lg(10, 0))
    assertEquals(lg(534271, 0), lg(3547603, 0) % lg(-1506666, -1))
    assertEquals(lg(-16361, -1), lg(-16361, -1) % lg(10637613, 0))
    assertEquals(lg(8, 0), lg(606879016, 0) % lg(-16, -1))
    assertEquals(lg(-1, -1), lg(-1, -1) % lg(46424570, 0))

    // int32 % int53
    assertEquals(lg(-3, -1), lg(-3, -1) % lg(206801065, 1))
    assertEquals(lg(-57756, -1), lg(-57756, -1) % lg(-1211050362, 13))
    assertEquals(lg(0, 0), lg(0, 0) % lg(-475702596, 10040))
    assertEquals(lg(423524, 0), lg(423524, 0) % lg(-2084961556, 16))
    assertEquals(lg(38317, 0), lg(38317, 0) % lg(-1699004544, 24))
    assertEquals(lg(60291, 0), lg(60291, 0) % lg(-458289291, 56))
    assertEquals(lg(1, 0), lg(1, 0) % lg(-1247681936, 1229953))
    assertEquals(lg(296788, 0), lg(296788, 0) % lg(183245860, 52))
    assertEquals(lg(-2005515, -1), lg(-2005515, -1) % lg(331735459, 17))
    assertEquals(lg(-179812, -1), lg(-179812, -1) % lg(-853047550, 5154))
    assertEquals(lg(-3678, -1), lg(-3678, -1) % lg(1751271067, 243605))
    assertEquals(lg(-93867, -1), lg(-93867, -1) % lg(-1925367590, 42))
    assertEquals(lg(7600917, 0), lg(7600917, 0) % lg(-1807424604, 95574))
    assertEquals(lg(300012, 0), lg(300012, 0) % lg(1951216728, 101))
    assertEquals(lg(-6347, -1), lg(-6347, -1) % lg(-438713154, 23))
    assertEquals(lg(-41, -1), lg(-41, -1) % lg(-1211982116, 459))
    assertEquals(lg(3425, 0), lg(3425, 0) % lg(-1580976156, 2))
    assertEquals(lg(-25, -1), lg(-25, -1) % lg(200240265, 25993))
    assertEquals(lg(-8303, -1), lg(-8303, -1) % lg(1353761386, 1921))
    assertEquals(lg(274032571, 0), lg(274032571, 0) % lg(1455543028, 255))
    assertEquals(lg(-3, -1), lg(-3, -1) % lg(1143775281, 729))
    assertEquals(lg(-1124428, -1), lg(-1124428, -1) % lg(-521284400, 339))
    assertEquals(lg(-2, -1), lg(-2, -1) % lg(-303859962, 2524))
    assertEquals(lg(1, 0), lg(1, 0) % lg(-402000545, 1))
    assertEquals(lg(107013504, 0), lg(107013504, 0) % lg(157604607, 3))
    assertEquals(lg(4976822, 0), lg(4976822, 0) % lg(-2046021074, 2230))
    assertEquals(lg(-1, -1), lg(-1, -1) % lg(-306200858, 41))
    assertEquals(lg(80396, 0), lg(80396, 0) % lg(-409002766, 13))
    assertEquals(lg(937638, 0), lg(937638, 0) % lg(-697219650, 26))
    assertEquals(lg(756, 0), lg(756, 0) % lg(-948806692, 1700920))
    assertEquals(lg(5, 0), lg(5, 0) % lg(646021801, 21350))
    assertEquals(lg(262831839, 0), lg(262831839, 0) % lg(1086270794, 10633))
    assertEquals(lg(-2146273993, -1), lg(-2146273993, -1) % lg(-1539129401, 0))
    assertEquals(lg(59799, 0), lg(59799, 0) % lg(1910837623, 102082))
    assertEquals(lg(-5347, -1), lg(-5347, -1) % lg(1965292799, 18))
    assertEquals(lg(926, 0), lg(926, 0) % lg(1939309159, 104206))
    assertEquals(lg(1, 0), lg(1, 0) % lg(1651864405, 1233))
    assertEquals(lg(334, 0), lg(334, 0) % lg(581635234, 20))
    assertEquals(lg(-61747, -1), lg(-61747, -1) % lg(-842193425, 1497))
    assertEquals(lg(-1, -1), lg(-1, -1) % lg(758739794, 79508))
    assertEquals(lg(59605313, 0), lg(59605313, 0) % lg(-1162319751, 0))
    assertEquals(lg(12267518, 0), lg(12267518, 0) % lg(1340161110, 568352))
    assertEquals(lg(19230695, 0), lg(19230695, 0) % lg(1844291137, 21))
    assertEquals(lg(3950296, 0), lg(3950296, 0) % lg(-848670202, 243))
    assertEquals(lg(503276, 0), lg(503276, 0) % lg(-1756374670, 1))
    assertEquals(lg(30880536, 0), lg(30880536, 0) % lg(-1380766565, 51064))
    assertEquals(lg(5659804, 0), lg(5659804, 0) % lg(-725339057, 1))
    assertEquals(lg(11882277, 0), lg(11882277, 0) % lg(243727355, 7))
    assertEquals(lg(371783010, 0), lg(371783010, 0) % lg(630143580, 14001))
    assertEquals(lg(840, 0), lg(840, 0) % lg(-1719362098, 109))

    // int32 % big
    assertEquals(lg(-267334310, -1), lg(-267334310, -1) % lg(1537718115, -134598983))
    assertEquals(lg(57, 0), lg(57, 0) % lg(-1668867109, -10100325))
    assertEquals(lg(30332, 0), lg(30332, 0) % lg(-615310153, -90004876))
    assertEquals(lg(187, 0), lg(187, 0) % lg(-590535223, 8244144))
    assertEquals(lg(-2, -1), lg(-2, -1) % lg(2125719729, 390762530))
    assertEquals(lg(-4252915, -1), lg(-4252915, -1) % lg(2070489053, 23484863))
    assertEquals(lg(-2, -1), lg(-2, -1) % lg(37507428, 96913792))
    assertEquals(lg(10, 0), lg(10, 0) % lg(-533680689, -79923599))
    assertEquals(lg(-14, -1), lg(-14, -1) % lg(-930313329, 2972085))
    assertEquals(lg(-20155233, -1), lg(-20155233, -1) % lg(-49989774, -25498857))
    assertEquals(lg(-406, -1), lg(-406, -1) % lg(2109762544, 126098611))
    assertEquals(lg(43, 0), lg(43, 0) % lg(598811771, 154269509))
    assertEquals(lg(-4830, -1), lg(-4830, -1) % lg(-1043650540, -2874494))
    assertEquals(lg(-4271, -1), lg(-4271, -1) % lg(-950378080, -106126516))
    assertEquals(lg(126, 0), lg(126, 0) % lg(-877412093, -90804729))
    assertEquals(lg(40445345, 0), lg(40445345, 0) % lg(-1461218790, 6749169))
    assertEquals(lg(-1, -1), lg(-1, -1) % lg(1776909778, 28425796))
    assertEquals(lg(-2123811, -1), lg(-2123811, -1) % lg(-51805125, 44153129))
    assertEquals(lg(-25650126, -1), lg(-25650126, -1) % lg(-1317209725, -16141386))
    assertEquals(lg(30, 0), lg(30, 0) % lg(712479950, 158765535))
    assertEquals(lg(2494211, 0), lg(2494211, 0) % lg(-432472367, 21859989))
    assertEquals(lg(100937174, 0), lg(100937174, 0) % lg(212873269, -74778594))
    assertEquals(lg(901687, 0), lg(901687, 0) % lg(-1225225931, -512562107))
    assertEquals(lg(-422854, -1), lg(-422854, -1) % lg(-1361503923, -98826041))
    assertEquals(lg(2, 0), lg(2, 0) % lg(386622050, -9945722))
    assertEquals(lg(-465211, -1), lg(-465211, -1) % lg(-418132599, -160175963))
    assertEquals(lg(63, 0), lg(63, 0) % lg(-1330189832, 180061391))
    assertEquals(lg(47, 0), lg(47, 0) % lg(1439978282, -16520554))
    assertEquals(lg(233450563, 0), lg(233450563, 0) % lg(-328511972, 377539644))
    assertEquals(lg(-134912, -1), lg(-134912, -1) % lg(1349244684, -12612862))
    assertEquals(lg(-95441, -1), lg(-95441, -1) % lg(511120357, 16112596))
    assertEquals(lg(-1160726496, -1), lg(-1160726496, -1) % lg(-913371934, -9441145))
    assertEquals(lg(-502, -1), lg(-502, -1) % lg(-1021329523, -377728463))
    assertEquals(lg(3313324, 0), lg(3313324, 0) % lg(-67454848, 442297818))
    assertEquals(lg(-145, -1), lg(-145, -1) % lg(-1010112762, 29724438))
    assertEquals(lg(-19091, -1), lg(-19091, -1) % lg(-1944488998, -173788926))
    assertEquals(lg(-3331910, -1), lg(-3331910, -1) % lg(2144172121, 73505274))
    assertEquals(lg(56622, 0), lg(56622, 0) % lg(-1451372835, 5219178))
    assertEquals(lg(0, 0), lg(0, 0) % lg(556032035, 32471322))
    assertEquals(lg(800, 0), lg(800, 0) % lg(-1649243607, 2299368))
    assertEquals(lg(86949, 0), lg(86949, 0) % lg(794150820, -1384562176))
    assertEquals(lg(10, 0), lg(10, 0) % lg(-790693444, 1000869239))
    assertEquals(lg(-333236, -1), lg(-333236, -1) % lg(-1020207444, 125043716))
    assertEquals(lg(-598, -1), lg(-598, -1) % lg(-93061561, -329975227))
    assertEquals(lg(-19, -1), lg(-19, -1) % lg(-1096862531, 163621631))
    assertEquals(lg(465328283, 0), lg(465328283, 0) % lg(-21925149, -52057346))
    assertEquals(lg(-25837, -1), lg(-25837, -1) % lg(677002620, 8643698))
    assertEquals(lg(-383633650, -1), lg(-383633650, -1) % lg(1609519787, 8262009))
    assertEquals(lg(-66, -1), lg(-66, -1) % lg(1917139359, 239618524))
    assertEquals(lg(1676620, 0), lg(1676620, 0) % lg(910745834, 82765572))

    // int53 % int32
    assertEquals(lg(15827410, 0), lg(1244623439, 3) % lg(-231372097, -1))
    assertEquals(lg(15118, 0), lg(-1392787378, 124) % lg(-20252, -1))
    assertEquals(lg(11, 0), lg(578165055, 72) % lg(13, 0))
    assertEquals(lg(42298679, 0), lg(-1836745385, 3) % lg(-95630157, -1))
    assertEquals(lg(17447610, 0), lg(-1766124150, 29) % lg(-45315780, -1))
    assertEquals(lg(0, 0), lg(540281958, 253606) % lg(-11, -1))
    assertEquals(lg(51980, 0), lg(-442404110, 7696) % lg(1489246, 0))
    assertEquals(lg(2, 0), lg(-631827526, 1455) % lg(8, 0))
    assertEquals(lg(5125741, 0), lg(1266390909, 49) % lg(-34627848, -1))
    assertEquals(lg(77691, 0), lg(-453014259, 21413) % lg(149449, 0))
    assertEquals(lg(521867604, 0), lg(1573062436, 653) % lg(671211684, 0))
    assertEquals(lg(14579368, 0), lg(-21113520, 0) % lg(177469767, 0))
    assertEquals(lg(0, 0), lg(-262825676, 31) % lg(1, 0))
    assertEquals(lg(24027362, 0), lg(-163968426, 1) % lg(33341027, 0))
    assertEquals(lg(6792805, 0), lg(668741217, 14380) % lg(-11334498, -1))
    assertEquals(lg(9, 0), lg(808041281, 1818) % lg(-10, -1))
    assertEquals(lg(204, 0), lg(-1601247507, 25) % lg(-235, -1))
    assertEquals(lg(61089, 0), lg(-1577206289, 0) % lg(1618642, 0))
    assertEquals(lg(289305533, 0), lg(863396135, 503) % lg(-321808286, -1))
    assertEquals(lg(7272892, 0), lg(-900149281, 55) % lg(15166197, 0))
    assertEquals(lg(3, 0), lg(1802954050, 3593) % lg(7, 0))
    assertEquals(lg(12036, 0), lg(800669146, 41901) % lg(-20591, -1))
    assertEquals(lg(29, 0), lg(-1055636867, 39) % lg(48, 0))
    assertEquals(lg(0, 0), lg(-491067123, 14) % lg(1, 0))
    assertEquals(lg(260441364, 0), lg(1420289126, 67) % lg(1010219079, 0))
    assertEquals(lg(3936541, 0), lg(1338756461, 32) % lg(-4427443, -1))
    assertEquals(lg(183313645, 0), lg(-820843233, 778) % lg(-273780418, -1))
    assertEquals(lg(91783, 0), lg(-1033566360, 561225) % lg(-156677, -1))
    assertEquals(lg(5, 0), lg(-1567070603, 38) % lg(-8, -1))
    assertEquals(lg(11214823, 0), lg(-1649343541, 185302) % lg(-19368267, -1))
    assertEquals(lg(75719, 0), lg(-591434325, 76351) % lg(94212, 0))
    assertEquals(lg(10941, 0), lg(235794528, 55) % lg(17599, 0))
    assertEquals(lg(5331, 0), lg(-763589741, 116) % lg(-14942, -1))
    assertEquals(lg(1, 0), lg(-1283158225, 237055) % lg(-2, -1))
    assertEquals(lg(24400, 0), lg(1537105400, 29108) % lg(-37848, -1))
    assertEquals(lg(95, 0), lg(-56778611, 994650) % lg(-170, -1))
    assertEquals(lg(9836, 0), lg(-2057746932, 7) % lg(-10100, -1))
    assertEquals(lg(30255783, 0), lg(1365793356, 12) % lg(-38454651, -1))
    assertEquals(lg(417, 0), lg(-2128793438, 4) % lg(6825, 0))
    assertEquals(lg(0, 0), lg(1667515072, 8) % lg(2, 0))
    assertEquals(lg(257, 0), lg(420324337, 980) % lg(-845, -1))
    assertEquals(lg(82991, 0), lg(-771084081, 8204) % lg(105392, 0))
    assertEquals(lg(691256, 0), lg(-332377894, 1) % lg(882238, 0))
    assertEquals(lg(0, 0), lg(1749263284, 11) % lg(-20, -1))
    assertEquals(lg(4, 0), lg(347303218, 1234317) % lg(-13, -1))
    assertEquals(lg(150, 0), lg(1199079324, 17271) % lg(11033, 0))
    assertEquals(lg(14, 0), lg(1196217208, 13) % lg(-23, -1))
    assertEquals(lg(256216433, 0), lg(-1078128939, 0) % lg(740155481, 0))
    assertEquals(lg(45583, 0), lg(-1354463473, 3691) % lg(-63588, -1))
    assertEquals(lg(459, 0), lg(-1255896801, 1469630) % lg(-502, -1))
    // int53 % int53
    assertEquals(lg(1805177178, 1), lg(1805177178, 1) % lg(-1293833696, 410))
    assertEquals(lg(-583440651, 2), lg(647007072, 1811985) % lg(1091239449, 3))
    assertEquals(lg(1346307032, 1), lg(1346307032, 1) % lg(-672335266, 33))
    assertEquals(lg(858355422, 81), lg(858355422, 81) % lg(1490435172, 162402))
    assertEquals(lg(744276027, 1), lg(-1299053281, 6330) % lg(1042770708, 1))
    assertEquals(lg(29273105, 0), lg(-88774269, 25) % lg(775537355, 1))
    assertEquals(lg(383200445, 2), lg(-962613261, 4309) % lg(-529185362, 5))
    assertEquals(lg(-171009725, 445), lg(-171009725, 445) % lg(-1167557775, 307982))
    assertEquals(lg(8166883, 15498), lg(1848497503, 78519) % lg(1533824479, 15755))
    assertEquals(lg(-1752533311, 17), lg(-1752533311, 17) % lg(1904799096, 73566))
    assertEquals(lg(-1641266817, 46), lg(-1641266817, 46) % lg(-31936789, 751199))
    assertEquals(lg(-350685679, 656), lg(-637954451, 32352) % lg(-10259599, 1131))
    assertEquals(lg(-1671876486, 0), lg(-1657673170, 122149) % lg(-534342412, 0))
    assertEquals(lg(-660565679, 235), lg(-660565679, 235) % lg(-897090894, 14655))
    assertEquals(lg(-1798560222, 612), lg(-1798560222, 612) % lg(-236039758, 2924))
    assertEquals(lg(-28767936, 5704), lg(1010899296, 62798) % lg(-1974205776, 9515))
    assertEquals(lg(-2004786867, 4), lg(1206965517, 91420) % lg(880030876, 7))
    assertEquals(lg(712148070, 3), lg(712148070, 3) % lg(472319826, 2838))
    assertEquals(lg(-1275175525, 44), lg(-1275175525, 44) % lg(162799342, 861329))
    assertEquals(lg(1187224322, 14), lg(-516916094, 191396) % lg(-1920802608, 30))
    assertEquals(lg(-1461747946, 0), lg(-1627551726, 4499) % lg(1200735793, 1))
    assertEquals(lg(453535447, 39039), lg(453535447, 39039) % lg(520791957, 141909))
    assertEquals(lg(216221627, 20), lg(216221627, 20) % lg(-781572865, 8131))
    assertEquals(lg(1611884803, 23), lg(-1999221053, 528) % lg(1107934896, 25))
    assertEquals(lg(1722095012, 0), lg(-701225584, 44) % lg(-1403297482, 0))
    assertEquals(lg(-232837834, 5049), lg(-232837834, 5049) % lg(1000581509, 15836))
    assertEquals(lg(-82376749, 239), lg(-82376749, 239) % lg(-163409376, 7688))
    assertEquals(lg(2063025646, 2), lg(941363778, 110) % lg(336092572, 3))
    assertEquals(lg(721574845, 383), lg(1004884706, 1133) % lg(283309861, 750))
    assertEquals(lg(-2004547354, 47), lg(1436404594, 1595) % lg(1522987410, 70))
    assertEquals(lg(1696970595, 8), lg(1696970595, 8) % lg(-1168832286, 4163))
    assertEquals(lg(-2033329312, 6), lg(-1244970780, 32) % lg(394179266, 13))
    assertEquals(lg(1864629418, 1), lg(1864629418, 1) % lg(528888491, 970677))
    assertEquals(lg(1596298266, 43057), lg(-1763600443, 962032) % lg(1535552275, 102108))
    assertEquals(lg(1181714932, 5), lg(1181714932, 5) % lg(1296434411, 26359))
    assertEquals(lg(-2140209952, 7), lg(1535735456, 276446) % lg(-1930593680, 7))
    assertEquals(lg(-1703068243, 11), lg(2079501385, 97596) % lg(-1803771626, 21))
    assertEquals(lg(-1025858772, 33402), lg(286993796, 174379) % lg(656426284, 70488))
    assertEquals(lg(-578045904, 11724), lg(221015334, 1635766) % lg(-2014306775, 270673))
    assertEquals(lg(-2080784768, 56), lg(-2103734262, 977) % lg(-22949494, 920))
    assertEquals(lg(-922083739, 29), lg(-922083739, 29) % lg(2040148267, 19160))
    assertEquals(lg(-1728890579, 468), lg(-559850131, 11989) % lg(1366001936, 2880))
    assertEquals(lg(1341547600, 13), lg(-1071198220, 2182) % lg(1526886260, 17))
    assertEquals(lg(-896451936, 45), lg(-896451936, 45) % lg(2132477227, 164356))
    assertEquals(lg(-1538011120, 53), lg(-561327714, 1420) % lg(-368698210, 151))
    assertEquals(lg(1880884956, 621), lg(2112956103, 118429) % lg(-374507565, 859))
    assertEquals(lg(902909663, 0), lg(380445410, 8) % lg(-1822479769, 1))
    assertEquals(lg(-652149100, 56), lg(-1867274924, 105813) % lg(175641312, 79))
    assertEquals(lg(-991170416, 37), lg(-991170416, 37) % lg(1740161397, 88122))
    assertEquals(lg(-31602776, 1), lg(-31602776, 1) % lg(-503633567, 241909))

    // int53 % big
    assertEquals(lg(-930109303, 3), lg(-930109303, 3) % lg(1606982787, 925386547))
    assertEquals(lg(-717668907, 16251), lg(-717668907, 16251) % lg(2079100937, 7825426))
    assertEquals(lg(265990345, 3), lg(265990345, 3) % lg(-1140922127, -3108870))
    assertEquals(lg(-1181318422, 1), lg(-1181318422, 1) % lg(1489652251, 75207246))
    assertEquals(lg(380276439, 59), lg(380276439, 59) % lg(-1062351234, -3631372))
    assertEquals(lg(1080382784, 7211), lg(1080382784, 7211) % lg(572850722, -139092025))
    assertEquals(lg(2020323378, 316), lg(2020323378, 316) % lg(1716930349, -16333391))
    assertEquals(lg(1302118364, 5), lg(1302118364, 5) % lg(-442067036, 1941456592))
    assertEquals(lg(-641137972, 602), lg(-641137972, 602) % lg(1134212295, -135713760))
    assertEquals(lg(-761172703, 499), lg(-761172703, 499) % lg(769981236, 12756336))
    assertEquals(lg(1601268090, 610), lg(1601268090, 610) % lg(448513898, -160887452))
    assertEquals(lg(-16483553, 0), lg(-16483553, 0) % lg(-1253549192, -1748027086))
    assertEquals(lg(-1284021361, 241), lg(-1284021361, 241) % lg(13275221, -3818882))
    assertEquals(lg(1499414278, 26), lg(1499414278, 26) % lg(570654893, -17498947))
    assertEquals(lg(-368610421, 5074), lg(-368610421, 5074) % lg(685701351, 31070898))
    assertEquals(lg(1200134796, 70), lg(1200134796, 70) % lg(1230376618, -2490370))
    assertEquals(lg(1537764087, 64483), lg(1537764087, 64483) % lg(-1252591472, 66761881))
    assertEquals(lg(-1981129198, 15), lg(-1981129198, 15) % lg(1937978150, 8201544))
    assertEquals(lg(32422964, 200), lg(32422964, 200) % lg(2051327691, -20319622))
    assertEquals(lg(1404616230, 30), lg(1404616230, 30) % lg(-748420073, -120320053))
    assertEquals(lg(-1860381107, 38), lg(-1860381107, 38) % lg(392948122, 60098039))
    assertEquals(lg(1050519262, 106431), lg(1050519262, 106431) % lg(361773491, -6329760))
    assertEquals(lg(460136491, 1681770), lg(460136491, 1681770) % lg(1399049044, 759923035))
    assertEquals(lg(2065599344, 11089), lg(2065599344, 11089) % lg(-465681057, 3484544))
    assertEquals(lg(1849358428, 418531), lg(1849358428, 418531) % lg(1023666326, 3435570))
    assertEquals(lg(1292603836, 80), lg(1292603836, 80) % lg(-1114872574, 250120091))
    assertEquals(lg(1456627133, 194844), lg(1456627133, 194844) % lg(-1256385160, 59427917))
    assertEquals(lg(-568179858, 160), lg(-568179858, 160) % lg(1142846538, 154324747))
    assertEquals(lg(-2133580755, 203337), lg(-2133580755, 203337) % lg(111334842, 12695612))
    assertEquals(lg(1961218705, 6687), lg(1961218705, 6687) % lg(-245612957, 134017780))
    assertEquals(lg(335350966, 55096), lg(335350966, 55096) % lg(-1815119598, -120983980))
    assertEquals(lg(-767561503, 211), lg(-767561503, 211) % lg(554589640, -7873602))
    assertEquals(lg(1476687067, 3767), lg(1476687067, 3767) % lg(552659809, -753378142))
    assertEquals(lg(-1107393223, 30), lg(-1107393223, 30) % lg(-78383575, -52663801))
    assertEquals(lg(607313614, 2), lg(607313614, 2) % lg(-234099925, 59184919))
    assertEquals(lg(-1542671184, 616882), lg(-1542671184, 616882) % lg(1370026838, -45628731))
    assertEquals(lg(525616384, 1001), lg(525616384, 1001) % lg(1995646126, -11226360))
    assertEquals(lg(2109958916, 21549), lg(2109958916, 21549) % lg(-419960245, -115959896))
    assertEquals(lg(-450913111, 32140), lg(-450913111, 32140) % lg(-99267096, -3640047))
    assertEquals(lg(1515870052, 198), lg(1515870052, 198) % lg(1415757861, -110282301))
    assertEquals(lg(124639649, 865615), lg(124639649, 865615) % lg(-1354782388, 2569606))
    assertEquals(lg(557119825, 7205), lg(557119825, 7205) % lg(683150209, -15864187))
    assertEquals(lg(992846513, 1385110), lg(992846513, 1385110) % lg(1578961851, -8380578))
    assertEquals(lg(1081385155, 4176), lg(1081385155, 4176) % lg(1892231070, 31130825))
    assertEquals(lg(-738492748, 8), lg(-738492748, 8) % lg(-431212066, 687916944))
    assertEquals(lg(-1448153936, 8101), lg(-1448153936, 8101) % lg(-584523654, -4814205))
    assertEquals(lg(-713251055, 243), lg(-713251055, 243) % lg(261411225, 31444708))
    assertEquals(lg(881178812, 47057), lg(881178812, 47057) % lg(823893049, -5940358))
    assertEquals(lg(-506817388, 0), lg(-506817388, 0) % lg(-465610822, 10559551))
    assertEquals(lg(-420315839, 112832), lg(-420315839, 112832) % lg(-686319219, -666166549))

    // big % int32
    assertEquals(lg(-3, -1), lg(-412174169, -319069709) % lg(-6, -1))
    assertEquals(lg(464005, 0), lg(1634601702, 814446468) % lg(825883, 0))
    assertEquals(lg(34559370, 0), lg(-1005992901, 2694218) % lg(108493743, 0))
    assertEquals(lg(-286379, -1), lg(1534700309, -630528658) % lg(-506616, -1))
    assertEquals(lg(-62, -1), lg(-456613426, -23298167) % lg(-206, -1))
    assertEquals(lg(386945695, 0), lg(857770611, 2618490) % lg(1225551197, 0))
    assertEquals(lg(270232, 0), lg(2127943654, 2768088) % lg(-291653, -1))
    assertEquals(lg(277129, 0), lg(1085973072, 3470797) % lg(-29714535, -1))
    assertEquals(lg(15, 0), lg(1536124828, 1268901218) % lg(-121, -1))
    assertEquals(lg(1, 0), lg(371220141, 34588968) % lg(2, 0))
    assertEquals(lg(46669, 0), lg(-1712997009, 187259899) % lg(129274, 0))
    assertEquals(lg(-1508, -1), lg(586579000, -243530833) % lg(-31235, -1))
    assertEquals(lg(0, 0), lg(1745775262, -400161972) % lg(-1, -1))
    assertEquals(lg(-1680, -1), lg(-1564631310, -56487209) % lg(2626, 0))
    assertEquals(lg(53, 0), lg(-1848745069, 11533547) % lg(59, 0))
    assertEquals(lg(-1699972, -1), lg(-1415791920, -26215621) % lg(-2142359, -1))
    assertEquals(lg(-200041, -1), lg(-481609933, -25891343) % lg(483607, 0))
    assertEquals(lg(-13123232, -1), lg(-889674017, -4084771) % lg(428648085, 0))
    assertEquals(lg(0, 0), lg(1587465684, -367383975) % lg(7, 0))
    assertEquals(lg(-4528, -1), lg(811562260, -335104547) % lg(5502, 0))
    assertEquals(lg(-71, -1), lg(2107357891, -10075787) % lg(110, 0))
    assertEquals(lg(0, 0), lg(-1356326655, 5174156) % lg(-1, -1))
    assertEquals(lg(7872112, 0), lg(-1794856776, 3059124) % lg(-29413816, -1))
    assertEquals(lg(-37, -1), lg(-1118254374, -3629384) % lg(-85, -1))
    assertEquals(lg(14227, 0), lg(288539563, 70814306) % lg(-14561, -1))
    assertEquals(lg(-49, -1), lg(-719069745, -128562664) % lg(-256, -1))
    assertEquals(lg(6101, 0), lg(1530955727, 15829469) % lg(195494, 0))
    assertEquals(lg(-6, -1), lg(2144004402, -5408490) % lg(11, 0))
    assertEquals(lg(-137624717, -1), lg(-1766192560, -17443468) % lg(-168087095, -1))
    assertEquals(lg(-3592, -1), lg(-524619138, -371121095) % lg(4765, 0))
    assertEquals(lg(4335, 0), lg(-1960083221, 176122524) % lg(-5564, -1))
    assertEquals(lg(-271754, -1), lg(1528631102, -597885631) % lg(-413908, -1))
    assertEquals(lg(-361112, -1), lg(-1513123614, -30582360) % lg(-496311, -1))
    assertEquals(lg(-4, -1), lg(-1975522255, -46421733) % lg(29, 0))
    assertEquals(lg(414436, 0), lg(-1715879325, 3072313) % lg(438221, 0))
    assertEquals(lg(0, 0), lg(-1321015849, -300384564) % lg(1, 0))
    assertEquals(lg(-454, -1), lg(-1088390706, -277354665) % lg(-1237, -1))
    assertEquals(lg(586891857, 0), lg(-1012773943, 223943652) % lg(707359548, 0))
    assertEquals(lg(2, 0), lg(1097288344, 26740237) % lg(-3, -1))
    assertEquals(lg(-24053960, -1), lg(-1121404205, -87484234) % lg(80229261, 0))
    assertEquals(lg(-79944815, -1), lg(-1503637931, -163703901) % lg(-983334452, -1))
    assertEquals(lg(2600110, 0), lg(2012820970, 445991475) % lg(1035472980, 0))
    assertEquals(lg(74, 0), lg(2015362538, 2985510) % lg(-148, -1))
    assertEquals(lg(0, 0), lg(1764134228, 50881407) % lg(-1, -1))
    assertEquals(lg(106, 0), lg(-523555853, 77167937) % lg(-563, -1))
    assertEquals(lg(0, 0), lg(1531888651, -2389306) % lg(1, 0))
    assertEquals(lg(659, 0), lg(-181277952, 32599207) % lg(-729, -1))
    assertEquals(lg(968, 0), lg(223126732, 88838488) % lg(13378, 0))
    assertEquals(lg(920991, 0), lg(670834629, 46037187) % lg(922370, 0))
    assertEquals(lg(2462152, 0), lg(1098978850, 6541822) % lg(-8405198, -1))

    // big % int53
    assertEquals(lg(1057995305, 4748), lg(2008672965, 41566313) % lg(313991275, 18390))
    assertEquals(lg(-1074209653, 18), lg(1922552561, 28139870) % lg(-2083633557, 19))
    assertEquals(lg(1480601143, -11310), lg(843627074, -173776705) % lg(1451117493, 14364))
    assertEquals(lg(-691687452, -38), lg(204865470, -6692402) % lg(-645190286, 413))
    assertEquals(lg(-1218791457, -31), lg(952830559, -214594684) % lg(-1778162360, 378))
    assertEquals(lg(-281609960, -1292), lg(1673740333, -69274846) % lg(-1549261605, 2390))
    assertEquals(lg(-860426348, 1), lg(-1276804811, 367022678) % lg(-678111623, 11))
    assertEquals(lg(-1244563205, -1264), lg(-1331527548, -33013551) % lg(-1975438267, 2961))
    assertEquals(lg(-935830326, 135167), lg(1067523314, 72606174) % lg(-1716982106, 255179))
    assertEquals(lg(-2025081444, -42140), lg(-937134490, -32649070) % lg(-804857990, 57507))
    assertEquals(lg(85696931, 194), lg(108363299, 1224097478) % lg(1137551776, 281))
    assertEquals(lg(-385517902, -5258), lg(-1965834834, -11053948) % lg(-942300324, 6487))
    assertEquals(lg(-755355475, 2268), lg(-3151939, 171473802) % lg(-2071379940, 3914))
    assertEquals(lg(-676865399, -663), lg(1465781759, -970108425) % lg(-1251607207, 3003))
    assertEquals(lg(2042443783, -22321), lg(919308511, -1689158617) % lg(658566728, 36406))
    assertEquals(lg(-903837593, 31415), lg(-418485001, 1000432592) % lg(-1653953022, 31957))
    assertEquals(lg(496274972, -48207), lg(-880302655, -14116770) % lg(913871933, 118223))
    assertEquals(lg(1210119082, -104892), lg(-525597278, -3790314) % lg(2133284776, 127083))
    assertEquals(lg(473810731, -5), lg(-393124913, -28106221) % lg(958070140, 159))
    assertEquals(lg(-1912903061, 25777), lg(6929245, 2749730) % lg(1462129294, 43237))
    assertEquals(lg(1099532724, -19), lg(708024745, -15568245) % lg(1288198049, 56))
    assertEquals(lg(920504149, 6836), lg(487601139, 13603229) % lg(723875593, 45021))
    assertEquals(lg(1778080723, 29), lg(-2070321133, 115478389) % lg(-1799479616, 75))
    assertEquals(lg(-720480381, 2735), lg(-307180735, 3049800) % lg(1043781053, 3319))
    assertEquals(lg(1473972065, -1), lg(-1073877839, -6538577) % lg(-1408649838, 0))
    assertEquals(lg(-1389255096, -200), lg(-1892822171, -1698321438) % lg(96164237, 514))
    assertEquals(lg(857386403, 29656), lg(-674980011, 2764943) % lg(-445529419, 65125))
    assertEquals(lg(-419043446, -22164), lg(2003347800, -46928389) % lg(368897711, 128159))
    assertEquals(lg(-1599543668, -6569), lg(-1929871429, -241628283) % lg(202358381, 7645))
    assertEquals(lg(581185953, 1), lg(419719197, 661188517) % lg(2112360098, 1))
    assertEquals(lg(-1880704128, 171407), lg(1092830824, 1600823129) % lg(-1827462760, 172800))
    assertEquals(lg(1210159480, -13), lg(-836779994, -27475595) % lg(-417527207, 16))
    assertEquals(lg(807846066, 1), lg(-1759597755, 9157722) % lg(-987185779, 1))
    assertEquals(lg(949995673, 1), lg(-1097231525, 20092165) % lg(1106421078, 1))
    assertEquals(lg(-712450167, 7), lg(390678483, 3835040) % lg(1221250555, 14))
    assertEquals(lg(1129531033, -4), lg(-284334384, -18425278) % lg(-1111448031, 6))
    assertEquals(lg(2094997010, 3022), lg(-233961390, 53260849) % lg(-613558136, 3663))
    assertEquals(lg(-496446555, 540290), lg(-3383211, 8039036) % lg(-1668680584, 749874))
    assertEquals(lg(1280740603, -9472), lg(804358887, -189240235) % lg(179665302, 12347))
    assertEquals(lg(2127427912, 6), lg(208769744, 280071599) % lg(-325433064, 14))
    assertEquals(lg(-722136158, -1), lg(-1527711901, -51564742) % lg(-1019145455, 0))
    assertEquals(lg(-1603688570, -2), lg(-159182038, -2145592347) % lg(-483720705, 15))
    assertEquals(lg(-256578646, 177817), lg(1059926378, 477886379) % lg(924988992, 543468))
    assertEquals(lg(1286157765, 80885), lg(-1800046387, 119696078) % lg(436524799, 94037))
    assertEquals(lg(251450065, 19154), lg(-822280387, 44882065) % lg(-940828508, 22947))
    assertEquals(lg(1310986115, 209), lg(1465101985, 269803551) % lg(-1953360551, 334))
    assertEquals(lg(1436855439, -5), lg(-567675197, -8838663) % lg(1903221047, 6))
    assertEquals(lg(296887390, -17), lg(689376065, -22622471) % lg(1534988921, 63))
    assertEquals(lg(1577958450, -39), lg(-2017356377, -57717216) % lg(-1390284125, 42))
    assertEquals(lg(661387374, 344542), lg(-128715878, 982583003) % lg(2004099318, 988167))

    // big % big
    assertEquals(lg(-320078007, 205603273), lg(-320078007, 205603273) % lg(2020227799, -360928021))
    assertEquals(lg(408769930, -2221999), lg(-800732960, -371808530) % lg(744251542, -11199592))
    assertEquals(lg(1575977183, -2441606), lg(-56774921, -32434115) % lg(1413374280, -2726592))
    assertEquals(lg(-1897285736, 18894093), lg(1667937500, 228622683) % lg(-243248020, 69909529))
    assertEquals(lg(-1333815518, 2097776), lg(-1333815518, 2097776) % lg(-1750106076, 18608702))
    assertEquals(lg(-789967161, -4640836), lg(-162800691, -117885498) % lg(-709007774, 8711127))
    assertEquals(lg(-1909427145, -2824029), lg(-1909427145, -2824029) % lg(2028036056, -660713154))
    assertEquals(lg(14077923, 63046905), lg(14077923, 63046905) % lg(-688765214, 375445962))
    assertEquals(lg(272760540, 19525127), lg(272760540, 19525127) % lg(-396955631, 848435537))
    assertEquals(lg(-600396362, 406643261), lg(-600396362, 406643261) % lg(-1533973181, 491661310))
    assertEquals(lg(1801834226, 200420454), lg(1801834226, 200420454) % lg(-1889418050, -328758068))
    assertEquals(lg(361053022, 54544094), lg(1170836790, 510289402) % lg(202445942, 113936327))
    assertEquals(lg(1369752396, -3152427), lg(-378923036, -1036580478) % lg(905093048, 5526353))
    assertEquals(lg(1458911735, 21273958), lg(-2137034353, 1455139814) % lg(1665353214, 27574343))
    assertEquals(lg(-1350216191, -3821167), lg(-1350216191, -3821167) % lg(-1333339390, -4746360))
    assertEquals(lg(1166542449, -1370750), lg(-1289646201, -5193401) % lg(1838778646, -3822651))
    assertEquals(lg(301867174, 5185218), lg(301867174, 5185218) % lg(157012848, -15464466))
    assertEquals(lg(512572633, 48335882), lg(467711834, 155069651) % lg(-44860799, 106733768))
    assertEquals(lg(1624269582, 11007763), lg(1624269582, 11007763) % lg(-158694824, -491219717))
    assertEquals(lg(-1015519521, -163989350), lg(-1015519521, -163989350) % lg(1652525166, 530116116))
    assertEquals(lg(-2127450406, -89864400), lg(2001612518, -452587333) % lg(1115217917, 90680733))
    assertEquals(lg(-761803769, -6085789), lg(1039524645, -86121932) % lg(1131434363, 13339357))
    assertEquals(lg(-1922291990, 6439098), lg(-1922291990, 6439098) % lg(-1083372307, -20634200))
    assertEquals(lg(1508171882, 126457), lg(1408756974, 235847122) % lg(-1813277898, -9066180))
    assertEquals(lg(-496706473, -2657930), lg(1121009342, -1533788016) % lg(-1724900447, -5821788))
    assertEquals(lg(-1626361260, -113469353), lg(-1626361260, -113469353) % lg(1216987736, -817139415))
    assertEquals(lg(-433139577, -182483493), lg(-433139577, -182483493) % lg(1019490766, -595625160))
    assertEquals(lg(-1118452074, 1653764), lg(793542905, 198273616) % lg(-82759497, -2621599))
    assertEquals(lg(-1199275184, 1262327), lg(425605214, 249789222) % lg(392156278, 6716943))
    assertEquals(lg(213473729, 11660532), lg(213473729, 11660532) % lg(-547058106, 894811834))
    assertEquals(lg(-1550227391, 2847368), lg(-1550227391, 2847368) % lg(-1996700003, 689370771))
    assertEquals(lg(-1014778289, -3747071), lg(-144234222, -54239417) % lg(-1102770075, -7213193))
    assertEquals(lg(524484467, 15124083), lg(524484467, 15124083) % lg(-1101379967, -39968226))
    assertEquals(lg(-919997306, 2085072), lg(314758022, 5390195) % lg(-1234755328, -3305123))
    assertEquals(lg(580679232, -10426812), lg(580679232, -10426812) % lg(-1964013803, -1738507605))
    assertEquals(lg(225658926, -4189255), lg(1670083752, -254253193) % lg(722212413, -125031969))
    assertEquals(lg(-495749254, -1833207), lg(-1744001445, -5443198) % lg(1248252191, 3609991))
    assertEquals(lg(-1481543825, 608612), lg(-1786439869, 137339199) % lg(1821158508, 2909161))
    assertEquals(lg(1026706952, -6267613), lg(1273422584, -284542935) % lg(1626032463, -17392208))
    assertEquals(lg(-855876173, -4928311), lg(-513801887, -32580141) % lg(-342074286, 27651829))
    assertEquals(lg(-1027906958, 55543678), lg(-1027906958, 55543678) % lg(-1936394792, 928937151))
    assertEquals(lg(-1793811005, -17787029), lg(251585986, -50474191) % lg(-2045396991, 32687162))
    assertEquals(lg(-356034186, -2235041), lg(66679938, -917589429) % lg(2124767660, -3454168))
    assertEquals(lg(-924611099, -76507846), lg(-599564184, -209788131) % lg(-325046915, 133280284))
    assertEquals(lg(838338995, -12983151), lg(838338995, -12983151) % lg(-842402530, 19411056))
    assertEquals(lg(747658762, 18528439), lg(1444498155, 520850879) % lg(851271837, 23920116))
    assertEquals(lg(-2028924578, -3124146), lg(2096765386, -117024114) % lg(-1726450785, -5694999))
    assertEquals(lg(2056903464, -4954201), lg(-425905039, -180148939) % lg(-1397064581, -15926795))
    assertEquals(lg(-2055992988, 596420), lg(-920215872, 219325473) % lg(1357686103, 54682263))
    assertEquals(lg(1279110660, -10784541), lg(1279110660, -10784541) % lg(278869448, 758126792))
  }
}

class RuntimeLongOldTest {
  import RuntimeLong.fromDouble

  def assertHexEquals(expected: String, l: RuntimeLong): Unit =
    assertEquals(expected, l.toHexString)

  def fromInt(x: Int): RuntimeLong = new RuntimeLong(x)

  val maxInt = fromInt(Int.MaxValue)
  val minInt = fromInt(Int.MinValue)
  val one = fromInt(1)
  val billion = fromInt(1000000000)
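
  // Editor's note (an assumption about this legacy encoding, not stated in the
  // original suite): the old RuntimeLong packed a value into three 22-bit
  // fields, so `new RuntimeLong(l, m, h)` represents l + m * 2^22 + h * 2^44.
  // Worked example for the first constant below:
  //   14 + 0 * 2^22 + 256 * 2^44 = 14 + 2^52 = 4503599627370510
  // which is exactly the number in its name.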
  val `4503599627370510L` = new RuntimeLong(14, 0, 256)
  val `613354684553L` = new RuntimeLong(639113, 146235, 0)
  val `9863155567412L` = new RuntimeLong(2247476, 2351559, 0)
  val `3632147899696541255L` = new RuntimeLong(1568327, 2954580, 206463)
  val `7632147899696541255L` = new RuntimeLong(2616903, 1593290, 433837)

  val minValue = new RuntimeLong(0, 0, 524288)
  val minus1 = new RuntimeLong(4194303, 4194303, 1048575)
  val minus2 = new RuntimeLong(4194302, 4194303, 1048575)
  val minus3 = new RuntimeLong(4194301, 4194303, 1048575)
  val minus4 = new RuntimeLong(4194300, 4194303, 1048575)
  val minus15 = new RuntimeLong(4194289, 4194303, 1048575)
  val minus16 = new RuntimeLong(4194288, 4194303, 1048575)

  @Test def should_correctly_implement_negation(): Unit = {
    assertHexEquals("fffffffffffffffb", -fromInt(5))
    assertHexEquals("0", -fromInt(0))
    assertHexEquals("80000000", -minInt)
  }

  @Test def should_correctly_implement_comparison(): Unit = {
    assertEquals(true, fromInt(7) < fromInt(15))
    assertEquals(false, fromInt(15) < fromInt(15))
    assertEquals(true, fromInt(15) <= fromInt(15))
    assertEquals(true, fromInt(14) <= fromInt(15))
    assertEquals(false, fromInt(15) > fromInt(15))
    assertEquals(false, fromInt(14) > fromInt(15))
    assertEquals(true, fromInt(16) > fromInt(15))
    assertEquals(true, fromInt(15) >= fromInt(15))
    assertEquals(false, fromInt(14) >= fromInt(15))
    assertEquals(true, fromInt(16) >= fromInt(15))
  }

  @Test def should_correctly_implement_addition(): Unit = {
    assertHexEquals("16", fromInt(7) + fromInt(15))
    assertHexEquals("fffffffe", maxInt + maxInt)
    assertHexEquals("80000000", maxInt + one)
  }

  @Test def should_correctly_implement_subtraction(): Unit = {
    assertHexEquals("fffffffffffffff8", fromInt(7) - fromInt(15))
    assertHexEquals("0", maxInt - maxInt)
  }

  @Test def should_correctly_implement_multiplication(): Unit = {
    assertHexEquals("69", fromInt(7) * fromInt(15))
    assertHexEquals("ffffffffffffff97", fromInt(-7) * fromInt(15))
    assertHexEquals("3fffffff00000001", maxInt * maxInt)
    assertHexEquals("ffbfffffffffffc8", `4503599627370510L` * fromInt(-4))
  }

  @Test def should_correctly_implement_division(): Unit = {
    assertHexEquals("0", fromInt(7) / fromInt(15))
    assertHexEquals("4", fromInt(24) / fromInt(5))
    assertHexEquals("fffffffffffffffc", fromInt(24) / fromInt(-5))
    assertHexEquals("ffffffffe6666667", maxInt / fromInt(-5))
    assertHexEquals("2", maxInt / billion)
    assertHexEquals("2", (maxInt + one) / billion)

    assertHexEquals("1", minValue / minValue)
    assertHexEquals("8000000000000000", minValue / minus1)
    assertHexEquals("4000000000000000", minValue / minus2)
    assertHexEquals("2aaaaaaaaaaaaaaa", minValue / minus3)
    assertHexEquals("2000000000000000", minValue / minus4)
    assertHexEquals("888888888888888", minValue / minus15)
    assertHexEquals("800000000000000", minValue / minus16)

    assertHexEquals("0", `7632147899696541255L` / minValue)
    assertHexEquals("961529ec0d5811b9", `7632147899696541255L` / minus1)
    assertHexEquals("cb0a94f606ac08dd", `7632147899696541255L` / minus2)
    assertHexEquals("dcb1b8a40472b093", `7632147899696541255L` / minus3)
    assertHexEquals("e5854a7b0356046f", `7632147899696541255L` / minus4)
    assertHexEquals("f8f05820cdb089b7", `7632147899696541255L` / minus15)
    assertHexEquals("f961529ec0d5811c", `7632147899696541255L` / minus16)
  }
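
  // Editor's note: the "8000000000000000" expectation above encodes the usual
  // two's-complement corner case -- Long.MinValue / -1 overflows and wraps
  // back to Long.MinValue, matching JVM semantics.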
  @Test def should_correctly_implement_modulus(): Unit = {
    assertHexEquals("7", fromInt(7) % fromInt(15))
    assertHexEquals("4", fromInt(24) % fromInt(5))
    assertHexEquals("4", fromInt(24) % fromInt(-5))
    assertHexEquals("8ca6bff", maxInt % billion)
    assertHexEquals("8ca6c00", (maxInt + one) % billion)
    assertHexEquals("2", maxInt % fromInt(-5))

    assertHexEquals("0", minValue % minValue)
    assertHexEquals("0", minValue % minus1)
    assertHexEquals("0", minValue % minus2)
    assertHexEquals("fffffffffffffffe", minValue % minus3)
    assertHexEquals("0", minValue % minus4)
    assertHexEquals("fffffffffffffff8", minValue % minus15)
    assertHexEquals("0", minValue % minus16)

    assertHexEquals("69ead613f2a7ee47", `7632147899696541255L` % minValue)
    assertHexEquals("0", `7632147899696541255L` % minus1)
    assertHexEquals("1", `7632147899696541255L` % minus2)
    assertHexEquals("0", `7632147899696541255L` % minus3)
    assertHexEquals("3", `7632147899696541255L` % minus4)
    assertHexEquals("0", `7632147899696541255L` % minus15)
    assertHexEquals("7", `7632147899696541255L` % minus16)
  }

  @Test def should_correctly_implement_toString(): Unit = {
    assertEquals("2147483647", maxInt.toString)
    assertEquals("-50", fromInt(-50).toString)
    assertEquals("-1000000000", fromInt(-1000000000).toString)
    assertEquals("2147483648", (maxInt + one).toString)
    assertEquals("-2147483648", minInt.toString)
  }

  @Test def should_correctly_implement_fromDouble(): Unit = {
    assertHexEquals("4", fromDouble(4.5))
    assertHexEquals("fffffffffffffffc", fromDouble(-4.5))
  }

  @Test def should_correctly_implement_toDouble(): Unit = {
    assertEquals(5.0, fromInt(5).toDouble)
    assertEquals(2147483648.0, (maxInt + one).toDouble)
  }

  @Test def should_correctly_implement_numberOfLeadingZeros(): Unit = {
    assertEquals(64, fromInt(0).numberOfLeadingZeros)
    assertEquals(63, fromInt(1).numberOfLeadingZeros)
    assertEquals(0, fromInt(-1).numberOfLeadingZeros)
    assertEquals(62, fromInt(2).numberOfLeadingZeros)
  }

  @Test def should_implement_hashCode_according_to_spec_in_j_l_Long(): Unit = {
    assertEquals(0, fromInt(0).hashCode())
    assertEquals(55, fromInt(55).hashCode())
    assertEquals(11, fromInt(-12).hashCode())
    assertEquals(10006548, fromInt(10006548).hashCode())
    assertEquals(1098747, fromInt(-1098748).hashCode())

    assertEquals(-825638905, `613354684553L`.hashCode())
    assertEquals(1910653900, `9863155567412L`.hashCode())
    assertEquals(1735398658, `3632147899696541255L`.hashCode())
    assertEquals(-1689438124, `7632147899696541255L`.hashCode())
  }
}
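
// Editor's note: java.lang.Long specifies hashCode() as
// `(int) (value ^ (value >>> 32))`, i.e. the XOR of the two 32-bit halves.
// For example, -12L is 0xFFFFFFFFFFFFFFF4, so its hash is
// 0xFFFFFFF4 ^ 0xFFFFFFFF = 0xB = 11, matching `fromInt(-12).hashCode()`
// in the last test above.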
japgolly/scala-js
test-suite/js/src/test/scala/org/scalajs/testsuite/jsinterop/RuntimeLongTest.scala
Scala
bsd-3-clause
148504
package org.apache.mesos.chronos.scheduler.jobs

import org.apache.mesos.{Protos => mesos}

/**
 * Represents a key/value label attached to the job, mirroring the Mesos
 * `Label` protobuf message.
 */
case class Label(
    key: String,
    value: String) {
  def toProto(): mesos.Label =
    mesos.Label.newBuilder
      .setKey(key)
      .setValue(value)
      .build
}

object Label {
  def apply(proto: mesos.Label): Label =
    Label(
      proto.getKey,
      proto.getValue
    )
}
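
// A minimal round-trip sketch (editor's addition, hypothetical usage only --
// it relies solely on the `toProto`/`apply` pair defined above):
//   val label = Label("rack", "us-east-1a")
//   assert(Label(label.toProto()) == label)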
vixns/chronos
src/main/scala/org/apache/mesos/chronos/scheduler/jobs/Label.scala
Scala
apache-2.0
488