code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.streaming.examples import org.apache.spark.util.IntParam import org.apache.spark.storage.StorageLevel import org.apache.spark.streaming._ /** * Produces a count of events received from Flume. * * This should be used in conjunction with an AvroSink in Flume. It will start * an Avro server on at the request host:port address and listen for requests. * Your Flume AvroSink should be pointed to this address. * * Usage: FlumeEventCount <master> <host> <port> * * <master> is a Spark master URL * <host> is the host the Flume receiver will be started on - a receiver * creates a server and listens for flume events. * <port> is the port the Flume receiver will listen on. */ object FlumeEventCount { def main(args: Array[String]) { if (args.length != 3) { System.err.println( "Usage: FlumeEventCount <master> <host> <port>") System.exit(1) } val Array(master, host, IntParam(port)) = args val batchInterval = Milliseconds(2000) // Create the context and set the batch size val ssc = new StreamingContext(master, "FlumeEventCount", batchInterval, System.getenv("SPARK_HOME"), Seq(System.getenv("SPARK_EXAMPLES_JAR"))) // Create a flume stream val stream = ssc.flumeStream(host,port,StorageLevel.MEMORY_ONLY) // Print out the count of events received from this server in each batch stream.count().map(cnt => "Received " + cnt + " flume events." ).print() ssc.start() } }
mkolod/incubator-spark
examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala
Scala
apache-2.0
2,311
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.optimizer import org.apache.spark.sql.catalyst.SimpleCatalystConf import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan} import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.apache.spark.sql.types.DecimalType class DecimalAggregatesSuite extends PlanTest { object Optimize extends RuleExecutor[LogicalPlan] { val batches = Batch("Decimal Optimizations", FixedPoint(100), DecimalAggregates(SimpleCatalystConf(caseSensitiveAnalysis = true))) :: Nil } val testRelation = LocalRelation('a.decimal(2, 1), 'b.decimal(12, 1)) test("Decimal Sum Aggregation: Optimized") { val originalQuery = testRelation.select(sum('a)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = testRelation .select(MakeDecimal(sum(UnscaledValue('a)), 12, 1).as("sum(a)")).analyze comparePlans(optimized, correctAnswer) } test("Decimal Sum Aggregation: Not Optimized") { val originalQuery = testRelation.select(sum('b)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = originalQuery.analyze comparePlans(optimized, correctAnswer) } test("Decimal Average Aggregation: Optimized") { val originalQuery = testRelation.select(avg('a)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = testRelation .select((avg(UnscaledValue('a)) / 10.0).cast(DecimalType(6, 5)).as("avg(a)")).analyze comparePlans(optimized, correctAnswer) } test("Decimal Average Aggregation: Not Optimized") { val originalQuery = testRelation.select(avg('b)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = originalQuery.analyze comparePlans(optimized, correctAnswer) } test("Decimal Sum Aggregation over Window: Optimized") { val spec = windowSpec(Seq('a), Nil, UnspecifiedFrame) val originalQuery = testRelation.select(windowExpr(sum('a), spec).as('sum_a)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = testRelation .select('a) .window( Seq(MakeDecimal(windowExpr(sum(UnscaledValue('a)), spec), 12, 1).as('sum_a)), Seq('a), Nil) .select('a, 'sum_a, 'sum_a) .select('sum_a) .analyze comparePlans(optimized, correctAnswer) } test("Decimal Sum Aggregation over Window: Not Optimized") { val spec = windowSpec('b :: Nil, Nil, UnspecifiedFrame) val originalQuery = testRelation.select(windowExpr(sum('b), spec)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = originalQuery.analyze comparePlans(optimized, correctAnswer) } test("Decimal Average Aggregation over Window: Optimized") { val spec = windowSpec(Seq('a), Nil, UnspecifiedFrame) val originalQuery = testRelation.select(windowExpr(avg('a), spec).as('avg_a)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = testRelation .select('a) .window( Seq((windowExpr(avg(UnscaledValue('a)), spec) / 10.0).cast(DecimalType(6, 5)).as('avg_a)), Seq('a), Nil) .select('a, 'avg_a, 'avg_a) .select('avg_a) .analyze comparePlans(optimized, correctAnswer) } test("Decimal Average Aggregation over Window: Not Optimized") { val spec = windowSpec('b :: Nil, Nil, UnspecifiedFrame) val originalQuery = testRelation.select(windowExpr(avg('b), spec)) val optimized = Optimize.execute(originalQuery.analyze) val correctAnswer = originalQuery.analyze comparePlans(optimized, correctAnswer) } }
sachintyagi22/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/DecimalAggregatesSuite.scala
Scala
apache-2.0
4,665
package cn.gridx.scala.lang.datetime import org.joda.time.DateTime /** * Created by tao on 4/19/16. */ object Timezones { def main(args: Array[String]): Unit = { new DateTime() } }
TaoXiao/Scala
lang/src/main/scala/cn/gridx/scala/lang/datetime/Timezones.scala
Scala
apache-2.0
197
package org.http4s import cats.{Applicative, Functor} import cats.data.{Kleisli, OptionT} object AuthedService { /** * Lifts a total function to an `AuthedService`. The function is expected to * handle all requests it is given. If `f` is a `PartialFunction`, use * `apply` instead. */ @deprecated("Use liftF with an OptionT[F, Response[F]] instead", "0.18") def lift[F[_]: Functor, T](f: AuthedRequest[F, T] => F[Response[F]]): AuthedService[T, F] = Kleisli(f.andThen(OptionT.liftF(_))) /** Lifts a partial function to an `AuthedService`. Responds with * [[org.http4s.Response.notFoundFor]], which generates a 404, for any request * where `pf` is not defined. */ def apply[T, F[_]](pf: PartialFunction[AuthedRequest[F, T], F[Response[F]]])( implicit F: Applicative[F]): AuthedService[T, F] = Kleisli(req => pf.andThen(OptionT.liftF(_)).applyOrElse(req, Function.const(OptionT.none))) /** * The empty service (all requests fallthrough). * * @tparam T - ignored. * @return */ def empty[T, F[_]: Applicative]: AuthedService[T, F] = Kleisli.liftF(OptionT.none) }
reactormonk/http4s
core/src/main/scala/org/http4s/AuthedService.scala
Scala
apache-2.0
1,147
/* * Copyright 2014-2021 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.atlas.core.model import com.netflix.atlas.core.stacklang.BaseExamplesSuite import com.netflix.atlas.core.stacklang.Vocabulary class StyleExamplesSuite extends BaseExamplesSuite { override def vocabulary: Vocabulary = StyleVocabulary }
copperlight/atlas
atlas-core/src/test/scala/com/netflix/atlas/core/model/StyleExamplesSuite.scala
Scala
apache-2.0
862
/* NOTE: if inference is changed so that T is inferred to be Int, rather than Nothing, the piece of code below will start to compile OK. In that case, see ticket #2139, and make sure that the generated code will no longer crash! */ class U { def f[T](x:T):T=x } object H extends App { val u=new U val z:Int=(u.f _)(4) println("done") }
AlexSikia/dotty
tests/untried/neg/t2139.scala
Scala
bsd-3-clause
378
package com.datastax.spark.connector.writer import com.datastax.spark.connector.CassandraRow import com.datastax.spark.connector.cql.{CassandraConnector, Schema} import com.datastax.spark.connector.testkit.SharedEmbeddedCassandra import org.apache.cassandra.dht.IPartitioner import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} import scala.collection.immutable.Map class RoutingKeyGeneratorSpec extends FlatSpec with Matchers with BeforeAndAfter with SharedEmbeddedCassandra { useCassandraConfig("cassandra-default.yaml.template") val conn = CassandraConnector(Set(cassandraHost)) conn.withSessionDo { session => session.execute("CREATE KEYSPACE IF NOT EXISTS routing_key_gen_test WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }") session.execute("CREATE TABLE IF NOT EXISTS routing_key_gen_test.one_key (id INT PRIMARY KEY, value TEXT)") session.execute("CREATE TABLE IF NOT EXISTS routing_key_gen_test.two_keys (id INT, id2 TEXT, value TEXT, PRIMARY KEY ((id, id2)))") } implicit val protocolVersion = conn.withClusterDo(_.getConfiguration.getProtocolOptions.getProtocolVersionEnum) val cp = conn.withClusterDo(cluster => Class.forName(cluster.getMetadata.getPartitioner).newInstance().asInstanceOf[IPartitioner]) "RoutingKeyGenerator" should "generate proper routing keys when there is one partition key column" in { val schema = Schema.fromCassandra(conn, Some("routing_key_gen_test"), Some("one_key")) val rowWriter = RowWriterFactory.defaultRowWriterFactory[(Int, String)].rowWriter(schema.tables.head, Seq("id", "value"), Map.empty) val rkg = new RoutingKeyGenerator(schema.tables.head, Seq("id", "value")) conn.withSessionDo { session => val pStmt = session.prepare("INSERT INTO routing_key_gen_test.one_key (id, value) VALUES (:id, :value)") val bStmt = pStmt.bind(1: java.lang.Integer, "first row") session.execute(bStmt) val row = session.execute("SELECT TOKEN(id) FROM routing_key_gen_test.one_key WHERE id = 1").one() val readTokenStr = CassandraRow.fromJavaDriverRow(row, Array("token(id)")).getString(0) val rk = rkg.apply(bStmt) val rkToken = cp.getToken(rk) rkToken.getTokenValue.toString should be(readTokenStr) } } "RoutingKeyGenerator" should "generate proper routing keys when there are more partition key columns" in { val schema = Schema.fromCassandra(conn, Some("routing_key_gen_test"), Some("two_keys")) val rowWriter = RowWriterFactory.defaultRowWriterFactory[(Int, String, String)].rowWriter(schema.tables.head, Seq("id", "id2", "value"), Map.empty) val rkg = new RoutingKeyGenerator(schema.tables.head, Seq("id", "id2", "value")) conn.withSessionDo { session => val pStmt = session.prepare("INSERT INTO routing_key_gen_test.two_keys (id, id2, value) VALUES (:id, :id2, :value)") val bStmt = pStmt.bind(1: java.lang.Integer, "one", "first row") session.execute(bStmt) val row = session.execute("SELECT TOKEN(id, id2) FROM routing_key_gen_test.two_keys WHERE id = 1 AND id2 = 'one'").one() val readTokenStr = CassandraRow.fromJavaDriverRow(row, Array("token(id,id2)")).getString(0) val rk = rkg.apply(bStmt) val rkToken = cp.getToken(rk) rkToken.getTokenValue.toString should be(readTokenStr) } } }
brkyvz/spark-cassandra-connector
spark-cassandra-connector/src/it/scala/com/datastax/spark/connector/writer/RoutingKeyGeneratorSpec.scala
Scala
apache-2.0
3,344
package com.cloudray.scalapress.section import org.springframework.stereotype.Component import org.springframework.transaction.annotation.Transactional import com.cloudray.scalapress.util.{GenericDaoImpl, GenericDao} /** @author Stephen Samuel */ trait SectionDao extends GenericDao[Section, java.lang.Long] @Component @Transactional class SectionDaoImpl extends GenericDaoImpl[Section, java.lang.Long] with SectionDao
vidyacraghav/scalapress
src/main/scala/com/cloudray/scalapress/section/sections.scala
Scala
apache-2.0
422
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.log import java.io.File import java.nio.ByteBuffer import kafka.common.InvalidOffsetException import kafka.utils.CoreUtils._ import kafka.utils.Logging import org.apache.kafka.common.record.RecordBatch /** * An index that maps from the timestamp to the logical offsets of the messages in a segment. This index might be * sparse, i.e. it may not hold an entry for all the messages in the segment. * * The index is stored in a file that is preallocated to hold a fixed maximum amount of 12-byte time index entries. * The file format is a series of time index entries. The physical format is a 8 bytes timestamp and a 4 bytes "relative" * offset used in the [[OffsetIndex]]. A time index entry (TIMESTAMP, OFFSET) means that the biggest timestamp seen * before OFFSET is TIMESTAMP. i.e. Any message whose timestamp is greater than TIMESTAMP must come after OFFSET. * * All external APIs translate from relative offsets to full offsets, so users of this class do not interact with the internal * storage format. * * The timestamps in the same time index file are guaranteed to be monotonically increasing. * * The index support timestamp lookup for a memory map of this file. The lookup is done using a binary search to find * the offset of the message whose indexed timestamp is closest but smaller or equals to the target timestamp. * * Time index files can be opened in two ways: either as an empty, mutable index that allows appends or * an immutable read-only index file that has previously been populated. The makeReadOnly method will turn a mutable file into an * immutable one and truncate off any extra bytes. This is done when the index file is rolled over. * * No attempt is made to checksum the contents of this file, in the event of a crash it is rebuilt. * */ class TimeIndex(file: File, baseOffset: Long, maxIndexSize: Int = -1) extends AbstractIndex[Long, Long](file, baseOffset, maxIndexSize) with Logging { override def entrySize = 12 // We override the full check to reserve the last time index entry slot for the on roll call. override def isFull: Boolean = entries >= maxEntries - 1 private def timestamp(buffer: ByteBuffer, n: Int): Long = buffer.getLong(n * entrySize) private def relativeOffset(buffer: ByteBuffer, n: Int): Int = buffer.getInt(n * entrySize + 8) /** * The last entry in the index */ def lastEntry: TimestampOffset = { inLock(lock) { _entries match { case 0 => TimestampOffset(RecordBatch.NO_TIMESTAMP, baseOffset) case s => parseEntry(mmap, s - 1).asInstanceOf[TimestampOffset] } } } /** * Get the nth timestamp mapping from the time index * @param n The entry number in the time index * @return The timestamp/offset pair at that entry */ def entry(n: Int): TimestampOffset = { maybeLock(lock) { if(n >= _entries) throw new IllegalArgumentException("Attempt to fetch the %dth entry from a time index of size %d.".format(n, _entries)) val idx = mmap.duplicate TimestampOffset(timestamp(idx, n), relativeOffset(idx, n)) } } override def parseEntry(buffer: ByteBuffer, n: Int): IndexEntry = { TimestampOffset(timestamp(buffer, n), baseOffset + relativeOffset(buffer, n)) } /** * Attempt to append a time index entry to the time index. * The new entry is appended only if both the timestamp and offsets are greater than the last appended timestamp and * the last appended offset. * * @param timestamp The timestamp of the new time index entry * @param offset The offset of the new time index entry * @param skipFullCheck To skip checking whether the segment is full or not. We only skip the check when the segment * gets rolled or the segment is closed. */ def maybeAppend(timestamp: Long, offset: Long, skipFullCheck: Boolean = false) { inLock(lock) { if (!skipFullCheck) require(!isFull, "Attempt to append to a full time index (size = " + _entries + ").") // We do not throw exception when the offset equals to the offset of last entry. That means we are trying // to insert the same time index entry as the last entry. // If the timestamp index entry to be inserted is the same as the last entry, we simply ignore the insertion // because that could happen in the following two scenarios: // 1. A log segment is closed. // 2. LogSegment.onBecomeInactiveSegment() is called when an active log segment is rolled. if (_entries != 0 && offset < lastEntry.offset) throw new InvalidOffsetException("Attempt to append an offset (%d) to slot %d no larger than the last offset appended (%d) to %s." .format(offset, _entries, lastEntry.offset, file.getAbsolutePath)) if (_entries != 0 && timestamp < lastEntry.timestamp) throw new IllegalStateException("Attempt to append a timestamp (%d) to slot %d no larger than the last timestamp appended (%d) to %s." .format(timestamp, _entries, lastEntry.timestamp, file.getAbsolutePath)) // We only append to the time index when the timestamp is greater than the last inserted timestamp. // If all the messages are in message format v0, the timestamp will always be NoTimestamp. In that case, the time // index will be empty. if (timestamp > lastEntry.timestamp) { debug("Adding index entry %d => %d to %s.".format(timestamp, offset, file.getName)) mmap.putLong(timestamp) mmap.putInt((offset - baseOffset).toInt) _entries += 1 require(_entries * entrySize == mmap.position, _entries + " entries but file position in index is " + mmap.position + ".") } } } /** * Find the time index entry whose timestamp is less than or equal to the given timestamp. * If the target timestamp is smaller than the least timestamp in the time index, (NoTimestamp, baseOffset) is * returned. * * @param targetTimestamp The timestamp to look up. * @return The time index entry found. */ def lookup(targetTimestamp: Long): TimestampOffset = { maybeLock(lock) { val idx = mmap.duplicate val slot = indexSlotFor(idx, targetTimestamp, IndexSearchType.KEY) if (slot == -1) TimestampOffset(RecordBatch.NO_TIMESTAMP, baseOffset) else { val entry = parseEntry(idx, slot).asInstanceOf[TimestampOffset] TimestampOffset(entry.timestamp, entry.offset) } } } override def truncate() = truncateToEntries(0) /** * Remove all entries from the index which have an offset greater than or equal to the given offset. * Truncating to an offset larger than the largest in the index has no effect. */ override def truncateTo(offset: Long) { inLock(lock) { val idx = mmap.duplicate val slot = indexSlotFor(idx, offset, IndexSearchType.VALUE) /* There are 3 cases for choosing the new size * 1) if there is no entry in the index <= the offset, delete everything * 2) if there is an entry for this exact offset, delete it and everything larger than it * 3) if there is no entry for this offset, delete everything larger than the next smallest */ val newEntries = if(slot < 0) 0 else if(relativeOffset(idx, slot) == offset - baseOffset) slot else slot + 1 truncateToEntries(newEntries) } } /** * Truncates index to a known number of entries. */ private def truncateToEntries(entries: Int) { inLock(lock) { _entries = entries mmap.position(_entries * entrySize) } } override def sanityCheck() { val entry = lastEntry val lastTimestamp = entry.timestamp val lastOffset = entry.offset require(_entries == 0 || (lastTimestamp >= timestamp(mmap, 0)), s"Corrupt time index found, time index file (${file.getAbsolutePath}) has non-zero size but the last timestamp " + s"is $lastTimestamp which is no larger than the first timestamp ${timestamp(mmap, 0)}") require(_entries == 0 || lastOffset >= baseOffset, s"Corrupt time index found, time index file (${file.getAbsolutePath}) has non-zero size but the last offset " + s"is $lastOffset which is smaller than the first offset $baseOffset") val len = file.length() require(len % entrySize == 0, "Time index file " + file.getAbsolutePath + " is corrupt, found " + len + " bytes which is not positive or not a multiple of 12.") } }
rhauch/kafka
core/src/main/scala/kafka/log/TimeIndex.scala
Scala
apache-2.0
9,372
package com.crobox.clickhouse.dsl import com.crobox.clickhouse.dsl.JoinQuery.InnerJoin import com.crobox.clickhouse.dsl.schemabuilder.ColumnType import com.crobox.clickhouse._ import org.joda.time.{DateTime, LocalDate} import java.util.UUID import scala.util.{Failure, Success} class QueryTest extends DslTestSpec { it should "perform simple select" in { val query = select(shieldId) from OneTestTable toSql(query.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica FORMAT JSON" ) } it should "generate for join between tables" in { val query = select(col1, shieldId).from(OneTestTable).join(InnerJoin, TwoTestTable) using shieldId toSql(query.internalQuery) should matchSQL( s"SELECT column_1, shield_id FROM $database.captainAmerica AS L1 INNER JOIN (SELECT * " + s"FROM $database.twoTestTable) AS R1 USING shield_id FORMAT JSON" ) } it should "generate inner join" in { val expectedUUID = UUID.randomUUID() val innerQuery: OperationalQuery = select(shieldId as itemId) from OneTestTable where shieldId.isEq(expectedUUID) val joinInnerQuery: OperationalQuery = select(itemId) from TwoTestTable where (col3 isEq "wompalama") val query = select(col1, shieldId) from innerQuery join (InnerJoin, joinInnerQuery) using itemId toSql(query.internalQuery) should matchSQL( s"SELECT column_1, shield_id FROM (SELECT shield_id AS item_id FROM $database.captainAmerica " + s"WHERE shield_id = '$expectedUUID') AS L1 INNER JOIN (SELECT item_id FROM $database.twoTestTable " + s"WHERE column_3 = 'wompalama') AS R1 USING item_id FORMAT JSON" ) } it should "escape from evil" in { val query = select(shieldId) from OneTestTable where col3.isEq("use ' evil") toSql(query.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica WHERE column_3 = 'use \\\\' evil' FORMAT JSON" ) } it should "overrule with left preference" in { val query = select(shieldId) from OneTestTable val query2 = select(itemId) from OneTestTable where col2 >= 2 val composed = query :+> query2 toSql(composed.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica WHERE column_2 >= 2 FORMAT JSON" ) } it should "overrule with right preference" in { val query = select(shieldId) from OneTestTable val query2 = select(itemId) from OneTestTable where col2 >= 2 val composed = query <+: query2 toSql(composed.internalQuery) should matchSQL( s"SELECT item_id FROM $database.captainAmerica WHERE column_2 >= 2 FORMAT JSON" ) } it should "compose indexOf and arrayElement" in { def lookupNestedValue(column: NativeColumn[_], elm: String): ExpressionColumn[String] = column.clickhouseType match { case ColumnType.Nested(k, v) => val keyColumn = ref[Seq[String]](column.name + "." + k.name) val valueColumn = ref[Seq[String]](column.name + "." + v.name) arrayElement(valueColumn, indexOf(keyColumn, elm)) case _ => throw new IllegalArgumentException(s"ColumnType ${column.clickhouseType} is unsupported for nested lookup") } val nested = NativeColumn("props", ColumnType.Nested(NativeColumn("key"), NativeColumn("value"))) toSql( select(lookupNestedValue(nested, "cate'gory")).internalQuery ) should matchSQL( "SELECT `props.value`[indexOf(`props.key`,'cate\\\\'gory')] FORMAT JSON" ) } it should "fail on try override of conflicting queries" in { val query = select(shieldId) from OneTestTable val query2 = select(itemId) from OneTestTable where col2 >= 2 val composed = query + query2 composed should matchPattern { case Failure(_: IllegalArgumentException) => } } it should "parse datefunction" in { val query = select(toYear(NativeColumn[DateTime]("dateTime"))) from OneTestTable toSql(query.internalQuery).nonEmpty shouldBe true } it should "parse column function in filter" in { val query = select(minus(NativeColumn[LocalDate]("date"), NativeColumn[Double]("double"))) from OneTestTable where (sum( col2 ) > 0) toSql(query.internalQuery) should matchSQL( s"SELECT date - double FROM $database.captainAmerica WHERE sum(column_2) > 0 FORMAT JSON" ) } it should "parse const as column for magnets" in { val query = select(col2 - 1, intDiv(2, 3)) from OneTestTable toSql(query.internalQuery) should matchSQL( s"SELECT column_2 - 1, intDiv(2, 3) FROM $database.captainAmerica FORMAT JSON" ) } it should "succeed on safe override of non-conflicting multi part queries" in { val query = select(shieldId) val query2 = from(OneTestTable) val query3 = where(col2 >= 4) val composed = query + query2 val composed2 = composed + query3 composed should matchPattern { case t: Success[_] => } toSql(composed.get.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica FORMAT JSON" ) composed2 should matchPattern { case t: Success[_] => } toSql(composed2.get.internalQuery) should matchSQL( s"SELECT shield_id FROM $database.captainAmerica WHERE column_2 >= 4 FORMAT JSON" ) } it should "throw an exception if the union doesn't have the same number of columns" in { val query = select(shieldId) from OneTestTable val query2 = select(shieldId, itemId) from OneTestTable an[IllegalArgumentException] should be thrownBy { query.unionAll(query2) } } it should "perform the union of multiple tables" in { val query = select(shieldId) from OneTestTable val query2 = select(itemId) from TwoTestTable val query3 = select(itemId) from ThreeTestTable val union = query.unionAll(query2).unionAll(query3) toSql(union.internalQuery) should matchSQL( s""" |SELECT shield_id FROM $database.captainAmerica |UNION ALL SELECT item_id FROM $database.twoTestTable |UNION ALL SELECT item_id FROM $database.threeTestTable |FORMAT JSON""".stripMargin ) } it should "select from an union of two tables" in { val query2 = select(itemId) from TwoTestTable val query3 = select(itemId) from ThreeTestTable val query = select(itemId) from query2.unionAll(query3) toSql(query.internalQuery) should matchSQL( s""" |SELECT item_id FROM (SELECT item_id FROM $database.twoTestTable |UNION ALL SELECT item_id FROM $database.threeTestTable) |FORMAT JSON""".stripMargin ) } it should "use alias in subselect" in { val query = select(dsl.all()).from(select(col1, shieldId).from(OneTestTable).join(InnerJoin, TwoTestTable) using shieldId) toSql(query.internalQuery) should matchSQL(s""" |SELECT * FROM |(SELECT column_1, shield_id FROM $database.captainAmerica AS L1 | INNER JOIN (SELECT * FROM $database.twoTestTable) AS R1 | USING shield_id) |FORMAT JSON""".stripMargin) } it should "select from using ALIAS and final" in { var query = select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable as "3sf" asFinal toSql(query.internalQuery) should matchSQL( s""" |SELECT shield_id AS item_id, column_1, notEmpty(column_1) AS empty |FROM ${OneTestTable.quoted} AS `3sf` FINAL |FORMAT JSON""".stripMargin ) query = select(shieldId as itemId, col1, notEmpty(col1) as "empty") from OneTestTable as "3sf" toSql(query.internalQuery) should matchSQL( s""" |SELECT shield_id AS item_id, column_1, notEmpty(column_1) AS empty |FROM ${OneTestTable.quoted} AS `3sf` |FORMAT JSON""".stripMargin ) } }
crobox/clickhouse-scala-client
dsl/src/test/scala/com/crobox/clickhouse/dsl/QueryTest.scala
Scala
lgpl-3.0
7,891
/* * Copyright (C) 2016 University of Basel, Graphics and Vision Research Group * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package scalismo.ui.view.properties import java.awt.Color import javax.swing.BorderFactory import javax.swing.border.TitledBorder import scalismo.ui.model.SceneNode import scalismo.ui.model.properties.{ColorMapping, HasScalarRange, NodeProperty} import scalismo.ui.view.ScalismoFrame import scalismo.ui.view.util.ScalableUI.implicits.scalableInt import scalismo.ui.view.util.{FancySlider, MultiLineLabel} import scala.swing.BorderPanel import scala.swing.event.ValueChanged object ScalarRangePropertyPanel extends PropertyPanel.Factory { override def create(frame: ScalismoFrame): PropertyPanel = { new ScalarRangePropertyPanel(frame) } } class ScalarRangePropertyPanel(override val frame: ScalismoFrame) extends BorderPanel with PropertyPanel { override def description: String = "Scalar Range Mapping Thresholds" private var targets: List[HasScalarRange] = Nil private var min: Float = 0 private var max: Float = 100 private var step: Float = 1 private def colorizeSliderValue(slider: FancySlider, color: Color): Unit = { // Setting the color of the displayed text may result in poor readability for certain colors, so we // create an "underline" showing the color instead. slider.valueLabel.border = BorderFactory.createMatteBorder(0, 0, 3.scaled, 0, color) } private val minimumSlider = new FancySlider { min = 0 max = 100 value = 0 override def formattedValue(sliderValue: Int): String = formatSliderValue(sliderValue) colorizeSliderValue(this, ColorMapping.Default.lowerColor) } private val maximumSlider = new FancySlider { min = 0 max = 100 value = 100 override def formattedValue(sliderValue: Int): String = formatSliderValue(sliderValue) colorizeSliderValue(this, ColorMapping.Default.upperColor) } private val mismatchMessage = new MultiLineLabel( "Scalar range domain mismatch. Please multi-select only nodes with matching domains." ) { val northedPanel: BorderPanel = new BorderPanel { private val slidersPanel = new BorderPanel { border = new TitledBorder(null, description, TitledBorder.LEADING, 0, null, null) layout(minimumSlider) = BorderPanel.Position.North layout(maximumSlider) = BorderPanel.Position.South layout(mismatchMessage) = BorderPanel.Position.Center } layout(slidersPanel) = BorderPanel.Position.Center } layout(northedPanel) = BorderPanel.Position.North } listenToOwnEvents() def listenToOwnEvents(): Unit = { listenTo(minimumSlider, maximumSlider) } def deafToOwnEvents(): Unit = { deafTo(minimumSlider, maximumSlider) } def toSliderValue(v: Float): Int = { if (step == 0) 0 else Math.round((v - min) / step) } def formatSliderValue(i: Int): String = { if (step == 0) "0" else if (step >= 1) f"${fromSliderValue(i)}%2.0f" else if (step >= .1) f"${fromSliderValue(i)}%2.1f" else if (step >= .01) f"${fromSliderValue(i)}%2.2f" else f"${fromSliderValue(i)}%2.3f" } def fromSliderValue(v: Int): Float = { v * step + min } def updateUi(): Unit = { deafToOwnEvents() // either show sliders, or information mismatchMessage.visible = targets.isEmpty minimumSlider.visible = targets.nonEmpty maximumSlider.visible = targets.nonEmpty targets.headOption.foreach { t => val range = t.scalarRange.value min = range.domainMinimum max = range.domainMaximum step = (max - min) / 100.0f // this is an ugly workaround to make sure (min, max) values are properly displayed def reinitSlider(s: FancySlider): Unit = { s.min = 1 s.min = 0 s.max = 99 s.max = 100 } reinitSlider(minimumSlider) reinitSlider(maximumSlider) minimumSlider.value = toSliderValue(range.mappedMinimum) maximumSlider.value = toSliderValue(range.mappedMaximum) colorizeSliderValue(minimumSlider, range.colorMapping.lowerColor) colorizeSliderValue(maximumSlider, range.colorMapping.upperColor) } listenToOwnEvents() } override def setNodes(nodes: List[SceneNode]): Boolean = { cleanup() val supported = allMatch[HasScalarRange](nodes) if (supported.nonEmpty) { // check if all nodes have the same scalar range domain val ranges = supported.map(_.scalarRange.value) val minima = ranges.map(_.domainMinimum).distinct.length val maxima = ranges.map(_.domainMaximum).distinct.length if (minima == 1 && maxima == 1) { targets = supported } else { // can't show a single set of sliders for multiple domains. // However, we'll still show the UI, with a message instead. targets = Nil } targets.foreach(t => listenTo(t.scalarRange)) updateUi() true } else false } def cleanup(): Unit = { targets.foreach(t => deafTo(t.scalarRange)) targets = Nil } reactions += { case NodeProperty.event.PropertyChanged(_) => updateUi() case ValueChanged(slider) => deafToOwnEvents() if (maximumSlider.value < minimumSlider.value) { if (slider eq minimumSlider) maximumSlider.value = minimumSlider.value else if (slider eq maximumSlider) minimumSlider.value = maximumSlider.value } propagateSliderChanges() listenToOwnEvents() //target.foreach(_.opacity.value = minimumSlider.value.toFloat / 100.0f) } def propagateSliderChanges(): Unit = { val (fMin, fMax) = (fromSliderValue(minimumSlider.value), fromSliderValue(maximumSlider.value)) targets.foreach(t => t.scalarRange.value = t.scalarRange.value.copy(mappedMinimum = fMin, mappedMaximum = fMax)) } }
unibas-gravis/scalismo-ui
src/main/scala/scalismo/ui/view/properties/ScalarRangePropertyPanel.scala
Scala
gpl-3.0
6,443
/** * Copyright 2015, deepsense.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.deepsense.deeplang.params import spray.json._ import io.deepsense.deeplang.params.validators.RangeValidator class NumericParamSpec extends AbstractParamSpec[Double, NumericParam] { override def className: String = "NumericParam" override def paramFixture: (NumericParam, JsValue) = { val description = "Numeric parameter description" val param = NumericParam( name = "Numeric parameter", description = Some(description), validator = RangeValidator(1.0, 3.0, true, false)) val json = JsObject( "type" -> JsString("numeric"), "name" -> JsString(param.name), "description" -> JsString( description + param.constraints), "default" -> JsNull, "isGriddable" -> JsTrue, "validator" -> JsObject( "type" -> JsString("range"), "configuration" -> JsObject( "begin" -> JsNumber(1.0), "end" -> JsNumber(3.0), "beginIncluded" -> JsBoolean(true), "endIncluded" -> JsBoolean(false) ) ) ) (param, json) } override def valueFixture: (Double, JsValue) = { val value = 2.5 (value, JsNumber(value)) } }
deepsense-io/seahorse-workflow-executor
deeplang/src/test/scala/io/deepsense/deeplang/params/NumericParamSpec.scala
Scala
apache-2.0
1,768
package s_mach.aeondb.impl import scala.concurrent.Future import s_mach.concurrent._ import s_mach.datadiff._ import s_mach.aeondb._ import s_mach.aeondb.internal._ case class NowMomentImpl[A,B,PB]( aeonMap: AeonMapImpl[A,B,PB], oldMoment: OldMomentImpl[A,B,PB] ) extends NowMoment[A,B,PB] with LiftedLocalMoment[A,B,LocalMoment[A,B]] { import aeonMap._ def aeon = oldMoment.aeon def local = oldMoment.local override def filterKeys(f: (A) => Boolean): NowMomentImpl[A,B,PB] = copy(oldMoment = oldMoment.filterKeys(f)) override def put( key: A, value: B )(implicit metadata:Metadata) : Future[Boolean] = { putFold(key)( f = { _ => (value,true).future }, g = { _ => false } ) } override def putFold[X](key: A)( f: Moment[A,B] => Future[(B,X)], g: Exception => X )(implicit metadata:Metadata) : Future[X] = { aeonMap._commitFold({ nowMoment => if( nowMoment.local.active.contains(key) == false && nowMoment.local.inactive.contains(key) == false ) { for { (value,x) <- f(nowMoment) } yield { val (checkout,commit) = CommitBuilder[A,B,PB]() .put(key,value) .result() (checkout,(commit,metadata) :: Nil,x) } } else { (Checkout.empty[A,Long],Nil, g(KeyAlreadyExists(key)) ).future } },g) } override def replace( key: A, value: B )(implicit metadata:Metadata) : Future[Boolean] = { replaceFold(key)( f = { _ => (value,true).future }, g = { _ => false } ) } override def replaceFold[X](key: A)( f: Moment[A,B] => Future[(B,X)], g: Exception => X )(implicit metadata:Metadata) : Future[X] = { aeonMap._commitFold({ nowMoment => if( nowMoment.local.active.contains(key) || nowMoment.local.inactive.contains(key) ) { val record = nowMoment.local.active(key) val oldValue = record.value for { (newValue, x) <- f(nowMoment) } yield { val patch = oldValue calcDiff newValue val (checkout,commit) = CommitBuilder[A,B,PB]() .replace(key,patch,record.version) .result() (checkout, (commit,metadata) :: Nil, x) } } else { (Checkout.empty[A,Long],Nil, g(KeyNotFoundError(Iterable(key))) ).future } },g) } override def deactivate( key: A )(implicit metadata: Metadata) : Future[Boolean] = { aeonMap._commitFold({ nowMoment => nowMoment.local.active.get(key) match { case Some(record) => val (checkout,commit) = CommitBuilder[A,B,PB]() .deactivate(key,record.version) .result() (checkout, (commit,metadata) :: Nil, true).future case None => (Checkout.empty[A,Long],Nil,false).future } },{ _ => false }) } override def reactivate( key: A, value: B )(implicit metadata: Metadata) : Future[Boolean] = { aeonMap._commitFold({ nowMoment => nowMoment.local.inactive.get(key) match { case Some(record) => val (checkout,commit) = CommitBuilder[A,B,PB]() .reactivate(key,value,record.version) .result() (checkout, (commit,metadata) :: Nil, true).future case None => (Checkout.empty[A,Long],Nil, false).future } }, { _ => false }) } override def commit( checkout: Checkout[A], oomCommit: List[(Commit[A,B,PB],Metadata)] ) : Future[Boolean] = { commitFold( f = { _ => (checkout,oomCommit,true).future }, g = { _ => false } ) } override def commitFold[X]( f: Moment[A,B] => Future[(Checkout[A],List[(Commit[A,B,PB],Metadata)],X)], g: Exception => X ) : Future[X] = { aeonMap._commitFold(f,g) } override def merge( other: AeonMap[A,B,PB] )(implicit metadata: Metadata) : Future[Boolean] = { mergeFold( f = { _ => (other,true).future }, g = { _ => false } ) } override def mergeFold[X]( f: Moment[A,B] => Future[(AeonMap[A,B,PB],X)], g: Exception => X )(implicit metadata: Metadata) : Future[X] = { aeonMap._mergeFold(f,g) } override def checkout(): Future[AeonMapImpl[A,B,PB]] = oldMoment.checkout() }
S-Mach/aeondb
src/main/scala/s_mach/aeondb/impl/NowMomentImpl.scala
Scala
apache-2.0
4,334
/* * Copyright 2014 Treode, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.treode.store import com.treode.async.implicits._ import com.treode.async.stubs.StubScheduler import org.scalatest.FreeSpec import Fruits.{Apple, Banana} import StoreTestTools._ import Window.{Between, Latest, Through} class WindowSpec extends FreeSpec { def concat [A, B] (x: (Seq [A], Seq [B]), y: (Seq [A], Seq [B])): (Seq [A], Seq [B]) = (x._1 ++ y._1, x._2 ++ y._2) def testOverlap (window: Window, latest: Long, earliest: Long, expected: Boolean) { s"$window should ${if (expected) "overlap" else "not overlap"} [$earliest, $latest]" in { assertResult (expected) (window.overlaps (latest, earliest)) }} "Window.Latest should" - { val filter = Latest (3, true, 2, true) def testFilter (items: (Seq [Cell], Seq [Cell])*) { val in = items .map (_._1) .flatten val out = items .map (_._2) .flatten s"filter ${testStringOf (in)}" in { implicit val scheduler = StubScheduler.random() assertCells (out: _*) (in.batch.window (filter)) }} val apples = Seq ( ( Seq (Apple##4::4, Apple##3::3, Apple##2::2, Apple##1::1), Seq (Apple##3::3)), ( Seq (Apple##4::4, Apple##3::3, Apple##2::2), Seq (Apple##3::3)), ( Seq (Apple##4::4, Apple##3::3, Apple##1::1), Seq (Apple##3::3)), ( Seq (Apple##4::4, Apple##3::3), Seq (Apple##3::3)), ( Seq (Apple##4::4, Apple##2::2, Apple##1::1), Seq (Apple##2::2)), ( Seq (Apple##4::4, Apple##2::2), Seq (Apple##2::2)), ( Seq (Apple##4::4, Apple##1::1), Seq ()), ( Seq (Apple##4::4), Seq ()), ( Seq (Apple##3::3, Apple##2::2, Apple##1::1), Seq (Apple##3::3)), ( Seq (Apple##3::3, Apple##2::2), Seq (Apple##3::3)), ( Seq (Apple##3::3, Apple##1::1), Seq (Apple##3::3)), ( Seq (Apple##3::3), Seq (Apple##3::3)), ( Seq (Apple##2::2, Apple##1::1), Seq (Apple##2::2)), ( Seq (Apple##2::2), Seq (Apple##2::2)), ( Seq (Apple##1::1), Seq ())) val bananas = Seq ( ( Seq (Banana##4::4, Banana##3::3, Banana##2::2, Banana##1::1), Seq (Banana##3::3)), ( Seq (Banana##4::4, Banana##3::3, Banana##2::2), Seq (Banana##3::3)), ( Seq (Banana##4::4, Banana##3::3, Banana##1::1), Seq (Banana##3::3)), ( Seq (Banana##4::4, Banana##3::3), Seq (Banana##3::3)), ( Seq (Banana##4::4, Banana##2::2, Banana##1::1), Seq (Banana##2::2)), ( Seq (Banana##4::4, Banana##2::2), Seq (Banana##2::2)), ( Seq (Banana##4::4, Banana##1::1), Seq ()), ( Seq (Banana##4::4), Seq ()), ( Seq (Banana##3::3, Banana##2::2, Banana##1::1), Seq (Banana##3::3)), ( Seq (Banana##3::3, Banana##2::2), Seq (Banana##3::3)), ( Seq (Banana##3::3, Banana##1::1), Seq (Banana##3::3)), ( Seq (Banana##3::3), Seq (Banana##3::3)), ( Seq (Banana##2::2, Banana##1::1), Seq (Banana##2::2)), ( Seq (Banana##2::2), Seq (Banana##2::2)), ( Seq (Banana##1::1), Seq ())) testFilter ((Seq.empty, Seq.empty)) for (a <- apples) testFilter (a) for (a <- apples; b <- bananas) testFilter (concat (a, b)) testOverlap (Latest (3, true, 2, true ), 2, 1, true ) testOverlap (Latest (3, true, 2, false), 2, 1, false) testOverlap (Latest (3, true, 2, true ), 3, 2, true ) testOverlap (Latest (3, true, 2, false), 3, 2, true ) testOverlap (Latest (3, false, 2, true ), 3, 2, true ) testOverlap (Latest (3, false, 2, false), 3, 2, true ) testOverlap (Latest (3, true, 2, true ), 4, 3, true ) testOverlap (Latest (3, false, 2, true ), 4, 3, false) } "Window.Between should" - { val filter = Between (3, true, 2, true) def test (items: (Seq [Cell], Seq [Cell])*) { val in = items .map (_._1) .flatten val out = items .map (_._2) .flatten s"handle ${testStringOf (in)}" in { implicit val scheduler = StubScheduler.random() assertCells (out: _*) (in.batch.window (filter)) }} val apple1 = ( Seq (Apple##1::1), Seq ()) val apple2 = ( Seq (Apple##2::2), Seq (Apple##2::2)) val apple3 = ( Seq (Apple##2::2, Apple##1::1), Seq (Apple##2::2)) val apple4 = ( Seq (Apple##3::3, Apple##2::2, Apple##1::1), Seq (Apple##3::3, Apple##2::2)) val apple5 = ( Seq (Apple##4::4, Apple##3::3, Apple##2::2, Apple##1::1), Seq (Apple##3::3, Apple##2::2)) val apple6 = ( Seq (Apple##4::4, Apple##3::3, Apple##2::2), Seq (Apple##3::3, Apple##2::2)) val apple7 = ( Seq (Apple##4::4, Apple##3::3), Seq (Apple##3::3)) val apples = Seq (apple1, apple2, apple3, apple4, apple5, apple6, apple7) test ((Seq.empty, Seq.empty)) for (a <- apples) test (a) testOverlap (Between (3, true, 2, true ), 2, 1, true ) testOverlap (Between (3, true, 2, false), 2, 1, false) testOverlap (Between (3, true, 2, true ), 3, 2, true ) testOverlap (Between (3, true, 2, false), 3, 2, true ) testOverlap (Between (3, false, 2, true ), 3, 2, true ) testOverlap (Between (3, false, 2, false), 3, 2, true ) testOverlap (Between (3, true, 2, true ), 4, 3, true ) testOverlap (Between (3, false, 2, true ), 4, 3, false) } "Window.Through should" - { val filter = Through (3, true, 2) def test (items: (Seq [Cell], Seq [Cell])*) { val in = items .map (_._1) .flatten val out = items .map (_._2) .flatten s"handle ${testStringOf (in)}" in { implicit val scheduler = StubScheduler.random() assertCells (out: _*) (in.batch.window (filter)) }} val apple1 = ( Seq (Apple##1::1), Seq (Apple##1::1)) val apple2 = ( Seq (Apple##2::2), Seq (Apple##2::2)) val apple3 = ( Seq (Apple##2::2, Apple##1::1), Seq (Apple##2::2)) val apple4 = ( Seq (Apple##3::3, Apple##2::2, Apple##1::1), Seq (Apple##3::3, Apple##2::2)) val apple5 = ( Seq (Apple##4::4, Apple##3::3, Apple##2::2, Apple##1::1), Seq (Apple##3::3, Apple##2::2)) val apple6 = ( Seq (Apple##4::4, Apple##3::3, Apple##2::2), Seq (Apple##3::3, Apple##2::2)) val apple7 = ( Seq (Apple##4::4, Apple##3::3), Seq (Apple##3::3)) val apples = Seq (apple1, apple2, apple3, apple4, apple5, apple6, apple7) val banana1 = ( Seq (Banana##1::1), Seq (Banana##1::1)) val banana2 = ( Seq (Banana##2::2), Seq (Banana##2::2)) val banana3 = ( Seq (Banana##2::2, Banana##1::1), Seq (Banana##2::2)) val banana4 = ( Seq (Banana##3::3, Banana##2::2, Banana##1::1), Seq (Banana##3::3, Banana##2::2)) val banana5 = ( Seq (Banana##4::4, Banana##3::3, Banana##2::2, Banana##1::1), Seq (Banana##3::3, Banana##2::2)) val banana6 = ( Seq (Banana##4::4, Banana##3::3, Banana##2::2), Seq (Banana##3::3, Banana##2::2)) val banana7 = ( Seq (Banana##4::4, Banana##3::3), Seq (Banana##3::3)) val bananas = Seq (banana1, banana2, banana3, banana4, banana5, banana6, banana7) test ((Seq.empty, Seq.empty)) for (a <- apples) test (a) for (a <- apples; b <- bananas) test (concat (a, b)) testOverlap (Through (3, true, 2), 2, 1, true ) testOverlap (Through (3, true, 2), 3, 2, true ) testOverlap (Through (3, false, 2), 3, 2, true ) testOverlap (Through (3, true, 2), 4, 3, true ) testOverlap (Through (3, false, 2), 4, 3, false) }}
Treode/store
store/test/com/treode/store/WindowSpec.scala
Scala
apache-2.0
8,491
package at.logic.gapt.proofs.reduction import at.logic.gapt.expr._ import at.logic.gapt.expr.hol._ import at.logic.gapt.proofs._ import at.logic.gapt.proofs.expansion._ import at.logic.gapt.proofs.lk.LKProof import at.logic.gapt.proofs.resolution._ import at.logic.gapt.utils.NameGenerator import scala.collection.mutable /** * Represents a reduction of a problem together with a back-translation of the solutions. * * A problem P1 is reduced to a problem P2, a solution S2 to the problem P2 * can then be translated back to a solution S1 of the problem P1. */ trait Reduction[-P1, +P2, +S1, -S2] { def forward( problem: P1 ): ( P2, S2 => S1 ) /** Sequentially composes reductions. */ def |>[P2_ >: P2, P3, S2_ <: S2, S3]( other: Reduction[P2_, P3, S2_, S3] ): Reduction[P1, P3, S1, S3] = CombinedReduction( this, other ) } /** A reduction that does not change the type of the problem. */ trait Reduction_[P, S] extends Reduction[P, P, S, S] /** A reduction without back-translation. */ trait OneWayReduction_[P] extends Reduction[P, P, Nothing, Any] /** * Sequential composition of reductions. * * This class is not intended to be used directly, but via the [[Reduction#|>]] operator. */ case class CombinedReduction[-P1, P2, +P3, +S1, S2, -S3]( red1: Reduction[P1, P2, S1, S2], red2: Reduction[P2, P3, S2, S3] ) extends Reduction[P1, P3, S1, S3] { override def toString = s"$red1 |> $red2" override def forward( problem: P1 ): ( P3, S3 => S1 ) = { val ( prob2, back1 ) = red1.forward( problem ) val ( prob3, back2 ) = red2.forward( prob2 ) ( prob3, sol3 => back1( back2( sol3 ) ) ) } } private class ErasureReductionHelper( constants: Set[Const] ) { val termErasure = constants map { case c @ Const( name, FunctionType( _, argTypes ) ) => c -> FOLFunctionConst( s"f_$name", argTypes.size ) } toMap val termReification = termErasure map { _.swap } val predicateErasure = constants collect { case c @ HOLAtomConst( name, argTypes ) => c -> FOLAtomConst( s"P_$name", argTypes.size ) } toMap val predicateReification = predicateErasure map { _.swap } private def renameFreeVars( vs: Set[Var] ) = vs.toSeq.zipWithIndex.map { case ( v, i ) => v -> FOLVar( s"${v.name}_$i" ) }.toMap def forward( sequent: HOLSequent ): FOLSequent = sequent map { f => forward( f, renameFreeVars( freeVariables( f ) ) ) } def forward( clause: HOLClause )( implicit dummyImplicit: DummyImplicit ): HOLClause = forward( clause, renameFreeVars( freeVariables( clause ) ) ) def forward( clause: HOLClause, freeVars: Map[Var, FOLVar] ): FOLClause = clause map { forward( _, freeVars ).asInstanceOf[FOLAtom] } def forward( formula: Formula, freeVars: Map[Var, FOLVar] ): FOLFormula = formula match { case f @ Top() => f case f @ Bottom() => f case Neg( f ) => Neg( forward( f, freeVars ) ) case And( f, g ) => And( forward( f, freeVars ), forward( g, freeVars ) ) case Or( f, g ) => Or( forward( f, freeVars ), forward( g, freeVars ) ) case Imp( f, g ) => Imp( forward( f, freeVars ), forward( g, freeVars ) ) case All( x, f ) => val y = rename( FOLVar( x.name ), freeVars.values ) All( y, forward( f, freeVars + ( x -> y ) ) ) case Ex( x, f ) => val y = rename( FOLVar( x.name ), freeVars.values ) Ex( y, forward( f, freeVars + ( x -> y ) ) ) case Eq( t, s ) => Eq( forward( t, freeVars ), forward( s, freeVars ) ) case Apps( c: HOLAtomConst, args ) => predicateErasure( c )( args map { forward( _, freeVars ) }: _* ) } def forward( term: Expr, freeVars: Map[Var, FOLVar] ): FOLTerm = term match { case Apps( c: Const, args ) => termErasure( c )( args map { forward( _, freeVars ) }: _* ) case v: Var => freeVars( v ) } def infer( formula: FOLFormula, known: Map[FOLVar, Var] ): Map[FOLVar, Var] = infer( formula, To, known ) def infer( expr: FOLExpression, ty: Ty, known: Map[FOLVar, Var] ): Map[FOLVar, Var] = { val res = mutable.Map[FOLVar, Var]() res ++= known def i( f: FOLExpression, expected: Ty ): Ty = f match { case Eq( a @ FOLFunction( _, _ ), b ) => i( b, i( a, null ) ) case Eq( a, b @ FOLFunction( _, _ ) ) => i( a, i( b, null ) ) case Eq( a: FOLVar, b ) if known isDefinedAt a => i( b, known( a ).ty ) case Eq( a, b: FOLVar ) if known isDefinedAt b => i( a, known( b ).ty ) case Eq( a: FOLVar, b: FOLVar ) => i( b, i( a, Ti ) ) // hope for the best... case Apps( c: FOLAtomConst, args ) => predicateReification( c ) match { case Const( _, FunctionType( _, argTypes ) ) => for ( ( a: FOLTerm, t ) <- args zip argTypes ) i( a, t ) } expected case v @ FOLVar( name ) => res.get( v ) match { case Some( Var( _, `expected` ) ) => case Some( Var( _, other ) ) => throw new Exception( s"Reification failure: $v should have type $expected but already has type $other instead" ) case None => res( v ) = Var( name, expected ) } expected case Apps( c: FOLFunctionConst, args ) => termReification( c ) match { case Const( _, FunctionType( retType, argTypes ) ) => for ( ( a: FOLTerm, t ) <- args zip argTypes ) i( a, t ) retType } } i( expr, ty ) res.toMap } def infer( clause: FOLClause, known: Map[FOLVar, Var] ): Map[FOLVar, Var] = clause.elements.foldRight( known )( infer ) def back( proof: ResolutionProof, originalInputs: Set[HOLClause] ): ResolutionProof = { import at.logic.gapt.proofs.resolution._ val memo = mutable.Map[( ResolutionProof, Map[FOLVar, Var] ), ResolutionProof]() def f( p: ResolutionProof, vars: Map[FOLVar, Var] ): ResolutionProof = { g( p, freeVariables( p.conclusion ) map { case v: FOLVar => v -> vars( v ) } toMap ) } def g( p: ResolutionProof, vars: Map[FOLVar, Var] ): ResolutionProof = memo.getOrElseUpdate( ( p, vars ), p match { case Refl( term: FOLTerm ) => Refl( back( term, vars ) ) case Taut( atom: FOLAtom ) => Taut( back( atom, vars ) ) case Input( clause ) => ( for ( original <- originalInputs; subst <- syntacticMatching( original.toDisjunction, back( clause.toDisjunction.asInstanceOf[FOLFormula], vars ) ) ) yield Subst.ifNecessary( Input( original ), subst ) ).head case Subst( subProof, subst ) => val subProofVars = freeVariables( subProof.conclusion ).map { case v @ FOLVar( name ) => v -> Var( name, subst( v ) match { case Apps( head: FOLFunctionConst, _ ) => termReification( head ) match { case Const( _, FunctionType( retType, _ ) ) => retType } case u: FOLVar => vars( u ).ty } ) }.toMap val subProof_ = f( subProof, subProofVars ) val newSubst = Substitution( freeVariables( subProof.conclusion ) map { case v @ FOLVar( name ) => subProofVars( v ) -> back( subst( v ).asInstanceOf[FOLTerm], vars ) } ) Subst( subProof_, newSubst ) case Factor( subProof, idx1, idx2 ) => Factor( f( subProof, vars ), idx1, idx2 ) case Resolution( subProof1, idx1, subProof2, idx2 ) => val subProofVars = infer( subProof1.conclusion( idx1 ).asInstanceOf[FOLAtom], vars ) val q1 = f( subProof1, subProofVars ) val q2 = f( subProof2, subProofVars ) Resolution( q1, idx1, q2, idx2 ) case Paramod( subProof1, eq, ltr, subProof2, lit, Abs( v: FOLVar, con: FOLAtom ) ) => val subProofVars = infer( subProof1.conclusion( eq ).asInstanceOf[FOLAtom], vars ) val q1 = f( subProof1, subProofVars ) val q2 = f( subProof2, subProofVars ) val conVars = infer( con, vars ) val newCon = Abs( conVars( v ), back( con, conVars ) ) Paramod( q1, eq, ltr, q2, lit, newCon ) case Flip( subProof1, idx1 ) => Flip( f( subProof1, vars ), idx1 ) // FIXME: propositional } ) f( proof, Map() ) } def back( et: ExpansionTree, shallow: Formula, freeVars: Map[FOLVar, Var] ): ExpansionTree = ( et, shallow ) match { case ( ETAtom( atom: FOLAtom, pol ), _ ) => ETAtom( back( atom, freeVars ), pol ) case ( ETWeakening( _, pol ), _ ) => ETWeakening( shallow, pol ) case ( ETMerge( a, b ), _ ) => ETMerge( back( a, shallow, freeVars ), back( b, shallow, freeVars ) ) case ( _: ETBottom | _: ETTop, _ ) => et case ( ETNeg( a ), Neg( sha ) ) => ETNeg( back( a, sha, freeVars ) ) case ( ETAnd( a, b ), And( sha, shb ) ) => ETAnd( back( a, sha, freeVars ), back( b, shb, freeVars ) ) case ( ETOr( a, b ), Or( sha, shb ) ) => ETOr( back( a, sha, freeVars ), back( b, shb, freeVars ) ) case ( ETImp( a, b ), Imp( sha, shb ) ) => ETImp( back( a, sha, freeVars ), back( b, shb, freeVars ) ) case ( ETWeakQuantifier( _, insts ), Quant( x, sh, isForall ) ) => ETWeakQuantifier( shallow, for ( ( t: FOLTerm, inst ) <- insts ) yield { val childFreeVars = infer( t, x.ty, freeVars ) val t_ = back( t, childFreeVars ) t_ -> back( inst, Substitution( x -> t_ )( sh ), childFreeVars ) } ) } def back( expansionProof: ExpansionProof, endSequent: HOLSequent ): ExpansionProof = { require( expansionProof.shallow isSubsetOf endSequent.map( forward( _, Map[Var, FOLVar]() ) ) ) ExpansionProof( for { et <- expansionProof.expansionSequent originalSh <- endSequent.elements if forward( originalSh, Map[Var, FOLVar]() ) == et.shallow } yield back( et, originalSh, Map() ) ) } def back( t: FOLTerm, freeVars: Map[FOLVar, Var] ): Expr = t match { case v: FOLVar => freeVars( v ) case Apps( c: FOLFunctionConst, args ) => termReification( c )( args map { _.asInstanceOf[FOLTerm] } map { back( _, freeVars ) }: _* ) } def back( formula: FOLFormula, freeVars: Map[FOLVar, Var] ): Formula = formula match { case f @ Top() => f case f @ Bottom() => f case Neg( f ) => Neg( back( f, freeVars ) ) case And( a, b ) => And( back( a, freeVars ), back( b, freeVars ) ) case Or( a, b ) => Or( back( a, freeVars ), back( b, freeVars ) ) case Eq( a, b ) => Eq( back( a, freeVars ), back( b, freeVars ) ) case Apps( c: FOLAtomConst, args ) => predicateReification( c )( args map { _.asInstanceOf[FOLTerm] } map { back( _, freeVars ) }: _* ) } def back( atom: FOLAtom, freeVars: Map[FOLVar, Var] ): Atom = back( atom: FOLFormula, freeVars ).asInstanceOf[Atom] } /** * Reduces finding a resolution proof of a many-sorted clause set to the first-order case. * * Sorts are simply ignored and we make a best effort to convert the resolution refutation back. */ case object ErasureReductionCNF extends Reduction_[Set[HOLClause], ResolutionProof] { override def forward( problem: Set[HOLClause] ): ( Set[HOLClause], ( ResolutionProof ) => ResolutionProof ) = { val helper = new ErasureReductionHelper( problem flatMap { constants( _ ) } ) ( problem map helper.forward, helper.back( _, problem ) ) } } /** * Reduces finding an expansion proof of a many-sorted sequent to the first-order case. * * Sorts are simply ignored and we make a best effort to convert the expansion tree. */ case object ErasureReductionET extends Reduction_[HOLSequent, ExpansionProof] { override def forward( problem: HOLSequent ): ( HOLSequent, ( ExpansionProof ) => ExpansionProof ) = { val helper = new ErasureReductionHelper( constants( problem ) ) ( helper.forward( problem ), helper.back( _, problem ) ) } } private class PredicateReductionHelper( constants: Set[Const] ) { private val nameGen = rename awayFrom constants val baseTypes = constants flatMap { case Const( _, FunctionType( ret, args ) ) => ret +: args } val predicateForType = baseTypes.map { ty => ty -> HOLAtomConst( nameGen fresh s"is_$ty", ty ) }.toMap val predicates = predicateForType.values.toSet val predicateAxioms = constants.map { case c @ Const( _, FunctionType( retType, argTypes ) ) => val args = argTypes.zipWithIndex map { case ( t, i ) => Var( s"x$i", t ) } And( args map { a => predicateForType( a.ty )( a ) } ) --> predicateForType( retType )( c( args: _* ) ) } val nonEmptyWitnesses = baseTypes.map { ty => Const( nameGen fresh s"nonempty_$ty", ty ) } val nonEmptyAxioms = nonEmptyWitnesses.map { w => predicateForType( w.ty )( w ) } val extraAxioms = existentialClosure( predicateAxioms ++: nonEmptyAxioms ++: Sequent() ) val extraAxiomClauses = CNFn( extraAxioms.toDisjunction ) private def guard( formula: Formula ): Formula = formula match { case Top() | Bottom() | Atom( _, _ ) => formula case Neg( f ) => Neg( guard( f ) ) case And( f, g ) => And( guard( f ), guard( g ) ) case Or( f, g ) => Or( guard( f ), guard( g ) ) case Imp( f, g ) => Imp( guard( f ), guard( g ) ) case All( x @ Var( _, t ), f ) => All( x, predicateForType( t )( x ) --> guard( f ) ) case Ex( x @ Var( _, t ), f ) => Ex( x, predicateForType( t )( x ) & guard( f ) ) } private def guardAndAddAxioms( sequent: HOLSequent ): HOLSequent = extraAxioms ++ sequent.map( guard ) def forward( sequent: HOLSequent ): HOLSequent = guardAndAddAxioms( sequent ) def forward( cnf: Set[HOLClause] ): Set[HOLClause] = extraAxiomClauses union cnf.map( forward ) def forward( clause: HOLClause )( implicit dummyImplicit: DummyImplicit ): HOLClause = CNFp( guard( universalClosure( clause.toImplication ) ) ).head def back( proof: ResolutionProof ): ResolutionProof = mapInputClauses( proof ) { cls => val clauseWithoutPredicates = cls filterNot { case Apps( c: HOLAtomConst, _ ) => predicates contains c } if ( clauseWithoutPredicates.nonEmpty ) Input( clauseWithoutPredicates ) else Input( cls ) } def unguard( formula: Formula ): Formula = formula match { case Top() | Bottom() | Atom( _, _ ) => formula case Neg( f ) => Neg( unguard( f ) ) case And( f, g ) => And( unguard( f ), unguard( g ) ) case Or( f, g ) => Or( unguard( f ), unguard( g ) ) case Imp( f, g ) => Imp( unguard( f ), unguard( g ) ) case All( x, Imp( grd, f ) ) => All( x, unguard( f ) ) case Ex( x, And( grd, f ) ) => Ex( x, unguard( f ) ) } def unguard( et: ExpansionTree ): ExpansionTree = et match { case ETMerge( a, b ) => ETMerge( unguard( a ), unguard( b ) ) case ETWeakening( f, pol ) => ETWeakening( unguard( f ), pol ) case _: ETAtom => et case _: ETTop | _: ETBottom => et case ETNeg( a ) => ETNeg( unguard( a ) ) case ETAnd( a, b ) => ETAnd( unguard( a ), unguard( b ) ) case ETOr( a, b ) => ETOr( unguard( a ), unguard( b ) ) case ETImp( a, b ) => ETImp( unguard( a ), unguard( b ) ) case ETWeakQuantifier( shallow, insts ) => ETWeakQuantifier( unguard( shallow ), insts map { case ( t, ETImp( _, inst ) ) if et.polarity.inAnt => t -> unguard( inst ) case ( t, ETAnd( _, inst ) ) if et.polarity.inSuc => t -> unguard( inst ) } ) } def back( expansionProof: ExpansionProof, endSequent: HOLSequent ): ExpansionProof = ExpansionProof( expansionProof.expansionSequent.zipWithIndex collect { case ( et, i ) if !extraAxioms.contains( et.shallow, i.polarity ) => unguard( et ) } ) } /** * Simplifies the problem of finding a resolution refutation of a many-sorted clause set by adding * predicates for each of the sorts. The resulting problem is still many-sorted. */ case object PredicateReductionCNF extends Reduction_[Set[HOLClause], ResolutionProof] { override def forward( problem: Set[HOLClause] ): ( Set[HOLClause], ( ResolutionProof ) => ResolutionProof ) = { val helper = new PredicateReductionHelper( problem flatMap { constants( _ ) } ) ( helper forward problem, helper.back ) } } /** * Simplifies the problem of finding an expansion proof of a many-sorted sequent by adding * predicates for each of the sorts. The resulting problem is still many-sorted. */ case object PredicateReductionET extends Reduction_[HOLSequent, ExpansionProof] { override def forward( problem: HOLSequent ): ( HOLSequent, ( ExpansionProof ) => ExpansionProof ) = { val helper = new PredicateReductionHelper( constants( problem ) ) ( helper.forward( problem ), helper.back( _, problem ) ) } } private object removeReflsAndTauts { def apply( proof: ResolutionProof ): ResolutionProof = new ResolutionProofVisitor { override def apply( p: ResolutionProof ): ResolutionProof = { for { Eq( t, t_ ) <- p.conclusion.succedent if t == t_ } return Refl( t ) if ( p.conclusion.isTaut ) return Taut( p.conclusion.antecedent intersect p.conclusion.succedent head ) super.apply( p ) } }.apply( proof ) } private object definitionIntroducingBackReplacement { def apply( proof: ResolutionProof, defs: Map[Const, Expr] ): ResolutionProof = { val nonBoolReplaced = TermReplacement( proof, defs.filterNot { _._1.isInstanceOf[HOLAtomConst] }.toMap ) new ResolutionProofVisitor { override def apply( p: ResolutionProof ): ResolutionProof = p.conclusion match { case Sequent( Seq(), Seq( Eq( t, t_ ) ) ) if t == t_ => Refl( t ) case Sequent( Seq(), Seq( And( Imp( f @ Apps( c: HOLAtomConst, args ), g ), Imp( g_, f_ ) ) ) ) if f == f_ && g == g_ && defs.contains( c ) => var defn: ResolutionProof = Defn( c, defs( c ) ) for ( ev <- args ) defn = AllR( defn, Suc( 0 ), ev.asInstanceOf[Var] ) defn case _ => super.apply( p ) } }.apply( nonBoolReplaced ) } } private class LambdaEliminationReductionHelper( constants: Set[Const], lambdas: Set[Abs], addAxioms: Boolean ) { val nameGen = rename.awayFrom( constants ) private val replacements = mutable.Map[Abs, Expr]() private val extraAxioms = mutable.Buffer[Formula]() def equalOrEquivalent( a: Expr, b: Expr ) = if ( a.ty == To ) a <-> b else a === b private def setup( e: Expr ): Expr = e match { case App( a, b ) => App( setup( a ), setup( b ) ) case v: Var => v case c: Const => c case lam: Abs if replacements contains lam => replacements( lam ) case lam @ Abs( x, t ) => val fvs = freeVariables( lam ).toSeq val lamSym = Const( nameGen freshWithIndex "lambda", FunctionType( lam.ty, fvs.map { _.ty } ) ) replacements( lam ) = lamSym( fvs: _* ) extraAxioms += universalClosure( equalOrEquivalent( replacements( lam )( x ), t ) ) replacements( lam ) } private def setup( f: Formula ): Formula = f match { case All( x, g ) => All( x, setup( g ) ) case Ex( x, g ) => Ex( x, setup( g ) ) case Top() | Bottom() => f case Neg( g ) => Neg( setup( g ) ) case And( g, h ) => And( setup( g ), setup( h ) ) case Or( g, h ) => Or( setup( g ), setup( h ) ) case Imp( g, h ) => Imp( setup( g ), setup( h ) ) case Apps( hd, args ) => hd( args map setup: _* ).asInstanceOf[Formula] } lambdas foreach setup if ( !addAxioms ) extraAxioms.clear() val extraAxiomClauses = extraAxioms.flatMap { case All.Block( vs, f ) => Seq( Seq() :- Seq( f ) ) } def delambdaify( e: Expr ): Expr = e match { case App( a, b ) => App( delambdaify( a ), delambdaify( b ) ) case lam: Abs => replacements( lam ) case _: Var | _: Const => e } def delambdaify( f: Formula ): Formula = f match { case All( x, g ) => All( x, delambdaify( g ) ) case Ex( x, g ) => Ex( x, delambdaify( g ) ) case Top() | Bottom() => f case Neg( g ) => Neg( delambdaify( g ) ) case And( g, h ) => And( delambdaify( g ), delambdaify( h ) ) case Or( g, h ) => Or( delambdaify( g ), delambdaify( h ) ) case Imp( g, h ) => Imp( delambdaify( g ), delambdaify( h ) ) case Apps( hd, args ) => hd( args map delambdaify: _* ).asInstanceOf[Formula] } def forward( sequent: HOLSequent ): HOLSequent = extraAxioms ++: sequent map delambdaify def forward( cnf: Set[HOLSequent] ): Set[HOLSequent] = cnf.map( _.map( delambdaify ).map( _.asInstanceOf[Atom] ) ) ++ extraAxiomClauses val backReplacements = replacements. map { case ( abs, Apps( c: Const, args ) ) => c -> Abs( args.map( _.asInstanceOf[Var] ), abs ) } def back( expansion: ExpansionProof ): ExpansionProof = ExpansionProof( TermReplacement( expansion.expansionSequent.filterNot { e => extraAxioms.contains( e.shallow ) }, { case expr => BetaReduction.betaNormalize( TermReplacement( expr, backReplacements.toMap ) ) } ) ) def back( resolution: ResolutionProof ): ResolutionProof = definitionIntroducingBackReplacement( resolution, backReplacements.toMap ) } /** * Replaces lambda abstractions by fresh function symbols, together with axioms that axiomatize them. */ case class LambdaEliminationReduction( extraAxioms: Boolean = true ) extends OneWayReduction_[HOLSequent] { override def forward( problem: HOLSequent ) = { val lambdas = atoms( problem ).flatMap { subTerms( _ ) }.collect { case a: Abs => a }.toSet val helper = new LambdaEliminationReductionHelper( constants( problem ), lambdas, extraAxioms ) ( helper.forward( problem ), _ => throw new UnsupportedOperationException ) } } /** * Replaces lambda abstractions by fresh function symbols, together with axioms that axiomatize them. */ case class LambdaEliminationReductionET( extraAxioms: Boolean = true ) extends Reduction_[HOLSequent, ExpansionProof] { override def forward( problem: HOLSequent ): ( HOLSequent, ( ExpansionProof ) => ExpansionProof ) = { val lambdas = atoms( problem ).flatMap { subTerms( _ ) }.collect { case a: Abs => a } val helper = new LambdaEliminationReductionHelper( constants( problem ), lambdas, extraAxioms ) ( helper.forward( problem ), helper.back( _ ) ) } } /** * Replaces lambda abstractions by fresh function symbols, together with axioms that axiomatize them. */ case class LambdaEliminationReductionRes( extraAxioms: Boolean = true ) extends Reduction_[HOLSequent, ResolutionProof] { override def forward( problem: HOLSequent ): ( HOLSequent, ( ResolutionProof ) => ResolutionProof ) = { val lambdas = atoms( problem ).flatMap { subTerms( _ ) }.collect { case a: Abs => a } val helper = new LambdaEliminationReductionHelper( constants( problem ), lambdas, extraAxioms ) ( helper.forward( problem ), helper.back( _ ) ) } } /** * Replaces lambda abstractions by fresh function symbols, together with axioms that axiomatize them. */ case class LambdaEliminationReductionCNFRes( extraAxioms: Boolean = true ) extends Reduction_[Set[HOLSequent], ResolutionProof] { override def forward( problem: Set[HOLSequent] ): ( Set[HOLSequent], ( ResolutionProof ) => ResolutionProof ) = { val lambdas = problem.flatMap( atoms( _ ) ).flatMap { subTerms( _ ) }.collect { case a: Abs => a } val helper = new LambdaEliminationReductionHelper( problem.flatMap( constants( _ ) ), lambdas, extraAxioms ) ( helper.forward( problem ), helper.back ) } } private class HOFunctionReductionHelper( names: Set[VarOrConst], addExtraAxioms: Boolean ) { private val nameGen = rename.awayFrom( names ) val baseTys = names map { _.ty } flatMap { baseTypes( _ ) } private val typeNameGen = new NameGenerator( baseTys.map { _.name } ) val partialAppTypes = names map { _.ty } flatMap { case FunctionType( _, argTypes ) => argTypes.filterNot { _.isInstanceOf[TBase] } } map { t => ( TBase( typeNameGen freshWithIndex "fun" ), t ) } toMap def equalOrEquivalent( a: Expr, b: Expr ) = if ( a.ty == To ) a <-> b else a === b val partiallyAppedTypes = partialAppTypes.map { _.swap } val applyFunctions = partialAppTypes.map { case ( partialAppType, ty @ FunctionType( ret, args ) ) => partialAppType -> Const( nameGen freshWithIndex "apply", partialAppType -> ty ) } val partialApplicationFuns = for { ( partialAppType, funType @ FunctionType( ret, argTypes ) ) <- partialAppTypes g @ Const( _, FunctionType( `ret`, gArgTypes ) ) <- names if gArgTypes endsWith argTypes } yield ( Const( nameGen freshWithIndex "partial", FunctionType( partialAppType, gArgTypes.dropRight( argTypes.size ) map reduceArgTy ) ), g, funType ) val newConstants = names.collect { case c @ Const( n, t ) => c -> Const( n, reduceFunTy( t ) ) }.toMap val extraAxioms = if ( !addExtraAxioms ) Set() else for { f @ Const( _, FunctionType( ret, ( partialAppType: TBase ) :: argTypes ) ) <- applyFunctions.values ( partialApplicationFun @ Const( _, FunctionType( `partialAppType`, pappArgTypes ) ), g, _ ) <- partialApplicationFuns } yield { val varGen = rename.awayFrom( Set[Var]() ) val gArgVars = pappArgTypes map { Var( varGen freshWithIndex "x", _ ) } val fArgVars = argTypes map { Var( varGen freshWithIndex "y", _ ) } universalClosure( equalOrEquivalent( applyFunctions( partialAppType )( partialApplicationFun( gArgVars: _* ) )( fArgVars: _* ), newConstants( g )( gArgVars: _* )( fArgVars: _* ) ) ) } val extraAxiomClauses = extraAxioms.flatMap { case All.Block( vs, f ) => Seq( Seq() :- Seq( f ) ) } def reduceFunTy( t: Ty ): Ty = { val FunctionType( ret, args ) = t FunctionType( ret, args map reduceArgTy ) } def reduceArgTy( t: Ty ): TBase = t match { case t: TBase => t case _ => partiallyAppedTypes( t ) } def reduce( f: Formula ): Formula = reduce( f: Expr ).asInstanceOf[Formula] def reduce( e: Expr ): Expr = e match { case All( Var( x, t ), f ) => All( Var( x, reduceArgTy( t ) ), reduce( f ) ) case Ex( Var( x, t ), f ) => Ex( Var( x, reduceArgTy( t ) ), reduce( f ) ) case Top() | Bottom() => e case Neg( f ) => Neg( reduce( f ) ) case And( g, h ) => And( reduce( g ), reduce( h ) ) case Or( g, h ) => Or( reduce( g ), reduce( h ) ) case Imp( g, h ) => Imp( reduce( g ), reduce( h ) ) case Var( n, t ) => Var( n, reduceArgTy( t ) ) case Apps( f: Const, args ) if partiallyAppedTypes.contains( e.ty ) => val Some( ( p, _, _ ) ) = partialApplicationFuns find { paf => paf._2 == f && paf._3 == e.ty } p( args map reduce: _* ) case Apps( f: Var, args ) => applyFunctions( reduceArgTy( f.ty ) )( reduce( f ) )( args map reduce: _* ) case Apps( f: Const, args ) => newConstants( f )( args map reduce: _* ) // case Abs( Var( x, t ), b ) => Abs( Var( x, reduceArgTy( t ) ), reduce( b ) ) } def forward( sequent: HOLSequent ): HOLSequent = extraAxioms ++: sequent.map( reduce ) def forward( cnf: Set[HOLSequent] ): Set[HOLSequent] = extraAxiomClauses.toSet ++ cnf.map( _.map( reduce ) ) def back( formula: Formula ): Formula = back( formula: Expr ).asInstanceOf[Formula] def back( expr: Expr ): Expr = expr match { case Top() | Bottom() => expr case Neg( f ) => Neg( back( f ) ) case And( f, g ) => And( back( f ), back( g ) ) case Or( f, g ) => Or( back( f ), back( g ) ) case Imp( f, g ) => Imp( back( f ), back( g ) ) case All( x, f ) => All( back( x ).asInstanceOf[Var], back( f ) ) case Ex( x, f ) => Ex( back( x ).asInstanceOf[Var], back( f ) ) case Eq( a, b ) => Eq( back( a ), back( b ) ) case Apps( f, args ) if partialApplicationFuns.exists { _._1 == f } => partialApplicationFuns.find { _._1 == f }.get._2( args.map( back ) ) case Apps( app, Seq( f, args @ _* ) ) if applyFunctions.exists { _._2 == app } => back( f )( args.map( back ) ) case Apps( f: Const, args ) => newConstants.map( _.swap ).getOrElse( f, f )( args map back ) case Var( n, t: TBase ) => Var( n, partiallyAppedTypes.map( _.swap ).getOrElse( t, t ) ) case Abs( v, f ) => Abs( back( v ).asInstanceOf[Var], back( f ) ) } def back( et: ExpansionTree ): ExpansionTree = et match { case ETMerge( a, b ) => ETMerge( back( a ), back( b ) ) case ETWeakening( f, pol ) => ETWeakening( back( f ), pol ) case ETAtom( atom, pol ) => ETAtom( back( atom ).asInstanceOf[Atom], pol ) case _: ETTop | _: ETBottom => et case ETNeg( a ) => ETNeg( back( a ) ) case ETAnd( a, b ) => ETAnd( back( a ), back( b ) ) case ETOr( a, b ) => ETOr( back( a ), back( b ) ) case ETImp( a, b ) => ETImp( back( a ), back( b ) ) case ETWeakQuantifier( shallow, insts ) => ETWeakQuantifier( back( shallow ), for ( ( t, c ) <- insts ) yield back( t ) -> back( c ) ) } def back( expansionProof: ExpansionProof ): ExpansionProof = ExpansionProof( expansionProof.expansionSequent.zipWithIndex collect { case ( et, i ) if !( i.isAnt && extraAxioms.toSeq.contains( et.shallow ) ) => back( et ) } ) def back( resolutionProof: ResolutionProof ): ResolutionProof = removeReflsAndTauts( TermReplacement( resolutionProof, { case expr => back( expr ) } ) ) } /** * Replaces the use of higher-order functions by fresh function symbols, together with axioms that axiomatize them. */ case class HOFunctionReduction( extraAxioms: Boolean = true ) extends OneWayReduction_[HOLSequent] { override def forward( problem: HOLSequent ) = { val helper = new HOFunctionReductionHelper( containedNames( problem ), extraAxioms ) ( helper.forward( problem ), _ => throw new UnsupportedOperationException ) } } /** * Replaces the use of higher-order functions by fresh function symbols, together with axioms that axiomatize them. */ case class HOFunctionReductionET( extraAxioms: Boolean = true ) extends Reduction_[HOLSequent, ExpansionProof] { override def forward( problem: HOLSequent ) = { val helper = new HOFunctionReductionHelper( containedNames( problem ), extraAxioms ) ( helper.forward( problem ), helper.back( _ ) ) } } /** * Replaces the use of higher-order functions by fresh function symbols, together with axioms that axiomatize them. */ case class HOFunctionReductionRes( extraAxioms: Boolean = true ) extends Reduction_[HOLSequent, ResolutionProof] { override def forward( problem: HOLSequent ) = { val helper = new HOFunctionReductionHelper( containedNames( problem ), extraAxioms ) ( helper.forward( problem ), helper.back( _ ) ) } } /** * Replaces the use of higher-order functions by fresh function symbols, together with axioms that axiomatize them. */ case class HOFunctionReductionCNFRes( extraAxioms: Boolean = true ) extends Reduction_[Set[HOLSequent], ResolutionProof] { override def forward( problem: Set[HOLSequent] ) = { val helper = new HOFunctionReductionHelper( containedNames( problem ), extraAxioms ) ( helper.forward( problem ), helper.back ) } } /** * Reduces finding an expansion proof for a sequent to finding a resolution proof of a clause set. */ case object CNFReductionExpRes extends Reduction[HOLSequent, Set[HOLClause], ExpansionProof, ResolutionProof] { override def forward( problem: HOLSequent ): ( Set[HOLClause], ( ResolutionProof ) => ExpansionProof ) = { val cnf = structuralCNF( problem, propositional = false ) ( cnf.map( _.conclusion.map( _.asInstanceOf[Atom] ) ), res => ResolutionToExpansionProof( mapInputClauses( res )( seq => cnf.find( _.conclusion == seq ).get ) ) ) } } /** * Reduces finding an LK proof for a sequent to finding a resolution proof of a clause set. */ case object CNFReductionLKRes extends Reduction[HOLSequent, Set[HOLClause], LKProof, ResolutionProof] { override def forward( problem: HOLSequent ): ( Set[HOLClause], ( ResolutionProof ) => LKProof ) = { val cnf = structuralCNF( problem, propositional = false ) ( cnf.map( _.conclusion.map( _.asInstanceOf[Atom] ) ), res => ResolutionToLKProof( mapInputClauses( res )( seq => cnf.find( _.conclusion == seq ).get ) ) ) } } /** * Reduces finding a resolution proof for a sequent to finding a resolution proof of a clause set. */ case object CNFReductionResRes extends Reduction[HOLSequent, Set[HOLClause], ResolutionProof, ResolutionProof] { override def forward( problem: HOLSequent ): ( Set[HOLClause], ( ResolutionProof ) => ResolutionProof ) = { val cnf = structuralCNF( problem, propositional = false, structural = false /* FIXME */ ) ( cnf.map( _.conclusion.map( _.asInstanceOf[Atom] ) ), fixDerivation( _, cnf ) ) } } /** * Reduces finding a resolution proof for a sequent set to finding a resolution proof of a clause set. */ case object CNFReductionSequentsResRes extends Reduction[Set[HOLSequent], Set[HOLClause], ResolutionProof, ResolutionProof] { override def forward( problem: Set[HOLSequent] ): ( Set[HOLClause], ( ResolutionProof ) => ResolutionProof ) = { val clausifier = new Clausifier( propositional = false, structural = false, bidirectionalDefs = false, nameGen = rename.awayFrom( containedNames( problem ) ) ) problem.map( Input ).foreach( clausifier.expand ) ( Set() ++ clausifier.cnf.view.map( _.conclusion.map( _.asInstanceOf[Atom] ) ), fixDerivation( _, clausifier.cnf ) ) } } /** * Simplifies the problem by grounding free variables. */ case object GroundingReductionET extends Reduction_[HOLSequent, ExpansionProof] { override def forward( problem: HOLSequent ): ( HOLSequent, ( ExpansionProof ) => ExpansionProof ) = { val nameGen = rename.awayFrom( constants( problem ) ) val subst = for ( v @ Var( name, ty ) <- freeVariables( problem ) ) yield v -> Const( nameGen fresh name, ty ) ( Substitution( subst )( problem ), exp => { require( exp.eigenVariables intersect subst.map( _._1 ) isEmpty ) TermReplacement( exp, subst.map( _.swap ).toMap ) } ) } }
gebner/gapt
core/src/main/scala/at/logic/gapt/proofs/reduction/manySorted.scala
Scala
gpl-3.0
34,639
/* * Copyright 2019 ACINQ SAS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.acinq.eclair.blockchain import fr.acinq.bitcoin.Crypto.PublicKey import fr.acinq.bitcoin.{Satoshi, Transaction} import fr.acinq.eclair.blockchain.fee.FeeratePerKw import scodec.bits.ByteVector import scala.concurrent.{ExecutionContext, Future} /** * Created by PM on 06/07/2017. */ /** This trait lets users fund lightning channels. */ trait OnChainChannelFunder { import OnChainWallet.MakeFundingTxResponse /** Create a channel funding transaction with the provided pubkeyScript. */ def makeFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: FeeratePerKw)(implicit ec: ExecutionContext): Future[MakeFundingTxResponse] /** * Committing *must* include publishing the transaction on the network. * * We need to be very careful here, we don't want to consider a commit 'failed' if we are not absolutely sure that the * funding tx won't end up on the blockchain: if that happens and we have cancelled the channel, then we would lose our * funds! * * @return true if success * false IF AND ONLY IF *HAS NOT BEEN PUBLISHED* otherwise funds are at risk!!! */ def commit(tx: Transaction)(implicit ec: ExecutionContext): Future[Boolean] /** * Rollback a transaction that we failed to commit: this probably translates to "release locks on utxos". */ def rollback(tx: Transaction)(implicit ec: ExecutionContext): Future[Boolean] /** * Tests whether the inputs of the provided transaction have been spent by another transaction. * Implementations may always return false if they don't want to implement it. */ def doubleSpent(tx: Transaction)(implicit ec: ExecutionContext): Future[Boolean] } /** This trait lets users generate on-chain addresses and public keys. */ trait OnChainAddressGenerator { /** * @param label used if implemented with bitcoin core, can be ignored by implementation */ def getReceiveAddress(label: String = "")(implicit ec: ExecutionContext): Future[String] /** * @param receiveAddress if provided, will extract the public key from this address, otherwise will generate a new * address and return the underlying public key. */ def getReceivePubkey(receiveAddress: Option[String] = None)(implicit ec: ExecutionContext): Future[PublicKey] } /** This trait lets users check the wallet's on-chain balance. */ trait OnChainBalanceChecker { import OnChainWallet.OnChainBalance /** Get our on-chain balance */ def onChainBalance()(implicit ec: ExecutionContext): Future[OnChainBalance] } /** * This trait defines the minimal set of feature an on-chain wallet needs to implement to support lightning. */ trait OnChainWallet extends OnChainChannelFunder with OnChainAddressGenerator with OnChainBalanceChecker object OnChainWallet { final case class OnChainBalance(confirmed: Satoshi, unconfirmed: Satoshi) final case class MakeFundingTxResponse(fundingTx: Transaction, fundingTxOutputIndex: Int, fee: Satoshi) }
ACINQ/eclair
eclair-core/src/main/scala/fr/acinq/eclair/blockchain/OnChainWallet.scala
Scala
apache-2.0
3,587
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.graph.scala.test.operations import org.apache.flink.api.scala._ import org.apache.flink.graph.scala.test.TestGraphUtils import org.apache.flink.graph.scala.{NeighborsFunctionWithVertexValue, _} import org.apache.flink.graph.{Edge, EdgeDirection, ReduceNeighborsFunction, Vertex} import org.apache.flink.test.util.{MultipleProgramsTestBase, TestBaseUtils} import org.apache.flink.util.Collector import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.Parameterized import _root_.scala.collection.JavaConverters._ @RunWith(classOf[Parameterized]) class ReduceOnNeighborMethodsITCase(mode: MultipleProgramsTestBase.TestExecutionMode) extends MultipleProgramsTestBase(mode) { private var expectedResult: String = null @Test @throws(classOf[Exception]) def testSumOfAllNeighborsNoValue { val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment val graph: Graph[Long, Long, Long] = Graph.fromDataSet(TestGraphUtils .getLongLongVertexData(env), TestGraphUtils.getLongLongEdgeData(env), env) val res = graph.reduceOnNeighbors(new SumNeighbors, EdgeDirection.ALL) .collect().toList expectedResult = "(1,10)\\n" + "(2,4)\\n" + "(3,12)\\n" + "(4,8)\\n" + "(5,8)\\n" TestBaseUtils.compareResultAsText(res.asJava, expectedResult) } @Test @throws(classOf[Exception]) def testSumOfOutNeighborsNoValue { val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment val graph: Graph[Long, Long, Long] = Graph.fromDataSet(TestGraphUtils .getLongLongVertexData(env), TestGraphUtils.getLongLongEdgeData(env), env) val res = graph.reduceOnNeighbors(new SumNeighbors, EdgeDirection.OUT).collect().toList expectedResult = "(1,5)\\n" + "(2,3)\\n" + "(3,9)\\n" + "(4,5)\\n" + "(5,1)\\n" TestBaseUtils.compareResultAsText(res.asJava, expectedResult) } @Test @throws(classOf[Exception]) def testSumOfAllNeighbors { val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment val graph: Graph[Long, Long, Long] = Graph.fromDataSet(TestGraphUtils .getLongLongVertexData(env), TestGraphUtils.getLongLongEdgeData(env), env) val result = graph.groupReduceOnNeighbors(new SumAllNeighbors, EdgeDirection.ALL) val res = result.collect().toList expectedResult = "(1,11)\\n" + "(2,6)\\n" + "(3,15)\\n" + "(4,12)\\n" + "(5,13)\\n" TestBaseUtils.compareResultAsText(res.asJava, expectedResult) } @Test @throws(classOf[Exception]) def testSumOfInNeighborsNoValueMultipliedByTwoIdGreaterThanTwo = { val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment val graph: Graph[Long, Long, Long] = Graph.fromDataSet(TestGraphUtils .getLongLongVertexData(env), TestGraphUtils.getLongLongEdgeData(env), env) val result = graph.groupReduceOnNeighbors(new SumInNeighborsNoValueMultipliedByTwoIdGreaterThanTwo, EdgeDirection.IN) val res = result.collect().toList expectedResult = "(3,59)\\n" + "(3,118)\\n" + "(4,204)\\n" + "(4,102)\\n" + "(5,570)\\n" + "(5,285)" TestBaseUtils.compareResultAsText(res.asJava, expectedResult) } final class SumNeighbors extends ReduceNeighborsFunction[Long] { override def reduceNeighbors(firstNeighbor: Long, secondNeighbor: Long): Long = { firstNeighbor + secondNeighbor } } final class SumAllNeighbors extends NeighborsFunctionWithVertexValue[Long, Long, Long, (Long, Long)] { @throws(classOf[Exception]) def iterateNeighbors(vertex: Vertex[Long, Long], neighbors: Iterable[(Edge[Long, Long], Vertex[Long, Long])], out: Collector[(Long, Long)]) { var sum: Long = 0 for (neighbor <- neighbors) { sum += neighbor._2.getValue } out.collect((vertex.getId, sum + vertex.getValue)) } } final class SumInNeighborsNoValueMultipliedByTwoIdGreaterThanTwo extends NeighborsFunction[Long, Long, Long, (Long, Long)] { @throws(classOf[Exception]) def iterateNeighbors(neighbors: Iterable[(Long, Edge[Long, Long], Vertex[Long, Long])], out: Collector[(Long, Long)]) { var sum: Long = 0 var next: (Long, Edge[Long, Long], Vertex[Long, Long]) = null val neighborsIterator: Iterator[(Long, Edge[Long, Long], Vertex[Long, Long])] = neighbors.iterator while (neighborsIterator.hasNext) { next = neighborsIterator.next sum += next._3.getValue * next._2.getValue } if (next._1 > 2) { out.collect(new Tuple2[Long, Long](next._1, sum)) out.collect(new Tuple2[Long, Long](next._1, sum * 2)) } } } }
hequn8128/flink
flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/ReduceOnNeighborMethodsITCase.scala
Scala
apache-2.0
5,441
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.dllib.nn.tf import java.io.File import com.google.protobuf.ByteString import com.intel.analytics.bigdl.dllib.tensor.Tensor import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest import com.intel.analytics.bigdl.dllib.utils.tf.TFRecordIterator import org.tensorflow.example.Example import org.tensorflow.framework.DataType class DecodeRawSerialTest extends ModuleSerializationTest { override def test(): Unit = { val decodeRaw = new DecodeRaw[Float](DataType.DT_UINT8, true).setName("decodeRaw") val input = getInputs("raw") runSerializationTest(decodeRaw, input) } private def getInputs(name: String): Tensor[ByteString] = { import com.intel.analytics.bigdl.dllib.utils.tf.TFTensorNumeric.NumericByteString val index = name match { case "png" => 0 case "jpeg" => 1 case "gif" => 2 case "raw" => 3 } val resource = getClass.getClassLoader.getResource("tf") val path = resource.getPath + File.separator + "decode_image_test_case.tfrecord" val file = new File(path) val bytesVector = TFRecordIterator(file).toVector val pngBytes = bytesVector(index) val example = Example.parseFrom(pngBytes) val imageByteString = example.getFeatures.getFeatureMap.get("image/encoded") .getBytesList.getValueList.get(0) Tensor[ByteString](Array(imageByteString), Array[Int]()) } }
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/tf/DecodeRawSpec.scala
Scala
apache-2.0
2,024
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.spark.integration import java.lang.Boolean.{FALSE, TRUE} import java.{lang => jl} import java.net.URI import java.nio.charset.StandardCharsets import java.nio.file.Files import java.nio.file.Paths import java.sql.Timestamp import java.{util => ju} import java.util.concurrent.TimeUnit import org.elasticsearch.spark.integration.ScalaUtils.propertiesAsScalaMap import org.elasticsearch.spark.rdd.JDKCollectionConvertersCompat.Converters._ import scala.collection.Map import scala.collection.mutable.ArrayBuffer import org.apache.spark.SparkConf import org.apache.spark.SparkContext import org.apache.spark.SparkException import org.apache.spark.sql.Row import org.apache.spark.sql.SQLContext import org.apache.spark.sql.SaveMode import org.apache.spark.sql.types.ArrayType import org.apache.spark.sql.types.Decimal import org.apache.spark.sql.types.DecimalType import org.apache.spark.sql.types.DoubleType import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.MapType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType import org.apache.spark.sql.types.TimestampType import org.apache.spark.storage.StorageLevel.DISK_ONLY import org.apache.spark.storage.StorageLevel.DISK_ONLY_2 import org.elasticsearch.hadoop.{EsHadoopIllegalArgumentException, EsHadoopIllegalStateException} import org.elasticsearch.hadoop.cfg.ConfigurationOptions._ import org.elasticsearch.hadoop.util.StringUtils import org.elasticsearch.hadoop.util.TestSettings import org.elasticsearch.hadoop.util.TestUtils import org.elasticsearch.hadoop.util.TestUtils.resource import org.elasticsearch.hadoop.util.TestUtils.docEndpoint import org.elasticsearch.spark.cfg.SparkSettingsManager import org.elasticsearch.spark.sparkRDDFunctions import org.elasticsearch.spark.sparkStringJsonRDDFunctions import org.elasticsearch.spark.sql.EsSparkSQL import org.elasticsearch.spark.sql.ScalaEsRow import org.elasticsearch.spark.sql.SchemaUtilsTestable import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL import org.elasticsearch.spark.sql.sparkDatasetFunctions import org.elasticsearch.spark.sql.sqlContextFunctions import org.hamcrest.Matchers.containsString import org.hamcrest.Matchers.is import org.hamcrest.Matchers.not import org.junit.AfterClass import org.junit.Assert.assertEquals import org.junit.Assert.assertFalse import org.junit.Assert.assertThat import org.junit.Assert.assertTrue import org.junit.Assert.fail import org.junit.Assume.assumeFalse import org.junit.Assume.assumeTrue import org.junit.BeforeClass import org.junit.FixMethodOrder import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.MethodSorters import org.junit.runners.Parameterized import org.junit.runners.Parameterized.Parameters import com.esotericsoftware.kryo.io.{Input => KryoInput} import com.esotericsoftware.kryo.io.{Output => KryoOutput} import org.apache.spark.rdd.RDD import javax.xml.bind.DatatypeConverter import org.apache.spark.sql.SparkSession import org.elasticsearch.hadoop.EsAssume import org.elasticsearch.hadoop.TestData import org.elasticsearch.hadoop.cfg.ConfigurationOptions import org.elasticsearch.hadoop.rest.{EsHadoopParsingException, RestUtils} import org.elasticsearch.hadoop.serialization.JsonUtils import org.elasticsearch.hadoop.util.EsMajorVersion import org.junit.Assert._ import org.junit.ClassRule object AbstractScalaEsScalaSparkSQL { @transient val conf = new SparkConf() .setAll(propertiesAsScalaMap(TestSettings.TESTING_PROPS)) .setAppName("estest") .setJars(SparkUtils.ES_SPARK_TESTING_JAR) @transient var cfg: SparkConf = null @transient var sc: SparkContext = null @transient var sqc: SQLContext = null @transient var keywordType: String = "keyword" @transient var textType: String = "text" @transient @ClassRule val testData = new TestData() @BeforeClass def setup() { conf.setAll(propertiesAsScalaMap(TestSettings.TESTING_PROPS)); sc = new SparkContext(conf) sqc = SparkSession.builder().config(conf).getOrCreate().sqlContext val version = TestUtils.getEsClusterInfo.getMajorVersion if (version.before(EsMajorVersion.V_5_X)) { keywordType = "string" textType = "string" } } @AfterClass def cleanup() { if (sc != null) { sc.stop // give jetty time to clean its act up Thread.sleep(TimeUnit.SECONDS.toMillis(3)) } } @Parameters def testParams(): ju.Collection[Array[jl.Object]] = { val list = new ju.ArrayList[Array[jl.Object]]() val noQuery = "" val uriQuery = "?q=*" val dslQuery = """ {"query" : { "match_all" : { } } } """ // no query meta, push, strict, filter, encode, query list.add(Array("default", FALSE, TRUE, FALSE, TRUE, FALSE, noQuery)) list.add(Array("defaultstrict", FALSE, TRUE, TRUE, TRUE, FALSE, noQuery)) list.add(Array("defaultnopush", FALSE, FALSE, FALSE, TRUE, FALSE, noQuery)) list.add(Array("withmeta", TRUE, TRUE, FALSE, TRUE, FALSE, noQuery)) list.add(Array("withmetastrict", TRUE, TRUE, TRUE, TRUE, FALSE, noQuery)) list.add(Array("withmetanopush", TRUE, FALSE, FALSE, TRUE, FALSE, noQuery)) // disable double filtering meta, push, strict, filter, encode, query list.add(Array("default_skiphandled", FALSE, TRUE, FALSE, FALSE, FALSE, noQuery)) list.add(Array("defaultstrict_skiphandled", FALSE, TRUE, TRUE, FALSE, FALSE, noQuery)) list.add(Array("defaultnopush_skiphandled", FALSE, FALSE, FALSE, FALSE, FALSE, noQuery)) list.add(Array("withmeta_skiphandled", TRUE, TRUE, FALSE, FALSE, FALSE, noQuery)) list.add(Array("withmetastrict_skiphandled", TRUE, TRUE, TRUE, FALSE, FALSE, noQuery)) list.add(Array("withmetanopush_skiphandled", TRUE, FALSE, FALSE, FALSE, FALSE, noQuery)) // uri query meta, push, strict, filter, encode, query list.add(Array("defaulturiquery", FALSE, TRUE, FALSE, TRUE, FALSE, uriQuery)) list.add(Array("defaulturiquerystrict", FALSE, TRUE, TRUE, TRUE, FALSE, uriQuery)) list.add(Array("defaulturiquerynopush", FALSE, FALSE, FALSE, TRUE, FALSE, uriQuery)) list.add(Array("withmetauri_query", TRUE, TRUE, FALSE, TRUE, FALSE, uriQuery)) list.add(Array("withmetauri_querystrict", TRUE, TRUE, TRUE, TRUE, FALSE, uriQuery)) list.add(Array("withmetauri_querynopush", TRUE, FALSE, FALSE, TRUE, FALSE, uriQuery)) // disable double filtering meta, push, strict, filter, encode, query list.add(Array("defaulturiquery_skiphandled", FALSE, TRUE, FALSE, FALSE, FALSE, uriQuery)) list.add(Array("defaulturiquerystrict_skiphandled", FALSE, TRUE, TRUE, FALSE, FALSE, uriQuery)) list.add(Array("defaulturiquerynopush_skiphandled", FALSE, FALSE, FALSE, FALSE, FALSE, uriQuery)) list.add(Array("withmetauri_query_skiphandled", TRUE, TRUE, FALSE, FALSE, FALSE, uriQuery)) list.add(Array("withmetauri_querystrict_skiphandled", TRUE, TRUE, TRUE, FALSE, FALSE, uriQuery)) list.add(Array("withmetauri_querynopush_skiphandled", TRUE, FALSE, FALSE, FALSE, FALSE, uriQuery)) // dsl query meta, push, strict, filter, encode, query list.add(Array("defaultdslquery", FALSE, TRUE, FALSE, TRUE, FALSE, dslQuery)) list.add(Array("defaultstrictdslquery", FALSE, TRUE, TRUE, TRUE, FALSE, dslQuery)) list.add(Array("defaultnopushdslquery", FALSE, FALSE, FALSE, TRUE, FALSE, dslQuery)) list.add(Array("withmetadslquery", TRUE, TRUE, FALSE, TRUE, FALSE, dslQuery)) list.add(Array("withmetastrictdslquery", TRUE, TRUE, TRUE, TRUE, FALSE, dslQuery)) list.add(Array("withmetanopushdslquery", TRUE, FALSE, FALSE, TRUE, FALSE, dslQuery)) // disable double filtering meta, push, strict, filter, encode, query list.add(Array("defaultdslquery_skiphandled", FALSE, TRUE, FALSE, FALSE, FALSE, dslQuery)) list.add(Array("defaultstrictdslquery_skiphandled", FALSE, TRUE, TRUE, FALSE, FALSE, dslQuery)) list.add(Array("defaultnopushdslquery_skiphandled", FALSE, FALSE, FALSE, FALSE, FALSE, dslQuery)) list.add(Array("withmetadslquery_skiphandled", TRUE, TRUE, FALSE, FALSE, FALSE, dslQuery)) list.add(Array("withmetastrictdslquery_skiphandled", TRUE, TRUE, TRUE, FALSE, FALSE, dslQuery)) list.add(Array("withmetanopushdslquery_skiphandled", TRUE, FALSE, FALSE, FALSE, FALSE, dslQuery)) // unicode meta, push, strict, filter, encode, query list.add(Array("default_" + "בְּדִיק" + "_", FALSE, TRUE, FALSE, TRUE, TRUE, noQuery)) list.add(Array("defaultstrict_" + "בְּדִיק" + "_", FALSE, TRUE, TRUE, TRUE, TRUE, noQuery)) list.add(Array("defaultnopush_" + "בְּדִיק" + "_", FALSE, FALSE, FALSE, TRUE, TRUE, noQuery)) list.add(Array("withmeta_" + "בְּדִיק" + "_", TRUE, TRUE, FALSE, TRUE, TRUE, noQuery)) list.add(Array("withmetastrict_" + "בְּדִיק" + "_", TRUE, TRUE, TRUE, TRUE, TRUE, noQuery)) list.add(Array("withmetanopush_" + "בְּדִיק" + "_", TRUE, FALSE, FALSE, TRUE, TRUE, noQuery)) // disable double filtering meta, push, strict, filter, encode, query list.add(Array("default_skiphandled_" + "בְּדִיק" + "_", FALSE, TRUE, FALSE, FALSE, TRUE, noQuery)) list.add(Array("defaultstrict_skiphandled_" + "בְּדִיק" + "_", FALSE, TRUE, TRUE, FALSE, TRUE, noQuery)) list.add(Array("defaultnopush_skiphandled_" + "בְּדִיק" + "_", FALSE, FALSE, FALSE, FALSE, TRUE, noQuery)) list.add(Array("withmeta_skiphandled_" + "בְּדִיק" + "_", TRUE, TRUE, FALSE, FALSE, TRUE, noQuery)) list.add(Array("withmetastrict_skiphandled_" + "בְּדִיק" + "_", TRUE, TRUE, TRUE, FALSE, TRUE, noQuery)) list.add(Array("withmetanopush_skiphandled_" + "בְּדִיק" + "_", TRUE, FALSE, FALSE, FALSE, TRUE, noQuery)) list } } @FixMethodOrder(MethodSorters.NAME_ASCENDING) @RunWith(classOf[Parameterized]) class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pushDown: jl.Boolean, strictPushDown: jl.Boolean, doubleFiltering: jl.Boolean, encodeResources: jl.Boolean, query: String = "") extends Serializable { val sc = AbstractScalaEsScalaSparkSQL.sc val sqc = AbstractScalaEsScalaSparkSQL.sqc val cfg = Map(ES_QUERY -> query, ES_READ_METADATA -> readMetadata.toString(), "es.internal.spark.sql.pushdown" -> pushDown.toString(), "es.internal.spark.sql.pushdown.strict" -> strictPushDown.toString(), "es.internal.spark.sql.pushdown.keep.handled.filters" -> doubleFiltering.toString()) val version = TestUtils.getEsClusterInfo.getMajorVersion val keyword = AbstractScalaEsScalaSparkSQL.keywordType val text = AbstractScalaEsScalaSparkSQL.textType @Test def test1KryoScalaEsRow() { val kryo = SparkUtils.sparkSerializer(sc.getConf) val row = new ScalaEsRow((new ArrayBuffer() ++= StringUtils.tokenize("foo,bar,tar").asScala).toSeq) val storage = Array.ofDim[Byte](512) val output = new KryoOutput(storage) val input = new KryoInput(storage) kryo.writeClassAndObject(output, row) val serialized = kryo.readClassAndObject(input).asInstanceOf[ScalaEsRow] println(serialized.rowOrder) } @Test(expected = classOf[EsHadoopIllegalArgumentException]) def testNoIndexExists() { val idx = sqc.read.format("org.elasticsearch.spark.sql").load("existing_index") idx.printSchema() } @Test(expected = classOf[EsHadoopIllegalArgumentException]) def testNoMappingExists() { EsAssume.versionOnOrBefore(EsMajorVersion.V_6_X, "types are deprecated fully in 7.0 and will be removed in a later release") val index = wrapIndex("spark-index-ex") RestUtils.touch(index) val idx = sqc.read.format("org.elasticsearch.spark.sql").load(s"$index/no_such_mapping") idx.printSchema() } @Test def testArrayMappingFirstLevel() { val mapping = wrapMapping("data", s"""{ | "properties" : { | "arr" : { | "properties" : { | "one" : { "type" : "$keyword" }, | "two" : { "type" : "$keyword" } | } | }, | "top-level" : { "type" : "$keyword" } | } }""".stripMargin) val index = wrapIndex("sparksql-test-array-mapping-top-level") val typename = "data" val (target, docPath) = makeTargets(index, typename) RestUtils.touch(index) RestUtils.putMapping(index, typename, mapping.getBytes(StringUtils.UTF_8)) // add some data val doc1 = """{"arr" : [{"one" : "1", "two" : "2"}, {"one" : "unu", "two" : "doi"}], "top-level" : "root" }""".stripMargin RestUtils.postData(docPath, doc1.getBytes(StringUtils.UTF_8)) RestUtils.refresh(index) val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_AS_ARRAY_INCLUDE -> "arr") val df = sqc.read.options(newCfg).format("org.elasticsearch.spark.sql").load(target) df.printSchema() assertEquals("string", df.schema("top-level").dataType.typeName) assertEquals("array", df.schema("arr").dataType.typeName) assertEquals("struct", df.schema("arr").dataType.asInstanceOf[ArrayType].elementType.typeName) df.take(1).foreach(println) assertEquals(1, df.count()) } @Test def testEmptyDataFrame() { val index = wrapIndex("spark-test-empty-dataframe") val (target, _) = makeTargets(index, "data") val idx = sqc.emptyDataFrame.saveToEs(target) } @Test(expected = classOf[EsHadoopIllegalArgumentException]) def testIndexCreationDisabled() { val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_INDEX_AUTO_CREATE -> "no") val index = wrapIndex("spark-test-non-existing-empty-dataframe") val (target, _) = makeTargets(index, "data") val idx = sqc.emptyDataFrame.saveToEs(target, newCfg) } @Test def testMultiFieldsWithSameName { val index = wrapIndex("sparksql-test-array-mapping-nested") val (target, docPath) = makeTargets(index, "data") RestUtils.touch(index) // add some data val jsonDoc = s"""{ | "bar" : { | "bar" : { | "bar" : [{ | "bar" : 1 | }, { | "bar" : 2 | } | ], | "level" : 2, | "level3" : true | }, | "foo2" : 10, | "level" : 1, | "level2" : 2 | }, | "foo1" : "$text", | "level" : 0, | "level1" : "$text" |} """.stripMargin RestUtils.postData(docPath, jsonDoc.getBytes(StringUtils.UTF_8)) RestUtils.refresh(index) val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_AS_ARRAY_INCLUDE -> "bar.bar.bar", "es.resource" -> target) val cfgSettings = new SparkSettingsManager().load(sc.getConf).copy().merge(newCfg.asJava) val schema = SchemaUtilsTestable.discoverMapping(cfgSettings) val mapping = SchemaUtilsTestable.rowInfo(cfgSettings) val df = sqc.read.options(newCfg).format("org.elasticsearch.spark.sql").load(target) df.printSchema() df.take(1).foreach(println) assertEquals(1, df.count()) } @Test def testNestedFieldArray { val index = wrapIndex("sparksql-test-nested-same-name-fields") val (target, _) = makeTargets(index, "data") RestUtils.touch(index) // add some data val jsonDoc = """{"foo" : 5, "nested": { "bar" : [{"date":"2015-01-01", "age":20},{"date":"2015-01-01", "age":20}], "what": "now" } }""" sc.makeRDD(Seq(jsonDoc)).saveJsonToEs(target) RestUtils.refresh(index) val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_AS_ARRAY_INCLUDE -> "nested.bar") val df = sqc.read.options(newCfg).format("org.elasticsearch.spark.sql").load(target) df.printSchema() df.take(1).foreach(println) assertEquals(1, df.count()) } @Test def testArrayValue { val index = wrapIndex("sparksql-test-array-value") val (target, _) = makeTargets(index, "data") RestUtils.touch(index) // add some data val jsonDoc = """{"array" : [1, 2, 4, 5] }""" sc.makeRDD(Seq(jsonDoc)).saveJsonToEs(target) RestUtils.refresh(index) val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_AS_ARRAY_INCLUDE -> "array") val df = sqc.read.options(newCfg).format("org.elasticsearch.spark.sql").load(target) assertEquals("array", df.schema("array").dataType.typeName) assertEquals("long", df.schema("array").dataType.asInstanceOf[ArrayType].elementType.typeName) assertEquals(1, df.count()) val first = df.first() val array = first.getSeq[Long](0) assertEquals(1l, array(0)) assertEquals(4l, array(2)) } @Test def testSometimesArrayValue { val index = wrapIndex("sparksql-test-sometimes-array-value") val (target, _) = makeTargets(index, "data") RestUtils.touch(index) // add some data val jsonDoc1 = """{"array" : [1, 2, 4, 5] }""" val jsonDoc2 = """{"array" : 6 }""" sc.makeRDD(Seq(jsonDoc1, jsonDoc2)).saveJsonToEs(target) RestUtils.refresh(index) val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_AS_ARRAY_INCLUDE -> "array") val df = sqc.read.options(newCfg).format("org.elasticsearch.spark.sql").load(target) assertEquals("array", df.schema("array").dataType.typeName) assertEquals("long", df.schema("array").dataType.asInstanceOf[ArrayType].elementType.typeName) assertEquals(2, df.count()) val arrays = df.collect().map(_.getSeq[Long](0)).sortBy(_.head) { val array = arrays(0) assertEquals(1l, array(0)) assertEquals(4l, array(2)) } { val array = arrays(1) assertEquals(6l, array(0)) } } @Test def testBasicRead() { val dataFrame = artistsAsDataFrame assertTrue(dataFrame.count > 300) dataFrame.createOrReplaceTempView("datfile") println(dataFrame.schema.treeString) //dataFrame.take(5).foreach(println) val results = sqc.sql("SELECT name FROM datfile WHERE id >=1 AND id <=10") //results.take(5).foreach(println) } @Test def testEmptyStrings(): Unit = { val data = Seq(("Java", "20000"), ("Python", ""), ("Scala", "3000")) val rdd: RDD[Row] = sc.parallelize(data).map(row => Row(row._1, row._2)) val schema = StructType( Array( StructField("language", StringType,true), StructField("description", StringType,true) )) val inputDf = sqc.createDataFrame(rdd, schema) inputDf.write .format("org.elasticsearch.spark.sql") .save("empty_strings_test") val reader = sqc.read.format("org.elasticsearch.spark.sql") val outputDf = reader.load("empty_strings_test") assertEquals(data.size, outputDf.count) val nullDescriptionsDf = outputDf.filter(row => row.getAs("description") == null) assertEquals(1, nullDescriptionsDf.count) val reader2 = sqc.read.format("org.elasticsearch.spark.sql").option("es.field.read.empty.as.null", "no") val outputDf2 = reader2.load("empty_strings_test") assertEquals(data.size, outputDf2.count) val nullDescriptionsDf2 = outputDf2.filter(row => row.getAs("description") == null) assertEquals(0, nullDescriptionsDf2.count) val emptyDescriptionsDf = outputDf2.filter(row => row.getAs("description") == "") assertEquals(1, emptyDescriptionsDf.count) } @Test def test0WriteFieldNameWithPercentage() { val index = wrapIndex("spark-test-scala-sql-field-with-percentage") val (target, _) = makeTargets(index, "data") val trip1 = Map("%s" -> "special") sc.makeRDD(Seq(trip1)).saveToEs(target) } @Test def test1ReadFieldNameWithPercentage() { val index = wrapIndex("spark-test-scala-sql-field-with-percentage") val (target, docPath) = makeTargets(index, "data") sqc.esDF(target).count() } @Test def testEsDataFrame1Write() { val dataFrame = artistsAsDataFrame val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") dataFrame.saveToEs(target, cfg) assertTrue(RestUtils.exists(target)) assertThat(RestUtils.get(target + "/_search?"), containsString("345")) } @Test def testEsDataFrame1WriteCount() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val dataFrame = sqc.esDF(target, cfg) assertEquals(345, dataFrame.count()) } @Test def testEsDataFrame1WriteWithMapping() { val dataFrame = artistsAsDataFrame val index = wrapIndex("sparksql-test-scala-basic-write-id-mapping") val (target, docPath) = makeTargets(index, "data") val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_MAPPING_ID -> "id", ES_MAPPING_EXCLUDE -> "url") dataFrame.saveToEs(target, newCfg) assertTrue(RestUtils.exists(target)) assertThat(RestUtils.get(target + "/_search?"), containsString("345")) assertThat(RestUtils.exists(docPath + "/1"), is(true)) assertThat(RestUtils.get(target + "/_search?"), not(containsString("url"))) } @Test def testEsDataFrame1WriteNullValue() { val index = wrapIndex("spark-test-null-data-test-0") val (target, docPath) = makeTargets(index, "data") val docs = Seq( """{"id":"1","name":{"first":"Robert","last":"Downey","suffix":"Jr"}}""", """{"id":"2","name":{"first":"Chris","last":"Evans"}}""" ) val conf = Map(ES_MAPPING_ID -> "id") val rdd = sc.makeRDD(docs) val jsonDF = sqc.read.json(rdd).toDF.select("id", "name") jsonDF.saveToEs(target, conf) RestUtils.refresh(index) val hit1 = RestUtils.get(s"$docPath/1") val hit2 = RestUtils.get(s"$docPath/2") assertThat(hit1, containsString("suffix")) assertThat(hit2, not(containsString("suffix"))) } @Test def testEsDataFrame12CheckYesWriteNullValue() { val index = wrapIndex("spark-test-null-data-test-1") val (target, docPath) = makeTargets(index, "data") val docs = Seq( """{"id":"1","name":{"first":"Robert","last":"Downey","suffix":"Jr"}}""", """{"id":"2","name":{"first":"Chris","last":"Evans"}}""" ) val conf = Map(ES_MAPPING_ID -> "id", ES_SPARK_DATAFRAME_WRITE_NULL_VALUES -> "true") val rdd = sc.makeRDD(docs) val jsonDF = sqc.read.json(rdd).toDF.select("id", "name") jsonDF.saveToEs(target, conf) RestUtils.refresh(index) val hit1 = RestUtils.get(s"$docPath/1") val hit2 = RestUtils.get(s"$docPath/2") assertThat(hit1, containsString("suffix")) assertThat(hit2, containsString("suffix")) } @Test def testEsDataFrame11CheckNoWriteNullValueFromRows() { val index = wrapIndex("spark-test-null-data-test-2") val (target, docPath) = makeTargets(index, "data") val data = Seq( Row("1", "Robert", "Downey", "Jr"), Row("2", "Chris", "Evans", null) ) val schema = StructType(Array( StructField("id", StringType), StructField("first", StringType), StructField("last", StringType), StructField("suffix", StringType, nullable = true) )) val conf = Map("es.mapping.id" -> "id") val rdd = sc.makeRDD(data) val df = sqc.createDataFrame(rdd, schema) df.saveToEs(target, conf) RestUtils.refresh(index) val hit1 = RestUtils.get(s"$docPath/1") val hit2 = RestUtils.get(s"$docPath/2") assertThat(hit1, containsString("suffix")) assertThat(hit2, not(containsString("suffix"))) } @Test def testEsDataFrame12CheckYesWriteNullValueFromRows() { val index = wrapIndex("spark-test-null-data-test-3") val (target, docPath) = makeTargets(index, "data") val data = Seq( Row("1", "Robert", "Downey", "Jr"), Row("2", "Chris", "Evans", null) ) val schema = StructType(Array( StructField("id", StringType), StructField("first", StringType), StructField("last", StringType), StructField("suffix", StringType, nullable = true) )) val conf = Map("es.mapping.id" -> "id", "es.spark.dataframe.write.null" -> "true") val rdd = sc.makeRDD(data) val df = sqc.createDataFrame(rdd, schema) df.saveToEs(target, conf) RestUtils.refresh(index) val hit1 = RestUtils.get(s"$docPath/1") val hit2 = RestUtils.get(s"$docPath/2") assertThat(hit1, containsString("suffix")) assertThat(hit2, containsString("suffix")) } @Test def testEsDataFrame2Read() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val dataFrame = sqc.esDF(target, cfg) dataFrame.printSchema() val schema = dataFrame.schema.treeString assertTrue(schema.contains("id: long")) assertTrue(schema.contains("name: string")) assertTrue(schema.contains("pictures: string")) assertTrue(schema.contains("time: long")) assertTrue(schema.contains("url: string")) assertTrue(dataFrame.count > 300) //dataFrame.take(5).foreach(println) val tempTable = wrapIndex("basicRead") dataFrame.createOrReplaceTempView(wrapTableName(tempTable)) val nameRDD = sqc.sql(s"SELECT name FROM ${wrapTableName(tempTable)} WHERE id >= 1 AND id <=10") nameRDD.take(7).foreach(println) assertEquals(10, nameRDD.count) } @Test def testEsDataFrame2ReadWithIncludeFields() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_INCLUDE -> "id, name, url") val dataFrame = sqc.esDF(target, newCfg) val schema = dataFrame.schema.treeString assertTrue(schema.contains("id: long")) assertTrue(schema.contains("name: string")) assertFalse(schema.contains("pictures: string")) assertFalse(schema.contains("time:")) assertTrue(schema.contains("url: string")) assertTrue(dataFrame.count > 300) //dataFrame.take(5).foreach(println) val tempTable = wrapIndex("basicRead") dataFrame.createOrReplaceTempView(wrapTableName(tempTable)) val nameRDD = sqc.sql(s"SELECT name FROM ${wrapTableName(tempTable)} WHERE id >= 1 AND id <=10") nameRDD.take(7).foreach(println) assertEquals(10, nameRDD.count) } @Test(expected = classOf[EsHadoopIllegalStateException]) def testEsDataFrame2ReadWithUserSchemaSpecified() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val newCfg = collection.mutable.Map(cfg.toSeq: _*) += (ES_READ_FIELD_INCLUDE -> "id, name, url") += (ES_READ_SOURCE_FILTER -> "name") val dataFrame = sqc.esDF(target, newCfg) val schema = dataFrame.schema.treeString assertTrue(schema.contains("id: long")) assertTrue(schema.contains("name: string")) assertFalse(schema.contains("pictures: string")) assertFalse(schema.contains("time:")) assertTrue(schema.contains("url: string")) assertTrue(dataFrame.count > 300) //dataFrame.take(5).foreach(println) val tempTable = wrapIndex("basicRead") dataFrame.createOrReplaceTempView(wrapTableName(tempTable)) val nameRDD = sqc.sql(s"SELECT name FROM ${wrapTableName(tempTable)} WHERE id >= 1 AND id <=10") nameRDD.take(7) } @Test def testEsDataFrame2ReadWithAndWithoutQuery() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val dfNoQuery = sqc.esDF(target, cfg) val dfWQuery = sqc.esDF(target, "?q=name:me*", cfg) println(dfNoQuery.head()) println(dfWQuery.head()) //assertEquals(dfNoQuery.head().toString(), dfWQuery.head().toString()) } @Test def testEsDataFrame2ReadWithAndWithoutQueryInJava() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val dfNoQuery = JavaEsSparkSQL.esDF(sqc, target, cfg.asJava) val query = s"""{ "query" : { "query_string" : { "query" : "name:me*" } } //, "fields" : ["name"] }""" val dfWQuery = JavaEsSparkSQL.esDF(sqc, target, query, cfg.asJava) println(dfNoQuery.head()) println(dfWQuery.head()) dfNoQuery.show(3) dfWQuery.show(3) //assertEquals(dfNoQuery.head().toString(), dfWQuery.head().toString()) } @Test def testEsDataFrame3WriteWithRichMapping() { val path = Paths.get(AbstractScalaEsScalaSparkSQL.testData.sampleArtistsDatUri()) // because Windows... val lines = Files.readAllLines(path, StandardCharsets.ISO_8859_1).asScala.toSeq val data = sc.parallelize(lines) val schema = StructType(Seq(StructField("id", IntegerType, false), StructField("name", StringType, false), StructField("url", StringType, true), StructField("pictures", StringType, true), StructField("time", TimestampType, true), StructField("nested", StructType(Seq(StructField("id", IntegerType, false), StructField("name", StringType, false), StructField("url", StringType, true), StructField("pictures", StringType, true), StructField("time", TimestampType, true))), true))) val rowRDD = data.map(_.split("\t")).map(r => Row(r(0).toInt, r(1), r(2), r(3), new Timestamp(DatatypeConverter.parseDateTime(r(4)).getTimeInMillis()), Row(r(0).toInt, r(1), r(2), r(3), new Timestamp(DatatypeConverter.parseDateTime(r(4)).getTimeInMillis())))) val dataFrame = sqc.createDataFrame(rowRDD, schema) val index = wrapIndex("sparksql-test-scala-basic-write-rich-mapping-id-mapping") val (target, docPath) = makeTargets(index, "data") dataFrame.saveToEs(target, Map(ES_MAPPING_ID -> "id")) assertTrue(RestUtils.exists(target)) assertThat(RestUtils.get(target + "/_search?"), containsString("345")) assertThat(RestUtils.exists(docPath + "/1"), is(true)) } @Test(expected = classOf[SparkException]) def testEsDataFrame3WriteDecimalType() { val schema = StructType(Seq(StructField("decimal", DecimalType.USER_DEFAULT, false))) val rowRDD = sc.makeRDD(Seq(Row(Decimal(10)))) val dataFrame = sqc.createDataFrame(rowRDD, schema) val index = wrapIndex("sparksql-test-decimal-exception") val (target, _) = makeTargets(index, "data") dataFrame.saveToEs(target) } @Test def testEsDataFrame4WriteConflictingData(): Unit = { val schema = StructType(Seq(StructField("id", StringType), StructField("version", IntegerType), StructField("field", StringType))) val rowRDD1 = sc.makeRDD(Seq(Row("id", 1, "hello"))) val rowRDD2 = sc.makeRDD(Seq(Row("id", 2, "hello"))) val dataFrame1 = sqc.createDataFrame(rowRDD1, schema) val dataFrame2 = sqc.createDataFrame(rowRDD2, schema) val dataIndex = wrapIndex("sparksql-test-scala-error-handler-es") val errorIndex = wrapIndex("sparksql-test-scala-error-handler-es-errors") val typeName = "data" val (dataTarget, _) = makeTargets(dataIndex, typeName) val (errorTarget, _) = makeTargets(errorIndex, typeName) val conf = Map( ConfigurationOptions.ES_MAPPING_ID -> "id", ConfigurationOptions.ES_MAPPING_VERSION -> "version", "es.write.rest.error.handlers" -> "es", "es.write.rest.error.handler.es.client.resource" -> errorTarget, "es.write.rest.error.handler.es.label.extraData" -> "labelValue", "es.write.rest.error.handler.es.tags" -> "tagValue" ) dataFrame2.saveToEs(dataTarget, conf) dataFrame1.saveToEs(dataTarget, conf) val errorDataSearch = RestUtils.get(errorTarget + "/_search") val errorDoc = JsonUtils.query("hits").get("hits").get(0).apply(JsonUtils.asMap(errorDataSearch)) assertEquals("Encountered Bulk Failure", JsonUtils.query("_source").get("message").apply(errorDoc)) assertEquals("version_conflict_engine_exception", JsonUtils.query("_source").get("error").get("code").apply(errorDoc)) assertEquals("labelValue", JsonUtils.query("_source").get("labels").get("extraData").apply(errorDoc)) assertEquals("tagValue", JsonUtils.query("_source").get("tags").get(0).apply(errorDoc)) } @Test def testEsDataFrame4ReadRichMapping() { val index = wrapIndex("sparksql-test-scala-basic-write-rich-mapping-id-mapping") val (target, _) = makeTargets(index, "data") val dataFrame = sqc.esDF(target, cfg) assertTrue(dataFrame.count > 300) dataFrame.printSchema() } private def artistsAsDataFrame = { val data = readAsRDD(AbstractScalaEsScalaSparkSQL.testData.sampleArtistsDatUri()) val schema = StructType(Seq(StructField("id", IntegerType, false), StructField("name", StringType, false), StructField("url", StringType, true), StructField("pictures", StringType, true), StructField("time", TimestampType, true))) val rowRDD = data.map(_.split("\t")).map(r => Row(r(0).toInt, r(1), r(2), r(3), new Timestamp(DatatypeConverter.parseDateTime(r(4)).getTimeInMillis()))) val dataFrame = sqc.createDataFrame(rowRDD, schema) dataFrame } @Test def testEsDataFrame50ReadAsDataSource() { val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") var options = s"""resource '$target' """ val table = wrapIndex("sqlbasicread1") val query = s"CREATE TEMPORARY TABLE ${wrapTableName(table)} "+ " USING org.elasticsearch.spark.sql " + s" OPTIONS ($options)" println(query) val dataFrame = sqc.sql(query) val dsCfg = collection.mutable.Map(cfg.toSeq: _*) += ("path" -> target) val dfLoad = sqc.read.format("es").options(dsCfg.toMap).load() println("root data frame") dfLoad.printSchema() val results = dfLoad.filter(dfLoad("id") >= 1 && dfLoad("id") <= 10) println("results data frame") results.printSchema() val allRDD = sqc.sql(s"SELECT * FROM ${wrapTableName(table)} WHERE id >= 1 AND id <=10") println("select all rdd") allRDD.printSchema() val nameRDD = sqc.sql(s"SELECT name FROM ${wrapTableName(table)} WHERE id >= 1 AND id <=10") println("select name rdd") nameRDD.printSchema() assertEquals(10, nameRDD.count) nameRDD.take(7).foreach(println) } @Test def testScrollLimitWithEmptyPartition(): Unit = { val index = wrapIndex("scroll-limit") val (target, docPath) = makeTargets(index, "data") // Make index with two shards RestUtils.delete(index) RestUtils.put(index, """{"settings":{"number_of_shards":2,"number_of_replicas":0}}""".getBytes()) RestUtils.refresh(index) // Write a single record to it (should have one empty shard) val data = artistsAsDataFrame val single = data.where(data("id").equalTo(1)) assertEquals(1L, single.count()) single.saveToEs(target) // Make sure that the scroll limit works with both a shard that has data and a shard that has nothing val count = sqc.read.format("es").option("es.scroll.limit", "10").load(target).count() assertEquals(1L, count) } @Test def testEsDataFrameReadAsDataSourceWithMetadata() { assumeTrue(readMetadata) val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val table = wrapIndex("sqlbasicread2") val options = s"""resource '$target' , readMetadata "true" """ val dataFrame = sqc.sql(s"CREATE TEMPORARY TABLE ${wrapTableName(table)}" + " USING es " + s" OPTIONS ($options)") val allRDD = sqc.sql(s"SELECT * FROM ${wrapTableName(table)} WHERE id >= 1 AND id <=10") allRDD.printSchema() allRDD.take(7).foreach(println) val dsCfg = collection.mutable.Map(cfg.toSeq: _*) += ("path" -> target) val dfLoad = sqc.read.format("es").options(dsCfg.toMap).load dfLoad.show() } @Test def testDataSource0Setup() { val index = wrapIndex("spark-test-scala-sql-varcols") val (target, _) = makeTargets(index, "data") val table = wrapIndex("sqlvarcol") val trip1 = Map("reason" -> "business", "airport" -> "SFO", "tag" -> "jan", "date" -> "2015-12-28T20:03:10Z") val trip2 = Map("participants" -> 5, "airport" -> "OTP", "tag" -> "feb", "date" -> "2013-12-28T20:03:10Z") val trip3 = Map("participants" -> 3, "airport" -> "MUC OTP SFO JFK", "tag" -> "long", "date" -> "2012-12-28T20:03:10Z") sc.makeRDD(Seq(trip1, trip2, trip3)).saveToEs(target) } private def esDataSource(table: String) = { val index = wrapIndex("spark-test-scala-sql-varcols") val (target, _) = makeTargets(index, "data") var options = s"""resource "$target" """ // sqc.sql(s"CREATE TEMPORARY TABLE $table" + // " USING org.elasticsearch.spark.sql " + // s" OPTIONS ($options)") val dsCfg = collection.mutable.Map(cfg.toSeq: _*) += ("path" -> target) sqc.read.format("org.elasticsearch.spark.sql").options(dsCfg.toMap).load } @Test def testDataSourcePushDown01EqualTo() { val df = esDataSource("pd_equalto") val filter = df.filter(df("airport").equalTo("OTP")) filter.show if (strictPushDown) { assertEquals(0, filter.count()) // however if we change the arguments to be lower cased, it will be Spark who's going to filter out the data return } else if (!keepHandledFilters) { // term query pick field with multi values assertEquals(2, filter.count()) return } assertEquals(1, filter.count()) assertEquals("feb", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown015NullSafeEqualTo() { val df = esDataSource("pd_nullsafeequalto") val filter = df.filter(df("airport").eqNullSafe("OTP")) filter.show if (strictPushDown) { assertEquals(0, filter.count()) // however if we change the arguments to be lower cased, it will be Spark who's going to filter out the data return } else if (!keepHandledFilters) { // term query pick field with multi values assertEquals(2, filter.count()) return } assertEquals(1, filter.count()) assertEquals("feb", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown02GT() { val df = esDataSource("pd_gt") val filter = df.filter(df("participants").gt(3)) assertEquals(1, filter.count()) assertEquals("feb", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown03GTE() { val df = esDataSource("pd_gte") val filter = df.filter(df("participants").geq(3)) assertEquals(2, filter.count()) assertEquals("long", filter.select("tag").sort("tag").take(2)(1)(0)) } @Test def testDataSourcePushDown04LT() { val df = esDataSource("pd_lt") df.printSchema() val filter = df.filter(df("participants").lt(5)) assertEquals(1, filter.count()) assertEquals("long", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown05LTE() { val df = esDataSource("pd_lte") val filter = df.filter(df("participants").leq(5)) assertEquals(2, filter.count()) assertEquals("long", filter.select("tag").sort("tag").take(2)(1)(0)) } @Test def testDataSourcePushDown06IsNull() { val df = esDataSource("pd_is_null") val filter = df.filter(df("participants").isNull) assertEquals(1, filter.count()) assertEquals("jan", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown07IsNotNull() { val df = esDataSource("pd_is_not_null") val filter = df.filter(df("reason").isNotNull) assertEquals(1, filter.count()) assertEquals("jan", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown08In() { val df = esDataSource("pd_in") var filter = df.filter("airport IN ('OTP', 'SFO', 'MUC')") if (strictPushDown) { assertEquals(0, filter.count()) // however if we change the arguments to be lower cased, it will be Spark who's going to filter out the data return } assertEquals(2, filter.count()) assertEquals("jan", filter.select("tag").sort("tag").take(2)(1)(0)) } @Test def testDataSourcePushDown08InWithNumbersAsStrings() { val df = esDataSource("pd_in_numbers_strings") var filter = df.filter("date IN ('2015-12-28', '2012-12-28')") if (strictPushDown) { assertEquals(0, filter.count()) // however if we change the arguments to be lower cased, it will be Spark who's going to filter out the data return } assertEquals(0, filter.count()) } @Test def testDataSourcePushDown08InWithNumber() { val df = esDataSource("pd_in_number") var filter = df.filter("participants IN (1, 2, 3)") assertEquals(1, filter.count()) assertEquals("long", filter.select("tag").sort("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown08InWithNumberAndStrings() { val df = esDataSource("pd_in_number") var filter = df.filter("participants IN (2, 'bar', 1, 'foo')") assertEquals(0, filter.count()) } @Test def testDataSourcePushDown09StartsWith() { val df = esDataSource("pd_starts_with") var filter = df.filter(df("airport").startsWith("O")) if (!keepHandledFilters && !strictPushDown) { // term query pick field with multi values assertEquals(2, filter.count()) return } filter.show if (strictPushDown) { assertEquals(0, filter.count()) // Strict means specific terms matching, and the terms are lowercased } else { assertEquals(1, filter.count()) assertEquals("feb", filter.select("tag").take(1)(0)(0)) } } @Test def testDataSourcePushDown10EndsWith() { val df = esDataSource("pd_ends_with") var filter = df.filter(df("airport").endsWith("O")) if (!keepHandledFilters && !strictPushDown) { // term query pick field with multi values assertEquals(2, filter.count()) return } filter.show if (strictPushDown) { assertEquals(0, filter.count()) // Strict means specific terms matching, and the terms are lowercased } else { assertEquals(1, filter.count()) assertEquals("jan", filter.select("tag").take(1)(0)(0)) } } @Test def testDataSourcePushDown11Contains() { val df = esDataSource("pd_contains") val filter = df.filter(df("reason").contains("us")) assertEquals(1, filter.count()) assertEquals("jan", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown12And() { val df = esDataSource("pd_and") var filter = df.filter(df("reason").isNotNull.and(df("tag").equalTo("jan"))) assertEquals(1, filter.count()) assertEquals("jan", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown13Not() { val df = esDataSource("pd_not") val filter = df.filter(!df("reason").isNull) assertEquals(1, filter.count()) assertEquals("jan", filter.select("tag").take(1)(0)(0)) } @Test def testDataSourcePushDown14OR() { val df = esDataSource("pd_or") var filter = df.filter(df("reason").contains("us").or(df("airport").equalTo("OTP"))) if (strictPushDown) { // OTP fails due to strict matching/analyzed assertEquals(1, filter.count()) return } if (!keepHandledFilters) { // term query pick field with multi values assertEquals(3, filter.count()) return } assertEquals(2, filter.count()) assertEquals("feb", filter.select("tag").sort("tag").take(1)(0)(0)) } @Test def testEsSchemaFromDocsWithDifferentProperties() { val table = wrapIndex("sqlvarcol") esDataSource(table) val index = wrapIndex("spark-test-scala-sql-varcols") val (target, _) = makeTargets(index, "data") var options = s"""resource '$target' """ val s = sqc.sql(s"CREATE TEMPORARY TABLE ${wrapTableName(table)}" + " USING org.elasticsearch.spark.sql " + s" OPTIONS ($options)") val allResults = sqc.sql(s"SELECT * FROM ${wrapTableName(table)}") assertEquals(3, allResults.count()) allResults.printSchema() val filter = sqc.sql(s"SELECT * FROM ${wrapTableName(table)} WHERE airport = 'OTP'") assertEquals(1, filter.count()) val nullColumns = sqc.sql(s"SELECT reason, airport FROM ${wrapTableName(table)} ORDER BY airport") val rows = nullColumns.take(3) assertEquals("[null,MUC OTP SFO JFK]", rows(0).toString()) assertEquals("[null,OTP]", rows(1).toString()) assertEquals("[business,SFO]", rows(2).toString()) } @Test() def testJsonLoadAndSavedToEs() { val input = sqc.read.json(readAsRDD(this.getClass.getResource("/simple.json").toURI())) println(input.schema.simpleString) val index = wrapIndex("spark-test-json-file") val (target, docPath) = makeTargets(index, "data") input.saveToEs(target, cfg) val basic = sqc.read.json(readAsRDD(this.getClass.getResource("/basic.json").toURI())) println(basic.schema.simpleString) basic.saveToEs(target, cfg) } @Test def testJsonLoadAndSavedToEsSchema() { assumeFalse(readMetadata) val input = sqc.read.json(readAsRDD(this.getClass.getResource("/multi-level-doc.json").toURI())) println("JSON schema") println(input.schema.treeString) println(input.schema) val sample = input.take(1)(0).toString() val index = wrapIndex("spark-test-json-file-schema") val (target, docPath) = makeTargets(index, "data") input.saveToEs(target, cfg) val dsCfg = collection.mutable.Map(cfg.toSeq: _*) += ("path" -> target) val dfLoad = sqc.read.format("org.elasticsearch.spark.sql").options(dsCfg.toMap).load println("JSON schema") println(input.schema.treeString) println("Reading information from Elastic") println(dfLoad.schema.treeString) val item = dfLoad.take(1)(0) println(item.schema) println(item.toSeq) val nested = item.getStruct(1) println(nested.get(0)) println(nested.get(0).getClass()) assertEquals(input.schema.treeString, dfLoad.schema.treeString.replaceAll("float", "double")) assertEquals(sample, item.toString()) } @Test def testTableJoining() { val index1Name = wrapIndex("sparksql-test-scala-basic-write") val (target1, _) = makeTargets(index1Name, "data") val index2Name = wrapIndex("sparksql-test-scala-basic-write-id-mapping") val (target2, _) = makeTargets(index2Name, "data") val table1 = sqc.read.format("org.elasticsearch.spark.sql").load(target1) val table2 = sqc.read.format("org.elasticsearch.spark.sql").load(target2) table1.persist(DISK_ONLY) table2.persist(DISK_ONLY_2) val table1Name = wrapIndex("table1") val table2Name = wrapIndex("table2") table1.createOrReplaceTempView(wrapTableName(table1Name)) table1.createOrReplaceTempView(wrapTableName(table2Name)) val join = sqc.sql(s"SELECT t1.name, t2.pictures FROM ${wrapTableName(table1Name)} t1, ${wrapTableName(table2Name)} t2 WHERE t1.id = t2.id") println(join.schema.treeString) println(join.take(1)(0).schema) println(join.take(1)(0)(0)) } @Test def testEsDataFrame51WriteToExistingDataSource() { // to keep the select static assumeFalse(readMetadata) val index = wrapIndex("sparksql-test-scala-basic-write") val (target, _) = makeTargets(index, "data") val table = wrapIndex("table_insert") var options = s"resource '$target '" val dataFrame = sqc.sql(s"CREATE TEMPORARY TABLE ${wrapTableName(table)} " + s"USING org.elasticsearch.spark.sql " + s"OPTIONS ($options)"); val insertRDD = sqc.sql(s"INSERT INTO TABLE ${wrapTableName(table)} SELECT 123456789, 'test-sql', 'http://test-sql.com', 12345, ''") val df = sqc.table(wrapTableName(table)) println(df.count) assertTrue(df.count > 100) } @Test def testEsDataFrame52OverwriteExistingDataSource() { // to keep the select static assumeFalse(readMetadata) val srcFrame = artistsAsDataFrame val index = wrapIndex("sparksql-test-scala-sql-overwrite") val (target, _) = makeTargets(index, "data") srcFrame.saveToEs(target, cfg) val table = wrapIndex("table_overwrite") var options = s"resource '$target'" val dataFrame = sqc.sql(s"CREATE TEMPORARY TABLE ${wrapTableName(table)} " + s"USING org.elasticsearch.spark.sql " + s"OPTIONS ($options)"); var df = sqc.table(wrapTableName(table)) assertTrue(df.count > 1) val insertRDD = sqc.sql(s"INSERT OVERWRITE TABLE ${wrapTableName(table)} SELECT 123456789, 'test-sql', 'http://test-sql.com', 12345, ''") df = sqc.table(wrapTableName(table)) assertEquals(1, df.count) } @Test def testEsDataFrame52OverwriteExistingDataSourceWithJoinField() { // Join added in 6.0. EsAssume.versionOnOrAfter(EsMajorVersion.V_6_X, "Join added in 6.0.") // using long-form joiner values val schema = StructType(Seq( StructField("id", StringType, nullable = false), StructField("company", StringType, nullable = true), StructField("name", StringType, nullable = true), StructField("joiner", StructType(Seq( StructField("name", StringType, nullable = false), StructField("parent", StringType, nullable = true) ))) )) val parents = Seq( Row("1", "Elastic", null, Row("company", null)), Row("2", "Fringe Cafe", null, Row("company", null)), Row("3", "WATIcorp", null, Row("company", null)) ) val firstChildren = Seq( Row("10", null, "kimchy", Row("employee", "1")), Row("20", null, "April Ryan", Row("employee", "2")), Row("21", null, "Charlie", Row("employee", "2")), Row("30", null, "Alvin Peats", Row("employee", "3")) ) val index = wrapIndex("sparksql-test-scala-overwrite-join") val typename = "join" val (target, docPath) = makeTargets(index, typename) RestUtils.delete(index) RestUtils.touch(index) if (TestUtils.isTypelessVersion(version)) { RestUtils.putMapping(index, typename, "data/join/mapping/typeless.json") } else { RestUtils.putMapping(index, typename, "data/join/mapping/typed.json") } sqc.createDataFrame(sc.makeRDD(parents ++ firstChildren), schema) .write .format("es") .options(Map(ES_MAPPING_ID -> "id", ES_MAPPING_JOIN -> "joiner")) .save(target) assertThat(RestUtils.get(docPath + "/10?routing=1"), containsString("kimchy")) assertThat(RestUtils.get(docPath + "/10?routing=1"), containsString(""""_routing":"1"""")) // Overwrite the data using a new dataset: val newChildren = Seq( Row("110", null, "costinl", Row("employee", "1")), Row("111", null, "jbaiera", Row("employee", "1")), Row("121", null, "Charlie", Row("employee", "2")), Row("130", null, "Damien", Row("employee", "3")) ) sqc.createDataFrame(sc.makeRDD(parents ++ newChildren), schema) .write .format("es") .options(cfg ++ Map(ES_MAPPING_ID -> "id", ES_MAPPING_JOIN -> "joiner")) .mode(SaveMode.Overwrite) .save(target) assertFalse(RestUtils.exists(docPath + "/10?routing=1")) assertThat(RestUtils.get(docPath + "/110?routing=1"), containsString("costinl")) assertThat(RestUtils.get(docPath + "/110?routing=1"), containsString(""""_routing":"1"""")) } @Test def testEsDataFrame53OverwriteExistingDataSourceFromAnotherDataSource() { // to keep the select static assumeFalse(readMetadata) val source = wrapIndex("sparksql-test-scala-basic-write") val (sourceTarget, _) = makeTargets(source, "data") val index = wrapIndex("sparksql-test-scala-sql-overwrite-from-df") val (target, _) = makeTargets(index, "data") val dstFrame = artistsAsDataFrame dstFrame.saveToEs(target, cfg) val srcTable = wrapIndex("table_overwrite_src") val dstTable = wrapIndex("table_overwrite_dst") var dstOptions = s"resource '$sourceTarget'" var srcOptions = s"resource '$target'" val srcFrame = sqc.sql(s"CREATE TEMPORARY TABLE ${wrapTableName(srcTable)} " + s"USING org.elasticsearch.spark.sql " + s"OPTIONS ($srcOptions)"); val dataFrame = sqc.sql(s"CREATE TEMPORARY TABLE ${wrapTableName(dstTable)} " + s"USING org.elasticsearch.spark.sql " + s"OPTIONS ($dstOptions)"); val insertRDD = sqc.sql(s"INSERT OVERWRITE TABLE ${wrapTableName(dstTable)} SELECT * FROM ${wrapTableName(srcTable)}") val df = sqc.table(wrapTableName(dstTable)) println(df.count) assertTrue(df.count > 100) } private def artistsJsonAsDataFrame = { sqc.read.json(readAsRDD(this.getClass.getResource("/small-sample.json").toURI())) } @Test def testEsDataFrame60DataSourceSaveModeError() { val srcFrame = artistsJsonAsDataFrame val index = wrapIndex("sparksql-test-savemode_error") val (target, _) = makeTargets(index, "data") val table = wrapIndex("save_mode_error") srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.ErrorIfExists).save(target) try { srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.ErrorIfExists).save(target) fail() } catch { case _: Throwable => // swallow } } @Test def testEsDataFrame60DataSourceSaveModeAppend() { val srcFrame = artistsJsonAsDataFrame srcFrame.printSchema() val index = wrapIndex("sparksql-test-savemode_append") val (target, _) = makeTargets(index, "data") val table = wrapIndex("save_mode_append") srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Append).save(target) val df = EsSparkSQL.esDF(sqc, target) assertEquals(3, df.count()) srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Append).save(target) assertEquals(6, df.count()) } @Test def testEsDataFrame60DataSourceSaveModeOverwrite() { val srcFrame = artistsJsonAsDataFrame val index = wrapIndex("sparksql-test-savemode_overwrite") val (target, _) = makeTargets(index, "data") val table = wrapIndex("save_mode_overwrite") srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).save(target) val df = EsSparkSQL.esDF(sqc, target) assertEquals(3, df.count()) srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).save(target) assertEquals(3, df.count()) } @Test def testEsDataFrame60DataSourceSaveModeOverwriteWithID() { val srcFrame = artistsJsonAsDataFrame val index = wrapIndex("sparksql-test-savemode_overwrite_id") val (target, _) = makeTargets(index, "data") srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "number").save(target) val df = EsSparkSQL.esDF(sqc, target) assertEquals(3, df.count()) srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Overwrite).option("es.mapping.id", "number").save(target) assertEquals(3, df.count()) } @Test def testEsDataFrame60DataSourceSaveModeIgnore() { val srcFrame = artistsJsonAsDataFrame val index = wrapIndex("sparksql-test-savemode_ignore") val (target, docPath) = makeTargets(index, "data") val table = wrapIndex("save_mode_ignore") srcFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Ignore).save(target) val df = EsSparkSQL.esDF(sqc, target) assertEquals(3, df.count()) // should not go through artistsAsDataFrame.write.format("org.elasticsearch.spark.sql").mode(SaveMode.Ignore).save(target) // if it does, this will likely throw an error assertEquals(3, df.count()) } @Test def testArrayWithNestedObject() { val json = """{"0ey" : "val", "another-array": [{ "item" : 1, "key": { "key_a":"val_a", "key_b":"val_b" } }, { "item" : 2, "key": { "key_a":"val_c","key_b":"val_d" } } ]}""" val index = wrapIndex("sparksql-test-array-with-nested-object") val (target, _) = makeTargets(index, "data") sc.makeRDD(Seq(json)).saveJsonToEs(target) val df = sqc.read.format("es").option(ES_READ_FIELD_AS_ARRAY_INCLUDE, "another-array").load(target) df.printSchema() assertEquals("array", df.schema("another-array").dataType.typeName) val array = df.schema("another-array").dataType val key = array.asInstanceOf[ArrayType].elementType.asInstanceOf[StructType]("key") assertEquals("struct", key.dataType.typeName) val head = df.head println(head) val ky = head.getString(0) assertEquals("val", ky) // array val arr = head.getSeq[Row](1) val one = arr(0) assertEquals(1, one.getLong(0)) val nestedOne = one.getStruct(1) assertEquals("val_a", nestedOne.getString(0)) assertEquals("val_b", nestedOne.getString(1)) val two = arr(1) assertEquals(2, two.getLong(0)) val nestedTwo = two.getStruct(1) assertEquals("val_c", nestedTwo.getString(0)) assertEquals("val_d", nestedTwo.getString(1)) } @Test def testNestedEmptyArray() { val json = """{"foo" : 5, "nested": { "bar" : [], "what": "now" } }""" val index = wrapIndex("sparksql-test-empty-nested-array") val (target, _) = makeTargets(index, "data") sc.makeRDD(Seq(json)).saveJsonToEs(target) val df = sqc.read.format("es").load(target) assertEquals("long", df.schema("foo").dataType.typeName) assertEquals("struct", df.schema("nested").dataType.typeName) val struct = df.schema("nested").dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("what")) assertEquals("string", struct("what").dataType.typeName) val head = df.head assertEquals(5l, head(0)) assertEquals("now", head.getStruct(1)(0)) } @Test def testDoubleNestedArray() { val json = """{"foo" : [5,6], "nested": { "bar" : [{"date":"2015-01-01", "scores":[1,2]},{"date":"2015-01-01", "scores":[3,4]}], "what": "now" } }""" val index = wrapIndex("sparksql-test-double-nested-array") val (target, _) = makeTargets(index, "data") sc.makeRDD(Seq(json)).saveJsonToEs(target) val df = sqc.read.format("es").option(ES_READ_FIELD_AS_ARRAY_INCLUDE, "nested.bar,foo,nested.bar.scores").load(target) assertEquals("array", df.schema("foo").dataType.typeName) val bar = df.schema("nested").dataType.asInstanceOf[StructType]("bar") assertEquals("array", bar.dataType.typeName) val scores = bar.dataType.asInstanceOf[ArrayType].elementType.asInstanceOf[StructType]("scores") assertEquals("array", scores.dataType.typeName) val head = df.head val foo = head.getSeq[Long](0) assertEquals(5, foo(0)) assertEquals(6, foo(1)) // nested val nested = head.getStruct(1) assertEquals("now", nested.getString(1)) val nestedDate = nested.getSeq[Row](0) val nestedScores = nestedDate(0).getSeq[Long](1) assertEquals(2l, nestedScores(1)) } //@Test def testArrayExcludes() { val json = """{"foo" : 6, "nested": { "bar" : [{"date":"2015-01-01", "scores":[1,2]},{"date":"2015-01-01", "scores":[3,4]}], "what": "now" } }""" val index = wrapIndex("sparksql-test-nested-array-exclude") val (target, _) = makeTargets(index, "data") sc.makeRDD(Seq(json)).saveJsonToEs(target) val df = sqc.read.format("es").option(ES_READ_FIELD_EXCLUDE, "nested.bar").load(target) assertEquals("long", df.schema("foo").dataType.typeName) assertEquals("struct", df.schema("nested").dataType.typeName) val struct = df.schema("nested").dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("what")) assertEquals("string", struct("what").dataType.typeName) df.printSchema() val first = df.first println(first) println(first.getStruct(1)) assertEquals(6, first.getLong(0)) assertEquals("now", first.getStruct(1).getString(0)) } @Test def testMultiDepthArray() { val json = """{"rect":{"type":"foobar","coordinates":[ [50,32],[69,32],[69,50],[50,50],[50,32] ] }}""" val index = wrapIndex("sparksql-test-geo") val (target, _) = makeTargets(index, "data") sc.makeRDD(Seq(json)).saveJsonToEs(target) val df = sqc.read.format("es").option(ES_READ_FIELD_AS_ARRAY_INCLUDE, "rect.coordinates:2").load(target) val coords = df.schema("rect").dataType.asInstanceOf[StructType]("coordinates") assertEquals("array", coords.dataType.typeName) val nested = coords.dataType.asInstanceOf[ArrayType].elementType assertEquals("array", nested.typeName) assertEquals("long", nested.asInstanceOf[ArrayType].elementType.typeName) val first = df.first val vals = first.getStruct(0).getSeq[scala.collection.Seq[Long]](0)(0) assertEquals(50, vals(0)) assertEquals(32, vals(1)) } @Test def testJoinField(): Unit = { // Join added in 6.0. // TODO: Available in 5.6, but we only track major version ids in the connector. EsAssume.versionOnOrAfter(EsMajorVersion.V_6_X, "Join added in 6.0.") // test mix of short-form and long-form joiner values val company1 = Map("id" -> "1", "company" -> "Elastic", "joiner" -> "company") val company2 = Map("id" -> "2", "company" -> "Fringe Cafe", "joiner" -> Map("name" -> "company")) val company3 = Map("id" -> "3", "company" -> "WATIcorp", "joiner" -> Map("name" -> "company")) val employee1 = Map("id" -> "10", "name" -> "kimchy", "joiner" -> Map("name" -> "employee", "parent" -> "1")) val employee2 = Map("id" -> "20", "name" -> "April Ryan", "joiner" -> Map("name" -> "employee", "parent" -> "2")) val employee3 = Map("id" -> "21", "name" -> "Charlie", "joiner" -> Map("name" -> "employee", "parent" -> "2")) val employee4 = Map("id" -> "30", "name" -> "Alvin Peats", "joiner" -> Map("name" -> "employee", "parent" -> "3")) val parents = Seq(company1, company2, company3) val children = Seq(employee1, employee2, employee3, employee4) val docs = parents ++ children { val index = wrapIndex("sparksql-test-scala-write-join-separate") val typename = "join" val (target, docPath) = makeTargets(index, typename) if (TestUtils.isTypelessVersion(version)) { RestUtils.putMapping(index, typename, "data/join/mapping/typeless.json") } else { RestUtils.putMapping(index, typename, "data/join/mapping/typed.json") } sc.makeRDD(parents).saveToEs(target, Map(ES_MAPPING_ID -> "id", ES_MAPPING_JOIN -> "joiner")) sc.makeRDD(children).saveToEs(target, Map(ES_MAPPING_ID -> "id", ES_MAPPING_JOIN -> "joiner")) assertThat(RestUtils.get(docPath + "/10?routing=1"), containsString("kimchy")) assertThat(RestUtils.get(docPath + "/10?routing=1"), containsString(""""_routing":"1"""")) val df = sqc.read.format("es").load(target) val data = df.where(df("id").equalTo("1").or(df("id").equalTo("10"))).sort(df("id")).collect() { val record1 = data(0) assertNotNull(record1.getStruct(record1.fieldIndex("joiner"))) val joiner = record1.getStruct(record1.fieldIndex("joiner")) assertNotNull(joiner.getString(joiner.fieldIndex("name"))) } { val record10 = data(1) assertNotNull(record10.getStruct(record10.fieldIndex("joiner"))) val joiner = record10.getStruct(record10.fieldIndex("joiner")) assertNotNull(joiner.getString(joiner.fieldIndex("name"))) assertNotNull(joiner.getString(joiner.fieldIndex("parent"))) } } { val index = wrapIndex("sparksql-test-scala-write-join-combined") val typename = "join" val (target, docPath) = makeTargets(index, typename) if (TestUtils.isTypelessVersion(version)) { RestUtils.putMapping(index, typename, "data/join/mapping/typeless.json") } else { RestUtils.putMapping(index, typename, "data/join/mapping/typed.json") } sc.makeRDD(docs).saveToEs(target, Map(ES_MAPPING_ID -> "id", ES_MAPPING_JOIN -> "joiner")) assertThat(RestUtils.get(docPath + "/10?routing=1"), containsString("kimchy")) assertThat(RestUtils.get(docPath + "/10?routing=1"), containsString(""""_routing":"1"""")) val df = sqc.read.format("es").load(target) val data = df.where(df("id").equalTo("1").or(df("id").equalTo("10"))).sort(df("id")).collect() { val record1 = data(0) assertNotNull(record1.getStruct(record1.fieldIndex("joiner"))) val joiner = record1.getStruct(record1.fieldIndex("joiner")) assertNotNull(joiner.getString(joiner.fieldIndex("name"))) } { val record10 = data(1) assertNotNull(record10.getStruct(record10.fieldIndex("joiner"))) val joiner = record10.getStruct(record10.fieldIndex("joiner")) assertNotNull(joiner.getString(joiner.fieldIndex("name"))) assertNotNull(joiner.getString(joiner.fieldIndex("parent"))) } } } @Test def testGeoPointAsLatLonString() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_point" | } | } | } """.stripMargin) // Applies in ES 2.x // | "fielddata" : { // | "format" : "compressed", // | // | } val index = wrapIndex("sparksql-test-geopoint-latlonstring-geopoint") val typed = "data" val (target, docPath) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val latLonString = """{ "name" : "Chipotle Mexican Grill", "location": "40.715, -74.011" }""".stripMargin sc.makeRDD(Seq(latLonString)).saveJsonToEs(target) RestUtils.refresh(index) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("string", dataType.typeName) val head = df.head() assertThat(head.getString(0), containsString("715, ")) assertThat(head.getString(1), containsString("Chipotle")) } @Test def testGeoPointAsGeoHashString() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_point" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geopoint-geohash-geopoint") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val geohash = """{ "name": "Red Pepper Restaurant", "location": "9qh0kemfy5k3" }""".stripMargin sc.makeRDD(Seq(geohash)).saveJsonToEs(target) RestUtils.refresh(index) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("string", dataType.typeName) val head = df.head() assertThat(head.getString(0), containsString("9qh0")) assertThat(head.getString(1), containsString("Pepper")) } @Test def testGeoPointAsArrayOfDoubles() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_point" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geopoint-array-geopoint") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val arrayOfDoubles = """{ "name": "Mini Munchies Pizza", "location": [ -73.983, 40.719 ]}""".stripMargin sc.makeRDD(Seq(arrayOfDoubles)).saveJsonToEs(target) RestUtils.refresh(index) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("array", dataType.typeName) val array = dataType.asInstanceOf[ArrayType] assertEquals(DoubleType, array.elementType) val head = df.head() println(head(0)) assertThat(head.getString(1), containsString("Mini")) } @Test def testGeoPointAsObject() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_point" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geopoint-object-geopoint") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val lonLatObject = """{ "name" : "Pala Pizza","location": {"lat":40.722, "lon":-73.989} }""".stripMargin sc.makeRDD(Seq(lonLatObject)).saveJsonToEs(target) RestUtils.refresh(index) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("lon")) assertTrue(struct.fieldNames.contains("lat")) assertEquals("double", struct("lon").dataType.simpleString) assertEquals("double", struct("lat").dataType.simpleString) val head = df.head() println(head) val obj = head.getStruct(0) assertThat(obj.getDouble(0), is(40.722d)) assertThat(obj.getDouble(1), is(-73.989d)) assertThat(head.getString(1), containsString("Pizza")) } @Test def testGeoShapePoint() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-point-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val point = """{"name":"point","location":{ "type" : "point", "coordinates": [100.0, 0.0] }}""".stripMargin sc.makeRDD(Seq(point)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) println(df.schema.treeString) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType assertEquals("double", coords.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("point")) val array = obj.getSeq[Double](1) assertThat(array(0), is(100.0d)) assertThat(array(1), is(0.0d)) } @Test def testGeoShapeLine() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-linestring-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val line = """{"name":"line","location":{ "type": "linestring", "coordinates": [[-77.03, 38.89], [-77.00, 38.88]]} }""".stripMargin sc.makeRDD(Seq(line)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType assertEquals("array", coords.typeName) assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("linestring")) val array = obj.getSeq[Seq[Double]](1) assertThat(array(0)(0), is(-77.03d)) assertThat(array(0)(1), is(38.89d)) } @Test def testGeoShapePolygon() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-poly-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val polygon = """{"name":"polygon","location":{ "type" : "Polygon", "coordinates": [[ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]], "crs":null, "foo":"bar" }}""".stripMargin sc.makeRDD(Seq(polygon)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType // level 1 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 2 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 3 assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("Polygon")) val array = obj.getSeq[Seq[Seq[Double]]](1) assertThat(array(0)(0)(0), is(100.0d)) assertThat(array(0)(0)(1), is(0.0d)) } @Test def testGeoShapePointMultiPoint() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-multipoint-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val multipoint = """{"name":"multipoint","location":{ "type" : "multipoint", "coordinates": [ [100.0, 0.0], [101.0, 0.0] ] }}""".stripMargin sc.makeRDD(Seq(multipoint)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) println(df.schema.treeString) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType assertEquals("array", coords.typeName) assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("multipoint")) val array = obj.getSeq[Seq[Double]](1) assertThat(array(0)(0), is(100.0d)) assertThat(array(0)(1), is(0.0d)) } @Test def testGeoShapeMultiLine() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-multiline-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val multiline = """{"name":"multi-line","location":{ "type": "multilinestring", "coordinates":[ [[-77.0, 38.8], [-78.0, 38.8]], [[100.0, 0.0], [101.0, 1.0]] ]} }""".stripMargin sc.makeRDD(Seq(multiline)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) println(df.schema.treeString) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType // level 1 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 2 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 3 assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("multilinestring")) val array = obj.getSeq[Seq[Seq[Double]]](1) assertThat(array(0)(0)(0), is(-77.0d)) assertThat(array(0)(0)(1), is(38.8d)) } @Test def testGeoShapeMultiPolygon() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-multi-poly-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val multipoly = """{"name":"multi-poly","location":{ "type" : "multipolygon", "coordinates": [ [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 0.0] ]], [[[103.0, 0.0], [104.0, 0.0], [104.0, 1.0], [103.0, 0.0] ]] ]}}""".stripMargin sc.makeRDD(Seq(multipoly)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) println(df.schema.treeString) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType // level 1 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 2 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 3 assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType // level 4 assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("multipolygon")) val array = obj.getSeq[Seq[Seq[Seq[Double]]]](1) assertThat(array(0)(0)(0)(0), is(100.0d)) assertThat(array(0)(0)(0)(1), is(0.0d)) } @Test def testGeoShapeEnvelope() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-envelope-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val envelope = """{"name":"envelope","location":{ "type" : "envelope", "coordinates": [[-45.0, 45.0], [45.0, -45.0] ] }}""".stripMargin sc.makeRDD(Seq(envelope)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) var coords = struct("coordinates").dataType assertEquals("array", coords.typeName) coords = coords.asInstanceOf[ArrayType].elementType assertEquals("array", coords.typeName) assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("envelope")) val array = obj.getSeq[Seq[Double]](1) assertThat(array(0)(0), is(-45.0d)) assertThat(array(0)(1), is(45.0d)) } @Test def testGeoShapeCircle() { EsAssume.versionOnOrBefore(EsMajorVersion.V_5_X, "circle geo shape is removed in later 6.6+ versions") val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "$keyword" | }, | "location": { | "type": "geo_shape" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-geoshape-circle-geoshape") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val circle = """{"name":"circle", "location": {"type":"circle", "coordinates":[ -45.0, 45.0], "radius":"100m"} }""".stripMargin sc.makeRDD(Seq(circle)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) val dataType = df.schema("location").dataType assertEquals("struct", dataType.typeName) val struct = dataType.asInstanceOf[StructType] assertTrue(struct.fieldNames.contains("type")) assertTrue(struct.fieldNames.contains("radius")) val coords = struct("coordinates").dataType assertEquals("array", coords.typeName) assertEquals("double", coords.asInstanceOf[ArrayType].elementType.typeName) val head = df.head() val obj = head.getStruct(0) assertThat(obj.getString(0), is("circle")) val array = obj.getSeq[Double](1) assertThat(array(0), is(-45.0d)) assertThat(array(1), is(45.0d)) assertThat(obj.getString(2), is("100m")) } @Test def testNested() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { "type": "$keyword" }, | "employees": { | "type": "nested", | "properties": { | "name": {"type": "$keyword"}, | "salary": {"type": "long"} | } | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-nested-simple-nested") val typed = "data" val (target, _) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val data = """{"name":"nested-simple","employees":[{"name":"anne","salary":6},{"name":"bob","salary":100}, {"name":"charlie","salary":15}] }""".stripMargin sc.makeRDD(Seq(data)).saveJsonToEs(target) val df = sqc.read.format("es").load(index) println(df.schema.treeString) val dataType = df.schema("employees").dataType assertEquals("array", dataType.typeName) val array = dataType.asInstanceOf[ArrayType] assertEquals("struct", array.elementType.typeName) val struct = array.elementType.asInstanceOf[StructType] assertEquals("string", struct("name").dataType.typeName) assertEquals("long", struct("salary").dataType.typeName) val head = df.head() val nested = head.getSeq[Row](0); assertThat(nested.size, is(3)) assertEquals(nested(0).getString(0), "anne") assertEquals(nested(0).getLong(1), 6) } @Test def testMultiIndexes() { // add some data val jsonDoc = """{"artist" : "buckethead", "album": "mirror realms" }""" val index1 = wrapIndex("sparksql-multi-index-1") val (target1, _) = makeTargets(index1, "data") val index2 = wrapIndex("sparksql-multi-index-2") val (target2, _) = makeTargets(index2, "data") sc.makeRDD(Seq(jsonDoc)).saveJsonToEs(target1) sc.makeRDD(Seq(jsonDoc)).saveJsonToEs(target2) RestUtils.refresh(wrapIndex("sparksql-multi-index-1")) RestUtils.refresh(wrapIndex("sparksql-multi-index-2")) val multiIndex = wrapIndex("sparksql-multi-index-1,") + index2 val df = sqc.read.format("es").load(multiIndex) df.show println(df.selectExpr("count(*)").show(5)) assertEquals(2, df.count()) } @Test def testMultiIndexesWithUpcastableTypes() { // add some data val jsonDoc1 = """{"artist" : "buckethead", "album": "mirror realms", "data": "blah" }""" val jsonDoc2 = """{"artist" : "buckethead", "album": "mirror realms", "data": 42 }""" val index1 = wrapIndex("sparksql-multi-index-upcast-1") val (target1, _) = makeTargets(index1, "data") val index2 = wrapIndex("sparksql-multi-index-upcast-2") val (target2, _) = makeTargets(index2, "data") sc.makeRDD(Seq(jsonDoc1)).saveJsonToEs(target1) sc.makeRDD(Seq(jsonDoc2)).saveJsonToEs(target2) RestUtils.refresh(wrapIndex("sparksql-multi-index-upcast-1")) RestUtils.refresh(wrapIndex("sparksql-multi-index-upcast-1")) val multiIndex = wrapIndex("sparksql-multi-index-upcast-1,") + index2 val df = sqc.read.format("es").load(multiIndex) df.show assertEquals(StringType, df.schema.fields(2).dataType) println(df.selectExpr("count(*)").show(5)) assertEquals(2, df.count()) } @Test def testArraysAndNulls() { val index = wrapIndex("sparksql-test-arrays-and-nulls") val typed = "data" val (target, docPath) = makeTargets(index, typed) RestUtils.touch(index) val document1 = """{ "id": 1, "status_code" : [123]}""".stripMargin val document2 = """{ "id" : 2, "status_code" : []}""".stripMargin val document3 = """{ "id" : 3, "status_code" : null}""".stripMargin sc.makeRDD(Seq(document1, document2, document3)).saveJsonToEs(target) RestUtils.refresh(index) val df = sqc.read.format("es").option("es.read.field.as.array.include","status_code").load(index) .select("id", "status_code") var result = df.where("id = 1").first().getList(1) assertEquals(123, result.get(0)) result = df.where("id = 2").first().getList(1) assertTrue(result.isEmpty) assertTrue(df.where("id = 3").first().isNullAt(1)) } @Test def testReadFieldInclude(): Unit = { val data = Seq( Row(Row(List(Row("hello","2"), Row("world","1")))) ) val rdd: RDD[Row] = sc.parallelize(data) val schema = new StructType() .add("features", new StructType() .add("hashtags", new ArrayType(new StructType() .add("text", StringType) .add("count", StringType), true))) val inputDf = sqc.createDataFrame(rdd, schema) inputDf.write .format("org.elasticsearch.spark.sql") .save("read_field_include_test") val reader = sqc.read.format("org.elasticsearch.spark.sql").option("es.read.field.as.array.include","features.hashtags") // No "es.read.field.include", so everything is included: var df = reader.load("read_field_include_test") var result = df.select("features.hashtags").first().getAs[scala.collection.IndexedSeq[Row]](0) assertEquals(2, result(0).size) assertEquals("hello", result(0).getAs("text")) assertEquals("2", result(0).getAs("count")) // "es.read.field.include" has trailing wildcard, so everything included: df = reader.option("es.read.field.include","features.hashtags.*").load("read_field_include_test") result = df.select("features.hashtags").first().getAs[IndexedSeq[Row]](0) assertEquals(2, result(0).size) assertEquals("hello", result(0).getAs("text")) assertEquals("2", result(0).getAs("count")) // "es.read.field.include" includes text but not count df = reader.option("es.read.field.include","features.hashtags.text").load("read_field_include_test") result = df.select("features.hashtags").first().getAs[IndexedSeq[Row]](0) assertEquals(1, result(0).size) assertEquals("hello", result(0).getAs("text")) // "es.read.field.include" does not include the leaves in the hierarchy so they won't be returned df = reader.option("es.read.field.include","features.hashtags").load("read_field_include_test") result = df.select("features.hashtags").first().getAs[IndexedSeq[Row]](0) assertEquals(0, result(0).size) } @Test def testScriptedUpsert(): Unit = { val testIndex = "scripted_upsert_test" val updateParams = "count: <4>" val updateScript = "if ( ctx.op == 'create' ) {ctx._source.counter = params.count} else {ctx._source.counter += params.count}" val conf = Map("es.mapping.id" -> "id", "es.mapping.exclude" -> "id", "es.write.operation" -> "upsert", "es.update.script.params" -> updateParams, "es.update.script.upsert" -> "true", "es.update.script.inline" -> updateScript) val data = Seq(Row("1", 3)) val rdd: RDD[Row] = sc.parallelize(data) val schema = new StructType().add("id", StringType, nullable = false).add("count", IntegerType, nullable = false) val df = sqc.createDataFrame(rdd, schema) df.write.format("es").options(conf).mode(SaveMode.Append).save(testIndex) val reader = sqc.read.format("es") var readerDf = reader.load(testIndex) var result = readerDf.select("counter").first().get(0) assertEquals(4l, result) df.write.format("es").options(conf).mode(SaveMode.Append).save(testIndex) readerDf = reader.load(testIndex) result = readerDf.select("counter").first().get(0) assertEquals(8l, result) } @Test def testNestedFieldsUpsert(): Unit = { val update_params = "new_samples: samples" val update_script = "ctx._source.samples = params.new_samples" val es_conf = Map( "es.mapping.id" -> "id", "es.write.operation" -> "upsert", "es.update.script.params" -> update_params, "es.update.script.inline" -> update_script ) // First do an upsert with two completely new rows: var data = Seq(Row("2", List(Row("hello"), Row("world"))), Row("1", List())) var rdd: RDD[Row] = sc.parallelize(data) val schema = new StructType() .add("id", StringType, nullable = false) .add("samples", new ArrayType(new StructType() .add("text", StringType), true)) var df = sqc.createDataFrame(rdd, schema) df.write.format("org.elasticsearch.spark.sql").options(es_conf).mode(SaveMode.Append).save("nested_fields_upsert_test") val reader = sqc.read.schema(schema).format("org.elasticsearch.spark.sql").option("es.read.field.as.array.include","samples") var resultDf = reader.load("nested_fields_upsert_test") assertEquals(2, resultDf.count()) var samples = resultDf.select("samples").where("id = '2'").first().getAs[scala.collection.IndexedSeq[Row]](0) assertEquals(2, samples.size) assertEquals("hello", samples(0).get(0)) assertEquals("world", samples(1).get(0)) //Now, do an upsert on the one with the empty samples list: data = Seq(Row("1", List(Row("goodbye"), Row("world")))) rdd = sc.parallelize(data) df = sqc.createDataFrame(rdd, schema) df.write.format("org.elasticsearch.spark.sql").options(es_conf).mode(SaveMode.Append).save("nested_fields_upsert_test") resultDf = reader.load("nested_fields_upsert_test") samples = resultDf.select("samples").where("id = '1'").first().getAs[scala.collection.IndexedSeq[Row]](0) assertEquals(2, samples.size) assertEquals("goodbye", samples(0).get(0)) assertEquals("world", samples(1).get(0)) // Finally, an upsert on the row that had samples values: data = Seq(Row("2", List(Row("goodbye"), Row("again")))) rdd = sc.parallelize(data) df = sqc.createDataFrame(rdd, schema) df.write.format("org.elasticsearch.spark.sql").options(es_conf).mode(SaveMode.Append).save("nested_fields_upsert_test") resultDf = reader.load("nested_fields_upsert_test") samples = resultDf.select("samples").where("id = '2'").first().getAs[scala.collection.IndexedSeq[Row]](0) assertEquals(2, samples.size) assertEquals("goodbye", samples(0).get(0)) assertEquals("again", samples(1).get(0)) } @Test def testMapsUpsert(): Unit = { val update_params = "new_samples: samples" val update_script = "ctx._source.samples = params.new_samples" val es_conf = Map( "es.mapping.id" -> "id", "es.write.operation" -> "upsert", "es.update.script.params" -> update_params, "es.update.script.inline" -> update_script ) // First do an upsert with two completely new rows: var data = Seq(Row("2", Map(("hello", "world"))), Row("1", Map())) var rdd: RDD[Row] = sc.parallelize(data) val schema = new StructType() .add("id", StringType, nullable = false) .add("samples", new MapType(StringType, StringType, true)) var df = sqc.createDataFrame(rdd, schema) df.write.format("org.elasticsearch.spark.sql").options(es_conf).mode(SaveMode.Append).save("map_fields_upsert_test") val reader = sqc.read.format("org.elasticsearch.spark.sql") var resultDf = reader.load("map_fields_upsert_test") assertEquals(2, resultDf.count()) var samples = resultDf.select("samples").where("id = '2'").first() assertEquals(1, samples.size) assertEquals("world", samples.get(0).asInstanceOf[Row].get(0)) //Now, do an upsert on the one with the empty samples list: data = Seq(Row("1", Map(("goodbye", "all")))) rdd = sc.parallelize(data) df = sqc.createDataFrame(rdd, schema) df.write.format("org.elasticsearch.spark.sql").options(es_conf).mode(SaveMode.Append).save("map_fields_upsert_test") resultDf = reader.load("map_fields_upsert_test") samples = resultDf.select("samples").where("id = '1'").first() assertEquals(1, samples.size) assertEquals("all", samples.get(0).asInstanceOf[Row].get(0)) // Finally, an upsert on the row that had samples values: data = Seq(Row("2", Map(("goodbye", "again")))) rdd = sc.parallelize(data) df = sqc.createDataFrame(rdd, schema) df.write.format("org.elasticsearch.spark.sql").options(es_conf).mode(SaveMode.Append).save("map_fields_upsert_test") resultDf = reader.load("map_fields_upsert_test") samples = resultDf.select("samples").where("id = '2'").first() assertEquals(1, samples.size) assertEquals("again", samples.get(0).asInstanceOf[Row].get(0)) } @Test def testWildcard() { val mapping = wrapMapping("data", s"""{ | "properties": { | "name": { | "type": "wildcard" | } | } | } """.stripMargin) val index = wrapIndex("sparksql-test-wildcard") val typed = "data" val (target, docPath) = makeTargets(index, typed) RestUtils.touch(index) RestUtils.putMapping(index, typed, mapping.getBytes(StringUtils.UTF_8)) val wildcardDocument = """{ "name" : "Chipotle Mexican Grill"}""".stripMargin sc.makeRDD(Seq(wildcardDocument)).saveJsonToEs(target) RestUtils.refresh(index) val df = sqc.read.format("es").load(index) val dataType = df.schema("name").dataType assertEquals("string", dataType.typeName) val head = df.head() assertThat(head.getString(0), containsString("Chipotle")) } /** * Dots in field names are supported by Elasticsearch, but not by es-hadoop. We expect them to fail. */ @Test(expected = classOf[SparkException]) def testDotsInFieldNames(): Unit = { val index = wrapIndex("dots-in-names-index") val typed = "data" val (target, docPath) = makeTargets(index, typed) RestUtils.postData(docPath, "{\"b\":0,\"e\":{\"f.g\":\"hello\"}}".getBytes("UTF-8")) val df = sqc.read.format("es").load(index) RestUtils.refresh(index) df.count() } /** * Take advantage of the fixed method order and clear out all created indices. * The indices will last in Elasticsearch for all parameters of this test suite. * This test suite often puts a lot of stress on the system's available file * descriptors due to the volume of indices it creates. */ @Test def zzzz_clearEnvironment() { // Nuke the whole environment after the tests run. RestUtils.delete("_all") } def wrapIndex(index: String) = { prefix + index } def wrapMapping(typename: String, mapping: String): String = { if (TestUtils.isTypelessVersion(version)) { mapping } else { s"""{"$typename":$mapping}""" } } def makeTargets(index: String, typeName: String): (String, String) = { (resource(index, typeName, version), docEndpoint(index, typeName, version)) } /** * When using Unicode characters in table names for SparkSQL, they need to be enclosed in backticks. */ def wrapTableName(name: String) = { if (encodeResources) "`" + name + "`" else name } private def keepHandledFilters = { !pushDown || (pushDown && doubleFiltering) } private def readAsRDD(uri: URI) = { // don't use the sc.read.json/textFile to avoid the whole Hadoop madness val path = Paths.get(uri) // because Windows val lines = Files.readAllLines(path, StandardCharsets.ISO_8859_1).asScala.toSeq sc.parallelize(lines) } }
elastic/elasticsearch-hadoop
spark/sql-30/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSparkSQL.scala
Scala
apache-2.0
98,464
package com.jejking.rprng.rng.actors import com.jejking.rprng.rng._ import org.apache.commons.math3.random.MersenneTwister import org.scalamock.scalatest.MockFactory import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import scala.language.postfixOps /** * Created by jking on 09/12/2016. */ class TimeRangeToReseedSpec extends AnyFlatSpec with Matchers with MockFactory { "TimeRangeToReseed" should "provide a default min and max lifetime" in { val lifeSpanRange = TimeRangeToReseed() lifeSpanRange.minLifeTime shouldBe TimeRangeToReseed.defaultMinLifeTime lifeSpanRange.maxLifeTime shouldBe TimeRangeToReseed.defaultMaxLifeTime } it should "accept input where min is less than max" in { TimeRangeToReseed(1 minute, 2 minutes) } it should "reject input where min is greater than max" in { intercept[IllegalArgumentException] { TimeRangeToReseed(1 hour, 1 minute) } } it should "reject input where min is equal to max" in { intercept[IllegalArgumentException] { TimeRangeToReseed(1 minute, 1 minute) } } "the companion object" should "compute an appropriate schedule" in { val byteSource = stub[Rng] (byteSource.randomBytes _).when(*).returns(TestUtils.arrayOfEightZeroBytes()) val minLifeTime: FiniteDuration = 1 minute val maxLifeTime: FiniteDuration = 2 minutes val lifeSpanRange = TimeRangeToReseed(minLifeTime, maxLifeTime) TimeRangeToReseed.durationToReseed(lifeSpanRange, byteSource) shouldBe (1 minute) val mersenneTwister = new MersenneTwister() for (i <- 1 to 100) { val computedScheduledTimeOfDeath = TimeRangeToReseed.durationToReseed(lifeSpanRange, new CommonsMathRng(mersenneTwister)) assert(computedScheduledTimeOfDeath >= minLifeTime) assert(computedScheduledTimeOfDeath <= maxLifeTime) } } }
jejking/rprng
src/test/scala/com/jejking/rprng/rng/actors/TimeRangeToReseedSpec.scala
Scala
apache-2.0
1,921
package cobalt.parser.statement import cobalt.ast.AST._ import cobalt.parser.StatementParser import cobalt.utils.TestUtil import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{FunSpec, Matchers} import scala.collection.mutable.ArrayBuffer @RunWith(classOf[JUnitRunner]) class DoBlockParserTest extends FunSpec with Matchers { describe("Do block parser") { it("Should parse do block") { val code = """do | x | y | z """.stripMargin.replace("\\r", "") TestUtil.parse(code, StatementParser.doBlock) shouldBe DoBlock(ArrayBuffer(ExprAsStmt(Identifier(Name("x"))), ExprAsStmt(Identifier(Name("y"))), ExprAsStmt(Identifier(Name("z"))))) } } }
Michael2109/cobalt
src/test/scala/cobalt/parser/statement/DoBlockParserTest.scala
Scala
lgpl-3.0
757
/** * © 2019 Refinitiv. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ import scala.collection.parallel.ParSeq import scala.util.Try import scala.collection.parallel.CollectionConverters._ case class Grid(user: String, password: String, clusterIps: Seq[String], clusterName: String, dataCenter: String, dataDirs: DataDirs, instDirs: InstDirs, esMasters: Int, allocationPlan: ModuleAllocations, useAuthorization: Boolean, deployJava: Boolean, production: Boolean, g1: Boolean = false, su: Boolean = true, casRowCacheSize: Int = 256, ctrlService: Boolean = false, haProxy: Option[HaProxy] = None, dcTarget: Option[String] = None, minMembers: Option[Int] = None, subjectsInSpAreHttps: Boolean = false, defaultRdfProtocol: String = "http", diskOptimizationStrategy:String = "ssd", // we refrain from using Cas Commitlog on cluster, to save disk space and performance, // given we always write in Quorum so there will be no data loss casUseCommitLog:Boolean = false) extends Host( user, password, clusterIps, clusterIps.size + esMasters, clusterIps.size, clusterName, dataCenter, dataDirs, instDirs, 1, allocationPlan, useAuthorization, deployJava, production, su, ctrlService, minMembers, haProxy, subjectsInSpAreHttps = subjectsInSpAreHttps, defaultRdfProtocol = defaultRdfProtocol, diskOptimizationStrategy = diskOptimizationStrategy, casUseCommitLog = casUseCommitLog ) { require(clusterIps.distinct equals clusterIps, "must be unique") //if(!validateNumberOfMasterNodes(esMasters, ips.size)) throw new Exception("Bad number of Elasticsearch master nodes") override def getElasticsearchMasters: Int = esMasters //def hosts = ips.map(ip => s"${user}@${ip}") override def getCassandraHostIDs(host: String): String = { // val ip = command(s"ifconfig ${inet}", host, false).get.split("\\n")(1).trim.split(" ")(1).split(":")(1) // command(s"$nodeToolPath status | $ip",host,false) ??? } override def mkScripts(hosts: ParSeq[String]): ParSeq[ComponentConf] = { val aloc = allocationPlan.getJvmAllocations val casAllocations = aloc.cas //DefaultAlocations(4000,4000,1000,0) val esAllocations = aloc.es //DefaultAlocations(6000,6000,400,0) val esMasterAllocations = JvmMemoryAllocations(2048, 2048, 0, 256) val bgAllocations = aloc.bg //DefaultAlocations(1000,1000,512,0) val wsAllocations = aloc.ws val ctrlAllocations = aloc.ctrl val homeDir = s"${instDirs.globalLocation}/cm-well" val casDataDirs = (1 to dataDirs.casDataDirs.size).map(ResourceBuilder.getIndexedName("cas", _)) hosts.flatMap { host => val cas = CassandraConf( home = homeDir, seeds = getSeedNodes.mkString(","), clusterName = clusterName, resourceManager = casAllocations, snitchType = "GossipingPropertyFileSnitch", ccl_dir = "ccl", dir = "cas", rowCacheSize = casRowCacheSize, replicationFactor = 3, template = "cassandra.yaml", listenAddress = host, rpcAddress = host, sName = "start.sh", index = 1, rs = IpRackSelector(), g1 = g1, hostIp = host, casDataDirs = casDataDirs, casUseCommitLog = casUseCommitLog, numOfCores = calculateCpuAmount, diskOptimizationStrategy = diskOptimizationStrategy ) val es = ElasticsearchConf( clusterName = clusterName, nodeName = host, masterNode = false, dataNode = true, expectedNodes = ips.size, numberOfReplicas = 2, seeds = getSeedNodes.mkString(","), home = homeDir, resourceManager = esAllocations, dir = "es", template = "elasticsearch.yml", listenAddress = host, masterNodes = esMasters, sName = "start.sh", index = 1, rs = IpRackSelector(), g1 = g1, hostIp = host, dirsPerEs = dataDirs.esDataDirs.size ) val esMaster = ElasticsearchConf( clusterName = clusterName, nodeName = s"$host-master", masterNode = true, dataNode = false, expectedNodes = ips.size, numberOfReplicas = 2, seeds = getSeedNodes.mkString(","), home = homeDir, resourceManager = esMasterAllocations, dir = "es-master", template = "elasticsearch.yml", listenAddress = host, masterNodes = esMasters, sName = "start-master.sh", index = 2, rs = IpRackSelector(), g1 = true, hostIp = host ) val bg = BgConf( home = homeDir, zookeeperServers = ips.take(3), clusterName = clusterName, dataCenter = dataCenter, hostName = host, resourceManager = bgAllocations, sName = "start.sh", isMaster = host == ips(0), partition = ips.indexOf(host), logLevel = BgProps(this).LogLevel.getLogLevel, debug = deb, hostIp = host, minMembers = getMinMembers, numOfPartitions = ips.size, seeds = getSeedNodes.mkString(","), defaultRdfProtocol = defaultRdfProtocol, transportAddress = this.getThreesome(ips, host) ) val web = WebConf( home = homeDir, zookeeperServers = ips.take(3), clusterName = clusterName, dataCenter = dataCenter, hostName = host, resourceManager = wsAllocations, sName = "start.sh", useAuthorization = useAuthorization, numOfPartitions = ips.size, logLevel = WebserviceProps(this).LogLevel.getLogLevel, debug = deb, hostIp = host, minMembers = getMinMembers, seeds = getSeedNodes.mkString(","), seedPort = 9300, defaultRdfProtocol = defaultRdfProtocol, transportAddress = this.getThreesome(ips, host) ) val cw = CwConf( home = homeDir, clusterName = clusterName, dataCenter = dataCenter, hostName = host, resourceManager = wsAllocations, sName = "cw-start.sh", logLevel = WebserviceProps(this).LogLevel.getLogLevel, debug = deb, hostIp = host, minMembers = getMinMembers, seeds = getSeedNodes.mkString(","), seedPort = 9300, subjectsInSpAreHttps = subjectsInSpAreHttps, transportAddress = this.getThreesome(ips, host) ) val ctrl = CtrlConf( home = homeDir, sName = "start.sh", seeds = getSeedNodes.mkString(","), clusterName = clusterName, resourceManager = ctrlAllocations, singletonStarter = true, pingIp = host, user = user, logLevel = CtrlProps(this).LogLevel.getLogLevel, debug = deb, hostIp = host, minMembers = getMinMembers ) val dcConf = DcConf( home = homeDir, sName = "start.sh", clusterName = cn, resourceManager = bgAllocations.copy(mxms = 0), target = dcTarget.getOrElse(ips.map(ip => s"$ip:9000").mkString(",")), debug = deb, logLevel = DcProps(this).LogLevel.getLogLevel, pingIp = host, hostIp = host, minMembers = getMinMembers ) val zookeeper = ZookeeperConf( home = homeDir, clusterName = cn, servers = ips.take(3), hostIp = host ) val kafka = KafkaConf( home = homeDir, logDirs = dataDirs.kafkaDataDirs.toList, zookeeperServers = ips.take(3), brokerId = brokerId(host), hostIp = host ) List( cas, es, esMaster, web, cw, ctrl, dcConf, zookeeper, kafka, bg ) } } override def getMode: String = "grid" override def getSeedNodes: List[String] = ips.take(3) override def startElasticsearch(hosts: Seq[String]): Unit = { command(s"cd ${instDirs.globalLocation}/cm-well/app/es/cur; ${startScript("./start-master.sh")}", ips.take(esMasters).intersect(hosts.to(Seq)).to(ParSeq), false) command(s"cd ${instDirs.globalLocation}/cm-well/app/es/cur; ${startScript("./start.sh")}", hosts.to(ParSeq), false) } override def startCassandra(hosts: ParSeq[String]): Unit = { command(s"cd ${instDirs.globalLocation}/cm-well/app/cas/cur/; ${startScript("./start.sh")}", hosts, false) } override def initCassandra(hosts: ParSeq[String]): Unit = { command(s"cd ${instDirs.globalLocation}/cm-well/app/cas/cur/; ${startScript("./start.sh")}", hosts(0), false) Try(CassandraLock().waitForModule(ips(0), 1)) command(s"cd ${instDirs.globalLocation}/cm-well/app/cas/cur/; ${startScript("./start.sh")}", hosts(1), false) Try(CassandraLock().waitForModule(ips(0), 2)) command(s"cd ${instDirs.globalLocation}/cm-well/app/cas/cur/; ${startScript("./start.sh")}", hosts.drop(2), false) } override def initElasticsearch(hosts: Seq[String]): Unit = { command(s"cd ${instDirs.globalLocation}/cm-well/app/es/cur; ${startScript("./start-master.sh")}", hosts.take(esMasters).to(ParSeq), false) Try(ElasticsearchLock().waitForModule(ips(0), esMasters)) // command(s"cd ${instDirs.globalLocation}/cm-well/app/es/cur; ./start-master.sh", hosts(1), false) // ElasticsearchLock().waitForModule(ips(0), 2) // command(s"cd ${instDirs.globalLocation}/cm-well/app/es/cur; ./start-master.sh", hosts.drop(2).take(esMasters - 2), false) command(s"cd ${instDirs.globalLocation}/cm-well/app/es/cur; ${startScript("./start.sh")}", hosts.to(ParSeq), false) } override def getNewHostInstance(ipms: Seq[String]): Host = { this.copy(clusterIps = ipms) } }
e-orz/CM-Well
server/cmwell-cons/src/main/scala/Grid.scala
Scala
apache-2.0
10,895
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). package org.maproulette.models import org.joda.time.DateTime import org.maproulette.data.{ItemType, TagType} import play.api.libs.json.{Json, Reads, Writes} import play.api.libs.json.JodaWrites._ import play.api.libs.json.JodaReads._ /** * Tags sit outside of the object hierarchy and have no parent or children objects associated it. * It simply has a many to one mapping between tags and tasks. This allows tasks to be easily * searched for and organized. Helping people find tasks related to what interests them. * * @author cuthbertm */ case class Tag(override val id: Long, override val name: String, override val description: Option[String] = None, override val created: DateTime = DateTime.now(), override val modified: DateTime = DateTime.now(), tagType: String = "challenges") extends BaseObject[Long] { override val itemType: ItemType = TagType() } object Tag { implicit val tagWrites: Writes[Tag] = Json.writes[Tag] implicit val tagReads: Reads[Tag] = Json.reads[Tag] val KEY = "tags" }
mvexel/maproulette2
app/org/maproulette/models/Tag.scala
Scala
apache-2.0
1,232
package mesosphere.marathon package core.matcher.base.util import akka.actor.ActorRef import com.typesafe.scalalogging.StrictLogging import mesosphere.marathon.core.matcher.base.OfferMatcher import mesosphere.marathon.core.matcher.base.OfferMatcher.MatchedInstanceOps import mesosphere.marathon.state.{ PathId, Timestamp } import mesosphere.marathon.util.{ Timeout, TimeoutException } import mesosphere.util._ import org.apache.mesos.Protos.Offer import scala.concurrent.duration._ import scala.concurrent.{ Future, Promise } /** * Provides a thin wrapper around an OfferMatcher implemented as an actors. * * @param actorRef Reference to actor that matches offers. * @param precedenceFor Defines which matcher receives offers first. See [[mesosphere.marathon.core.matcher.base.OfferMatcher.precedenceFor]]. */ class ActorOfferMatcher(actorRef: ActorRef, override val precedenceFor: Option[PathId])(implicit scheduler: akka.actor.Scheduler) extends OfferMatcher with StrictLogging { def matchOffer(now: Timestamp, deadline: Timestamp, offer: Offer): Future[MatchedInstanceOps] = { import mesosphere.marathon.core.async.ExecutionContexts.global val timeout: FiniteDuration = now.until(deadline) if (timeout <= ActorOfferMatcher.MinimalOfferComputationTime) { // if deadline is exceeded return no match logger.warn(s"Could not process offer '${offer.getId.getValue}' within ${timeout.toHumanReadable}. (See --offer_matching_timeout)") Future.successful(MatchedInstanceOps.noMatch(offer.getId)) } else { val p = Promise[MatchedInstanceOps]() actorRef ! ActorOfferMatcher.MatchOffer(deadline, offer, p) Timeout(timeout)(p.future).recover { case e: TimeoutException => logger.warn(s"Could not process offer '${offer.getId.getValue}' within ${timeout.toHumanReadable}. (See --offer_matching_timeout)") MatchedInstanceOps.noMatch(offer.getId) } } } override def toString: String = s"ActorOfferMatcher($actorRef)" } object ActorOfferMatcher { // Do not start a offer matching if there is less time than this minimal time // Optimization to prevent timeouts val MinimalOfferComputationTime: FiniteDuration = 50.millis /** * Send to an offer matcher to request a match. * * This should always be replied to with a LaunchTasks message. * TODO(jdef) pods will probably require a non-LaunchTasks message * * @param matchingDeadline Don't match after deadline. * @param remainingOffer Part of the offer that has not been matched. * @param promise The promise to fullfil with match. */ case class MatchOffer(matchingDeadline: Timestamp, remainingOffer: Offer, promise: Promise[MatchedInstanceOps]) }
natemurthy/marathon
src/main/scala/mesosphere/marathon/core/matcher/base/util/ActorOfferMatcher.scala
Scala
apache-2.0
2,752
/* sbt -- Simple Build Tool * Copyright 2009 Mark Harrah */ package org.ensime.config import java.io.{ Closeable, File, FileInputStream, FileOutputStream, InputStream, OutputStream } object Using extends NotNull { def apply[R <: Closeable, T](create: R)(f: R => T): T = withResource(create)(f) def withResource[R <: Closeable, T](r: R)(f: R => T): T = try { f(r) } finally { r.close() } } object Copy { def apply(files: List[File], toDirectory: File): Unit = files.foreach(file => apply(file, toDirectory)) def apply(file: File, toDirectory: File) { toDirectory.mkdirs() Using(new FileInputStream(file)) { in => Using(new FileOutputStream(new File(toDirectory, file.getName))) { out => transfer(in, out) } } } def transfer(in: InputStream, out: OutputStream) { val buffer = new Array[Byte](8192) def next() { val read = in.read(buffer) if (read > 0) { out.write(buffer, 0, read) next() } } next() } }
non/ensime
src/main/scala/org/ensime/config/Using.scala
Scala
gpl-3.0
1,001
package scala.compat.java8.collectionImpl import scala.language.higherKinds /** A `DoubleAccumulator` is a low-level collection specialized for gathering * elements in parallel and then joining them in order by merging them. * This is a manually specialized variant of `Accumulator` with no actual * subclassing relationship with `Accumulator`. */ final class DoubleAccumulator extends AccumulatorLike[Double, DoubleAccumulator] { self => private[java8] var current: Array[Double] = DoubleAccumulator.emptyDoubleArray private[java8] var history: Array[Array[Double]] = DoubleAccumulator.emptyDoubleArrayArray private[java8] def cumulative(i: Int) = { val x = history(i); x(x.length-1).toLong } private def expand(): Unit = { if (index > 0) { current(current.length-1) = (if (hIndex > 0) { val x = history(hIndex-1); x(x.length-1) } else 0) + index if (hIndex >= history.length) hExpand() history(hIndex) = current hIndex += 1 } current = new Array[Double](nextBlockSize+1) index = 0 } private def hExpand(): Unit = { if (hIndex == 0) history = new Array[Array[Double]](4) else history = java.util.Arrays.copyOf(history, history.length << 1) } /** Appends an element to this `DoubleAccumulator`. */ final def +=(a: Double): Unit = { totalSize += 1 if (index+1 >= current.length) expand() current(index) = a index += 1 } /** Removes all elements from `that` and appends them to this `DoubleAccumulator`. */ final def drain(that: DoubleAccumulator): Unit = { var h = 0 var prev = 0L var more = true while (more && h < that.hIndex) { val cuml = that.cumulative(h) val n = (cuml - prev).toInt if (current.length - index - 1 >= n) { System.arraycopy(that.history(h), 0, current, index, n) prev = cuml index += n h += 1 } else more = false } if (h >= that.hIndex && current.length - index - 1>= that.index) { if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) index += that.index } else { val slots = (if (index > 0) 1 else 0) + that.hIndex - h if (hIndex + slots > history.length) { val n = math.max(4, 1 << (32 - java.lang.Integer.numberOfLeadingZeros(1 + hIndex + slots))) history = java.util.Arrays.copyOf(history, n) } var pv = (if (hIndex > 0) cumulative(hIndex-1) else 0L) if (index > 0) { val x = if (index < (current.length >>> 3) && current.length - 1 > 32) { val ans = java.util.Arrays.copyOf(current, index + 1) ans(ans.length - 1) = current(current.length - 1) ans } else current pv = pv + index x(x.length - 1) = pv history(hIndex) = x hIndex += 1 } while (h < that.hIndex) { val cuml = that.cumulative(h) pv = pv + cuml - prev prev = cuml val x = that.history(h) x(x.length - 1) = pv history(hIndex) = x h += 1 hIndex += 1 } index = that.index current = that.current } totalSize += that.totalSize that.clear } override def clear(): Unit = { super.clear() current = DoubleAccumulator.emptyDoubleArray history = DoubleAccumulator.emptyDoubleArrayArray } /** Retrieves the `ix`th element. */ final def apply(ix: Long): Double = { if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) else { val w = seekSlot(ix) history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) } } /** Retrieves the `ix`th element, using an `Int` index. */ final def apply(i: Int): Double = apply(i.toLong) /** Returns a `DoubleStepper` over the contents of this `DoubleAccumulator`. */ final def stepper: DoubleStepper = new DoubleAccumulatorStepper(this) /** Returns an `Iterator` over the contents of this `DoubleAccumulator`. The `Iterator` is not specialized. */ final def iterator = stepper.iterator /** Returns a `java.util.Spliterator.OfDouble` over the contents of this `DoubleAccumulator`*/ final def spliterator: java.util.Spliterator.OfDouble = stepper /** Produces a sequential Java 8 `DoubleStream` over the elements of this `DoubleAccumulator`*/ final def seqStream: java.util.stream.DoubleStream = java.util.stream.StreamSupport.doubleStream(spliterator, false) /** Produces a parallel Java 8 `DoubleStream` over the elements of this `DoubleAccumulator`*/ final def parStream: java.util.stream.DoubleStream = java.util.stream.StreamSupport.doubleStream(spliterator, true) /** Copies the elements in this `DoubleAccumulator` into an `Array[Double]` */ final def toArray = { if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) val a = new Array[Double](totalSize.toInt) var j = 0 var h = 0 var pv = 0L while (h < hIndex) { val x = history(h) val cuml = x(x.length-1).toLong val n = (cuml - pv).toInt pv = cuml System.arraycopy(x, 0, a, j, n) j += n h += 1 } System.arraycopy(current, 0, a, j, index) j += index a } /** Copies the elements in this `DoubleAccumulator` to a `List` */ final def toList: List[Double] = { var ans: List[Double] = Nil var i = index - 1 while (i >= 0) { ans = current(i) :: ans i -= 1 } var h = hIndex - 1 while (h >= 0) { val a = history(h) i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 while (i >= 0) { ans = a(i) :: ans i -= 1 } h -= 1 } ans } /** Copies the elements in this `DoubleAccumulator` to a specified collection. * Note that the target collection is not specialized. * Usage example: `acc.to[Vector]` */ final def to[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Double, Coll[Double]]): Coll[Double] = { if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) val b = cbf() b.sizeHint(totalSize.toInt) var h = 0 var pv = 0L while (h < hIndex) { val x = history(h) val n = cumulative(h) - pv pv = cumulative(h) var i = 0 while (i < n) { b += x(i) i += 1 } h += 1 } var i = 0 while (i < index) { b += current(i) i += 1 } b.result } } object DoubleAccumulator { private val emptyDoubleArray = new Array[Double](0) private val emptyDoubleArrayArray = new Array[Array[Double]](0) /** A `Supplier` of `DoubleAccumulator`s, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. Suitable for `Stream[Double]` also. */ def supplier = new java.util.function.Supplier[DoubleAccumulator]{ def get: DoubleAccumulator = new DoubleAccumulator } /** A `BiConsumer` that adds an element to an `Accumulator`, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. */ def adder = new java.util.function.ObjDoubleConsumer[DoubleAccumulator]{ def accept(ac: DoubleAccumulator, a: Double): Unit = { ac += a } } /** A `BiConsumer` that adds a boxed `Double` to an `DoubleAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ def boxedAdder = new java.util.function.BiConsumer[DoubleAccumulator, Double]{ def accept(ac: DoubleAccumulator, a: Double): Unit = { ac += a } } /** A `BiConsumer` that merges `DoubleAccumulator`s, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. Suitable for `Stream[Double]` also. */ def merger = new java.util.function.BiConsumer[DoubleAccumulator, DoubleAccumulator]{ def accept(a1: DoubleAccumulator, a2: DoubleAccumulator): Unit = { a1 drain a2 } } /** Builds a `DoubleAccumulator` from any `Double`-valued `TraversableOnce` */ def from[A](source: TraversableOnce[Double]) = { val a = new DoubleAccumulator source.foreach(a += _) a } } private[java8] class DoubleAccumulatorStepper(private val acc: DoubleAccumulator) extends DoubleStepper { import java.util.Spliterator._ private var h = 0 private var i = 0 private var a = if (acc.hIndex > 0) acc.history(0) else acc.current private var n = if (acc.hIndex > 0) acc.cumulative(0) else acc.index private var N = acc.totalSize private def duplicateSelf(limit: Long): DoubleAccumulatorStepper = { val ans = new DoubleAccumulatorStepper(acc) ans.h = h ans.i = i ans.a = a ans.n = n ans.N = limit ans } private def loadMore(): Unit = { h += 1 if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } else { a = acc.current; n = acc.index } i = 0 } def characteristics() = ORDERED | SIZED | SUBSIZED | NONNULL def estimateSize = N def hasNext = N > 0 def nextDouble: Double = if (n <= 0) throw new NoSuchElementException("next on empty Stepper") else { if (i >= n) loadMore() val ans = a(i) i += 1 N -= 1 ans } // Overridden for efficiency override def tryStep(f: Double => Unit): Boolean = if (N <= 0) false else { if (i >= n) loadMore() f(a(i)) i += 1 N -= 1 true } // Overridden for efficiency override def tryAdvance(f: java.util.function.DoubleConsumer): Boolean = if (N <= 0) false else { if (i >= n) loadMore() f.accept(a(i)) i += 1 N -= 1 true } // Overridden for efficiency override def foreach(f: Double => Unit): Unit = { while (N > 0) { if (i >= n) loadMore() val i0 = i if ((n-i) > N) n = i + N.toInt while (i < n) { f(a(i)) i += 1 } N -= (n - i0) } } // Overridden for efficiency override def forEachRemaining(f: java.util.function.DoubleConsumer): Unit = { while (N > 0) { if (i >= n) loadMore() val i0 = i if ((n-i) > N) n = i + N.toInt while (i < n) { f.accept(a(i)) i += 1 } N -= (n - i0) } } def substep(): DoubleStepper = if (N <= 1) null else { val half = (N >> 1) val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i val R = M + half val ans = duplicateSelf(half) if (h < acc.hIndex) { val w = acc.seekSlot(R) h = (w >>> 32).toInt if (h < acc.hIndex) { a = acc.history(h) n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) } else { a = acc.current n = acc.index } i = (w & 0xFFFFFFFFL).toInt } else i += half.toInt N -= half ans } }
Ichoran/scala-java8-compat
src/main/scala/scala/compat/java8/collectionImpl/DoubleAccumulator.scala
Scala
bsd-3-clause
10,935
package com.github.dwiechert.scala.tinyweb.example import com.github.dwiechert.scala.tinyweb.FunctionView import com.github.dwiechert.scala.tinyweb.FunctionController import com.github.dwiechert.scala.tinyweb.HttpRequest import scala.util.Random import com.github.dwiechert.scala.tinyweb.TinyWeb import com.github.dwiechert.scala.tinyweb.FunctionController class TinyWebExample { def greetingViewRenderer(model: Map[String, List[String]]) = "<h1>Friendly Greetings:</h1>\\n%s".format( model getOrElse ("greetings", List[String]()) map (renderGreeting) mkString "\\n") private def renderGreeting(greeting: String) = s"<h2>$greeting</h2>" def greetingView = new FunctionView(greetingViewRenderer) def handleGreetingRequest(request: HttpRequest) = Map("greetings" -> request.body.split(",").toList.map(makeGreeting)) private def random = new Random() private def greetings = Vector("Hello", "Greetings", "Saluations", "Hola") private def makeGreeting(name: String) = "%s, %s".format(greetings(random.nextInt(greetings.size)), name) def greetingController = new FunctionController(greetingView, handleGreetingRequest) def loggingFilter(request: HttpRequest) = { val path = request.path println(s"In Logging Filter - request for path: $path") request } }
DWiechert/functional-programming-patterns
src/main/scala/com/github/dwiechert/scala/tinyweb/example/TinyWebExample.scala
Scala
apache-2.0
1,323
object StringReverse { def reverse(in: String, out: String = ""): String = if (in.isEmpty) out else reverse(in.tail, in.head + out) } object Challenge2 extends App { import StringReverse.reverse println(reverse(args.head)) }
erocs/2017Challenges
challenge_1/scala/ndsmith3/StringReverse.scala
Scala
mit
236
/* * SPDX-License-Identifier: Apache-2.0 * * Copyright 2015-2021 Andre White. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.truthencode.ddo.model.feats import io.truthencode.ddo.model.religions.LordOfBlades import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, RequiresAllOfFeat} /** * Created by adarr on 4/7/2017. */ trait ChildOfTheLordOfBlades extends FeatRequisiteImpl with EberronReligionNonWarforged with ChildLevelBase with RequiresAllOfFeat with LordOfBlades with TheLordOfBladesFeatBase { self: DeityFeat => override def allOfFeats: Seq[Feat] = List(DeityFeat.FollowerOfTheLordOfBlades) }
adarro/ddo-calc
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/ChildOfTheLordOfBlades.scala
Scala
apache-2.0
1,187
import sbt._ import Keys._ object ApplicationBuild extends Build { val appName = "mongo-app" val appVersion = "1.0-SNAPSHOT" scalaVersion := "2.10.2" val appDependencies = Seq( "org.reactivemongo" %% "play2-reactivemongo" % "0.10.2") val main = play.Project(appName, appVersion, appDependencies).settings( resolvers += "Sonatype OSS Releases" at "http://oss.sonatype.org/content/repositories/releases" // settings ) }
scalastic/reactivemongo-demo-app
project/Build.scala
Scala
apache-2.0
450
/* * Copyright 2009-2010 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb.json.scalaz import scalaz._ import Scalaz._ import net.liftweb.json._ trait Types { type Result[A] = ValidationNEL[Error, A] sealed trait Error case class UnexpectedJSONError(was: JValue, expected: Class[_ <: JValue]) extends Error case class NoSuchFieldError(name: String, json: JValue) extends Error case class UncategorizedError(key: String, desc: String, args: List[Any]) extends Error case object Fail { def apply[A](key: String, desc: String, args: List[Any]): Result[A] = UncategorizedError(key, desc, args).fail.liftFailNel def apply[A](key: String, desc: String): Result[A] = UncategorizedError(key, desc, Nil).fail.liftFailNel } implicit def JValueShow[A <: JValue]: Show[A] = new Show[A] { def show(json: A) = compact(render(json)).toList } implicit def JValueZero: Zero[JValue] = zero(JNothing) implicit def JValueSemigroup: Semigroup[JValue] = semigroup(_ ++ _) implicit def JValueEqual: Equal[JValue] = equalA trait JSONR[A] { def read(json: JValue): Result[A] } trait JSONW[A] { def write(value: A): JValue } trait JSON[A] extends JSONR[A] with JSONW[A] implicit def Result2JSONR[A](f: JValue => Result[A]): JSONR[A] = new JSONR[A] { def read(json: JValue) = f(json) } def fromJSON[A: JSONR](json: JValue): Result[A] = implicitly[JSONR[A]].read(json) def toJSON[A: JSONW](value: A): JValue = implicitly[JSONW[A]].write(value) def field[A: JSONR](name: String)(json: JValue): Result[A] = json match { case JObject(fs) => fs.find(_.name == name) .map(f => implicitly[JSONR[A]].read(f.value)) .orElse(implicitly[JSONR[A]].read(JNothing).fold(_ => none, x => some(Success(x)))) .getOrElse(NoSuchFieldError(name, json).fail.liftFailNel) case x => UnexpectedJSONError(x, classOf[JObject]).fail.liftFailNel } def validate[A: JSONR](name: String): Kleisli[Result, JValue, A] = kleisli(field[A](name)) def makeObj(fields: Traversable[(String, JValue)]): JObject = JObject(fields.toList.map { case (n, v) => JField(n, v) }) } object JsonScalaz extends Types with Lifting with Base with Tuples
pbrant/framework
core/json-scalaz/src/main/scala/net/lifweb/json/scalaz/JsonScalaz.scala
Scala
apache-2.0
2,783
import com.typesafe.sbteclipse.core.EclipsePlugin.EclipseTransformerFactory import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys import sbt.ProjectRef import sbt.State import sbt.ThisBuild import scala.xml._ import scala.xml.transform.RewriteRule import scalaz.Validation import scalaz.syntax.validation._ object SbtEclipseForBuild { def settings = Seq( EclipseKeys.classpathTransformerFactories := Seq( RootSourceDirectory ), EclipseKeys.withBundledScalaContainers := false ) object RootSourceDirectory extends EclipseTransformerFactory[RewriteRule] { override def createTransformer( ref : ProjectRef, state : State ) = RootSourceDirectoryRule.success override def toString : String = "RootSourceDirectory (EclipseTransfomerFactory)" } object RootSourceDirectoryRule extends RewriteRule { private val Cp = "classpath" private val CpEntry = "classpathentry" private val PathAttr = "path" override def transform( node : Node ) : Seq[Node] = node match { case Elem( pf, Cp, attrs, scope, child @ _* ) => Elem( pf, Cp, attrs, scope, true, srcRoot( pf, scope ) ++ child : _* ) case other => other } private def srcRoot( pf : String, scope : NamespaceBinding ) : Node = { val attribs = Attribute( "kind", Text( "src" ), Attribute( "path", Text( "" ), Attribute( "excluding", Text( ".settings/|bin/|project/|target/" ), Null ) ) ) Elem( pf, CpEntry, attribs, scope, true ) } } }
chwthewke/scala.g8
src/main/g8/project/project/SbtEclipseForBuild.scala
Scala
mit
1,500
/* * Copyright 2015 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.computations.calculations import org.scalatest.{Matchers, WordSpec} import uk.gov.hmrc.ct.computations.{CP85, CP86, CP87} class TotalFirstYearAllowanceClaimedCalculationSpec extends WordSpec with Matchers { "TotalFirstYearAllowanceClaimedCalculation" should { "return an option with the calculation value" in new TotalFirstYearAllowanceClaimedCalculation { totalFirstYearAllowanceClaimedCalculation(cp85 = CP85(Some(1)), cp86 = CP86(Some(2))) should be (CP87(3)) } } }
keithhall/ct-calculations
src/test/scala/uk/gov/hmrc/ct/computations/calculations/TotalFirstYearAllowanceClaimedCalculationSpec.scala
Scala
apache-2.0
1,119
/*********************************************************************** * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.index.view import java.awt.RenderingHints import com.typesafe.config._ import org.geotools.data.DataAccessFactory.Param import org.geotools.data.{DataStore, DataStoreFactorySpi, DataStoreFinder} import org.geotools.filter.text.ecql.ECQL import org.locationtech.geomesa.index.geotools.GeoMesaDataStoreFactory.{GeoMesaDataStoreInfo, NamespaceParams} import org.locationtech.geomesa.utils.classpath.ServiceLoader import org.locationtech.geomesa.utils.geotools.GeoMesaParam import org.opengis.filter.Filter import scala.util.control.NonFatal import scala.util.{Failure, Success, Try} /** * Data store factory for merged view */ class MergedDataStoreViewFactory extends DataStoreFactorySpi { import MergedDataStoreViewFactory._ import scala.collection.JavaConverters._ override def canProcess(params: java.util.Map[String, java.io.Serializable]): Boolean = MergedDataStoreViewFactory.canProcess(params) override def createDataStore(params: java.util.Map[String, java.io.Serializable]): DataStore = createNewDataStore(params) override def createNewDataStore(params: java.util.Map[String, java.io.Serializable]): DataStore = { val configs: Seq[Config] = { val explicit = Option(ConfigParam.lookup(params)).map(ConfigFactory.parseString) val loaded = ConfigLoaderParam.flatMap(_.lookupOpt(params)).flatMap { name => ServiceLoader.load[MergedViewConfigLoader]().find(_.getClass.getName == name).map(_.load()) } Seq(explicit, loaded).flatten.flatMap { config => if (config.hasPath("stores")) { config.getConfigList("stores").asScala } else { Seq.empty } } } if (configs.isEmpty) { throw new IllegalArgumentException("No 'stores' element defined in configuration") } val namespace = NamespaceParam.lookupOpt(params) val nsConfig = namespace.map(ConfigValueFactory.fromAnyRef) val stores = Seq.newBuilder[(DataStore, Option[Filter])] stores.sizeHint(configs.length) try { configs.foreach { config => lazy val error = new IllegalArgumentException(s"Could not load store using configuration:\\n" + config.root().render(ConfigRenderOptions.concise().setFormatted(true))) // inject the namespace into the underlying stores val storeParams = nsConfig.map(config.withValue(NamespaceParam.key, _)).getOrElse(config).root().unwrapped() val filter = try { StoreFilterParam.lookupOpt(storeParams.asInstanceOf[java.util.Map[String, Serializable]]).map(ECQL.toFilter) } catch { case NonFatal(e) => throw new IllegalArgumentException(s"Invalid store filter '${storeParams.get(StoreFilterParam.key)}'", e) } Try(DataStoreFinder.getDataStore(storeParams)) match { case Success(null) => throw error case Success(store) => stores += store -> filter case Failure(e) => throw error.initCause(e) } } } catch { case NonFatal(e) => stores.result.foreach(_._1.dispose()); throw e } new MergedDataStoreView(stores.result, namespace) } override def getDisplayName: String = DisplayName override def getDescription: String = Description override def getParametersInfo: Array[Param] = ParameterInfo :+ NamespaceParam override def isAvailable: Boolean = true override def getImplementationHints: java.util.Map[RenderingHints.Key, _] = null } object MergedDataStoreViewFactory extends GeoMesaDataStoreInfo with NamespaceParams { override val DisplayName: String = "Merged DataStore View (GeoMesa)" override val Description: String = "A merged, read-only view of multiple data stores" val StoreFilterParam = new GeoMesaParam[String]("geomesa.merged.store.filter") val ConfigLoaderParam: Option[GeoMesaParam[String]] = { val loaders = ServiceLoader.load[MergedViewConfigLoader]().map(_.getClass.getName) if (loaders.isEmpty) { None } else { val param = new GeoMesaParam[String]("geomesa.merged.loader", "Loader used to configure the underlying data stores to query", enumerations = loaders) Some(param) } } val ConfigParam = new GeoMesaParam[String]("geomesa.merged.stores", "Typesafe configuration defining the underlying data stores to query", optional = ConfigLoaderParam.isDefined, largeText = true) override val ParameterInfo: Array[GeoMesaParam[_]] = ConfigLoaderParam.toArray :+ ConfigParam override def canProcess(params: java.util.Map[String, _ <: java.io.Serializable]): Boolean = params.containsKey(ConfigParam.key) || ConfigLoaderParam.exists(p => params.containsKey(p.key)) }
elahrvivaz/geomesa
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/view/MergedDataStoreViewFactory.scala
Scala
apache-2.0
5,107
package org.jetbrains.plugins.scala.console import com.intellij.execution.console.ConsoleHistoryController import com.intellij.execution.process.ProcessHandler import com.intellij.openapi.editor.Editor import com.intellij.openapi.project.Project import com.intellij.psi.PsiFile import com.intellij.util.containers.ContainerUtil /** * @author Ksenia.Sautina * @since 7/27/12 */ object ScalaConsoleInfo { private val SCALA_LANGUAGE_CONSOLE_KEY = new com.intellij.openapi.util.Key[String]("ScalaLanguageConsoleKey") private val NULL = (null, null, null) private val allConsoles = ContainerUtil.createWeakMap[Project, List[(ScalaLanguageConsole, ConsoleHistoryController, ProcessHandler)]]() def getConsole(file: PsiFile): ScalaLanguageConsole = get(file)._1 def getConsole(project: Project): ScalaLanguageConsole = get(project)._1 def getController(project: Project): ConsoleHistoryController = get(project)._2 def getProcessHandler(project: Project): ProcessHandler = get(project)._3 def getConsole(editor: Editor): ScalaLanguageConsole = get(editor)._1 def getController(editor: Editor): ConsoleHistoryController = get(editor)._2 def getProcessHandler(editor: Editor): ProcessHandler = get(editor)._3 def setIsConsole(file: PsiFile, flag: Boolean) { file.putCopyableUserData(SCALA_LANGUAGE_CONSOLE_KEY, if (flag) "console" else null) } def isConsole(file: PsiFile): Boolean = file.getCopyableUserData(SCALA_LANGUAGE_CONSOLE_KEY) != null def addConsole(console: ScalaLanguageConsole, model: ConsoleHistoryController, processHandler: ProcessHandler) { val project = console.getProject synchronized { allConsoles.get(project) match { case null => allConsoles.put(project, (console, model, processHandler) :: Nil) case list: List[(ScalaLanguageConsole, ConsoleHistoryController, ProcessHandler)] => allConsoles.put(project, (console, model, processHandler) :: list) } } } def disposeConsole(console: ScalaLanguageConsole) { val project = console.getProject synchronized { allConsoles.get(project) match { case null => case list: List[(ScalaLanguageConsole, ConsoleHistoryController, ProcessHandler)] => allConsoles.put(project, list.filter { case (sConsole, _, _) => sConsole != console }) } } } private def get(project: Project): (ScalaLanguageConsole, ConsoleHistoryController, ProcessHandler) = { synchronized { allConsoles.get(project) match { case null => NULL case list => list.headOption.getOrElse(NULL) } } } private def get(editor: Editor) = { synchronized { allConsoles.get(editor.getProject) match { case null => NULL case list => list.find { case (console: ScalaLanguageConsole, _: ConsoleHistoryController, _: ProcessHandler) => console.getConsoleEditor == editor } match { case Some(res) => res case _ => NULL } } } } private def get(file: PsiFile) = { synchronized { allConsoles.get(file.getProject) match { case null => NULL case list => list.find { case (console: ScalaLanguageConsole, _: ConsoleHistoryController, _: ProcessHandler) => console.getFile == file } match { case Some(res) => res case _ => NULL } } } } }
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/console/ScalaConsoleInfo.scala
Scala
apache-2.0
3,500
// Copyright 2014-2016 Leonardo Schwarz (leoschwarz.com) // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.leoschwarz.quest_on.data sealed trait ImageLocation { def local: Option[Local] def remote: Option[Remote] } case class Local() extends ImageLocation { override def toString = "locally" override val local = Some(this) override val remote = None } case class Remote(url: String) extends ImageLocation { override def toString = "remote:" + url override val local = None override val remote = Some(this) } object ImageLocation { def fromString(str: String): Option[ImageLocation] = { if (str == "locally") { Some(Local()) } else if (str.take(7) == "remote:") { Some(Remote(str.substring(7))) } else { None } } }
evotopid/quest_on
src/main/scala/com/leoschwarz/quest_on/data/ImageLocation.scala
Scala
apache-2.0
1,289
package gapt.formats.tptp import ammonite.ops._ import gapt.expr._ import gapt.expr.formula.All import gapt.expr.formula.And import gapt.expr.formula.Bottom import gapt.expr.formula.Eq import gapt.expr.formula.Ex import gapt.expr.formula.Formula import gapt.expr.formula.Imp import gapt.expr.formula.Neg import gapt.expr.formula.NonLogicalConstant import gapt.expr.formula.Or import gapt.expr.formula.Top import gapt.expr.formula.constants.EqC import gapt.expr.formula.constants.LogicalConstant import gapt.expr.formula.fol.Hol2FolDefinitions import gapt.expr.formula.fol.replaceAbstractions import gapt.expr.formula.hol._ import gapt.expr.ty.->: import gapt.expr.ty.TBase import gapt.expr.ty.Ti import gapt.expr.ty.To import gapt.expr.ty.Ty import gapt.expr.ty.baseTypes import gapt.expr.util.freeVariables import gapt.expr.util.subTerms import gapt.logic.hol.simplifyPropositional import gapt.proofs.{ HOLSequent, Sequent } import gapt.proofs.expansion.{ ETAnd, ETAtom, ETTop, ExpansionProof, ExpansionSequent, ExpansionTree } import gapt.provers.groundFreeVariables sealed class TptpFormulaRole case object TptpAxiom extends TptpFormulaRole { override def toString = "axiom" } case object TptpDefinition extends TptpFormulaRole { override def toString = "definition" } case object TptpConjecture extends TptpFormulaRole { override def toString = "conjecture" } case object TptpNegatedConjecture extends TptpFormulaRole { override def toString = "negated_conjecture" } object TptpHOLExporter extends TptpHOLExporter class TptpHOLExporter { /** * Exports the given FSequent list to the THF fragment of TPTP. The default behavior of the exporter * expects a sequent list in a negative context, i.e. it will encode the refutation of the arguments. * * @note In contrast to prover9, for multiple conjectures, each of them has to be proved. */ private val nLine = sys.props( "line.separator" ) /** * Exports a sequent set as TPTP thf problem to prove unsatisfiability * * @param ls the list of sequents to export * @param filename the filename */ def apply( ls: List[HOLSequent], filename: String ): Unit = write( Path( filename, pwd ), export_negative( ls ) ) /** * Exports a sequent as TPTP thf problem to prove validity * * @param seq the sequent to export * @param filename the filename */ def apply( seq: HOLSequent, filename: String, separate_axioms: Boolean = false ): Unit = write( Path( filename, pwd ), export_positive( seq, separate_axioms ) ) /** * Exports an expansion proof as TPTP thf problem to prove validity * * @param seq the sequent to export * @param filename the filename * @param maximize_axiom_declarations if true, all conjunctions * @param lambda_lifting apply lambda lifting to deep formula and add the definitions into to the antecedent of the formula */ def apply( seq: ExpansionSequent, filename: String, maximize_axiom_declarations: Boolean, lambda_lifting: Boolean ): Unit = write( Path( filename, pwd ), export( seq, maximize_axiom_declarations, lambda_lifting ) ) /** * Exports an expansion proof as TPTP thf problem. The antedent of the * * @param ep the expansion proof to export * @param maximize_axiom_declarations if true, all conjunctions * @param lambda_lifting apply lambda lifting to deep formula and add the definitions into to the antecedent of the formula */ def export( ep: ExpansionSequent, maximize_axiom_declarations: Boolean = true, lambda_lifting: Boolean = false ): String = { val ep1 = if ( maximize_axiom_declarations ) simplify_antecedent( ep ) else ep val es1: HOLSequent = if ( lambda_lifting ) lambda_lift_and_add_definitions( ep1.deep ) else ep1.deep val ( es2, _ ) = groundFreeVariables( es1 ) val es3 = if ( maximize_axiom_declarations ) simplify_antecedent2( es2 ) else es2 //the deep conversion in the antecedent also introduces conjunctions val es4 = es3.map( simplifyPropositional.apply ) //remove top / bottom if possible export_positive( es4, maximize_axiom_declarations ) } /** * Exports a sequent set as TPTP thf problem to prove unsatisfiability * * @param ls the list of sequents to export */ def export_negative( ls: List[HOLSequent] ): String = { require( ls.nonEmpty, "Cannot export an empty sequent list!" ) val ( vs, vnames, cs, cnames ) = createNamesFromSequent( ls ) var index = 0 val types = for ( seq <- ls; f <- seq.elements; st <- subTerms( f ); t <- baseTypes( st.ty ) ) yield t val tdecls = for ( t <- types.distinct if t != Ti && t != To ) yield { index += 1; s"thf($index, type, $t: $$tType).$nLine" } val cdecs_ = for ( c <- cs if c.name != "=" ) yield { index = index + 1 thf_type_dec( index, c, cnames ) + nLine } val cdecs = cdecs_.mkString val sdecs = { val negClauses = Neg( And( ls.map( closedFormula ) ) ) index = index + 1 // since in thf conjectures are seen as conjunction. the negated cnf is one big formula List( thf_formula_dec( index, negClauses, TptpConjecture, vnames, cnames ) ) } s"% type declarations$nLine" + tdecls.mkString + //"% variable type declarations" + nLine + vdecs + "% constant type declarations" + nLine + cdecs + "% sequents" + nLine + sdecs.foldLeft( "" )( ( s, x ) => s + x + nLine ) } /** * Exports a sequent as TPTP thf problem to prove validity * * @param seq the sequent to be proved valid */ def export_positive( seq: HOLSequent, separate_axioms: Boolean = false ): String = { require( freeVariables( seq ).isEmpty, "Can only export ground positive sequent sets!" ) val ( vs, vnames, cs, cnames ) = createNamesFromSequent( seq :: Nil ) var index = 0 val types = for ( f <- seq.elements; st <- subTerms( f ); t <- baseTypes( st.ty ) ) yield t val tdecls = for ( t <- types.distinct if t != Ti && t != To ) yield { index += 1; s"thf($index, type, $t: $$tType).$nLine" } val cdecs_ = for ( c <- cs if c.name != "=" ) yield { index = index + 1 thf_type_dec( index, c, cnames ) + nLine } val cdecs = cdecs_.mkString val sdecs = if ( separate_axioms ) { val axioms = seq.antecedent val goal = Or( seq.succedent ) // work around different ATP's interpretations of multiple conclusions val axiom_decs = for ( fs <- axioms ) yield { index = index + 1 thf_formula_dec( index, fs, TptpAxiom, vnames, cnames ) } ( axiom_decs.foldLeft( "" )( ( s, x ) => s + x + nLine ) + thf_formula_dec( index + 1, goal, TptpConjecture, vnames, cnames ) ) } else { thf_formula_dec( index + 1, seq.toImplication, TptpConjecture, vnames, cnames ) } "% type declarations" + nLine + tdecls.mkString + "% constant type declarations" + nLine + cdecs + "% sequents" + nLine + sdecs } def printStatistics( vnames: NameMap, cnames: CNameMap ): Unit = { if ( cnames.isEmpty && vnames.isEmpty ) { println( "% No symbol translation necessary!" ) return () } println( "% Symbol translation table for THF export:" ) val csyms = cnames.keySet.toList.map( { case Const( s, _, _ ) => s } ) val vsyms = vnames.keySet.toList.map( { case Var( s, _ ) => s } ) val width = ( vsyms ++ csyms ).sortWith( ( x, y ) => y.length < x.length ).head.length for ( ( c, s ) <- cnames ) { val sym = c.name if ( sym != s ) { print( "% " ) print( sym ) for ( i <- sym.length to ( width + 1 ) ) print( " " ) print( " -> " ) print( s ) println() } } val cunchanged = for ( ( c, s ) <- cnames; if c.name == s ) yield { s } if ( cunchanged.nonEmpty ) println( "% Unchanged constants: " + cunchanged.mkString( "," ) ) println( "% " ) for ( ( c, s ) <- vnames ) { val sym = c.name if ( sym != s ) { print( "% " ) print( sym ) for ( i <- sym.length to ( width + 1 ) ) print( " " ) print( " -> " ) print( s ) println() } } val vunchanged = for ( ( c, s ) <- vnames; if c.name == s ) yield { s } if ( vunchanged.nonEmpty ) println( "% Unchanged variables: " + vunchanged.mkString( "," ) ) } type NameMap = Map[Var, String] val emptyNameMap: Map[Var, String] = Map[Var, String]() type CNameMap = Map[Const, String] val emptyCNameMap: Map[Const, String] = Map[Const, String]() def createFormula( f: Expr, map: Map[Var, String] ): String = f match { case Var( _, _ ) => map( f.asInstanceOf[Var] ) } def createNamesFromSequent( l: List[HOLSequent] ): ( List[Var], NameMap, List[Const], CNameMap ) = { val vs = l.foldLeft( Set[Var]() )( ( set, fs ) => getVars( fs.toDisjunction, set ) ).toList val cs = l.foldLeft( Set[Const]() )( ( set, fs ) => getConsts( fs.toDisjunction, set ) ).toList ( vs, createNamesFromVar( vs ), cs, createNamesFromConst( cs ) ) } def createNamesFromVar( l: List[Var] ): NameMap = l.foldLeft( emptyNameMap )( ( map, v ) => { if ( map contains v ) map else { val name = mkVarName( v.name.toString, map ) map + ( ( v, name ) ) } } ) def closedFormula( fs: HOLSequent ): Formula = universalClosure( fs.toDisjunction ) def createNamesFromConst( l: List[Const] ): CNameMap = l.foldLeft( emptyCNameMap )( ( map, v ) => { if ( map contains v ) map else { val name = mkConstName( v.name.toString, map ) map + ( ( v, name ) ) } } ) def thf_formula_dec( i: Int, f: Formula, role: TptpFormulaRole, vmap: NameMap, cmap: CNameMap ): String = { val f_str = thf_formula( f, vmap, cmap, outermost = true ) val internal_str = f.toString.flatMap( { case '\\n' => "\\n% "; case x => s"$x" } ) //add comment after newline s"$nLine% formula: $internal_str ${nLine}thf($i, $role, $f_str )." } private def addparens( str: String, cond: Boolean ) = if ( cond ) "(" + str + ")" else str def thf_formula( f: Expr, vmap: NameMap, cmap: CNameMap, outermost: Boolean = false ): String = { f match { case Top() => "$true" case Bottom() => "$false" case Neg( x ) => addparens( " ~(" + thf_formula( x, vmap, cmap ) + ")", outermost ) //negation of atoms needs parenthesis! case And( x, y ) => addparens( thf_formula( x, vmap, cmap ) + " & " + thf_formula( y, vmap, cmap ), !outermost ) case Or( x, y ) => addparens( thf_formula( x, vmap, cmap ) + " | " + thf_formula( y, vmap, cmap ), !outermost ) case Imp( x, y ) => addparens( thf_formula( x, vmap, cmap ) + " => " + thf_formula( y, vmap, cmap ), !outermost ) case All( x, t ) => addparens( "![" + vmap( x ) + " : " + getTypeString( x.ty ) + "] : (" + thf_formula( t, vmap, cmap ) + ")", !outermost ) case Ex( x, t ) => addparens( "?[" + vmap( x ) + " : " + getTypeString( x.ty ) + "] : (" + thf_formula( t, vmap, cmap ) + ")", !outermost ) case Eq( x, y ) => addparens( thf_formula( x, vmap, cmap ) + " = " + thf_formula( y, vmap, cmap ), !outermost ) case Abs( x, t ) => addparens( "^[" + vmap( x ) + " : " + getTypeString( x.ty ) + "] : (" + thf_formula( t, vmap, cmap ) + ")", !outermost ) case App( s, t ) => addparens( thf_formula( s, vmap, cmap ) + " @ " + thf_formula( t, vmap, cmap ), !outermost ) case Var( _, _ ) => vmap( f.asInstanceOf[Var] ) case NonLogicalConstant( _, _, _ ) => cmap( f.asInstanceOf[Const] ) case _ => throw new Exception( "TPTP export does not support outermost connective of " + f ) } } def thf_type_dec( i: Int, v: Var, vmap: NameMap ): String = { require( vmap.contains( v ), "Did not generate an export name for " + v + "!" ) "thf(" + i + ", type, " + vmap( v ) + ": " + getTypeString( v.ty ) + " )." } def thf_type_dec( i: Int, c: Const, cmap: CNameMap ): String = { require( cmap.contains( c ), "Did not generate an export name for " + c + "!" ) "thf(" + i + ", type, " + cmap( c ) + ": " + getTypeString( c.ty ) + " )." } def getTypeString( t: Ty ): String = getTypeString( t, outer = true ) def getTypeString( t: Ty, outer: Boolean ): String = t match { case Ti => "$i" case To => "$o" case TBase( name, Nil ) => name case t1 ->: t2 if outer => getTypeString( t1, outer = false ) + " > " + getTypeString( t2, outer = false ) case t1 ->: t2 => "(" + getTypeString( t1, outer = false ) + " > " + getTypeString( t2, outer = false ) + ")" case _ => throw new Exception( "TPTP type export for " + t + " not implemented!" ) } def mkVarName( str: String, map: Map[Var, String] ): String = { val fstr_ = str.filter( _.toString.matches( "[a-zA-Z0-9]" ) ) val fstr = if ( fstr_.isEmpty ) { println( "Warning: " + str + " needs to be completely replaced by a fresh variable!" ) "V" } else fstr_ val prefix = if ( fstr.head.isDigit ) "X" + fstr else s"${fstr.head.toUpper}" + fstr.tail val values = map.toList.map( _._2 ) if ( values contains prefix ) appendPostfix( prefix, values ) else prefix } def mkConstName( str: String, map: CNameMap ): String = { val fstr_ = str match { case "=" => "=" //equality is handled explicitly case "+" => "plus" case "-" => "minus" case "*" => "times" case "/" => "div" case "<" => "lt" case ">" => "gt" case _ => str.filter( _.toString.matches( "[a-zA-Z0-9]" ) ) } val fstr = if ( fstr_.isEmpty ) { println( "Warning: " + str + " needs to be completely replaced by a fresh constant!" ) "c" } else fstr_ val prefix = if ( fstr.head.isDigit ) "c" + fstr else fstr.head.toLower.toString + fstr.tail val values = map.toList.map( _._2 ) if ( values contains prefix ) appendPostfix( prefix, values ) else prefix } def appendPostfix( str: String, l: List[String] ): FormulaRole = { var i = 100 while ( l contains ( str + i ) ) { i = i + 1 } str + i } /** extract all variables, bound and free */ def getVars( t: Expr, set: Set[Var] ): Set[Var] = t match { case Const( _, _, _ ) => set case Var( _, _ ) => set + t.asInstanceOf[Var] case App( s, t ) => getVars( s, getVars( t, set ) ) case Abs( x, t ) => getVars( t, set + x ) } def getConsts( t: Expr, set: Set[Const] ): Set[Const] = t match { case EqC( _ ) => set case _: LogicalConstant => set case t @ NonLogicalConstant( _, _, _ ) => set + t case Var( _, _ ) => set case App( s, t ) => getConsts( s, getConsts( t, set ) ) case Abs( x, t ) => getConsts( t, set ) } def simplify_antecedent( es: ExpansionSequent ): ExpansionSequent = { es.antecedent.flatMap( { case ETAnd( e1, e2 ) => List( e1, e2 ) case e => List( e ) } ) match { case ant if ant == es.antecedent => es case ant /* ant !- es.antecedent */ => val ant0 = ant.filterNot( _.deep == Top() ) val et = Sequent( ant0, es.succedent ) simplify_antecedent( et ) } } def simplify_antecedent2( es: HOLSequent ): HOLSequent = { es.antecedent.flatMap( { case And( e1, e2 ) => List( e1, e2 ) case e => List( e ) } ) match { case ant if ant == es.antecedent => es case ant /* ant !- es.antecedent */ => val ant0 = ant.filterNot( _ == Top() ) val et = Sequent( ant0, es.succedent ) simplify_antecedent2( et ) } } def strip_lambdas( e: Expr, context: List[Var] ): ( Expr, List[Var] ) = e match { case Abs( v, t ) => strip_lambdas( t, v :: context ) case t => ( t, context.reverse ) } def lambda_lift_and_add_definitions( seq: HOLSequent ): HOLSequent = { implicit val cmap = new Hol2FolDefinitions() val seq0 = seq.map { replaceAbstractions( _ ) } val qaxioms: Seq[Formula] = cmap.toLegacyMap.toSeq.map { case ( term_, name ) => //term_ should be closed, but to be sure we add the free variables the variables stripped from the outer-most //lambda-block in term_ val fv = freeVariables( term_ ).toList val ( term, all_vars ) = strip_lambdas( term_, fv ) //create the type of q val qtype = all_vars.foldRight( term.ty )( { case ( v, t ) => v.ty ->: t } ) // apply it to the arguments val q_function = Apps( Const( name, qtype ), all_vars ) // build the formula equating it to the stripped term val eq: Formula = Eq( q_function, term ) // and close the formula universally val axiom = all_vars.foldRight( eq ) { case ( v, f ) => All( v, f ) } axiom } qaxioms ++: seq0 } }
gapt/gapt
core/src/main/scala/gapt/formats/tptp/TptpHOLExporter.scala
Scala
gpl-3.0
17,205
package api.route import component.account.domain._ import html._ import api.domain.SessionCookie import api.{ApiRoot, CommonTraits} import spray.http.HttpHeaders.Location import spray.http.{MediaTypes, StatusCodes} private[api] trait Public {self: CommonTraits => val publicRoutes = { get { pathPrefixTest("signin" | "signup") { (sessionCookie | rememberMeCookie) { session => redirect("/", StatusCodes.TemporaryRedirect) // do not show signin or signup page if the user is already logged-in } } ~ path("signin") { complete { page(signin(generateLoginCsrfToken("loginToken"))) } } ~ path("signup") { complete { page(signup()) } } } ~ pathPrefix(ApiRoot) { respondWithMediaType(MediaTypes.`application/json`) { (path("signup") & post) { entity(as[SignUp]) { signUp => onSuccess(accountService.createAccount(signUp)) { case Left(failure) => complete(StatusCodes.Conflict, "The email address you provided is already registered to another account") case Right(acc) => respondWithHeader(Location(s"/$ApiRoot/account/${acc.id.get}")) { setSession(SessionCookie(data = Map("id" -> acc.id.get.toString), path = Some("/"))) { complete(StatusCodes.Created, acc) } } } } } ~ (path("signin") & post) { entity(as[SignIn]) { signIn => verifyLoginCsrfToken { optionalClientIP { ipOpt => authenticateUser(signIn, ipOpt) { account => val sessionCookie = SessionCookie(data = Map("id" -> account.id.get.toString), path = Some("/")) setSession(sessionCookie) { complete(account) if (signIn.rememberMe) { // Set remember-me cookie *if* it was set on the sign-in form setRememberMe(sessionCookie) { complete(account) } } else complete( account) } } } } } } } } } }
onurzdg/spray-app
src/main/scala/api/route/Public.scala
Scala
apache-2.0
2,309
/* * Copyright 2015 [See AUTHORS file for list of authors] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.{File, PrintWriter} import java.util.Random import it.unimi.dsi.webgraph.BVGraph /** * Randomly permutes a BVGraph */ object RandPermWebgraph { def main(args: Array[String]) = { val argmap = args.map { a => val argPair = a.split("=") val name = argPair(0).toLowerCase val value = argPair(1) (name, value) }.toMap // Default options val basename = argmap.getOrElse("inputfile", "data/uk-2002") val randSeed = argmap.getOrElse("randseed", "43").toInt val gen = new Random(randSeed) val g = BVGraph.load(basename) val nVertices = g.numNodes() // randPerm maps old to new : randPerm(old) = new // revIndex maps new to old : revIndex(new) = old val randPerm = (0 until nVertices).toArray shuffle(randPerm, gen) val revIndex = new Array[Int](nVertices) var ii = 0 while (ii < nVertices){ //println(s"$ii -> ${randPerm(ii)}") revIndex(randPerm(ii)) = ii ii += 1 } val asciiWriter = new PrintWriter(new File(basename + "_rand.graph-txt")) asciiWriter.print(s"$nVertices\\n") var i = 0 while (i < nVertices){ var adj = g.successorArray(revIndex(i)) var j = 0 while (j < adj.length){ adj(j) = randPerm(adj(j)) j += 1 } adj = adj.sorted j = 0 while (j < adj.length - 1){ asciiWriter.print(adj(j) + " ") j += 1 } if (adj.length > 0) asciiWriter.print(adj(j)) asciiWriter.println() i += 1 } asciiWriter.close() BVGraph.main(Array("-g", "ASCIIGraph", basename + "_rand", basename + "_rand")) BVGraph.main(Array("-o", "-O", "-L", basename + "_rand")) } def shuffle[T](array: Array[T], rnd: Random): Array[T] = { for (n <- Iterator.range(array.length - 1, 0, -1)) { val k = rnd.nextInt(n + 1) val t = array(k); array(k) = array(n); array(n) = t } array } }
pxinghao/ParallelSubmodularMaximization
src/main/scala/RandPermWebgraph.scala
Scala
apache-2.0
2,590
package com.digdeep.util package iglu package client import java.util.concurrent.TimeUnit import com.fasterxml.jackson.databind.JsonNode import com.google.common.cache.CacheBuilder import com.snowplowanalytics.iglu.client._ import org.apache.commons.logging.LogFactory import scalaz.{Success, Failure} import collection.JavaConverters._ /** * Created by denismo on 5/10/15. */ class ErrorCachingResolver(cacheSize: Int = 500, repos: RepositoryRefs, delegate: Resolver) extends Resolver(cacheSize, repos) { val log = LogFactory.getLog(classOf[ErrorCachingResolver]) val errorCache = CacheBuilder.newBuilder().expireAfterWrite(30, TimeUnit.MINUTES).build[SchemaKey, ValidatedNel[JsonNode]]() def cacheError(schemaKey: SchemaKey, res: ValidatedNel[JsonNode]): ValidatedNel[JsonNode] = { errorCache.put(schemaKey, res) res } override def lookupSchema(schemaKey: SchemaKey): ValidatedNel[JsonNode] = { val stored = errorCache.getIfPresent(schemaKey) if (stored != null) { return stored } val res = delegate.lookupSchema(schemaKey) res match { case Failure(e) => if (e.head.getMessage.startsWith("Could not find schema with key ")) { cacheError(schemaKey, res) } else { res } case Success(s) => res } } }
jramos/snowplow
4-storage/kinesis-redshift-sink/src/main/scala/com.digdeep.util/iglu/client/ErrorCachingResolver.scala
Scala
apache-2.0
1,340
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.selfservicetimetopay.models import java.time.LocalDate import play.api.libs.json.{Format, Json} import timetopaytaxpayer.cor.model.Taxpayer final case class EligibilityRequest( dateOfEligibilityCheck: LocalDate, taxpayer: Taxpayer ) object EligibilityRequest { implicit val format: Format[EligibilityRequest] = Json.format[EligibilityRequest] }
hmrc/self-service-time-to-pay-frontend
app/uk/gov/hmrc/selfservicetimetopay/models/EligibilityRequest.scala
Scala
apache-2.0
998
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // scalastyle:off println package org.apache.spark.examples.mllib import org.apache.spark.{SparkConf, SparkContext} // $example on$ import org.apache.spark.mllib.fpm.PrefixSpan // $example off$ object PrefixSpanExample { def main(args: Array[String]) { val conf = new SparkConf().setAppName("PrefixSpanExample") val sc = new SparkContext(conf) // $example on$ val sequences = sc.parallelize(Seq( Array(Array(1, 2), Array(3)), Array(Array(1), Array(3, 2), Array(1, 2)), Array(Array(1, 2), Array(5)), Array(Array(6)) ), 2).cache() val prefixSpan = new PrefixSpan() .setMinSupport(0.5) .setMaxPatternLength(5) val model = prefixSpan.run(sequences) model.freqSequences.collect().foreach { freqSequence => println( freqSequence.sequence.map(_.mkString("[", ", ", "]")).mkString("[", ", ", "]") + ", " + freqSequence.freq) } // $example off$ } } // scalastyle:off println
mrchristine/spark-examples-dbc
src/main/scala/org/apache/spark/examples/mllib/PrefixSpanExample.scala
Scala
apache-2.0
1,776
/* * Part of NDLA article-api. * Copyright (C) 2016 NDLA * * See LICENSE * */ package no.ndla.articleapi.model.api import org.scalatra.swagger.annotations.{ApiModel, ApiModelProperty} import scala.annotation.meta.field @ApiModel(description = "Information about an author") case class Author(@(ApiModelProperty @field)( description = "The description of the author. Eg. Photographer or Supplier") `type`: String, @(ApiModelProperty @field)(description = "The name of the of the author") name: String)
NDLANO/article-api
src/main/scala/no/ndla/articleapi/model/api/Author.scala
Scala
gpl-3.0
548
package fr.laas.fape.anml.model.ir import fr.laas.fape.anml.model.abs.AbstractChronicle import fr.laas.fape.anml.model.abs.statements.AbstractStatement class LeafChronicleGroup(val chronicle: AbstractChronicle) extends AbstractChronicleGroup { override def firsts: List[AbstractStatement] = chronicle.getStatements override def lasts: List[AbstractStatement] = chronicle.getStatements override def statements = chronicle.getStatements }
athy/fape
anml-parser/src/main/scala/fr/laas/fape/anml/model/ir/LeafChronicleGroup.scala
Scala
bsd-2-clause
445
package mock import com.intuit.karate.gatling.PreDef._ import io.gatling.core.Predef._ import scala.concurrent.duration._ class CatsKarateSimulation extends Simulation { MockUtils.startServer() val feeder = Iterator.continually(Map("catName" -> MockUtils.getNextCatName)) val protocol = karateProtocol( "/cats/{id}" -> Nil, "/cats" -> pauseFor("get" -> 15, "post" -> 25) ) protocol.nameResolver = (req, ctx) => req.getHeader("karate-name") val create = scenario("create").feed(feeder).exec(karateFeature("classpath:mock/cats-create.feature")) val delete = scenario("delete").group("delete cats") { exec(karateFeature("classpath:mock/cats-delete.feature@name=delete")) } val custom = scenario("custom").exec(karateFeature("classpath:mock/custom-rpc.feature")) setUp( create.inject(rampUsers(10) during (5 seconds)).protocols(protocol), delete.inject(rampUsers(5) during (5 seconds)).protocols(protocol), custom.inject(rampUsers(10) during (5 seconds)).protocols(protocol) ) }
intuit/karate
examples/gatling/src/test/java/mock/CatsKarateSimulation.scala
Scala
mit
1,030
object Test { def testC { val f1 = (c: C) => c.value val f2 = (x: Int) => new C(x) val f3 = (c1: C) => (c2: C) => (c1, c2) val r1 = f2(2) val r2 = f2(2) val r3 = f3(r1)(r2) val result = f1(r3._2) assert(result == 2) } def testD { val f1 = (c: D) => c.value val f2 = (x: String) => new D(x) val f3 = (c1: D) => (c2: D) => (c1, c2) val r1 = f2("2") val r2 = f2("2") val r3 = f3(r1)(r2) val result = f1(r3._2) assert(result == "2") } def testE { val f1 = (c: E[Int]) => c.value val f2 = (x: Int) => new E(x) val f3 = (c1: E[Int]) => (c2: E[Int]) => (c1, c2) val r1 = f2(2) val r2 = f2(2) val r3 = f3(r1)(r2) val result = f1(r3._2) assert(result == 2) } def main(args: Array[String]) { testC testD testE } }
felixmulder/scala
test/files/run/t8017/value-class-lambda.scala
Scala
bsd-3-clause
831
package gh3.events import gh3._ import gh3.models.{GH3Sender, GH3Repository, GH3IssueComment, GH3Issue} import net.liftweb.json.JsonAST.JValue case class IssueCommentEvent(action: String, issue: GH3Issue, comment: GH3IssueComment, repository: GH3Repository, sender: GH3Sender ) extends GH3Event object IssueCommentEvent extends GH3EventParser { def apply(json: JValue): Option[IssueCommentEvent] = { val n2s = node2String(json)(_) val action = n2s("action") val issue = GH3Issue(json \\ "issue") val comment = GH3IssueComment(json \\ "comment") val repository = GH3Repository(json \\ "repository") val sender = GH3Sender(json \\ "sender") val params = Seq(action, issue, comment, repository, sender) if(params.forall(_.isDefined)) Some(new IssueCommentEvent(action.get, issue.get, comment.get, repository.get, sender.get)) else None } }
mgoeminne/github_etl
src/main/scala/gh3/events/IssueCommentEvent.scala
Scala
mit
1,054
/* * Copyright © 2015 Lukas Rosenthaler, Benjamin Geer, Ivan Subotic, * Tobias Schweizer, André Kilchenmann, and Sepideh Alassi. * This file is part of Knora. * Knora is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * Knora is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public * License along with Knora. If not, see <http://www.gnu.org/licenses/>. */ package org.knora.webapi.e2e.v1 import java.net.URLEncoder import akka.actor.{ActorSystem, Props} import akka.http.scaladsl.model.headers.BasicHttpCredentials import akka.http.scaladsl.model.{ContentTypes, HttpEntity, StatusCodes} import akka.http.scaladsl.testkit.RouteTestTimeout import akka.pattern._ import akka.util.Timeout import org.knora.webapi.messages.v1.responder.ontologymessages.LoadOntologiesRequest import org.knora.webapi.messages.v1.store.triplestoremessages.{RdfDataObject, ResetTriplestoreContent} import org.knora.webapi.responders._ import org.knora.webapi.responders.v1.ResponderManagerV1 import org.knora.webapi.routing.v1.ValuesRouteV1 import org.knora.webapi.store._ import org.knora.webapi.util.{AkkaHttpUtils, MutableTestIri} import org.knora.webapi.{IRI, LiveActorMaker, R2RSpec, SharedAdminTestData} import org.knora.webapi.messages.v1.responder.valuemessages.ApiValueV1JsonProtocol._ import spray.json._ import scala.concurrent.Await import scala.concurrent.duration._ /** * Tests the values route. */ class ValuesV1R2RSpec extends R2RSpec { override def testConfigSource = """ # akka.loglevel = "DEBUG" # akka.stdout-loglevel = "DEBUG" """.stripMargin private val responderManager = system.actorOf(Props(new ResponderManagerV1 with LiveActorMaker), name = RESPONDER_MANAGER_ACTOR_NAME) private val storeManager = system.actorOf(Props(new StoreManager with LiveActorMaker), name = STORE_MANAGER_ACTOR_NAME) private val valuesPath = ValuesRouteV1.knoraApiPath(system, settings, log) private val incunabulaUser = SharedAdminTestData.incunabulaProjectAdminUser implicit val timeout: Timeout = settings.defaultRestoreTimeout implicit def default(implicit system: ActorSystem) = RouteTestTimeout(new DurationInt(15).second) private val integerValueIri = new MutableTestIri private val textValueIri = new MutableTestIri private val linkValueIri = new MutableTestIri private val boringComment = "This is a boring comment." private val rdfDataObjects = List( RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/anything") ) private val anythingUser = SharedAdminTestData.anythingUser1 private val anythingUserEmail = anythingUser.userData.email.get private val testPass = "test" private val mappingIri = "http://data.knora.org/projects/standoff/mappings/StandardMapping" "Load test data" in { Await.result(storeManager ? ResetTriplestoreContent(rdfDataObjects), 300.seconds) Await.result(responderManager ? LoadOntologiesRequest(incunabulaUser), 10.seconds) } "The Values Endpoint" should { "add an integer value to a resource" in { val params = """ |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasInteger", | "int_value": 1234 |} """.stripMargin Post("/v1/values", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields val valueIri: IRI = responseJson("id").asInstanceOf[JsString].value integerValueIri.set(valueIri) } } "change an integer value" in { val params = """ |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasInteger", | "int_value": 4321 |} """.stripMargin Put(s"/v1/values/${URLEncoder.encode(integerValueIri.get, "UTF-8")}", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields val valueIri: IRI = responseJson("id").asInstanceOf[JsString].value integerValueIri.set(valueIri) } } "mark an integer value as deleted" in { Delete(s"/v1/values/${URLEncoder.encode(integerValueIri.get, "UTF-8")}?deleteComment=deleted%20for%20testing") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) } } "get a link value" in { Get(s"/v1/links/${URLEncoder.encode("http://data.knora.org/contained-thing-1", "UTF-8")}/${URLEncoder.encode("http://www.knora.org/ontology/anything#isPartOfOtherThing", "UTF-8")}/${URLEncoder.encode("http://data.knora.org/containing-thing", "UTF-8")}") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val linkValue = AkkaHttpUtils.httpResponseToJson(response).fields("value").asJsObject.fields assert( linkValue("subjectIri").asInstanceOf[JsString].value == "http://data.knora.org/contained-thing-1" && linkValue("predicateIri").asInstanceOf[JsString].value == "http://www.knora.org/ontology/anything#isPartOfOtherThing" && linkValue("objectIri").asInstanceOf[JsString].value == "http://data.knora.org/containing-thing" && linkValue("referenceCount").asInstanceOf[JsNumber].value.toInt == 1 ) } } "not add an empty text value to a resource" in { val params = """ |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasText", | "richtext_value": {"utf8str":""} |} """.stripMargin Post("/v1/values", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.BadRequest, response.toString) } } "add a text value containing a standoff reference to another resource" in { val xmlStr = """<?xml version="1.0" encoding="UTF-8"?> |<text> | This text links to another <a class="salsah-link" href="http://data.knora.org/another-thing">resource</a>. |</text> """.stripMargin val params = s""" |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasText", | "richtext_value": {"xml": ${xmlStr.toJson.compactPrint}, "mapping_id": "$mappingIri"} |} """.stripMargin Post("/v1/values", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields // check for standoff link in value creation response assert(responseJson("value").asInstanceOf[JsObject].fields("xml").toString.contains("http://data.knora.org/another-thing"), "standoff link target is not contained in value creation response") val valueIri: IRI = responseJson("id").asInstanceOf[JsString].value textValueIri.set(valueIri) } } "change a text value containing a standoff reference to another resource" in { val xmlStr = """<?xml version="1.0" encoding="UTF-8"?> |<text> | This new version of the text links to another <a class="salsah-link" href="http://data.knora.org/a-thing-with-text-values">resource</a>. |</text> """.stripMargin val params = s""" |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasText", | "richtext_value": {"xml": ${xmlStr.toJson.compactPrint}, "mapping_id": "$mappingIri"} |} """.stripMargin Put(s"/v1/values/${URLEncoder.encode(textValueIri.get, "UTF-8")}", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields // check for standoff link in value creation response assert(responseJson("value").asInstanceOf[JsObject].fields("xml").toString.contains("http://data.knora.org/a-thing-with-text-values"), "standoff link target is not contained in value creation response") val valueIri: IRI = responseJson("id").asInstanceOf[JsString].value textValueIri.set(valueIri) } } "get the version history of a value" in { Get(s"/v1/values/history/${URLEncoder.encode("http://data.knora.org/a-thing", "UTF-8")}/${URLEncoder.encode("http://www.knora.org/ontology/anything#hasText", "UTF-8")}/${URLEncoder.encode(textValueIri.get, "UTF-8")}") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val versionHistory: JsValue = AkkaHttpUtils.httpResponseToJson(response).fields("valueVersions") val (mostRecentVersion, originalVersion) = versionHistory match { case JsArray(Vector(mostRecent, original)) => (mostRecent.asJsObject.fields, original.asJsObject.fields) } assert(mostRecentVersion("previousValue").asInstanceOf[JsString].value == originalVersion("valueObjectIri").asInstanceOf[JsString].value) assert(originalVersion("previousValue") == JsNull) } } "mark as deleted a text value containing a standoff reference to another resource" in { Delete(s"/v1/values/${URLEncoder.encode(textValueIri.get, "UTF-8")}?deleteComment=deleted%20for%20testing") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) } } "add a link value to a resource" in { val params = """ |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasOtherThing", | "link_value": "http://data.knora.org/another-thing" |} """.stripMargin Post("/v1/values", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields val valueIri: IRI = responseJson("id").asInstanceOf[JsString].value linkValueIri.set(valueIri) } } "mark a link value as deleted" in { Delete(s"/v1/values/${URLEncoder.encode(linkValueIri.get, "UTF-8")}?deleteComment=deleted%20for%20testing") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) } } "add a link value with a comment to a resource" in { val params = s""" |{ | "res_id": "http://data.knora.org/a-thing", | "prop": "http://www.knora.org/ontology/anything#hasOtherThing", | "link_value": "http://data.knora.org/another-thing", | "comment":"$boringComment" |} """.stripMargin Post("/v1/values", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseJson: Map[String, JsValue] = responseAs[String].parseJson.asJsObject.fields val valueIri: IRI = responseJson("id").asInstanceOf[JsString].value linkValueIri.set(valueIri) } } "get a link value with a comment" in { Get(s"/v1/links/${URLEncoder.encode("http://data.knora.org/a-thing", "UTF-8")}/${URLEncoder.encode("http://www.knora.org/ontology/anything#hasOtherThing", "UTF-8")}/${URLEncoder.encode("http://data.knora.org/another-thing", "UTF-8")}") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, testPass)) ~> valuesPath ~> check { assert(status == StatusCodes.OK, response.toString) val responseObj = AkkaHttpUtils.httpResponseToJson(response).fields val comment = responseObj("comment").asInstanceOf[JsString].value val linkValue = responseObj("value").asJsObject.fields assert( linkValue("subjectIri").asInstanceOf[JsString].value == "http://data.knora.org/a-thing" && linkValue("predicateIri").asInstanceOf[JsString].value == "http://www.knora.org/ontology/anything#hasOtherThing" && linkValue("objectIri").asInstanceOf[JsString].value == "http://data.knora.org/another-thing" && linkValue("referenceCount").asInstanceOf[JsNumber].value.toInt == 1 && comment == boringComment ) } } } }
nie-ine/Knora
webapi/src/test/scala/org/knora/webapi/e2e/v1/ValuesV1R2RSpec.scala
Scala
agpl-3.0
15,548
package scalanlp.util import scala.collection.JavaConversions._; /** * caches for memoized functions * @author dramage */ trait Cache[K,V] extends PartialFunction[K,V] { } import java.lang.ref.SoftReference; import scala.collection.mutable.Map; /** * Provides a cache where both keys and values are only weakly referenced * allowing garbage collection of either at any time, backed by a WeakHashMap. * * This is currently a direct port of a corresponding Java class from JavaNLP, * but could well be adapted to be a scala map at some point. * * @author dramage */ class SoftMemCache[K,V] extends Map[K,V] with Cache[K,V] { /** cache of values */ protected val inner = new java.util.HashMap[HashableSoftReference, SoftReference[Option[V]]]; /** queue of objects to remove */ protected val removalQueue = new scala.collection.mutable.Queue[HashableSoftReference]; /** Removes all objects in the removal queue */ protected def dequeue() = { while (!removalQueue.isEmpty) { inner.remove(removalQueue.dequeue); } } /** * Resolves the soft reference, returning None if the reference * has dissappeared or Some(value) or Some(null) depending on whether * null was the stored value. */ private def resolve(key : K, ref : SoftReference[Option[V]]) : Option[V] = { val got = ref.get; if (ref.get == null) { // value has been gc'd, free key inner.remove(new HashableSoftReference(key)); None } else { got match { case Some(value) => Some(value); case None => Some(null.asInstanceOf[V]); } } } override def clear = { dequeue(); removalQueue.clear; inner.clear(); } override def contains(key : K) = { dequeue(); inner.containsKey(new HashableSoftReference(key)); } /** * Returns the value currently associated with the given key if one * has been set with put and not been subsequently garbage collected. */ override def get(key : K) : Option[V] = { dequeue(); val ref = inner.get(new HashableSoftReference(key)); if (ref != null) { resolve(key, ref); } else { None; } }; /** * Returns the expected size of the cache. Note that this may over-report * as objects may have been garbage collected. */ override def size() : Int = { dequeue(); inner.size; } /** * Iterates the elements of the cache that are currently present. */ override def iterator : Iterator[(K,V)] = { dequeue(); for (pair <- inner.entrySet.iterator; k = pair.getKey.get; v = resolve(k, pair.getValue); if k != null && v != None) yield (k, v.asInstanceOf[Some[V]].get); } /** * Associates the given key with a weak reference to the given value. * Either key or value or both may be garbage collected at any point. * Returns the previously associated value or null if none was * associated. Value must be non-null. */ override def update(key : K, value : V) : Unit = { dequeue(); inner.put(new HashableSoftReference(key), new SoftReference(Some(value))); } /** * Removes the given pair to the map. */ override def +=(k : (K,V)) : this.type = { dequeue(); update(k._1,k._2); this } /** * Removes the given key from the map. */ override def -=(key : K) : this.type = { dequeue(); inner.remove(new HashableSoftReference(key)); this } /** * A SoftReference with equality and hashcode based on the underlying * object. Automatically removes itself from the containing map if the * reference has been gc'd. * * @author dramage */ class HashableSoftReference(ref : SoftReference[K], hash : Int) { def this(key : K) = this(new SoftReference(key), key.hashCode); var removing = false; def get = { val got = ref.get; if (!removing && got == null) { removing = true; SoftMemCache.this.removalQueue += this; } got; } override def hashCode = hash; override def equals(other : Any) = { if (other.isInstanceOf[HashableSoftReference]) { val otherref = other.asInstanceOf[HashableSoftReference]; (this eq otherref) || (this.get == otherref.get); } else { false; } } } }
MLnick/scalanlp-core
data/src/main/scala/scalanlp/util/Cache.scala
Scala
apache-2.0
4,357
package memnets.awt import java.awt.Color import memnets.model.GradientHints import memnets.ui.YGradientMap case class YGradientAwt(var hints: GradientHints = GradientHints()) extends YGradientMap[Color] { def apply(i: Int, length: Int): Col = { val hintz = hints import hintz._ val maxlen = Math.min(length, maxLength) Color.getHSBColor( ((hue + i * (spectrum / Math.max(minDivs, maxlen))) % spectrum) .asInstanceOf[Float] / 360.0f, saturation, brightness) } }
MemoryNetworks/memnets
awt/src/main/scala/memnets/awt/YGradientAwt.scala
Scala
apache-2.0
511
package controllers import models.User import play.api.Routes import play.api.libs.json.Json import play.api.mvc.Action import securesocial.core._ case class Message(value: String) class MessageController(override implicit val env: RuntimeEnvironment[User]) extends SecureSocial[User] { implicit val fooWrites = Json.writes[Message] def index() = UserAwareAction { implicit request => val user = request.user Ok(views.html.index("hi user " + user, "")) } def getMessage = Action { Ok(Json.toJson(Message("Hello from Scala"))) } // def javascriptRoutes = Action { implicit request => // import controllers.javascript.routes.CustomLoginController.LoginPage // Ok(Routes.javascriptRouter("jsRoutes")(LoginPage.login)).as(JAVASCRIPT) // } def search = index() // UserAwareAction { // implicit request => // val query = request.getQueryString("query") // // } def search2(query: String) = index() }
jamesward/play2.3-scala2.11-securesocial3-slick2.1-h2-bootstrap3-sample
app/controllers/MessageController.scala
Scala
apache-2.0
960
package org.macrogl import scala.concurrent._ import ExecutionContext.Implicits.global import javax.imageio.ImageIO import java.util.concurrent.ConcurrentLinkedQueue import org.lwjgl.opengl._ import java.io.InputStream import java.io.BufferedReader import java.io.InputStreamReader import java.io.ByteArrayOutputStream object Utils { /** * Specifics when using the LWJGL back end with the JVM. * Not accessible when not using the JVM. */ object LWJGLSpecifics { private val pendingTaskList = new ConcurrentLinkedQueue[() => Unit] /** * Flush all the pending tasks. * You don't need to explicitly use this method if you use the FrameListener loop system. * Warning: this should be called only from the OpenGL thread! */ def flushPendingTaskList(): Unit = { var current: () => Unit = null while ({ current = pendingTaskList.poll(); current } != null) { current() } } /** * Add a task to be executed by the OpenGL thread. * Tasks are usually executed at the beginning of the next iteration of the FrameListener loop system. */ def addPendingTask(task: () => Unit): Unit = { pendingTaskList.add(task) } } /** * Specifics when using the WebGL back end with Scala.js. * Not accessible when not using Scala.js. */ def WebGLSpecifics: Nothing = throw new UnsupportedOperationException("Available only when using the Scala.js platform") private def inputStreamForResource(resourceName: String): InputStream = { this.getClass().getResourceAsStream(resourceName) } private val notFoundMsg = "Resource not found" /** * Load a image from the resources into an OpenGL 2D texture. * The textures are stored using the 32 bits per pixel RGBA format. * Supported images format: PNG, JPEG, BMP and GIF. * * @param resourceName The Fully qualified path of the resource image * @param texture The token of the texture where the decoded texture have to be loaded * @param gl The Macrogl instance to use to load the texture into OpenGL * @param preload Optional function called by the OpenGL thread after the image has been decoded but before it is loaded into OpenGL. * A returned value false means aborting the texture loading */ def loadTexture2DFromResources(resourceName: String, texture: Token.Texture, preload: => Boolean = true)(implicit gl: Macrogl): Unit = { // TODO should we have our own ExecutionContext? Future { try { val stream = inputStreamForResource(resourceName) if(stream == null) throw new RuntimeException(notFoundMsg) // Should support JPEG, PNG, BMP, WBMP and GIF val image = ImageIO.read(stream) val height = image.getHeight() val width = image.getWidth() val byteBuffer = Macrogl.createByteData(4 * width * height) // Stored as RGBA value: 4 bytes per pixel val tmp = new Array[Byte](4) var y = height - 1 while (y >= 0) { var x = 0 while (x < width) { val argb = image.getRGB(x, y) tmp(2) = argb.toByte // blue tmp(1) = (argb >> 8).toByte // green tmp(0) = (argb >> 16).toByte // red tmp(3) = (argb >> 24).toByte // alpha byteBuffer.put(tmp) x += 1 } y -= 1 } stream.close() byteBuffer.rewind // Don't load it now, we want it done synchronously in the main loop to avoid concurrency issue LWJGLSpecifics.addPendingTask({ () => if (preload) { val previousTexture = gl.getParameterTexture(Macrogl.TEXTURE_BINDING_2D) gl.bindTexture(Macrogl.TEXTURE_2D, texture) gl.texImage2D(Macrogl.TEXTURE_2D, 0, Macrogl.RGBA, width, height, 0, Macrogl.RGBA, Macrogl.UNSIGNED_BYTE, byteBuffer) gl.bindTexture(Macrogl.TEXTURE_2D, previousTexture) } }) } catch { case e: Throwable => org.macrogl.Utils.err.println("Error during the loading of texture resource \"" + resourceName + "\": " + e.getMessage()) } } } /** * Asynchronously load a text file from the resources and pass it to the provided callback. * * @param resourceName The Fully qualified path of the resource * @param callback The function to call once the data are in memory */ def getTextFileFromResources(resourceName: String)(callback: Array[String] => Unit): Unit = { Future { try { val stream = inputStreamForResource(resourceName) if(stream == null) throw new RuntimeException(notFoundMsg) val streamReader = new InputStreamReader(stream) val reader = new BufferedReader(streamReader) val buffer = new scala.collection.mutable.ArrayBuffer[String]() var line: String = null while ({ line = reader.readLine(); line } != null) { buffer += line } reader.close() streamReader.close() stream.close() val lines = buffer.toArray LWJGLSpecifics.addPendingTask({ () => callback(lines) }) } catch { case e: Throwable => org.macrogl.Utils.err.println("Error during the loading of text resource \"" + resourceName + "\": " + e.getMessage()) } } } /** * Asynchronously load a binary file from the resources and pass it to the provided callback. * * @param resourceName The Fully qualified path of the resource * @param callback The function to call once the data are in memory */ def getBinaryFileFromResources(resourceName: String)(callback: org.macrogl.Data.Byte => Unit): Unit = { Future { try { val stream = inputStreamForResource(resourceName) if(stream == null) throw new RuntimeException(notFoundMsg) val byteStream = new ByteArrayOutputStream() val tmpData: Array[Byte] = new Array[Byte](1024) // 1KB of temp data var tmpDataContentSize: Int = 0 while ({ tmpDataContentSize = stream.read(tmpData); tmpDataContentSize } >= 0) { byteStream.write(tmpData, 0, tmpDataContentSize) } stream.close() val byteArray = byteStream.toByteArray() val byteBuffer = org.macrogl.Macrogl.createByteData(byteArray.length) byteBuffer.put(byteArray) byteBuffer.rewind() LWJGLSpecifics.addPendingTask({ () => callback(byteBuffer) }) } catch { case e: Throwable => org.macrogl.Utils.err.println("Error during the loading of text resource \"" + resourceName + "\": " + e.getMessage()) } } } /** * Start the FrameListener into a separate thread while the following logical flow: * {{{ * val fl:FrameListener = ... * fl.init * while(fl.continue) { * fl.render * } * fl.close * }}} */ def startFrameListener(fl: FrameListener): Unit = { val frameListenerThread = new Thread(new Runnable { def run() { var lastLoopTime: Long = System.nanoTime() fl.init while (fl.continue) { LWJGLSpecifics.flushPendingTaskList() val currentTime: Long = System.nanoTime() val diff = ((currentTime - lastLoopTime) / 1e9).toFloat lastLoopTime = currentTime val frameEvent = FrameEvent(diff) fl.render(frameEvent) } fl.close } }) // Start listener frameListenerThread.start() } object out { def println(msg: Any): Unit = { System.out.println(if(msg != null) msg.toString() else "<null>") } } object err { def println(msg: Any): Unit = { System.err.println(if(msg != null) msg.toString() else "<null>") } } }
storm-enroute/macrogl
src/api-opengl/scala/org/macrogl/Utils.scala
Scala
bsd-3-clause
7,795
package org.hablapps.twitter.account.list import org.hablapps.{ updatable, react, speech, twitter} import updatable._ import language.reflectiveCalls object TwitterList{ trait State { self: speech.Program with twitter.State => trait TwitterList extends Interaction { type This = TwitterList type Context = Account type ContextCol[x] = Option[x] type Member = Agent type MemberCol[x] = List[x] type Action = SocialAction } implicit val TwitterList = builder[TwitterList] } }
hablapps/app-twitter
src/main/scala/org/hablapps/twitter/account/list/TwitterList.scala
Scala
apache-2.0
546
package masterleague4s package instances import org.scalacheck.Arbitrary import org.scalacheck.Arbitrary._ import org.scalacheck.Gen import masterleague4s.data._ import spinoco.protocol.http._ import spinoco.protocol.http.Uri._ //import org.scalacheck.ScalacheckShapeless._ object Generators { implicit val arbScheme: Arbitrary[HttpScheme.Value] = Arbitrary( Gen.oneOf(HttpScheme.HTTP, HttpScheme.HTTPS, HttpScheme.WS, HttpScheme.WSS)) implicit val arbHP: Arbitrary[HostPort] = Arbitrary(for { host <- arbitrary[String] port <- arbitrary[Option[Int]] } yield HostPort(host, port)) implicit val arbPath: Arbitrary[Path] = Arbitrary(for { init <- arbitrary[Boolean] trailing <- arbitrary[Boolean] segments <- arbitrary[List[String]] } yield Path(init, trailing, segments)) implicit val arbQuery: Arbitrary[Query] = Arbitrary(for { params <- arbitrary[List[(String, String)]] //allows only alphabetically ordered params by key with no duplicate //this is not spec-enforced nor generally valid //but it is for the use-case } yield Query((params.groupBy(_._1).toList.collect { case (_, (k, v) :: _) => (k, v) }).sortBy(_._1))) implicit val arbUri: Arbitrary[Uri] = Arbitrary(for { scheme <- arbitrary[HttpScheme.Value] host <- arbitrary[HostPort] path <- arbitrary[Path] query <- arbitrary[Query] } yield Uri(scheme, host, path, query)) implicit def arbPortrait: Arbitrary[HeroPortrait] = Arbitrary(for { small <- arbitrary[Uri] medium <- arbitrary[Uri] } yield HeroPortrait(small, medium)) implicit def arbherof[A: Arbitrary]: Arbitrary[HeroF[A]] = Arbitrary(for { name <- arbitrary[String] role <- arbitrary[A] url <- arbitrary[Uri] portrait <- arbitrary[HeroPortrait] } yield HeroF(name, role, url, portrait)) import masterleague4s.net.APIError implicit def arbThrottled: Arbitrary[APIError] = Arbitrary(for { cause <- arbitrary[String] } yield APIError(cause)) import masterleague4s.net.authorization.Token implicit def arbToken: Arbitrary[Token] = Arbitrary(for { value <- arbitrary[String] } yield Token(value)) }
martijnhoekstra/masterleague4s
src/test/scala/discipline/data/Generators.scala
Scala
gpl-3.0
2,194
// AORTA is copyright (C) 2012 Dustin Carlino, Mike Depinet, and Piyush // Khandelwal of UT Austin // License: GNU GPL v2 package utexas.aorta.ui import swing._ // TODO figure out exactly what import swing.event._ import java.awt.{Color, RenderingHints, Polygon} import java.awt.geom.{Rectangle2D, RoundRectangle2D} import utexas.aorta.map.Coordinate import utexas.aorta.common.Util // TODO we can't hear the tab key until we figure out how to // have 'with Component.SuperMixin' so we can do setFocusTraversalEnabled(false) // TODO for now, we'll implement the free-hand drawing here, but eventually it // makes more sense as some kind of nifty mixin thing. // x, y are in map-space, not screen-space case class Tooltip(x: Double, y: Double, lines: List[String], dark: Boolean) // SuperMixin lets us get tab back abstract class ScrollingCanvas extends Component { preferredSize = new Dimension(Int.MaxValue, Int.MaxValue) override def focusable = true // for keys to work // this defines the current viewing window. these values are arbitrary; // reset_window will clobber them. var zoom = 1.0 protected var x_off, y_off = 0.0 reset_window() // this defines controls // TODO cfg protected var key_speed = 10 protected var zoom_step = 0.1 protected var zoom_mult = 10 // it's kind of weird why this isn't just ^ // translate between screen and map coordinate systems // define (x_off, y_off) to be the top-left corner of the screen def screen_to_map_x(x: Double) = (x + x_off) / zoom def screen_to_map_y(y: Double) = (y + y_off) / zoom def map_to_screen_x(x: Double) = (x * zoom) - x_off def map_to_screen_y(y: Double) = (y * zoom) - y_off // react to mouse events, tracking various parameters protected var mouse_at_x, mouse_at_y = 0.0 protected var click_x, click_y = 0.0 listenTo(mouse.moves) listenTo(mouse.clicks) listenTo(mouse.wheel) listenTo(keys) // TODO work damnit! // Free-hand drawing stuff protected var drawing_mode = false protected var drawing_polygon = new Polygon() reactions += { // Free-hand drawing stuff case KeyPressed(_, Key.Shift, _, _) => { drawing_mode = true } case KeyReleased(_, Key.Shift, _, _) => { drawing_mode = false } case e: MousePressed if drawing_mode => { grab_focus() val x = screen_to_map_x(e.point.x).toInt val y = screen_to_map_y(e.point.y).toInt drawing_polygon.addPoint(x, y) repaint() } case e: MouseDragged if drawing_mode => { val x = screen_to_map_x(e.point.x).toInt val y = screen_to_map_y(e.point.y).toInt if (drawing_polygon.npoints == 1) { // add it, so we can drag and make the first line drawing_polygon.addPoint(x, y) } drawing_polygon.xpoints(drawing_polygon.npoints - 1) = x drawing_polygon.ypoints(drawing_polygon.npoints - 1) = y repaint() } case KeyPressed(_, Key.R, _, _) if drawing_mode => { // finish it off if (drawing_polygon.npoints < 3) { Util.log("A polygon needs more than one line") } else { handle_ev(EV_Select_Polygon_For_Serialization(drawing_polygon)) } drawing_mode = false drawing_polygon = new Polygon() repaint() } // TODO fast_mode case e: MouseMoved => { // TODO nicer reassignment? mouse_at_x = e.point.x mouse_at_y = e.point.y handle_ev(EV_Mouse_Moved( screen_to_map_x(mouse_at_x), screen_to_map_y(mouse_at_y) )) } case e: MousePressed => { grab_focus() click_x = e.point.x click_y = e.point.y } case e: MouseDragged => { val dx = click_x - e.point.x val dy = click_y - e.point.y // TODO use screen<->map formulas better? x_off += dx y_off += dy fix_oob() // show the pan repaint() // reset for the next event click_x = e.point.x click_y = e.point.y mouse_at_x = e.point.x mouse_at_y = e.point.y handle_ev(EV_Mouse_Moved( screen_to_map_x(mouse_at_x), screen_to_map_y(mouse_at_y) )) } case e: MouseWheelMoved => { val old_zoom = zoom val dz = e.rotation * zoom_mult zoom -= zoom_step * dz // cap zoom zoom = math.max(zoom_step, zoom) // make screen_to_map of mouse_at still point to the same thing after // zooming. TODO comment the derivation; it was a proportion x_off = ((zoom / old_zoom) * (mouse_at_x + x_off)) - mouse_at_x y_off = ((zoom / old_zoom) * (mouse_at_y + y_off)) - mouse_at_y fix_oob() // show the zoom // TODO aargh this.asInstanceOf[MapCanvas].statusbar.zoom.text = f"$zoom%.1f" repaint() } // on my system, this is fired constantly at a repeat rate, rather than // having the usual semantics of one press/release pair. case KeyPressed(_, key, _, _) => { key match { case Key.Up => y_off -= zoom * key_speed case Key.Down => y_off += zoom * key_speed case Key.Left => x_off -= zoom * key_speed case Key.Right => x_off += zoom * key_speed case Key.R => reset_window case _ => handle_ev(EV_Key_Press(key)) } fix_oob() repaint() } } def grab_focus() { if (!hasFocus) { requestFocus() } } // begin in the center def reset_window() { x_off = canvas_width / 2 y_off = canvas_height / 2 zoom = 1.0 } // prevent coordinates from leaving the canvas private def fix_oob() { // upper logical bounds of the current screen val x2 = screen_to_map_x(size.width) val y2 = screen_to_map_y(size.height) val x_fix = canvas_width - x2 val y_fix = canvas_height - y2 if (x_fix < 0) { x_off += x_fix } if (y_fix < 0) { y_off += y_fix } // the lower logical bounds are, of course, the origin x_off = math.max(0, x_off) y_off = math.max(0, y_off) } def center_on(pt: Coordinate) { x_off = (pt.x * zoom) - (size.width / 2) y_off = (pt.y * zoom) - (size.height / 2) } // TODO swing SuperMixin has a window focus listener... var first_focus = true override def paintComponent(g2d: Graphics2D) { // clear things super.paintComponent(g2d) if (first_focus) { grab_focus() first_focus = false } val orig_transform = g2d.getTransform // Perform the transformations to mimic scrolling and zooming g2d.translate(-x_off, -y_off) g2d.scale(zoom, zoom) // TODO antialias cfg // ew, clunky old java crap. val antialiasing = new java.util.HashMap[Any,Any]() antialiasing.put( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON ) g2d.setRenderingHints(antialiasing) // provide a tool-tip rendering service to the client. val tooltips = render_canvas(g2d, viewing_window) g2d.setTransform(orig_transform) for (tooltip <- tooltips if tooltip.lines.nonEmpty) { draw_tooltip(g2d, tooltip) } } // what logical coordinates are in view? def x1 = screen_to_map_x(0) def y1 = screen_to_map_y(0) def x2 = screen_to_map_x(size.width) def y2 = screen_to_map_y(size.height) def viewing_window: Rectangle2D.Double = new Rectangle2D.Double(x1, y1, x2 - x1, y2 - y1) def draw_tooltip(g2d: Graphics2D, tooltip: Tooltip) { val longest = tooltip.lines.maxBy(_.length) val width = g2d.getFontMetrics.getStringBounds(longest, g2d).getWidth // assume the height of each line is the same val height = g2d.getFontMetrics.getStringBounds(longest, g2d).getHeight val total_height = height * tooltip.lines.length // center the text val x = map_to_screen_x(tooltip.x) - (width / 2.0) // don't draw right on top of the cursor // TODO fancy: when drawing above cursor would occlude, draw below val top = map_to_screen_y(tooltip.y) - total_height - 10.0 // draw a nice backdrop if (tooltip.dark) { g2d.setColor(Color.GRAY) } else { g2d.setColor(Color.WHITE) } g2d.fill(new RoundRectangle2D.Double( x - 2.0, top - 2.0, width + 4.0, total_height + 6.0, 1.0, 1.0 )) g2d.setColor(Color.BLACK) // TODO tweak font size, colors. def draw_line(text: List[String], y: Double): Unit = text match { case line :: more => { g2d.drawString(line, x.toFloat, y.toFloat) draw_line(text.tail, y + height) } case Nil => } draw_line(tooltip.lines, top + height) } // implement these. render_canvas returns tooltip text desired around each // point def render_canvas(g2d: Graphics2D, window: Rectangle2D.Double): List[Tooltip] def canvas_width: Int def canvas_height: Int def handle_ev(ev: UI_Event) } sealed trait UI_Event final case class EV_Mouse_Moved(x: Double, y: Double) extends UI_Event // TODO type erasure issue here; this should really be Option[Any] final case class EV_Param_Set(key: String, value: Option[String]) extends UI_Event // TODO this is really a swing.event.Key (a Enumeration.Value), but I can't get // that to work... final case class EV_Key_Press(key: Any) extends UI_Event final case class EV_Action(key: String) extends UI_Event final case class EV_Select_Polygon_For_Serialization(poly: Polygon) extends UI_Event
dabreegster/aorta
utexas/aorta/ui/ScrollingCanvas.scala
Scala
gpl-2.0
9,357
/** * This file is part of the "seeruk/scala-semver" project. * * Unless required by applicable law or agreed to in writing, software * distributed under the LICENSE is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ package seeruk.semver import org.scalatest.{BeforeAndAfter, FunSpec} /** * SemanticVersion Spec * * @author Elliot Wright <elliot@elliotwright.co> */ class SemanticVersionSpec extends FunSpec with BeforeAndAfter { describe("seeruk.semver.SemanticVersion") { describe("toString()") { it("should return a string") { val semver = new SemanticVersion(1, 2, 3) assert(semver.toString.isInstanceOf[String]) } it("should at least return a version string, including a major, minor, and patch number") { val semver = new SemanticVersion(1, 2, 3) assert(semver.toString == "1.2.3") } it("should include a pre-release string if one is set") { val semver = new SemanticVersion(1, 2, 3, Some(List("alpha"))) assert(semver.toString == "1.2.3-alpha") } it("should include several pre-release strings if they are set, separated by a dot") { val semver = new SemanticVersion(1, 2, 3, Some(List("alpha", "1"))) assert(semver.toString == "1.2.3-alpha.1") } it("should include a metadata string if one is set") { val semver = new SemanticVersion(1, 2, 3, None, Some(List("build"))) assert(semver.toString == "1.2.3+build") } it("should include several metadata strings if they are set, separated by a dot") { val semver = new SemanticVersion(1, 2, 3, None, Some(List("build", "123"))) assert(semver.toString == "1.2.3+build.123") } it("should include both a pre-release string, and a metadata string if they are set") { val semver1 = new SemanticVersion(1, 2, 3, Some(List("alpha")), Some(List("build"))) val semver2 = new SemanticVersion(1, 2, 3, Some(List("alpha", "1")), Some(List("build", "123"))) assert(semver1.toString == "1.2.3-alpha+build") assert(semver2.toString == "1.2.3-alpha.1+build.123") } } } }
SeerUK/scala-semver
src/test/scala/seeruk/semver/SemanticVersionSpec.scala
Scala
mit
2,352
package com.github.agaro1121.http.client import cats.data.{NonEmptyList, Validated, ValidatedNel} import cats.data.Validated.{invalidNel, valid} import com.github.agaro1121.exception.{BadLeaguesEndpointArgument, BadLeagueCompact, BadLeagueLimit, SeasonNotSpecified} import com.github.agaro1121.models.leagues.LeagueType import com.github.agaro1121.models.leagues.LeagueType.SEASON trait LeaguesEndpointArgsValidators { protected def validateSeason(`type`: Option[LeagueType], season: Option[String]): ValidatedNel[BadLeaguesEndpointArgument, Option[String]] = { (`type`, season) match { case (Some(SEASON), None) => invalidNel(SeasonNotSpecified("You must specify a season when type=SEASON")) case _ => valid(season) } } protected def validateCompact(compact: Option[Int]): ValidatedNel[BadLeaguesEndpointArgument, Option[Int]] = { compact match { case Some(value) if !(value == 0 || value == 1) => invalidNel(BadLeagueCompact(s"The value of compact can only be zero or 1")) case _ => valid(compact) } } protected def validateLimit(compact: Option[Int], limit: Option[Int]): ValidatedNel[BadLeaguesEndpointArgument, Option[Int]] = { val MaxLimitWhenCompactIsZero: Int = 50 val MaxLimitWhenCompactIsOne: Int = 230 (compact, limit) match { case (Some(1), Some(l)) if l > MaxLimitWhenCompactIsOne => invalidNel(BadLeagueLimit(s"The value of limit must be less than $MaxLimitWhenCompactIsOne when compact is 1")) case (Some(0), Some(l)) if l > MaxLimitWhenCompactIsZero => invalidNel(BadLeagueLimit(s"The value of limit must be less than $MaxLimitWhenCompactIsZero when compact is zero")) case (None, Some(_)) => invalidNel(BadLeagueLimit(s"Limit cannot be set when compact is not set")) case _ => valid(limit) } } }
agaro1121/PathOfExileApiClient
src/main/scala/com/github/agaro1121/http/client/LeaguesEndpointArgsValidators.scala
Scala
mit
1,855
package nyaya.util import scala.annotation.nowarn object Platform { @nowarn("cat=unused") @inline final def choose[A](jvm: => A, js: => A): A = js }
japgolly/nyaya
util/js/src/main/scala/nyaya/util/Platform.scala
Scala
lgpl-2.1
155
package models.annotation.nml import java.io.File import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.typesafe.scalalogging.LazyLogging import models.annotation.UploadedVolumeLayer import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.libs.Files.TemporaryFile object NmlResults extends LazyLogging { sealed trait NmlParseResult { def fileName: String def description: Option[String] = None def succeeded: Boolean def toSuccessBox: Box[NmlParseSuccess] = this match { case NmlParseFailure(fileName, error) => Failure(s"Couldn’t parse file: $fileName. $error") case success: NmlParseSuccess => Full(success) case _ => Failure(s"Couldn’t parse file: $fileName") } def withName(name: String): NmlParseResult = this } case class NmlParseSuccess(fileName: String, skeletonTracing: Option[SkeletonTracing], volumeLayers: List[UploadedVolumeLayer], _description: String) extends NmlParseResult { def succeeded = true override def description: Option[String] = Some(_description) override def withName(name: String): NmlParseResult = this.copy(fileName = name) } case class NmlParseFailure(fileName: String, error: String) extends NmlParseResult { def succeeded = false } case class NmlParseEmpty(fileName: String) extends NmlParseResult { def succeeded = false } case class MultiNmlParseResult(parseResults: List[NmlParseResult] = Nil, otherFiles: Map[String, TemporaryFile] = Map.empty) { def combineWith(other: MultiNmlParseResult): MultiNmlParseResult = MultiNmlParseResult(parseResults ::: other.parseResults, other.otherFiles ++ otherFiles) def containsNoSuccesses: Boolean = !parseResults.exists(_.succeeded) def containsFailure: Boolean = parseResults.exists { case _: NmlParseFailure => true case _ => false } // Used in task creation. Can only be used with single-layer volumes def toBoxes: List[TracingBoxContainer] = parseResults.map { parseResult => val successBox = parseResult.toSuccessBox val skeletonBox = successBox match { case Full(success) => success.skeletonTracing match { case Some(skeleton) => Full(skeleton) case None => Empty } case f: Failure => f case _ => Failure("") } val volumeBox = successBox match { case Full(success) if success.volumeLayers.length <= 1 => success.volumeLayers.headOption match { case Some(UploadedVolumeLayer(tracing, dataZipLocation, _)) => Full((tracing, otherFiles.get(dataZipLocation).map(_.path.toFile))) case None => Empty } case Full(success) if success.volumeLayers.length > 1 => Failure("Cannot create tasks from multi-layer volume annotations.") case f: Failure => f case _ => Failure("") } TracingBoxContainer(successBox.map(_.fileName), successBox.map(_.description), skeletonBox, volumeBox) } } case class TracingBoxContainer(fileName: Box[String], description: Box[Option[String]], skeleton: Box[SkeletonTracing], volume: Box[(VolumeTracing, Option[File])]) }
scalableminds/webknossos
app/models/annotation/nml/NmlResults.scala
Scala
agpl-3.0
3,670
/** * Bolt * statements/Delete * * Copyright (c) 2017 Osamu Takahashi * * This software is released under the MIT License. * http://opensource.org/licenses/mit-license.php * * @author Osamu Takahashi */ package com.sopranoworks.bolt.statements import com.google.cloud.spanner.{Key, KeySet, Mutation, TransactionContext} import com.google.cloud.spanner.TransactionRunner.TransactionCallable import com.sopranoworks.bolt.{Bolt, QueryContext, Where} import scala.collection.JavaConversions._ case class Delete(nut:Bolt.Nut,qc:QueryContext,tableName:String,where:Where,hint:String) extends Update { def execute():Unit = { Option(where) match { case Some(Where(_,_,w,_)) => nut.transactionContext match { case Some(tr) => if (where.isOptimizedWhere) { nut.addMutations(List(where.asDeleteMutation)) } else { val keys = _getTargetKeys(tr, tableName, w, hint) if (keys.nonEmpty) { val ml = keys.map { k => Mutation.delete(tableName, Key.of(k: _*)) } nut.addMutations(ml) } } case _ => if (where.isOptimizedWhere) { Option(nut.dbClient).foreach(_.write(List(where.asDeleteMutation))) } else { Option(nut.dbClient).foreach(_.readWriteTransaction() .run(new TransactionCallable[Unit] { override def run(transaction: TransactionContext):Unit = { val keys = _getTargetKeys(transaction, tableName, w, hint) if (keys.nonEmpty) { val ml = keys.map { k => Mutation.delete(tableName, Key.of(k: _*)) } transaction.buffer(ml) } } } ))} } case None => val m = Mutation.delete(tableName,KeySet.all()) Option(nut.dbClient).foreach(_.write(List(m))) case _ => } } }
OsamuTakahashi/bolt
src/main/scala/com/sopranoworks/bolt/statements/Delete.scala
Scala
mit
2,161
package net.mentalarray.doozie.Tasks /** * Created by bgilcrease on 10/1/14. */ class HiveTask(name:String) extends WorkflowTask(name) with TaskHasResult[String] with Logging { private var _statements: List[String] = null private def statementsOrEmpty = if (_statements == null) List.empty[String] else _statements override def validate { var isValid = true if ( _statements!=null ) { _statements.foreach(s => isValid &= !s.isNullOrWhitespace) } else { isValid = false } if ( !isValid ) { throw new WorkflowStateException(this, "The hive query must be specified.") } } def statements: List[String] = _statements def setNonQuery(statements: List[String]) = _statements = statements def setNonQuery(statement: String): Unit = setNonQuery(List(statement)) def appendNonQuery(statements: List[String]) = _statements = statementsOrEmpty ++ statements def appendNonQuery(statement: String): Unit = appendNonQuery(List(statement)) } object HiveTask { def apply(cfgFn: HiveTask => Unit): HiveTask = { val task = new HiveTask("Execute Hive Job") cfgFn(task) task } def apply(name: String)(cfgFn: HiveTask => Unit): HiveTask = { val task = new HiveTask(name) cfgFn(task) task } }
antagonist112358/tomahawk
workflow-engine/src/net/mentalarray/doozie/Tasks/HiveTask.scala
Scala
apache-2.0
1,275
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy import scala.collection.JavaConverters._ import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite} import org.apache.spark.internal.config.{SHUFFLE_SERVICE_DB_ENABLED, SHUFFLE_SERVICE_ENABLED} import org.apache.spark.util.Utils class ExternalShuffleServiceMetricsSuite extends SparkFunSuite { var sparkConf: SparkConf = _ var externalShuffleService: ExternalShuffleService = _ override def beforeAll(): Unit = { super.beforeAll() sparkConf = new SparkConf() sparkConf.set(SHUFFLE_SERVICE_ENABLED, true) sparkConf.set(SHUFFLE_SERVICE_DB_ENABLED, false) sparkConf.set("spark.local.dir", System.getProperty("java.io.tmpdir")) Utils.loadDefaultSparkProperties(sparkConf, null) val securityManager = new SecurityManager(sparkConf) externalShuffleService = new ExternalShuffleService(sparkConf, securityManager) externalShuffleService.start() } override def afterAll(): Unit = { if (externalShuffleService != null) { externalShuffleService.stop() } super.afterAll() } test("SPARK-31646: metrics should be registered") { val sourceRef = classOf[ExternalShuffleService].getDeclaredField("shuffleServiceSource") sourceRef.setAccessible(true) val source = sourceRef.get(externalShuffleService).asInstanceOf[ExternalShuffleServiceSource] assert(source.metricRegistry.getMetrics.keySet().asScala == Set( "blockTransferRateBytes", "numActiveConnections", "numCaughtExceptions", "numRegisteredConnections", "openBlockRequestLatencyMillis", "registeredExecutorsSize", "registerExecutorRequestLatencyMillis", "shuffle-server.usedDirectMemory", "shuffle-server.usedHeapMemory", "finalizeShuffleMergeLatencyMillis") ) } }
witgo/spark
core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceMetricsSuite.scala
Scala
apache-2.0
2,631
/* * SettingsFrame.scala * (Muta) * * Copyright (c) 2013-2014 Hanns Holger Rutz. All rights reserved. * * This software is published under the GNU Lesser General Public License v3+ * * * For further information, please contact Hanns Holger Rutz at * contact@sciss.de */ package de.sciss.muta package gui import de.sciss.guiflitz.AutoView import de.sciss.desktop.Window import de.sciss.desktop.impl.WindowImpl import scala.swing.{Action, ScrollPane} class SettingsFrame[A](app: GeneticApp[_], view: AutoView[A], title: String) { me => final def value : A = view.cell() final def value_=(eval: A): Unit = view.cell() = eval val window: Window = new WindowImpl { def handler = app.windowHandler title = s"${me.title} Settings" closeOperation = Window.CloseDispose contents = new ScrollPane(view.component) bindMenu("window.pack", Action(null) { view.component.revalidate() pack() }) pack() front() } }
Sciss/Muta
src/main/scala/de/sciss/muta/gui/SettingsFrame.scala
Scala
lgpl-3.0
1,004
package im.actor.server.sms import scala.concurrent.{ ExecutionContext, Future } import akka.actor._ import akka.http.scaladsl.HttpExt import akka.http.scaladsl.model._ import akka.stream.Materializer import com.typesafe.config._ import im.actor.server.util.StringUtils._ class ClickatellSmsEngine(config: Config)(implicit system: ActorSystem, materializer: Materializer, http: HttpExt) extends SmsEngine { private val user = config.getString("user") private val password = config.getString("password") private val apiId = config.getString("api-id") private val baseUri = Uri("http://api.clickatell.com/http/sendmsg") private val baseParams = Map( "user" → user, "password" → password, "api_id" → apiId ) implicit val ec: ExecutionContext = system.dispatcher override def send(phoneNumber: Long, message: String): Future[Unit] = { val params = baseParams + ("to" → phoneNumber.toString) val uri = if (isAsciiString(message)) baseUri.withQuery(params + ("text" → message)) else baseUri.withQuery(params ++ Map( "text" → utfToHexString(message), "unicode" → 1.toString )) val f = http.singleRequest(HttpRequest(uri = uri)) map { case HttpResponse(StatusCodes.OK, _, entity, _) ⇒ // FIXME: check if body starts with OK () case resp ⇒ throw new Exception(s"Wrong response: ${resp}") } f onFailure { case e ⇒ system.log.error(e, "Failed to send sms to clickatell") } f } }
supertanglang/actor-platform
actor-server/actor-sms/src/main/scala/im/actor/server/sms/ClickatellSmsEngine.scala
Scala
mit
1,546
package sylvestris.slick import cats.data._ import cats.implicits._ import scala.slick.ast.ColumnOption.DBType import scala.slick.driver.PostgresDriver.simple.{ Tag => _, _ } import scala.slick.jdbc.meta.MTable import spray.json._ import sylvestris.core, core._ import sylvestris.slick.SlickGraph._ @SuppressWarnings(Array("org.brianmckenna.wartremover.warts.NonUnitStatements")) class SlickGraph(implicit session: Session) extends Graph { for (t <- List(slickNodes, slickEdges) if MTable.getTables(t.baseTableRow.tableName).list.isEmpty) { t.ddl.create } def nodes[T : NodeManifest](): XorT[GraphM, List[Error], Set[Node[T]]] = XorTGraphM { slickNodes.list.map(slickNodeToNode[T]).sequenceU.bimap(List(_), _.toSet) } def getNode[T : NodeManifest](id: Id): XorT[GraphM, Error, Node[T]] = slick { slickNodes .filter(d => d.id === id.v && d.tag === NodeManifest[T].tag.v) .list .map(slickNodeToNode[T]) .sequenceU .flatMap { case h :: Nil => h.right case Nil => Error(s"$id not found").left case nodes => Error(s"more than one node found for $id, $nodes").left } } def addNode[T : NodeManifest](node: Node[T]): XorT[GraphM, Error, Node[T]] = slick { if (slickNodes.filter(_.id === node.id.v).run.nonEmpty) { Error(s"$node already defined").left } else { slickNodes += nodeToSlickNode(node) node.right } } def updateNode[T : NodeManifest](node: Node[T]): XorT[GraphM, Error, Node[T]] = slick { val updatedCount = slickNodes.filter(_.id === node.id.v).update(nodeToSlickNode(node)) if (updatedCount =!= 1) Error(s"updated $updatedCount for $node").left else node.right } def removeNode[T : NodeManifest](id: Id): XorT[GraphM, Error, Node[T]] = getNode(id).flatMap { node => slick { val deletedCount = slickNodes.filter(_.id === id.v).delete if (deletedCount < 1) Error(s"$id not deleted").left else node.right }} def getEdges(id: Id, tag: Tag): XorT[GraphM, Error, Set[Edge]] = slick { filterEdgesQuery(id, tag) .list .map(slickEdgeToEdge) .toSet .right } def getEdges(label: Option[Label], idA: Id, tagA: Tag, tagB: Tag): XorT[GraphM, Error, Set[Edge]] = slick { filterEdgesQuery(label, idA, tagA, tagB) .list .map(slickEdgeToEdge) .toSet .right } def addEdges(edges: Set[Edge]): XorT[GraphM, Error, Set[Edge]] = slick { slickEdges ++= edges.map(edgeToSlickEdge) edges.right } def removeEdges(edges: Set[Edge]): XorT[GraphM, Error, Set[Edge]] = slick { val deletedCount = edges.map(filterEdgesQuery).reduce(_++_).delete if (deletedCount =!= edges.size) Error(s"$deletedCount of ${edges.size} deleted, $edges").left else edges.right } def removeEdges(idA: Id, tagA: Tag, tagB: Tag): XorT[GraphM, Error, Set[Edge]] = getEdges(None, idA, tagA, tagB).flatMap { edges => slick { val deletedCount = filterEdgesQuery(idA, tagA, tagB).delete if (deletedCount =!= edges.size) Error(s"$deletedCount of ${edges.size} deleted, $edges").left else edges.right }} } object SlickGraph { import scala.slick.driver.PostgresDriver.simple.Tag case class SlickNode(id: String, tag: String, content: String) // TODO : migration // - rename table documents → nodes // - rename nodes : poid → id // - rename nodes : type → tag // - rename edges : name → label // - rename edges : from → a_id // - add column a_tag to edges // - rename edges : to → b_id // - add column b_tag to edges class SlickNodes(t: Tag) extends Table[SlickNode](t, "nodes") { def id = column[String]("id", O.PrimaryKey) def tag = column[String]("tag") def content = column[String]("content", DBType("TEXT")) def * = (id, tag, content) <> (SlickNode.tupled, SlickNode.unapply) def idxType = index("idx_type", tag) } val slickNodes = TableQuery[SlickNodes] // TODO : update variable names to be in line with Edge case class SlickEdge(label: Option[String], idA: String, tagA: String, idB: String, tagB: String) class SlickEdges(t: Tag) extends Table[SlickEdge](t, "edges") { def label = column[Option[String]]("label") def idA = column[String]("a_id") def tagA = column[String]("a_tag") def idB = column[String]("b_id") def tagB = column[String]("b_tag") def * = (label, idA, tagA, idB, tagB) <> (SlickEdge.tupled, SlickEdge.unapply) // TODO : do we want delete cascade? def aFk = foreignKey("a_fk", idA, slickNodes)(_.id, onDelete = ForeignKeyAction.Cascade) def bFk = foreignKey("to_fk", idB, slickNodes)(_.id, onDelete = ForeignKeyAction.Cascade) def idx = index("idx_all", (label, idA, tagA, idB, tagB), unique = true) def idxA = index("idx_a", (idA, tagA)) def idxB = index("idx_b", (idB, tagB)) } val slickEdges = TableQuery[SlickEdges] def filterEdgesQuery(idA: Id, tagA: core.Tag) : Query[SlickEdges, SlickEdges#TableElementType, Seq] = filterEdgesQuery(None, idA, tagA, None, None) def filterEdgesQuery(idA: Id, tagA: core.Tag, tagB: core.Tag): Query[SlickEdges, SlickEdges#TableElementType, Seq] = filterEdgesQuery(None, idA, tagA, None, Some(tagB)) def filterEdgesQuery(label: Option[Label], idA: Id, tagA: core.Tag, tagB: core.Tag) : Query[SlickEdges, SlickEdges#TableElementType, Seq] = filterEdgesQuery(label, idA, tagA, None, Some(tagB)) def filterEdgesQuery(edge: Edge): Query[SlickEdges, SlickEdges#TableElementType, Seq] = filterEdgesQuery(edge.label, edge.idA, edge.tagA, Some(edge.idB), Some(edge.tagB)) def filterEdgesQuery(label: Option[Label], idA: Id, tagA: core.Tag, idB: Option[Id], tagB: Option[core.Tag]) : Query[SlickEdges, SlickEdges#TableElementType, Seq] = { val q1 = slickEdges.filter(e => e.idA === idA.v && e.tagA === tagA.v) val q2 = label.fold(q1)(l => q1.filter(_.label === label.map(_.v))) val q3 = idB.fold(q2)(i => q2.filter(_.idB === i.v)) tagB.fold(q3)(t => q3.filter(_.tagB === t.v)) } def slickNodeToNode[T : NodeManifest](v: SlickNode): Error Xor Node[T] = Xor.fromTryCatch(v.content.parseJson.convertTo[T](NodeManifest[T].jsonFormat)) .bimap(t => Error(s"unable to parse $v to Node", Some(t)), Node[T](Id(v.id), _)) def nodeToSlickNode[T : NodeManifest](v: Node[T]): SlickNode = SlickNode(v.id.v, NodeManifest[T].tag.v, v.content.toJson(NodeManifest[T].jsonFormat).compactPrint) def slickEdgeToEdge(v: SlickEdge): Edge = Edge(v.label.map(Label(_)), Id(v.idA), Tag(v.tagA), Id(v.idB), Tag(v.tagB)) def edgeToSlickEdge(v: Edge): SlickEdge = SlickEdge(v.label.map(_.v), v.idA.v, v.tagA.v, v.idB.v, v.tagB.v) def slick[T](op: => Error Xor T): XorT[GraphM, Error, T] = XorTGraphM { Xor.fromTryCatch(op).fold(e => Error("unhandled slick error", Some(e)).left, identity) } }
janrain/sylvestris
slick/src/main/scala/sylvestris/slick/SlickGraph.scala
Scala
mit
6,890
package core.models import com.mohiva.play.silhouette.api.{ Identity, LoginInfo } import reactivemongo.bson.BSONObjectID /** * The user object. * * @param id The unique ID of the user. * @param name Maybe the name of the authenticated user. * @param email Maybe the email of the authenticated provider. * @param avatarURL Maybe the avatar URL of the authenticated provider. * @param registration The registration data. * @param settings The user settings. */ case class User( id: BSONObjectID, loginInfo: Seq[LoginInfo], name: Option[String], email: Option[String], avatarURL: Option[String], registration: Registration, settings: Settings ) extends Identity
setusoft/silhouette-play-react-seed
app-core/src/main/scala/core/models/User.scala
Scala
mit
716
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark import org.scalatest.{FunSuite, PrivateMethodTester} import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl} import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend} import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend} import org.apache.spark.scheduler.local.LocalBackend class SparkContextSchedulerCreationSuite extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging { def createTaskScheduler(master: String): TaskSchedulerImpl = createTaskScheduler(master, new SparkConf()) def createTaskScheduler(master: String, conf: SparkConf): TaskSchedulerImpl = { // Create local SparkContext to setup a SparkEnv. We don't actually want to start() the // real schedulers, so we don't want to create a full SparkContext with the desired scheduler. sc = new SparkContext("local", "test", conf) val createTaskSchedulerMethod = PrivateMethod[Tuple2[SchedulerBackend, TaskScheduler]]('createTaskScheduler) val (_, sched) = SparkContext invokePrivate createTaskSchedulerMethod(sc, master) sched.asInstanceOf[TaskSchedulerImpl] } test("bad-master") { val e = intercept[SparkException] { createTaskScheduler("localhost:1234") } assert(e.getMessage.contains("Could not parse Master URL")) } test("local") { val sched = createTaskScheduler("local") sched.backend match { case s: LocalBackend => assert(s.totalCores === 1) case _ => fail() } } test("local-*") { val sched = createTaskScheduler("local[*]") sched.backend match { case s: LocalBackend => assert(s.totalCores === Runtime.getRuntime.availableProcessors()) case _ => fail() } } test("local-n") { val sched = createTaskScheduler("local[5]") assert(sched.maxTaskFailures === 1) sched.backend match { case s: LocalBackend => assert(s.totalCores === 5) case _ => fail() } } test("local-*-n-failures") { val sched = createTaskScheduler("local[* ,2]") assert(sched.maxTaskFailures === 2) sched.backend match { case s: LocalBackend => assert(s.totalCores === Runtime.getRuntime.availableProcessors()) case _ => fail() } } test("local-n-failures") { val sched = createTaskScheduler("local[4, 2]") assert(sched.maxTaskFailures === 2) sched.backend match { case s: LocalBackend => assert(s.totalCores === 4) case _ => fail() } } test("bad-local-n") { val e = intercept[SparkException] { createTaskScheduler("local[2*]") } assert(e.getMessage.contains("Could not parse Master URL")) } test("bad-local-n-failures") { val e = intercept[SparkException] { createTaskScheduler("local[2*,4]") } assert(e.getMessage.contains("Could not parse Master URL")) } test("local-default-parallelism") { val conf = new SparkConf().set("spark.default.parallelism", "16") val sched = createTaskScheduler("local", conf) sched.backend match { case s: LocalBackend => assert(s.defaultParallelism() === 16) case _ => fail() } } test("simr") { createTaskScheduler("simr://uri").backend match { case s: SimrSchedulerBackend => // OK case _ => fail() } } test("local-cluster") { createTaskScheduler("local-cluster[3, 14, 512]").backend match { case s: SparkDeploySchedulerBackend => // OK case _ => fail() } } def testYarn(master: String, expectedClassName: String) { try { val sched = createTaskScheduler(master) assert(sched.getClass === Class.forName(expectedClassName)) } catch { case e: SparkException => assert(e.getMessage.contains("YARN mode not available")) logWarning("YARN not available, could not test actual YARN scheduler creation") case e: Throwable => fail(e) } } test("yarn-cluster") { testYarn("yarn-cluster", "org.apache.spark.scheduler.cluster.YarnClusterScheduler") } test("yarn-standalone") { testYarn("yarn-standalone", "org.apache.spark.scheduler.cluster.YarnClusterScheduler") } test("yarn-client") { testYarn("yarn-client", "org.apache.spark.scheduler.cluster.YarnScheduler") } def testMesos(master: String, expectedClass: Class[_], coarse: Boolean) { val conf = new SparkConf().set("spark.mesos.coarse", coarse.toString) try { val sched = createTaskScheduler(master, conf) assert(sched.backend.getClass === expectedClass) } catch { case e: UnsatisfiedLinkError => assert(e.getMessage.contains("no mesos in")) logWarning("Mesos not available, could not test actual Mesos scheduler creation") case e: Throwable => fail(e) } } test("mesos fine-grained") { testMesos("mesos://localhost:1234", classOf[MesosSchedulerBackend], coarse = false) } test("mesos coarse-grained") { testMesos("mesos://localhost:1234", classOf[CoarseMesosSchedulerBackend], coarse = true) } test("mesos with zookeeper") { testMesos("zk://localhost:1234,localhost:2345", classOf[MesosSchedulerBackend], coarse = false) } }
Dax1n/spark-core
core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
Scala
apache-2.0
6,019
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.command import java.net.URI import java.util.Locale import org.mockito.ArgumentMatchers.any import org.mockito.Mockito.{mock, when} import org.mockito.invocation.InvocationOnMock import org.apache.spark.sql.{AnalysisException, SaveMode} import org.apache.spark.sql.catalog.v2.{CatalogManager, CatalogNotFoundException, Identifier, TableCatalog, TestTableCatalog} import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.AnalysisTest import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType} import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan} import org.apache.spark.sql.execution.datasources.{CreateTable, DataSourceResolution} import org.apache.spark.sql.execution.datasources.v2.orc.OrcDataSourceV2 import org.apache.spark.sql.internal.SQLConf.DEFAULT_V2_CATALOG import org.apache.spark.sql.types.{DoubleType, IntegerType, LongType, StringType, StructType} import org.apache.spark.sql.util.CaseInsensitiveStringMap class PlanResolutionSuite extends AnalysisTest { import CatalystSqlParser._ private val orc2 = classOf[OrcDataSourceV2].getName private val testCat: TableCatalog = { val newCatalog = new TestTableCatalog newCatalog.initialize("testcat", CaseInsensitiveStringMap.empty()) newCatalog } private val v2SessionCatalog = { val newCatalog = new TestTableCatalog newCatalog.initialize("session", CaseInsensitiveStringMap.empty()) newCatalog } private val catalogManagerWithDefault = { val manager = mock(classOf[CatalogManager]) when(manager.catalog(any())).thenAnswer((invocation: InvocationOnMock) => { invocation.getArgument[String](0) match { case "testcat" => testCat case "session" => v2SessionCatalog case name => throw new CatalogNotFoundException(s"No such catalog: $name") } }) when(manager.defaultCatalog).thenReturn(Some(testCat)) when(manager.v2SessionCatalog).thenCallRealMethod() manager } private val catalogManagerWithoutDefault = { val manager = mock(classOf[CatalogManager]) when(manager.catalog(any())).thenAnswer((invocation: InvocationOnMock) => { invocation.getArgument[String](0) match { case "testcat" => testCat case "session" => v2SessionCatalog case name => throw new CatalogNotFoundException(s"No such catalog: $name") } }) when(manager.defaultCatalog).thenReturn(None) when(manager.v2SessionCatalog).thenCallRealMethod() manager } def parseAndResolve(query: String, withDefault: Boolean = false): LogicalPlan = { val newConf = conf.copy() newConf.setConfString(DEFAULT_V2_CATALOG.key, "testcat") val catalogManager = if (withDefault) { catalogManagerWithDefault } else { catalogManagerWithoutDefault } DataSourceResolution(newConf, catalogManager).apply(parsePlan(query)) } private def parseResolveCompare(query: String, expected: LogicalPlan): Unit = comparePlans(parseAndResolve(query), expected, checkAnalysis = true) private def extractTableDesc(sql: String): (CatalogTable, Boolean) = { parseAndResolve(sql).collect { case CreateTable(tableDesc, mode, _) => (tableDesc, mode == SaveMode.Ignore) }.head } test("create table - with partitioned by") { val query = "CREATE TABLE my_tab(a INT comment 'test', b STRING) " + "USING parquet PARTITIONED BY (a)" val expectedTableDesc = CatalogTable( identifier = TableIdentifier("my_tab"), tableType = CatalogTableType.MANAGED, storage = CatalogStorageFormat.empty, schema = new StructType() .add("a", IntegerType, nullable = true, "test") .add("b", StringType), provider = Some("parquet"), partitionColumnNames = Seq("a") ) parseAndResolve(query) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $query") } } test("create table - partitioned by transforms") { val transforms = Seq( "bucket(16, b)", "years(ts)", "months(ts)", "days(ts)", "hours(ts)", "foo(a, 'bar', 34)", "bucket(32, b), days(ts)") transforms.foreach { transform => val query = s""" |CREATE TABLE my_tab(a INT, b STRING) USING parquet |PARTITIONED BY ($transform) """.stripMargin val ae = intercept[AnalysisException] { parseAndResolve(query) } assert(ae.message .contains(s"Transforms cannot be converted to partition columns: $transform")) } } test("create table - with bucket") { val query = "CREATE TABLE my_tab(a INT, b STRING) USING parquet " + "CLUSTERED BY (a) SORTED BY (b) INTO 5 BUCKETS" val expectedTableDesc = CatalogTable( identifier = TableIdentifier("my_tab"), tableType = CatalogTableType.MANAGED, storage = CatalogStorageFormat.empty, schema = new StructType().add("a", IntegerType).add("b", StringType), provider = Some("parquet"), bucketSpec = Some(BucketSpec(5, Seq("a"), Seq("b"))) ) parseAndResolve(query) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $query") } } test("create table - with comment") { val sql = "CREATE TABLE my_tab(a INT, b STRING) USING parquet COMMENT 'abc'" val expectedTableDesc = CatalogTable( identifier = TableIdentifier("my_tab"), tableType = CatalogTableType.MANAGED, storage = CatalogStorageFormat.empty, schema = new StructType().add("a", IntegerType).add("b", StringType), provider = Some("parquet"), comment = Some("abc")) parseAndResolve(sql) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("create table - with table properties") { val sql = "CREATE TABLE my_tab(a INT, b STRING) USING parquet TBLPROPERTIES('test' = 'test')" val expectedTableDesc = CatalogTable( identifier = TableIdentifier("my_tab"), tableType = CatalogTableType.MANAGED, storage = CatalogStorageFormat.empty, schema = new StructType().add("a", IntegerType).add("b", StringType), provider = Some("parquet"), properties = Map("test" -> "test")) parseAndResolve(sql) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("create table - with location") { val v1 = "CREATE TABLE my_tab(a INT, b STRING) USING parquet LOCATION '/tmp/file'" val expectedTableDesc = CatalogTable( identifier = TableIdentifier("my_tab"), tableType = CatalogTableType.EXTERNAL, storage = CatalogStorageFormat.empty.copy(locationUri = Some(new URI("/tmp/file"))), schema = new StructType().add("a", IntegerType).add("b", StringType), provider = Some("parquet")) parseAndResolve(v1) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $v1") } val v2 = """ |CREATE TABLE my_tab(a INT, b STRING) |USING parquet |OPTIONS (path '/tmp/file') |LOCATION '/tmp/file' """.stripMargin val e = intercept[AnalysisException] { parseAndResolve(v2) } assert(e.message.contains("you can only specify one of them.")) } test("create table - byte length literal table name") { val sql = "CREATE TABLE 1m.2g(a INT) USING parquet" val expectedTableDesc = CatalogTable( identifier = TableIdentifier("2g", Some("1m")), tableType = CatalogTableType.MANAGED, storage = CatalogStorageFormat.empty, schema = new StructType().add("a", IntegerType), provider = Some("parquet")) parseAndResolve(sql) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("support for other types in OPTIONS") { val sql = """ |CREATE TABLE table_name USING json |OPTIONS (a 1, b 0.1, c TRUE) """.stripMargin val expectedTableDesc = CatalogTable( identifier = TableIdentifier("table_name"), tableType = CatalogTableType.MANAGED, storage = CatalogStorageFormat.empty.copy( properties = Map("a" -> "1", "b" -> "0.1", "c" -> "true") ), schema = new StructType, provider = Some("json") ) parseAndResolve(sql) match { case CreateTable(tableDesc, _, None) => assert(tableDesc == expectedTableDesc.copy(createTime = tableDesc.createTime)) case other => fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("Test CTAS against data source tables") { val s1 = """ |CREATE TABLE IF NOT EXISTS mydb.page_view |USING parquet |COMMENT 'This is the staging page view table' |LOCATION '/user/external/page_view' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |AS SELECT * FROM src """.stripMargin val s2 = """ |CREATE TABLE IF NOT EXISTS mydb.page_view |USING parquet |LOCATION '/user/external/page_view' |COMMENT 'This is the staging page view table' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |AS SELECT * FROM src """.stripMargin val s3 = """ |CREATE TABLE IF NOT EXISTS mydb.page_view |USING parquet |COMMENT 'This is the staging page view table' |LOCATION '/user/external/page_view' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |AS SELECT * FROM src """.stripMargin checkParsing(s1) checkParsing(s2) checkParsing(s3) def checkParsing(sql: String): Unit = { val (desc, exists) = extractTableDesc(sql) assert(exists) assert(desc.identifier.database.contains("mydb")) assert(desc.identifier.table == "page_view") assert(desc.storage.locationUri.contains(new URI("/user/external/page_view"))) assert(desc.schema.isEmpty) // will be populated later when the table is actually created assert(desc.comment.contains("This is the staging page view table")) assert(desc.viewText.isEmpty) assert(desc.viewDefaultDatabase.isEmpty) assert(desc.viewQueryColumnNames.isEmpty) assert(desc.partitionColumnNames.isEmpty) assert(desc.provider.contains("parquet")) assert(desc.properties == Map("p1" -> "v1", "p2" -> "v2")) } } test("Test v2 CreateTable with known catalog in identifier") { val sql = s""" |CREATE TABLE IF NOT EXISTS testcat.mydb.table_name ( | id bigint, | description string, | point struct<x: double, y: double>) |USING parquet |COMMENT 'table comment' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |OPTIONS (path 's3://bucket/path/to/data', other 20) """.stripMargin val expectedProperties = Map( "p1" -> "v1", "p2" -> "v2", "other" -> "20", "provider" -> "parquet", "location" -> "s3://bucket/path/to/data", "comment" -> "table comment") parseAndResolve(sql) match { case create: CreateV2Table => assert(create.catalog.name == "testcat") assert(create.tableName == Identifier.of(Array("mydb"), "table_name")) assert(create.tableSchema == new StructType() .add("id", LongType) .add("description", StringType) .add("point", new StructType().add("x", DoubleType).add("y", DoubleType))) assert(create.partitioning.isEmpty) assert(create.properties == expectedProperties) assert(create.ignoreIfExists) case other => fail(s"Expected to parse ${classOf[CreateV2Table].getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("Test v2 CreateTable with default catalog") { val sql = s""" |CREATE TABLE IF NOT EXISTS mydb.table_name ( | id bigint, | description string, | point struct<x: double, y: double>) |USING parquet |COMMENT 'table comment' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |OPTIONS (path 's3://bucket/path/to/data', other 20) """.stripMargin val expectedProperties = Map( "p1" -> "v1", "p2" -> "v2", "other" -> "20", "provider" -> "parquet", "location" -> "s3://bucket/path/to/data", "comment" -> "table comment") parseAndResolve(sql, withDefault = true) match { case create: CreateV2Table => assert(create.catalog.name == "testcat") assert(create.tableName == Identifier.of(Array("mydb"), "table_name")) assert(create.tableSchema == new StructType() .add("id", LongType) .add("description", StringType) .add("point", new StructType().add("x", DoubleType).add("y", DoubleType))) assert(create.partitioning.isEmpty) assert(create.properties == expectedProperties) assert(create.ignoreIfExists) case other => fail(s"Expected to parse ${classOf[CreateV2Table].getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("Test v2 CreateTable with data source v2 provider and no default") { val sql = s""" |CREATE TABLE IF NOT EXISTS mydb.page_view ( | id bigint, | description string, | point struct<x: double, y: double>) |USING $orc2 |COMMENT 'This is the staging page view table' |LOCATION '/user/external/page_view' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') """.stripMargin val expectedProperties = Map( "p1" -> "v1", "p2" -> "v2", "provider" -> orc2, "location" -> "/user/external/page_view", "comment" -> "This is the staging page view table") parseAndResolve(sql) match { case create: CreateV2Table => assert(create.catalog.name == "session") assert(create.tableName == Identifier.of(Array("mydb"), "page_view")) assert(create.tableSchema == new StructType() .add("id", LongType) .add("description", StringType) .add("point", new StructType().add("x", DoubleType).add("y", DoubleType))) assert(create.partitioning.isEmpty) assert(create.properties == expectedProperties) assert(create.ignoreIfExists) case other => fail(s"Expected to parse ${classOf[CreateV2Table].getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("Test v2 CTAS with known catalog in identifier") { val sql = s""" |CREATE TABLE IF NOT EXISTS testcat.mydb.table_name |USING parquet |COMMENT 'table comment' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |OPTIONS (path 's3://bucket/path/to/data', other 20) |AS SELECT * FROM src """.stripMargin val expectedProperties = Map( "p1" -> "v1", "p2" -> "v2", "other" -> "20", "provider" -> "parquet", "location" -> "s3://bucket/path/to/data", "comment" -> "table comment") parseAndResolve(sql) match { case ctas: CreateTableAsSelect => assert(ctas.catalog.name == "testcat") assert(ctas.tableName == Identifier.of(Array("mydb"), "table_name")) assert(ctas.properties == expectedProperties) assert(ctas.writeOptions == Map("other" -> "20")) assert(ctas.partitioning.isEmpty) assert(ctas.ignoreIfExists) case other => fail(s"Expected to parse ${classOf[CreateTableAsSelect].getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("Test v2 CTAS with default catalog") { val sql = s""" |CREATE TABLE IF NOT EXISTS mydb.table_name |USING parquet |COMMENT 'table comment' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |OPTIONS (path 's3://bucket/path/to/data', other 20) |AS SELECT * FROM src """.stripMargin val expectedProperties = Map( "p1" -> "v1", "p2" -> "v2", "other" -> "20", "provider" -> "parquet", "location" -> "s3://bucket/path/to/data", "comment" -> "table comment") parseAndResolve(sql, withDefault = true) match { case ctas: CreateTableAsSelect => assert(ctas.catalog.name == "testcat") assert(ctas.tableName == Identifier.of(Array("mydb"), "table_name")) assert(ctas.properties == expectedProperties) assert(ctas.writeOptions == Map("other" -> "20")) assert(ctas.partitioning.isEmpty) assert(ctas.ignoreIfExists) case other => fail(s"Expected to parse ${classOf[CreateTableAsSelect].getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("Test v2 CTAS with data source v2 provider and no default") { val sql = s""" |CREATE TABLE IF NOT EXISTS mydb.page_view |USING $orc2 |COMMENT 'This is the staging page view table' |LOCATION '/user/external/page_view' |TBLPROPERTIES ('p1'='v1', 'p2'='v2') |AS SELECT * FROM src """.stripMargin val expectedProperties = Map( "p1" -> "v1", "p2" -> "v2", "provider" -> orc2, "location" -> "/user/external/page_view", "comment" -> "This is the staging page view table") parseAndResolve(sql) match { case ctas: CreateTableAsSelect => assert(ctas.catalog.name == "session") assert(ctas.tableName == Identifier.of(Array("mydb"), "page_view")) assert(ctas.properties == expectedProperties) assert(ctas.writeOptions.isEmpty) assert(ctas.partitioning.isEmpty) assert(ctas.ignoreIfExists) case other => fail(s"Expected to parse ${classOf[CreateTableAsSelect].getName} from query," + s"got ${other.getClass.getName}: $sql") } } test("drop table") { val tableName1 = "db.tab" val tableIdent1 = TableIdentifier("tab", Option("db")) val tableName2 = "tab" val tableIdent2 = TableIdentifier("tab", None) parseResolveCompare(s"DROP TABLE $tableName1", DropTableCommand(tableIdent1, ifExists = false, isView = false, purge = false)) parseResolveCompare(s"DROP TABLE IF EXISTS $tableName1", DropTableCommand(tableIdent1, ifExists = true, isView = false, purge = false)) parseResolveCompare(s"DROP TABLE $tableName2", DropTableCommand(tableIdent2, ifExists = false, isView = false, purge = false)) parseResolveCompare(s"DROP TABLE IF EXISTS $tableName2", DropTableCommand(tableIdent2, ifExists = true, isView = false, purge = false)) parseResolveCompare(s"DROP TABLE $tableName2 PURGE", DropTableCommand(tableIdent2, ifExists = false, isView = false, purge = true)) parseResolveCompare(s"DROP TABLE IF EXISTS $tableName2 PURGE", DropTableCommand(tableIdent2, ifExists = true, isView = false, purge = true)) } test("drop table in v2 catalog") { val tableName1 = "testcat.db.tab" val tableIdent1 = Identifier.of(Array("db"), "tab") val tableName2 = "testcat.tab" val tableIdent2 = Identifier.of(Array.empty, "tab") parseResolveCompare(s"DROP TABLE $tableName1", DropTable(testCat, tableIdent1, ifExists = false)) parseResolveCompare(s"DROP TABLE IF EXISTS $tableName1", DropTable(testCat, tableIdent1, ifExists = true)) parseResolveCompare(s"DROP TABLE $tableName2", DropTable(testCat, tableIdent2, ifExists = false)) parseResolveCompare(s"DROP TABLE IF EXISTS $tableName2", DropTable(testCat, tableIdent2, ifExists = true)) } test("drop view") { val viewName1 = "db.view" val viewIdent1 = TableIdentifier("view", Option("db")) val viewName2 = "view" val viewIdent2 = TableIdentifier("view") parseResolveCompare(s"DROP VIEW $viewName1", DropTableCommand(viewIdent1, ifExists = false, isView = true, purge = false)) parseResolveCompare(s"DROP VIEW IF EXISTS $viewName1", DropTableCommand(viewIdent1, ifExists = true, isView = true, purge = false)) parseResolveCompare(s"DROP VIEW $viewName2", DropTableCommand(viewIdent2, ifExists = false, isView = true, purge = false)) parseResolveCompare(s"DROP VIEW IF EXISTS $viewName2", DropTableCommand(viewIdent2, ifExists = true, isView = true, purge = false)) } test("drop view in v2 catalog") { intercept[AnalysisException] { parseAndResolve("DROP VIEW testcat.db.view") }.getMessage.toLowerCase(Locale.ROOT).contains( "view support in catalog has not been implemented") } // ALTER VIEW view_name SET TBLPROPERTIES ('comment' = new_comment); // ALTER VIEW view_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key'); test("alter view: alter view properties") { val sql1_view = "ALTER VIEW table_name SET TBLPROPERTIES ('test' = 'test', " + "'comment' = 'new_comment')" val sql2_view = "ALTER VIEW table_name UNSET TBLPROPERTIES ('comment', 'test')" val sql3_view = "ALTER VIEW table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')" val parsed1_view = parseAndResolve(sql1_view) val parsed2_view = parseAndResolve(sql2_view) val parsed3_view = parseAndResolve(sql3_view) val tableIdent = TableIdentifier("table_name", None) val expected1_view = AlterTableSetPropertiesCommand( tableIdent, Map("test" -> "test", "comment" -> "new_comment"), isView = true) val expected2_view = AlterTableUnsetPropertiesCommand( tableIdent, Seq("comment", "test"), ifExists = false, isView = true) val expected3_view = AlterTableUnsetPropertiesCommand( tableIdent, Seq("comment", "test"), ifExists = true, isView = true) comparePlans(parsed1_view, expected1_view) comparePlans(parsed2_view, expected2_view) comparePlans(parsed3_view, expected3_view) } // ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment); // ALTER TABLE table_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key'); test("alter table: alter table properties") { val sql1_table = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " + "'comment' = 'new_comment')" val sql2_table = "ALTER TABLE table_name UNSET TBLPROPERTIES ('comment', 'test')" val sql3_table = "ALTER TABLE table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')" val parsed1_table = parseAndResolve(sql1_table) val parsed2_table = parseAndResolve(sql2_table) val parsed3_table = parseAndResolve(sql3_table) val tableIdent = TableIdentifier("table_name", None) val expected1_table = AlterTableSetPropertiesCommand( tableIdent, Map("test" -> "test", "comment" -> "new_comment"), isView = false) val expected2_table = AlterTableUnsetPropertiesCommand( tableIdent, Seq("comment", "test"), ifExists = false, isView = false) val expected3_table = AlterTableUnsetPropertiesCommand( tableIdent, Seq("comment", "test"), ifExists = true, isView = false) comparePlans(parsed1_table, expected1_table) comparePlans(parsed2_table, expected2_table) comparePlans(parsed3_table, expected3_table) } test("support for other types in TBLPROPERTIES") { val sql = """ |ALTER TABLE table_name |SET TBLPROPERTIES ('a' = 1, 'b' = 0.1, 'c' = TRUE) """.stripMargin val parsed = parseAndResolve(sql) val expected = AlterTableSetPropertiesCommand( TableIdentifier("table_name"), Map("a" -> "1", "b" -> "0.1", "c" -> "true"), isView = false) comparePlans(parsed, expected) } test("alter table: set location") { val sql1 = "ALTER TABLE table_name SET LOCATION 'new location'" val parsed1 = parseAndResolve(sql1) val tableIdent = TableIdentifier("table_name", None) val expected1 = AlterTableSetLocationCommand( tableIdent, None, "new location") comparePlans(parsed1, expected1) } }
techaddict/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
Scala
apache-2.0
26,363
package com.campudus.tableaux import com.campudus.tableaux.cache.CacheVerticle import com.campudus.tableaux.verticles.JsonSchemaValidator.{JsonSchemaValidatorVerticle, JsonSchemaValidatorClient} import com.campudus.tableaux.database.DatabaseConnection import com.campudus.tableaux.helper.{FileUtils, VertxAccess} import com.campudus.tableaux.router._ import com.typesafe.scalalogging.LazyLogging import io.vertx.lang.scala.ScalaVerticle import io.vertx.scala.SQLConnection import io.vertx.scala.core.http.HttpServer import io.vertx.scala.core.{DeploymentOptions, Vertx} import org.vertx.scala.core.json.{Json, JsonObject} import scala.concurrent.Future import scala.util.{Failure, Success} object Starter { val DEFAULT_HOST = "127.0.0.1" val DEFAULT_PORT = 8181 val DEFAULT_WORKING_DIRECTORY = "./" val DEFAULT_UPLOADS_DIRECTORY = "uploads/" val DEFAULT_ROLE_PERMISSIONS_PATH = "./role-permissions.json" } class Starter extends ScalaVerticle with LazyLogging { private var connection: SQLConnection = _ private var server: HttpServer = _ override def startFuture(): Future[Unit] = { if (config.isEmpty) { logger.error("Provide a config please!") Future.failed(new Exception("Provide a config please!")) } else if (config.getJsonObject("database", Json.obj()).isEmpty) { logger.error("Provide a database config please!") Future.failed(new Exception("Provide a database config please!")) } else { val databaseConfig = config.getJsonObject("database", Json.obj()) val cacheConfig = config.getJsonObject("cache", Json.obj()) if (cacheConfig.isEmpty) { logger.warn("Cache config is empty, using default settings.") } val jsonSchemaConfig = Json.obj() val host = getStringDefault(config, "host", Starter.DEFAULT_HOST) val port = getIntDefault(config, "port", Starter.DEFAULT_PORT) val workingDirectory = getStringDefault(config, "workingDirectory", Starter.DEFAULT_WORKING_DIRECTORY) val uploadsDirectory = getStringDefault(config, "uploadsDirectory", Starter.DEFAULT_UPLOADS_DIRECTORY) val authConfig = config.getJsonObject("auth", Json.obj()) val rolePermissionsPath = getStringDefault(config, "rolePermissionsPath", Starter.DEFAULT_ROLE_PERMISSIONS_PATH) val rolePermissions = FileUtils(vertxAccessContainer()).readJsonFile(rolePermissionsPath, Json.emptyObj()) val tableauxConfig = new TableauxConfig( vertx = this.vertx, databaseConfig = databaseConfig, authConfig = authConfig, workingDirectory = workingDirectory, uploadsDirectory = uploadsDirectory, rolePermissions = rolePermissions ) connection = SQLConnection(vertxAccessContainer(), databaseConfig) for { _ <- createUploadsDirectories(tableauxConfig) server <- deployHttpServer(port, host, tableauxConfig, connection) _ <- deployJsonSchemaValidatorVerticle(jsonSchemaConfig) _ <- deployCacheVerticle(cacheConfig) } yield { this.server = server } } } override def stopFuture(): Future[Unit] = { for { _ <- connection.close() _ <- server.closeFuture() } yield () } private def createUploadsDirectories(config: TableauxConfig): Future[Unit] = { FileUtils(vertxAccessContainer()).mkdirs(config.uploadsDirectoryPath()) } private def deployHttpServer( port: Int, host: String, tableauxConfig: TableauxConfig, connection: SQLConnection ): Future[HttpServer] = { val dbConnection = DatabaseConnection(vertxAccessContainer(), connection) val mainRouter = RouterRegistry.init(tableauxConfig, dbConnection) vertx .createHttpServer() .requestHandler(mainRouter.accept) .listenFuture(port, host) } private def deployJsonSchemaValidatorVerticle(config: JsonObject): Future[String] = { val options = DeploymentOptions() .setConfig(config) val jsonSchemaValidatorClient = JsonSchemaValidatorClient(vertx) val deployFuture = for { deployedVerticle <- vertx .deployVerticleFuture(ScalaVerticle.nameForVerticle[JsonSchemaValidatorVerticle], options) schemas <- FileUtils(vertxAccessContainer()).getSchemaList() _ <- jsonSchemaValidatorClient.registerMultipleSchemas(schemas) } yield (deployedVerticle) deployFuture.onComplete({ case Success(id) => logger.info(s"JsonSchemaValidatorVerticle deployed with ID $id") case Failure(e) => logger.error("JsonSchemaValidatorVerticle couldn't be deployed.", e) }) deployFuture } private def deployCacheVerticle(config: JsonObject): Future[String] = { val options = DeploymentOptions() .setConfig(config) val deployFuture = vertx.deployVerticleFuture(ScalaVerticle.nameForVerticle[CacheVerticle], options) deployFuture.onComplete({ case Success(id) => logger.info(s"CacheVerticle deployed with ID $id") case Failure(e) => logger.error("CacheVerticle couldn't be deployed.", e) }) deployFuture } private def getStringDefault(config: JsonObject, field: String, default: String): String = { if (config.containsKey(field)) { config.getString(field) } else { logger.warn(s"No $field (config) was set. Use default '$default'.") default } } private def getIntDefault(config: JsonObject, field: String, default: Int): Int = { if (config.containsKey(field)) { config.getInteger(field).toInt } else { logger.warn(s"No $field (config) was set. Use default '$default'.") default } } private def vertxAccessContainer(): VertxAccess = new VertxAccess { override val vertx: Vertx = Starter.this.vertx } }
campudus/tableaux
src/main/scala/com/campudus/tableaux/Starter.scala
Scala
apache-2.0
5,776
package com.mfglabs.stream package extensions.postgres import java.io.File import java.util.concurrent.atomic.AtomicLong import akka.actor.ActorSystem import akka.stream.scaladsl._ import akka.util.ByteString import org.scalatest.time._ import org.scalatest._ import concurrent.ScalaFutures import akka.stream._ import scala.util.Try class PostgresExtensionsSpec extends FlatSpec with Matchers with ScalaFutures with BeforeAndAfterAll with DockerTmpDB { self:DockerTmpDB => implicit val as = ActorSystem() implicit val fm = ActorMaterializer() implicit override val patienceConfig = PatienceConfig(timeout = Span(5, Minutes), interval = Span(5, Millis)) "PgStream" should "stream a file to a Postgres table and stream a sql query from a Postgres table" in { val stmt = conn.createStatement() implicit val pgConn = PgStream.sqlConnAsPgConnUnsafe(conn) implicit val blockingEc = ExecutionContextForBlockingOps(scala.concurrent.ExecutionContext.Implicits.global) stmt.execute( s""" create table public.test_postgres( io_id integer, dsp_name text, advertiser_id integer, campaign_id integer, strategy_id integer, day date, impressions integer, clicks integer, post_view_conversions float8, post_click_conversions float8, media_cost float8, total_ad_cost float8, total_cost float8 ) """ ) val insertTable = "test_postgres(io_id, dsp_name, advertiser_id, campaign_id, strategy_id, day, impressions, " + "clicks, post_view_conversions, post_click_conversions, media_cost, total_ad_cost, total_cost)" val nbLinesInserted = new AtomicLong(0L) val futLines = SourceExt .fromFile(new File(getClass.getResource("/report.csv0000_part_00").getPath), maxChunkSize = 5 * 1024 * 1024) .via(FlowExt.rechunkByteStringBySeparator(ByteString("\\n"), maximumChunkBytes = 1 * 1024 * 1024)) .via(PgStream.insertStreamToTable("public", insertTable, Map("DELIMITER" -> "','"), pgVersion = self.version, chunkInsertionConcurrency = 2)) .via(FlowExt.fold(0L)(_ + _)) .map { total => nbLinesInserted.set(total) PgStream.getQueryResultAsStream("select * from public.test_postgres", Map("DELIMITER" -> "','"),pgVersion = self.version) } .flatMapConcat(identity) .via(FlowExt.rechunkByteStringBySize(5 * 1024 * 1024)) .via(FlowExt.rechunkByteStringBySeparator(ByteString("\\n"), maximumChunkBytes = 1 * 1024 * 1024)) .map(_.utf8String) .runWith(Sink.seq) val futExpectedLines = SourceExt .fromFile(new File(getClass.getResource("/report.csv0000_part_00").getPath), maxChunkSize = 5 * 1024 * 1024) .via(FlowExt.rechunkByteStringBySeparator(ByteString("\\n"), maximumChunkBytes = 1 * 1024 * 1024)) .map(_.utf8String) .runWith(Sink.seq) whenReady(futLines zip futExpectedLines) { case (lines, expectedLines) => lines.length shouldEqual expectedLines.length lines.length shouldEqual nbLinesInserted.get lines.sorted.zip(expectedLines.sorted).foreach { case (line, expectedLine) => line.split(",").map { s => Try(s.toDouble).map(BigDecimal(_).setScale(3, BigDecimal.RoundingMode.HALF_UP).toDouble).getOrElse(s) } shouldEqual expectedLine.split(",").map { s => Try(s.toDouble).map(BigDecimal(_).setScale(3, BigDecimal.RoundingMode.HALF_UP).toDouble).getOrElse(s) } } stmt.close() } } }
MfgLabs/akka-stream-extensions
extensions/postgres/src/test/scala/PostgresExtensionsSpec.scala
Scala
apache-2.0
3,529
package scorex.transaction import org.scalatest.prop.PropertyChecks import org.scalatest.{Matchers, PropSpec} import scorex.account.PrivateKeyAccount class TransactionSpecification extends PropSpec with PropertyChecks with Matchers with TransactionGen { property("transaction fields should be constructed in a right way") { forAll(bytes32gen, bytes32gen, positiveLongGen, positiveLongGen, positiveLongGen) { (senderSeed: Array[Byte], recipientSeed: Array[Byte], time: Long, amount: Long, fee: Long) => val sender = new PrivateKeyAccount(senderSeed) val recipient = new PrivateKeyAccount(recipientSeed) val tx = PaymentTransaction.create(sender, recipient, amount, fee, time).right.get tx.timestamp shouldEqual time tx.amount shouldEqual amount tx.fee shouldEqual fee tx.sender shouldEqual sender tx.recipient shouldEqual recipient } } property("bytes()/parse() roundtrip should preserve a transaction") { forAll(bytes32gen, bytes32gen, positiveLongGen, positiveLongGen, positiveLongGen) { (senderSeed: Array[Byte], recipientSeed: Array[Byte], time: Long, amount: Long, fee: Long) => val sender = new PrivateKeyAccount(senderSeed) val recipient = new PrivateKeyAccount(recipientSeed) val tx = PaymentTransaction.create(sender, recipient, amount, fee, time).right.get val txAfter = PaymentTransaction.parseBytes(tx.bytes).get txAfter.getClass.shouldBe(tx.getClass) tx.signature shouldEqual txAfter.signature tx.sender shouldEqual txAfter.asInstanceOf[PaymentTransaction].sender tx.recipient shouldEqual txAfter.recipient tx.timestamp shouldEqual txAfter.timestamp tx.amount shouldEqual txAfter.amount tx.fee shouldEqual txAfter.fee } } property("PaymentTransaction should deserialize to LagonakiTransaction") { forAll(bytes32gen, bytes32gen, positiveLongGen, positiveLongGen, positiveLongGen) { (senderSeed: Array[Byte], recipientSeed: Array[Byte], time: Long, amount: Long, fee: Long) => val sender = new PrivateKeyAccount(senderSeed) val recipient = new PrivateKeyAccount(recipientSeed) val tx = PaymentTransaction.create(sender, recipient, amount, fee, time).right.get val txAfter = TypedTransaction.parseBytes(tx.bytes).get.asInstanceOf[PaymentTransaction] txAfter.getClass.shouldBe(tx.getClass) tx.signature shouldEqual txAfter.signature tx.sender shouldEqual txAfter.asInstanceOf[PaymentTransaction].sender tx.recipient shouldEqual txAfter.recipient tx.timestamp shouldEqual txAfter.timestamp tx.amount shouldEqual txAfter.amount tx.fee shouldEqual txAfter.fee } } }
B83YPoj/Waves
src/test/scala/scorex/transaction/TransactionSpecification.scala
Scala
apache-2.0
2,780
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.columnar import org.apache.commons.lang3.StringUtils import org.apache.spark.network.util.JavaUtils import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.QueryPlan import org.apache.spark.sql.catalyst.plans.logical import org.apache.spark.sql.catalyst.plans.logical.{HintInfo, LogicalPlan, Statistics} import org.apache.spark.sql.execution.SparkPlan import org.apache.spark.storage.StorageLevel import org.apache.spark.util.{LongAccumulator, Utils} /** * CachedBatch is a cached batch of rows. * * @param numRows The total number of rows in this batch * @param buffers The buffers for serialized columns * @param stats The stat of columns */ private[columnar] case class CachedBatch(numRows: Int, buffers: Array[Array[Byte]], stats: InternalRow) case class CachedRDDBuilder( useCompression: Boolean, batchSize: Int, storageLevel: StorageLevel, @transient cachedPlan: SparkPlan, tableName: Option[String])( @transient private var _cachedColumnBuffers: RDD[CachedBatch] = null) { val sizeInBytesStats: LongAccumulator = cachedPlan.sqlContext.sparkContext.longAccumulator def cachedColumnBuffers: RDD[CachedBatch] = { if (_cachedColumnBuffers == null) { synchronized { if (_cachedColumnBuffers == null) { _cachedColumnBuffers = buildBuffers() } } } _cachedColumnBuffers } def clearCache(blocking: Boolean = true): Unit = { if (_cachedColumnBuffers != null) { synchronized { if (_cachedColumnBuffers != null) { _cachedColumnBuffers.unpersist(blocking) _cachedColumnBuffers = null } } } } def withCachedPlan(cachedPlan: SparkPlan): CachedRDDBuilder = { new CachedRDDBuilder( useCompression, batchSize, storageLevel, cachedPlan = cachedPlan, tableName )(_cachedColumnBuffers) } private def buildBuffers(): RDD[CachedBatch] = { val output = cachedPlan.output val cached = cachedPlan.execute().mapPartitionsInternal { rowIterator => new Iterator[CachedBatch] { def next(): CachedBatch = { val columnBuilders = output.map { attribute => ColumnBuilder(attribute.dataType, batchSize, attribute.name, useCompression) }.toArray var rowCount = 0 var totalSize = 0L while (rowIterator.hasNext && rowCount < batchSize && totalSize < ColumnBuilder.MAX_BATCH_SIZE_IN_BYTE) { val row = rowIterator.next() // Added for SPARK-6082. This assertion can be useful for scenarios when something // like Hive TRANSFORM is used. The external data generation script used in TRANSFORM // may result malformed rows, causing ArrayIndexOutOfBoundsException, which is somewhat // hard to decipher. assert( row.numFields == columnBuilders.length, s"Row column number mismatch, expected ${output.size} columns, " + s"but got ${row.numFields}." + s"\nRow content: $row") var i = 0 totalSize = 0 while (i < row.numFields) { columnBuilders(i).appendFrom(row, i) totalSize += columnBuilders(i).columnStats.sizeInBytes i += 1 } rowCount += 1 } sizeInBytesStats.add(totalSize) val stats = InternalRow.fromSeq( columnBuilders.flatMap(_.columnStats.collectedStatistics)) CachedBatch(rowCount, columnBuilders.map { builder => JavaUtils.bufferToArray(builder.build()) }, stats) } def hasNext: Boolean = rowIterator.hasNext } }.persist(storageLevel) cached.setName( tableName.map(n => s"In-memory table $n") .getOrElse(StringUtils.abbreviate(cachedPlan.toString, 1024))) cached } } object InMemoryRelation { def apply( useCompression: Boolean, batchSize: Int, storageLevel: StorageLevel, child: SparkPlan, tableName: Option[String], logicalPlan: LogicalPlan): InMemoryRelation = { val cacheBuilder = CachedRDDBuilder(useCompression, batchSize, storageLevel, child, tableName)() new InMemoryRelation(child.output, cacheBuilder)( statsOfPlanToCache = logicalPlan.stats, outputOrdering = logicalPlan.outputOrdering) } def apply(cacheBuilder: CachedRDDBuilder, logicalPlan: LogicalPlan): InMemoryRelation = { new InMemoryRelation(cacheBuilder.cachedPlan.output, cacheBuilder)( statsOfPlanToCache = logicalPlan.stats, outputOrdering = logicalPlan.outputOrdering) } } case class InMemoryRelation( output: Seq[Attribute], @transient cacheBuilder: CachedRDDBuilder)( statsOfPlanToCache: Statistics, override val outputOrdering: Seq[SortOrder]) extends logical.LeafNode with MultiInstanceRelation { override protected def innerChildren: Seq[SparkPlan] = Seq(cachedPlan) override def doCanonicalize(): logical.LogicalPlan = copy(output = output.map(QueryPlan.normalizeExprId(_, cachedPlan.output)), cacheBuilder)( statsOfPlanToCache, outputOrdering) override def producedAttributes: AttributeSet = outputSet @transient val partitionStatistics = new PartitionStatistics(output) def cachedPlan: SparkPlan = cacheBuilder.cachedPlan override def computeStats(): Statistics = { if (cacheBuilder.sizeInBytesStats.value == 0L) { // Underlying columnar RDD hasn't been materialized, use the stats from the plan to cache. // Note that we should drop the hint info here. We may cache a plan whose root node is a hint // node. When we lookup the cache with a semantically same plan without hint info, the plan // returned by cache lookup should not have hint info. If we lookup the cache with a // semantically same plan with a different hint info, `CacheManager.useCachedData` will take // care of it and retain the hint info in the lookup input plan. statsOfPlanToCache.copy(hints = HintInfo()) } else { Statistics(sizeInBytes = cacheBuilder.sizeInBytesStats.value.longValue) } } def withOutput(newOutput: Seq[Attribute]): InMemoryRelation = { InMemoryRelation(newOutput, cacheBuilder)(statsOfPlanToCache, outputOrdering) } override def newInstance(): this.type = { new InMemoryRelation( output.map(_.newInstance()), cacheBuilder)( statsOfPlanToCache, outputOrdering).asInstanceOf[this.type] } override protected def otherCopyArgs: Seq[AnyRef] = Seq(statsOfPlanToCache) override def simpleString: String = s"InMemoryRelation [${Utils.truncatedString(output, ", ")}], ${cacheBuilder.storageLevel}" }
michalsenkyr/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala
Scala
apache-2.0
7,772
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy import java.io._ import java.net.URL import java.util.concurrent.TimeoutException import scala.collection.mutable.ListBuffer import scala.concurrent.{Await, future, promise} import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.language.postfixOps import scala.sys.process._ import org.json4s._ import org.json4s.jackson.JsonMethods import org.apache.spark.{Logging, SparkConf, SparkContext} import org.apache.spark.deploy.master.RecoveryState import org.apache.spark.util.Utils /** * This suite tests the fault tolerance of the Spark standalone scheduler, mainly the Master. * In order to mimic a real distributed cluster more closely, Docker is used. * Execute using * ./bin/spark-class org.apache.spark.deploy.FaultToleranceTest * * Make sure that that the environment includes the following properties in SPARK_DAEMON_JAVA_OPTS * *and* SPARK_JAVA_OPTS: * - spark.deploy.recoveryMode=ZOOKEEPER * - spark.deploy.zookeeper.url=172.17.42.1:2181 * Note that 172.17.42.1 is the default docker ip for the host and 2181 is the default ZK port. * * In case of failure, make sure to kill off prior docker containers before restarting: * docker kill $(docker ps -q) * * Unfortunately, due to the Docker dependency this suite cannot be run automatically without a * working installation of Docker. In addition to having Docker, the following are assumed: * - Docker can run without sudo (see http://docs.docker.io/en/latest/use/basics/) * - The docker images tagged spark-test-master and spark-test-worker are built from the * docker/ directory. Run 'docker/spark-test/build' to generate these. */ private object FaultToleranceTest extends App with Logging { private val conf = new SparkConf() private val ZK_DIR = conf.get("spark.deploy.zookeeper.dir", "/spark") private val masters = ListBuffer[TestMasterInfo]() private val workers = ListBuffer[TestWorkerInfo]() private var sc: SparkContext = _ private val zk = SparkCuratorUtil.newClient(conf) private var numPassed = 0 private var numFailed = 0 private val sparkHome = System.getenv("SPARK_HOME") assertTrue(sparkHome != null, "Run with a valid SPARK_HOME") private val containerSparkHome = "/opt/spark" private val dockerMountDir = "%s:%s".format(sparkHome, containerSparkHome) System.setProperty("spark.driver.host", "172.17.42.1") // default docker host ip private def afterEach() { if (sc != null) { sc.stop() sc = null } terminateCluster() // Clear ZK directories in between tests (for speed purposes) SparkCuratorUtil.deleteRecursive(zk, ZK_DIR + "/spark_leader") SparkCuratorUtil.deleteRecursive(zk, ZK_DIR + "/master_status") } test("sanity-basic") { addMasters(1) addWorkers(1) createClient() assertValidClusterState() } test("sanity-many-masters") { addMasters(3) addWorkers(3) createClient() assertValidClusterState() } test("single-master-halt") { addMasters(3) addWorkers(2) createClient() assertValidClusterState() killLeader() delay(30 seconds) assertValidClusterState() createClient() assertValidClusterState() } test("single-master-restart") { addMasters(1) addWorkers(2) createClient() assertValidClusterState() killLeader() addMasters(1) delay(30 seconds) assertValidClusterState() killLeader() addMasters(1) delay(30 seconds) assertValidClusterState() } test("cluster-failure") { addMasters(2) addWorkers(2) createClient() assertValidClusterState() terminateCluster() addMasters(2) addWorkers(2) assertValidClusterState() } test("all-but-standby-failure") { addMasters(2) addWorkers(2) createClient() assertValidClusterState() killLeader() workers.foreach(_.kill()) workers.clear() delay(30 seconds) addWorkers(2) assertValidClusterState() } test("rolling-outage") { addMasters(1) delay() addMasters(1) delay() addMasters(1) addWorkers(2) createClient() assertValidClusterState() assertTrue(getLeader == masters.head) (1 to 3).foreach { _ => killLeader() delay(30 seconds) assertValidClusterState() assertTrue(getLeader == masters.head) addMasters(1) } } private def test(name: String)(fn: => Unit) { try { fn numPassed += 1 logInfo("==============================================") logInfo("Passed: " + name) logInfo("==============================================") } catch { case e: Exception => numFailed += 1 logInfo("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") logError("FAILED: " + name, e) logInfo("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") sys.exit(1) } afterEach() } private def addMasters(num: Int) { logInfo(s">>>>> ADD MASTERS $num <<<<<") (1 to num).foreach { _ => masters += SparkDocker.startMaster(dockerMountDir) } } private def addWorkers(num: Int) { logInfo(s">>>>> ADD WORKERS $num <<<<<") val masterUrls = getMasterUrls(masters) (1 to num).foreach { _ => workers += SparkDocker.startWorker(dockerMountDir, masterUrls) } } /** Creates a SparkContext, which constructs a Client to interact with our cluster. */ private def createClient() = { logInfo(">>>>> CREATE CLIENT <<<<<") if (sc != null) { sc.stop() } // Counter-hack: Because of a hack in SparkEnv#create() that changes this // property, we need to reset it. System.setProperty("spark.driver.port", "0") sc = new SparkContext(getMasterUrls(masters), "fault-tolerance", containerSparkHome) } private def getMasterUrls(masters: Seq[TestMasterInfo]): String = { "spark://" + masters.map(master => master.ip + ":7077").mkString(",") } private def getLeader: TestMasterInfo = { val leaders = masters.filter(_.state == RecoveryState.ALIVE) assertTrue(leaders.size == 1) leaders(0) } private def killLeader(): Unit = { logInfo(">>>>> KILL LEADER <<<<<") masters.foreach(_.readState()) val leader = getLeader masters -= leader leader.kill() } private def delay(secs: Duration = 5.seconds) = Thread.sleep(secs.toMillis) private def terminateCluster() { logInfo(">>>>> TERMINATE CLUSTER <<<<<") masters.foreach(_.kill()) workers.foreach(_.kill()) masters.clear() workers.clear() } /** This includes Client retry logic, so it may take a while if the cluster is recovering. */ private def assertUsable() = { val f = future { try { val res = sc.parallelize(0 until 10).collect() assertTrue(res.toList == (0 until 10)) true } catch { case e: Exception => logError("assertUsable() had exception", e) e.printStackTrace() false } } // Avoid waiting indefinitely (e.g., we could register but get no executors). assertTrue(Await.result(f, 120 seconds)) } /** * Asserts that the cluster is usable and that the expected masters and workers * are all alive in a proper configuration (e.g., only one leader). */ private def assertValidClusterState() = { logInfo(">>>>> ASSERT VALID CLUSTER STATE <<<<<") assertUsable() var numAlive = 0 var numStandby = 0 var numLiveApps = 0 var liveWorkerIPs: Seq[String] = List() def stateValid(): Boolean = { (workers.map(_.ip) -- liveWorkerIPs).isEmpty && numAlive == 1 && numStandby == masters.size - 1 && numLiveApps >= 1 } val f = future { try { while (!stateValid()) { Thread.sleep(1000) numAlive = 0 numStandby = 0 numLiveApps = 0 masters.foreach(_.readState()) for (master <- masters) { master.state match { case RecoveryState.ALIVE => numAlive += 1 liveWorkerIPs = master.liveWorkerIPs case RecoveryState.STANDBY => numStandby += 1 case _ => // ignore } numLiveApps += master.numLiveApps } } true } catch { case e: Exception => logError("assertValidClusterState() had exception", e) false } } try { assertTrue(Await.result(f, 120 seconds)) } catch { case e: TimeoutException => logError("Master states: " + masters.map(_.state)) logError("Num apps: " + numLiveApps) logError("IPs expected: " + workers.map(_.ip) + " / found: " + liveWorkerIPs) throw new RuntimeException("Failed to get into acceptable cluster state after 2 min.", e) } } private def assertTrue(bool: Boolean, message: String = "") { if (!bool) { throw new IllegalStateException("Assertion failed: " + message) } } logInfo("Ran %s tests, %s passed and %s failed".format(numPassed + numFailed, numPassed, numFailed)) } private class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File) extends Logging { implicit val formats = org.json4s.DefaultFormats var state: RecoveryState.Value = _ var liveWorkerIPs: List[String] = _ var numLiveApps = 0 logDebug("Created master: " + this) def readState() { try { val masterStream = new InputStreamReader(new URL("http://%s:8080/json".format(ip)).openStream) val json = JsonMethods.parse(masterStream) val workers = json \\ "workers" val liveWorkers = workers.children.filter(w => (w \\ "state").extract[String] == "ALIVE") // Extract the worker IP from "webuiaddress" (rather than "host") because the host name // on containers is a weird hash instead of the actual IP address. liveWorkerIPs = liveWorkers.map { w => (w \\ "webuiaddress").extract[String].stripPrefix("http://").stripSuffix(":8081") } numLiveApps = (json \\ "activeapps").children.size val status = json \\\\ "status" val stateString = status.extract[String] state = RecoveryState.values.filter(state => state.toString == stateString).head } catch { case e: Exception => // ignore, no state update logWarning("Exception", e) } } def kill() { Docker.kill(dockerId) } override def toString: String = "[ip=%s, id=%s, logFile=%s, state=%s]". format(ip, dockerId.id, logFile.getAbsolutePath, state) } private class TestWorkerInfo(val ip: String, val dockerId: DockerId, val logFile: File) extends Logging { implicit val formats = org.json4s.DefaultFormats logDebug("Created worker: " + this) def kill() { Docker.kill(dockerId) } override def toString: String = "[ip=%s, id=%s, logFile=%s]".format(ip, dockerId, logFile.getAbsolutePath) } private object SparkDocker { def startMaster(mountDir: String): TestMasterInfo = { val cmd = Docker.makeRunCmd("spark-test-master", mountDir = mountDir) val (ip, id, outFile) = startNode(cmd) new TestMasterInfo(ip, id, outFile) } def startWorker(mountDir: String, masters: String): TestWorkerInfo = { val cmd = Docker.makeRunCmd("spark-test-worker", args = masters, mountDir = mountDir) val (ip, id, outFile) = startNode(cmd) new TestWorkerInfo(ip, id, outFile) } private def startNode(dockerCmd: ProcessBuilder) : (String, DockerId, File) = { val ipPromise = promise[String]() val outFile = File.createTempFile("fault-tolerance-test", "", Utils.createTempDir()) val outStream: FileWriter = new FileWriter(outFile) def findIpAndLog(line: String): Unit = { if (line.startsWith("CONTAINER_IP=")) { val ip = line.split("=")(1) ipPromise.success(ip) } outStream.write(line + "\\n") outStream.flush() } dockerCmd.run(ProcessLogger(findIpAndLog _)) val ip = Await.result(ipPromise.future, 30 seconds) val dockerId = Docker.getLastProcessId (ip, dockerId, outFile) } } private class DockerId(val id: String) { override def toString: String = id } private object Docker extends Logging { def makeRunCmd(imageTag: String, args: String = "", mountDir: String = ""): ProcessBuilder = { val mountCmd = if (mountDir != "") { " -v " + mountDir } else "" val cmd = "docker run -privileged %s %s %s".format(mountCmd, imageTag, args) logDebug("Run command: " + cmd) cmd } def kill(dockerId: DockerId) : Unit = { "docker kill %s".format(dockerId.id).! } def getLastProcessId: DockerId = { var id: String = null "docker ps -l -q".!(ProcessLogger(line => id = line)) new DockerId(id) } }
ArvinDevel/onlineAggregationOnSparkV2
core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
Scala
apache-2.0
13,575
/* * Copyright 2021 ACINQ SAS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.acinq.eclair.channel.publish import akka.actor.typed.scaladsl.{ActorContext, Behaviors, TimerScheduler} import akka.actor.typed.{ActorRef, Behavior} import fr.acinq.eclair.NodeParams import fr.acinq.eclair.blockchain.bitcoind.ZmqWatcher import fr.acinq.eclair.blockchain.bitcoind.rpc.BitcoinCoreClient import fr.acinq.eclair.channel.publish.TxPublisher.TxPublishContext import fr.acinq.eclair.channel.publish.TxTimeLocksMonitor.CheckTx import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationLong import scala.util.{Failure, Random, Success} /** * Created by t-bast on 10/06/2021. */ /** * This actor publishes a fully signed transaction without modifying it. * It waits for confirmation or failure before reporting back to the requesting actor. */ object FinalTxPublisher { // @formatter:off sealed trait Command case class Publish(replyTo: ActorRef[TxPublisher.PublishTxResult], cmd: TxPublisher.PublishFinalTx) extends Command private case object TimeLocksOk extends Command private case object CheckParentTx extends Command private case object ParentTxOk extends Command private case object ParentTxMissing extends Command private case class WrappedTxResult(result: MempoolTxMonitor.TxResult) extends Command private case class UnknownFailure(reason: Throwable) extends Command case object Stop extends Command // @formatter:on def apply(nodeParams: NodeParams, bitcoinClient: BitcoinCoreClient, watcher: ActorRef[ZmqWatcher.Command], txPublishContext: TxPublishContext): Behavior[Command] = { Behaviors.setup { context => Behaviors.withTimers { timers => Behaviors.withMdc(txPublishContext.mdc()) { Behaviors.receiveMessagePartial { case Publish(replyTo, cmd) => new FinalTxPublisher(nodeParams, replyTo, cmd, bitcoinClient, watcher, context, timers, txPublishContext).checkTimeLocks() case Stop => Behaviors.stopped } } } } } } private class FinalTxPublisher(nodeParams: NodeParams, replyTo: ActorRef[TxPublisher.PublishTxResult], cmd: TxPublisher.PublishFinalTx, bitcoinClient: BitcoinCoreClient, watcher: ActorRef[ZmqWatcher.Command], context: ActorContext[FinalTxPublisher.Command], timers: TimerScheduler[FinalTxPublisher.Command], txPublishContext: TxPublishContext)(implicit ec: ExecutionContext = ExecutionContext.Implicits.global) { import FinalTxPublisher._ private val log = context.log def checkTimeLocks(): Behavior[Command] = { val timeLocksChecker = context.spawn(TxTimeLocksMonitor(nodeParams, watcher, txPublishContext), "time-locks-monitor") timeLocksChecker ! CheckTx(context.messageAdapter[TxTimeLocksMonitor.TimeLocksOk](_ => TimeLocksOk), cmd.tx, cmd.desc) Behaviors.receiveMessagePartial { case TimeLocksOk => checkParentPublished() case Stop => Behaviors.stopped } } def checkParentPublished(): Behavior[Command] = { cmd.parentTx_opt match { case Some(parentTxId) => context.self ! CheckParentTx Behaviors.receiveMessagePartial { case CheckParentTx => context.pipeToSelf(bitcoinClient.getTxConfirmations(parentTxId)) { case Success(Some(_)) => ParentTxOk case Success(None) => ParentTxMissing case Failure(reason) => UnknownFailure(reason) } Behaviors.same case ParentTxOk => publish() case ParentTxMissing => log.debug("parent tx is missing, retrying after delay...") timers.startSingleTimer(CheckParentTx, (1 + Random.nextLong(nodeParams.channelConf.maxTxPublishRetryDelay.toMillis)).millis) Behaviors.same case UnknownFailure(reason) => log.error("could not check parent tx: ", reason) sendResult(TxPublisher.TxRejected(txPublishContext.id, cmd, TxPublisher.TxRejectedReason.UnknownTxFailure)) case Stop => Behaviors.stopped } case None => publish() } } def publish(): Behavior[Command] = { val txMonitor = context.spawn(MempoolTxMonitor(nodeParams, bitcoinClient, txPublishContext), "mempool-tx-monitor") txMonitor ! MempoolTxMonitor.Publish(context.messageAdapter[MempoolTxMonitor.TxResult](WrappedTxResult), cmd.tx, cmd.input, cmd.desc, cmd.fee) Behaviors.receiveMessagePartial { case WrappedTxResult(txResult) => txResult match { case _: MempoolTxMonitor.IntermediateTxResult => Behaviors.same case MempoolTxMonitor.TxRejected(_, reason) => sendResult(TxPublisher.TxRejected(txPublishContext.id, cmd, reason)) case MempoolTxMonitor.TxDeeplyBuried(tx) => sendResult(TxPublisher.TxConfirmed(cmd, tx)) } case Stop => Behaviors.stopped } } def sendResult(result: TxPublisher.PublishTxResult): Behavior[Command] = { replyTo ! result Behaviors.receiveMessagePartial { case Stop => Behaviors.stopped } } }
ACINQ/eclair
eclair-core/src/main/scala/fr/acinq/eclair/channel/publish/FinalTxPublisher.scala
Scala
apache-2.0
5,832
package debop4s.core.compress import debop4s.core.AbstractCoreFunSuite /** * debop4s.core.tests.compress.CompressTest * @author 배성혁 sunghyouk.bae@gmail.com * @since 2013. 12. 10. 오후 9:31 */ class CompressorFunSuite extends AbstractCoreFunSuite { test("gzip test") { val gzip = new GZipCompressor() compressorTest(gzip) } test("deflate test") { val deflater = new DeflateCompressor() compressorTest(deflater) } test("snappy test") { val snappy = new SnappyCompressor() compressorTest(snappy) } private def compressorTest(compressor: Compressor) { log.debug(s"Compressor test: compress=${ compressor.getClass.getSimpleName }") assert(compressor.compress(null) == Array.emptyByteArray) assert(compressor.compress(Array.emptyByteArray) == Array.emptyByteArray) val text = "동해물과 백두산이 마르고 닳도록 Hello World! " * 100 val compressedBytes = compressor.compress(text.getBytes("UTF-8")) assert(compressedBytes != null) assert(compressedBytes != Array.emptyByteArray) val textBytes = compressor.decompress(compressedBytes) assert(textBytes != null) assert(textBytes != Array.emptyByteArray) val text2 = new String(textBytes, "UTF-8") assert(text2 != null) assert(text2 == text) } }
debop/debop4s
debop4s-core/src/test/scala/debop4s/core/compress/CompressorFunSuite.scala
Scala
apache-2.0
1,317
/* * Copyright (c) Microsoft. All rights reserved. * Licensed under the MIT license. See LICENSE file in the project root for full license information. */ package org.apache.spark.sql.api.csharp import java.util import org.apache.spark.SparkConf import scala.collection.JavaConverters._ /* * Utils for JvmBridge */ object JvmBridgeUtils { /* * Converts Java HashMap to Scala mutalbe Map */ def toMutableMap[K, V](map: util.HashMap[K, V]) : Map[K, V] = { map.asScala.toMap } def getKeyValuePairAsString(kvp: Tuple2[String, String]) : String = { return kvp._1 + "=" + kvp._2 } def getKeyValuePairArrayAsString(kvpArray : Array[Tuple2[String, String]]) : String = { val sb = new StringBuilder for(kvp <- kvpArray) { sb.append(getKeyValuePairAsString(kvp)) sb.append(";") } sb.toString } def getSparkConfAsString(sparkConf: SparkConf): String = { getKeyValuePairArrayAsString(sparkConf.getAll) } }
hebinhuang/Mobius
scala/src/main/org/apache/spark/sql/api/csharp/JvmBridgeUtils.scala
Scala
mit
971
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ai.h2o.sparkling.backend.api.scalainterpreter import ai.h2o.sparkling.backend.api.ParameterBase import javax.servlet.http.HttpServletRequest; /** * This object is returned by jobs executing the Scala code */ case class ScalaCodeResult(code: String, scalaStatus: String, scalaResponse: String, scalaOutput: String) object ScalaCodeResult extends ParameterBase { private[scalainterpreter] case class ScalaCodeResultParameters(resultKey: String) { def validate(): Unit = {} } object ScalaCodeResultParameters { private[scalainterpreter] def parse(request: HttpServletRequest): ScalaCodeResultParameters = { val resultKey = request.getPathInfo.drop(1).split("/")(1) ScalaCodeResultParameters(resultKey) } } }
h2oai/sparkling-water
core/src/main/scala/ai/h2o/sparkling/backend/api/scalainterpreter/ScalaCodeResult.scala
Scala
apache-2.0
1,555
sealed trait Bool case object True extends Bool case object False extends Bool
hmemcpy/milewski-ctfp-pdf
src/content/1.6/code/scala/snippet25.scala
Scala
gpl-3.0
78
import java.lang.IllegalArgumentException import common._ import barneshut.conctrees._ package object barneshut { /** `Boundaries` determine the size of the scene into which all bodies fit. * It starts as a very large 2D Cartesian plane, with X increasing left to right and Y * increasing top to bottom. * * NOTE: We initialize backwards so we can always use `math.min()` to update the `boundaries.min` * values and `math.max` to update the `boundaries.max` values. */ class Boundaries { /** The X coordinate of the left side of the scene. */ var minX = Float.MaxValue /** The Y coordinate of the top side of the scene. */ var minY = Float.MaxValue /** The X coordinate of the right side of the scene. */ var maxX = Float.MinValue /** The Y coordinate of the bottom side of the scene. */ var maxY = Float.MinValue /** The length of the top and bottom edges of the scene. */ def width = maxX - minX /** The length of the left and right edges of the scene. */ def height = maxY - minY /** The length of a side of the scene, the maximum of `width` and `height`. Which begs the * question: "what actually enforces the scene's squareness?" "What happens when `width` * or `height` doesn't equal `size`??? */ def size = math.max(width, height) /** The X coordinate of the center of the scene in 2D space. */ def centerX = minX + width / 2 /** The Y coordinate of the center of the scene in 2D space. */ def centerY = minY + height / 2 /** Prints `Boundaries` by including their `minX`, `minY`, `maxX`, and `maxY` coordinates. */ override def toString = s"Boundaries($minX, $minY, $maxX, $maxY)" } /** A QuadTree is used to divide the 2D cartesian plane into square cells, one for each quadrant. * (Hence the name.) This allows the simulation to approximate mass at a distance as 1 big `Body` * with the mass of all the actual bodies in the cluster, positioned at the center of mass of the * cluster. * * The cell origin (0,0) is the top left corner. The X axis increases to the right and the Y axis * increases downward. Negative X and Y coordinates make no sense in a cell. * * QuadTrees are made from 3 different kinds of data types: `Empty`, `Leaf`, and `Fork`. */ sealed abstract class Quad { /** The X coordinate of the center of mass of the bodies in the represented cell. */ def massX: Float /** The Y coordinate of the center of mass of the bodies in the represented cell. */ def massY: Float /** The total mass of the bodies in the represented cell. */ def mass: Float /** The X coordinate of the center of the represented cell in 2D space. */ def centerX: Float /** The Y coordinate of the center of the represented cell in 2D space. */ def centerY: Float /** The length of a side of the represented cell. The cell is square. */ def size: Float /** The number of bodies contained in the represented cell. */ def total: Int /** Add Body `b` to the represented cell without changing the area in space the cell covers. * * @param b The Body to add to the represented cell. * @return A new QuadTree that contains what the old QuadTree contained plus `b`. */ def insert(b: Body): Quad } /** `Empty` is an empty QuadTree. It has no mass, no bodies, and the center of mass is the cell's * center in space. * * @param centerX The X coordinate of the cell's center in space. * @param centerY The Y coordinate of the cell's center in space. * @param size The length of each side of the cell. */ case class Empty(centerX: Float, centerY: Float, size: Float) extends Quad { /** The X coordinate of the center of mass is always the X coordinate of the center of the cell. */ def massX: Float = centerX /** The Y coordinate of the center of mass is always the Y coordinate of the center of the cell. */ def massY: Float = centerY /** The total mass of an `Empty` is always 0. */ def mass: Float = 0f /** There are no bodies in an `Empty`. */ def total: Int = 0 /** Add `Body` `b` to an `Empty` and you get a `Leaf` back. * * @param b The `Body` to add to the represented cell. * @return A new QuadTree that contains just `b`. */ def insert(b: Body): Quad = Leaf(centerX, centerY, size, Seq(b)) } /** `Fork` represents 4 quadrants: `nw`, `ne`, `sw` and `se`, each of which is itself a QuadTree. * If you divide the cell into 4 sub-cells at the center of the cell, these 4 QuadTrees are the bodies * in those 4 sub-cells. * * The constructor assumes these 4 sub-cells are the same size and adjacent to each other, and laid out * as described cover the space of this cell. * * @param nw The QuadTree in the top left of the cell. * @param ne The QuadTree in the top right of the cell. * @param sw The QuadTree in the bottom left of the cell. * @param se The QuadTree in the bottom right of the cell. */ case class Fork( nw: Quad, ne: Quad, sw: Quad, se: Quad ) extends Quad { // The sub-cells are supposed to be square, which means all their sizes must be equal. require(nw.size == ne.size && ne.size == sw.size && sw.size == se.size && se.size == nw.size, "Attempted to create a QuadTree out of sub-cells that aren't equal in size.") /** The X coordinate of the center is half-way between the nw and ne cells' center. */ // val centerX: Float =(quads.map(_.centerX).min + quads.map(_.centerX).max) / 2f val centerX: Float = (nw.centerX + ne.centerX) / 2f /** The Y coordinate of the center is half-way between the nw and sw cells' center. */ //val centerY: Float =(quads.map(_.centerY).min + quads.map(_.centerY).max) / 2f val centerY: Float = (nw.centerY + sw.centerY) / 2f /** The `size` of this cell is double the size of any sub-cell. Above I enforce "squareness". */ val size: Float = 2f * nw.size /** `mass` is the total mass of all the quads. */ val mass: Float = nw.mass + ne.mass + sw.mass + se.mass /** The center of mass X coordinate is `centerX` if `mass` is zero. Otherwise compute the formula * from the homework assignment: (for each quad sum(mass * massX))/ mass * I could get cute and use a fold, but there are only 4 quads and writing it out is easier to follow. */ val massX: Float = { if (mass == 0.0) centerX else (nw.mass*nw.massX + ne.mass*ne.massX + sw.mass*sw.massX + se.mass*se.massX)/mass } /** The center of mass Y coordinate is `centerY` if `mass` is zero. Otherwise compute the formula * from the homework assignment: (for each quad sum(mass * massY))/ mass * I could get cute and use a fold, but there are only 4 quads and writing it out is easier to follow. */ val massY: Float = { if (mass == 0.0) centerY else (nw.mass*nw.massY + ne.mass*ne.massY + sw.mass*sw.massY + se.mass*se.massY)/mass } /** The overall `total` is just the totals of each sub-cell. */ val total: Int = nw.total + ne.total + sw.total + se.total /** Add Body `b` into the bodies we already have. We have to figure out which sub-cell `b` will be in, then * create a new `Fork` with `b` inserted into that sub-cell. * * `nw` occupies left of `centerX` and above `centerY`. * `ne` occupies right of `centerX` and above `centerY`. * `sw` occupies left of `centerX` and below `centerY`. * `se` occupies right of `centerX` and below `centerY`. * * @param b The Body to add to the represented cell. * @return A new QuadTree that contains what the old QuadTree contained plus `b`. */ def insert(b: Body): Fork = { if (b.x < centerX && b.y < centerY) Fork(nw.insert(b), ne, sw, se) else if (!(b.x < centerX) && b.y < centerY) Fork(nw, ne.insert(b), sw, se) else if (b.x < centerY && !(b.y < centerY)) Fork(nw, ne, sw.insert(b), se) else Fork(nw, nw, sw, se.insert(b)) } } /** A `Leaf` is used when the size of an edge of the cell is <= `minimumSize`. Essentially this * stops the recursive build of the QuadTree. Even in such a small space, a `Leaf` can hold * quite a few `bodies`, so don't assume a `Leaf` has just one `Body` in it. * * @param centerX The X coordinate of the cell's center in space (not necessarily center of mass). * @param centerY The Y coordinate of the cell's center in space (not necessarily center of mass). * @param size The length of each side of this `Leaf` cell. * @param bodies The sequence of `Body` objects covered by this cell. */ case class Leaf(centerX: Float, centerY: Float, size: Float, bodies: Seq[Body]) extends Quad { /** `mass` is the sum of the masses in `bodies`. `bodies.map(_.mass).sum` also works, but less efficiently. */ val mass: Float = bodies.foldLeft(0f){ (sum, b) => sum + b.mass } /** `massX` is the X coordinate of the center of mass of this Leaf. * Calculate it by summing each body's mass*X position, then divide the sum by `mass`. * `bodies.map(b => b.mass*b.x).sum / mass` also works. (but does 2 passes thru `bodies`) */ val massX: Float = bodies.foldLeft(0f){ (sum, b) => sum + b.mass*b.x } / mass /** `massY` is the Y coordinate of the center of mass of this Leaf. * Calculate it by summing each body's mass*Y position, then divide the sum by `mass`. * `bodies.map(b => b.mass*b.y).sum / mass` also works (but does 2 passes thru `bodies`) */ val massY: Float = bodies.foldLeft(0f){ (sum, b) => sum + b.mass*b.y } / mass /** `total` is just the number of Body objects we have. */ val total: Int = bodies.length /** Add `b` to our existing `bodies`. If the size of a `Leaf` is greater than a predefined * `minimumSize`, inserting an additional body into that `Leaf` QuadTree creates a `Fork` QuadTree * with empty children, and adds all the `bodies` into that `Fork` (including the new body `b`). * Otherwise, inserting creates another `Leaf` with all the existing `bodies` and the new one. * * The tricky part is making a `Leaf` vs. making a `Fork`. To make the `Fork` we have to make * each of the sub-cells then spread `b` + `bodies` amongst them. * * @param b The Body to add to the represented cell. * @return A new QuadTree that contains what the old QuadTree contained plus `b`. */ def insert(b: Body): Quad = { val new_bodies = bodies :+ b if (size > minimumSize) { // Figure out where the centers of the 4 sub-cells will be. Not the edges, the center // coordinates of the 4 sub-cells in space. Remember that `size` spans 2 sub-cells, // so half_size is 1 sub-cell and quarter_size is the offset of the center in that 1 sub-cell. val half_size = size / 2f val quarter_size = size / 4f val leftX: Float = centerX - quarter_size val rightX: Float = centerX + quarter_size val topY: Float = centerY - quarter_size val bottomY: Float = centerY + quarter_size // nw centered at (leftX, topY); ne centered at (rightX, topY); // sw centered at (leftX, bottomY); se centered at (rightX, bottomY). // Make a new `Fork` that's all empty. Then insert each of the // bodies in turn, including b, into the new `Fork` and return it. val fork: Fork = Fork(Empty(leftX, topY, half_size), // nw Empty(rightX, topY, half_size), // ne Empty(leftX, bottomY, half_size), // sw Empty(rightX, bottomY, half_size) // se ) new_bodies.foldLeft(fork){ (f, body) => f.insert(body) } } else Leaf(centerX, centerY, size, new_bodies) } } /** Specifies the cell edge length that decides between making a `Leaf` and a `Fork`. A `Leaf` has * an edge length no bigger than `minimumSize`. (This is the recursion threshold value.) */ def minimumSize = 0.00001f /** The gravitational constant `G`. Not sure what measurement system though; * I learned 32 ft/sec^2^ or 9.8 m/sec^2^ for Earth, but those are little `g`. Maybe it is a * rounding of 98 dm/sec^2^? But who talks in decimeters??? */ def gee: Float = 100.0f /** The simulated change in time (in seconds) for a single algorithm iteration. */ def deltaT: Float = 0.01f /** The threshold for deciding if a QuadTree is "far enough away to approximate with a single point". * Specifically, if `quad.size / distance to quad center of mass < theta`, the QuadTree is far enough * to approximate with a single point. */ def theta = 0.5f /** Defines what "far enough away" means for eliminating bodies moving fast enough to leave the scene * and never make it back. `eliminationThreshold` is applied to the size of the current scene, and a * body farther away than `eliminationThreshold * boundaries.size` is a candidate for eliminating. */ def eliminationThreshold = 0.5f /** Returns the force of attraction between two masses `m1` and `m2` at distance `r` between them. Unlike * in real physics, we are not negating the value to show attraction. So this is the magnitude of the * force vector between the 2 objects. */ def force(m1: Float, m2: Float, r: Float): Float = gee * m1 * m2 / (r * r) /** Returns the `distance` between 2 Cartesian points `(x0, y0)` and `(x1, y1)`. */ def distance(x0: Float, y0: Float, x1: Float, y1: Float): Float = { math.sqrt((x1 - x0) * (x1 - x0) + (y1 - y0) * (y1 - y0)).toFloat } /** `Body` represents an object with mass in our 2D Cartesian plane. * * @param mass The mass of the object. * @param x The X coordinate of this object in 2D space. * @param y The Y coordinate of this object in 2D space. * @param xspeed How fast the X coordinate is changing. * @param yspeed How fast the Y coordinate is changing. */ class Body(val mass: Float, val x: Float, val y: Float, val xspeed: Float, val yspeed: Float) { /** Returns `true` if `quad` is far enough away from this body to approximate the affect of * `quad` with a single point at `(quad.massX, quad.massY)` of mass `quad.mass`. * * @param quad The QuadTree being compared to this body's position. * @return `true` if it is OK to approximate `quad` and `false` if not. */ def isFarEnoughAway(quad: Quad): Boolean = quad.size/distance(x, y, quad.massX, quad.massY) < theta /** Apply the QuadTree `quad`, which represents the bodies that can act gravitationally on this body, * to this body and return an updated version of the body. * * @param quad The QuadTree describing this Body's surroundings. * @return A new Body that applies the forces from the surroundings. */ def updated(quad: Quad): Body = { // Accumulates the magnitude of the force vector applied on this body by `quad`. // `|force|` = `(netforcex, netforcey)`. var netforcex = 0.0f var netforcey = 0.0f /** If the distance is smaller than 1f, we enter the realm of close * body interactions. Since we do not model them in this simplistic * implementation, bodies at extreme proximities get a huge acceleration, * and are catapulted from each other's gravitational pull at extreme * velocities (something like this: * http://en.wikipedia.org/wiki/Interplanetary_spaceflight#Gravitational_slingshot). * To decrease the effect of this gravitational slingshot, as a very * simple approximation, we ignore gravity at extreme proximities. */ def addForce(thatMass: Float, thatMassX: Float, thatMassY: Float): Unit = { val dist = distance(thatMassX, thatMassY, x, y) if (dist > 1f) { val dforce = force(mass, thatMass, dist) val xn = (thatMassX - x) / dist val yn = (thatMassY - y) / dist val dforcex = dforce * xn val dforcey = dforce * yn netforcex += dforcex netforcey += dforcey } } /** Examine `quad` and apply the forces each `body` in it represents to this body. */ def traverse(quad: Quad): Unit = (quad: Quad) match { case Empty(_, _, _) => // no force so no change in position. case Leaf(_, _, _, bodies) => // add force contribution of each body by calling addForce bodies.foreach{ b: Body => addForce(b.mass, b.x, b.y) } case Fork(nw, ne, sw, se) => // see if node is far enough from the body, or recursion is needed on each sub-cell. if (isFarEnoughAway(quad)) addForce(quad.mass, quad.massX, quad.massY) else { traverse(nw) traverse(ne) traverse(sw) traverse(se) } } traverse(quad) // Remember: // position `n` = `n0` + `v`*`change in time`. `n` = `(nx, ny)` // velocity `v` = `v0` + `a`*`change in time`. `v` = `(nxspeed, nyspeed)` // acceleration `a` = `net change in force`/`mass` . `netforce` = `(netforcex, netforcey)` val nx = x + xspeed * deltaT val ny = y + yspeed * deltaT val nxspeed = xspeed + netforcex / mass * deltaT val nyspeed = yspeed + netforcey / mass * deltaT new Body(mass, nx, ny, nxspeed, nyspeed) } } /** A convenient "default" for the number of rows and the number of columns in `SectorMatrix`. The * actual value used by `SectorMatrix` is passed to it in its constructor. */ val SECTOR_PRECISION = 8 /** `SectorMatrix` is a square matrix that covers a square region of space specified by `boundaries`. * Each square is a `sector`, and each `sector` will be represented by its own QuadTree. It acts as * a combiner by partitioning the square region of space into `sectorPrecision^2^` buckets. This * makes it easy to parallelize the combining operation, since each data bucket is separate and doesn't * overlap. A `Body` will belong to at most a single `sector`. * * @param boundaries The `Boundaries` of the scene (e.g. the "universe"). * @param sectorPrecision The width and height of the matrix; there are `sectorPrecision^2^` squares * in the `SectorMatrix`. */ class SectorMatrix(val boundaries: Boundaries, val sectorPrecision: Int) { // The size of the side of each `sector`, which will also be the size of the QuadTree cell // for each `sector`. val sectorSize = boundaries.size / sectorPrecision // `matrix` is a linear array instead of an actual matrix (2D array). An `apply` method below // provides a translation between `x`, `y` coordinates and the index into `matrix`. val matrix = new Array[ConcBuffer[Body]](sectorPrecision * sectorPrecision) // Initialize each `sector` with a `ConcBuffer`. We'll use that to build QuadTrees for each `sector` // and to combine QuadTrees to cover the entire scene in parallel. for (i <- 0 until matrix.length) matrix(i) = new ConcBuffer /** Given a position `pos` and the `least` allowed position on that axis, return the sector index that * covers the position. Snap any position < `least` to index 0, and any position outside the maximum * boundary to `sectorPrecision-1`. We know we are outside the boundary if the distance in sectors is * bigger than `sectorPrecision-1`. * * @param pos The position to convert to a sector index. * @param least The lowest in-bounds position on this axis. * @return The sector index that covers `pos`. Index 0 covers any position left or above * the minimum boundaries. Index `sectorPrecision-1` covers any position right or * below the maximum boundaries. */ private def sectorCoordinate(pos: Float, least: Float): Int = { // Figure out the distance between `pos` and `least` in sectors, converted to an Int. val num_sectors: Int = ( (pos - least) / sectorSize ).toInt // If num_sectors < 0, snap to 0 (return the least row/col index). // If num_sectors > sectorPrecision-1, snap to sectorPrecision-1 (return the most row/col index). // Otherwise, return num_sectors. This is just min(max(0, num_sectors), sectorPrecision-1)! // Simple and fast! math.min(math.max(0, num_sectors), sectorPrecision-1) } /** Figure out which `sector` should hold `Body` `b` and add it to the `ConcBuffer` representing that * sector. The work is figuring out the correct `x` and `y` sector coordinates; we use the * `ConcBuffer`'s `+=` method to actually add it. If `b` lies outside `boundaries`, we are to * consider it to be snapped to the closest boundary side to figure out into which sector it goes. * * @param b The `Body` being added to our scene. * @return The current `SectorMatrix` so we can chain operations. */ def +=(b: Body): SectorMatrix = { // Figure out the sector coordinates `x` and `y`. It is critical that the X line be all X variables, // and the Y line be all Y variables. val x: Int = sectorCoordinate(b.x, boundaries.minX) val y: Int = sectorCoordinate(b.y, boundaries.minY) // The adding is easy now, thanks to the `apply` method below.... this(x, y) += b this } // Given `x` and `y` sector coordinates, return the `ConcBuffer` for that `sector`. // WARNING: This method came with the homework and I haven't changed it. It has NO ERROR CHECKING, // so bad coordinates can read outside `matrix` and cause Java exceptions. def apply(x: Int, y: Int) = matrix(y * sectorPrecision + x) /** Given another `SectorMatrix` `that`, construct a new `SectorMatrix` which represents the union * of bodies from `this` and `that`. Both `this` and `that` are invalidated in the combination process. * * NOTE: we are told we can safely assume that `combine()` will only be called on matrices of the same * dimensions, boundaries and sector precision. I'm still checking it though. * * On completion, the # of bodies in the new `SectorMatrix` must equal the # of bodies in `this` and `that`. * * @param that The other `SectorMatrix` we are combining with. * @throws IllegalArgumentException if `this` and `that` are not compatible (see NOTE above). * @return A new `SectorMatrix` that contains all the bodies from `this` and `that`. */ @throws[IllegalArgumentException] def combine(that: SectorMatrix): SectorMatrix = { // We're told we can assume `this` and `that` are compatible. BS. Confirm it. require(this.matrix.length == that.matrix.length, s"SectorMatrix.combine() expected matrix of size ${this.matrix.length}, received matrix of size ${that.matrix.length}") require(this.sectorPrecision == that.sectorPrecision, s"SectorMatrix.combine() expected sectorPrecision of ${this.sectorPrecision}, received ${that.sectorPrecision} instead.") require(this.boundaries.size == that.boundaries.size, s"SectorMatrix.combine() expected boundaries of size ${this.boundaries.size}, received ${that.boundaries.size} instead.") // Remember what we started with. val this_bodies: Int = this.matrix.map(_.size).sum val that_bodies: Int = that.matrix.map(_.size).sum // OK, get to work. We need to loop over each `ConcBuffer` in `this` and combine it with `that`. // In this instance, this.matrix(i).combine(that.matrix(i)) is harder to read.... for (i: Int <- 0 until matrix.length) { this.matrix(i) = this.matrix(i) combine that.matrix(i) } // Test the postcondition. val new_body_count: Int = this.matrix.map(_.size).sum val old_body_count: Int = this_bodies + that_bodies require(new_body_count == old_body_count, s"SectorMatrix.combine() postcondition expects ${old_body_count} bodies, but found ${new_body_count}.") // Return ourselves so we can chain. this } /** Converts the ConcBuffers we're using to a QuadTree that contains all the bodies added with `+=`. * This code was provided as part of the homework assignment. Thank goodness. * * @param parallelism Helps figure out how much parallelism we can expect, so we don't spawn more * tasks than the hardware can really run in parallel. * @return The QuadTree that represents `this` `SectorMatrix`. */ def toQuad(parallelism: Int): Quad = { def BALANCING_FACTOR = 4 def quad(x: Int, y: Int, span: Int, achievedParallelism: Int): Quad = { if (span == 1) { val sectorSize = boundaries.size / sectorPrecision val centerX = boundaries.minX + x * sectorSize + sectorSize / 2 val centerY = boundaries.minY + y * sectorSize + sectorSize / 2 var emptyQuad: Quad = Empty(centerX, centerY, sectorSize) val sectorBodies = this(x, y) sectorBodies.foldLeft(emptyQuad)(_ insert _) } else { val nspan = span / 2 val nAchievedParallelism = achievedParallelism * 4 val (nw, ne, sw, se) = if (parallelism > 1 && achievedParallelism < parallelism * BALANCING_FACTOR) parallel( quad(x, y, nspan, nAchievedParallelism), quad(x + nspan, y, nspan, nAchievedParallelism), quad(x, y + nspan, nspan, nAchievedParallelism), quad(x + nspan, y + nspan, nspan, nAchievedParallelism) ) else ( quad(x, y, nspan, nAchievedParallelism), quad(x + nspan, y, nspan, nAchievedParallelism), quad(x, y + nspan, nspan, nAchievedParallelism), quad(x + nspan, y + nspan, nspan, nAchievedParallelism) ) Fork(nw, ne, sw, se) } } quad(0, 0, sectorPrecision, 1) } override def toString = s"SectorMatrix(#bodies: ${matrix.map(_.size).sum})" } class TimeStatistics { private val timeMap = collection.mutable.Map[String, (Double, Int)]() def clear() = timeMap.clear() def timed[T](title: String)(body: =>T): T = { var res: T = null.asInstanceOf[T] val totalTime = /*measure*/ { val startTime = System.currentTimeMillis() res = body (System.currentTimeMillis() - startTime) } timeMap.get(title) match { case Some((total, num)) => timeMap(title) = (total + totalTime, num + 1) case None => timeMap(title) = (0.0, 0) } println(s"$title: ${totalTime} ms; avg: ${timeMap(title)._1 / timeMap(title)._2}") res } override def toString = { timeMap map { case (k, (total, num)) => k + ": " + (total / num * 100).toInt / 100.0 + " ms" } mkString("\\n") } } }
jeffreylloydbrown/classwork
ParallelProgrammingInScala/barneshut/src/main/scala/barneshut/package.scala
Scala
unlicense
27,392
package spark import scala.collection.mutable.{ArrayBuffer, HashSet} import spark.storage.{BlockManager, StorageLevel} /** Spark class responsible for passing RDDs split contents to the BlockManager and making sure a node doesn't load two copies of an RDD at once. */ private[spark] class CacheManager(blockManager: BlockManager) extends Logging { private val loading = new HashSet[String] /** Gets or computes an RDD split. Used by RDD.iterator() when an RDD is cached. */ def getOrCompute[T](rdd: RDD[T], split: Partition, context: TaskContext, storageLevel: StorageLevel) : Iterator[T] = { val key = "rdd_%d_%d".format(rdd.id, split.index) logInfo("Cache key is " + key) blockManager.get(key) match { case Some(cachedValues) => // Partition is in cache, so just return its values logInfo("Found partition in cache!") return cachedValues.asInstanceOf[Iterator[T]] case None => // Mark the split as loading (unless someone else marks it first) loading.synchronized { if (loading.contains(key)) { logInfo("Loading contains " + key + ", waiting...") while (loading.contains(key)) { try {loading.wait()} catch {case _ : Throwable =>} } logInfo("Loading no longer contains " + key + ", so returning cached result") // See whether someone else has successfully loaded it. The main way this would fail // is for the RDD-level cache eviction policy if someone else has loaded the same RDD // partition but we didn't want to make space for it. However, that case is unlikely // because it's unlikely that two threads would work on the same RDD partition. One // downside of the current code is that threads wait serially if this does happen. blockManager.get(key) match { case Some(values) => return values.asInstanceOf[Iterator[T]] case None => logInfo("Whoever was loading " + key + " failed; we'll try it ourselves") loading.add(key) } } else { loading.add(key) } } try { // If we got here, we have to load the split val elements = new ArrayBuffer[Any] logInfo("Computing partition " + split) elements ++= rdd.computeOrReadCheckpoint(split, context) // Try to put this block in the blockManager blockManager.put(key, elements, storageLevel, true) return elements.iterator.asInstanceOf[Iterator[T]] } finally { loading.synchronized { loading.remove(key) loading.notifyAll() } } } } }
koeninger/spark
core/src/main/scala/spark/CacheManager.scala
Scala
bsd-3-clause
2,782
package com.codelab27.cards9.models.cards import com.codelab27.cards9.models.players.Player import enumeratum._ /** * Battle class of the card. * * - Physical attacks physical def stat * - Magical attacks magical def stat * - Flexible attacks lowest def stat * - Assault attacks the lowest stat * * Reference: [[http://finalfantasy.wikia.com/wiki/Tetra_Master_(Minigame)#Battle_class_stat Final Fantasy Wiki]] */ sealed trait BattleClass extends EnumEntry { def uiChar: Char } object BattleClass extends Enum[BattleClass] { val values = findValues case object Physical extends BattleClass { val uiChar: Char = 'P' } case object Magical extends BattleClass { val uiChar: Char = 'M' } case object Flexible extends BattleClass { val uiChar: Char = 'X' } case object Assault extends BattleClass { val uiChar: Char = 'A' } } /** * Unique card instance. * * @param ownerId player identifier * @param cardType type of card * @param power offensive stat * @param bclass battle class * @param pdef physical defense stat * @param mdef magical defense stat * @param arrows list of atk/def arrows * @param id unique identifier */ final case class Card( ownerId: Player.Id, cardType: CardClass.Id, power: Int, bclass: BattleClass, pdef: Int, mdef: Int, arrows: List[Arrow], id: Option[Card.Id] = None) { // require(power < gameSettings.CARD_MAX_LEVEL) // require(pdef < gameSettings.CARD_MAX_LEVEL) // require(mdef < gameSettings.CARD_MAX_LEVEL) require(arrows.distinct.size == arrows.size && arrows.size <= Arrow.MAX_ARROWS) } object Card { case class Id(value: Int) extends AnyVal }
Codelab27/cards9-server
app/com/codelab27/cards9/models/cards/Card.scala
Scala
gpl-2.0
1,636
/** * Generated by Scrooge * version: 4.7.0 * rev: d9d56174937f524a1981b38ebd6280eef7eeda4a * built at: 20160427-121531 */ package com.komanov.serialization.domain.thriftscala import com.twitter.scrooge.{ LazyTProtocol, TFieldBlob, ThriftException, ThriftStruct, ThriftStructCodec3, ThriftStructFieldInfo, ThriftStructMetaData, ThriftUtil} import org.apache.thrift.protocol._ import org.apache.thrift.transport.{TMemoryBuffer, TTransport} import java.nio.ByteBuffer import java.util.Arrays import scala.collection.immutable.{Map => immutable$Map} import scala.collection.mutable.Builder import scala.collection.mutable.{ ArrayBuffer => mutable$ArrayBuffer, Buffer => mutable$Buffer, HashMap => mutable$HashMap, HashSet => mutable$HashSet} import scala.collection.{Map, Set} object SiteDescriptionSetPb extends ThriftStructCodec3[SiteDescriptionSetPb] { private val NoPassthroughFields = immutable$Map.empty[Short, TFieldBlob] val Struct = new TStruct("SiteDescriptionSetPb") val DescriptionField = new TField("description", TType.STRING, 1) val DescriptionFieldManifest = implicitly[Manifest[String]] /** * Field information in declaration order. */ lazy val fieldInfos: scala.List[ThriftStructFieldInfo] = scala.List[ThriftStructFieldInfo]( new ThriftStructFieldInfo( DescriptionField, true, false, DescriptionFieldManifest, _root_.scala.None, _root_.scala.None, immutable$Map.empty[String, String], immutable$Map.empty[String, String] ) ) lazy val structAnnotations: immutable$Map[String, String] = immutable$Map.empty[String, String] /** * Checks that all required fields are non-null. */ def validate(_item: SiteDescriptionSetPb): Unit = { } def withoutPassthroughFields(original: SiteDescriptionSetPb): SiteDescriptionSetPb = new Immutable( description = { val field = original.description field.map { field => field } } ) override def encode(_item: SiteDescriptionSetPb, _oproto: TProtocol): Unit = { _item.write(_oproto) } private[this] def lazyDecode(_iprot: LazyTProtocol): SiteDescriptionSetPb = { var descriptionOffset: Int = -1 var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null var _done = false val _start_offset = _iprot.offset _iprot.readStructBegin() while (!_done) { val _field = _iprot.readFieldBegin() if (_field.`type` == TType.STOP) { _done = true } else { _field.id match { case 1 => _field.`type` match { case TType.STRING => descriptionOffset = _iprot.offsetSkipString case _actualType => val _expectedType = TType.STRING throw new TProtocolException( "Received wrong type for field 'description' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) ) ) } case _ => if (_passthroughFields == null) _passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob] _passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot)) } _iprot.readFieldEnd() } } _iprot.readStructEnd() new LazyImmutable( _iprot, _iprot.buffer, _start_offset, _iprot.offset, descriptionOffset, if (_passthroughFields == null) NoPassthroughFields else _passthroughFields.result() ) } override def decode(_iprot: TProtocol): SiteDescriptionSetPb = _iprot match { case i: LazyTProtocol => lazyDecode(i) case i => eagerDecode(i) } private[this] def eagerDecode(_iprot: TProtocol): SiteDescriptionSetPb = { var description: _root_.scala.Option[String] = _root_.scala.None var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null var _done = false _iprot.readStructBegin() while (!_done) { val _field = _iprot.readFieldBegin() if (_field.`type` == TType.STOP) { _done = true } else { _field.id match { case 1 => _field.`type` match { case TType.STRING => description = _root_.scala.Some(readDescriptionValue(_iprot)) case _actualType => val _expectedType = TType.STRING throw new TProtocolException( "Received wrong type for field 'description' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) ) ) } case _ => if (_passthroughFields == null) _passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob] _passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot)) } _iprot.readFieldEnd() } } _iprot.readStructEnd() new Immutable( description, if (_passthroughFields == null) NoPassthroughFields else _passthroughFields.result() ) } def apply( description: _root_.scala.Option[String] = _root_.scala.None ): SiteDescriptionSetPb = new Immutable( description ) def unapply(_item: SiteDescriptionSetPb): _root_.scala.Option[_root_.scala.Option[String]] = _root_.scala.Some(_item.description) @inline private def readDescriptionValue(_iprot: TProtocol): String = { _iprot.readString() } @inline private def writeDescriptionField(description_item: String, _oprot: TProtocol): Unit = { _oprot.writeFieldBegin(DescriptionField) writeDescriptionValue(description_item, _oprot) _oprot.writeFieldEnd() } @inline private def writeDescriptionValue(description_item: String, _oprot: TProtocol): Unit = { _oprot.writeString(description_item) } object Immutable extends ThriftStructCodec3[SiteDescriptionSetPb] { override def encode(_item: SiteDescriptionSetPb, _oproto: TProtocol): Unit = { _item.write(_oproto) } override def decode(_iprot: TProtocol): SiteDescriptionSetPb = SiteDescriptionSetPb.decode(_iprot) override lazy val metaData: ThriftStructMetaData[SiteDescriptionSetPb] = SiteDescriptionSetPb.metaData } /** * The default read-only implementation of SiteDescriptionSetPb. You typically should not need to * directly reference this class; instead, use the SiteDescriptionSetPb.apply method to construct * new instances. */ class Immutable( val description: _root_.scala.Option[String], override val _passthroughFields: immutable$Map[Short, TFieldBlob]) extends SiteDescriptionSetPb { def this( description: _root_.scala.Option[String] = _root_.scala.None ) = this( description, Map.empty ) } /** * This is another Immutable, this however keeps strings as lazy values that are lazily decoded from the backing * array byte on read. */ private[this] class LazyImmutable( _proto: LazyTProtocol, _buf: Array[Byte], _start_offset: Int, _end_offset: Int, descriptionOffset: Int, override val _passthroughFields: immutable$Map[Short, TFieldBlob]) extends SiteDescriptionSetPb { override def write(_oprot: TProtocol): Unit = { _oprot match { case i: LazyTProtocol => i.writeRaw(_buf, _start_offset, _end_offset - _start_offset) case _ => super.write(_oprot) } } lazy val description: _root_.scala.Option[String] = if (descriptionOffset == -1) None else { Some(_proto.decodeString(_buf, descriptionOffset)) } /** * Override the super hash code to make it a lazy val rather than def. * * Calculating the hash code can be expensive, caching it where possible * can provide significant performance wins. (Key in a hash map for instance) * Usually not safe since the normal constructor will accept a mutable map or * set as an arg * Here however we control how the class is generated from serialized data. * With the class private and the contract that we throw away our mutable references * having the hash code lazy here is safe. */ override lazy val hashCode = super.hashCode } /** * This Proxy trait allows you to extend the SiteDescriptionSetPb trait with additional state or * behavior and implement the read-only methods from SiteDescriptionSetPb using an underlying * instance. */ trait Proxy extends SiteDescriptionSetPb { protected def _underlying_SiteDescriptionSetPb: SiteDescriptionSetPb override def description: _root_.scala.Option[String] = _underlying_SiteDescriptionSetPb.description override def _passthroughFields = _underlying_SiteDescriptionSetPb._passthroughFields } } trait SiteDescriptionSetPb extends ThriftStruct with scala.Product1[Option[String]] with java.io.Serializable { import SiteDescriptionSetPb._ def description: _root_.scala.Option[String] def _passthroughFields: immutable$Map[Short, TFieldBlob] = immutable$Map.empty def _1 = description /** * Gets a field value encoded as a binary blob using TCompactProtocol. If the specified field * is present in the passthrough map, that value is returned. Otherwise, if the specified field * is known and not optional and set to None, then the field is serialized and returned. */ def getFieldBlob(_fieldId: Short): _root_.scala.Option[TFieldBlob] = { lazy val _buff = new TMemoryBuffer(32) lazy val _oprot = new TCompactProtocol(_buff) _passthroughFields.get(_fieldId) match { case blob: _root_.scala.Some[TFieldBlob] => blob case _root_.scala.None => { val _fieldOpt: _root_.scala.Option[TField] = _fieldId match { case 1 => if (description.isDefined) { writeDescriptionValue(description.get, _oprot) _root_.scala.Some(SiteDescriptionSetPb.DescriptionField) } else { _root_.scala.None } case _ => _root_.scala.None } _fieldOpt match { case _root_.scala.Some(_field) => val _data = Arrays.copyOfRange(_buff.getArray, 0, _buff.length) _root_.scala.Some(TFieldBlob(_field, _data)) case _root_.scala.None => _root_.scala.None } } } } /** * Collects TCompactProtocol-encoded field values according to `getFieldBlob` into a map. */ def getFieldBlobs(ids: TraversableOnce[Short]): immutable$Map[Short, TFieldBlob] = (ids flatMap { id => getFieldBlob(id) map { id -> _ } }).toMap /** * Sets a field using a TCompactProtocol-encoded binary blob. If the field is a known * field, the blob is decoded and the field is set to the decoded value. If the field * is unknown and passthrough fields are enabled, then the blob will be stored in * _passthroughFields. */ def setField(_blob: TFieldBlob): SiteDescriptionSetPb = { var description: _root_.scala.Option[String] = this.description var _passthroughFields = this._passthroughFields _blob.id match { case 1 => description = _root_.scala.Some(readDescriptionValue(_blob.read)) case _ => _passthroughFields += (_blob.id -> _blob) } new Immutable( description, _passthroughFields ) } /** * If the specified field is optional, it is set to None. Otherwise, if the field is * known, it is reverted to its default value; if the field is unknown, it is removed * from the passthroughFields map, if present. */ def unsetField(_fieldId: Short): SiteDescriptionSetPb = { var description: _root_.scala.Option[String] = this.description _fieldId match { case 1 => description = _root_.scala.None case _ => } new Immutable( description, _passthroughFields - _fieldId ) } /** * If the specified field is optional, it is set to None. Otherwise, if the field is * known, it is reverted to its default value; if the field is unknown, it is removed * from the passthroughFields map, if present. */ def unsetDescription: SiteDescriptionSetPb = unsetField(1) override def write(_oprot: TProtocol): Unit = { SiteDescriptionSetPb.validate(this) _oprot.writeStructBegin(Struct) if (description.isDefined) writeDescriptionField(description.get, _oprot) if (_passthroughFields.nonEmpty) { _passthroughFields.values.foreach { _.write(_oprot) } } _oprot.writeFieldStop() _oprot.writeStructEnd() } def copy( description: _root_.scala.Option[String] = this.description, _passthroughFields: immutable$Map[Short, TFieldBlob] = this._passthroughFields ): SiteDescriptionSetPb = new Immutable( description, _passthroughFields ) override def canEqual(other: Any): Boolean = other.isInstanceOf[SiteDescriptionSetPb] override def equals(other: Any): Boolean = canEqual(other) && _root_.scala.runtime.ScalaRunTime._equals(this, other) && _passthroughFields == other.asInstanceOf[SiteDescriptionSetPb]._passthroughFields override def hashCode: Int = _root_.scala.runtime.ScalaRunTime._hashCode(this) override def toString: String = _root_.scala.runtime.ScalaRunTime._toString(this) override def productArity: Int = 1 override def productElement(n: Int): Any = n match { case 0 => this.description case _ => throw new IndexOutOfBoundsException(n.toString) } override def productPrefix: String = "SiteDescriptionSetPb" }
dkomanov/scala-serialization
scala-serialization/src/main/scala/com/komanov/serialization/domain/thriftscala/SiteDescriptionSetPb.scala
Scala
mit
13,853
package com.twitter.finagle.server import com.twitter.concurrent.AsyncSemaphore import com.twitter.finagle._ import com.twitter.finagle.builder.SourceTrackingMonitor import com.twitter.finagle.filter.{ HandletimeFilter, MaskCancelFilter, MkJvmFilter, MonitorFilter, RequestSemaphoreFilter } import com.twitter.finagle.service.{TimeoutFilter, StatsFilter} import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver, ServerStatsReceiver} import com.twitter.finagle.tracing.{Tracer, TracingFilter, DefaultTracer} import com.twitter.finagle.transport.Transport import com.twitter.finagle.util.{DefaultMonitor, DefaultTimer, DefaultLogger} import com.twitter.jvm.Jvm import com.twitter.util.{CloseAwaitably, Duration, Monitor, Timer, Closable, Return, Throw, Time} import java.net.SocketAddress import java.util.Collections import java.util.concurrent.ConcurrentHashMap import scala.collection.JavaConverters._ object DefaultServer { private val newJvmFilter = new MkJvmFilter(Jvm()) } /* * The default Server implementation. It is given a Listener (eg. * [[com.twitter.finagle.netty3.Netty3Listener]]) and a function, * serveTransport, that binds a transport and a service. It will then * dispatch requests onto a standard service stack parameterized as * described below. * * @param listener The Listener from which to accept new typed * Transports. * * @param serveTransport The function used to bind an accepted * Transport with a Service. Requests read from the transport are * dispatched onto the Service, with replies written back. * * @param requestTimeout The maximum amount of time the server is * allowed to handle a request. If the timeout expires, the server * will cancel the future and terminate the client connection. * * @param maxConcurrentRequests The maximum number of concurrent * requests the server is willing to handle. * * @param cancelOnHangup The maximum amount of time the server is * allowed to handle a request. If the timeout expires, the server * will cancel the future and terminate the client connection. * * @param prepare Prepare the given `ServiceFactory` before use. */ case class DefaultServer[Req, Rep, In, Out]( name: String, listener: Listener[In, Out], serviceTransport: (Transport[In, Out], Service[Req, Rep]) => Closable, requestTimeout: Duration = Duration.Top, maxConcurrentRequests: Int = Int.MaxValue, cancelOnHangup: Boolean = true, prepare: ServiceFactory[Req, Rep] => ServiceFactory[Req, Rep] = (sf: ServiceFactory[Req, Rep]) => sf, timer: Timer = DefaultTimer.twitter, monitor: Monitor = DefaultMonitor, logger: java.util.logging.Logger = DefaultLogger, statsReceiver: StatsReceiver = ServerStatsReceiver, tracer: Tracer = DefaultTracer ) extends Server[Req, Rep] { private[this] val connections = Collections.newSetFromMap( new ConcurrentHashMap[Closable, java.lang.Boolean]) protected def makeNewStack(statsReceiver: StatsReceiver): Transformer[Req, Rep] = { val outer: Transformer[Req, Rep] = { val handletimeFilter = new HandletimeFilter[Req, Rep](statsReceiver) val monitorFilter = new MonitorFilter[Req, Rep]( monitor andThen new SourceTrackingMonitor(logger, "server")) val tracingFilter = new TracingFilter[Req, Rep](tracer) val jvmFilter= DefaultServer.newJvmFilter[Req, Rep]() val filter = handletimeFilter andThen // to measure total handle time monitorFilter andThen // to maximize surface area for exception handling tracingFilter andThen // to prepare tracing prior to codec tracing support jvmFilter // to maximize surface area filter andThen _ } val inner: Transformer[Req, Rep] = { val maskCancelFilter: SimpleFilter[Req, Rep] = if (cancelOnHangup) Filter.identity else new MaskCancelFilter val statsFilter: SimpleFilter[Req, Rep] = if (statsReceiver ne NullStatsReceiver) new StatsFilter(statsReceiver) else Filter.identity val timeoutFilter: SimpleFilter[Req, Rep] = if (requestTimeout < Duration.Top) { val exc = new IndividualRequestTimeoutException(requestTimeout) new TimeoutFilter(requestTimeout, exc, timer) } else Filter.identity val requestSemaphoreFilter: SimpleFilter[Req, Rep] = if (maxConcurrentRequests == Int.MaxValue) Filter.identity else { val sem = new AsyncSemaphore(maxConcurrentRequests) new RequestSemaphoreFilter[Req, Rep](sem) { // We capture the gauges inside of here so their // (reference) lifetime is tied to that of the filter // itself. val g0 = statsReceiver.addGauge("request_concurrency") { maxConcurrentRequests - sem.numPermitsAvailable } val g1 = statsReceiver.addGauge("request_queue_size") { sem.numWaiters } } } val filter = maskCancelFilter andThen requestSemaphoreFilter andThen statsFilter andThen timeoutFilter filter andThen _ } outer compose prepare compose inner } def serveTransport(serviceFactory: ServiceFactory[Req, Rep], transport: Transport[In, Out]) { val clientConn = new ClientConnection { val remoteAddress = transport.remoteAddress val localAddress = transport.localAddress def close(deadline: Time) = transport.close(deadline) val onClose = transport.onClose.map(_ => ()) } serviceFactory(clientConn) respond { case Return(service) => val conn = serviceTransport(transport, service) connections.add(conn) transport.onClose ensure { connections.remove(conn) } case Throw(_) => transport.close() } } def serve(addr: SocketAddress, factory: ServiceFactory[Req, Rep]): ListeningServer = new ListeningServer with CloseAwaitably { val scopedStatsReceiver = statsReceiver match { case ServerStatsReceiver => statsReceiver.scope(ServerRegistry.nameOf(addr) getOrElse name) case sr => sr } val newStack = makeNewStack(scopedStatsReceiver) val underlying = listener.listen(addr) { transport => serveTransport(newStack(factory), transport) } def closeServer(deadline: Time) = closeAwaitably { // The order here is important: by calling underlying.close() // first, we guarantee that no further connections are // created. // // TODO: it would be cleaner to fully represent the draining // states: accepting no further connections (requests) then // fully drained, then closed. val closable = Closable.sequence(underlying, Closable.all(connections.asScala.toSeq:_*)) connections.clear() closable.close(deadline) } def boundAddress = underlying.boundAddress } }
foursquare/finagle
finagle-core/src/main/scala/com/twitter/finagle/server/DefaultServer.scala
Scala
apache-2.0
6,881
package drt.client.components import drt.client.SPAMain import drt.client.SPAMain.{Loc, TerminalPageTabLoc, UrlDateParameter} import drt.client.modules.GoogleEventTracker import drt.client.services.JSDateConversions.SDate import drt.shared.CrunchApi.{ForecastPeriodWithHeadlines, ForecastTimeSlot, MillisSinceEpoch} import drt.shared.{Forecast, MilliDate, Queues, SDateLike} import io.kinoplan.scalajs.react.material.ui.core.MuiButton._ import io.kinoplan.scalajs.react.material.ui.core.{MuiButton, MuiGrid} import io.kinoplan.scalajs.react.material.ui.icons.MuiIcons import io.kinoplan.scalajs.react.material.ui.icons.MuiIconsModule.GetApp import japgolly.scalajs.react.component.Scala.Component import japgolly.scalajs.react.extra.router.RouterCtl import japgolly.scalajs.react.vdom.html_<^.{<, _} import japgolly.scalajs.react.{Callback, CtorType, ReactEventFromInput, Reusability, ScalaComponent} import org.scalajs.dom.html.Select import scala.collection.immutable.Seq object TerminalPlanningComponent { def getLastSunday(start: SDateLike): SDateLike = { val sunday = start.getLastSunday SDate(f"${sunday.getFullYear()}-${sunday.getMonth()}%02d-${sunday.getDate()}%02dT00:00:00") } case class Props(forecastPeriod: ForecastPeriodWithHeadlines, page: TerminalPageTabLoc, router: RouterCtl[Loc]) { def hash: Int = { forecastPeriod.forecast.days.toList.map { case (_, slots) => slots.hashCode } }.hashCode } val forecastWeeks: Seq[SDateLike] = (0 to 30).map(w => getLastSunday(SDate.now()).addDays(w * 7)) implicit val propsReuse: Reusability[Props] = Reusability.by((_: Props).hash) val component: Component[Props, Unit, Unit, CtorType.Props] = ScalaComponent.builder[Props](displayName = "TerminalForecast") .renderP((_, props) => { val sortedDays = props.forecastPeriod.forecast.days.toList.sortBy(_._1) val byTimeSlot: Seq[List[Option[ForecastTimeSlot]]] = Forecast.periodByTimeSlotAcrossDays(props.forecastPeriod.forecast) def drawSelect(names: Seq[String], values: List[String], value: String): VdomTagOf[Select] = { <.select(^.className := "form-control", ^.value := value.toString, ^.onChange ==> ((e: ReactEventFromInput) => { props.router.set(props.page.withUrlParameters(UrlDateParameter(Option(SDate(e.target.value).toLocalDateTimeString())))) }), values.zip(names).map { case (value, name) => <.option(^.value := value.toString, name) }.toTagMod) } val slotStartTimes = Forecast.timeSlotStartTimes( props.forecastPeriod.forecast, (millis: MillisSinceEpoch) => SDate(millis).toHoursAndMinutes ) <.div( <.div(^.className := "form-group row planning-week", <.div(^.className := "col-sm-3 no-gutters", <.label(^.className := "col-form-label", "Select week start day")), <.div(^.className := "col-sm-2 no-gutters", drawSelect( forecastWeeks.map(_.ddMMyyString), forecastWeeks.map(_.toISOString()).toList, defaultStartDate(props.page.dateFromUrlOrNow).toISOString()) ) ), <.div( <.span( MuiButton(color = Color.default, variant = "outlined", size = "medium")( MuiIcons(GetApp)(fontSize = "small"), "Export Headlines", ^.className := "btn btn-link muiButton", ^.href := SPAMain.absoluteUrl(s"export/headlines/${defaultStartDate(props.page.dateFromUrlOrNow).millisSinceEpoch}/${props.page.terminal}"), ^.target := "_blank" )), <.span(^.className := "planning-export", MuiButton(color = Color.default, variant = "outlined", size = "medium")( MuiIcons(GetApp)(fontSize = "small"), "Export Week", ^.className := "btn btn-link muiButton", ^.href := SPAMain.absoluteUrl(s"export/planning/${defaultStartDate(props.page.dateFromUrlOrNow).millisSinceEpoch}/${props.page.terminal}"), ^.target := "_blank" )) ), <.h3("Headline Figures"), <.table(^.className := "headlines", <.thead( <.tr( <.th(^.className := "queue-heading"), props.forecastPeriod.headlines.queueDayHeadlines.map(_.day).toSet.toList.sorted.map( day => <.th(s"${SDate(MilliDate(day)).getDate()}/${SDate(MilliDate(day)).getMonth()}") ).toTagMod ), { val groupedByQ = props.forecastPeriod.headlines.queueDayHeadlines.groupBy(_.queue) Queues.queueOrder.flatMap(q => groupedByQ.get(q).map(qhls => <.tr( <.th(^.className := "queue-heading", s"${Queues.displayName(q)}"), qhls.toList.sortBy(_.day).map(qhl => <.td(qhl.paxNos)).toTagMod ))).toTagMod }, { val byDay = props.forecastPeriod.headlines.queueDayHeadlines.groupBy(_.day).toList List( <.tr(^.className := "total", <.th(^.className := "queue-heading", "Total Pax"), byDay.sortBy(_._1).map(hl => <.th(hl._2.map(_.paxNos).sum)).toTagMod), <.tr(^.className := "total", <.th(^.className := "queue-heading", "Workloads"), byDay.sortBy(_._1).map(hl => <.th(hl._2.map(_.workload).sum)).toTagMod) ).toTagMod } ) ), <.h3("Total staff required at each hour of the day"), <.table(^.className := "forecast", <.thead( <.tr( <.th(^.colSpan := 2, ^.className := "heading"), sortedDays.map { case (day, _) => <.th(^.colSpan := 2, ^.className := "heading", s"${SDate(MilliDate(day)).getDate()}/${SDate(MilliDate(day)).getMonth()}") }.toTagMod ), <.tr( <.th(^.colSpan := 2, ^.className := "heading", "Time"), sortedDays.flatMap(_ => List(<.th(^.className := "sub-heading", "Avail"), <.th(^.className := "sub-heading", "Rec"))).toTagMod )), <.tbody( byTimeSlot.zip(slotStartTimes).map { case (row, startTime) => <.tr( <.td(s"$startTime"), row.flatMap { case Some(col) => val ragClass = TerminalDesksAndQueuesRow.ragStatus(col.required, col.available) List(<.td(^.className := ragClass, col.available), <.td(col.required)) case None => List(<.td(), <.td()) }.toTagMod ) }.toTagMod ) ) ) }) .configure(Reusability.shouldComponentUpdate) .componentDidMount(p => Callback { GoogleEventTracker.sendPageView(s"${p.props.page.terminal}/planning/${defaultStartDate(p.props.page.dateFromUrlOrNow).toISODateOnly}") }) .build def defaultStartDate(date: SDateLike): SDateLike = getLastSunday(date) def apply(props: Props): VdomElement = component(props) }
UKHomeOffice/drt-scalajs-spa-exploration
client/src/main/scala/drt/client/components/TerminalPlanningComponent.scala
Scala
apache-2.0
7,072
/* * Copyright (c) 2015 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.tudarmstadt.lt.flinkdt import java.io.File import com.typesafe.config.impl.{SimpleConfigObject, SimpleConfig} import com.typesafe.config.{ConfigValueFactory, ConfigRenderOptions, Config, ConfigFactory} import de.tudarmstadt.lt.flinkdt.tasks.DSTaskConfig import org.apache.flink.api.java.utils.ParameterTool import org.scalatest.FunSuite import org.slf4j.LoggerFactory import scala.collection.JavaConversions._ /** * Created by Steffen Remus. */ class ConfTest extends FunSuite { val LOG = LoggerFactory.getLogger(classOf[ConfTest]) test("Test configuration via typesafe:config") { var conf:Config = null conf = ConfigFactory.parseFile(new File("app.conf")).withFallback(ConfigFactory.parseResources("myapplication.conf")).withFallback(ConfigFactory.load("application")).resolve() System.setProperty("dt.question","What?") for(entry <- conf.getConfig("dt").entrySet()){ println(entry.getKey + "\\t" + entry.getValue.render() + ":" + entry.getValue.valueType()) } println(conf.getConfig("dt").getConfig("output").hasPath("dt")) LOG.trace("this is a trace message") LOG.debug("this is a debug message") LOG.info("this is a info message") LOG.warn("this is a warn message") LOG.error("this is a error message") } test("config from args") { DSTaskConfig.load(DSTaskConfig.resolveConfig(Array( "--dt.io.ct.raw", Thread.currentThread().getContextClassLoader().getResource("ct-raw").getPath(), "--dt.io.dir", "file:///tmp/testoutput", "--c", """ dt.io.ct.raw-fields="0,1" dt.jobname=testImpliCtJBT dt.filter.max-odot1 = Infinity """ ))) print(DSTaskConfig.toString()) } } //object testapp extends App { // // val p = ParameterTool.fromArgs(args) // println(p.getProperties) // println(p.get("__NO_VALUE_KEY")) // println(p.toMap.keySet()) // //}
remstef/flinkfun
src/test/scala/de/tudarmstadt/lt/flinkdt/ConfTest.scala
Scala
apache-2.0
2,486
/* * Copyright (C) FuseSource, Inc. * http://fusesource.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.fabric.groups import org.scalatest.matchers.ShouldMatchers import org.apache.zookeeper.server.{ZooKeeperServer, NIOServerCnxnFactory} import org.apache.zookeeper.server.persistence.FileTxnSnapLog import org.linkedin.zookeeper.client.ZKClient import org.linkedin.util.clock.Timespan import java.net.InetSocketAddress import scala.collection.immutable.List import java.io.File import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, FunSuite} import java.util.concurrent.TimeUnit import collection.JavaConversions._ /** * <p> * </p> * * @author <a href="http://hiramchirino.com">Hiram Chirino</a> */ @RunWith(classOf[JUnitRunner]) abstract class ZooKeeperFunSuiteSupport extends FunSuite with BeforeAndAfterAll with BeforeAndAfterEach { var connector : NIOServerCnxnFactory = _ override protected def beforeAll() = { println("Starting ZooKeeper") val zk_server = new ZooKeeperServer(); val data_dir = new File(new File("target"), "test-data") def deleteFilesInDir(dir: File): Unit = { var files: Array[File] = dir.listFiles if (files != null) { for (file <- files) { if (file.isDirectory) { deleteFilesInDir(file) } else { file.delete } } } } deleteFilesInDir(data_dir) zk_server.setTxnLogFactory(new FileTxnSnapLog(new File(data_dir, "zk-log"), new File(data_dir, "zk-data"))) connector = new NIOServerCnxnFactory connector.configure(new InetSocketAddress(0), 100) connector.startup(zk_server) println("ZooKeeper Started") } override protected def afterAll() = { if( connector!=null ) { connector.shutdown connector = null } } var zk_clients = List[ZKClient]() def create_zk_client() = { val client = new ZKClient("localhost:"+connector.getLocalPort, Timespan.parse("30s"), null) client.start zk_clients ::= client client.waitForStart(Timespan.parse("30s")) client } override protected def afterEach(): Unit = { zk_clients.foreach{ client=> try { client.close } catch { case _ => } } zk_clients = List() } private class BreakWithin(e:Throwable) extends RuntimeException(e) def breaks_within[T](func: => T) = { try { func } catch { case e:Throwable => throw new BreakWithin(e) } } def within[T](timeout:Long, unit:TimeUnit)(func: => Unit ):Unit = { val start = System.currentTimeMillis var amount = unit.toMillis(timeout) var sleep_amount = amount / 100 var last:Throwable = null if( sleep_amount < 1 ) { sleep_amount = 1 } try { func return } catch { case e:BreakWithin => throw e.getCause case e:Throwable => last = e } while( (System.currentTimeMillis-start) < amount ) { Thread.sleep(sleep_amount) try { func return } catch { case e:BreakWithin => throw e.getCause case e:Throwable => last = e } } throw last } } class GroupsTest extends ZooKeeperFunSuiteSupport with ShouldMatchers { test("cluster events") { val cluster1 = ZooKeeperGroupFactory.create(create_zk_client, "/example") val cluster2 = ZooKeeperGroupFactory.create(create_zk_client, "/example") import TimeUnit._ val c1id1 = cluster1.join("1".getBytes) within(2, SECONDS) { expect(List("1"))(cluster1.members.toMap.values.map(new String(_)).toList) } val c2id2 = cluster2.join("2".getBytes) within(2, SECONDS) { expect(List("1", "2"))(cluster1.members.toMap.values.map(new String(_)).toList) } // Check the we can get the member list without creating a Group object expect(List("1", "2"))(ZooKeeperGroupFactory.members(create_zk_client, "/example").toMap.values.map(new String(_)).toList) // Check updating member data... expect("2")(new String(cluster1.members.get(c2id2))) cluster2.update(c2id2, "Hello!".getBytes()) within(2, SECONDS) { expect("Hello!")(new String(cluster1.members.get(c2id2))) } // Check leaving the cluster cluster1.leave(c1id1) within(2, SECONDS) { expect(List("Hello!"))(cluster1.members.toMap.values.map(new String(_)).toList) } } }
Jitendrakry/fuse
fabric/fabric-groups/src/test/scala/org/fusesource/fabric/groups/GroupsTest.scala
Scala
apache-2.0
4,993
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalautils import org.scalatest._ class TypeCheckedTripleEqualsExplicitlySpec extends Spec with Matchers with TypeCheckedTripleEquals with ExplicitlySpecHelpers { object `The Explicitly DSL` { object `when used with === on identical types` { def `should allow an Equality to be specified explicitly` { assert(1 !== 2) assert((1 === 2)(decided by intInequality)) assert(1 === 1) assert((1 !== 1)(decided by intInequality)) implicit val strIneq = stringInequality assert(" Hi" === "hI ") assert { (" Hi" !== "hI ") (decided by defaultEquality[String]) } } } object `when used with supertype === subtype` { def `should allow an Equality to be specified explicitly` { assert(new Fruit("orange") !== new Apple) assert((new Fruit("orange") === new Apple)(decided by fruitInequality)) assert(new Fruit("apple") === new Apple) assert((new Fruit("apple") !== new Apple)(decided by fruitInequality)) } } object `when used with subtype === supertype` { def `should allow an Equality to be specified explicitly` { assert(new Apple !== new Fruit("orange")) assert((new Apple === new Fruit("orange"))(decided by fruitInequality)) assert(new Apple === new Fruit("apple")) assert((new Apple !== new Fruit("apple"))(decided by fruitInequality)) } } } object `The determined by syntax` { def `should produce an Equivalence if used with an Equivalence (that is not an Equality)` { assert(1 !== 2) 1 should !== (2) assert((1 === 2)(determined by intInequivalence)) (1 should === (2)) (determined by intInequivalence) assert(1 === 1) 1 should === (1) assert((1 !== 1)(determined by intInequivalence)) (1 should !== (1)) (determined by intInequivalence) } def `should produce an Equivalence from "after being" syntax` { assert(("Hi" !== "hI")) assert(("Hi" === "hI")(after being downCased)) } def `should produce an Equivalence from "after being X and Y" syntax` { assert((" Hi" !== "hI ")) assert((" Hi" === "hI ")(after being downCased and chopped)) } def `should produce an Equivalence from "determined by <equivalence> afterBeing" syntax` { implicit val stringIneq = stringInequivalence assert(("Hi" === "hI")) assert { ("Hi" !== "hI") (after being downCased) } assert { ("Hi" === "hI") (determined by defaultEquality[String] afterBeing downCased) } } } }
travisbrown/scalatest
src/test/scala/org/scalautils/TypeCheckedTripleEqualsExplicitlySpec.scala
Scala
apache-2.0
3,175
package isabelle.eclipse.ui.editors import org.eclipse.jface.resource.ResourceManager import org.eclipse.jface.text.ITextHover import org.eclipse.jface.text.ITextViewerExtension2.DEFAULT_HOVER_STATE_MASK import org.eclipse.jface.text.hyperlink.IHyperlinkDetector import org.eclipse.jface.text.presentation.{IPresentationReconciler, PresentationReconciler} import org.eclipse.jface.text.rules.{IToken, ITokenScanner} import org.eclipse.jface.text.source.{Annotation, ISourceViewer} import org.eclipse.jface.util.PropertyChangeEvent import org.eclipse.swt.SWT import org.eclipse.ui.PlatformUI import org.eclipse.ui.editors.text.{EditorsUI, TextSourceViewerConfiguration} import org.eclipse.ui.texteditor.ChainedPreferenceStore import isabelle.{Outer_Syntax, Session} import isabelle.Document.Snapshot import isabelle.eclipse.ui.internal.IsabelleUIPlugin import isabelle.eclipse.ui.preferences.{ IsabelleMarkupToSyntaxClass, IsabellePartitionToSyntaxClass, IsabelleTokenToSyntaxClass } import isabelle.eclipse.ui.text.{ AbstractIsabelleScanner, ChainedTokenScanner, IsabelleActionMarkupScanner, IsabelleMarkupScanner, IsabelleTokenScanner, SingleTokenScanner, TokenUtil } import isabelle.eclipse.ui.text.hover.IsabelleTextHover import isabelle.eclipse.ui.text.hyperlink.{IsabelleActionHyperlinkDetector, IsabelleHyperlinkDetector} /** * A SourceViewer configuration used for Isabelle viewers. * * This configuration is intended to be used both in Editors and in Views. * See IsabelleTheoryConfiguration for Editors-specific extension. * * @author Andrius Velykis */ class IsabelleTheoryViewerConfiguration( session: => Option[Session], snapshot: => Option[Snapshot], targetEditor: => Option[TheoryEditor], resourceManager: ResourceManager, annotationHoverMask: Int = DEFAULT_HOVER_STATE_MASK, isabelleHoverMask: Int = DEFAULT_HOVER_STATE_MASK) extends TextSourceViewerConfiguration(new ChainedPreferenceStore(Array( // chain the preference store to get default editor preference values as well as Isabelle-specific IsabelleUIPlugin.plugin.getPreferenceStore, EditorsUI.getPreferenceStore(), PlatformUI.getPreferenceStore()))) { def preferenceStore() = fPreferenceStore override def getConfiguredDocumentPartitioning(sourceViewer: ISourceViewer) = IsabellePartitions.ISABELLE_PARTITIONING override def getConfiguredContentTypes(sourceViewer: ISourceViewer): Array[String] = // add Isabelle content types super.getConfiguredContentTypes(sourceViewer) ++ IsabellePartitions.contentTypes private var codeHighlightingScanners: List[AbstractIsabelleScanner] = Nil override def getPresentationReconciler(sourceViewer: ISourceViewer): IPresentationReconciler = { val reconciler = super.getPresentationReconciler(sourceViewer).asInstanceOf[PresentationReconciler] /** Sets damager/repairer for the given partition type */ def handlePartition(partitionType: String, scanner: ITokenScanner) { val dr = new ExtendedStylesDamagerRepairer(scanner) reconciler.setDamager(dr, partitionType) reconciler.setRepairer(dr, partitionType) } // set damager/repairer for each content type val contentTypes = getConfiguredContentTypes(sourceViewer) // assign scanners to all partitions and collect them import IsabellePartitions._ val partitionScanners = contentTypes.toList map { // for comments, only use the partition scanner - no need to display further scanning case ISABELLE_COMMENT => { val partScanner = partitionScanner(ISABELLE_COMMENT) handlePartition(ISABELLE_COMMENT, partScanner) List(partScanner) } // for other content types, use markup & token scanners in addition to partition scanner case contentType => { val sourceScanners = List(sourceMarkupScanner, tokenScanner, partitionScanner(contentType)) val semanticScanners = List(markupScanner, actionMarkupScanner) // join the source scanners without merging - make source tokens exclusive. For example, if // document_markup token is found, it overrides string token completely. val sourceScanner = join(sourceScanners, false) // merge semantic scanners since they can represent different things of the same element val semanticScanner = join(semanticScanners, true) val fullScanner = join(List(semanticScanner, sourceScanner), true) handlePartition(contentType, fullScanner) semanticScanners ::: sourceScanners } } // record the scanners used - they will be refreshed upon preference change this.codeHighlightingScanners = partitionScanners.flatten reconciler } private val prefs = fPreferenceStore /** Resolve the color manager and preference store values for the abstract trait */ private trait IsabelleScanner extends AbstractIsabelleScanner { def resourceManager = IsabelleTheoryViewerConfiguration.this.resourceManager // cannot reference parent fPreferenceStore directly here - Scala-IDE Juno crashes // bug reported: http://www.assembla.com/spaces/scala-ide/support/tickets/1001114-sbt-crash-for-mixed-scala-java-project def preferenceStore = prefs } private def join(scanners: List[ITokenScanner], merge: Boolean): ITokenScanner = scanners.reduceRight(join(merge)) /** Joins the scanners in a chained composite scanner */ private def join(merge: Boolean)(top: ITokenScanner, bottom: ITokenScanner): ITokenScanner = { val mergeStrat = if (merge) TokenUtil.Merge.mergeTextTokens _ else TokenUtil.Merge.takeTopToken _ new ChainedTokenScanner(top, bottom, mergeStrat) } /** Creates a single-token partition scanner which provides tokens for different partition types */ private def partitionScanner(partition: String): ITokenScanner with AbstractIsabelleScanner = new SingleTokenScanner with IsabelleScanner { override def getToken() = getToken(IsabellePartitionToSyntaxClass(partition)) } /** Creates a scanner for Isabelle tokens */ private def tokenScanner(): ITokenScanner with AbstractIsabelleScanner = new IsabelleTokenScanner(session) with IsabelleScanner { override def getToken(syntax: Outer_Syntax, token: isabelle.Token) = getToken(IsabelleTokenToSyntaxClass(syntax, token)) } /** Creates a scanner for Isabelle markup information */ private def markupScanner(): ITokenScanner with AbstractIsabelleScanner = new IsabelleMarkupScanner(snapshot, IsabelleMarkupToSyntaxClass.markupClasses.keySet) with IsabelleScanner { override def getToken(markupType: String) = getToken(IsabelleMarkupToSyntaxClass(markupType)) } /** Creates a scanner for Isabelle markup information for document source */ private def sourceMarkupScanner(): ITokenScanner with AbstractIsabelleScanner = new IsabelleMarkupScanner(snapshot, IsabelleMarkupToSyntaxClass.sourceMarkupClasses.keySet) with IsabelleScanner { override def getToken(markupType: String) = getToken(IsabelleMarkupToSyntaxClass(markupType)) } /** Creates a scanner for Isabelle markup information for action links */ private def actionMarkupScanner(): ITokenScanner with AbstractIsabelleScanner = new IsabelleActionMarkupScanner(snapshot) with IsabelleScanner { override def getToken(markupType: String) = getToken(IsabelleMarkupToSyntaxClass(markupType)) } def handlePropertyChangeEvent(event: PropertyChangeEvent) { codeHighlightingScanners foreach (_ adaptToPreferenceChange event) } override def getHyperlinkDetectors(sourceViewer: ISourceViewer): Array[IHyperlinkDetector] = { val detectors = Option(super.getHyperlinkDetectors(sourceViewer)) getOrElse Array() val isabelleHyperlinks = new IsabelleHyperlinkDetector(snapshot) val actionHyperlinks = new IsabelleActionHyperlinkDetector(session, snapshot, targetEditor) Array(actionHyperlinks, isabelleHyperlinks) ++ detectors } override def getConfiguredTextHoverStateMasks(sourceViewer: ISourceViewer, contentType: String): Array[Int] = Array(DEFAULT_HOVER_STATE_MASK, SWT.ALT) override def getTextHover(sourceViewer: ISourceViewer, contentType: String, stateMask: Int): ITextHover = { val showAnnTooltips = annotationHoverMask == stateMask val showIsaTooltips = isabelleHoverMask == stateMask new IsabelleTextHover(session, snapshot, sourceViewer, showAnnTooltips, showIsaTooltips) { // support annotation configuration for "show in text" override protected def isIncluded(annotation: Annotation): Boolean = isShownInText(annotation) } } }
andriusvelykis/isabelle-eclipse
isabelle.eclipse.ui/src/isabelle/eclipse/ui/editors/IsabelleTheoryViewerConfiguration.scala
Scala
epl-1.0
8,841
/* * This file is part of CoAnSys project. * Copyright (c) 2012-2015 ICM-UW * * CoAnSys is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * CoAnSys is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with CoAnSys. If not, see <http://www.gnu.org/licenses/>. */ package pl.edu.icm.coansys.citations.data import feature_calculators._ import pl.edu.icm.ceon.scala_commons.classification.features.FeatureVectorBuilder import pl.edu.icm.ceon.scala_commons.classification.svm.SvmClassifier /** * @author Mateusz Fedoryszak (m.fedoryszak@icm.edu.pl) */ class SimilarityMeasurer(val featureVectorBuilder:FeatureVectorBuilder[(MatchableEntity, MatchableEntity)] = SimilarityMeasurer.advancedFvBuilder) { val classifier = SvmClassifier.fromResource("/pl/edu/icm/coansys/citations/coraHeurBal.model") def similarity(e1: MatchableEntity, e2: MatchableEntity): Double = classifier.predictProbabilities(featureVectorBuilder.calculateFeatureVectorValues((e1, e2)))(1) } object SimilarityMeasurer { val simpleFvBuilder = new FeatureVectorBuilder(List( AuthorTrigramMatchFactor, AuthorTokenMatchFactor, PagesMatchFactor, SourceMatchFactor, TitleMatchFactor, YearMatchFactor)) val advancedFvBuilder = new FeatureVectorBuilder(List( AuthorMatchFactor, AuthorTrigramMatchFactor, AuthorTokenMatchFactor, PagesMatchFactor, PagesRawTextMatchFactor, SourceMatchFactor, SourceRawTextMatchFactor, TitleMatchFactor, TitleTokenMatchFactor, YearMatchFactor, YearRawTextMatchFactor)) def main(args: Array[String]) { val measurer = new SimilarityMeasurer val doc1 = MatchableEntity.fromParameters("1", "Jan Kowalski", "J. App. Phis.", "Some random title", "120-126", "2010") val doc2 = MatchableEntity.fromParameters("2", "Jan Kowalski", "J. App. Phis.", "Totally different title", "32-36", "2010") val doc3 = MatchableEntity.fromParameters("3", "Zbigniew Nowak", "Przegląd leśniczy", "Inny tytuł", "15-20", "1995") println(measurer.similarity(doc1, doc1)) println(measurer.similarity(doc1, doc2)) println(measurer.similarity(doc1, doc3)) } }
pdendek/CoAnSys
citation-matching/citation-matching-core-code/src/main/scala/pl/edu/icm/coansys/citations/data/SimilarityMeasurer.scala
Scala
agpl-3.0
2,638
package reopp.common.benchmarks import _root_.z3.scala.{Z3Config, Z3AST, Z3Context} import reopp.common.guardedcommands._ import reopp.common.guardedcommands.dataconnectors._ import scala.math.pow import choco.kernel.model.variables.integer.IntegerExpressionVariable import choco.Choco import reopp.common.{OptionSol, Solution, IntPredicate, Utils} import Utils._ import reopp.common.guardedcommands.IntPred import z3.{Z3Solution, Z3} import reopp.common.guardedcommands.dataconnectors.ConstraintGen._ import java.util.concurrent.{TimeoutException, Callable, FutureTask, TimeUnit} /** * Created with IntelliJ IDEA. * User: jose * Date: 21/06/12 * Time: 10:25 * To change this template use File | Settings | File Templates. */ class AllApproval object AllApproval extends App { Warmup.go val n = if (!args.isEmpty) Integer.parseInt(args(0)) else 5 val satfull = if (args.size > 1) args(1) startsWith "s" else false val chocosat = if (args.size > 1) args(1) startsWith "cs" else false val choco = if (args.size > 1) (args(1) startsWith "c") && !chocosat else false val z3sat = if (args.size > 1) args(1) startsWith "zs" else false val z3 = if (args.size > 1) (args(1) startsWith "z") && !z3sat else false val quicksat = if (args.size > 1) args(1) startsWith "q" else false val lazyy = if (args.size > 1) args(1) startsWith "l" else false val justInit = if (args.size > 2) args(2) startsWith "i" else false def join(id:Int, f0:Int, f1:Int, f2:Int, f3:Int) = id*21*21*21*21 + f0*21*21*21 + f1*21*21 + f2*21 + f3 def split(joined:Int) = { val f3 = joined % 21 val f2 = ((joined - f3) / 21 ) % 21 val f1 = ((joined - f3 - f2*21) / 441 ) % 21 val f0 = ((joined - f3 - f2*21 - f1*441) / (9261) ) % 21 val id = ((joined - f3 - f2*21 - f1*441 - f0*9261) / (194481) ) (id,f0,f1,f2,f3) } def choF3(n:IntegerExpressionVariable) = Choco.mod(n,21) def choF2(n:IntegerExpressionVariable) = Choco.mod(Choco.div(Choco.minus(n,choF3(n)),21),21) def choF1(n:IntegerExpressionVariable) = Choco.mod(Choco.div(Choco.minus(Choco.minus(n,choF3(n)),Choco.mult(choF2(n),21)),441),21) def choF0(n:IntegerExpressionVariable): IntegerExpressionVariable = Choco.mod(Choco.div(Choco.minus(Choco.minus(Choco.minus(n,choF3(n)),Choco.mult(choF2(n),21)) ,Choco.mult(choF1(n),441)) ,9216) ,21) def z3F3(z:Z3Context,v:Z3AST) = z.mkMod(v,z.mkInt(21,z.mkIntSort())) def z3F2(z:Z3Context,v:Z3AST) = z.mkMod(z.mkDiv(z.mkSub(v,z3F3(z,v)),z.mkInt(21,z.mkIntSort())),z.mkInt(21,z.mkIntSort())) def z3F1(z:Z3Context,v:Z3AST) = z.mkMod(z.mkDiv(z.mkSub(v,z3F3(z,v),z.mkMul(z3F2(z,v),z.mkInt(21,z.mkIntSort()))) ,z.mkInt(441,z.mkIntSort())),z.mkInt(21,z.mkIntSort())) // ((v - f3.v - (f2.v * 21)) / 441) % 21) def z3F0(z:Z3Context,v:Z3AST) = z.mkMod(z.mkDiv(z.mkSub(v,z3F3(z,v),z.mkMul(z3F2(z,v),z.mkInt(21,z.mkIntSort())) ,z.mkMul(z3F1(z,v),z.mkInt(441,z.mkIntSort()))) ,z.mkInt(9216,z.mkIntSort())) ,z.mkInt(21,z.mkIntSort())) def genClients(n:Int): Iterable[GCWriter] = { var res = List[GCWriter]() for (i <- n to 1 by -1) { res ::= new GCWriter("w"+i,List(join(i,(i*3 % 16)+5,(i*4 % 16)+5,(i*5 % 16)+5,(i*6 % 16)+5))) // println("new writer: "+(i,(i*3 % 16)+5,(i*4 % 16)+5,(i*5 % 16)+5)+ " -- "+ // join(i,(i*3 % 16)+5,(i*4 % 16)+5,(i*5 % 16)+5)) } res } def genMergers(height:Int): Formula= { val size = pow(2,height) var srcs = List("x") var res = Formula() for (level <- 1 to height) { var newsrcs = List[String]() for (x <- srcs) { res ++= merger(x+"1",x+"2",x) newsrcs :::= List(x+"1",x+"2") } srcs = newsrcs } // println("size / n.of srcs: "+size+"/"+srcs.size) // println("clients: "+genClients(size.toInt).map(_.x)) for (wr <- genClients(size.toInt)) { srcs match { case hd::tl => res ++= (wr.getConstraints ++ sync(wr.x,hd)) srcs = tl case Nil => {} } } res } // def genMergers2(size:Int): Formula= { // val height = scala.math.log(size) / scala.math.log(2) // var srcs = List("x") // var res = Formula() // for (level <- 1 to height.toInt) { // var newsrcs = List[String]() // for (x <- srcs) { // res = res ++ new GCMerger(x+1,x+2,x,0).constraints // newsrcs :::= List(x+1,x+2) // } // srcs = newsrcs // } // // for (wr <- genClients(size.toInt)) { // srcs match { // case hd::tl => // res ++= (wr.constraints ++ new GCSync(wr.x,hd,0).constraints) // srcs = tl // case Nil => {} // } // } //// println("res: "+res.commands.mkString(",")) // res // } val approve = new Approve() val deny = new Deny() val problem = genMergers(n) ++ filter("x","app-ok",approve) ++ filter("x","den-ok",deny) ++ genfilter("x","neither-ok", v => Neg(IntPred(v,approve)) and Neg(IntPred(v,deny))) // flow("x") ++ // flow("app-ok") // writer("x",List(19)) if (justInit) problem.justInit = true else if (quicksat) { val time = System.currentTimeMillis() val res = (problem.quickDataSolveSAT4J) val spent = System.currentTimeMillis() - time print(spent) } else if (satfull) { val time = System.currentTimeMillis() val res = (problem.solveIterative) val spent = System.currentTimeMillis() - time print(spent) } else if (chocosat) { val time = System.currentTimeMillis() val res = (problem.solveChocoSat) val spent = System.currentTimeMillis() - time print(spent) } else if (choco) { val time = System.currentTimeMillis() val res = (problem.solveChoco) val spent = System.currentTimeMillis() - time print(spent) } else if (z3sat) { val z3 = new Z3Context(new Z3Config("MODEL" -> true)) val time = System.currentTimeMillis() val res = (problem.quickDataSolveZ3(z3)) val spent = System.currentTimeMillis() - time print(spent) } else if (z3) { val z3 = new Z3Context(new Z3Config("MODEL" -> true)) val time = System.currentTimeMillis() val res = (Z3.solvez3(Z3.gc2z3(problem,z3),z3)) val spent = System.currentTimeMillis() - time print(spent) } else if (lazyy) { val time = System.currentTimeMillis() val res = (problem.solveChocoPredAbstVarOrdered) val spent = System.currentTimeMillis() - time print(spent) } /// EXPERIMENTS: else { // println(" # THE PROBLEM:\n"+problem.commands.mkString(" - ","\n - ","\n")) var time: Long = 0 var res: OptionSol[Solution[_]] = null var spent: Long = 0 //// DYN-CHOCO //// time = System.currentTimeMillis() res = problem.solveChocoDyn spent = System.currentTimeMillis() - time // if (res.isDefined) println("lazy-sat - solved in "+spent+" ms:\n"+res.get.pretty) // else println("lazy-sat - no solution (in "+spent+" ms)") println("dyn-choco - "+spent) //// QUICK-SAT //// time = System.currentTimeMillis() res = problem.quickDataSolveSAT4J spent = System.currentTimeMillis() - time // if (res.isDefined) println("quick-sat - solved in "+spent+" ms:\n"+res.get.pretty) // else println("quick-sat - no solution (in "+spent+" ms)") println("quick-sat - "+spent) //// SAT-FULL //// time = System.currentTimeMillis() res = problem.solveIterative spent = System.currentTimeMillis() - time // if (res.isDefined) println("SAT-full - solved in "+spent+" ms:\n"+res.get.pretty) // else println("SAT-full - no solution (in "+spent+" ms)") println("SAT-full - "+spent) //// SATC-FULL //// time = System.currentTimeMillis() res = problem.solveChocoSat spent = System.currentTimeMillis() - time // if (res.isDefined) println("SATC-full - solved in "+spent+" ms:\n"+res.get.pretty) // else println("SATC-full - no solution (in "+spent+" ms)") println("SATC-full - "+spent) // //// CHOCO //// // time = System.currentTimeMillis() // res = problem.solveChoco // spent = System.currentTimeMillis() - time // // if (res.isDefined) println("Choco - solved in "+spent+" ms:\n"+res.get.pretty) // // else println("Choco - no solution (in "+spent+" ms)") // println("Choco - "+spent) /// Z3 //// val z3 = new Z3Context(new Z3Config("MODEL" -> true)) // z3.updateParamValue(":timeout","10000") time = System.currentTimeMillis() res = (Z3.solvez3(Z3.gc2z3(problem,z3),z3)) spent = System.currentTimeMillis() - time // if (res.isDefined) println("Z3 - solved in "+spent+" ms:\n"+res.get) // else println("Z3 - no solution (in "+spent+" ms)") println("Z3 - "+spent) //// QUICK-SAT-Z3 //// val zz3 = new Z3Context(new Z3Config("MODEL" -> true)) time = System.currentTimeMillis() res = (problem.quickDataSolveZ3(zz3)) spent = System.currentTimeMillis() - time // if (res.isDefined) println("quick-z3 - solved in "+spent+" ms:\n"+res.get) // else println("quick-z3 - no solution (in "+spent+" ms)") println("quick-z3 - "+spent) // LAZY-SAT //// time = System.currentTimeMillis() res = (problem.solveChocoPredAbstVarOrdered) spent = System.currentTimeMillis() - time // if (res.isDefined) println("lazy-sat - solved in "+spent+" ms:\n"+res.get.pretty) // else println("lazy-sat - no solution (in "+spent+" ms)") println("lazy-sat - "+spent) } } /// PREDICATES class Approve extends IntPredicate { val choPred = (x:IntegerExpressionVariable) => Choco.leq(140,Choco.sum( Choco.mult(AllApproval.choF0(x),2), Choco.mult(AllApproval.choF1(x),2), Choco.mult(AllApproval.choF2(x),3), Choco.mult(AllApproval.choF3(x),5) )) val funPred = (x:Int) => { val v = AllApproval.split(x) (v._2*2 + v._3*2 + v._4*3 + v._5*5) >= 140 } val z3Pred = (z:Z3Context,v:Z3AST) => //z.mkGT(v,z.mkInt(i,z.mkIntSort())) z.mkLE(z.mkInt(140,z.mkIntSort()),z.mkAdd( z.mkMul(AllApproval.z3F0(z,v),z.mkInt(2,z.mkIntSort())), z.mkMul(AllApproval.z3F1(z,v),z.mkInt(2,z.mkIntSort())), z.mkMul(AllApproval.z3F2(z,v),z.mkInt(3,z.mkIntSort())), z.mkMul(AllApproval.z3F3(z,v),z.mkInt(5,z.mkIntSort())) )) override def toString = "Approve" } class Deny extends IntPredicate { val choPred = (x:IntegerExpressionVariable) => Choco.geq(90,Choco.sum( Choco.mult(AllApproval.choF0(x),2), Choco.mult(AllApproval.choF1(x),2), Choco.mult(AllApproval.choF2(x),3), Choco.mult(AllApproval.choF3(x),5) )) val funPred = (x:Int) => { val v = AllApproval.split(x) (v._2*2 + v._3*2 + v._4*3 + v._5*5) <= 90 } // val z3Pred = null val z3Pred = (z:Z3Context,v:Z3AST) => //z.mkGT(v,z.mkInt(i,z.mkIntSort())) z.mkGE(z.mkInt(90,z.mkIntSort()),z.mkAdd( z.mkMul(AllApproval.z3F0(z,v),z.mkInt(2,z.mkIntSort())), z.mkMul(AllApproval.z3F1(z,v),z.mkInt(2,z.mkIntSort())), z.mkMul(AllApproval.z3F2(z,v),z.mkInt(3,z.mkIntSort())), z.mkMul(AllApproval.z3F3(z,v),z.mkInt(5,z.mkIntSort())) )) override def toString = "Deny" }
joseproenca/ip-constraints
code/src/main/scala/reopp/common/benchmarks/AllApproval.scala
Scala
mit
11,541
package controllers import client.Client import Client.{SuccessMessage, Data, FailureMessage, ClientMessage} import storage.Storage import Storage.{Entry, Get} import global.Global._ import play.api.Logger import play.api.libs.json._ import play.api.mvc.{Action, Controller} import storage.Storage import scala.concurrent.Future import play.api.libs.concurrent.Execution.Implicits.defaultContext import akka.pattern.ask import akka.util.Timeout import scala.concurrent.duration._ import play.api.libs.functional.syntax._ object Application extends Controller { implicit val timeout = Timeout(5 seconds) def index = Action { Ok(views.html.index("Discuss")) } case class Discussion(name: String) implicit val discussionReads: Reads[Discussion] = ( (JsPath \ "name").read[String].map(Discussion(_)) ) def discussion() = Action.async(parse.json) { implicit request => request.body.validate[Discussion] match { case success: JsSuccess[Discussion] => { val discussion = success.get.name val f = (dataStore ? Get("discussions")).mapTo[ClientMessage] f.flatMap { clientMessage => clientMessage match { case Data(key, value) => { Json.parse(value).validate[List[(Long, String)]] match { case success: JsSuccess[List[(Long, String)]] => { val list = success.get val count = list.size + 1 val dis = list ++ List((count.toLong, discussion)) val f = (dataStore ? Entry(key, Json.stringify(Json.toJson(dis)))) f.flatMap { clientMessage => clientMessage match { case SuccessMessage(key, msg) => { val f = (dataStore ? Entry("discussion_" + count, Json.stringify(Json.toJson(List("comment here"))))) f.flatMap {result => result match { case SuccessMessage(key, msg) => Future(Ok(Json.obj("success" -> "operation successful."))) case FailureMessage(key, msg) => Future(Ok(Json.obj("error" -> "Found not create comments key."))) } } } case FailureMessage(key, msg) => Future(Ok(Json.obj("error" -> "operation failed."))) } } } case error: JsError => Future(Ok(Json.obj("error" -> "Internal Json format in bad shape."))) } } case FailureMessage(key, msg) => { Future(Ok(Json.obj("error" -> s"reason: ${msg}"))) } } } } case error: JsError => { Future(Ok(Json.obj("error" -> "bad json format"))) } } } implicit val writes: Writes[(Long, String)] = new Writes[(Long, String)] { override def writes(o: (Long, String)): JsValue = { Json.obj("id" -> o._1, "headline" -> o._2) } } implicit val reads: Reads[(Long, String)] = new Reads[(Long, String)] { override def reads(json: JsValue): JsResult[(Long, String)] = { for { id <- (json \ "id").validate[Long] headline <- (json \ "headline").validate[String] } yield (id, headline) } } def discussions() = Action.async {implicit request => { val future = (dataStore ? Get("discussions")).mapTo[ClientMessage] future.map(value => { value match { case Data(key, value) => { Ok(Json.obj("discussions" -> Json.parse(value))) } case FailureMessage(key, msg) => Ok(Json.obj("error" -> "operation failed.")) } }).recover{case throwable: Throwable => Ok(Json.obj("error" -> throwable.getMessage))} }} def discuss(id: Long) = Action.async { implicit request => val future = (dataStore ? Get("discussions")).mapTo[ClientMessage] future.flatMap {value => { value match { case Data(key, value) => { Json.parse(value).validate[List[(Long, String)]] match { case success: JsSuccess[List[(Long, String)]] => { val listMap = success.get.toMap Future(Ok(views.html.discuss(listMap(id), id))) } case error: JsError => Future(Ok(Json.obj("error" -> "internal json parsing error"))) } } case FailureMessage(key, value) => Future(Ok(Json.obj("error" -> s"Failed reason ${value}"))) } }} } case class Comment(did: Long, comment: String) implicit val commentReads: Reads[Comment] = ( (JsPath \ "did").read[Long] and (JsPath \ "comment").read[String] )(Comment.apply _) def comment() = Action.async(parse.json) { implicit request => request.body.validate[Comment] match { case success: JsSuccess[Comment] => { val com = success.get.comment val did = success.get.did val f = (dataStore ? Get("discussion_" + success.get.did)).mapTo[ClientMessage] f.flatMap{result => { result match { case Data(key, value) => { Json.parse(value).validate[List[String]] match { case success: JsSuccess[List[String]] => { val list = success.get val newList = list ++ List(com) Logger.info("play log" + newList.mkString(" ")) val future = (dataStore ? Entry("discussion_" + did, Json.stringify(Json.toJson(newList)))).mapTo[ClientMessage] future.flatMap { result => { result match { case SuccessMessage(key, msg) => Future(Ok(Json.obj("done" -> msg))) case FailureMessage(key, msg) => Future(Ok(Json.obj("error" -> msg))) } }} } case error: JsError => Future(Ok(Json.obj("error" -> "internal json parsing error"))) } } case FailureMessage(key, msg) => Future(Ok(Json.obj("error" -> s"failed reason ${msg}"))) } }} } case error: JsError => { Future(Ok(Json.obj("error" -> "json format not accepted"))) } } } def comments(id: Long) = Action.async { implicit request => { val key = "discussion_"+id val f = (dataStore ? Get(key)).mapTo[ClientMessage] f.flatMap {result => { result match { case Data(key, value) => { Json.parse(value).validate[List[String]] match { case success: JsSuccess[List[String]] => { Future(Ok(Json.obj("comments" -> success.get))) } case error: JsError => Future(Ok(Json.obj("error" -> "internal json parsing error"))) } } case FailureMessage(key, msg) => Future(Ok(Json.obj("error" -> msg))) } }}.recover{ case throwable: Throwable => Ok(Json.obj("error" -> throwable.getMessage))} }} }
pamu/discuss
app/controllers/Application.scala
Scala
apache-2.0
6,988
object t4202_1 { () => { trait T { def t = () } } } object t4202_2 { () => { trait T { def t = () } object T2 extends T { t } } }
AlexSikia/dotty
tests/pending/pos/t4202.scala
Scala
bsd-3-clause
181
package com.twitter.diffy.proxy import javax.inject.Singleton import com.google.inject.Provides import com.twitter.diffy.analysis._ import com.twitter.diffy.lifter.Message import com.twitter.finagle._ import com.twitter.inject.TwitterModule import com.twitter.logging.Logger import com.twitter.util._ object DifferenceProxyModule extends TwitterModule { @Provides @Singleton def providesDifferenceProxy( settings: Settings, collector: InMemoryDifferenceCollector, joinedDifferences: JoinedDifferences, analyzer: DifferenceAnalyzer ): DifferenceProxy = settings.protocol match { case "thrift" => ThriftDifferenceProxy(settings, collector, joinedDifferences, analyzer) case "http" => SimpleHttpDifferenceProxy(settings, collector, joinedDifferences, analyzer) case "https" => SimpleHttpsDifferenceProxy(settings, collector, joinedDifferences, analyzer) } } object DifferenceProxy { object NoResponseException extends Exception("No responses provided by diffy") val NoResponseExceptionFuture = Future.exception(NoResponseException) val log = Logger(classOf[DifferenceProxy]) } trait DifferenceProxy { import DifferenceProxy._ type Req type Rep type Srv <: ClientService[Req, Rep] val server: ListeningServer val settings: Settings var lastReset: Time = Time.now def serviceFactory(serverset: String, label: String): Srv def liftRequest(req: Req): Future[Message] def liftResponse(rep: Try[Rep]): Future[Message] // Clients for services val candidate = serviceFactory(settings.candidate.path, "candidate") val primary = serviceFactory(settings.primary.path, "primary") val secondary = serviceFactory(settings.secondary.path, "secondary") val collector: InMemoryDifferenceCollector val joinedDifferences: JoinedDifferences val analyzer: DifferenceAnalyzer private[this] lazy val multicastHandler = new SequentialMulticastService(Seq(primary.client, candidate.client, secondary.client)) def proxy = new Service[Req, Rep] { override def apply(req: Req): Future[Rep] = { val rawResponses = multicastHandler(req) respond { case Return(_) => log.debug("success networking") case Throw(t) => log.debug(t, "error networking") } val responses: Future[Seq[Message]] = rawResponses flatMap { reps => Future.collect(reps map liftResponse) respond { case Return(rs) => log.debug(s"success lifting ${rs.head.endpoint}") case Throw(t) => log.debug(t, "error lifting") } } responses foreach { case Seq(primaryResponse, candidateResponse, secondaryResponse) => liftRequest(req) respond { case Return(m) => log.debug(s"success lifting request for ${m.endpoint}") case Throw(t) => log.debug(t, "error lifting request") } foreach { req => analyzer(req, candidateResponse, primaryResponse, secondaryResponse, settings.differenceConf) } } NoResponseExceptionFuture } } def clear() = { lastReset = Time.now analyzer.clear() } }
ljbx/diffy
src/main/scala/com/twitter/diffy/proxy/DifferenceProxy.scala
Scala
apache-2.0
3,171
package com.outr.arango.api.model import io.circe.Json case class PutApiQueryCacheProperties(includeSystem: Option[Boolean] = None, maxEntrySize: Option[Long] = None, maxResults: Option[Long] = None, maxResultsSize: Option[Long] = None, mode: Option[String] = None)
outr/arangodb-scala
api/src/main/scala/com/outr/arango/api/model/PutApiQueryCacheProperties.scala
Scala
mit
420
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.{lang => jl} import scala.collection.JavaConverters._ import org.apache.spark.annotation.InterfaceStability import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.functions._ import org.apache.spark.sql.types._ /** * Functionality for working with missing data in `DataFrame`s. * * @since 1.3.1 */ @InterfaceStability.Stable final class DataFrameNaFunctions private[sql](df: DataFrame) { /** * Returns a new `DataFrame` that drops rows containing any null or NaN values. * * @since 1.3.1 */ def drop(): DataFrame = drop("any", df.columns) /** * Returns a new `DataFrame` that drops rows containing null or NaN values. * * If `how` is "any", then drop rows containing any null or NaN values. * If `how` is "all", then drop rows only if every column is null or NaN for that row. * * @since 1.3.1 */ def drop(how: String): DataFrame = drop(how, df.columns) /** * Returns a new `DataFrame` that drops rows containing any null or NaN values * in the specified columns. * * @since 1.3.1 */ def drop(cols: Array[String]): DataFrame = drop(cols.toSeq) /** * (Scala-specific) Returns a new `DataFrame` that drops rows containing any null or NaN values * in the specified columns. * * @since 1.3.1 */ def drop(cols: Seq[String]): DataFrame = drop(cols.size, cols) /** * Returns a new `DataFrame` that drops rows containing null or NaN values * in the specified columns. * * If `how` is "any", then drop rows containing any null or NaN values in the specified columns. * If `how` is "all", then drop rows only if every specified column is null or NaN for that row. * * @since 1.3.1 */ def drop(how: String, cols: Array[String]): DataFrame = drop(how, cols.toSeq) /** * (Scala-specific) Returns a new `DataFrame` that drops rows containing null or NaN values * in the specified columns. * * If `how` is "any", then drop rows containing any null or NaN values in the specified columns. * If `how` is "all", then drop rows only if every specified column is null or NaN for that row. * * @since 1.3.1 */ def drop(how: String, cols: Seq[String]): DataFrame = { how.toLowerCase match { case "any" => drop(cols.size, cols) case "all" => drop(1, cols) case _ => throw new IllegalArgumentException(s"how ($how) must be 'any' or 'all'") } } /** * Returns a new `DataFrame` that drops rows containing * less than `minNonNulls` non-null and non-NaN values. * * @since 1.3.1 */ def drop(minNonNulls: Int): DataFrame = drop(minNonNulls, df.columns) /** * Returns a new `DataFrame` that drops rows containing * less than `minNonNulls` non-null and non-NaN values in the specified columns. * * @since 1.3.1 */ def drop(minNonNulls: Int, cols: Array[String]): DataFrame = drop(minNonNulls, cols.toSeq) /** * (Scala-specific) Returns a new `DataFrame` that drops rows containing less than * `minNonNulls` non-null and non-NaN values in the specified columns. * * @since 1.3.1 */ def drop(minNonNulls: Int, cols: Seq[String]): DataFrame = { // Filtering condition: // only keep the row if it has at least `minNonNulls` non-null and non-NaN values. val predicate = AtLeastNNonNulls(minNonNulls, cols.map(name => df.resolve(name))) df.filter(Column(predicate)) } /** * Returns a new `DataFrame` that replaces null or NaN values in numeric columns with `value`. * * @since 2.1.1 */ def fill(value: Long): DataFrame = fill(value, df.columns) /** * Returns a new `DataFrame` that replaces null or NaN values in numeric columns with `value`. * @since 1.3.1 */ def fill(value: Double): DataFrame = fill(value, df.columns) /** * Returns a new `DataFrame` that replaces null values in string columns with `value`. * * @since 1.3.1 */ def fill(value: String): DataFrame = fill(value, df.columns) /** * Returns a new `DataFrame` that replaces null or NaN values in specified numeric columns. * If a specified column is not a numeric column, it is ignored. * * @since 2.1.1 */ def fill(value: Long, cols: Array[String]): DataFrame = fill(value, cols.toSeq) /** * Returns a new `DataFrame` that replaces null or NaN values in specified numeric columns. * If a specified column is not a numeric column, it is ignored. * * @since 1.3.1 */ def fill(value: Double, cols: Array[String]): DataFrame = fill(value, cols.toSeq) /** * (Scala-specific) Returns a new `DataFrame` that replaces null or NaN values in specified * numeric columns. If a specified column is not a numeric column, it is ignored. * * @since 2.1.1 */ def fill(value: Long, cols: Seq[String]): DataFrame = fillValue(value, cols) /** * (Scala-specific) Returns a new `DataFrame` that replaces null or NaN values in specified * numeric columns. If a specified column is not a numeric column, it is ignored. * * @since 1.3.1 */ def fill(value: Double, cols: Seq[String]): DataFrame = fillValue(value, cols) /** * Returns a new `DataFrame` that replaces null values in specified string columns. * If a specified column is not a string column, it is ignored. * * @since 1.3.1 */ def fill(value: String, cols: Array[String]): DataFrame = fill(value, cols.toSeq) /** * (Scala-specific) Returns a new `DataFrame` that replaces null values in * specified string columns. If a specified column is not a string column, it is ignored. * * @since 1.3.1 */ def fill(value: String, cols: Seq[String]): DataFrame = fillValue(value, cols) /** * Returns a new `DataFrame` that replaces null values. * * The key of the map is the column name, and the value of the map is the replacement value. * The value must be of the following type: * `Integer`, `Long`, `Float`, `Double`, `String`, `Boolean`. * Replacement values are cast to the column data type. * * For example, the following replaces null values in column "A" with string "unknown", and * null values in column "B" with numeric value 1.0. * {{{ * import com.google.common.collect.ImmutableMap; * df.na.fill(ImmutableMap.of("A", "unknown", "B", 1.0)); * }}} * * @since 1.3.1 */ def fill(valueMap: java.util.Map[String, Any]): DataFrame = fillMap(valueMap.asScala.toSeq) /** * (Scala-specific) Returns a new `DataFrame` that replaces null values. * * The key of the map is the column name, and the value of the map is the replacement value. * The value must be of the following type: `Int`, `Long`, `Float`, `Double`, `String`, `Boolean`. * Replacement values are cast to the column data type. * * For example, the following replaces null values in column "A" with string "unknown", and * null values in column "B" with numeric value 1.0. * {{{ * df.na.fill(Map( * "A" -> "unknown", * "B" -> 1.0 * )) * }}} * * @since 1.3.1 */ def fill(valueMap: Map[String, Any]): DataFrame = fillMap(valueMap.toSeq) /** * Replaces values matching keys in `replacement` map with the corresponding values. * Key and value of `replacement` map must have the same type, and * can only be doubles, strings or booleans. * If `col` is "*", then the replacement is applied on all string columns or numeric columns. * * {{{ * import com.google.common.collect.ImmutableMap; * * // Replaces all occurrences of 1.0 with 2.0 in column "height". * df.replace("height", ImmutableMap.of(1.0, 2.0)); * * // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "name". * df.replace("name", ImmutableMap.of("UNKNOWN", "unnamed")); * * // Replaces all occurrences of "UNKNOWN" with "unnamed" in all string columns. * df.replace("*", ImmutableMap.of("UNKNOWN", "unnamed")); * }}} * * @param col name of the column to apply the value replacement * @param replacement value replacement map, as explained above * * @since 1.3.1 */ def replace[T](col: String, replacement: java.util.Map[T, T]): DataFrame = { replace[T](col, replacement.asScala.toMap) } /** * Replaces values matching keys in `replacement` map with the corresponding values. * Key and value of `replacement` map must have the same type, and * can only be doubles, strings or booleans. * * {{{ * import com.google.common.collect.ImmutableMap; * * // Replaces all occurrences of 1.0 with 2.0 in column "height" and "weight". * df.replace(new String[] {"height", "weight"}, ImmutableMap.of(1.0, 2.0)); * * // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "firstname" and "lastname". * df.replace(new String[] {"firstname", "lastname"}, ImmutableMap.of("UNKNOWN", "unnamed")); * }}} * * @param cols list of columns to apply the value replacement * @param replacement value replacement map, as explained above * * @since 1.3.1 */ def replace[T](cols: Array[String], replacement: java.util.Map[T, T]): DataFrame = { replace(cols.toSeq, replacement.asScala.toMap) } /** * (Scala-specific) Replaces values matching keys in `replacement` map. * Key and value of `replacement` map must have the same type, and * can only be doubles, strings or booleans. * If `col` is "*", * then the replacement is applied on all string columns , numeric columns or boolean columns. * * {{{ * // Replaces all occurrences of 1.0 with 2.0 in column "height". * df.replace("height", Map(1.0 -> 2.0)) * * // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "name". * df.replace("name", Map("UNKNOWN" -> "unnamed") * * // Replaces all occurrences of "UNKNOWN" with "unnamed" in all string columns. * df.replace("*", Map("UNKNOWN" -> "unnamed") * }}} * * @param col name of the column to apply the value replacement * @param replacement value replacement map, as explained above * * @since 1.3.1 */ def replace[T](col: String, replacement: Map[T, T]): DataFrame = { if (col == "*") { replace0(df.columns, replacement) } else { replace0(Seq(col), replacement) } } /** * (Scala-specific) Replaces values matching keys in `replacement` map. * Key and value of `replacement` map must have the same type, and * can only be doubles , strings or booleans. * * {{{ * // Replaces all occurrences of 1.0 with 2.0 in column "height" and "weight". * df.replace("height" :: "weight" :: Nil, Map(1.0 -> 2.0)); * * // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "firstname" and "lastname". * df.replace("firstname" :: "lastname" :: Nil, Map("UNKNOWN" -> "unnamed"); * }}} * * @param cols list of columns to apply the value replacement * @param replacement value replacement map, as explained above * * @since 1.3.1 */ def replace[T](cols: Seq[String], replacement: Map[T, T]): DataFrame = replace0(cols, replacement) private def replace0[T](cols: Seq[String], replacement: Map[T, T]): DataFrame = { if (replacement.isEmpty || cols.isEmpty) { return df } // replacementMap is either Map[String, String] or Map[Double, Double] or Map[Boolean,Boolean] val replacementMap: Map[_, _] = replacement.head._2 match { case v: String => replacement case v: Boolean => replacement case _ => replacement.map { case (k, v) => (convertToDouble(k), convertToDouble(v)) } } // targetColumnType is either DoubleType or StringType or BooleanType val targetColumnType = replacement.head._1 match { case _: jl.Double | _: jl.Float | _: jl.Integer | _: jl.Long => DoubleType case _: jl.Boolean => BooleanType case _: String => StringType } val columnEquals = df.sparkSession.sessionState.analyzer.resolver val projections = df.schema.fields.map { f => val shouldReplace = cols.exists(colName => columnEquals(colName, f.name)) if (f.dataType.isInstanceOf[NumericType] && targetColumnType == DoubleType && shouldReplace) { replaceCol(f, replacementMap) } else if (f.dataType == targetColumnType && shouldReplace) { replaceCol(f, replacementMap) } else { df.col(f.name) } } df.select(projections : _*) } private def fillMap(values: Seq[(String, Any)]): DataFrame = { // Error handling values.foreach { case (colName, replaceValue) => // Check column name exists df.resolve(colName) // Check data type replaceValue match { case _: jl.Double | _: jl.Float | _: jl.Integer | _: jl.Long | _: jl.Boolean | _: String => // This is good case _ => throw new IllegalArgumentException( s"Unsupported value type ${replaceValue.getClass.getName} ($replaceValue).") } } val columnEquals = df.sparkSession.sessionState.analyzer.resolver val projections = df.schema.fields.map { f => values.find { case (k, _) => columnEquals(k, f.name) }.map { case (_, v) => v match { case v: jl.Float => fillCol[Float](f, v) case v: jl.Double => fillCol[Double](f, v) case v: jl.Long => fillCol[Long](f, v) case v: jl.Integer => fillCol[Integer](f, v) case v: jl.Boolean => fillCol[Boolean](f, v.booleanValue()) case v: String => fillCol[String](f, v) } }.getOrElse(df.col(f.name)) } df.select(projections : _*) } /** * Returns a [[Column]] expression that replaces null value in `col` with `replacement`. */ private def fillCol[T](col: StructField, replacement: T): Column = { val quotedColName = "`" + col.name + "`" val colValue = col.dataType match { case DoubleType | FloatType => nanvl(df.col(quotedColName), lit(null)) // nanvl only supports these types case _ => df.col(quotedColName) } coalesce(colValue, lit(replacement).cast(col.dataType)).as(col.name) } /** * Returns a [[Column]] expression that replaces value matching key in `replacementMap` with * value in `replacementMap`, using [[CaseWhen]]. * * TODO: This can be optimized to use broadcast join when replacementMap is large. */ private def replaceCol(col: StructField, replacementMap: Map[_, _]): Column = { val keyExpr = df.col(col.name).expr def buildExpr(v: Any) = Cast(Literal(v), keyExpr.dataType) val branches = replacementMap.flatMap { case (source, target) => Seq(buildExpr(source), buildExpr(target)) }.toSeq new Column(CaseKeyWhen(keyExpr, branches :+ keyExpr)).as(col.name) } private def convertToDouble(v: Any): Double = v match { case v: Float => v.toDouble case v: Double => v case v: Long => v.toDouble case v: Int => v.toDouble case v => throw new IllegalArgumentException( s"Unsupported value type ${v.getClass.getName} ($v).") } /** * Returns a new `DataFrame` that replaces null or NaN values in specified * numeric, string columns. If a specified column is not a numeric, string column, * it is ignored. */ private def fillValue[T](value: T, cols: Seq[String]): DataFrame = { // the fill[T] which T is Long/Double, // should apply on all the NumericType Column, for example: // val input = Seq[(java.lang.Integer, java.lang.Double)]((null, 164.3)).toDF("a","b") // input.na.fill(3.1) // the result is (3,164.3), not (null, 164.3) val targetType = value match { case _: Double | _: Long => NumericType case _: String => StringType case _ => throw new IllegalArgumentException( s"Unsupported value type ${value.getClass.getName} ($value).") } val columnEquals = df.sparkSession.sessionState.analyzer.resolver val projections = df.schema.fields.map { f => val typeMatches = (targetType, f.dataType) match { case (NumericType, dt) => dt.isInstanceOf[NumericType] case (StringType, dt) => dt == StringType } // Only fill if the column is part of the cols list. if (typeMatches && cols.exists(col => columnEquals(f.name, col))) { fillCol[T](f, value) } else { df.col(f.name) } } df.select(projections : _*) } }
u2009cf/spark-radar
sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala
Scala
apache-2.0
17,215
package maker import ch.qos.logback.classic.Logger import org.slf4j.LoggerFactory import org.slf4j.helpers.NOPLogger trait Log { lazy val logger = { Log.getLogger() } } object Log { private def getLogger(): org.slf4j.Logger = { // Moved to a synchronized block as the LoggerFactory returns NOPLoggers whenever // these are constructed in parallel this.synchronized { def rec(numTries: Int): org.slf4j.Logger = { val l = LoggerFactory.getLogger(getClass) if (l.isInstanceOf[NOPLogger]) { if (numTries >= 5) throw new Exception(s"Failed to get logger after $numTries attempts") Thread.sleep(100) rec(numTries + 1) } else { l } } rec(0) } } }
cage433/maker
maker/src/maker/Log.scala
Scala
bsd-2-clause
775
package toguru.impl import java.util.concurrent.atomic.AtomicReference import java.util.concurrent.{Executors, ScheduledExecutorService, TimeUnit} import com.hootsuite.circuitbreaker.CircuitBreakerBuilder import com.typesafe.scalalogging.StrictLogging import play.api.libs.functional.syntax._ import play.api.libs.json.Reads._ import play.api.libs.json._ import toguru.api.{Activations, DefaultActivations} import toguru.impl.RemoteActivationsProvider._ import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} import scalaj.http.Http object RemoteActivationsProvider { val MimeApiV3 = "application/vnd.toguru.v3+json" case class PollResponse(code: Int, contentType: String, content: String) type TogglePoller = (Option[Long]) => PollResponse val toggleStateReadsUntilV2: Reads[ToggleStates] = { val toggleStateV1Reads = ( (JsPath \\ "id").read[String] and (JsPath \\ "rolloutPercentage").readNullable[Int] and (JsPath \\ "tags").read[Map[String, String]] )((id, p, tags) => ToggleState(id, tags, Seq(ToggleActivation(p.map(Rollout))))) val toggleStatesV1Reads = Reads.seq(toggleStateV1Reads).map(ts => ToggleStates(None, ts)) val toggleStatesV2Reads = ( (JsPath \\ "sequenceNo").read[Int] and (JsPath \\ "toggles").read(Reads.list(toggleStateV1Reads)) )((seqNo, toggles) => ToggleStates(Some(seqNo), toggles)) toggleStatesV2Reads or toggleStatesV1Reads } val toggleStateReads = { implicit val rolloutReads = Json.reads[Rollout] implicit val activationReads = Json.reads[ToggleActivation] implicit val toggleStateReads = ( (JsPath \\ "id").read[String] and (JsPath \\ "activations").read[Seq[ToggleActivation]] and (JsPath \\ "tags").read[Map[String, String]] )((id, acts, tags) => ToggleState(id, tags, acts)) Json.reads[ToggleStates] } val executor = Executors.newScheduledThreadPool(1) sys.addShutdownHook(executor.shutdownNow()) private val circuitBreakerBuilder = CircuitBreakerBuilder( name = "toguru-server-breaker", failLimit = 5, retryDelay = FiniteDuration(20, TimeUnit.SECONDS) ) /** * Create an activation provider that fetches the toggle activations conditions from the toguru server given. * * @param endpointUrl the endpoint of the toguru server, e.g. <code>http://localhost:9000</code> * @param pollInterval the poll interval to use for querying the toguru server * @return */ def apply(endpointUrl: String, pollInterval: Duration = 2.seconds, circuitBreakerBuilder: CircuitBreakerBuilder = circuitBreakerBuilder): RemoteActivationsProvider = { val poller: TogglePoller = { maybeSeqNo => val maybeSeqNoParam = maybeSeqNo.map(seqNo => s"?seqNo=$seqNo").mkString val response = Http(endpointUrl + s"/togglestate$maybeSeqNoParam").header("Accept", MimeApiV3).timeout(500, 750).asString PollResponse(response.code, response.contentType.getOrElse(""), response.body) } new RemoteActivationsProvider(poller, executor, pollInterval, circuitBreakerBuilder) } } /** * Fetches the toggle activation conditions from a toguru server. * * @param poller the poller to fetch toggle states * @param executor the executor service for scheduling polling * @param pollInterval the polling interval * @param circuitBreakerBuilder the circuit breaker builder to use for creating the circuit breaker. */ class RemoteActivationsProvider( poller: TogglePoller, executor: ScheduledExecutorService, val pollInterval: Duration = 2.seconds, val circuitBreakerBuilder: CircuitBreakerBuilder = RemoteActivationsProvider.circuitBreakerBuilder) extends Activations.Provider with ToguruClientMetrics with StrictLogging { val circuitBreaker = circuitBreakerBuilder.build() val schedule = executor.scheduleAtFixedRate(new Runnable() { def run(): Unit = update() }, pollInterval.toMillis, pollInterval.toMillis, TimeUnit.MILLISECONDS) sys.addShutdownHook { close() } val currentActivation = new AtomicReference[Activations](DefaultActivations) def update() = { val sequenceNo = currentActivation.get().stateSequenceNo fetchToggleStates(sequenceNo).foreach(ts => currentActivation.set(new ToggleStateActivations(ts))) } def close(): RemoteActivationsProvider = { schedule.cancel(true) deregister() this } def fetchToggleStates(sequenceNo: Option[Long]): Option[ToggleStates] = { def sequenceNoValid(toggleStates: ToggleStates) = (sequenceNo, toggleStates.sequenceNo) match { case (None, _) => true case (Some(a), Some(b)) => a <= b case (Some(_), None) => false } def parseBody(response: PollResponse): Try[ToggleStates] = { val reads = response.contentType match { case MimeApiV3 => toggleStateReads case _ => toggleStateReadsUntilV2 } Try(Json.parse(response.content).as(reads)) } Try(circuitBreaker() { poller(sequenceNo) }) match { case Success(r @ PollResponse(code, _, body)) => val tryToggleStates = parseBody(r) (code, tryToggleStates) match { case (200, Success(toggleStates)) if sequenceNoValid(toggleStates) => fetchSuccess() Some(toggleStates) case (200, Success(toggleStates)) => logger.warn(s"Server response contains stale state (sequenceNo. is '${toggleStates.sequenceNo.mkString}'), client sequenceNo is '${sequenceNo.mkString}'.") fetchFailed() None case _ => logger.warn(s"Polling registry failed, got response code $code and body '$body'") fetchFailed() None } case Failure(e) => logger.warn(s"Polling registry failed (${e.getClass.getName}: ${e.getMessage})") connectError() None } } override def apply() = currentActivation.get() override def currentSequenceNo: Option[Long] = apply().stateSequenceNo }
andreas-schroeder/toguru-scala-client
src/main/scala/toguru/impl/RemoteActivationsProvider.scala
Scala
mit
6,118
package free import scala.language.higherKinds case class Mu[F[_]](in: F[Mu[F]]) { def fold[B](f: F[B] => B)(implicit F: Functor[F]): B = f(F.map(in)(_.fold(f))) }
YoEight/psug-free
src/main/scala/free/Mu.scala
Scala
mit
172
/** Copyright 2015 TappingStone, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prediction.workflow import io.prediction.controller.EmptyParams import io.prediction.controller.Engine import io.prediction.controller.EngineParams import io.prediction.controller.IPersistentModel import io.prediction.controller.LAlgorithm import io.prediction.controller.PAlgorithm import io.prediction.controller.Metric import io.prediction.controller.Params import io.prediction.controller.Utils import io.prediction.controller.NiceRendering import io.prediction.controller.SanityCheck /* import io.prediction.controller.java.LJavaDataSource import io.prediction.controller.java.LJavaPreparator import io.prediction.controller.java.LJavaAlgorithm import io.prediction.controller.java.LJavaServing import io.prediction.controller.java.JavaEvaluator import io.prediction.controller.java.JavaUtils import io.prediction.controller.java.JavaEngine import io.prediction.controller.java.PJavaAlgorithm */ import io.prediction.controller.WorkflowParams import io.prediction.core.BaseAlgorithm import io.prediction.core.BaseDataSource import io.prediction.core.BaseEvaluator import io.prediction.core.BasePreparator import io.prediction.core.BaseServing import io.prediction.core.BaseEngine import io.prediction.core.Doer // import io.prediction.core.LModelAlgorithm import io.prediction.data.storage.EngineInstance import io.prediction.data.storage.EngineInstances import io.prediction.data.storage.Model import io.prediction.data.storage.Storage import com.github.nscala_time.time.Imports.DateTime import com.twitter.chill.KryoInjection import grizzled.slf4j.{ Logger, Logging } import org.apache.spark.SparkContext import org.apache.spark.SparkContext._ import org.apache.spark.SparkConf import org.apache.spark.rdd.RDD import org.json4s._ import org.json4s.native.Serialization.write import org.json4s.native.Serialization.writePretty import scala.collection.JavaConversions._ import scala.language.existentials import scala.reflect.ClassTag import scala.reflect.Manifest import java.io.FileOutputStream import java.io.ObjectOutputStream import java.io.FileInputStream import java.io.ObjectInputStream import java.lang.{ Iterable => JIterable } import java.util.{ HashMap => JHashMap, Map => JMap } // FIXME: move to better location. object WorkflowContext extends Logging { def apply( batch: String = "", executorEnv: Map[String, String] = Map(), sparkEnv: Map[String, String] = Map(), mode: String = "" ): SparkContext = { val conf = new SparkConf() val prefix = if (mode == "") "PredictionIO" else s"PredictionIO ${mode}" conf.setAppName(s"${prefix}: ${batch}") debug(s"Executor environment received: ${executorEnv}") executorEnv.map(kv => conf.setExecutorEnv(kv._1, kv._2)) debug(s"SparkConf executor environment: ${conf.getExecutorEnv}") debug(s"Application environment received: ${sparkEnv}") conf.setAll(sparkEnv) val sparkConfString = conf.getAll.toSeq debug(s"SparkConf environment: $sparkConfString") new SparkContext(conf) } }
nvoron23/PredictionIO
core/src/main/scala/workflow/WorkflowContext.scala
Scala
apache-2.0
3,644
class C { def +++(a: Int, b: Int): C = new C } var v = new C v /* line: 2, name: +++, applicable: false */ +++= 1 v /* line: 2, name: +++ */ +++= (1, 2)
ilinum/intellij-scala
testdata/resolve2/function/assignment/TwoArguments.scala
Scala
apache-2.0
158