code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package a17
// Exercise for non-ASCII (CJK) identifier support: nested classes whose
// `Tail`/`Head` type members chain one level into the next.
// NOTE(review): `我` and `叼` are not defined in this view — presumably they
// come from elsewhere in the a17 package; confirm they resolve.
class 第二重 {
  type Tail = 开始
  type Head = 我
  class 开始 {
    type Tail = 继续
    type Head = 叼
    class 继续
  }
}
| djx314/ubw | a17-432/src/main/scala/第二重.scala | Scala | bsd-3-clause | 153 |
package org.http4s.server.websocket
import cats.Applicative
import cats.implicits._
import fs2.{Pipe, Stream}
import org.http4s.websocket.{WebSocket, WebSocketContext, WebSocketFrame}
import org.http4s.{Headers, Response, Status}
/**
* Build a response which will accept an HTTP websocket upgrade request and initiate a websocket connection using the
* supplied exchange to process and respond to websocket messages.
* @param send The send side of the Exchange represents the outgoing stream of messages that should be sent to the client
* @param receive The receive side of the Exchange is a sink to which the framework will push the incoming websocket messages
* Once both streams have terminated, the server will initiate a close of the websocket connection.
* As defined in the websocket specification, this means the server
* will send a CloseFrame to the client and wait for a CloseFrame in response before closing the
* connection, this ensures that no messages are lost in flight. The server will shutdown the
* connection when it receives the `CloseFrame` message back from the client. The connection will also
* be closed if the client does not respond with a `CloseFrame` after some reasonable amount of
* time.
* Another way of closing the connection is by emitting a `CloseFrame` in the stream of messages
* heading to the client. This method allows one to attach a message to the `CloseFrame` as defined
* by the websocket protocol.
* Unfortunately the current implementation does not quite respect the description above, it violates
* the websocket protocol by terminating the connection immediately upon reception
* of a `CloseFrame`. This bug will be addressed soon in an upcoming release and this message will be
* removed.
* Currently, there is no way for the server to be notified when the connection is closed, neither in
* the case of a normal disconnection such as a Close handshake or due to a connection error. There
* are plans to address this limitation in the future.
* @param headers Handshake response headers, such as: Sec-WebSocket-Protocol.
* @param onNonWebSocketRequest The status code to return to a client making a non-websocket HTTP request to this route.
* default: NotImplemented
* @param onHandshakeFailure The status code to return when failing to handle a websocket HTTP request to this route.
* default: BadRequest
*/
// Immutable bundle of everything needed to answer a websocket upgrade request.
// The individual fields are documented in the scaladoc directly above.
final case class WebSocketBuilder[F[_]](
    send: Stream[F, WebSocketFrame],
    receive: Pipe[F, WebSocketFrame, Unit],
    headers: Headers,
    onNonWebSocketRequest: F[Response[F]],
    onHandshakeFailure: F[Response[F]])
object WebSocketBuilder {

  /**
   * Staged builder: `F` must have an `Applicative` so the default
   * responses can be lifted into it.
   */
  class Builder[F[_]: Applicative] {

    /**
     * Assembles the websocket upgrade response.
     *
     * @param send outgoing frames sent to the client
     * @param receive sink for frames received from the client
     * @param headers extra handshake response headers
     * @param onNonWebSocketRequest response for plain (non-upgrade) HTTP requests
     * @param onHandshakeFailure response when the websocket handshake fails
     * @param onClose effect run when the connection closes
     */
    def build(
        send: Stream[F, WebSocketFrame],
        receive: Pipe[F, WebSocketFrame, Unit],
        headers: Headers = Headers.empty,
        onNonWebSocketRequest: F[Response[F]] =
          Response[F](Status.NotImplemented).withEntity("This is a WebSocket route.").pure[F],
        onHandshakeFailure: F[Response[F]] = Response[F](Status.BadRequest)
          .withEntity("WebSocket handshake failed.")
          .pure[F],
        onClose: F[Unit] = Applicative[F].unit): F[Response[F]] =
      // Previously a throwaway WebSocketBuilder case class was constructed just
      // to read back this same `onNonWebSocketRequest` parameter; use it directly.
      onNonWebSocketRequest.map(
        _.withAttribute(
          websocketKey[F],
          WebSocketContext(WebSocket(send, receive, onClose), headers, onHandshakeFailure))
      )
  }

  def apply[F[_]: Applicative]: Builder[F] = new Builder[F]
}
| aeons/http4s | server/src/main/scala/org/http4s/server/websocket/WebSocketBuilder.scala | Scala | apache-2.0 | 3,904 |
package org.json4s
package examples
import com.ning.http.client.Response
import dispatch._, Defaults._
import com.mongodb._
import mongo.JObjectParser
// Example script: fetch the public Swagger petstore listing and pet API
// description over HTTP and persist both documents into a local MongoDB.
object MongoExamples extends App with jackson.JsonMethods {

  import Api.formats

  // Response-body readers for dispatch HTTP calls.
  object read {
    // Reads the full response body as a String and parses it into a JValue.
    object Json extends (Response => JValue) {
      def apply(r: Response) =
        (dispatch.as.String andThen (parse(_)))(r)
    }
  }

  // NOTE(review): "json4s_examples" is passed where DBAddress's second
  // argument goes — presumably the database name; confirm against the
  // DBAddress(host, dbName) constructor being targeted.
  val mongo = com.mongodb.Mongo.connect(new DBAddress("127.0.0.1", "json4s_examples"))
  val coll = mongo.getCollection("swagger_data")

  // Both HTTP requests start eagerly; the yield block stores the results.
  val f = for {
    listing <- Http(url("http://petstore.swagger.wordnik.com/api/api-docs") OK read.Json)
    api <- Http(url("http://petstore.swagger.wordnik.com/api/api-docs/pet") OK read.Json)
  } yield {
    println("Listing received, storing...")
    coll.save(JObjectParser.parse(listing))
    println("Api description received, storing...")
    // Rewrite field names starting with '$' to '#' before saving — presumably
    // because MongoDB reserves '$'-prefixed keys; confirm.
    coll save {
      JObjectParser parse {
        api transformField {
          case JField(nm, v) if nm.startsWith("$") => JField(s"#${nm.substring(1)}", v)
        }
      }
    }
    println("Swagger api has been harvested.")
  }

  // Failures are only printed; nothing is retried.
  f onFailure { case r =>
    println(r)
  }

  // Release dispatch's HTTP resources whether the fetch succeeded or failed.
  f onComplete { r =>
    Http.shutdown()
  }
}
package flaky.report
import _root_.flaky.slack.model._
import flaky.web._
import flaky.{FlakyCase, FlakyTestReport, TimeReport, web}
import io.circe.generic.auto._
import io.circe.syntax._
import scala.collection.immutable.Iterable
import scala.language.implicitConversions
object SlackReport {
  // Slack emoji shortcodes used to decorate report messages.
  object Icons {
    val link = ":link:"                    // marker next to hyperlinks
    val failedClass = ":bangbang:"         // prefix for a failing test class
    val detailForClass = ":poop:"          // NOTE(review): unused in this view
    val failCase = ":small_orange_diamond:" // bullet for a single flaky case
  }
def render(flakyTestReport: FlakyTestReport,
htmlReportsUrl: Option[String] = None,
details: Boolean = false): String = {
if (flakyTestReport.flakyTests.exists(_.failures > 0)) {
if (details) {
renderFailed(flakyTestReport, htmlReportsUrl).asJson.noSpaces
} else {
renderFailedShort(flakyTestReport, htmlReportsUrl).asJson.noSpaces
}
} else {
renderNoFailures(flakyTestReport).asJson.noSpaces
}
}
def renderNoFailures(flakyTestReport: FlakyTestReport): Message = {
val timestamp = flakyTestReport.timeDetails.start
val projectName = flakyTestReport.projectName
val flaky = flakyTestReport.flakyTests
val duration = flakyTestReport.timeDetails.duration()
val timeSpend = TimeReport.formatSeconds(duration / 1000)
val timeSpendPerIteration = if (flakyTestReport.testRuns.nonEmpty) {
TimeReport.formatSeconds((duration / flakyTestReport.testRuns.size) / 1000)
} else {
"?"
}
val summary = Attachment(
fallback = s"Flaky test result for $projectName",
color = "#36a64f",
pretext = s"Flaky test report for $projectName",
author_name = "sbt-flaky",
title = "Flaky test result",
text = s"All tests are correct [${flaky.headOption.map(f => f.totalRun).getOrElse(0)} runs]\nTest were running for $timeSpend [$timeSpendPerIteration/iteration]",
footer = "sbt-flaky",
ts = timestamp
)
Message(attachments = List(summary))
}
def renderFailed(flakyTestReport: FlakyTestReport, htmlReportsUrl: Option[String]): Message = {
val timestamp = flakyTestReport.timeDetails.start
val projectName = flakyTestReport.projectName
val flaky = flakyTestReport.flakyTests
val failedCount = flaky.count(_.failures > 0)
val duration = flakyTestReport.timeDetails.duration()
val timeSpend = TimeReport.formatSeconds(duration / 1000)
val timeSpendPerIteration = TimeReport.formatSeconds((duration / flakyTestReport.testRuns.size) / 1000)
val htmlReportLink = htmlReportsUrl.map(h => s"<$h/index.html|HTML report> ${Icons.link}")
val flakyText = flaky
.filter(_.failures > 0)
.groupBy(_.test.clazz)
.map { kv =>
val clazz = kv._1
val list = kv._2
val r = list
.sortBy(_.failures())
.map { flaky =>
val link = htmlReportsUrl
.map(l => if (l.endsWith("/")) l else l + "/")
.map(host => s"<$host/${linkToSingleTest(flaky.test)}|${Icons.link}>")
f"`${100 - flaky.failures * 100f / flaky.totalRun}%.2f%%` `${flaky.test.test}` ${link.getOrElse("")}"
}
.mkString("\n")
s"${Icons.failedClass} *$clazz:*\n$r"
}.mkString("Success rate for tests:\n","\n","")
val attachment = Attachment(
fallback = "Flaky test result for $projectName",
color = "danger",
pretext = s"Flaky test report for $projectName. Test were run ${flakyTestReport.testRuns.size} times",
author_name = "sbt-flaky",
title =
s"""$failedCount of ${flaky.size} test cases are flaky.
|Build success probability is ${flakyTestReport.successProbabilityPercent()}.
|Test were running for $timeSpend [$timeSpendPerIteration/iteration]
|${htmlReportLink.getOrElse("")}""".stripMargin,
text = flakyText,
footer = "sbt-flaky",
mrkdwn_in = Seq("text")
)
val flakyCases: Map[String, List[FlakyCase]] = flakyTestReport.groupFlakyCases()
val failedAttachments: Iterable[Attachment] = flakyCases.map {
case (testClass, flakyTestCases) =>
val flakyTestsDescription: String = flakyTestCases
.sortBy(_.runNames.size)
.map {
fc =>
val test = fc.test
val message = fc.message.getOrElse("?")
val text =
s"""| ${Icons.failCase}[${fc.runNames.size} times] ${test.test}
| Message: $message
| ${fc.stacktrace}""".stripMargin
text
}.mkString("\n")
val link = htmlReportsUrl
.map(l => if (l.endsWith("/")) l else l + "/")
.map(host => s"<$host/${web.linkToSingleTestClass(testClass)}|${Icons.link}>")
Attachment(
fallback = s"Flaky test report for $testClass",
color = "danger",
title = s"${Icons.failedClass} Details for $testClass: ${link.getOrElse("")}",
text = flakyTestsDescription,
ts = timestamp
)
}
Message(attachments = attachment :: failedAttachments.toList)
}
  /**
   * Builds the short Slack message for a run with flaky tests: a single
   * summary attachment with per-class success rates, no per-case details.
   */
  def renderFailedShort(flakyTestReport: FlakyTestReport, htmlReportUrl: Option[String]): Message = {
    val timestamp = flakyTestReport.timeDetails.start
    val projectName = flakyTestReport.projectName
    val flaky = flakyTestReport.flakyTests
    val failedCount = flaky.count(_.failures > 0)
    val duration = flakyTestReport.timeDetails.duration()
    val timeSpend = TimeReport.formatSeconds(duration / 1000)
    val timeSpendPerIteration = TimeReport.formatSeconds((duration / flakyTestReport.testRuns.size) / 1000)
    // One markdown section per failing class: success rate, test name, optional link.
    val flakyText = flaky
      .filter(_.failures > 0)
      .groupBy(_.test.clazz)
      .map { kv =>
        val clazz = kv._1
        val list = kv._2
        val r = list
          .sortBy(_.failures())
          .map { flaky =>
            // NOTE(review): redundant with the top-level `import flaky.web._`;
            // kept because removing it would alter the code.
            import _root_.flaky.web._
            val link = htmlReportUrl
              .map(l => if (l.endsWith("/")) l else l + "/")
              .map(host => s"<$host/${linkToSingleTest(flaky.test)}|${Icons.link}>")
            f" `${100 - (flaky.failures * 100f) / flaky.totalRun}%.2f%%` `${flaky.test.test}` ${link.getOrElse("")}"
          }
          .mkString("\n")
        s"${Icons.failedClass} *$clazz*:\n$r"
      }.mkString("Success rate for tests:\n", "\n", "")
    val htmlReportLink = htmlReportUrl.map(h => s"<$h/index.html|HTML report> ${Icons.link}")
    val attachment = Attachment(
      fallback = s"Flaky test result for $projectName",
      color = "danger",
      pretext = s"Flaky test report for $projectName. Test were run ${flakyTestReport.testRuns.size} times",
      author_name = "sbt-flaky",
      title =
        f"""$failedCount of ${flaky.size} test cases are flaky.
           |Build success probability is ${flakyTestReport.successProbabilityPercent()}%.2f%%.
           |Test were running for $timeSpend [$timeSpendPerIteration/iteration]
           |${htmlReportLink.getOrElse("")}""".stripMargin,
      text = flakyText,
      footer = "sbt-flaky",
      mrkdwn_in = Seq("text"),
      ts = timestamp
    )
    Message(attachments = List(attachment))
  }
}
| otrebski/sbt-flaky | src/main/scala/flaky/report/SlackReport.scala | Scala | apache-2.0 | 7,163 |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4s
import org.nd4s.Implicits._
import org.nd4j.linalg.indexing.{ IntervalIndex, NDArrayIndexAll, PointIndex }
import org.scalatest.FlatSpec
class NDArrayIndexingTest extends FlatSpec {

  /**
   * The 3x3 row-major (C-ordered) matrix shared by every case; previously the
   * same literal was duplicated inline in all three tests.
   */
  private def sampleMatrix =
    Array(
      Array(1, 2, 3),
      Array(4, 5, 6),
      Array(7, 8, 9)
    ).mkNDArray(NDOrdering.C)

  "IndexRange" should "convert -> DSL to indices" in {
    val indices = sampleMatrix.indicesFrom(0 -> 2, ->)
    // Rows 0..1 of a 3-column matrix cover flat indices 0..5, shape 2x3.
    assert(indices.indices == List(0, 1, 2, 3, 4, 5))
    assert(indices.targetShape.toList == List(2, 3))
  }

  it should "convert -> DSL to NDArrayIndex interval with stride 1 or 2" in {
    val indices = sampleMatrix.getINDArrayIndexfrom(0 -> 2, 0 -> 3 by 2)
    val rowI = indices(0)
    assert(rowI.isInstanceOf[IntervalIndex])
    val columnI = indices(1)
    assert(columnI.isInstanceOf[IntervalIndex])
  }

  it should "convert -> DSL to NDArrayIndex point,all" in {
    val indices = sampleMatrix.getINDArrayIndexfrom(0, ->)
    val rowI = indices(0)
    assert(rowI.isInstanceOf[PointIndex])
    val columnI = indices(1)
    assert(columnI.isInstanceOf[NDArrayIndexAll])
  }
}
| deeplearning4j/deeplearning4j | nd4s/src/test/scala/org/nd4s/NDArrayIndexingTest.scala | Scala | apache-2.0 | 2,165 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.builder.query.db.crud
import com.outworkers.phantom.PhantomSuite
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.tables._
import com.outworkers.util.samplers._
class CountTest extends PhantomSuite {

  override def beforeAll(): Unit = {
    super.beforeAll()
    val _ = database.primitivesJoda.createSchema()
  }

  /** Builds an unlogged batch inserting all of the given rows. */
  private def insertBatch(rows: List[JodaRow]) =
    rows.foldLeft(Batch.unlogged)((b, row) => b.add(TestDatabase.primitivesJoda.store(row)))

  it should "retrieve a count of 0 if the table has been truncated" in {
    val chain = for {
      _ <- database.primitivesJoda.truncate.future()
      count <- database.primitivesJoda.select.count.one()
    } yield count
    whenReady(chain) { res =>
      res.value shouldEqual 0L
    }
  }

  // Test name fixed: the previous title claimed "a count of 1000" although
  // only `limit` (100) rows are inserted.
  it should "correctly retrieve a count of 100 when selecting a column" in {
    val limit = 100
    val rows = genList[JodaRow](limit)
    val chain = for {
      _ <- database.primitivesJoda.truncate.future()
      _ <- insertBatch(rows).future()
      count <- database.primitivesJoda.select.count(_.timestamp).one()
    } yield count
    whenReady(chain) { res =>
      res.value shouldEqual limit.toLong
    }
  }

  it should "correctly retrieve a count of 100" in {
    val limit = 100
    val rows = genList[JodaRow](limit)
    val chain = for {
      _ <- database.primitivesJoda.truncate.future()
      _ <- insertBatch(rows).future()
      count <- database.primitivesJoda.select.count.one()
    } yield count
    whenReady(chain) { res =>
      res.value shouldEqual limit.toLong
    }
  }
}
| outworkers/phantom | phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/crud/CountTest.scala | Scala | apache-2.0 | 2,295 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package base
package patterns
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScReferenceExpression
/**
* @author ilyas
*/
// Pattern formed by a stable reference (e.g. `case SomeObject => ...`).
// NOTE(review): `findChild` presumably returns the first child PSI element of
// the given class (likely as an Option) — confirm against ScalaPsiElement.
trait ScStableReferenceElementPattern extends ScPattern {
  // The referenced element of the pattern, if present.
  def refElement = findChild(classOf[ScReferenceElement])
  // Same child viewed as a reference expression, if it is one.
  def getReferenceExpression = findChild(classOf[ScReferenceExpression])
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.io.File
import kafka.utils._
import junit.framework.Assert._
import java.util.{Random, Properties}
import kafka.consumer.SimpleConsumer
import org.junit.{After, Before, Test}
import kafka.message.{NoCompressionCodec, ByteBufferMessageSet, Message}
import kafka.zk.ZooKeeperTestHarness
import org.scalatest.junit.JUnit3Suite
import kafka.admin.AdminUtils
import kafka.api.{PartitionOffsetRequestInfo, FetchRequestBuilder, OffsetRequest}
import kafka.utils.TestUtils._
import kafka.common.{ErrorMapping, TopicAndPartition}
import kafka.utils.nonthreadsafe
import kafka.utils.threadsafe
import org.junit.After
import org.junit.Before
import org.junit.Test
/**
 * Integration tests for the broker's offset-fetch API: starts one broker
 * backed by ZooKeeper, appends messages and verifies the offsets returned
 * for the latest/earliest/now target times.
 */
class LogOffsetTest extends JUnit3Suite with ZooKeeperTestHarness {
  val random = new Random()
  var logDir: File = null
  var topicLogDir: File = null
  var server: KafkaServer = null
  var logSize: Int = 100
  val brokerPort: Int = 9099
  var simpleConsumer: SimpleConsumer = null
  var time: Time = new MockTime()

  @Before
  override def setUp() {
    super.setUp()
    val config: Properties = createBrokerConfig(1, brokerPort)
    val logDirPath = config.getProperty("log.dir")
    logDir = new File(logDirPath)
    time = new MockTime()
    server = TestUtils.createServer(new KafkaConfig(config), time)
    simpleConsumer = new SimpleConsumer("localhost", brokerPort, 1000000, 64*1024, "")
  }

  @After
  override def tearDown() {
    // Explicit parentheses: both calls are side-effecting.
    simpleConsumer.close()
    server.shutdown()
    Utils.rm(logDir)
    super.tearDown()
  }

  @Test
  def testGetOffsetsForUnknownTopic() {
    // Asking for offsets of a topic that was never created must yield an
    // UnknownTopicOrPartition error, not an empty offset list.
    val topicAndPartition = TopicAndPartition("foo", 0)
    val request = OffsetRequest(
      Map(topicAndPartition -> PartitionOffsetRequestInfo(OffsetRequest.LatestTime, 10)))
    val offsetResponse = simpleConsumer.getOffsetsBefore(request)
    assertEquals(ErrorMapping.UnknownTopicOrPartitionCode,
      offsetResponse.partitionErrorAndOffsets(topicAndPartition).error)
  }

  @Test
  def testGetOffsetsBeforeLatestTime() {
    val topicPartition = "kafka-" + 0
    val topic = topicPartition.split("-").head
    val part = topicPartition.split("-").last.toInt
    // setup brokers in zookeeper as owners of partitions for this test
    AdminUtils.createTopic(zkClient, topic, 1, 1)
    val logManager = server.getLogManager
    assertTrue("Log for partition [topic,0] should be created",
      waitUntilTrue(() => logManager.getLog(TopicAndPartition(topic, part)).isDefined, 1000))
    val log = logManager.getLog(TopicAndPartition(topic, part)).get
    // 20 messages, broker configured with tiny segments (logSize bytes), so
    // several segment boundaries are produced: 20, 16, 12, 8, 4, 0.
    val message = new Message(Integer.toString(42).getBytes())
    for(i <- 0 until 20)
      log.append(new ByteBufferMessageSet(NoCompressionCodec, message))
    log.flush()
    val offsets = server.apis.fetchOffsets(logManager, TopicAndPartition(topic, part), OffsetRequest.LatestTime, 10)
    assertEquals(Seq(20L, 16L, 12L, 8L, 4L, 0L), offsets)
    waitUntilTrue(() => isLeaderLocalOnBroker(topic, part, server), 1000)
    val topicAndPartition = TopicAndPartition(topic, part)
    val offsetRequest = OffsetRequest(
      Map(topicAndPartition -> PartitionOffsetRequestInfo(OffsetRequest.LatestTime, 10)),
      replicaId = 0)
    val consumerOffsets =
      simpleConsumer.getOffsetsBefore(offsetRequest).partitionErrorAndOffsets(topicAndPartition).offsets
    assertEquals(Seq(20L, 16L, 12L, 8L, 4L, 0L), consumerOffsets)
    // try to fetch using latest offset: nothing should come back
    val fetchResponse = simpleConsumer.fetch(
      new FetchRequestBuilder().addFetch(topic, 0, consumerOffsets.head, 300 * 1024).build())
    assertFalse(fetchResponse.messageSet(topic, 0).iterator.hasNext)
  }

  @Test
  def testEmptyLogsGetOffsets() {
    val topicPartition = "kafka-" + random.nextInt(10)
    val topicPartitionPath = getLogDir.getAbsolutePath + "/" + topicPartition
    topicLogDir = new File(topicPartitionPath)
    topicLogDir.mkdir
    val topic = topicPartition.split("-").head
    // setup brokers in zookeeper as owners of partitions for this test
    AdminUtils.createTopic(zkClient, topic, 1, 1)
    TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 500)
    // An empty log must never report a non-zero earliest offset, no matter
    // how often we poll.
    var offsetChanged = false
    for(i <- 1 to 14) {
      val topicAndPartition = TopicAndPartition(topic, 0)
      val offsetRequest =
        OffsetRequest(Map(topicAndPartition -> PartitionOffsetRequestInfo(OffsetRequest.EarliestTime, 1)))
      val consumerOffsets =
        simpleConsumer.getOffsetsBefore(offsetRequest).partitionErrorAndOffsets(topicAndPartition).offsets
      if(consumerOffsets(0) == 1) {
        offsetChanged = true
      }
    }
    assertFalse(offsetChanged)
  }

  @Test
  def testGetOffsetsBeforeNow() {
    val topicPartition = "kafka-" + random.nextInt(3)
    val topic = topicPartition.split("-").head
    val part = topicPartition.split("-").last.toInt
    // setup brokers in zookeeper as owners of partitions for this test
    AdminUtils.createTopic(zkClient, topic, 3, 1)
    val logManager = server.getLogManager
    val log = logManager.createLog(TopicAndPartition(topic, part), logManager.defaultConfig)
    val message = new Message(Integer.toString(42).getBytes())
    for(i <- 0 until 20)
      log.append(new ByteBufferMessageSet(NoCompressionCodec, message))
    log.flush()
    val now = time.milliseconds + 30000 // pretend it is the future to avoid race conditions with the fs
    val offsets = server.apis.fetchOffsets(logManager, TopicAndPartition(topic, part), now, 10)
    assertEquals(Seq(20L, 16L, 12L, 8L, 4L, 0L), offsets)
    waitUntilTrue(() => isLeaderLocalOnBroker(topic, part, server), 1000)
    val topicAndPartition = TopicAndPartition(topic, part)
    val offsetRequest = OffsetRequest(Map(topicAndPartition -> PartitionOffsetRequestInfo(now, 10)), replicaId = 0)
    val consumerOffsets =
      simpleConsumer.getOffsetsBefore(offsetRequest).partitionErrorAndOffsets(topicAndPartition).offsets
    assertEquals(Seq(20L, 16L, 12L, 8L, 4L, 0L), consumerOffsets)
  }

  @Test
  def testGetOffsetsBeforeEarliestTime() {
    val topicPartition = "kafka-" + random.nextInt(3)
    val topic = topicPartition.split("-").head
    val part = topicPartition.split("-").last.toInt
    // setup brokers in zookeeper as owners of partitions for this test
    AdminUtils.createTopic(zkClient, topic, 3, 1)
    val logManager = server.getLogManager
    val log = logManager.createLog(TopicAndPartition(topic, part), logManager.defaultConfig)
    val message = new Message(Integer.toString(42).getBytes())
    for(i <- 0 until 20)
      log.append(new ByteBufferMessageSet(NoCompressionCodec, message))
    log.flush()
    // EarliestTime yields only the log start offset.
    val offsets = server.apis.fetchOffsets(logManager, TopicAndPartition(topic, part), OffsetRequest.EarliestTime, 10)
    assertEquals(Seq(0L), offsets)
    waitUntilTrue(() => isLeaderLocalOnBroker(topic, part, server), 1000)
    val topicAndPartition = TopicAndPartition(topic, part)
    val offsetRequest =
      OffsetRequest(Map(topicAndPartition -> PartitionOffsetRequestInfo(OffsetRequest.EarliestTime, 10)))
    val consumerOffsets =
      simpleConsumer.getOffsetsBefore(offsetRequest).partitionErrorAndOffsets(topicAndPartition).offsets
    assertEquals(Seq(0L), consumerOffsets)
  }

  /** Broker configuration with tiny segments so a few messages roll segments. */
  private def createBrokerConfig(nodeId: Int, port: Int): Properties = {
    val props = new Properties
    props.put("broker.id", nodeId.toString)
    props.put("port", port.toString)
    props.put("log.dir", getLogDir.getAbsolutePath)
    props.put("log.flush.interval.messages", "1")
    props.put("enable.zookeeper", "false")
    props.put("num.partitions", "20")
    props.put("log.retention.hours", "10")
    props.put("log.retention.check.interval.ms", (5*1000*60).toString)
    props.put("log.segment.bytes", logSize.toString)
    props.put("zookeeper.connect", zkConnect.toString)
    props
  }

  /** Creates a fresh temporary directory to serve as the broker log dir. */
  private def getLogDir(): File = TestUtils.tempDir()
}
| unix1986/universe | tool/kafka-0.8.1.1-src/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala | Scala | bsd-2-clause | 8,750 |
package com.twitter.finatra.http.integration.doeverything.main.domain
// Response fixture for the "do everything" integration test server.
case class ExampleResponse(
    id: String,
    name: String,
    magic: String,
    moduleMagic: String)
| deanh/finatra | http/src/test/scala/com/twitter/finatra/http/integration/doeverything/main/domain/ExampleResponse.scala | Scala | apache-2.0 | 173 |
package nak.liblinear
import breeze.util.Index
import breeze.linalg.DenseMatrix
import nak.liblinear.{Function=>TronFunction, _}
/**
* Specify a binomial problem with success/failure counts for each
* feature vector and offsets.
*/
class BinomialProblem(
    val y: Array[(Int, Int)], // per-row (successes, failures) target counts
    val x: Array[Array[Feature]], // feature vectors, one per row
    val n: Int, // number of features
    val offsets: Array[Double] // fixed per-row offsets added to the linear score
) {
  // Number of rows; cached lazily from the target array.
  lazy val l: Int = y.length
}
/**
* Companion to BinomialProblem.
*/
object BinomialProblem {

  /**
   * Construct a binomial problem with all offsets equal to zero.
   */
  def apply(y: Array[(Int,Int)], x: Array[Array[Feature]], n: Int) =
    new BinomialProblem(y, x, n, Array.fill(y.length)(0.0))
}
/**
* Train a Liblinear classifier from data with binomial success/failure counts.
*/
class LiblinearTrainerBinomial(config: LiblinearConfig) {

  // Silence liblinear's stdout chatter unless debugging was requested.
  if (!config.showDebug) Linear.disableDebugOutput

  // L2-regularized logistic regression with the configured cost and tolerance.
  val param = new Parameter(SolverType.L2R_LR, config.cost, config.eps)

  /**
   * Trains a model from binomial (successes, failures) responses.
   *
   * @param binomialResponses per-row (successes, failures) counts
   * @param x feature vectors, one per row
   * @param numFeatures number of features
   * @param offsetsOpt optional per-row offsets; zero offsets when None
   */
  def apply(
    binomialResponses: Array[(Int,Int)],
    x: Array[Array[Feature]],
    numFeatures: Int,
    offsetsOpt: Option[Array[Double]] = None
  ): Model = {
    val problem = offsetsOpt match {
      case Some(offsets) =>
        new BinomialProblem(binomialResponses, x, numFeatures, offsets)
      case None =>
        BinomialProblem(binomialResponses, x, numFeatures)
    }
    train(problem, param)
  }

  // Builds a two-class Model shell (labels 1/0, zero bias, zero weights)
  // and optimizes its weight vector in place.
  def train (prob: BinomialProblem, param: Parameter) = {
    val w_size = prob.n
    val model = new Model
    model.solverType = param.solverType;
    model.bias = 0
    model.nr_class = 2
    model.label = Array(1,0)
    model.nr_feature = prob.n
    model.w = Array.fill(w_size)(0.0)
    // NOTE(review): Cp and Cn are both passed as param.C, so per-class cost
    // weighting is effectively disabled here — confirm this is intended.
    train_one(prob, param, model.w, param.C, param.C)
    model
  }

  // Runs TRON on the binomial logistic-regression objective, mutating w.
  // Tolerance is scaled by the class balance, mirroring liblinear's solver.
  def train_one(prob: BinomialProblem, param: Parameter, w: Array[Double], Cp: Double, Cn: Double) {
    val eps = param.eps
    val pos = prob.y.map(_._1).sum
    val neg = prob.y.map(_._2).sum
    val primal_solver_tol = eps * math.max(math.min(pos, neg), 1) / (pos+neg)
    val fun_obj = new L2R_LrFunction_Binomial(prob, Cp, Cn)
    new Tron(fun_obj, primal_solver_tol).tron(w)
  }
}
/**
* Binomial logistic regression function with offsets.
*
* This is horrifically mutable code, but is made to line up with how
* other liblinear functions were done in Java. Also, did the while-loop
* thing to iterate over arrays since it is a bit more efficient than
* equivalent for-loops in Scala.
*/
class L2R_LrFunction_Binomial(prob: BinomialProblem, Cp: Double, Cn: Double)
  extends TronFunction {

  private val l = prob.l
  // z holds the linear scores in fun(), then is reused as the gradient's
  // per-row residual in grad(); D caches per-row curvature for Hv().
  private val z = Array.fill(l)(0.0)
  private val D = Array.fill(l)(0.0)

  // Xv(i) = offset(i) + x(i) . v  (matrix-vector product including offsets).
  private def Xv(v: Array[Double], Xv: Array[Double]) {
    var i = 0
    while (i < prob.l) {
      Xv(i) = prob.offsets(i)
      for (s <- prob.x(i))
        Xv(i) += v(s.getIndex - 1) * s.getValue
      i += 1
    }
  }

  // XTv = X^T v (transpose-matrix-vector product); feature indices are 1-based.
  private def XTv(v: Array[Double], XTv: Array[Double]) {
    val w_size = get_nr_variable
    val x = prob.x
    var i = 0
    while (i < w_size) {
      XTv(i) = 0.0
      i += 1
    }
    i = 0
    while (i < l) {
      val v_i = v(i)
      for (s <- x(i))
        XTv(s.getIndex-1) += v_i * s.getValue
      i += 1
    }
  }

  // Objective value: ||w||^2/2 plus the binomial negative log-likelihood.
  // NOTE(review): Cp/Cn are not applied here (nor in grad) although Hv uses
  // them — confirm whether cost weighting is intended in this objective.
  def fun(w: Array[Double]): Double = {
    val y = prob.y
    val w_size = get_nr_variable
    Xv(w, z)
    var i = 0
    var f = 0.0
    // L2 regularization term ||w||^2 / 2.
    while (i < w_size) {
      val p = w(i)
      f += p*p
      i += 1
    }
    f /= 2.0
    i = 0
    // Per row: successes contribute log(1+e^z) - z, failures log(1+e^z).
    while (i < l) {
      val (numPosObs, numNegObs) = y(i)
      val logOnePlusZi = math.log(1+math.exp(z(i)))
      f += (if (numPosObs > 0) numPosObs*(logOnePlusZi - z(i)) else 0.0)
      f += (if (numNegObs > 0) numNegObs*logOnePlusZi else 0.0)
      i += 1
    }
    f
  }

  // Gradient: X^T residual + w; also caches D(i) = n_i * p_i * (1-p_i) for Hv.
  // Relies on fun() having filled z with the current linear scores first
  // (TRON calls fun before grad).
  def grad(w: Array[Double], g: Array[Double]) {
    val y = prob.y;
    val w_size = get_nr_variable
    var i = 0
    while (i < l) {
      val (numPosObs, numNegObs) = y(i)
      val numTrials = numPosObs + numNegObs
      val zPos = 1 / (1 + math.exp(-z(i)))
      D(i) = numTrials * zPos * (1-zPos)
      // z is overwritten here: it becomes the residual -(successes - n*p).
      z(i) = -(numPosObs - numTrials*zPos)
      i += 1
    }
    XTv(z, g)
    // Add the gradient of the L2 term.
    i = 0
    while (i < w_size) {
      g(i) += w(i)
      i += 1
    }
  }

  // Hessian-vector product: Hs = X^T (C * D * X s) + s.
  // NOTE(review): each row is scaled by both numPosObs*Cp and numNegObs*Cn
  // while fun/grad apply no C weighting — confirm this asymmetry is intended.
  def Hv(s: Array[Double], Hs: Array[Double]) {
    val w_size = get_nr_variable
    val wa = Array.fill(l)(0.0)
    Xv(s, wa)
    var i = 0
    while (i < l) {
      val (numPosObs, numNegObs) = prob.y(i)
      wa(i) = numPosObs * Cp * D(i) * wa(i) + numNegObs * Cn * D(i) * wa(i)
      i += 1
    }
    XTv(wa, Hs)
    // Identity contribution from the L2 regularizer.
    i = 0
    while (i < w_size) {
      Hs(i) += s(i)
      i += 1
    }
  }

  // Number of optimization variables = number of features.
  lazy val get_nr_variable = prob.n
}
| scalanlp/nak | src/main/scala/nak/liblinear/binomial.scala | Scala | apache-2.0 | 4,769 |
package io.skysail.app.github.resources
import org.restlet.data.Reference
import org.restlet.data.MediaType
import io.skysail.ext.oauth2.domain.OAuth2Parameters
import io.skysail.ext.oauth2.domain.Token
import org.restlet.ext.json.JsonRepresentation
import org.osgi.service.component.annotations.Component
import io.skysail.ext.oauth2.resources.AccessTokenClientResource
import io.skysail.app.github.TokenResponse
// Fixed: the @Component annotation was duplicated verbatim (copy-paste error).
@Component(service = Array(classOf[AccessTokenClientResource]))
class FacebookAccessTokenClientResource
  extends AccessTokenClientResource(new Reference("https://github.com/login/oauth/access_token")) {

  // NOTE(review): despite the "Facebook" class name, this resource targets
  // GitHub's OAuth2 endpoint and matches "github.com" — consider renaming to
  // GithubAccessTokenClientResource (the file path already uses that name).

  /** URI fragment used to associate this resource with its API provider. */
  def getApiProviderUriMatcher(): String = "github.com"

  /**
   * Exchanges the supplied OAuth2 parameters for an access token by POSTing
   * them to GitHub's token endpoint and parsing the JSON response.
   */
  def requestToken(parameters: OAuth2Parameters): Token = {
    println(clientId) // debug output — consider replacing with a logger
    parameters.clientId(clientId)
    parameters.clientSecret(clientSecret)
    val input = parameters.toRepresentation()
    println(input) // debug output
    accept(MediaType.APPLICATION_JSON)
    val result = new JsonRepresentation(post(input)).getJsonObject()
    println(result) // debug output
    TokenResponse.apply(result)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import scala.util.Random
import breeze.linalg.{DenseVector => BDV}
import org.scalatest.Assertions._
import org.apache.spark.ml.classification.LinearSVCSuite._
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest, MLTestingUtils}
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.sql.{Dataset, Row}
import org.apache.spark.sql.functions._
class LinearSVCSuite extends MLTest with DefaultReadWriteTest {
import testImplicits._
private val nPoints = 50
@transient var smallBinaryDataset: Dataset[_] = _
@transient var smallValidationDataset: Dataset[_] = _
@transient var binaryDataset: Dataset[_] = _
@transient var smallSparseBinaryDataset: Dataset[_] = _
@transient var smallSparseValidationDataset: Dataset[_] = _
  override def beforeAll(): Unit = {
    super.beforeAll()
    // NOTE: Intercept should be small for generating equal 0s and 1s
    val A = 0.01  // intercept
    val B = -1.5  // weight of feature 1
    val C = 1.0   // weight of feature 2
    // Separate seeds (42 vs 17) keep training and validation data disjoint.
    smallBinaryDataset = generateSVMInput(A, Array[Double](B, C), nPoints, 42).toDF()
    smallValidationDataset = generateSVMInput(A, Array[Double](B, C), nPoints, 17).toDF()
    binaryDataset = generateSVMInput(1.0, Array[Double](1.0, 2.0, 3.0, 4.0), 10000, 42).toDF()

    // Dataset for testing SparseVector: same data with features converted
    // from DenseVector to SparseVector via a UDF.
    val toSparse: Vector => SparseVector = _.asInstanceOf[DenseVector].toSparse
    val sparse = udf(toSparse)
    smallSparseBinaryDataset = smallBinaryDataset.withColumn("features", sparse($"features"))
    smallSparseValidationDataset =
      smallValidationDataset.withColumn("features", sparse($"features"))
  }

  /**
   * Enable the ignored test to export the dataset into CSV format,
   * so we can validate the training accuracy compared with R's e1071 package.
   */
  ignore("export test data into CSV format") {
    binaryDataset.rdd.map { case Row(label: Double, features: Vector) =>
      label + "," + features.toArray.mkString(",")
    }.repartition(1).saveAsTextFile("target/tmp/LinearSVC/binaryDataset")
  }

  test("Linear SVC binary classification") {
    val svm = new LinearSVC()
    val model = svm.fit(smallBinaryDataset)
    // At least 80% of the validation points must be classified correctly.
    assert(model.transform(smallValidationDataset)
      .where("prediction=label").count() > nPoints * 0.8)
    val sparseModel = svm.fit(smallSparseBinaryDataset)
    checkModels(model, sparseModel)
  }

  test("Linear SVC binary classification with regularization") {
    val svm = new LinearSVC()
    // setRegParam mutates svm, so the sparse fit below also uses regParam=0.1.
    val model = svm.setRegParam(0.1).fit(smallBinaryDataset)
    assert(model.transform(smallValidationDataset)
      .where("prediction=label").count() > nPoints * 0.8)
    val sparseModel = svm.fit(smallSparseBinaryDataset)
    checkModels(model, sparseModel)
  }

  test("params") {
    // Verifies param-framework invariants for both estimator and model.
    ParamsSuite.checkParams(new LinearSVC)
    val model = new LinearSVCModel("linearSVC", Vectors.dense(0.0), 0.0)
    ParamsSuite.checkParams(model)
  }
test("linear svc: default params") {
val lsvc = new LinearSVC()
assert(lsvc.getRegParam === 0.0)
assert(lsvc.getMaxIter === 100)
assert(lsvc.getFitIntercept)
assert(lsvc.getTol === 1E-6)
assert(lsvc.getStandardization)
assert(!lsvc.isDefined(lsvc.weightCol))
assert(lsvc.getThreshold === 0.0)
assert(lsvc.getAggregationDepth === 2)
assert(lsvc.getLabelCol === "label")
assert(lsvc.getFeaturesCol === "features")
assert(lsvc.getPredictionCol === "prediction")
assert(lsvc.getRawPredictionCol === "rawPrediction")
val model = lsvc.setMaxIter(5).fit(smallBinaryDataset)
val transformed = model.transform(smallBinaryDataset)
checkNominalOnDF(transformed, "prediction", model.numClasses)
checkVectorSizeOnDF(transformed, "rawPrediction", model.numClasses)
transformed
.select("label", "prediction", "rawPrediction")
.collect()
assert(model.getThreshold === 0.0)
assert(model.getFeaturesCol === "features")
assert(model.getPredictionCol === "prediction")
assert(model.getRawPredictionCol === "rawPrediction")
assert(model.intercept !== 0.0)
assert(model.hasParent)
assert(model.numFeatures === 2)
MLTestingUtils.checkCopyAndUids(lsvc, model)
}
test("LinearSVC threshold acts on rawPrediction") {
val lsvc =
new LinearSVCModel(uid = "myLSVCM", coefficients = Vectors.dense(1.0), intercept = 0.0)
val df = spark.createDataFrame(Seq(
(1, Vectors.dense(1e-7)),
(0, Vectors.dense(0.0)),
(-1, Vectors.dense(-1e-7)))).toDF("id", "features")
def checkOneResult(
model: LinearSVCModel,
threshold: Double,
expected: Set[(Int, Double)]): Unit = {
model.setThreshold(threshold)
testTransformerByGlobalCheckFunc[(Int, Vector)](df, model, "id", "prediction") {
rows: Seq[Row] =>
val results = rows.map(r => (r.getInt(0), r.getDouble(1))).toSet
assert(results === expected, s"Failed for threshold = $threshold")
}
}
def checkResults(threshold: Double, expected: Set[(Int, Double)]): Unit = {
// Check via code path using Classifier.raw2prediction
lsvc.setRawPredictionCol("rawPrediction")
checkOneResult(lsvc, threshold, expected)
// Check via code path using Classifier.predict
lsvc.setRawPredictionCol("")
checkOneResult(lsvc, threshold, expected)
}
checkResults(0.0, Set((1, 1.0), (0, 0.0), (-1, 0.0)))
checkResults(Double.PositiveInfinity, Set((1, 0.0), (0, 0.0), (-1, 0.0)))
checkResults(Double.NegativeInfinity, Set((1, 1.0), (0, 1.0), (-1, 1.0)))
}
test("linear svc doesn't fit intercept when fitIntercept is off") {
val lsvc = new LinearSVC().setFitIntercept(false).setMaxIter(5)
val model = lsvc.fit(smallBinaryDataset)
assert(model.intercept === 0.0)
val lsvc2 = new LinearSVC().setFitIntercept(true).setMaxIter(5)
val model2 = lsvc2.fit(smallBinaryDataset)
assert(model2.intercept !== 0.0)
}
test("linearSVC with sample weights") {
def modelEquals(m1: LinearSVCModel, m2: LinearSVCModel): Unit = {
assert(m1.coefficients ~== m2.coefficients relTol 0.05)
assert(m1.intercept ~== m2.intercept absTol 0.05)
}
val estimator = new LinearSVC().setRegParam(0.01).setTol(0.001)
val dataset = smallBinaryDataset
MLTestingUtils.testArbitrarilyScaledWeights[LinearSVCModel, LinearSVC](
dataset.as[LabeledPoint], estimator, modelEquals)
MLTestingUtils.testOutliersWithSmallWeights[LinearSVCModel, LinearSVC](
dataset.as[LabeledPoint], estimator, 2, modelEquals, outlierRatio = 3)
MLTestingUtils.testOversamplingVsWeighting[LinearSVCModel, LinearSVC](
dataset.as[LabeledPoint], estimator, modelEquals, 42L)
}
test("LinearSVC on blocks") {
for (dataset <- Seq(smallBinaryDataset, smallSparseBinaryDataset);
fitIntercept <- Seq(true, false)) {
val lsvc = new LinearSVC()
.setFitIntercept(fitIntercept)
.setMaxIter(5)
val model = lsvc.fit(dataset)
Seq(0, 0.01, 0.1, 1, 2, 4).foreach { s =>
val model2 = lsvc.setMaxBlockSizeInMB(s).fit(dataset)
assert(model.intercept ~== model2.intercept relTol 1e-9)
assert(model.coefficients ~== model2.coefficients relTol 1e-9)
}
}
}
test("prediction on single instance") {
val trainer = new LinearSVC()
val model = trainer.fit(smallBinaryDataset)
testPredictionModelSinglePrediction(model, smallBinaryDataset)
testClassificationModelSingleRawPrediction(model, smallBinaryDataset)
}
test("linearSVC comparison with R e1071 and scikit-learn") {
val trainer1 = new LinearSVC()
.setRegParam(0.00002) // set regParam = 2.0 / datasize / c
.setMaxIter(200)
.setTol(1e-4)
val model1 = trainer1.fit(binaryDataset)
/*
Use the following R code to load the data and train the model using e1071 package.
library(e1071)
data <- read.csv("path/target/tmp/LinearSVC/binaryDataset/part-00000", header=FALSE)
label <- factor(data$V1)
features <- as.matrix(data.frame(data$V2, data$V3, data$V4, data$V5))
svm_model <- svm(features, label, type='C', kernel='linear', cost=10, scale=F, tolerance=1e-4)
w <- -t(svm_model$coefs) %*% svm_model$SV
w
svm_model$rho
> w
data.V2 data.V3 data.V4 data.V5
[1,] 7.310338 14.89741 22.21005 29.83508
> svm_model$rho
[1] 7.440177
*/
val coefficientsR = Vectors.dense(7.310338, 14.89741, 22.21005, 29.83508)
val interceptR = 7.440177
assert(model1.intercept ~== interceptR relTol 1E-3)
assert(model1.coefficients ~== coefficientsR relTol 5E-3)
/*
Use the following python code to load the data and train the model using scikit-learn package.
import numpy as np
from sklearn import svm
f = open("path/target/tmp/LinearSVC/binaryDataset/part-00000")
data = np.loadtxt(f, delimiter=",")
X = data[:, 1:] # select columns 1 through end
y = data[:, 0] # select column 0 as label
clf = svm.LinearSVC(fit_intercept=True, C=10, loss='hinge', tol=1e-4, random_state=42)
m = clf.fit(X, y)
print m.coef_
print m.intercept_
[[ 7.24690165 14.77029087 21.99924004 29.5575729 ]]
[ 7.36947518]
*/
val coefficientsSK = Vectors.dense(7.24690165, 14.77029087, 21.99924004, 29.5575729)
val interceptSK = 7.36947518
assert(model1.intercept ~== interceptSK relTol 1E-2)
assert(model1.coefficients ~== coefficientsSK relTol 1E-2)
}
test("summary and training summary") {
val lsvc = new LinearSVC()
val model = lsvc.setMaxIter(5).fit(smallBinaryDataset)
val summary = model.evaluate(smallBinaryDataset)
assert(model.summary.accuracy === summary.accuracy)
assert(model.summary.weightedPrecision === summary.weightedPrecision)
assert(model.summary.weightedRecall === summary.weightedRecall)
assert(model.summary.pr.collect() === summary.pr.collect())
assert(model.summary.roc.collect() === summary.roc.collect())
assert(model.summary.areaUnderROC === summary.areaUnderROC)
// verify instance weight works
val lsvc2 = new LinearSVC()
.setMaxIter(5)
.setWeightCol("weight")
val smallBinaryDatasetWithWeight =
smallBinaryDataset.select(col("label"), col("features"), lit(2.5).as("weight"))
val summary2 = model.evaluate(smallBinaryDatasetWithWeight)
val model2 = lsvc2.fit(smallBinaryDatasetWithWeight)
assert(model2.summary.accuracy === summary2.accuracy)
assert(model2.summary.weightedPrecision ~== summary2.weightedPrecision relTol 1e-6)
assert(model2.summary.weightedRecall === summary2.weightedRecall)
assert(model2.summary.pr.collect() === summary2.pr.collect())
assert(model2.summary.roc.collect() === summary2.roc.collect())
assert(model2.summary.areaUnderROC === summary2.areaUnderROC)
assert(model2.summary.accuracy === model.summary.accuracy)
assert(model2.summary.weightedPrecision ~== model.summary.weightedPrecision relTol 1e-6)
assert(model2.summary.weightedRecall === model.summary.weightedRecall)
assert(model2.summary.pr.collect() === model.summary.pr.collect())
assert(model2.summary.roc.collect() === model.summary.roc.collect())
assert(model2.summary.areaUnderROC === model.summary.areaUnderROC)
}
test("linearSVC training summary totalIterations") {
Seq(1, 5, 10, 20, 100).foreach { maxIter =>
val trainer = new LinearSVC().setMaxIter(maxIter)
val model = trainer.fit(smallBinaryDataset)
if (maxIter == 1) {
assert(model.summary.totalIterations === maxIter)
} else {
assert(model.summary.totalIterations <= maxIter)
}
}
}
test("read/write: SVM") {
def checkModelData(model: LinearSVCModel, model2: LinearSVCModel): Unit = {
assert(model.intercept === model2.intercept)
assert(model.coefficients === model2.coefficients)
assert(model.numFeatures === model2.numFeatures)
}
val svm = new LinearSVC()
testEstimatorAndModelReadWrite(svm, smallBinaryDataset, LinearSVCSuite.allParamSettings,
LinearSVCSuite.allParamSettings, checkModelData)
}
}
object LinearSVCSuite {

  /**
   * Non-default values for every user-facing param; used by the persistence
   * round-trip test to verify params survive save/load.
   */
  val allParamSettings: Map[String, Any] = Map(
    "regParam" -> 0.01,
    "maxIter" -> 2, // intentionally small
    "fitIntercept" -> true,
    "tol" -> 0.8,
    "standardization" -> false,
    "threshold" -> 0.6,
    "predictionCol" -> "myPredict",
    "rawPredictionCol" -> "myRawPredict",
    "aggregationDepth" -> 3
  )

  // Generate noisy input of the form Y = signum(x.dot(weights) + intercept + noise)
  def generateSVMInput(
      intercept: Double,
      weights: Array[Double],
      nPoints: Int,
      seed: Int): Seq[LabeledPoint] = {
    val rnd = new Random(seed)
    val weightVector = new BDV(weights)
    // Draw all feature vectors first and the per-point Gaussian noise afterwards,
    // so the RNG consumption order (and hence the data for a fixed seed) matches
    // the historical behaviour of this generator.
    val featureRows = Seq.fill(nPoints)(
      Array.fill(weights.length)(rnd.nextDouble() * 2.0 - 1.0))
    featureRows.map { xi =>
      val margin = new BDV(xi).dot(weightVector) + intercept + 0.01 * rnd.nextGaussian()
      LabeledPoint(if (margin > 0) 1.0 else 0.0, Vectors.dense(xi))
    }
  }

  /** Assert that two independently trained models agree numerically. */
  def checkModels(model1: LinearSVCModel, model2: LinearSVCModel): Unit = {
    assert(model1.intercept ~== model2.intercept relTol 1e-9)
    assert(model1.coefficients ~== model2.coefficients relTol 1e-9)
  }

}
| ueshin/apache-spark | mllib/src/test/scala/org/apache/spark/ml/classification/LinearSVCSuite.scala | Scala | apache-2.0 | 14,327 |
package functional
import play.api.test._
import play.api.test.Helpers._
import play.api.i18n.MessagesApi
import play.api.i18n.{Lang, Messages, MessagesImpl, MessagesProvider}
import java.time.Instant
import play.api.{Application => PlayApp}
import play.api.inject.guice.GuiceApplicationBuilder
import helpers.InjectorSupport
import play.api.db.Database
import views.Titles
import helpers.UrlHelper
import helpers.UrlHelper._
import play.api.test._
import play.api.test.Helpers._
import java.sql.Connection
import models._
import helpers.ViewHelpers
import org.specs2.mutable.Specification
import play.api.test.{Helpers, TestServer}
import play.api.test.TestServer
import java.util.concurrent.TimeUnit
import helpers.Helper._
import play.api.i18n.{Lang, Messages}
import com.ruimo.scoins.Scoping._
/**
 * Browser-driven functional tests for the Paypal express-checkout flow.
 *
 * A fake in-process Paypal endpoint is enabled through the `fakePaypalRespons.*`
 * settings (note: "Respons" spelling must match the application config keys),
 * and `paypal.redirectUrl` is pointed at the test server's fake Paypal page.
 * Each example sets up a store with a single item, purchases it anonymously,
 * and then drives the success / error / cancel callback paths, asserting the
 * persisted `TransactionLogPaypalStatus` at every step.
 */
class PaypalSpec extends Specification with SalesSpecBase with InjectorSupport {
  // Base URL of the embedded test HTTP server driven by the browser.
  lazy val baseUrl = "http://localhost:" + play.api.test.Helpers.testServerPort

  "Paypal" should {
    "Normal paypal transaction." in new WithBrowser(
      WebDriverFactory(CHROME),
      appl(
        inMemoryDatabase() ++ defaultConf ++ disableMailer +
        ("anonymousUserPurchase" -> true) +
        ("fakePaypalRespons.enabled" -> true) +
        ("acceptableTenders.ANONYMOUS_BUYER" -> List("PAYPAL")) +
        // Fake NVP response: ACK=Success so checkout preparation succeeds.
        ("fakePaypalRespons.body" -> "TOKEN=PAYPALTOKEN&CORRELATIONID=AAA&ACK=Success&VERSION=124.0&BUILD=18316154") +
        ("fakePaypalRespons.statusCode" -> "200") +
        (
          "paypal.redirectUrl" -> (
            baseUrl + controllers.routes.Paypal.fakePaypal("", "").url.takeWhile(_ != '?')
          )
        )
      )
    ) {
      inject[Database].withConnection { implicit conn =>
        val currencyInfo = inject[CurrencyRegistry]
        val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
        val adminUser = loginWithTestUser(browser)
        logoff(browser)

        // --- Fixture: one store, one category, outer (5%) and inner (5%) taxes, one priced item.
        val site = inject[SiteRepo].createNew(Ja, "Store01")
        val cat = inject[CategoryRepo].createNew(Map(Ja -> "Cat01"))
        val tax = inject[TaxRepo].createNew
        val taxName = inject[TaxNameRepo].createNew(tax, Ja, "外税")
        val taxHis = inject[TaxHistoryRepo].createNew(tax, TaxType.OUTER_TAX, BigDecimal("5"), date("9999-12-31"))
        val tax2 = inject[TaxRepo].createNew
        val taxName2 = inject[TaxNameRepo].createNew(tax2, Ja, "内税")
        val taxHis2 = inject[TaxHistoryRepo].createNew(tax2, TaxType.INNER_TAX, BigDecimal("5"), date("9999-12-31"))
        val item = inject[ItemRepo].createNew(cat)
        ItemNumericMetadata.createNew(
          item, ItemNumericMetadataType.HEIGHT, 1
        )
        ItemTextMetadata.createNew(
          item, ItemTextMetadataType.ABOUT_HEIGHT, "Hello"
        )
        inject[SiteItemNumericMetadataRepo].createNew(
          site.id.get, item.id.get, SiteItemNumericMetadataType.STOCK, 20
        )
        SiteItemTextMetadata.createNew(
          site.id.get, item.id.get, SiteItemTextMetadataType.PRICE_MEMO, "World"
        )
        val siteItem = inject[SiteItemRepo].createNew(site, item)
        val itemClass = 1L
        inject[SiteItemNumericMetadataRepo].createNew(
          site.id.get, item.id.get, SiteItemNumericMetadataType.SHIPPING_SIZE, itemClass
        )
        val itemName = inject[ItemNameRepo].createNew(item, Map(Ja -> "かえで"))
        val itemDesc = inject[ItemDescriptionRepo].createNew(item, site, "かえで説明")
        val itemPrice = inject[ItemPriceRepo].createNew(item, site)
        val iph = inject[ItemPriceHistoryRepo].createNew(
          itemPrice, tax, currencyInfo.Jpy, BigDecimal(999), None, BigDecimal("888"), date("9999-12-31")
        )

        // --- Add the item to the cart as an anonymous buyer.
        browser.goTo(itemQueryUrl())
        browser.waitUntil(
          failFalse(browser.find(".addToCartButton").first().displayed())
        )
        browser.find(".addToCartButton").click()
        browser.waitUntil(
          failFalse(browser.find("#doAnonymousLoginButton").first().displayed())
        )
        browser.find("#doAnonymousLoginButton").size === 1
        browser.find("#doAnonymousLoginButton").click()
        browser.find(".toEnterShippingAddress a").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()

        // Shipping box/fee for Tokyo, using the inner tax (tax2).
        val box = inject[ShippingBoxRepo].createNew(site.id.get, itemClass, 3, "box01")
        val fee = inject[ShippingFeeRepo].createNew(box.id.get, CountryCode.JPN, JapanPrefecture.東京都.code())
        val feeHistory = inject[ShippingFeeHistoryRepo].createNew(
          fee.id.get, tax2.id.get, BigDecimal(123), Some(100), date("9999-12-31")
        )
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()

        // --- Fill in the shipping address form; address1/2 are auto-filled from the zip code.
        browser.find("#firstName").fill().`with`("firstName01")
        browser.find("#lastName").fill().`with`("lastName01")
        browser.find("#firstNameKana").fill().`with`("firstNameKana01")
        browser.find("#lastNameKana").fill().`with`("lastNameKana01")
        browser.find("#email").fill().`with`("foo@bar.com")
        browser.find("input[name='zip1']").fill().`with`("146")
        browser.find("input[name='zip2']").fill().`with`("0082")
        browser.waitUntil(30, TimeUnit.SECONDS) {
          failFalse {
            browser.find("#address1").attribute("value") == "大田区" &&
            browser.find("#address2").attribute("value") == "池上"
          }
        }
        browser.find("#tel1").fill().`with`("11111111")
        if (browser.find("#agreeCheck").size != 0) {
          browser.find("#agreeCheck").click()
        }
        browser.find("#enterShippingAddressForm input[type='submit']").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()

        // --- Start Paypal checkout; the fake endpoint echoes the prepared token.
        browser.find("#paypalimg").size === 1
        browser.find("#paypalimg").click()
        browser.find("#cmd").text === "_express-checkout"
        browser.find("#token").text === "PAYPALTOKEN"

        // Exactly one transaction log in PREPARED state.
        val headers: Seq[TransactionLogHeader] = TransactionLogHeader.list()
        headers.size === 1
        headers(0).transactionType === TransactionTypeCode.PAYPAL_EXPRESS_CHECKOUT
        val paypalTran: TransactionLogPaypalStatus = TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)
        paypalTran.transactionId === headers(0).id.get
        paypalTran.status === PaypalStatus.PREPARED

        // Wrong transaction id: callback is rejected (top page), status stays PREPARED.
        browser.goTo(
          controllers.routes.Paypal.onSuccess(
            paypalTran.transactionId + 1, paypalTran.token
          ).url.addParm("lang", lang.code).toString
        )
        browser.webDriver.getTitle === Messages("commonTitle", Titles.top).trim
        doWith(TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)) { paypal =>
          paypal.status === PaypalStatus.PREPARED
        }

        // Wrong token: also rejected, status stays PREPARED.
        browser.goTo(
          controllers.routes.Paypal.onSuccess(
            paypalTran.transactionId, paypalTran.token + 1
          ).url.addParm("lang", lang.code).toString
        )
        browser.webDriver.getTitle === Messages("commonTitle", Titles.top).trim
        doWith(TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)) { paypal =>
          paypal.status === PaypalStatus.PREPARED
        }

        // Correct id + token: transaction completes and the confirmation page is shown.
        browser.goTo(
          controllers.routes.Paypal.onSuccess(
            paypalTran.transactionId, paypalTran.token
          ).url.addParm("lang", lang.code).toString
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("end.transaction"))

        // Confirmation page: item line, 5% outer tax on 999, shipping fee 123 (inner tax).
        browser.find(".itemTableBody").index(0).find(".itemNameBody").text === "かえで"
        browser.find(".itemTableBody").index(0).find(".siteName").text === "Store01"
        browser.find(".itemTableBody").index(0).find(".quantity").text === "1"
        browser.find(".itemTableBody").index(0).find(".itemPrice").text === "999円"
        browser.find(".itemTableBody").index(1).find(".subtotal").text === "999円"
        browser.find(".itemTableBody").index(2).find(".outerTaxAmount").text ===
          ViewHelpers.toAmount(BigDecimal((999 * 0.05).asInstanceOf[Int]))
        browser.find(".itemTableBody").index(3).find(".grandTotal").text ===
          ViewHelpers.toAmount(BigDecimal((999 * 1.05).asInstanceOf[Int]))
        browser.find(".salesTotalBody").index(0).find(".itemQuantity").text === "1"
        browser.find(".salesTotalBody").index(0).find(".itemPrice").text === "1,048円"
        browser.find(".salesTotalBody").index(1).find(".itemQuantity").text === "1 箱"
        browser.find(".salesTotalBody").index(1).find(".itemPrice").text === "123円"
        browser.find(".salesTotalBody").index(2).find(".itemPrice").text === "1,171円"
        browser.find(".shippingAddress").find(".name").text === "firstName01 lastName01"
        browser.find(".shippingAddress").find(".nameKana").text === "firstNameKana01 lastNameKana01"
        browser.find(".shippingAddress").find(".zip").text === "146 - 0082"
        browser.find(".shippingAddress").find(".prefecture").text === "東京都"
        browser.find(".shippingAddress").find(".address1").text === "大田区"
        browser.find(".shippingAddress").find(".address2").text === "池上"
        browser.find(".shippingAddress").find(".tel1").text === "11111111"

        doWith(TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)) { paypal =>
          paypal.transactionId === headers(0).id.get
          paypal.status === PaypalStatus.COMPLETED
        }
      }
    }

    "Paypal transaction error." in new WithBrowser(
      WebDriverFactory(CHROME),
      appl(
        inMemoryDatabase() ++ defaultConf ++ disableMailer +
        ("anonymousUserPurchase" -> true) +
        ("fakePaypalRespons.enabled" -> true) +
        ("acceptableTenders.ANONYMOUS_BUYER" -> List("PAYPAL")) +
        // Fake NVP response: ACK=Fail so checkout preparation fails.
        ("fakePaypalRespons.body" -> "ACK=Fail&VERSION=124.0&BUILD=18316154") +
        ("fakePaypalRespons.statusCode" -> "200") +
        (
          "paypal.redirectUrl" -> (
            baseUrl + controllers.routes.Paypal.fakePaypal("", "").url.takeWhile(_ != '?')
          )
        )
      )
    ) {
      inject[Database].withConnection { implicit conn =>
        val currencyInfo = inject[CurrencyRegistry]
        val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
        val adminUser = loginWithTestUser(browser)
        logoff(browser)

        // --- Fixture: same shape as the success case, but a single inner tax.
        val site = inject[SiteRepo].createNew(Ja, "Store01")
        val cat = inject[CategoryRepo].createNew(Map(Ja -> "Cat01"))
        val tax = inject[TaxRepo].createNew
        val taxName = inject[TaxNameRepo].createNew(tax, Ja, "内税")
        val taxHis = inject[TaxHistoryRepo].createNew(tax, TaxType.INNER_TAX, BigDecimal("5"), date("9999-12-31"))
        val item = inject[ItemRepo].createNew(cat)
        ItemNumericMetadata.createNew(
          item, ItemNumericMetadataType.HEIGHT, 1
        )
        ItemTextMetadata.createNew(
          item, ItemTextMetadataType.ABOUT_HEIGHT, "Hello"
        )
        inject[SiteItemNumericMetadataRepo].createNew(
          site.id.get, item.id.get, SiteItemNumericMetadataType.STOCK, 20
        )
        SiteItemTextMetadata.createNew(
          site.id.get, item.id.get, SiteItemTextMetadataType.PRICE_MEMO, "World"
        )
        val siteItem = inject[SiteItemRepo].createNew(site, item)
        val itemClass = 1L
        inject[SiteItemNumericMetadataRepo].createNew(site.id.get, item.id.get, SiteItemNumericMetadataType.SHIPPING_SIZE, itemClass)
        val itemName = inject[ItemNameRepo].createNew(item, Map(Ja -> "かえで"))
        val itemDesc = inject[ItemDescriptionRepo].createNew(item, site, "かえで説明")
        val itemPrice = inject[ItemPriceRepo].createNew(item, site)
        val iph = inject[ItemPriceHistoryRepo].createNew(
          itemPrice, tax, currencyInfo.Jpy, BigDecimal(999), None, BigDecimal("888"), date("9999-12-31")
        )

        // --- Anonymous purchase up to the payment page.
        browser.goTo(itemQueryUrl())
        browser.waitUntil(
          failFalse(browser.find(".addToCartButton").first().displayed())
        )
        browser.find(".addToCartButton").click()
        browser.waitUntil(
          failFalse(browser.find("#doAnonymousLoginButton").first().displayed())
        )
        browser.find("#doAnonymousLoginButton").size === 1
        browser.find("#doAnonymousLoginButton").click()
        browser.find(".toEnterShippingAddress a").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        val box = inject[ShippingBoxRepo].createNew(site.id.get, itemClass, 3, "box01")
        val fee = inject[ShippingFeeRepo].createNew(box.id.get, CountryCode.JPN, JapanPrefecture.東京都.code())
        val feeHistory = inject[ShippingFeeHistoryRepo].createNew(fee.id.get, tax.id.get, BigDecimal(123), Some(100), date("9999-12-31"))
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.find("#firstName").fill().`with`("firstName01")
        browser.find("#lastName").fill().`with`("lastName01")
        browser.find("#firstNameKana").fill().`with`("firstNameKana01")
        browser.find("#lastNameKana").fill().`with`("lastNameKana01")
        browser.find("#email").fill().`with`("foo@bar.com")
        browser.find("input[name='zip1']").fill().`with`("146")
        browser.find("input[name='zip2']").fill().`with`("0082")
        browser.waitUntil(30, TimeUnit.SECONDS) {
          failFalse {
            browser.find("#address1").attribute("value") == "大田区" &&
            browser.find("#address2").attribute("value") == "池上"
          }
        }
        browser.find("#tel1").fill().`with`("11111111")
        if (browser.find("#agreeCheck").size != 0) {
          browser.find("#agreeCheck").click()
        }
        browser.find("#enterShippingAddressForm input[type='submit']").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()

        // --- Paypal preparation fails (ACK=Fail): error page is shown, status is ERROR.
        browser.find("#paypalimg").size === 1
        browser.find("#paypalimg").click()
        browser.webDriver.getTitle === Messages("commonTitle", Messages("paypalErrorTitle"))
        val headers: Seq[TransactionLogHeader] = TransactionLogHeader.list()
        headers.size === 1
        headers(0).transactionType === TransactionTypeCode.PAYPAL_EXPRESS_CHECKOUT
        val paypalTran: TransactionLogPaypalStatus = TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)
        paypalTran.transactionId === headers(0).id.get
        paypalTran.status === PaypalStatus.ERROR

        // on success should be rejected.
        browser.goTo(
          controllers.routes.Paypal.onSuccess(
            paypalTran.transactionId, paypalTran.token
          ).url.addParm("lang", lang.code).toString
        )
        browser.webDriver.getTitle === Messages("commonTitle", Titles.top).trim
        doWith(TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)) { paypal =>
          paypal.status === PaypalStatus.ERROR
        }
      }
    }

    "Paypal cancel transaction." in new WithBrowser(
      WebDriverFactory(CHROME),
      appl(
        inMemoryDatabase() ++ defaultConf ++ disableMailer +
        ("anonymousUserPurchase" -> true) +
        ("fakePaypalRespons.enabled" -> true) +
        ("fakePaypalRespons.body" -> "TOKEN=PAYPALTOKEN&CORRELATIONID=AAA&ACK=Success&VERSION=124.0&BUILD=18316154") +
        ("acceptableTenders.ANONYMOUS_BUYER" -> List("PAYPAL")) +
        ("fakePaypalRespons.statusCode" -> "200") +
        (
          "paypal.redirectUrl" -> (
            baseUrl + controllers.routes.Paypal.fakePaypal("", "").url.takeWhile(_ != '?')
          )
        )
      )
    ) {
      inject[Database].withConnection { implicit conn =>
        val currencyInfo = inject[CurrencyRegistry]
        val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
        val adminUser = loginWithTestUser(browser)
        logoff(browser)

        // --- Fixture: same as the error case but with a successful preparation response.
        val site = inject[SiteRepo].createNew(Ja, "Store01")
        val cat = inject[CategoryRepo].createNew(Map(Ja -> "Cat01"))
        val tax = inject[TaxRepo].createNew
        val taxName = inject[TaxNameRepo].createNew(tax, Ja, "内税")
        val taxHis = inject[TaxHistoryRepo].createNew(tax, TaxType.INNER_TAX, BigDecimal("5"), date("9999-12-31"))
        val item = inject[ItemRepo].createNew(cat)
        ItemNumericMetadata.createNew(
          item, ItemNumericMetadataType.HEIGHT, 1
        )
        ItemTextMetadata.createNew(
          item, ItemTextMetadataType.ABOUT_HEIGHT, "Hello"
        )
        inject[SiteItemNumericMetadataRepo].createNew(
          site.id.get, item.id.get, SiteItemNumericMetadataType.STOCK, 20
        )
        SiteItemTextMetadata.createNew(
          site.id.get, item.id.get, SiteItemTextMetadataType.PRICE_MEMO, "World"
        )
        val siteItem = inject[SiteItemRepo].createNew(site, item)
        val itemClass = 1L
        inject[SiteItemNumericMetadataRepo].createNew(site.id.get, item.id.get, SiteItemNumericMetadataType.SHIPPING_SIZE, itemClass)
        val itemName = inject[ItemNameRepo].createNew(item, Map(Ja -> "かえで"))
        val itemDesc = inject[ItemDescriptionRepo].createNew(item, site, "かえで説明")
        val itemPrice = inject[ItemPriceRepo].createNew(item, site)
        val iph = inject[ItemPriceHistoryRepo].createNew(
          itemPrice, tax, currencyInfo.Jpy, BigDecimal(999), None, BigDecimal("888"), date("9999-12-31")
        )

        // --- Anonymous purchase up to the payment page.
        browser.goTo(itemQueryUrl())
        browser.waitUntil(
          failFalse(browser.find(".addToCartButton").first().displayed())
        )
        browser.find(".addToCartButton").click()
        browser.waitUntil(
          failFalse(browser.find("#doAnonymousLoginButton").first().displayed())
        )
        browser.find("#doAnonymousLoginButton").size === 1
        browser.find("#doAnonymousLoginButton").click()
        browser.find(".toEnterShippingAddress a").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        val box = inject[ShippingBoxRepo].createNew(site.id.get, itemClass, 3, "box01")
        val fee = inject[ShippingFeeRepo].createNew(box.id.get, CountryCode.JPN, JapanPrefecture.東京都.code())
        val feeHistory = inject[ShippingFeeHistoryRepo].createNew(fee.id.get, tax.id.get, BigDecimal(123), Some(100), date("9999-12-31"))
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.find("#firstName").fill().`with`("firstName01")
        browser.find("#lastName").fill().`with`("lastName01")
        browser.find("#firstNameKana").fill().`with`("firstNameKana01")
        browser.find("#lastNameKana").fill().`with`("lastNameKana01")
        browser.find("#email").fill().`with`("foo@bar.com")
        browser.find("input[name='zip1']").fill().`with`("146")
        browser.find("input[name='zip2']").fill().`with`("0082")
        browser.waitUntil(30, TimeUnit.SECONDS) {
          failFalse {
            browser.find("#address1").attribute("value") == "大田区" &&
            browser.find("#address2").attribute("value") == "池上"
          }
        }
        browser.find("#tel1").fill().`with`("11111111")
        if (browser.find("#agreeCheck").size != 0) {
          browser.find("#agreeCheck").click()
        }
        browser.find("#enterShippingAddressForm input[type='submit']").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()

        // --- Checkout prepared successfully, then the buyer cancels at Paypal.
        browser.find("#paypalimg").size === 1
        browser.find("#paypalimg").click()
        browser.find("#cmd").text === "_express-checkout"
        browser.find("#token").text === "PAYPALTOKEN"
        val headers: Seq[TransactionLogHeader] = TransactionLogHeader.list()
        headers.size === 1
        headers(0).transactionType === TransactionTypeCode.PAYPAL_EXPRESS_CHECKOUT
        val paypalTran: TransactionLogPaypalStatus = TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)
        paypalTran.transactionId === headers(0).id.get
        paypalTran.status === PaypalStatus.PREPARED

        // Cancel callback flips the status to CANCELED and shows the cancel page.
        // NOTE(review): message key "cancelPayaplTitle" is misspelled but must match
        // the application's message files; do not "fix" it here.
        browser.goTo(
          controllers.routes.Paypal.onCancel(
            paypalTran.transactionId, paypalTran.token
          ).url.addParm("lang", lang.code).toString
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("cancelPayaplTitle"))
        doWith(TransactionLogPaypalStatus.byTransactionId(headers(0).id.get)) { paypal =>
          paypal.status === PaypalStatus.CANCELED
        }
      }
    }
  }
}
| ruimo/store2 | test/functional/PaypalSpec.scala | Scala | apache-2.0 | 21,140 |
package io.getquill
import io.getquill.idiom.{ Idiom => BaseIdiom }
import io.getquill.context.sql.SqlContext
/**
 * A mirror context specialised for SQL: combines [[MirrorContext]] with
 * [[SqlContext]] for the given SQL idiom and naming strategy.
 */
class SqlMirrorContext[Idiom <: BaseIdiom, Naming <: NamingStrategy]
  extends MirrorContext[Idiom, Naming]
  with SqlContext[Idiom, Naming]
package io.mpjsons.impl.deserializer.jsontypes
import io.mpjsons.JsonTypeDeserializer
import io.mpjsons.impl.util.{Context, TypesUtil}
import io.mpjsons.impl.{DeserializerFactory, StringIterator}
import scala.collection.mutable.ArrayBuffer
import scala.reflect.runtime.universe._
/**
 * Base class for deserializers that read a JSON array of homogeneous elements
 * and hand each element to a single, shared element deserializer.
 *
 * @author Marcin Pieciukiewicz
 */
abstract class AbstractJsonArrayDeserializer[T, C]
    (private val deserializerFactory: DeserializerFactory, private val tpe: Type, context: Context)
  extends JsonTypeDeserializer[C] {

  // Static type of the array's elements, derived from the collection type.
  val elementsType: Type = getSubElementsType(tpe)
  // Element deserializer, resolved once and reused for every array entry.
  val deserializer = deserializerFactory.getDeserializer[T](elementsType, context)

  /**
   * Consumes one JSON array from the iterator (including the closing bracket)
   * and returns its deserialized elements in encounter order.
   */
  protected def deserializeArray(jsonIterator: StringIterator, tpe: Type): ArrayBuffer[T] = {
    jsonIterator.consumeArrayStart()
    val elements = new ArrayBuffer[T]()
    jsonIterator.skipWhitespaceChars()
    // Read values until the closing bracket, skipping one comma separator
    // (if present) after each value.
    while (jsonIterator.currentChar != ']') {
      elements += deserializer.deserialize(jsonIterator)
      jsonIterator.skipWhitespaceChars()
      if (jsonIterator.currentChar == ',') {
        jsonIterator.nextChar()
      }
    }
    // Consume the closing ']'.
    jsonIterator.nextChar()
    elements
  }

  protected def getSubElementsType[S](tpe: Type): Type = TypesUtil.getSubElementsType(tpe)
}
| marpiec/mpjsons | src/main/scala/io/mpjsons/impl/deserializer/jsontypes/AbstractJsonArrayDeserializer.scala | Scala | apache-2.0 | 1,248 |
package chapter.three
import ExerciseSix.reverseSort
import ExerciseSix.reverseSortBuffer
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class ExerciseSixSpec extends FlatSpec with Matchers {

  "reverseSort" should "return an array in reverse sorted order" in {
    val input = Array(1, 2, 3, 4, 5)
    reverseSort(input) shouldBe Array(5, 4, 3, 2, 1)
  }

  "reverseSortBuffer" should "return an arrayBuffer in reverse sorted order" in {
    val input = Array(1, 2, 3, 4, 5).toBuffer
    reverseSortBuffer(input) shouldBe Array(5, 4, 3, 2, 1).toBuffer
  }
}
| deekim/impatient-scala | src/test/scala/chapter/three/ExerciseSixSpec.scala | Scala | apache-2.0 | 701 |
package net.bhardy.braintree.scala
import java.text.SimpleDateFormat
import java.util.Calendar
object SettlementBatchSummaryRequest {

  /**
   * Formats the date held by `settlementDate` as `yyyy-MM-dd`, interpreting it
   * with the calendar's own time zone (the formatter adopts the calendar via
   * `setCalendar` before formatting its instant).
   */
  def dateString(settlementDate: Calendar): String = {
    val formatter = new SimpleDateFormat("yyyy-MM-dd")
    formatter.setCalendar(settlementDate)
    formatter.format(settlementDate.getTime)
  }
}
/**
 * Fluent builder for a settlement-batch-summary request. Both settings are
 * optional; unset values are simply omitted from the generated XML.
 */
class SettlementBatchSummaryRequest extends BaseRequest {

  // Internal state; the public fluent setters below share their names with
  // the concepts, so the fields carry a "maybe" prefix to avoid confusion.
  private var maybeSettlementDate: Option[Calendar] = None
  private var maybeGroupByCustomField: Option[String] = None

  /** Sets the settlement date (a null argument clears it) and returns `this` for chaining. */
  def settlementDate(settlementDate: Calendar): SettlementBatchSummaryRequest = {
    maybeSettlementDate = Option(settlementDate)
    this
  }

  /** Sets the custom field to group by (a null argument clears it) and returns `this` for chaining. */
  def groupByCustomField(groupByCustomField: String): SettlementBatchSummaryRequest = {
    maybeGroupByCustomField = Option(groupByCustomField)
    this
  }

  override val xmlName = "settlement-batch-summary"

  protected def buildRequest(root: String): RequestBuilder = {
    import SettlementBatchSummaryRequest.dateString
    new RequestBuilder(root)
      .addElement("settlement-date", maybeSettlementDate.map(dateString))
      .addElement("group-by-custom-field", maybeGroupByCustomField)
  }
}
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.tools.ingest
import java.time.temporal.ChronoUnit
import org.apache.hadoop.conf.Configuration
import org.junit.runner.RunWith
import org.locationtech.geomesa.fs.storage.common.{DateTimeScheme, PartitionScheme}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ParquetJobUtilsTest extends Specification {
  "ParquetJobUtils" should {
    "properly serialize sft with partition scheme user data" >> {
      // Build a feature type carrying a daily partition scheme in its user data.
      val featureType = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
      val scheme = new DateTimeScheme(DateTimeScheme.Formats.Daily, ChronoUnit.DAYS, 1, "dtg", false)
      PartitionScheme.addToSft(featureType, scheme)
      // Round-trip the type through a Hadoop Configuration.
      val hadoopConf = new Configuration
      ParquetJobUtils.setSimpleFeatureType(hadoopConf, featureType)
      val roundTripped = ParquetJobUtils.getSimpleFeatureType(hadoopConf)
      // The partition scheme must survive the round trip.
      val recovered = PartitionScheme.extractFromSft(roundTripped)
      recovered.name mustEqual scheme.name()
    }
  }
}
package dhg.util.math
import scala.math._
import scalaz._
import scalaz.Scalaz._
/**
 * This Numeric class represents values using logarithms. The underlying
 * logarithmic representation is completely hidden from the calling code.
 *
 * This class exists to allow for the use of obvious operators (* for
 * multiplication instead of + on logarithms) and to prevent coding mistakes
 * resulting from the inadvertent mixing of logarithmic and non-logarithmic
 * Double representations of probabilities. Additionally, it is possible to
 * use the `sum` and `product` collection methods on collections of
 * Probabilities, and get the expected results.
 *
 * All to* methods return the (non-logarithmic) value stored. The only
 * way to access the actual logarithmic value is by the 'logValue' field.
 */
class LogDouble(val logValue: Double) extends AnyVal with Ordered[LogDouble] {

  /** Addition via log-sum-exp, anchored on the larger operand for stability. */
  def +(other: LogDouble): LogDouble = {
    val oLogValue = other.logValue
    if (logValue == Double.NegativeInfinity)
      other
    else if (oLogValue == Double.NegativeInfinity)
      this
    else if (logValue > oLogValue)
      new LogDouble(logValue + log1p(exp(oLogValue - logValue)))
    else
      new LogDouble(oLogValue + log1p(exp(logValue - oLogValue)))
  }

  /**
   * Subtraction. Fails if the result would be negative, since a negative
   * value has no logarithmic representation.
   *
   * FIX: the short-circuit guard previously tested `oLogValue == 0.0`, but a
   * logValue of 0.0 represents the value ONE (log(1) == 0); the guard
   * therefore returned `this` unchanged when subtracting one. The identity
   * `x - 0 == x` corresponds to a logValue of NegativeInfinity (log(0)).
   */
  def -(other: LogDouble): LogDouble = {
    val oLogValue = other.logValue
    if (oLogValue == Double.NegativeInfinity)
      this
    else if (logValue < oLogValue)
      sys.error("subtraction results in a negative LogDouble")
    else
      new LogDouble(logValue + log1p(-exp(oLogValue - logValue)))
  }

  /** Multiplication/division are addition/subtraction of logs. */
  def *(other: LogDouble): LogDouble = new LogDouble(logValue + other.logValue)
  def /(other: LogDouble): LogDouble = new LogDouble(logValue - other.logValue)
  def **(pow: Int): LogDouble = new LogDouble(pow * logValue)
  def **(pow: Double): LogDouble = new LogDouble(pow * logValue)

  // Ordering on logs is the same as ordering on the underlying values.
  override def compare(that: LogDouble) = logValue.compare(that.logValue)
  def max(that: LogDouble): LogDouble = if (this.logValue > that.logValue) this else that
  def min(that: LogDouble): LogDouble = if (this.logValue < that.logValue) this else that

  /** Approximate equality compared in log space (NOT in linear space). */
  def approx(o: LogDouble, tolerance: Double): Boolean = (logValue - o.logValue).abs < tolerance
  def approx(o: LogDouble): Boolean = this.approx(o, 0.00000001)

  def toInt = toDouble.toInt
  def toLong = toDouble.toLong
  def toFloat = toDouble.toFloat
  def toDouble = exp(logValue)
  override def toString = s"LogDouble(${toDouble})"
}
object LogDouble {
  // Wraps any Numeric value; an existing LogDouble passes through unchanged
  // (it is NOT re-logged, which would otherwise corrupt the value).
  def apply[N](n: N)(implicit num: Numeric[N]): LogDouble = {
    n match {
      case logDouble: LogDouble => logDouble
      case _ => new LogDouble(log(num.toDouble(n)))
    }
  }
  // Identities: log(0) = -Inf, log(1) = 0.
  val zero = new LogDouble(Double.NegativeInfinity)
  val one = new LogDouble(0.0)
  trait LogDoubleOrdering extends scala.math.Ordering[LogDouble] {
    override def compare(a: LogDouble, b: LogDouble) = a compare b
  }
  // Single implicit instance providing both Fractional and Ordering, so
  // collection methods like `sum`, `product` and `max` work on LogDoubles.
  implicit object LogDoubleIsFractional extends LogDoubleIsFractional with LogDoubleOrdering
  trait LogDoubleIsFractional extends Fractional[LogDouble] {
    def plus(x: LogDouble, y: LogDouble): LogDouble = x + y
    def minus(x: LogDouble, y: LogDouble): LogDouble = x - y
    def times(x: LogDouble, y: LogDouble): LogDouble = x * y
    def div(x: LogDouble, y: LogDouble): LogDouble = x / y
    // Negative probabilities have no log representation, so negate must fail.
    def negate(x: LogDouble): LogDouble = sys.error("LogDouble values cannot be negated")
    def fromInt(x: Int): LogDouble = new LogDouble(log(x))
    def toInt(x: LogDouble): Int = x.toInt
    def toLong(x: LogDouble): Long = x.toLong
    def toFloat(x: LogDouble): Float = x.toFloat
    def toDouble(x: LogDouble): Double = x.toDouble
    override def zero = LogDouble.zero
    override def one = LogDouble.one
  }
  //  implicit class NumericWithToLogDouble[N](self: N)(implicit num: Numeric[N]) {
  //    def toLogDouble = new LogDouble(math.log(num.toDouble(self)))
  //    def log = toLogDouble
  //  }
  // Zero-allocation syntax: `0.5.toLogDouble`, `3.log`.
  implicit class IntWithToLogDouble(val self: Int) extends AnyVal {
    def toLogDouble: LogDouble = new LogDouble(math.log(self))
    def log: LogDouble = toLogDouble
  }
  implicit class DoubleWithToLogDouble(val self: Double) extends AnyVal {
    def toLogDouble: LogDouble = new LogDouble(math.log(self))
    def log: LogDouble = toLogDouble
  }
}
| dhgarrette/low-resource-pos-tagging-2013 | src/main/scala/dhg/util/math/LogDouble.scala | Scala | apache-2.0 | 4,229 |
package plantae.citrus.mqtt.actors.session
import java.io._
import java.text.SimpleDateFormat
import java.util.{Date, UUID}
import akka.actor.ActorRef
import com.google.common.base.Throwables
import org.slf4j.LoggerFactory
import plantae.citrus.mqtt.actors.SystemRoot
import plantae.citrus.mqtt.packet.{FixedHeader, PublishPacket}
import scodec.bits.ByteVector
/**
 * Per-session message store for the MQTT broker. Messages awaiting delivery
 * are buffered in fixed-size chunks; full chunks (other than the head) are
 * spilled to disk so a slow or offline client cannot exhaust broker memory.
 *
 * NOTE(review): this class is mutable and unsynchronized — it appears to
 * assume confinement to a single session actor; confirm before sharing.
 */
class Storage(sessionName: String) extends Serializable {
  // Logger name embeds the session so per-session output can be filtered.
  private val log = LoggerFactory.getLogger(getClass() + sessionName)
  // Max messages per chunk; read from config, defaulting to 200 on any failure.
  private val chunkSize = {
    try {
      if (SystemRoot.config.hasPath("mqtt.broker.session.chunk.size"))
        SystemRoot.config.getInt("mqtt.broker.session.chunk.size")
      else 200
    } catch {
      case t: Throwable => 200
    }
  }
  // Where a chunk currently lives: in memory, or serialized at a disk path.
  sealed trait Location
  case object OnMemory extends Location
  case class OnDisk(location: String) extends Location
  // A single message queued for delivery.
  case class ReadyMessage(payload: Array[Byte], qos: Short, retain: Boolean, topic: String)
  // A batch of queued messages that can be spilled to disk and reloaded.
  case class ChunkMessage(var location: Location, var readyMessages: List[ReadyMessage]) {
    // Spills this chunk to a date-stamped file. On failure the chunk simply
    // stays in memory (location is reset and the error is logged).
    def serialize = {
      try {
        val directory = new File(SystemRoot.config.getString("mqtt.broker.session.storeDir") + "/" + sessionName + "/" + (new SimpleDateFormat("yyyy/MM/dd").format(new Date())))
        directory.mkdirs()
        val path: String = directory.getAbsolutePath + "/" + UUID.randomUUID().toString
        val outputStreamer = new ObjectOutputStream(new FileOutputStream(path))
        outputStreamer.writeObject(this)
        outputStreamer.close()
        location = OnDisk(path)
        readyMessages = List()
      } catch {
        case t: Throwable => location = OnMemory
          log.error(" Chunk serialize error : {} ", Throwables.getStackTraceAsString(t))
      }
    }
    // Loads the chunk back into memory and deletes its backing file.
    // No-op when already in memory. NOTE(review): on a read failure the
    // chunk is marked OnMemory with its old (empty) message list — those
    // messages are effectively lost; confirm this is acceptable.
    def deserialize = {
      location match {
        case OnMemory =>
        case OnDisk(path) =>
          try {
            val inputStreamer = new ObjectInputStream(new FileInputStream(path))
            readyMessages = inputStreamer.readObject().asInstanceOf[ChunkMessage].readyMessages
            location = OnMemory
            new File(path).delete()
            inputStreamer.close()
          } catch {
            case t: Throwable => location = OnMemory
              log.error(" Chunk deserialize error : {} ", Throwables.getStackTraceAsString(t))
          }
      }
    }
    // Drops the chunk's contents (and its backing file when on disk).
    def clear = {
      location match {
        case OnMemory => readyMessages = List()
        case OnDisk(path) => new File(path).delete()
      }
    }
  }
  // Rolling MQTT packet-id counter (wraps below Short.MaxValue, never 0).
  private var packetIdGenerator: Int = 0
  private var topics: List[String] = List()
  // Chunks of messages not yet handed to the client.
  private var readyQueue: List[ChunkMessage] = List()
  // Messages delivered to the client but not yet acknowledged.
  private var workQueue: List[PublishPacket] = List()
  // Unacknowledged messages queued for redelivery after a reconnect.
  private var redoQueue: List[PublishPacket] = List()
  /**
   * Enqueues a message. When the tail chunk is full, it is spilled to disk
   * (unless it is also the head chunk) and a fresh chunk is started.
   */
  def persist(payload: Array[Byte], qos: Short, retain: Boolean, topic: String) = {
    readyQueue match {
      case head :: rest => {
        if (readyQueue.last.readyMessages.size >= chunkSize) {
          if (readyQueue.last != readyQueue.head) {
            readyQueue.last.serialize
          }
          readyQueue = readyQueue :+ ChunkMessage(OnMemory, List(ReadyMessage(payload, qos, retain, topic)))
        } else
          readyQueue.last.readyMessages = (readyQueue.last.readyMessages :+ ReadyMessage(payload, qos, retain, topic))
      }
      case List() => readyQueue = readyQueue :+ ChunkMessage(OnMemory, List(ReadyMessage(payload, qos, retain, topic)))
    }
    // FIX: was `println("STORAGE session {} ready {}", sessionName, readyQueue)`,
    // which auto-tupled the arguments and printed a raw tuple to stdout with
    // the `{}` placeholders unexpanded. Use the parameterized logger instead.
    log.debug("STORAGE session {} ready {}", sessionName, readyQueue)
  }
  /** Acknowledges a delivered message, removing it from the in-flight queue. */
  def complete(packetId: Option[Int]) =
    packetId match {
      case Some(y) => workQueue = workQueue.filterNot(_.packetId match {
        case Some(x) => x == y
        case None => false
      })
      case None =>
    }
  // Pops the oldest pending message, reloading head chunks from disk and
  // discarding exhausted chunks as needed.
  def popFirstMessage: Option[ReadyMessage] = {
    readyQueue match {
      case headChunk :: tailChunk =>
        headChunk.deserialize
        headChunk.readyMessages match {
          case headMessage :: tailMessage =>
            headChunk.readyMessages = tailMessage
            Some(headMessage)
          case List() =>
            readyQueue = tailChunk
            popFirstMessage
        }
      case List() => None
    }
  }
  /**
   * Produces the next PUBLISH to send, or None when the in-flight window
   * (more than 10 unacknowledged messages) is full. Redeliveries take
   * priority over fresh messages; QoS > 0 messages are tracked in the
   * in-flight queue until acknowledged.
   */
  def nextMessage: Option[PublishPacket] = {
    if (workQueue.size > 10) {
      None
    } else {
      redoQueue match {
        case head :: tail =>
          redoQueue = tail
          workQueue = workQueue :+ head
          // Redelivery: resend with the DUP flag set, same packet id.
          Some(PublishPacket(FixedHeader(true, head.fixedHeader.qos, head.fixedHeader.retain), head.topic, head.packetId, head.payload))
        case Nil =>
          popFirstMessage match {
            case Some(message) =>
              val publish = message.qos match {
                case x if (x > 0) =>
                  val publishPacket = PublishPacket(FixedHeader(dup = false, qos = message.qos, retain = message.retain),
                    topic = message.topic,
                    packetId = Some(nextPacketId),
                    ByteVector(message.payload)
                  )
                  workQueue = workQueue :+ publishPacket
                  publishPacket
                case x if (x == 0) =>
                  // NOTE(review): MQTT forbids a packet id on QoS 0 PUBLISH,
                  // yet one is assigned here — confirm downstream encoding
                  // drops it for QoS 0 before changing this.
                  PublishPacket(FixedHeader(dup = false, qos = message.qos, retain = message.retain),
                    topic = message.topic,
                    packetId = Some(nextPacketId),
                    ByteVector(message.payload)
                  )
              }
              Some(publish)
            case None => None
          }
      }
    }
  }
  /** On disconnect, every in-flight message becomes a redelivery candidate. */
  def socketClose = {
    redoQueue = redoQueue ++ workQueue
    workQueue = Nil
  }
  /** Discards all state, deleting any on-disk chunks. */
  def clear = {
    topics = List()
    readyQueue.foreach(chunk => chunk.clear)
    readyQueue = List()
    workQueue = List()
  }
  // Next packet id in 1..Short.MaxValue-1 (0 is not a valid MQTT packet id).
  private def nextPacketId: Int = {
    packetIdGenerator = {
      if ((packetIdGenerator + 1) >= Short.MaxValue)
        1
      else (packetIdGenerator + 1)
    }
    packetIdGenerator
  }
  // Approximate pending-message count: on-disk chunks are assumed full.
  def messageSize = {
    readyQueue.foldLeft(0)((a, b) => a + (b.location match {
      case OnMemory => b.readyMessages.size
      case x: OnDisk => chunkSize
    }))
  }
}
object Storage {
  // Creates a store keyed by an explicit session name.
  def apply(sessionName: String) = new Storage(sessionName)
  // Creates a store keyed by the session actor's name, so the store and the
  // actor share an identifier.
  def apply(session: ActorRef) = new Storage(session.path.name)
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.dstream
import org.apache.spark.rdd.{PartitionerAwareUnionRDD, RDD, UnionRDD}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming._
import org.apache.spark.streaming.Duration
import scala.reflect.ClassTag
/**
 * DStream whose RDD at each batch is the union of the parent's RDDs falling
 * inside a sliding window of `_windowDuration`, advancing every
 * `_slideDuration`. Both durations must be multiples of the parent's slide
 * duration so windows align with parent batches.
 */
private[streaming]
class WindowedDStream[T: ClassTag](
    parent: DStream[T],
    _windowDuration: Duration,
    _slideDuration: Duration)
  extends DStream[T](parent.ssc) {

  if (!_windowDuration.isMultipleOf(parent.slideDuration)) {
    throw new Exception("The window duration of windowed DStream (" + _windowDuration + ") " +
      "must be a multiple of the slide duration of parent DStream (" + parent.slideDuration + ")")
  }

  if (!_slideDuration.isMultipleOf(parent.slideDuration)) {
    throw new Exception("The slide duration of windowed DStream (" + _slideDuration + ") " +
      "must be a multiple of the slide duration of parent DStream (" + parent.slideDuration + ")")
  }

  // Persist parent level by default, as those RDDs are going to be obviously reused.
  // (Each parent RDD participates in several overlapping windows.)
  parent.persist(StorageLevel.MEMORY_ONLY_SER)

  def windowDuration: Duration = _windowDuration

  override def dependencies: List[DStream[_]] = List(parent)

  override def slideDuration: Duration = _slideDuration

  // The parent must remember enough history to cover a full window.
  override def parentRememberDuration: Duration = rememberDuration + windowDuration

  override def persist(level: StorageLevel): DStream[T] = {
    // Do not let this windowed DStream be persisted as windowed (union-ed) RDDs share underlying
    // RDDs and persisting the windowed RDDs would store numerous copies of the underlying data.
    // Instead control the persistence of the parent DStream.
    parent.persist(level)
    this
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val currentWindow = new Interval(validTime - windowDuration + parent.slideDuration, validTime)
    val rddsInWindow = parent.slice(currentWindow)
    // When every RDD in the window shares one partitioner, the
    // partitioner-aware union avoids a shuffle-inducing repartition.
    val windowRDD = if (rddsInWindow.flatMap(_.partitioner).distinct.length == 1) {
      logDebug("Using partition aware union for windowing at " + validTime)
      new PartitionerAwareUnionRDD(ssc.sc, rddsInWindow)
    } else {
      logDebug("Using normal union for windowing at " + validTime)
      new UnionRDD(ssc.sc, rddsInWindow)
    }
    Some(windowRDD)
  }
}
| tophua/spark1.52 | streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala | Scala | apache-2.0 | 3,141 |
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
/*
* This file contains derivative works that require the following
* header to be displayed:
*
* Copyright 2002-2014 EPFL.
* Copyright 2011-2014 Typesafe, Inc.
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software in
* source or binary form for any purpose with or without fee is hereby
* granted, provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
* 3. Neither the name of the EPFL nor the names of its
* contributors may be used to endorse or promote products
* derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
package org.ensime.core
import scala.collection.mutable
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration._
import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import akka.actor.ActorRef
import akka.pattern.Patterns
import akka.util.Timeout
import org.ensime.api._
import org.ensime.indexer.lucene.SimpleLucene
import org.ensime.util.Timing.dilation
/**
 * Completion support mixed into the presentation compiler: computes the
 * completion context at a position, queries the compiler (and, for
 * type-looking prefixes, the indexer) and scores/sorts the candidates.
 */
trait CompletionControl {
  self: RichPresentationCompiler =>

  // A completion request: where, what has been typed so far, and whether the
  // user appears to be writing `new ...` (which favours constructors).
  sealed trait CompletionContext {
    val source: SourceFile
    val offset: Int
    val prefix: String
    val constructing: Boolean
  }

  // Completing a name visible in the lexical scope (identifiers, imports).
  case class ScopeContext(
    source: SourceFile,
    offset: Int,
    prefix: String,
    constructing: Boolean
  ) extends CompletionContext

  // Completing a member selection after `qualifier.`.
  case class MemberContext(
    source: SourceFile,
    offset: Int,
    prefix: String,
    constructing: Boolean
  ) extends CompletionContext

  import CompletionUtil._

  def completionsAt(inputP: Position, maxResultsArg: Int, caseSens: Boolean)(
    implicit ec: ExecutionContext
  ): Future[CompletionInfoList] = {
    val origContents = inputP.source.content
    val point = inputP.endOrCursor

    if (point > origContents.length) {
      // invalid request - completion point is outside of file
      // this causes an ArrayOutOfBounds in the array copy below
      logger.warn("completionsAt request has point outside of file")
      Future.successful(CompletionInfoList("", List.empty))
    } else {
      val maxResults =
        if (maxResultsArg == 0) SimpleLucene.MaxResults else maxResultsArg

      // Only the last 100 characters are needed to recognise the prefix and
      // the `new`/`import` patterns preceding the cursor.
      val preceding = inputP.source.content.slice(
        Math.max(0, inputP.point - 100),
        inputP.point
      )

      val defaultPrefix = IdentRegexp.findFirstMatchIn(preceding) match {
        case Some(m) => m.group(1)
        case _       => ""
      }

      val constructing =
        ConstructingRegexp.findFirstMatchIn(preceding).isDefined

      val (src, p, patched) = if (defaultPrefix.isEmpty) {
        // Add a fake prefix if none was provided by the user. Otherwise the
        // compiler will give us a weird tree.
        val orig = origContents

        // Uses array logic to minimise memory spikes of large Strings
        val contents = Array.ofDim[Char](orig.length + 1)
        System.arraycopy(orig, 0, contents, 0, point)
        contents(point) = 'a'
        System.arraycopy(orig, point, contents, point + 1, orig.length - point)

        // uses the same VirtualFile as the original
        val src = new BatchSourceFile(inputP.source.file, contents)
        (src, inputP.withSource(src).withShift(1), true)
      } else {
        (inputP.source, inputP, false)
      }
      askReloadFile(src)

      // Classify the completion context from the AST node at the cursor.
      val x = new Response[Tree]
      askTypeAt(p, x)
      val contextOpt = x.get match {
        case Left(tree) =>
          if (logger.isTraceEnabled())
            logger.trace("Completing at tree:" + tree.summaryString)
          tree match {
            case Apply(fun, _) =>
              fun match {
                case Select(qualifier: New, name) =>
                  Some(
                    ScopeContext(src,
                                 qualifier.pos.endOrCursor,
                                 defaultPrefix,
                                 constructing = true)
                  )
                case Select(qual, name)
                    if qual.pos.isDefined && qual.pos.isRange =>
                  val prefix = if (patched) "" else name.decoded
                  Some(
                    MemberContext(src,
                                  qual.pos.endOrCursor,
                                  prefix,
                                  constructing)
                  )
                case _ =>
                  val prefix =
                    if (patched) ""
                    else
                      src.content
                        .slice(fun.pos.startOrCursor, fun.pos.endOrCursor)
                        .mkString
                  Some(
                    ScopeContext(src, fun.pos.endOrCursor, prefix, constructing)
                  )
              }
            case Literal(Constant(_)) => None
            case New(name) =>
              Some(
                ScopeContext(src,
                             name.pos.endOrCursor,
                             defaultPrefix,
                             constructing = true)
              )
            case Select(qualifier, name)
                if qualifier.pos.isDefined && qualifier.pos.isRange =>
              Some(
                MemberContext(src,
                              qualifier.pos.endOrCursor,
                              defaultPrefix,
                              constructing)
              )
            case Import(expr, _) =>
              val topLevel =
                ImportTopLevelRegexp.findFirstMatchIn(preceding).isDefined
              if (topLevel) {
                Some(
                  ScopeContext(src,
                               expr.pos.endOrCursor,
                               defaultPrefix,
                               constructing = false)
                )
              } else {
                Some(
                  MemberContext(src,
                                expr.pos.endOrCursor,
                                defaultPrefix,
                                constructing = false)
                )
              }
            case other =>
              Some(ScopeContext(src, p.point, defaultPrefix, constructing))
          }
        case _ =>
          logger.error("Unrecognized completion context.")
          None
      }
      contextOpt match {
        case Some(context) =>
          for {
            compResults <- makeAll(context, maxResults, caseSens)
            // Higher relevance first; ties broken by shorter name.
            sorted = compResults.sortWith { (c1, c2) =>
              c1.relevance > c2.relevance ||
              (c1.relevance == c2.relevance &&
              c1.name.length < c2.name.length)
            }
            filtered = sorted.take(maxResults)
          } yield CompletionInfoList(context.prefix, filtered)
        case _ =>
          Future.successful(CompletionInfoList("", Nil))
      }
    }
  }

  def makeAll(context: CompletionContext, maxResults: Int, caseSens: Boolean)(
    implicit ec: ExecutionContext
  ): Future[List[CompletionInfo]] = {

    // Scores a symbol: local, public, non-inherited, prefix-matching symbols
    // rank highest; Any/AnyRef/Object members rank lowest.
    def toCompletionInfo(
      context: CompletionContext,
      sym: Symbol,
      tpe: Type,
      inherited: Boolean,
      viaView: Symbol
    ): List[CompletionInfo] = {

      var score = 0
      if (sym.nameString.startsWith(context.prefix)) score += 10
      if (!inherited) score += 10
      if (!sym.hasPackageFlag) score += 10
      if (!sym.isType) score += 10
      if (sym.isLocalToBlock) score += 10
      if (sym.isPublic) score += 10
      if (viaView == NoSymbol) score += 10
      if (sym.owner != definitions.AnyClass &&
          sym.owner != definitions.AnyRefClass &&
          sym.owner != definitions.ObjectClass) score += 30

      val infos = List(CompletionInfoBuilder.fromSymbolAndType(sym, tpe, score))

      if (context.constructing) {
        val constructorSyns = constructorSynonyms(sym).map { c =>
          CompletionInfoBuilder.fromSymbolAndType(sym, c.tpe, score + 50)
        }
        infos ++ constructorSyns
      } else {
        val applySyns = applySynonyms(sym).map { c =>
          CompletionInfoBuilder.fromSymbolAndType(sym, c.tpe, score)
        }
        infos ++ applySyns
      }
    }

    val buff = new mutable.LinkedHashSet[CompletionInfo]()

    // Kick off an index search if the name looks like a type.
    // Do this before the lookups below, so the two can
    // proceed concurrently.
    val typeSearch = context match {
      case ScopeContext(_, _, prefix, _) =>
        if (TypeNameRegex.findFirstMatchIn(prefix).isDefined) {
          Some(fetchTypeSearchCompletions(prefix, maxResults, indexer))
        } else None
      case _ => None
    }

    // Poll the compiler response until complete, accumulating members.
    var members = List[Member]()
    val x = new Response[List[Member]]
    context match {
      case ScopeContext(src, offset, _, _) =>
        askScopeCompletion(rangePos(src, offset, offset, offset), x)
      case MemberContext(src, offset, _, _) =>
        askTypeCompletion(rangePos(src, offset, offset, offset), x)
    }
    do {
      x.get match {
        case Left(mems) => members ++= mems
        case _          =>
      }
    } while (!x.isComplete)

    // Any interaction with the members (their types and symbols) must be done
    // on the compiler thread.
    askOption[Unit] {
      val filtered = members.filter { m =>
        val s = m.sym.nameString
        matchesPrefix(s,
                      context.prefix,
                      matchEntire = false,
                      caseSens = caseSens) && !s.contains("$")
      }
      for (m <- filtered) {
        m match {
          case m @ ScopeMember(sym, tpe, accessible, viaView) =>
            // Skip the symbol currently being defined at the cursor.
            val p = sym.pos
            val inSymbol = p.isRange && (context.offset >= p.startOrCursor && context.offset <= p.endOrCursor)
            val isBrokenType = tpe match {
              // ByNameParamClass without args is the invalid object. PC generates it when there is no
              // completion context. See the ignored test in RichPresentationCompilerSpec.
              case TypeRef(_, definitions.ByNameParamClass, Nil) => true
              case _                                             => false
            }
            // NOTE(review): `&` below is the non-short-circuit boolean AND —
            // harmless here (both operands are pure), but presumably a typo
            // for `&&`.
            if (!sym.isConstructor && !inSymbol & !isBrokenType) {
              buff ++= toCompletionInfo(context,
                                        sym,
                                        tpe,
                                        inherited = false,
                                        NoSymbol)
            }

          case m @ TypeMember(sym, tpe, accessible, inherited, viaView) =>
            if (!sym.isConstructor) {
              buff ++= toCompletionInfo(context, sym, tpe, inherited, viaView)
            }
          case _ =>
        }
      }
    }

    def keywordCompletions(prefix: String): Seq[CompletionInfo] =
      if (prefix.length > 0) {
        Keywords.keywordCompletions.filter(_.name.startsWith(prefix))
      } else
        Seq()

    // Merge compiler results, any index search results, and keywords.
    val pcResults = buff.toList
    typeSearch
      .getOrElse(Future.successful(None))
      .map { searchResults =>
        pcResults ++ searchResults.getOrElse(Nil) ++ keywordCompletions(
          context.prefix
        )
      }
  }
}
object Keywords {

  // Scala keywords offered as plain-text completions. "do" and "if" are
  // deliberately absent (they were commented out in the original list —
  // presumably too noisy at the cursor; confirm before re-adding).
  // NOTE(review): "requires" is retained for parity but is not a keyword in
  // modern Scala.
  val keywords: Seq[String] =
    ("abstract case catch class def else extends false final finally for " +
      "forSome implicit import lazy match new null object override package " +
      "private protected return requires sealed super this throw trait try " +
      "true type val var while with yield").split(' ').toSeq

  // Each keyword is presented with a fixed relevance of 100 and no type info.
  val keywordCompletions: Seq[CompletionInfo] =
    keywords.map(kw => CompletionInfo(None, kw, 100, None))
}
object CompletionUtil {

  // Recognisers applied to the text immediately preceding the cursor.
  val IdentRegexp = """([a-zA-Z0-9_#:<=>@!%&*+/?\\\\^|~-]*)\\z""".r
  val JavaIdentRegexp = """([a-zA-Z0-9_]+)\\z""".r
  val ImportTopLevelRegexp = """import [^\\.]*\\z""".r
  val ImportRegexp = """import [a-zA-Z0-9_\\.]*\\z""".r
  val ImportSubtypeRegexp =
    """import [a-z0-9_\\.]*[A-Z][a-zA-Z0-9_]*\\.[a-zA-Z0-9_\\.]*\\z""".r
  val ConstructingRegexp = """new [\\.a-zA-Z0-9_]*\\z""".r
  val TypeNameRegex = """^[A-Z][a-zA-Z0-9]*\\z""".r

  /**
   * Whether candidate `m` matches the typed `prefix`: exact match when
   * `matchEntire`, otherwise a (case-sensitive or -insensitive) prefix match,
   * or a camel-hump match when the prefix contains an uppercase letter
   * (e.g. "NPE" matching "NullPointerException").
   */
  def matchesPrefix(m: String,
                    prefix: String,
                    matchEntire: Boolean,
                    caseSens: Boolean): Boolean =
    if (matchEntire) m == prefix
    else {
      val plainMatch =
        if (caseSens) m.startsWith(prefix)
        else m.toUpperCase.startsWith(prefix.toUpperCase)
      // Camel-hump matching: allow any identifier characters before each
      // uppercase letter of the prefix.
      // NOTE(review): the prefix is interpolated unescaped — regex
      // metacharacters in an operator-style prefix could misbehave.
      def humpMatch: Boolean = {
        val humpPattern = "[A-Za-z0-9]*" + prefix.replaceAll(
          "(?<!^)([A-Z])",
          "[A-Za-z0-9]*$1"
        ) + "[A-Za-z0-9]*"
        prefix.exists(_.isUpper) && m.matches(humpPattern)
      }
      plainMatch || humpMatch
    }

  /**
   * Asks the indexer for type names starting with `prefix`; yields None on
   * timeout, failure, or an unexpected (non-SymbolSearchResults) reply.
   */
  def fetchTypeSearchCompletions(
    prefix: String,
    maxResults: Int,
    indexer: ActorRef
  ): Future[Option[List[CompletionInfo]]] = {
    val request = TypeCompletionsReq(prefix, maxResults)
    import scala.concurrent.ExecutionContext.Implicits.{ global => exe }
    val response = Patterns.ask(indexer, request, Timeout((1 * dilation).seconds))
    response
      .map {
        case results: SymbolSearchResults =>
          results.syms.map { found =>
            CompletionInfo(
              None,
              found.localName,
              40,
              None
            )
          }
        case unknown =>
          throw new IllegalStateException(
            "Unexpected response type from request:" + unknown
          )
      }
      .map(Some(_))
      .recover { case _ => None }
  }
}
| yyadavalli/ensime-server | core/src/main/scala/org/ensime/core/Completion.scala | Scala | gpl-3.0 | 14,848 |
/*
* Copyright 2013 - 2015, Daniel Krzywicki <daniel.krzywicki@agh.edu.pl>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package pl.edu.agh.scalamas.mas
import akka.actor.{Actor, ActorContext, Props}
import org.apache.commons.math3.random.RandomGenerator
import pl.edu.agh.scalamas.mas.LogicTypes.{Agent, MeetingFunction, Migration}
import pl.edu.agh.scalamas.mas.RootEnvironment.{Add, Migrate}
object RootEnvironment {
  /**
   * Message sent from children islands to the root environment to request migration of the provided agents.
   * @param agents the agents to be migrated
   */
  case class Migrate(agents: Seq[Agent])
  /**
   * Message sent from the root environment to its children islands to request the addition of an agent to the island.
   * @param agent the agent to be added to the island
   */
  case class Add(agent: Agent)
  /**
   * Props for a RootEnvironment.
   * @param islandProps the props of the island children
   * @param islandsNumber the number of children to spawn
   * @return the props for a RootEnvironment
   */
  def props(islandProps: Props, islandsNumber: Int, rand: RandomGenerator) = Props(classOf[RootEnvironment], islandProps, islandsNumber, rand)
  /**
   * Generic migration behaviour, which delegates the migration to the parent of the current context.
   * This parent is assumeded to be a RootEnvironment.
   *
   * NOTE(review): the group capacity `cap` is currently unused — all agents
   * are migrated in a single message; the chunked variant is commented out.
   * Confirm this is intentional.
   *
   * @param context current context
   * @return the result of a migration meeting - an empty list
   */
  def migration(implicit context: ActorContext): MeetingFunction = {
    case (Migration(cap), agents) =>
      //      agents grouped(cap) foreach { context.parent ! Migrate(_)}
      context.parent ! Migrate(agents);
      List.empty
  }
}
/**
 * Root environment for the simulation: spawns a fixed pool of island children
 * and scatters migrating agents across them at random.
 * @param islandProps the props of the island children
 * @param islandsNumber the number of children to spawn (must be positive)
 * @param rand source of randomness for island selection
 */
class RootEnvironment(islandProps: Props, islandsNumber: Int, rand: RandomGenerator) extends Actor {
  require(islandsNumber > 0)

  // Children are created eagerly, named island-0 .. island-(n-1).
  val islands = Array.tabulate(islandsNumber)(i => context.actorOf(islandProps, s"island-$i"))

  def receive = {
    case Migrate(agents) =>
      for (agent <- agents) {
        randomIsland ! Add(agent)
      }
  }

  // Uniformly random child island.
  def randomIsland = islands(rand.nextInt(islands.length))
}
} | ros3n/IntOb | core/src/main/scala/pl/edu/agh/scalamas/mas/RootEnvironment.scala | Scala | mit | 3,354 |
package gsd.linux.stats
import java.io.PrintStream
import gsd.linux.KConfigParser
object ASEStatisticsMain {
def representationStats(stats: ASEStatistics)(out: PrintStream) {
out.println("Configs: " + stats.configs.size)
out.println("Menuconfigs: " + stats.menuconfigs.size)
out.println("Menus: " + stats.menus.size)
out.println
out.println("Number of identifiers: " + stats.k.identifiers.size)
out.println
out.println("Bool: " + stats.boolType.size)
out.println("Tri: " + stats.tristateType.size)
out.println
out.println("Int: " + stats.intConfigs.size)
out.println("Hex: " + stats.hexConfigs.size)
out.println("String: " + stats.stringConfigs.size)
out.println
out.println("Menus: " + stats.menus.size)
}
def groupingStats(stats: ASEStatistics)(out: PrintStream) {
out.println("Menus: " + stats.menus.size)
out.println("Menuconfigs: " + stats.menuconfigs.size)
out.println
out.println("XOR: " + stats.xorGroups.size)
out.println("OR: " + stats.orGroups.size)
out.println("MUTEX: " + stats.mutexGroups.size)
out.println("OPT: " + stats.optGroups.size)
out.println
out.println("Optional Groups:")
stats.optGroups foreach { g => println(g.prompt.text) }
}
def dependencyStats(stats: ASEStatistics)(out: PrintStream) {
// visibility conditions
out.println("Vis. Cond.: %4d / %4d".format(
stats.configsWithVisConds.size, stats.allConfigs.size))
out.println("No Vis. Cond.: %4d / %4d".format(
stats.configsWithNoVisConds.size, stats.allConfigs.size))
// unconditionally derived
out.println("Uncond. Derived: %4d / %4d".format(
stats.configsWithUncondDerived.size, stats.allConfigs.size))
out.println
// TODO categories can overlap
out.println("Real Defs: %4d / %4d".format(
stats.configsWithTrueDefs.size, stats.allConfigs.size))
out.println("Cond. Derived: %4d / %4d".format(
stats.configsWithCondDerived.size, stats.allConfigs.size))
out.println
out.println("Reverse Deps: %4d / %4d".format(
stats.configsWithRevDeps.size, stats.allConfigs.size))
out.println
out.println("Def (lit): %4d / %4d".format(
stats.defaultsValued.size, stats.defaults.size))
out.println("Def (comp): %4d / %4d".format(
stats.defaultsComputed.size, stats.defaults.size))
}
def intersectionStats(implicit stats: ASEStatistics) {
import Intersection._
// val res =
// calculatePairWise(Array((stats.configsWithUncondDerived map { _.id }).toSet,
// (stats.configsWithTrueDefs map { _.id }).toSet,
// (stats.configsWithRevDeps map { _.id }).toSet))
//
// printPairWiseSizes(res, Array("Uncond.Derived", "w/RealDefaults", "w/RevDeps"))
//
// println
val combos =
calculatePartitions(
Array((stats.configsWithUncondDerived map { _.name }).toSet,
(stats.configsWithCondDerived map { _.name }).toSet,
(stats.configsWithTrueDefs map { _.name }).toSet),
Array((stats.configsWithTrueDefs map { _.name }).toSet,
(stats.configsWithRevDeps map { _.name }).toSet),
Array("w/RealDefaults", "w/RevDep"))
printComboSizes(combos, Array("Uncond.Derived", "Cond.Derived", "RealDefs."))
}
/** Entry point: parses a KConfig file and prints one of three statistics
  * reports, selected by the first command line flag.
  * Usage: ASEStatisticsMain [-r|-g|-d] <input-exconfig-file> [<output-file>] */
def main(args: Array[String]) {
  if (args.size < 2) {
    System.err.println("Usage: ASEStatisticsMain [-r|-g|-d] <input-exconfig-file> [<output-file>]")
    System exit 1
  }
  val k = KConfigParser.parseKConfigFile(args(1))
  val stats = new ASEStatistics(k)
  val out = if (args.size > 2) new PrintStream(args(2))
            else System.out
  try {
    args(0) match {
      case "-r" => representationStats(stats)(out)
      case "-g" => groupingStats(stats)(out)
      case "-d" => dependencyStats(stats)(out)
      case other =>
        // Previously an unknown flag crashed with a MatchError; fail cleanly.
        System.err.println("Unknown option: " + other)
        System.err.println("Usage: ASEStatisticsMain [-r|-g|-d] <input-exconfig-file> [<output-file>]")
        System exit 1
    }
  } finally {
    // Close (and thereby flush) the output file, but never close stdout.
    if (out ne System.out) out.close()
  }
}
}
| scas-mdd/linux-variability-analysis-tools | src/main/scala/gsd/linux/stats/ASEStatisticsMain.scala | Scala | gpl-3.0 | 3,963 |
package com.sksamuel.elastic4s.searches.queries
import com.sksamuel.elastic4s.DocumentRef
/** Query definition for Elasticsearch's percolate query.
  *
  * @param field  name of the field holding the stored percolator queries
  * @param `type` document type of the document being percolated
  * @param ref    reference to an already-indexed document to percolate, if any
  * @param source inline JSON source of the document to percolate, if any
  */
case class PercolateQueryDefinition(field: String,
                                    `type`: String,
                                    ref: Option[DocumentRef] = None,
                                    source: Option[String] = None) extends QueryDefinition
| tyth/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/PercolateQueryDefinition.scala | Scala | apache-2.0 | 355 |
/*
* Copyright 2015-2016 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.entity
import scala.util.Try
import spray.json.JsString
import spray.json.JsValue
import spray.json.RootJsonFormat
import spray.json.deserializationError
/**
* Authentication key, consisting of a UUID and Secret.
*
* It is a value type (hence == is .equals, immutable and cannot be assigned null).
* The constructor is private so that argument requirements are checked and normalized
* before creating a new instance.
*
* @param (uuid, key) the uuid and key, assured to be non-null because both types are values
*/
protected[core] class AuthKey private (private val k: (UUID, Secret)) extends AnyVal {
  // Public identifier half of the key.
  def uuid = k._1
  // Secret half of the key.
  def key = k._2
  // Same uuid with a freshly generated secret — invalidates the old credential.
  def revoke = new AuthKey(uuid, Secret())
  // Serialized "uuid:secret" form; parseable back via AuthKey(String).
  def compact = s"$uuid:$key"
  // Deliberately omits the secret so an AuthKey can be logged safely.
  override def toString = uuid.toString
}
protected[core] object AuthKey {

  /**
   * Creates AuthKey.
   *
   * @param uuid the uuid, assured to be non-null because UUID is a value
   * @param key the key, assured to be non-null because Secret is a value
   */
  protected[core] def apply(uuid: UUID, key: Secret): AuthKey = new AuthKey(uuid, key)

  /**
   * Creates an auth key for a randomly generated UUID with a randomly generated secret.
   *
   * @return AuthKey
   */
  protected[core] def apply(): AuthKey = new AuthKey(UUID(), Secret())

  /**
   * Creates AuthKey from a string where the uuid and key are separated by a colon.
   * If the string contains more than one colon, all values are ignored except for
   * the first two hence "k:v*" produces ("k","v").
   *
   * @param str the string containing uuid and key separated by colon
   * @return AuthKey if argument is properly formated
   * @throws IllegalArgumentException if argument is not well formed
   */
  @throws[IllegalArgumentException]
  protected[core] def apply(str: String): AuthKey = {
    // split may yield nulls for missing parts; UUID(...)/Secret(...) reject
    // them, which is where the IllegalArgumentException comes from.
    val (k, v) = split(str)
    new AuthKey(UUID(k), Secret(v))
  }

  /**
   * Makes a tuple from a string where the values are separated by a colon.
   * If the string contains more than one colon, all values are ignored except for
   * the first two hence "k:v*" produces the tuple ("k","v") and "::*" produces ("","").
   *
   * @param string to create pair from
   * @return (key, value) where both are null, value is null, or neither is null
   */
  private def split(str: String): (String, String) = {
    val parts = if (str != null && str.nonEmpty) str.split(":") else Array[String]()
    val k = if (parts.size >= 1) parts(0).trim else null
    val v = if (parts.size == 2) parts(1).trim else null
    (k, v)
  }

  // JSON (de)serialization: an AuthKey is written as its compact "uuid:secret"
  // string; any parse failure is surfaced as a deserialization error.
  protected[core] implicit val serdes = new RootJsonFormat[AuthKey] {
    def write(k: AuthKey) = JsString(k.compact)
    def read(value: JsValue) = Try {
      val JsString(s) = value
      AuthKey(s)
    } getOrElse deserializationError("authorization key malformed")
  }
}
| xin-cai/openwhisk | common/scala/src/main/scala/whisk/core/entity/AuthKey.scala | Scala | apache-2.0 | 3,544 |
package org.hibernate.cache.rediscala.region
import java.util
import org.hibernate.cache.rediscala.RedisRegionFactory
import org.hibernate.cache.rediscala.strategy.AbstractReadWriteRedisAccessStrategy
import org.hibernate.cfg.{AvailableSettings, Configuration}
/**
* RedisRegionFactoryImplTest
*
* @author sunghyouk.bae@gmail.com
* @since 2014. 2. 28.
*/
/** Runs the shared region test suite against a Hibernate configuration that
  * uses the Redis region factory. */
class RedisRegionFactoryImplTest extends AbstractRedisRegionTest {

  // Runtime class name of the "Item" wrapper used by the read/write access
  // strategy; needed below to recognize wrapped cache entries.
  val ABSTRACT_READ_WRITE_REDIS_ACCESS_STRATEGY_CLASS_NAME =
    classOf[AbstractReadWriteRedisAccessStrategy[_]].getName + "#Item"

  // Point Hibernate at the Redis region factory and its config file.
  override protected def configCache(cfg: Configuration) {
    cfg.setProperty(AvailableSettings.CACHE_REGION_FACTORY, classOf[RedisRegionFactory].getName)
    cfg.setProperty(AvailableSettings.CACHE_PROVIDER_CONFIG, "hibernate-redis.conf")
  }

  // Unwraps a cache entry into the underlying map. Read/write-strategy entries
  // hide the map in a private "value" field, extracted via reflection.
  override protected def getMapFromCacheEntry(entry: Any): util.Map[Any, Any] = {
    val isReadWriteStrategy = entry.getClass.getName.equals(ABSTRACT_READ_WRITE_REDIS_ACCESS_STRATEGY_CLASS_NAME)
    if (isReadWriteStrategy) {
      val field = entry.getClass.getDeclaredField("value")
      field.setAccessible(true)
      field.get(entry).asInstanceOf[util.Map[Any, Any]]
    }
    else {
      entry.asInstanceOf[util.Map[Any, Any]]
    }
  }
}
| debop/debop4s | hibernate-rediscala/src/test/scala/org/hibernate/cache/rediscala/region/RedisRegionFactoryImplTest.scala | Scala | apache-2.0 | 1,260 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.profiler
import org.kohsuke.args4j.{CmdLineParser, Option}
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
import java.io.File
import org.apache.mxnet.Profiler
import org.apache.mxnet.Random
import org.apache.mxnet.Shape
import org.apache.mxnet.NDArray
import org.apache.mxnet.Context
/**
* @author Depeng Liang
*/
/** Exercises a collection of NDArray operations under the MXNet profiler so
  * their execution shows up in the generated trace; each routine also
  * sanity-checks the numeric results it produces. */
object ProfilerNDArray {
  private val logger = LoggerFactory.getLogger(classOf[ProfilerNDArray])

  /** Verifies NDArray.broadcast_to against a hand-rolled reference broadcast. */
  def testBroadcast(): Unit = {
    val sampleNum = 1000
    def testBroadcastTo(): Unit = {
      for (i <- 0 until sampleNum) {
        val nDim = scala.util.Random.nextInt(2) + 1
        val targetShape = Shape((0 until nDim).map(i => scala.util.Random.nextInt(10) + 1))
        // Collapse a random subset of axes to size 1 so broadcasting has work to do.
        val shape = targetShape.toArray.map { s =>
          if (scala.util.Random.nextInt(2) == 1) 1
          else s
        }
        val dat = NDArray.empty(shape: _*)
        val randomRet = (0 until shape.product)
          .map(r => scala.util.Random.nextFloat() - 0.5f).toArray
        dat.set(randomRet)
        val ndArrayRet = NDArray.broadcast_to(Map("shape" -> targetShape))(dat).get
        require(ndArrayRet.shape == targetShape)
        val err = {
          // Reference implementation of broadcast: walking axes from innermost
          // to outermost, repeat each group of values where the source axis is 1.
          val ret = {
            (randomRet /: shape.zipWithIndex.reverse){ (acc, elem) => elem match { case (s, i) =>
              if (s != targetShape(i)) {
                acc.grouped(shape.takeRight(shape.length - i).product).map {g =>
                  (0 until targetShape(i)).map(x => g).flatten
                }.flatten.toArray
              } else acc
            }}
          }
          // Mean squared error between the library output and the reference.
          val tmp = ndArrayRet.toArray.zip(ret).map{ case (l, r) => Math.pow(l - r, 2) }
          tmp.sum / tmp.length
        }
        require(err < 1E-8)
        ndArrayRet.dispose()
        dat.dispose()
      }
    }
    testBroadcastTo()
  }

  /** Creates a random uniform NDArray with `dim` axes and roughly 1000 elements. */
  def randomNDArray(dim: Int): NDArray = {
    val tmp = Math.pow(1000, 1.0 / dim).toInt
    val shape = Shape((0 until dim).map(d => scala.util.Random.nextInt(tmp) + 1))
    Random.uniform(-10f, 10f, shape)
  }

  /** Round-trips NDArrays through save/load, both as a list and as a name map;
    * serialization must be lossless, so differences are required to be exactly 0. */
  def testNDArraySaveload(): Unit = {
    val maxDim = 5
    val nRepeat = 10
    val fileName = s"${System.getProperty("java.io.tmpdir")}/tmpList.bin"
    for (repeat <- 0 until nRepeat) {
      try {
        val data = (0 until 10).map(i => randomNDArray(scala.util.Random.nextInt(4) + 1))
        NDArray.save(fileName, data)
        val data2 = NDArray.load2Array(fileName)
        require(data.length == data2.length)
        for ((x, y) <- data.zip(data2)) {
          val tmp = x - y
          require(tmp.toArray.sum == 0)
          tmp.dispose()
        }
        val dMap = data.zipWithIndex.map { case (arr, i) =>
          s"NDArray xx $i" -> arr
        }.toMap
        NDArray.save(fileName, dMap)
        val dMap2 = NDArray.load2Map(fileName)
        require(dMap.size == dMap2.size)
        for ((k, x) <- dMap) {
          val y = dMap2(k)
          val tmp = x - y
          require(tmp.toArray.sum == 0)
          tmp.dispose()
        }
        data.foreach(_.dispose())
      } finally {
        // Always remove the temp file, even if an assertion failed.
        val file = new File(fileName)
        file.delete()
      }
    }
  }

  /** Copies an NDArray to a (CPU) context and checks the copy is exact. */
  def testNDArrayCopy(): Unit = {
    val c = Random.uniform(-10f, 10f, Shape(10, 10))
    val d = c.copyTo(Context.cpu(0))
    val tmp = c - d
    require(tmp.toArray.map(Math.abs).sum == 0)
    c.dispose()
    d.dispose()
  }

  /** Relative difference sum(|a - b|) / sum(|a|) over NDArrays. */
  def reldiff(a: NDArray, b: NDArray): Float = {
    val diff = NDArray.sum(NDArray.abs(a - b)).toScalar
    val norm = NDArray.sum(NDArray.abs(a)).toScalar
    diff / norm
  }

  /** Relative difference sum(|a - b|) / sum(|a|) over plain float arrays. */
  def reldiff(a: Array[Float], b: Array[Float]): Float = {
    val diff =
      (a zip b).map { case (aElem, bElem) => Math.abs(aElem - bElem) }.sum
    // Sum of absolute values. Clearer (and empty-safe) replacement for the
    // former `a.reduce(Math.abs(_) + Math.abs(_))`, which folded to the same
    // value because abs of a non-negative partial sum is the identity.
    val norm: Float = a.map(x => Math.abs(x)).sum
    diff / norm
  }

  /** Checks unary negation is value-correct and does not mutate its operand. */
  def testNDArrayNegate(): Unit = {
    val rand = Random.uniform(-10f, 10f, Shape(2, 3, 4))
    val npy = rand.toArray
    val arr = NDArray.empty(Shape(2, 3, 4))
    arr.set(npy)
    require(reldiff(npy, arr.toArray) < 1e-6f)
    val negativeArr = -arr
    require(reldiff(npy.map(_ * -1f), negativeArr.toArray) < 1e-6f)
    // a final check to make sure the negation (-) is not implemented
    // as inplace operation, so the contents of arr does not change after
    // we compute (-arr)
    require(reldiff(npy, arr.toArray) < 1e-6f)
    rand.dispose()
    arr.dispose()
    negativeArr.dispose()
  }

  /** Checks scalar arithmetic on 10x10 (100-element) NDArrays. */
  def testNDArrayScalar(): Unit = {
    val c = NDArray.empty(10, 10)
    val d = NDArray.empty(10, 10)
    c.set(0.5f)
    d.set(1.0f)
    d -= c * 2f / 3f * 6f   // d = 1 - 0.5 * 4 = -1 per element
    c += 0.5f               // c = 1 per element
    // Compare with Math.abs: the previous one-sided `sum - 100f < 1e-5f`
    // checks also accepted arbitrarily small (wrong) sums.
    require(Math.abs(c.toArray.sum - 100f) < 1e-5f)
    require(Math.abs(d.toArray.sum + 100f) < 1e-5f)
    c.set(2f)
    require(Math.abs(c.toArray.sum - 200f) < 1e-5f)
    d.set(-c + 2f)          // d = 0 per element
    require(Math.abs(d.toArray.sum) < 1e-5f)
    c.dispose()
    d.dispose()
  }

  /** Checks NDArray.clip bounds every element into [-2, 2]. */
  def testClip(): Unit = {
    val shape = Shape(10)
    val A = Random.uniform(-10f, 10f, shape)
    val B = NDArray.clip(A, -2f, 2f)
    val B1 = B.toArray
    require(B1.forall { x => x >= -2f && x <= 2f })
  }

  /** Checks (3x4)·(4x5) matrix multiplication against a reference product. */
  def testDot(): Unit = {
    val a = Random.uniform(-3f, 3f, Shape(3, 4))
    val b = Random.uniform(-3f, 3f, Shape(4, 5))
    val c = NDArray.dot(a, b)
    val A = a.toArray.grouped(4).toArray
    val B = b.toArray.grouped(5).toArray
    // Reference product: each row of A scales the rows of B, which are summed.
    val C = (Array[Array[Float]]() /: A)((acc, row) => acc :+ row.zip(B).map(z =>
      z._2.map(_ * z._1)).reduceLeft(_.zip(_).map(x => x._1 + x._2))).flatten
    require(reldiff(c.toArray, C) < 1e-5f)
    a.dispose()
    b.dispose()
    c.dispose()
  }

  /** Checks onehotEncode writes exactly one 1.0 per row at the given index. */
  def testNDArrayOnehot(): Unit = {
    val shape = Shape(100, 20)
    var npy = (0 until shape.product).toArray.map(_.toFloat)
    val arr = NDArray.empty(shape)
    arr.set(npy)
    val nRepeat = 3
    for (repeat <- 0 until nRepeat) {
      val indices = (0 until shape(0)).map(i => scala.util.Random.nextInt(shape(1)))
      // Build the expected result: all zeros except a single 1.0 per row.
      npy = npy.map(i => 0f)
      for (i <- 0 until indices.length) npy(i * shape(1) + indices(i)) = 1f
      val ind = NDArray.empty(shape(0))
      ind.set(indices.toArray.map(_.toFloat))
      NDArray.onehotEncode(ind, arr)
      require(arr.toArray.zip(npy).map(x => x._1 - x._2).sum == 0f)
      ind.dispose()
    }
    arr.dispose()
  }

  /** Parses CLI options, turns the profiler on, runs every routine above,
    * then stops the profiler so the trace file is finalized. */
  def main(args: Array[String]): Unit = {
    val eray = new ProfilerNDArray
    val parser: CmdLineParser = new CmdLineParser(eray)
    try {
      parser.parseArgument(args.toList.asJava)
      val path = s"${eray.outputPath}${File.separator}${eray.profilerName}"
      Profiler.profilerSetConfig(mode = eray.profilerMode, fileName = path)
      logger.info(s"profile file save to $path")
      Profiler.profilerSetState("run")
      testBroadcast()
      testNDArraySaveload()
      testNDArrayCopy()
      testNDArrayNegate()
      testNDArrayScalar()
      testClip()
      testDot()
      testNDArrayOnehot()
      Profiler.profilerSetState("stop")
    } catch {
      case ex: Exception => {
        logger.error(ex.getMessage, ex)
        parser.printUsage(System.err)
        sys.exit(1)
      }
    }
  }
}
/** Command line options (parsed by args4j) for the NDArray profiling example. */
class ProfilerNDArray {
  // Profiler mode passed to Profiler.profilerSetConfig; defaults to "all".
  @Option(name = "--profiler-mode", usage = "the profiler mode, can be \\"symbolic\\" or \\"all\\".")
  private val profilerMode: String = "all"
  // Directory the profile trace file is written into.
  @Option(name = "--output-path", usage = "the profile file output directory.")
  private val outputPath: String = "."
  // File name of the generated JSON trace.
  @Option(name = "--profile-filename", usage = "the profile file name.")
  private val profilerName: String = "profile_ndarray.json"
}
| weleen/mxnet | scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerNDArray.scala | Scala | apache-2.0 | 8,287 |
package com.datawizards.dqm.filter
import java.sql.Date
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
/** Filtering strategy that narrows partitioned data down to the single
  * partition for the processing date, using the year/month/day columns. */
case object FilterByYearMonthDayColumns extends FilterByProcessingDateStrategy {
  override def filter(input: DataFrame, processingDate: Date): DataFrame = {
    val date = processingDate.toLocalDate
    // One equality predicate per partition column; all three must hold.
    val matchesProcessingDate =
      (col("year") === lit(date.getYear)) &&
      (col("month") === lit(date.getMonthValue)) &&
      (col("day") === lit(date.getDayOfMonth))
    input.filter(matchesProcessingDate)
  }
}
| piotr-kalanski/data-quality-monitoring | src/main/scala/com/datawizards/dqm/filter/FilterByYearMonthDayColumns.scala | Scala | apache-2.0 | 569 |
package com.tuvistavie.scalog.engine
/** Thrown by the engine when execution fails; carries a human-readable message. */
class ExecutionException(msg: String) extends RuntimeException(msg)
| tuvistavie/scalog | src/main/scala/com/tuvistavie/scalog/engine/Exceptions.scala | Scala | mit | 106 |
/**
* Copyright (C) 2012-2013 Kaj Magnus Lindberg (born 1979)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package controllers
import actions.ApiActions.PostJsonAction
import com.debiki.core._
import com.debiki.core.Prelude._
import com.debiki.core.{PostActionPayload => PAP}
import controllers.Utils.OkSafeJson
import debiki._
import debiki.DebikiHttp._
import play.api._
import play.api.mvc.{Action => _, _}
import requests._
/** Handles reply form submissions. Lazily creates pages for embedded discussions
* — such pages aren't created until the very first reply is posted.
*/
object ReplyController extends mvc.Controller {

  /** POST endpoint for submitting a reply: extracts the reply fields from the
    * JSON body, resolves the target page — creating it lazily for embedded
    * comments — saves the reply, and returns the saved post as JSON. */
  def handleReply = PostJsonAction(maxLength = MaxPostSize) { request: JsonPostRequest =>
    val body = request.body
    val pageId = (body \\ "pageId").as[PageId]
    val anyPageUrl = (body \\ "pageUrl").asOpt[String]
    val postIds = (body \\ "postIds").as[Set[PostId]]
    val text = (body \\ "text").as[String]
    val wherePerhapsEmpty = (body \\ "where").asOpt[String]
    // Treat an empty "where" string the same as an absent one.
    val whereOpt = if (wherePerhapsEmpty == Some("")) None else wherePerhapsEmpty
    // Construct a request that concerns the specified page. Create the page
    // lazily if it's supposed to be a discussion embedded on a static HTML page.
    val pageReq = PageRequest.forPageThatExists(request, pageId = pageId) match {
      case Some(req) => req
      case None =>
        val page = tryCreateEmbeddedCommentsPage(request, pageId, anyPageUrl)
          .getOrElse(throwNotFound("Dw2XEG60", s"Page `$pageId' does not exist"))
        PageRequest.forPageThatExists(request, pageId = page.id) getOrDie "DwE77PJE0"
    }
    if (text.isEmpty)
      throwBadReq("DwE85FK03", "Empty post")
    val post = saveReply(pageReq, replyToPostIds = postIds, text, whereOpt)
    // includeUnapproved: the author should see their own still-pending reply.
    val json = ReactJson.postToJson(post, includeUnapproved = true)
    OkSafeJson(json)
  }

  /** Persists a reply targeting `replyToPostIds` and returns the newly created
    * post, looked up in the page parts with the author included. */
  private def saveReply(pageReq: PageRequest[_], replyToPostIds: Set[PostId],
        text: String, whereOpt: Option[String] = None) = {
    if (pageReq.oldPageVersion.isDefined)
      throwBadReq("DwE72XS8", "Can only reply to latest page version")
    // The parent of the reply is the closest common ancestor of all targets.
    val commonAncestorPostId = pageReq.thePageParts.findCommonAncestorPost(replyToPostIds.toSeq)
    val anyParentPostId =
      if (commonAncestorPostId == PageParts.NoId) {
        if (pageReq.thePageRole == PageRole.EmbeddedComments) {
          // There is no page body. Allow new comment threads with no parent post.
          None
        }
        else {
          throwBadReq(
            "DwE260G8", "This is not an embedded discussion; must reply to an existing post")
        }
      }
      else if (pageReq.thePageParts.getPost(commonAncestorPostId).isDefined) {
        Some(commonAncestorPostId)
      }
      else {
        throwBadReq("DwEe8HD36", o"""Cannot reply to common ancestor post `$commonAncestorPostId';
          it does not exist""")
      }
    val approval = AutoApprover.perhapsApprove(pageReq)
    // A reply to a single post is a normal reply; replies to several posts at
    // once are "multireplies" and remember all of their target ids.
    val multireplyPostIds = if (replyToPostIds.size == 1) Set[PostId]() else replyToPostIds
    val rawPostNoId = RawPostAction(id = PageParts.UnassignedId, postId = PageParts.UnassignedId,
      creationDati = pageReq.ctime, userIdData = pageReq.userIdData,
      payload = PAP.CreatePost(
        parentPostId = anyParentPostId, text = text,
        multireplyPostIds = multireplyPostIds, where = whereOpt, approval = approval))
    val (pageWithNewPost, List(rawPostWithId: RawPostAction[PAP.CreatePost])) =
      pageReq.dao.savePageActionsGenNotfs(pageReq, rawPostNoId::Nil)
    val partsInclAuthor = pageWithNewPost.parts ++ pageReq.anyMeAsPeople
    partsInclAuthor.thePost(rawPostWithId.id)
  }

  /** Creates the lazily-initialized page for an embedded comments discussion,
    * or returns None if the embedding URL doesn't belong to this site's
    * configured embedding site. Throws if no embedding page URL was given. */
  private def tryCreateEmbeddedCommentsPage(
        request: DebikiRequest[_], pageId: PageId, anyPageUrl: Option[String]): Option[Page] = {
    if (anyPageUrl.isEmpty)
      throwBadReq("Cannot create embedded page: embedding page URL unknown")
    val site = request.dao.loadSite()
    val shallCreateEmbeddedTopic = EmbeddedTopicsController.isUrlFromEmbeddingUrl(
      anyPageUrl.get, site.embeddingSiteUrl)
    if (!shallCreateEmbeddedTopic)
      return None
    val topicPagePath = PagePath(
      request.siteId,
      folder = "/",
      pageId = Some(pageId),
      showId = true,
      pageSlug = "")
    val pageToCreate = Page.newPage(
      PageRole.EmbeddedComments,
      topicPagePath,
      PageParts(pageId),
      publishDirectly = true,
      author = SystemUser.User,
      url = anyPageUrl)
    val newPage = request.dao.createPage(pageToCreate)
    Some(newPage)
  }
}
| debiki/debiki-server-old | app/controllers/ReplyController.scala | Scala | agpl-3.0 | 5,196 |
package com.softwaremill.react.kafka
import java.util.UUID
import akka.actor.{ActorSystem, Props}
import akka.pattern.ask
import akka.stream.actor.{ActorSubscriber, ActorSubscriberMessage, WatermarkRequestStrategy}
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
/** Integration test: a publisher/subscriber pair built by ReactiveKafka must
  * round-trip messages through a Kafka topic (expects Kafka on localhost:9092
  * and ZooKeeper on localhost:2181). */
class ReactiveKafkaIntegrationSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender with WordSpecLike
  with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("ReactiveKafkaIntegrationSpec"))

  // Fresh random topic per run so data from earlier runs can't leak in.
  val topic = UUID.randomUUID().toString
  val group = "group"
  implicit val timeout = Timeout(1 second)

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  "Reactive kafka streams" must {
    "combine well" in {
      // given: a reactive publisher (consumer side) and subscriber (producer
      // side) for the same topic, plus a test subscriber collecting elements.
      val kafka = new ReactiveKafka("localhost:9092", "localhost:2181")
      val publisher = kafka.consume(topic, group)(system)
      val kafkaSubscriber = kafka.publish(topic, group)(system)
      val subscriberActor = system.actorOf(Props(new ReactiveTestSubscriber))
      val testSubscriber = ActorSubscriber[String](subscriberActor)
      publisher.subscribe(testSubscriber)
      // when
      kafkaSubscriber.onNext("one")
      kafkaSubscriber.onNext("two")
      // then: poll until both messages arrived, in order.
      awaitCond {
        val collectedStrings = Await.result(subscriberActor ? "get elements", atMost = 1 second)
        collectedStrings == List("one", "two")
      }
    }
  }
}
/** Test subscriber that buffers every received element and replies with the
  * buffer when asked via the "get elements" message. */
class ReactiveTestSubscriber extends ActorSubscriber {
  protected def requestStrategy = WatermarkRequestStrategy(10)

  // Elements received so far; only touched from within the actor.
  var elements: Vector[String] = Vector.empty

  def receive = {
    case ActorSubscriberMessage.OnNext(element) => elements = elements :+ element.asInstanceOf[String]
    case "get elements" => sender ! elements
  }
}
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import java.util.TimeZone
import java.util.regex.Pattern
/*
* All the Globification logic is encoded in this one
* class. It has a list of child ranges that share boundaries
* with the current range and are completely contained within
* current range. This children must be ordered from largest
* to smallest in size.
*/
class BaseGlobifier(
    dur: Duration,
    val sym: String,
    pattern: String,
    tz: TimeZone,
    child: Option[BaseGlobifier]
  ) extends java.io.Serializable {

  // result <= rd
  private def greatestLowerBound(rd: RichDate) = dur.floorOf(rd)

  // rd <= result
  private def leastUpperBound(rd: RichDate): RichDate =
    greatestLowerBound(rd) + dur

  // Formats a date using this level's full pattern.
  def format(rd: RichDate) = rd.format(pattern)(tz)

  // Generate a lazy list of all children
  final def children: Stream[BaseGlobifier] = child match {
    case Some(c) => Stream.cons(c, c.children)
    case None => Stream.empty
  }

  // Formats rd with every descendant level's symbol replaced by "*", i.e. a
  // glob that matches one whole block at this level's duration.
  final def asteriskChildren(rd: RichDate): String = {
    val childStarPattern = children.foldLeft(pattern) { (this_pat, child) =>
      this_pat.replaceAll(Pattern.quote(child.sym), "*")
    }
    rd.format(childStarPattern)(tz)
  }

  // Handles the case of zero interior boundaries
  // with potential boundaries only at the end points.
  private def simpleCase(dr: DateRange): List[String] = {
    val sstr = format(dr.start)
    val estr = format(dr.end)
    if (dr.end < dr.start) {
      Nil
    } else {
      child match {
        case None =>
          //There is only one block:
          assert(sstr == estr, "Malformed hierarchy" + sstr + " != " + estr)
          List(sstr)
        case Some(c) =>
          /*
           * Two cases: we should asterisk our children, or we need
           * to recurse. If we fill this entire range, just asterisk,
           */
          val bottom = children.last
          val fillsright = format(leastUpperBound(dr.end)) ==
            format(bottom.leastUpperBound(dr.end))
          val fillsleft = format(greatestLowerBound(dr.start)) ==
            format(bottom.greatestLowerBound(dr.start))
          if (fillsright && fillsleft) {
            List(asteriskChildren(dr.start))
          } else {
            c.globify(dr)
          }
      }
    }
  }

  /** Returns a list of glob strings that together cover `dr` at this level of
    * the time hierarchy, recursing into child levels at partial boundaries. */
  def globify(dr: DateRange): List[String] = {
    /* We know:
     * start <= end : by assumption
     * mid1 - start < delta : mid1 is least upper bound
     * end - mid2 < delta : mid2 is greatest lower bound
     * mid1 = mid2 + n*delta : both on the boundary.
     * if mid1 <= mid2, then we contain a boundary point,
     * else we do not.
     */
    val mid1 = leastUpperBound(dr.start)
    val mid2 = greatestLowerBound(dr.end)
    //Imprecise patterns may not need to drill down, let's see if we can stop early:
    val sstr = format(dr.start)
    val estr = format(dr.end)
    if (sstr == estr) {
      List(sstr)
    } else if (dr.end < dr.start) {
      //This is nonsense:
      Nil
    } else if (mid2 < mid1) {
      //We do not contain a boundary point:
      simpleCase(dr)
    } // otherwise we contain one or more than one boundary points
    else if (mid1 == mid2) {
      //we contain exactly one boundary point:
      simpleCase(DateRange(dr.start, mid1 - Millisecs(1))) ++
        simpleCase(DateRange(mid1, dr.end))
    } else {
      //We contain 2 or more boundary points:
      // [start <= mid1 < mid2 <= end]
      // First check to see if we even need to check our children:
      simpleCase(DateRange(dr.start, mid1 - Millisecs(1))) ++
        (asteriskChildren(mid1) ::
          globify(DateRange(mid1 + dur, dr.end)))
    }
  }
}
// Innermost level of the hierarchy: hours ("%tH"); has no child globifier.
case class HourGlob(pat: String)(implicit tz: TimeZone)
  extends BaseGlobifier(Hours(1), "%1$tH", pat, tz, None)

// Days ("%td") contain hours.
case class DayGlob(pat: String)(implicit tz: TimeZone)
  extends BaseGlobifier(Days(1)(tz), "%1$td", pat, tz, Some(HourGlob(pat)))

// Months ("%tm") contain days.
case class MonthGlob(pat: String)(implicit tz: TimeZone)
  extends BaseGlobifier(Months(1)(tz), "%1$tm", pat, tz, Some(DayGlob(pat)))

/*
 * This is the outermost globifier and should generally be used to globify
 */
case class Globifier(pat: String)(implicit tz: TimeZone)
  extends BaseGlobifier(Years(1)(tz), "%1$tY", pat, tz, Some(MonthGlob(pat)))
  with java.io.Serializable
| twitter/scalding | scalding-date/src/main/scala/com/twitter/scalding/Globifier.scala | Scala | apache-2.0 | 4,830 |
/*
* Copyright 2010-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package util
import net.liftweb.common._
import scala.xml._
import java.util.{ResourceBundle, Enumeration, Locale}
/**
* Converts a NodeSeq of a particular format into
* a ResourceBundle. Basically, each of the second-level
* nodes that contain the attribute "name" the name becomes an
* entry in the resulting resource bundle. It is possible
* to localize each of the entries with the lang and country
* attributes which will be compared against the incoming
* Locale. If the default attribute is true, then that entry
* is used if no others match. Note that language is weighted
* more heavily than country.<br/><br/>
* If the node
* is a Text or PCData node, it will be returned as a String.
* Otherwise, it will be returned as a NodeSeq.
*/
object BundleBuilder {

  // Matches a NodeSeq consisting of exactly one text-like (Atom) node,
  // extracting its string content.
  private object IsText {
    def unapply(in: NodeSeq): Option[String] = in.toList match {
      case (x: Atom[_]) :: Nil => Some(x.text)
      case _ => None
    }
  }

  // Localization metadata parsed from one entry element's attributes.
  final private case class EntryInfo(name: String, lang: Option[String], country: Option[String], default: Boolean)

  /**
   * Converts the nodes into a ResourceBundle localized for `loc`: for each
   * named entry, picks the variant whose lang/country attributes best match
   * the locale (language outweighs country, which outweighs the default
   * flag). Returns None if `nodes` contains no element to convert.
   */
  def convert(nodes: NodeSeq, loc: Locale): Box[ResourceBundle] = {
    val country = Some(loc.getCountry()).filter(_.length > 0)
    val lang = Some(loc.getLanguage()).filter(_.length > 0)
    val vals: List[ResourceBundle] =
      nodes.toList.flatMap {
        case e: Elem => {
          // Every child element with a "name" attribute becomes a candidate
          // entry, paired with its localization info and content.
          val all: List[(EntryInfo, NodeSeq)] =
            e.child.toList.flatMap {
              case e: Elem => {
                e.attribute("name").toList.
                  map(attr => EntryInfo(attr.text,
                    e.attribute("lang").map(_.text),
                    e.attribute("country").map(_.text),
                    e.attribute("default").map(_.text).
                      flatMap(Helpers.asBoolean) getOrElse false) -> (e.child: NodeSeq))
              }
              case _ => Nil
            }
          // Group all variants of each entry by entry name.
          val map = all.foldLeft[Map[String, List[(EntryInfo, NodeSeq)]]](Map()) {
            case (map, pair @ (info, ns)) =>
              map + (info.name -> (pair :: map.getOrElse(info.name, Nil)))
          }
          // Score a variant: matching language (4) > matching country (2) >
          // default flag (1); weights make language dominate.
          def points(i: EntryInfo): Int = {
            (if (i.lang == lang) 4 else 0) +
            (if (i.country == country) 2 else 0) +
            (if (i.default) 1 else 0)
          }
          // Pick the highest-scoring variant; on a tie prefer the default one.
          def choose(lst: List[(EntryInfo, NodeSeq)]): NodeSeq =
            lst.reduceLeft{
              (a, b) => {
                val ap = points(a._1)
                val bp = points(b._1)
                if (ap > bp) {
                  a
                } else if (bp > ap) {
                  b
                } else if (a._1.default) a
                else b
              }
            }._2
          val res: Map[String, NodeSeq] = Map(map.map {
            case (name, lst) => name -> choose(lst)
          }.toSeq :_*)
          List(new ResourceBundle {
            def getKeys(): Enumeration[String] = {
              val it = res.keys.iterator
              new Enumeration[String] {
                def hasMoreElements() = it.hasNext
                def nextElement() = it.next
              }
            }
            // Pure text entries are exposed as Strings; everything else
            // stays a NodeSeq, per the class-level contract.
            def handleGetObject(key: String): Object =
              res.get(key) match {
                case Some(IsText(str)) => str
                case Some(ns) => ns
                case _ => null
              }
          })
        }
        case _ => Nil
      }
    vals.headOption
  }
}
| lzpfmh/framework-2 | core/util/src/main/scala/net/liftweb/util/BundleBuilder.scala | Scala | apache-2.0 | 4,199 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.recommendation
import java.{util => ju}
import java.io.IOException
import java.util.Locale
import scala.collection.mutable
import scala.reflect.ClassTag
import scala.util.{Sorting, Try}
import scala.util.hashing.byteswap64
import com.github.fommil.netlib.BLAS.{getInstance => blas}
import org.apache.hadoop.fs.Path
import org.json4s.DefaultFormats
import org.json4s.JsonDSL._
import org.apache.spark.{Dependency, Partitioner, ShuffleDependency, SparkContext}
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.linalg.BLAS
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util._
import org.apache.spark.mllib.linalg.CholeskyDecomposition
import org.apache.spark.mllib.optimization.NNLS
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.{BoundedPriorityQueue, Utils}
import org.apache.spark.util.collection.{OpenHashMap, OpenHashSet, SortDataFormat, Sorter}
import org.apache.spark.util.random.XORShiftRandom
/**
* Common params for ALS and ALSModel.
*/
private[recommendation] trait ALSModelParams extends Params with HasPredictionCol {

  /**
   * Param for the column name for user ids. Ids must be integers. Other
   * numeric types are supported for this column, but will be cast to integers as long as they
   * fall within the integer value range.
   * Default: "user"
   * @group param
   */
  val userCol = new Param[String](this, "userCol", "column name for user ids. Ids must be within " +
    "the integer value range.")

  /** @group getParam */
  def getUserCol: String = $(userCol)

  /**
   * Param for the column name for item ids. Ids must be integers. Other
   * numeric types are supported for this column, but will be cast to integers as long as they
   * fall within the integer value range.
   * Default: "item"
   * @group param
   */
  val itemCol = new Param[String](this, "itemCol", "column name for item ids. Ids must be within " +
    "the integer value range.")

  /** @group getParam */
  def getItemCol: String = $(itemCol)

  /**
   * Attempts to safely cast a user/item id to an Int. Throws an exception if the value is
   * out of integer range or contains a fractional part.
   */
  protected[recommendation] val checkedCast = udf { (n: Any) =>
    n match {
      case v: Int => v // Avoid unnecessary casting
      case v: Number =>
        val intV = v.intValue
        // Checks if number within Int range and has no fractional part.
        // (Double represents every Int-range integer exactly, so the
        // comparison is precise for all accepted values.)
        if (v.doubleValue == intV) {
          intV
        } else {
          throw new IllegalArgumentException(s"ALS only supports values in Integer range " +
            s"and without fractional part for columns ${$(userCol)} and ${$(itemCol)}. " +
            s"Value $n was either out of Integer range or contained a fractional part that " +
            s"could not be converted.")
        }
      case _ => throw new IllegalArgumentException(s"ALS only supports values in Integer range " +
        s"for columns ${$(userCol)} and ${$(itemCol)}. Value $n was not numeric.")
    }
  }

  /**
   * Param for strategy for dealing with unknown or new users/items at prediction time.
   * This may be useful in cross-validation or production scenarios, for handling user/item ids
   * the model has not seen in the training data.
   * Supported values:
   * - "nan": predicted value for unknown ids will be NaN.
   * - "drop": rows in the input DataFrame containing unknown ids will be dropped from
   * the output DataFrame containing predictions.
   * Default: "nan".
   * @group expertParam
   */
  val coldStartStrategy = new Param[String](this, "coldStartStrategy",
    "strategy for dealing with unknown or new users/items at prediction time. This may be " +
    "useful in cross-validation or production scenarios, for handling user/item ids the model " +
    "has not seen in the training data. Supported values: " +
    s"${ALSModel.supportedColdStartStrategies.mkString(",")}.",
    (s: String) =>
      ALSModel.supportedColdStartStrategies.contains(s.toLowerCase(Locale.ROOT)))

  /** @group expertGetParam */
  def getColdStartStrategy: String = $(coldStartStrategy).toLowerCase(Locale.ROOT)
}
/**
* Common params for ALS.
*/
private[recommendation] trait ALSParams extends ALSModelParams with HasMaxIter with HasRegParam
  with HasPredictionCol with HasCheckpointInterval with HasSeed {

  /**
   * Param for rank of the matrix factorization (positive).
   * Default: 10
   * @group param
   */
  val rank = new IntParam(this, "rank", "rank of the factorization", ParamValidators.gtEq(1))

  /** @group getParam */
  def getRank: Int = $(rank)

  /**
   * Param for number of user blocks (positive).
   * Default: 10
   * @group param
   */
  val numUserBlocks = new IntParam(this, "numUserBlocks", "number of user blocks",
    ParamValidators.gtEq(1))

  /** @group getParam */
  def getNumUserBlocks: Int = $(numUserBlocks)

  /**
   * Param for number of item blocks (positive).
   * Default: 10
   * @group param
   */
  val numItemBlocks = new IntParam(this, "numItemBlocks", "number of item blocks",
    ParamValidators.gtEq(1))

  /** @group getParam */
  def getNumItemBlocks: Int = $(numItemBlocks)

  /**
   * Param to decide whether to use implicit preference.
   * Default: false
   * @group param
   */
  val implicitPrefs = new BooleanParam(this, "implicitPrefs", "whether to use implicit preference")

  /** @group getParam */
  def getImplicitPrefs: Boolean = $(implicitPrefs)

  /**
   * Param for the alpha parameter in the implicit preference formulation (nonnegative).
   * Default: 1.0
   * @group param
   */
  val alpha = new DoubleParam(this, "alpha", "alpha for implicit preference",
    ParamValidators.gtEq(0))

  /** @group getParam */
  def getAlpha: Double = $(alpha)

  /**
   * Param for the column name for ratings.
   * Default: "rating"
   * @group param
   */
  val ratingCol = new Param[String](this, "ratingCol", "column name for ratings")

  /** @group getParam */
  def getRatingCol: String = $(ratingCol)

  /**
   * Param for whether to apply nonnegativity constraints.
   * Default: false
   * @group param
   */
  val nonnegative = new BooleanParam(
    this, "nonnegative", "whether to use nonnegative constraint for least squares")

  /** @group getParam */
  def getNonnegative: Boolean = $(nonnegative)

  /**
   * Param for StorageLevel for intermediate datasets. Pass in a string representation of
   * `StorageLevel`. Cannot be "NONE".
   * Default: "MEMORY_AND_DISK".
   *
   * @group expertParam
   */
  val intermediateStorageLevel = new Param[String](this, "intermediateStorageLevel",
    "StorageLevel for intermediate datasets. Cannot be 'NONE'.",
    (s: String) => Try(StorageLevel.fromString(s)).isSuccess && s != "NONE")

  /** @group expertGetParam */
  def getIntermediateStorageLevel: String = $(intermediateStorageLevel)

  /**
   * Param for StorageLevel for ALS model factors. Pass in a string representation of
   * `StorageLevel`.
   * Default: "MEMORY_AND_DISK".
   *
   * @group expertParam
   */
  val finalStorageLevel = new Param[String](this, "finalStorageLevel",
    "StorageLevel for ALS model factors.",
    (s: String) => Try(StorageLevel.fromString(s)).isSuccess)

  /** @group expertGetParam */
  def getFinalStorageLevel: String = $(finalStorageLevel)

  setDefault(rank -> 10, maxIter -> 10, regParam -> 0.1, numUserBlocks -> 10, numItemBlocks -> 10,
    implicitPrefs -> false, alpha -> 1.0, userCol -> "user", itemCol -> "item",
    ratingCol -> "rating", nonnegative -> false, checkpointInterval -> 10,
    intermediateStorageLevel -> "MEMORY_AND_DISK", finalStorageLevel -> "MEMORY_AND_DISK",
    coldStartStrategy -> "nan")

  /**
   * Validates and transforms the input schema.
   *
   * @param schema input schema
   * @return output schema with the prediction column appended as FloatType
   */
  protected def validateAndTransformSchema(schema: StructType): StructType = {
    // user and item will be cast to Int
    SchemaUtils.checkNumericType(schema, $(userCol))
    SchemaUtils.checkNumericType(schema, $(itemCol))
    // rating will be cast to Float
    // NOTE(review): ALS.fit treats an empty ratingCol ("") as "all ratings are 1.0", but this
    // check requires the column to exist and be numeric, so fit with ratingCol = "" would fail
    // here — confirm whether empty ratingCol should be skipped.
    SchemaUtils.checkNumericType(schema, $(ratingCol))
    SchemaUtils.appendColumn(schema, $(predictionCol), FloatType)
  }
}
/**
* Model fitted by ALS.
*
* @param rank rank of the matrix factorization model
* @param userFactors a DataFrame that stores user factors in two columns: `id` and `features`
* @param itemFactors a DataFrame that stores item factors in two columns: `id` and `features`
*/
@Since("1.3.0")
class ALSModel private[ml] (
    @Since("1.4.0") override val uid: String,
    @Since("1.4.0") val rank: Int,
    @transient val userFactors: DataFrame,
    @transient val itemFactors: DataFrame)
  extends Model[ALSModel] with ALSModelParams with MLWritable {

  /** @group setParam */
  @Since("1.4.0")
  def setUserCol(value: String): this.type = set(userCol, value)

  /** @group setParam */
  @Since("1.4.0")
  def setItemCol(value: String): this.type = set(itemCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group expertSetParam */
  @Since("2.2.0")
  def setColdStartStrategy(value: String): this.type = set(coldStartStrategy, value)

  // Dot product of a user factor and an item factor; NaN when either side is null,
  // which happens for ids absent from the factor DataFrames (see the left joins below).
  private val predict = udf { (featuresA: Seq[Float], featuresB: Seq[Float]) =>
    if (featuresA != null && featuresB != null) {
      // TODO(SPARK-19759): try dot-producting on Seqs or another non-converted type for
      // potential optimization.
      blas.sdot(rank, featuresA.toArray, 1, featuresB.toArray, 1)
    } else {
      Float.NaN
    }
  }

  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    transformSchema(dataset.schema)
    // create a new column named map(predictionCol) by running the predict UDF.
    // Left joins keep rows whose user/item id has no learned factor; those rows get a null
    // features column and thus a NaN prediction, handled by the cold-start strategy below.
    val predictions = dataset
      .join(userFactors,
        checkedCast(dataset($(userCol))) === userFactors("id"), "left")
      .join(itemFactors,
        checkedCast(dataset($(itemCol))) === itemFactors("id"), "left")
      .select(dataset("*"),
        predict(userFactors("features"), itemFactors("features")).as($(predictionCol)))
    getColdStartStrategy match {
      case ALSModel.Drop =>
        // Drop rows whose prediction is NaN (unknown user or item).
        predictions.na.drop("all", Seq($(predictionCol)))
      case ALSModel.NaN =>
        predictions
    }
  }

  @Since("1.3.0")
  override def transformSchema(schema: StructType): StructType = {
    // user and item will be cast to Int
    SchemaUtils.checkNumericType(schema, $(userCol))
    SchemaUtils.checkNumericType(schema, $(itemCol))
    SchemaUtils.appendColumn(schema, $(predictionCol), FloatType)
  }

  @Since("1.5.0")
  override def copy(extra: ParamMap): ALSModel = {
    val copied = new ALSModel(uid, rank, userFactors, itemFactors)
    copyValues(copied, extra).setParent(parent)
  }

  @Since("1.6.0")
  override def write: MLWriter = new ALSModel.ALSModelWriter(this)

  /**
   * Returns top `numItems` items recommended for each user, for all users.
   * @param numItems max number of recommendations for each user
   * @return a DataFrame of (userCol: Int, recommendations), where recommendations are
   *         stored as an array of (itemCol: Int, rating: Float) Rows.
   */
  @Since("2.2.0")
  def recommendForAllUsers(numItems: Int): DataFrame = {
    recommendForAll(userFactors, itemFactors, $(userCol), $(itemCol), numItems)
  }

  /**
   * Returns top `numUsers` users recommended for each item, for all items.
   * @param numUsers max number of recommendations for each item
   * @return a DataFrame of (itemCol: Int, recommendations), where recommendations are
   *         stored as an array of (userCol: Int, rating: Float) Rows.
   */
  @Since("2.2.0")
  def recommendForAllItems(numUsers: Int): DataFrame = {
    recommendForAll(itemFactors, userFactors, $(itemCol), $(userCol), numUsers)
  }

  /**
   * Makes recommendations for all users (or items).
   *
   * Note: the previous approach used for computing top-k recommendations
   * used a cross-join followed by predicting a score for each row of the joined dataset.
   * However, this results in exploding the size of intermediate data. While Spark SQL makes it
   * relatively efficient, the approach implemented here is significantly more efficient.
   *
   * This approach groups factors into blocks and computes the top-k elements per block,
   * using dot product and an efficient [[BoundedPriorityQueue]] (instead of gemm).
   * It then computes the global top-k by aggregating the per block top-k elements with
   * a [[TopByKeyAggregator]]. This significantly reduces the size of intermediate and shuffle data.
   * This is the DataFrame equivalent to the approach used in
   * [[org.apache.spark.mllib.recommendation.MatrixFactorizationModel]].
   *
   * @param srcFactors src factors for which to generate recommendations
   * @param dstFactors dst factors used to make recommendations
   * @param srcOutputColumn name of the column for the source ID in the output DataFrame
   * @param dstOutputColumn name of the column for the destination ID in the output DataFrame
   * @param num max number of recommendations for each record
   * @return a DataFrame of (srcOutputColumn: Int, recommendations), where recommendations are
   *         stored as an array of (dstOutputColumn: Int, rating: Float) Rows.
   */
  private def recommendForAll(
      srcFactors: DataFrame,
      dstFactors: DataFrame,
      srcOutputColumn: String,
      dstOutputColumn: String,
      num: Int): DataFrame = {
    import srcFactors.sparkSession.implicits._

    val srcFactorsBlocked = blockify(srcFactors.as[(Int, Array[Float])])
    val dstFactorsBlocked = blockify(dstFactors.as[(Int, Array[Float])])
    val ratings = srcFactorsBlocked.crossJoin(dstFactorsBlocked)
      .as[(Seq[(Int, Array[Float])], Seq[(Int, Array[Float])])]
      .flatMap { case (srcIter, dstIter) =>
        val m = srcIter.size
        // Each src contributes exactly min(|dst|, num) entries, so `output` is filled exactly.
        val n = math.min(dstIter.size, num)
        val output = new Array[(Int, Int, Float)](m * n)
        var i = 0
        // Bounded queue ordered by score keeps only the per-block top `num` per src id.
        val pq = new BoundedPriorityQueue[(Int, Float)](num)(Ordering.by(_._2))
        srcIter.foreach { case (srcId, srcFactor) =>
          dstIter.foreach { case (dstId, dstFactor) =>
            // We use F2jBLAS which is faster than a call to native BLAS for vector dot product
            val score = BLAS.f2jBLAS.sdot(rank, srcFactor, 1, dstFactor, 1)
            pq += dstId -> score
          }
          pq.foreach { case (dstId, score) =>
            output(i) = (srcId, dstId, score)
            i += 1
          }
          pq.clear()
        }
        output.toSeq
      }
    // We'll force the IDs to be Int. Unfortunately this converts IDs to Int in the output.
    val topKAggregator = new TopByKeyAggregator[Int, Int, Float](num, Ordering.by(_._2))
    val recs = ratings.as[(Int, Int, Float)].groupByKey(_._1).agg(topKAggregator.toColumn)
      .toDF("id", "recommendations")

    val arrayType = ArrayType(
      new StructType()
        .add(dstOutputColumn, IntegerType)
        .add("rating", FloatType)
    )
    recs.select($"id".as(srcOutputColumn), $"recommendations".cast(arrayType))
  }

  /**
   * Blockifies factors to improve the efficiency of cross join
   * TODO: SPARK-20443 - expose blockSize as a param?
   */
  private def blockify(
      factors: Dataset[(Int, Array[Float])],
      blockSize: Int = 4096): Dataset[Seq[(Int, Array[Float])]] = {
    import factors.sparkSession.implicits._
    factors.mapPartitions(_.grouped(blockSize))
  }
}
@Since("1.6.0")
object ALSModel extends MLReadable[ALSModel] {

  // Supported cold-start strategies (see ALSModelParams.coldStartStrategy).
  private val NaN = "nan"
  private val Drop = "drop"
  private[recommendation] final val supportedColdStartStrategies = Array(NaN, Drop)

  @Since("1.6.0")
  override def read: MLReader[ALSModel] = new ALSModelReader

  @Since("1.6.0")
  override def load(path: String): ALSModel = super.load(path)

  /** Persists metadata (including rank) plus the two factor DataFrames as parquet. */
  private[ALSModel] class ALSModelWriter(instance: ALSModel) extends MLWriter {

    override protected def saveImpl(path: String): Unit = {
      // "rank" -> rank becomes a JSON field (json4s DSL conversion) merged into the metadata.
      val extraMetadata = "rank" -> instance.rank
      DefaultParamsWriter.saveMetadata(instance, path, sc, Some(extraMetadata))
      val userPath = new Path(path, "userFactors").toString
      instance.userFactors.write.format("parquet").save(userPath)
      val itemPath = new Path(path, "itemFactors").toString
      instance.itemFactors.write.format("parquet").save(itemPath)
    }
  }

  /** Reads back what ALSModelWriter wrote: metadata, rank, and the factor DataFrames. */
  private class ALSModelReader extends MLReader[ALSModel] {

    /** Checked against metadata when loading model */
    private val className = classOf[ALSModel].getName

    override def load(path: String): ALSModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      implicit val format = DefaultFormats
      val rank = (metadata.metadata \\ "rank").extract[Int]
      val userPath = new Path(path, "userFactors").toString
      val userFactors = sparkSession.read.format("parquet").load(userPath)
      val itemPath = new Path(path, "itemFactors").toString
      val itemFactors = sparkSession.read.format("parquet").load(itemPath)

      val model = new ALSModel(metadata.uid, rank, userFactors, itemFactors)

      DefaultParamsReader.getAndSetParams(model, metadata)
      model
    }
  }
}
/**
* Alternating Least Squares (ALS) matrix factorization.
*
* ALS attempts to estimate the ratings matrix `R` as the product of two lower-rank matrices,
* `X` and `Y`, i.e. `X * Yt = R`. Typically these approximations are called 'factor' matrices.
* The general approach is iterative. During each iteration, one of the factor matrices is held
* constant, while the other is solved for using least squares. The newly-solved factor matrix is
* then held constant while solving for the other factor matrix.
*
* This is a blocked implementation of the ALS factorization algorithm that groups the two sets
* of factors (referred to as "users" and "products") into blocks and reduces communication by only
* sending one copy of each user vector to each product block on each iteration, and only for the
* product blocks that need that user's feature vector. This is achieved by pre-computing some
* information about the ratings matrix to determine the "out-links" of each user (which blocks of
* products it will contribute to) and "in-link" information for each product (which of the feature
* vectors it receives from each user block it will depend on). This allows us to send only an
* array of feature vectors between each user block and product block, and have the product block
* find the users' ratings and update the products based on these messages.
*
* For implicit preference data, the algorithm used is based on
* "Collaborative Filtering for Implicit Feedback Datasets", available at
* http://dx.doi.org/10.1109/ICDM.2008.22, adapted for the blocked approach used here.
*
* Essentially instead of finding the low-rank approximations to the rating matrix `R`,
* this finds the approximations for a preference matrix `P` where the elements of `P` are 1 if
* r is greater than 0 and 0 if r is less than or equal to 0. The ratings then act as 'confidence'
* values related to strength of indicated user
* preferences rather than explicit ratings given to items.
*/
@Since("1.3.0")
class ALS(@Since("1.4.0") override val uid: String) extends Estimator[ALSModel] with ALSParams
  with DefaultParamsWritable {

  import org.apache.spark.ml.recommendation.ALS.Rating

  @Since("1.4.0")
  def this() = this(Identifiable.randomUID("als"))

  /** @group setParam */
  @Since("1.3.0")
  def setRank(value: Int): this.type = set(rank, value)

  /** @group setParam */
  @Since("1.3.0")
  def setNumUserBlocks(value: Int): this.type = set(numUserBlocks, value)

  /** @group setParam */
  @Since("1.3.0")
  def setNumItemBlocks(value: Int): this.type = set(numItemBlocks, value)

  /** @group setParam */
  @Since("1.3.0")
  def setImplicitPrefs(value: Boolean): this.type = set(implicitPrefs, value)

  /** @group setParam */
  @Since("1.3.0")
  def setAlpha(value: Double): this.type = set(alpha, value)

  /** @group setParam */
  @Since("1.3.0")
  def setUserCol(value: String): this.type = set(userCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setItemCol(value: String): this.type = set(itemCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setRatingCol(value: String): this.type = set(ratingCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group setParam */
  @Since("1.3.0")
  def setMaxIter(value: Int): this.type = set(maxIter, value)

  /** @group setParam */
  @Since("1.3.0")
  def setRegParam(value: Double): this.type = set(regParam, value)

  /** @group setParam */
  @Since("1.3.0")
  def setNonnegative(value: Boolean): this.type = set(nonnegative, value)

  /** @group setParam */
  @Since("1.4.0")
  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)

  /** @group setParam */
  @Since("1.3.0")
  def setSeed(value: Long): this.type = set(seed, value)

  /** @group expertSetParam */
  @Since("2.0.0")
  def setIntermediateStorageLevel(value: String): this.type = set(intermediateStorageLevel, value)

  /** @group expertSetParam */
  @Since("2.0.0")
  def setFinalStorageLevel(value: String): this.type = set(finalStorageLevel, value)

  /** @group expertSetParam */
  @Since("2.2.0")
  def setColdStartStrategy(value: String): this.type = set(coldStartStrategy, value)

  /**
   * Sets both numUserBlocks and numItemBlocks to the specific value.
   *
   * @group setParam
   */
  @Since("1.3.0")
  def setNumBlocks(value: Int): this.type = {
    setNumUserBlocks(value)
    setNumItemBlocks(value)
    this
  }

  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): ALSModel = {
    transformSchema(dataset.schema)
    import dataset.sparkSession.implicits._

    // An empty ratingCol means "no rating column": every observation gets weight 1.0f.
    val r = if ($(ratingCol) != "") col($(ratingCol)).cast(FloatType) else lit(1.0f)
    val ratings = dataset
      .select(checkedCast(col($(userCol))), checkedCast(col($(itemCol))), r)
      .rdd
      .map { row =>
        Rating(row.getInt(0), row.getInt(1), row.getFloat(2))
      }
    val instr = Instrumentation.create(this, ratings)
    instr.logParams(rank, numUserBlocks, numItemBlocks, implicitPrefs, alpha, userCol,
      itemCol, ratingCol, predictionCol, maxIter, regParam, nonnegative, checkpointInterval,
      seed, intermediateStorageLevel, finalStorageLevel)

    // Delegates the actual factorization to the block-based implementation in object ALS.
    val (userFactors, itemFactors) = ALS.train(ratings, rank = $(rank),
      numUserBlocks = $(numUserBlocks), numItemBlocks = $(numItemBlocks),
      maxIter = $(maxIter), regParam = $(regParam), implicitPrefs = $(implicitPrefs),
      alpha = $(alpha), nonnegative = $(nonnegative),
      intermediateRDDStorageLevel = StorageLevel.fromString($(intermediateStorageLevel)),
      finalRDDStorageLevel = StorageLevel.fromString($(finalStorageLevel)),
      checkpointInterval = $(checkpointInterval), seed = $(seed))
    val userDF = userFactors.toDF("id", "features")
    val itemDF = itemFactors.toDF("id", "features")
    val model = new ALSModel(uid, $(rank), userDF, itemDF).setParent(this)
    instr.logSuccess(model)
    copyValues(model)
  }

  @Since("1.3.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema)
  }

  @Since("1.5.0")
  override def copy(extra: ParamMap): ALS = defaultCopy(extra)
}
/**
* :: DeveloperApi ::
* An implementation of ALS that supports generic ID types, specialized for Int and Long. This is
* exposed as a developer API for users who do need other ID types. But it is not recommended
* because it increases the shuffle size and memory requirement during training. For simplicity,
* users and items must have the same type. The number of distinct users/items should be smaller
* than 2 billion.
*/
@DeveloperApi
object ALS extends DefaultParamsReadable[ALS] with Logging {
/**
* :: DeveloperApi ::
* Rating class for better code readability.
*/
@DeveloperApi
// @specialized avoids boxing of the common Int/Long id types.
case class Rating[@specialized(Int, Long) ID](user: ID, item: ID, rating: Float)
@Since("1.6.0")
override def load(path: String): ALS = super.load(path)
/** Trait for least squares solvers applied to the normal equation. */
private[recommendation] trait LeastSquaresNESolver extends Serializable {
  /**
   * Solves a least squares problem with regularization (possibly with other constraints).
   *
   * @param ne a [[NormalEquation]] holding AtA and Atb for this subproblem
   * @param lambda regularization constant
   * @return the solution vector, in single precision
   */
  def solve(ne: NormalEquation, lambda: Double): Array[Float]
}
/** Cholesky solver for least square problems. */
private[recommendation] class CholeskySolver extends LeastSquaresNESolver {

  /**
   * Solves a least squares problem with L2 regularization:
   *
   *   min norm(A x - b)^2^ + lambda * norm(x)^2^
   *
   * @param ne a [[NormalEquation]] instance that contains AtA, Atb, and n (number of instances)
   * @param lambda regularization constant
   * @return the solution x
   */
  override def solve(ne: NormalEquation, lambda: Double): Array[Float] = {
    val k = ne.k
    // Add the scaled lambda to the diagonal of AtA. AtA is stored in packed upper-triangular
    // form, where the diagonal entries sit at offsets 0, 2, 5, 9, ... — each gap grows by one.
    var diag = 0
    var step = 2
    while (diag < ne.triK) {
      ne.ata(diag) += lambda
      diag += step
      step += 1
    }
    // Solve in place; the solution overwrites ne.atb.
    CholeskyDecomposition.solve(ne.ata, ne.atb)
    // Down-convert the double-precision solution to floats.
    val solution = Array.tabulate(k)(idx => ne.atb(idx).toFloat)
    // Clear the accumulators so the NormalEquation can be reused for the next subproblem.
    ne.reset()
    solution
  }
}
/** NNLS solver. */
private[recommendation] class NNLSSolver extends LeastSquaresNESolver {
  // Rank of the problem; fixed on first use, asserted identical thereafter.
  private var rank: Int = -1
  // Reusable NNLS workspace, allocated once per rank.
  private var workspace: NNLS.Workspace = _
  // Full (unpacked) k-by-k AtA buffer, rebuilt from the packed triangle on each solve.
  private var ata: Array[Double] = _
  private var initialized: Boolean = false

  /** Lazily allocates the workspace and buffers for the given rank (only once). */
  private def initialize(rank: Int): Unit = {
    if (!initialized) {
      this.rank = rank
      workspace = NNLS.createWorkspace(rank)
      ata = new Array[Double](rank * rank)
      initialized = true
    } else {
      // The solver instance is shared across subproblems, so the rank must not change.
      require(this.rank == rank)
    }
  }

  /**
   * Solves a nonnegative least squares problem with L2 regularization:
   *
   *   min_x_  norm(A x - b)^2^ + lambda * n * norm(x)^2^
   *   subject to x >= 0
   */
  override def solve(ne: NormalEquation, lambda: Double): Array[Float] = {
    val rank = ne.k
    initialize(rank)
    fillAtA(ne.ata, lambda)
    val x = NNLS.solve(ata, ne.atb, workspace)
    ne.reset()
    x.map(x => x.toFloat)
  }

  /**
   * Given a triangular matrix in the order of fillXtX above, compute the full symmetric square
   * matrix that it represents, storing it into destMatrix.
   */
  // Fixed: was written with deprecated procedure syntax `def fillAtA(...) { ... }`
  // (deprecated in Scala 2, removed in Scala 3); now an explicit `: Unit =` method.
  private def fillAtA(triAtA: Array[Double], lambda: Double): Unit = {
    var i = 0
    var pos = 0
    var a = 0.0
    while (i < rank) {
      var j = 0
      while (j <= i) {
        a = triAtA(pos)
        // Mirror the packed entry into both halves of the full symmetric matrix.
        ata(i * rank + j) = a
        ata(j * rank + i) = a
        pos += 1
        j += 1
      }
      // Regularize the diagonal.
      ata(i * rank + i) += lambda
      i += 1
    }
  }
}
/**
* Representing a normal equation to solve the following weighted least squares problem:
*
* minimize \\sum,,i,, c,,i,, (a,,i,,^T^ x - d,,i,,)^2^ + lambda * x^T^ x.
*
* Its normal equation is given by
*
* \\sum,,i,, c,,i,, (a,,i,, a,,i,,^T^ x - d,,i,, a,,i,,) + lambda * x = 0.
*
* Distributing and letting b,,i,, = c,,i,, * d,,i,,
*
* \\sum,,i,, c,,i,, a,,i,, a,,i,,^T^ x - b,,i,, a,,i,, + lambda * x = 0.
*/
private[recommendation] class NormalEquation(val k: Int) extends Serializable {

  /** Number of entries in the upper triangular part of a k-by-k matrix. */
  val triK = k * (k + 1) / 2
  /** A^T^ * A, stored in packed upper-triangular form (triK entries) to halve memory. */
  val ata = new Array[Double](triK)
  /** A^T^ * b */
  val atb = new Array[Double](k)

  // Scratch buffer: the current observation converted to double precision for BLAS.
  private val da = new Array[Double](k)
  // BLAS flag selecting the upper-triangular packed representation.
  private val upper = "U"

  /** Copies a float observation into the reusable double scratch buffer `da`. */
  private def copyToDouble(a: Array[Float]): Unit = {
    var i = 0
    while (i < k) {
      da(i) = a(i)
      i += 1
    }
  }

  /**
   * Adds an observation.
   *
   * @param a the observation's feature vector (length k)
   * @param b the (already weighted) target contribution; skipped when exactly 0.0
   * @param c the observation weight c,,i,, (must be nonnegative)
   */
  def add(a: Array[Float], b: Double, c: Double = 1.0): this.type = {
    require(c >= 0.0)
    require(a.length == k)
    copyToDouble(a)
    // Packed symmetric rank-1 update: ata += c * da * da^T.
    blas.dspr(upper, k, c, da, 1, ata)
    if (b != 0.0) {
      // atb += b * da
      blas.daxpy(k, b, da, 1, atb, 1)
    }
    this
  }

  /** Merges another normal equation object. */
  def merge(other: NormalEquation): this.type = {
    require(other.k == k)
    blas.daxpy(ata.length, 1.0, other.ata, 1, ata, 1)
    blas.daxpy(atb.length, 1.0, other.atb, 1, atb, 1)
    this
  }

  /** Resets everything to zero, which should be called after each solve. */
  def reset(): Unit = {
    ju.Arrays.fill(ata, 0.0)
    ju.Arrays.fill(atb, 0.0)
  }
}
/**
* :: DeveloperApi ::
* Implementation of the ALS algorithm.
*
* This implementation of the ALS factorization algorithm partitions the two sets of factors among
* Spark workers so as to reduce network communication by only sending one copy of each factor
* vector to each Spark worker on each iteration, and only if needed. This is achieved by
* precomputing some information about the ratings matrix to determine which users require which
* item factors and vice versa. See the Scaladoc for `InBlock` for a detailed explanation of how
* the precomputation is done.
*
* In addition, since each iteration of calculating the factor matrices depends on the known
* ratings, which are spread across Spark partitions, a naive implementation would incur
* significant network communication overhead between Spark workers, as the ratings RDD would be
* repeatedly shuffled during each iteration. This implementation reduces that overhead by
* performing the shuffling operation up front, precomputing each partition's ratings dependencies
* and duplicating those values to the appropriate workers before starting iterations to solve for
* the factor matrices. See the Scaladoc for `OutBlock` for a detailed explanation of how the
* precomputation is done.
*
* Note that the term "rating block" is a bit of a misnomer, as the ratings are not partitioned by
* contiguous blocks from the ratings matrix but by a hash function on the rating's location in
* the matrix. If it helps you to visualize the partitions, it is easier to think of the term
* "block" as referring to a subset of an RDD containing the ratings rather than a contiguous
* submatrix of the ratings matrix.
*/
@DeveloperApi
def train[ID: ClassTag]( // scalastyle:ignore
    ratings: RDD[Rating[ID]],
    rank: Int = 10,
    numUserBlocks: Int = 10,
    numItemBlocks: Int = 10,
    maxIter: Int = 10,
    regParam: Double = 0.1,
    implicitPrefs: Boolean = false,
    alpha: Double = 1.0,
    nonnegative: Boolean = false,
    intermediateRDDStorageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK,
    finalRDDStorageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK,
    checkpointInterval: Int = 10,
    seed: Long = 0L)(
    implicit ord: Ordering[ID]): (RDD[(ID, Array[Float])], RDD[(ID, Array[Float])]) = {

  require(!ratings.isEmpty(), s"No ratings available from $ratings")
  require(intermediateRDDStorageLevel != StorageLevel.NONE,
    "ALS is not designed to run without persisting intermediate RDDs.")

  val sc = ratings.sparkContext

  // Precompute the rating dependencies of each partition
  val userPart = new ALSPartitioner(numUserBlocks)
  val itemPart = new ALSPartitioner(numItemBlocks)
  val blockRatings = partitionRatings(ratings, userPart, itemPart)
    .persist(intermediateRDDStorageLevel)
  val (userInBlocks, userOutBlocks) =
    makeBlocks("user", blockRatings, userPart, itemPart, intermediateRDDStorageLevel)
  userOutBlocks.count()    // materialize blockRatings and user blocks
  // The same rating blocks keyed the other way around, to build the item-side blocks.
  val swappedBlockRatings = blockRatings.map {
    case ((userBlockId, itemBlockId), RatingBlock(userIds, itemIds, localRatings)) =>
      ((itemBlockId, userBlockId), RatingBlock(itemIds, userIds, localRatings))
  }
  val (itemInBlocks, itemOutBlocks) =
    makeBlocks("item", swappedBlockRatings, itemPart, userPart, intermediateRDDStorageLevel)
  itemOutBlocks.count()    // materialize item blocks

  // Encoders for storing each user/item's partition ID and index within its partition using a
  // single integer; used as an optimization
  val userLocalIndexEncoder = new LocalIndexEncoder(userPart.numPartitions)
  val itemLocalIndexEncoder = new LocalIndexEncoder(itemPart.numPartitions)

  // These are the user and item factor matrices that, once trained, are multiplied together to
  // estimate the rating matrix. The two matrices are stored in RDDs, partitioned by column such
  // that each factor column resides on the same Spark worker as its corresponding user or item.
  val seedGen = new XORShiftRandom(seed)
  var userFactors = initialize(userInBlocks, rank, seedGen.nextLong())
  var itemFactors = initialize(itemInBlocks, rank, seedGen.nextLong())

  val solver = if (nonnegative) new NNLSSolver else new CholeskySolver

  var previousCheckpointFile: Option[String] = None
  // Checkpoint only when a checkpoint dir is configured, the interval is enabled (!= -1),
  // and the current iteration is a multiple of the interval.
  val shouldCheckpoint: Int => Boolean = (iter) =>
    sc.checkpointDir.isDefined && checkpointInterval != -1 && (iter % checkpointInterval == 0)
  // Best-effort deletion of the previous checkpoint so only the latest is kept on disk.
  val deletePreviousCheckpointFile: () => Unit = () =>
    previousCheckpointFile.foreach { file =>
      try {
        val checkpointFile = new Path(file)
        checkpointFile.getFileSystem(sc.hadoopConfiguration).delete(checkpointFile, true)
      } catch {
        case e: IOException =>
          logWarning(s"Cannot delete checkpoint file $file:", e)
      }
    }

  if (implicitPrefs) {
    // Implicit-preference ALS: both factor RDDs must be persisted per iteration because each
    // solve reads the full opposite-side factors (via the Y^T Y precomputation).
    for (iter <- 1 to maxIter) {
      userFactors.setName(s"userFactors-$iter").persist(intermediateRDDStorageLevel)
      val previousItemFactors = itemFactors
      itemFactors = computeFactors(userFactors, userOutBlocks, itemInBlocks, rank, regParam,
        userLocalIndexEncoder, implicitPrefs, alpha, solver)
      previousItemFactors.unpersist()
      itemFactors.setName(s"itemFactors-$iter").persist(intermediateRDDStorageLevel)
      // TODO: Generalize PeriodicGraphCheckpointer and use it here.
      // Capture the shuffle dependencies before checkpointing so they can be cleaned later.
      val deps = itemFactors.dependencies
      if (shouldCheckpoint(iter)) {
        itemFactors.checkpoint() // itemFactors gets materialized in computeFactors
      }
      val previousUserFactors = userFactors
      userFactors = computeFactors(itemFactors, itemOutBlocks, userInBlocks, rank, regParam,
        itemLocalIndexEncoder, implicitPrefs, alpha, solver)
      if (shouldCheckpoint(iter)) {
        ALS.cleanShuffleDependencies(sc, deps)
        deletePreviousCheckpointFile()
        previousCheckpointFile = itemFactors.getCheckpointFile
      }
      previousUserFactors.unpersist()
    }
  } else {
    // Explicit-rating ALS: alternate solves; only checkpointing forces materialization here.
    for (iter <- 0 until maxIter) {
      itemFactors = computeFactors(userFactors, userOutBlocks, itemInBlocks, rank, regParam,
        userLocalIndexEncoder, solver = solver)
      if (shouldCheckpoint(iter)) {
        val deps = itemFactors.dependencies
        itemFactors.checkpoint()
        itemFactors.count() // checkpoint item factors and cut lineage
        ALS.cleanShuffleDependencies(sc, deps)
        deletePreviousCheckpointFile()
        previousCheckpointFile = itemFactors.getCheckpointFile
      }
      userFactors = computeFactors(itemFactors, itemOutBlocks, userInBlocks, rank, regParam,
        itemLocalIndexEncoder, solver = solver)
    }
  }
  // Re-key the block-structured factors back to the original IDs: srcIds and the factor
  // arrays are parallel within each block, so zipping them restores (id, factor) pairs.
  val userIdAndFactors = userInBlocks
    .mapValues(_.srcIds)
    .join(userFactors)
    .mapPartitions({ items =>
      items.flatMap { case (_, (ids, factors)) =>
        ids.view.zip(factors)
      }
    // Preserve the partitioning because IDs are consistent with the partitioners in userInBlocks
    // and userFactors.
    }, preservesPartitioning = true)
    .setName("userFactors")
    .persist(finalRDDStorageLevel)
  val itemIdAndFactors = itemInBlocks
    .mapValues(_.srcIds)
    .join(itemFactors)
    .mapPartitions({ items =>
      items.flatMap { case (_, (ids, factors)) =>
        ids.view.zip(factors)
      }
    }, preservesPartitioning = true)
    .setName("itemFactors")
    .persist(finalRDDStorageLevel)
  if (finalRDDStorageLevel != StorageLevel.NONE) {
    // Materialize the outputs before dropping every intermediate RDD from the cache.
    userIdAndFactors.count()
    itemFactors.unpersist()
    itemIdAndFactors.count()
    userInBlocks.unpersist()
    userOutBlocks.unpersist()
    itemInBlocks.unpersist()
    itemOutBlocks.unpersist()
    blockRatings.unpersist()
  }
  (userIdAndFactors, itemIdAndFactors)
}
/**
 * Factor block that stores factors (Array[Float]) in an Array, one factor vector per
 * user/item in the block, parallel to the block's sorted `srcIds`.
 */
private type FactorBlock = Array[Array[Float]]
/**
* A mapping of the columns of the items factor matrix that are needed when calculating each row
* of the users factor matrix, and vice versa.
*
* Specifically, when calculating a user factor vector, since only those columns of the items
* factor matrix that correspond to the items that that user has rated are needed, we can avoid
* having to repeatedly copy the entire items factor matrix to each worker later in the algorithm
* by precomputing these dependencies for all users, storing them in an RDD of `OutBlock`s. The
* items' dependencies on the columns of the users factor matrix is computed similarly.
*
* =Example=
*
* Using the example provided in the `InBlock` Scaladoc, `userOutBlocks` would look like the
* following:
*
* {{{
* userOutBlocks.collect() == Seq(
* 0 -> Array(Array(0, 1), Array(0, 1)),
* 1 -> Array(Array(0), Array(0))
* )
* }}}
*
* Each value in this map-like sequence is of type `Array[Array[Int]]`. The values in the
* inner array are the ranks of the sorted user IDs in that partition; so in the example above,
* `Array(0, 1)` in partition 0 refers to user IDs 0 and 6, since when all unique user IDs in
* partition 0 are sorted, 0 is the first ID and 6 is the second. The position of each inner
* array in its enclosing outer array denotes the partition number to which item IDs map; in the
* example, the first `Array(0, 1)` is in position 0 of its outer array, denoting item IDs that
* map to partition 0.
*
* In summary, the data structure encodes the following information:
*
* * There are ratings with user IDs 0 and 6 (encoded in `Array(0, 1)`, where 0 and 1 are the
* indices of the user IDs 0 and 6 on partition 0) whose item IDs map to partitions 0 and 1
* (represented by the fact that `Array(0, 1)` appears in both the 0th and 1st positions).
*
* * There are ratings with user ID 3 (encoded in `Array(0)`, where 0 is the index of the user
* ID 3 on partition 1) whose item IDs map to partitions 0 and 1 (represented by the fact that
* `Array(0)` appears in both the 0th and 1st positions).
*/
  private type OutBlock = Array[Array[Int]] // indexed by dst block id -> local src indices whose ratings map there
/**
* In-link block for computing user and item factor matrices.
*
* The ALS algorithm partitions the columns of the users factor matrix evenly among Spark workers.
* Since each column of the factor matrix is calculated using the known ratings of the correspond-
* ing user, and since the ratings don't change across iterations, the ALS algorithm preshuffles
* the ratings to the appropriate partitions, storing them in `InBlock` objects.
*
* The ratings shuffled by item ID are computed similarly and also stored in `InBlock` objects.
* Note that this means every rating is stored twice, once as shuffled by user ID and once by item
* ID. This is a necessary tradeoff, since in general a rating will not be on the same worker
* when partitioned by user as by item.
*
* =Example=
*
* Say we have a small collection of eight items to offer the seven users in our application. We
* have some known ratings given by the users, as seen in the matrix below:
*
* {{{
* Items
* 0 1 2 3 4 5 6 7
* +---+---+---+---+---+---+---+---+
* 0 | |0.1| | |0.4| | |0.7|
* +---+---+---+---+---+---+---+---+
* 1 | | | | | | | | |
* +---+---+---+---+---+---+---+---+
* U 2 | | | | | | | | |
* s +---+---+---+---+---+---+---+---+
* e 3 | |3.1| | |3.4| | |3.7|
* r +---+---+---+---+---+---+---+---+
* s 4 | | | | | | | | |
* +---+---+---+---+---+---+---+---+
* 5 | | | | | | | | |
* +---+---+---+---+---+---+---+---+
* 6 | |6.1| | |6.4| | |6.7|
* +---+---+---+---+---+---+---+---+
* }}}
*
* The ratings are represented as an RDD, passed to the `partitionRatings` method as the `ratings`
* parameter:
*
* {{{
* ratings.collect() == Seq(
* Rating(0, 1, 0.1f),
* Rating(0, 4, 0.4f),
* Rating(0, 7, 0.7f),
* Rating(3, 1, 3.1f),
* Rating(3, 4, 3.4f),
* Rating(3, 7, 3.7f),
* Rating(6, 1, 6.1f),
* Rating(6, 4, 6.4f),
* Rating(6, 7, 6.7f)
* )
* }}}
*
* Say that we are using two partitions to calculate each factor matrix:
*
* {{{
* val userPart = new ALSPartitioner(2)
* val itemPart = new ALSPartitioner(2)
* val blockRatings = partitionRatings(ratings, userPart, itemPart)
* }}}
*
* Ratings are mapped to partitions using the user/item IDs modulo the number of partitions. With
* two partitions, ratings with even-valued user IDs are shuffled to partition 0 while those with
* odd-valued user IDs are shuffled to partition 1:
*
* {{{
* userInBlocks.collect() == Seq(
* 0 -> Seq(
* // Internally, the class stores the ratings in a more optimized format than
* // a sequence of `Rating`s, but for clarity we show it as such here.
* Rating(0, 1, 0.1f),
* Rating(0, 4, 0.4f),
* Rating(0, 7, 0.7f),
* Rating(6, 1, 6.1f),
* Rating(6, 4, 6.4f),
* Rating(6, 7, 6.7f)
* ),
* 1 -> Seq(
* Rating(3, 1, 3.1f),
* Rating(3, 4, 3.4f),
* Rating(3, 7, 3.7f)
* )
* )
* }}}
*
* Similarly, ratings with even-valued item IDs are shuffled to partition 0 while those with
* odd-valued item IDs are shuffled to partition 1:
*
* {{{
* itemInBlocks.collect() == Seq(
* 0 -> Seq(
* Rating(0, 4, 0.4f),
* Rating(3, 4, 3.4f),
* Rating(6, 4, 6.4f)
* ),
* 1 -> Seq(
* Rating(0, 1, 0.1f),
* Rating(0, 7, 0.7f),
* Rating(3, 1, 3.1f),
* Rating(3, 7, 3.7f),
* Rating(6, 1, 6.1f),
* Rating(6, 7, 6.7f)
* )
* )
* }}}
*
* @param srcIds src ids (ordered)
* @param dstPtrs dst pointers. Elements in range [dstPtrs(i), dstPtrs(i+1)) of dst indices and
* ratings are associated with srcIds(i).
* @param dstEncodedIndices encoded dst indices
* @param ratings ratings
* @see [[LocalIndexEncoder]]
*/
  private[recommendation] case class InBlock[@specialized(Int, Long) ID: ClassTag](
      srcIds: Array[ID],
      dstPtrs: Array[Int],
      dstEncodedIndices: Array[Int],
      ratings: Array[Float]) {
    /** Size of the block, i.e., the total number of ratings stored in it. */
    def size: Int = ratings.length
    // Invariants of the CSC-style layout: one encoded dst index per rating, and one pointer
    // per unique src ID plus a trailing sentinel (ratings of srcIds(i) live in
    // indices [dstPtrs(i), dstPtrs(i + 1))).
    require(dstEncodedIndices.length == size)
    require(dstPtrs.length == srcIds.length + 1)
  }
/**
* Initializes factors randomly given the in-link blocks.
*
* @param inBlocks in-link blocks
* @param rank rank
* @return initialized factor blocks
*/
private def initialize[ID](
inBlocks: RDD[(Int, InBlock[ID])],
rank: Int,
seed: Long): RDD[(Int, FactorBlock)] = {
// Choose a unit vector uniformly at random from the unit sphere, but from the
// "first quadrant" where all elements are nonnegative. This can be done by choosing
// elements distributed as Normal(0,1) and taking the absolute value, and then normalizing.
// This appears to create factorizations that have a slightly better reconstruction
// (<1%) compared picking elements uniformly at random in [0,1].
inBlocks.map { case (srcBlockId, inBlock) =>
val random = new XORShiftRandom(byteswap64(seed ^ srcBlockId))
val factors = Array.fill(inBlock.srcIds.length) {
val factor = Array.fill(rank)(random.nextGaussian().toFloat)
val nrm = blas.snrm2(rank, factor, 1)
blas.sscal(rank, 1.0f / nrm, factor, 1)
factor
}
(srcBlockId, factors)
}
}
/**
* A rating block that contains src IDs, dst IDs, and ratings, stored in primitive arrays.
*/
  private[recommendation] case class RatingBlock[@specialized(Int, Long) ID: ClassTag](
      srcIds: Array[ID],
      dstIds: Array[ID],
      ratings: Array[Float]) {
    /** Size of the block, i.e., the number of (srcId, dstId, rating) triples stored. */
    def size: Int = srcIds.length
    // The three parallel arrays must be index-aligned: entry i describes one rating.
    require(dstIds.length == srcIds.length)
    require(ratings.length == srcIds.length)
  }
/**
* Builder for [[RatingBlock]]. `mutable.ArrayBuilder` is used to avoid boxing/unboxing.
*/
private[recommendation] class RatingBlockBuilder[@specialized(Int, Long) ID: ClassTag]
extends Serializable {
private val srcIds = mutable.ArrayBuilder.make[ID]
private val dstIds = mutable.ArrayBuilder.make[ID]
private val ratings = mutable.ArrayBuilder.make[Float]
var size = 0
/** Adds a rating. */
def add(r: Rating[ID]): this.type = {
size += 1
srcIds += r.user
dstIds += r.item
ratings += r.rating
this
}
/** Merges another [[RatingBlockBuilder]]. */
def merge(other: RatingBlock[ID]): this.type = {
size += other.srcIds.length
srcIds ++= other.srcIds
dstIds ++= other.dstIds
ratings ++= other.ratings
this
}
/** Builds a [[RatingBlock]]. */
def build(): RatingBlock[ID] = {
RatingBlock[ID](srcIds.result(), dstIds.result(), ratings.result())
}
}
/**
* Groups an RDD of [[Rating]]s by the user partition and item partition to which each `Rating`
* maps according to the given partitioners. The returned pair RDD holds the ratings, encoded in
* a memory-efficient format but otherwise unchanged, keyed by the (user partition ID, item
* partition ID) pair.
*
* Performance note: This is an expensive operation that performs an RDD shuffle.
*
* Implementation note: This implementation produces the same result as the following but
* generates fewer intermediate objects:
*
* {{{
* ratings.map { r =>
* ((srcPart.getPartition(r.user), dstPart.getPartition(r.item)), r)
* }.aggregateByKey(new RatingBlockBuilder)(
* seqOp = (b, r) => b.add(r),
* combOp = (b0, b1) => b0.merge(b1.build()))
* .mapValues(_.build())
* }}}
*
* @param ratings raw ratings
* @param srcPart partitioner for src IDs
* @param dstPart partitioner for dst IDs
* @return an RDD of rating blocks in the form of ((srcBlockId, dstBlockId), ratingBlock)
*/
private def partitionRatings[ID: ClassTag](
ratings: RDD[Rating[ID]],
srcPart: Partitioner,
dstPart: Partitioner): RDD[((Int, Int), RatingBlock[ID])] = {
val numPartitions = srcPart.numPartitions * dstPart.numPartitions
ratings.mapPartitions { iter =>
val builders = Array.fill(numPartitions)(new RatingBlockBuilder[ID])
iter.flatMap { r =>
val srcBlockId = srcPart.getPartition(r.user)
val dstBlockId = dstPart.getPartition(r.item)
val idx = srcBlockId + srcPart.numPartitions * dstBlockId
val builder = builders(idx)
builder.add(r)
if (builder.size >= 2048) { // 2048 * (3 * 4) = 24k
builders(idx) = new RatingBlockBuilder
Iterator.single(((srcBlockId, dstBlockId), builder.build()))
} else {
Iterator.empty
}
} ++ {
builders.view.zipWithIndex.filter(_._1.size > 0).map { case (block, idx) =>
val srcBlockId = idx % srcPart.numPartitions
val dstBlockId = idx / srcPart.numPartitions
((srcBlockId, dstBlockId), block.build())
}
}
}.groupByKey().mapValues { blocks =>
val builder = new RatingBlockBuilder[ID]
blocks.foreach(builder.merge)
builder.build()
}.setName("ratingBlocks")
}
/**
* Builder for uncompressed in-blocks of (srcId, dstEncodedIndex, rating) tuples.
*
* @param encoder encoder for dst indices
*/
private[recommendation] class UncompressedInBlockBuilder[@specialized(Int, Long) ID: ClassTag](
encoder: LocalIndexEncoder)(
implicit ord: Ordering[ID]) {
private val srcIds = mutable.ArrayBuilder.make[ID]
private val dstEncodedIndices = mutable.ArrayBuilder.make[Int]
private val ratings = mutable.ArrayBuilder.make[Float]
/**
* Adds a dst block of (srcId, dstLocalIndex, rating) tuples.
*
* @param dstBlockId dst block ID
* @param srcIds original src IDs
* @param dstLocalIndices dst local indices
* @param ratings ratings
*/
def add(
dstBlockId: Int,
srcIds: Array[ID],
dstLocalIndices: Array[Int],
ratings: Array[Float]): this.type = {
val sz = srcIds.length
require(dstLocalIndices.length == sz)
require(ratings.length == sz)
this.srcIds ++= srcIds
this.ratings ++= ratings
var j = 0
while (j < sz) {
this.dstEncodedIndices += encoder.encode(dstBlockId, dstLocalIndices(j))
j += 1
}
this
}
/** Builds a [[UncompressedInBlock]]. */
def build(): UncompressedInBlock[ID] = {
new UncompressedInBlock(srcIds.result(), dstEncodedIndices.result(), ratings.result())
}
}
/**
* A block of (srcId, dstEncodedIndex, rating) tuples stored in primitive arrays.
*/
  private[recommendation] class UncompressedInBlock[@specialized(Int, Long) ID: ClassTag](
      val srcIds: Array[ID],
      val dstEncodedIndices: Array[Int],
      val ratings: Array[Float])(
      implicit ord: Ordering[ID]) {
    /** Size of the block. */
    def length: Int = srcIds.length
    /**
     * Compresses the block into an `InBlock`. The algorithm is the same as converting a sparse
     * matrix from coordinate list (COO) format into compressed sparse column (CSC) format.
     * Sorting is done using Spark's built-in Timsort to avoid generating too many objects.
     */
    def compress(): InBlock[ID] = {
      val sz = length
      assert(sz > 0, "Empty in-link block should not exist.")
      // Sort by src ID so equal IDs become contiguous runs.
      sort()
      val uniqueSrcIdsBuilder = mutable.ArrayBuilder.make[ID]
      val dstCountsBuilder = mutable.ArrayBuilder.make[Int]
      // Single pass over the sorted IDs: record each distinct src ID and the length of its run.
      var preSrcId = srcIds(0)
      uniqueSrcIdsBuilder += preSrcId
      var curCount = 1
      var i = 1
      while (i < sz) {
        val srcId = srcIds(i)
        if (srcId != preSrcId) {
          uniqueSrcIdsBuilder += srcId
          dstCountsBuilder += curCount
          preSrcId = srcId
          curCount = 0
        }
        curCount += 1
        i += 1
      }
      // Flush the count of the final run.
      dstCountsBuilder += curCount
      val uniqueSrcIds = uniqueSrcIdsBuilder.result()
      val numUniqueSrdIds = uniqueSrcIds.length
      val dstCounts = dstCountsBuilder.result()
      // Prefix-sum the run lengths into CSC-style pointers: dstPtrs(0) == 0 and
      // dstPtrs(i + 1) - dstPtrs(i) is the number of ratings for uniqueSrcIds(i).
      val dstPtrs = new Array[Int](numUniqueSrdIds + 1)
      var sum = 0
      i = 0
      while (i < numUniqueSrdIds) {
        sum += dstCounts(i)
        i += 1
        dstPtrs(i) = sum
      }
      InBlock(uniqueSrcIds, dstPtrs, dstEncodedIndices, ratings)
    }
    // Sorts the three parallel arrays in place, keyed by src ID.
    private def sort(): Unit = {
      val sz = length
      // Since there might be interleaved log messages, we insert a unique id for easy pairing.
      val sortId = Utils.random.nextInt()
      logDebug(s"Start sorting an uncompressed in-block of size $sz. (sortId = $sortId)")
      val start = System.nanoTime()
      val sorter = new Sorter(new UncompressedInBlockSort[ID])
      sorter.sort(this, 0, length, Ordering[KeyWrapper[ID]])
      val duration = (System.nanoTime() - start) / 1e9
      logDebug(s"Sorting took $duration seconds. (sortId = $sortId)")
    }
  }
/**
* A wrapper that holds a primitive key.
*
* @see [[UncompressedInBlockSort]]
*/
private class KeyWrapper[@specialized(Int, Long) ID: ClassTag](
implicit ord: Ordering[ID]) extends Ordered[KeyWrapper[ID]] {
var key: ID = _
override def compare(that: KeyWrapper[ID]): Int = {
ord.compare(key, that.key)
}
def setKey(key: ID): this.type = {
this.key = key
this
}
}
/**
* [[SortDataFormat]] of [[UncompressedInBlock]] used by [[Sorter]].
*/
private class UncompressedInBlockSort[@specialized(Int, Long) ID: ClassTag](
implicit ord: Ordering[ID])
extends SortDataFormat[KeyWrapper[ID], UncompressedInBlock[ID]] {
override def newKey(): KeyWrapper[ID] = new KeyWrapper()
override def getKey(
data: UncompressedInBlock[ID],
pos: Int,
reuse: KeyWrapper[ID]): KeyWrapper[ID] = {
if (reuse == null) {
new KeyWrapper().setKey(data.srcIds(pos))
} else {
reuse.setKey(data.srcIds(pos))
}
}
override def getKey(
data: UncompressedInBlock[ID],
pos: Int): KeyWrapper[ID] = {
getKey(data, pos, null)
}
private def swapElements[@specialized(Int, Float) T](
data: Array[T],
pos0: Int,
pos1: Int): Unit = {
val tmp = data(pos0)
data(pos0) = data(pos1)
data(pos1) = tmp
}
override def swap(data: UncompressedInBlock[ID], pos0: Int, pos1: Int): Unit = {
swapElements(data.srcIds, pos0, pos1)
swapElements(data.dstEncodedIndices, pos0, pos1)
swapElements(data.ratings, pos0, pos1)
}
override def copyRange(
src: UncompressedInBlock[ID],
srcPos: Int,
dst: UncompressedInBlock[ID],
dstPos: Int,
length: Int): Unit = {
System.arraycopy(src.srcIds, srcPos, dst.srcIds, dstPos, length)
System.arraycopy(src.dstEncodedIndices, srcPos, dst.dstEncodedIndices, dstPos, length)
System.arraycopy(src.ratings, srcPos, dst.ratings, dstPos, length)
}
override def allocate(length: Int): UncompressedInBlock[ID] = {
new UncompressedInBlock(
new Array[ID](length), new Array[Int](length), new Array[Float](length))
}
override def copyElement(
src: UncompressedInBlock[ID],
srcPos: Int,
dst: UncompressedInBlock[ID],
dstPos: Int): Unit = {
dst.srcIds(dstPos) = src.srcIds(srcPos)
dst.dstEncodedIndices(dstPos) = src.dstEncodedIndices(srcPos)
dst.ratings(dstPos) = src.ratings(srcPos)
}
}
/**
* Creates in-blocks and out-blocks from rating blocks.
*
* @param prefix prefix for in/out-block names
* @param ratingBlocks rating blocks
* @param srcPart partitioner for src IDs
* @param dstPart partitioner for dst IDs
* @return (in-blocks, out-blocks)
*/
  private def makeBlocks[ID: ClassTag](
      prefix: String,
      ratingBlocks: RDD[((Int, Int), RatingBlock[ID])],
      srcPart: Partitioner,
      dstPart: Partitioner,
      storageLevel: StorageLevel)(
      implicit srcOrd: Ordering[ID]): (RDD[(Int, InBlock[ID])], RDD[(Int, OutBlock)]) = {
    val inBlocks = ratingBlocks.map {
      case ((srcBlockId, dstBlockId), RatingBlock(srcIds, dstIds, ratings)) =>
        // The implementation is a faster version of
        // val dstIdToLocalIndex = dstIds.toSet.toSeq.sorted.zipWithIndex.toMap
        val start = System.nanoTime()
        // Deduplicate dst IDs with an open-addressing hash set (avoids boxing).
        val dstIdSet = new OpenHashSet[ID](1 << 20)
        dstIds.foreach(dstIdSet.add)
        // Copy the distinct IDs out of the hash set into a flat array, then sort them so
        // the local index of an ID equals its rank among sorted distinct IDs.
        val sortedDstIds = new Array[ID](dstIdSet.size)
        var i = 0
        var pos = dstIdSet.nextPos(0)
        while (pos != -1) {
          sortedDstIds(i) = dstIdSet.getValue(pos)
          pos = dstIdSet.nextPos(pos + 1)
          i += 1
        }
        assert(i == dstIdSet.size)
        Sorting.quickSort(sortedDstIds)
        // Build the ID -> local-index lookup table.
        val dstIdToLocalIndex = new OpenHashMap[ID, Int](sortedDstIds.length)
        i = 0
        while (i < sortedDstIds.length) {
          dstIdToLocalIndex.update(sortedDstIds(i), i)
          i += 1
        }
        logDebug(
          "Converting to local indices took " + (System.nanoTime() - start) / 1e9 + " seconds.")
        val dstLocalIndices = dstIds.map(dstIdToLocalIndex.apply)
        (srcBlockId, (dstBlockId, srcIds, dstLocalIndices, ratings))
    }.groupByKey(new ALSPartitioner(srcPart.numPartitions))
      .mapValues { iter =>
        // Accumulate all (dst block, local index, rating) tuples for this src partition and
        // compress them into the CSC-style InBlock format.
        val builder =
          new UncompressedInBlockBuilder[ID](new LocalIndexEncoder(dstPart.numPartitions))
        iter.foreach { case (dstBlockId, srcIds, dstLocalIndices, ratings) =>
          builder.add(dstBlockId, srcIds, dstLocalIndices, ratings)
        }
        builder.build().compress()
      }.setName(prefix + "InBlocks")
      .persist(storageLevel)
    // Derive out-blocks from in-blocks: for each src row, record which dst partitions it
    // has at least one rating in, so only the needed factor columns get shipped later.
    val outBlocks = inBlocks.mapValues { case InBlock(srcIds, dstPtrs, dstEncodedIndices, _) =>
      val encoder = new LocalIndexEncoder(dstPart.numPartitions)
      val activeIds = Array.fill(dstPart.numPartitions)(mutable.ArrayBuilder.make[Int])
      var i = 0
      // Tracks, per src row, which dst partitions were already recorded (reset each row).
      val seen = new Array[Boolean](dstPart.numPartitions)
      while (i < srcIds.length) {
        var j = dstPtrs(i)
        ju.Arrays.fill(seen, false)
        while (j < dstPtrs(i + 1)) {
          val dstBlockId = encoder.blockId(dstEncodedIndices(j))
          if (!seen(dstBlockId)) {
            activeIds(dstBlockId) += i // add the local index in this out-block
            seen(dstBlockId) = true
          }
          j += 1
        }
        i += 1
      }
      activeIds.map { x =>
        x.result()
      }
    }.setName(prefix + "OutBlocks")
      .persist(storageLevel)
    (inBlocks, outBlocks)
  }
/**
* Compute dst factors by constructing and solving least square problems.
*
* @param srcFactorBlocks src factors
* @param srcOutBlocks src out-blocks
* @param dstInBlocks dst in-blocks
* @param rank rank
* @param regParam regularization constant
* @param srcEncoder encoder for src local indices
* @param implicitPrefs whether to use implicit preference
* @param alpha the alpha constant in the implicit preference formulation
* @param solver solver for least squares problems
* @return dst factors
*/
  private def computeFactors[ID](
      srcFactorBlocks: RDD[(Int, FactorBlock)],
      srcOutBlocks: RDD[(Int, OutBlock)],
      dstInBlocks: RDD[(Int, InBlock[ID])],
      rank: Int,
      regParam: Double,
      srcEncoder: LocalIndexEncoder,
      implicitPrefs: Boolean = false,
      alpha: Double = 1.0,
      solver: LeastSquaresNESolver): RDD[(Int, FactorBlock)] = {
    val numSrcBlocks = srcFactorBlocks.partitions.length
    // Gramian of all src factors; needed only by the implicit-preference formulation.
    val YtY = if (implicitPrefs) Some(computeYtY(srcFactorBlocks, rank)) else None
    // Ship, to each dst partition, only the src factor vectors that partition actually needs,
    // as recorded in the out-blocks.
    val srcOut = srcOutBlocks.join(srcFactorBlocks).flatMap {
      case (srcBlockId, (srcOutBlock, srcFactors)) =>
        srcOutBlock.view.zipWithIndex.map { case (activeIndices, dstBlockId) =>
          (dstBlockId, (srcBlockId, activeIndices.map(idx => srcFactors(idx))))
        }
    }
    val merged = srcOut.groupByKey(new ALSPartitioner(dstInBlocks.partitions.length))
    dstInBlocks.join(merged).mapValues {
      case (InBlock(dstIds, srcPtrs, srcEncodedIndices, ratings), srcFactors) =>
        // Arrange received factor chunks by their src block id so that the encoded
        // (blockId, localIndex) pairs below can be resolved directly.
        val sortedSrcFactors = new Array[FactorBlock](numSrcBlocks)
        srcFactors.foreach { case (srcBlockId, factors) =>
          sortedSrcFactors(srcBlockId) = factors
        }
        val dstFactors = new Array[Array[Float]](dstIds.length)
        var j = 0
        // One normal equation, reset and reused for every dst ID in this block.
        val ls = new NormalEquation(rank)
        while (j < dstIds.length) {
          ls.reset()
          if (implicitPrefs) {
            ls.merge(YtY.get)
          }
          // Accumulate the contribution of every rating of dst ID j, whose entries live in
          // indices [srcPtrs(j), srcPtrs(j + 1)) of the CSC-style arrays.
          var i = srcPtrs(j)
          var numExplicits = 0
          while (i < srcPtrs(j + 1)) {
            val encoded = srcEncodedIndices(i)
            val blockId = srcEncoder.blockId(encoded)
            val localIndex = srcEncoder.localIndex(encoded)
            val srcFactor = sortedSrcFactors(blockId)(localIndex)
            val rating = ratings(i)
            if (implicitPrefs) {
              // Extension to the original paper to handle rating < 0. confidence is a function
              // of |rating| instead so that it is never negative. c1 is confidence - 1.
              val c1 = alpha * math.abs(rating)
              // For rating <= 0, the corresponding preference is 0. So the second argument of add
              // is only there for rating > 0.
              if (rating > 0.0) {
                numExplicits += 1
              }
              ls.add(srcFactor, if (rating > 0.0) 1.0 + c1 else 0.0, c1)
            } else {
              ls.add(srcFactor, rating)
              numExplicits += 1
            }
            i += 1
          }
          // Weight lambda by the number of explicit ratings based on the ALS-WR paper.
          dstFactors(j) = solver.solve(ls, numExplicits * regParam)
          j += 1
        }
        dstFactors
    }
  }
/**
* Computes the Gramian matrix of user or item factors, which is only used in implicit preference.
* Caching of the input factors is handled in [[ALS#train]].
*/
private def computeYtY(factorBlocks: RDD[(Int, FactorBlock)], rank: Int): NormalEquation = {
factorBlocks.values.aggregate(new NormalEquation(rank))(
seqOp = (ne, factors) => {
factors.foreach(ne.add(_, 0.0))
ne
},
combOp = (ne1, ne2) => ne1.merge(ne2))
}
/**
* Encoder for storing (blockId, localIndex) into a single integer.
*
* We use the leading bits (including the sign bit) to store the block id and the rest to store
* the local index. This is based on the assumption that users/items are approximately evenly
* partitioned. With this assumption, we should be able to encode two billion distinct values.
*
* @param numBlocks number of blocks
*/
private[recommendation] class LocalIndexEncoder(numBlocks: Int) extends Serializable {
require(numBlocks > 0, s"numBlocks must be positive but found $numBlocks.")
private[this] final val numLocalIndexBits =
math.min(java.lang.Integer.numberOfLeadingZeros(numBlocks - 1), 31)
private[this] final val localIndexMask = (1 << numLocalIndexBits) - 1
/** Encodes a (blockId, localIndex) into a single integer. */
def encode(blockId: Int, localIndex: Int): Int = {
require(blockId < numBlocks)
require((localIndex & ~localIndexMask) == 0)
(blockId << numLocalIndexBits) | localIndex
}
/** Gets the block id from an encoded index. */
@inline
def blockId(encoded: Int): Int = {
encoded >>> numLocalIndexBits
}
/** Gets the local index from an encoded index. */
@inline
def localIndex(encoded: Int): Int = {
encoded & localIndexMask
}
}
/**
* Partitioner used by ALS. We require that getPartition is a projection. That is, for any key k,
* we have getPartition(getPartition(k)) = getPartition(k). Since the default HashPartitioner
* satisfies this requirement, we simply use a type alias here.
*/
  private[recommendation] type ALSPartitioner = org.apache.spark.HashPartitioner // projection property documented above
/**
* Private function to clean up all of the shuffles files from the dependencies and their parents.
*/
private[spark] def cleanShuffleDependencies[T](
sc: SparkContext,
deps: Seq[Dependency[_]],
blocking: Boolean = false): Unit = {
// If there is no reference tracking we skip clean up.
sc.cleaner.foreach { cleaner =>
/**
* Clean the shuffles & all of its parents.
*/
def cleanEagerly(dep: Dependency[_]): Unit = {
if (dep.isInstanceOf[ShuffleDependency[_, _, _]]) {
val shuffleId = dep.asInstanceOf[ShuffleDependency[_, _, _]].shuffleId
cleaner.doCleanupShuffle(shuffleId, blocking)
}
val rdd = dep.rdd
val rddDeps = rdd.dependencies
if (rdd.getStorageLevel == StorageLevel.NONE && rddDeps != null) {
rddDeps.foreach(cleanEagerly)
}
}
deps.foreach(cleanEagerly)
}
}
}
| aokolnychyi/spark | mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala | Scala | apache-2.0 | 65,825 |
package org.duffqiu.rest.common
/** Base type for REST message bodies. */
abstract class RestBody
/** Body value representing the absence of content. */
case object EmptyBody extends RestBody
| duffqiu/rest-test-dsl | src/main/scala/org/duffqiu/rest/common/RestBody.scala | Scala | apache-2.0 | 102 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.tools.status
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore
import org.locationtech.geomesa.accumulo.tools.{AccumuloDataStoreCommand, AccumuloDataStoreParams}
import org.locationtech.geomesa.accumulo.tools.status.AccumuloGetTypeNamesCommand.GetTypeNamesParams
import org.locationtech.geomesa.tools.status.GetTypeNamesCommand
/** CLI command that lists the feature type names of an Accumulo-backed GeoMesa data store. */
class AccumuloGetTypeNamesCommand extends GetTypeNamesCommand[AccumuloDataStore] with AccumuloDataStoreCommand {
  // Accumulo connection/catalog parameters, parsed from the command line by JCommander.
  override val params = new GetTypeNamesParams()
}
object AccumuloGetTypeNamesCommand {
  /** JCommander parameter container; the description is shown in the CLI help output. */
  @Parameters(commandDescription = "List the feature types for a given catalog")
  class GetTypeNamesParams extends AccumuloDataStoreParams
}
| elahrvivaz/geomesa | geomesa-accumulo/geomesa-accumulo-tools/src/main/scala/org/locationtech/geomesa/accumulo/tools/status/AccumuloGetTypeNamesCommand.scala | Scala | apache-2.0 | 1,234 |
/*
* SolrResultInfo.scala
*
* Updated: Sep 23, 2014
*
* Copyright (c) 2014, CodeMettle
*/
package com.codemettle.akkasolr.solrtypes
/**
* @author steven
*
*/
/** Summary of a Solr result set: total hits, starting offset, and optional maximum score. */
@SerialVersionUID(1L)
case class SolrResultInfo(numFound: Long, start: Long, maxScore: Option[Float])

object SolrResultInfo {
  /** Builds a [[SolrResultInfo]] from a boxed Java float; a null score becomes `None`. */
  def apply(numFound: Long, start: Long, maxScore: java.lang.Float): SolrResultInfo =
    SolrResultInfo(numFound, start, Option(maxScore).map(_.floatValue()))
}
| CodeMettle/akka-solr | src/main/scala/com/codemettle/akkasolr/solrtypes/SolrResultInfo.scala | Scala | apache-2.0 | 472 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.collection
package immutable
import generic._
import mutable.Builder
/** A base trait for iterable collections that are guaranteed immutable.
* $iterableInfo
*
* @define Coll immutable.Iterable
* @define coll immutable iterable collection
*/
trait Iterable[+A] extends Traversable[A]
                      with scala.collection.Iterable[A]
                      with GenericTraversableTemplate[A, Iterable]
                      with IterableLike[A, Iterable[A]] {
  // Companion factory used by the generic builder machinery to construct transformed results.
  override def companion: GenericCompanion[Iterable] = Iterable
}
/** $factoryInfo
* @define Coll immutable.Iterable
* @define coll immutable iterable collection
*/
object Iterable extends TraversableFactory[Iterable] {
  // Allows building an immutable Iterable from any source collection of this kind.
  implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
  // Default builder; ListBuffer's result is a List, which is an immutable Iterable.
  def newBuilder[A]: Builder[A, Iterable[A]] = new mutable.ListBuffer
}
| cran/rkafkajars | java/scala/collection/immutable/Iterable.scala | Scala | apache-2.0 | 1,417 |
package net.ground5hark.sbt.concat
import com.typesafe.sbt.web.{PathMapping, SbtWeb}
import sbt.Keys._
import sbt._
import com.typesafe.sbt.web.pipeline.Pipeline
import collection.mutable
import mutable.ListBuffer
import java.io.File
object Import {
  /** Pipeline stage task that produces the concatenated web asset bundles. */
  val concat = TaskKey[Pipeline.Stage]("web-concat", "Concatenates groups of web assets")
  object Concat {
    /** Named groups of files to concatenate; one output file is written per group name. */
    val groups = SettingKey[Seq[ConcatGroup]]("web-concat-groups", "List of ConcatGroup items")
    /** Optional sub-directory of the web target directory to write output files into. */
    val parentDir = SettingKey[String]("web-concat-parent-dir", "Parent directory name in the target folder to write concatenated files to, default: \"\" (no parentDir)")
  }
  /**
   * Normalizes a group value into either explicit file names or an sbt `PathFinder`.
   * NOTE(review): the `Seq[_]` arm performs an unchecked erasure cast to `Seq[String]`;
   * a sequence of non-strings would only fail later, at the point of use.
   */
  def group(o: AnyRef): Either[Seq[String], PathFinder] = o match {
    case o: Seq[_] => Left(o.asInstanceOf[Seq[String]])
    case o: PathFinder => Right(o)
    case u =>
      sys.error(s"Can't create a concat group from $u. Must provide either Seq[String] or a PathFinder for the concat group values")
  }
}
}
/** Accepts exactly the files that sbt's `HiddenFileFilter` rejects, i.e. non-hidden files. */
object NotHiddenFileFilter extends FileFilter {
  override def accept(f: File): Boolean = !HiddenFileFilter.accept(f)
}
object SbtConcat extends AutoPlugin {
  // Activates automatically for every project that enables sbt-web.
  override def requires = SbtWeb
  override def trigger = AllRequirements
  val autoImport = Import
  import SbtWeb.autoImport._
  import WebKeys._
  import autoImport._
  import Concat._
  override def projectSettings = Seq(
    groups := ListBuffer.empty[ConcatGroup],
    includeFilter in concat := NotHiddenFileFilter,
    parentDir := "",
    concat := concatFiles.value
  )
  // Resolves each group definition to concrete relative file names: PathFinder-based groups
  // are rebased against the asset source and web-module directories, falling back to the
  // bare file name. NOTE(review): the trailing `case u` arm is unreachable, since Either is
  // sealed and both Left and Right are matched above.
  private def toFileNames(values: Seq[ConcatGroup],
                          srcDirs: Seq[File],
                          webModuleDirs: Seq[File]): Seq[(String, Iterable[String])] = values.map {
    case (groupName, fileNames) =>
      fileNames match {
        case Left(fileNamesSeq) => (groupName, fileNamesSeq)
        case Right(fileNamesPathFinder) =>
          val r = fileNamesPathFinder.pair(Path.relativeTo(srcDirs ++ webModuleDirs) | Path.flat)
          (groupName, r.map(_._2))
        case u => sys.error(s"Expected Seq[String] or PathFinder, but got $u")
      }
  }
  // The pipeline stage: concatenates every configured group into one file under the web
  // target (optionally inside `parentDir`) and appends those outputs to the asset mappings.
  private def concatFiles: Def.Initialize[Task[Pipeline.Stage]] = Def.task {
    val logValue = streams.value.log
    mappings: Seq[PathMapping] =>
      val groupsValue = toFileNames(groups.value,
        (sourceDirectories in Assets).value,
        (webModuleDirectories in Assets).value)
      val groupMappings = if (groupsValue.nonEmpty) {
        logValue.info(s"Building ${groupsValue.size} concat group(s)")
        // Mutable map so we can pop entries we've already seen, in case there are similarly named files
        val reverseMapping = ReverseGroupMapping.get(groupsValue, logValue)
        val concatGroups = mutable.Map.empty[String, StringBuilder]
        val filteredMappings = mappings.filter(m => (includeFilter in concat).value.accept(m._1) && m._1.isFile)
        val targetDir = webTarget.value / parentDir.value
        groupsValue.foreach {
          case (groupName, fileNames) =>
            fileNames.foreach { fileName =>
              val separator = File.separatorChar
              // Compare paths platform-independently by normalizing both separator styles.
              def normalize(path: String) = path.replace('\\', separator).replace('/', separator)
              val mapping = filteredMappings.filter(entry => normalize(entry._2) == normalize(fileName))
              if (mapping.nonEmpty) {
                // TODO This is not as memory efficient as it could be, write to file instead
                concatGroups.getOrElseUpdate(groupName, new StringBuilder)
                  .append(s"\n/** $fileName **/\n")
                  .append(IO.read(mapping.head._1))
                reverseMapping.remove(fileName)
              } else logValue.warn(s"Unable to process $fileName. Not found.")
            }
        }
        // Write one output file per group and express it as a mapping relative to webTarget.
        concatGroups.map {
          case (groupName, concatenatedContents) =>
            val outputFile = targetDir / groupName
            IO.write(outputFile, concatenatedContents.toString())
            outputFile
        }.pair(Path.relativeTo(webTarget.value))
      } else {
        Seq.empty[PathMapping]
      }
      // Generated bundles are added alongside the original, untouched mappings.
      groupMappings ++ mappings
  }
}
private object ReverseGroupMapping {
  /**
   * Inverts the group definitions into a mutable fileName -> groupName lookup table.
   * If a file name appears in several groups, the last group listed wins.
   */
  def get(groups: Seq[(String, Iterable[String])], logger: Logger): mutable.Map[String, String] = {
    val mapping = mutable.Map.empty[String, String]
    for {
      (groupName, fileNames) <- groups
      fileName <- fileNames
    } mapping(fileName) = groupName
    mapping
  }
}
| ground5hark/sbt-concat | src/main/scala/net/ground5hark/sbt/concat/SbtConcat.scala | Scala | mit | 4,443 |
/*
* Copyright (c) 2020. Peerapat Asoktummarungsri <https://www.linkedin.com/in/peerapat>
*/
package yoda.orm
import org.scalatest.funsuite.AnyFunSuite
import yoda.commons.Naming
class NamingTest extends AnyFunSuite
  with Naming {
  // Multi-token camelCase including a digit: each boundary becomes an underscore.
  test("testCamelToSnakecase 1") {
    val input = "thisIsA1Test"
    val expected = "this_is_a_1_test"
    val result = camelToSnakecase(input)
    assert(result === expected)
  }
  // Simple two-word identifier.
  test("testCamelToSnakecase 2") {
    val input = "citizenId"
    val expected = "citizen_id"
    val result = camelToSnakecase(input)
    assert(result === expected)
  }
  // Inverse direction: the first case's snake_case form maps back to camelCase.
  test("testSnakecaseToCamel") {
    val input = "this_is_a_1_test"
    val expected = "thisIsA1Test"
    val result = snakecaseToCamel(input)
    assert(result === expected)
  }
}
| nuboat/yoda-orm | src/test/scala/yoda/orm/NamingTest.scala | Scala | mit | 778 |
package dk.gp.hgpc
import breeze.linalg.DenseMatrix
import breeze.linalg.DenseVector
import breeze.linalg._
import java.io.File
object getHgpcTestData {
  /**
   * Loads the GPML classification fixture and rearranges it into hierarchical GPC test data.
   *
   * @return (x, y, u): x prepends a task-id column (1.0, 2.0 or 3.0) to the inputs,
   *         y holds the labels in the same row order, and u prepends a column of -1.0
   *         to all original inputs (presumably the parent task id — confirm with callers).
   */
  def apply(): (DenseMatrix[Double], DenseVector[Double], DenseMatrix[Double]) = {
    val x = csvread(new File("src/test/resources/gpml/classification_x.csv"))
    val y = csvread(new File("src/test/resources/gpml/classification_y.csv")).toDenseVector
    // All but the last row are split into two interleaved tasks (even vs odd row indices).
    val x1Idx = (0 until x.rows - 1).filter(idx => idx % 2 == 0)
    val x2Idx = (0 until x.rows - 1).filter(idx => idx % 2 == 1)
    val x1 = DenseMatrix.horzcat(DenseMatrix.zeros[Double](x1Idx.size, 1) + 1.0, x(x1Idx, ::))
    val x2 = DenseMatrix.horzcat(DenseMatrix.zeros[Double](x2Idx.size, 1) + 2.0, x(x2Idx, ::))
    // The final row forms a third, single-point task.
    val x3 = DenseMatrix.horzcat(DenseMatrix.zeros[Double](1, 1) + 3.0, x(x.rows - 1 to x.rows - 1, ::))
    val allX = DenseMatrix.vertcat(x1, x2, x3)
    // NOTE(review): `y(19 to 19)` hard-codes row 19 while x3 uses x.rows - 1; this assumes the
    // fixture has exactly 20 rows — confirm against the CSV.
    val allY = DenseVector.vertcat(y(x1Idx).toDenseVector, y(x2Idx).toDenseVector, y(19 to 19))
    val u = DenseMatrix.horzcat(DenseMatrix.zeros[Double](y.size, 1) - 1.0, x)
    (allX, allY, u)
  }
}
} | danielkorzekwa/bayes-scala-gp | src/test/scala/dk/gp/hgpc/getHgpcTestData.scala | Scala | bsd-2-clause | 1,119 |
/*
* StageImpl.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape
package stream
package impl
import akka.stream.stage.GraphStage
import akka.stream.{Attributes, Shape}
abstract class StageImpl[S <: Shape](final protected val name: String)
  extends GraphStage[S] {
  // Includes the identity hash so individual stage instances are distinguishable in logs.
  override def toString = s"$name@${hashCode.toHexString}"
  // Names the Akka Streams stage after its toString for diagnostics.
  override def initialAttributes: Attributes = Attributes.name(toString)
  /** We ensure that we use the more specific implementation class,
    * because it registers with the control. */
  override def createLogic(attr: Attributes): NodeImpl[S]
}
| Sciss/FScape-next | core/shared/src/main/scala/de/sciss/fscape/stream/impl/StageImpl.scala | Scala | agpl-3.0 | 823 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.admin
import java.nio.ByteBuffer
import java.util.Properties
import java.util.concurrent.atomic.AtomicInteger
import kafka.common.KafkaException
import kafka.coordinator.{GroupOverview, GroupSummary, MemberSummary}
import kafka.utils.Logging
import org.apache.kafka.clients._
import org.apache.kafka.clients.consumer.internals.{ConsumerNetworkClient, ConsumerProtocol, RequestFuture, SendFailedException}
import org.apache.kafka.common.config.ConfigDef.{Importance, Type}
import org.apache.kafka.common.config.{AbstractConfig, ConfigDef}
import org.apache.kafka.common.errors.DisconnectException
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.network.Selector
import org.apache.kafka.common.protocol.types.Struct
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.requests._
import org.apache.kafka.common.utils.{SystemTime, Time, Utils}
import org.apache.kafka.common.{Cluster, Node, TopicPartition}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/**
 * Blocking administrative client used by command-line tools: looks up group
 * coordinators, lists and describes (consumer) groups on the brokers reachable
 * from `bootstrapBrokers`. All calls block until a response arrives or the
 * request timeout elapses.
 */
class AdminClient(val time: Time,
                  val requestTimeoutMs: Int,
                  val client: ConsumerNetworkClient,
                  val bootstrapBrokers: List[Node]) extends Logging {
  // Sends `request` to `target` and blocks for the response, retrying only on
  // send failures (SendFailedException) until `requestTimeoutMs` has elapsed.
  private def send(target: Node,
                   api: ApiKeys,
                   request: AbstractRequest): Struct = {
    var now = time.milliseconds()
    val deadline = now + requestTimeoutMs
    var future: RequestFuture[ClientResponse] = null
    do {
      future = client.send(target, api, request)
      client.poll(future)
      if (future.succeeded())
        return future.value().responseBody()
      now = time.milliseconds()
    } while (now < deadline && future.exception().isInstanceOf[SendFailedException])
    throw future.exception()
  }
  // Tries each bootstrap broker in order until one accepts the request;
  // throws a RuntimeException if every broker fails.
  private def sendAnyNode(api: ApiKeys, request: AbstractRequest): Struct = {
    bootstrapBrokers.foreach {
      case broker =>
        try {
          return send(broker, api, request)
        } catch {
          case e: Exception =>
            debug(s"Request ${api} failed against node ${broker}", e)
        }
    }
    throw new RuntimeException(s"Request ${api} failed on brokers ${bootstrapBrokers}")
  }
  // Asks any bootstrap broker for the coordinator node of the given group;
  // throws if the response carries an error code.
  private def findCoordinator(groupId: String): Node = {
    val request = new GroupCoordinatorRequest(groupId)
    val responseBody = sendAnyNode(ApiKeys.GROUP_COORDINATOR, request)
    val response = new GroupCoordinatorResponse(responseBody)
    Errors.forCode(response.errorCode()).maybeThrow()
    response.node()
  }
  // Lists the groups hosted by a single broker.
  def listGroups(node: Node): List[GroupOverview] = {
    val responseBody = send(node, ApiKeys.LIST_GROUPS, new ListGroupsRequest())
    val response = new ListGroupsResponse(responseBody)
    Errors.forCode(response.errorCode()).maybeThrow()
    response.groups().map(group => GroupOverview(group.groupId(), group.protocolType())).toList
  }
  // Discovers the full broker set via a metadata request to any bootstrap broker.
  // Partial metadata errors are logged at debug level but do not fail the call.
  private def findAllBrokers(): List[Node] = {
    val request = new MetadataRequest(List[String]())
    val responseBody = sendAnyNode(ApiKeys.METADATA, request)
    val response = new MetadataResponse(responseBody)
    if (!response.errors().isEmpty)
      debug(s"Metadata request contained errors: ${response.errors()}")
    response.cluster().nodes().asScala.toList
  }
  // Lists groups on every broker in the cluster; a broker that cannot be
  // reached contributes an empty list rather than failing the whole call.
  def listAllGroups(): Map[Node, List[GroupOverview]] = {
    findAllBrokers.map {
      case broker =>
        broker -> {
          try {
            listGroups(broker)
          } catch {
            case e: Exception =>
              debug(s"Failed to find groups from broker ${broker}", e)
              List[GroupOverview]()
          }
        }
    }.toMap
  }
  // Same as listAllGroups, restricted to groups using the consumer protocol.
  def listAllConsumerGroups(): Map[Node, List[GroupOverview]] = {
    listAllGroups().mapValues { groups =>
      groups.filter(_.protocolType == ConsumerProtocol.PROTOCOL_TYPE)
    }
  }
  def listAllGroupsFlattened(): List[GroupOverview] = {
    listAllGroups.values.flatten.toList
  }
  def listAllConsumerGroupsFlattened(): List[GroupOverview] = {
    listAllGroupsFlattened.filter(_.protocolType == ConsumerProtocol.PROTOCOL_TYPE)
  }
  // Fetches the full group metadata (state, protocol, members) from the
  // group's coordinator. Throws KafkaException if the coordinator returned
  // no metadata for the group.
  def describeGroup(groupId: String): GroupSummary = {
    val coordinator = findCoordinator(groupId)
    val responseBody = send(coordinator, ApiKeys.DESCRIBE_GROUPS, new DescribeGroupsRequest(List(groupId).asJava))
    val response = new DescribeGroupsResponse(responseBody)
    val metadata = response.groups().get(groupId)
    if (metadata == null)
      throw new KafkaException(s"Response from broker contained no metadata for group ${groupId}")
    Errors.forCode(metadata.errorCode()).maybeThrow()
    val members = metadata.members().map { member =>
      val metadata = Utils.readBytes(member.memberMetadata())
      val assignment = Utils.readBytes(member.memberAssignment())
      MemberSummary(member.memberId(), member.clientId(), member.clientHost(), metadata, assignment)
    }.toList
    GroupSummary(metadata.state(), metadata.protocolType(), metadata.protocol(), members)
  }
  // Per-member view of a consumer group: identity plus assigned partitions.
  case class ConsumerSummary(memberId: String,
                             clientId: String,
                             clientHost: String,
                             assignment: List[TopicPartition])
  // Describes a consumer group. Returns an empty list for a dead group,
  // member assignments only when the group is Stable, and throws if the
  // group does not use the consumer protocol.
  def describeConsumerGroup(groupId: String): List[ConsumerSummary] = {
    val group = describeGroup(groupId)
    if (group.state == "Dead")
      return List.empty[ConsumerSummary]
    if (group.protocolType != ConsumerProtocol.PROTOCOL_TYPE)
      throw new IllegalArgumentException(s"Group ${groupId} with protocol type '${group.protocolType}' is not a valid consumer group")
    if (group.state == "Stable") {
      group.members.map { member =>
        val assignment = ConsumerProtocol.deserializeAssignment(ByteBuffer.wrap(member.assignment))
        new ConsumerSummary(member.memberId, member.clientId, member.clientHost, assignment.partitions().asScala.toList)
      }
    } else {
      List.empty
    }
  }
  // Releases the underlying network client.
  def close() {
    client.close()
  }
}
/**
 * Factory for [[AdminClient]]: builds the full network stack (Selector,
 * NetworkClient, ConsumerNetworkClient) from a small configuration.
 */
object AdminClient {
  // Defaults for the underlying NetworkClient; these are not user-configurable.
  val DefaultConnectionMaxIdleMs = 9 * 60 * 1000
  val DefaultRequestTimeoutMs = 5000
  val DefaultMaxInFlightRequestsPerConnection = 100
  val DefaultReconnectBackoffMs = 50
  val DefaultSendBufferBytes = 128 * 1024
  val DefaultReceiveBufferBytes = 32 * 1024
  val DefaultRetryBackoffMs = 100
  // Gives each created client a unique client id suffix.
  val AdminClientIdSequence = new AtomicInteger(1)
  // Minimal config definition: bootstrap servers plus security/SSL/SASL support.
  // NOTE: BOOSTRAP_SERVERS_DOC (sic) is the actual constant name in the Kafka API.
  val AdminConfigDef = {
    val config = new ConfigDef()
      .define(
        CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG,
        Type.LIST,
        Importance.HIGH,
        CommonClientConfigs.BOOSTRAP_SERVERS_DOC)
      .define(
        CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
        ConfigDef.Type.STRING,
        CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL,
        ConfigDef.Importance.MEDIUM,
        CommonClientConfigs.SECURITY_PROTOCOL_DOC)
      .withClientSslSupport()
      .withClientSaslSupport()
    config
  }
  class AdminConfig(originals: Map[_,_]) extends AbstractConfig(AdminConfigDef, originals, false)
  // Convenience constructor for a plaintext connection to a single broker URL.
  def createSimplePlaintext(brokerUrl: String): AdminClient = {
    val config = Map(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG -> brokerUrl)
    create(new AdminConfig(config))
  }
  def create(props: Properties): AdminClient = create(props.asScala.toMap)
  def create(props: Map[String, _]): AdminClient = create(new AdminConfig(props))
  // Wires up the full client stack from the given config.
  def create(config: AdminConfig): AdminClient = {
    val time = new SystemTime
    val metrics = new Metrics(time)
    val metadata = new Metadata
    val channelBuilder = ClientUtils.createChannelBuilder(config.values())
    val brokerUrls = config.getList(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
    val brokerAddresses = ClientUtils.parseAndValidateAddresses(brokerUrls)
    val bootstrapCluster = Cluster.bootstrap(brokerAddresses)
    // Seed the metadata with the bootstrap cluster so requests can be routed
    // before the first real metadata refresh.
    metadata.update(bootstrapCluster, 0)
    val selector = new Selector(
      DefaultConnectionMaxIdleMs,
      metrics,
      time,
      "admin",
      Map[String, String](),
      channelBuilder)
    val networkClient = new NetworkClient(
      selector,
      metadata,
      "admin-" + AdminClientIdSequence.getAndIncrement(),
      DefaultMaxInFlightRequestsPerConnection,
      DefaultReconnectBackoffMs,
      DefaultSendBufferBytes,
      DefaultReceiveBufferBytes,
      DefaultRequestTimeoutMs,
      time)
    val highLevelClient = new ConsumerNetworkClient(
      networkClient,
      metadata,
      time,
      DefaultRetryBackoffMs)
    new AdminClient(
      time,
      DefaultRequestTimeoutMs,
      highLevelClient,
      bootstrapCluster.nodes().asScala.toList)
  }
}
| racker/kafka | core/src/main/scala/kafka/admin/AdminClient.scala | Scala | apache-2.0 | 9,360 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.graph.export
import org.scalatest.FreeSpec
/**
* Created by fpin on 12/5/16.
*/
class AlignmentTest extends FreeSpec {
  "toString must be correct" in {
    // Each alignment constant must render as its own name.
    val expectations = Seq(
      Alignment.CENTER -> "CENTER",
      Alignment.LEFT -> "LEFT",
      Alignment.RIGHT -> "RIGHT"
    )
    for ((alignment, expectedName) <- expectations) {
      assert(alignment.toString == expectedName)
    }
  }
}
| flaminem/flamy | src/test/scala/com/flaminem/flamy/graph/export/AlignmentTest.scala | Scala | apache-2.0 | 908 |
package nl.rabobank.oss.rules.dsl.core
import nl.rabobank.oss.rules.dsl.core.TableSelectorGlossary._
import nl.rabobank.oss.rules.dsl.nl.grammar.Table
import nl.rabobank.oss.rules.utils.InternalBerekeningenTester
/**
 * Verifies that a [[Table]] fact can be looked up via X/Y index facts and that
 * the value found ends up in the result fact of the derivation under test.
 */
class TableSelectorTest extends InternalBerekeningenTester(new TableSelectorBerekening) {
  // Stub table that ignores its coordinates and always yields the same value.
  val simpleTable = new Table[String, Int, Int] {
    override def get(x: Int, y: Int): String = "Hello World"
  }
  // "gegeven" = given (inputs), "verwacht" = expects (outputs).
  test("eenvoudige Table Test") gegeven (
    IndexX is 1,
    IndexY is 1,
    TableFact is simpleTable
  ) verwacht (
    ResultString is "Hello World"
  )
}
| scala-rules/scala-rules | engine/src/test/scala/nl/rabobank/oss/rules/dsl/core/TableSelectorTest.scala | Scala | mit | 583 |
// scalac: -Xfatal-warnings
//
sealed trait Foo // sealed so the compiler can check match exhaustiveness
sealed trait Bar extends Foo // lower bound used by W's type parameter
sealed trait W[T >: Bar <: Foo] // T is bounded between Bar and Foo
case class X() extends W[Foo]
case class XX() extends W[Bar] // deliberately omitted from the match in Main.f1
case class Y() extends W[Bar]
case class Z[T >: Bar <: Foo](
  z1: W[T]
) extends W[T]
object Main {
  // should warn for not including XX()
  def f1(w: W[Bar]): Int = { // non-exhaustive on purpose; -Xfatal-warnings makes this a test failure condition
    w match {
      // case XX() => 2
      case Y() => 1
      case Z(z) => f1(z)
    }
  }
}
| scala/scala | test/files/neg/t3683a.scala | Scala | apache-2.0 | 436 |
/*
* Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.internal.parboiled2
/**
 * Exercises parboiled2's error reporting: each case feeds invalid input to a
 * rule and asserts the exact formatted error message (location marker, and the
 * list of rule traces that mismatched at the error position). The expected
 * strings are byte-exact; do not reformat them.
 */
class ErrorReportingSpec extends TestParserSpec {
  "The Parser should properly report errors" >> {
    // Basic coverage: char literals, repetition, char classes and alternatives.
    "example 1" in new TestParser0 {
      import CharPredicate.UpperAlpha
      val hex = CharPredicate.UpperHexLetter
      def targetRule = rule {
        'a' ~ oneOrMore('b') ~ anyOf("cde") ~ ("fgh" | CharPredicate.Digit | hex | UpperAlpha) ~ noneOf("def") ~ EOI
      }
      "" must beMismatchedWithErrorMsg(
        """Unexpected end of input, expected targetRule (line 1, column 1):
          |
          |^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ 'a'
          |""")
      "ax" must beMismatchedWithErrorMsg(
        """Invalid input 'x', expected 'b' (line 1, column 2):
          |ax
          | ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ + / 'b'
          |""")
      "abx" must beMismatchedWithErrorMsg(
        """Invalid input 'x', expected 'b' or [cde] (line 1, column 3):
          |abx
          |  ^
          |
          |2 rules mismatched at error location:
          |  /targetRule/ +:-1 / 'b'
          |  /targetRule/ [cde]
          |""")
      "abcx" must beMismatchedWithErrorMsg(
        """Invalid input 'x', expected 'f', Digit, hex or UpperAlpha (line 1, column 4):
          |abcx
          |   ^
          |
          |4 rules mismatched at error location:
          |  /targetRule/ | / "fgh" / 'f'
          |  /targetRule/ | / Digit:<CharPredicate>
          |  /targetRule/ | / hex:<CharPredicate>
          |  /targetRule/ | / UpperAlpha:<CharPredicate>
          |""")
      "abcfghe" must beMismatchedWithErrorMsg(
        """Invalid input 'e', expected [^def] (line 1, column 7):
          |abcfghe
          |      ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ [^def]
          |""")
    }
    "for rules with negative syntactic predicates" in new TestParser0 {
      def targetRule = rule { (!"a" ~ ANY | 'z') ~ !foo ~ EOI }
      def foo = rule { "bcd" }
      "a" must beMismatchedWithErrorMsg(
        """Invalid input 'a', expected !"a" or 'z' (line 1, column 1):
          |a
          |^
          |
          |2 rules mismatched at error location:
          |  /targetRule/ | / !"a"
          |  /targetRule/ | / 'z'
          |""")
      "xbcd" must beMismatchedWithErrorMsg(
        """Invalid input "bcd", expected !foo (line 1, column 2):
          |xbcd
          | ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ !foo
          |""")
    }
    // Backtick identifiers must appear verbatim in the rule traces.
    "for rules with backtick identifiers" in new TestParser0 {
      val `this*that` = CharPredicate.Alpha
      def targetRule = rule { `foo-bar` ~ `this*that` ~ `#hash#` ~ EOI }
      def `foo-bar` = 'x'
      def `#hash#` = rule { '#' }
      "a" must beMismatchedWithErrorMsg(
        """Invalid input 'a', expected targetRule (line 1, column 1):
          |a
          |^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ 'x'
          |""")
      "x" must beMismatchedWithErrorMsg(
        """Unexpected end of input, expected this*that (line 1, column 2):
          |x
          | ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ this*that:<CharPredicate>
          |""")
      "xyz" must beMismatchedWithErrorMsg(
        """Invalid input 'z', expected #hash# (line 1, column 3):
          |xyz
          |  ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ /#hash#/ '#'
          |""")
    }
    "if the error location is the newline at line-end" in new TestParser0 {
      def targetRule = rule { "abc" ~ EOI }
      "ab\\nc" must beMismatchedWithErrorMsg(
        """Invalid input '\\n', expected 'c' (line 1, column 3):
          |ab
          |  ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ "abc":-2 / 'c'
          |""")
    }
    "for rules with an explicitly attached name" in new TestParser0 {
      def targetRule = namedRule("foo") { "abc".named("prefix") ~ ("def" | "xyz").named("suffix") ~ EOI }
      "abx" must beMismatchedWithErrorMsg(
        """Invalid input 'x', expected 'c' (line 1, column 3):
          |abx
          |  ^
          |
          |1 rule mismatched at error location:
          |  /foo/ prefix:"abc":-2 / 'c'
          |""")
      "abc-" must beMismatchedWithErrorMsg(
        """Invalid input '-', expected 'd' or 'x' (line 1, column 4):
          |abc-
          |   ^
          |
          |2 rules mismatched at error location:
          |  /foo/ suffix:| / "def" / 'd'
          |  /foo/ suffix:| / "xyz" / 'x'
          |""")
    }
    "for rules containing `fail`" in new TestParser0 {
      def targetRule = rule { "foo" | fail("something cool") }
      "x" must beMismatchedWithErrorMsg(
        """Invalid input 'x', expected something cool (line 1, column 1):
          |x
          |^
          |
          |1 rule mismatched at error location:
          |  something cool
          |""")
      "foo" must beMatched
    }
    // Only the first `errorTraceCollectionLimit` traces may be reported.
    "respecting the `errorTraceCollectionLimit`" in new TestParser0 {
      def targetRule = rule { "a" | 'b' | "c" | "d" | "e" | "f" }
      override def errorTraceCollectionLimit = 3
      "x" must beMismatchedWithErrorMsg(
        """Invalid input 'x', expected 'a', 'b' or 'c' (line 1, column 1):
          |x
          |^
          |
          |3 rules mismatched at error location:
          |  /targetRule/ | / "a" / 'a'
          |  /targetRule/ | / 'b'
          |  /targetRule/ | / "c" / 'c'
          |""")
    }
    "respecting `atomic` markers (example 1)" in new TestParser0 {
      def targetRule = rule { ch('-').* ~ (atomic("foo") | atomic("bar") | atomic("baz")) }
      "---fox" must beMismatchedWithErrorMsg(
        """Invalid input "fox", expected '-', "foo", "bar" or "baz" (line 1, column 4):
          |---fox
          |   ^
          |
          |4 rules mismatched at error location:
          |  /targetRule/ *:-3 / '-'
          |  /targetRule/ | / atomic / "foo":-2 / 'o'
          |  /targetRule/ | / atomic / "bar" / 'b'
          |  /targetRule/ | / atomic / "baz" / 'b'
          |""")
    }
    "respecting `atomic` markers (example 2)" in new TestParser0 {
      def targetRule = rule { atomic(ch('a') | 'b') }
      "c" must beMismatchedWithErrorMsg(
        """Invalid input 'c', expected targetRule (line 1, column 1):
          |c
          |^
          |
          |2 rules mismatched at error location:
          |  /targetRule/ atomic / | / 'a'
          |  /targetRule/ atomic / | / 'b'
          |""")
    }
    "respecting `quiet` markers" in new TestParser0 {
      def targetRule = rule { "abc" ~ (quiet("dxy") | "def") }
      // quiet rule mismatch must be suppressed
      "abcd-" must beMismatchedWithErrorMsg(
        """Invalid input '-', expected 'e' (line 1, column 5):
          |abcd-
          |    ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ |:-1 / "def":-1 / 'e'
          |""")
      // since the error location is only reached by a quiet rule we need to report it
      "abcdx" must beMismatchedWithErrorMsg(
        """Unexpected end of input, expected 'y' (line 1, column 6):
          |abcdx
          |     ^
          |
          |1 rule mismatched at error location:
          |  /targetRule/ |:-2 / quiet:-2 / "dxy":-2 / 'y'
          |""")
    }
    "expanding tabs as configured" in new TestParser0 {
      def targetRule = rule { ch('\\t').* ~ (atomic("foo") | atomic("bar") | atomic("baz")) }
      override def errorFormatter = new ErrorFormatter(expandTabs = 4, showTraces = true)
      "\\t\\t\\tfox\\t\\tbar" must beMismatchedWithErrorMsg(
        """Invalid input "fox", expected '\\t', "foo", "bar" or "baz" (line 1, column 4):
          |            fox        bar
          |            ^
          |
          |4 rules mismatched at error location:
          |  /targetRule/ *:-3 / '\\t'
          |  /targetRule/ | / atomic / "foo":-2 / 'o'
          |  /targetRule/ | / atomic / "bar" / 'b'
          |  /targetRule/ | / atomic / "baz" / 'b'
          |""")
    }
  }
}
| ZizhengTai/http4s | parboiled2/src/test/scala/org/http4s/internal/parboiled2/ErrorReportingSpec.scala | Scala | apache-2.0 | 8,890 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.test
import play.api._
import play.api.inject.guice.GuiceApplicationBuilder
import play.core.server._
import scala.util.control.NonFatal
/**
* A test web server.
*
* @param config The server configuration.
* @param application The Application to load in this server.
* @param serverProvider The type of server to use. If not provided, uses Play's default provider.
*/
case class TestServer(
    config: ServerConfig,
    application: Application,
    serverProvider: Option[ServerProvider]) {

  // The running process, or null when the server is stopped.
  private var testServerProcess: TestServerProcess = _

  /**
   * Starts this server. Fails if the server is already running.
   *
   * Any non-fatal startup failure is printed (so test output shows the root
   * cause) and rethrown wrapped in a RuntimeException.
   */
  def start(): Unit = {
    if (testServerProcess != null) {
      sys.error("Server already started!")
    }
    try {
      testServerProcess = TestServer.start(serverProvider, config, application)
    } catch {
      case NonFatal(t) =>
        // Side-effecting call: keep the explicit parentheses.
        t.printStackTrace()
        throw new RuntimeException(t)
    }
  }

  /**
   * Stops this server. Safe to call when the server is not running.
   */
  def stop(): Unit = {
    if (testServerProcess != null) {
      val shuttingDownProcess = testServerProcess
      testServerProcess = null
      shuttingDownProcess.shutdown()
    }
  }

  /**
   * The port that the server is running on.
   *
   * @throws IllegalStateException if no HTTP port was configured.
   */
  def port: Int = config.port.getOrElse(throw new IllegalStateException("No HTTP port defined"))
}
object TestServer {

  /**
   * A test web server.
   *
   * @param port HTTP port to bind on.
   * @param application The Application to load in this server.
   * @param sslPort HTTPS port to bind on.
   * @param serverProvider The type of server to use. If not provided, uses Play's default provider.
   */
  def apply(
    port: Int,
    application: Application = GuiceApplicationBuilder().build(),
    sslPort: Option[Int] = None,
    serverProvider: Option[ServerProvider] = None) = new TestServer(
    ServerConfig(port = Some(port), sslPort = sslPort, mode = Mode.Test,
      rootDir = application.path), application, serverProvider
  )

  /**
   * Start a TestServer with the given config and application. To stop it,
   * call `shutdown` on the returned TestServerProcess.
   */
  private[play] def start(
    testServerProvider: Option[ServerProvider],
    config: ServerConfig,
    application: Application): TestServerProcess = {
    val process = new TestServerProcess
    // Prefer the explicitly supplied provider; otherwise resolve one from
    // the server configuration.
    val serverProvider: ServerProvider = {
      testServerProvider
    } getOrElse {
      ServerProvider.fromConfiguration(process.classLoader, config.configuration)
    }
    Play.start(application)
    val server = serverProvider.createServer(config, application)
    // Ensure the server is stopped when the mock process shuts down.
    process.addShutdownHook { server.stop() }
    process
  }
}
/**
* A mock system process for a TestServer to run within. A ServerProcess
* can mock command line arguments, System properties, a ClassLoader,
* System.exit calls and shutdown hooks.
*
* When the process is finished, call `shutdown()` to run all registered
* shutdown hooks.
*/
private[play] class TestServerProcess extends ServerProcess {

  // Registered shutdown hooks, kept in registration order.
  private var registeredHooks: Seq[() => Unit] = Seq.empty

  override def addShutdownHook(hook: => Unit) = {
    registeredHooks = registeredHooks :+ (() => hook)
  }

  /** Runs every registered hook in registration order. */
  def shutdown(): Unit = {
    registeredHooks.foreach(_.apply())
  }

  override def classLoader = getClass.getClassLoader
  override def args = Seq()
  override def properties = System.getProperties
  override def pid = None
  override def exit(message: String, cause: Option[Throwable] = None, returnCode: Int = -1): Nothing = {
    throw new TestServerExitException(message, cause, returnCode)
  }
}
// Thrown by TestServerProcess.exit in place of terminating the JVM, so tests
// can observe the exit message, cause and return code.
private[play] case class TestServerExitException(
    message: String,
    cause: Option[Throwable] = None,
    returnCode: Int = -1) extends Exception(s"Exit with $message, $cause, $returnCode", cause.orNull)
| ktoso/playframework | framework/src/play-test/src/main/scala/play/api/test/TestServer.scala | Scala | apache-2.0 | 3,773 |
package org.automanlang.core.question
import org.automanlang.core.AutomanAdapter
import org.automanlang.core.answer._
import org.automanlang.core.info.QuestionType
import org.automanlang.core.policy.aggregation.BootstrapEstimationPolicy
import org.automanlang.core.policy.price.MLEPricePolicy
import org.automanlang.core.policy.timeout.DoublingTimeoutPolicy
import org.automanlang.core.question.confidence._
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * A question whose answer is a numeric estimate, aggregated from worker
 * responses via bootstrap estimation. Wires the estimation-specific answer,
 * outcome and policy types into the generic Question framework.
 */
abstract class EstimationQuestion extends Question {
  type A = Double
  type AA = AbstractEstimate
  type O = EstimationOutcome
  type AP = BootstrapEstimationPolicy
  type PP = MLEPricePolicy
  type TP = DoublingTimeoutPolicy
  // Target confidence level for the returned interval (default 95%).
  protected var _confidence: Double = 0.95
  protected var _confidence_interval: ConfidenceInterval = UnconstrainedCI()
  // Number of responses to gather before the first aggregation attempt.
  protected var _default_sample_size: Int = 12
  protected var _estimator: Seq[Double] => Double = {
    // by default, use the mean
    ds => ds.sum / ds.length
  }
  // Optional hard bounds on acceptable answers; None means unbounded.
  protected var _min_value: Option[Double] = None
  protected var _max_value: Option[Double] = None
  def confidence_=(c: Double) { _confidence = c }
  def confidence: Double = _confidence
  def confidence_interval_=(ci: ConfidenceInterval) { _confidence_interval = ci }
  def confidence_interval: ConfidenceInterval = _confidence_interval
  def default_sample_size: Int = _default_sample_size
  def default_sample_size_=(n: Int) { _default_sample_size = n }
  def estimator: Seq[Double] => Double = _estimator
  def estimator_=(fn: Seq[Double] => Double) { _estimator = fn }
  // Bounds expressed as Doubles: absent bounds map to +/- infinity.
  def max_value: Double = _max_value match {
    case Some(v) => v
    case None => Double.PositiveInfinity
  }
  def max_value_=(max: Double) { _max_value = Some(max) }
  def min_value: Double = _min_value match {
    case Some(v) => v
    case None => Double.NegativeInfinity
  }
  def min_value_=(min: Double) { _min_value = Some(min) }
  protected[automanlang] def getQuestionType = QuestionType.EstimationQuestion
  protected[automanlang] def getOutcome(adapter: AutomanAdapter) : O = {
    EstimationOutcome(this, schedulerFuture(adapter))
  }
  // Reuses a previous outcome when its confidence already meets this question's
  // target; otherwise restarts the scheduler to gather more responses.
  protected[automanlang] def composeOutcome(o: O, adapter: AutomanAdapter) : O = {
    // unwrap future from previous Outcome
    val f = o.f map {
      case Estimate(value, low, high, cost, conf, id, dist) =>
        if (this.confidence <= conf) {
          Estimate(
            value,
            low,
            high,
            BigDecimal(0.00).setScale(2, math.BigDecimal.RoundingMode.FLOOR),
            conf,
            id,
            dist
          )
        } else {
          startScheduler(adapter)
        }
      case _ => startScheduler(adapter)
    }
    EstimationOutcome(this, f)
  }
  // private methods
  // Policy instances are either defaults or reflectively constructed from a
  // user-supplied policy class taking a Question constructor argument.
  override private[automanlang] def init_validation_policy(): Unit = {
    _validation_policy_instance = _validation_policy match {
      case None => new AP(this)
      case Some(policy) => policy.getConstructor(classOf[Question]).newInstance(this)
    }
  }
  override private[automanlang] def init_price_policy(): Unit = {
    _price_policy_instance = _price_policy match {
      case None => new PP(this)
      case Some(policy) => policy.getConstructor(classOf[Question]).newInstance(this)
    }
  }
  override private[automanlang] def init_timeout_policy(): Unit = {
    _timeout_policy_instance = _timeout_policy match {
      case None => new TP(this)
      case Some(policy) => policy.getConstructor(classOf[Question]).newInstance(this)
    }
  }
  override protected[automanlang] def prettyPrintAnswer(answer: Double): String = {
    answer.toString
  }
  protected[automanlang] def cloneWithConfidence(conf: Double) : EstimationQuestion
}
| dbarowy/AutoMan | libautoman/src/main/scala/org/automanlang/core/question/EstimationQuestion.scala | Scala | gpl-2.0 | 3,692 |
package org.opencoin.issuer
import scala.slick.driver.H2Driver.simple._
import Database.threadLocalSession
import java.util.Date
import java.net.URL
import org.opencoin.core.token.CDD
import org.opencoin.core.token.CDDCore
import org.opencoin.core.token.PublicRSAKey
import org.opencoin.issuer.TypeMappers._
object CDDTable extends Table[CDD]("CDD") {
//TODO Currently all CDDs are "latest". Define a method to add latest CDD and make existing CDDs not-latest.
//See https://groups.google.com/forum/?fromgroups=#!topic/scalaquery/qtsXlD_pytE
//This might help: http://stackoverflow.com/questions/13906684/scala-slick-method-i-can-not-understand-so-far
def latest = column[Boolean]("latest", O NotNull)
def protocol_version = column[URL]("protocol_version", O NotNull)
def cdd_location = column[URL]("cdd_location", O NotNull)
def issuer_cipher_suite = column[String]("issuer_cipher_suite", O NotNull)
def issuer_key_modulus = column[BigInt]("issuer_key_modulus", O NotNull)
def issuer_key_public_exponent = column[BigInt]("issuer_key_public_exponent", O NotNull)
def cdd_serial = column[Int]("cdd_serial", O PrimaryKey)
def cdd_signing_date = column[Date]("cdd_signing_date", O NotNull)
def cdd_expiry_date = column[Date]("cdd_expiry_date", O NotNull)
def currency_name = column[String]("currency_name", O NotNull)
def currency_divisor = column[Int]("currency_divisor", O NotNull)
def info_service_prio = column[List[Int]]("info_service_prio", O NotNull)
def info_service_url = column[List[URL]]("info_service_url", O NotNull)
def validation_service_prio = column[List[Int]]("validation_service_prio", O NotNull)
def validation_service_url = column[List[URL]]("validation_service_url", O NotNull)
def renewal_service_prio = column[List[Int]]("renewal_service_prio", O NotNull)
def renewal_service_url = column[List[URL]]("renewal_service_url", O NotNull)
def invalidation_service_prio = column[List[Int]]("invalidation_service_prio", O NotNull)
def invalidation_service_url = column[List[URL]]("invalidation_service_url", O NotNull)
def denominations = column[List[Int]]("denominations", O NotNull)
def additional_info = column[String]("additional_info")
def signature = column[BigInt]("signature", O NotNull)
//def * = latest ~ protocol_version ~ cdd_location ~ issuer_cipher_suite ~ issuer_key_modulus ~ issuer_key_public_exponent ~ cdd_serial ~ cdd_signing_date ~ cdd_expiry_date ~ currency_name ~ currency_divisor ~ info_service_prio ~ info_service_url ~ validation_service_prio ~ validation_service_url ~ renewal_service_prio ~ renewal_service_url ~ invalidation_service_prio ~ invalidation_service_url ~ denominations ~ additional_info ~ signature <> (FlatCDD, FlatCDD.unapply _)
//With latest:
/*
def * = latest ~ protocol_version ~ cdd_location ~ issuer_cipher_suite ~ issuer_key_modulus ~ issuer_key_public_exponent ~ cdd_serial ~ cdd_signing_date ~ cdd_expiry_date ~ currency_name ~ currency_divisor ~ info_service_prio ~ info_service_url ~ validation_service_prio ~ validation_service_url ~ renewal_service_prio ~ renewal_service_url ~ invalidation_service_prio ~ invalidation_service_url ~ denominations ~ additional_info ~ signature <>
//apply:
((latest,
protocol_version,
cdd_location,
issuer_cipher_suite,
issuer_key_modulus,
issuer_key_public_exponent,
cdd_serial,
cdd_signing_date,
cdd_expiry_date,
currency_name,
currency_divisor,
info_service_prio,
info_service_url,
validation_service_prio,
validation_service_url,
renewal_service_prio,
renewal_service_url,
invalidation_service_prio,
invalidation_service_url,
denominations,
additional_info,
signature
)=>
(latest,
CDD("cdd certificate",
CDDCore(
"cdd",
protocol_version,
cdd_location,
issuer_cipher_suite,
PublicRSAKey(
issuer_key_modulus,
issuer_key_public_exponent),
cdd_serial,
cdd_signing_date,
cdd_expiry_date,
currency_name,
currency_divisor,
info_service_prio.zip(info_service_url),
validation_service_prio.zip(validation_service_url),
renewal_service_prio.zip(renewal_service_url),
invalidation_service_prio.zip(invalidation_service_url),
denominations,
additional_info),
signature)),
//unapply:
(f:(Boolean, CDD)) => Some((
f._1,
f._2.cdd.protocol_version,
f._2.cdd.cdd_location,
f._2.cdd.issuer_cipher_suite,
f._2.cdd.issuer_public_master_key.modulus,
f._2.cdd.issuer_public_master_key.public_exponent,
f._2.cdd.cdd_serial,
f._2.cdd.cdd_signing_date,
f._2.cdd.cdd_expiry_date,
f._2.cdd.currency_name,
f._2.cdd.currency_divisor,
f._2.cdd.info_service.map(x => x._1),
f._2.cdd.info_service.map(x => x._2),
f._2.cdd.validation_service.map(x => x._1),
f._2.cdd.validation_service.map(x => x._2),
f._2.cdd.renewal_service.map(x => x._1),
f._2.cdd.renewal_service.map(x => x._2),
f._2.cdd.invalidation_service.map(x => x._1),
f._2.cdd.invalidation_service.map(x => x._2),
f._2.cdd.denominations,
f._2.cdd.additional_info,
f._2.signature))
) */
//Without latest:
/* def * =
protocol_version ~
cdd_location ~
issuer_cipher_suite ~
issuer_key_modulus ~
issuer_key_public_exponent ~
cdd_serial ~ cdd_signing_date ~
cdd_expiry_date ~ currency_name ~
currency_divisor ~
info_service_prio ~
info_service_url ~
validation_service_prio ~
validation_service_url ~
renewal_service_prio ~
renewal_service_url ~
invalidation_service_prio ~
invalidation_service_url ~
denominations ~
additional_info ~
signature <> (
//From a row to a CDD object (previously "apply"):
(protocol_version,
cdd_location,
issuer_cipher_suite,
issuer_key_modulus,
issuer_key_public_exponent,
cdd_serial,
cdd_signing_date,
cdd_expiry_date,
currency_name,
currency_divisor,
info_service_prio,
info_service_url,
validation_service_prio,
validation_service_url,
renewal_service_prio,
renewal_service_url,
invalidation_service_prio,
invalidation_service_url,
denominations,
additional_info,
signature)
=> CDD("cdd certificate",
CDDCore(
"cdd",
protocol_version,
cdd_location,
issuer_cipher_suite,
PublicRSAKey(
issuer_key_modulus,
issuer_key_public_exponent),
cdd_serial,
cdd_signing_date,
cdd_expiry_date,
currency_name,
currency_divisor,
info_service_prio.zip(info_service_url),
validation_service_prio.zip(validation_service_url),
renewal_service_prio.zip(renewal_service_url),
invalidation_service_prio.zip(invalidation_service_url),
denominations,
additional_info),
signature),
//From a CDD object to a row (previously "unapply"):
(x:CDD) => Some(
x.cdd.protocol_version,
x.cdd.cdd_location,
x.cdd.issuer_cipher_suite,
x.cdd.issuer_public_master_key.modulus,
x.cdd.issuer_public_master_key.public_exponent,
x.cdd.cdd_serial,
x.cdd.cdd_signing_date,
x.cdd.cdd_expiry_date,
x.cdd.currency_name,
x.cdd.currency_divisor,
x.cdd.info_service.map(x => x._1),
x.cdd.info_service.map(x => x._2),
x.cdd.validation_service.map(x => x._1),
x.cdd.validation_service.map(x => x._2),
x.cdd.renewal_service.map(x => x._1),
x.cdd.renewal_service.map(x => x._2),
x.cdd.invalidation_service.map(x => x._1),
x.cdd.invalidation_service.map(x => x._2),
x.cdd.denominations,
x.cdd.additional_info,
x.signature)
)
*/
def * =
protocol_version ~
cdd_location ~
issuer_cipher_suite ~
issuer_key_modulus ~
issuer_key_public_exponent ~
cdd_serial ~ cdd_signing_date ~
cdd_expiry_date ~ currency_name ~
currency_divisor ~
info_service_prio ~
info_service_url ~
validation_service_prio ~
validation_service_url ~
renewal_service_prio ~
renewal_service_url ~
invalidation_service_prio ~
invalidation_service_url ~
denominations ~
additional_info ~
signature <> (CDD.fromRow _, CDD.toRow _)
//See https://groups.google.com/forum/?fromgroups=#!topic/scalaquery/x5ZmHrOaDKo
// def insert = `type` ~ protocol_version ~ cdd_location ~ issuer_public_master_key ~ issuer_cipher_suite ~ cdd_serial ~ cdd_signing_date ~ cdd_expiry_date ~ currency_name ~ currency_divisor ~ info_service ~ validation_service ~ renewal_service ~ invalidation_service ~ denominations ~ additional_info <> (CDD, CDD.unapply _)
//def forInsert = first ~ last <> ({ (f, l) => User(0, f, l) }, { u:
//User => Some((u.first, u.last)) })
/* def getCdd(db: Database, serial: Int): Option[CDD] = db withSession { //s: Session =>
(for { b <- CDDTable if b.cdd_serial === serial} yield b).first.getCDD match {
case x: CDD => Some(x)
case _ => None
}
}
*/
  // Looks up a single CDD by its serial number.
  // Returns None when no row with that cdd_serial exists (uses firstOption,
  // unlike getLatestCdd below which throws on an empty result).
  def getCdd(db: Database, serial: Int): Option[CDD] = db withSession { //s: Session =>
    (for { b <- CDDTable if b.cdd_serial === serial} yield b).firstOption
  }
  //TODO This is a dummy method which returns the first record it retrieves.
  // Instead the latest CDD should be returned.
  // WARNING(review): `.first` throws when the table is empty — callers must
  // guarantee at least one CDD row exists before invoking this.
  def getLatestCdd(db: Database): CDD = db withSession { //s: Session =>
    (for { b <- CDDTable } yield b).first
    //(for { b <- CDDTable if b.latest === true} yield b).first
  }
}
| OpenCoin/opencoin-issuer-scala | src/main/scala/org/opencoin/issuer/CDDTable.scala | Scala | gpl-3.0 | 10,654 |
package sttp.client3
import org.scalajs.dom.experimental.{
AbortController,
BodyInit,
Fetch,
HttpMethod,
RequestCredentials,
RequestInit,
RequestMode,
RequestRedirect,
ResponseInit,
ResponseType,
Headers => JSHeaders,
Request => FetchRequest,
Response => FetchResponse
}
import org.scalajs.dom.raw._
import org.scalajs.dom.{FormData, WebSocket => JSWebSocket}
import sttp.capabilities.{Effect, Streams, WebSockets}
import sttp.client3.SttpClientException.ReadException
import sttp.client3.WebSocketImpl.BinaryType
import sttp.client3.dom.experimental.{FilePropertyBag, File => DomFile}
import sttp.client3.internal.ws.WebSocketEvent
import sttp.client3.internal.{SttpFile, _}
import sttp.client3.ws.{GotAWebSocketException, NotAWebSocketException}
import sttp.model._
import sttp.monad.MonadError
import sttp.monad.syntax._
import sttp.ws.{WebSocket, WebSocketFrame}
import java.nio.ByteBuffer
import scala.collection.immutable.Seq
import scala.concurrent.Promise
import scala.concurrent.duration.FiniteDuration
import scala.scalajs.js
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.timers._
import scala.scalajs.js.typedarray._
object FetchOptions {
  /** Default fetch options: neither a credentials policy nor a request mode
    * is set, so the browser's own defaults apply.
    */
  val Default: FetchOptions = FetchOptions(None, None)
}
/** Options forwarded to the JavaScript `fetch` call.
  *
  * @param credentials optional credentials policy (`RequestCredentials`); `None` leaves the browser default
  * @param mode optional request mode (e.g. CORS behaviour); `None` leaves the browser default
  */
final case class FetchOptions(
    credentials: Option[RequestCredentials],
    mode: Option[RequestMode]
)
/** A backend that uses the `fetch` JavaScript api.
  *
  * Regular HTTP requests are executed through `Fetch.fetch`; web socket
  * requests use the browser-native `WebSocket`. Concrete subclasses supply
  * the effect type `F[_]`, the streaming implementation `S`, and the
  * future-to-effect bridge [[convertFromFuture]].
  *
  * @see
  * https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API
  */
abstract class AbstractFetchBackend[F[_], S <: Streams[S], P](
    options: FetchOptions,
    customizeRequest: FetchRequest => FetchRequest,
    monad: MonadError[F]
) extends SttpBackend[F, P] {
  override implicit def responseMonad: MonadError[F] = monad
  // Streaming capability implementation, provided by concrete subclasses.
  val streams: Streams[S]
  // Full capability set: backend-specific capabilities plus effects and web sockets.
  type PE = P with Effect[F] with WebSockets
  override def send[T, R >: PE](request: Request[T, R]): F[Response[T]] =
    if (request.isWebSocket) sendWebSocket(request) else sendRegular(request)
  // Executes a plain (non web socket) HTTP request via fetch.
  private def sendRegular[T, R >: PE](request: Request[T, R]): F[Response[T]] = {
    // https://stackoverflow.com/q/31061838/4094860
    // fetch has no built-in read timeout, so one is emulated by aborting the
    // request through an AbortController after the configured duration.
    val readTimeout = request.options.readTimeout
    val (signal, cancelTimeout) = readTimeout match {
      case timeout: FiniteDuration =>
        val controller = new AbortController()
        val signal = controller.signal
        val timeoutHandle = setTimeout(timeout) {
          controller.abort()
        }
        (Some(signal), () => clearTimeout(timeoutHandle))
      case _ =>
        (None, () => ())
    }
    val rheaders = new JSHeaders()
    request.headers.foreach { header =>
      // for multipart/form-data requests dom.FormData is responsible for setting the Content-Type header
      // as it will also compute and set the boundary for the different parts, so we have to leave it out here
      if (header.is(HeaderNames.ContentType) && header.value.toLowerCase.startsWith("multipart/")) {
        if (!header.value.toLowerCase.startsWith(MediaType.MultipartFormData.toString))
          throw new IllegalArgumentException("Multipart bodies other than multipart/form-data are not supported")
      } else {
        rheaders.set(header.name, header.value)
      }
    }
    val req = createBody(request.body).map { rbody =>
      // use manual so we can return a specific error instead of the generic "TypeError: Failed to fetch"
      val rredirect = if (request.options.followRedirects) RequestRedirect.follow else RequestRedirect.manual
      val rsignal = signal.orUndefined
      val requestInitStatic = new RequestInit() {
        this.method = request.method.method.asInstanceOf[HttpMethod]
        this.headers = rheaders
        this.body = rbody
        this.referrer = js.undefined
        this.referrerPolicy = js.undefined
        this.mode = options.mode.orUndefined
        this.credentials = options.credentials.orUndefined
        this.cache = js.undefined
        this.redirect = rredirect
        this.integrity = js.undefined
        this.keepalive = js.undefined
        this.signal = rsignal
        this.window = js.undefined
      }
      // the static RequestInit facade does not set all fields reliably, so the
      // dynamic view is used to force-set "signal" and "redirect"
      val requestInitDynamic = requestInitStatic.asInstanceOf[js.Dynamic]
      signal.foreach(s => requestInitDynamic.updateDynamic("signal")(s))
      requestInitDynamic.updateDynamic("redirect")(rredirect) // named wrong in RequestInit
      val requestInit = requestInitDynamic.asInstanceOf[RequestInit]
      new FetchRequest(request.uri.toString, requestInit)
    }
    val result = req
      .flatMap { r => convertFromFuture(Fetch.fetch(customizeRequest(r)).toFuture) }
      .flatMap { resp =>
        // an opaqueredirect response type is only produced when redirects are disabled
        if (resp.`type` == ResponseType.opaqueredirect) {
          responseMonad.error[FetchResponse](new RuntimeException("Unexpected redirect"))
        } else {
          responseMonad.unit(resp)
        }
      }
      .flatMap { resp =>
        val headers = convertResponseHeaders(resp.headers)
        val metadata = ResponseMetadata(StatusCode(resp.status), resp.statusText, headers)
        val body: F[T] = bodyFromResponseAs(request.response, metadata, Left(resp))
        body.map { b =>
          Response[T](
            body = b,
            code = StatusCode(resp.status),
            statusText = resp.statusText,
            headers = headers,
            history = Nil,
            request = request.onlyMetadata
          )
        }
      }
    addCancelTimeoutHook(result, cancelTimeout)
  }
  // Lets subclasses cancel the emulated read timeout once the effect completes.
  protected def addCancelTimeoutHook[T](result: F[T], cancel: () => Unit): F[T]
  // Converts the JS Headers iterator into sttp Header values, one per value.
  private def convertResponseHeaders(headers: JSHeaders): Seq[Header] = {
    headers
      .jsIterator()
      .toIterator
      .flatMap { hs =>
        // this will only ever be 2 but the types dont enforce that
        if (hs.length >= 2) {
          val name = hs(0)
          hs.toList.drop(1).map(v => Header(name, v))
        } else {
          Seq.empty
        }
      }
      .toList
  }
  // Converts an sttp request body into a fetch-compatible BodyInit (if any).
  private def createBody[R >: PE](body: RequestBody[R]): F[js.UndefOr[BodyInit]] = {
    body match {
      case NoBody =>
        responseMonad.unit(js.undefined) // skip
      case b: BasicRequestBody =>
        responseMonad.unit(writeBasicBody(b))
      case StreamBody(s) =>
        handleStreamBody(s.asInstanceOf[streams.BinaryStream])
      case mp: MultipartBody[_] =>
        val formData = new FormData()
        mp.parts.foreach { part =>
          val value = part.body match {
            case NoBody => Array[Byte]().toTypedArray.asInstanceOf[BodyInit]
            case body: BasicRequestBody => writeBasicBody(body)
            case StreamBody(_) => throw new IllegalArgumentException("Streaming multipart bodies are not supported")
            case MultipartBody(_) => throwNestedMultipartNotAllowed
          }
          // the only way to set the content type is to use a blob
          val blob =
            value match {
              case b: Blob => b
              case v =>
                new Blob(Array(v.asInstanceOf[js.Any]).toJSArray, BlobPropertyBag(part.contentType.orUndefined))
            }
          part.fileName match {
            case None => formData.append(part.name, blob)
            case Some(fileName) => formData.append(part.name, blob, fileName)
          }
        }
        responseMonad.unit(formData)
    }
  }
  // Serializes a non-streaming body; UTF-8 strings are passed through as-is,
  // everything else is converted to a typed array (or a DOM file).
  private def writeBasicBody(body: BasicRequestBody): BodyInit = {
    body match {
      case StringBody(b, encoding, _) =>
        if (encoding.compareToIgnoreCase(Utf8) == 0) b
        else b.getBytes(encoding).toTypedArray.asInstanceOf[BodyInit]
      case ByteArrayBody(b, _) =>
        b.toTypedArray.asInstanceOf[BodyInit]
      case ByteBufferBody(b, _) =>
        byteBufferToArray(b).toTypedArray.asInstanceOf[BodyInit]
      case InputStreamBody(is, _) =>
        toByteArray(is).toTypedArray.asInstanceOf[BodyInit]
      case FileBody(f, _) =>
        f.toDomFile
    }
  }
  // https://stackoverflow.com/questions/679298/gets-byte-array-from-a-bytebuffer-in-java
  // Drains the buffer's remaining bytes into a fresh array (advances position).
  private def byteBufferToArray(bb: ByteBuffer): Array[Byte] = {
    val b = new Array[Byte](bb.remaining())
    bb.get(b)
    b
  }
  // Opens a browser WebSocket and queues incoming events until the body
  // handler consumes them; completes only once the socket is open.
  private def sendWebSocket[T, R >: PE](request: Request[T, R]): F[Response[T]] = {
    val queue = new JSSimpleQueue[F, WebSocketEvent]
    val ws = new JSWebSocket(request.uri.toString)
    ws.binaryType = BinaryType
    val isOpen = Promise[Unit]()
    ws.onopen = (_: Event) => {
      isOpen.success(())
      queue.offer(WebSocketEvent.Open())
    }
    ws.onmessage = (event: MessageEvent) => queue.offer(toWebSocketEvent(event))
    ws.onerror = (_: Event) => {
      // the browser exposes no error details: fail the open promise if the
      // socket never opened, otherwise surface the error via the event queue
      val msg = "Something went wrong in web socket or it could not be opened"
      if (!isOpen.isCompleted) isOpen.failure(new ReadException(request, new RuntimeException(msg)))
      else queue.offer(WebSocketEvent.Error(new RuntimeException(msg)))
    }
    ws.onclose = (event: CloseEvent) => queue.offer(toWebSocketEvent(event))
    convertFromFuture(isOpen.future).flatMap { _ =>
      val webSocket = WebSocketImpl.newJSCoupledWebSocket(ws, queue)
      bodyFromResponseAs
        .apply(request.response, ResponseMetadata(StatusCode.Ok, "", request.headers), Right(webSocket))
        .map(Response.ok)
    }
  }
  // Maps an incoming message event (binary ArrayBuffer or text String) to a frame event.
  private def toWebSocketEvent(msg: MessageEvent): WebSocketEvent =
    msg.data match {
      case payload: ArrayBuffer =>
        val dv = new DataView(payload)
        val bytes = new Array[Byte](dv.byteLength)
        0 until dv.byteLength foreach { i => bytes(i) = dv.getInt8(i) }
        WebSocketEvent.Frame(WebSocketFrame.binary(bytes))
      case payload: String => WebSocketEvent.Frame(WebSocketFrame.text(payload))
      case _ => throw new RuntimeException(s"Unknown format of event.data ${msg.data}")
    }
  // Maps a close event to a Close frame carrying the status code and reason.
  private def toWebSocketEvent(close: CloseEvent): WebSocketEvent =
    WebSocketEvent.Frame(WebSocketFrame.Close(close.code, close.reason))
  // Implemented by streaming-capable subclasses to send a streaming request body.
  protected def handleStreamBody(s: streams.BinaryStream): F[js.UndefOr[BodyInit]]
  // Translates a fetch Response (or an opened web socket) into the user-requested body type.
  private lazy val bodyFromResponseAs = new BodyFromResponseAs[F, FetchResponse, WebSocket[F], streams.BinaryStream]() {
    override protected def withReplayableBody(
        response: FetchResponse,
        replayableBody: Either[Array[Byte], SttpFile]
    ): F[FetchResponse] = {
      val bytes = replayableBody match {
        case Left(byteArray) => byteArray
        case Right(_) => throw new IllegalArgumentException("Replayable file bodies are not supported")
      }
      // re-wrap the consumed bytes in a fresh Response so the body can be read again
      new FetchResponse(bytes.toTypedArray.asInstanceOf[BodyInit], response.asInstanceOf[ResponseInit]).unit
    }
    // body must still be drained even when ignored, to release the connection
    override protected def regularIgnore(response: FetchResponse): F[Unit] =
      convertFromFuture(response.arrayBuffer().toFuture).map(_ => ())
    override protected def regularAsByteArray(response: FetchResponse): F[Array[Byte]] =
      convertFromFuture(response.arrayBuffer().toFuture).map { ab => new Int8Array(ab).toArray }
    override protected def regularAsFile(response: FetchResponse, file: SttpFile): F[SttpFile] =
      convertFromFuture(response.arrayBuffer().toFuture)
        .map { ab =>
          SttpFile.fromDomFile(
            new DomFile(
              Array(ab.asInstanceOf[js.Any]).toJSArray,
              file.name,
              FilePropertyBag(`type` = file.toDomFile.`type`)
            )
          )
        }
    override protected def regularAsStream(response: FetchResponse): F[(streams.BinaryStream, () => F[Unit])] =
      handleResponseAsStream(response)
    override protected def handleWS[T](
        responseAs: WebSocketResponseAs[T, _],
        meta: ResponseMetadata,
        ws: WebSocket[F]
    ): F[T] =
      responseAs match {
        case ResponseAsWebSocket(f) =>
          f.asInstanceOf[(WebSocket[F], ResponseMetadata) => F[T]].apply(ws, meta)
        case ResponseAsWebSocketUnsafe() => ws.unit.asInstanceOf[F[T]]
        case ResponseAsWebSocketStream(_, pipe) =>
          compileWebSocketPipe(ws, pipe.asInstanceOf[streams.Pipe[WebSocketFrame.Data[_], WebSocketFrame]])
      }
    override protected def cleanupWhenNotAWebSocket(response: FetchResponse, e: NotAWebSocketException): F[Unit] =
      monad.unit(())
    override protected def cleanupWhenGotWebSocket(response: WebSocket[F], e: GotAWebSocketException): F[Unit] =
      monad.unit(response.close())
  }
  // Implemented by streaming-capable subclasses to expose the response as a stream.
  protected def handleResponseAsStream(response: FetchResponse): F[(streams.BinaryStream, () => F[Unit])]
  // Runs a user-supplied frame pipe against the open web socket.
  protected def compileWebSocketPipe(
      ws: WebSocket[F],
      pipe: streams.Pipe[WebSocketFrame.Data[_], WebSocketFrame]
  ): F[Unit]
  // The fetch api holds no pooled resources, so closing is a no-op.
  override def close(): F[Unit] = monad.unit(())
  // Bridges JS Futures into the backend's effect type F.
  implicit def convertFromFuture: ConvertFromFuture[F]
}
| softwaremill/sttp | core/src/main/scalajs/sttp/client3/AbstractFetchBackend.scala | Scala | apache-2.0 | 12,684 |
package net.cassite.jsonbind.views
import net.cassite.jsonbind.View
import play.api.libs.json.JsValue
/**
 * A trivial [[View]] wrapping an already-built [[JsValue]].
 *
 * Loading is a no-op, the view never reports itself as needing a refresh,
 * and parsing simply returns the wrapped JSON value unchanged.
 */
class JsValueView(val name: String, val jsValue: JsValue) extends View {
  override def parse(): JsValue = jsValue
  override def refresh(): Boolean = false
  override def load(): Unit = ()
}
object JsValueView {
  /** Factory mirroring the class constructor. */
  def apply(name: String, jsValue: JsValue): JsValueView =
    new JsValueView(name, jsValue)
}
package opennlp.scalabha.tree
import org.clapper.argot._
import collection.mutable.MutableList
import opennlp.scalabha.model._
import opennlp.scalabha.log.SimpleLogger
import java.io.{OutputStreamWriter, BufferedWriter}
// Parses bracketed (Penn-style) tree strings, e.g. "(S (NP (DT the)) (VP barks))",
// into TreeNode values. Usable both as a library (the apply overloads) and as a
// command-line tool (main), reading from a file or stdin.
object Parser {
  import ArgotConverters._
  val parser = new ArgotParser(this.getClass().getName, preUsage = Some("Version 0.0"))
  val help = parser.flag[Boolean](List("h", "help"), "print help")
  val input = parser.option[String](List("i", "input"), "FILE", "input inputFile to tokenize")
  // `log` reports warnings/errors to stderr; `noLog` is silent (library default).
  val log = new SimpleLogger(this.getClass().getName, SimpleLogger.WARN, new BufferedWriter(new OutputStreamWriter(System.err)))
  val noLog = new SimpleLogger(this.getClass().getName, SimpleLogger.NONE, new BufferedWriter(new OutputStreamWriter(System.err)))
  // Recursive-descent parse of one (sub)tree.
  // Returns Some((parsedNode, unconsumedRemainderOfLine)) or None on error.
  // `index` is the 1-based input line number (for messages); `prefix` indents
  // trace output to visualize recursion depth.
  def apply(index: Int, line: String, prefix: String, log: SimpleLogger): Option[(TreeNode, String)] = {
    log.trace("%sparsing:<<%s>>\\n".format(prefix, line))
    // groups: (optional open paren)(tag-or-token)(rest of line)
    val regex = """\\s*(\\(?)\\s*([^\\s)(]+)\\s*(.*)""".r
    // if (line.matches("""\\s*(\\(?)\\s*([^\\s)(]+)\\s*(.*)"""))
    line match {
      case regex(open, sym, rest) =>
        if (open != "") {
          // then we are parsing a full node.
          val name = sym
          var children = List[TreeNode]()
          var next: TreeNode = null
          var rest2 = rest
          // accumulate children until the matching close paren is next
          while (!rest2.matches("\\\\s*\\\\).*")) {
            if (rest2 == "") {
              log.err("Line %d: Missing closing paren in:<<%s>>\\n".format(index, line))
              return None
            }
            apply(index, rest2, "|\\t%s".format(prefix), log) match {
              case Some((a, b)) =>
                next = a
                rest2 = b
                children = children ::: List(next)
              case None => return None
            }
          }
          val cutoff = rest2.indexOf(')')
          // a node must have children, and if any child is a leaf token (Value)
          // it must be the only child: (TAG token)
          if (children.length == 0 || (children.length > 0 && children.map( _.isInstanceOf[Value] ).reduce(_ || _) && children.length != 1)){
            log.err("Line %d: A leaf node may only contain a tag and a token. I.e., (TAG token). Tree node %s fails this test.\\n".format(index, Node(name, children).getCanonicalString))
          }
          log.trace("%sresult: %s,\\"%s\\"\\n".format(prefix, Node(name, children), rest2.substring(cutoff + 1)))
          return Some((Node(name, children), rest2.substring(cutoff + 1)))
        } else {
          // then we are only looking at a value
          log.trace("%sresult: %s,\\"%s\\"\\n".format(prefix, Value(sym), rest))
          return Some((Value(sym), rest))
        }
      // NOTE(review): this case matches the *literal* string "\s*", not a
      // whitespace-only line (it is a string pattern, not a regex), so
      // empty/blank lines actually fall through to the case below — confirm
      // whether `case l if l.matches(...)` was intended.
      case "\\\\s*" =>
        log.err("Line %d: Got an empty input line\\n".format(index))
        return None
      case x =>
        log.err("Line %d: Could not parse input line <<%s>>\\n".format(index, x))
        return None
    }
  }
  // Parses a complete line into a tree, logging to the given logger.
  // Warns (but still succeeds) on a bare token or trailing whitespace;
  // fails with None on leftover non-whitespace text.
  def apply(index: Int, line: String, log: SimpleLogger): Option[TreeNode] = {
    apply(index, line, "", log) match {
      case Some((tree, "")) =>
        tree match {
          case Node(_, _) => ()
          case _ =>
            log.warn("Line %d: Top-level element is not a tree:<<%s>>\\n".format(index, line))
        }
        Some(tree)
      case Some((tree, leftover)) =>
        if (leftover.matches("\\\\s+")) {
          log.warn("Line %d: Please delete the extra whitespace found at the end of this line.\\n".format(index))
          Some(tree)
        } else {
          log.err("Line %d: Malformed tree:<<%s>>\\tleftover text:<<%s>>\\n".format(index, line, leftover))
          None
        }
      case None =>
        None
    }
  }
  // Silent variant: parses without emitting any log output.
  def apply(index: Int, line: String): Option[TreeNode] = {
    apply(index, line, noLog)
  }
  // CLI entry point: parses each input line and prints the result.
  def main(args: Array[String]) {
    try {
      parser.parse(args)
      if (help.value.isDefined) {
        parser.usage()
      }
      val input_file =
        (if (input.value.isDefined) {
          scala.io.Source.fromFile(input.value.get, "UTF-8")
        } else {
          scala.io.Source.stdin
        }).getLines().zipWithIndex
      for ((line, index) <- input_file) {
        println(Parser(index, line, log))
      }
      log.summary("Warnings,Errors: %s\\n".format(log.getStats()))
    }
    catch {
      case e: ArgotUsageException =>
        println(e.message)
    }
  }
}
| eponvert/Scalabha | src/main/scala/opennlp/scalabha/tree/Parser.scala | Scala | apache-2.0 | 4,227 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli
import java.io.ByteArrayOutputStream
import com.google.inject.{ AbstractModule, Guice }
import net.codingwell.scalaguice.ScalaModule
import net.codingwell.scalaguice.InjectorExtensions._
import org.bdgenomics.adam.cli.ADAMMain.defaultCommandGroups
import org.scalatest.FunSuite
// Tests for the ADAM CLI entry point: verifies that command groups can be
// supplied via Guice injection, via the ADAMMain constructor, or customized,
// and that the printed usage text reflects the configured groups.
class ADAMMainSuite extends FunSuite {
  test("default command groups is non empty") {
    assert(defaultCommandGroups.nonEmpty)
  }
  test("module provides default command groups") {
    // the stock ADAMModule must bind exactly the default command groups
    val injector = Guice.createInjector(new ADAMModule())
    val commandGroups = injector.instance[List[CommandGroup]]
    assert(commandGroups == defaultCommandGroups)
  }
  test("inject default command groups when called via main") {
    // capture stdout while running main with no args (prints usage)
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      ADAMMain.main(Array())
    }
    val out = stream.toString()
    // the help text has been displayed
    assert(out.contains("Usage"))
    // ...and transform (from default groups) is one of the commands listed
    assert(out.contains("transform"))
  }
  test("command groups is empty when called via apply") {
    // constructing with an empty group list yields usage with no commands
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      new ADAMMain(List.empty)(Array())
    }
    val out = stream.toString()
    assert(out.contains("Usage"))
    assert(!out.contains("transform"))
  }
  test("single command group") {
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      new ADAMMain(List(CommandGroup("SINGLE COMMAND GROUP", List(Transform)))).apply(Array())
    }
    val out = stream.toString()
    assert(out.contains("Usage"))
    assert(out.contains("SINGLE"))
    assert(out.contains("transform"))
  }
  test("add new command group to default command groups") {
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      val commandGroups = defaultCommandGroups.union(List(CommandGroup("NEW COMMAND GROUP", List(Transform))))
      new ADAMMain(commandGroups)(Array())
    }
    val out = stream.toString()
    assert(out.contains("Usage"))
    assert(out.contains("NEW"))
  }
  test("module restores default command groups when called via apply") {
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      val injector = Guice.createInjector(new ADAMModule())
      val commandGroups = injector.instance[List[CommandGroup]]
      new ADAMMain(commandGroups).apply(Array())
    }
    val out = stream.toString()
    assert(out.contains("Usage"))
    assert(out.contains("transform"))
  }
  test("custom module with single command group") {
    // a custom Guice module can override the bound command groups entirely
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      val module = new AbstractModule with ScalaModule {
        def configure() = {
          bind[List[CommandGroup]].toInstance(List(CommandGroup("SINGLE COMMAND GROUP", List(Transform))))
        }
      }
      val injector = Guice.createInjector(module)
      val commandGroups = injector.instance[List[CommandGroup]]
      new ADAMMain(commandGroups).apply(Array())
    }
    val out = stream.toString()
    assert(out.contains("Usage"))
    assert(out.contains("SINGLE"))
    assert(out.contains("transform"))
  }
  test("custom module with new command group added to default command groups") {
    val stream = new ByteArrayOutputStream()
    Console.withOut(stream) {
      val module = new AbstractModule with ScalaModule {
        def configure() = {
          bind[List[CommandGroup]].toInstance(defaultCommandGroups.union(List(CommandGroup("NEW COMMAND GROUP", List(Transform)))))
        }
      }
      val injector = Guice.createInjector(module)
      val commandGroups = injector.instance[List[CommandGroup]]
      new ADAMMain(commandGroups).apply(Array())
    }
    val out = stream.toString()
    assert(out.contains("Usage"))
    assert(out.contains("NEW"))
  }
}
| massie/adam | adam-cli/src/test/scala/org/bdgenomics/adam/cli/ADAMMainSuite.scala | Scala | apache-2.0 | 4,655 |
/**
* Copyright (c) 2002-2014, OnPoint Digital, Inc. All rights reserved
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* @author Alex Westphal 29/May/2014
* @version 29/May/2014
*/
package timez.instances.time
import java.time.ZoneId
import scalaz.{Equal, Show}
/** scalaz [[Equal]] and [[Show]] instances for `java.time.ZoneId`. */
trait ZonedIdInstances {
  // Stable implicit vals with explicit result types, rather than defs: a
  // single instance is shared instead of being re-allocated on every
  // implicit summon, and the public type no longer depends on inference.
  implicit val ZoneIdEqual: Equal[ZoneId] = Equal.equalA[ZoneId]
  implicit val ZoneIdShow: Show[ZoneId] = Show.showA[ZoneId]
}
| phantomspectre/timez | src/main/scala/timez/instances/time/ZonedIdInstances.scala | Scala | bsd-3-clause | 836 |
package service
import org.specs2.mutable.Specification
import java.util.Date
import model.GroupMember
// Integration-style tests for AccountService, run against the fresh test
// database that withTestDB provides (seeded with a single "root" account).
class AccountServiceSpec extends Specification with ServiceSpecBase {
  "AccountService" should {
    val RootMailAddress = "root@localhost"
    "getAllUsers" in { withTestDB{
      // only the seeded root account exists in a fresh test DB
      AccountService.getAllUsers() must be like{
        case List(model.Account("root", "root", RootMailAddress, _, true, _, _, _, None, None, false, false)) => ok
      }
    }}
    "getAccountByUserName" in { withTestDB{
      AccountService.getAccountByUserName("root") must beSome.like{
        case user => user.userName must_== "root"
      }
      AccountService.getAccountByUserName("invalid user name") must beNone
    }}
    "getAccountByMailAddress" in { withTestDB{
      AccountService.getAccountByMailAddress(RootMailAddress) must beSome
    }}
    "updateLastLoginDate" in { withTestDB{
      val root = "root"
      def user() =
        AccountService.getAccountByUserName(root).getOrElse(sys.error(s"user $root does not exists"))
      // initially unset; each update must advance the stored timestamp
      user().lastLoginDate must beNone
      val date1 = new Date
      AccountService.updateLastLoginDate(root)
      user().lastLoginDate must beSome.like{ case date =>
        date must be_>(date1)
      }
      val date2 = new Date
      // sleep so the second update is measurably later than date2
      Thread.sleep(1000)
      AccountService.updateLastLoginDate(root)
      user().lastLoginDate must beSome.like{ case date =>
        date must be_>(date2)
      }
    }}
    "updateAccount" in { withTestDB{
      val root = "root"
      def user() =
        AccountService.getAccountByUserName(root).getOrElse(sys.error(s"user $root does not exists"))
      val newAddress = "new mail address"
      AccountService.updateAccount(user().copy(mailAddress = newAddress))
      user().mailAddress must_== newAddress
    }}
    "group" in { withTestDB {
      // create a group, add/remove a member, and verify both directions
      // (members-of-group and groups-of-user) stay consistent
      val group1 = "group1"
      val user1 = "root"
      AccountService.createGroup(group1, None)
      AccountService.getGroupMembers(group1) must_== Nil
      AccountService.getGroupsByUserName(user1) must_== Nil
      AccountService.updateGroupMembers(group1, List((user1, true)))
      AccountService.getGroupMembers(group1) must_== List(GroupMember(group1, user1, true))
      AccountService.getGroupsByUserName(user1) must_== List(group1)
      AccountService.updateGroupMembers(group1, Nil)
      AccountService.getGroupMembers(group1) must_== Nil
      AccountService.getGroupsByUserName(user1) must_== Nil
    }}
  }
}
| milligramme/gitbucket | src/test/scala/service/AccountServiceSpec.scala | Scala | apache-2.0 | 2,465 |
package skinny.validator
import org.scalatest._
// Tests for the `intValue` validator: it should accept anything parseable as
// a 32-bit Int (and, per these expectations, null/empty inputs), and reject
// non-numeric text, fractional values, and values outside the Int range.
class intValueSpec extends FlatSpec with Matchers {
  behavior of "intValue"
  it should "be available" in {
    val validate = intValue
    validate.name should equal("intValue")
    // non-numeric strings fail
    validate(param("id", "abc")).isSuccess should equal(false)
    validate(param("id", "あ")).isSuccess should equal(false)
    validate(param("id", "1a")).isSuccess should equal(false)
    // absent values are considered valid (presence is a separate rule)
    validate(param("id", null)).isSuccess should equal(true)
    validate(param("id", "")).isSuccess should equal(true)
    validate(param("id", "0")).isSuccess should equal(true)
    validate(param("id", 0)).isSuccess should equal(true)
    validate(param("id", -1)).isSuccess should equal(true)
    // fractional values are not ints
    validate(param("id", -0.1D)).isSuccess should equal(false)
    validate(param("id", 1)).isSuccess should equal(true)
    validate(param("id", 2)).isSuccess should equal(true)
    validate(param("id", 3)).isSuccess should equal(true)
    // exact Int boundaries pass; one digit beyond them overflows and fails
    validate(param("id", java.lang.Integer.MAX_VALUE)).isSuccess should equal(true)
    validate(param("id", s"${java.lang.Integer.MAX_VALUE}1")).isSuccess should equal(false)
    validate(param("id", java.lang.Integer.MIN_VALUE)).isSuccess should equal(true)
    validate(param("id", s"${java.lang.Integer.MIN_VALUE}1")).isSuccess should equal(false)
  }
}
| skinny-framework/skinny-framework | validator/src/test/scala/skinny/validator/intValueSpec.scala | Scala | mit | 1,320 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.calcite.rex._
import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
import org.apache.flink.api.common.typeinfo.{LocalTimeTypeInfo, SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.table.planner.calcite.FlinkRelBuilder
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils
import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils.isTimeInterval
import org.apache.flink.table.planner.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
import org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo
import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
/** Extracts a single field (year, month, hour, ...) from a temporal value.
  *
  * Validation enforces which units make sense for which input type:
  * date-scale units (YEAR..DAY) require a date/timestamp/interval input,
  * while time-scale units (HOUR..SECOND) require a time/timestamp/
  * millisecond-interval input. The extracted field is always a LONG.
  */
case class Extract(timeIntervalUnit: PlannerExpression, temporal: PlannerExpression)
  extends PlannerExpression {
  override private[flink] def children: Seq[PlannerExpression] = timeIntervalUnit :: temporal :: Nil
  override private[flink] def resultType: TypeInformation[_] = LONG_TYPE_INFO
  override private[flink] def validateInput(): ValidationResult = {
    if (!TypeInfoCheckUtils.isTemporal(temporal.resultType)) {
      return ValidationFailure(s"Extract operator requires Temporal input, " +
        s"but $temporal is of type ${temporal.resultType}")
    }
    timeIntervalUnit match {
      // date-scale units: valid on dates, (local) timestamps and both interval kinds
      case SymbolPlannerExpression(PlannerTimeIntervalUnit.YEAR)
        | SymbolPlannerExpression(PlannerTimeIntervalUnit.QUARTER)
        | SymbolPlannerExpression(PlannerTimeIntervalUnit.MONTH)
        | SymbolPlannerExpression(PlannerTimeIntervalUnit.WEEK)
        | SymbolPlannerExpression(PlannerTimeIntervalUnit.DAY)
        if temporal.resultType == SqlTimeTypeInfo.DATE
          || temporal.resultType == SqlTimeTypeInfo.TIMESTAMP
          || temporal.resultType == LocalTimeTypeInfo.LOCAL_DATE
          || temporal.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME
          || temporal.resultType.isInstanceOf[LegacyLocalDateTimeTypeInfo]
          || temporal.resultType == TimeIntervalTypeInfo.INTERVAL_MILLIS
          || temporal.resultType == TimeIntervalTypeInfo.INTERVAL_MONTHS =>
        ValidationSuccess
      // time-scale units: valid on times, (local) timestamps and millis intervals
      case SymbolPlannerExpression(PlannerTimeIntervalUnit.HOUR)
        | SymbolPlannerExpression(PlannerTimeIntervalUnit.MINUTE)
        | SymbolPlannerExpression(PlannerTimeIntervalUnit.SECOND)
        if temporal.resultType == SqlTimeTypeInfo.TIME
          || temporal.resultType == SqlTimeTypeInfo.TIMESTAMP
          || temporal.resultType == LocalTimeTypeInfo.LOCAL_TIME
          || temporal.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME
          || temporal.resultType.isInstanceOf[LegacyLocalDateTimeTypeInfo]
          || temporal.resultType == TimeIntervalTypeInfo.INTERVAL_MILLIS =>
        ValidationSuccess
      case _ =>
        ValidationFailure(s"Extract operator does not support unit '$timeIntervalUnit' for input" +
          s" of type '${temporal.resultType}'.")
    }
  }
  override def toString: String = s"($temporal).extract($timeIntervalUnit)"
}
/** Base class for the current/local date-time point expressions.
  *
  * `targetType` is both the result type and part of the rendered name;
  * `local` selects the session-time-zone variant, which is not available
  * for plain DATE targets.
  */
abstract class CurrentTimePoint(
    targetType: TypeInformation[_],
    local: Boolean)
  extends LeafExpression {
  override private[flink] def resultType: TypeInformation[_] = targetType
  // Guard-style validation: reject non time-point targets first, then the
  // unsupported localized DATE combination; everything else is valid.
  override private[flink] def validateInput(): ValidationResult =
    if (!TypeInfoCheckUtils.isTimePoint(targetType))
      ValidationFailure(s"CurrentTimePoint operator requires Time Point target type, " +
        s"but get $targetType.")
    else if (local && targetType == SqlTimeTypeInfo.DATE)
      ValidationFailure(s"Localized CurrentTimePoint operator requires Time or Timestamp target " +
        s"type, but get $targetType.")
    else
      ValidationSuccess
  override def toString: String = {
    val prefix = if (local) "local" else "current"
    s"$prefix$targetType()"
  }
}
// Non-localized current date/time point expressions.
case class CurrentDate() extends CurrentTimePoint(SqlTimeTypeInfo.DATE, local = false)
case class CurrentTime() extends CurrentTimePoint(SqlTimeTypeInfo.TIME, local = false)
case class CurrentTimestamp() extends CurrentTimePoint(SqlTimeTypeInfo.TIMESTAMP, local = false)
// Session-local variants; note there is deliberately no LocalDate (rejected by
// CurrentTimePoint.validateInput for local DATE targets).
case class LocalTime() extends CurrentTimePoint(SqlTimeTypeInfo.TIME, local = true)
case class LocalTimestamp() extends CurrentTimePoint(SqlTimeTypeInfo.TIMESTAMP, local = true)
/**
* Determines whether two anchored time intervals overlap.
*/
case class TemporalOverlaps(
leftTimePoint: PlannerExpression,
leftTemporal: PlannerExpression,
rightTimePoint: PlannerExpression,
rightTemporal: PlannerExpression)
extends PlannerExpression {
override private[flink] def children: Seq[PlannerExpression] =
Seq(leftTimePoint, leftTemporal, rightTimePoint, rightTemporal)
override private[flink] def resultType: TypeInformation[_] = BOOLEAN_TYPE_INFO
override private[flink] def validateInput(): ValidationResult = {
if (!TypeInfoCheckUtils.isTimePoint(leftTimePoint.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTimePoint to be of type " +
s"Time Point, but get ${leftTimePoint.resultType}.")
}
if (!TypeInfoCheckUtils.isTimePoint(rightTimePoint.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires rightTimePoint to be of " +
s"type Time Point, but get ${rightTimePoint.resultType}.")
}
if (leftTimePoint.resultType != rightTimePoint.resultType) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTimePoint and " +
s"rightTimePoint to be of same type.")
}
// leftTemporal is point, then it must be comparable with leftTimePoint
if (TypeInfoCheckUtils.isTimePoint(leftTemporal.resultType)) {
if (leftTemporal.resultType != leftTimePoint.resultType) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTemporal and " +
s"leftTimePoint to be of same type if leftTemporal is of type Time Point.")
}
} else if (!isTimeInterval(leftTemporal.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTemporal to be of " +
s"type Time Point or Time Interval.")
}
// rightTemporal is point, then it must be comparable with rightTimePoint
if (TypeInfoCheckUtils.isTimePoint(rightTemporal.resultType)) {
if (rightTemporal.resultType != rightTimePoint.resultType) {
return ValidationFailure(s"TemporalOverlaps operator requires rightTemporal and " +
s"rightTimePoint to be of same type if rightTemporal is of type Time Point.")
}
} else if (!isTimeInterval(rightTemporal.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires rightTemporal to be of " +
s"type Time Point or Time Interval.")
}
ValidationSuccess
}
override def toString: String = s"temporalOverlaps(${children.mkString(", ")})"
/**
 * Standard conversion of the OVERLAPS operator.
 * Source: [[org.apache.calcite.sql2rel.StandardConvertletTable#convertOverlaps()]]
 */
private def convertOverlaps(
    leftP: RexNode,
    leftT: RexNode,
    rightP: RexNode,
    rightT: RexNode,
    relBuilder: FlinkRelBuilder)
  : RexNode = {
  // Resolve each range's end: an interval-typed end is added to its start point.
  val leftEndPoint = convertOverlapsEnd(relBuilder, leftP, leftT, leftTemporal.resultType)
  val rightEndPoint = convertOverlapsEnd(relBuilder, rightP, rightT, rightTemporal.resultType)
  // Order each pair so that (s0 <= e0) and (s1 <= e1).
  val (s0, e0) = buildSwap(relBuilder, leftP, leftEndPoint)
  val (s1, e1) = buildSwap(relBuilder, rightP, rightEndPoint)
  // Two ordered ranges overlap iff (e0 >= s1) AND (e1 >= s0).
  val firstCondition = relBuilder.call(FlinkSqlOperatorTable.GREATER_THAN_OR_EQUAL, e0, s1)
  val secondCondition = relBuilder.call(FlinkSqlOperatorTable.GREATER_THAN_OR_EQUAL, e1, s0)
  relBuilder.call(FlinkSqlOperatorTable.AND, firstCondition, secondCondition)
}
// Resolves the end of a range: a Time Interval end is offset from the start
// via DATETIME_PLUS, while a Time Point end is used as-is.
private def convertOverlapsEnd(
    relBuilder: FlinkRelBuilder,
    start: RexNode, end: RexNode,
    endType: TypeInformation[_]) =
  if (isTimeInterval(endType)) relBuilder.call(FlinkSqlOperatorTable.DATETIME_PLUS, start, end)
  else end
// Returns the two operands as an ordered (min, max) pair, built from a CASE
// expression on (start <= end).
private def buildSwap(relBuilder: FlinkRelBuilder, start: RexNode, end: RexNode) = {
  val isOrdered = relBuilder.call(FlinkSqlOperatorTable.LESS_THAN_OR_EQUAL, start, end)
  val lower = relBuilder.call(FlinkSqlOperatorTable.CASE, isOrdered, start, end)
  val upper = relBuilder.call(FlinkSqlOperatorTable.CASE, isOrdered, end, start)
  (lower, upper)
}
}
/**
 * Formats `timestamp` according to `format`; the result type is STRING.
 */
case class DateFormat(timestamp: PlannerExpression, format: PlannerExpression)
  extends PlannerExpression {

  override private[flink] def children = timestamp :: format :: Nil

  override def toString: String = s"$timestamp.dateFormat($format)"

  override private[flink] def resultType = STRING_TYPE_INFO
}
/**
 * Computes the difference between two time points in the given unit.
 * The result type is INT.
 *
 * @param timePointUnit unit symbol (YEAR .. SECOND)
 * @param timePoint1    first time point operand
 * @param timePoint2    second time point operand
 */
case class TimestampDiff(
    timePointUnit: PlannerExpression,
    timePoint1: PlannerExpression,
    timePoint2: PlannerExpression)
  extends PlannerExpression {

  override private[flink] def children: Seq[PlannerExpression] =
    timePointUnit :: timePoint1 :: timePoint2 :: Nil

  override private[flink] def validateInput(): ValidationResult = {
    // Both operands must be of a time point type.
    if (!TypeInfoCheckUtils.isTimePoint(timePoint1.resultType)) {
      return ValidationFailure(
        s"$this requires an input time point type, " +
        s"but timePoint1 is of type '${timePoint1.resultType}'.")
    }
    if (!TypeInfoCheckUtils.isTimePoint(timePoint2.resultType)) {
      return ValidationFailure(
        s"$this requires an input time point type, " +
        s"but timePoint2 is of type '${timePoint2.resultType}'.")
    }
    // The unit must be YEAR..SECOND and the guard accepts the pair when at
    // least one operand is a (java.sql or java.time) DATE / TIMESTAMP type.
    timePointUnit match {
      case SymbolPlannerExpression(PlannerTimePointUnit.YEAR)
        | SymbolPlannerExpression(PlannerTimePointUnit.QUARTER)
        | SymbolPlannerExpression(PlannerTimePointUnit.MONTH)
        | SymbolPlannerExpression(PlannerTimePointUnit.WEEK)
        | SymbolPlannerExpression(PlannerTimePointUnit.DAY)
        | SymbolPlannerExpression(PlannerTimePointUnit.HOUR)
        | SymbolPlannerExpression(PlannerTimePointUnit.MINUTE)
        | SymbolPlannerExpression(PlannerTimePointUnit.SECOND)
        if timePoint1.resultType == SqlTimeTypeInfo.DATE
          || timePoint1.resultType == SqlTimeTypeInfo.TIMESTAMP
          || timePoint2.resultType == SqlTimeTypeInfo.DATE
          || timePoint2.resultType == SqlTimeTypeInfo.TIMESTAMP
          || timePoint1.resultType == LocalTimeTypeInfo.LOCAL_DATE
          || timePoint1.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME
          || timePoint2.resultType == LocalTimeTypeInfo.LOCAL_DATE
          || timePoint2.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME =>
        ValidationSuccess
      case _ =>
        ValidationFailure(s"$this operator does not support unit '$timePointUnit'" +
          s" for input of type ('${timePoint1.resultType}', '${timePoint2.resultType}').")
    }
  }

  override def toString: String = s"timestampDiff(${children.mkString(", ")})"

  override private[flink] def resultType = INT_TYPE_INFO
}
/**
 * Converts a numeric epoch value with the given precision into a TIMESTAMP.
 * Both operands are validated to be numeric.
 */
case class ToTimestampLtz(
    numericEpochTime: PlannerExpression,
    precision: PlannerExpression)
  extends PlannerExpression {

  override private[flink] def children: Seq[PlannerExpression] =
    numericEpochTime :: precision :: Nil

  override private[flink] def validateInput(): ValidationResult = {
    // Shared check for both operands; reports which positional input failed.
    def ensureNumeric(operand: PlannerExpression, position: String): Option[ValidationFailure] =
      if (TypeInfoCheckUtils.assertNumericExpr(operand.resultType, "toTimestampLtz").isFailure) {
        Some(ValidationFailure(
          s"$this requires numeric type for the $position input, " +
          s"but the actual type '${operand.resultType}'."))
      } else {
        None
      }

    ensureNumeric(numericEpochTime, "first")
      .orElse(ensureNumeric(precision, "second"))
      .getOrElse(ValidationSuccess)
  }

  override def toString: String = s"toTimestampLtz(${children.mkString(", ")})"

  override private[flink] def resultType = SqlTimeTypeInfo.TIMESTAMP
}
| lincoln-lil/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/time.scala | Scala | apache-2.0 | 12,932 |
package colossus.service
import akka.actor.ActorRef
import scala.reflect.ClassTag
import java.net.InetSocketAddress
import colossus.core.{WorkerItem, WorkerRef}
/** Produces orderings of a fixed item set; each call yields the next permutation. */
trait PermutationGenerator[T] {
  def next(): List[T]
}
/**
* The PermutationGenerator creates permutations such that consecutive calls
* are guaranteed to cycle though all items as the first element.
*
* This currently doesn't iterate through every possible permutation, but it
* does evenly distribute 1st and 2nd tries...needs some more work
*/
class PrinciplePermutationGenerator[T: ClassTag](val seedlist: Seq[T])
  extends PermutationGenerator[T]
  with Iterator[List[T]] {

  private val items: Array[T] = seedlist.toArray
  // Index of the element that will be swapped into the head on the next call.
  private var swapIndex = 1
  // After this many calls, every (first, second) pair has been produced once.
  private val cycleSize = seedlist.size * (seedlist.size - 1)
  private var cycleCount = 0

  // The generator cycles forever.
  def hasNext = true

  private def swap(indexA: Int, indexB: Int): Unit = {
    val tmp = items(indexA)
    items(indexA) = items(indexB)
    items(indexB) = tmp
  }

  def next(): List[T] = {
    // Nothing to permute for zero or one item. Previously a lone item
    // evaluated a discarded `items.head`, and an empty seed list crashed with
    // ArrayIndexOutOfBoundsException inside swap(0, 1); now both simply
    // return the (possibly empty) list unchanged.
    if (items.length > 1) {
      swap(0, swapIndex)
      swapIndex += 1
      if (swapIndex == items.length) {
        swapIndex = 1
      }
      cycleCount += 1
      if (items.length > 3) {
        // Once a full cycle completes, advance the swap index one extra step
        // so that deeper positions also rotate over time.
        if (cycleCount == cycleSize) {
          cycleCount = 0
          swapIndex += 1
          if (swapIndex == items.length) {
            swapIndex = 1
          }
        }
      }
    }
    items.toList
  }
}
/** Raised for invalid balancer state or arguments. */
class LoadBalancingClientException(message: String) extends Exception(message)

/** Raised when a request failed on every attempted client; wraps the last failure. */
class SendFailedException(tries: Int, finalCause: Throwable)
  extends Exception(
    s"Failed after ${tries} tries, error on last try: ${finalCause.getMessage}",
    finalCause
  )
/**
* The LoadBalancingClient will evenly distribute requests across a set of
* clients. If one client begins failing, the balancer will retry up to
* numRetries times across the other clients (with each failover hitting
* different clients to avoid a cascading pileup
*
* Note that the balancer will never try the same client twice for a request,
* so setting maxTries to a very large number will mean that every client will
* be tried once
*
*
*/
@deprecated("Load balancer now built into client", "0.11.0")
class LoadBalancingClient[P <: Protocol](
    worker: WorkerRef,
    generator: InetSocketAddress => Sender[P, Callback],
    maxTries: Int = Int.MaxValue,
    initialClients: Seq[InetSocketAddress] = Nil
) extends WorkerItem
    with Sender[P, Callback] {

  def this(
      addresses: Seq[InetSocketAddress],
      baseConfig: ClientConfig,
      factory: ClientFactory[P, Callback, Sender[P, Callback], WorkerRef],
      maxTries: Int
  )(implicit worker: WorkerRef) = {
    this(worker, address => factory(baseConfig.copy(address = Seq(address))), maxTries, addresses)
  }

  val context = worker.generateContext

  worker.bind(_ => this)

  /** Pairs a connected client with the address it was created for. */
  case class Client(address: InetSocketAddress, client: Sender[P, Callback])

  private val clients = collection.mutable.ArrayBuffer[Client]()
  // Rebuilt whenever the client set changes; yields per-request retry orderings.
  private var permutations = new PrinciplePermutationGenerator(clients.map { _.client })

  update(initialClients, true)

  private def regeneratePermutations() {
    permutations = new PrinciplePermutationGenerator(clients.map { _.client })
  }

  private def addClient(address: InetSocketAddress, regen: Boolean): Unit = {
    val client = Client(address, generator(address))
    clients append client
    // Fix: honor the `regen` flag. Bulk `update` passes regen = false and
    // regenerates once at the end; the old code ignored the flag and rebuilt
    // the permutation generator after every single added client.
    if (regen) regeneratePermutations()
  }

  /** Adds (and connects) a client for the given address. */
  def addClient(address: InetSocketAddress): Unit = addClient(address, true)

  // Removes and disconnects every client matching the filter.
  private def removeFilter(filter: Client => Boolean) {
    var i = 0
    while (i < clients.length) {
      if (filter(clients(i))) {
        clients(i).client.disconnect()
        clients.remove(i)
      } else {
        i += 1
      }
    }
  }

  /** Removes (and disconnects) all clients for the given address. */
  def removeClient(address: InetSocketAddress) {
    removeFilter { c =>
      c.address == address
    }
    regeneratePermutations()
  }

  /**
   * Updates the client list, creating connections for new addresses not in the
   * existing list and closing connections not in the new list
   */
  def update(addresses: Seq[InetSocketAddress], allowDuplicates: Boolean = false) {
    removeFilter(c => !addresses.contains(c.address))
    addresses.foreach { address =>
      if (!clients.exists { _.address == address } || allowDuplicates) {
        // regen = false: regenerate once below instead of per added client.
        addClient(address, false)
      }
    }
    regeneratePermutations()
  }

  /** Disconnects every client and clears the list. */
  def disconnect() {
    clients.foreach { _.client.disconnect() }
    clients.clear()
  }

  /**
   * Sends the request to the next client in the current permutation, failing
   * over to subsequent clients on error, up to `maxTries` distinct clients.
   */
  def send(request: P#Request): Callback[P#Response] = {
    val retryList = permutations.next().take(maxTries)
    def go(next: Sender[P, Callback], list: List[Sender[P, Callback]]): Callback[P#Response] =
      next.send(request).recoverWith {
        case err =>
          list match {
            case head :: tail => go(head, tail)
            case Nil          => Callback.failed(new SendFailedException(retryList.size, err))
          }
      }
    if (retryList.isEmpty) {
      Callback.failed(new SendFailedException(retryList.size, new Exception("Empty client list!")))
    } else {
      go(retryList.head, retryList.tail)
    }
  }

  override def receivedMessage(message: Any, sender: ActorRef) {}

  override def address(): InetSocketAddress = throw new NotImplementedError("Deprecated class")

  override def update(addresses: Seq[InetSocketAddress]): Unit = {
    update(addresses, allowDuplicates = false)
  }
}
| tumblr/colossus | colossus/src/main/scala/colossus/service/LoadBalancingClient.scala | Scala | apache-2.0 | 5,469 |
package com.citypay.pan.search.source
import com.citypay.pan.search.db.JdbcScanner
import com.citypay.pan.search.nio.NioFileSystemScanner
import com.citypay.pan.search.util.ConfigExt._
import com.citypay.pan.search.util.{CredentialsCallback, NoOpCredentials}
import com.typesafe.config.Config
import scala.collection.JavaConverters._
/**
 * Factory which loads a [[ScanSource]] based on configuration
 */
object ScanSourceConfigFactory {

  /** Builds one [[ScanSource]] per entry under `search.source`; unknown types are skipped. */
  def apply(config: Config): List[ScanSource] = {
    config.getObjectList("search.source").asScala.toList
      .flatMap(c => adapt(c.toConfig))
  }

  /** Replaces the special root "user.home" with the current user's home directory. */
  def expandRoots(list: List[String]): List[String] = {
    list.collect {
      case "user.home" => System.getProperty("user.home")
      case s: String => s
    }
  }

  /**
   * Adapts a single source config block into a [[ScanSource]].
   * Returns None for unrecognised `type` values (previously expressed via a
   * null sentinel wrapped in Option; now constructed directly).
   */
  def adapt(c: Config): Option[ScanSource] = {
    c.getString("type").toLowerCase match {
      case "file" =>
        Some(NioFileSystemScanner(
          expandRoots(c.getStringList("root").asScala.toList),
          c.string("pattern", "**"), // default glob
          c.getStringList("exclude").asScala.toList,
          c.boolean("includeHiddenFiles", default = false),
          c.boolean("recursive", default = true),
          c.int("maxDepth", default = -1)
        ))
      case "db" =>
        // Optional credentials provider class; falls back to a no-op implementation.
        val credentials = c.stringOpt("credentials").fold[CredentialsCallback](new NoOpCredentials)(s =>
          Class.forName(s).newInstance().asInstanceOf[CredentialsCallback]
        )
        Some(new JdbcScanner(
          c.getString("driver"),
          c.getString("url"),
          credentials,
          c.stringOpt("catalog"),
          c.stringOpt("schema"),
          c.stringOpt("tableNameRegex"),
          c.stringOpt("colNameRegex")
        ))
      case _ => None
    }
  }
}
| citypay/citypay-pan-search | src/main/scala/com/citypay/pan/search/source/ScanSourceConfigFactory.scala | Scala | mit | 1,765 |
package by.verkpavel.iofs.graph
import java.awt.Color
import org.jfree.chart.plot.PlotOrientation
import org.jfree.chart.{ChartFactory, ChartPanel}
import org.jfree.data.xy.{XYSeries, XYSeriesCollection}
import org.jfree.ui.{ApplicationFrame, RefineryUtilities}
class LineChart(title: String) extends ApplicationFrame(title) {

  // Accumulates every series that will be plotted by this chart.
  val dataset = new XYSeriesCollection()

  /** Adds one series whose x values are 1-based indexes into `values`. */
  def addDataset(values: List[Double]) {
    val series = new XYSeries("Graph")
    values.zipWithIndex.foreach { case (value, index) =>
      series.add(index + 1.0, value)
    }
    dataset.addSeries(series)
  }

  /** Builds the chart with the given axis labels and shows the frame. */
  def draw(x: String, y: String) {
    val chart = ChartFactory.createXYLineChart("Line Charts", x, y, dataset,
      PlotOrientation.VERTICAL, true, true, false)
    chart.setBackgroundPaint(Color.white)

    // White grid lines on a light gray plot background.
    val plot = chart.getXYPlot
    plot.setBackgroundPaint(Color.lightGray)
    plot.setDomainGridlinePaint(Color.white)
    plot.setRangeGridlinePaint(Color.white)

    val panel = new ChartPanel(chart)
    panel.setPreferredSize(new java.awt.Dimension(500, 270))
    setContentPane(panel)

    this.pack()
    RefineryUtilities.centerFrameOnScreen(this)
    this.setVisible(true)
  }
}
} | VerkhovtsovPavel/BSUIR_Labs | Labs/IOFS/IOFS-1/src/by/verkpavel/iofs/graph/LineChart.scala | Scala | mit | 1,177 |
import org.apache.spark.{SparkContext, SparkConf}
/**
* Created by ma on 15-1-27.
*/
// Runs TPC-H query 4 on Hive via Spark SQL and reports parse/execution timings.
class QueryT4 extends BaseQuery{
  // Cap the cores this job may claim before the context is created.
  System.setProperty("spark.cores.max",String.valueOf(ParamSet.cores))
  val conf = new SparkConf()
  conf.setAppName("TPCH-Q4")
  val sc = new SparkContext(conf)
  val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)

  override def execute: Unit ={
    // setAppName("TPC-H_Q4")
    //get the time before the query be executed
    val t0 = System.nanoTime : Double
    var t1 = System.nanoTime : Double
    println("ID: "+ID+"query 4 will be parsed")
    // Switch to the configured database; count() forces the statement to run.
    val choosDdatabase = sqlContext.sql("use "+ParamSet.database)
    choosDdatabase.count()
    println("DATABASE: "+ParamSet.database)
    //the query
    // Phase 1: fill the temp table with line items delivered after commit date.
    val res0 = sqlContext.sql("""INSERT OVERWRITE TABLE q4_order_priority_tmp
select
DISTINCT l_orderkey
from
lineitem
where
l_commitdate < l_receiptdate""")
    // Phase 2: count Q3/1993 orders per priority.
    // NOTE(review): the temp table is populated with l_orderkey, but this join
    // reads t.o_orderkey — verify the temp table's actual column name.
    val res1 = sqlContext.sql("""select o_orderpriority, count(1) as order_count
from orders o join q4_order_priority_tmp t
on o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01'
group by o_orderpriority
order by o_orderpriority""")
    t1 = System.nanoTime : Double
    println("ID: "+ID+"query 4 parse done, parse time:"+ (t1 - t0) / 1000000000.0 + " secs")
    if(ParamSet.isExplain){
      println(res0.queryExecution.executedPlan)
      println(res1.queryExecution.executedPlan)
    }else{
      // NOTE(review): only res0 is collected/counted on this path; res1 (the
      // actual Q4 result) is never materialized here — presumably it was meant
      // to be executed as well. Confirm against the other query classes.
      if (ParamSet.showResult){
        res0.collect().foreach(println)
      }else{
        res0.count()
      }
      t1 = System.nanoTime : Double
      println("ID: "+ID+"query 4's execution time : " + (t1 - t0) / 1000000000.0 + " secs")
    }
    println("ID: "+ID+"Query 4 completed!")
    sc.stop()
    println("ID: "+ID+"Query 4's context successfully stopped")
    // Run the configured external cleanup command (e.g. freeing OS caches).
    Runtime.getRuntime.exec(ParamSet.execFREE)
    // "OK"
  }
}
| f7753/spark-SQL-tpch-test-tool | QueryT4.scala | Scala | apache-2.0 | 2,211 |
package ca.uwaterloo.gsd.rangeFix
import java.io._
import collection._
import ExpressionHelper._
import Expression._
import scala.collection.mutable.ArrayBuffer
/**
 * Evaluates a fixed list of constraints against a mutable valuation and caches
 * which constraint indexes are satisfied. Results are recomputed lazily after
 * each valuation change.
 */
class ErrorChecker(constraints: IndexedSeq[Expression],
                   valuation: Map[String, Literal]) {

  /** Indexes of constraints that currently evaluate to true. */
  def getSatisfiedConstraintIndexes(): Iterable[Int] = {
    if (needsUpdate) update()
    satisfiedConstraintIndexes
  }

  /** Indexes of constraints that currently evaluate to false. */
  def getUnsatisfiedConstraintIndexes(): Iterable[Int] = {
    if (needsUpdate) update()
    unsatisfiedConstraintIndexes
  }

  /** Records a new value for a variable; evaluation is deferred until queried. */
  def changeValuation(id: String, value: Literal) {
    curValuation = curValuation + (id -> value)
    needsUpdate = true
  }

  private var needsUpdate: Boolean = true
  private var satisfiedConstraintIndexes: Iterable[Int] = List()
  private var unsatisfiedConstraintIndexes: Iterable[Int] = List()
  private var curValuation = valuation

  // Re-evaluates every constraint under the current valuation and caches the
  // partition into satisfied / unsatisfied index sets.
  private def update() {
    val (satisfied, unsatisfied) = constraints.indices.partition { i =>
      val outcome = ExpressionHelper.evaluateTypeCorrectExpression(constraints(i), curValuation)
      assert(outcome.isInstanceOf[BoolLiteral])
      outcome == BoolLiteral(true)
    }
    satisfiedConstraintIndexes = satisfied.toSet
    unsatisfiedConstraintIndexes = unsatisfied.toSet
    needsUpdate = false
  }
}
/**
 * Result of one fix-generation run.
 * @param fixes        the generated data fixes
 * @param milliseconds wall-clock time spent generating them
 */
case class FixGenResult(
  fixes: Iterable[DataFix],
  milliseconds: Long
)
/**
 * Generates fixes for violated constraints. Error checking is restricted to
 * the first `reqConstraintSize` constraints; the fix generator sees them all.
 */
class MultiConstraintFixer(allConstraints: IndexedSeq[Expression],
                           reqConstraintSize: Int,
                           valuation: Map[String, Literal],
                           executionTime: Int = 1) {

  val errorChecker = new ErrorChecker(allConstraints.slice(0, reqConstraintSize), valuation)
  val fg = FixGenerator.create(allConstraints, configuration2Types(valuation), valuation)

  def fixWithIgnorance(constraintIndex: Int): FixGenResult = fixIgnoranceImpl(constraintIndex, fg)
  def fixWithElimination(constraintIndex: Int): FixGenResult = fixEliminationImpl(constraintIndex, fg)
  def fixWithPropagation(constraintIndex: Int): FixGenResult = fixPropagationImpl(Set(constraintIndex), fg)

  // Runs the fix computation under the timer and packages the result.
  private def fixImpl(f: => Iterable[DataFix]): FixGenResult = {
    val result = Timer.measureTime(executionTime) {
      f
    }
    FixGenResult(result, Timer.lastExecutionMillis)
  }

  private def fixIgnoranceImpl(cIndex: Int, fg: FixGenerator): FixGenResult = fixImpl {
    fg.fixWithIgnorance(cIndex)
  }

  private def fixEliminationImpl(cIndex: Int, fg: FixGenerator): FixGenResult = fixImpl {
    fg.fixWithElimination(cIndex, errorChecker.getSatisfiedConstraintIndexes.toSet)
  }

  private def fixPropagationImpl(cIndex: Set[Int], fg: FixGenerator): FixGenResult = fixImpl {
    val satisfiedConstraintIndexes = Timer.measureTime(errorChecker.getSatisfiedConstraintIndexes.toSet)
    fg.fix(cIndex, satisfiedConstraintIndexes, Set[String]())
  }

  // Kept for interface compatibility; intentionally a no-op.
  def addEqualConstraint(l: Literal, exprIndex: Int) = {
  }

  // Adds every equality constraint to a single fix generator, threading the
  // updated generator through each step. (Previously fixEqIgnorance and
  // fixEqElimination restarted from `fg` on every iteration, so only the last
  // constraint survived; fixEqPropagation already threaded correctly.)
  // Returns the final generator, the index of the last added constraint, and
  // the set of all added constraint indexes.
  private def addAllEqualConstraints(features: ArrayBuffer[(Literal, Int)]): (FixGenerator, Int, Set[Int]) = {
    var nfg: FixGenerator = fg
    var lastIndex: Int = 0
    var allIndexes: Set[Int] = Set()
    for ((l, exprIndex) <- features) {
      val (updated, cIndex) = FixGenerator.addEqualConstraint(nfg, l, exprIndex)
      nfg = updated
      lastIndex = cIndex
      allIndexes += cIndex
    }
    (nfg, lastIndex, allIndexes)
  }

  // fix a violated new equal constraint
  def fixEqIgnorance(features: ArrayBuffer[(Literal, Int)]): FixGenResult = {
    val (nfg, cIndex, _) = addAllEqualConstraints(features)
    fixIgnoranceImpl(cIndex, nfg)
  }

  def fixEqElimination(features: ArrayBuffer[(Literal, Int)]): FixGenResult = {
    val (nfg, cIndex, _) = addAllEqualConstraints(features)
    fixEliminationImpl(cIndex, nfg)
  }

  def fixEqPropagation(features: ArrayBuffer[(Literal, Int)]): FixGenResult = {
    val (nfg, _, cIndexes) = addAllEqualConstraints(features)
    fixPropagationImpl(cIndexes, nfg)
  }
}
}
/**
 * Holds an [[EccManager]] in serialized (byte array) form so it can cross
 * Java-serialization boundaries; `get()` reconstructs a fresh manager.
 */
class SerializedEccManager(serializedArray: Array[Byte]) extends Serializable {

  // Auxiliary constructor: serializes the manager's execution time, loader
  // paths and loader configuration into the byte array handed to the primary
  // constructor. The write order here must match the read order in get().
  def this(eccManager: EccManager) = this{
    val byteArrayStream = new ByteArrayOutputStream
    val output = new ObjectOutputStream(byteArrayStream)
    try {
      eccManager.saveExcutionTimesAndLoaderPath(output)
      eccManager.saveLoaderConfig(output)
    } finally {
      output.close()
    }
    val result = byteArrayStream.toByteArray
    result
  }

  // Rebuilds an EccManager: reads the execution time, the loader's file and
  // annotation paths, then restores the loader configuration.
  def get() = {
    val input = new ObjectInputStream(new ByteArrayInputStream(serializedArray))
    try {
      val et = input.readInt()
      val file = input.readObject.asInstanceOf[String]
      val annotation = input.readObject.asInstanceOf[String]
      val eccManager = new EccManager(new EccLoader(file,annotation),et)
      eccManager.loadLoaderConfig(input)
      eccManager
    } finally {
      input.close
    }
  }
}
/**
 * ConfigManager specialization for ECC models; adds feature activation,
 * serialization helpers and valuation conversion.
 */
class EccManager(loader: EccLoader, executionTime: Int = 1)
  extends ConfigManager(loader, executionTime) with Serializable {

  /**
   * Generates fixes that would make feature `id` active, using `strategy`.
   * @throws IllegalArgumentException if the feature is unknown
   */
  def activateFeature(id: String, strategy: Strategy = PropagationStrategy): FixGenResult = {
    // Resolve the feature once instead of looking it up twice.
    val index = getActiveConstraintIndex(id).getOrElse(
      throw new java.lang.IllegalArgumentException("feature cannot be found."))
    strategy.fix(index)
  }

  def getConstraintSize = loader.reqConstraintSize

  def getFeatureSize = loader.getFeatureSize

  def getActiveConstraintIndex(id: String): Option[Int] =
    loader.getActiveConstraintIndex(id)

  /** True if the "active" constraint of `id` holds under the current valuation. */
  def isNodeActive(id: String) = {
    val optIndex = getActiveConstraintIndex(id)
    if (optIndex.isEmpty) throw new Exception(id + " not found.")
    // Reuse the index resolved above rather than resolving the feature again.
    ExpressionHelper.evaluateTypeCorrectExpression(
      loader.allExpressions(optIndex.get).getConstraint,
      loader.valuation) == BoolLiteral(true)
  }

  /** Serializes this manager to the given file. */
  def save(file: String) {
    val oos = new ObjectOutputStream(new FileOutputStream(file))
    try {
      oos.writeObject(this)
    } finally { oos.close() }
  }

  // Writes the execution time and the loader's file/annotation paths.
  def saveExcutionTimesAndLoaderPath(out: ObjectOutputStream) {
    out.writeInt(executionTime)
    loader.savePath(out)
  }

  // Writes the loader's current configuration.
  def saveLoaderConfig(out: ObjectOutputStream) {
    loader.saveConfig(out)
  }

  // Restores the loader's configuration and invalidates the cached fixer.
  def loadLoaderConfig(in: ObjectInputStream) {
    loader.loadConfig(in)
    _fixerShouldUpdate = true
  }

  /**
   * Expands an option value into the (variable name -> literal) bindings it
   * implies, according to the feature's flavor (Bool or Data).
   */
  def convertSingleOptionValueToValuation(id: String, value: OptionValue): Iterable[(String, Literal)] =
    loader.convertOptionValue(value, id) match {
      case SingleConfigValue(v) =>
        if (getFeatureFlavor(id) == Flavor.Bool)
          List((NodeHelper.toBoolVar(id), v))
        else {
          assert(getFeatureFlavor(id) == Flavor.Data)
          List((NodeHelper.toDataVar(id), v))
        }
      case DoubleConfigValue(b, v) => List((NodeHelper.toBoolVar(id), b), (NodeHelper.toDataVar(id), v))
    }

  def getFeatureFlavor(id: String) = loader.getFeatureFlavor(id)

  /** Deep-copies this manager via in-memory serialization round-trip. */
  def getCloneEccManager(): EccManager = {
    val byteArrayOut = new ByteArrayOutputStream
    val objectOut = new ObjectOutputStream(byteArrayOut)
    try {
      loader.testSerializable()
      objectOut.writeObject(this)
    }
    finally { objectOut.close() }
    val objectIn = new ObjectInputStream(new ByteArrayInputStream(byteArrayOut.toByteArray()))
    try {
      objectIn.readObject.asInstanceOf[EccManager]
    } finally {
      objectIn.close()
    }
  }

  def getValuation = loader.valuation

  /** Applies a new value for a feature and invalidates the cached fixer. */
  def changeFeature(id: String, value: OptionValue) {
    loader.changeFeature(id, value)
    _fixerShouldUpdate = true
  }
}
/** ConfigManager specialization for Kconfig models: queue equality assignments, then fix them. */
class KconfigManager(loader: KconfigLoader, executionTime: Int = 1) extends ConfigManager(loader, executionTime) {

  // Pending (value, effective-constraint-index) pairs collected via setFeature.
  private val features: ArrayBuffer[(Literal, Int)] = ArrayBuffer[(Literal, Int)]()

  /** Queues the assignment of `l` to feature `id`; fails fast for unknown features. */
  def setFeature(id: String, l: Literal) = {
    val effectiveIndex = loader.getEffectiveIndex(id).getOrElse(
      throw new java.lang.IllegalArgumentException("feature cannot be found."))
    features += ((l, effectiveIndex))
  }

  /** Generates fixes for every queued assignment with the chosen strategy. */
  def getFixes(strategy: Strategy = PropagationStrategy): FixGenResult = {
    strategy.fixEq(features)
  }
}
/**
 * Base facade over a configuration model: exposes constraint status queries
 * and fix generation under three interchangeable strategies.
 */
class ConfigManager(loader: ModelLoader, executionTime: Int = 1) {

  /** A fix-generation strategy; implementations delegate to the underlying fixer. */
  protected abstract class Strategy {
    def fix(index: Int): FixGenResult
    def fixEq(features: ArrayBuffer[(Literal, Int)]): FixGenResult
  }

  object PropagationStrategy
    extends Strategy {
    override def fix(index: Int) = {
      fixer.fixWithPropagation(index)
    }
    override def fixEq(features: ArrayBuffer[(Literal, Int)]): FixGenResult = {
      fixer.fixEqPropagation(features)
    }
  }

  object EliminationStrategy
    extends Strategy {
    override def fix(index: Int) = {
      fixer.fixWithElimination(index)
    }
    override def fixEq(features: ArrayBuffer[(Literal, Int)]): FixGenResult = {
      fixer.fixEqElimination(features)
    }
  }

  object IgnoranceStrategy
    extends Strategy {
    override def fix(index: Int) = {
      fixer.fixWithIgnorance(index)
    }
    override def fixEq(features: ArrayBuffer[(Literal, Int)]): FixGenResult = {
      // Fixed: previously delegated to fixEqElimination, so the ignorance
      // strategy silently behaved like elimination for equality fixes.
      fixer.fixEqIgnorance(features)
    }
  }

  // The fixer is rebuilt lazily whenever the loader's configuration changes.
  var _fixerShouldUpdate: Boolean = true
  var _fixer: MultiConstraintFixer = null

  private def fixer(): MultiConstraintFixer = {
    if (_fixerShouldUpdate) {
      val allConstraintsNoSource = loader.allExpressions.map(_.getConstraint)
      _fixer = new MultiConstraintFixer(
        allConstraintsNoSource,
        loader.reqConstraintSize,
        loader.valuation,
        executionTime)
    }
    _fixerShouldUpdate = false
    _fixer
  }

  def getSatisfiedConstraintIndexes = fixer.errorChecker.getSatisfiedConstraintIndexes

  def getUnsatisfiedConstraintIndexes =
    fixer.errorChecker.getUnsatisfiedConstraintIndexes

  def getConstraint(index: Int): ConstraintWithSource = loader.allExpressions(index)

  def generateFix(index: Int, strategy: Strategy = PropagationStrategy) =
    strategy.fix(index)
}
/** Abstraction over configuration-model loaders (Kconfig, ECC, ...). */
trait ModelLoader {
  // All constraints of the model, each paired with its source.
  def allExpressions: IndexedSeq[ConstraintWithSource]
  // Number of leading constraints that are required (checked for errors).
  def reqConstraintSize: Int
  // Current variable assignment.
  def valuation: Map[String, Literal]
}

/** A constraint together with the model element it originated from. */
abstract class ConstraintWithSource(constraint: Expression, source: Source) {
  def getConstraint(): Expression = constraint
  def getSource(): String = source.getSource
  def getSourceObject() = source
  def getNodeID(): String = source.nodeID
  override def toString(): String = getSource
}

/** Identifies where in the model a constraint came from. */
abstract class Source(val nodeID: String) {
  def getSource(): String
}

/** Source: the "effective" condition of a Kconfig node. */
class KconfigEffectiveSource(nodeID: String) extends Source(nodeID) {
  override def getSource(): String = "Effective(%s)".format(nodeID)
}

/** Source: a Kconfig choice group. */
object KconfigChoiceSource extends Source("choice") {
  override def getSource(): String = "Choice"
}

/** Source: the value domain of a Kconfig node. */
class KconfigDomainSource(nodeID: String) extends Source(nodeID) {
  override def getSource(): String = "Domain(%s)".format(nodeID)
}

case class EffectiveExpr(e: Expression, id: String)
  extends ConstraintWithSource(e, new KconfigEffectiveSource(id))

case class ChoiceConstraint(c: Expression)
  extends ConstraintWithSource(c, KconfigChoiceSource)

case class DomainConstraint(c: Expression, id: String)
  extends ConstraintWithSource(c, new KconfigDomainSource(id))
| matachi/rangeFix | src/main/scala/common/ConfigManager.scala | Scala | mit | 11,519 |
package mesosphere.marathon
package state
import mesosphere.UnitTest
import mesosphere.marathon.core.readiness.ReadinessCheckTestHelper
class ReadinessCheckSerializerTest extends UnitTest {
  "ReadinessCheckSerialization" should {
    "get defaultHttp for empty protobuf" in {
      Given("an empty protobuf")
      val emptyProto = Protos.ReadinessCheckDefinition.getDefaultInstance
      When("reading it")
      val deserialized = ReadinessCheckSerializer.fromProto(emptyProto)
      Then("we get the defaults")
      // An empty message must decode to the default HTTP readiness check.
      deserialized should equal(ReadinessCheckTestHelper.defaultHttp)
    }

    "defaultHttp example serialized/deserializes" in {
      Given("a defaultHttp readinessCheck")
      val original = ReadinessCheckTestHelper.defaultHttp
      When("serializing it to a proto")
      val serialized = ReadinessCheckSerializer.toProto(original)
      And("deserializing it again")
      val roundTripped = ReadinessCheckSerializer.fromProto(serialized)
      Then("we get the original check back")
      // Serialization round-trip must be lossless.
      roundTripped should equal(original)
    }

    "alternativeHttps example serialized/deserializes" in {
      Given("a alternativeHttps readinessCheck")
      val original = ReadinessCheckTestHelper.alternativeHttps
      When("serializing it to a proto")
      val serialized = ReadinessCheckSerializer.toProto(original)
      And("deserializing it again")
      val roundTripped = ReadinessCheckSerializer.fromProto(serialized)
      Then("we get the original check back")
      // Serialization round-trip must be lossless.
      roundTripped should equal(original)
    }
  }
}
| guenter/marathon | src/test/scala/mesosphere/marathon/state/ReadinessCheckSerializerTest.scala | Scala | apache-2.0 | 1,509 |
package org.jetbrains.plugins.scala
package lang
package psi
package types
import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.util.{Computable, RecursionManager}
import com.intellij.psi._
import com.intellij.util.containers.ConcurrentWeakHashMap
import org.jetbrains.plugins.scala.decompiler.DecompilerUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScFieldId
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScExistentialClause
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.ScSyntheticClass
import org.jetbrains.plugins.scala.lang.psi.impl.{ScalaPsiElementFactory, ScalaPsiManager}
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{ScMethodType, ScTypePolymorphicType}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypingContext}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil.AliasType
import org.jetbrains.plugins.scala.lang.resolve.processor.{CompoundTypeCheckSignatureProcessor, CompoundTypeCheckTypeAliasProcessor}
import org.jetbrains.plugins.scala.util.ScEquivalenceUtil
import _root_.scala.collection.immutable.HashSet
import scala.collection.mutable.ArrayBuffer
import scala.collection.{Seq, immutable, mutable}
object Conformance {
/**
 * Checks, whether the following assignment is correct:
 * val x: l = (y: r)
 */
def conforms(l: ScType, r: ScType, checkWeak: Boolean = false): Boolean =
  conformsInner(l, r, HashSet.empty, new ScUndefinedSubstitutor, checkWeak)._1

// Same conformance check, but returns the substitutor collected for
// undefined type parameters instead of the boolean outcome.
def undefinedSubst(l: ScType, r: ScType, checkWeak: Boolean = false): ScUndefinedSubstitutor =
  conformsInner(l, r, HashSet.empty, new ScUndefinedSubstitutor, checkWeak)._2
// Checks that two argument lists of a parameterized type conform, taking the
// variance annotation of each type parameter into account, and extends the
// undefined-type substitutor with any bounds discovered along the way.
// Iteration stops at the shortest of the three sequences.
private def checkParameterizedType(parametersIterator: Iterator[PsiTypeParameter], args1: scala.Seq[ScType],
                                   args2: scala.Seq[ScType], _undefinedSubst: ScUndefinedSubstitutor,
                                   visited: Set[PsiClass], checkWeak: Boolean): (Boolean, ScUndefinedSubstitutor) = {
  var undefinedSubst = _undefinedSubst

  // Handles an abstract-type operand: `tp` must fit within [lower, upper];
  // when it does not, `alternateTp` is tried as a fallback form.
  def addAbstract(upper: ScType, lower: ScType, tp: ScType, alternateTp: ScType): Boolean = {
    if (!upper.equiv(Any)) {
      val t = conformsInner(upper, tp, visited, undefinedSubst, checkWeak)
      if (!t._1) {
        val t = conformsInner(upper, alternateTp, visited, undefinedSubst, checkWeak)
        if (!t._1) return false
        else undefinedSubst = t._2
      } else undefinedSubst = t._2
    }
    if (!lower.equiv(Nothing)) {
      val t = conformsInner(tp, lower, visited, undefinedSubst, checkWeak)
      if (!t._1) {
        val t = conformsInner(alternateTp, lower, visited, undefinedSubst, checkWeak)
        if (!t._1) return false
        else undefinedSubst = t._2
      } else undefinedSubst = t._2
    }
    true
  }

  val args1Iterator = args1.iterator
  val args2Iterator = args2.iterator

  while (parametersIterator.hasNext && args1Iterator.hasNext && args2Iterator.hasNext) {
    val tp = parametersIterator.next()
    val argsPair = (args1Iterator.next(), args2Iterator.next())
    tp match {
      // Contravariant parameter: conformance is checked right-to-left.
      case scp: ScTypeParam if scp.isContravariant =>
        val y = Conformance.conformsInner(argsPair._2, argsPair._1, HashSet.empty, undefinedSubst)
        if (!y._1) return (false, undefinedSubst)
        else undefinedSubst = y._2
      // Covariant parameter: conformance is checked left-to-right.
      case scp: ScTypeParam if scp.isCovariant =>
        val y = Conformance.conformsInner(argsPair._1, argsPair._2, HashSet.empty, undefinedSubst)
        if (!y._1) return (false, undefinedSubst)
        else undefinedSubst = y._2
      //this case filter out such cases like undefined type
      case _ =>
        argsPair match {
          // An undefined operand is constrained to be exactly the other side
          // (both an upper and a lower bound are recorded).
          case (u: ScUndefinedType, rt) =>
            undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
            undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
          case (lt, u: ScUndefinedType) =>
            undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
            undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
          // Abstract operands: when the abstract type has its own type
          // arguments and the other side is not parameterized, a
          // parameterized variant of the other side is tried first.
          case (ScAbstractType(tpt, lower, upper), r) =>
            val (right, alternateRight) =
              if (tpt.args.length > 0 && !r.isInstanceOf[ScParameterizedType])
                (ScParameterizedType(r, tpt.args), r)
              else (r, r)
            if (!addAbstract(upper, lower, right, alternateRight)) return (false, undefinedSubst)
          case (l, ScAbstractType(tpt, lower, upper)) =>
            val (left, alternateLeft) =
              if (tpt.args.length > 0 && !l.isInstanceOf[ScParameterizedType])
                (ScParameterizedType(l, tpt.args), l)
              else (l, l)
            if (!addAbstract(upper, lower, left, alternateLeft)) return (false, undefinedSubst)
          // Existential type aliases are checked by conformance, not equivalence.
          case (aliasType, _) if aliasType.isAliasType != None && aliasType.isAliasType.get.ta.isExistentialTypeAlias =>
            val y = Conformance.conformsInner(argsPair._1, argsPair._2, HashSet.empty, undefinedSubst)
            if (!y._1) return (false, undefinedSubst)
            else undefinedSubst = y._2
          // Invariant position: the arguments must be equivalent.
          case _ =>
            val t = Equivalence.equivInner(argsPair._1, argsPair._2, undefinedSubst, falseUndef = false)
            if (!t._1) return (false, undefinedSubst)
            undefinedSubst = t._2
        }
    }
  }
  (true, undefinedSubst)
}
private class LeftConformanceVisitor(l: ScType, r: ScType, visited: Set[PsiClass],
subst: ScUndefinedSubstitutor,
checkWeak: Boolean = false) extends ScalaTypeVisitor {
/*
Different checks from right type in order of appearence.
todo: It's seems it's possible to check order and simplify code in many places.
*/
trait ValDesignatorSimplification extends ScalaTypeVisitor {
override def visitDesignatorType(d: ScDesignatorType) {
d.getValType match {
case Some(v) =>
result = conformsInner(l, v, visited, subst, checkWeak)
return
case _ =>
}
}
}
trait UndefinedSubstVisitor extends ScalaTypeVisitor {
override def visitUndefinedType(u: ScUndefinedType) {
result = (true, undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), l))
}
}
trait AbstractVisitor extends ScalaTypeVisitor {
override def visitAbstractType(a: ScAbstractType) {
val left =
if (a.tpt.args.length > 0 && !l.isInstanceOf[ScParameterizedType])
ScParameterizedType(l, a.tpt.args)
else l
if (!a.lower.equiv(Nothing)) {
result = conformsInner(left, a.lower, visited, undefinedSubst, checkWeak)
} else {
result = (true, undefinedSubst)
}
if (result._1 && !a.upper.equiv(Any)) {
val t = conformsInner(a.upper, left, visited, result._2, checkWeak)
if (t._1) result = t //this is optionally
}
}
}
    trait ParameterizedAbstractVisitor extends ScalaTypeVisitor {
      // Right side is a parameterized type whose designator is an abstract
      // type: substitute the actual type arguments into the abstract type's
      // lower bound and check that `l` conforms to it.
      override def visitParameterizedType(p: ScParameterizedType) {
        p.designator match {
          case a: ScAbstractType =>
            // Maps each of the abstract type's parameters to the concrete
            // argument supplied by `p`.
            val subst = new ScSubstitutor(Map(a.tpt.args.zip(p.typeArgs).map {
              case (tpt: ScTypeParameterType, tp: ScType) =>
                ((tpt.param.name, ScalaPsiUtil.getPsiElementId(tpt.param)), tp)
            }: _*), Map.empty, None)
            // Re-apply p's type arguments on top of the substituted lower
            // bound's designator (both arms intentionally rebuild the
            // parameterized type with p.typeArgs).
            val lower: ScType =
              subst.subst(a.lower) match {
                case ScParameterizedType(lower, _) => ScParameterizedType(lower, p.typeArgs)
                case lower => ScParameterizedType(lower, p.typeArgs)
              }
            if (!lower.equiv(Nothing)) {
              result = conformsInner(l, lower, visited, undefinedSubst, checkWeak)
            }
          case _ =>
        }
      }
    }
private def checkEquiv() {
val isEquiv = Equivalence.equivInner(l, r, undefinedSubst)
if (isEquiv._1) result = isEquiv
}
trait ExistentialSimplification extends ScalaTypeVisitor {
override def visitExistentialType(e: ScExistentialType) {
val simplified = e.simplify()
if (simplified != r) result = conformsInner(l, simplified, visited, undefinedSubst, checkWeak)
}
}
trait SkolemizeVisitor extends ScalaTypeVisitor {
override def visitSkolemizedType(s: ScSkolemizedType) {
result = conformsInner(l, s.upper, HashSet.empty, undefinedSubst)
}
}
    trait ParameterizedSkolemizeVisitor extends ScalaTypeVisitor {
      // Right side is a parameterized type whose designator is a skolem:
      // conformance goes through the skolem's upper bound.
      override def visitParameterizedType(p: ScParameterizedType) {
        p.designator match {
          case s: ScSkolemizedType =>
            s.upper match {
              // NOTE(review): when the bound itself is parameterized only its
              // designator is used (type arguments are dropped); the arms are
              // otherwise identical — confirm this distinction is intended.
              case ScParameterizedType(upper, _) =>
                result = conformsInner(l, upper, visited, undefinedSubst, checkWeak)
              case upper =>
                result = conformsInner(l, upper, visited, undefinedSubst, checkWeak)
            }
          case _ =>
        }
      }
    }
trait OtherNonvalueTypesVisitor extends ScalaTypeVisitor {
override def visitUndefinedType(u: ScUndefinedType) {
result = (false, undefinedSubst)
}
override def visitMethodType(m: ScMethodType) {
result = (false, undefinedSubst)
}
override def visitAbstractType(a: ScAbstractType) {
result = (false, undefinedSubst)
}
override def visitTypePolymorphicType(t: ScTypePolymorphicType) {
result = (false, undefinedSubst)
}
}
    trait NothingNullVisitor extends ScalaTypeVisitor {
      // Handles Nothing and Null appearing on the right-hand side.
      override def visitStdType(x: StdType) {
        // Nothing is the bottom type: it conforms to everything.
        if (x eq types.Nothing) result = (true, undefinedSubst)
        else if (x eq types.Null) {
          /*
            this case for checking: val x: T = null
            This is good if T class type: T <: AnyRef and !(T <: NotNull)
           */
          // Null only conforms to reference types.
          if (!conforms(types.AnyRef, l)) {
            result = (false, undefinedSubst)
            return
          }
          ScType.extractDesignated(l, withoutAliases = false) match {
            case Some((el, _)) =>
              // Types marked with the (deprecated) scala.NotNull marker
              // trait explicitly exclude null.
              val notNullClass = ScalaPsiManager.instance(el.getProject).getCachedClass("scala.NotNull", el.getResolveScope, ScalaPsiManager.ClassCategory.TYPE)
              if (notNullClass != null) {
                val notNullType = ScDesignatorType(notNullClass)
                result = (!conforms(notNullType, l), undefinedSubst) //todo: think about undefinedSubst
              } else {
                // scala.NotNull not on the classpath — null is acceptable.
                result = (true, undefinedSubst)
              }
            case _ => result = (true, undefinedSubst)
          }
        }
      }
    }
trait TypeParameterTypeVisitor extends ScalaTypeVisitor {
override def visitTypeParameterType(tpt: ScTypeParameterType) {
result = conformsInner(l, tpt.upper.v, HashSet.empty, undefinedSubst)
}
}
trait ThisVisitor extends ScalaTypeVisitor {
override def visitThisType(t: ScThisType) {
val clazz = t.clazz
val res = clazz.getTypeWithProjections(TypingContext.empty)
if (res.isEmpty) result = (false, undefinedSubst)
else result = conformsInner(l, res.get, visited, subst, checkWeak)
}
}
    trait DesignatorVisitor extends ScalaTypeVisitor {
      // A designator denoting a value-level element (binding pattern,
      // parameter or field id) conforms through that element's type.
      // The three branches are intentionally identical in shape; the element
      // types share no common `getType` ancestor usable here.
      override def visitDesignatorType(d: ScDesignatorType) {
        d.element match {
          case v: ScBindingPattern =>
            val res = v.getType(TypingContext.empty)
            if (res.isEmpty) result = (false, undefinedSubst)
            else result = conformsInner(l, res.get, visited, undefinedSubst)
          case v: ScParameter =>
            val res = v.getType(TypingContext.empty)
            if (res.isEmpty) result = (false, undefinedSubst)
            else result = conformsInner(l, res.get, visited, undefinedSubst)
          case v: ScFieldId =>
            val res = v.getType(TypingContext.empty)
            if (res.isEmpty) result = (false, undefinedSubst)
            else result = conformsInner(l, res.get, visited, undefinedSubst)
          case _ =>
        }
      }
    }
trait ParameterizedAliasVisitor extends ScalaTypeVisitor {
override def visitParameterizedType(p: ScParameterizedType) {
p.designator match {
case proj: ScProjectionType if proj.actualElement.isInstanceOf[ScTypeAlias] =>
val args = p.typeArgs
val a = proj.actualElement.asInstanceOf[ScTypeAlias]
val subst = proj.actualSubst
val upper: ScType = a.upperBound.toOption match {
case Some(up) => up
case _ =>
result = (false, undefinedSubst)
return
}
val uBound = subst.subst(upper)
val genericSubst = ScalaPsiUtil.
typesCallSubstitutor(a.typeParameters.map(tp => (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
val s = subst.followed(genericSubst)
result = conformsInner(l, s.subst(uBound), visited, undefinedSubst)
case des: ScDesignatorType =>
val des = p.designator.asInstanceOf[ScDesignatorType]
des.element match {
case a: ScTypeAlias =>
val args = p.typeArgs
val uBound = a.upperBound.toOption match {
case Some(tp) => tp
case _ =>
result = (false, undefinedSubst)
return
}
val genericSubst = ScalaPsiUtil.
typesCallSubstitutor(a.typeParameters.map(tp => (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
result = conformsInner(l, genericSubst.subst(uBound), visited, undefinedSubst)
case _ =>
}
case _ =>
}
}
}
trait AliasDesignatorVisitor extends ScalaTypeVisitor {
def stopDesignatorAliasOnFailure: Boolean = false
override def visitDesignatorType(des: ScDesignatorType) {
des.element match {
case a: ScTypeAlias =>
val upper: ScType = a.upperBound.toOption match {
case Some(up) => up
case _ => return
}
val res = conformsInner(l, upper, visited, undefinedSubst)
if (stopDesignatorAliasOnFailure || res._1) result = res
case _ =>
}
}
}
trait CompoundTypeVisitor extends ScalaTypeVisitor {
override def visitCompoundType(c: ScCompoundType) {
val comps = c.components
val iterator = comps.iterator
while (iterator.hasNext) {
val comp = iterator.next()
val t = conformsInner(l, comp, HashSet.empty, undefinedSubst)
if (t._1) {
result = (true, t._2)
return
}
}
result = l.isAliasType match {
case Some(AliasType(_: ScTypeAliasDefinition, Success(comp: ScCompoundType, _), _)) =>
conformsInner(comp, c, HashSet.empty, undefinedSubst)
case _ => (false, undefinedSubst)
}
}
}
trait ExistentialVisitor extends ScalaTypeVisitor {
override def visitExistentialType(ex: ScExistentialType) {
result = conformsInner(l, ex.skolem, HashSet.empty, undefinedSubst)
}
}
    trait ProjectionVisitor extends ScalaTypeVisitor {
      // When true, a failed conformance through a projected alias bound is
      // recorded as a definitive failure instead of staying undecided.
      def stopProjectionAliasOnFailure: Boolean = false
      override def visitProjectionType(proj2: ScProjectionType) {
        proj2.actualElement match {
          // Projection of a type alias: conform through its substituted
          // upper bound.
          case ta: ScTypeAlias =>
            val subst = proj2.actualSubst
            val upper: ScType = ta.upperBound.toOption match {
              case Some(up) => up
              case _ => return
            }
            val uBound = subst.subst(upper)
            val res = conformsInner(l, uBound, visited, undefinedSubst)
            if (stopProjectionAliasOnFailure || res._1) result = res
          case _ =>
            l match {
              // Both sides project the same element: compare the prefixes.
              case proj1: ScProjectionType if ScEquivalenceUtil.smartEquivalence(proj1.actualElement, proj2.actualElement) =>
                val projected1 = proj1.projected
                val projected2 = proj2.projected
                result = conformsInner(projected1, projected2, visited, undefinedSubst)
              case _ =>
                // Otherwise resolve the projected element to a type and
                // conform against that (synthetic class, binding, parameter
                // or field — the last three are handled identically).
                proj2.actualElement match {
                  case syntheticClass: ScSyntheticClass =>
                    result = conformsInner(l, syntheticClass.t, HashSet.empty, undefinedSubst)
                  case v: ScBindingPattern =>
                    val res = v.getType(TypingContext.empty)
                    if (res.isEmpty) result = (false, undefinedSubst)
                    else result = conformsInner(l, proj2.actualSubst.subst(res.get), visited, undefinedSubst)
                  case v: ScParameter =>
                    val res = v.getType(TypingContext.empty)
                    if (res.isEmpty) result = (false, undefinedSubst)
                    else result = conformsInner(l, proj2.actualSubst.subst(res.get), visited, undefinedSubst)
                  case v: ScFieldId =>
                    val res = v.getType(TypingContext.empty)
                    if (res.isEmpty) result = (false, undefinedSubst)
                    else result = conformsInner(l, proj2.actualSubst.subst(res.get), visited, undefinedSubst)
                  case _ =>
                }
            }
        }
      }
    }
    // Outcome of the visit; null means "undecided — try the next strategy".
    private var result: (Boolean, ScUndefinedSubstitutor) = null
    // Substitutor accumulating constraints on undefined type parameters.
    private var undefinedSubst: ScUndefinedSubstitutor = subst
    // Final answer after the visit (may be null if nothing decided).
    def getResult = result
    /**
     * Left side is a standard type (Any, AnyVal, AnyRef, Nothing, Null,
     * Singleton or a primitive value type). Right-visitors are tried in
     * order; the first strategy that assigns `result` decides the answer.
     */
    override def visitStdType(x: StdType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor
        with AbstractVisitor
        with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      // Weak conformance between numeric value types: each case lists the
      // left-hand types the right-hand type weakly conforms to.
      if (checkWeak && r.isInstanceOf[ValType]) {
        (r, x) match {
          case (types.Byte, types.Short | types.Int | types.Long | types.Float | types.Double) =>
            result = (true, undefinedSubst)
            return
          case (types.Short, types.Int | types.Long | types.Float | types.Double) =>
            result = (true, undefinedSubst)
            return
          case (types.Char, types.Byte | types.Short | types.Int | types.Long | types.Float | types.Double) =>
            result = (true, undefinedSubst)
            return
          case (types.Int, types.Long | types.Float | types.Double) =>
            result = (true, undefinedSubst)
            return
          case (types.Long, types.Float | types.Double) =>
            result = (true, undefinedSubst)
            return
          case (types.Float, types.Double) =>
            result = (true, undefinedSubst)
            return
          case _ =>
        }
      }
      // Any is the top type: everything conforms to it.
      if (x eq types.Any) {
        result = (true, undefinedSubst)
        return
      }
      // Null does not conform to Nothing.
      if (x == types.Nothing && r == types.Null) {
        result = (false, undefinedSubst)
        return
      }
      rightVisitor = new NothingNullVisitor with TypeParameterTypeVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new ThisVisitor with DesignatorVisitor
        with ParameterizedAliasVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new AliasDesignatorVisitor with CompoundTypeVisitor with ExistentialVisitor
        with ProjectionVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      // Only Nothing conforms to Null.
      if (x eq types.Null) {
        result = (r == types.Nothing, undefinedSubst)
        return
      }
      if (x eq types.AnyRef) {
        // Any, AnyVal and the primitive value types are not reference types.
        if (r eq types.Any) {
          result = (false, undefinedSubst)
          return
        }
        else if (r eq types.AnyVal) {
          result = (false, undefinedSubst)
          return
        }
        else if (r.isInstanceOf[ValType]) {
          result = (false, undefinedSubst)
          return
        }
        else if (!r.isInstanceOf[ScExistentialType]) {
          // Re-run alias/projection expansion in strict mode: a failing
          // bound must be recorded as failure, not left undecided.
          rightVisitor = new AliasDesignatorVisitor with ProjectionVisitor {
            override def stopProjectionAliasOnFailure: Boolean = true
            override def stopDesignatorAliasOnFailure: Boolean = true
          }
          r.visitType(rightVisitor)
          if (result != null) return
          result = (true, undefinedSubst)
          return
        }
      }
      if (x eq Singleton) {
        // NOTE(review): unlike the surrounding branches this one does not
        // `return`, so execution falls through to the AnyVal/ValType checks
        // below — confirm this is intentional and not a missing `return`.
        result = (false, undefinedSubst)
      }
      // Only value types conform to AnyVal.
      if (x eq types.AnyVal) {
        result = (r.isInstanceOf[ValType], undefinedSubst)
        return
      }
      // Two distinct primitive value types do not conform to each other
      // (weak conformance was already handled above).
      if (l.isInstanceOf[ValType] && r.isInstanceOf[ValType]) {
        result = (false, undefinedSubst)
        return
      }
    }
    /**
     * Left side is a compound type `U1 with ... with Un { R }`: `r` must
     * conform to every component and subsume every declared member of `R`.
     */
    override def visitCompoundType(c: ScCompoundType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
        with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
        with TypeParameterTypeVisitor with ThisVisitor with DesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new ParameterizedAliasVisitor with AliasDesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      /*If T<:Ui for i=1,...,n and for every binding d of a type or value x in R there exists a member binding
      of x in T which subsumes d, then T conforms to the compound type U1 with . . . with Un {R }.

      U1 with . . . with Un {R } === t1
      T === t2
      U1 with . . . with Un === comps1
      Un === compn*/
      // Checks that `r` has a member subsuming the declared signature `s`;
      // side effect: threads the substitutor through `undefinedSubst`.
      def workWithSignature(s: Signature, retType: ScType): Boolean = {
        val processor = new CompoundTypeCheckSignatureProcessor(s,retType, undefinedSubst, s.substitutor)
        processor.processType(r, s.namedElement)
        undefinedSubst = processor.getUndefinedSubstitutor
        processor.getResult
      }
      // Same check for a declared type-alias member of the refinement.
      def workWithTypeAlias(sign: TypeAliasSignature): Boolean = {
        val processor = new CompoundTypeCheckTypeAliasProcessor(sign, undefinedSubst, ScSubstitutor.empty)
        processor.processType(r, sign.ta)
        undefinedSubst = processor.getUndefinedSubstitutor
        processor.getResult
      }
      // All components AND all refinement members must check out.
      result = (c.components.forall(comp => {
        val t = conformsInner(comp, r, HashSet.empty, undefinedSubst)
        undefinedSubst = t._2
        t._1
      }) && c.signatureMap.forall {
        case (s: Signature, retType) => workWithSignature(s, retType)
      } && c.typesMap.forall {
        case (s, sign) => workWithTypeAlias(sign)
      }, undefinedSubst)
    }
    /**
     * Left side is a projection `T#A` (or `path.A`). After the generic
     * right-visitor strategies, compares projections of the same element by
     * their prefixes, then expands a projected type alias via its bounds.
     */
    override def visitProjectionType(proj: ScProjectionType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
        with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
        with TypeParameterTypeVisitor with ThisVisitor with DesignatorVisitor with ParameterizedAliasVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new ParameterizedAliasVisitor with AliasDesignatorVisitor with CompoundTypeVisitor with ProjectionVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      r match {
        case proj1: ScProjectionType if ScEquivalenceUtil.smartEquivalence(proj1.actualElement, proj.actualElement) =>
          // Same projected element on both sides: conformance of prefixes
          // decides.
          val projected1 = proj.projected
          val projected2 = proj1.projected
          result = conformsInner(projected1, projected2, visited, undefinedSubst)
          // NOTE(review): `result` is a tuple and never null after the call
          // above, so this check always returns — confirm that a failed
          // prefix comparison is meant to be final here (the name-equality
          // case below only returns on success).
          if (result != null) return
        case proj1: ScProjectionType if proj1.actualElement.name == proj.actualElement.name =>
          // Elements only share a name: a successful prefix comparison is
          // accepted, a failure falls through to the alias handling below.
          val projected1 = proj.projected
          val projected2 = proj1.projected
          val t = conformsInner(projected1, projected2, visited, undefinedSubst)
          if (t._1) {
            result = t
            return
          }
        case _ =>
      }
      proj.actualElement match {
        case ta: ScTypeAlias =>
          val subst = proj.actualSubst
          if (!ta.isExistentialTypeAlias) {
            // Ordinary alias: r must conform to the alias' lower bound.
            val lower = ta.lowerBound.toOption match {
              case Some(low) => low
              case _ =>
                result = (false, undefinedSubst)
                return
            }
            result = conformsInner(subst.subst(lower), r, visited, undefinedSubst)
            return
          } else {
            // Existential alias: r must lie between the alias' bounds.
            val lower = ta.lowerBound.toOption match {
              case Some(low) => low
              case _ =>
                result = (false, undefinedSubst)
                return
            }
            val upper = ta.upperBound.toOption match {
              case Some(up) => up
              case _ =>
                result = (false, undefinedSubst)
                return
            }
            val t = conformsInner(subst.subst(upper), r, visited, undefinedSubst)
            if (!t._1) {
              result = (false, undefinedSubst)
              return
            }
            undefinedSubst = t._2
            result = conformsInner(r, subst.subst(lower), visited, undefinedSubst)
            return
          }
        case _ =>
      }
      rightVisitor = new ExistentialVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
    }
override def visitJavaArrayType(a1: JavaArrayType) {
var rightVisitor: ScalaTypeVisitor =
new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
with ParameterizedAbstractVisitor {}
r.visitType(rightVisitor)
if (result != null) return
checkEquiv()
if (result != null) return
rightVisitor = new ExistentialSimplification with SkolemizeVisitor
with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
with TypeParameterTypeVisitor with ThisVisitor with DesignatorVisitor {}
r.visitType(rightVisitor)
if (result != null) return
rightVisitor = new ParameterizedAliasVisitor {}
r.visitType(rightVisitor)
if (result != null) return
r match {
case a2: JavaArrayType =>
val arg1 = a1.arg
val arg2 = a2.arg
val argsPair = (arg1, arg2)
argsPair match {
case (ScAbstractType(tpt, lower, upper), r) =>
val right =
if (tpt.args.length > 0 && !r.isInstanceOf[ScParameterizedType])
ScParameterizedType(r, tpt.args)
else r
if (!upper.equiv(Any)) {
val t = conformsInner(upper, right, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(right, lower, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
case (l, ScAbstractType(tpt, lower, upper)) =>
val left =
if (tpt.args.length > 0 && !l.isInstanceOf[ScParameterizedType])
ScParameterizedType(l, tpt.args)
else l
if (!upper.equiv(Any)) {
var t = conformsInner(upper, left, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(left, lower, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
case (u: ScUndefinedType, rt) =>
undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
case (lt, u: ScUndefinedType) =>
undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
case (tp, _) if tp.isAliasType != None && tp.isAliasType.get.ta.isExistentialTypeAlias =>
val y = Conformance.conformsInner(argsPair._1, argsPair._2, HashSet.empty, undefinedSubst)
if (!y._1) {
result = (false, undefinedSubst)
return
}
else undefinedSubst = y._2
case _ =>
val t = Equivalence.equivInner(argsPair._1, argsPair._2, undefinedSubst, falseUndef = false)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
result = (true, undefinedSubst)
return
case p2: ScParameterizedType =>
val args = p2.typeArgs
val des = p2.designator
if (args.length == 1 && (ScType.extractClass(des) match {
case Some(q) => q.qualifiedName == "scala.Array"
case _ => false
})) {
val arg = a1.arg
val argsPair = (arg, args(0))
argsPair match {
case (ScAbstractType(tpt, lower, upper), r) =>
val right =
if (tpt.args.length > 0 && !r.isInstanceOf[ScParameterizedType])
ScParameterizedType(r, tpt.args)
else r
if (!upper.equiv(Any)) {
val t = conformsInner(upper, right, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(right, lower, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
case (l, ScAbstractType(tpt, lower, upper)) =>
val left =
if (tpt.args.length > 0 && !l.isInstanceOf[ScParameterizedType])
ScParameterizedType(l, tpt.args)
else l
if (!upper.equiv(Any)) {
val t = conformsInner(upper, left, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(left, lower, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
case (u: ScUndefinedType, rt) =>
undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
case (lt, u: ScUndefinedType) =>
undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
case (tp, _) if tp.isAliasType != None && tp.isAliasType.get.ta.isExistentialTypeAlias =>
val y = Conformance.conformsInner(argsPair._1, argsPair._2, HashSet.empty, undefinedSubst)
if (!y._1) {
result = (false, undefinedSubst)
return
}
else undefinedSubst = y._2
case _ =>
val t = Equivalence.equivInner(argsPair._1, argsPair._2, undefinedSubst, falseUndef = false)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
result = (true, undefinedSubst)
return
}
case _ =>
}
rightVisitor = new AliasDesignatorVisitor with CompoundTypeVisitor with ExistentialVisitor
with ProjectionVisitor {}
r.visitType(rightVisitor)
if (result != null) return
}
override def visitParameterizedType(p: ScParameterizedType) {
var rightVisitor: ScalaTypeVisitor =
new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor {}
r.visitType(rightVisitor)
if (result != null) return
p.designator match {
case a: ScAbstractType =>
val subst = new ScSubstitutor(Map(a.tpt.args.zip(p.typeArgs).map {
case (tpt: ScTypeParameterType, tp: ScType) =>
((tpt.param.name, ScalaPsiUtil.getPsiElementId(tpt.param)), tp)
}: _*), Map.empty, None)
val upper: ScType =
subst.subst(a.upper) match {
case ScParameterizedType(upper, _) => ScParameterizedType(upper, p.typeArgs)
case upper => ScParameterizedType(upper, p.typeArgs)
}
if (!upper.equiv(Any)) {
result = conformsInner(upper, r, visited, undefinedSubst, checkWeak)
} else {
result = (true, undefinedSubst)
}
if (result._1) {
val lower: ScType =
subst.subst(a.lower) match {
case ScParameterizedType(lower, _) => ScParameterizedType(lower, p.typeArgs)
case lower => ScParameterizedType(lower, p.typeArgs)
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(r, lower, visited, result._2, checkWeak)
if (t._1) result = t
}
}
return
case _ =>
}
rightVisitor = new ParameterizedAbstractVisitor {}
r.visitType(rightVisitor)
if (result != null) return
checkEquiv()
if (result != null) return
rightVisitor = new ExistentialSimplification with SkolemizeVisitor {}
r.visitType(rightVisitor)
if (result != null) return
p.designator match {
case s: ScSkolemizedType =>
s.lower match {
case ScParameterizedType(lower, _) =>
result = conformsInner(lower, r, visited, undefinedSubst, checkWeak)
return
case lower =>
result = conformsInner(lower, r, visited, undefinedSubst, checkWeak)
return
}
case _ =>
}
rightVisitor = new ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
with ThisVisitor with DesignatorVisitor {}
r.visitType(rightVisitor)
if (result != null) return
def processEquivalentDesignators(args2: Seq[ScType]): Unit = {
val args1 = p.typeArgs
val des1 = p.designator
if (args1.length != args2.length) {
result = (false, undefinedSubst)
return
}
ScType.extractDesignated(des1, withoutAliases = false) match {
case Some((ownerDesignator, _)) =>
val parametersIterator = ownerDesignator match {
case td: ScTypeParametersOwner => td.typeParameters.iterator
case ownerDesignator: PsiTypeParameterListOwner => ownerDesignator.getTypeParameters.iterator
case _ =>
result = (false, undefinedSubst)
return
}
result = checkParameterizedType(parametersIterator, args1, args2,
undefinedSubst, visited, checkWeak)
return
case _ =>
result = (false, undefinedSubst)
return
}
}
//todo: looks like this code can be simplified and unified.
//todo: what if left is type alias declaration, right is type alias definition, which is alias to that declaration?
p.designator match {
case proj: ScProjectionType if proj.actualElement.isInstanceOf[ScTypeAlias] =>
r match {
case ScParameterizedType(proj2: ScProjectionType, args2)
if proj.actualElement.isInstanceOf[ScTypeAliasDeclaration] && (proj equiv proj2) =>
processEquivalentDesignators(args2)
return
case _ =>
}
val args = p.typeArgs
val a = proj.actualElement.asInstanceOf[ScTypeAlias]
val subst = proj.actualSubst
val lower: ScType = a.lowerBound.toOption match {
case Some(low) => low
case _ =>
result = (false, undefinedSubst)
return
}
val lBound = subst.subst(lower)
val genericSubst = ScalaPsiUtil.
typesCallSubstitutor(a.typeParameters.map(tp => (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
val s = subst.followed(genericSubst)
result = conformsInner(s.subst(lBound), r, visited, undefinedSubst)
return
case ScDesignatorType(a: ScTypeAlias) =>
r match {
case ScParameterizedType(des2@ScDesignatorType(a2: ScTypeAlias), args2)
if a.isInstanceOf[ScTypeAliasDeclaration] && (p.designator equiv des2) =>
processEquivalentDesignators(args2)
return
case _ =>
}
val args = p.typeArgs
val lower: ScType = a.lowerBound.toOption match {
case Some(low) => low
case _ =>
result = (false, undefinedSubst)
return
}
val lBound = lower
val genericSubst = ScalaPsiUtil.
typesCallSubstitutor(a.typeParameters.map(tp => (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
result = conformsInner(genericSubst.subst(lBound), r, visited, undefinedSubst)
return
case _ =>
}
rightVisitor = new ParameterizedAliasVisitor with TypeParameterTypeVisitor {}
r.visitType(rightVisitor)
if (result != null) return
r match {
case _: JavaArrayType =>
val args = p.typeArgs
val des = p.designator
if (args.length == 1 && (ScType.extractClass(des) match {
case Some(q) => q.qualifiedName == "scala.Array"
case _ => false
})) {
val arg = r.asInstanceOf[JavaArrayType].arg
val argsPair = (arg, args(0))
argsPair match {
case (ScAbstractType(tpt, lower, upper), r) =>
val right =
if (tpt.args.length > 0 && !r.isInstanceOf[ScParameterizedType])
ScParameterizedType(r, tpt.args)
else r
if (!upper.equiv(Any)) {
val t = conformsInner(upper, right, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(right, lower, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
case (l, ScAbstractType(tpt, lower, upper)) =>
val left =
if (tpt.args.length > 0 && !l.isInstanceOf[ScParameterizedType])
ScParameterizedType(l, tpt.args)
else l
if (!upper.equiv(Any)) {
val t = conformsInner(upper, left, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
if (!lower.equiv(Nothing)) {
val t = conformsInner(left, lower, visited, undefinedSubst, checkWeak)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
case (u: ScUndefinedType, rt) =>
undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
case (lt, u: ScUndefinedType) =>
undefinedSubst = undefinedSubst.addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
undefinedSubst = undefinedSubst.addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
case (tp, _) if tp.isAliasType.isDefined && tp.isAliasType.get.ta.isExistentialTypeAlias =>
val y = Conformance.conformsInner(argsPair._1, argsPair._2, HashSet.empty, undefinedSubst)
if (!y._1) {
result = (false, undefinedSubst)
return
}
else undefinedSubst = y._2
case _ =>
val t = Equivalence.equivInner(argsPair._1, argsPair._2, undefinedSubst, falseUndef = false)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
}
result = (true, undefinedSubst)
return
}
case _ =>
}
r match {
case p2: ScParameterizedType =>
val des1 = p.designator
val des2 = p2.designator
val args1 = p.typeArgs
val args2 = p2.typeArgs
(des1, des2) match {
case (owner1: ScTypeParameterType, _: ScTypeParameterType) =>
if (des1 equiv des2) {
if (args1.length != args2.length) {
result = (false, undefinedSubst)
return
}
result = checkParameterizedType(owner1.args.map(_.param).iterator, args1, args2,
undefinedSubst, visited, checkWeak)
return
} else {
result = (false, undefinedSubst)
return
}
case (_: ScUndefinedType, owner2: ScUndefinedType) =>
val parameterType = owner2.tpt
var anotherType: ScType = ScParameterizedType(des1, parameterType.args)
var args1replace = args1
if (args1.length != args2.length) {
ScType.extractClassType(l) match {
case Some((clazz, classSubst)) =>
val t: (Boolean, ScType) = parentWithArgNumber(clazz, classSubst, args2.length)
if (!t._1) {
result = (false, undefinedSubst)
return
}
t._2 match {
case ScParameterizedType(newDes, newArgs) =>
args1replace = newArgs
anotherType = ScParameterizedType(newDes, parameterType.args)
case _ =>
result = (false, undefinedSubst)
return
}
case _ =>
result = (false, undefinedSubst)
return
}
}
undefinedSubst = undefinedSubst.addUpper((owner2.tpt.name, owner2.tpt.getId), anotherType)
result = checkParameterizedType(owner2.tpt.args.map(_.param).iterator, args1replace, args2,
undefinedSubst, visited, checkWeak)
return
case (owner1: ScUndefinedType, _) =>
val parameterType = owner1.tpt
var anotherType: ScType = ScParameterizedType(des2, parameterType.args)
var args2replace = args2
if (args1.length != args2.length) {
ScType.extractClassType(r) match {
case Some((clazz, classSubst)) =>
val t: (Boolean, ScType) = parentWithArgNumber(clazz, classSubst, args1.length)
if (!t._1) {
result = (false, undefinedSubst)
return
}
t._2 match {
case ScParameterizedType(newDes, newArgs) =>
args2replace = newArgs
anotherType = ScParameterizedType(newDes, parameterType.args)
case _ =>
result = (false, undefinedSubst)
return
}
case _ =>
result = (false, undefinedSubst)
return
}
}
undefinedSubst = undefinedSubst.addLower((owner1.tpt.name, owner1.tpt.getId), anotherType)
result = checkParameterizedType(owner1.tpt.args.map(_.param).iterator, args1, args2replace,
undefinedSubst, visited, checkWeak)
return
case (_, owner2: ScUndefinedType) =>
val parameterType = owner2.tpt
var anotherType: ScType = ScParameterizedType(des1, parameterType.args)
var args1replace = args1
if (args1.length != args2.length) {
ScType.extractClassType(l) match {
case Some((clazz, classSubst)) =>
val t: (Boolean, ScType) = parentWithArgNumber(clazz, classSubst, args2.length)
if (!t._1) {
result = (false, undefinedSubst)
return
}
t._2 match {
case ScParameterizedType(newDes, newArgs) =>
args1replace = newArgs
anotherType = ScParameterizedType(newDes, parameterType.args)
case _ =>
result = (false, undefinedSubst)
return
}
case _ =>
result = (false, undefinedSubst)
return
}
}
undefinedSubst = undefinedSubst.addUpper((owner2.tpt.name, owner2.tpt.getId), anotherType)
result = checkParameterizedType(owner2.tpt.args.map(_.param).iterator, args1, args1replace,
undefinedSubst, visited, checkWeak)
return
case _ if des1 equiv des2 =>
if (args1.length != args2.length) {
result = (false, undefinedSubst)
return
}
ScType.extractClass(des1) match {
case Some(ownerClazz) =>
val parametersIterator = ownerClazz match {
case td: ScTypeDefinition => td.typeParameters.iterator
case _ => ownerClazz.getTypeParameters.iterator
}
result = checkParameterizedType(parametersIterator, args1, args2,
undefinedSubst, visited, checkWeak)
return
case _ =>
result = (false, undefinedSubst)
return
}
case (_, t: ScTypeParameterType) if t.args.length == p2.typeArgs.length =>
val subst = new ScSubstitutor(Map(t.args.zip(p.typeArgs).map {
case (tpt: ScTypeParameterType, tp: ScType) =>
((tpt.param.name, ScalaPsiUtil.getPsiElementId(tpt.param)), tp)
}: _*), Map.empty, None)
result = conformsInner(l, subst.subst(t.upper.v), visited, undefinedSubst, checkWeak)
return
case (proj1: ScProjectionType, proj2: ScProjectionType)
if ScEquivalenceUtil.smartEquivalence(proj1.actualElement, proj2.actualElement) =>
val t = conformsInner(proj1, proj2, visited, undefinedSubst)
if (!t._1) {
result = (false, undefinedSubst)
return
}
undefinedSubst = t._2
if (args1.length != args2.length) {
result = (false, undefinedSubst)
return
}
val parametersIterator = proj1.actualElement match {
case td: ScTypeParametersOwner => td.typeParameters.iterator
case td: PsiTypeParameterListOwner => td.getTypeParameters.iterator
case _ =>
result = (false, undefinedSubst)
return
}
result = checkParameterizedType(parametersIterator, args1, args2,
undefinedSubst, visited, checkWeak)
return
case _ =>
}
case _ =>
}
p.designator match {
case t: ScTypeParameterType if t.args.length == p.typeArgs.length =>
val subst = new ScSubstitutor(Map(t.args.zip(p.typeArgs).map {
case (tpt: ScTypeParameterType, tp: ScType) =>
((tpt.param.name, ScalaPsiUtil.getPsiElementId(tpt.param)), tp)
}: _*), Map.empty, None)
result = conformsInner(subst.subst(t.lower.v), r, visited, undefinedSubst, checkWeak)
return
case _ =>
}
rightVisitor = new AliasDesignatorVisitor with CompoundTypeVisitor with ExistentialVisitor
with ProjectionVisitor {}
r.visitType(rightVisitor)
if (result != null) return
}
    // Left side is an existential type (`T forSome { ... }`).
    // Strategy: run the generic right-hand visitors, try simplifying the
    // existential, then skolemize each wildcard into a synthetic type
    // parameter, replace those with undefined types, and require that a
    // consistent instantiation exists that respects the wildcard bounds.
    override def visitExistentialType(e: ScExistentialType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor
          with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      // If simplification produced a different type, delegate to it entirely.
      val simplified = e.simplify()
      if (simplified != l) {
        result = conformsInner(simplified, r, visited, undefinedSubst, checkWeak)
        return
      }
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
        with TypeParameterTypeVisitor with ThisVisitor with DesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new ParameterizedAliasVisitor with AliasDesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      // Maps each wildcard name to the synthetic type parameter created for it.
      val tptsMap = new mutable.HashMap[String, ScTypeParameterType]()
      // Rewrites occurrences of the existential's wildcards (both as type
      // variables and as existential-clause aliases) into synthetic parameters.
      def updateType(t: ScType): ScType = {
        t.recursiveUpdate {
          case t: ScTypeVariable =>
            e.wildcards.find(_.name == t.name) match {
              case Some(wild) =>
                val tpt = tptsMap.getOrElseUpdate(wild.name,
                  ScTypeParameterType(wild.name,
                    wild.args,
                    wild.lowerBound,
                    wild.upperBound,
                    ScalaPsiElementFactory.createTypeParameterFromText(
                      wild.name, PsiManager.getInstance(DecompilerUtil.obtainProject) //todo: remove obtainProject?
                    ))
                )
                (true, tpt)
              case _ => (false, t)
            }
          case tp@ScDesignatorType(ta: ScTypeAlias) if ta.getContext.isInstanceOf[ScExistentialClause] =>
            e.wildcards.find(_.name == ta.name) match {
              case Some(wild) =>
                val tpt = tptsMap.getOrElseUpdate(ta.name,
                  ScTypeParameterType(wild.name,
                    wild.args,
                    wild.lowerBound,
                    wild.upperBound,
                    ScalaPsiElementFactory.createTypeParameterFromText(
                      wild.name, PsiManager.getInstance(ta.getProject)
                    ))
                )
                (true, tpt)
              case _ => (false, tp)
            }
          case ex: ScExistentialType => (true, ex) //todo: this seems just fast solution
          case tp: ScType => (false, tp)
        }
      }
      val q = updateType(e.quantified)
      // Bind every synthetic parameter to an undefined type so conformance
      // checking can infer concrete instantiations for the wildcards.
      val subst = tptsMap.foldLeft(ScSubstitutor.empty) {
        case (subst: ScSubstitutor, (_, tpt)) => subst.bindT((tpt.name, ScalaPsiUtil.getPsiElementId(tpt.param)),
          ScUndefinedType(tpt))
      }
      val res = conformsInner(subst.subst(q), r, HashSet.empty, undefinedSubst)
      if (!res._1) {
        result = (false, undefinedSubst)
      } else {
        val unSubst: ScUndefinedSubstitutor = res._2
        unSubst.getSubstitutor(notNonable = false) match {
          case Some(uSubst) =>
            // Each inferred instantiation must stay within its wildcard bounds.
            for (tpt <- tptsMap.values if result == null) {
              val substedTpt = uSubst.subst(tpt)
              var t = conformsInner(substedTpt, uSubst.subst(updateType(tpt.lower.v)), immutable.Set.empty, undefinedSubst)
              if (substedTpt != tpt && !t._1) {
                result = (false, undefinedSubst)
                return
              }
              undefinedSubst = t._2
              t = conformsInner(uSubst.subst(updateType(tpt.upper.v)), substedTpt, immutable.Set.empty, undefinedSubst)
              if (substedTpt != tpt && !t._1) {
                result = (false, undefinedSubst)
                return
              }
              undefinedSubst = t._2
            }
            if (result == null) {
              // Drop constraints that mention the synthetic wildcard
              // parameters; only constraints on outer undefined types escape.
              val filterFunction: (((String, String), HashSet[ScType])) => Boolean = {
                case (id: (String, String), types: HashSet[ScType]) =>
                  !tptsMap.values.exists {
                    case tpt: ScTypeParameterType => id ==(tpt.name, ScalaPsiUtil.getPsiElementId(tpt.param))
                  }
              }
              val newUndefSubst = new ScUndefinedSubstitutor(
                unSubst.upperMap.filter(filterFunction), unSubst.lowerMap.filter(filterFunction),
                unSubst.upperAdditionalMap.filter(filterFunction), unSubst.lowerAdditionalMap.filter(filterFunction))
              undefinedSubst += newUndefSubst
              result = (true, undefinedSubst)
            }
          case None => result = (false, undefinedSubst)
        }
      }
    }
    // Left side is `C.this`: widen it to the class type (with projections)
    // and delegate to ordinary conformance checking.
    override def visitThisType(t: ScThisType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
        with TypeParameterTypeVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      val clazz = t.clazz
      val res = clazz.getTypeWithProjections(TypingContext.empty)
      // No widened type available means conformance cannot be established.
      if (res.isEmpty) result = (false, undefinedSubst)
      else result = conformsInner(res.get, r, visited, subst, checkWeak)
    }
    // Left side is a designator (direct reference to a class, alias, val...).
    override def visitDesignatorType(des: ScDesignatorType) {
      // A designator for a stable val widens to its value type first.
      des.getValType match {
        case Some(v) =>
          result = conformsInner(v, r, visited, subst, checkWeak)
          return
        case _ =>
      }
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
        {}
      r.visitType(rightVisitor)
      if (result != null) return
      // Existential type alias: r must lie between its bounds — the alias
      // upper bound must conform down to r, and r must conform down to the
      // alias lower bound.
      des.element match {
        case a: ScTypeAlias if a.isExistentialTypeAlias =>
          val upper: ScType = a.upperBound.toOption match {
            case Some(u) => u
            case _ =>
              result = (false, undefinedSubst)
              return
          }
          val t = conformsInner(upper, r, visited, undefinedSubst)
          if (!t._1) {
            result = (false, undefinedSubst)
            return
          }
          undefinedSubst = t._2
          val lower: ScType = a.lowerBound.toOption match {
            case Some(low) => low
            case _ =>
              result = (false, undefinedSubst)
              return
          }
          result = conformsInner(r, lower, visited, undefinedSubst)
          return
        case _ =>
      }
      rightVisitor = new TypeParameterTypeVisitor
        with ThisVisitor with DesignatorVisitor with ParameterizedAliasVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new AliasDesignatorVisitor with ProjectionVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      des.element match {
        case a: ScTypeAlias =>
          // Ordinary alias: dealias via its lower bound and recurse.
          if (!a.isExistentialTypeAlias) {
            val lower: ScType = a.lowerBound.toOption match {
              case Some(low) => low
              case _ =>
                result = (false, undefinedSubst)
                return
            }
            result = conformsInner(lower, r, visited, undefinedSubst)
          }
          else {
            // Existential alias reached via the fall-through path: same
            // bounds check as above.
            val upper: ScType = a.upperBound.toOption match {
              case Some(low) => low
              case _ =>
                result = (false, undefinedSubst)
                return
            }
            val t = conformsInner(upper, r, visited, undefinedSubst)
            if (!t._1) {
              result = (false, undefinedSubst)
              return
            }
            undefinedSubst = t._2
            val lower: ScType = a.lowerBound.toOption match {
              case Some(low) => low
              case _ =>
                result = (false, undefinedSubst)
                return
            }
            result = conformsInner(r, lower, visited, undefinedSubst)
          }
          return
        case _ =>
      }
      rightVisitor = new CompoundTypeVisitor with ExistentialVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
    }
    // Left side is a type parameter reference. Conformance holds when the
    // parameter's lower bound conforms to r, or — when r is itself a type
    // parameter — when l conforms to r's upper bound.
    override def visitTypeParameterType(tpt1: ScTypeParameterType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      // `Null` on the right is validated against the parameter's lower bound
      // instead of being accepted unconditionally.
      trait TypeParameterTypeNothingNullVisitor extends NothingNullVisitor {
        override def visitStdType(x: StdType) {
          if (x eq types.Nothing) result = (true, undefinedSubst)
          else if (x eq types.Null) {
            result = conformsInner(tpt1.lower.v, r, HashSet.empty, undefinedSubst)
          }
        }
      }
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with TypeParameterTypeNothingNullVisitor
        with DesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      r match {
        case tpt2: ScTypeParameterType =>
          // Try lower(tpt1) <: r first, then l <: upper(tpt2).
          val res = conformsInner(tpt1.lower.v, r, HashSet.empty, undefinedSubst)
          if (res._1) {
            result = res
            return
          }
          result = conformsInner(l, tpt2.upper.v, HashSet.empty, undefinedSubst)
          return
        case _ =>
      }
      val t = conformsInner(tpt1.lower.v, r, HashSet.empty, undefinedSubst)
      if (t._1) {
        result = t
        return
      }
      rightVisitor = new ParameterizedAliasVisitor with AliasDesignatorVisitor with CompoundTypeVisitor
        with ExistentialVisitor with ProjectionVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      result = (false, undefinedSubst)
    }
    // Left side is a skolem (an instance of an existential): it conforms if
    // its lower bound conforms to r; otherwise fall back to the generic
    // right-hand visitors.
    override def visitSkolemizedType(s: ScSkolemizedType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor
          with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      val t = conformsInner(s.lower, r, HashSet.empty, undefinedSubst)
      if (t._1) {
        result = t
        return
      }
      rightVisitor = new OtherNonvalueTypesVisitor with NothingNullVisitor
        with TypeParameterTypeVisitor with ThisVisitor with DesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new ParameterizedAliasVisitor with AliasDesignatorVisitor with CompoundTypeVisitor
        with ExistentialVisitor with ProjectionVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
    }
    // Left side is a bare type variable: no variable-specific rule applies —
    // only the generic right-hand visitors can decide; otherwise `result`
    // stays null and the caller falls through to inheritance-based checks.
    override def visitTypeVariable(t: ScTypeVariable) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification with SkolemizeVisitor
        with ParameterizedSkolemizeVisitor with OtherNonvalueTypesVisitor with NothingNullVisitor
        with TypeParameterTypeVisitor with ThisVisitor with DesignatorVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      rightVisitor = new ParameterizedAliasVisitor with AliasDesignatorVisitor with CompoundTypeVisitor
        with ExistentialVisitor with ProjectionVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
    }
    // Left side is an undefined type (a variable currently being inferred).
    // Against another undefined type, ordering constraints are recorded by
    // nesting level; against anything else, r becomes a lower bound of u.
    override def visitUndefinedType(u: ScUndefinedType) {
      val rightVisitor = new ValDesignatorSimplification {
        override def visitUndefinedType(u2: ScUndefinedType) {
          // NOTE(review): both level branches record the identical constraint
          // (u as an upper bound of u2); verify the second branch was not
          // intended to constrain u by u2 instead.
          if (u2.level > u.level) {
            result = (true, undefinedSubst.addUpper((u2.tpt.name, u2.tpt.getId), u))
          } else if (u.level > u2.level) {
            result = (true, undefinedSubst.addUpper((u2.tpt.name, u2.tpt.getId), u))
          } else {
            result = (true, undefinedSubst)
          }
        }
      }
      r.visitType(rightVisitor)
      if (result == null)
        result = (true, undefinedSubst.addLower((u.tpt.name, u.tpt.getId), r))
    }
    // Left side is a method type: r must also be a method type with the same
    // arity, a conforming return type, matching varargs flags, and pairwise
    // *equivalent* (not merely conforming) parameter types.
    override def visitMethodType(m1: ScMethodType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor
          with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification {}
      r.visitType(rightVisitor)
      if (result != null) return
      r match {
        case m2: ScMethodType =>
          val params1 = m1.params
          val params2 = m2.params
          val returnType1 = m1.returnType
          val returnType2 = m2.returnType
          if (params1.length != params2.length) {
            result = (false, undefinedSubst)
            return
          }
          // Return types are covariant: returnType1 must be a supertype.
          var t = conformsInner(returnType1, returnType2, HashSet.empty, undefinedSubst)
          if (!t._1) {
            result = (false, undefinedSubst)
            return
          }
          undefinedSubst = t._2
          var i = 0
          while (i < params1.length) {
            if (params1(i).isRepeated != params2(i).isRepeated) {
              result = (false, undefinedSubst)
              return
            }
            t = Equivalence.equivInner(params1(i).paramType, params2(i).paramType, undefinedSubst, falseUndef = false)
            if (!t._1) {
              result = (false, undefinedSubst)
              return
            }
            undefinedSubst = t._2
            i = i + 1
          }
          result = (true, undefinedSubst)
        case _ =>
          result = (false, undefinedSubst)
      }
    }
    // Left side is an abstract placeholder type used during inference: r must
    // lie between the placeholder's bounds. A bare type constructor on the
    // right is first applied to the placeholder's arguments.
    override def visitAbstractType(a: ScAbstractType) {
      val rightVisitor = new ValDesignatorSimplification with UndefinedSubstVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      val right =
        if (a.tpt.args.length > 0 && !r.isInstanceOf[ScParameterizedType])
          ScParameterizedType(r, a.tpt.args)
        else r
      result = conformsInner(a.upper, right, visited, undefinedSubst, checkWeak)
      if (result._1) {
        val t = conformsInner(right, a.lower, visited, result._2, checkWeak)
        if (t._1) result = t
      }
    }
    // Left side is a polymorphic type ([T...] => internal): r must also be a
    // polymorphic type with the same number of parameters, compatible bounds,
    // and a conforming internal type after renaming t1's parameters to t2's.
    override def visitTypePolymorphicType(t1: ScTypePolymorphicType) {
      var rightVisitor: ScalaTypeVisitor =
        new ValDesignatorSimplification with UndefinedSubstVisitor
          with AbstractVisitor
          with ParameterizedAbstractVisitor {}
      r.visitType(rightVisitor)
      if (result != null) return
      checkEquiv()
      if (result != null) return
      rightVisitor = new ExistentialSimplification {}
      r.visitType(rightVisitor)
      if (result != null) return
      r match {
        case t2: ScTypePolymorphicType =>
          val typeParameters1 = t1.typeParameters
          val typeParameters2 = t2.typeParameters
          val internalType1 = t1.internalType
          val internalType2 = t2.internalType
          if (typeParameters1.length != typeParameters2.length) {
            result = (false, undefinedSubst)
            return
          }
          var i = 0
          while (i < typeParameters1.length) {
            var t = conformsInner(typeParameters1(i).lowerType(), typeParameters2(i).lowerType(), HashSet.empty, undefinedSubst)
            if (!t._1) {
              result = (false, undefinedSubst)
              return
            }
            undefinedSubst = t._2
            // NOTE(review): this compares t2's *upper* bound against t1's
            // *lower* bound; confirm it was not meant to compare upper bounds.
            t = conformsInner(typeParameters2(i).upperType(), typeParameters1(i).lowerType(), HashSet.empty, undefinedSubst)
            if (!t._1) {
              result = (false, undefinedSubst)
              return
            }
            undefinedSubst = t._2
            i = i + 1
          }
          // Rename t1's parameters to t2's so the internal types align.
          val subst = new ScSubstitutor(new collection.immutable.HashMap[(String, String), ScType] ++ typeParameters1.zip(typeParameters2).map({
            tuple => ((tuple._1.name, ScalaPsiUtil.getPsiElementId(tuple._1.ptp)),
              new ScTypeParameterType(tuple._2.name,
                tuple._2.ptp match {
                  case p: ScTypeParam => p.typeParameters.toList.map{new ScTypeParameterType(_, ScSubstitutor.empty)}
                  case _ => Nil
                }, new Suspension(tuple._2.lowerType), new Suspension(tuple._2.upperType), tuple._2.ptp))
          }), Map.empty, None)
          val t = conformsInner(subst.subst(internalType1), internalType2, HashSet.empty, undefinedSubst)
          if (!t._1) {
            result = (false, undefinedSubst)
            return
          }
          undefinedSubst = t._2
          result = (true, undefinedSubst)
        case _ =>
          result = (false, undefinedSubst)
      }
    }
}
val guard = RecursionManager.createGuard("conformance.guard")
val cache: ConcurrentWeakHashMap[(ScType, ScType, Boolean), (Boolean, ScUndefinedSubstitutor)] =
new ConcurrentWeakHashMap[(ScType, ScType, Boolean), (Boolean, ScUndefinedSubstitutor)]()
  // Entry point of conformance checking: does `r` conform to (is a subtype
  // of) `l`? Results are memoized per (l, r, checkWeak) key in a weak cache
  // and protected by a recursion guard — on a cycle the check conservatively
  // answers false. Constraints accumulated during the check are merged into
  // `unSubst` before returning. `visited` carries classes already expanded on
  // the inheritance path; `checkWeak` enables weak (numeric) conformance.
  def conformsInner(l: ScType, r: ScType, visited: Set[PsiClass], unSubst: ScUndefinedSubstitutor,
                    checkWeak: Boolean = false): (Boolean, ScUndefinedSubstitutor) = {
    ProgressManager.checkCanceled()
    val key = (l, r, checkWeak)
    val tuple = cache.get(key)
    if (tuple != null) {
      if (unSubst.isEmpty) return tuple
      return tuple.copy(_2 = unSubst + tuple._2)
    }
    // Already computing this key further up the stack: break the cycle.
    if (guard.currentStack().contains(key)) {
      return (false, new ScUndefinedSubstitutor())
    }
    val uSubst = new ScUndefinedSubstitutor()
    def comp(): (Boolean, ScUndefinedSubstitutor) = {
      // First, the structural rules implemented by the visitor.
      val leftVisitor = new LeftConformanceVisitor(l, r, visited, uSubst, checkWeak)
      l.visitType(leftVisitor)
      if (leftVisitor.getResult != null) return leftVisitor.getResult
      //tail, based on class inheritance
      ScType.extractClassType(r) match {
        case Some((clazz: PsiClass, _)) if visited.contains(clazz) => (false, uSubst)
        case Some((rClass: PsiClass, subst: ScSubstitutor)) =>
          ScType.extractClass(l) match {
            case Some(lClass) =>
              // java.lang.Object and scala.AnyRef are interchangeable here.
              if (rClass.qualifiedName == "java.lang.Object") {
                return conformsInner(l, types.AnyRef, visited, uSubst, checkWeak)
              } else if (lClass.qualifiedName == "java.lang.Object") {
                return conformsInner(types.AnyRef, r, visited, uSubst, checkWeak)
              }
              val inh = smartIsInheritor(rClass, subst, lClass)
              if (!inh._1) return (false, uSubst)
              val tp = inh._2
              //Special case for higher kind types passed to generics.
              if (lClass.hasTypeParameters) {
                l match {
                  case p: ScParameterizedType =>
                  case _ => return (true, uSubst)
                }
              }
              val t = conformsInner(l, tp, visited + rClass, uSubst, checkWeak = false)
              if (t._1) return (true, t._2)
              else return (false, uSubst)
            case _ =>
          }
        case _ =>
      }
      // Last resort: try every base type of r (weak conformance allowed).
      val bases: Seq[ScType] = BaseTypes.get(r)
      val iterator = bases.iterator
      while (iterator.hasNext) {
        ProgressManager.checkCanceled()
        val tp = iterator.next()
        val t = conformsInner(l, tp, visited, uSubst, checkWeak = true)
        if (t._1) return (true, t._2)
      }
      (false, uSubst)
    }
    val res = guard.doPreventingRecursion(key, false, new Computable[(Boolean, ScUndefinedSubstitutor)] {
      def compute(): (Boolean, ScUndefinedSubstitutor) = comp()
    })
    if (res == null) return (false, new ScUndefinedSubstitutor())
    cache.put(key, res)
    if (unSubst.isEmpty) return res
    res.copy(_2 = unSubst + res._2)
  }
  // Finds the supertype of `leftClass` (under `substitutor`) whose class is
  // equivalent to `rightClass`. Equal classes are deliberately rejected so
  // only proper inheritance counts; a quick deep-inheritance check prunes
  // the expensive supertype walk.
  private def smartIsInheritor(leftClass: PsiClass, substitutor: ScSubstitutor, rightClass: PsiClass) : (Boolean, ScType) = {
    if (ScEquivalenceUtil.areClassesEquivalent(leftClass, rightClass)) return (false, null)
    if (!ScalaPsiUtil.cachedDeepIsInheritor(leftClass, rightClass)) return (false, null)
    smartIsInheritor(leftClass, substitutor, ScEquivalenceUtil.areClassesEquivalent(_, rightClass), new collection.immutable.HashSet[PsiClass])
  }
  // Finds a supertype of `leftClass` whose class declares exactly
  // `argsNumber` type parameters (used for higher-kinded conformance).
  private def parentWithArgNumber(leftClass: PsiClass, substitutor: ScSubstitutor, argsNumber: Int): (Boolean, ScType) = {
    smartIsInheritor(leftClass, substitutor, c => c.getTypeParameters.length == argsNumber, new collection.immutable.HashSet[PsiClass]())
  }
  // Walks the substituted supertypes of `leftClass` looking for a class that
  // satisfies `condition`. Direct supertypes are preferred; non-matching ones
  // are queued in `later` and searched recursively, with `visited` guarding
  // against inheritance cycles. When several supertypes match, the most
  // specific one (by conformance) is kept.
  private def smartIsInheritor(leftClass: PsiClass, substitutor: ScSubstitutor, condition: PsiClass => Boolean,
                               visited: collection.immutable.HashSet[PsiClass]): (Boolean, ScType) = {
    ProgressManager.checkCanceled()
    val bases: Seq[Any] = leftClass match {
      case td: ScTypeDefinition => td.superTypes
      case _ => leftClass.getSuperTypes
    }
    val iterator = bases.iterator
    val later: ArrayBuffer[(PsiClass, ScSubstitutor)] = new ArrayBuffer[(PsiClass, ScSubstitutor)]()
    var res: ScType = null
    while (iterator.hasNext) {
      // Bases may be Scala types or Java PsiClassTypes; normalize both.
      val tp: ScType = iterator.next() match {
        case tp: ScType => substitutor.subst(tp)
        case pct: PsiClassType =>
          substitutor.subst(ScType.create(pct, leftClass.getProject)) match {
            case ex: ScExistentialType => ex.skolem //it's required for the raw types
            case r => r
          }
      }
      ScType.extractClassType(tp) match {
        case Some((clazz: PsiClass, _)) if visited.contains(clazz) =>
        case Some((clazz: PsiClass, subst)) if condition(clazz) =>
          // Keep the most specific matching supertype.
          if (res == null) res = tp
          else if (tp.conforms(res)) res = tp
        case Some((clazz: PsiClass, subst)) =>
          later += ((clazz, subst))
        case _ =>
      }
    }
    val laterIterator = later.iterator
    while (laterIterator.hasNext) {
      val (clazz, subst) = laterIterator.next()
      val recursive = smartIsInheritor(clazz, subst, condition, visited + clazz)
      if (recursive._1) {
        if (res == null) res = recursive._2
        else if (recursive._2.conforms(res)) res = recursive._2
      }
    }
    if (res == null) (false, null)
    else (true, res)
  }
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala | Scala | apache-2.0 | 74,812 |
// Equites, a Scala chess playground
// Copyright © 2013 Frank S. Thomas <frank@timepit.eu>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package eu.timepit.equites
package util
import org.scalacheck.Arbitrary
import org.specs2.ScalaCheck
import org.specs2.matcher.DataTables
import org.specs2._
import scala.reflect._
import Math._
// Acceptance-style specs2 specification for the Math helpers: property-based
// checks for gcd symmetry and for isEven/isOdd across all Integral types.
class MathSpec extends Specification with DataTables with ScalaCheck {
  def is =
    "gcd should" ^
      // gcd(a, b) == gcd(b, a), restricted to non-negative inputs.
      "be symmetric in its arguments" ! prop {
        (a: Int, b: Int) => (a >= 0 && b >= 0) ==> (gcd(a, b) must_== gcd(b, a))
      } ^
      p ^
    "isEven and isOdd should" ^
      // Spot-check small values via a data table; isOdd is always !isEven.
      "yield correct results for some positive numbers" ! {
        "a" | "isEven" |
        0 ! true |
        1 ! false |
        2 ! true |
        3 ! false |
        4 ! true |> {
          (a, r) => (isEven(a) must_== r) and (isOdd(a) must_== !r)
        }
      } ^
      br ^
      workWith[Byte] ^
      workWith[Short] ^
      workWith[Int] ^
      workWith[Long] ^
      workWith[BigInt]
  // Fragment bundle instantiating the generic properties for one type A.
  def workWith[A: Arbitrary: Integral: ClassTag] =
    s"work with ${classTag[A]}" ^
      eitherEvenOrOdd[A] ^
      beEvenFunctions[A] ^
      p
  // A number is never both even and odd.
  def eitherEvenOrOdd[A: Arbitrary: Integral] =
    "yield different results for the same input" ! prop {
      (a: A) => isEven(a) must_!= isOdd(a)
    }
  // f(a) == f(-a) for parity predicates.
  def beEvenFunctions[A: Arbitrary: Integral] =
    "be even functions" ! prop {
      (a: A) =>
        beEvenFunction(isEven[A]).apply(a) and
        beEvenFunction(isOdd[A]).apply(a)
    }
  // Matcher factory: checks f is an even function at a single point.
  def beEvenFunction[A, B](f: A => B)(implicit A: Numeric[A]) =
    (a: A) => f(a) must_== f(A.negate(a))
}
| equites-chess/equites-core | src/test/scala/eu/timepit/equites/util/MathSpec.scala | Scala | gpl-3.0 | 2,245 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.status.api.v1
import java.text.{ParseException, SimpleDateFormat}
import java.util.TimeZone
import javax.ws.rs.WebApplicationException
import javax.ws.rs.core.Response
import javax.ws.rs.core.Response.Status
// Wraps a date query parameter, accepting either a full timestamp
// ("yyyy-MM-dd'T'HH:mm:ss.SSSz") or a bare day ("yyyy-MM-dd", interpreted in
// GMT). A value matching neither format is rejected with a 400 Bad Request.
private[v1] class SimpleDateParam(val originalValue: String) {
  // Epoch milliseconds corresponding to `originalValue`.
  val timestamp: Long = {
    parseWith(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSz")).orElse {
      val gmtDay = new SimpleDateFormat("yyyy-MM-dd")
      gmtDay.setTimeZone(TimeZone.getTimeZone("GMT"))
      parseWith(gmtDay)
    }.getOrElse {
      throw new WebApplicationException(
        Response
          .status(Status.BAD_REQUEST)
          .entity("Couldn't parse date: " + originalValue)
          .build()
      )
    }
  }

  // Tries one format, yielding None on a ParseException so the caller can
  // fall back to the next format; any other exception propagates unchanged.
  private def parseWith(format: SimpleDateFormat): Option[Long] =
    try Some(format.parse(originalValue).getTime()) catch {
      case _: ParseException => None
    }
}
| gioenn/xSpark | core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala | Scala | apache-2.0 | 1,762 |
// Regression fixture for SI-6070: an abstract type member, a value of that
// type, and an operation consuming it.
abstract class Bomb {
  type T
  val x: T
  def size(that: T): Int
}
// T = String; size is the string length (so size("abc") == 3).
class StringBomb extends Bomb {
  type T = String
  val x = "abc"
  def size(that: String): Int = that.length
}
// T = Int; size adds the stored value, giving a different result shape than
// StringBomb — the point of the mutation trick below.
class IntBomb extends Bomb {
  type T = Int
  val x = 10
  def size(that: Int) = x + that
}
// Mutable holder; reassigning `bomb` mid-pattern-match is the essence of
// this regression test.
case class Mean(var bomb: Bomb)
// Driver: `foo` must use the StringBomb bound at match time even though
// `mutate()` swaps in an IntBomb before `b.x` is read — hence "should print 3".
object Test extends App {
  def foo(x: Mean) = x match {
    case Mean(b) =>
      // BUG: b is assumed to be a stable identifier, but it can actually be mutated
      println(b.size({ mutate(); b.x }))
  }
  def mutate() {
    m.bomb = new IntBomb
  }
  val m = Mean(new StringBomb)
  foo(m) // should print 3
} | felixmulder/scala | test/files/run/t6070.scala | Scala | bsd-3-clause | 625 |
package x7c1.linen.repository.inspector
import java.net.{MalformedURLException, URL}
import x7c1.linen.repository.inspector.ActionPageUrlError.{EmptyUrl, InvalidFormat, Unexpected}
import x7c1.linen.repository.loader.queueing.UrlEnclosure
import x7c1.wheat.modern.formatter.ThrowableFormatter.format
// A validated page URL to inspect, tagged with the requesting account.
case class ActionPageUrl(
  accountId: Long,
  override val raw: URL) extends UrlEnclosure
object ActionPageUrl {
  // Validates `url` and wraps it for `accountId`. Yields EmptyUrl for a
  // null/empty string, InvalidFormat for a malformed URL, and Unexpected for
  // any other failure.
  def create(accountId: Long, url: String): Either[ActionPageUrlError, ActionPageUrl] =
    try {
      val blank = (url == null) || url.isEmpty
      if (blank) Left(EmptyUrl())
      else Right(ActionPageUrl(accountId = accountId, raw = new URL(url)))
    } catch {
      case e: MalformedURLException =>
        Left(InvalidFormat(format(e)("[malformed]"), Some(e)))
      case e: Exception =>
        Left(Unexpected(format(e)("[unexpected]"), Some(e)))
    }
}
// Failure reported by ActionPageUrl.create: a human-readable detail plus the
// originating exception, when there is one.
trait ActionPageUrlError {
  def detail: String
  def cause: Option[Throwable]
}
object ActionPageUrlError {
  // The supplied URL string was null or empty.
  case class EmptyUrl() extends ActionPageUrlError {
    override val detail: String = "URL required"
    override val cause: Option[Throwable] = None
  }
  // new URL(...) rejected the string (MalformedURLException).
  case class InvalidFormat(
    override val detail: String,
    override val cause: Option[Throwable]) extends ActionPageUrlError {
  }
  // Any other exception raised while validating.
  case class Unexpected(
    override val detail: String,
    override val cause: Option[Throwable]) extends ActionPageUrlError {
  }
}
| x7c1/Linen | linen-repository/src/main/scala/x7c1/linen/repository/inspector/ActionPageUrl.scala | Scala | mit | 1,496 |
package cats.monocle
import monocle.macros.Lenses
import org.scalatest._
import Matchers._
// Tests the State-based lens helpers in MonocleLens: each helper is run
// against a fixture and both the resulting state and the returned value are
// checked.
class MonocleLensFunctionsSpec extends WordSpec {
  import MonocleLens._
  import cats.instances.all._
  @Lenses case class Address(street: String, city: String, zip: Int)
  @Lenses case class Person(name: String, address: Address)
  "MonocleLens" should {
    // `set` writes through a lens and returns the written value.
    "set" in {
      val address = Address("1 Market St.", "Foo", 12345)
      val s1 = "2 Central St."
      val state = set(Address.street)(s1)
      val (a2, s) = state.run(address).value
      a2 shouldBe Address(s1, "Foo", 12345)
      s shouldBe s1
    }
    // `transform` lifts an inner-state computation through an outer lens.
    "transform" in {
      val address = Address("1 Market St.", "Foo", 12345)
      val person = Person("R2D2", address)
      val s1 = "2 Central St."
      val state1 = set(Address.street)(s1)
      val state2 = transform(Person.address)(state1)
      val (p2, s) = state2.run(person).value
      p2 shouldBe Person("R2D2", Address(s1, "Foo", 12345))
      s shouldBe s1
    }
    // `plus` increments a numeric field and returns the new value.
    "plus" in {
      val address = Address("1 Market St.", "Foo", 12345)
      val st = plus(Address.zip)(1)
      val (s1, a) = st.run(address).value
      s1 shouldBe Address("1 Market St.", "Foo", 12346)
      a shouldBe 12346
    }
    // `zero` resets a numeric field to its monoid empty.
    "zero" in {
      val address = Address("1 Market St.", "Foo", 12345)
      val st = zero(Address.zip)
      val (s1, a) = st.run(address).value
      s1 shouldBe Address("1 Market St.", "Foo", 0)
      a shouldBe 0
    }
  }
}
| dragisak/monocle-cats | src/test/scala/cats/monocle/MonocleLensFunctionsSpec.scala | Scala | mit | 1,495 |
package is.hail.types.physical
import is.hail.annotations._
import is.hail.asm4s._
import is.hail.expr.ir.{EmitCodeBuilder, EmitMethodBuilder}
import is.hail.utils.FastIndexedSeq
import is.hail.variant._
object PCanonicalLocus {
  // Builds an optional (non-required) locus type for the given genome.
  def apply(rg: ReferenceGenome): PLocus = apply(rg, required = false)

  def apply(rg: ReferenceGenome, required: Boolean): PLocus = PCanonicalLocus(rg.broadcastRG, required)

  // Physical layout shared by all loci: a (contig, position) struct with
  // both fields required.
  private def representation(required: Boolean = false): PStruct =
    PCanonicalStruct(
      required,
      "contig" -> PCanonicalString(required = true),
      "position" -> PInt32(required = true))

  // Reference-aware locus type when a genome is supplied, otherwise the bare
  // (contig, position) struct.
  def schemaFromRG(rg: Option[ReferenceGenome], required: Boolean = false): PType =
    rg.fold[PType](representation(required))(ref => PCanonicalLocus(ref, required))
}
// Canonical physical locus type: stored as a (contig: String, position: Int)
// struct, ordered by reference-genome contig order rather than lexically.
final case class PCanonicalLocus(rgBc: BroadcastRG, required: Boolean = false) extends PLocus {
  def rg: ReferenceGenome = rgBc.value
  def _asIdent = "locus"
  // NOTE(review): `indent: Call` looks unusual for a pretty-printer indent
  // parameter — confirm against the PType._pretty signature.
  override def _pretty(sb: StringBuilder, indent: Call, compact: Boolean): Unit = sb.append(s"PCLocus($rg)")
  def setRequired(required: Boolean) = if(required == this.required) this else PCanonicalLocus(this.rgBc, required)
  // Underlying storage layout; field 0 = contig, field 1 = position.
  val representation: PStruct = PCanonicalLocus.representation(required)
  private[physical] def contigAddr(address: Code[Long]): Code[Long] = representation.loadField(address, 0)
  private[physical] def contigAddr(address: Long): Long = representation.loadField(address, 0)
  def contig(address: Long): String = contigType.loadString(contigAddr(address))
  lazy val contigType: PCanonicalString = representation.field("contig").typ.asInstanceOf[PCanonicalString]
  def position(off: Code[Long]): Code[Int] = Region.loadInt(representation.loadField(off, 1))
  lazy val positionType: PInt32 = representation.field("position").typ.asInstanceOf[PInt32]
  // FIXME: Remove when representation of contig/position is a naturally-ordered Long
  override def unsafeOrdering(): UnsafeOrdering = {
    val repr = representation.fundamentalType
    val localRGBc = rgBc
    val binaryOrd = repr.fieldType("contig").asInstanceOf[PBinary].unsafeOrdering()
    new UnsafeOrdering {
      def compare(o1: Long, o2: Long): Int = {
        val cOff1 = repr.loadField(o1, 0)
        val cOff2 = repr.loadField(o2, 0)
        // Equal contig bytes: compare positions numerically. Otherwise defer
        // to the reference genome's contig ordering.
        if (binaryOrd.compare(cOff1, cOff2) == 0) {
          val posOff1 = repr.loadField(o1, 1)
          val posOff2 = repr.loadField(o2, 1)
          java.lang.Integer.compare(Region.loadInt(posOff1), Region.loadInt(posOff2))
        } else {
          val contig1 = contigType.loadString(cOff1)
          val contig2 = contigType.loadString(cOff2)
          localRGBc.value.compare(contig1, contig2)
        }
      }
    }
  }
  // Code-generated counterpart of unsafeOrdering: emits bytecode comparing
  // two locus addresses with the same contig-then-position semantics.
  def codeOrdering(mb: EmitMethodBuilder[_], other: PType): CodeOrdering = {
    assert(other isOfType this)
    new CodeOrderingCompareConsistentWithOthers {
      type T = Long
      val bincmp = representation.fundamentalType.fieldType("contig").asInstanceOf[PBinary].codeOrdering(mb)
      override def compareNonnull(x: Code[Long], y: Code[Long]): Code[Int] = {
        val c1 = mb.newLocal[Long]("c1")
        val c2 = mb.newLocal[Long]("c2")
        val s1 = contigType.loadString(c1)
        val s2 = contigType.loadString(c2)
        val cmp = bincmp.compareNonnull(coerce[bincmp.T](c1), coerce[bincmp.T](c2))
        val codeRG = mb.getReferenceGenome(rg)
        Code.memoize(x, "plocus_code_ord_x", y, "plocus_code_ord_y") { (x, y) =>
          val p1 = Region.loadInt(representation.fieldOffset(x, 1))
          val p2 = Region.loadInt(representation.fieldOffset(y, 1))
          Code(
            c1 := representation.loadField(x, 0),
            c2 := representation.loadField(y, 0),
            cmp.ceq(0).mux(
              Code.invokeStatic2[java.lang.Integer, Int, Int, Int]("compare", p1, p2),
              codeRG.invoke[String, String, Int]("compare", s1, s2)))
        }
      }
    }
  }
}
object PCanonicalLocusSettable {
  // Allocates the three backing variables (address, contig address,
  // position) for a settable locus value, named after `name`.
  def apply(sb: SettableBuilder, pt: PCanonicalLocus, name: String): PCanonicalLocusSettable = {
    new PCanonicalLocusSettable(pt,
      sb.newSettable[Long](s"${ name }_a"),
      sb.newSettable[Long](s"${ name }_contig"),
      sb.newSettable[Int](s"${ name }_position"))
  }
}
// A locus value held in mutable variables: `a` is the struct address, while
// `_contig` and `position` cache the two fields for cheap repeated access.
class PCanonicalLocusSettable(
  val pt: PCanonicalLocus,
  val a: Settable[Long],
  _contig: Settable[Long],
  val position: Settable[Int]
) extends PLocusValue with PSettable {
  def get = new PCanonicalLocusCode(pt, a)
  def settableTuple(): IndexedSeq[Settable[_]] = FastIndexedSeq(a, _contig, position)
  // Stores a locus code: copies the address and re-derives the cached fields.
  def store(pc: PCode): Code[Unit] = {
    Code(
      a := pc.asInstanceOf[PCanonicalLocusCode].a,
      _contig := pt.contigAddr(a),
      position := pt.position(a))
  }
  def contig(): PStringCode = new PCanonicalStringCode(pt.contigType.asInstanceOf[PCanonicalString], _contig)
}
// An immutable locus value represented by the address `a` of its backing
// (contig, position) struct.
class PCanonicalLocusCode(val pt: PCanonicalLocus, val a: Code[Long]) extends PLocusCode {
  def code: Code[_] = a
  def codeTuple(): IndexedSeq[Code[_]] = FastIndexedSeq(a)
  def contig(): PStringCode = new PCanonicalStringCode(pt.contigType, pt.contigAddr(a))
  def position(): Code[Int] = pt.position(a)
  // Emits code constructing an on-heap Locus object from the stored fields.
  def getLocusObj(): Code[Locus] = {
    Code.memoize(a, "get_locus_code_memo") { a =>
      Code.invokeStatic2[Locus, String, Int, Locus]("apply",
        pt.contigType.loadString(pt.contigAddr(a)),
        pt.position(a))
    }
  }
  // Copies this code into fresh settable variables built by `sb`.
  def memoize(cb: EmitCodeBuilder, name: String, sb: SettableBuilder): PLocusValue = {
    val s = PCanonicalLocusSettable(sb, pt, name)
    cb.assign(s, this)
    s
  }
  def memoize(cb: EmitCodeBuilder, name: String): PLocusValue = memoize(cb, name, cb.localBuilder)
  def memoizeField(cb: EmitCodeBuilder, name: String): PLocusValue = memoize(cb, name, cb.fieldBuilder)
  def store(mb: EmitMethodBuilder[_], r: Value[Region], dst: Code[Long]): Code[Unit] = Region.storeAddress(dst, a)
}
| cseed/hail | hail/src/main/scala/is/hail/types/physical/PCanonicalLocus.scala | Scala | mit | 5,907 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.helptosavefrontend.controllers
import play.api.mvc.Result
import uk.gov.hmrc.helptosavefrontend.services.HelpToSaveService
import uk.gov.hmrc.helptosavefrontend.util.{Logging, toFuture}
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import scala.concurrent.{ExecutionContext, Future}
// Mixin for controllers that must respect the account-creation caps: runs
// `ifAllowed` only when neither the daily nor the total cap blocks signup,
// otherwise redirects to the appropriate informational page.
trait CapCheckBehaviour {
  this: FrontendController with Logging ⇒
  val helpToSaveService: HelpToSaveService
  def checkIfAccountCreateAllowed(
    ifAllowed: ⇒ Future[Result]
  )(implicit hc: HeaderCarrier, ec: ExecutionContext): Future[Result] =
    helpToSaveService.isAccountCreationAllowed().value.flatMap {
      _.fold(
        error ⇒ {
          // Fail-open: if the cap check itself fails, log and let the user
          // proceed rather than blocking account creation.
          logger.warn(s"Could not check if account create is allowed, due to: $error")
          ifAllowed
        }, { userCapResponse ⇒
          // Precedence: both caps disabled → service unavailable; total cap
          // disabled/reached → total-cap page; daily cap disabled/reached →
          // daily-cap page; otherwise proceed.
          if (userCapResponse.isTotalCapDisabled && userCapResponse.isDailyCapDisabled) {
            SeeOther(routes.RegisterController.getServiceUnavailablePage.url)
          } else if (userCapResponse.isTotalCapDisabled || userCapResponse.isTotalCapReached) {
            SeeOther(routes.RegisterController.getTotalCapReachedPage.url)
          } else if (userCapResponse.isDailyCapDisabled || userCapResponse.isDailyCapReached) {
            SeeOther(routes.RegisterController.getDailyCapReachedPage.url)
          } else {
            ifAllowed
          }
        }
      )
    }
}
| hmrc/help-to-save-frontend | app/uk/gov/hmrc/helptosavefrontend/controllers/CapCheckBehaviour.scala | Scala | apache-2.0 | 2,061 |
package org.http4s
package laws
import cats.laws._
import org.http4s.util.Renderer
/** Laws that a lawful [[HttpCodec]] instance for `A` must satisfy. */
trait HttpCodecLaws[A] {
  // The codec under test.
  implicit def C: HttpCodec[A]
  // Round-trip law: rendering a value and parsing it back must yield the original.
  def httpCodecRoundTrip(a: A): IsEq[ParseResult[A]] =
    C.parse(Renderer.renderString(a)) <-> Right(a)
}
object HttpCodecLaws {
  /** Summons the implicit codec for `A` and packages it as a laws instance. */
  def apply[A](implicit httpCodecA: HttpCodec[A]): HttpCodecLaws[A] = new HttpCodecLaws[A] {
    val C = httpCodecA
  }
}
| ChristopherDavenport/http4s | laws/src/main/scala/org/http4s/laws/HttpCodecLaws.scala | Scala | apache-2.0 | 396 |
import leon.lang._
/** Leon (xlang) verification benchmark: in-place absolute value over an array parameter. */
object ArrayParamMutation9 {
  // Replaces every element of `a` with its absolute value. The `invariant`
  // clause is Leon's loop-invariant annotation, required by the verifier for
  // imperative while-loops; this is not plain Scala runtime semantics.
  def abs(a: Array[Int]) {
    require(a.length > 0)
    var i = 0;
    (while (i < a.length) {
      a(i) = if (a(i) < 0) -a(i) else a(i) // <-- this makes Leon crash
      i = i + 1
    }) invariant(i >= 0)
  }
  // After abs, the array is (0, 1, 2, 3), so the expression evaluates to 0 —
  // the property the verifier is asked to establish.
  def main = {
    val a = Array(0, -1, 2, -3)
    abs(a)
    a(0) + a(1) - 1 + a(2) - 2 + a(3) - 3 // == 0
  }
}
| epfl-lara/leon | src/test/resources/regression/verification/xlang/valid/ArrayParamMutation9.scala | Scala | gpl-3.0 | 382 |
package clustering
import models._
import utils.Enrichments._
import play.Logger
/**
* Home Made clustering, based on a hiearchy of cluster of 1 to 10.
* i.e there is up to 10 level of clustering.
*/
/**
 * Hierarchical clustering over a grid of `rows` x `cols` cells. `leaves` are the
 * per-cell base clusters; `compute` produces 11 levels (raw circles plus 10
 * granularities with growing area/density thresholds).
 * NOTE(review): clustering uses `shuffle`, so results are non-deterministic.
 */
class ClustHC(leaves: List[LeafCluster], rows: Int, cols: Int) {
  // Tuning constants read from application config; missing keys are fatal.
  val areaCorrector = getConfDouble("clustHC.areaCorrector", "ClustHC: areaCorrector constant not defined in conf.")
  val thresholdCorrector = getConfDouble("clustHC.thresholdCorrector", "ClustHC: thresholdCorrector constant not defined in conf.")
  val minDensityCorrector = getConfDouble("clustHC.minDensityCorrector", "ClustHC: minDensityCorrector constant not defined in conf.")
  // Total tweet count, grid area and average tweet density over the whole grid.
  val total = leaves.map(_.numTweets).sum
  val totalArea = rows * cols
  val totalTweetDensity: Double = total.toDouble / totalArea.toDouble
  // Level 0: each leaf wrapped as its own singleton cluster.
  val originalClusters: Set[Cluster] = leaves.map(l => Cluster(Set(l))) toSet
  /** Iterates to obtain 10 cluster hierarchies of increasing granularity (plus the raw level). */
  def compute: List[Set[Cluster]] = {
    var res: List[Set[Cluster]] = (originalClusters) :: Nil
    (1 to 10).foreach { i =>
      // beta scales both the maximum allowed cluster area and the density threshold.
      val beta = i.toDouble / 10.0
      res :+= clusterOnce(res.last, beta * totalArea.toDouble * areaCorrector, (beta * totalTweetDensity) * thresholdCorrector)
    }
    /* Avoiding filtering on the first level: we don't want to have any clustering, just raw circles */
    val cleaned = res.head :: res.tail.map(clst => cleanClusters(clst))
    cleaned.foreach{i => Logger.info(s"ClustHC: a cluster size ${i.size}")}
    Logger.info("ClustHC: Done")
    cleaned
  }
  /** Iterates at one granularity until no more clusters are formed (fixed point). */
  private def clusterOnce(oldC: Set[Cluster], maxArea: Double, threshold: Double): Set[Cluster] = {
    var lst = oldC.toList.shuffle /* Let's shuffle (once more) the list for random start of clustering */
    // Candidate index pairs whose combined bounding area fits under maxArea, grouped by first index.
    val couples = (for(i <- 0 until lst.size; j <- i + 1 until lst.size if (lst(i).computeArea(lst(j)) <= maxArea)) yield(i,j)).toList.groupBy(_._1)
    val map: Map[Int, List[Int]] = couples.map(e => (e._1, e._2.map(_._2).shuffle /*.sorted*/)).filter(_._2 != Nil)
    // For each candidate, pick the first partner whose merge meets the density threshold.
    val p: Set[Cluster] = map.map(entry => findSweetSpot(entry._1, entry._2, lst,threshold)).filter(_ != None).map(_.get).toSet
    /*val p = (for (i <- 0 until lst.size; j <- i + 1 until lst.size if (lst(i).computeArea(lst(j)) <= maxArea))
      yield (aggregate(lst(i), lst(j)))).toSet.filter(c => c.size <= maxArea && c.tweetMeter >= threshold)*/
    // Keep only merges that are not dominated by a denser overlapping merge,
    // then carry over untouched old clusters.
    val filtered = p.filter(c1 => !p.exists(c2 => c2.intersect(c1) && c2.tweetMeter > c1.tweetMeter))
    val res = filtered ++ oldC.filter(l => !filtered.exists(c => c.intersect(l)))
    if (res == oldC) oldC
    else clusterOnce(res, maxArea, threshold)
  }
  /** Finds the first (largest-index) aggregation whose density meets the threshold. */
  def findSweetSpot(key: Int, values: List[Int], lst: List[Cluster], threshold: Double): Option[Cluster] = {
    val sorted = (values.sorted).reverse
    for(j <- sorted ){
      val aggr = aggregate(lst(key), lst(j))
      if (aggr.tweetMeter >= threshold)
        return Some(aggr)
    }
    None
  }
  /** Aggregates two clusters by computing the rectangle area that joins them and adds all the LeafClusters contained in this one. */
  private def aggregate(c1: Cluster, c2: Cluster): Cluster = {
    val aggreg = Cluster(c1.subClusters ++ c2.subClusters)
    /* val sorted_leaves = leaves.sortBy(_.pos)
    val bounds = c1.computeAreaBounds(c2)
    val index1 = bounds._1._1 * rows + bounds._1._2
    val index2 = bounds._2._1 * rows + bounds._2._2
    assert(index1 <= index2 && index2 < leaves.size && index1 >= 0)*/
    Cluster(aggreg.subClusters ++ /*leaves.slice(index1, index2)*/ leaves.filter(x => aggreg.contains(x.pos)))
  }
  /** Removes clusters whose density is below a fraction of the densest cluster's. */
  private def cleanClusters(clusters: Set[Cluster]): Set[Cluster] = {
    val maxDensity = if(!clusters.isEmpty) clusters.maxBy(_.tweetMeter).tweetMeter else 0
    clusters.filter(c => c.tweetMeter >= (maxDensity * minDensityCorrector))
  }
}
| TweetAggregator/CrossTalk | app/clustering/ClustHC.scala | Scala | gpl-2.0 | 3,977 |
package struct
import org.scalatest.FunSuite
import scala.util.Random
/**
* Created by yuJieShui on 2016/8/1.
*/
/** Unit tests for the persistent max-heap in [[struct.Heap]]. */
class HeapTest extends FunSuite {
  import struct.Heap._
  // Shared fixture: heap containing 1, 2, 3. The heap is persistent, so
  // dropMax/insert return new heaps and this value is never mutated.
  val heap = apply(Seq(1, 2, 3))
  test("apply") {
    println(heap)
    assert(heap.max === 3)
    println(dropMax(heap))
    assert(dropMax(heap).max === 2)
    println(dropMax(heap))
    assert(dropMax(dropMax(heap)).max === 1)
  }
  test("insert") {
    println(insert(4, heap))
    assert(insert(4, heap).max === 4)
    assert(insert(4, insert(5, heap)).max === 5)
    // Inserting the current max and dropping it must restore the same max.
    assert(dropMax(insert(heap.max, heap)).max === heap.max)
  }
  test("merge") {
    val heap2 = apply(Seq(4, 5, 6))
    val _heap = merge(heap, heap2)
    println(_heap)
    // Merged heap must yield elements of both inputs in descending order.
    assert(_heap.max === 6)
    assert(dropMax(_heap).max === 5)
    assert(dropMax(dropMax(_heap)).max === 4)
    assert(dropMax(dropMax(dropMax(_heap))).max === 3)
  }
  test("measure apply") {
    import org.scalameter._
    // Benchmark heap construction on 10k random sequences (fixed seed 444).
    // NOTE(review): r.nextInt() % 100 can be negative, yielding empty sequences.
    val r = new Random(444)
    val seq: Seq[Seq[Int]] = 1 to 10000 map (_ => 1 to (r.nextInt() % 100) map (_ => r.nextInt()))
    val time =
      config().withWarmer(Warmer.Zero).measure {
        seq map (seq => apply(seq))
      }
    println(time)
  }
}
| 1178615156/hackerrank | src/test/scala/struct/HeapTest.scala | Scala | apache-2.0 | 1,211 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s
import java.util.concurrent.TimeUnit
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.ConfigBuilder
/**
 * Configuration entries, key prefixes and constants for the Spark-on-Kubernetes
 * scheduler backend and submission client.
 */
private[spark] object Config extends Logging {
  // ---- Namespace and container images ----
  val KUBERNETES_NAMESPACE =
    ConfigBuilder("spark.kubernetes.namespace")
      .doc("The namespace that will be used for running the driver and executor pods.")
      .stringConf
      .createWithDefault("default")
  val CONTAINER_IMAGE =
    ConfigBuilder("spark.kubernetes.container.image")
      .doc("Container image to use for Spark containers. Individual container types " +
        "(e.g. driver or executor) can also be configured to use different images if desired, " +
        "by setting the container type-specific image name.")
      .stringConf
      .createOptional
  // Driver/executor images fall back to the shared CONTAINER_IMAGE when unset.
  val DRIVER_CONTAINER_IMAGE =
    ConfigBuilder("spark.kubernetes.driver.container.image")
      .doc("Container image to use for the driver.")
      .fallbackConf(CONTAINER_IMAGE)
  val EXECUTOR_CONTAINER_IMAGE =
    ConfigBuilder("spark.kubernetes.executor.container.image")
      .doc("Container image to use for the executors.")
      .fallbackConf(CONTAINER_IMAGE)
  val CONTAINER_IMAGE_PULL_POLICY =
    ConfigBuilder("spark.kubernetes.container.image.pullPolicy")
      .doc("Kubernetes image pull policy. Valid values are Always, Never, and IfNotPresent.")
      .stringConf
      .checkValues(Set("Always", "Never", "IfNotPresent"))
      .createWithDefault("IfNotPresent")
  val IMAGE_PULL_SECRETS =
    ConfigBuilder("spark.kubernetes.container.image.pullSecrets")
      .doc("Comma separated list of the Kubernetes secrets used " +
        "to access private image registries.")
      .stringConf
      .createOptional
  // ---- Authentication key prefixes/suffixes ----
  val KUBERNETES_AUTH_DRIVER_CONF_PREFIX =
      "spark.kubernetes.authenticate.driver"
  val KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX =
      "spark.kubernetes.authenticate.driver.mounted"
  val KUBERNETES_AUTH_CLIENT_MODE_PREFIX = "spark.kubernetes.authenticate"
  val OAUTH_TOKEN_CONF_SUFFIX = "oauthToken"
  val OAUTH_TOKEN_FILE_CONF_SUFFIX = "oauthTokenFile"
  val CLIENT_KEY_FILE_CONF_SUFFIX = "clientKeyFile"
  val CLIENT_CERT_FILE_CONF_SUFFIX = "clientCertFile"
  val CA_CERT_FILE_CONF_SUFFIX = "caCertFile"
  val KUBERNETES_SERVICE_ACCOUNT_NAME =
    ConfigBuilder(s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.serviceAccountName")
      .doc("Service account that is used when running the driver pod. The driver pod uses " +
        "this service account when requesting executor pods from the API server. If specific " +
        "credentials are given for the driver pod to use, the driver will favor " +
        "using those credentials instead.")
      .stringConf
      .createOptional
  // ---- Driver resources ----
  val KUBERNETES_DRIVER_LIMIT_CORES =
    ConfigBuilder("spark.kubernetes.driver.limit.cores")
      .doc("Specify the hard cpu limit for the driver pod")
      .stringConf
      .createOptional
  val KUBERNETES_DRIVER_SUBMIT_CHECK =
    ConfigBuilder("spark.kubernetes.submitInDriver")
    .internal()
    .booleanConf
    .createWithDefault(false)
  // ---- Executor resources ----
  val KUBERNETES_EXECUTOR_LIMIT_CORES =
    ConfigBuilder("spark.kubernetes.executor.limit.cores")
      .doc("Specify the hard cpu limit for each executor pod")
      .stringConf
      .createOptional
  val KUBERNETES_EXECUTOR_REQUEST_CORES =
    ConfigBuilder("spark.kubernetes.executor.request.cores")
      .doc("Specify the cpu request for each executor pod")
      .stringConf
      .createOptional
  val KUBERNETES_DRIVER_POD_NAME =
    ConfigBuilder("spark.kubernetes.driver.pod.name")
      .doc("Name of the driver pod.")
      .stringConf
      .createOptional
  val KUBERNETES_EXECUTOR_POD_NAME_PREFIX =
    ConfigBuilder("spark.kubernetes.executor.podNamePrefix")
      .doc("Prefix to use in front of the executor pod names.")
      .internal()
      .stringConf
      .createWithDefault("spark")
  // ---- PySpark / SparkR resource plumbing (internal, set by the submission client) ----
  val KUBERNETES_PYSPARK_PY_FILES =
    ConfigBuilder("spark.kubernetes.python.pyFiles")
      .doc("The PyFiles that are distributed via client arguments")
      .internal()
      .stringConf
      .createOptional
  val KUBERNETES_PYSPARK_MAIN_APP_RESOURCE =
    ConfigBuilder("spark.kubernetes.python.mainAppResource")
      .doc("The main app resource for pyspark jobs")
      .internal()
      .stringConf
      .createOptional
  val KUBERNETES_PYSPARK_APP_ARGS =
    ConfigBuilder("spark.kubernetes.python.appArgs")
      .doc("The app arguments for PySpark Jobs")
      .internal()
      .stringConf
      .createOptional
  val KUBERNETES_R_MAIN_APP_RESOURCE =
    ConfigBuilder("spark.kubernetes.r.mainAppResource")
      .doc("The main app resource for SparkR jobs")
      .internal()
      .stringConf
      .createOptional
  val KUBERNETES_R_APP_ARGS =
    ConfigBuilder("spark.kubernetes.r.appArgs")
      .doc("The app arguments for SparkR Jobs")
      .internal()
      .stringConf
      .createOptional
  // ---- Executor allocation and lifecycle ----
  val KUBERNETES_ALLOCATION_BATCH_SIZE =
    ConfigBuilder("spark.kubernetes.allocation.batch.size")
      .doc("Number of pods to launch at once in each round of executor allocation.")
      .intConf
      .checkValue(value => value > 0, "Allocation batch size should be a positive integer")
      .createWithDefault(5)
  val KUBERNETES_ALLOCATION_BATCH_DELAY =
    ConfigBuilder("spark.kubernetes.allocation.batch.delay")
      .doc("Time to wait between each round of executor allocation.")
      .timeConf(TimeUnit.MILLISECONDS)
      .checkValue(value => value > 0, "Allocation batch delay must be a positive time value.")
      .createWithDefaultString("1s")
  val KUBERNETES_EXECUTOR_LOST_REASON_CHECK_MAX_ATTEMPTS =
    ConfigBuilder("spark.kubernetes.executor.lostCheck.maxAttempts")
      .doc("Maximum number of attempts allowed for checking the reason of an executor loss " +
        "before it is assumed that the executor failed.")
      .intConf
      .checkValue(value => value > 0, "Maximum attempts of checks of executor lost reason " +
        "must be a positive integer")
      .createWithDefault(10)
  val WAIT_FOR_APP_COMPLETION =
    ConfigBuilder("spark.kubernetes.submission.waitAppCompletion")
      .doc("In cluster mode, whether to wait for the application to finish before exiting the " +
        "launcher process.")
      .booleanConf
      .createWithDefault(true)
  val REPORT_INTERVAL =
    ConfigBuilder("spark.kubernetes.report.interval")
      .doc("Interval between reports of the current app status in cluster mode.")
      .timeConf(TimeUnit.MILLISECONDS)
      .checkValue(interval => interval > 0, s"Logging interval must be a positive time value.")
      .createWithDefaultString("1s")
  val KUBERNETES_EXECUTOR_API_POLLING_INTERVAL =
    ConfigBuilder("spark.kubernetes.executor.apiPollingInterval")
      .doc("Interval between polls against the Kubernetes API server to inspect the " +
        "state of executors.")
      .timeConf(TimeUnit.MILLISECONDS)
      .checkValue(interval => interval > 0, s"API server polling interval must be a" +
        " positive time value.")
      .createWithDefaultString("30s")
  val KUBERNETES_EXECUTOR_EVENT_PROCESSING_INTERVAL =
    ConfigBuilder("spark.kubernetes.executor.eventProcessingInterval")
      .doc("Interval between successive inspection of executor events sent from the" +
        " Kubernetes API.")
      .timeConf(TimeUnit.MILLISECONDS)
      .checkValue(interval => interval > 0, s"Event processing interval must be a positive" +
        " time value.")
      .createWithDefaultString("1s")
  // ---- Miscellaneous tuning ----
  val MEMORY_OVERHEAD_FACTOR =
    ConfigBuilder("spark.kubernetes.memoryOverheadFactor")
      .doc("This sets the Memory Overhead Factor that will allocate memory to non-JVM jobs " +
        "which in the case of JVM tasks will default to 0.10 and 0.40 for non-JVM jobs")
      .doubleConf
      .checkValue(mem_overhead => mem_overhead >= 0 && mem_overhead < 1,
        "Ensure that memory overhead is a double between 0 --> 1.0")
      .createWithDefault(0.1)
  val PYSPARK_MAJOR_PYTHON_VERSION =
    ConfigBuilder("spark.kubernetes.pyspark.pythonVersion")
      .doc("This sets the major Python version. Either 2 or 3. (Python2 or Python3)")
      .stringConf
      .checkValue(pv => List("2", "3").contains(pv),
        "Ensure that major Python version is either Python2 or Python3")
      .createWithDefault("2")
  val KUBERNETES_LOCAL_DIRS_TMPFS =
    ConfigBuilder("spark.kubernetes.local.dirs.tmpfs")
      .doc("If set to true then emptyDir volumes created to back SPARK_LOCAL_DIRS will have " +
        "their medium set to Memory so that they will be created as tmpfs (i.e. RAM) backed " +
        "volumes. This may improve performance but scratch space usage will count towards " +
        "your pods memory limit so you may wish to request more memory.")
      .booleanConf
      .createWithDefault(false)
  // ---- Free-form key prefixes (user-supplied labels/annotations/secrets/volumes) ----
  val KUBERNETES_AUTH_SUBMISSION_CONF_PREFIX =
    "spark.kubernetes.authenticate.submission"
  val KUBERNETES_NODE_SELECTOR_PREFIX = "spark.kubernetes.node.selector."
  val KUBERNETES_DRIVER_LABEL_PREFIX = "spark.kubernetes.driver.label."
  val KUBERNETES_DRIVER_ANNOTATION_PREFIX = "spark.kubernetes.driver.annotation."
  val KUBERNETES_DRIVER_SECRETS_PREFIX = "spark.kubernetes.driver.secrets."
  val KUBERNETES_DRIVER_SECRET_KEY_REF_PREFIX = "spark.kubernetes.driver.secretKeyRef."
  val KUBERNETES_DRIVER_VOLUMES_PREFIX = "spark.kubernetes.driver.volumes."
  val KUBERNETES_EXECUTOR_LABEL_PREFIX = "spark.kubernetes.executor.label."
  val KUBERNETES_EXECUTOR_ANNOTATION_PREFIX = "spark.kubernetes.executor.annotation."
  val KUBERNETES_EXECUTOR_SECRETS_PREFIX = "spark.kubernetes.executor.secrets."
  val KUBERNETES_EXECUTOR_SECRET_KEY_REF_PREFIX = "spark.kubernetes.executor.secretKeyRef."
  val KUBERNETES_EXECUTOR_VOLUMES_PREFIX = "spark.kubernetes.executor.volumes."
  // ---- Volume spec key fragments (appended to the volume prefixes above) ----
  val KUBERNETES_VOLUMES_HOSTPATH_TYPE = "hostPath"
  val KUBERNETES_VOLUMES_PVC_TYPE = "persistentVolumeClaim"
  val KUBERNETES_VOLUMES_EMPTYDIR_TYPE = "emptyDir"
  val KUBERNETES_VOLUMES_MOUNT_PATH_KEY = "mount.path"
  val KUBERNETES_VOLUMES_MOUNT_READONLY_KEY = "mount.readOnly"
  val KUBERNETES_VOLUMES_OPTIONS_PATH_KEY = "options.path"
  val KUBERNETES_VOLUMES_OPTIONS_CLAIM_NAME_KEY = "options.claimName"
  val KUBERNETES_VOLUMES_OPTIONS_MEDIUM_KEY = "options.medium"
  val KUBERNETES_VOLUMES_OPTIONS_SIZE_LIMIT_KEY = "options.sizeLimit"
  val KUBERNETES_DRIVER_ENV_PREFIX = "spark.kubernetes.driverEnv."
}
| lvdongr/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala | Scala | apache-2.0 | 11,250 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model.LocalObjectReferenceBuilder
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.{KubernetesConf, KubernetesDriverSpecificConf, SparkPod}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
/** Unit tests for BasicDriverFeatureStep's pod construction and system-property output. */
class BasicDriverFeatureStepSuite extends SparkFunSuite {
  // Shared fixture values used to build the KubernetesConf under test.
  private val APP_ID = "spark-app-id"
  private val RESOURCE_NAME_PREFIX = "spark"
  private val DRIVER_LABELS = Map("labelkey" -> "labelvalue")
  private val CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"
  private val APP_NAME = "spark-test"
  private val MAIN_CLASS = "org.apache.spark.examples.SparkPi"
  private val APP_ARGS = Array("arg1", "arg2", "\\"arg 3\\"")
  private val CUSTOM_ANNOTATION_KEY = "customAnnotation"
  private val CUSTOM_ANNOTATION_VALUE = "customAnnotationValue"
  private val DRIVER_ANNOTATIONS = Map(CUSTOM_ANNOTATION_KEY -> CUSTOM_ANNOTATION_VALUE)
  private val DRIVER_CUSTOM_ENV1 = "customDriverEnv1"
  private val DRIVER_CUSTOM_ENV2 = "customDriverEnv2"
  private val DRIVER_ENVS = Map(
    DRIVER_CUSTOM_ENV1 -> DRIVER_CUSTOM_ENV1,
    DRIVER_CUSTOM_ENV2 -> DRIVER_CUSTOM_ENV2)
  private val TEST_IMAGE_PULL_SECRETS = Seq("my-secret-1", "my-secret-2")
  private val TEST_IMAGE_PULL_SECRET_OBJECTS =
    TEST_IMAGE_PULL_SECRETS.map { secret =>
      new LocalObjectReferenceBuilder().withName(secret).build()
    }
  test("Check the pod respects all configurations from the user.") {
    val sparkConf = new SparkConf()
      .set(KUBERNETES_DRIVER_POD_NAME, "spark-driver-pod")
      .set("spark.driver.cores", "2")
      .set(KUBERNETES_DRIVER_LIMIT_CORES, "4")
      .set(org.apache.spark.internal.config.DRIVER_MEMORY.key, "256M")
      .set(org.apache.spark.internal.config.DRIVER_MEMORY_OVERHEAD, 200L)
      .set(CONTAINER_IMAGE, "spark-driver:latest")
      .set(IMAGE_PULL_SECRETS, TEST_IMAGE_PULL_SECRETS.mkString(","))
    val kubernetesConf = KubernetesConf(
      sparkConf,
      KubernetesDriverSpecificConf(
        None,
        APP_NAME,
        MAIN_CLASS,
        APP_ARGS),
      RESOURCE_NAME_PREFIX,
      APP_ID,
      DRIVER_LABELS,
      DRIVER_ANNOTATIONS,
      Map.empty,
      DRIVER_ENVS)
    val featureStep = new BasicDriverFeatureStep(kubernetesConf)
    val basePod = SparkPod.initialPod()
    val configuredPod = featureStep.configurePod(basePod)
    assert(configuredPod.container.getName === DRIVER_CONTAINER_NAME)
    assert(configuredPod.container.getImage === "spark-driver:latest")
    assert(configuredPod.container.getImagePullPolicy === CONTAINER_IMAGE_PULL_POLICY)
    // 3 env vars: the two custom ones plus the driver bind address below.
    assert(configuredPod.container.getEnv.size === 3)
    val envs = configuredPod.container
      .getEnv
      .asScala
      .map(env => (env.getName, env.getValue))
      .toMap
    assert(envs(DRIVER_CUSTOM_ENV1) === DRIVER_ENVS(DRIVER_CUSTOM_ENV1))
    assert(envs(DRIVER_CUSTOM_ENV2) === DRIVER_ENVS(DRIVER_CUSTOM_ENV2))
    assert(configuredPod.pod.getSpec().getImagePullSecrets.asScala ===
      TEST_IMAGE_PULL_SECRET_OBJECTS)
    // Bind address must be sourced from the pod IP via the downward API.
    assert(configuredPod.container.getEnv.asScala.exists(envVar =>
      envVar.getName.equals(ENV_DRIVER_BIND_ADDRESS) &&
        envVar.getValueFrom.getFieldRef.getApiVersion.equals("v1") &&
        envVar.getValueFrom.getFieldRef.getFieldPath.equals("status.podIP")))
    val resourceRequirements = configuredPod.container.getResources
    val requests = resourceRequirements.getRequests.asScala
    assert(requests("cpu").getAmount === "2")
    // 456Mi = 256M driver memory + 200M configured overhead.
    assert(requests("memory").getAmount === "456Mi")
    val limits = resourceRequirements.getLimits.asScala
    assert(limits("memory").getAmount === "456Mi")
    assert(limits("cpu").getAmount === "4")
    val driverPodMetadata = configuredPod.pod.getMetadata
    assert(driverPodMetadata.getName === "spark-driver-pod")
    assert(driverPodMetadata.getLabels.asScala === DRIVER_LABELS)
    assert(driverPodMetadata.getAnnotations.asScala === DRIVER_ANNOTATIONS)
    assert(configuredPod.pod.getSpec.getRestartPolicy === "Never")
    val expectedSparkConf = Map(
      KUBERNETES_DRIVER_POD_NAME.key -> "spark-driver-pod",
      "spark.app.id" -> APP_ID,
      KUBERNETES_EXECUTOR_POD_NAME_PREFIX.key -> RESOURCE_NAME_PREFIX,
      "spark.kubernetes.submitInDriver" -> "true")
    assert(featureStep.getAdditionalPodSystemProperties() === expectedSparkConf)
  }
  test("Additional system properties resolve jars and set cluster-mode confs.") {
    // local:// URIs should be resolved to container-local paths; other schemes kept.
    val allJars = Seq("local:///opt/spark/jar1.jar", "hdfs:///opt/spark/jar2.jar")
    val allFiles = Seq("https://localhost:9000/file1.txt", "local:///opt/spark/file2.txt")
    val sparkConf = new SparkConf()
      .set(KUBERNETES_DRIVER_POD_NAME, "spark-driver-pod")
      .setJars(allJars)
      .set("spark.files", allFiles.mkString(","))
      .set(CONTAINER_IMAGE, "spark-driver:latest")
    val kubernetesConf = KubernetesConf(
      sparkConf,
      KubernetesDriverSpecificConf(
        None,
        APP_NAME,
        MAIN_CLASS,
        APP_ARGS),
      RESOURCE_NAME_PREFIX,
      APP_ID,
      DRIVER_LABELS,
      DRIVER_ANNOTATIONS,
      Map.empty,
      Map.empty)
    val step = new BasicDriverFeatureStep(kubernetesConf)
    val additionalProperties = step.getAdditionalPodSystemProperties()
    val expectedSparkConf = Map(
      KUBERNETES_DRIVER_POD_NAME.key -> "spark-driver-pod",
      "spark.app.id" -> APP_ID,
      KUBERNETES_EXECUTOR_POD_NAME_PREFIX.key -> RESOURCE_NAME_PREFIX,
      "spark.kubernetes.submitInDriver" -> "true",
      "spark.jars" -> "/opt/spark/jar1.jar,hdfs:///opt/spark/jar2.jar",
      "spark.files" -> "https://localhost:9000/file1.txt,/opt/spark/file2.txt")
    assert(additionalProperties === expectedSparkConf)
  }
}
| ddna1021/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala | Scala | apache-2.0 | 6,600 |
package net.elodina.mesos.zipkin.cli
import java.io.File
import joptsimple.{NonOptionArgumentSpec, OptionParser, OptionException, OptionSet}
import net.elodina.mesos.zipkin.Config
import net.elodina.mesos.zipkin.mesos.Scheduler
import net.elodina.mesos.zipkin.utils.{BindAddress, Period}
/**
 * CLI entry point for launching the Zipkin-on-Mesos scheduler. Configuration is
 * read from an optional properties file plus command-line flags, after which
 * [[Scheduler]] is started.
 */
object SchedulerCli {
  /** The scheduler CLI is enabled unless the ZM_NO_SCHEDULER env var is set. */
  def isEnabled: Boolean = System.getenv("ZM_NO_SCHEDULER") == null

  /**
   * Parses `args` (and the optional non-option config-file argument), populates
   * the global [[Config]] and starts the scheduler.
   * When `help` is true only the usage text is printed.
   */
  def handle(args: Array[String], help: Boolean = false): Unit = {
    val parser = newParser()
    parser.accepts("debug", "Debug mode. Default - " + Config.debug)
      .withRequiredArg().ofType(classOf[java.lang.Boolean])
    parser.accepts("genTraces", "Make scheduler generate traces by sending random framework messages from executor to scheduler. Default - " + Config.genTraces)
      .withRequiredArg().ofType(classOf[java.lang.Boolean])
    configureCLParser(parser,
      Map(
        "storage" -> ("""Storage for cluster state. Examples:
                        | - file:zipkin-mesos.json
                        | - zk:/zipkin-mesos
                        |Default - """.stripMargin + Config.storage),
        "master" -> """Master connection settings. Examples:
                      | - master:5050
                      | - master:5050,master2:5050
                      | - zk://master:2181/mesos
                      | - zk://username:password@master:2181
                      | - zk://master:2181,master2:2181/mesos""".stripMargin,
        "user" -> "Mesos user to run tasks. Default - none",
        "principal" -> "Principal (username) used to register framework. Default - none",
        "secret" -> "Secret (password) used to register framework. Default - none",
        "framework-name" -> ("Framework name. Default - " + Config.frameworkName),
        "framework-role" -> ("Framework role. Default - " + Config.frameworkRole),
        "framework-timeout" -> ("Framework timeout (30s, 1m, 1h). Default - " + Config.frameworkTimeout),
        "api" -> "Api url. Example: http://master:7000",
        "bind-address" -> "Scheduler bind address (master, 0.0.0.0, 192.168.50.*, if:eth1). Default - all",
        "log" -> "Log file to use. Default - stdout."
      )
    )
    val configArg = parser.nonOptions()
    if (help) {
      printLine("Start scheduler \\nUsage: scheduler [options] [config.properties]\\n")
      parser.printHelpOn(out)
      return
    }
    var options: OptionSet = null
    try {
      options = parser.parse(args: _*)
    } catch {
      case e: OptionException =>
        parser.printHelpOn(out)
        printLine()
        throw new CliError(e.getMessage)
    }
    // File-based defaults are loaded first so explicit CLI flags can override them.
    fetchConfigFile(options, configArg).foreach { configFile =>
      printLine("Loading config defaults from " + configFile)
      Config.loadFromFile(configFile)
    }
    loadConfigFromArgs(options)
    Scheduler.start()
  }

  /**
   * Resolves the config file to load defaults from: the explicit non-option
   * argument when given, otherwise the default file if it exists, else none.
   * Fails fast when an explicitly supplied file is missing.
   */
  private def fetchConfigFile(options: OptionSet, configArg: NonOptionArgumentSpec[String]): Option[File] = {
    Option(options.valueOf(configArg)) match {
      case Some(configArgValue) =>
        val configFile = new File(configArgValue)
        // BUGFIX: the check was inverted — it previously rejected files that DO
        // exist with a "not found" error and silently accepted missing ones.
        if (!configFile.exists()) throw new CliError(s"config-file $configFile not found")
        Some(configFile)
      case None if Config.DEFAULT_FILE.exists() =>
        Some(Config.DEFAULT_FILE)
      case _ => None
    }
  }

  /** Copies parsed CLI option values into the mutable global [[Config]], validating as it goes. */
  private def loadConfigFromArgs(options: OptionSet): Unit = {
    val provideOption = "Provide either cli option or config default value"
    readCLProperty[java.lang.Boolean]("debug", options).foreach(Config.debug = _)
    readCLProperty[java.lang.Boolean]("genTraces", options).foreach(Config.genTraces = _)
    readCLProperty[String]("storage", options).foreach(Config.storage = _)
    readCLProperty[String]("master", options).foreach(x => Config.master = Some(x))
    if (Config.master.isEmpty) throw new CliError(s"Undefined master. $provideOption")
    readCLProperty[String]("secret", options).foreach(x => Config.secret = Some(x))
    readCLProperty[String]("principal", options).foreach(x => Config.principal = Some(x))
    readCLProperty[String]("user", options).foreach(x => Config.user = Some(x))
    readCLProperty[String]("framework-name", options).foreach(Config.frameworkName = _)
    readCLProperty[String]("framework-role", options).foreach(Config.frameworkRole = _)
    readCLProperty[String]("framework-timeout", options).foreach {
      ft => try {
        Config.frameworkTimeout = new Period(ft)
      } catch {
        case e: IllegalArgumentException => throw new CliError("Invalid framework-timeout")
      }
    }
    readCLProperty[String]("api", options).foreach(x => Config.api = Some(x))
    if (Config.api.isEmpty) throw new CliError(s"Undefined api. $provideOption")
    readCLProperty[String]("bind-address", options).foreach {
      ba => try {
        Config.bindAddress = Some(new BindAddress(ba))
      } catch {
        case e: IllegalArgumentException => throw new CliError("Invalid bind-address")
      }
    }
    readCLProperty[String]("log", options).foreach(x => Config.log = Some(new File(x)))
    Config.log.foreach(log => printLine(s"Logging to $log"))
  }
}
| elodina/zipkin-mesos-framework | src/main/scala/net/elodina/mesos/zipkin/cli/SchedulerCli.scala | Scala | apache-2.0 | 5,132 |
package com.robocubs4205.cubscout
import java.nio.ByteBuffer
import java.security.SecureRandom
import org.apache.commons.codec.binary.Base64
import scala.util.Try
/** A 128-bit token value that renders as a URL-safe base64 string of its two longs. */
case class TokenVal(val1: Long, val2: Long) {
  /** Serialises both longs (16 bytes, big-endian) and base64url-encodes the result. */
  override def toString: String = {
    val buf = ByteBuffer
      .allocate(16)
      .putLong(val1)
      .putLong(val2)
    Base64.encodeBase64URLSafeString(buf.array())
  }
}
object TokenVal {
  // Cryptographically strong randomness source for fresh token material.
  val csprng = new SecureRandom()
  /** Generates a random token from two CSPRNG-drawn longs. */
  def apply(): TokenVal = apply(csprng.nextLong(), csprng.nextLong())
  /** Parses a base64url string back into a token; the Try fails on malformed/short input. */
  def apply(str: String): Try[TokenVal] = Try {
    val decoded = ByteBuffer.wrap(Base64.decodeBase64(str))
    decoded.rewind()
    val first = decoded.getLong()
    val second = decoded.getLong()
    apply(first, second)
  }
}
| robocubs4205/cubscout-server | common/src/main/scala/com/robocubs4205/cubscout/TokenVal.scala | Scala | mit | 735 |
/**
* *****************************************************************************
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use these files except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* *****************************************************************************
*/
package uk.ac.cam.eng
import resource._
import scala.concurrent._
import scala.concurrent.duration._
import ExecutionContext.Implicits.global
import scala.collection.JavaConversions.iterableAsScalaIterable
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.io.hfile.CacheConfig
import org.apache.hadoop.hbase.io.hfile.HFile
import org.apache.hadoop.io.IntWritable
import org.apache.hadoop.io.SequenceFile
import org.apache.hadoop.io.Writable
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.util.ReflectionUtils
import org.scalatest.BeforeAndAfterAll
import org.scalatest.ConfigMap
import org.scalatest.FunSuite
import uk.ac.cam.eng.extraction.Rule
import uk.ac.cam.eng.extraction.RuleExtractorTest
import uk.ac.cam.eng.extraction.RuleString
import uk.ac.cam.eng.extraction.hadoop.extraction.ExtractorJob
import uk.ac.cam.eng.extraction.hadoop.features.phrase.Source2TargetJob
import uk.ac.cam.eng.extraction.hadoop.features.phrase.Target2SourceJob
import uk.ac.cam.eng.extraction.hadoop.merge.MergeJob
import uk.ac.cam.eng.extraction.hadoop.merge.MergeJob.MergeFeatureMapper
import uk.ac.cam.eng.extraction.hadoop.merge.MergeJob.MergeRuleMapper
import uk.ac.cam.eng.rule.features.Feature
import uk.ac.cam.eng.rule.retrieval.HFileRuleReader
import uk.ac.cam.eng.util.CLI
import uk.ac.cam.eng.util.CLI.FilterParams
import uk.ac.cam.eng.extraction.hadoop.datatypes.FeatureMap
import uk.ac.cam.eng.extraction.hadoop.datatypes.ExtractedData
/**
* An integration test of the full extraction pipeline.
*/
class PipelineTest extends FunSuite with BeforeAndAfterAll {
  // Create the temp folder and seed the test filesystem before any test runs.
  override def beforeAll(configMap: ConfigMap) {
    RuleExtractorTest.folder.create()
    RuleExtractorTest.setupFileSystem()
  }
  // Remove the temp folder (and everything written under it) after all tests.
  override def afterAll(configMap: ConfigMap) {
    RuleExtractorTest.folder.delete()
  }
def filterSequenceFile(input: Path, output: Path, conf: Configuration) = {
val reader = new SequenceFile.Reader(FileSystem.get(conf), input, conf);
val key = ReflectionUtils.newInstance(
reader.getKeyClass(), conf).asInstanceOf[Writable]
val value = ReflectionUtils.newInstance(
reader.getValueClass(), conf).asInstanceOf[Writable]
for (writer <-
managed(new SequenceFile.Writer(FileSystem.get(conf), conf, output, key.getClass, value.getClass))) {
var count = 0
while (reader.next(key, value)) {
if (count % 10 == 0)
writer.append(key, value)
count += 1
}
}
}
def assertWithDelta(expected: Double)(result: Double) = {
val delta = 1D / 1024D
assert(Math.abs(expected - result) < delta)
}
test("The rule extraction job") {
val conf = new Configuration
conf.set("mapreduce.framework.name", "local");
conf.setInt(CLI.RuleParameters.MAX_SOURCE_PHRASE, 9)
conf.setInt(CLI.RuleParameters.MAX_SOURCE_ELEMENTS, 5)
conf.setInt(CLI.RuleParameters.MAX_TERMINAL_LENGTH, 5)
conf.setInt(CLI.RuleParameters.MAX_NONTERMINAL_SPAN, 10)
conf.setBoolean(CLI.ExtractorJobParameters.REMOVE_MONOTONIC_REPEATS, true)
conf.setBoolean(CLI.ExtractorJobParameters.COMPATIBILITY_MODE, true)
conf.set(CLI.Provenance.PROV, "all");
val job = ExtractorJob.getJob(conf)
val trainingData = new Path(RuleExtractorTest.trainingDataFile.getAbsolutePath)
val filteredData = new Path(RuleExtractorTest.folder.newFile().getAbsolutePath)
filterSequenceFile(trainingData, filteredData, conf)
FileInputFormat.setInputPaths(job, filteredData)
val extractOut = new Path("extractOut")
FileOutputFormat.setOutputPath(job, extractOut);
job.waitForCompletion(true);
val s2tOut = new Path("s2t")
val s2tJob = (new Source2TargetJob).getJob(conf)
FileInputFormat.setInputPaths(s2tJob, extractOut)
FileOutputFormat.setOutputPath(s2tJob, s2tOut);
val fs2t = Future {
s2tJob.waitForCompletion(true)
}
val t2sOut = new Path("t2s")
val t2sJob = (new Target2SourceJob).getJob(conf)
FileInputFormat.setInputPaths(t2sJob, extractOut)
FileOutputFormat.setOutputPath(t2sJob, t2sOut);
val ft2s = Future {
t2sJob.waitForCompletion(true)
}
Await.ready(fs2t, 1 hours)
Await.ready(ft2s, 1 hours)
conf.set(FilterParams.MIN_SOURCE2TARGET_PHRASE, "0.01");
conf.set(FilterParams.MIN_TARGET2SOURCE_PHRASE, "1e-10");
conf.set(FilterParams.MIN_SOURCE2TARGET_RULE, "0.01");
conf.set(FilterParams.MIN_TARGET2SOURCE_RULE, "1e-10");
conf.setBoolean(FilterParams.PROVENANCE_UNION, true);
val patternsFile = RuleExtractorTest.copyDataToTestDir("/CF.rulextract.patterns").toPath.toUri.toString;
conf.set(FilterParams.SOURCE_PATTERNS, patternsFile)
val allowedFile = RuleExtractorTest.copyDataToTestDir("/CF.rulextract.filter.allowedonly").toPath.toUri.toString;
conf.set(FilterParams.ALLOWED_PATTERNS, allowedFile)
val mergeJob = MergeJob.getJob(conf);
for (featurePath <- List(s2tOut, t2sOut)) {
MultipleInputs.addInputPath(mergeJob, featurePath,
classOf[SequenceFileInputFormat[Rule, FeatureMap]], classOf[MergeFeatureMapper]);
}
MultipleInputs.addInputPath(mergeJob, extractOut,
classOf[SequenceFileInputFormat[Rule, ExtractedData]], classOf[MergeRuleMapper]);
val mergeOut = new Path("mergeOut")
FileOutputFormat.setOutputPath(mergeJob, mergeOut);
mergeJob.waitForCompletion(true)
val cacheConf = new CacheConfig(conf);
val hfReader = HFile.createReader(FileSystem.get(conf),
new Path(mergeOut, "part-r-00000.hfile"), cacheConf);
val reader = new HFileRuleReader(hfReader);
var count = 0
var notTested = 0
for (entry <- reader) {
count += 1
val data = entry.getSecond
entry.getFirst match {
case Rule("5660 1294") => {
assertWithDelta(-1.0986122886681098)(data.getFeatures.get(Feature.SOURCE2TARGET_PROBABILITY).get(new IntWritable(0)).get)
assertWithDelta(-0.6931471805599453)(data.getFeatures.get(Feature.TARGET2SOURCE_PROBABILITY).get(new IntWritable(0)).get)
}
case Rule("1804_6 2967_8_3") => {
assertWithDelta(-1.3862943611198906)(data.getFeatures.get(Feature.SOURCE2TARGET_PROBABILITY).get(new IntWritable(0)).get)
assertWithDelta(0.0)(data.getFeatures.get(Feature.TARGET2SOURCE_PROBABILITY).get(new IntWritable(0)).get)
}
case Rule("V_437 V_3_920") => {
assertWithDelta(-0.916290731874155)(data.getFeatures.get(Feature.SOURCE2TARGET_PROBABILITY).get(new IntWritable(0)).get)
assertWithDelta(0.0)(data.getFeatures.get(Feature.TARGET2SOURCE_PROBABILITY).get(new IntWritable(0)).get)
}
case Rule("2617_3_10619_2675 507_4015_3083") => {
assertWithDelta(0.0)(data.getFeatures.get(Feature.SOURCE2TARGET_PROBABILITY).get(new IntWritable(0)).get)
assertWithDelta(0.0)(data.getFeatures.get(Feature.TARGET2SOURCE_PROBABILITY).get(new IntWritable(0)).get)
}
case Rule("222_1148_34716_151055_5_265808 1819_1857_3312_9_3_670870") => {
assertWithDelta(0.0)(data.getFeatures.get(Feature.SOURCE2TARGET_PROBABILITY).get(new IntWritable(0)).get)
assertWithDelta(0.0)(data.getFeatures.get(Feature.TARGET2SOURCE_PROBABILITY).get(new IntWritable(0)).get)
}
case _ => notTested += 1
}
}
assertResult(125834)(count)
assertResult(5)(count - notTested)
val seekReader = new HFileRuleReader(hfReader)
assert(seekReader.seek(RuleString("V_437")))
assert(seekReader.getRulesForSource.foldLeft(0)((count, _) => count + 1) == 2)
}
} | ucam-smt/ucam-smt | java/ruleXtract/src/it/scala/uk/ac/cam/eng/PipelineTest.scala | Scala | apache-2.0 | 8,571 |
/*
* Copyright 2016 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package testdata.verified2
/**
* Created by rbraunstein on 5/31/16.
*/
class InheritsTest {
  // NOTE: the "//-" comments in this file are Kythe verifier goals, not plain
  // comments: an "@name" in a goal anchors to a token on the source line that
  // immediately follows the goal block. Do not insert lines between a goal
  // and its target declaration, and do not rename the tokens they point at.
  // TODO(rbraunstein): Person doesn't extend anyting
  //- @Person defines/binding Person
  class Person(name: String)
  //- @Athlete defines/binding Athlete
  //- Athlete extends Person
  class Athlete(name: String) extends Person(name) {
    //- @play defines/binding PlayMeth
    def play(): Unit = ()
  }
  //- @Soccer defines/binding Soccer
  trait Soccer {
    def kick(): Unit
  }
  //- @Hockey defines/binding Hockey
  trait Hockey {
    def slap(): Unit
  }
  //- @Golf defines/binding Golf
  trait Golf {
    def swing(): Unit
    //- @waggle defines/binding TraitWaggle
    def waggle(): Int
  }
  //- @TriAthlete defines/binding Tri
  class TriAthlete(name: String) extends Athlete(name) with Soccer with Hockey with Golf {
    // NOTE(review): "= kick() ; slap()" parses as a method body of only
    // kick(), with slap() becoming a constructor-level statement — likely
    // unintended, but the verifier goals here only check bindings/overrides.
    //- @play defines/binding TriPlayMeth
    override def play(): Unit = kick() ; slap()
    def kick() = ()
    def slap() = ()
    def swing() = ()
    //- @waggle defines/binding TriAthleteWaggle
    def waggle = 7
  }
  //- Tri extends Athlete
  //- Tri extends Golf
  //- Tri extends Hockey
  //- Tri extends Soccer
  //- @OldTriAthlete defines/binding OldTri
  class OldTriAthlete(name: String) extends TriAthlete(name) {
    //- @play defines/binding OldPlayMeth
    override def play(): Unit = {
      super.play
      swing
    }
    override def kick() = ()
    //- !{@waggle defines/binding TraitWaggle}
    //- !{@waggle defines/binding TriAthleteWaggle}
    override def waggle = 13
    override def toString = name
  }
  // Negated goals below check that "extends" edges are only emitted for the
  // direct parent, not transitively.
  //- OldTri extends Tri
  //- !{OldTri extends Athlete}
  //- !{OldTri extends Golf}
  //- !{OldTri extends Person}
  //- OldPlayMeth overrides TriPlayMeth
  //- TriPlayMeth overrides PlayMeth
  //
}
| kythe/kythe-contrib | kythe/scala/com/google/devtools/kythe/analyzers/scala/testdata/verified/InheritsTest.scala | Scala | apache-2.0 | 2,394 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.arrow.data.ArrowDataStore
import org.locationtech.geomesa.arrow.tools.ArrowDataStoreCommand
import org.locationtech.geomesa.arrow.tools.ArrowDataStoreCommand.UrlParam
import org.locationtech.geomesa.arrow.tools.stats.ArrowStatsCountCommand.ArrowStatsCountParams
import org.locationtech.geomesa.tools.stats.StatsCountCommand
import org.locationtech.geomesa.tools.stats.StatsCountCommand.StatsCountParams
/**
 * `stats-count` command for Arrow data stores.
 *
 * `execute` forces `params.exact` to true before delegating to the generic
 * stats-count logic — presumably because Arrow stores cannot serve estimated
 * counts from cached statistics (TODO confirm against StatsCountCommand).
 */
class ArrowStatsCountCommand extends StatsCountCommand[ArrowDataStore] with ArrowDataStoreCommand {
  override val params = new ArrowStatsCountParams
  override def execute(): Unit = {
    // Always run an exact count, overriding whatever the user requested.
    params.exact = true
    super.execute()
  }
}
object ArrowStatsCountCommand {
  /** JCommander parameters: the standard stats-count options plus the Arrow file URL. */
  @Parameters(commandDescription = "Calculate feature counts in a GeoMesa feature type")
  class ArrowStatsCountParams extends StatsCountParams with UrlParam
}
| aheyne/geomesa | geomesa-arrow/geomesa-arrow-tools/src/main/scala/org/locationtech/geomesa/arrow/tools/stats/ArrowStatsCountCommand.scala | Scala | apache-2.0 | 1,422 |
package debop4s.mongo.springdata.order
import debop4s.mongo.springdata.ApplicationConfigurationFunSuite
import debop4s.mongo.springdata.model.EmailAddress
/**
* OrderRepositoryFunSuite
* @author sunghyouk.bae@gmail.com 14. 10. 22.
*/
/**
 * Integration test for the order repository: looks a customer up by e-mail
 * address and verifies that at least one order exists for that customer.
 */
class OrderRepositoryFunSuite extends ApplicationConfigurationFunSuite {
  before {
    // setup() comes from ApplicationConfigurationFunSuite — presumably seeds
    // the test data used below; TODO confirm.
    setup()
  }
  test("find orders by customer") {
    // The seeded data is expected to contain this customer with orders.
    val customer = customerRepo.findByEmailAddress(new EmailAddress("sunghyouk.bae@gmail.com"))
    customer should not be null
    val orders = orderRepo.findByCustomer(customer)
    orders.size() should be > 0
  }
}
| debop/debop4s | debop4s-mongo/src/test/scala/debop4s/mongo/springdata/order/OrderRepositoryFunSuite.scala | Scala | apache-2.0 | 597 |
package com.stockit.client
import java.io.IOException
import java.net.URL
import java.text.SimpleDateFormat
import java.util.regex.Pattern
import java.util.{SimpleTimeZone, Date}
import com.stockit.module.service.SolrClientModule
import org.apache.solr.client.solrj.request.QueryRequest
import org.apache.solr.client.solrj.{SolrRequest, SolrQuery, SolrClient}
import org.apache.solr.common.{SolrDocumentList, SolrDocument}
import scaldi.Injectable
import scala.collection.mutable.ListBuffer
/**
* Created by jmcconnell1 on 4/22/15.
*/
/**
 * Solr client wrapper for the article/stock-history index. Supports fetching
 * documents by exact history date, nearest-neighbour lookups for an article
 * (restricted to the date range of the training documents), and retrieving
 * the corpus sorted by history date.
 */
class Client extends Injectable {
  implicit val module = new SolrClientModule
  val host = "http://solr.deepdishdev.com:8983/solr"
  val client: SolrClient = inject[SolrClient]('solrClient and 'httpSolrClient and 'articleStockSolrClient)
  // Maximum number of rows requested when pulling the whole corpus.
  val instanceCount = 10000
  // Maximum number of characters of article content used in a neighbour query.
  val queryCutoff = 5000 // 100 performed better?

  /** Fetches all documents whose historyDate matches `date` exactly. */
  def fetch(date: Date) = {
    val request = dateQueryRequest(date)
    try {
      val response = request.process(client)
      documentListToList(response.getResults)
    } catch {
      case e: IOException => {
        println("Error on query:" + request.toString)
        throw e
      }
    }
  }

  /** Logs any neighbour whose historyDate falls after `latestDate` (sanity check). */
  def ensureNeighborsBeforeDate(documents: List[SolrDocument], latestDate: Date) = {
    documents.foreach { doc =>
      val date = dateOfDoc(doc)
      if (date.after(latestDate)) {
        println(s"Article ${doc.getFieldValue("articleId")} has date:[$date] which is after $latestDate}")
      }
    }
  }

  /** Throws if any neighbour has the same article id as `docId` (sanity check). */
  def ensureNeighborsDontIncludeSelf(documents: List[SolrDocument], docId: String) = {
    documents.foreach { doc =>
      val id = idOfDoc(doc)
      if (id == docId) {
        throw new Exception(s"Article $id was returned as neighbor")
      }
    }
  }

  /**
   * Returns up to `number` neighbour documents for `doc`, restricted to the
   * date range spanned by `trainDocs`.
   *
   * IO errors are retried (NOTE(review): the retry is unbounded — a
   * persistent failure recurses forever); any other error is logged and an
   * empty list is returned so callers can proceed best-effort.
   */
  def neighbors(trainDocs: List[SolrDocument], doc: SolrDocument, number: Int): List[SolrDocument] = {
    val request = neighborQuery(trainDocs, doc, number)
    try {
      val response = client.query(request.getParams, SolrRequest.METHOD.POST)
      val documents = documentListToList(response.getResults)
      ensureNeighborsBeforeDate(documents, dateOfDoc(doc))
      documents
    } catch {
      case e: IOException => {
        println("Error on query:" + request.toString)
        neighbors(trainDocs, doc, number)
      }
      case e: Exception => {
        // Log the actual failure instead of swallowing it silently, then
        // degrade gracefully by returning no neighbours.
        println(s"Unexpected error ($e) on query ${request.toString}; returning no neighbors")
        Nil
      }
    }
  }

  /** Extracts the historyDate field as a java.util.Date. */
  def dateOfDoc(doc: SolrDocument) = {
    doc.getFieldValue("historyDate").asInstanceOf[Date]
  }

  /** Extracts the articleId field as a String. */
  def idOfDoc(doc: SolrDocument) = {
    doc.get("articleId").toString()
  }

  /**
   * Builds a neighbour query from the document's content — stripped of
   * non-word characters and truncated to `queryCutoff` chars — filtered to
   * the [min, max] historyDate range of the training documents.
   */
  def neighborQuery(trainDocs: List[SolrDocument], doc: SolrDocument, count: Int) = {
    // Strip every run of characters that is not whitespace, a digit or a word
    // character, so the raw content is safe to use as a Solr query string.
    val pattern = Pattern.compile("[^\\\\s\\\\d\\\\w]+")
    var queryString = doc.getFieldValue("content").toString
    queryString = pattern.matcher(queryString).replaceAll("")
    queryString = queryString.substring(0, List(queryCutoff, queryString.length).min)
    val (minDate: Date, maxDate: Date) = minMaxDate(trainDocs)
    val query = new SolrQuery()
    query.setParam("q", queryString)
    query.setRows(count)
    query.setStart(0)
    query.setFields("articleId","title","content","date","stockHistoryId","symbol","historyDate","open","high","low","close","score","adjClose","volume","id","_version_")
    val fq = String.format("historyDate:[%s TO %s]", formatDateForSolr(minDate, isMin = true), formatDateForSolr(maxDate, isMin = false))
    query.setFilterQueries(fq)
    new QueryRequest(query)
  }

  /**
   * Renders `date` as a Solr range endpoint: start of day for the lower
   * bound, end of day for the upper bound.
   */
  def formatDateForSolr(date: Date, isMin: Boolean) = {
    try {
      // End of day is 23:59:59 — the previous "T59:59:59Z" is not a valid time.
      s"${dayFormatter.format(date)}${if(isMin) "T00:00:00Z" else "T23:59:59Z"}"
    } catch {
      case e: Exception => {
        println(s"Error: $date, $isMin")
        throw e
      }
    }
  }

  /** Returns the (earliest, latest) historyDate found in `trainDocs`. */
  def minMaxDate(trainDocs: List[SolrDocument]) = {
    val dates: List[Date] = trainDocs.flatMap { doc =>
      doc.getFieldValue("historyDate") match {
        case date: Date => Some(date)
        case _ => None
      }
    }
    (dates.min, dates.max)
  }

  /** Returns up to `instanceCount` documents ordered by ascending historyDate. */
  def sortedByDate() = {
    val request = sortedByDateQuery
    try {
      val response = request.process(client)
      val documents = documentListToList(response.getResults())
      // The Solr query sorts descending; re-sort ascending client-side.
      documents.sortBy(_.getFieldValue("historyDate").asInstanceOf[Date])
    } catch {
      case e: IOException => {
        println("Error on query:" + request.toString)
        throw e
      }
    }
  }

  /** Match-all query sorted by historyDate descending, capped at instanceCount rows. */
  def sortedByDateQuery = {
    val query = new SolrQuery
    query.setSort("historyDate", SolrQuery.ORDER.desc)
    query.setQuery("*:*")
    query.setRows(instanceCount)
    new QueryRequest(query)
  }

  /** Builds a query matching documents whose historyDate equals `date`. */
  def dateQueryRequest(date: Date) = {
    val string = parseDate(date)
    val query = new SolrQuery
    query.setQuery("historyDate:" + string)
    new QueryRequest(query)
  }

  /** Formats `date` and escapes it for use inside a Solr query string. */
  def parseDate(date: Date) = {
    parseDateString(dateToString(date))
  }

  /** Re-inserts the 'T'/'Z' markers and escapes colons for Solr query syntax. */
  def parseDateString(rawString: String) = {
    val withChars = rawString.substring(0, 10) + "T" + rawString.substring(11, rawString.length) + "Z"
    withChars.replace(":", "\\\\:")
  }

  def dateToString(date: Date) = {
    formatter.format(date)
  }

  /** Inverse of parseDateString's markup: drops the 'T' and 'Z' markers. */
  def createParsableString(dateString: String) = {
    dateString.replace("T", " ").replace("Z", "")
  }

  /** Copies a SolrDocumentList into an immutable Scala List. */
  def documentListToList(list: SolrDocumentList) = {
    val listBuffer = new ListBuffer[SolrDocument]
    val it = list.listIterator()
    while (it.hasNext) {
      listBuffer += it.next
    }
    listBuffer.toList
  }

  /**
   * UTC formatter for full timestamps. A fresh instance is returned per call
   * (SimpleDateFormat is not thread-safe, so sharing one would be unsafe).
   * "HH" (24-hour) replaces the previous "hh" (12-hour), which mis-rendered
   * afternoon times as 01-11.
   */
  def formatter = synchronized {
    val format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")
    format.setTimeZone(new SimpleTimeZone(SimpleTimeZone.UTC_TIME, "UTC"))
    format
  }

  /**
   * Day-only formatter used for Solr range endpoints.
   * NOTE(review): this uses the JVM default timezone while `formatter` uses
   * UTC — confirm whether range endpoints should also be UTC.
   */
  def dayFormatter = synchronized {
    val format = new SimpleDateFormat("yyyy-MM-dd")
    format
  }
}
| stockit/stockit | src/main/scala/com/stockit/client/Client.scala | Scala | mit | 6,576 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.{Equality, Every, One, Many, Prettifier}
import org.scalactic.Uniformity
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class EveryShouldContainTheSameElementsAsSpec extends FunSpec {
private val prettifier = Prettifier.default
// Recursively upper-cases test data: every element of an Every, Strings,
// Chars, (String, String) pairs, and the key/value of a java.util.Map.Entry
// with String key and value; any other value is returned unchanged.
private def upperCase(value: Any): Any =
  value match {
    case l: Every[_] => l.map(upperCase(_))
    case s: String => s.toUpperCase
    case c: Char => c.toString.toUpperCase.charAt(0)
    case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
    case e: java.util.Map.Entry[_, _] =>
      (e.getKey, e.getValue) match {
        case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
        case _ => value
      }
    case _ => value
  }
// Case-insensitive String Equality, used below to exercise the implicit and
// `decided by` Equality variants of the matchers.
val upperCaseStringEquality =
  new Equality[String] {
    def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
  }
//ADDITIONAL//
// Exercises `contain theSameElementsAs` on a single Every in all six matcher
// forms: positive, parenthesised, `not`, parenthesised `not`, `shouldNot`,
// and parenthesised `shouldNot`.
// NOTE: the `thisLineNumber - 3` assertions depend on the exact line offsets
// inside each `it` body — do not insert or remove lines within those bodies.
describe("an Every") {
  // Fixtures: fumList satisfies the positive expectations, toList the negated ones.
  val fumList: Every[String] = Every("fum", "foe", "fie", "fee")
  val toList: Every[String] = Every("you", "to", "birthday", "happy")
  describe("when used with contain theSameElementsAs (..)") {
    it("should do nothing if valid, else throw a TFE with an appropriate error message") {
      fumList should contain theSameElementsAs Set("fee", "fie", "foe", "fum")
      val e1 = intercept[TestFailedException] {
        fumList should contain theSameElementsAs Set("happy", "birthday", "to", "you")
      }
      e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
      e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
      e1.message.get should be (Resources.didNotContainSameElements(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Set("happy", "birthday", "to", "you"))))
    }
    it("should use the implicit Equality in scope") {
      implicit val ise = upperCaseStringEquality
      fumList should contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM")
      intercept[TestFailedException] {
        fumList should contain theSameElementsAs Set("fee", "fie", "foe")
      }
    }
    it("should use an explicitly provided Equality") {
      (fumList should contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM")) (decided by upperCaseStringEquality)
      intercept[TestFailedException] {
        (fumList should contain theSameElementsAs Set("fee", "fie", "foe")) (decided by upperCaseStringEquality)
      }
      intercept[TestFailedException] {
        fumList should contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM ")
      }
      (fumList should contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM ")) (after being lowerCased and trimmed)
    }
  }
  describe("when used with (contain theSameElementsAs (..))") {
    it("should do nothing if valid, else throw a TFE with an appropriate error message") {
      fumList should (contain theSameElementsAs Set("fee", "fie", "foe", "fum"))
      val e1 = intercept[TestFailedException] {
        fumList should (contain theSameElementsAs Set("happy", "birthday", "to", "you"))
      }
      e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
      e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
      e1.message.get should be (Resources.didNotContainSameElements(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Set("happy", "birthday", "to", "you"))))
    }
    it("should use the implicit Equality in scope") {
      implicit val ise = upperCaseStringEquality
      fumList should (contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM"))
      intercept[TestFailedException] {
        fumList should (contain theSameElementsAs Set("fee", "fie", "foe"))
      }
    }
    it("should use an explicitly provided Equality") {
      (fumList should (contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
      intercept[TestFailedException] {
        (fumList should (contain theSameElementsAs Set("fee", "fie", "foe"))) (decided by upperCaseStringEquality)
      }
      intercept[TestFailedException] {
        fumList should (contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM "))
      }
      (fumList should (contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
    }
  }
  describe("when used with not contain theSameElementsAs (..)") {
    it("should do nothing if valid, else throw a TFE with an appropriate error message") {
      toList should not contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
      val e1 = intercept[TestFailedException] {
        toList should not contain theSameElementsAs (Set("happy", "birthday", "to", "you"))
      }
      e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
      e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
      e1.message.get should be (Resources.containedSameElements(decorateToStringValue(prettifier, toList), decorateToStringValue(prettifier, Set("happy", "birthday", "to", "you"))))
    }
    it("should use the implicit Equality in scope") {
      implicit val ise = upperCaseStringEquality
      toList should not contain theSameElementsAs (Set("happy", "birthday", "to"))
      intercept[TestFailedException] {
        toList should not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))
      }
    }
    it("should use an explicitly provided Equality") {
      (toList should not contain theSameElementsAs (Set("happy", "birthday", "to"))) (decided by upperCaseStringEquality)
      intercept[TestFailedException] {
        (toList should not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
      }
      toList should not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
      intercept[TestFailedException] {
        (toList should not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
      }
    }
  }
  describe("when used with (not contain theSameElementsAs (..))") {
    it("should do nothing if valid, else throw a TFE with an appropriate error message") {
      toList should (not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
      val e1 = intercept[TestFailedException] {
        toList should (not contain theSameElementsAs (Set("happy", "birthday", "to", "you")))
      }
      e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
      e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
      e1.message.get should be (Resources.containedSameElements(decorateToStringValue(prettifier, toList), decorateToStringValue(prettifier, Set("happy", "birthday", "to", "you"))))
    }
    it("should use the implicit Equality in scope") {
      implicit val ise = upperCaseStringEquality
      toList should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
      intercept[TestFailedException] {
        toList should (not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
      }
    }
    it("should use an explicitly provided Equality") {
      (toList should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
      intercept[TestFailedException] {
        (toList should (not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))) (decided by upperCaseStringEquality)
      }
      toList should (not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))
      intercept[TestFailedException] {
        (toList should (not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))) (after being lowerCased and trimmed)
      }
    }
  }
  describe("when used with shouldNot contain theSameElementsAs (..)") {
    it("should do nothing if valid, else throw a TFE with an appropriate error message") {
      toList shouldNot contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
      val e1 = intercept[TestFailedException] {
        toList shouldNot contain theSameElementsAs (Set("happy", "birthday", "to", "you"))
      }
      e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
      e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
      e1.message.get should be (Resources.containedSameElements(decorateToStringValue(prettifier, toList), decorateToStringValue(prettifier, Set("happy", "birthday", "to", "you"))))
    }
    it("should use the implicit Equality in scope") {
      implicit val ise = upperCaseStringEquality
      toList shouldNot contain theSameElementsAs (Set("happy", "birthday", "to"))
      intercept[TestFailedException] {
        toList shouldNot contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))
      }
    }
    it("should use an explicitly provided Equality") {
      (toList shouldNot contain theSameElementsAs (Set("happy", "birthday", "to"))) (decided by upperCaseStringEquality)
      intercept[TestFailedException] {
        (toList shouldNot contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
      }
      toList shouldNot contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
      intercept[TestFailedException] {
        (toList shouldNot contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
      }
    }
  }
  describe("when used with shouldNot (contain theSameElementsAs (..))") {
    it("should do nothing if valid, else throw a TFE with an appropriate error message") {
      toList shouldNot (contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
      val e1 = intercept[TestFailedException] {
        toList shouldNot (contain theSameElementsAs (Set("happy", "birthday", "to", "you")))
      }
      e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
      e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
      e1.message.get should be (Resources.containedSameElements(decorateToStringValue(prettifier, toList), decorateToStringValue(prettifier, Set("happy", "birthday", "to", "you"))))
    }
    it("should use the implicit Equality in scope") {
      implicit val ise = upperCaseStringEquality
      toList shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
      intercept[TestFailedException] {
        toList shouldNot (contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
      }
    }
    it("should use an explicitly provided Equality") {
      (toList shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
      intercept[TestFailedException] {
        (toList shouldNot (contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))) (decided by upperCaseStringEquality)
      }
      toList shouldNot (contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))
      intercept[TestFailedException] {
        (toList shouldNot (contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))) (after being lowerCased and trimmed)
      }
    }
  }
}
describe("an every of Everys") {
val list1s: Every[Every[Int]] = Every(Every(3, 2, 1), Every(3, 2, 1), Every(3, 2, 1))
val lists: Every[Every[Int]] = Every(Every(3, 2, 1), Every(3, 2, 1), Every(4, 3, 2))
val hiLists: Every[Every[String]] = Every(Every("hi", "he"), Every("hi", "he"), Every("hi", "he"))
val toLists: Every[Every[String]] = Every(Every("to", "you"), Every("to", "you"), Every("to", "you"))
describe("when used with contain theSameElementsAs (..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should contain theSameElementsAs Set(1, 2, 3)
atLeast (2, lists) should contain theSameElementsAs Set(1, 2, 3)
atMost (2, lists) should contain theSameElementsAs Set(1, 2, 3)
no (lists) should contain theSameElementsAs Set(3, 4, 5)
val e1 = intercept[TestFailedException] {
all (lists) should contain theSameElementsAs Set(1, 2, 3)
}
e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
val offendingLine = thisLineNumber - 3
e1.failedCodeLineNumber.get should be (offendingLine)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, " + decorateToStringValue(prettifier, Many(4, 3, 2)) + " did not contain the same elements as " + decorateToStringValue(prettifier, Set(1, 2, 3)) + " (EveryShouldContainTheSameElementsAsSpec.scala:" + offendingLine + ") \\n" +
"in " + decorateToStringValue(prettifier, lists)))
}
it("should use the implicit Equality in scope") {
all (hiLists) should contain theSameElementsAs Set("he", "hi")
intercept[TestFailedException] {
all (hiLists) should contain theSameElementsAs Set("ho", "hi")
}
implicit val ise = upperCaseStringEquality
all (hiLists) should contain theSameElementsAs Set("HE", "HI")
intercept[TestFailedException] {
all (hiLists) should contain theSameElementsAs Set("HO", "HI")
}
}
it("should use an explicitly provided Equality") {
(all (hiLists) should contain theSameElementsAs Set("HE", "HI")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (hiLists) should contain theSameElementsAs Set("HO", "HI")) (decided by upperCaseStringEquality)
}
implicit val ise = upperCaseStringEquality
(all (hiLists) should contain theSameElementsAs Set("he", "hi")) (decided by defaultEquality[String])
intercept[TestFailedException] {
(all (hiLists) should contain theSameElementsAs Set("ho", "hi")) (decided by defaultEquality[String])
}
}
}
describe("when used with (contain theSameElementsAs (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (contain theSameElementsAs Set(1, 2, 3))
atLeast (2, lists) should (contain theSameElementsAs Set(1, 2, 3))
atMost (2, lists) should (contain theSameElementsAs Set(1, 2, 3))
no (lists) should (contain theSameElementsAs Set(3, 4, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (contain theSameElementsAs Set(1, 2, 3))
}
e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
val offendingLine = thisLineNumber - 3
e1.failedCodeLineNumber.get should be (offendingLine)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, " + decorateToStringValue(prettifier, Many(4, 3, 2)) + " did not contain the same elements as " + decorateToStringValue(prettifier, Set(1, 2, 3)) + " (EveryShouldContainTheSameElementsAsSpec.scala:" + offendingLine + ") \\n" +
"in " + decorateToStringValue(prettifier, lists)))
}
it("should use the implicit Equality in scope") {
all (hiLists) should (contain theSameElementsAs Set("he", "hi"))
intercept[TestFailedException] {
all (hiLists) should (contain theSameElementsAs Set("ho", "hi"))
}
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain theSameElementsAs Set("HE", "HI"))
intercept[TestFailedException] {
all (hiLists) should (contain theSameElementsAs Set("HO", "HI"))
}
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (contain theSameElementsAs Set("HE", "HI"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (hiLists) should (contain theSameElementsAs Set("HO", "HI"))) (decided by upperCaseStringEquality)
}
implicit val ise = upperCaseStringEquality
(all (hiLists) should (contain theSameElementsAs Set("he", "hi"))) (decided by defaultEquality[String])
intercept[TestFailedException] {
(all (hiLists) should (contain theSameElementsAs Set("ho", "hi"))) (decided by defaultEquality[String])
}
}
}
    describe("when used with not contain theSameElementsAs (..)") {
      // Exercises `should not contain theSameElementsAs` under the `all` inspector.
      // NOTE: `thisLineNumber - 3` in the first test must stay exactly 3 lines below
      // the failing expectation inside intercept; do not insert lines between them.
      it("should do nothing if valid, else throw a TFE with an appropriate error message") {
        all (toLists) should not contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
        val e1 = intercept[TestFailedException] {
          all (toLists) should not contain theSameElementsAs (Set("you", "to"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
        val offendingLine = thisLineNumber - 3
        e1.failedCodeLineNumber.get should be (offendingLine)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          " at index 0, " + decorateToStringValue(prettifier, Many("to", "you")) + " contained the same elements as " + decorateToStringValue(prettifier, Set("you", "to")) + " (EveryShouldContainTheSameElementsAsSpec.scala:" + offendingLine + ") \\n" +
          "in " + decorateToStringValue(prettifier, toLists)))
      }
      // The matcher must honor an Equality placed in implicit scope.
      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality
        all (toLists) should not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))
        intercept[TestFailedException] {
          all (toLists) should not contain theSameElementsAs (Set("YOU", "TO"))
        }
      }
      // `decided by` / `after being` must override whatever Equality is in scope.
      it("should use an explicitly provided Equality") {
        (all (toLists) should not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) should not contain theSameElementsAs (Set("YOU", "TO"))) (decided by upperCaseStringEquality)
        }
        all (toLists) should not contain theSameElementsAs (Set(" YOU ", " TO "))
        intercept[TestFailedException] {
          (all (toLists) should not contain theSameElementsAs (Set(" YOU ", " TO "))) (after being lowerCased and trimmed)
        }
      }
    }
    describe("when used with (not contain theSameElementsAs (..))") {
      // Same assertions as the infix form, but with the matcher fully parenthesized.
      // NOTE: `thisLineNumber - 3` in the first test must stay exactly 3 lines below
      // the failing expectation inside intercept; do not insert lines between them.
      it("should do nothing if valid, else throw a TFE with an appropriate error message") {
        all (toLists) should (not contain theSameElementsAs (Set("fee", "fie", "foe", "fum")))
        val e1 = intercept[TestFailedException] {
          all (toLists) should (not contain theSameElementsAs (Set("you", "to")))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
        val offendingLine = thisLineNumber - 3
        e1.failedCodeLineNumber.get should be (offendingLine)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          " at index 0, " + decorateToStringValue(prettifier, Many("to", "you")) + " contained the same elements as " + decorateToStringValue(prettifier, Set("you", "to")) + " (EveryShouldContainTheSameElementsAsSpec.scala:" + offendingLine + ") \\n" +
          "in " + decorateToStringValue(prettifier, toLists)))
      }
      // The matcher must honor an Equality placed in implicit scope.
      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality
        all (toLists) should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
        intercept[TestFailedException] {
          all (toLists) should (not contain theSameElementsAs (Set("YOU", "TO")))
        }
      }
      // `decided by` / `after being` must override whatever Equality is in scope.
      it("should use an explicitly provided Equality") {
        (all (toLists) should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) should (not contain theSameElementsAs (Set("YOU", "TO")))) (decided by upperCaseStringEquality)
        }
        all (toLists) should (not contain theSameElementsAs (Set(" YOU ", " TO ")))
        intercept[TestFailedException] {
          (all (toLists) should (not contain theSameElementsAs (Set(" YOU ", " TO ")))) (after being lowerCased and trimmed)
        }
      }
    }
    describe("when used with shouldNot contain theSameElementsAs (..)") {
      // Same assertions again via the `shouldNot` form of the negated matcher.
      // NOTE: `thisLineNumber - 3` in the first test must stay exactly 3 lines below
      // the failing expectation inside intercept; do not insert lines between them.
      it("should do nothing if valid, else throw a TFE with an appropriate error message") {
        all (toLists) shouldNot contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
        val e1 = intercept[TestFailedException] {
          all (toLists) shouldNot contain theSameElementsAs (Set("you", "to"))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
        val offendingLine = thisLineNumber - 3
        e1.failedCodeLineNumber.get should be (offendingLine)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          " at index 0, " + decorateToStringValue(prettifier, Many("to", "you")) + " contained the same elements as " + decorateToStringValue(prettifier, Set("you", "to")) + " (EveryShouldContainTheSameElementsAsSpec.scala:" + offendingLine + ") \\n" +
          "in " + decorateToStringValue(prettifier, toLists)))
      }
      // The matcher must honor an Equality placed in implicit scope.
      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality
        all (toLists) shouldNot contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))
        intercept[TestFailedException] {
          all (toLists) shouldNot contain theSameElementsAs (Set("YOU", "TO"))
        }
      }
      // `decided by` / `after being` must override whatever Equality is in scope.
      it("should use an explicitly provided Equality") {
        (all (toLists) shouldNot contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) shouldNot contain theSameElementsAs (Set("YOU", "TO"))) (decided by upperCaseStringEquality)
        }
        all (toLists) shouldNot contain theSameElementsAs (Set(" YOU ", " TO "))
        intercept[TestFailedException] {
          (all (toLists) shouldNot contain theSameElementsAs (Set(" YOU ", " TO "))) (after being lowerCased and trimmed)
        }
      }
    }
    describe("when used with shouldNot (contain theSameElementsAs (..))") {
      // Final syntax variant: `shouldNot` with the matcher parenthesized.
      // NOTE: `thisLineNumber - 3` in the first test must stay exactly 3 lines below
      // the failing expectation inside intercept; do not insert lines between them.
      it("should do nothing if valid, else throw a TFE with an appropriate error message") {
        all (toLists) shouldNot (contain theSameElementsAs (Set("fee", "fie", "foe", "fum")))
        val e1 = intercept[TestFailedException] {
          all (toLists) shouldNot (contain theSameElementsAs (Set("you", "to")))
        }
        e1.failedCodeFileName.get should be ("EveryShouldContainTheSameElementsAsSpec.scala")
        val offendingLine = thisLineNumber - 3
        e1.failedCodeLineNumber.get should be (offendingLine)
        e1.message should be (Some("'all' inspection failed, because: \\n" +
          " at index 0, " + decorateToStringValue(prettifier, Many("to", "you")) + " contained the same elements as " + decorateToStringValue(prettifier, Set("you", "to")) + " (EveryShouldContainTheSameElementsAsSpec.scala:" + offendingLine + ") \\n" +
          "in " + decorateToStringValue(prettifier, toLists)))
      }
      // The matcher must honor an Equality placed in implicit scope.
      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality
        all (toLists) shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
        intercept[TestFailedException] {
          all (toLists) shouldNot (contain theSameElementsAs (Set("YOU", "TO")))
        }
      }
      // `decided by` / `after being` must override whatever Equality is in scope.
      it("should use an explicitly provided Equality") {
        (all (toLists) shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
        intercept[TestFailedException] {
          (all (toLists) shouldNot (contain theSameElementsAs (Set("YOU", "TO")))) (decided by upperCaseStringEquality)
        }
        all (toLists) shouldNot (contain theSameElementsAs (Set(" YOU ", " TO ")))
        intercept[TestFailedException] {
          (all (toLists) shouldNot (contain theSameElementsAs (Set(" YOU ", " TO ")))) (after being lowerCased and trimmed)
        }
      }
    }
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/EveryShouldContainTheSameElementsAsSpec.scala | Scala | apache-2.0 | 25,424 |
package scala.slick.lifted
import java.util.UUID
import java.sql.{Blob, Clob, Date, Time, Timestamp}
import scala.slick.SlickException
import scala.slick.ast.Type
import scala.slick.driver.BasicProfile
import scala.slick.session.{PositionedParameters, PositionedResult}
/**
* A (usually implicit) TypeMapper object represents a Scala type that can be
* used as a column type in the database. The actual implementation of the
* type is deferred to a TypeMapperDelegate which can depend on the driver.
*
* <p>Custom types with a single implementation can implement both traits in
* one object:</p>
* <code><pre>
* implicit object MyTypeMapper
* extends TypeMapper[MyType] with TypeMapperDelegate[MyType] {
* def apply(p: BasicProfile) = this
* def zero = ...
* def sqlType = ...
* def setValue(v: Long, p: PositionedParameters) = ...
* def setOption(v: Option[Long], p: PositionedParameters) = ...
* def nextValue(r: PositionedResult) = ...
* def updateValue(v: Long, r: PositionedResult) = ...
* }
* </pre></code>
*/
sealed trait TypeMapper[T] extends (BasicProfile => TypeMapperDelegate[T]) with Type { self =>
  /** Wrap this mapper so it can handle `Option[T]` columns; the returned
   *  mapper's delegate maps SQL NULL to `None` (see createOptionTypeMapperDelegate). */
  def createOptionTypeMapper: OptionTypeMapper[T] = new OptionTypeMapper[T](self) {
    def apply(profile: BasicProfile) = self(profile).createOptionTypeMapperDelegate
    // Safe: the evidence guarantees Option[U] == Option[T], hence U == T.
    def getBaseTypeMapper[U](implicit ev: Option[U] =:= Option[T]): TypeMapper[U] = self.asInstanceOf[TypeMapper[U]]
  }
  /** The mapper for the non-Option base type, given evidence that this mapper's
   *  type is an Option of it. Base mappers throw instead (see BaseTypeMapper). */
  def getBaseTypeMapper[U](implicit ev: Option[U] =:= T): TypeMapper[U]
}
object TypeMapper {
  /* Implicit TypeMapper instances for the standard column types. Each one simply
   * looks up the matching TypeMapperDelegate on the driver profile, so the actual
   * JDBC handling stays driver-specific. */
  @inline implicit def typeMapperToOptionTypeMapper[T](implicit t: TypeMapper[T]): OptionTypeMapper[T] = t.createOptionTypeMapper
  implicit object BooleanTypeMapper extends BaseTypeMapper[Boolean] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.booleanTypeMapperDelegate
  }
  implicit object BlobTypeMapper extends BaseTypeMapper[Blob] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.blobTypeMapperDelegate
  }
  implicit object ByteTypeMapper extends BaseTypeMapper[Byte] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.byteTypeMapperDelegate
  }
  implicit object ByteArrayTypeMapper extends BaseTypeMapper[Array[Byte]] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.byteArrayTypeMapperDelegate
  }
  implicit object ClobTypeMapper extends BaseTypeMapper[Clob] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.clobTypeMapperDelegate
  }
  implicit object DateTypeMapper extends BaseTypeMapper[Date] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.dateTypeMapperDelegate
  }
  implicit object DoubleTypeMapper extends BaseTypeMapper[Double] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.doubleTypeMapperDelegate
  }
  implicit object FloatTypeMapper extends BaseTypeMapper[Float] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.floatTypeMapperDelegate
  }
  implicit object IntTypeMapper extends BaseTypeMapper[Int] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.intTypeMapperDelegate
  }
  implicit object LongTypeMapper extends BaseTypeMapper[Long] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.longTypeMapperDelegate
  }
  implicit object ShortTypeMapper extends BaseTypeMapper[Short] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.shortTypeMapperDelegate
  }
  implicit object StringTypeMapper extends BaseTypeMapper[String] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.stringTypeMapperDelegate
  }
  implicit object TimeTypeMapper extends BaseTypeMapper[Time] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.timeTypeMapperDelegate
  }
  implicit object TimestampTypeMapper extends BaseTypeMapper[Timestamp] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.timestampTypeMapperDelegate
  }
  implicit object UnitTypeMapper extends BaseTypeMapper[Unit] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.unitTypeMapperDelegate
  }
  implicit object UUIDTypeMapper extends BaseTypeMapper[UUID] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.uuidTypeMapperDelegate
  }
  implicit object BigDecimalTypeMapper extends BaseTypeMapper[BigDecimal] with NumericTypeMapper {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.bigDecimalTypeMapperDelegate
  }
  // Deliberately NOT implicit: must be referenced explicitly when a NULL column
  // type is required.
  object NullTypeMapper extends BaseTypeMapper[Null] {
    def apply(profile: BasicProfile) = profile.typeMapperDelegates.nullTypeMapperDelegate
  }
}
/** A TypeMapper for a non-Option base type. Asking it for a base mapper is a
 *  programming error and throws. */
trait BaseTypeMapper[T] extends TypeMapper[T] {
  def getBaseTypeMapper[U](implicit ev: Option[U] =:= T) =
    throw new SlickException("A BaseTypeMapper should not have an Option type")
}
/** A TypeMapper for `Option[T]`, keeping a reference to the mapper of the base type. */
abstract class OptionTypeMapper[T](val base: TypeMapper[T]) extends TypeMapper[Option[T]]
/**
 * Adding this marker trait to a TypeMapper makes the type eligible for
 * numeric operators.
 */
trait NumericTypeMapper
trait TypeMapperDelegate[T] { self =>
  /**
   * A zero value for the type. This is used as a default instead of NULL when
   * used as a non-nullable column.
   */
  def zero: T
  /**
   * The constant from java.sql.Types that is used for setting parameters of
   * the type to NULL.
   */
  def sqlType: Int
  /**
   * The default name for the SQL type that is used for column declarations.
   */
  def sqlTypeName: String
  /**
   * Set a parameter of the type.
   */
  def setValue(v: T, p: PositionedParameters): Unit
  /**
   * Set an Option parameter of the type.
   */
  def setOption(v: Option[T], p: PositionedParameters): Unit
  /**
   * Get a result column of the type.
   */
  def nextValue(r: PositionedResult): T
  /**
   * Update a column of the type in a mutable result set.
   */
  def updateValue(v: T, r: PositionedResult): Unit
  /** Read the next column, substituting `d` if the database value was NULL.
   *  Note: nextValue must run first so that wasNull reflects this column. */
  def nextValueOrElse(d: =>T, r: PositionedResult) = { val v = nextValue(r); if(r.rs.wasNull) d else v }
  /** Read the next column as an Option; a NULL value becomes None. */
  def nextOption(r: PositionedResult): Option[T] = { val v = nextValue(r); if(r.rs.wasNull) None else Some(v) }
  /** Update a column from an Option; None writes NULL. */
  def updateOption(v: Option[T], r: PositionedResult): Unit = v match {
    case Some(s) => updateValue(s, r)
    case None => r.updateNull()
  }
  /** Render a value as an inline SQL literal (default: toString, unquoted). */
  def valueToSQLLiteral(value: T): String = value.toString
  /** Whether columns of this type are nullable; base delegates default to false. */
  def nullable = false
  /** Delegate for Option[T]: zero is None, SQL NULL reads as None, None writes NULL. */
  def createOptionTypeMapperDelegate: TypeMapperDelegate[Option[T]] = new TypeMapperDelegate[Option[T]] {
    def zero = None
    def sqlType = self.sqlType
    override def sqlTypeName = self.sqlTypeName
    def setValue(v: Option[T], p: PositionedParameters) = self.setOption(v, p)
    // A nested None collapses to None before being written as NULL.
    def setOption(v: Option[Option[T]], p: PositionedParameters) = self.setOption(v.getOrElse(None), p)
    def nextValue(r: PositionedResult) = self.nextOption(r)
    def updateValue(v: Option[T], r: PositionedResult) = self.updateOption(v, r)
    override def valueToSQLLiteral(value: Option[T]): String = value.map(self.valueToSQLLiteral).getOrElse("null")
    override def nullable = true
  }
}
object TypeMapperDelegate {
  /** Reflectively built map from each java.sql.Types constant to its field name,
   *  e.g. for readable error messages and type name lookups. */
  private[slick] lazy val typeNames = Map() ++
    (for(f <- classOf[java.sql.Types].getFields)
      yield f.get(null).asInstanceOf[Int] -> f.getName)
}
/**
 * A TypeMapper for a custom type T, implemented by converting to and from an
 * already-mapped type U. Subclasses provide the two conversion functions and
 * may override the Option-returning hooks to customize SQL-level details;
 * a None answer falls through to U's delegate.
 */
abstract class MappedTypeMapper[T,U](implicit tm: TypeMapper[U]) extends TypeMapper[T] { self =>
  /** Convert a value to its underlying representation (used when writing). */
  def map(t: T): U
  /** Convert an underlying value back to T (used when reading). */
  def comap(u: U): T
  def sqlType: Option[Int] = None
  def sqlTypeName: Option[String] = None
  def valueToSQLLiteral(value: T): Option[String] = None
  def nullable: Option[Boolean] = None
  def apply(profile: BasicProfile): TypeMapperDelegate[T] = new TypeMapperDelegate[T] {
    // Underlying delegate for U, resolved against the concrete driver profile.
    val tmd = tm(profile)
    def zero = comap(tmd.zero)
    def sqlType = self.sqlType.getOrElse(tmd.sqlType)
    override def sqlTypeName = self.sqlTypeName.getOrElse(tmd.sqlTypeName)
    def setValue(v: T, p: PositionedParameters) = tmd.setValue(map(v), p)
    def setOption(v: Option[T], p: PositionedParameters) = tmd.setOption(v.map(map _), p)
    def nextValue(r: PositionedResult) = comap(tmd.nextValue(r))
    // Overridden so wasNull is checked on U's raw value BEFORE comap runs.
    override def nextValueOrElse(d: =>T, r: PositionedResult) = { val v = tmd.nextValue(r); if(r.rs.wasNull) d else comap(v) }
    override def nextOption(r: PositionedResult): Option[T] = { val v = tmd.nextValue(r); if(r.rs.wasNull) None else Some(comap(v)) }
    def updateValue(v: T, r: PositionedResult) = tmd.updateValue(map(v), r)
    override def valueToSQLLiteral(value: T) = self.valueToSQLLiteral(value).getOrElse(tmd.valueToSQLLiteral(map(value)))
    override def nullable = self.nullable.getOrElse(tmd.nullable)
  }
}
object MappedTypeMapper {
  /** Create a base (non-Option) mapper for T from a pair of conversion
   *  functions to/from an already-mapped type U. */
  def base[T, U](tmap: T => U, tcomap: U => T)(implicit tm: TypeMapper[U]): BaseTypeMapper[T] =
    new MappedTypeMapper[T, U] with BaseTypeMapper[T] {
      def map(t: T) = tmap(t)
      def comap(u: U) = tcomap(u)
    }
}
| zefonseca/slick-1.0.0-scala.2.11.1 | src/main/scala/scala/slick/lifted/TypeMapper.scala | Scala | bsd-2-clause | 8,917 |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.data
package kinds
import com.google.gdata.data.util.DateTime
import com.google.xml.combinators.{Picklers, ~}
/**
* A reminder, usually found inside a 'when' element.
*
* @author Iulian Dragos
* @see http://code.google.com/apis/gdata/elements.html#gdReminder
*/
// All fields are optional and default to None, matching the optional XML
// attributes of the gd:reminder element.
case class Reminder(
    /** Absolute time at which the reminder should be issued. */
    absoluteTime: Option[DateTime] = None,
    /** The notification method the reminder should use, like 'alert', 'email, 'sms'. */
    method: Option[String] = None,
    /**
     * Period of time before gd:when/@startTime when a reminder should be issued.
     * If the parent entity's target time is a date rather than a specific time,
     * then these attributes are relative to midnight (00:00) on that date
     */
    days: Option[Int] = None,
    /**
     * Period of time before gd:when/@startTime when a reminder should be issued.
     * If the parent entity's target time is a date rather than a specific time,
     * then these attributes are relative to midnight (00:00) on that date
     */
    hours: Option[Int] = None,
    /**
     * Period of time before gd:when/@startTime when a reminder should be issued.
     * If the parent entity's target time is a date rather than a specific time,
     * then these attributes are relative to midnight (00:00) on that date
     */
    minutes: Option[Int] = None)
object Reminder {
  import Picklers._
  /** A Pickler for reminder objects. */
  lazy val pickler: Pickler[Reminder] = {
    // All five attributes are optional; the element lives in the gd namespace.
    val ctents = elem("reminder", opt(attr("absoluteTime", dateTime))
        ~ opt(attr("method", text)) ~ opt(attr("days", intVal))
        ~ opt(attr("hours", intVal)) ~ opt(attr("minutes", intVal)))(Uris.gdNs)
    wrap (ctents) (Reminder.apply) (fromReminder)
  }
  // Deconstruct a Reminder into the ~-nested shape the pickler expects
  // (same field order as the element definition above).
  private def fromReminder(r: Reminder) =
    new ~(r.absoluteTime, r.method) ~ r.days ~ r.hours ~ r.minutes
}
| mjanson/gdata-scala-client | src/com/google/gdata/data/kinds/Reminder.scala | Scala | apache-2.0 | 2,546 |
package jvm
import io.github.hamsters.jvm.Retry
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FlatSpec, Matchers}
import scala.concurrent.Await
import scala.concurrent.duration.Duration
class RetrySpec extends FlatSpec with Matchers with MockFactory {
  // Mocked error-logging callback handed to Retry; expectations are set per test.
  val logErrorsFunctionMock = mockFunction[String, Unit]
  import scala.concurrent.ExecutionContext.Implicits.global
  "RetryJvm" should "run function several times if failed with wait" in {
    // After exhausting all retries, Retry is expected to report the final
    // failure through the log callback with exactly this message.
    logErrorsFunctionMock expects "Tried 3 times, still not enough : failed"
    // Args presumably: max attempts = 3, wait = 3000 ms — confirm against
    // Retry.withWait's signature.
    val result = Retry.withWait(3, 3000, logErrorsFunctionMock) {
      throw new Exception("failed")
    }
    Await.result(result.failed, Duration.Inf) shouldBe a[Exception]
  }
  "RetryJvm" should "return result if no error with wait" in {
    // A successful body should complete on the first try; no log call expected.
    val result = Retry.withWait(3, 3000, logErrorsFunctionMock) {
      1 + 1
    }
    Await.result(result, Duration.Inf) should be(2)
  }
}
} | dgouyette/hamsters | jvm/src/test/scala/jvm/RetrySpec.scala | Scala | apache-2.0 | 933 |
package slinky.core
import org.scalajs.dom
import slinky.core.facade.Profiler
import slinky.web.ReactDOM
import slinky.web.html.div
import org.scalatest.funsuite.AnyFunSuite
class ProfilerTest extends AnyFunSuite {
  test("Can render a Profiler component with children") {
    // Mount a Profiler (with a no-op onRender callback) into a detached DOM
    // node and check that its children are rendered through unchanged.
    val container = dom.document.createElement("div")
    val tree = Profiler(id = "profiler", onRender = (_, _, _, _, _, _, _) => {})(
      div("hello!")
    )
    ReactDOM.render(tree, container)
    assert(container.innerHTML == "<div>hello!</div>")
  }
}
| shadaj/slinky | tests/src/test/scala/slinky/core/ProfilerTest.scala | Scala | mit | 530 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend
import akka.actor.ActorSystem
import akka.event.{ Logging, LoggingAdapter }
import akka.http.scaladsl._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.stream.{ ActorMaterializer, Materializer }
import com.github.dnvriend.SecurityService.Crypto
import org.apache.shiro.codec.{ Base64, CodecSupport }
import org.apache.shiro.crypto.AesCipherService
import org.apache.shiro.util.ByteSource
import org.mindrot.jbcrypt.BCrypt
import spray.json.DefaultJsonProtocol
import scala.concurrent.{ ExecutionContext, Future }
object SecurityService {
  // Supported crypto algorithms; the string value tags each response payload.
  object Crypto extends Enumeration {
    type Crypto = Value
    val AES = Value("AES")
    val BCRYPT = Value("BCRYPT")
    val UNKNOWN = Value("UNKNOWN")
  }
  // Factory for the default implementation (Shiro AES + jBCrypt based).
  def apply()(implicit ec: ExecutionContext, log: LoggingAdapter): SecurityService =
    new SecurityServiceImpl
}
trait SecurityService {
  /** Decrypts a Base64-encoded AES ciphertext back to plain text. */
  def decryptAes(base64Encrypted: String): Future[DecryptResponse]
  /** Encrypts plain text with AES; the response carries Base64 ciphertext. */
  def encryptAes(plainText: String): Future[EncryptResponse]
  /** One-way bcrypt hash of the given plain text. */
  def hashBcrypt(plainText: String): Future[EncryptResponse]
  /** Checks a candidate against a bcrypt hash; response is "Valid"/"Invalid". */
  def validateBcrypt(candidate: String, hashed: String): Future[DecryptResponse]
}
class SecurityServiceImpl(implicit ec: ExecutionContext, log: LoggingAdapter) extends SecurityService {
  /** AES settings: the Shiro cipher service plus the symmetric pass phrase. */
  // NOTE(review): the pass phrase is hardcoded in source; it should be loaded
  // from secure configuration rather than checked into the repository.
  object AES {
    val passPhrase = "j68KkRjq21ykRGAQ"
    val cipher = new AesCipherService
  }
  /** AES-encrypts `plainText` and returns the ciphertext Base64-encoded. */
  override def encryptAes(plainText: String): Future[EncryptResponse] = Future {
    val encoded = AES.cipher.encrypt(plainText.getBytes, AES.passPhrase.getBytes).toBase64
    log.debug(s"[EncryptAES]: plainText: $plainText, result: $encoded")
    EncryptResponse(Crypto.AES.toString, encoded)
  }
  /** Base64-decodes the input, AES-decrypts it and returns the plain text. */
  override def decryptAes(base64Encrypted: String): Future[DecryptResponse] = Future {
    val rawBytes = Base64.decode(ByteSource.Util.bytes(base64Encrypted).getBytes)
    val decrypted = AES.cipher.decrypt(rawBytes, AES.passPhrase.getBytes)
    log.debug(s"[DecryptAES]: base64Encrypted: $base64Encrypted")
    DecryptResponse(Crypto.AES.toString, CodecSupport.toString(decrypted.getBytes))
  }
  /** Hashes `plainText` with bcrypt (work factor 15, i.e. 2^15 rounds — slow by design). */
  override def hashBcrypt(plainText: String): Future[EncryptResponse] = Future {
    val hash = BCrypt.hashpw(plainText, BCrypt.gensalt(15))
    log.debug(s"[HashBCrypt]: plainText: $plainText")
    EncryptResponse(Crypto.BCRYPT.toString, hash)
  }
  /** Verifies `candidate` against a bcrypt `hashed` value; answers "Valid" or "Invalid". */
  override def validateBcrypt(candidate: String, hashed: String): Future[DecryptResponse] = Future {
    val verdict = if (BCrypt.checkpw(candidate, hashed)) "Valid" else "Invalid"
    log.debug(s"[ValidateBCrypt]: candidate: $candidate, hashed: $hashed, result: $verdict")
    DecryptResponse(Crypto.BCRYPT.toString, verdict)
  }
}
/** Result of an encrypt/hash call; `crypto` names the algorithm used. */
case class EncryptResponse(crypto: String, response: String)
/** Result of a decrypt/validate call; `crypto` names the algorithm used. */
case class DecryptResponse(crypto: String, response: String)
object Main extends App with DefaultJsonProtocol with SprayJsonSupport {
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val log: LoggingAdapter = Logging(system, this.getClass)
  // the security service encrypts and decrypts using AES
  val securityService = SecurityService()
  // spray-json formats so the response case classes marshal to JSON.
  implicit val encryptResponseFormat = jsonFormat2(EncryptResponse)
  implicit val decryptResponseFormat = jsonFormat2(DecryptResponse)
  // Route table:
  //   POST /crypto/aes/encrypt?text=...                 -> EncryptResponse (AES)
  //   POST /crypto/aes/decrypt?encrypted=...            -> DecryptResponse (AES)
  //   POST /crypto/bcrypt/hash?text=...                 -> EncryptResponse (bcrypt)
  //   POST /crypto/bcrypt/validate?candidate=...&hash=... -> DecryptResponse (bcrypt)
  val routes: Route =
    pathPrefix("crypto") {
      pathPrefix("aes") {
        path("encrypt") {
          post {
            parameter('text.as[String]) { text ⇒
              complete(securityService.encryptAes(text))
            }
          }
        } ~
          path("decrypt") {
            post {
              parameter('encrypted.as[String]) { encrypted ⇒
                complete(securityService.decryptAes(encrypted))
              }
            }
          }
      } ~
        pathPrefix("bcrypt") {
          path("hash") {
            post {
              parameter('text.as[String]) { text ⇒
                complete(securityService.hashBcrypt(text))
              }
            }
          } ~
            path("validate") {
              post {
                parameter('candidate.as[String], 'hash.as[String]) { (candidate: String, hash: String) ⇒
                  complete(securityService.validateBcrypt(candidate, hash))
                }
              }
            }
        }
    }
  // launch the akka-http service
  Http().bindAndHandle(routes, "0.0.0.0", 8080)
}
| dnvriend/akka-http-crypto | src/main/scala/com/github/dnvriend/Main.scala | Scala | apache-2.0 | 5,342 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkFunSuite
import org.apache.spark.metrics.source.CodegenMetrics
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.objects.{CreateExternalRow, GetExternalRowField, ValidateExternalType}
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.ThreadUtils
/**
* Additional tests for code generation.
*/
class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
  test("multithreaded eval") {
    import scala.concurrent._
    import ExecutionContext.Implicits.global
    import scala.concurrent.duration._
    // Run 20 concurrent code-generation rounds to make sure the generators
    // can be invoked from multiple threads at the same time.
    val futures = (1 to 20).map { _ =>
      Future {
        GeneratePredicate.generate(EqualTo(Literal(1), Literal(1)))
        GenerateMutableProjection.generate(EqualTo(Literal(1), Literal(1)) :: Nil)
        GenerateOrdering.generate(Add(Literal(1), Literal(1)).asc :: Nil)
      }
    }
    futures.foreach(ThreadUtils.awaitResult(_, 10.seconds))
  }
  test("metrics are recorded on compile") {
    // Snapshot the codegen metric counters BEFORE triggering a compilation,
    // then verify each counter advanced after one generate() call.
    val startCount1 = CodegenMetrics.METRIC_COMPILATION_TIME.getCount()
    val startCount2 = CodegenMetrics.METRIC_SOURCE_CODE_SIZE.getCount()
    val startCount3 = CodegenMetrics.METRIC_GENERATED_CLASS_BYTECODE_SIZE.getCount()
    val startCount4 = CodegenMetrics.METRIC_GENERATED_METHOD_BYTECODE_SIZE.getCount()
    GenerateOrdering.generate(Add(Literal(123), Literal(1)).asc :: Nil)
    assert(CodegenMetrics.METRIC_COMPILATION_TIME.getCount() == startCount1 + 1)
    assert(CodegenMetrics.METRIC_SOURCE_CODE_SIZE.getCount() == startCount2 + 1)
    assert(CodegenMetrics.METRIC_GENERATED_CLASS_BYTECODE_SIZE.getCount() > startCount3)
    assert(CodegenMetrics.METRIC_GENERATED_METHOD_BYTECODE_SIZE.getCount() > startCount4)
  }
  test("SPARK-8443: split wide projections into blocks due to JVM code size limit") {
    // 5000 expressions forces the generated projection past the 64KB JVM
    // method limit, so codegen must split it into multiple methods.
    val length = 5000
    val expressions = List.fill(length)(EqualTo(Literal(1), Literal(1)))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericMutableRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq.fill(length)(true)
    if (!checkResult(actual, expected)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  test("SPARK-13242: case-when expression with large number of branches (or cases)") {
    // Build a CASE WHEN with 50 branches, each an OR of 20 equality clauses,
    // and check the generated code still evaluates the matching branch.
    val cases = 50
    val clauses = 20
    // Generate an individual case
    def generateCase(n: Int): (Expression, Expression) = {
      val condition = (1 to clauses)
        .map(c => EqualTo(BoundReference(0, StringType, false), Literal(s"$c:$n")))
        .reduceLeft[Expression]((l, r) => Or(l, r))
      (condition, Literal(n))
    }
    val expression = CaseWhen((1 to cases).map(generateCase(_)))
    val plan = GenerateMutableProjection.generate(Seq(expression))
    // Input "20:50" matches the last clause of the last case, so the result
    // must be the final case's value.
    val input = new GenericMutableRow(Array[Any](UTF8String.fromString(s"${clauses}:${cases}")))
    val actual = plan(input).toSeq(Seq(expression.dataType))
    assert(actual(0) == cases)
  }
  // Wide-value creation: each test below builds a 5000-element value so the
  // generated constructor code exceeds the JVM method size limit and must be
  // split into blocks; the result must still be correct.
  test("SPARK-14793: split wide array creation into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq(CreateArray(List.fill(length)(EqualTo(Literal(1), Literal(1)))))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericMutableRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq(new GenericArrayData(Seq.fill(length)(true)))
    if (!checkResult(actual, expected)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  test("SPARK-14793: split wide map creation into blocks due to JVM code size limit") {
    val length = 5000
    // Keys are the indices 0..length-1, values are the boolean expressions.
    val expressions = Seq(CreateMap(
      List.fill(length)(EqualTo(Literal(1), Literal(1))).zipWithIndex.flatMap {
        case (expr, i) => Seq(Literal(i), expr)
      }))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericMutableRow(length)).toSeq(expressions.map(_.dataType)).map {
      case m: ArrayBasedMapData => ArrayBasedMapData.toScalaMap(m)
    }
    val expected = (0 until length).map((_, true)).toMap :: Nil
    if (!checkResult(actual, expected)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  test("SPARK-14793: split wide struct creation into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq(CreateStruct(List.fill(length)(EqualTo(Literal(1), Literal(1)))))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericMutableRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq(InternalRow(Seq.fill(length)(true): _*))
    if (!checkResult(actual, expected)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  test("SPARK-14793: split wide named struct creation into blocks due to JVM code size limit") {
    val length = 5000
    // Field names are derived from each expression's toString.
    val expressions = Seq(CreateNamedStruct(
      List.fill(length)(EqualTo(Literal(1), Literal(1))).flatMap {
        expr => Seq(Literal(expr.toString), expr)
      }))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericMutableRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq(InternalRow(Seq.fill(length)(true): _*))
    if (!checkResult(actual, expected)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  test("SPARK-14224: split wide external row creation into blocks due to JVM code size limit") {
    val length = 5000
    val schema = StructType(Seq.fill(length)(StructField("int", IntegerType)))
    val expressions = Seq(CreateExternalRow(Seq.fill(length)(Literal(1)), schema))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericMutableRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq(Row.fromSeq(Seq.fill(length)(1)))
    if (!checkResult(actual, expected)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  test("test generated safe and unsafe projection") {
    // Round-trips a nested row through UnsafeProjection and back via
    // FromUnsafeProjection, then mutates the unsafe row to prove the safe
    // copy does not alias the unsafe buffer.
    val schema = new StructType(Array(
      StructField("a", StringType, true),
      StructField("b", IntegerType, true),
      StructField("c", new StructType(Array(
        StructField("aa", StringType, true),
        StructField("bb", IntegerType, true)
      )), true),
      StructField("d", new StructType(Array(
        StructField("a", new StructType(Array(
          StructField("b", StringType, true),
          StructField("", IntegerType, true)
        )), true)
      )), true)
    ))
    val row = Row("a", 1, Row("b", 2), Row(Row("c", 3)))
    val lit = Literal.create(row, schema)
    val internalRow = lit.value.asInstanceOf[InternalRow]
    val unsafeProj = UnsafeProjection.create(schema)
    val unsafeRow: UnsafeRow = unsafeProj(internalRow)
    assert(unsafeRow.getUTF8String(0) === UTF8String.fromString("a"))
    assert(unsafeRow.getInt(1) === 1)
    assert(unsafeRow.getStruct(2, 2).getUTF8String(0) === UTF8String.fromString("b"))
    assert(unsafeRow.getStruct(2, 2).getInt(1) === 2)
    assert(unsafeRow.getStruct(3, 1).getStruct(0, 2).getUTF8String(0) ===
      UTF8String.fromString("c"))
    assert(unsafeRow.getStruct(3, 1).getStruct(0, 2).getInt(1) === 3)
    val fromUnsafe = FromUnsafeProjection(schema)
    val internalRow2 = fromUnsafe(unsafeRow)
    assert(internalRow === internalRow2)
    // update unsafeRow should not affect internalRow2
    unsafeRow.setInt(1, 10)
    unsafeRow.getStruct(2, 2).setInt(1, 10)
    unsafeRow.getStruct(3, 1).getStruct(0, 2).setInt(1, 4)
    assert(internalRow === internalRow2)
  }
  // Data values that look like comment delimiters or unicode escapes must not
  // corrupt the generated Java source.
  test("*/ in the data") {
    // When */ appears in a comment block (i.e. in /**/), code gen will break.
    // So, in Expression and CodegenFallback, we escape */ to \\*\\/.
    checkEvaluation(
      EqualTo(BoundReference(0, StringType, false), Literal.create("*/", StringType)),
      true,
      InternalRow(UTF8String.fromString("*/")))
  }
  test("\\\\u in the data") {
    // When \\ u appears in a comment block (i.e. in /**/), code gen will break.
    // So, in Expression and CodegenFallback, we escape \\ u to \\\\u.
    checkEvaluation(
      EqualTo(BoundReference(0, StringType, false), Literal.create("\\\\u", StringType)),
      true,
      InternalRow(UTF8String.fromString("\\\\u")))
  }
  test("check compilation error doesn't occur caused by specific literal") {
    // Each generate() call below must compile successfully; throwing would
    // fail the test. The literals probe comment-terminator and unicode-escape
    // sequences embedded in string data.
    // The end of comment (*/) should be escaped.
    GenerateUnsafeProjection.generate(
      Literal.create("*/Compilation error occurs/*", StringType) :: Nil)
    // `\\u002A` is `*` and `\\u002F` is `/`
    // so if the end of comment consists of those characters in queries, we need to escape them.
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u002A/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\\\\\u002A/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u002a/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\\\\\u002a/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\\\u002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\\\\\\\u002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\\\002fCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\\\\\\\002fCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\002A\\\\002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\\\\\002A\\\\002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\002A\\\\\\\\002FCompilation error occurs/*", StringType) :: Nil)
    // \\ u002X is an invalid unicode literal so it should be escaped.
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u002X/Compilation error occurs", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\\\\\u002X/Compilation error occurs", StringType) :: Nil)
    // \\ u001 is an invalid unicode literal so it should be escaped.
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u001/Compilation error occurs", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\\\\\u001/Compilation error occurs", StringType) :: Nil)
  }
test("SPARK-17160: field names are properly escaped by GetExternalRowField") {
  // A quote inside the field name must be escaped when embedded in the
  // generated Java source; the test passes iff codegen compiles cleanly.
  val inputObject = BoundReference(0, ObjectType(classOf[Row]), nullable = true)
  val fieldAccess = GetExternalRowField(inputObject, index = 0, fieldName = "\\"quote")
  GenerateUnsafeProjection.generate(ValidateExternalType(fieldAccess, IntegerType) :: Nil)
}
test("SPARK-17160: field names are properly escaped by AssertTrue") {
  // The quote in the literal must be escaped when interpolated into generated
  // Java source; success means GenerateUnsafeProjection compiled without error.
  GenerateUnsafeProjection.generate(AssertTrue(Cast(Literal("\\""), BooleanType)) :: Nil)
}
}
| gioenn/xSpark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala | Scala | apache-2.0 | 12,484 |
package org.apache.spark.ml.feature
import org.apache.spark.ml.feature.TestHelper._
import org.apache.spark.mllib.feature.{FewValuesThresholdFinder, InitialThresholdsFinder}
import org.apache.spark.sql.SQLContext
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FunSuite}
/**
* Test the finding of thresholds when not too many unique values.
* This test suite was created in order to track down a source of non-determinism in this algorithm.
*
* @author Barry Becker
*/
@RunWith(classOf[JUnitRunner])
class InitialThresholdsFinderSuite extends FunSuite with BeforeAndAfterAll {
var sqlContext: SQLContext = _
val finder = new InitialThresholdsFinder()
// Create a fresh SQLContext over the shared test SparkContext (SPARK_CTX from
// TestHelper) before any test in this suite runs.
override def beforeAll(): Unit = {
  sqlContext = new SQLContext(SPARK_CTX)
}
test("Find initial thresholds for a single feature") {
  // One feature (index 0) with four distinct values; each value is paired with
  // per-label frequency counts for the 3 class labels.
  val data: Array[((Int, Float), Array[Long])] = Array(
    ((0, 4.0f), Array(1L, 2L, 3L)),
    ((0, 4.5f), Array(3L, 20L, 12L)),
    ((0, 4.6f), Array(8L, 18L, 2L)),
    ((0, 5.0f), Array(5L, 4L, 20L))
  )
  val rdd = sqlContext.sparkContext.parallelize(data)
  val thresholds = finder.findInitialThresholds(rdd, 1, nLabels = 3, maxByPart = 100)
  // Expected cut points: the midpoint between each pair of adjacent values,
  // plus the maximum value itself.
  assertResult("4.25, 4.55, 4.8, 5.0") {
    thresholds.collect().map(_._1._2).mkString(", ")
  }
}
test("Find initial thresholds for four features") {
  // Four features (indices 0-3), each with four candidate values and 3-label
  // frequency counts. Verifies cut points are computed per feature.
  val feature: Array[((Int, Float), Array[Long])] = Array(
    ((0, 100.0f), Array(1L, 2L, 3L)),
    ((0, 150.0f), Array(3L, 20L, 12L)),
    ((0, 300.6f), Array(8L, 18L, 2L)),
    ((0, 400.0f), Array(5L, 4L, 20L)),
    ((1, 4.0f), Array(0L, 0L, 3L)),
    ((1, 4.5f), Array(0L, 0L, 4L)),
    ((1, 4.6f), Array(8L, 18L, 0L)),
    ((1, 5.0f), Array(5L, 4L, 20L)),
    ((2, 4.0f), Array(1L, 2L, 3L)),
    ((2, 4.5f), Array(0L, 20L, 0L)),
    ((2, 4.6f), Array(0L, 8L, 0L)),
    ((2, 5.0f), Array(5L, 0L, 20L)),
    ((3, 4.0f), Array(1L, 2L, 3L)),
    ((3, 4.5f), Array(0L, 8L, 0L)),
    ((3, 4.6f), Array(0L, 18L, 0L)),
    ((3, 5.0f), Array(0L, 28L, 0L))
  )
  val points = sqlContext.sparkContext.parallelize(feature)
  val result = finder.findInitialThresholds(points, nLabels = 3, maxByPart = 100, nFeatures = 4)
  // Expected pairs are (featureIndex, cutPoint). Some features yield fewer cut
  // points than values — presumably adjacent values with indistinguishable label
  // distributions produce no boundary; confirm against InitialThresholdsFinder.
  assertResult("(0,125.0), (0,225.3), (0,350.3), (0,400.0), (1,4.55), (1,4.8), (1,5.0), (2,4.25), (2,4.8), (2,5.0), (3,4.25), (3,5.0)") {
    result.collect().map(_._1).mkString(", ")
  }
}
// In this case each feature has more values than fit in a single partition.
test("Find initial thresholds when more values than maxByPart") {
  val thresholds =
    finder.findInitialThresholds(createPointsFor2Features, nLabels = 3, maxByPart = 5, nFeatures = 2)
  // Even when a feature is split across several partitions (maxByPart = 5),
  // the resulting (featureIndex, cutPoint) pairs must be this exact sequence.
  assertResult("(0,125.0), (0,225.0), (0,450.0), (0,700.0), (0,800.0), (0,1025.0), (0,1150.0), (0,1200.0), " +
    "(1,3.75), (1,4.55), (1,4.8), (1,5.5), (1,6.0), (1,8.05), (1,8.8), (1,9.5), (1,10.0), (1,12.05), (1,12.6)") {
    thresholds.collect().map(_._1).mkString(", ")
  }
}
test("test createFeatureInfList for 2 features") {
  val points = createPointsFor2Features
  // Each tuple in the result list is
  // (featureIdx, numUniqueValues, sumValsBeforeFirst, partitionSize, numPartitionsForFeature, sumPreviousPartitions)
  def infoFor(maxByPart: Int): String =
    finder.createFeatureInfoList(points, maxByPart, nFeatures = 2).mkString(", ")
  // Shrinking maxByPart splits each feature across progressively more partitions.
  assertResult("(0,8,0,8,1,0), (1,11,8,11,1,1)")(infoFor(100))
  assertResult("(0,8,0,8,1,0), (1,11,8,6,2,1)")(infoFor(10))
  assertResult("(0,8,0,4,2,0), (1,11,8,6,2,2)")(infoFor(7))
  assertResult("(0,8,0,4,2,0), (1,11,8,4,3,2)")(infoFor(5))
  assertResult("(0,8,0,2,4,0), (1,11,8,2,6,4)")(infoFor(2))
}
// Shared fixture: an RDD of ((featureIndex, value), labelCounts) with two
// features — index 0 has 8 distinct values, index 1 has 11 — each value paired
// with frequency counts for the 3 class labels.
private def createPointsFor2Features = {
  val features: Array[((Int, Float), Array[Long])] = Array(
    ((0, 100.0f), Array(1L, 2L, 3L)),
    ((0, 150.0f), Array(3L, 20L, 12L)),
    ((0, 300.0f), Array(3L, 20L, 12L)),
    ((0, 600.0f), Array(5L, 4L, 20L)),
    ((0, 800.0f), Array(1L, 2L, 3L)),
    ((0, 950.0f), Array(1L, 2L, 3L)),
    ((0, 1100.0f), Array(8L, 18L, 2L)),
    ((0, 1200.0f), Array(5L, 4L, 20L)),
    ((1, 3.0f), Array(1L, 2L, 3L)),
    ((1, 4.5f), Array(3L, 20L, 12L)),
    ((1, 4.6f), Array(8L, 18L, 2L)),
    ((1, 5.0f), Array(5L, 4L, 20L)),
    ((1, 6.0f), Array(1L, 2L, 3L)),
    ((1, 7.5f), Array(3L, 20L, 12L)),
    ((1, 8.6f), Array(8L, 18L, 2L)),
    ((1, 9.0f), Array(5L, 4L, 20L)),
    ((1, 10.0f), Array(1L, 2L, 3L)),
    ((1, 11.5f), Array(3L, 20L, 12L)),
    ((1, 12.6f), Array(8L, 18L, 2L))
  )
  sqlContext.sparkContext.parallelize(features)
}
} | sramirez/spark-MDLP-discretization | src/test/scala/org/apache/spark/ml/feature/InitialThresholdsFinderSuite.scala | Scala | apache-2.0 | 4,873 |
/*
* Copyright (C) 2015 47 Degrees, LLC http://47deg.com hello@47deg.com
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fortysevendeg.scala.android.ui.components
import android.graphics.{Bitmap, Canvas, Paint, PorterDuff, PorterDuffXfermode, Rect}
import com.squareup.picasso.Transformation
/**
 * Picasso [[Transformation]] that crops a source bitmap into a circle with a
 * diameter of `size` pixels, on a transparent `size` x `size` canvas.
 *
 * @param size diameter (and output bitmap edge length) in pixels
 */
class CircularTransformation(size: Int) extends Transformation {

  // Half the diameter, rounded up. The original wrote Math.ceil(size / 2),
  // but `size / 2` is integer division, so ceil was a no-op and odd sizes
  // lost a pixel; dividing by 2.0 makes the intended rounding effective.
  val radius = Math.ceil(size / 2.0).toInt

  /**
   * Returns a new circular bitmap and recycles `source`, as the Picasso
   * Transformation contract requires when a different bitmap is returned.
   */
  def transform(source: Bitmap): Bitmap = {
    val output: Bitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888)
    val canvas: Canvas = new Canvas(output)
    val color: Int = 0xff424242
    val paint: Paint = new Paint
    val rect: Rect = new Rect(0, 0, source.getWidth, source.getHeight)
    val target: Rect = new Rect(0, 0, size, size)
    paint.setAntiAlias(true)
    canvas.drawARGB(0, 0, 0, 0)
    paint.setColor(color)
    // Draw the circular mask first, then blend the source through it with
    // SRC_IN so only pixels inside the circle survive.
    canvas.drawCircle(radius, radius, radius, paint)
    paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN))
    canvas.drawBitmap(source, rect, target, paint)
    source.recycle()
    output
  }

  // Cache key: must uniquely describe this transformation's configuration.
  def key: String = s"radius-$size"
}
package de.maci.beanmodel.generator.testhelper
import javax.lang.model.element.{ ElementKind, PackageElement, Name }
import de.maci.beanmodel.generator.testhelper.PackageElementMocker._
import org.mockito.Mockito.when
/**
* @author Daniel Götten <daniel.goetten@googlemail.com>
* @since 30.04.15
*/
final class NameMocker private () extends Mocker[Name] {

  // The string the mocked Name will render via toString; empty when unset.
  private var _content: Option[String] = None

  /** Sets the content the mocked [[Name]] reports; returns this mocker for chaining. */
  def withContent(content: String): this.type = { _content = Some(content); this }

  /** Builds a Mockito mock whose `toString` yields the configured content, or "" if none was set. */
  def build: Name = {
    val instance = mock[Name]
    // getOrElse replaces the original `orElse(Some("")).get`, which built an
    // intermediate Option only to immediately unwrap it.
    when(instance.toString) thenReturn _content.getOrElse("")
    instance
  }
}
// `final` dropped: Scala objects are implicitly final, and `final object`
// triggers a lint warning on recent compilers.
object NameMocker {
  /** Factory entry point, e.g. `mockName.withContent("pkg").build`. */
  def mockName: NameMocker = new NameMocker()
}
| dangoe/maci-beanmodel | maci-beanmodel-gen/src/test/scala/de/maci/beanmodel/generator/testhelper/NameMocker.scala | Scala | apache-2.0 | 700 |
package splain.plugin
import splain.SpecBase
class ShapelessSpec extends SpecBase.File {

  // Prelude source prepended to every checked fixture; it declares the helper
  // types (***, >:<, C, D) that the fixture files reference. This is a runtime
  // string — its exact content matters to the compiled fixtures.
  override lazy val predefCode: String =
    """
      |object types
      |{
      | class ***[A, B]
      | class >:<[A, B]
      | class C
      | trait D
      |}
      |import types._
      |""".stripMargin.trim

  // in all error messages from toolbox, line number has to -8 to get the real line number
  // Each check(...) registers a fixture-file compilation whose error output is
  // compared against an expected-error snapshot via checkError().
  check("shapeless Record", "record") {
    checkError()
  }

  check("witness value types", "witness-value") {
    checkError()
  }

  check("lazyImplicit") {
    checkError()
  }
}
| tek/splain | core/src/test/scala/splain/plugin/ShapelessSpec.scala | Scala | mit | 588 |
package com.twitter.inject
import com.google.inject.PrivateModule
import net.codingwell.scalaguice.ScalaPrivateModule
/**
* A module whose configuration information is hidden from its environment by default. Only bindings
* that are explicitly exposed will be available to other modules and to the users of the injector.
* This module may expose the bindings it creates and the bindings of the modules it installs.
*
* @note Calling [[https://static.javadoc.io/com.google.inject/guice/4.1.0/com/google/inject/PrivateModule.html#install-com.google.inject.Module- com.google.inject.PrivateModule#install]]
* in the [[configure()]] method is not supported. Please set [[TwitterBaseModule.modules]]
* (or [[TwitterBaseModule.javaModules]]) to a non-empty list instead.
* @see [[https://static.javadoc.io/com.google.inject/guice/4.1.0/com/google/inject/PrivateModule.html com.google.inject.PrivateModule]]
* @see [[https://twitter.github.io/finatra/user-guide/getting-started/modules.html Writing Modules in Finatra]]
*/
abstract class TwitterPrivateModule
  extends PrivateModule
  with TwitterBaseModule
  with ScalaPrivateModule {

  /* Overrides */

  /**
   * Default no-op binder configuration.
   *
   * Provided so that extensions which rely solely on provider-annotated
   * methods need not implement an empty [[TwitterModule.configure()]]
   * themselves; subclasses may override it to configure bindings via the
   * methods a [[https://google.github.io/guice/api-docs/4.1/javadoc/com/google/inject/Binder.html com.google.inject.Binder]]
   * exposes.
   *
   * @see [[com.google.inject.PrivateModule#configure]]
   * @see [[https://static.javadoc.io/com.google.inject/guice/4.1.0/com/google/inject/PrivateModule.html#configure-- com.google.inject.PrivateModule#configure()]]
   */
  override protected def configure(): Unit = ()
}
| twitter/finatra | inject/inject-core/src/main/scala/com/twitter/inject/TwitterPrivateModule.scala | Scala | apache-2.0 | 1,805 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
// CT computations box CP26: optional "Bad debts" amount entered by the user.
// NOTE(review): the box name below carries a leading space (" Bad debts");
// this looks unintentional — confirm against the form definition before changing.
case class CP26(value: Option[Int]) extends CtBoxIdentifier(name = " Bad debts") with CtOptionalInteger with Input
  with ValidatableBox[ComputationsBoxRetriever] {

  // If a value is present it must be zero or a positive whole number;
  // anything else produces a validation error.
  override def validate(boxRetriever: ComputationsBoxRetriever): Set[CtValidation] =
    validateZeroOrPositiveInteger(this)
}

object CP26 {

  /** Convenience constructor wrapping a known amount in `Some`. */
  def apply(int: Int): CP26 = new CP26(Some(int))
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP26.scala | Scala | apache-2.0 | 1,104 |
package cortex.controller
/**
* Created by jasonflax on 2/19/16.
*/
/** A payload produced by a controller, parameterized on the response type. */
trait Message[A] {
  // The value delivered back to the client.
  val response: A
}

/**
 * An HTTP reply: optional raw body bytes, plus an optional cookie to set
 * and an optional redirect target.
 */
case class HttpMessage(
    response: Option[Array[Byte]],
    cookie: Option[String] = None,
    redirect: Option[String] = None
) extends Message[Option[Array[Byte]]]

/** A websocket reply carrying an arbitrary payload. */
case class WsMessage[A](response: A) extends Message[A]
| jsflax/cortex | src/main/scala/cortex/controller/Message.scala | Scala | mit | 374 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of Scala code snippets matching the chosen criteria, giving a quick overview of the dataset's contents.