code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.frontend.filters
import akka.Done
import akka.actor.ActorSystem
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, Materializer}
import akka.util.ByteString
import controllers.Assets
import org.mockito.ArgumentCaptor
import org.mockito.Matchers.any
import org.mockito.Mockito._
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.mock.MockitoSugar
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{BeforeAndAfterEach, Matchers, TestData, WordSpecLike}
import org.scalatestplus.play._
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.ws.WS
import play.api.mvc._
import play.api.test.Helpers._
import play.api.test.{FakeApplication, FakeRequest}
import uk.gov.hmrc.http.{CookieNames, HeaderCarrier, HeaderNames}
import uk.gov.hmrc.play.audit.EventKeys
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import uk.gov.hmrc.play.audit.model.DataEvent
import uk.gov.hmrc.play.frontend.config.EventTypes
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ExecutionContext, Future}
/** Unit tests for `FrontendAuditFilter`.
  *
  * Verifies that the filter:
  *  - masks sensitive form fields (passwords) in audited request bodies,
  *  - captures device fingerprint / device ID / session metadata into audit events,
  *  - serialises query strings and host/port information for auditing,
  *  - truncates or includes response bodies depending on content type.
  *
  * Events are captured via the mocked `AuditConnector` inside `filter` and
  * retrieved with `MockMethods.verifyAndRetrieveEvent`.
  */
class FrontendAuditFilterSpec
  extends WordSpecLike
  with Matchers
  with Eventually
  with ScalaFutures
  with FilterFlowMock
  with MockitoSugar
  with BeforeAndAfterEach {

  import MockMethods._

  implicit val system = ActorSystem("test")
  implicit val materializer = ActorMaterializer()

  // Drains the response body stream; the filter's auditing side effect is only
  // triggered once the body has been consumed, so tests must drain it before
  // asserting on captured events.
  def enumerateResponseBody(r: Result): Future[Done] =
    r.body.dataStream.runForeach({ _ =>
    })

  // Filter under test, wired to a mocked AuditConnector so emitted DataEvents
  // can be captured and inspected. Controller-level auditing is disabled so
  // only the filter's own events are produced.
  implicit val filter = new FrontendAuditFilter {
    override val maskedFormFields: Seq[String] = Seq("password")
    override val applicationPort: Option[Int] = Some(80)
    override val auditConnector: AuditConnector = mock[AuditConnector]
    override val appName: String = "app"
    override def controllerNeedsAuditing(controllerName: String): Boolean = false
    // NOTE(review): this shadows the outer ActorSystem and is never terminated —
    // looks like a resource leak in the test fixture; confirm whether it is needed.
    implicit val system = ActorSystem("test")
    implicit override def mat: Materializer = ActorMaterializer()
  }

  // Reset the mock between tests so each test verifies only its own event.
  override def beforeEach() {
    reset(filter.auditConnector)
  }

  "A password" should {
    "be obfuscated with the password at the beginning" in {
      filter.stripPasswords(
        Some("application/x-www-form-urlencoded"),
        "password=p2ssword%26adkj&csrfToken=123&userId=113244018119",
        Seq("password")) shouldBe "password=#########&csrfToken=123&userId=113244018119"
    }
    "be obfuscated with the password in the end" in {
      filter.stripPasswords(
        Some("application/x-www-form-urlencoded"),
        "csrfToken=123&userId=113244018119&password=p2ssword%26adkj",
        Seq("password")) shouldBe "csrfToken=123&userId=113244018119&password=#########"
    }
    "be obfuscated with the password in the middle" in {
      filter.stripPasswords(
        Some("application/x-www-form-urlencoded"),
        "csrfToken=123&password=p2ssword%26adkj&userId=113244018119",
        Seq("password")) shouldBe "csrfToken=123&password=#########&userId=113244018119"
    }
    "be obfuscated even if the password is empty" in {
      filter.stripPasswords(
        Some("application/x-www-form-urlencoded"),
        "csrfToken=123&password=&userId=113244018119",
        Seq("password")) shouldBe "csrfToken=123&password=#########&userId=113244018119"
    }
    // Masking only applies to form-encoded bodies; other content types pass through.
    "not be obfuscated if content type is not application/x-www-form-urlencoded" in {
      filter.stripPasswords(Some("text/json"), "{ password=p2ssword%26adkj }", Seq("password")) shouldBe "{ password=p2ssword%26adkj }"
    }
    "be obfuscated using multiple fields" in {
      val body = """companyNumber=05448736&password=secret&authCode=code"""
      val result = filter.stripPasswords(Some("application/x-www-form-urlencoded"), body, Seq("password", "authCode"))
      result shouldBe """companyNumber=05448736&password=#########&authCode=#########"""
    }
  }

  "The Filter" should {
    "generate audit events without passwords" when {
      val body = "csrfToken=acb" +
        "&userId=113244018119" +
        "&password=123456789" +
        "&key1="
      val source = Source.single(ByteString(body))
      val request = FakeRequest("POST", "/foo").withHeaders("Content-Type" -> "application/x-www-form-urlencoded")
      "when the request succeeds" in {
        val result = await(filter.apply(nextAction)(request).run(source))
        await(enumerateResponseBody(result))
        behave like expected
      }
      // The event must be emitted even when the downstream action fails.
      "when an action further down the chain throws an exception" in {
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run(source))
        behave like expected
      }
      def expected() =
        eventually {
          val event = verifyAndRetrieveEvent
          event.auditType shouldBe EventTypes.RequestReceived
          event.detail should contain("requestBody" -> "csrfToken=acb&userId=113244018119&password=#########&key1=")
        }(PatienceConfig(Span(5, Seconds), Span(200, Millis)), implicitly)
    }

    "generate audit events with the device finger print when it is supplied in a request cookie" when {
      // Base64-encoded JSON fingerprint as produced by the frontend JavaScript.
      val encryptedFingerprint = "eyJ1c2VyQWdlbnQiOiJNb3ppbGxhLzUuMCAoTWFjaW50b3NoOyBJbnRlbCBNYWMgT1MgWCAxMF84XzUpIEFwcGxlV2ViS2l0LzUzNy4zNiAoS0hUTUwsIGx" +
        "pa2UgR2Vja28pIENocm9tZS8zMS4wLjE2NTAuNDggU2FmYXJpLzUzNy4zNiIsImxhbmd1YWdlIjoiZW4tVVMiLCJjb2xvckRlcHRoIjoyNCwicmVzb2x1dGlvbiI6IjgwMHgxMj" +
        "gwIiwidGltZXpvbmUiOjAsInNlc3Npb25TdG9yYWdlIjp0cnVlLCJsb2NhbFN0b3JhZ2UiOnRydWUsImluZGV4ZWREQiI6dHJ1ZSwicGxhdGZvcm0iOiJNYWNJbnRlbCIsImRvT" +
        "m90VHJhY2siOnRydWUsIm51bWJlck9mUGx1Z2lucyI6NSwicGx1Z2lucyI6WyJTaG9ja3dhdmUgRmxhc2giLCJDaHJvbWUgUmVtb3RlIERlc2t0b3AgVmlld2VyIiwiTmF0aXZl" +
        "IENsaWVudCIsIkNocm9tZSBQREYgVmlld2VyIiwiUXVpY2tUaW1lIFBsdWctaW4gNy43LjEiXX0="
      val request = FakeRequest("GET", "/foo").withCookies(
        Cookie(DeviceFingerprint.deviceFingerprintCookieName, encryptedFingerprint))
      "when the request succeeds" in {
        val result = await(filter.apply(nextAction)(request).run)
        await(enumerateResponseBody(result))
        behave like expected
      }
      "when an action further down the chain throws an exception" in {
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run)
        behave like expected
      }
      def expected() = eventually {
        val event = verifyAndRetrieveEvent
        event.auditType shouldBe EventTypes.RequestReceived
        event.detail should contain(
          "deviceFingerprint" -> ("""{"userAgent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.48 Safari/537.36",""" +
            """"language":"en-US","colorDepth":24,"resolution":"800x1280","timezone":0,"sessionStorage":true,"localStorage":true,"indexedDB":true,"platform":"MacIntel",""" +
            """"doNotTrack":true,"numberOfPlugins":5,"plugins":["Shockwave Flash","Chrome Remote Desktop Viewer","Native Client","Chrome PDF Viewer","QuickTime Plug-in 7.7.1"]}"""))
      }
    }

    "generate audit events without the device finger print when it is not supplied in a request cookie" when {
      val request = FakeRequest("GET", "/foo")
      "when the request succeeds" in {
        val result = await(filter.apply(nextAction)(request).run)
        await(enumerateResponseBody(result))
        behave like expected
      }
      "when an action further down the chain throws an exception" in {
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run)
        behave like expected
      }
      // "-" is the filter's placeholder for a missing fingerprint.
      def expected() = eventually {
        val event = verifyAndRetrieveEvent
        event.auditType shouldBe EventTypes.RequestReceived
        event.detail should contain("deviceFingerprint" -> "-")
      }
    }

    "generate audit events without the device finger print when the value supplied in the request cookie is invalid" when {
      def request =
        FakeRequest("GET", "/foo").withCookies(
          Cookie(
            DeviceFingerprint.deviceFingerprintCookieName,
            "THIS IS SOME JUST THAT SHOULDN'T BE DECRYPTABLE *!@&£$)B__!@£$"))
      // Strict cookie parsing is disabled so the malformed cookie reaches the filter.
      "when the request succeeds" in running(
        FakeApplication(additionalConfiguration = Map("play.http.cookies.strict" -> false))) {
        await(filter.apply(nextAction)(request).run)
        behave like expected
      }
      "when an action further down the chain throws an exception" in running(
        FakeApplication(additionalConfiguration = Map("play.http.cookies.strict" -> false))) {
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run)
        behave like expected
      }
      def expected() = eventually {
        val event = verifyAndRetrieveEvent
        event.auditType shouldBe EventTypes.RequestReceived
        event.detail should contain("deviceFingerprint" -> "-")
      }
    }

    "use the session to read Authorization, session Id and token" when {
      "when the request succeeds" in running(FakeApplication()) {
        val request = FakeRequest("GET", "/foo").withSession(
          "token" -> "aToken",
          "authToken" -> "Bearer fNAao9C4kTby8cqa6g75emw1DZIyA5B72nr9oKHHetE=",
          "sessionId" -> "mySessionId")
        val result = await(filter.apply(nextAction)(request).run)
        await(enumerateResponseBody(result))
        behave like expected
      }
      "when an action further down the chain throws an exception" in running(FakeApplication()) {
        val request = FakeRequest("GET", "/foo").withSession(
          "token" -> "aToken",
          "authToken" -> "Bearer fNAao9C4kTby8cqa6g75emw1DZIyA5B72nr9oKHHetE=",
          "sessionId" -> "mySessionId")
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run)
        behave like expected
      }
      def expected() = eventually {
        val event = verifyAndRetrieveEvent
        event.auditType shouldBe EventTypes.RequestReceived
        event.detail should contain("Authorization" -> "Bearer fNAao9C4kTby8cqa6g75emw1DZIyA5B72nr9oKHHetE=")
        event.detail should contain("token" -> "aToken")
        event.tags should contain("X-Session-ID" -> "mySessionId")
      }
    }

    "add the Location header to the details if available" in {
      implicit val hc = HeaderCarrier()
      val next = Action.async { _ =>
        Future.successful(Results.Ok.withHeaders("Location" -> "some url"))
      }
      val result = await(filter.apply(next)(FakeRequest()).run)
      await(enumerateResponseBody(result))
      eventually {
        val event = verifyAndRetrieveEvent
        event.detail should contain("Location" -> "some url")
      }
    }

    "generate audit events with the device ID when it is supplied in a request cookie" when {
      val deviceID = "A_DEVICE_ID"
      val request = FakeRequest("GET", "/foo").withCookies(Cookie(CookieNames.deviceID, deviceID))
      "when the request succeeds" in {
        val result = await(filter.apply(nextAction)(request).run)
        await(enumerateResponseBody(result))
        behave like expected
      }
      "when an action further down the chain throws an exception" in {
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run)
        behave like expected
      }
      def expected() = eventually {
        val event = verifyAndRetrieveEvent
        event.auditType shouldBe EventTypes.RequestReceived
        event.detail should contain("deviceID" -> deviceID)
      }
    }

    // The header is the fallback source for the device ID when no cookie is present.
    "generate audit events with the device ID from headers if not supplied as a cookie" when {
      val deviceID = "A_DEVICE_ID"
      val request = FakeRequest("GET", "/foo").withHeaders(HeaderNames.deviceID -> deviceID)
      "when the request succeeds" in {
        await(filter.apply(nextAction)(request).run)
        behave like expected
      }
      "when an action further down the chain throws an exception" in {
        a[RuntimeException] should be thrownBy await(filter.apply(exceptionThrowingAction)(request).run)
        behave like expected
      }
      def expected() = eventually {
        val event = verifyAndRetrieveEvent
        event.auditType shouldBe EventTypes.RequestReceived
        event.detail should contain("deviceID" -> deviceID)
      }
    }
  }

  // getQueryString renders a query-string map as "key:value&key:value" ("-" if empty).
  "Get query string for audit" should {
    "handle a simple querystring" in {
      filter.getQueryString(FakeRequest("GET", "/foo?action=frog").queryString) should be("action:frog")
    }
    "handle an empty querystring" in {
      filter.getQueryString(FakeRequest("GET", "/foo").queryString) should be("-")
    }
    "handle an invalid Request object" in {
      filter.getQueryString(FakeRequest("GET", "").queryString) should be("-")
    }
    "handle multiple query strings" in {
      filter.getQueryString(FakeRequest("GET", "/foo?action1=frog1&action2=frog2").queryString) should be(
        "action1:frog1&action2:frog2")
    }
    "handle sequences of values for a single query string" in {
      filter.getQueryString(FakeRequest("GET", "/foo?action1=frog1,frog2").queryString) should be("action1:frog1,frog2")
    }
    "handle sequences of values with multiple query strings" in {
      val underOrderedProcessedQueryString =
        filter.getQueryString(FakeRequest("GET", "/foo?mammal=dog,cat&bird=dove&reptile=lizard,snake").queryString)
      underOrderedProcessedQueryString should be("mammal:dog,cat&bird:dove&reptile:lizard,snake")
    }
    "handle empty maps" in {
      filter.getQueryString(Map.empty) should be("-")
    }
    "handle empty sequences" in {
      filter.getQueryString(Map("mammal" -> Seq.empty)) should be("mammal:")
    }
    "print in the same order as the sequence" in {
      filter.getQueryString(Map("mammal" -> Seq("dog", "cat"), "reptile" -> Seq("snake", "lizard"))) should be(
        "mammal:dog,cat&reptile:snake,lizard")
    }
  }

  "Retrieve host from request" should {
    "convert a not found value into a hyphen" in {
      filter.getHost(FakeRequest()) should be("-")
    }
    "keep the host name when it does not contain any port" in {
      filter.getHost(FakeRequest().withHeaders("Host" -> "localhost")) should be("localhost")
    }
    "remove the port and keep host name when the host contains the port" in {
      filter.getHost(FakeRequest().withHeaders("Host" -> "localhost:9000")) should be("localhost")
    }
  }

  "Retrieve port from play configuration" should {
    // Matches applicationPort = Some(80) configured on `filter` above.
    "retrieve the port when it is specified in the configuration" in {
      filter.getPort should be("80")
    }
  }

  "A frontend response" should {
    // HTML bodies are elided from audit events to avoid storing full pages.
    "not be included in the audit message if it is HTML" in {
      implicit val hc = HeaderCarrier()
      val next = Action(Results.Ok(<h1>Hello, world!</h1>).as(HTML).withHeaders("Content-Type" -> "text/html"))
      val result = await(filter.apply(next)(FakeRequest()).run)
      await(enumerateResponseBody(result))
      eventually {
        val event = verifyAndRetrieveEvent
        event.detail should contain("responseMessage" -> "<HTML>...</HTML>")
      }
    }
    "not depend on response headers when truncating HTML" in {
      implicit val hc = HeaderCarrier()
      val next = Action(Results.Ok(<h1>Hello, world!</h1>).as(HTML))
      val result = await(filter.apply(next)(FakeRequest()).run)
      await(enumerateResponseBody(result))
      eventually {
        val event = verifyAndRetrieveEvent
        event.detail should contain("responseMessage" -> "<HTML>...</HTML>")
      }
    }
    "not be included in the audit message if it is html with utf-8" in {
      implicit val hc = HeaderCarrier()
      val next = Action.async { _ =>
        Future.successful(
          Results.Ok(<h1>Hello, world!</h1>).as(HTML).withHeaders("Content-Type" -> "text/html; charset=utf-8"))
      }
      val result = await(filter.apply(next)(FakeRequest()).run)
      await(enumerateResponseBody(result))
      eventually {
        val event = verifyAndRetrieveEvent
        event.detail should contain("responseMessage" -> "<HTML>...</HTML>")
      }
    }
    "be included if the ContentType is not text/html" in {
      implicit val hc = HeaderCarrier()
      val next = Action.async { _ =>
        Future.successful(Results.Status(303)("....the response...").withHeaders("Content-Type" -> "application/json"))
      }
      val result = await(filter.apply(next)(FakeRequest()).run)
      await(enumerateResponseBody(result))
      eventually {
        val event = verifyAndRetrieveEvent
        event.detail should contain("responseMessage" -> "....the response...")
      }
    }
  }
}
/** Integration tests for `FrontendAuditFilter` running against a real embedded
  * Play server (one server per test). Exercises the filter's body-size limits:
  * request/response bodies larger than `filter.maxBodySize` must be truncated
  * in the audit event, while smaller bodies are audited in full.
  */
class FrontendAuditFilterServerSpec extends FrontendAuditFilterSpec with OneServerPerTest {
  import MockMethods._

  val random = new scala.util.Random
  // Bodies deliberately above and just below the filter's audit size limit.
  val largeContent: String = randomString("abcdefghijklmnopqrstuvwxyz0123456789")(filter.maxBodySize * 3)
  val standardContent: String = randomString("abcdefghijklmnopqrstuvwxyz0123456789")(filter.maxBodySize - 1)
  val pc = PatienceConfig(Span(5, Seconds), Span(15, Millis))

  // Generate a random string of length n from the given alphabet
  def randomString(alphabet: String)(n: Int): String =
    Stream.continually(random.nextInt(alphabet.length)).map(alphabet).take(n).mkString

  val assets: Assets = new GuiceApplicationBuilder().injector().instanceOf[Assets]

  // Each test gets a fresh application whose routes are wrapped by the filter
  // under test, so WS calls below go through the auditing pipeline.
  override def newAppForTest(testData: TestData): Application =
    new GuiceApplicationBuilder()
      .routes({
        case ("GET", "/standardresponse") =>
          filter.apply(Action {
            Results.Ok(standardContent)
          })
        case ("GET", "/longresponse") =>
          filter.apply(Action {
            Results.Ok(largeContent)
          })
        case ("POST", "/longrequest") =>
          filter.apply(Action {
            Results.Ok
          })
      })
      .build()

  "Attempting to audit a large in-memory response" in {
    reset(filter.auditConnector)
    val url = s"http://localhost:$port/longresponse"
    val response = await(WS.url(url).get())
    eventually {
      // Client still receives the whole body; only the audit copy is truncated.
      response.body.length should equal(largeContent.length)
      verifyDetailPropertyLength(EventKeys.ResponseMessage, filter.maxBodySize)
    }
  }

  "Attempting to audit a standard in-memory response" in {
    reset(filter.auditConnector)
    val url = s"http://localhost:$port/standardresponse"
    val response = await(WS.url(url).get())
    eventually {
      response.body.length should equal(standardContent.length)
      verifyDetailPropertyLength(EventKeys.ResponseMessage, standardContent.length)
    }
  }

  "Attempting to audit a large request" in {
    reset(filter.auditConnector)
    val url = s"http://localhost:$port/longrequest"
    val response = await(WS.url(url).post(largeContent))
    eventually {
      response.body.length should equal(0)
      verifyDetailPropertyLength(EventKeys.RequestBody, filter.maxBodySize)
    }
  }

  // Asserts that the captured audit event carries `detailKey` with exactly `length` characters.
  def verifyDetailPropertyLength(detailKey: String, length: Int): Unit = {
    val event = verifyAndRetrieveEvent
    event.detail should not be null
    event.detail.getOrElse(detailKey, "").length should equal(length)
  }
}
/** Shared test helpers for interrogating the mocked `AuditConnector`. */
object MockMethods {
  /** Verifies that exactly one event was sent through the filter's audit
    * connector and returns it. Fails (via Mockito `verify`) if no event —
    * or more than one — was sent since the last mock reset.
    */
  def verifyAndRetrieveEvent(implicit filter: FrontendAuditFilter): DataEvent = {
    val captor = ArgumentCaptor.forClass(classOf[DataEvent])
    verify(filter.auditConnector).sendEvent(captor.capture)(any[HeaderCarrier], any[ExecutionContext])
    captor.getValue
  }
}
| hmrc/frontend-bootstrap | src/test/scala/uk/gov/hmrc/play/frontend/filters/FrontendAuditFilterSpec.scala | Scala | apache-2.0 | 20,358 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.reflect.reify
package utils
import scala.collection._
import java.lang.System.{lineSeparator => EOL}
trait SymbolTables {
  self: Utils =>

  import global._

  /** An immutable table mapping reified symbols to the trees that define them,
    * plus an ordered list of (symbol, name) aliases. The first alias recorded
    * for a symbol is its primary name; `original` optionally preserves the
    * encoded tree list this table was decoded from, so round-tripping is exact.
    */
  class SymbolTable private[SymbolTable] (
    private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](),
    private[SymbolTable] val aliases: List[(Symbol, TermName)] = List[(Symbol, TermName)](),
    private[SymbolTable] val original: Option[List[Tree]] = None) {

    def syms: List[Symbol] = symtab.keys.toList

    // Definition tree for `sym`, or EmptyTree if the symbol is not in the table.
    def symDef(sym: Symbol): Tree =
      symtab.getOrElse(sym, EmptyTree)

    // Primary name for `sym`; nme.EMPTY when the symbol is unknown.
    def symName(sym: Symbol): TermName =
      symtab.get(sym) match {
        case Some(FreeDef(_, name, _, _, _)) => name
        case Some(SymDef(_, name, _, _)) => name
        case None => nme.EMPTY
        case x => throw new MatchError(x)
      }

    // All names recorded for `sym` (empty if the symbol is unknown).
    def symAliases(sym: Symbol): List[TermName] =
      symName(sym) match {
        case name if name.isEmpty => Nil
        case _ => (aliases.distinct.groupMap(_._1)(_._2))(sym)
      }

    // Binding tree of a free symbol; symdefs have no binding by construction.
    def symBinding(sym: Symbol): Tree =
      symtab.get(sym) match {
        case Some(FreeDef(_, _, binding, _, _)) => binding
        case Some(SymDef(_, _, _, _)) => throw new UnsupportedOperationException(s"${symtab(sym)} is a symdef, hence it doesn't have a binding")
        case None => EmptyTree
        case x => throw new MatchError(x)
      }

    // Reference (Ident) to the symbol's reified definition, carrying the
    // binding information as an attachment for later phases.
    def symRef(sym: Symbol): Tree =
      symtab.get(sym) match {
        case Some(FreeDef(_, name, binding, _, _)) => Ident(name) updateAttachment binding
        case Some(SymDef(_, name, _, _)) => Ident(name) updateAttachment ReifyBindingAttachment(Ident(sym))
        case None => EmptyTree
        case x => throw new MatchError(x)
      }

    @deprecated("use add instead", since="2.13.3")
    def +(sym: Symbol, name: TermName, reification: Tree): SymbolTable = add(sym, name, reification)
    def +(symDef: Tree): SymbolTable = add(symDef)
    def ++(symDefs: IterableOnce[Tree]): SymbolTable = symDefs.iterator.foldLeft(this)((symtab, symDef) => symtab.add(symDef))
    def ++(symtab: SymbolTable): SymbolTable = { val updated = this ++ symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases ++ symtab.aliases) }
    def -(sym: Symbol): SymbolTable = remove(sym)
    def -(name: TermName): SymbolTable = remove(name)
    def -(symDef: Tree): SymbolTable = remove(reifyBinding(symDef).symbol)
    def --(syms: IterableOnce[Symbol]): SymbolTable = syms.iterator.foldLeft(this)((symtab, sym) => symtab.remove(sym))
    def --(names: List[TermName]): SymbolTable = names.foldLeft(this)((symtab, name) => symtab.remove(name))
    def --(symDefs: Iterable[Tree]): SymbolTable = this -- (symDefs map (reifyBinding(_)))
    def --(symtab: SymbolTable): SymbolTable = { val updated = this -- symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases diff symtab.aliases) }
    def filterSyms(p: Symbol => Boolean): SymbolTable = this -- (syms filterNot p)
    def filterAliases(p: (Symbol, TermName) => Boolean): SymbolTable = this -- (aliases filterNot (tuple => p(tuple._1, tuple._2)) map (_._2))

    // Registers a symbol definition tree. The defining tree is only stored the
    // first time a symbol is seen; every call appends a (sym, name) alias.
    private def add(symDef: Tree): SymbolTable = {
      val sym = reifyBinding(symDef).symbol
      assert(sym != NoSymbol, showRaw(symDef))
      val name = symDef match {
        case FreeDef(_, name, _, _, _) => name
        case SymDef(_, name, _, _) => name
        case x => throw new MatchError(x)
      }
      val newSymtab = if (!(symtab contains sym)) symtab + (sym -> symDef) else symtab
      val newAliases = aliases :+ (sym -> name)
      new SymbolTable(newSymtab, newAliases)
    }

    def add(sym: Symbol, name0: TermName, reification: Tree): SymbolTable = {
      // Sanitize the display name so it is a valid fresh term name.
      def freshName(name0: TermName): TermName = {
        var name = name0.toString
        name = name.replace(".type", "$type")
        name = name.replace(" ", "$")
        val fresh = typer.fresh
        newTermName(fresh.newName(name))
      }
      val bindingAttachment = reification.attachments.get[ReifyBindingAttachment].get
      add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
    }

    private def remove(sym: Symbol): SymbolTable = {
      val newSymtab = symtab - sym
      val newAliases = aliases filter (_._1 != sym)
      new SymbolTable(newSymtab, newAliases)
    }

    // Removing a name may leave a symbol whose primary name was just dropped;
    // such entries are renamed to their next remaining alias (or dropped
    // entirely when no alias is left).
    private def remove(name: TermName): SymbolTable = {
      var newSymtab = symtab
      val newAliases = aliases filter (_._2 != name)
      newSymtab = newSymtab filter { case ((sym, _)) => newAliases exists (_._1 == sym) }
      newSymtab = newSymtab map { case ((sym, tree)) =>
        val ValDef(mods, primaryName, tpt, rhs) = tree: @unchecked
        val tree1 =
          if (!(newAliases contains ((sym, primaryName)))) {
            val primaryName1 = newAliases.find(_._1 == sym).get._2
            ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree)
          } else tree
        (sym, tree1)
      }
      new SymbolTable(newSymtab, newAliases)
    }

    // Memoizes encodings; trees are duplicated on every read so callers cannot
    // mutate the cached originals.
    private val cache = mutable.Map[SymbolTable, List[Tree]]()
    def encode: List[Tree] = cache.getOrElseUpdate(this, SymbolTable.encode(this)) map (_.duplicate)

    override def toString = {
      val symtabString = symtab.keys.map(symName(_)).mkString(", ")
      val trueAliases = aliases.distinct.filter(entry => symName(entry._1) != entry._2)
      val aliasesString = trueAliases.map(entry => s"${symName(entry._1)} -> ${entry._2}").mkString(", ")
      s"""symtab = [$symtabString], aliases = [$aliasesString]${if (original.isDefined) ", has original" else ""}"""
    }

    def debugString: String = {
      val buf = new StringBuilder
      buf.append("symbol table = " + (if (syms.length == 0) "<empty>" else "")).append(EOL)
      syms foreach (sym => buf.append(symDef(sym)).append(EOL))
      buf.delete(buf.length - EOL.length, buf.length)
      buf.toString
    }
  }

  object SymbolTable {
    def apply(): SymbolTable =
      new SymbolTable()

    /** Decodes a previously-encoded tree list back into a table, using the
      * attachments placed on each tree during encoding. The raw list is kept
      * in `original` so a later `encode` reproduces it verbatim.
      */
    def apply(encoded: List[Tree]): SymbolTable = {
      var result = new SymbolTable(original = Some(encoded))
      encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
        case (Some(ReifyBindingAttachment(_)), _) => result += entry
        case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ ((sym, alias)))
        case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
      })
      result
    }

    /** Serialises a symbol table to a list of trees (symbol defs, setInfo /
      * setAnnotations calls, and alias ValDefs). Reifying the signatures may
      * itself grow the reifier's table, hence the fixpoint-style loop below.
      */
    private[SymbolTable] def encode(symtab0: SymbolTable): List[Tree] = {
      if (symtab0.original.isDefined) return symtab0.original.get.map(_.duplicate)
      else assert(hasReifier, "encoding a symbol table requires a reifier")
      // during `encode` we might need to do some reifications
      // these reifications might lead to changes in `reifier.symtab`
      // reifier is mutable, symtab is immutable. this is a tough friendship
      val backup = reifier.state.backup
      reifier.state.symtab = symtab0.asInstanceOf[reifier.SymbolTable]
      def currtab = reifier.symtab.asInstanceOf[SymbolTable]
      try {
        val cumulativeSymtab = mutable.ArrayBuffer[Tree](symtab0.symtab.values.toList: _*)
        val cumulativeAliases = mutable.ArrayBuffer[(Symbol, TermName)](symtab0.aliases: _*)

        // Produces a tree that restores `sym`'s signature (and annotations, if any).
        def fillInSymbol(sym: Symbol): Tree = {
          if (reifyDebug) println("Filling in: %s (%s)".format(sym, sym.accurateKindString))
          val isFreeTerm = FreeTermDef.unapply(currtab.symDef(sym)).isDefined
          // scala/bug#6204 don't reify signatures for incomplete symbols, because this might lead to cyclic reference errors
          val signature =
            if (sym.isInitialized) {
              if (sym.isCapturedVariable) capturedVariableType(sym)
              else if (isFreeTerm) sym.tpe
              else sym.info
            } else NoType
          val rset = reifier.mirrorBuildCall(nme.setInfo, currtab.symRef(sym), reifier.reify(signature))
          // `Symbol.annotations` doesn't initialize the symbol, so we don't need to do anything special here
          // also since we call `sym.info` a few lines above, by now the symbol will be initialized (if possible)
          // so the annotations will be filled in and will be waiting to be reified (unless symbol initialization is prohibited as described above)
          if (sym.annotations.isEmpty) rset
          else reifier.mirrorBuildCall(nme.setAnnotations, rset, reifier.mkList(sym.annotations map reifier.reifyAnnotationInfo))
        }

        // `fillInSymbol` might add symbols to `symtab`, that's why this is done iteratively
        var progress = 0
        while (progress < cumulativeSymtab.length) {
          val sym = reifyBinding(cumulativeSymtab(progress)).symbol
          if (sym != NoSymbol) {
            val symtabProgress = currtab.symtab.size
            val aliasesProgress = currtab.aliases.length
            val fillIn = fillInSymbol(sym)
            cumulativeSymtab ++= currtab.symtab.values drop symtabProgress
            cumulativeAliases ++= currtab.aliases drop aliasesProgress
            cumulativeSymtab += fillIn
          }
          progress += 1
        }

        // Emit an alias ValDef for every secondary name of each symbol.
        val withAliases = cumulativeSymtab flatMap (entry => {
          val result = mutable.ListBuffer[Tree]()
          result += entry
          val sym = reifyBinding(entry).symbol
          if (sym != NoSymbol)
            result ++= cumulativeAliases.distinct filter (alias => alias._1 == sym && alias._2 != currtab.symName(sym)) map (alias => {
              val canonicalName = currtab.symName(sym)
              val aliasName = alias._2
              ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) updateAttachment ReifyAliasAttachment(sym, aliasName)
            })
          result.toList
        })
        withAliases.toList
      } finally {
        // Restore the reifier's mutable state regardless of how encoding ended.
        reifier.state.restore(backup)
      }
    }
  }
}
| scala/scala | src/compiler/scala/reflect/reify/utils/SymbolTables.scala | Scala | apache-2.0 | 10,342 |
package main.scala
/** A minimal integer calculator exposing the four basic arithmetic operations. */
class Calculator {

  /** Returns the sum of `x` and `y`. */
  def sum(x: Int, y: Int): Int = x + y

  /** Returns `x` minus `y`. */
  def minus(x: Int, y: Int): Int = x - y

  /** Integer division of `x` by `y`, truncating toward zero.
    *
    * @throws ArithmeticException if `y` is 0
    */
  def divide(x: Int, y: Int): Int = x / y

  /** Returns the product of `x` and `y`. */
  def multiply(x: Int, y: Int): Int = x * y
}
package java.nio
import scala.scalajs.js.typedarray._
/** A direct `FloatBuffer` backed by a JavaScript `Float32Array`.
  *
  * All bulk operations delegate to the shared `GenBuffer` /
  * `GenTypedArrayBuffer` implementations; the `@noinline` annotations keep
  * those generic bodies out-of-line so they are shared across buffer classes
  * rather than duplicated at each call site.
  */
private[nio] final class TypedArrayFloatBuffer private (
    override private[nio] val _typedArray: Float32Array,
    _initialPosition: Int, _initialLimit: Int, _readOnly: Boolean)
    extends FloatBuffer(_typedArray.length, null, -1) {

  position(_initialPosition)
  limit(_initialLimit)

  // Factory evidence required by the generic slice/duplicate/compact helpers.
  private[this] implicit def newTypedArrayFloatBuffer =
    TypedArrayFloatBuffer.NewTypedArrayFloatBuffer

  def isReadOnly(): Boolean = _readOnly

  // Typed-array buffers are always direct (they wrap JS memory directly).
  def isDirect(): Boolean = true

  @noinline
  def slice(): FloatBuffer =
    GenTypedArrayBuffer(this).generic_slice()

  @noinline
  def duplicate(): FloatBuffer =
    GenTypedArrayBuffer(this).generic_duplicate()

  @noinline
  def asReadOnlyBuffer(): FloatBuffer =
    GenTypedArrayBuffer(this).generic_asReadOnlyBuffer()

  @noinline
  def get(): Float =
    GenBuffer(this).generic_get()

  @noinline
  def put(c: Float): FloatBuffer =
    GenBuffer(this).generic_put(c)

  @noinline
  def get(index: Int): Float =
    GenBuffer(this).generic_get(index)

  @noinline
  def put(index: Int, c: Float): FloatBuffer =
    GenBuffer(this).generic_put(index, c)

  @noinline
  override def get(dst: Array[Float], offset: Int, length: Int): FloatBuffer =
    GenBuffer(this).generic_get(dst, offset, length)

  @noinline
  override def put(src: Array[Float], offset: Int, length: Int): FloatBuffer =
    GenBuffer(this).generic_put(src, offset, length)

  @noinline
  def compact(): FloatBuffer =
    GenTypedArrayBuffer(this).generic_compact()

  // Typed arrays use the platform's byte order.
  def order(): ByteOrder =
    ByteOrder.nativeOrder()

  // Internal API

  @inline
  override private[nio] def _arrayBuffer: ArrayBuffer =
    GenTypedArrayBuffer(this).generic_arrayBuffer

  @inline
  override private[nio] def _arrayBufferOffset: Int =
    GenTypedArrayBuffer(this).generic_arrayBufferOffset

  @inline
  override private[nio] def _dataView: DataView =
    GenTypedArrayBuffer(this).generic_dataView

  // Single-element load/store straight against the backing typed array.
  @inline
  private[nio] def load(index: Int): Float =
    _typedArray(index)

  @inline
  private[nio] def store(index: Int, elem: Float): Unit =
    _typedArray(index) = elem

  @inline
  override private[nio] def load(startIndex: Int,
      dst: Array[Float], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_load(startIndex, dst, offset, length)

  @inline
  override private[nio] def store(startIndex: Int,
      src: Array[Float], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_store(startIndex, src, offset, length)
}
/** Factories for [[TypedArrayFloatBuffer]]. */
private[nio] object TypedArrayFloatBuffer {

  /** Type-class instance that tells the generic typed-array machinery how to
    * build float buffers and their backing `Float32Array` views.
    */
  private[nio] implicit object NewTypedArrayFloatBuffer
      extends GenTypedArrayBuffer.NewTypedArrayBuffer[FloatBuffer] {

    // A Float occupies 4 bytes in the backing ArrayBuffer.
    def bytesPerElem: Int = 4

    def apply(typedArray: Float32Array,
        initialPosition: Int, initialLimit: Int,
        readOnly: Boolean): TypedArrayFloatBuffer = {
      new TypedArrayFloatBuffer(typedArray,
          initialPosition, initialLimit, readOnly)
    }

    @inline
    def newTypedArray(buffer: ArrayBuffer,
        byteOffset: Int, length: Int): Float32Array = {
      new Float32Array(buffer, byteOffset, length)
    }
  }

  // View a (suitably aligned) byte buffer as a float buffer sharing its storage.
  @inline
  def fromTypedArrayByteBuffer(byteBuffer: TypedArrayByteBuffer): FloatBuffer =
    GenTypedArrayBuffer.generic_fromTypedArrayByteBuffer(byteBuffer)

  // Wrap an existing Float32Array as a writable buffer covering all elements.
  def wrap(array: Float32Array): FloatBuffer =
    new TypedArrayFloatBuffer(array, 0, array.length, false)
}
| jmnarloch/scala-js | javalib/src/main/scala/java/nio/TypedArrayFloatBuffer.scala | Scala | bsd-3-clause | 3,405 |
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDsl._
import org.scalatest.FlatSpec
import org.scalatest.mock.MockitoSugar
/** @author Stephen Samuel */
/** Verifies the elastic4s count API against an embedded test cluster
  * (provided by `ElasticSugar`): counting across a whole index and counting
  * restricted to a single type.
  *
  * @author Stephen Samuel */
class CountTest extends FlatSpec with MockitoSugar with ElasticSugar {

  // Seed the "london" index with two "landmarks" docs and one "pubs" doc.
  client.execute {
    index into "london/landmarks" fields "name" -> "hampton court palace"
  }.await
  client.execute {
    index into "london/landmarks" fields "name" -> "tower of london"
  }.await
  client.execute {
    index into "london/pubs" fields "name" -> "blue bell"
  }.await

  // Make the documents searchable and wait until all three are visible.
  refresh("london")
  blockUntilCount(3, "london")

  "a count request" should "return total count when no query is specified" in {
    val resp = client.execute {
      count from "london"
    }.await
    assert(3 === resp.getCount)
  }

  "a count request" should "return the document count for the correct type" in {
    val resp = client.execute {
      count from "london" -> "landmarks"
    }.await
    assert(2 === resp.getCount)
  }

  // todo looks like elasticsearch bug
  //  "a count request" should "return the document count based on the specified query" in {
  //    val resp = client.sync.execute {
  //      count from "london" -> "landmarks" query "tower"
  //    }
  //    assert(1 === resp.getCount)
  //  }
}
| l15k4/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/CountTest.scala | Scala | apache-2.0 | 1,265 |
import scala.reflect.runtime.universe._
object Test extends dotty.runtime.LegacyApp {

  /** Requires a Manifest for T but consumes it as a TypeTag: `typeOf[T]`
   *  needs an implicit TypeTag, so this compiles only because the compiler
   *  can derive a TypeTag from a Manifest — the interop under test.
   */
  def manifestIsTypeTag[T: Manifest] = {
    println(typeOf[T])
  }

  // Exercise a primitive, a reference type and an array type.
  manifestIsTypeTag[Int]
  manifestIsTypeTag[String]
  manifestIsTypeTag[Array[Int]]
}
| yusuke2255/dotty | tests/pending/run/interop_manifests_are_typetags.scala | Scala | bsd-3-clause | 243 |
package fr.laas.fape.anml.model.concrete
import fr.laas.fape.anml.ANMLException
import fr.laas.fape.anml.model.abs.AbstractAction
import fr.laas.fape.anml.model.{Context, _}
import fr.laas.fape.anml.model.concrete.statements.Statement
import scala.collection.JavaConverters._
/** Represents a concrete action that is to be inserted into a plan. All parameters of the action refer to one global
* variable.
*
* An action implements StateModifier by exposing all TemporalStatement that should be inserted in the plan after
* applying the action.
* An action implements TemporalInterval which gives it two timepoints start and end that respectively map
* to the start time and end time of the action.
*
* @param abs The AbstractAction of which this Action is an instance
* @param context Context of the action, containing mapping from local to global variables. Most notably contains a
* mapping from parameters to actual global variables. It differs from
* the context in which the action is declared (that can be accessed in `context.parentContext`.
* @param id Global id of the action, used for future reference within anml statements.
* @param parentAction The parent action if it is issued from a decomposition
*/
class Action(
    val abs:AbstractAction,
    val context:Context,
    val id:ActRef,
    val interval: TemporalInterval,
    val chronicle: Chronicle,
    val parentAction:Option[Action],
    refCounter: RefCounter)
  extends TemporalInterval with ChronicleContainer with VariableUser {

  // Back-link: this action owns (contains) its chronicle.
  chronicle.container = Some(this)

  def label = context.label

  // The action's own timepoints are those of the interval it was built with.
  override val start : TPRef = interval.start
  override val end : TPRef = interval.end

  def statements = chronicle.statements

  // Integer variable attached to this action (labelled "instantiation_var").
  // NOTE(review): its exact role (e.g. encoding whether the action is
  // instantiated) is defined by consumers elsewhere — confirm before relying on it.
  val instantiationVar : VarRef = new VarRef(TInteger, refCounter, Label(label,"instantiation_var"))

  def bindingConstraints = chronicle.bindingConstraints

  // Execution status; starts PENDING and is mutated only through setStatus.
  private var mStatus = ActionStatus.PENDING

  /** Depicts the current status of the action. It is first
   * initialized to PENDING and might be changed with `setStatus()`
   *
   * @return
   */
  def status = mStatus

  /** Returns true if the action was defined with the `motivated` keyword. False otherwise. */
  def mustBeMotivated = abs.isTaskDependent

  /** Assigns a new status to the action.
   * Allowed transitions are
   *  - PENDING -> EXECUTING
   *  - EXECUTING -> (FAILED || EXECUTED)
   *
   * @param newStatus New status of the action.
   */
  def setStatus(newStatus: ActionStatus) {
    import ActionStatus._
    // NOTE(review): every transition check below is commented out, so any
    // transition is currently accepted regardless of the documented rules.
    mStatus match {
      case PENDING => //assert(newStatus == EXECUTING)
      case EXECUTING => //assert(newStatus == FAILED || newStatus == EXECUTED)
      case FAILED => //throw new ANMLException("No valid status transition from FAILED.")
      case EXECUTED => //throw new ANMLException("No valid status transition from EXECUTED.")
    }
    mStatus = newStatus
  }

  // Delegations to the underlying chronicle.
  def vars = chronicle.vars
  def temporalConstraints = chronicle.temporalConstraints

  val container = this

  def taskName = abs.taskName
  def name = abs.name
  def tasks = chronicle.tasks
  def logStatements = chronicle.logStatements

  /** Returns true if the statement `s` is contained in this action */
  def contains(s: Statement) = statements.contains(s)

  /** Returns true if this action has a parent (ie. it is issued from a decomposition). */
  def hasParent = parentAction match {
    case Some(_) => true
    case None => false
  }

  /** Returns the parent action. Throws [[ANMLException]] if this action has no parent.
   * Invocation of this method should be preceded by a call to `hasParent()` */
  def parent : Action = parentAction match {
    case Some(x) => x
    case None => throw new ANMLException("Action has no parent.")
  }

  /** Arguments (as global variables) of the action */
  lazy val args = abs.args.asScala.map(context.getGlobalVar).asJava

  override def toString = name +"("+ abs.args.asScala.map(context.getGlobalVar).mkString(", ") + ")"

  // Everything this action uses: the chronicle's variables plus its own
  // parameters, timepoints and the instantiation variable.
  override def usedVariables = chronicle.usedVariables ++ args.asScala + start + end + instantiationVar
}
object Action {

  /** Instantiates a concrete action from its abstract template.
   *
   * @param pb Problem in which the action appears.
   * @param abs Abstract version of the action.
   * @param localID Local reference to the action.
   * @param refCounter Counter used to mint fresh references (ids, timepoints, variables).
   * @param parentAction Enclosing action when this one stems from a decomposition.
   * @param contextOpt Context in which the action appears; defaults to the problem's context.
   * @return The resulting concrete Action.
   */
  def newAction(
      pb :AnmlProblem,
      abs :AbstractAction,
      localID :LActRef,
      refCounter: RefCounter,
      parentAction :Option[Action] = None,
      contextOpt :Option[Context] = None) : Action = {
    // Fall back to the problem-level context when no enclosing context is given.
    val enclosingContext = contextOpt.getOrElse(pb.context)

    // Mint the action's global identifier and its start/end timepoints.
    val id = new ActRef(refCounter)
    val interval = new TemporalInterval {
      override val start: TPRef = new TPRef(refCounter)
      override val end: TPRef = new TPRef(refCounter)
    }

    // Build the action-local context and instantiate the abstract chronicle in it.
    val context = abs.context.buildContext(pb, "act" + id.id, Some(enclosingContext), refCounter, interval)
    val chronicle = abs.chron.getInstance(context, interval, pb, refCounter)
    val act = new Action(abs, context, id, interval, chronicle, parentAction, refCounter)

    // When nested in a parent context, record the localID -> action binding there.
    contextOpt.foreach(_.addAction(localID, act))
    act
  }

  /** Creates a new standalone Action (fresh VarRef parameters) from an action name.
   *
   * @param pb Problem in which the action appears.
   * @param actionName Name of the action to create.
   * @return The new concrete action.
   */
  def getNewStandaloneAction(pb:AnmlProblem, actionName:String, refCounter: RefCounter) : Action = {
    val abs = pb.abstractActions.asScala
      .find(_.name == actionName)
      .getOrElse(throw new ANMLException("Unable to find action "+actionName))
    getNewStandaloneAction(pb, abs, refCounter)
  }

  /** Creates a new standalone Action (fresh VarRef parameters) from an abstract action.
   *
   * @param pb Problem in which the action appears.
   * @param abs Abstract version of the action to create.
   * @return The new concrete action.
   */
  def getNewStandaloneAction(pb:AnmlProblem, abs:AbstractAction, refCounter: RefCounter) : Action =
    newAction(pb, abs, new LActRef(), refCounter, None, Some(pb.context)) //TODO: fix with real context
}
package viscel.server
import scalatags.Text.attrs.{
`for`, `type`, action, attr, content, method, enctype, href, id, rel, src, title, value, name => attrname
}
import scalatags.Text.implicits.{Frag, Tag, stringAttr, stringFrag}
import scalatags.Text.tags.{SeqFrag, body, div, form, h1, head, html, input, label, link, meta, script}
import scalatags.Text.tags2.section
class ServerPages() {

  /** Wraps the given fragments in the common HTML shell: a head declaring the
   *  page title, stylesheet, web-app manifest, icon and mobile viewport meta. */
  def makeHtml(stuff: Frag*): Tag =
    html(
      head(
        title := "Viscel",
        link(rel := "stylesheet", href := "style.css", `type` := "text/css"),
        link(rel := "manifest", href := "manifest.json"),
        link(rel := "icon", href := "icon.png", attr("sizes") := "192x192"),
        meta(attrname := "viewport", content := "width=device-width, initial-scale=1, user-scalable=yes, minimal-ui")
      )
    )(stuff: _*)

  /** Landing page: a no-JS fallback message plus the scripts that boot the client app. */
  def landingTag: Tag =
    makeHtml(
      body("if nothing happens, your javascript does not work"),
      script(src := "localforage.min.js"),
      script(src := "viscel-opt.js")
    )

  /** Renders a tag to a complete HTML document string (prepends the doctype). */
  def fullrender(tag: Tag): String = "<!DOCTYPE html>" + tag.render

  /** A labelled input field; `name` is used as the input id, label target and form field name. */
  def labelledInput(name: String, inputType: String = "text"): Frag =
    div(label(name, `for` := name), input(id := name, `type` := inputType, attrname := name))

  /** Tools page: one POST form per admin action (stop / import / add). */
  def toolsPage: Tag =
    makeHtml(
      body(
        id := "tools",
        makeToolForm("stop", Nil),
        makeToolForm("import", Seq("id", "name", "path")),
        makeToolForm("add", Seq("url"))
      )
    )

  // Builds one tool form: a heading plus a POST form with one labelled input
  // per requested field and a submit button labelled after the action.
  private def makeToolForm(formAction: String, inputs: Seq[String]): Tag = {
    section(
      h1(formAction.capitalize),
      form(
        action := formAction,
        method := "post",
        enctype := "application/x-www-form-urlencoded",
        SeqFrag(inputs.map(labelledInput(_))),
        input(`type` := "submit", value := formAction)
      )
    )
  }
}
| rmgk/viscel | code/jvm/src/main/scala/viscel/server/ServerPages.scala | Scala | agpl-3.0 | 1,852 |
package lila.forum
import play.api.libs.json.Json
import play.modules.reactivemongo.json.ImplicitBSONHandlers.JsObjectWriter
import lila.db.api._
import lila.db.Implicits._
import tube.topicTube
object TopicRepo extends TopicRepo(false) {

  /** Selects the repo variant: the troll repo also sees troll-flagged topics,
   *  the default repo filters them out. */
  def apply(troll: Boolean): TopicRepo = if (troll) TopicRepoTroll else TopicRepo
}
// Variant whose queries do not filter out troll-flagged topics.
object TopicRepoTroll extends TopicRepo(true)
sealed abstract class TopicRepo(troll: Boolean) {

  // Query fragment appended to every read: the non-troll repo excludes topics
  // flagged "troll"; the troll repo applies no filter at all.
  private lazy val trollFilter = troll.fold(
    Json.obj(),
    Json.obj("troll" -> false)
  )

  // Sets the topic's "closed" flag.
  def close(id: String, value: Boolean): Funit =
    $update.field(id, "closed", value)

  // Sets the topic's "hidden" flag.
  def hide(id: String, value: Boolean): Funit =
    $update.field(id, "hidden", value)

  def byCateg(categ: Categ): Fu[List[Topic]] =
    $find(byCategQuery(categ))

  // Looks a topic up by its (category slug, topic slug) pair, honouring the troll filter.
  def byTree(categSlug: String, slug: String): Fu[Option[Topic]] =
    $find.one(Json.obj("categId" -> categSlug, "slug" -> slug) ++ trollFilter)

  // Finds the first free slug for `name` in `categ`, appending "-2", "-3", ...
  // until an unused slug is found.
  def nextSlug(categ: Categ, name: String, it: Int = 1): Fu[String] = {
    val slug = Topic.nameToId(name) + ~(it != 1).option("-" + it)
    // also take troll topic into accounts
    TopicRepoTroll.byTree(categ.slug, slug) flatMap {
      _.isDefined.fold(
        nextSlug(categ, name, it + 1),
        fuccess(slug)
      )
    }
  }

  def incViews(topic: Topic): Funit =
    $update($select(topic.id), $inc("views" -> 1))

  def byCategQuery(categ: Categ) = Json.obj("categId" -> categ.slug) ++ trollFilter
}
| Happy0/lila | modules/forum/src/main/TopicRepo.scala | Scala | mit | 1,438 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.enhancement.enhancements
import io.truthencode.ddo.model.attribute.Attribute
import io.truthencode.ddo.model.enhancement.enhancements.classbased.BombardierTierThree
trait AbilityIBombardier
  extends BombardierTierThree with ClassEnhancementImpl with AbilitySelector {

  /** UI / tooltip text for this enhancement. */
  override lazy val description: Option[String] = Option("+1 to Intelligence")

  /** This enhancement can be taken only once. */
  override val ranks: Int = 1

  /** The single attribute granted by this ability selector. */
  override val abilitySelections: Seq[Attribute] = List(Attribute.Intelligence)

  /** Action-point cost of the single rank. */
  override def apCostPerRank: Int = 2

  /** Roman-numeral suffix — this is tier I. */
  override def rnSuffix: Int = 1
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/enhancement/enhancements/AbilityIBombardier.scala | Scala | apache-2.0 | 1,560 |
package vaadin.scala
import vaadin.scala.mixins.ScaladinMixin
import vaadin.scala.mixins.ValidatorMixin
import vaadin.scala.mixins.ValidatableMixin
import scala.collection.mutable
import scala.collection.immutable.List
package mixins {
trait ValidatorMixin extends ScaladinMixin
trait ValidatableMixin extends ScaladinMixin
}
object Validation {

  /** Converts Vaadin's InvalidValueException into an Invalid result, collecting
   *  the top-level message followed by the messages of all nested causes. */
  def exceptionToInvalid(e: com.vaadin.data.Validator.InvalidValueException): Validation = Invalid(e.getMessage :: e.getCauses().toList.map(_.getMessage))

  /** Runs a Vaadin-style validation thunk: Valid when it returns normally,
   *  Invalid (with extracted messages) when it throws InvalidValueException.
   *  Any other exception propagates unchanged. */
  def wrapToValidation(f: () => Unit): Validation = try {
    f()
    Valid //no exception -> valid
  } catch {
    case e: com.vaadin.data.Validator.InvalidValueException => exceptionToInvalid(e)
  }
}
sealed abstract class Validation(val isValid: Boolean, val errorMessages: List[String] = List.empty) {

  /** Combines two validation results: `Valid` only when both sides are valid,
   *  otherwise an `Invalid` concatenating both sides' failure reasons. Since
   *  `::` is right-associative, in `a :: b` the receiver is `b`, so `b`'s
   *  reasons precede `a`'s in the combined list. */
  def ::(other: Validation): Validation =
    if (isValid && other.isValid) Valid
    else Invalid(errorMessages ::: other.errorMessages)
}

/** Successful validation: carries no error messages. */
case object Valid extends Validation(true)

/** Failed validation carrying the accumulated failure reasons. */
case class Invalid(reasons: List[String]) extends Validation(false, reasons)
object Validator {

  /** Lifts a plain function into a Validator by implementing `validate` with it. */
  def apply(validatorFunction: Option[Any] => Validation): Validator =
    new Validator {
      def validate(value: Option[Any]): Validation = validatorFunction(value)
    }
}
// Mutable-Set facade over the validators registered on a Vaadin component.
class Validators(p: com.vaadin.data.Validatable with ValidatableMixin) extends mutable.Set[Validator] {

  // Membership is determined by scanning the wrapped validators.
  def contains(key: Validator) = iterator.contains(key)

  import scala.collection.JavaConverters._

  // Iterates only over validators created through this wrapper layer
  // (instances of ValidatorMixin); plain Vaadin validators are skipped.
  def iterator(): Iterator[Validator] =
    p.getValidators.asScala.filter(_.isInstanceOf[ValidatorMixin]).map(_.asInstanceOf[ValidatorMixin].wrapper).map(_.asInstanceOf[Validator]).iterator

  // Register a bare function (wrapped into a Validator) or an existing Validator.
  def +=(elem: Option[Any] => Validation) = { p.addValidator(Validator(elem).p); this }

  def +=(elem: Validator) = { p.addValidator(elem.p); this }

  // Removes every wrapped validator equal to `elem` from the component.
  // NOTE(review): removes from the Vaadin peer while iterating over it —
  // assumed safe only if getValidators returns a snapshot; confirm.
  def -=(elem: Validator) = {
    iterator.foreach { e =>
      if (e == elem) {
        p.removeValidator(e.p)
      }
    }
    this
  }
}
// Mixin for Scaladin components backed by a Vaadin Validatable peer.
trait Validatable extends Wrapper {

  override def p: com.vaadin.data.Validatable with ValidatableMixin

  // Live, mutable view over the peer's registered validators.
  lazy val validators: Validators = new Validators(p)

  /** Runs the peer's validation and converts the outcome (normal return or
   *  InvalidValueException) into a Validation value. */
  def validate: Validation = Validation.wrapToValidation(p.validate)
}
// Vaadin-facing adapter: implements com.vaadin.data.Validator by delegating
// to the Scaladin Validator stored in `wrapper` (wired up by the Validator trait).
class ValidatorDelegator extends com.vaadin.data.Validator with ValidatorMixin {

  def isValid(value: Any): Boolean = internalValidate(value) == Valid

  // Vaadin signals failure via exceptions: the first reason becomes the main
  // message and the remaining reasons become nested causes.
  def validate(value: Any): Unit = internalValidate(value) match {
    case Valid =>
    case Invalid(reasons) => {
      if (reasons.isEmpty)
        throw new com.vaadin.data.Validator.InvalidValueException("")
      else
        throw new com.vaadin.data.Validator.InvalidValueException(reasons.head, reasons.tail.map(new com.vaadin.data.Validator.InvalidValueException(_)).toArray)
    }
  }

  // Bridges the nullable Java value into the Option-based Scaladin validator.
  protected def internalValidate(value: Any): Validation = wrapper.asInstanceOf[Validator].validate(Option(value))
}
// A Scaladin-side validator; its Vaadin peer delegates back to `validate`.
trait Validator extends Wrapper {

  // Peer construction wires the delegator's `wrapper` back to this instance.
  override val p: com.vaadin.data.Validator = new ValidatorDelegator { wrapper = Validator.this }

  /** Validates the (optional) value, returning Valid or Invalid with reasons. */
  def validate(value: Option[Any]): Validation
} | CloudInABox/scalavaadinutils | src/main/scala/vaadin/scala/Validation.scala | Scala | mit | 3,182 |
package ml.neural.old
import ml.Pattern
object Neural {

  import no.uib.cipr.matrix.DenseMatrix
  import no.uib.cipr.matrix.Matrices

  //  sealed abstract class ActFun() {
  //    def f(x: Double): Double
  //  }
  //
  //  case class LinearOutputLayer() extends ActFun {
  //    def f(x: Double) = x
  //  }
  //
  //  case class Sigmoide() extends ActFun {
  //    def f(x: Double) = (1.0 / (1 + math.exp(-x)) - 0.5) * 2d //better to avoid numerical instability //1.0 / (1 + math.exp(-x))
  //  }
  //
  //  case class Tahn() extends ActFun {
  //    def f(x: Double) = 1.7159 * math.tanh(2 * x / 3)
  //  }
  //
  //  def patterns2matrices(insts: Seq[Pattern]) = {
  //    val instsarray = insts.toArray
  //    val transP = new DenseMatrix(instsarray.map(_.array))
  //    val transT = new DenseMatrix(instsarray.map(x => x.weighted_label_array))
  //    (transP, transT)
  //  }

  /**
   * Moore-Penrose generalized inverse matrix.
   * Evita ill-conditioned matrices, i.e. singular ones (det = 0).
   * Based on the Java code, which was based on Huang Matlab code.
   * Theory: Ridge regression
   * MP(A) = inv((H'*H + lumda*I)) * H'
   *
   * Uses a tiny ridge term (lumda = 1e-6) for a near-exact pseudo-inverse.
   */
  def pinv(H0: DenseMatrix): DenseMatrix = regularizedPinv(H0, 0.000001)

  /**
   * Same pseudo-inverse with a much heavier ridge term (lumda = 0.1):
   * numerically more robust on ill-conditioned matrices at the cost of accuracy.
   */
  def rougherPinv(H0: DenseMatrix): DenseMatrix = regularizedPinv(H0, 0.1)

  /**
   * Shared ridge-regularised pseudo-inverse: inv(H'*H + lumda*I) * H'.
   * Factored out of pinv/rougherPinv, which previously duplicated this
   * computation verbatim and differed only in the value of `lumda`.
   *
   * @param H0 m x n input matrix
   * @param lumda ridge (regularisation) coefficient added along the diagonal
   * @return the n x m regularised pseudo-inverse
   */
  private def regularizedPinv(H0: DenseMatrix, lumda: Double): DenseMatrix = {
    val m = H0.numRows
    val n = H0.numColumns
    // H0T = H0'
    val H0T: DenseMatrix = new DenseMatrix(n, m)
    H0.transpose(H0T)
    // H0TH0 = H0' * H0
    val H0TH0: DenseMatrix = new DenseMatrix(n, n)
    H0T.mult(H0, H0TH0)
    // H0TH0 += lumda * I  (ridge term keeps the system non-singular)
    val I: DenseMatrix = Matrices.identity(n)
    H0TH0.add(lumda, I)
    // Solve (H0TH0) * X = I, i.e. X = inv(H0TH0)
    val H0TH0_inv: DenseMatrix = I.copy
    H0TH0.solve(I, H0TH0_inv)
    // Result = inv(H0TH0) * H0'
    val pseudo_inverse: DenseMatrix = new DenseMatrix(n, m)
    H0TH0_inv.mult(H0T, pseudo_inverse)
    pseudo_inverse
  }

  //  def inv(H0: DenseMatrix) = {
  //    val I = Matrices.identity(H0.numColumns())
  //    val Ainv = I.copy()
  //    H0.solve(I, Ainv)
  //    Ainv
  //  }
  //
  //  def toArray(M: DenseMatrix) = ((0 until M.numRows) map {
  //    r => ((0 until M.numColumns) map (c => M.get(r, c))).toArray
  //  }).toArray
}
package org.infinispan.spark.examples.twitter
import java.util.concurrent.{Executors, TimeUnit}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.infinispan.client.hotrod.RemoteCacheManager
import org.infinispan.client.hotrod.configuration.ConfigurationBuilder
import org.infinispan.spark.examples.twitter.Sample.{getSparkConf, runAndExit, usageStream}
import org.infinispan.spark.examples.util.TwitterDStream
import org.infinispan.spark.stream._
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.language.postfixOps
/**
* @see StreamConsumerJava
* @author gustavonalle
*/
object StreamConsumerScala {

  def main(args: Array[String]) {
    // Silence Spark's internal logging; this app prints its own status lines.
    Logger.getLogger("org").setLevel(Level.WARN)
    if (args.length < 2) {
      usageStream("StreamConsumerScala")
    }
    // args: <infinispan host> <run duration in seconds (converted to millis)>
    val infinispanHost = args(0)
    val duration = args(1).toLong * 1000
    val conf = getSparkConf("spark-infinispan-stream-consumer-scala")
    val sparkContext = new SparkContext(conf)
    // One-second micro-batches.
    val streamingContext = new StreamingContext(sparkContext, Seconds(1))
    val config = Sample.getConnectorConf(infinispanHost)
    // Direct Hot Rod client, used only for the progress reporting below.
    val remoteCacheManager = new RemoteCacheManager(new ConfigurationBuilder().withProperties(config.getHotRodClientProperties).build())
    val cache = remoteCacheManager.getCache[Long, Tweet]("default")
    val twitterDStream = TwitterDStream.create(streamingContext)
    // Key tweets by id and persist the stream into the Infinispan cache.
    val keyValueTweetStream = twitterDStream.map(s => (s.getId, s))
    keyValueTweetStream.writeToInfinispan(config)
    // Every 5 seconds report how many tweets are cached and show the latest.
    // NOTE(review): treats the highest key as the most recent tweet id — confirm.
    Repeat.every(5 seconds, {
      val keySet = cache.keySet()
      val maxKey = keySet.asScala.max
      println(s"${keySet.size} tweets inserted in the cache")
      println(s"Last tweet:${Option(cache.get(maxKey)).map(_.getText).getOrElse("<no tweets received so far>")}")
      println()
    })
    // Run the streaming context for `duration` millis, then exit.
    runAndExit(streamingContext, duration)
  }

  // Scheduling helper: runs `code` repeatedly with delay `d` between runs,
  // after an initial 10-second delay.
  object Repeat {
    def every(d: Duration, code: => Unit) =
      Executors.newSingleThreadScheduledExecutor.scheduleWithFixedDelay(new Runnable {
        override def run(): Unit = code
      }, 10, d.toSeconds, TimeUnit.SECONDS)
  }
}
| infinispan/infinispan-spark | examples/twitter/src/main/scala/org/infinispan/spark/examples/twitter/StreamConsumerScala.scala | Scala | apache-2.0 | 2,287 |
package com.szadowsz.starform.model.star.calc
import com.szadowsz.starform.model.star.constants.StarConstants
import com.szadowsz.starform.rand.RandGenTrait
import com.szadowsz.starform.system.bodies.base.Star
/**
* @author Zakski : 10/09/2016.
*/
trait StarCalc[S <: Star] {

  /** Bundle of stellar constants used by the calculations below. */
  val sConst: StarConstants

  /** Produces a randomised star instance. */
  def initStar(rand: RandGenTrait): S

  /**
   * Randomises the star's age, following "3. Characteristics of The Primary
   * Star" in Fogg's "Extra-solar Planetary Systems: A Microcomputer
   * Simulation": ages are drawn uniformly from [1, 6] Byr, with the upper
   * bound reduced to the main-sequence lifespan when that is shorter.
   *
   * @note unit is Byr (1.0E9 years)
   *
   * @param rand pseudo-random number generator interface
   * @param lifespan the lifespan the star can expect on the main sequence
   * @return the approximate age of the star in Byr
   */
  def stellarAge(rand: RandGenTrait, lifespan: Double): Double = {
    val window = if (lifespan >= 6.0) 5.0 else lifespan - 1.0
    1.0 + rand.nextDouble() * window
  }

  /**
   * Approximates the star's lifespan on the main sequence, eq. 4 in
   * "3. Characteristics of The Primary Star" from Fogg's paper:
   * 10 * (mass / luminosity).
   *
   * @note unit is Byr (1.0E9 years)
   *
   * @param stellarMass star's mass in terms of solar mass
   * @param stellarLuminosity star's luminosity in terms of solar luminosity
   * @return star's main-sequence lifespan in billion years (Byr)
   */
  def stellarMSLifespan(stellarMass: Double, stellarLuminosity: Double): Double =
    10 * (stellarMass / stellarLuminosity)

  /**
   * Mean habitable ("ecosphere") orbital radius around the star, eq. 5 in
   * "3. Characteristics of The Primary Star" from Fogg's paper: the square
   * root of the luminosity in solar units.
   *
   * @param stellarLuminosity star's luminosity in terms of solar luminosity
   * @return mean habitable radius in AU
   */
  def ecosphereRadius(stellarLuminosity: Double): Double =
    math.sqrt(stellarLuminosity)

  /**
   * Closest possible habitable orbital radius, eq. 6 in "3. Characteristics
   * of The Primary Star" from Fogg's paper: the ecosphere radius scaled by
   * the greenhouse-effect constant.
   *
   * @param ecosphereRadius mean habitable radius in AU
   * @return inner habitable radius in AU
   */
  def greenhouseRadius(ecosphereRadius: Double): Double =
    sConst.GREENHOUSE_EFFECT_CONST * ecosphereRadius
}
| zakski/accrete-starform-stargen | recreations/composite/src/main/scala/com/szadowsz/starform/model/star/calc/StarCalc.scala | Scala | apache-2.0 | 4,636 |
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.summingbird.builder
import com.twitter.algebird.{Monoid, Semigroup}
import com.twitter.bijection.{Codec, Injection}
import com.twitter.chill.IKryoRegistrar
import com.twitter.chill.java.IterableRegistrar
import com.twitter.storehaus.algebra.MergeableStore
import com.twitter.summingbird._
import com.twitter.summingbird.batch.{BatchID, Batcher}
import com.twitter.summingbird.option.CacheSize
import com.twitter.summingbird.scalding.{Scalding, Service, ScaldingEnv, Sink}
import com.twitter.summingbird.scalding.batch.BatchedStore
import com.twitter.summingbird.service.CompoundService
import com.twitter.summingbird.sink.{CompoundSink, BatchedSinkFromOffline}
import com.twitter.summingbird.source.EventSource
import com.twitter.summingbird.store.CompoundStore
import com.twitter.summingbird.storm.{
MergeableStoreSupplier, StoreWrapper, Storm, StormEnv, StormSource, StormSink
}
import java.io.Serializable
import java.util.Date
/**
* The (deprecated) Summingbird builder API builds up a single
* MapReduce job using a SourceBuilder. After any number of calls to
* flatMap, leftJoin, filter, merge, etc, the user calls
* "groupAndSumTo", equivalent to "sumByKey" in the Producer
* API. This call converts the SourceBuilder into a CompletedBuilder
* and prevents and future flatMap operations.
*
* @author Oscar Boykin
* @author Sam Ritchie
* @author Ashu Singhal
*/
object SourceBuilder {
  // The builder plans for both platforms at once: Scalding (offline) and
  // Storm (online), each side optional.
  type PlatformPair = OptionalPlatform2[Scalding, Storm]
  type Node[T] = Producer[PlatformPair, T]

  // Monotonic counter used to generate unique node names.
  private val nextId = new java.util.concurrent.atomic.AtomicLong(0)

  // Applies `f` to the Options stored under `k`, inserting a default if absent.
  def adjust[T](m: Map[T, Options], k: T)(f: Options => Options) =
    m.updated(k, f(m.getOrElse(k, Options())))

  // Builders combine via ++ (see SourceBuilder.++), which merges their nodes.
  implicit def sg[T]: Semigroup[SourceBuilder[T]] =
    Semigroup.from(_ ++ _)

  // Unique, manifest-tagged name for a fresh node.
  def nextName[T:Manifest]: String =
    "%s_%d".format(manifest[T], nextId.getAndIncrement)

  /** Entry point of the builder API: wraps an EventSource plus a time
   *  extractor into a SourceBuilder, wiring the offline (Scalding) and
   *  online (Storm) sides where available. */
  def apply[T](eventSource: EventSource[T], timeOf: T => Date)
    (implicit mf: Manifest[T], eventCodec: Codec[T]) = {
    implicit val te = TimeExtractor[T](timeOf(_).getTime)
    val newID = nextName[T]
    val scaldingSource =
      eventSource.offline.map( s => Scalding.pipeFactory(s.scaldingSource(_)))
    val stormSource = eventSource.spout.map(Storm.toStormSource(_))
    new SourceBuilder[T](
      Source[PlatformPair, T]((scaldingSource, stormSource)),
      CompletedBuilder.injectionRegistrar[T](eventCodec),
      newID
    )
  }
}
case class SourceBuilder[T: Manifest] private (
    @transient node: SourceBuilder.Node[T],
    @transient registrar: IKryoRegistrar,
    id: String,
    @transient opts: Map[String, Options] = Map.empty
) extends Serializable {
  import SourceBuilder.{ adjust, Node, nextName }

  // Each transformation returns a new immutable builder wrapping a
  // transformed Producer graph.
  def map[U: Manifest](fn: T => U): SourceBuilder[U] = copy(node = node.map(fn))
  def filter(fn: T => Boolean): SourceBuilder[T] = copy(node = node.filter(fn))
  def flatMap[U: Manifest](fn: T => TraversableOnce[U]): SourceBuilder[U] =
    copy(node = node.flatMap(fn))

  /** This may be more efficient if you know you are not changing the values in
   * your flatMap.
   */
  def flatMapKeys[K1, K2, V](fn: K1 => TraversableOnce[K2])(implicit ev: T <:< (K1, V),
    key1Mf: Manifest[K1], key2Mf: Manifest[K2], valMf: Manifest[V]): SourceBuilder[(K2,V)] =
    copy(node = node.asInstanceOf[Node[(K1, V)]].flatMapKeys(fn))

  // Adapter for the legacy FlatMapper interface.
  def flatMapBuilder[U: Manifest](newFlatMapper: FlatMapper[T, U]): SourceBuilder[U] =
    flatMap(newFlatMapper(_))

  /** Writes the converted stream to the sink as a side effect while keeping
   *  the original, unconverted stream flowing through the builder. */
  def write[U](sink: CompoundSink[U])(conversion: T => TraversableOnce[U])
    (implicit batcher: Batcher, mf: Manifest[U]): SourceBuilder[T] = {
    val newNode =
      node.flatMap(conversion).write(
        sink.offline.map(new BatchedSinkFromOffline[U](batcher, _)),
        sink.online.map { supplier => new StormSink[U] { lazy val toFn = supplier() } }
      )
    // Merge the written branch back in and drop its elements (Right side),
    // so downstream sees exactly the original T values (Left side).
    copy(
      node = node.either(newNode).flatMap[T] {
        case Left(t) => Some(t)
        case Right(u) => None
      }
    )
  }

  /** Writes the stream unchanged to the given sink. */
  def write(sink: CompoundSink[T])(implicit batcher: Batcher): SourceBuilder[T] =
    copy(
      node = node.write(
        sink.offline.map(new BatchedSinkFromOffline[T](batcher, _)),
        sink.online.map { supplier => new StormSink[T] { lazy val toFn = supplier() } }
      )
    )

  /** Joins each (K, V) pair against the service's value for K, if any. */
  def leftJoin[K, V, JoinedValue](service: CompoundService[K, JoinedValue])
    (implicit ev: T <:< (K, V), keyMf: Manifest[K], valMf: Manifest[V], joinedMf: Manifest[JoinedValue])
      : SourceBuilder[(K, (V, Option[JoinedValue]))] =
    copy(
      node = node.asInstanceOf[Node[(K, V)]].leftJoin((
        service.offline,
        service.online.map(StoreWrapper[K, JoinedValue](_))
      ))
    )

  /** Set's an Option on all nodes ABOVE this point */
  def set(opt: Any): SourceBuilder[T] = copy(opts = adjust(opts, id)(_.set(opt)))

  /**
   * Complete this builder instance with a BatchStore. At this point,
   * the Summingbird job can be executed on Hadoop.
   */
  def groupAndSumTo[K, V](store: BatchedStore[K, V])(
    implicit ev: T <:< (K, V),
    env: Env,
    keyMf: Manifest[K],
    valMf: Manifest[V],
    keyCodec: Codec[K],
    valCodec: Codec[V],
    batcher: Batcher,
    monoid: Monoid[V]): CompletedBuilder[_, K, V] =
    groupAndSumTo(CompoundStore.fromOffline(store))

  /**
   * Complete this builder instance with a MergeableStore. At this point,
   * the Summingbird job can be executed on Storm.
   */
  def groupAndSumTo[K, V](store: => MergeableStore[(K, BatchID), V])(
    implicit ev: T <:< (K, V),
    env: Env,
    keyMf: Manifest[K],
    valMf: Manifest[V],
    keyCodec: Codec[K],
    valCodec: Codec[V],
    batcher: Batcher,
    monoid: Monoid[V]): CompletedBuilder[_, K, V] =
    groupAndSumTo(CompoundStore.fromOnline(store))

  /**
   * Complete this builder instance with a CompoundStore. At this
   * point, the Summingbird job can be executed on Storm or Hadoop.
   */
  def groupAndSumTo[K, V](store: CompoundStore[K, V])(
    implicit ev: T <:< (K, V),
    env: Env,
    keyMf: Manifest[K],
    valMf: Manifest[V],
    keyCodec: Codec[K],
    valCodec: Codec[V],
    batcher: Batcher,
    monoid: Monoid[V]): CompletedBuilder[_, K, V] = {
    // Planning is environment-specific: project the two-platform node onto the
    // platform selected by `env`, failing if part of the graph exists only for
    // the other platform.
    val cb = env match {
      case scalding: ScaldingEnv =>
        val givenStore = store.offlineStore.getOrElse(sys.error("No offline store given in Scalding mode"))
        // Set the store to reset if needed
        val batchSetStore = scalding
          .initialBatch(batcher)
          .map { givenStore.withInitialBatch(_) }
          .getOrElse(givenStore)
        val newNode = OptionalUnzip2[Scalding, Storm]()(node)._1.map { p =>
          Producer.evToKeyed(p.name(id))
            .sumByKey(batchSetStore)
        }.getOrElse(sys.error("Scalding mode specified alongside some online-only Source, Service or Sink."))
        CompletedBuilder(newNode, registrar, batcher, keyCodec, valCodec, nextName[(K,V)], opts)
      case storm: StormEnv =>
        val supplier = store.onlineSupplier.getOrElse(sys.error("No online store given in Storm mode"))
        val givenStore = MergeableStoreSupplier.from(supplier())
        val newNode = OptionalUnzip2[Scalding, Storm]()(node)._2.map { p =>
          Producer.evToKeyed(p.name(id))
            .sumByKey(givenStore)
        }.getOrElse(sys.error("Storm mode specified alongside some offline-only Source, Service or Sink."))
        CompletedBuilder(newNode, registrar, batcher, keyCodec, valCodec, nextName[(K,V)], opts)
      case _ => sys.error("Unknown environment: " + env)
    }
    // Hand the completed plan to the environment (mutable handoff used by the
    // legacy builder API).
    env.builder = cb
    cb
  }

  // useful when you need to merge two different Event sources
  def ++(other: SourceBuilder[T]): SourceBuilder[T] =
    copy(
      node = node.name(id).merge(other.node.name(other.id)),
      registrar = new IterableRegistrar(registrar, other.registrar),
      id = "merge_" + nextName[T],
      opts = opts ++ other.opts
    )
}
| surabhiiyer/summingbird | summingbird-builder/src/main/scala/com/twitter/summingbird/builder/SourceBuilder.scala | Scala | apache-2.0 | 8,425 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.spark
import com.azure.cosmos.spark.diagnostics.BasicLoggingTrait
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
class RowSerializerPollSpec extends UnitSpec with BasicLoggingTrait {
  //scalastyle:off multiple.string.literals

  "RowSerializer returned to the pool" should "be reused when capacity not exceeded" in {
    // Two structurally equal (but distinct) schema instances must hit the same pool slot.
    val schema = StructType(Seq(StructField("column_A", IntegerType), StructField("column_B", StringType)))
    val sameSchema = StructType(Seq(StructField("column_A", IntegerType), StructField("column_B", StringType)))
    val serializer = RowSerializerPool.getOrCreateSerializer(schema)
    // Returning succeeds while the pool has capacity...
    RowSerializerPool.returnSerializerToPool(schema, serializer) shouldBe true
    // ...and the pooled instance (same reference) is handed back for an equal schema.
    val pooledSerializer = RowSerializerPool.getOrCreateSerializer(sameSchema)
    serializer.eq(pooledSerializer) shouldBe true
    // The pool slot is now empty again, so a fresh serializer must be created.
    val newSerializer = RowSerializerPool.getOrCreateSerializer(sameSchema)
    serializer.eq(newSerializer) shouldBe false
  }

  "RowSerializer " should "be returned to the pool only a limited number of times" in {
    val schema = StructType(Seq(StructField("column01", IntegerType), StructField("column02", StringType)))
    // This test pins the per-schema pool capacity at 256: the first 256 returns succeed...
    for (_ <- 1 to 256) {
      RowSerializerPool.returnSerializerToPool(schema, RowEncoder(schema).createSerializer()) shouldBe true
    }
    logInfo("First 256 attempt to pool succeeded")
    // ...and the 257th return is rejected.
    RowSerializerPool.returnSerializerToPool(schema, RowEncoder(schema).createSerializer()) shouldBe false
  }
  //scalastyle:on multiple.string.literals
}
| Azure/azure-sdk-for-java | sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/RowSerializerPollSpec.scala | Scala | mit | 1,712 |
package io.github.tailhq.dynaml.algebra
import breeze.linalg.{DenseVector, NumericOps, Transpose}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
/**
  * @author tailhq date 13/10/2016.
  * A distributed row (dual) vector stored in blocks.
  *
  * @param data The underlying [[RDD]] of (block index, row-vector block) pairs.
  * @param num_cols Total number of elements, or -1L to compute it from the data.
  * @param num_col_blocks Number of column blocks, or -1L to compute it from the data.
  */
private[dynaml] class SparkBlockedDualVector(data: RDD[(Long, Transpose[DenseVector[Double]])],
                                             num_cols: Long = -1L,
                                             num_col_blocks: Long = -1L)
  extends SparkVectorLike[Transpose[DenseVector[Double]]] with NumericOps[SparkBlockedDualVector] {

  // -1L means "not supplied": derive the block count from the largest block index.
  lazy val colBlocks = num_col_blocks match {
    case -1L => data.keys.max
    case n   => n
  }

  // A dual (row) vector always has exactly one row block.
  lazy val rowBlocks = 1L

  override var vector = data

  // -1L means "not supplied": count elements by summing per-block lengths.
  lazy val cols: Long = num_cols match {
    case -1L => data.map(_._2.inner.length).sum().toLong
    case n   => n
  }

  lazy val rows: Long = 1L

  def _data = vector

  override def repr: SparkBlockedDualVector = this

  /** Transpose: yields the corresponding blocked column vector. */
  def t: SparkBlockedVector = new SparkBlockedVector(data.map { case (idx, block) => (idx, block.t) }, cols, colBlocks)

  def persist: Unit = {
    data.persist(StorageLevel.MEMORY_AND_DISK)
  }

  def unpersist: Unit = {
    data.unpersist()
  }
}
| mandar2812/DynaML | dynaml-core/src/main/scala/io/github/tailhq/dynaml/algebra/SparkBlockedDualVector.scala | Scala | apache-2.0 | 1,363 |
package com.sksamuel.elastic4s.requests.count
import com.sksamuel.elastic4s.requests.searches.queries.QueryBuilderFn
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
object CountBodyBuilderFn {

  /** Builds the JSON body for a count request: an object that is empty unless
    * a query is present, in which case it carries a raw "query" field.
    */
  def apply(request: CountRequest): XContentBuilder = {
    val body = XContentFactory.jsonBuilder()
    request.query.foreach(q => body.rawField("query", QueryBuilderFn(q)))
    body
  }
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/count/CountBodyBuilderFn.scala | Scala | apache-2.0 | 421 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component.spray.client
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization
import spray.httpx.unmarshalling.Unmarshaller
import spray.http._
import spray.httpx.marshalling.Marshaller
trait HttpLiftSupport {

  // json4s formats in scope for the extraction/serialization below.
  implicit def httpFormats = DefaultFormats

  /** Unmarshals a non-empty `application/json` entity into `T` using json4s.
    * Only `HttpEntity.NonEmpty` is matched; an empty entity falls outside this
    * partial function (presumably surfacing as an unmarshalling error in
    * spray — confirm against spray's Unmarshaller behavior).
    */
  implicit def HttpJsonUnmarshaller[T: Manifest] =
    Unmarshaller[T](MediaTypes.`application/json`) {
      case x: HttpEntity.NonEmpty =>
        parse(x.asString).extract[T]
    }

  /** Marshals any `T` to an `application/json` entity via json4s serialization. */
  implicit def HttpJsonMarshaller[T <: AnyRef] =
    Marshaller.of[T](ContentTypes.`application/json`) { (value, contentType, ctx) =>
      ctx.marshalTo(HttpEntity(contentType, Serialization.write(value)))
    }
}
| mjwallin1/wookiee-spray | src/main/scala/com/webtrends/harness/component/spray/client/HttpLiftSupport.scala | Scala | apache-2.0 | 1,471 |
package org.http4s.blaze.util
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets.ISO_8859_1
import org.http4s.Headers
import org.http4s.blaze.pipeline.TailStage
import org.http4s.util.StringWriter
import scodec.bits.ByteVector
import scala.concurrent.{ExecutionContext, Future, Promise}
import scalaz.concurrent.Task
import scalaz.{-\\/, \\/-}
/** ProcessWriter that emits an HTTP/1.x body using chunked transfer coding.
  *
  * `headers` is mutable on purpose: it holds the rendered response head until
  * the first body write, at which point the head is flushed (with either a
  * Content-Length or Transfer-Encoding: chunked header) and `headers` is set
  * to null so later writes emit bare chunks.
  */
class ChunkProcessWriter(private var headers: StringWriter,
                         pipe: TailStage[ByteBuffer],
                         trailer: Task[Headers])
                        (implicit val ec: ExecutionContext) extends ProcessWriter {

  import org.http4s.blaze.util.ChunkProcessWriter._

  /** Writes one body chunk; empty chunks are skipped, since a zero-length
    * chunk would be read as the end-of-body marker in chunked encoding. */
  protected def writeBodyChunk(chunk: ByteVector, flush: Boolean): Future[Unit] = {
    if (chunk.nonEmpty) pipe.channelWrite(encodeChunk(chunk, Nil))
    else Future.successful(())
  }

  /** Writes the final body bytes. If nothing was written yet (`headers` still
    * non-null) the response degenerates to a plain Content-Length response;
    * otherwise the terminating last-chunk and any trailer headers are sent.
    * The resulting Future always completes with `false`. */
  protected def writeEnd(chunk: ByteVector): Future[Boolean] = {
    def writeTrailer = {
      val promise = Promise[Boolean]
      trailer.map { trailerHeaders =>
        if (trailerHeaders.nonEmpty) {
          val rr = new StringWriter(256)
          rr << "0\\r\\n" // Last chunk
          // NOTE(review): `<< h` appears after the name and ": " were already
          // written — confirm it renders only the header *value* here; if it
          // renders "name: value", the name is duplicated on the wire.
          trailerHeaders.foreach( h => rr << h.name.toString << ": " << h << "\\r\\n") // trailers
          rr << "\\r\\n" // end of chunks
          ByteBuffer.wrap(rr.result.getBytes(ISO_8859_1))
        }
        else ChunkEndBuffer
      }.runAsync {
        case \\/-(buffer) =>
          promise.completeWith(pipe.channelWrite(buffer).map(Function.const(false)))
          ()
        case -\\/(t) =>
          promise.failure(t)
          ()
      }
      promise.future
    }

    val f = if (headers != null) { // This is the first write, so we can add a body length instead of chunking
      val h = headers
      headers = null
      if (chunk.nonEmpty) {
        val body = chunk.toByteBuffer
        h << s"Content-Length: ${body.remaining()}\\r\\n\\r\\n"

        // Trailers are optional, so dropping because we have no body.
        val hbuff = ByteBuffer.wrap(h.result.getBytes(ISO_8859_1))
        pipe.channelWrite(hbuff::body::Nil)
      }
      else {
        h << s"Content-Length: 0\\r\\n\\r\\n"
        val hbuff = ByteBuffer.wrap(h.result.getBytes(ISO_8859_1))
        pipe.channelWrite(hbuff)
      }
    } else {
      if (chunk.nonEmpty) writeBodyChunk(chunk, true).flatMap { _ => writeTrailer }
      else writeTrailer
    }

    f.map(Function.const(false))
  }

  /** Renders a chunk-size line: lowercase hex length followed by CRLF. */
  private def writeLength(length: Long): ByteBuffer = {
    val bytes = length.toHexString.getBytes(ISO_8859_1)
    val b = ByteBuffer.allocate(bytes.length + 2)
    b.put(bytes).put(CRLFBytes).flip()
    b
  }

  /** Frames `chunk` as size-line + data + CRLF in front of `last`; on the
    * first write also prepends the head with Transfer-Encoding: chunked and
    * nulls out `headers`. */
  private def encodeChunk(chunk: ByteVector, last: List[ByteBuffer]): List[ByteBuffer] = {
    val list = writeLength(chunk.length)::chunk.toByteBuffer::CRLF::last
    if (headers != null) {
      headers << "Transfer-Encoding: chunked\\r\\n\\r\\n"
      val b = ByteBuffer.wrap(headers.result.getBytes(ISO_8859_1))
      headers = null
      b::list
    } else list
  }
}
object ChunkProcessWriter {
  // Shared framing constants for chunked transfer coding. The master buffers
  // are read-only; accessors hand out duplicates so every caller gets
  // independent position/limit state.
  private val CRLFBytes = "\\r\\n".getBytes(ISO_8859_1)

  private[this] val crlfMaster = ByteBuffer.wrap(CRLFBytes).asReadOnlyBuffer()
  private[this] val lastChunkMaster =
    ByteBuffer.wrap("0\\r\\n\\r\\n".getBytes(ISO_8859_1)).asReadOnlyBuffer()

  private def CRLF = crlfMaster.duplicate()
  private def ChunkEndBuffer = lastChunkMaster.duplicate()
}
| hvesalai/http4s | blaze-core/src/main/scala/org/http4s/blaze/util/ChunkProcessWriter.scala | Scala | apache-2.0 | 3,385 |
package com.lexan.lox.grammar.diagnostics

/** Base type for compile-time diagnostics in this package.
  * `cause` defaults to null to mirror the underlying [[Throwable]] constructor,
  * which accepts a null cause. */
abstract class CompileError(msg: String, cause: Throwable = null) extends Throwable(msg, cause)

/** Diagnostic raised during lexical analysis. */
class LexicalError(msg: String) extends CompileError(msg)
| doczir/lox | src/main/scala/com/lexan/lox/grammar/diagnostics/CompileError.scala | Scala | unlicense | 197 |
import org.scalatest._
class HelloSpec extends FlatSpec with Matchers {
  // Smoke test: verifies the test harness itself is wired up.
  "Hello" should "have tests" in {
    true shouldBe true
  }
}
| cyrillk/sample-scala-project.g8 | src/test/scala/HelloSpec.scala | Scala | apache-2.0 | 139 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.perty.print
import org.neo4j.cypher.internal.frontend.v2_3.perty._
import org.neo4j.cypher.internal.frontend.v2_3.perty.handler.DefaultDocHandler
import scala.reflect.runtime.universe.TypeTag
object pprint {
  /** Renders `value` to a doc using `docGen`, lays it out with `formatter`,
    * and writes the result to the console (no trailing newline). */
  def apply[T: TypeTag, S >: T : TypeTag](value: T,
                                          formatter: DocFormatter = DocFormatters.defaultPageFormatter)
                                         (docGen: DocGenStrategy[S] = DefaultDocHandler.docGen): Unit = {
    val rendered = pprintToString[T, S](value, formatter)(docGen)
    Console.print(rendered)
  }
}
| HuangLS/neo4j | community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/perty/print/pprint.scala | Scala | apache-2.0 | 1,487 |
package android
import java.io.File
import android.Dependencies.{AarLibrary, ApkLibrary, LibraryDependency, LibraryProject}
import com.android.builder.core.{VariantType, AaptPackageProcessBuilder, AndroidBuilder}
import com.android.builder.model.AaptOptions
import com.android.builder.dependency.{LibraryDependency => AndroidLibrary}
import com.android.builder.png.VectorDrawableRenderer
import com.android.ide.common.res2._
import com.android.resources.Density
import com.android.utils.ILogger
import sbt.Keys.TaskStreams
import sbt._
import collection.JavaConverters._
import language.postfixOps
import Dependencies.LibrarySeqOps
/** Helpers for collecting, merging and aapt-packaging Android assets and
  * resources for an sbt project and its library dependencies. */
object Resources {
  /** Collects assets and resources from the project, `extraAssets`/`extraRes`
    * and all (transitive) library dependencies into the merged output
    * directories, using sbt's `FileFunction.cached` to avoid redundant merges.
    * Returns (merged-assets dir, merged-res dir).
    */
  def doCollectResources( bldr: AndroidBuilder
                        , minSdk: Int
                        , noTestApk: Boolean
                        , isLib: Boolean
                        , libs: Seq[LibraryDependency]
                        , layout: ProjectLayout
                        , extraAssets: Seq[File]
                        , extraRes: Seq[File]
                        , renderVectors: Boolean
                        , logger: Logger => ILogger
                        , cache: File
                        , s: TaskStreams
                        )(implicit m: BuildOutput.Converter): (File,File) = {

    val assetBin = layout.mergedAssets
    val assets = layout.assets
    val resTarget = layout.mergedRes
    val rsResources = layout.rsRes

    resTarget.mkdirs()
    assetBin.mkdirs

    // assets contributed by apklib/aar dependencies (transitively)
    val depassets = collectdeps(libs) collect {
      case m: ApkLibrary => m
      case n: AarLibrary => n
    } collect { case n if n.getAssetsFolder.isDirectory => n.getAssetsFolder }

    // copy assets to single location
    depassets ++ (libs collect {
      case r if r.layout.assets.isDirectory => r.layout.assets
    }) foreach { a => IO.copyDirectory(a, assetBin, false, true) }

    extraAssets foreach { a =>
      if (a.isDirectory) IO.copyDirectory(a, assetBin, false, true)
    }

    if (assets.exists) IO.copyDirectory(assets, assetBin, false, true)
    if (noTestApk && layout.testAssets.exists)
      IO.copyDirectory(layout.testAssets, assetBin, false, true)

    // prepare resource sets for merge
    val res = extraRes ++ Seq(layout.res, rsResources) ++
      (libs map { _.layout.res } filter { _.isDirectory })

    s.log.debug("Local/library-project resources: " + res)
    // this needs to wait for other projects to at least finish their
    // apklibs tasks--handled if androidBuild() is called properly
    val depres = collectdeps(libs) collect {
      case m: ApkLibrary => m
      case n: AarLibrary => n
    } collect { case n if n.getResFolder.isDirectory => n.getResFolder }

    s.log.debug("apklib/aar resources: " + depres)
    // NOTE(review): the test-resources branch below adds layout.res even though
    // it checks layout.testRes.isDirectory — layout.testRes may have been
    // intended; confirm before changing (duplicates are removed by .distinct).
    val respaths = depres ++ res.reverse ++
      (if (layout.res.isDirectory) Seq(layout.res) else Seq.empty) ++
      (if (noTestApk && layout.testRes.isDirectory)
        Seq(layout.res) else Seq.empty)

    // Preprocessor for vector drawables; the effective min SDK passed here
    // controls its behavior (presumably whether density-specific PNG fallbacks
    // are generated for pre-21 devices — confirm against VectorDrawableRenderer).
    val vectorprocessor = new VectorDrawableRenderer(
      if (renderVectors) minSdk else math.max(minSdk,21),
      layout.generatedVectors, Set(Density.MEDIUM,
                                   Density.HIGH,
                                   Density.XHIGH,
                                   Density.XXHIGH).asJava,
      SbtLogger(s.log))

    val sets = respaths.distinct flatMap { r =>
      val set = new ResourceSet(r.getAbsolutePath)
      set.addSource(r)
      set.setPreprocessor(vectorprocessor)
      val generated = new GeneratedResourceSet(set)
      set.setGeneratedSet(generated)
      s.log.debug("Adding resource path: " + r)
      List(generated, set)
    }

    val inputs = (respaths flatMap { r => (r ***) get }) filter (n =>
      !n.getName.startsWith(".") && !n.getName.startsWith("_"))

    // Wipe the merged output whenever any dependency's resources change; the
    // next cached invocation below then performs a full re-merge.
    FileFunction.cached(cache / "nuke-res-if-changed", FilesInfo.lastModified) { in =>
      IO.delete(resTarget)
      in
    }(depres.toSet)

    FileFunction.cached(cache / "collect-resources")(
      FilesInfo.lastModified, FilesInfo.exists) { (inChanges,outChanges) =>
      s.log.info("Collecting resources")

      incrResourceMerge(layout, minSdk, resTarget, isLib, libs,
        cache / "collect-resources", logger(s.log), bldr, sets, vectorprocessor, inChanges, s.log)

      ((resTarget ** FileOnlyFilter).get ++ (layout.generatedVectors ** FileOnlyFilter).get).toSet
    }(inputs.toSet)

    (assetBin, resTarget)
  }

  /** Attempts an incremental resource merge from the merger state persisted in
    * `blobDir`; falls back to [[fullResourceMerge]] whenever the state is
    * missing/invalid or a changed file cannot be handled incrementally.
    */
  def incrResourceMerge(layout: ProjectLayout, minSdk: Int, resTarget: File, isLib: Boolean,
                        libs: Seq[LibraryDependency], blobDir: File, logger: ILogger,
                        bldr: AndroidBuilder, resources: Seq[ResourceSet],
                        preprocessor: ResourcePreprocessor,
                        changes: ChangeReport[File],
                        slog: Logger)(implicit m: BuildOutput.Converter) {

    def merge() = fullResourceMerge(layout, minSdk, resTarget, isLib, libs, blobDir,
      logger, bldr, resources, preprocessor, slog)

    val merger = new ResourceMerger
    if (!merger.loadFromBlob(blobDir, true)) {
      slog.debug("Could not load merge blob (no full merge yet?)")
      merge()
    } else if (!merger.checkValidUpdate(resources.asJava)) {
      slog.debug("requesting full merge: !checkValidUpdate")
      merge()
    } else {

      val fileValidity = new FileValidity[ResourceSet]
      // `exists` short-circuits: the first file that forces a full merge stops
      // further incremental processing (merge() was already invoked for it).
      val exists = changes.added ++ changes.removed ++ changes.modified exists {
        file =>
          val status = if (changes.added contains file)
            FileStatus.NEW
          else if (changes.removed contains file)
            FileStatus.REMOVED
          else if (changes.modified contains file)
            FileStatus.CHANGED
          else
            sys.error("Unknown file status: " + file)

          merger.findDataSetContaining(file, fileValidity)
          val vstatus = fileValidity.getStatus

          if (vstatus == FileValidity.FileStatus.UNKNOWN_FILE) {
            merge()
            slog.debug("Incremental merge aborted, unknown file: " + file)
            true
          } else if (vstatus == FileValidity.FileStatus.VALID_FILE) {
            // begin workaround
            // resource merger doesn't seem to actually copy changed files over...
            // values.xml gets merged, but if files are changed...
            val targetFile = resTarget / (
              file relativeTo fileValidity.getSourceFile).get.getPath
            val copy = Seq((file, targetFile))
            status match {
              case FileStatus.NEW =>
              case FileStatus.CHANGED =>
                if (targetFile.exists) IO.copy(copy, false, true)
              case FileStatus.REMOVED => targetFile.delete()
            }
            // end workaround
            try {
              if (!fileValidity.getDataSet.updateWith(
                fileValidity.getSourceFile, file, status, logger)) {
                slog.debug("Unable to handle changed file: " + file)
                merge()
                true
              } else
                false
            } catch {
              case e: RuntimeException =>
                slog.warn("Unable to handle changed file: " + file + ": " + e)
                merge()
                true
            }
          } else
            false
      }
      if (!exists) {
        slog.info("Performing incremental resource merge")
        val writer = new MergedResourceWriter(resTarget,
          bldr.getAaptCruncher(SbtProcessOutputHandler(slog)),
          true, true, layout.publicTxt, layout.mergeBlame,
          preprocessor)
        merger.mergeData(writer, true)
        merger.writeBlobTo(blobDir, writer)
      }
    }
  }

  /** Loads every resource set and performs a complete merge into `resTarget`,
    * persisting the merger state blob for later incremental merges. */
  def fullResourceMerge(layout: ProjectLayout, minSdk: Int, resTarget: File, isLib: Boolean,
                        libs: Seq[LibraryDependency], blobDir: File, logger: ILogger,
                        bldr: AndroidBuilder, resources: Seq[ResourceSet],
                        preprocessor: ResourcePreprocessor, slog: Logger)(implicit m: BuildOutput.Converter) {

    slog.info("Performing full resource merge")
    val merger = new ResourceMerger

    resTarget.mkdirs()

    resources foreach { r =>
      r.loadFromFiles(logger)
      merger.addDataSet(r)
    }
    val writer = new MergedResourceWriter(resTarget,
      bldr.getAaptCruncher(SbtProcessOutputHandler(slog)),
      true, true, layout.publicTxt, layout.mergeBlame, preprocessor)
    merger.mergeData(writer, false)
    merger.writeBlobTo(blobDir, writer)
  }

  /** Runs aapt resource packaging via [[AndroidBuilder.processResources]].
    * When `resApk` is null, R.java/symbols are generated into `gen` instead of
    * producing a resource apk. `synchronized`: presumably aapt invocation via
    * AndroidBuilder is not safe for concurrent sbt tasks — confirm. */
  def aapt(bldr: AndroidBuilder, manifest: File, pkg: String,
           extraParams: Seq[String],
           libs: Seq[LibraryDependency], lib: Boolean, debug: Boolean,
           res: File, assets: File, resApk: String, gen: File, proguardTxt: String,
           logger: Logger) = synchronized {

    gen.mkdirs()

    val options = new AaptOptions {
      override def getIgnoreAssets = null
      override def getNoCompress = null
      override def getFailOnMissingConfigEntry = false
      override def getAdditionalParameters = extraParams.asJava
    }

    val genPath = gen.getAbsolutePath
    val all = collectdeps(libs)
    logger.debug("All libs: " + all)
    logger.debug("packageForR: " + pkg)
    logger.debug("proguard.txt: " + proguardTxt)

    val aaptCommand = new AaptPackageProcessBuilder(manifest, options)
    if (res.isDirectory)
      aaptCommand.setResFolder(res)
    if (assets.isDirectory)
      aaptCommand.setAssetsFolder(assets)
    aaptCommand.setLibraries(all.asJava)
    aaptCommand.setPackageForR(pkg)
    aaptCommand.setResPackageOutput(resApk)
    aaptCommand.setSourceOutputDir(if (resApk == null) genPath else null)
    aaptCommand.setSymbolOutputDir(if (resApk == null) genPath else null)
    aaptCommand.setProguardOutput(proguardTxt)
    aaptCommand.setType(if (lib) VariantType.LIBRARY else VariantType.DEFAULT)
    aaptCommand.setDebuggable(debug)
    bldr.processResources(aaptCommand, true, SbtProcessOutputHandler(logger))
  }

  /** Returns the transitive closure of `libs` (children before parents),
    * de-duplicated via `distinctLibs`. */
  def collectdeps(libs: Seq[AndroidLibrary]): Seq[AndroidLibrary] = {
    libs
      .map(_.getDependencies.asScala)
      .flatMap(collectdeps)
      .++(libs)
      .distinctLibs
  }
}
| aafa/android-sdk-plugin | src/resources.scala | Scala | bsd-3-clause | 10,076 |
package com.softwaremill.codebrag.service.updater
import akka.actor._
import com.typesafe.scalalogging.slf4j.Logging
import com.softwaremill.codebrag.service.commits.CommitImportService
import RepoUpdateActor._
import com.softwaremill.codebrag.repository.Repository
class RepoUpdateActor(importService: CommitImportService, repository: Repository) extends Actor with Logging {

  def receive = {
    case Update(scheduleNext) => // TODO: get rid of this scheduleNext
      try {
        importService.importRepoCommits(repository)
        importService.cleanupStaleBranches(repository)
      } finally {
        // Re-schedule in `finally` so a failed import doesn't stop polling.
        if (scheduleNext) {
          import context.dispatcher
          logger.debug(s"Scheduling next update of ${repository.repoName} in ${NextUpdatesInterval.toString}")
          context.system.scheduler.scheduleOnce(NextUpdatesInterval, self, Update(scheduleNext = true))
        }
      }
  }
}
object RepoUpdateActor {

  import scala.concurrent.duration._

  // Delay constants; InitialDelay is not used in this file — presumably
  // consumed by whatever schedules the first Update message. Confirm at call site.
  val InitialDelay = 3.seconds
  val NextUpdatesInterval = 45.seconds

  // Message triggering one update run; scheduleNext = true re-enqueues another
  // Update after NextUpdatesInterval (see RepoUpdateActor.receive).
  case class Update(scheduleNext: Boolean)
} | softwaremill/codebrag | codebrag-service/src/main/scala/com/softwaremill/codebrag/service/updater/RepoUpdateActor.scala | Scala | agpl-3.0 | 1,100 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
object Queues1 {

  /** Functional queue whose `append` is O(n): it rebuilds the backing list on
    * every call. Deliberately naive ("SlowAppend") example code. */
  class SlowAppendQueue[T](elems: List[T]) { // Not efficient
    def head = elems.head
    def tail = new SlowAppendQueue(elems.tail)
    def append(x: T) = new SlowAppendQueue(elems ::: List(x))
  }

  def main(args: Array[String]): Unit = {
    // The element type must be given explicitly: `new SlowAppendQueue(Nil)`
    // infers T = Nothing, so the subsequent `append 1` fails to type-check.
    val q = new SlowAppendQueue[Int](Nil) append 1 append 2
    println(q)
  }
}
package scala
/** Marker trait for polymorphic function types.
 *
 *  This is the only trait that can be refined with a polymorphic method,
 *  as long as that method is called `apply`, e.g.:
 *    PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R }
 *  This type will be erased to FunctionN.
 */
trait PolyFunction
| som-snytt/dotty | library/src/scala/PolyFunction.scala | Scala | apache-2.0 | 337 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.db
import com.netflix.iep.config.ConfigManager
object Limits {

  // Dynamic config scope holding the limit settings read below.
  private val dbConfig = ConfigManager.dynamicConfig().getConfig("atlas.core.db")

  /** Limit read from `atlas.core.db.max-lines`; re-read on each access. */
  def maxLines: Int = dbConfig.getInt("max-lines")

  /** Limit read from `atlas.core.db.max-datapoints`; re-read on each access. */
  def maxDatapoints: Int = dbConfig.getInt("max-datapoints")
}
| brharrington/atlas | atlas-core/src/main/scala/com/netflix/atlas/core/db/Limits.scala | Scala | apache-2.0 | 891 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtInteger}
import uk.gov.hmrc.ct.computations.calculations.ProfitAndLossCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
case class CP44(value: Int) extends CtBoxIdentifier(name = "Profit or losses before tax") with CtInteger

object CP44 extends Calculated[CP44, ComputationsBoxRetriever] with ProfitAndLossCalculator {

  /** Computes CP44 from the retrieved input boxes via
    * `calculateGrossProfitOrLossBeforeTax` (from [[ProfitAndLossCalculator]]). */
  override def calculate(fieldValueRetriever: ComputationsBoxRetriever): CP44 = {
    val cp14  = fieldValueRetriever.retrieveCP14()
    val cp40  = fieldValueRetriever.retrieveCP40()
    val cp43  = fieldValueRetriever.retrieveCP43()
    val cp501 = fieldValueRetriever.retrieveCP501()
    val cp502 = fieldValueRetriever.retrieveCP502()
    calculateGrossProfitOrLossBeforeTax(cp14 = cp14, cp40 = cp40, cp43 = cp43, cp501 = cp501, cp502 = cp502)
  }
}
| scottcutts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP44.scala | Scala | apache-2.0 | 1,558 |
package com.landoop.streamreactor.connect.hive
import com.landoop.streamreactor.connect.hive.kerberos.UgiExecute
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.kafka.connect.data.Schema
import org.apache.orc.OrcFile.EncodingStrategy
import org.apache.orc._
package object orc {

  /** Creates an ORC [[Writer]] at `path`, applying all settings from `config`.
    * Deletes a pre-existing file first when `config.overwrite` is set.
    */
  def createOrcWriter(path: Path, schema: TypeDescription, config: OrcSinkConfig)
                     (implicit fs: FileSystem): Writer = {
    val options = OrcFile.writerOptions(null, fs.getConf).setSchema(schema)

    options.compress(config.compressionKind)
    options.encodingStrategy(config.encodingStrategy)
    options.blockPadding(config.blockPadding)
    options.version(OrcFile.Version.V_0_12)
    // Join the column names into a single comma-separated spec. The previous
    // `bloomFilterColumns.map(_.mkString(","))` mapped over each *name* and
    // interleaved commas between its characters ("col1" -> "c,o,l,1"), then
    // invoked the option once per column.
    if (config.bloomFilterColumns.nonEmpty)
      options.bloomFilterColumns(config.bloomFilterColumns.mkString(","))
    config.rowIndexStride.foreach(options.rowIndexStride)
    config.blockSize.foreach(options.blockSize)
    config.stripeSize.foreach(options.stripeSize)

    if (config.overwrite && fs.exists(path))
      fs.delete(path, false)

    OrcFile.createWriter(path, options)
  }

  /** Source for reading ORC files at `path`. */
  def source(path: Path, config: OrcSourceConfig, ugi:UgiExecute)
            (implicit fs: FileSystem) = new OrcSource(path, config, ugi)

  /** Sink for writing records with the given Kafka Connect `schema` to `path`. */
  def sink(path: Path, schema: Schema, config: OrcSinkConfig)
          (implicit fs: FileSystem) = new OrcSink(path, schema, config)
}
// Placeholder: the ORC source currently has no tunable settings.
case class OrcSourceConfig()

// Writer settings consumed by createOrcWriter; None means "use ORC's default".
// batchSize is documented as matching ORC's default of 1024.
case class OrcSinkConfig(overwrite: Boolean = false,
                         batchSize: Int = 1024, // orc default is 1024
                         encodingStrategy: EncodingStrategy = EncodingStrategy.COMPRESSION,
                         compressionKind: CompressionKind = CompressionKind.SNAPPY,
                         blockPadding: Boolean = true,
                         blockSize: Option[Long] = None,
                         stripeSize: Option[Long] = None,
                         bloomFilterColumns: Seq[String] = Nil,
                         rowIndexStride: Option[Int] = None)
| datamountaineer/stream-reactor | kafka-connect-hive/src/main/scala/com/landoop/streamreactor/connect/hive/orc/package.scala | Scala | apache-2.0 | 1,975 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.std
import slamdata.Predef._
import quasar._, SemanticError._
import quasar.fp._
import quasar.fp.ski._
import quasar.frontend.logicalplan.{LogicalPlan => LP, _}
import scala.util.matching.Regex
import matryoshka._
import matryoshka.implicits._
import scalaz._, Scalaz._, Validation.{success, failureNel}
import shapeless.{Data => _, :: => _, _}
trait StringLib extends Library {
  /** Typer for binary string functions: folds to a constant when both
    * arguments are known strings, and stays at `Type.Str` when both sides are
    * (possibly non-constant) strings. */
  private def stringApply(f: (String, String) => String): Func.Typer[nat._2] =
    partialTyper[nat._2] {
      case Sized(Type.Const(Data.Str(a)), Type.Const(Data.Str(b))) => Type.Const(Data.Str(f(a, b)))

      case Sized(Type.Str, Type.Const(Data.Str(_))) => Type.Str
      case Sized(Type.Const(Data.Str(_)), Type.Str) => Type.Str
      case Sized(Type.Str, Type.Str) => Type.Str
    }
  // TODO: variable arity
  /** Binary string concatenation. The simplifier drops a constant-"" operand
    * on either side; constant operands are folded by `stringApply`. */
  val Concat = BinaryFunc(
    Mapping,
    "Concatenates two (or more) string values",
    Type.Str,
    Func.Input2(Type.Str, Type.Str),
    new Func.Simplifier {
      def apply[T]
        (orig: LP[T])
        (implicit TR: Recursive.Aux[T, LP], TC: Corecursive.Aux[T, LP]) =
        orig match {
          case InvokeUnapply(_, Sized(Embed(Constant(Data.Str(""))), Embed(second))) =>
            second.some
          case InvokeUnapply(_, Sized(Embed(first), Embed(Constant(Data.Str(""))))) =>
            first.some
          case _ => None
        }
    },
    stringApply(_ + _),
    basicUntyper)
  /** Translates a SQL LIKE pattern into an anchored Java regex:
    * `_` becomes `.`, `%` becomes `.*`, and regex metacharacters are escaped.
    * `escapeChar`, when provided, only neutralises an immediately following
    * `%` or `_` (see the NB comment below). */
  private def regexForLikePattern(pattern: String, escapeChar: Option[Char]):
      String = {
    def sansEscape(pat: List[Char]): List[Char] = pat match {
      case '_' :: t => '.' +: escape(t)
      case '%' :: t => ".*".toList ⊹ escape(t)
      case c :: t =>
        if ("\\\\^$.|?*+()[{".contains(c))
          '\\\\' +: c +: escape(t)
        else c +: escape(t)
      case Nil => Nil
    }

    @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
    def escape(pat: List[Char]): List[Char] =
      escapeChar match {
        case None => sansEscape(pat)
        case Some(esc) =>
          pat match {
            // NB: We only handle the escape char when it’s before a special
            // char, otherwise you run into weird behavior when the escape
            // char _is_ a special char. Will change if someone can find
            // an actual definition of SQL’s semantics.
            case `esc` :: '%' :: t => '%' +: escape(t)
            case `esc` :: '_' :: t => '_' +: escape(t)
            case l => sansEscape(l)
          }
      }
    "^" + escape(pattern.toList).mkString + "$"
  }
  // TODO: This is here (rather than converted to `Search` in `sql.compile`)
  // until we can constant-fold as we compile.
  /** SQL LIKE. When pattern and escape are constants, the simplifier rewrites
    * into a case-sensitive [[Search]] over the regex from
    * `regexForLikePattern`; a multi-character escape string blocks the
    * rewrite (returns None). */
  val Like = TernaryFunc(
    Mapping,
    "Determines if a string value matches a pattern.",
    Type.Bool,
    Func.Input3(Type.Str, Type.Str, Type.Str),
    new Func.Simplifier {
      def apply[T]
        (orig: LP[T])
        (implicit TR: Recursive.Aux[T, LP], TC: Corecursive.Aux[T, LP]) =
        orig match {
          case InvokeUnapply(_, Sized(Embed(str), Embed(Constant(Data.Str(pat))), Embed(Constant(Data.Str(esc))))) =>
            if (esc.length > 1)
              None
            else
              Search(str.embed,
                constant[T](Data.Str(regexForLikePattern(pat, esc.headOption))).embed,
                constant[T](Data.Bool(false)).embed).some
          case _ => None
        }
    },
    constTyper(Type.Bool),
    basicUntyper)
import java.util.regex.Pattern
def patternInsensitive(pattern: String, insen: Boolean): String =
if (insen) "(?i)" ⊹ pattern else pattern
def matchAnywhere(str: String, pattern: Pattern, insen: Boolean) =
pattern.matcher(str).find()
  /** Regex search. Constant-folds only when subject, pattern and the
    * case-insensitivity flag are all constants; a pattern that fails to
    * compile yields no typing result (None). */
  val Search = TernaryFunc(
    Mapping,
    "Determines if a string value matches a regular expression. If the third argument is true, then it is a case-insensitive match.",
    Type.Bool,
    Func.Input3(Type.Str, Type.Str, Type.Bool),
    noSimplification,
    partialTyperOV[nat._3] {
      case Sized(Type.Const(Data.Str(str)), Type.Const(Data.Str(pattern)), Type.Const(Data.Bool(insen))) =>
        val compiledPattern = Try(Pattern.compile(patternInsensitive(pattern, insen))).toOption
        compiledPattern.map(p => success(Type.Const(Data.Bool(matchAnywhere(str, p, insen)))))
      case _ => None
    },
    basicUntyper)
  /** String length in characters; constant-folds for a constant argument. */
  val Length = UnaryFunc(
    Mapping,
    "Counts the number of characters in a string.",
    Type.Int,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => Type.Const(Data.Int(str.length))
      case Sized(Type.Str) => Type.Int
    },
    basicUntyper)
  /** Lower-cases a string; constant-folds for a constant argument. */
  val Lower = UnaryFunc(
    Mapping,
    "Converts the string to lower case.",
    Type.Str,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.Str(str))) =>
        Type.Const(Data.Str(str.toLowerCase))
      case Sized(Type.Str) => Type.Str
    },
    basicUntyper)
  /** Upper-cases a string; constant-folds for a constant argument. */
  val Upper = UnaryFunc(
    Mapping,
    "Converts the string to upper case.",
    Type.Str,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.Str(str))) =>
        Type.Const(Data.Str(str.toUpperCase))
      case Sized(Type.Str) => Type.Str
    },
    basicUntyper)
/** Substring which always gives a result, no matter what offsets are provided.
* Reverse-engineered from MongoDb's \\$substr op, for lack of a better idea
* of how this should work. Note: if `start` < 0, the result is `""`.
* If `length` < 0, then result includes the rest of the string. Otherwise
* the behavior is as you might expect.
*/
def safeSubstring(str: String, start: Int, length: Int): String =
if (start < 0 || start > str.length) ""
else if (length < 0) str.substring(start, str.length)
else str.substring(start, (start + length) min str.length)
  /** SQL SUBSTRING, with [[safeSubstring]] semantics.
    * The simplifier pre-applies a constant, non-zero `from` offset so the
    * residual expression always carries from = 0; the typer constant-folds
    * and short-circuits the out-of-range / zero-length cases to "".
    * NB: case order below matters — the short-circuit cases must precede the
    * generic Str/Int cases. */
  val Substring: TernaryFunc = TernaryFunc(
    Mapping,
    "Extracts a portion of the string",
    Type.Str,
    Func.Input3(Type.Str, Type.Int, Type.Int),
    new Func.Simplifier {
      def apply[T]
        (orig: LP[T])
        (implicit TR: Recursive.Aux[T, LP], TC: Corecursive.Aux[T, LP]) =
        orig match {
          case InvokeUnapply(f @ TernaryFunc(_, _, _, _, _, _, _), Sized(
            Embed(Constant(Data.Str(str))),
            Embed(Constant(Data.Int(from))),
            for0)) if from != 0 =>
            // Apply the constant offset now; an out-of-range offset collapses
            // the whole invocation to the empty string.
            if (from < 0 || from > str.length) Constant[T](Data.Str("")).some
            else
              Invoke(f, Func.Input3(
                Constant[T](Data.Str(str.substring(from.intValue))).embed,
                Constant[T](Data.Int(0)).embed,
                for0)).some
          case _ => None
        }
    },
    partialTyperV[nat._3] {
      case Sized(
        Type.Const(Data.Str(str)),
        Type.Const(Data.Int(from)),
        Type.Const(Data.Int(for0))) => {
        success(Type.Const(Data.Str(safeSubstring(str, from.intValue, for0.intValue))))
      }
      case Sized(Type.Const(Data.Str(str)), Type.Const(Data.Int(from)), _)
        if str.length <= from =>
        success(Type.Const(Data.Str("")))
      case Sized(_, Type.Const(Data.Int(from)), _)
        if from < 0 =>
        success(Type.Const(Data.Str("")))
      case Sized(_, _, Type.Const(Data.Int(for0)))
        if for0.intValue ≟ 0 =>
        success(Type.Const(Data.Str("")))
      case Sized(Type.Const(Data.Str(_)), Type.Const(Data.Int(_)), Type.Int) =>
        success(Type.Str)
      case Sized(Type.Const(Data.Str(_)), Type.Int, Type.Const(Data.Int(_))) =>
        success(Type.Str)
      case Sized(Type.Const(Data.Str(_)), Type.Int, Type.Int) =>
        success(Type.Str)
      case Sized(Type.Str, Type.Const(Data.Int(_)), Type.Const(Data.Int(_))) =>
        success(Type.Str)
      case Sized(Type.Str, Type.Const(Data.Int(_)), Type.Int) =>
        success(Type.Str)
      case Sized(Type.Str, Type.Int, Type.Const(Data.Int(_))) =>
        success(Type.Str)
      case Sized(Type.Str, Type.Int, Type.Int) =>
        success(Type.Str)
      case Sized(Type.Str, _, _) =>
        failureNel(GenericError("expected integer arguments for SUBSTRING"))
      case Sized(t, _, _) => failureNel(TypeError(Type.Str, t, None))
    },
    basicUntyper)
/** Splits a string on a literal (regex-quoted) delimiter into an array of
 * strings. Constant-folds when both arguments are constants; note the -1
 * limit to String.split so trailing empty segments are preserved.
 */
val Split = BinaryFunc(
Mapping,
"Splits a string into an array of substrings based on a delimiter.",
Type.FlexArr(0, None, Type.Str),
Func.Input2(Type.Str, Type.Str),
noSimplification,
partialTyperV[nat._2] {
// Both constant: fold to a constant array of string segments.
case Sized(Type.Const(Data.Str(str)), Type.Const(Data.Str(delimiter))) =>
success(Type.Const(Data.Arr(str.split(Regex.quote(delimiter), -1).toList.map(Data.Str(_)))))
// Otherwise: typecheck both operands against Str, accumulating errors.
case Sized(strT, delimiterT) =>
(Type.typecheck(Type.Str, strT).leftMap(nel => nel.map(ι[SemanticError])) |@|
Type.typecheck(Type.Str, delimiterT).leftMap(nel => nel.map(ι[SemanticError])))((_, _) => Type.FlexArr(0, None, Type.Str))
},
basicUntyper)
/** Coerces the literal strings "true" and "false" to boolean constants.
 * Any other constant string fails with InvalidStringCoercion; a string of
 * unknown value simply types as Bool. Untyping routes through ToString so
 * the inverse of the coercion is a string.
 * (Fixes the typo "satisify" in the user-visible description.)
 */
val Boolean = UnaryFunc(
  Mapping,
  "Converts the strings “true” and “false” into boolean values. This is a partial function – arguments that don’t satisfy the constraint have undefined results.",
  Type.Bool,
  Func.Input1(Type.Str),
  noSimplification,
  partialTyperV[nat._1] {
    case Sized(Type.Const(Data.Str("true"))) =>
      success(Type.Const(Data.Bool(true)))
    case Sized(Type.Const(Data.Str("false"))) =>
      success(Type.Const(Data.Bool(false)))
    // Constant strings other than "true"/"false" are a coercion error.
    case Sized(Type.Const(Data.Str(str))) =>
      failureNel(InvalidStringCoercion(str, List("true", "false").right))
    case Sized(Type.Str) => success(Type.Bool)
  },
  untyper[nat._1](x => ToString.tpe(Func.Input1(x)).map(Func.Input1(_))))
// Shape regexes used when recognizing coercible string forms.
// NOTE(review): in floatRegex the fractional part is "(?:.\\d+)?" with an
// UNescaped dot — as a regex, `.` matches any character, so e.g. "1x5" would
// match; presumably "\\." was intended. Confirm before changing.
// NOTE(review): the backslashes appear doubled here ("\\\\d"); verify against
// the original source whether single-escaped "\\d" was intended.
val intRegex = "[+-]?\\\\d+"
val floatRegex = intRegex + "(?:.\\\\d+)?(?:[eE]" + intRegex + ")?"
val dateRegex = "(?:\\\\d{4}-\\\\d{2}-\\\\d{2}|\\\\d{8})"
val timeRegex = "\\\\d{2}(?::?\\\\d{2}(?::?\\\\d{2}(?:\\\\.\\\\d{1,9})?)?)?Z?"
val timestampRegex = dateRegex + "T" + timeRegex
/** Coerces a string containing an integer to an integer constant, via
 * BigInt parsing. Non-numeric constant strings fail with
 * InvalidStringCoercion; unknown strings type as Int.
 * (Fixes the typo "satisify" in the user-visible description.)
 */
val Integer = UnaryFunc(
  Mapping,
  "Converts strings containing integers into integer values. This is a partial function – arguments that don’t satisfy the constraint have undefined results.",
  Type.Int,
  Func.Input1(Type.Str),
  noSimplification,
  partialTyperV[nat._1] {
    case Sized(Type.Const(Data.Str(str))) =>
      // BigInt throws on malformed input; translate that into a coercion error.
      \\/.fromTryCatchNonFatal(BigInt(str)).fold(
        κ(failureNel(InvalidStringCoercion(str, "a string containing an integer".left))),
        i => success(Type.Const(Data.Int(i))))
    case Sized(Type.Str) => success(Type.Int)
  },
  untyper[nat._1](x => ToString.tpe(Func.Input1(x)).map(Func.Input1(_))))
/** Coerces a string containing a decimal number to a decimal constant, via
 * BigDecimal parsing. Non-numeric constant strings fail with
 * InvalidStringCoercion; unknown strings type as Dec.
 * (Fixes two typos in user-visible strings: "satisify" -> "satisfy" and
 * "an decimal number" -> "a decimal number".)
 */
val Decimal = UnaryFunc(
  Mapping,
  "Converts strings containing decimals into decimal values. This is a partial function – arguments that don’t satisfy the constraint have undefined results.",
  Type.Dec,
  Func.Input1(Type.Str),
  noSimplification,
  partialTyperV[nat._1] {
    case Sized(Type.Const(Data.Str(str))) =>
      // BigDecimal throws on malformed input; translate into a coercion error.
      \\/.fromTryCatchNonFatal(BigDecimal(str)).fold(
        κ(failureNel(InvalidStringCoercion(str, "a string containing a decimal number".left))),
        i => success(Type.Const(Data.Dec(i))))
    case Sized(Type.Str) => success(Type.Dec)
  },
  untyper[nat._1](x => ToString.tpe(Func.Input1(x)).map(Func.Input1(_))))
/** Coerces the literal string "null" to the null constant. Any other
 * constant string is a coercion error; an unknown string types as Null.
 * (Fixes the typo "satisify" in the user-visible description.)
 */
val Null = UnaryFunc(
  Mapping,
  "Converts strings containing “null” into the null value. This is a partial function – arguments that don’t satisfy the constraint have undefined results.",
  Type.Null,
  Func.Input1(Type.Str),
  noSimplification,
  partialTyperV[nat._1] {
    case Sized(Type.Const(Data.Str("null"))) => success(Type.Const(Data.Null))
    // Any other constant string cannot be coerced to null.
    case Sized(Type.Const(Data.Str(str))) =>
      failureNel(InvalidStringCoercion(str, List("null").right))
    case Sized(Type.Str) => success(Type.Null)
  },
  untyper[nat._1](x => ToString.tpe(Func.Input1(x)).map(Func.Input1(_))))
/** Renders any primitive ("syntaxed") value as a string; constant-folds
 * constant inputs. The untyper inverts the conversion by trying each
 * string-coercion function in turn (null, bool, int, dec, temporal types,
 * interval, id) until one succeeds.
 */
val ToString: UnaryFunc = UnaryFunc(
Mapping,
"Converts any primitive type to a string.",
Type.Str,
Func.Input1(Type.Syntaxed),
noSimplification,
partialTyperV[nat._1] {
case Sized(Type.Const(data)) => (data match {
case Data.Str(str) => success(str)
case Data.Null => success("null")
case Data.Bool(b) => success(b.shows)
case Data.Int(i) => success(i.shows)
case Data.Dec(d) => success(d.shows)
// Temporal types rely on their java.time toString renderings.
case Data.OffsetDate(t) =>
success(t.toString)
case Data.OffsetDateTime(t) =>
success(t.toString)
case Data.OffsetTime(t) =>
success(t.toString)
case Data.LocalDate(t) =>
success(t.toString)
case Data.LocalDateTime(t) =>
success(t.toString)
case Data.LocalTime(t) =>
success(t.toString)
case Data.Interval(i) => success(i.toString)
case Data.Id(i) => success(i.toString)
// NB: Should not be able to hit this case, because of the domain.
case other =>
failureNel(
TypeError(
Type.Syntaxed,
other.dataType,
"can not convert aggregate types to String".some):SemanticError)
}).map(s => Type.Const(Data.Str(s)))
case Sized(_) => success(Type.Str)
},
partialUntyperV[nat._1] {
case x @ Type.Const(_) =>
// Try each inverse coercion in turn; <+> keeps the first success.
(Null.tpe(Func.Input1(x)) <+>
Boolean.tpe(Func.Input1(x)) <+>
Integer.tpe(Func.Input1(x)) <+>
Decimal.tpe(Func.Input1(x)) <+>
DateLib.OffsetDateTime.tpe(Func.Input1(x)) <+>
DateLib.OffsetTime.tpe(Func.Input1(x)) <+>
DateLib.OffsetDate.tpe(Func.Input1(x)) <+>
DateLib.LocalDateTime.tpe(Func.Input1(x)) <+>
DateLib.LocalTime.tpe(Func.Input1(x)) <+>
DateLib.LocalDate.tpe(Func.Input1(x)) <+>
DateLib.Interval.tpe(Func.Input1(x)) <+>
IdentityLib.ToId.tpe(Func.Input1(x))
).map(Func.Input1(_))
})
}
object StringLib extends StringLib
| jedesah/Quasar | frontend/src/main/scala/quasar/std/string.scala | Scala | apache-2.0 | 14,905 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2013 Cristian Vrabie
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.postmark
import spray.json._
import scala.collection.mutable.ListBuffer
import spray.http.{MediaTypes, MediaType}
import com.postmark.Message.Attachment
import java.io._
import org.parboiled.common.Base64
import scala.Some
import java.util.Calendar
import java.text.SimpleDateFormat
/**
* User: cvrabie
* Date: 19/08/2013
*/
/** One outbound Postmark email message.
 * Field names are deliberately UpperCamelCase: they map one-to-one onto the
 * JSON property names the Postmark API expects (serialized by the formats in
 * the companion object). The constructor validates its invariants eagerly
 * and throws InvalidMessage when they are violated.
 */
case class Message(
From: String,
To: Option[String],
Cc: Option[String],
Bcc: Option[String],
Subject: Option[String],
Tag: Option[String],
HtmlBody: Option[String],
TextBody: Option[String],
ReplyTo: Option[String],
Headers: Seq[(String,String)],
Attachments: Option[Seq[Attachment]]
){
// Invariant: at least one recipient across To/Cc/Bcc.
if(To.isEmpty && Cc.isEmpty && Bcc.isEmpty)
throw new InvalidMessage("You need at least one recipient!")
// Invariant: the message must have a body of some kind.
if(TextBody.isEmpty && HtmlBody.isEmpty)
throw new InvalidMessage("Provide either email TextBody or HtmlBody or both.")
// Comma-joined view of all recipients; transient so it is not serialized.
@transient
lazy val recipients = Array(To,Cc,Bcc).flatten.mkString(",")
}
object Message extends DefaultJsonProtocol{
case class Attachment(
Name: String,
ContentType: MediaType,
Content: String
)
/** JSON codec for spray MediaType: written as its string value (e.g.
 * "image/png"), read back through the extension-based type detector.
 */
implicit val contentTypeFormat = new JsonFormat[MediaType]{
def write(obj: MediaType) = JsString(obj.value)
def read(json: JsValue) = json match {
case JsString(str) => Attachment.defaultTypeDetector(str)
case js => throw new DeserializationException("Expecting MediaType as string but got "+js)
}
}
/** JSON codec for header pairs, using Postmark's wire shape:
 * [{"Name": ..., "Value": ...}, ...]. Reading fails fast with a
 * DeserializationException on any malformed element.
 */
implicit val headersJsonFormat = new JsonFormat[Seq[(String,String)]] {
def write(obj: Seq[(String, String)]) = JsArray(obj.map{
case (name,value) => new JsObject(Map[String,JsValue](
"Name" -> JsString(name),
"Value" -> JsString(value)
))
}:_*)
def read(json: JsValue) = json match {
case JsArray(elems) => elems.map{
case JsObject(fields) => (fields.get("Name"),fields.get("Value")) match {
case (Some(JsString(name)),Some(JsString(value))) => (name,value)
case (None,_) => throw new DeserializationException("Expecting header object but the 'Name' field is missing!")
case (_,None) => throw new DeserializationException("Expecting header object but the 'Value' fields is missing!")
case other => throw new DeserializationException("""Expecting {"Name":STRING,"Value":String} but got """+other)
}
case other => throw new DeserializationException("""Expecting {"Name":STRING,"Value":String} but got """+other)
}
case other => throw new DeserializationException("Expecting an array of header objects but got "+other)
}
}
// Codec for Attachment, derived from its three fields.
implicit val attachmentJsonFormat = this.jsonFormat3(Attachment.apply _)
// Derived 11-field codec for Message; wrapped below so that an empty
// Headers array is omitted from the serialized JSON entirely.
val defaultMessageJsonFormat = this.jsonFormat11(Message.apply _)
implicit val messageJsonFormat = new RootJsonFormat[Message] {
def write(obj: Message) = {
val delegate = obj.toJson(defaultMessageJsonFormat)
val fields = delegate.asJsObject.fields
fields.get("Headers") match {
//if the headers is an empty array don't serialize it
case Some(JsArray(headers)) if headers.isEmpty => JsObject(fields - "Headers")
case _ => delegate
}
}
def read(json: JsValue) = json.convertTo[Message](defaultMessageJsonFormat)
}
// Codecs for the API's success (Receipt) and failure (Rejection) payloads.
implicit val receiptJsonFormat = this.jsonFormat5(Receipt.apply _)
implicit val rejectionJsonFormat = this.jsonFormat2(Rejection.apply _)
/** Reader for a batch-send response: each element is a Receipt when it
 * carries a "MessageID" field, otherwise a Rejection. Write support is
 * intentionally absent (JsNull) — this format is only used for reading.
 */
implicit val receiptAndRejectionArrayReader = new RootJsonFormat[Array[Either[Message.Rejection,Message.Receipt]]] {
def read(json: JsValue) = json match {
case JsArray(arr) => arr.toArray.map( _ match {
case elem@JsObject(obj) if obj.get("MessageID").isDefined => Right(elem.convertTo[Message.Receipt])
case elem@JsObject(obj) => Left(elem.convertTo[Message.Rejection])
case elem => throw new DeserializationException("Expecting either Message.Rejection or Message.Receipt but got"+elem)
})
case other => throw new DeserializationException("Expecting Array of Message.Rejection/Message.Receipt but got"+other)
}
def write(obj: Array[Either[Rejection, Receipt]]) = JsNull
}
/** Helpers for building Attachments from files or raw bytes.
 * Content is Base64-encoded (RFC 2045, no line breaks) as Postmark expects.
 */
object Attachment{
// Captures the extension (text after the final dot) of a file name.
protected val extensionRegex = "\\\\.([a-zA-Z0-9-_]*)$".r
protected def extension(file:File) = extensionRegex findFirstMatchIn file.getName map(_.group(1))
// Maps a file extension to a MediaType, defaulting to octet-stream.
def defaultTypeDetector:String=>MediaType = (ext:String) =>
MediaTypes.forExtension(ext.toLowerCase)
.getOrElse(MediaTypes.`application/octet-stream`)
protected val Base64Encoder = Base64.rfc2045()
protected val BUFFER_SIZE = 1024*10
//TODO this reads the entire file in memory. we should stream files with a custom Marshaller if possible!
protected def readFile(file:File):Array[Byte] = (file.exists(), file.length()) match {
case (false, _) => throw new IOException("File %s does not exist!".format(file.getAbsolutePath))
case (true, length) =>
val stream = new BufferedInputStream(new FileInputStream(file), BUFFER_SIZE)
val buffer = new Array[Byte](BUFFER_SIZE)
val out = new ByteArrayOutputStream(file.length().toInt)
// Streams are closed even if the recursive read throws.
try readFile(stream,out, buffer) finally {
stream.close()
out.close()
}
}
// Tail-recursive pump: copies BUFFER_SIZE chunks until EOF, then returns
// the accumulated bytes.
protected def readFile(stream:BufferedInputStream, out:ByteArrayOutputStream, buffer:Array[Byte]):Array[Byte] = {
val read = stream.read(buffer, 0, BUFFER_SIZE)
if(read <= 0) {
out.flush()
out.toByteArray
} else {
out.write(buffer,0,read)
readFile(stream, out, buffer)
}
}
// Builds an Attachment from a file, detecting the media type from its
// extension (caller may supply a custom detector implicitly).
def fromFile(file:File)(implicit typeDetector:String=>MediaType = defaultTypeDetector) =
fromBytes(file.getName, typeDetector(extension(file).getOrElse("")),readFile(file))
def fromBytes(name:String, t:MediaType, bytes:Array[Byte]) =
Attachment(name, t, Base64Encoder.encodeToString(bytes,false))
}
/** Mutable fluent builder for [[Message]]. Not thread-safe; intended for
 * single-threaded, call-chained construction ending in build.
 *
 * Bug fix: the recipient-limit check previously summed To + From + Bcc,
 * i.e. it counted the sender (always 0 or 1) and ignored Cc. Postmark's
 * limit applies to the combined To/Cc/Bcc recipient count, so the check
 * now sums To + Cc + Bcc.
 */
class Builder{
  private var From:Option[String] = None
  private val To:ListBuffer[String] = ListBuffer.empty
  private val Cc:ListBuffer[String] = ListBuffer.empty
  private val Bcc:ListBuffer[String] = ListBuffer.empty
  private var Subject:Option[String] = None
  private val Tag:ListBuffer[String] = ListBuffer.empty
  private var HtmlBody:Option[String] = None
  private var TextBody:Option[String] = None
  private var ReplyTo:Option[String] = None
  private val Headers:ListBuffer[(String,String)] = ListBuffer.empty
  private val Attachments:ListBuffer[Attachment] = ListBuffer.empty

  // Single-value setters / single-element appenders; each returns this.
  def from(from:String):Builder = { From = Some(from); this }
  def to(to:String):Builder = { To += to; this }
  def cc(cc:String):Builder = { Cc += cc; this }
  def bcc(bcc:String):Builder = { Bcc += bcc; this }
  def subject(subject:String):Builder = { Subject = Some(subject); this }
  def tag(tag:String):Builder = { Tag += tag; this }
  def htmlBody(htmlBody:String):Builder = { HtmlBody = Some(htmlBody); this }
  def textBody(textBody:String):Builder = { TextBody= Some(textBody); this }
  def replyTo(replyTo:String):Builder = { ReplyTo = Some(replyTo); this }
  def header(header:(String,String)):Builder = { Headers += header; this }
  def attachment(attachment:Attachment):Builder = { Attachments += attachment; this }
  def attachment(file:File):Builder = { Attachments += Attachment.fromFile(file); this }
  def attachment(path:String):Builder = { Attachments += Attachment.fromFile(new File(path)); this }
  def attachment(name:String, t:MediaType, bytes:Array[Byte]):Builder = { Attachments +=Attachment.fromBytes(name, t, bytes); this }

  // Varargs appenders.
  def to(to:String*):Builder = { To ++= to; this }
  def cc(cc:String*):Builder = { Cc ++= cc; this }
  def bcc(bcc:String*):Builder = { Bcc ++= bcc; this }
  def tags(tags:String*):Builder = { Tag ++= tags; this }
  def headers(headers:(String,String)*):Builder = { Headers ++= headers; this }
  def attachments(attachments:Attachment*):Builder = { Attachments ++= attachments; this }

  // Reset helpers, so a builder can be reused between sends.
  def clearFrom = { From = None; this }
  def clearTo = { To.clear(); this }
  def clearCc = { Cc.clear(); this }
  def clearBcc = { Bcc.clear(); this }
  def clearSubject = { Subject = None; this }
  def clearTag = { Tag.clear(); this }
  def clearHtmlBody = { HtmlBody = None; this }
  def clearTextBody = { TextBody = None; this }
  def clearReplyTo = { ReplyTo = None; this }
  def clearHeaders = { Headers.clear(); this }
  def clearAttachments = { Attachments.clear(); this }

  // From is mandatory; InvalidMessage is thrown at build time if missing.
  private def buildFrom =
    if(From.isEmpty) throw new InvalidMessage("Message needs From field!")
    else From.mkString(",")

  // Postmark limits the combined number of To/Cc/Bcc recipients.
  private def buildTo =
    if(To.size + Cc.size + Bcc.size > Message.Builder.MAX_RECIPIENTS)
      throw new InvalidMessage("Postmark accepts maximum %d recipients!".format(Message.Builder.MAX_RECIPIENTS))
    else if(To.isEmpty) None else Some(To.mkString(","))

  /** Materializes the accumulated state into an immutable Message; the
   * Message constructor performs its own recipient/body validation. */
  def build = new Message(
    buildFrom, buildTo,
    if(Cc.isEmpty) None else Some(Cc.mkString(",")),
    if(Bcc.isEmpty) None else Some(Bcc.mkString(",")),
    Subject,
    if(Tag.isEmpty) None else Some(Tag.mkString(",")),
    HtmlBody, TextBody, ReplyTo, Headers.toList,
    if(Attachments.isEmpty) None else Some(Attachments.toList)
  )
}
/** Companion for Builder: factory plus the implicit build conversion so a
 * Builder can be used anywhere a Message is expected.
 */
object Builder{
// Maximum combined recipients accepted per message.
val MAX_RECIPIENTS = 20
def apply() = new Builder()
implicit def builderToMessage(builder:Builder) = builder.build
}
// Successful-send acknowledgement as returned by the Postmark API.
case class Receipt(
val To:String,
val SubmittedAt:String,
val MessageID:String,
val ErrorCode:Int,
val Message:String
)
// Failed-send payload (error code and human-readable message).
case class Rejection(
val ErrorCode: Int,
val Message: String
)
// Outcome of sending a single message: the message plus either the error
// or the receipt.
case class Result(
val message: Message,
val response: Either[Throwable,Receipt]
)
// A batch of messages to be sent in one API call.
case class Batch(val msgs:Seq[Message])
// Outcome of a batch send: per-message rejections/receipts, or a transport
// level failure for the whole batch.
case class BatchResult(
val messages: Seq[Message],
val response: Either[Throwable,Array[Either[Rejection,Receipt]]]
)
} | cvrabie/postmark-spray | src/main/scala/com/postmark/Message.scala | Scala | mit | 11,053 |
package caustic.compiler.gen
import caustic.compiler.reflect._
import caustic.grammar._
import scala.collection.JavaConverters._
/**
* Generates a Scala program from a program.
*
* @param universe Type universe.
*/
/**
 * Generates a Scala program from a Caustic program by visiting the parse
 * tree and emitting source text. Struct and service declarations are each
 * rendered twice: an internal representation (GenInternal) and an external
 * client-facing one (GenExternal).
 *
 * @param universe Type universe shared with the sub-generators.
 */
case class Gen(universe: Universe) extends CausticBaseVisitor[String] {
// Emits the package header (when a module is declared), a fixed preamble
// of imports, then all includes and declarations.
override def visitProgram(ctx: CausticParser.ProgramContext): String = {
s"""${ if (ctx.module() != null) s"package ${ ctx.module().Identifier().asScala.mkString(".") }" else "" }
|
|import caustic.library._
|import caustic.library.control._
|import caustic.library.typing._
|import caustic.library.typing.collection._
|import caustic.library.typing.record._
|import caustic.library.typing.Value._
|import caustic.runtime._
|
|import spray.json._
|import DefaultJsonProtocol._
|
|import scala.language.implicitConversions
|import scala.language.reflectiveCalls
|import scala.util.Try
|
|${ ctx.include().asScala.map(visitInclude) mkString "\\n" }
|
|${ ctx.declaration().asScala.map(visitDeclaration) mkString "\\n" }
""".stripMargin
}
// An include becomes a wildcard import of the referenced module.
override def visitInclude(ctx: CausticParser.IncludeContext): String =
s"${ ctx.getText }._"
// Structs and services emit both internal and external renderings.
override def visitStruct(ctx: CausticParser.StructContext): String =
s"""${ GenInternal(this.universe).visitStruct(ctx) }
|${ GenExternal(this.universe).visitStruct(ctx) }
""".stripMargin
override def visitService(ctx: CausticParser.ServiceContext): String =
s"""${ GenInternal(this.universe).visitService(ctx) }
|${ GenExternal(this.universe).visitService(ctx) }
""".stripMargin
}
| ashwin153/caustic | caustic-compiler/src/main/scala/caustic/compiler/gen/Gen.scala | Scala | apache-2.0 | 1,697 |
package com.sksamuel.elastic4s.mappings
import com.sksamuel.elastic4s.mappings.dynamictemplate.{DynamicMapping, DynamicTemplateBodyFn}
import org.elasticsearch.common.xcontent.{XContentBuilder, XContentFactory, XContentType}
import scala.collection.JavaConverters._
/** Serializes a MappingDefinitionLike into the Elasticsearch mapping JSON,
 * either bare (build) or wrapped in its type name (buildWithName).
 */
object MappingContentBuilder {
// Bare mapping JSON: { ...mapping fields... }
def build(d: MappingDefinitionLike): XContentBuilder = {
val builder = XContentFactory.jsonBuilder().startObject()
build(d, builder)
builder.endObject()
}
// returns the mapping json wrapped in the mapping type name, eg "mytype" : { mapping }
def buildWithName(d: MappingDefinitionLike, tpe: String): XContentBuilder = {
val builder = XContentFactory.jsonBuilder().startObject()
builder.startObject(tpe)
build(d, builder)
builder.endObject()
builder.endObject()
}
// Writes every configured section of the mapping into the supplied builder;
// absent options are simply skipped.
def build(d: MappingDefinitionLike, builder: XContentBuilder): Unit = {
for (all <- d.all) builder.startObject("_all").field("enabled", all).endObject()
// _source: excludes take precedence over a plain enabled flag.
(d.source, d.sourceExcludes) match {
case (_, l) if l.nonEmpty => builder.startObject("_source").field("excludes", l.asJava).endObject()
case (Some(source), _) => builder.startObject("_source").field("enabled", source).endObject()
case _ =>
}
if (d.dynamicDateFormats.nonEmpty)
builder.field("dynamic_date_formats", d.dynamicDateFormats.asJava)
for (dd <- d.dateDetection) builder.field("date_detection", dd)
for (nd <- d.numericDetection) builder.field("numeric_detection", nd)
d.dynamic.foreach(dynamic => {
// NOTE(review): the fallback emits the literal "dynamic" — Elasticsearch
// documents "true"/"false"/"strict" as the valid values; confirm whether
// the fallback was meant to be "true".
builder.field("dynamic", dynamic match {
case DynamicMapping.Strict => "strict"
case DynamicMapping.False => "false"
case _ => "dynamic"
})
})
d.boostName.foreach(x => builder.startObject("_boost").field("name", x).field("null_value", d.boostNullValue.getOrElse(0D)).endObject())
d.analyzer.foreach(x => builder.startObject("_analyzer").field("path", x).endObject())
d.parent.foreach(x => builder.startObject("_parent").field("type", x).endObject())
d.size.foreach(x => builder.startObject("_size").field("enabled", x).endObject())
d.timestamp.foreach(_.build(builder))
// Each field serializes its own JSON, injected raw under "properties".
if (d.fields.nonEmpty) {
builder.startObject("properties")
for (field <- d.fields) {
builder.rawField(field.name, FieldBuilderFn(field).bytes, XContentType.JSON)
}
builder.endObject() // end properties
}
if (d.meta.nonEmpty) {
builder.startObject("_meta")
for (meta <- d.meta) {
builder.field(meta._1, meta._2)
}
builder.endObject()
}
d.routing.foreach(routing => {
builder.startObject("_routing").field("required", routing.required)
routing.path.foreach(path => builder.field("path", path))
builder.endObject()
})
if (d.templates.nonEmpty) {
builder.startArray("dynamic_templates")
d.templates.foreach(DynamicTemplateBodyFn.build(_, builder))
builder.endArray()
}
}
}
| aroundus-inc/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/mappings/MappingContentBuilder.scala | Scala | apache-2.0 | 2,969 |
/**
* Copyright (C) 2010-2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.client
import net.lshift.diffa.adapter.scanning.{ScanResultEntry, ScanConstraint}
import net.lshift.diffa.kernel.config._
import net.lshift.diffa.kernel.participants.{CategoryFunction, ScanningParticipantRef}
import org.slf4j.LoggerFactory
import java.net.{SocketTimeoutException, SocketException, ConnectException, URI}
import net.lshift.diffa.kernel.differencing.ScanFailedException
import net.lshift.diffa.kernel.util.AlertCodes._
import net.lshift.diffa.kernel.config.PairRef
import net.lshift.diffa.kernel.config.QueryParameterCredentials
import net.lshift.diffa.kernel.config.BasicAuthCredentials
import scala.Some
import org.apache.http.NoHttpResponseException
/** REST client for a scanning participant endpoint. Builds the scan query
 * (constraints, aggregations, and any configured credentials), issues it
 * over the injected HTTP client, and translates transport-level failures
 * into ScanFailedException.
 *
 * Fix: the scan catch clause previously used a bare `case ex =>`, which in
 * Scala catches ALL Throwables — including fatal JVM errors such as
 * OutOfMemoryError — and routed them through the error-translation path.
 * It now matches only NonFatal exceptions; fatal errors propagate untouched.
 */
class ScanParticipantRestClient(pair: PairRef,
                                scanUrl: String,
                                credentialsLookup: DomainCredentialsLookup,
                                httpClient: DiffaHttpClient,
                                parser: JsonScanResultParser)
  extends ScanningParticipantRef {

  private val log = LoggerFactory.getLogger(getClass)

  /** Performs a scan against the participant, returning the parsed entries.
   * @throws ScanFailedException on connection refusal, closure, or timeout.
   */
  def scan(constraints: Seq[ScanConstraint], aggregations: Seq[CategoryFunction]) : Seq[ScanResultEntry] = {
    import scala.util.control.NonFatal

    val query = DiffaHttpQuery(scanUrl).accepting("application/json").
      withConstraints(constraints).
      withAggregations(aggregations)

    // Credentials (if configured for this domain/URL) are applied either as
    // basic auth or as an extra query parameter.
    val credentials = credentialsLookup.credentialsForUri(pair.space, new URI(scanUrl))
    val queryWithCredentials = credentials match {
      case None => query
      case Some(BasicAuthCredentials(user, password)) => query.withBasicAuth(user, password)
      case Some(QueryParameterCredentials(name, value)) => query.withQuery(Map(name -> Seq(value)))
    }

    try {
      this.httpClient.get(queryWithCredentials, parser)
    } catch {
      // Only recoverable errors are translated; fatal JVM errors propagate.
      case NonFatal(ex) => handleHttpError(ex, queryWithCredentials)
    }
  }

  // Maps transport exceptions onto ScanFailedException (handled specially
  // upstream — see its class documentation); anything unrecognized is rethrown.
  def handleHttpError(ex: Throwable, query: DiffaHttpQuery) = ex match {
    case ex: ConnectException =>
      log.error("%s Connection to %s refused".format(SCAN_CONNECTION_REFUSED, scanUrl))
      // NOTICE: ScanFailedException is handled specially (see its class documentation).
      throw new ScanFailedException("Could not connect to " + scanUrl)
    case ex: SocketException =>
      log.error("%s Socket closed to %s".format(SCAN_CONNECTION_CLOSED, scanUrl))
      // NOTICE: ScanFailedException is handled specially (see its class documentation).
      throw new ScanFailedException("Connection to %s closed unexpectedly, query %s".format(
        scanUrl, query.query))
    case ex: SocketTimeoutException =>
      log.error("%s Socket time out for %s".format(SCAN_SOCKET_TIMEOUT, scanUrl))
      // NOTICE: ScanFailedException is handled specially (see its class documentation).
      throw new ScanFailedException("Socket to %s timed out unexpectedly, query %s".format(
        scanUrl, query.query))
    case ex => throw ex
  }
}
| lshift/diffa | client-support/src/main/scala/net/lshift/diffa/client/ScanParticipantRestClient.scala | Scala | apache-2.0 | 3,498 |
package mesosphere.raml
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import java.util.Base64
import sbt._
import sbt.Keys._
import org.raml.v2.api.RamlModelBuilder
import org.yaml.snakeyaml.Yaml
import scala.collection.JavaConverters._
import scala.util.Try
import scala.annotation.tailrec
object RamlGeneratorPlugin extends AutoPlugin {
/** sbt keys exported to build definitions that enable this plugin.
 * (Fixes the "togenerate" typo in the user-visible setting description.)
 */
object autoImport {
  lazy val ramlFiles = settingKey[Seq[File]]("List of RAML 1.0 top level definitions to generate from")
  lazy val ramlPackage = settingKey[String]("Package to place all generated classes in")
  lazy val ramlGenerate = taskKey[Seq[File]]("Generate the RAML files")
}
import autoImport._
// Default wiring for the Compile configuration: the top-level RAML file,
// the target package, and the generate task (cached via streams.cacheDirectory).
override lazy val projectSettings = inConfig(Compile)(
Seq(
ramlFiles := Seq(
baseDirectory.value / "docs" / "docs" / "rest-api" / "public" / "api" / "api.raml"
),
ramlPackage := "mesosphere.marathon.raml",
ramlGenerate := {
generate(ramlFiles.value, ramlPackage.value, sourceManaged.value, streams.value.log, streams.value.cacheDirectory)
}
)
)
/** Persists the type-name -> content-hash map to `file`, one "name hash"
 * line per entry (the format readTypeHashes parses back).
 */
private def storeTypeHashes(file: File, hashes: Map[String, String]): Unit = {
  val lines = hashes.iterator.map { case (name, hash) => s"$name $hash" }
  IO.write(file, lines.mkString("\n"))
}
/** Loads the persisted type-hash map written by storeTypeHashes. Lines that
 * do not split into exactly two space-separated tokens are dropped, and any
 * failure (e.g. the file not existing yet) yields an empty map.
 */
private def readTypeHashes(file: File): Map[String, String] =
  Try {
    IO.readLines(file)
      .iterator
      .map(_.split(" "))
      .collect { case Array(name, hash) => name -> hash }
      .toMap
  }.getOrElse(Map.empty[String, String])
// Matches a RAML "!include <path>" directive anywhere on a line.
private val IncludeDirective = ".*!include ([^ ]+).*".r
/** Given a list of input files, crawls them by looking for include directives. Only crawls files once.
 * Poor mans because it uses regex to parse the !include directives (snakeyaml dies on these)
 * Uses regex because snakeyaml dies on !include and RamlModelBuilder does not expose the necessary metadata
 * And then we parse the yaml and look for a `uses` key because this is the second way to include files with raml
 */
@tailrec def poorMansIncludeCrawler(log: Logger, files: List[File], discovered: Set[File]): Set[File] =
files match {
case Nil =>
discovered
case head :: rest =>
// Referenced paths are resolved relative to the including file's folder.
val folder = head.getParentFile
val contents = IO.read(head)
val includes = contents.split("\n").iterator.collect { case IncludeDirective(fileName) => fileName }.toSeq
// "!include" is rewritten so snakeyaml can parse the document, then the
// `uses:` map (RAML libraries) is read as a second source of references.
val yaml = (new Yaml()).loadAs(contents.replaceAll("!include", "include"), classOf[java.util.Map[_, _]])
val uses = Option(yaml.get("uses")).collect {
case m: java.util.Map[_, _] =>
m.values().asScala.iterator.collect { case fileName: String => fileName }.toSeq
}.getOrElse(Nil)
val references = (uses ++ includes).map { fileName =>
new File(folder, fileName).getCanonicalFile
}.filterNot(discovered).toSet
// Only existing .raml files are crawled further; missing ones are warned
// about but still recorded in `discovered`.
val newThingsToCrawl = references.filter { _.getName.split('.').lastOption.exists(_ == "raml") }.filter { file =>
(file.isFile) || {
log.warn(s"File ${file} does not exist (referenced by ${head})")
false
}
}
poorMansIncludeCrawler(log, rest ++ newThingsToCrawl, discovered ++ references)
}
/**
 * Generates RAML scala sources for the provided input raml files.
 *
 * First, we crawl all of the input files for directives. Then, we use this crawled result as an input to the
 * cache function and only re-run if any of these files change.
 *
 * @param ramlFiles the input files from which to start; referenced include files are also processed
 * @param pkg the name of the package the generated Scala sources should use
 * @param log sbt logger
 * @param cacheDir the CacheDir for this build step
 */
def generate(ramlFiles: Seq[File], pkg: String, outputDir: File, log: Logger, cacheDir: File): Seq[File] = {
val start = System.currentTimeMillis()
log.debug("Discovering RAML input files")
val allInputFiles = poorMansIncludeCrawler(log, ramlFiles.toList, Set.empty)
// sbt caching: the body below only runs when one of the crawled inputs
// changed (lastModified) or an output disappeared.
val cachedCompile = FileFunction.cached(cacheDir, inStyle = FilesInfo.lastModified, outStyle = FilesInfo.exists) { (_: Set[File]) =>
log.info("Constructing RAML model")
val models = ramlFiles.map { file =>
val model = new RamlModelBuilder().buildApi(file)
// Any RAML validation error aborts the build.
if (model.hasErrors) {
model.getValidationResults.asScala.foreach { error =>
sys.error(error.toString)
}
}
model
}
log.info("Generating RAML scala sources")
val types = RamlTypeGenerator(models.toVector, pkg)
// NOTE(review): mapValues is a lazy view in Scala 2.12, so each access may
// re-run the transformation (tree printing / hashing) — confirm whether a
// strict .map was intended here.
val typesAsStr = types.mapValues(treehugger.forest.treeToString(_))
val digest = MessageDigest.getInstance("SHA-256")
// Per-type SHA-256 of the generated source, used to skip rewriting files
// whose content has not changed (keeps timestamps stable for incremental
// compilation).
val typeHashes =
typesAsStr.mapValues(content => Base64.getEncoder.encodeToString(digest.digest(content.getBytes(StandardCharsets.UTF_8))))
val hashes = readTypeHashes(cacheDir / "type-cache")
storeTypeHashes(cacheDir / "type-cache", typeHashes)
val results: Set[File] = typesAsStr.map {
case (typeName, content) =>
val file = outputDir / pkg.replaceAll("\\.", "/") / s"$typeName.scala"
// don't write the file if it hasn't changed
if (hashes.get(typeName).fold(true)(_ != typeHashes(typeName) || !file.exists())) {
IO.write(file, content)
}
file
}(collection.breakOut)
log.info(s"Done generating ${results.size} RAML Scala sources; took ${(System.currentTimeMillis() - start) / 1000} seconds total")
results
}
cachedCompile(allInputFiles).toSeq
}
}
| mesosphere/marathon | type-generator/src/main/scala/mesosphere/raml/RamlGeneratorPlugin.scala | Scala | apache-2.0 | 5,745 |
/*
* Copyright 2015 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.components.ai.mipro.supervisedlearning.examples
import java.io.File
/**
* Created by
* martin
* in September 2015.
*/
/** Abstract contract for a trainable neural network whose learned state
 * (thetas) is persisted under `storageFolder`. Typical lifecycle:
 * appendTrainingData repeatedly, then trainNetwork once, then predict.
 */
abstract class NeuralNetwork(storageFolder: File) {
/**
 * Add one new training data item to X and Y
 *
 * Suggestion for combining parameters:
 * val X = x ::: xs.flatten.toList
 * val Y = y ::: ys.flatten.toList
 */
def appendTrainingData(x: List[Double], xs: List[Double]*)(y: List[Double], ys: List[Double]*): Unit
/**
 * Uses previously appended X and Y to calculate Thetas
 */
def trainNetwork(): Unit
/**
 * Predict Y based on X using trained Thetas
 *
 * Suggestion for combining parameters:
 * val X = x ::: xs.flatten.toList
 */
def predict(x: List[Double], xs: List[Double]*): Double
}
| simulator-x/feature | src/simx/components/ai/mipro/supervisedlearning/examples/NeuralNetwork.scala | Scala | apache-2.0 | 1,633 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.table.api.{TableException, ValidationException}
import org.apache.flink.table.expressions.FieldReferenceExpression
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.calcite.FlinkTypeFactory.toLogicalType
import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromLogicalTypeToDataType
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.logical.{LogicalAggregate, LogicalProject}
import org.apache.calcite.rex._
import org.apache.calcite.sql.`type`.SqlTypeName
import _root_.java.math.{BigDecimal => JBigDecimal}
/**
* Planner rule that transforms simple [[LogicalAggregate]] on a [[LogicalProject]]
* with windowing expression to
* [[org.apache.flink.table.planner.plan.nodes.calcite.LogicalWindowAggregate]]
* for batch.
*/
/**
 * Planner rule that transforms a simple [[LogicalAggregate]] on top of a
 * [[LogicalProject]] containing a windowing expression into a
 * [[org.apache.flink.table.planner.plan.nodes.calcite.LogicalWindowAggregate]]
 * for batch execution.
 */
class BatchLogicalWindowAggregateRule
  extends LogicalWindowAggregateRuleBase("BatchLogicalWindowAggregateRule") {

  /** Returns the operand of the group window function. */
  override private[table] def getInAggregateGroupExpression(
      rexBuilder: RexBuilder,
      windowExpression: RexCall): RexNode =
    windowExpression.getOperands.get(0)

  /** Returns a zero literal of the correct type. */
  override private[table] def getOutAggregateGroupExpression(
      rexBuilder: RexBuilder,
      windowExpression: RexCall): RexNode = {
    // Build a literal with a plain SqlTypeName.TIMESTAMP type so that a
    // possible rowtime indicator type is not carried into the output.
    val timestampType = rexBuilder.getTypeFactory.createSqlType(
      SqlTypeName.TIMESTAMP, windowExpression.getType.getPrecision)
    rexBuilder.makeLiteral(0L, timestampType, true)
  }

  override private[table] def getTimeFieldReference(
      operand: RexNode,
      timeAttributeIndex: Int,
      rowType: RelDataType): FieldReferenceExpression = {
    // Processing-time windows are meaningless on bounded (batch) inputs.
    if (FlinkTypeFactory.isProctimeIndicatorType(operand.getType)) {
      throw new ValidationException("Window can not be defined over "
        + "a proctime attribute column for batch mode")
    }
    val timeField = rowType.getFieldList.get(timeAttributeIndex)
    new FieldReferenceExpression(
      timeField.getName,
      fromLogicalTypeToDataType(toLogicalType(timeField.getType)),
      0,
      timeAttributeIndex)
  }

  /** Extracts operand `idx` as a Long; only literal window descriptors are accepted. */
  def getOperandAsLong(call: RexCall, idx: Int): Long = {
    val operand = call.getOperands.get(idx)
    operand match {
      case literal: RexLiteral =>
        literal.getValue.asInstanceOf[JBigDecimal].longValue()
      case _ =>
        throw new TableException("Only constant window descriptors are supported")
    }
  }
}

object BatchLogicalWindowAggregateRule {
  /** Shared singleton instance of this rule. */
  val INSTANCE: BatchLogicalWindowAggregateRule = new BatchLogicalWindowAggregateRule
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/BatchLogicalWindowAggregateRule.scala | Scala | apache-2.0 | 3,569 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast.functions
import org.neo4j.cypher.internal.frontend.v2_3.ast.{AggregatingFunction, SimpleTypedFunction}
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
/**
 * The `count` aggregating function: accepts a single argument of any type and
 * produces an integer result.
 */
case object Count extends AggregatingFunction with SimpleTypedFunction {
  def name = "count"
  // Single signature: count(ANY) -> INTEGER
  val signatures = Vector(
    Signature(argumentTypes = Vector(CTAny), outputType = CTInteger)
  )
}
| HuangLS/neo4j | community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/functions/Count.scala | Scala | apache-2.0 | 1,208 |
package com.bee4bit.cd.websocket
import javax.websocket.server.ServerEndpoint
import javax.websocket.OnClose
import javax.websocket.OnError
import javax.websocket.OnMessage
import javax.websocket.OnOpen
import javax.websocket.Session
import javax.websocket.MessageHandler.Whole
@ServerEndpoint("/socket.io")
class WebSocketServer {

  /**
   * Invoked by the container when a client connects.
   *
   * FIX: the previous implementation also registered [[FooImpl]] here via
   * `session.addMessageHandler`. Because this is an annotated endpoint, the
   * container already installs a text handler for the `@OnMessage` method
   * below, and JSR-356 forbids two handlers for the same message type, so
   * compliant containers throw IllegalStateException on connect. The
   * programmatic registration has been removed; `handleMessage` below is the
   * single text-message handler.
   */
  @OnOpen
  def open(session: Session) {
    println(session)
  }

  /** Invoked by the container when the client closes the connection. No cleanup needed yet. */
  @OnClose
  def close(session: Session) {
  }

  /** Invoked by the container on a transport or handler error; logged only. */
  @OnError
  def onError(error: Throwable) {
    println(error)
  }

  /** Handles one complete text message: logs the session id and the payload. */
  @OnMessage
  def handleMessage(message: String, session: Session) {
    println(session.getId())
    println(message)
  }
}
/** Programmatic whole-text message handler that simply logs each delivered message. */
object FooImpl extends javax.websocket.MessageHandler.Whole[String] {
  override def onMessage(message: String): Unit = println(message)
}
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.index.utils
import java.util.concurrent.ConcurrentHashMap
import org.opengis.feature.simple.SimpleFeatureType
object SplitArrays {

  /** Split set used when an index is not sharded at all. */
  val EmptySplits = IndexedSeq(Array.empty[Byte])

  // Cache of split arrays, keyed by number of shards. The values are
  // effectively immutable, so the old get/null-check/put pattern was only
  // a benign race (duplicate computation); computeIfAbsent makes the
  // computation atomic and guarantees each entry is built once.
  private val splitArraysMap: ConcurrentHashMap[Int, IndexedSeq[Array[Byte]]] =
    new ConcurrentHashMap[Int, IndexedSeq[Array[Byte]]]()

  /** Returns the split arrays for a feature type, based on its configured z-shards. */
  def apply(sft: SimpleFeatureType): IndexedSeq[Array[Byte]] = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
    apply(sft.getZShards)
  }

  /**
   * Returns one single-byte prefix array per split: [0], [1], ..., [numSplits - 1].
   * Results are cached per split count.
   */
  def apply(numSplits: Int): IndexedSeq[Array[Byte]] = {
    splitArraysMap.computeIfAbsent(numSplits, new java.util.function.Function[Int, IndexedSeq[Array[Byte]]] {
      override def apply(n: Int): IndexedSeq[Array[Byte]] =
        IndexedSeq.tabulate(n)(i => Array(i.toByte))
    })
  }
}
| spandanagrawal/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/utils/SplitArrays.scala | Scala | apache-2.0 | 1,333 |
// Positive compiler test (appears to be a regression test for inline matches):
// all three inline match forms below must reduce at compile time, including a
// match whose scrutinee is a forward-referenced inline def (`s`).
inline def meth =
  val x1 = inline ("a": Any) match
    case _: String => "ok"
  val x2 = inline { "a": Any } match
    case _: String => "ok"
  inline s match
    case _: String => "ok"
  inline def s = "a": Any
  def test = meth
| dotty-staging/dotty | tests/pos/i11291.scala | Scala | apache-2.0 | 230 |
/*§
===========================================================================
Chronos
===========================================================================
Copyright (C) 2015-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.chronos.ast.statements
import info.gianlucacosta.chronos.ast.{AstVisitor, Reference, Statement}
/**
 * AST statement that instantiates a new entity of the given type, optionally
 * binding it to a reference.
 *
 * @param entityType name of the entity type to instantiate
 * @param reference  optional reference the created entity is bound to
 * @param lineNumber line in the source program where the statement appears
 */
case class CreateEntity(entityType: String, reference: Option[Reference], lineNumber: Int) extends Statement {
  override def accept[T](visitor: AstVisitor[T]): T = visitor.visit(this)
}
/*package util
import org.jblas.DoubleMatrix
import org.apache.spark.rdd._
import org.apache.spark.mllib.recommendation.{ALS, Rating, MatrixFactorizationModel}
import scala.util.Random
import org.apache.spark.SparkContext
import org.joda.time.{Seconds, DateTime}
class Recommender(@transient sc: SparkContext, ratings: RDD[Unit], products: Map[Int, String]) extends Serializable {
/* println(ratings)
val ratings_parse = sc.textFile(ratings).map {
line =>
val Array(userId, productId, scoreStr) = line.split("::")
AllRatedProducts(userId, prodId, scoreStr.toDouble)
}
val ratings_new = ratings_parse.groupBy(_.prodId)
*
*/
val Retries = 3
val prodRatings = getRandomProduct
val productDict = new Dictionary(ratings.map(_.prodId).distinct.collect)
val myRatingsRDD = sc.parallelize(prodRatings, 1)
def getRandomProduct(Retries: Int) = getRandomProductID
def getRandomProductID = {
val randomProduct = (productDict.getWord(random.nextInt(productDict.size))).toString
println("Printing randomProduct")
println(randomProduct)
if (randomProduct.size < 1) {
throw new RuntimeException(s"Check your data please!")
}
}
val numRatings = ratings.count()
val numUsers = ratings.map(_._2.user).distinct().count()
val numProducts = ratings.map(_._2.product).distinct().count()
println("Got " + numRatings + " ratings from "
+ numUsers + " users on " + numProducts + " products.")
val numPartitions = 4
val training = ratings.filter(x => x._1 < 6)
.values
.union(myRatingsRDD)
.repartition(numPartitions)
.cache()
val validation = ratings.filter(x => x._1 >= 6 && x._1 < 8)
.values
.repartition(numPartitions)
.cache()
val test = ratings.filter(x => x._1 >= 8).values.cache()
val numTraining = training.count()
val numValidation = validation.count()
val numTest = test.count()
println("Training: " + numTraining + ", validation: " + numValidation + ", test: " + numTest)
val ranks = List(8, 12)
val lambdas = List(0.1, 10.0)
val numIters = List(10, 20)
var bestModel: Option[MatrixFactorizationModel] = None
var bestValidationRmse = Double.MaxValue
var bestRank = 0
var bestLambda = -1.0
var bestNumIter = -1
for (rank <- ranks; lambda <- lambdas; numIter <- numIters) {
val model = ALS.train(training, rank, numIter, lambda)
val validationRmse = computeRmse(model, validation, numValidation)
println("RMSE (validation) = " + validationRmse + " for the model trained with rank = "
+ rank + ", lambda = " + lambda + ", and numIter = " + numIter + ".")
if (validationRmse < bestValidationRmse) {
bestModel = Some(model)
bestValidationRmse = validationRmse
bestRank = rank
bestLambda = lambda
bestNumIter = numIter
}
}
val testRmse = computeRmse(bestModel.get, test, numTest)
println("The best model was trained with rank = " + bestRank + " and lambda = " + bestLambda
+ ", and numIter = " + bestNumIter + ", and its RMSE on the test set is " + testRmse + ".")
val meanRating = training.union(validation).map(_.rating).mean
val baselineRmse =
math.sqrt(test.map(x => (meanRating - x.rating) * (meanRating - x.rating)).mean)
val improvement = (baselineRmse - testRmse) / baselineRmse * 100
println("The best model improves the baseline by " + "%1.2f".format(improvement) + "%.")
val aptProductIds = myRatings.map(_.product).toSet
val candidates = sc.parallelize(products.keys.filter(!aptProductIds.contains(_)).toSeq)
val recommendations = bestModel.get
.predict(candidates.map((0, _)))
.collect()
.sortBy(- _.rating)
.take(50)
//return the predictions
var i = 1
println("Products matching the given products:")
recommendations.foreach { r =>
println("%2d".format(i) + ": " + movies(r.product))
i += 1
}
sc.stop()
//NEED TO CHANGE THE computeRmse() and get random product from dictionary and compare.
/** Compute RMSE (Root Mean Squared Error). */
def computeRmse(model: MatrixFactorizationModel, data: RDD[Rating], n: Long): Double = {
val predictions: RDD[Rating] = model.predict(data.map(x => (x.user, x.product)))
val predictionsAndRatings = predictions.map(x => ((x.user, x.product), x.rating))
.join(data.map(x => ((x.user, x.product), x.rating)))
.values
math.sqrt(predictionsAndRatings.map(x => (x._1 - x._2) * (x._1 - x._2)).reduce(_ + _) / n)
}
}
/*
@transient val random = new Random() with Serializable
// first create an RDD out of the rating file
val rawTrainingRatings = sc.textFile(ratingFile).map {
line =>
val Array(userId, productId, scoreStr) = line.split(",")
AmazonRating(userId, productId, scoreStr.toDouble)
}
// only keep users that have rated between MinRecommendationsPerUser and MaxRecommendationsPerUser products
val trainingRatings = rawTrainingRatings.groupBy(_.userId)
.filter(r => MinRecommendationsPerUser <= r._2.size && r._2.size < MaxRecommendationsPerUser)
.flatMap(_._2)
.repartition(NumPartitions)
.cache()
println(s"Parsed $ratingFile. Kept ${trainingRatings.count()} ratings out of ${rawTrainingRatings.count()}")
// create user and item dictionaries
val userDict = new Dictionary(MyUsername +: trainingRatings.map(_.userId).distinct.collect)
val productDict = new Dictionary(trainingRatings.map(_.productId).distinct.collect)
private def toSparkRating(amazonRating: AmazonRating) = {
Rating(userDict.getIndex(amazonRating.userId),
productDict.getIndex(amazonRating.productId),
amazonRating.rating)
}
private def toAmazonRating(rating: Rating) = {
AmazonRating(userDict.getWord(rating.user),
productDict.getWord(rating.product),
rating.rating
)
}
// convert to Spark Ratings using the dictionaries
val sparkRatings = trainingRatings.map(toSparkRating)
def getRandomProductId = productDict.getWord(random.nextInt(productDict.size))
def predict(ratings: Seq[AmazonRating]) = {
// train model
val myRatings = ratings.map(toSparkRating)
val myRatingRDD = sc.parallelize(myRatings)
val startAls = DateTime.now
val model = ALS.train((sparkRatings ++ myRatingRDD).repartition(NumPartitions), 10, 20, 0.01)
val myProducts = myRatings.map(_.product).toSet
val candidates = sc.parallelize((0 until productDict.size).filterNot(myProducts.contains))
// get ratings of all products not in my history ordered by rating (higher first) and only keep the first NumRecommendations
val myUserId = userDict.getIndex(MyUsername)
val recommendations = model.predict(candidates.map((myUserId, _))).collect
val endAls = DateTime.now
val result = recommendations.sortBy(-_.rating).take(NumRecommendations).map(toAmazonRating)
val alsTime = Seconds.secondsBetween(startAls, endAls).getSeconds
println(s"ALS Time: $alsTime seconds")
result
}
}
*/
*/
| sk413025/retail_analytics | app/util/Recommender.scala | Scala | apache-2.0 | 7,277 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.util.TableTestBase
import org.junit.Test
/** Plan tests for ORDER BY queries over a streaming table with time attributes. */
class SortTest extends TableTestBase {

  private val util = streamTestUtil()
  util.addDataStream[(Int, String, Long)]("MyTable", 'a, 'b, 'c, 'proctime, 'rowtime)

  /** Verifies the optimized plan produced for the given ORDER BY query. */
  private def verifySort(sql: String): Unit = util.verifyPlan(sql)

  @Test
  def testSortProcessingTime(): Unit = {
    // leading proctime key: be converted to TemporalSort
    verifySort("SELECT a FROM MyTable ORDER BY proctime, c")
  }

  @Test
  def testSortRowTime(): Unit = {
    // leading rowtime key: be converted to TemporalSort
    verifySort("SELECT a FROM MyTable ORDER BY rowtime, c")
  }

  @Test
  def testSortProcessingTimeDesc(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY proctime desc, c")

  @Test
  def testSortRowTimeDesc(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY rowtime desc, c")

  @Test
  def testSortProcessingTimeSecond(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY c, proctime")

  @Test
  def testSortRowTimeSecond(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY c, rowtime")

  @Test
  def testSortProcessingTimeSecondDesc(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY c, proctime desc")

  @Test
  def testSortRowTimeSecondDesc(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY c, rowtime desc")

  @Test
  def testSortWithoutTime(): Unit =
    verifySort("SELECT a FROM MyTable ORDER BY c")
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/sql/SortTest.scala | Scala | apache-2.0 | 2,297 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.lsp.api.companions
import org.ensime.lsp.rpc.companions._
import org.ensime.lsp.api.commands._
/** Named JSON-RPC command bindings for LSP requests sent from client to server. */
object ServerCommands {
  implicit val initializeCommand: RpcCommand[InitializeParams] =
    RpcCommand[InitializeParams]("initialize")
  implicit val shutdownCommand: RpcCommand[Shutdown] =
    RpcCommand[Shutdown]("shutdown")
  // NOTE(review): "competitionCommand" looks like a typo for "completionCommand";
  // the wire method name below is correct, so behavior is unaffected. Renaming
  // the val would touch all usage sites, so it is only flagged here.
  implicit val competitionCommand: RpcCommand[TextDocumentCompletionRequest] =
    RpcCommand[TextDocumentCompletionRequest]("textDocument/completion")
  implicit val definitionCommand: RpcCommand[TextDocumentDefinitionRequest] =
    RpcCommand[TextDocumentDefinitionRequest]("textDocument/definition")
  implicit val hoverCommand: RpcCommand[TextDocumentHoverRequest] =
    RpcCommand[TextDocumentHoverRequest]("textDocument/hover")
  implicit val documentSymbolCommand: RpcCommand[DocumentSymbolParams] =
    RpcCommand[DocumentSymbolParams]("textDocument/documentSymbol")
}
/** Registry used to dispatch an incoming server command by its method name. */
object ServerCommand extends CommandCompanion[ServerCommand] {
  import ServerCommands._
  // Every RpcCommand defined above must be listed here to be dispatchable.
  val commands = Seq(
    initializeCommand,
    shutdownCommand,
    competitionCommand,
    definitionCommand,
    hoverCommand,
    documentSymbolCommand
  )
}
/** Named JSON-RPC command bindings for requests sent from server to client. */
object ClientCommands {
  implicit val showMessageRequestCommand: RpcCommand[ShowMessageRequestParams] =
    RpcCommand[ShowMessageRequestParams]("showMessageRequest")
}
/** Registry used to dispatch an incoming client command by its method name. */
object ClientCommand extends CommandCompanion[ClientCommand] {
  import ClientCommands._
  val commands = Seq(showMessageRequestCommand)
}
/** Named JSON-RPC notification bindings for the LSP wire protocol. */
object Notifications {
  // Server -> client window/diagnostics notifications.
  implicit val showMessageNotification: RpcNotification[ShowMessageParams] =
    RpcNotification[ShowMessageParams]("window/showMessage")
  implicit val logMessageNotification: RpcNotification[LogMessageParams] =
    RpcNotification[LogMessageParams]("window/logMessage")
  implicit val publishDiagnosticsNotification
    : RpcNotification[PublishDiagnostics] =
    RpcNotification[PublishDiagnostics]("textDocument/publishDiagnostics")
  // Client -> server text-document lifecycle notifications.
  implicit val didOpenNotification: RpcNotification[DidOpenTextDocumentParams] =
    RpcNotification[DidOpenTextDocumentParams]("textDocument/didOpen")
  implicit val didChangeNotification
    : RpcNotification[DidChangeTextDocumentParams] =
    RpcNotification[DidChangeTextDocumentParams]("textDocument/didChange")
  implicit val didCloseNotification
    : RpcNotification[DidCloseTextDocumentParams] =
    RpcNotification[DidCloseTextDocumentParams]("textDocument/didClose")
  implicit val didSaveNotification: RpcNotification[DidSaveTextDocumentParams] =
    RpcNotification[DidSaveTextDocumentParams]("textDocument/didSave")
  implicit val didChangeWatchedFilesNotification
    : RpcNotification[DidChangeWatchedFiles] =
    RpcNotification[DidChangeWatchedFiles]("workspace/didChangeWatchedFiles")
  // Protocol lifecycle notifications.
  implicit val initializedNotification: RpcNotification[Initialized] =
    RpcNotification[Initialized]("initialized")
  implicit val cancelRequestNotification: RpcNotification[CancelRequest] =
    RpcNotification[CancelRequest]("$/cancelRequest")
  implicit val exitNotification: RpcNotification[Exit] =
    RpcNotification[Exit]("exit")
}
/** Registry used to dispatch an incoming notification by its method name. */
object Notification extends NotificationCompanion[Notification] {
  import Notifications._
  // Every RpcNotification defined above must be listed here to be dispatchable.
  val notifications = Seq(
    showMessageNotification,
    logMessageNotification,
    publishDiagnosticsNotification,
    didOpenNotification,
    didChangeNotification,
    didCloseNotification,
    didSaveNotification,
    didChangeWatchedFilesNotification,
    initializedNotification,
    cancelRequestNotification,
    exitNotification
  )
}
| ensime/ensime-server | lsp/src/main/scala/org/ensime/lsp/api/companions.scala | Scala | gpl-3.0 | 3,666 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.ctrl.service
import akka.actor.Actor
import akka.actor.Actor.Receive
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import cmwell.ctrl.hc.HealthActor
import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.postfixOps
import scala.concurrent.duration._
/**
* Created by michael on 2/1/16.
*/
/**
 * Thin facade actor: every message it receives is forwarded to the cluster
 * HealthActor via `ask`, and the eventual reply is piped straight back to the
 * original requester.
 */
class ClusterServiceActor extends Actor {
  // Asked futures fail with a timeout if HealthActor does not reply in 3 seconds.
  implicit val timeout: Timeout = Timeout(3.seconds)

  override def receive: Receive = {
    case anyMessage =>
      val futureReply = HealthActor.ref.ask(anyMessage)
      futureReply.pipeTo(sender())
  }
}
| nruppin/CM-Well | server/cmwell-controller/src/main/scala/cmwell/ctrl/service/ClusterServiceActor.scala | Scala | apache-2.0 | 1,143 |
/*
* Copyright 2016 Groupon, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.groupon.sparklint.events
/**
* @author rxue
* @since 1.0.5
*/
/**
 * Raised when a file cannot be parsed as a Spark event log.
 *
 * @param filename the file that failed recognition
 * @param reason   optional detail appended to the message
 */
case class UnrecognizedLogFileException(filename: String, reason: Option[String] = None) extends Throwable {
  override def getMessage: String = {
    val reasonSuffix = reason match {
      case Some(r) => s" Reason: $r."
      case None    => ""
    }
    s"$filename can not be recognized as a spark log file.$reasonSuffix"
  }
}
| groupon/sparklint | src/main/scala/com/groupon/sparklint/events/UnrecognizedLogFileException.scala | Scala | apache-2.0 | 941 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.layers
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.nn.{Module, GaussianSampler => BGaussianSampler}
import com.intel.analytics.zoo.pipeline.api.keras.layers.{GaussianSampler => ZGaussianSampler}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.{RandomGenerator, Shape, T, Table}
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper
import com.intel.analytics.zoo.pipeline.api.keras.serializer.ModuleSerializationTest
/** Checks that the Zoo GaussianSampler layer matches the BigDL implementation. */
class GaussianSamplerSpec extends ZooSpecHelper {
  "GaussianSampler Zoo" should "be the same as BigDL" in {
    val blayer = BGaussianSampler[Float]()
    val zlayer = ZGaussianSampler[Float](inputShape = Shape(List(Shape(3), Shape(3))))
    // Build with batch dimension -1 (unknown); output shape must drop the pair structure.
    zlayer.build(Shape(List(Shape(-1, 3), Shape(-1, 3))))
    assert(zlayer.getOutputShape() == Shape(-1, 3))
    // Input is a table of (mean, log-variance) tensors of shape 2x3.
    val input = T(Tensor[Float](Array(2, 3)).rand(), Tensor[Float](Array(2, 3)).rand())
    // Compares both forward output and gradInput between the two layers.
    compareOutputAndGradInputTable2Tensor(
      blayer.asInstanceOf[AbstractModule[Table, Tensor[Float], Float]],
      zlayer.asInstanceOf[AbstractModule[Table, Tensor[Float], Float]],
      input)
  }
}
/** Round-trips a GaussianSampler layer through save/load and compares outputs. */
class GaussianSamplerSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val layer = ZGaussianSampler[Float](inputShape = Shape(List(Shape(3), Shape(3))))
    layer.build(Shape(List(Shape(-1, 3), Shape(-1, 3))))
    val input = T(Tensor[Float](Array(2, 3)).rand(),
      Tensor[Float](Array(2, 3)).rand())
    // The layer samples randomly, so the RNG is re-seeded with the same seed
    // immediately before each forward pass to make the two outputs comparable.
    val seed = System.currentTimeMillis()
    RandomGenerator.RNG.setSeed(seed)
    val originalOutput = layer.forward(input).asInstanceOf[Tensor[Float]].clone()
    val tmpFile = ZooSpecHelper.createTmpFile()
    val absPath = tmpFile.getAbsolutePath
    layer.saveModule(absPath, overWrite = true)
    val loadedLayer = Module.loadModule[Float](absPath)
    RandomGenerator.RNG.setSeed(seed)
    val loadedOutput = loadedLayer.forward(input).asInstanceOf[Tensor[Float]].clone()
    // Same shape and (almost) same values => serialization preserved the layer.
    originalOutput.asInstanceOf[Tensor[Float]].size.sameElements(
      loadedOutput.asInstanceOf[Tensor[Float]].size) should be (true)
    originalOutput.asInstanceOf[Tensor[Float]].
      almostEqual(loadedOutput.asInstanceOf[Tensor[Float]], 1e-6) should be (true)
  }
}
| intel-analytics/analytics-zoo | zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/GaussianSamplerSpec.scala | Scala | apache-2.0 | 2,911 |
package net.debasishg.domain.trade
package event
import akka.dispatch._
import akka.util.Timeout
import akka.util.duration._
import akka.actor.{Actor, ActorRef, Props, ActorSystem}
import akka.pattern.Patterns.ask
import Actor._
/**
 * In-memory EventLog backed by a single logger actor. Ordering of entries is
 * guaranteed by funnelling every append through that one actor on a pinned
 * dispatcher.
 */
class InMemoryEventLog(as: ActorSystem) extends EventLog {
  val loggerActorName = "memory-event-logger"
  // need a pinned dispatcher to maintain order of log entries
  // val dispatcher = as.dispatcherFactory.newPinnedDispatcher(loggerActorName)
  lazy val logger = as.actorOf(Props(new Logger).withDispatcher("my-pinned-dispatcher"), name = loggerActorName)
  // implicit val timeout = as.settings.ActorTimeout
  implicit val timeout = Timeout(20 seconds)
  def iterator = iterator(0L)
  // NOTE(review): this drops by *position* in the list, not by entry id;
  // it only matches ids if ids are dense and start at the same offset — confirm.
  def iterator(fromEntryId: Long) =
    getEntries.drop(fromEntryId.toInt).iterator
  /** Asynchronously appends one entry; the future completes with the stored entry. */
  def appendAsync(id: String, state: State, data: Option[Any], event: Event): Future[EventLogEntry] =
    ask(logger, LogEvent(id, state, data, event), timeout.duration).asInstanceOf[Future[EventLogEntry]]
  // Blocking snapshot of all entries in insertion order (Await, up to 20s).
  def getEntries: List[EventLogEntry] = {
    val future = ask(logger, GetEntries(), timeout.duration)
    Await.result(future, timeout.duration).asInstanceOf[List[EventLogEntry]]
  }
  // Protocol messages understood by the Logger actor below.
  case class LogEvent(objectId: String, state: State, data: Option[Any], event: Event)
  case class GetEntries()
  /** Single-threaded keeper of the entry list; prepends for O(1) append. */
  class Logger extends Actor {
    private var entries = List.empty[EventLogEntry]
    def receive = {
      case LogEvent(id, state, data, event) =>
        val entry = EventLogEntry(InMemoryEventLog.nextId(), id, state, data, event)
        entries = entry :: entries
        sender ! entry
      case GetEntries() =>
        // Stored newest-first, so reverse to return insertion order.
        sender ! entries.reverse
    }
  }
}
object InMemoryEventLog {
  // Last id handed out. Kept as a public var for backwards compatibility with
  // any callers that read it directly.
  var current = 0L
  /**
   * Returns the next monotonically increasing entry id.
   *
   * FIX: the read-increment-write sequence was not atomic. Although each
   * InMemoryEventLog funnels appends through one actor, this companion object
   * is shared by every instance (and publicly accessible), so ids could be
   * duplicated under concurrent use. Synchronizing keeps the counter correct
   * without changing the interface.
   */
  def nextId() = synchronized {
    current = current + 1
    current
  }
}
| Tjoene/thesis | Case_Programs/cqrs-akka-master/src/main/scala/InMemoryEventLog.scala | Scala | gpl-2.0 | 1,790 |
package coursier.install.error
import java.nio.file.Path
/** Error raised when the file at `path` exists but was not created by `cs install`. */
final class NotAnApplication(val path: Path)
    extends InstallDirException(s"File $path wasn't installed by cs install")
| alexarchambault/coursier | modules/install/src/main/scala/coursier/install/error/NotAnApplication.scala | Scala | apache-2.0 | 182 |
import sbt._
/** Centralised dependency declarations for the sbt build. */
object LibraryVersions {
  // Version numbers, kept separate so related artifacts stay in lockstep.
  object Version {
    val cats = "1.6.0"
    val circe = "0.11.1"
    val circeOptics = "0.11.0"
    val kindProjector = "0.9.9"
    val scalaXml = "1.1.1"
    val java8compat = "0.9.0"
    val shapeless = "2.3.3"
    val scalaTest = "3.0.6"
    val scalaCheck = "1.14.0"
    val scalaCheckToolbox = "0.2.5"
  }
  // Main-scope dependencies.
  val cats = "org.typelevel" %% "cats-core" % Version.cats
  val circeCore = "io.circe" %% "circe-core" % Version.circe
  val circeGeneric = "io.circe" %% "circe-generic" % Version.circe
  val circeGenericExtras = "io.circe" %% "circe-generic-extras" % Version.circe
  val circeParser = "io.circe" %% "circe-parser" % Version.circe
  val circeOptics = "io.circe" %% "circe-optics" % Version.circeOptics
  val circeRefined = "io.circe" %% "circe-refined" % Version.circe
  val circeJava8 = "io.circe" %% "circe-java8" % Version.circe
  val scalaxml = "org.scala-lang.modules" %% "scala-xml" % Version.scalaXml
  val java8compat = "org.scala-lang.modules" %% "scala-java8-compat" % Version.java8compat
  val shapeless = "com.chuusai" %% "shapeless" % Version.shapeless
  // Test-scope dependencies.
  val scalaCheck = "org.scalacheck" %% "scalacheck" % Version.scalaCheck % Test
  val scalaCheckToolbox = "com.47deg" %% "scalacheck-toolbox-datetime" % Version.scalaCheckToolbox % Test
  val scalaTest = "org.scalatest" %% "scalatest" % Version.scalaTest % Test
}
package com.twitter.inject.thrift.integration.inheritance
import com.google.inject.Module
import com.twitter.finatra.http.HttpServer
import com.twitter.finatra.http.filters.CommonFilters
import com.twitter.finatra.http.routing.HttpRouter
import com.twitter.inject.thrift.modules.ThriftClientIdModule
/**
 * HTTP server exposing ServiceB via [[ServiceBHttpController]]; the thrift
 * client modules wire up the downstream thrift service dependency.
 */
class ServiceBHttpServer extends HttpServer {
  override val modules: Seq[Module] =
    Seq(ThriftClientIdModule, ServiceBThriftMethodBuilderClientModule)
  override def configureHttp(router: HttpRouter): Unit = {
    // CommonFilters is installed before the controller so it wraps every route.
    router
      .filter[CommonFilters]
      .add[ServiceBHttpController]
  }
}
| twitter/finatra | inject/inject-thrift-client/src/test/scala/com/twitter/inject/thrift/integration/inheritance/ServiceBHttpServer.scala | Scala | apache-2.0 | 598 |
package controllers
import play.api._
import play.api.libs.json._
import play.modules.reactivemongo.json.BSONFormats._
import play.api.libs.json.Json.JsValueWrapper
import play.api.mvc._
import play.modules.reactivemongo.MongoController
import play.modules.reactivemongo.json.collection.JSONCollection
import play.twirl.api.Html
import reactivemongo.api.Cursor
import reactivemongo.bson.BSONObjectID
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.io.Source
/**
 * CRUD HTTP endpoints backed by ReactiveMongo. The `repo` path segment names
 * the target collection; a side `stats` collection appears to track per-repo
 * created/deleted counters (TODO confirm against callers).
 */
object Application extends Controller with MongoController {
  /** Shorthand for building a JsObject from key/value pairs. */
  def $(a: (String, JsValueWrapper)*) = Json.obj(a: _*)
  // Reads extractor for the "_id" field of an incoming JSON body.
  // NOTE(review): `\\` performs a *recursive* lookup in Play JSON; a direct
  // child lookup would be a single backslash — confirm which is intended.
  val idReads : Reads[String] = (JsPath \\ "_id").read[String]
  /** Collection handle for the given repo name. */
  def collection(repo: String): JSONCollection = db.collection[JSONCollection](repo)
  /** Counters collection; presumably one document per repo name — verify schema. */
  def stats : JSONCollection = db.collection[JSONCollection]("super-uber-ille")
  def index = htmlVendor("repo")
  /** Renders the index page, embedding the repo's stats document (or "{}") as JSON. */
  def htmlVendor(repo: String) = Action.async {
    val cursor: Cursor[JsObject] = stats.find($("name" -> repo)).cursor[JsObject]
    val futureSlavesList: Future[List[JsObject]] = cursor.collect[List]()
    futureSlavesList.map { pins =>
      println(pins)
      if(pins.nonEmpty){
        println(pins.head)
        Ok(views.html.index(repo, Html(Json.toJson(pins.head).toString())))
      } else {
        Ok(views.html.index(repo, Html("{}")))
      }
    }
  }
  /** Inserts the JSON body with a fresh BSON id; bumps the repo's "created" counter. */
  def create(repo: String) = Action.async(parse.json) { implicit req =>
    val id = BSONObjectID.generate
    collection(repo).insert($("_id" -> id) ++ req.body.as[JsObject]).map { last =>
      if(last.ok){
        // NOTE(review): fire-and-forget — the stats update future is discarded,
        // so counter failures are only printed, never reported to the client.
        stats.update($("name" -> repo),$("$inc" -> $("created" -> 1)),upsert=true).map{ last =>
          println(last.ok)
        }
        Ok(Json.toJson($("_id"->id) ++ $("success"->true)))
      }
      else
        BadRequest($("success"->false))
    }
  }
  /** Returns every document in the repo's collection as a JSON array. */
  def selectAll(repo: String) = Action.async {
    val cursor: Cursor[JsObject] = db.collection[JSONCollection](repo).find(Json.obj()).cursor[JsObject]
    val futureSlavesList: Future[List[JsObject]] = cursor.collect[List]()
    futureSlavesList.map { pins =>
      Ok(Json.toJson(pins))
    }
  }
  /** Removes the document whose _id is given in the body; bumps "deleted" counter. */
  def delete(repo: String) = Action.async(parse.json) { implicit req =>
    req.body.validate[String](idReads).map{ id =>
      println(id)
      collection(repo).remove($("_id" -> BSONObjectID(id))).map {last =>
        if(last.ok){
          // Same fire-and-forget counter update as in create (see note there).
          stats.update($("name" -> repo),$("$inc" -> $("deleted" -> 1)),upsert=true).map{ last =>
            println(last.ok)
          }
          Ok($("success"->last.ok))
        } else
          BadRequest("BadJson")}
    } getOrElse { Future.successful(BadRequest($("BadJson"->req.body)))}
  }
  /** $set-updates the identified document with the body (minus its _id field). */
  def update(repo: String) = Action.async(parse.json) { implicit req =>
    req.body.validate[String](idReads).map{ id =>
      val newValues = req.body.as[JsObject] - "_id"
      collection(repo).update($("_id" -> BSONObjectID(id)),$("$set" -> newValues)).map { _=> Ok("Success")}
    } getOrElse { Future.successful(BadRequest("BadJSon"))}
  }
}
package rl
package tests
import org.specs2.Specification
import skinny.micro.rl.UrlCodingUtils._
import collection.immutable.BitSet
// Specs2 acceptance spec for skinny.micro.rl.UrlCodingUtils.
// The body is a single `is` expression: `^` chains fragments, `!` attaches a
// body to an example, `p` inserts a paragraph break and `end` closes the spec.
class UrlCodingSpec extends Specification {
def is =
"Encoding a URI should" ^
"not change any of the allowed chars" ! {
// RFC 3986 unreserved + sub-delims characters must pass through untouched.
val encoded = urlEncode("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890!$&'()*+,;=:/?@-._~")
encoded must_== "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890!$&'()*+,;=:/?@-._~"
} ^
"uppercase encodings already in a string" ! {
ensureUppercasedEncodings("hello%3fworld") must_== "hello%3Fworld"
} ^
"percent encode spaces" ! {
urlEncode("hello world") must_== "hello%20world"
} ^
"encode a letter with an accent as 2 values" ! {
// 'é' is a two-byte UTF-8 sequence, hence two percent-escapes.
urlEncode("é") must_== "%C3%A9"
} ^ p ^
"Decoding a URI should" ^
"not change any of the allowed chars" ! {
val decoded = urlDecode("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890!$&'()*,;=:/?#[]@-._~")
decoded must_== "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890!$&'()*,;=:/?#[]@-._~"
} ^
"leave Fußgängerübergänge as is" ! {
urlDecode("Fußgängerübergänge") must_== "Fußgängerübergänge"
} ^
"not overflow on all utf-8 chars" ! {
urlDecode("äéèüああああああああ") must_== "äéèüああああああああ"
} ^
"decode a pct encoded string" ! {
urlDecode("hello%20world") must_== "hello world"
} ^
"gracefully handle '%' encoding errors" ! {
// Truncated escapes are passed through verbatim instead of throwing.
urlDecode("%") must_== "%"
urlDecode("%2") must_== "%2"
urlDecode("%20") must_== " "
} ^
"decode value consisting of 2 values to 1 char" ! {
urlDecode("%C3%A9") must_== "é"
} ^
"skip the chars in toSkip when decoding" ^
"skips '%2F' when decoding" ! { urlDecode("%2F", toSkip = "/?#") must_== "%2F" } ^
"skips '%23' when decoding" ! { urlDecode("%23", toSkip = "/?#") must_== "%23" } ^
"skips '%3F' when decoding" ! { urlDecode("%3F", toSkip = "/?#") must_== "%3F" } ^
"still encodes others" ! { urlDecode("br%C3%BCcke", toSkip = "/?#") must_== "brücke" } ^
"handles mixed" ! { urlDecode("/ac%2Fdc/br%C3%BCcke%2342%3Fcheck", toSkip = "/?#") must_== "/ac%2Fdc/brücke%2342%3Fcheck" } ^ p ^
"The plusIsSpace flag specifies how to treat pluses" ^
"it treats + as allowed when the plusIsSpace flag is either not supplied or supplied as false" ! {
urlDecode("+") must_== "+"
urlDecode("+", plusIsSpace = false) must_== "+"
} ^
"it decodes + as space when the plusIsSpace flag is true" ! {
urlDecode("+", plusIsSpace = true) must_== " "
} ^ end
}
| xerial/skinny-micro | micro/src/test/scala/rl/tests/UrlCodingSpec.scala | Scala | bsd-2-clause | 2,799 |
package main.java.piratebot.input_sources
import scala.collection.mutable
/**
 * Mixin that accumulates named integer counters and can print them to stdout.
 *
 * Not thread-safe: the backing map is an unsynchronized mutable.HashMap.
 */
trait Statistics {
    // Counter name -> accumulated value.
    private val counters = mutable.HashMap[String, Int]()

    /**
     * Adds `value` to the counter named `counterName`, creating the counter
     * with that value if it does not exist yet.
     */
    def addCounter(counterName: String, value: Int): Unit = {
        // getOrElse folds the "absent" and "present" cases into one update,
        // replacing the previous contains/update/put branching.
        counters.update(counterName, counters.getOrElse(counterName, 0) + value)
    }

    /**
     * Prints all counters to stdout, prefixed with `name` (empty by default).
     */
    def printCounters(name: String = ""): Unit = {
        println(name + ": " + counters)
    }
}
| ItCouldHaveBeenGreat/Eyepatch | src/main/java/piratebot/input_sources/Statistics.scala | Scala | gpl-3.0 | 511 |
package knot.core.stream.flows
import knot.core.stream.Sink
import knot.core.stream.ops.SinkOps
import knot.core.stream.ops.StreamOps.{Completed, ErrorEmitted}
import knot.core.stream.plugs.Input
import knot.core.testKit.CellSpec
import knot.core.{BufferOverflowException, OverflowStrategy}
import org.scalatest.Matchers._
import scala.concurrent.Await
import scala.concurrent.duration.Duration
// Exercises the `buffer(size, strategy)` stream stage against every
// OverflowStrategy, using a deliberately slow sink to force back-pressure.
class BufferSpec extends CellSpec {
// Sink that sleeps 500ms per element and completes with the LAST element seen.
// The slowness is what makes the upstream buffer overflow.
case class SlowLastSink[T]() extends Sink[T, T] {
override def newOps: SinkOps[T, T] = new SinkOps[T, T] {
private[this] var prev: T = null.asInstanceOf[T]
override def requestLength: Long = 1
override protected def preStart(): Unit = requestIfNeeded(in)
override protected def onUpstreamFinish: LifecycleBehavior = {
case Completed =>
val result = prev
prev = null.asInstanceOf[T]
complete(result)
case ErrorEmitted(cause, _) =>
prev = null.asInstanceOf[T]
error(cause)
}
override protected def onNext(in: Input[T], element: T): Unit = {
// Simulated slow consumer: half a second per element.
Thread.sleep(500)
println(s"${Thread.currentThread().getName} - $element")
prev = element
requestIfNeeded(in)
}
}
}
import knot.core.stream.dsl.Implicits._
// Runs 0..9 through a buffer of `size` with the given strategy and returns
// the last element the slow sink observed.
def test(size: Int, strategy: OverflowStrategy): Int = {
val m = (0 until 10).toGraph.map(x => x).buffer(size, strategy)
val f = m
.materializeWithSink(Sink.from(SlowLastSink[Int]()))
.dispatcher("single")
.run()
Await.result(f, Duration.Inf)
}
describe("Buffer") {
it("safe") {
// Buffer large enough for all 10 elements: nothing is lost.
val r = test(10, OverflowStrategy.error)
r should be(9)
}
it("drop") {
// Small buffer + drop strategy: some elements are discarded,
// so the final element is not guaranteed to be 9.
val r = test(3, OverflowStrategy.drop)
r should not be 9
}
it("backpressure") {
// Back-pressure slows the producer instead of losing data.
val r = test(1, OverflowStrategy.backpressure)
r should be(9)
}
it("latest") {
// "latest" keeps overwriting the buffered element; the final one survives.
val r = test(1, OverflowStrategy.latest)
r should be(9)
}
it("error") {
// Overflow with the error strategy must fail the stream.
a[BufferOverflowException] should be thrownBy {
test(1, OverflowStrategy.error)
}
}
}
}
| defvar/knot | knot-core/src/test/scala/knot/core/stream/flows/BufferSpec.scala | Scala | mit | 2,111 |
package eu.timepit.refined
import eu.timepit.refined.api.Validate
import eu.timepit.refined.boolean.Or
/** Module for `Char` related predicates. */
/** Module for `Char` related predicates. */
object char {
/** Predicate that checks if a `Char` is a digit. */
final case class Digit()
/** Predicate that checks if a `Char` is a letter. */
final case class Letter()
/** Predicate that checks if a `Char` is a lower case character. */
final case class LowerCase()
/** Predicate that checks if a `Char` is an upper case character. */
final case class UpperCase()
/** Predicate that checks if a `Char` is white space. */
final case class Whitespace()
/** Predicate that checks if a `Char` is a letter or digit. */
type LetterOrDigit = Letter Or Digit
// Each companion hosts its Validate instance so it is found via the implicit
// scope of the predicate type, without extra imports. All predicates delegate
// to the corresponding java.lang.Character classification on scala.Char.
object Digit {
implicit def digitValidate: Validate.Plain[Char, Digit] =
Validate.fromPredicate(_.isDigit, t => s"isDigit('$t')", Digit())
}
object Letter {
implicit def letterValidate: Validate.Plain[Char, Letter] =
Validate.fromPredicate(_.isLetter, t => s"isLetter('$t')", Letter())
}
object LowerCase {
implicit def lowerCaseValidate: Validate.Plain[Char, LowerCase] =
Validate.fromPredicate(_.isLower, t => s"isLower('$t')", LowerCase())
}
object UpperCase {
implicit def upperCaseValidate: Validate.Plain[Char, UpperCase] =
Validate.fromPredicate(_.isUpper, t => s"isUpper('$t')", UpperCase())
}
object Whitespace {
implicit def whitespaceValidate: Validate.Plain[Char, Whitespace] =
Validate.fromPredicate(_.isWhitespace, t => s"isWhitespace('$t')", Whitespace())
}
}
| fthomas/refined | modules/core/shared/src/main/scala/eu/timepit/refined/char.scala | Scala | mit | 1,584 |
/*
* Copyright (C) 2015-2016 Paulo Angelo Alves Resende <pa@pauloangelo.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License Version 2 as
* published by the Free Software Foundation. You may not use, modify or
* distribute this program under any other version of the GNU General
* Public License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package org.hogzilla.util
import scala.collection.mutable.Map
/**
* @author pa
*/
/**
 * A captured network flow: a mutable attribute map plus the two endpoint IPs
 * ordered so that `lower_ip` <= `upper_ip`.
 *
 * @author pa
 */
case class HogFlow(map: Map[String, String], lower_ip: String, upper_ip: String) {

  /**
   * Returns the flow attribute stored under `key`.
   *
   * Uses `map(key)` instead of the former `map.get(key).get`: the exception
   * type is unchanged, but the message now names the missing key.
   *
   * @throws NoSuchElementException if `key` is not present in the map
   */
  def get(key: String): String = map(key)
}
import org.apache.spark.sql.SQLContext
:load /home/ealmansi/dev/yavi/spark/jobs/utility.scala
// Spark-shell job entry point: loads the "page_outlinks" table from the work
// directory and re-exports it as CSV. `loadTable`/`saveTableCsv` come from
// utility.scala, :load-ed above — TODO confirm that script is always loaded first.
def runJob(workDirectory: String, sqlContext: SQLContext): Unit = {
loadTable("page_outlinks", workDirectory, sqlContext)
saveTableCsv("page_outlinks", workDirectory, sqlContext)
}
| ealmansi/yavi | spark/jobs/export_page_outlinks.scala | Scala | mit | 284 |
/*
* Seldon -- open source prediction engine
* =======================================
* Copyright 2011-2015 Seldon Technologies Ltd and Rummble Ltd (http://www.seldon.io/)
*
**********************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************************
*/
package io.seldon.spark.wikipedia
import org.apache.log4j.Level
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.graphx.Edge
import org.apache.spark.graphx.Graph
import org.apache.spark.graphx.lib.TriangleCountEdge
import org.apache.spark.graphx.VertexRDD
import org.apache.commons.lang.StringEscapeUtils
import scala.collection.mutable.ListBuffer
import org.apache.spark.graphx.util.GraphGenerators
import org.jets3t.service.S3Service
import scala.collection.mutable.ArrayBuffer
// Command-line configuration for the shortest-path job, populated by the
// scopt parser in the companion's main(). Empty strings mean "not supplied";
// query/targets may come either inline (comma separated) or from input files.
case class PathConfig (
local : Boolean = false,
graphInputPath : String = "",
outputPath : String = "",
query : String = "",
queryInputPath : String = "",
targets : String = "",
targetsInputPath : String = "",
awsKey : String = "",
awsSecret : String = "",
maxVertexTriangles : Int = Int.MaxValue,
numResultsPerQuery : Int = 100)
// Runs single-source shortest-path searches (GraphX Pregel) from each query
// term over a pre-built term graph, optionally scoring only the given targets.
class FindShortestPaths(private val sc : SparkContext,config : PathConfig) {
/*
def outputResultsToFile(results : String) {
import org.jets3t.service.impl.rest.httpclient.RestS3Service
import org.jets3t.service.model.{S3Object, S3Bucket}
import org.jets3t.service.security.AWSCredentials
val service: S3Service = new RestS3Service(new AWSCredentials(config.awsKey, config.awsSecret))
val bucket = service.getBucket("seldon-data")
val obj = new S3Object(config.outputPath+"/tag_expansion.txt", results)
service.putObject(bucket, obj)
}
*/
// Writes the result lines to config.outputPath as a single-partition text file.
def outputResultsToFile(results : Array[String]) {
val rdd = sc.parallelize(results, 1)
rdd.saveAsTextFile(config.outputPath)
}
// Pregel shortest-path over vertices of (distance, hopCount, triangleCount).
// Expansion stops at 4 hops from the source and skips vertices whose triangle
// count reaches maxTriangles (a hub-filtering heuristic).
def doSearch(maxTriangles : Int,initialGraph : org.apache.spark.graphx.Graph[(Double,Int,Int),Double]) : org.apache.spark.graphx.Graph[(Double,Int,Int),Double] =
{
val sssp = initialGraph.pregel((Double.PositiveInfinity,0,0))(
// Vertex program: keep whichever of current/incoming has the smaller distance.
(id, attr1, attr2) => {
if (attr1._1 < attr2._1)
attr1
else
attr2
},
triplet => { // Send Message
if (triplet.srcAttr._2 < 4 && (triplet.srcAttr._1 + triplet.attr < triplet.dstAttr._1) && triplet.dstAttr._3 < maxTriangles) {
Iterator((triplet.dstId, (triplet.srcAttr._1 + triplet.attr,triplet.srcAttr._2+1,triplet.dstAttr._3)))
} else {
Iterator.empty
}
},
// Merge: keep the message with the smaller distance.
(a,b) => {
if (a._1 < b._1)
a
else
b
}
)
sssp
}
// Loads vertices/edges, resolves query and target terms to vertex ids, runs
// one Pregel search per query and writes "query,term,score" result lines.
def run()
{
// Vertex lines look like "(id,(name,numTriangles))"; malformed lines are dropped.
val vertices = sc.textFile(config.graphInputPath+"/vertices").flatMap{line =>
val parts = line.substring(1,line.length()-1).split(",")
if (parts.length < 3)
None
else
{
val nameNumTri = parts(1)+","+parts(2)
val parts2 = nameNumTri.substring(1,nameNumTri.length()-1).split(",")
Seq((parts(0).toLong,(parts2(0),parts2(1).toInt)))
}
}.cache()
// Edge lines look like "Edge(src,dst,weight)"; edge attr becomes 1.0 - weight
// so that stronger similarity means shorter distance.
val edges = sc.textFile(config.graphInputPath+"/edges").map{line =>
val parts = line.substring(5,line.length()-1).split(",")
val from = parts(0).toLong
val to = parts(1).toLong
val weight = parts(2).toDouble
Edge(parts(0).toLong,parts(1).toLong,1.0-parts(2).toDouble)
}
// NOTE(review): if neither query nor queryInputPath is supplied, `queries`
// stays null and queries.size below throws NPE — verify CLI validation covers this.
var queries : Array[String] = null
if (config.query.nonEmpty)
queries = config.query.split(",").map(_.toLowerCase())
else if (config.queryInputPath.nonEmpty)
queries = sc.textFile(config.queryInputPath, 1).map(_.toLowerCase()).collect()
var targets : Array[String] = null
if (config.targets.nonEmpty)
targets = config.targets.split(",")
else if (config.targetsInputPath.nonEmpty)
targets = sc.textFile(config.targetsInputPath, 1).collect()
else
targets = Array.empty[String]
// Map term name -> vertex id for all query and target terms.
val queryToVertexId = vertices.filter( v => queries.contains(v._2._1) || targets.contains(v._2._1)).map(t => (t._2._1,t._1)).collectAsMap()
println("map size-->"+queryToVertexId.size)
// NOTE(review): collects the whole vertex set to the driver — fine for small
// graphs, a memory hazard for large ones.
val vMap = vertices.collectAsMap()
val graph = Graph(vertices,edges)
// NOTE(review): maxSearchDepth is computed but never used (doSearch hard-codes 4).
var maxSearchDepth = 4
if (targets.nonEmpty)
maxSearchDepth = 4
else
maxSearchDepth = 2
val results = ArrayBuffer[String]()
var counter : Int = 0
println("Running "+queries.size+" queries")
for (query <- queries)
{
counter = counter + 1
println("looking at query "+counter.toString()+" "+query)
if (queryToVertexId.contains(query))
{
println("Found "+query)
val sourceId: Long = queryToVertexId(query)
// Initialize the graph such that all vertices except the root have distance infinity.
val initialGraph = graph.mapVertices((id, nameNumTri) => if (id == sourceId) (0.0,0,nameNumTri._2) else (100000.0,0,nameNumTri._2))
val sssp = doSearch(config.maxVertexTriangles, initialGraph)
if (targets.nonEmpty)
{
// Score only the requested target terms.
val targetIds = queryToVertexId.values.toSet
val matches = sssp.vertices.filter( v => targetIds.contains(v._1)).collectAsMap()
for ((id,weight) <- matches)
results.append(query+","+vMap(id)._1+","+weight)
}
else
{
// No explicit targets: take the N closest reachable vertices.
val numResults = config.numResultsPerQuery
val matches = sssp.vertices.filter(_._2._1 < 10).sortBy(_._2._1, true, 1).take(numResults)
for ((id,weight) <- matches)
results.append(query+","+vMap(id)._1+","+weight)
}
}
else
println("Not found "+query)
}
outputResultsToFile(results.toArray)
}
}
// CLI entry point: parses arguments with scopt, wires S3 credentials into the
// Hadoop configuration and runs the FindShortestPaths job.
object FindShortestPaths
{
def main(args: Array[String])
{
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)
// NOTE(review): the short option 'i' is declared for five different options
// below — looks like a copy-paste slip; verify how scopt resolves duplicates.
val parser = new scopt.OptionParser[PathConfig]("ClusterUsersByDimension") {
head("CrateVWTopicTraining", "1.x")
opt[Unit]('l', "local") action { (_, c) => c.copy(local = true) } text("debug mode - use local Master")
opt[String]('i', "input-graph-path") required() valueName("input graph path") action { (x, c) => c.copy(graphInputPath = x) } text("path for graph input needs subfolders vertices and edges")
opt[String]('i', "query-input-path") valueName("search terms input file") action { (x, c) => c.copy(queryInputPath = x) } text("file with query words to start search from. Comma separated.")
opt[String]('i', "targets-input-path") valueName("target terms input file") action { (x, c) => c.copy(targetsInputPath = x) } text("file with target words to score. Comma separated.")
opt[String]('i', "query") valueName("search terms") action { (x, c) => c.copy(query = x) } text("query words to start search from. Comma separated.")
opt[String]('i', "targets") valueName("target terms") action { (x, c) => c.copy(targets = x) } text("target words to score. Comma separated.")
opt[String]('o', "output-path") required() valueName("path url") action { (x, c) => c.copy(outputPath = x) } text("path prefix for output")
opt[String]('a', "awskey") required() valueName("aws access key") action { (x, c) => c.copy(awsKey = x) } text("aws key")
opt[String]('s', "awssecret") required() valueName("aws secret") action { (x, c) => c.copy(awsSecret = x) } text("aws secret")
opt[Int]('v', "max-vertex-triangles") valueName("max vertex triangles") action { (x, c) => c.copy(maxVertexTriangles = x) } text("max vertex triangles in search")
opt[Int]('n', "num-results-per-query") valueName("num results per query") action { (x, c) => c.copy(numResultsPerQuery = x) } text("number of results per query")
}
parser.parse(args, PathConfig()) map { config =>
val conf = new SparkConf()
.setAppName("FindShortestPath")
if (config.local)
conf.setMaster("local")
.set("spark.executor.memory", "8g")
val sc = new SparkContext(conf)
try
{
// Route s3:// URIs through the native S3 filesystem with the given credentials.
sc.hadoopConfiguration.set("fs.s3.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem")
sc.hadoopConfiguration.set("fs.s3n.awsAccessKeyId", config.awsKey)
sc.hadoopConfiguration.set("fs.s3n.awsSecretAccessKey", config.awsSecret)
val cByd = new FindShortestPaths(sc,config)
cByd.run()
}
finally
{
// Always release the SparkContext, even when the job fails.
println("Shutting down job")
sc.stop()
}
} getOrElse
{
}
// set up environment
}
} | smrjan/seldon-server | offline-jobs/spark/src/main/scala/io/seldon/spark/wikipedia/FindShortestPaths.scala | Scala | apache-2.0 | 9,260 |
package com.mz.training.services
import akka.actor.{ActorContext, ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import com.mz.training.common.jdbc.JDBCConnectionActor
import com.mz.training.common.jdbc.JDBCConnectionActor._
import com.mz.training.common.repositories.{Insert, Inserted, SelectCount, SelectPaging}
import com.mz.training.common.services._
import com.mz.training.common.supervisors.DataSourceSupervisorActor
import com.mz.training.domains.address.Address
import com.mz.training.domains.address.AddressServiceActor.FindOrCreateAddress
import com.mz.training.domains.address.{AddressRepositoryActor, AddressServiceActor}
import com.mz.training.domains.user.UserRepositoryActor
import org.scalatest.{BeforeAndAfterAll, FunSuiteLike, Matchers}
import scala.collection.mutable
/**
* Created by zemi on 13. 11. 2015.
*/
// Akka TestKit suite for AddressServiceActor. Tests 0-6 stub the repository
// layer with TestProbes (or a probed JDBC actor) and replay canned responses;
// test 7 is an integration test against a real data source.
class AddressServiceActorTest extends TestKit(ActorSystem("test-jdbc-demo-AddressServiceActorTest"))
with FunSuiteLike
with BeforeAndAfterAll
with Matchers
with ImplicitSender {
// Shared infrastructure actors used by the integration test (test 7).
val dataSourceSupervisor = system.actorOf(DataSourceSupervisorActor.props, DataSourceSupervisorActor.actorName)
val jdbcConActor = system.actorOf(JDBCConnectionActor.props)
test("0. Create address") {
// NOTE(review): unused stub helper that returns null — candidate for removal.
def mockChild(contect: ActorSystem): ActorRef = {
// contect.actorOf(new TestProbe())
null
}
val userRepository = TestProbe()
val addressRepository = TestProbe()
// The service receives factories so probes can stand in for repositories.
val addressService = system.actorOf(Props(classOf[AddressServiceActor],
(context: ActorContext) => userRepository.ref,
(context: ActorContext) => addressRepository.ref))
//street: String, zip: String, houseNumber: String, city: String
addressService ! Create(Address(0, "StreetCreate", "zipCreate", "houseNumCreate", "CityCreate"))
addressRepository.expectMsgType[Insert[Address]]
addressRepository.reply(Inserted(999))
expectMsgAnyOf(Created(999))
}
test("1. Create address") {
// Same scenario as test 0 but stubbing one level lower, at the JDBC actor.
val jdbcConA = TestProbe()
val userRepository = Props(new UserRepositoryActor(jdbcConA.ref))
val addressRepository = Props(new AddressRepositoryActor(jdbcConA.ref))
val addressService = system.actorOf(AddressServiceActor.props(userRepository, addressRepository))
//street: String, zip: String, houseNumber: String, city: String
addressService ! Create(Address(0, "StreetCreate", "zipCreate", "houseNumCreate", "CityCreate"))
jdbcConA.expectMsgType[JdbcInsert]
jdbcConA.reply(GeneratedKeyRes(999))
expectMsgAnyOf(Created(999))
}
test("2. delete address") {
val jdbcConA = TestProbe()
val userRepository = Props(new UserRepositoryActor(jdbcConA.ref))
val addressRepository = Props(new AddressRepositoryActor(jdbcConA.ref))
val addressService = system.actorOf(AddressServiceActor.props(userRepository, addressRepository))
addressService ! Delete(Address(12, "Street_Find", "zip_Find", "houseNum_Find", "City_Find"))
jdbcConA.expectMsgType[JdbcDelete]
jdbcConA.reply(true)
expectMsgType[Deleted]
}
test("3. Find address by all attributes") {
val jdbcConA = TestProbe()
val userRepository = Props(new UserRepositoryActor(jdbcConA.ref))
val addressRepository = Props(new AddressRepositoryActor(jdbcConA.ref))
val addressService = system.actorOf(AddressServiceActor.props(userRepository, addressRepository))
addressService ! FindById(0)
jdbcConA.expectMsgType[JdbcSelect[Address]]
jdbcConA.reply(JdbcSelectResult[Option[Address]](Some(Address(3, "StreetFind", "zipFind", "houseNumFind", "CityFind"))))
expectMsgType[Found[Address]]
}
test("4. Find or create address - create") {
// "Find or create" where the lookup misses: expect insert + re-select.
val jdbcConA = TestProbe()
val userRepository = Props(new UserRepositoryActor(jdbcConA.ref))
val addressRepository = Props(new AddressRepositoryActor(jdbcConA.ref))
val addressService = system.actorOf(AddressServiceActor.props(userRepository, addressRepository))
addressService ! FindOrCreateAddress(Address(0, "Street_Find", "zip_Find", "houseNum_Find", "City_Find"))
jdbcConA.expectMsgType[JdbcSelect[Address]]
// jdbcConA.reply(JdbcSelectResult[Option[Address]](Some(Address(3, "StreetFind", "zipFind", "houseNumFind", "CityFind"))))
jdbcConA.reply(JdbcSelectResult(None))
jdbcConA.expectMsgType[JdbcInsert]
jdbcConA.reply(GeneratedKeyRes(12))
val address = Address(12, "Street_Find", "zip_Find", "houseNum_Find", "City_Find")
jdbcConA.expectMsgType[JdbcSelect[Address]]
jdbcConA.reply(JdbcSelectResult(Some(address)))
val addresses = mutable.MutableList(address)
expectMsgAllOf(Found(addresses))
}
test("5. Find or create address - find") {
// "Find or create" where the lookup hits: no insert expected.
val jdbcConA = TestProbe()
val userRepository = Props(new UserRepositoryActor(jdbcConA.ref))
val addressRepository = Props(new AddressRepositoryActor(jdbcConA.ref))
val addressService = system.actorOf(AddressServiceActor.props(userRepository, addressRepository))
addressService ! FindOrCreateAddress(Address(12, "Street_Find", "zip_Find", "houseNum_Find", "City_Find"))
jdbcConA.expectMsgType[JdbcSelect[Address]]
val address = Address(12, "Street_Find", "zip_Find", "houseNum_Find", "City_Find")
jdbcConA.reply(JdbcSelectResult(Some(address)))
val addresses = mutable.MutableList(Address(12, "Street_Find", "zip_Find", "houseNum_Find", "City_Find"))
expectMsgAllOf(Found(addresses))
}
test("6. Pagination ") {
// Pagination asks the repository for a count first, then the page itself.
val userRepository = TestProbe()
val addressRepository = TestProbe()
val addressService = system.actorOf(Props(classOf[AddressServiceActor],
(context: ActorContext) => userRepository.ref,
(context: ActorContext) => addressRepository.ref))
addressService ! GetAllPagination[Address](2, 2)
addressRepository.expectMsgType[SelectCount]
addressRepository.reply(Some[Long](12234l))
addressRepository.expectMsgType[SelectPaging]
val resultList = List(Address(12, "Street_Find", "zip_Find", "houseNum_Find", "City_Find"))
addressRepository.reply(resultList)
val result = expectMsgType[GetAllPaginationResult[Address]]
result.result should be(resultList)
}
test("7. Real pagination test") {
// Integration test: uses the real JDBC connection actor and a live database.
val userRepository = system.actorOf(UserRepositoryActor.props(jdbcConActor))
val addressRepository = system.actorOf(AddressRepositoryActor.props(jdbcConActor))
val addressService = system.actorOf(Props(classOf[AddressServiceActor],
(context: ActorContext) => userRepository,
(context: ActorContext) => addressRepository))
addressService ! GetAllPagination[Address](1, 16)
val resultDoc = expectMsgType[GetAllPaginationResult[Address]]
resultDoc.result.size should not be(0)
}
}
| michalzeman/angular2-training | akka-http-server/src/test/scala/com/mz/training/services/AddressServiceActorTest.scala | Scala | mit | 6,692 |
package vultura.calibration
import vultura.util.graph.graphviz.DotGraph
trait Edge
/** Features:
* - damping
* - mutable updating of edge values (set operation)
* - type-safe lookup (not necessary if all values are double arrays)
*
* What does a problem do?
*
* - define a set of edges
*/
/** Features:
* - damping
* - mutable updating of edge values (set operation)
* - type-safe lookup (not necessary if all values are double arrays)
*
* What does a problem do?
*
* - define a set of edges
*
* A calibration problem: a directed computation graph over nodes whose state
* is a Double array, with parameter (source) nodes initialized from a
* `Parameter` value and computed nodes derived from their dependencies.
*/
trait CalProblem {
/** Internal representation of values. Only arrays of doubles. */
type IR = Array[Double]
/** Node type exposed by a problem. This defined the signature methods `initializer` and `nodes`. */
type N <: Node
/** The type of the parameter value that has to be provided to initialize the problem;
* e.g. assign values to the source nodes. */
type Parameter
/** Node type. A node is a representation of a node within the computation graph, but also carries the
* computation rule and the set of dependencies. */
sealed trait Node {
/** Size of the array required to store the state of this edge. */
def arraySize: Int
/** `Some(deps)` for computed nodes, `None` for parameter (source) nodes. */
def dependenciesOpt: Option[IndexedSeq[N]]
}
/** A node whose value is computed from the values of its dependencies. */
trait ComputedNode extends Node {
def dependencies: IndexedSeq[N]
override def dependenciesOpt: Option[IndexedSeq[N]] = Some(dependencies)
/**
* - first parameter: `zip`s with `dependencies`.
* - second parameter: Result of computation shall be stored here. Content of result is garbage.
*/
def compute(ins: Array[IR], result: IR): Unit
}
/** A source node whose value comes from the problem `Parameter`, not from other nodes. */
trait ParameterNode extends Node {
override def dependenciesOpt: Option[IndexedSeq[N]] = None
}
/** Constructs a new initial value for each edge. */
def initializer(param: Parameter): N => IR
/** The set of nodes defined by this problem. */
def nodes: Set[N]
/** Graphviz rendering of the dependency graph (edges point dependency -> dependent). */
def computationGraph: DotGraph[N,(N,N)] =
DotGraph[N,(N,N)](
nodes,
for (e <- nodes; d <- e.dependenciesOpt.getOrElse(IndexedSeq()))
yield (d, e)).labelNodes{case e => e.toString}
}
object CalProblem {
/** Refinement alias fixing the `Parameter` member type to `P`. */
type Aux[P] = CalProblem {type Parameter = P}
}
/** Mixin for problems that can assemble a final result `R` from a converged node valuation. */
trait ResultBuilder[+R] {outer: CalProblem =>
def buildResult(valuation: outer.N => IR): R
}
| ziggystar/vultura-factor | src/main/scala/vultura/calibration/CalProblem.scala | Scala | mit | 2,056 |
package de.twentyone.sbt.models
/**
 * One parsed line of a Play routes file before formatting.
 *
 * All column accessors default to the empty string; each leaf type overrides
 * only the columns it actually carries. Leaf classes are `final`, as is
 * conventional for a sealed ADT.
 */
sealed trait UnformattedLine {
  /** Raw text of the line ("" for blank lines). */
  val content: String = ""
  /** HTTP verb column (routes only). */
  val verb: String = ""
  /** URL path column (routes only). */
  val path: String = ""
  /** Controller call column (routes only). */
  val call: String = ""
}

/** A blank line. */
final case class EmptyLine() extends UnformattedLine

/** A comment line; `content` holds the complete comment text. */
final case class CommentLine(override val content: String) extends UnformattedLine

/** A route definition split into its verb/path/call columns. */
final case class RouteLine(override val content: String,
                           override val verb: String,
                           override val path: String,
                           override val call: String)
    extends UnformattedLine
| 21re/sbt-play-routes-formatter | src/main/scala/de/twentyone/sbt/models/UnformattedLine.scala | Scala | mit | 518 |
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
// Lazily-initialized Slick database handle, read from the "slick" section of
// the application config. Nothing connects until `db` is first touched.
object MyDatabase {
lazy val config = DatabaseConfig.forConfig[JdbcProfile]("slick")
lazy val db = config.db
}
| lastland/scala-forklift-start-template | app/src/main/scala/Database.scala | Scala | cc0-1.0 | 184 |
/*
* @author Philip Stutz
* @author Daniel Strebel
*
* Copyright 2012 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.nodeprovisioning.torque
import java.io.File
import java.io.FileOutputStream
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.future
import scala.language.postfixOps
import scala.sys.process.stringToProcess
import com.signalcollect.serialization.DefaultSerializer
import scala.util.Random
// Execution host that ships an evaluation jar to a Torque cluster and submits
// one cluster job per Job, serializing each job's config to a file first.
case class TorqueHost(
jobSubmitter: AbstractJobSubmitter,
localJarPath: String,
jarDescription: String = (Random.nextInt.abs % 1000).toString,
jvmParameters: String = "-Xmx63000m -Xms63000m",
jdkBinPath: String = "",
mainClass: String = "com.signalcollect.nodeprovisioning.torque.JobExecutor",
priority: String = TorquePriority.superfast) extends ExecutionHost {
val fileSeparator = System.getProperty("file.separator")
// Bare jar file name, i.e. localJarPath stripped of its directory part.
val jarName = localJarPath.substring(localJarPath.lastIndexOf(fileSeparator) + 1, localJarPath.size)
def executeJobs(jobs: List[Job]) = executeJobs(jobs, true)
// Copies the jar (optionally), then serializes each job config, ships it to
// the cluster and submits the job. Blocks until every submission completes.
def executeJobs(jobs: List[Job], copyExecutable: Boolean = true) = {
/** COPY EVAL JAR TO TORQUE HOME DIRECTORY */
if (copyExecutable) {
jobSubmitter.copyFileToCluster(localJarPath)
}
/** SUBMIT AN EVALUATION JOB FOR EACH CONFIGURATION */
val jubSubmissions = jobs map {
job =>
future {
println("Submitting job " + job.jobId + " ...")
val config = DefaultSerializer.write(job)
val folder = new File("." + fileSeparator + "config-tmp")
if (!folder.exists) {
folder.mkdir
}
val configPath = "." + fileSeparator + "config-tmp" + fileSeparator + job.jobId + ".config"
val out = new FileOutputStream(configPath)
out.write(config)
out.close
jobSubmitter.copyFileToCluster(configPath)
// NOTE(review): runs "rm <path>" via the shell-process DSL; configPath is
// built from jobId, so confirm jobIds can never contain shell metacharacters.
val deleteConfig = "rm " + configPath
deleteConfig !!
val result = jobSubmitter.runOnClusterNode(job.jobId.toString, jarName, mainClass, priority, jvmParameters, jdkBinPath)
println("Job " + job.jobId + " has been submitted.")
result
}
}
jubSubmissions foreach (Await.ready(_, Duration.Inf))
// NOTE(review): failure callbacks are attached only after awaiting completion;
// they still fire for already-failed futures, but exceptions are only printed.
jubSubmissions map (_.onFailure({ case t: Throwable => println(t) }))
jubSubmissions map (_.onFailure({ case e: Exception => e.printStackTrace }))
println("All jobs submitted.")
}
} | gmazlami/dcop-maxsum | src/main/scala/com/signalcollect/nodeprovisioning/torque/TorqueHost.scala | Scala | apache-2.0 | 3,087 |
// Copyright 2017 Foursquare Labs Inc. All Rights Reserved.
// NOTE(awinter): ideally this belongs in common but we're keeping it here to
// play nice with builds.
package io.fsq.spindle.codegen.binary
import scala.collection.mutable
/* Helper class storing the timing information for each block in BlockTimer. */
/** Accumulated timing for one named block: total elapsed nanoseconds and call count. */
class BlockDets() {
  /** Sum of all recorded durations, in nanoseconds. */
  var totalTimeNanos: Long = 0
  /** Number of times the block has been recorded. */
  var calls: Int = 0

  /** Records one invocation of the block that took `nanos` nanoseconds. */
  def bump(nanos: Long): Unit = {
    totalTimeNanos += nanos
    calls += 1
  }
}
/* HashMap wrapper that times blocks of code. */
/**
 * Wall-clock timer for named code blocks. Call [[start]] at the top of each
 * block; successive starts (or an explicit [[stop]]) close the previous one.
 * Not thread-safe.
 */
class BlockTimer {
  /** Per-block timing details, keyed by block name. */
  val blocks: mutable.Map[String, BlockDets] = mutable.Map[String, BlockDets]()
  /** Name of the block currently being timed, or null when none is open. */
  var currentName: String = null
  /** System.nanoTime at which the current measurement started. */
  var startTime: Long = 0

  /* Call this at the top of a block. Calls [[stop]] internally. */
  def start(name: String): Unit = {
    stop()
    currentName = name
    startTime = System.nanoTime()
  }

  /*
  Call once before [[render]] to close your final block. Safe to call multiple times.
  Used internally by [[start]].
  */
  def stop(): Unit = {
    val now: Long = System.nanoTime()
    // Only credit elapsed time when a block is actually open.
    Option(currentName).foreach { name =>
      blocks.getOrElseUpdate(name, new BlockDets).bump(now - startTime)
    }
    currentName = null
    startTime = now
  }

  /* Human-readable string for each block. */
  def render(): Iterable[String] =
    blocks.map { case (name, dets) =>
      s"BlockDets($name ms=${dets.totalTimeNanos / 1000000} calls=${dets.calls})"
    }
}
| foursquare/fsqio | src/jvm/io/fsq/spindle/codegen/binary/BlockTimer.scala | Scala | apache-2.0 | 1,435 |
package org.jetbrains.plugins.scala.lang.controlFlow
import com.intellij.openapi.editor.SelectionModel
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase
import org.jetbrains.plugins.scala.ScalaFileType
import org.jetbrains.plugins.scala.lang.psi.api.{ScControlFlowOwner, ScalaFile}
import org.jetbrains.plugins.scala.lang.psi.controlFlow.Instruction
import org.jetbrains.plugins.scala.util.TestUtils
import org.junit.Assert
/**
* @author ilyas
*/
/**
 * @author ilyas
 *
 * Data-driven control-flow tests: each *.test fixture contains source code
 * (first section) and the expected control-flow dump (second section). The
 * editor selection in the fixture marks the ScControlFlowOwner under test.
 */
class ControlFlowTest extends LightCodeInsightFixtureTestCase {
protected override def getBasePath = TestUtils.getTestDataPath + "/controlFlow/"
override def setUp() {
super.setUp()
myFixture.setTestDataPath(getBasePath)
}
// Loads <testName>.test, builds the control flow for the selected (or whole)
// element and compares its dump with the expected output from the fixture.
def doTest() {
val input: java.util.List[String] = TestUtils.readInput(getBasePath + getTestName(true) + ".test")
myFixture.configureByText(ScalaFileType.INSTANCE, input.get(0))
val file: ScalaFile = myFixture.getFile.asInstanceOf[ScalaFile]
val model: SelectionModel = myFixture.getEditor.getSelectionModel
// With no selection, fall back to the whole file's range.
val start: PsiElement = file.findElementAt(if (model.hasSelection) model.getSelectionStart else 0)
val end: PsiElement = file.findElementAt(if (model.hasSelection) model.getSelectionEnd - 1 else file.getTextLength - 1)
val owner: ScControlFlowOwner = PsiTreeUtil.getParentOfType(PsiTreeUtil.findCommonParent(start, end), classOf[ScControlFlowOwner], false)
val instructions = owner.getControlFlow()
val cf: String = dumpControlFlow(instructions)
Assert.assertEquals(input.get(1).trim, cf.trim)
}
// One instruction per line, matching the format of the fixture's expected section.
protected def dumpControlFlow(instructions: Seq[Instruction]) = instructions.mkString("\\n")
def testAssignment() {doTest()}
def testIfStatement() {doTest()}
def testIfStatement2() {doTest()}
def testWhile() {doTest()}
def testWhile2() {doTest()}
def testMatch1() {doTest()}
def testFor1() {doTest()}
def testFor2() {doTest()}
def testDoWhile1() {doTest()}
def testReturn1() {doTest()}
def testMethod1() {doTest()}
def testThrow1() {doTest()}
def testKaplan_1703() {doTest()}
def testKaplan_1703_2() {doTest()}
def testTry1() {doTest()}
def testTry2() {doTest()}
def testTry3() {doTest()}
def testNoneThrow() = doTest()
def testScl_7393() = doTest()
def testUnresolvedParamThrow() = doTest()
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/lang/controlFlow/ControlFlowTest.scala | Scala | apache-2.0 | 2,378 |
/*
* Copyright 2013 Stephane Godbillon (@sgodbillon)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.bson
import exceptions.DocumentKeyNotFound
import scala.util.{ Failure, Success, Try }
import buffer._
import utils.Converters
/** A BSON Double (BSON type `0x01`). */
case class BSONDouble(value: Double) extends BSONValue { val code = 0x01.toByte }

/** A BSON String (BSON type `0x02`). */
case class BSONString(value: String) extends BSONValue { val code = 0x02.toByte }
/**
* A `BSONDocument` structure (BSON type `0x03`).
*
* A `BSONDocument` is basically a stream of tuples `(String, BSONValue)`.
* It is completely lazy. The stream it wraps is a `Stream[Try[(String, BSONValue)]]` since
* we cannot be sure that a not yet deserialized value will be processed without error.
*/
case class BSONDocument(stream: Stream[Try[BSONElement]]) extends BSONValue {
  // BSON type code for an embedded document.
  val code = 0x03.toByte

  /**
   * Returns the [[BSONValue]] associated with the given `key`.
   *
   * If the key is not found or the matching value cannot be deserialized, returns `None`.
   */
  def get(key: String): Option[BSONValue] = {
    // Elements that failed to deserialize are skipped silently here
    // (unlike getTry, which propagates their error).
    stream.find {
      case Success(element) => element._1 == key
      case Failure(e) => false
    }.map(_.get._2)
  }

  /**
   * Returns the [[BSONValue]] associated with the given `key`.
   *
   * If the key is not found or the matching value cannot be deserialized, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getTry(key: String): Try[BSONValue] = Try {
    // Note: a corrupt element encountered *before* the key aborts the scan by
    // rethrowing its error, which the surrounding Try captures as a Failure.
    stream.find {
      case Success(element) => element._1 == key
      case Failure(e) => throw e
    }.map(_.get._2).getOrElse(throw DocumentKeyNotFound(key))
  }

  /**
   * Returns the [[BSONValue]] associated with the given `key`.
   *
   * If the key could not be found, the resulting option will be `None`.
   * If the matching value could not be deserialized, returns a `Failure`.
   */
  def getUnflattenedTry(key: String): Try[Option[BSONValue]] = getTry(key) match {
    // "Key missing" is flattened into Success(None); real errors stay Failures.
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }

  /**
   * Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `None`.
   */
  def getAs[T](s: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] = {
    get(s).flatMap { element =>
      reader match {
        // @unchecked: the reader's BSONValue subtype is erased at runtime.
        case r: BSONReader[BSONValue, T]@unchecked => r.readOpt(element)
        case _ => None
      }
    }
  }

  /**
   * Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getAsTry[T](s: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] = {
    val tt = getTry(s)
    tt.flatMap { element => Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)) }
  }

  /**
   * Returns the [[BSONValue]] associated with the given `key`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, returns a `Success` holding `None`.
   * If the value could not be deserialized or converted, returns a `Failure`.
   */
  def getAsUnflattenedTry[T](s: String)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[Option[T]] = getAsTry(s)(reader) match {
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }

  /** Creates a new [[BSONDocument]] containing all the elements of this one and the elements of the given document. */
  def add(doc: BSONDocument): BSONDocument = new BSONDocument(stream ++ doc.stream)

  /** Creates a new [[BSONDocument]] containing all the elements of this one and the given `elements`. */
  def add(elements: Producer[(String, BSONValue)]*): BSONDocument = new BSONDocument(
    // Producers yielding nothing (e.g. empty options) contribute no element.
    stream ++ elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)

  /** Alias for `add(doc: BSONDocument): BSONDocument` */
  def ++(doc: BSONDocument): BSONDocument = add(doc)

  /** Alias for `add(elements: Producer[(String, BSONValue)]*): BSONDocument` */
  def ++(elements: Producer[(String, BSONValue)]*): BSONDocument = add(elements: _*)

  /** Returns a `Stream` for all the elements of this `BSONDocument` (failed elements are dropped). */
  def elements: Stream[BSONElement] = stream.filter(_.isSuccess).map(_.get)

  /** Is this document empty? */
  def isEmpty: Boolean = stream.isEmpty

  // Deliberately does not render the elements: doing so would force the lazy stream.
  override def toString: String = "BSONDocument(<" + (if (isEmpty) "empty" else "non-empty") + ">)"
}
object BSONDocument {
  /** Creates a new [[BSONDocument]] containing all the given `elements`. */
  def apply(elements: Producer[(String, BSONValue)]*): BSONDocument = new BSONDocument(
    // Producers yielding nothing (e.g. empty options) contribute no element.
    elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)

  /** Creates a new [[BSONDocument]] containing all the `elements` in the given `Traversable`. */
  def apply(elements: Traversable[(String, BSONValue)]): BSONDocument = {
    new BSONDocument(elements.toStream.map(Success(_)))
  }

  /** Returns a String representing the given [[BSONDocument]]. */
  def pretty(doc: BSONDocument) = BSONIterator.pretty(doc.stream.iterator)

  /** Writes the `document` into the `buffer`. */
  def write(value: BSONDocument, buffer: WritableBuffer)(implicit bufferHandler: BufferHandler = DefaultBufferHandler): WritableBuffer = {
    bufferHandler.writeDocument(value, buffer)
  }

  /**
   * Reads a `document` from the `buffer`.
   *
   * Note that the buffer's readerIndex must be set on the start of a document, or it will fail.
   */
  def read(buffer: ReadableBuffer)(implicit bufferHandler: BufferHandler = DefaultBufferHandler): BSONDocument = {
    // .get: deliberately throws if the buffer does not hold a valid document.
    bufferHandler.readDocument(buffer).get
  }

  /** An empty BSONDocument. */
  val empty: BSONDocument = BSONDocument()
}
/**
* A `BSONArray` structure (BSON type `0x04`).
*
* A `BSONArray` is a straightforward `BSONDocument` where keys are a sequence of positive integers.
*
* A `BSONArray` is basically a stream of tuples `(String, BSONValue)` where the first member is a string representation of an index.
* It is completely lazy. The stream it wraps is a `Stream[Try[(String, BSONValue)]]` since
* we cannot be sure that a not yet deserialized value will be processed without error.
*/
case class BSONArray(stream: Stream[Try[BSONValue]]) extends BSONValue {
  // BSON type code for an array.
  val code = 0x04.toByte

  /**
   * Returns the [[BSONValue]] at the given `index`.
   *
   * If there is no such `index` or the matching value cannot be deserialized, returns `None`.
   */
  def get(index: Int): Option[BSONValue] = getTry(index).toOption

  /**
   * Returns the [[BSONValue]] at the given `index`.
   *
   * If there is no such `index` or the matching value cannot be deserialized, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getTry(index: Int): Try[BSONValue] = stream.drop(index).headOption.getOrElse(Failure(DocumentKeyNotFound(index.toString)))

  /**
   * Returns the [[BSONValue]] at the given `index`.
   *
   * If there is no such `index`, the resulting option will be `None`.
   * If the matching value could not be deserialized, returns a `Failure`.
   */
  def getUnflattenedTry(index: Int): Try[Option[BSONValue]] = getTry(index) match {
    // "Index missing" is flattened into Success(None); real errors stay Failures.
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }

  /**
   * Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `None`.
   */
  def getAs[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Option[T] = {
    // NOTE(review): unlike BSONDocument.getAs (which uses readOpt), this uses an
    // unchecked cast plus Try around read — confirm both paths behave the same
    // for readers whose readOpt is not Try-based.
    getTry(index).toOption.flatMap { element =>
      Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)).toOption
    }
  }

  /**
   * Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, or the value could not be deserialized or converted, returns a `Failure`.
   * The `Failure` holds a [[exceptions.DocumentKeyNotFound]] if the key could not be found.
   */
  def getAsTry[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[T] = {
    val tt = getTry(index)
    tt.flatMap { element => Try(reader.asInstanceOf[BSONReader[BSONValue, T]].read(element)) }
  }

  /**
   * Gets the [[BSONValue]] at the given `index`, and converts it with the given implicit [[BSONReader]].
   *
   * If there is no matching value, returns a `Success` holding `None`.
   * If the value could not be deserialized or converted, returns a `Failure`.
   */
  def getAsUnflattenedTry[T](index: Int)(implicit reader: BSONReader[_ <: BSONValue, T]): Try[Option[T]] = getAsTry(index)(reader) match {
    case Failure(e: DocumentKeyNotFound) => Success(None)
    case Failure(e) => Failure(e)
    case Success(e) => Success(Some(e))
  }

  /** Creates a new [[BSONArray]] containing all the elements of this one and the elements of the given document. */
  def add(doc: BSONArray): BSONArray = new BSONArray(stream ++ doc.stream)

  /** Creates a new [[BSONArray]] containing all the elements of this one and the given `elements`. */
  def add(elements: Producer[BSONValue]*): BSONArray = new BSONArray(
    // Producers yielding nothing (e.g. empty options) contribute no element.
    stream ++ elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)

  /** Alias for `add(arr: BSONArray): BSONArray` */
  def ++(array: BSONArray): BSONArray = add(array)

  /** Alias for `add(elements: Producer[BSONValue]*): BSONArray` */
  def ++(elements: Producer[BSONValue]*): BSONArray = add(elements: _*)

  // Iterates the array as (stringified index, value) pairs, i.e. document-style
  // elements, preserving per-element failures.
  def iterator: Iterator[Try[(String, BSONValue)]] = stream.zipWithIndex.map { vv =>
    vv._1.map(vv._2.toString -> _)
  }.toIterator

  // Successfully deserialized values only; failed elements are dropped.
  def values: Stream[BSONValue] = stream.filter(_.isSuccess).map(_.get)

  // Forces the whole stream on first access.
  lazy val length = stream.size

  /** Is this array empty? */
  def isEmpty: Boolean = stream.isEmpty

  // Deliberately does not render the elements: doing so would force the lazy stream.
  override def toString: String = s"BSONArray(<${if (isEmpty) "empty" else "non-empty"}>)"
}
object BSONArray {
  /** Creates a new [[BSONArray]] containing all the given `elements`. */
  def apply(elements: Producer[BSONValue]*): BSONArray = new BSONArray(
    // Producers yielding nothing (e.g. empty options) contribute no element.
    elements.flatMap { el =>
      el.produce.map(value => Seq(Try(value))).getOrElse(Seq.empty)
    }.toStream)

  /** Creates a new [[BSONArray]] containing all the `elements` in the given `Traversable`. */
  def apply(elements: Traversable[BSONValue]): BSONArray = {
    new BSONArray(elements.toStream.map(Success(_)))
  }

  /** Returns a String representing the given [[BSONArray]]. */
  def pretty(array: BSONArray) = BSONIterator.pretty(array.iterator)

  /** An empty BSONArray. */
  val empty: BSONArray = BSONArray()
}
/**
* A BSON binary value.
*
* @param value The binary content.
* @param subtype The type of the binary content.
*/
case class BSONBinary(value: ReadableBuffer, subtype: Subtype) extends BSONValue { val code = 0x05.toByte } // TODO

object BSONBinary {
  /** Creates a [[BSONBinary]] from the given byte array. */
  def apply(value: Array[Byte], subtype: Subtype): BSONBinary =
    BSONBinary(ArrayReadableBuffer(value), subtype)
}

/** BSON Undefined value (BSON type `0x06`) */
case object BSONUndefined extends BSONValue { val code = 0x06.toByte }
/** BSON ObjectId value (BSON type `0x07`): an opaque 12-byte identifier. */
@SerialVersionUID(1L)
class BSONObjectID private (private val raw: Array[Byte]) extends BSONValue with Serializable with Equals {
  val code = 0x07.toByte

  import java.util.Arrays
  import java.nio.ByteBuffer

  /** ObjectId hexadecimal String representation */
  lazy val stringify = Converters.hex2Str(raw)

  override def toString = "BSONObjectID(\\"" + stringify + "\\")"

  override def canEqual(that: Any) : Boolean = that.isInstanceOf[BSONObjectID]

  // Content-based equality over the 12 raw bytes (hashCode below is consistent with it).
  override def equals(that: Any): Boolean = {
    canEqual(that) && Arrays.equals(this.raw, that.asInstanceOf[BSONObjectID].raw)
  }

  override lazy val hashCode: Int = Arrays.hashCode(raw)

  /** The time of this BSONObjectId, in milliseconds */
  def time: Long = this.timeSecond * 1000L

  /** The time of this BSONObjectId, in seconds (big-endian first 4 bytes) */
  def timeSecond: Int = ByteBuffer.wrap(raw.take(4)).getInt

  // Defensive copy, so callers cannot mutate the internal state.
  def valueAsArray = Arrays.copyOf(raw, 12)
}
object BSONObjectID {
  // 2^24: the counter occupies the last 3 bytes of the ObjectId layout.
  private val maxCounterValue = 16777216
  private val increment = new java.util.concurrent.atomic.AtomicInteger(scala.util.Random.nextInt(maxCounterValue))

  // Next 3-byte counter value; the addition keeps the result non-negative
  // before the modulo, even after the AtomicInteger overflows.
  private def counter = (increment.getAndIncrement + maxCounterValue) % maxCounterValue

  /**
   * The following implemtation of machineId work around openjdk limitations in
   * version 6 and 7
   *
   * Openjdk fails to parse /proc/net/if_inet6 correctly to determine macaddress
   * resulting in SocketException thrown.
   *
   * Please see:
   * * https://github.com/openjdk-mirror/jdk7u-jdk/blob/feeaec0647609a1e6266f902de426f1201f77c55/src/solaris/native/java/net/NetworkInterface.c#L1130
   * * http://lxr.free-electrons.com/source/net/ipv6/addrconf.c?v=3.11#L3442
   * * http://lxr.free-electrons.com/source/include/linux/netdevice.h?v=3.11#L1130
   * * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7078386
   *
   * and fix in openjdk8:
   * * http://hg.openjdk.java.net/jdk8/tl/jdk/rev/b1814b3ea6d3
   */
  private val machineId = {
    import java.net._

    val validPlatform = Try {
      // Take only the first two numeric components so both legacy version
      // strings ("1.8.0_181" -> "1.8") and Java 9+ strings ("11.0.2" -> "11.0")
      // parse. The previous `substring(0, 3).toFloat` threw on "11." (disabling
      // this whole check), and its `>= 1.8` compared a Float against the Double
      // 1.8 (1.8f widens to ~1.7999999523), which was false even on Java 8.
      val correctVersion = System.getProperty("java.version").split('.').take(2).mkString(".").toFloat >= 1.8f

      // BUGFIX: the property value is a String; the old comparison `== true`
      // compared a String with a Boolean and therefore could never be true.
      val noIpv6 = "true" == System.getProperty("java.net.preferIPv4Stack")

      val isLinux = System.getProperty("os.name") == "Linux"

      !isLinux || correctVersion || noIpv6
    }.getOrElse(false)

    // Check java policies. With no SecurityManager installed everything is
    // permitted; the old code called checkPermission on the null manager, and
    // the resulting NullPointerException (swallowed by Try) wrongly treated
    // that common case as "not permitted".
    val permitted = {
      val sec = System.getSecurityManager()
      sec == null || Try(sec.checkPermission(new NetPermission("getNetworkInformation"))).isSuccess
    }

    if (validPlatform && permitted) {
      // Stable 3-byte machine id derived from the first 6-byte MAC address
      // found (falling back to the hostname bytes).
      val networkInterfacesEnum = NetworkInterface.getNetworkInterfaces
      val networkInterfaces = scala.collection.JavaConverters.enumerationAsScalaIteratorConverter(networkInterfacesEnum).asScala
      val ha = networkInterfaces.find(ha => Try(ha.getHardwareAddress).isSuccess && ha.getHardwareAddress != null && ha.getHardwareAddress.length == 6)
        .map(_.getHardwareAddress)
        .getOrElse(InetAddress.getLocalHost.getHostName.getBytes)
      Converters.md5(ha).take(3)
    } else {
      // Fallback: the 3 low-order bytes of the current thread id.
      val threadId = Thread.currentThread.getId.toInt
      val arr = new Array[Byte](3)
      arr(0) = (threadId & 0xFF).toByte
      arr(1) = (threadId >> 8 & 0xFF).toByte
      arr(2) = (threadId >> 16 & 0xFF).toByte
      arr
    }
  }

  /**
   * Constructs a BSON ObjectId element from a hexadecimal String representation.
   * Throws an exception if the given argument is not a valid ObjectID.
   *
   * `parse(str: String): Try[BSONObjectID]` should be considered instead of this method.
   */
  def apply(id: String): BSONObjectID = {
    if (id.length != 24)
      throw new IllegalArgumentException(s"wrong ObjectId: '$id'")
    // 24 hex characters decode to the 12 raw bytes.
    new BSONObjectID(Converters.str2Hex(id))
  }

  /** Wraps the 12 given bytes into a BSON ObjectId (the array is defensively copied). */
  def apply(array: Array[Byte]): BSONObjectID = {
    if (array.length != 12)
      throw new IllegalArgumentException(s"wrong byte array for an ObjectId (size ${array.length})")
    new BSONObjectID(java.util.Arrays.copyOf(array, 12))
  }

  def unapply(id: BSONObjectID): Option[Array[Byte]] = Some(id.valueAsArray)

  /** Tries to make a BSON ObjectId element from a hexadecimal String representation. */
  def parse(str: String): Try[BSONObjectID] = Try(apply(str))

  /**
   * Generates a new BSON ObjectID.
   *
   * +------------------------+------------------------+------------------------+------------------------+
   * + timestamp (in seconds) +   machine identifier   +    thread identifier   +        increment       +
   * +        (4 bytes)       +        (3 bytes)       +        (2 bytes)       +        (3 bytes)       +
   * +------------------------+------------------------+------------------------+------------------------+
   *
   * The returned BSONObjectID contains a timestamp set to the current time (in seconds),
   * with the `machine identifier`, `thread identifier` and `increment` properly set.
   */
  def generate: BSONObjectID = fromTime(System.currentTimeMillis, false)

  /**
   * Generates a new BSON ObjectID from the given timestamp in milliseconds.
   *
   * +------------------------+------------------------+------------------------+------------------------+
   * + timestamp (in seconds) +   machine identifier   +    thread identifier   +        increment       +
   * +        (4 bytes)       +        (3 bytes)       +        (2 bytes)       +        (3 bytes)       +
   * +------------------------+------------------------+------------------------+------------------------+
   *
   * The included timestamp is the number of seconds since epoch, so a BSONObjectID time part has only
   * a precision up to the second. To get a reasonably unique ID, you _must_ set `onlyTimestamp` to false.
   *
   * Crafting a BSONObjectID from a timestamp with `fillOnlyTimestamp` set to true is helpful for range queries,
   * eg if you want of find documents an _id field which timestamp part is greater than or lesser than
   * the one of another id.
   *
   * If you do not intend to use the produced BSONObjectID for range queries, then you'd rather use
   * the `generate` method instead.
   *
   * @param fillOnlyTimestamp if true, the returned BSONObjectID will only have the timestamp bytes set; the other will be set to zero.
   */
  def fromTime(timeMillis: Long, fillOnlyTimestamp: Boolean = true): BSONObjectID = {
    // n of seconds since epoch. Big endian
    val timestamp = (timeMillis / 1000).toInt
    val id = new Array[Byte](12)

    id(0) = (timestamp >>> 24).toByte
    id(1) = (timestamp >> 16 & 0xFF).toByte
    id(2) = (timestamp >> 8 & 0xFF).toByte
    id(3) = (timestamp & 0xFF).toByte

    if (!fillOnlyTimestamp) {
      // machine id, 3 first bytes of md5(macadress or hostname)
      id(4) = machineId(0)
      id(5) = machineId(1)
      id(6) = machineId(2)

      // 2 bytes of the pid or thread id. Thread id in our case. Low endian
      val threadId = Thread.currentThread.getId.toInt
      id(7) = (threadId & 0xFF).toByte
      id(8) = (threadId >> 8 & 0xFF).toByte

      // 3 bytes of counter sequence, which start is randomized. Big endian
      val c = counter
      id(9) = (c >> 16 & 0xFF).toByte
      id(10) = (c >> 8 & 0xFF).toByte
      id(11) = (c & 0xFF).toByte
    }

    BSONObjectID(id)
  }
}
/** BSON boolean value (BSON type `0x08`) */
case class BSONBoolean(value: Boolean) extends BSONValue { val code = 0x08.toByte }

/** BSON date time value (BSON type `0x09`) */
case class BSONDateTime(value: Long) extends BSONValue { val code = 0x09.toByte }

/** BSON null value (BSON type `0x0A`) */
case object BSONNull extends BSONValue { val code = 0x0A.toByte }

/**
 * BSON Regex value (BSON type `0x0B`).
 *
 * @param value The expression (presumably the regex pattern source — not shown here).
 * @param flags Regex flags.
 */
case class BSONRegex(value: String, flags: String) extends BSONValue { val code = 0x0B.toByte }
/**
 * BSON DBPointer value (BSON type `0x0C`).
 *
 * @param value The referenced namespace/collection name string.
 * @param id The 12 raw bytes of the referenced ObjectId.
 */
case class BSONDBPointer(value: String, id: Array[Byte]) extends BSONValue {
  val code = 0x0C.toByte

  /** The BSONObjectID representation of this reference. */
  val objectId = BSONObjectID(id)

  override def canEqual(that: Any): Boolean = that.isInstanceOf[BSONDBPointer]

  // Structural equality: the `id` byte array is compared by content, not by reference.
  override def equals(that: Any): Boolean = {
    canEqual(that) && {
      val other = that.asInstanceOf[BSONDBPointer]
      this.value.equals(other.value) &&
        java.util.Arrays.equals(this.id, other.id)
    }
  }

  // BUGFIX: equals is overridden above but hashCode was not; the synthesized
  // case-class hashCode uses the array's *identity* hash, so two equal
  // instances (distinct but equal arrays) could hash differently, breaking
  // the equals/hashCode contract for Map/Set keys.
  override def hashCode: Int = 31 * value.hashCode + java.util.Arrays.hashCode(id)
}
/**
 * BSON JavaScript value (BSON type `0x0D`).
 *
 * @param value The JavaScript source code.
 */
case class BSONJavaScript(value: String) extends BSONValue { val code = 0x0D.toByte }

/** BSON Symbol value (BSON type `0x0E`). */
case class BSONSymbol(value: String) extends BSONValue { val code = 0x0E.toByte }

/**
 * BSON scoped JavaScript value (BSON type `0x0F`).
 *
 * @param value The JavaScript source code. TODO
 */
case class BSONJavaScriptWS(value: String) extends BSONValue { val code = 0x0F.toByte }

/** BSON Integer value (BSON type `0x10`) */
case class BSONInteger(value: Int) extends BSONValue { val code = 0x10.toByte }

/** BSON Timestamp value (BSON type `0x11`). TODO */
case class BSONTimestamp(value: Long) extends BSONValue { val code = 0x11.toByte }

/** BSON Long value (BSON type `0x12`) */
case class BSONLong(value: Long) extends BSONValue { val code = 0x12.toByte }

/** BSON Min key value (type code `0xFF`) */
object BSONMinKey extends BSONValue { val code = 0xFF.toByte }

/** BSON Max key value (type code `0x7F`) */
object BSONMaxKey extends BSONValue { val code = 0x7F.toByte }
/** Binary subtype marker for [[BSONBinary]] values. */
sealed trait Subtype {
  /** Subtype code, as stored on the wire. */
  val value: Byte
}

object Subtype {
  case object GenericBinarySubtype extends Subtype { val value = 0x00.toByte }
  case object FunctionSubtype extends Subtype { val value = 0x01.toByte }
  case object OldBinarySubtype extends Subtype { val value = 0x02.toByte }
  case object OldUuidSubtype extends Subtype { val value = 0x03.toByte }
  case object UuidSubtype extends Subtype { val value = 0x04.toByte }
  case object Md5Subtype extends Subtype { val value = 0x05.toByte }
  case object UserDefinedSubtype extends Subtype { val value = 0x80.toByte }

  // Lookup table from wire code to subtype instance (0x80.toByte == -128).
  private val byCode: Map[Byte, Subtype] =
    Seq(GenericBinarySubtype, FunctionSubtype, OldBinarySubtype, OldUuidSubtype,
      UuidSubtype, Md5Subtype, UserDefinedSubtype)
      .map(subtype => subtype.value -> subtype)
      .toMap

  /** Resolves the subtype for the given wire `code`; throws NoSuchElementException for unknown codes. */
  def apply(code: Byte) =
    byCode.getOrElse(code, throw new NoSuchElementException(s"binary type = $code"))
}
| bfil/ReactiveMongo | bson/src/main/scala/types.scala | Scala | apache-2.0 | 22,655 |
package dsmoq.maintenance.data.group
/**
 * Data used to render the "add group member" screen.
 *
 * @param groupId ID of the group a member is being added to
 * @param groupName display name of that group
 */
case class MemberAddData(
  groupId: String,
  groupName: String
)
| nkawa/dsmoq | server/maintenance/src/main/scala/dsmoq/maintenance/data/group/MemberAddData.scala | Scala | apache-2.0 | 183 |
/*
* Copyright 2011-2012 The myBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.scala.samples.select
import org.mybatis.scala.mapping._
import org.mybatis.scala.config._
import org.mybatis.scala.session._
import org.mybatis.scala.samples.util._
// Model beans (Immutable case class) ==========================================
// Immutable model bean, populated positionally via its constructor (see the ResultMap in CDB.findAll).
case class CPerson(id : Int, firstName : String, lastName : String)
// Data access layer ===========================================================
object CDB {

  // Simple select function
  // Selects people whose first name matches the LIKE pattern bound to #{name};
  // columns are mapped positionally onto CPerson's constructor.
  val findAll = new SelectListBy[String,CPerson] {

    // Constructor Mapping (Warning: Order is important)
    resultMap = new ResultMap[CPerson] {
      idArg(column="id_", javaType=T[Int])
      arg(column="first_name_", javaType=T[String])
      arg(column="last_name_", javaType=T[String])
    }

    def xsql =
      """
SELECT
id_, first_name_, last_name_
FROM
person
WHERE
first_name_ LIKE #{name}
"""
  }

  // Datasource configuration
  // In-memory HSQLDB, so the sample is fully self-contained.
  val config = Configuration(
    Environment(
      "default",
      new JdbcTransactionFactory(),
      new PooledDataSource(
        "org.hsqldb.jdbcDriver",
        "jdbc:hsqldb:mem:scala",
        "sa",
        ""
      )
    )
  )

  // Add the data access method to the default namespace
  config += findAll
  config ++= DBSchema
  config ++= DBSampleData

  // Build the session manager
  lazy val context = config.createPersistenceContext
}
// Application code ============================================================
/** Sample entry point: creates the schema, loads sample rows, then prints every matching person. */
object SelectImmutableSample {

  def main(args : Array[String]) : Unit = {
    // All the work happens inside a single transaction.
    CDB.context.transaction { implicit session =>
      DBSchema.create
      DBSampleData.populate

      for (person <- CDB.findAll("%a%")) {
        person match {
          case CPerson(id, firstName, lastName) =>
            println(s"Person($id): $firstName $lastName")
        }
      }
    }
  }
}
| mnesarco/mybatis-scala-samples-beta2 | src/main/scala/org/mybatis/scala/samples/select/SelectImmutableSample.scala | Scala | apache-2.0 | 2,506 |
/*
* Copyright (c) 2012 Roberto Tyley
*
* This file is part of 'BFG Repo-Cleaner' - a tool for removing large
* or troublesome blobs from Git repositories.
*
* BFG Repo-Cleaner is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BFG Repo-Cleaner is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/ .
*/
package com.madgag.git.bfg.cleaner
import com.madgag.git.bfg.MemoUtil
import com.madgag.git.bfg.model.{TreeBlobEntry, _}
import org.eclipse.jgit.lib.ObjectId
/**
 * A [[TreeBlobs]] cleaner that rewrites each blob entry individually via [[fix]].
 *
 * Results are memoised per [[TreeBlobEntry]] value, so an identical entry
 * appearing in many trees is only fixed once.
 */
trait TreeBlobModifier extends Cleaner[TreeBlobs] {

  val memoisedCleaner: Cleaner[TreeBlobEntry] = MemoUtil.concurrentCleanerMemo[TreeBlobEntry](Set.empty) {
    entry =>
      val (mode, objectId) = fix(entry)
      TreeBlobEntry(entry.filename, mode, objectId)
  }

  // Returns the (possibly changed) file mode and object id to use for the entry.
  def fix(entry: TreeBlobEntry): (BlobFileMode, ObjectId) // implementing code can not safely know valid filename

  override def apply(treeBlobs: TreeBlobs) = treeBlobs.entries.map(memoisedCleaner)
}
| rtyley/bfg-repo-cleaner | bfg-library/src/main/scala/com/madgag/git/bfg/cleaner/TreeBlobModifier.scala | Scala | gpl-3.0 | 1,453 |
package models.game
import play.api.db.slick.Config.driver.simple.TableQuery
package object table {
  // Shared Slick TableQuery for the games table.
  val gamesTable = TableQuery[GamesTable]
}
| kristiankime/web-education-games | app/models/game/table/package.scala | Scala | mit | 146 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.core.routing
import java.net.URI
import scala.util.control.Exception
/**
* A part of a path.
*/
/**
 * A part of a path.
 */
trait PathPart

/**
 * A dynamically extracted part of the path.
 *
 * @param name The name of the part.
 * @param constraint The constraint - that is, the type (a regular expression).
 * @param encodeable Whether the path should be encoded/decoded.
 */
case class DynamicPart(name: String, constraint: String, encodeable: Boolean) extends PathPart {
  override def toString = """DynamicPart("""" + name + "\", \"\"\"" + constraint + "\"\"\")" // "
}

/**
 * A static part of the path.
 */
case class StaticPart(value: String) extends PathPart {
  override def toString = """StaticPart("""" + value + """")"""
}

/**
 * A pattern for matching paths, consisting of a sequence of path parts.
 */
case class PathPattern(parts: Seq[PathPart]) {
  import java.util.regex._

  // Builds the extractor for one dynamic part: pulls the capture group at
  // `groupIndex` out of a matcher and, when `decode` is set, percent-decodes it.
  private def decodeIfEncoded(decode: Boolean, groupIndex: Int): Matcher => Either[Throwable, String] =
    matcher =>
      Exception.allCatch[String].either {
        val raw = matcher.group(groupIndex)
        if (decode) {
          // If the param is not correctly encoded, getPath would return null,
          // so a leading / is prepended before decoding and dropped afterwards.
          new URI("/" + raw).getPath.drop(1)
        } else raw
      }

  // Compiled regex for the whole pattern, plus one extractor per dynamic part
  // name keyed to its capture group. Built once, lazily.
  private lazy val (regex, groups) = {
    val (patternString, extractors, _) =
      parts.foldLeft(("", Map.empty[String, Matcher => Either[Throwable, String]], 0)) {
        case ((re, ext, groupCount), StaticPart(part)) =>
          (re + Pattern.quote(part), ext, groupCount)
        case ((re, ext, groupCount), DynamicPart(name, constraint, encodeable)) =>
          (
            re + "(" + constraint + ")",
            ext + (name -> decodeIfEncoded(encodeable, groupCount + 1)),
            // The constraint regex may itself contain capture groups; skip past them.
            groupCount + 1 + Pattern.compile(constraint).matcher("").groupCount
          )
      }
    (Pattern.compile("^" + patternString + "$"), extractors)
  }

  /**
   * Apply the path pattern to a given candidate path to see if it matches.
   *
   * @param path The path to match against.
   * @return The map of extracted parameters, or none if the path didn't match.
   */
  def apply(path: String): Option[Map[String, Either[Throwable, String]]] = {
    val matcher = regex.matcher(path)
    if (matcher.matches) Some(groups.view.mapValues(_(matcher)).toMap)
    else None
  }

  override def toString =
    parts.map {
      case DynamicPart(name, constraint, _) => "$" + name + "<" + constraint + ">"
      case StaticPart(path) => path
    }.mkString
}
| mkurz/playframework | core/play/src/main/scala/play/core/routing/PathPattern.scala | Scala | apache-2.0 | 2,574 |
package api
import domain.Person
import service.UserService
import scalaz._
import Scalaz._
/**
*
*/
/** Test-support helper that creates canned domain objects through [[UserService]]. */
class Mockery {
  val userSvc = UserService

  // Persists a fixed sample person and returns the stored entity.
  // NOTE(review): `.toOption.get` throws if createPerson fails — acceptable for
  // a test fixture, but would be a smell in production code.
  def createDummyPerson: Person = {
    val p = Person(None, 1, "Jaroslav".some, "Siska".some, "jaro.siska@journi.com", "M".some)
    userSvc.createPerson(p).toOption.get
  }
}
| pnosko/staging.vita.infinita.api | src/test/scala/api/Mockery.scala | Scala | cc0-1.0 | 327 |
package com.lysdev.transperthcached.activities.train
import java.util.ArrayList
import android.app.ProgressDialog
import android.os.Bundle
import android.os.AsyncTask
import android.util.Log
import android.os.Looper
import android.view.View
import android.widget.AdapterView.OnItemClickListener
import android.widget.{AdapterView, ArrayAdapter, ListAdapter, TextView, ListView, Toast}
import org.scaloid.common._
import scala.collection.JavaConverters._
import scala.collection.JavaConversions._
import org.joda.time.{Minutes, DateTime}
import com.lysdev.transperthcached.livetimes.{
Trip,
GetTimesForStation,
TimesForStation,
InvalidPlatformCodeException
}
import com.lysdev.transperthcached.R
/** Wraps a [[Trip]] purely to give it a human-readable toString for list rows. */
case class TripDisplayWrapper(trip: Trip) {
  override def toString() : String = {
    val line : String = trip.getLineFull()
    // Minutes from now until the trip's actual (live) time.
    val minutesAway : Integer = Minutes.minutesBetween(
      DateTime.now(),
      trip.getActual()
    ).getMinutes
    val carriages : Integer = trip.getNumCars

    String.format(
      "%s - %d minutes - %d cars",
      line, minutesAway, carriages
    )
  }
}
/**
 * Background task that fetches live times for one station and fills the given
 * adapter with the trips matching the requested travel [[Direction]].
 */
class FetchTimesTask[V <: android.view.View, T <: AnyRef]
    (var klass : TrainStationTimesActivity,
     var direction : Direction,
     var ad : ArrayAdapter[TripDisplayWrapper])
    extends AsyncTask[AnyRef, Void, List[Trip]] {

  // Progress spinner shown while the fetch runs; dismissed in onPostExecute.
  var mDialog : ProgressDialog = null

  override protected def onPreExecute() {
    this.mDialog = new ProgressDialog(this.klass)
    this.mDialog.setMessage("Loading...")
    this.mDialog.setCancelable(true)
    this.mDialog.show()
  }

  // Runs off the UI thread; the first vararg is the station identifier string.
  override protected def doInBackground(p1: AnyRef*) : List[Trip] = {
    (
      GetTimesForStation
        .getTimes(p1.head.asInstanceOf[String])
        .getTrips().toList
    )
  }

  override protected def onPostExecute(trips: List[Trip]) {
    Log.d(
      "TransperthCached",
      String.format(
        "%s trips",
        trips.length.toString()
      )
    )

    // Keep trips heading toward Perth when direction is TO, and trips heading
    // away from Perth when direction is FROM.
    val filtered = (
      trips
        .filter(trip => {
          val to_perth = trip.getDestination().equals("Perth")
          val coming = to_perth && this.direction == Direction.TO
          val going = !to_perth && this.direction == Direction.FROM

          going || coming
        })
        .map(new TripDisplayWrapper(_))
    )

    this.ad.clear()
    this.ad.addAll(filtered.asJavaCollection)
    this.ad.notifyDataSetChanged()

    this.mDialog.dismiss()

    Log.d("TransperthCached", "initialized")
  }
}
/**
 * Activity that lists live departure times for a single train station.
 * Expects "line_name", "station_name" and "direction" extras on its intent.
 *
 * NOTE(review): `Bundle` and `ArrayList` have no visible import in this
 * chunk — presumably supplied by scaloid / java.util imports elsewhere in
 * the file; verify.
 */
class TrainStationTimesActivity extends SActivity
    with OnItemClickListener {

  lazy val times_lv = find[ListView](R.id.times)

  override def onCreate(savedInstanceState: Bundle) = {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.station_times)

    val line_name = getIntent().getStringExtra("line_name")
    val station_name = getIntent().getStringExtra("station_name")
    val direction = Direction.from_val("direction", getIntent())

    display_data(line_name, station_name, direction)
  }

  // Wires up the list view and kicks off the async fetch of live times.
  def display_data(line_name: String, station_name: String, direction: Direction) = {
    // ArrayAdapter must be backed by a mutable java List or clear()/addAll() throw:
    // https://stackoverflow.com/questions/3200551/unable-to-modify-arrayadapter-in-listview-unsupportedoperationexception
    val ad = new ArrayAdapter(
      this,
      android.R.layout.simple_list_item_1,
      new ArrayList(List[TripDisplayWrapper]().asJavaCollection)
    )

    this.times_lv.setOnItemClickListener(this)
    this.times_lv.setAdapter(ad)

    (
      new FetchTimesTask[TextView, TripDisplayWrapper](this, direction, ad)
        .execute(station_name)
    )
  }

  // Shows the tapped trip's stopping pattern in a toast.
  def onItemClick(parent: AdapterView[_], view: View, position: Int, id: Long) : Unit = {
    val trp = parent.getItemAtPosition(position).asInstanceOf[TripDisplayWrapper].trip

    Log.d("TransperthCached", String.format("Clicked: %s", new TripDisplayWrapper(trp)))

    Toast.makeText(
      this,
      trp.getPatternFullDisplay().mkString(", "),
      Toast.LENGTH_LONG
    ).show()
  }
}
| Mause/TransperthCached | src/com/lysdev/transperthcached/activities/train/TrainStationTimesActivity.scala | Scala | apache-2.0 | 4,392 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.nio.charset.StandardCharsets
import org.apache.spark.sql.functions._
import org.apache.spark.sql.functions.{log => logarithm}
import org.apache.spark.sql.test.SharedSQLContext
/** Row types used to build the test DataFrames below; boxed so nulls are representable. */
private object MathExpressionsTestData {
  // Two-column row of nullable doubles.
  case class DoubleData(a: java.lang.Double, b: java.lang.Double)
  // Single nullable-double column, used for null-propagation checks.
  case class NullDoubles(a: java.lang.Double)
}
/**
 * Tests for the math functions in [[org.apache.spark.sql.functions]],
 * exercising each through the typed Column API and, where one exists,
 * through its SQL expression form (`sql` / `selectExpr`).
 */
class MathExpressionsSuite extends QueryTest with SharedSQLContext {
  import MathExpressionsTestData._
  import testImplicits._

  // 'a spans [-0.8, 1.0] and 'b spans [0.8, -1.0] in 0.2 steps (mixed signs).
  private lazy val doubleData = (1 to 10).map(i => DoubleData(i * 0.2 - 1, i * -0.2 + 1)).toDF()

  // 'a strictly positive, 'b strictly negative (0.1 steps).
  private lazy val nnDoubleData = (1 to 10).map(i => DoubleData(i * 0.1, i * -0.1)).toDF()

  private lazy val nullDoubles =
    Seq(NullDoubles(1.0), NullDoubles(2.0), NullDoubles(3.0), NullDoubles(null)).toDF()

  /**
   * Checks a unary math column function `c` against its reference
   * implementation `f` on positive inputs, negative inputs, and a null
   * literal (which must propagate to a null result).
   */
  private def testOneToOneMathFunction[
      @specialized(Int, Long, Float, Double) T,
      @specialized(Int, Long, Float, Double) U](
      c: Column => Column,
      f: T => U): Unit = {
    checkAnswer(
      doubleData.select(c('a)),
      (1 to 10).map(n => Row(f((n * 0.2 - 1).asInstanceOf[T])))
    )

    checkAnswer(
      doubleData.select(c('b)),
      (1 to 10).map(n => Row(f((-n * 0.2 + 1).asInstanceOf[T])))
    )

    checkAnswer(
      doubleData.select(c(lit(null))),
      (1 to 10).map(_ => Row(null))
    )
  }

  /**
   * Like testOneToOneMathFunction but for functions that are only defined on
   * non-negative inputs; negative inputs are expected to yield null (checked
   * only for log1p, whose domain extends to -1 exclusive).
   */
  private def testOneToOneNonNegativeMathFunction(c: Column => Column, f: Double => Double): Unit =
  {
    checkAnswer(
      nnDoubleData.select(c('a)),
      (1 to 10).map(n => Row(f(n * 0.1)))
    )

    if (f(-1) === math.log1p(-1)) {
      checkAnswer(
        nnDoubleData.select(c('b)),
        (1 to 9).map(n => Row(f(n * -0.1))) :+ Row(null)
      )
    }

    checkAnswer(
      nnDoubleData.select(c(lit(null))),
      (1 to 10).map(_ => Row(null))
    )
  }

  /**
   * Checks a binary math function through both its (Column, Column) form `c`
   * and its (Column, Double) form `d` against reference implementation `f`,
   * including null propagation via the nullDoubles frame.
   */
  private def testTwoToOneMathFunction(
      c: (Column, Column) => Column,
      d: (Column, Double) => Column,
      f: (Double, Double) => Double): Unit = {
    checkAnswer(
      nnDoubleData.select(c('a, 'a)),
      nnDoubleData.collect().toSeq.map(r => Row(f(r.getDouble(0), r.getDouble(0))))
    )

    checkAnswer(
      nnDoubleData.select(c('a, 'b)),
      nnDoubleData.collect().toSeq.map(r => Row(f(r.getDouble(0), r.getDouble(1))))
    )

    checkAnswer(
      nnDoubleData.select(d('a, 2.0)),
      nnDoubleData.collect().toSeq.map(r => Row(f(r.getDouble(0), 2.0)))
    )

    checkAnswer(
      nnDoubleData.select(d('a, -0.5)),
      nnDoubleData.collect().toSeq.map(r => Row(f(r.getDouble(0), -0.5)))
    )

    val nonNull = nullDoubles.collect().toSeq.filter(r => r.get(0) != null)

    checkAnswer(
      nullDoubles.select(c('a, 'a)).orderBy('a.asc),
      Row(null) +: nonNull.map(r => Row(f(r.getDouble(0), r.getDouble(0))))
    )
  }

  test("sin") {
    testOneToOneMathFunction(sin, math.sin)
  }

  test("asin") {
    testOneToOneMathFunction(asin, math.asin)
  }

  test("sinh") {
    testOneToOneMathFunction(sinh, math.sinh)
  }

  test("cos") {
    testOneToOneMathFunction(cos, math.cos)
  }

  test("acos") {
    testOneToOneMathFunction(acos, math.acos)
  }

  test("cosh") {
    testOneToOneMathFunction(cosh, math.cosh)
  }

  test("tan") {
    testOneToOneMathFunction(tan, math.tan)
  }

  test("atan") {
    testOneToOneMathFunction(atan, math.atan)
  }

  test("tanh") {
    testOneToOneMathFunction(tanh, math.tanh)
  }

  test("toDegrees") {
    testOneToOneMathFunction(toDegrees, math.toDegrees)
    checkAnswer(
      sql("SELECT degrees(0), degrees(1), degrees(1.5)"),
      Seq((1, 2)).toDF().select(toDegrees(lit(0)), toDegrees(lit(1)), toDegrees(lit(1.5)))
    )
  }

  test("toRadians") {
    testOneToOneMathFunction(toRadians, math.toRadians)
    checkAnswer(
      sql("SELECT radians(0), radians(1), radians(1.5)"),
      Seq((1, 2)).toDF().select(toRadians(lit(0)), toRadians(lit(1)), toRadians(lit(1.5)))
    )
  }

  test("cbrt") {
    testOneToOneMathFunction(cbrt, math.cbrt)
  }

  test("ceil and ceiling") {
    testOneToOneMathFunction(ceil, (d: Double) => math.ceil(d).toLong)
    checkAnswer(
      sql("SELECT ceiling(0), ceiling(1), ceiling(1.5)"),
      Row(0L, 1L, 2L))
  }

  test("conv") {
    val df = Seq(("333", 10, 2)).toDF("num", "fromBase", "toBase")
    checkAnswer(df.select(conv('num, 10, 16)), Row("14D"))
    checkAnswer(df.select(conv(lit(100), 2, 16)), Row("4"))
    checkAnswer(df.select(conv(lit(3122234455L), 10, 16)), Row("BA198457"))
    checkAnswer(df.selectExpr("conv(num, fromBase, toBase)"), Row("101001101"))
    checkAnswer(df.selectExpr("""conv("100", 2, 10)"""), Row("4"))
    // Negative toBase means the result is rendered as a signed number.
    checkAnswer(df.selectExpr("""conv("-10", 16, -10)"""), Row("-16"))
    checkAnswer(
      df.selectExpr("""conv("9223372036854775807", 36, -16)"""), Row("-1")) // for overflow
  }

  test("floor") {
    testOneToOneMathFunction(floor, (d: Double) => math.floor(d).toLong)
  }

  test("factorial") {
    val df = (0 to 5).map(i => (i, i)).toDF("a", "b")
    checkAnswer(
      df.select(factorial('a)),
      Seq(Row(1), Row(1), Row(2), Row(6), Row(24), Row(120))
    )
    checkAnswer(
      df.selectExpr("factorial(a)"),
      Seq(Row(1), Row(1), Row(2), Row(6), Row(24), Row(120))
    )
  }

  test("rint") {
    testOneToOneMathFunction(rint, math.rint)
  }

  test("round/bround") {
    val df = Seq(5, 55, 555).map(Tuple1(_)).toDF("a")
    checkAnswer(
      df.select(round('a), round('a, -1), round('a, -2)),
      Seq(Row(5, 10, 0), Row(55, 60, 100), Row(555, 560, 600))
    )
    // bround uses HALF_EVEN (banker's rounding), hence 5 -> 0 at scale -1.
    checkAnswer(
      df.select(bround('a), bround('a, -1), bround('a, -2)),
      Seq(Row(5, 0, 0), Row(55, 60, 100), Row(555, 560, 600))
    )

    val pi = "3.1415"
    checkAnswer(
      sql(s"SELECT round($pi, -3), round($pi, -2), round($pi, -1), " +
        s"round($pi, 0), round($pi, 1), round($pi, 2), round($pi, 3)"),
      Seq(Row(BigDecimal("0E3"), BigDecimal("0E2"), BigDecimal("0E1"), BigDecimal(3),
        BigDecimal("3.1"), BigDecimal("3.14"), BigDecimal("3.142")))
    )
    checkAnswer(
      sql(s"SELECT bround($pi, -3), bround($pi, -2), bround($pi, -1), " +
        s"bround($pi, 0), bround($pi, 1), bround($pi, 2), bround($pi, 3)"),
      Seq(Row(BigDecimal("0E3"), BigDecimal("0E2"), BigDecimal("0E1"), BigDecimal(3),
        BigDecimal("3.1"), BigDecimal("3.14"), BigDecimal("3.142")))
    )
  }

  test("exp") {
    testOneToOneMathFunction(exp, math.exp)
  }

  test("expm1") {
    testOneToOneMathFunction(expm1, math.expm1)
  }

  test("signum / sign") {
    testOneToOneMathFunction[Double, Double](signum, math.signum)

    checkAnswer(
      sql("SELECT sign(10), signum(-11)"),
      Row(1, -1))
  }

  test("pow / power") {
    testTwoToOneMathFunction(pow, pow, math.pow)

    checkAnswer(
      sql("SELECT pow(1, 2), power(2, 1)"),
      Seq((1, 2)).toDF().select(pow(lit(1), lit(2)), pow(lit(2), lit(1)))
    )
  }

  test("hex") {
    val data = Seq((28, -28, 100800200404L, "hello")).toDF("a", "b", "c", "d")
    checkAnswer(data.select(hex('a)), Seq(Row("1C")))
    checkAnswer(data.select(hex('b)), Seq(Row("FFFFFFFFFFFFFFE4")))
    checkAnswer(data.select(hex('c)), Seq(Row("177828FED4")))
    checkAnswer(data.select(hex('d)), Seq(Row("68656C6C6F")))
    checkAnswer(data.selectExpr("hex(a)"), Seq(Row("1C")))
    checkAnswer(data.selectExpr("hex(b)"), Seq(Row("FFFFFFFFFFFFFFE4")))
    checkAnswer(data.selectExpr("hex(c)"), Seq(Row("177828FED4")))
    checkAnswer(data.selectExpr("hex(d)"), Seq(Row("68656C6C6F")))
    checkAnswer(data.selectExpr("hex(cast(d as binary))"), Seq(Row("68656C6C6F")))
  }

  test("unhex") {
    val data = Seq(("1C", "737472696E67")).toDF("a", "b")
    checkAnswer(data.select(unhex('a)), Row(Array[Byte](28.toByte)))
    checkAnswer(data.select(unhex('b)), Row("string".getBytes(StandardCharsets.UTF_8)))
    checkAnswer(data.selectExpr("unhex(a)"), Row(Array[Byte](28.toByte)))
    checkAnswer(data.selectExpr("unhex(b)"), Row("string".getBytes(StandardCharsets.UTF_8)))
    // Non-hex input yields null rather than an error.
    checkAnswer(data.selectExpr("""unhex("##")"""), Row(null))
    checkAnswer(data.selectExpr("""unhex("G123")"""), Row(null))
  }

  test("hypot") {
    testTwoToOneMathFunction(hypot, hypot, math.hypot)
  }

  test("atan2") {
    testTwoToOneMathFunction(atan2, atan2, math.atan2)
  }

  test("log / ln") {
    testOneToOneNonNegativeMathFunction(org.apache.spark.sql.functions.log, math.log)
    checkAnswer(
      sql("SELECT ln(0), ln(1), ln(1.5)"),
      Seq((1, 2)).toDF().select(logarithm(lit(0)), logarithm(lit(1)), logarithm(lit(1.5)))
    )
  }

  test("log10") {
    testOneToOneNonNegativeMathFunction(log10, math.log10)
  }

  test("log1p") {
    testOneToOneNonNegativeMathFunction(log1p, math.log1p)
  }

  test("shift left") {
    val df = Seq[(Long, Integer, Short, Byte, Integer, Integer)]((21, 21, 21, 21, 21, null))
      .toDF("a", "b", "c", "d", "e", "f")

    checkAnswer(
      df.select(
        shiftLeft('a, 1), shiftLeft('b, 1), shiftLeft('c, 1), shiftLeft('d, 1),
        shiftLeft('f, 1)),
      Row(42.toLong, 42, 42.toShort, 42.toByte, null))

    checkAnswer(
      df.selectExpr(
        // Fixed: the third expression previously repeated "shiftLeft(b, 1)",
        // leaving column c untested in the SQL form (cf. the select variant
        // above and the "shift right" test below).
        "shiftLeft(a, 1)", "shiftLeft(b, 1)", "shiftLeft(c, 1)", "shiftLeft(d, 1)",
        "shiftLeft(f, 1)"),
      Row(42.toLong, 42, 42.toShort, 42.toByte, null))
  }

  test("shift right") {
    val df = Seq[(Long, Integer, Short, Byte, Integer, Integer)]((42, 42, 42, 42, 42, null))
      .toDF("a", "b", "c", "d", "e", "f")

    checkAnswer(
      df.select(
        shiftRight('a, 1), shiftRight('b, 1), shiftRight('c, 1), shiftRight('d, 1),
        shiftRight('f, 1)),
      Row(21.toLong, 21, 21.toShort, 21.toByte, null))

    checkAnswer(
      df.selectExpr(
        "shiftRight(a, 1)", "shiftRight(b, 1)", "shiftRight(c, 1)", "shiftRight(d, 1)",
        "shiftRight(f, 1)"),
      Row(21.toLong, 21, 21.toShort, 21.toByte, null))
  }

  test("shift right unsigned") {
    val df = Seq[(Long, Integer, Short, Byte, Integer, Integer)]((-42, 42, 42, 42, 42, null))
      .toDF("a", "b", "c", "d", "e", "f")

    checkAnswer(
      df.select(
        shiftRightUnsigned('a, 1), shiftRightUnsigned('b, 1), shiftRightUnsigned('c, 1),
        shiftRightUnsigned('d, 1), shiftRightUnsigned('f, 1)),
      Row(9223372036854775787L, 21, 21.toShort, 21.toByte, null))

    checkAnswer(
      df.selectExpr(
        "shiftRightUnsigned(a, 1)", "shiftRightUnsigned(b, 1)", "shiftRightUnsigned(c, 1)",
        "shiftRightUnsigned(d, 1)", "shiftRightUnsigned(f, 1)"),
      Row(9223372036854775787L, 21, 21.toShort, 21.toByte, null))
  }

  test("binary log") {
    val df = Seq[(Integer, Integer)]((123, null)).toDF("a", "b")
    checkAnswer(
      df.select(org.apache.spark.sql.functions.log("a"),
        org.apache.spark.sql.functions.log(2.0, "a"),
        org.apache.spark.sql.functions.log("b")),
      Row(math.log(123), math.log(123) / math.log(2), null))

    checkAnswer(
      df.selectExpr("log(a)", "log(2.0, a)", "log(b)"),
      Row(math.log(123), math.log(123) / math.log(2), null))
  }

  test("abs") {
    val input =
      Seq[(java.lang.Double, java.lang.Double)]((null, null), (0.0, 0.0), (1.5, 1.5), (-2.5, 2.5))
    checkAnswer(
      input.toDF("key", "value").select(abs($"key").alias("a")).sort("a"),
      input.map(pair => Row(pair._2)))

    checkAnswer(
      input.toDF("key", "value").selectExpr("abs(key) a").sort("a"),
      input.map(pair => Row(pair._2)))

    checkAnswer(
      sql("select abs(0), abs(-1), abs(123), abs(-9223372036854775807), abs(9223372036854775807)"),
      Row(0, 1, 123, 9223372036854775807L, 9223372036854775807L)
    )

    checkAnswer(
      sql("select abs(0.0), abs(-3.14159265), abs(3.14159265)"),
      Row(BigDecimal("0.0"), BigDecimal("3.14159265"), BigDecimal("3.14159265"))
    )
  }

  test("log2") {
    val df = Seq((1, 2)).toDF("a", "b")
    checkAnswer(
      df.select(log2("b") + log2("a")),
      Row(1))

    checkAnswer(sql("SELECT LOG2(8), LOG2(null)"), Row(3, null))
  }

  test("sqrt") {
    val df = Seq((1, 4)).toDF("a", "b")
    checkAnswer(
      df.select(sqrt("a"), sqrt("b")),
      Row(1.0, 2.0))

    checkAnswer(sql("SELECT SQRT(4.0), SQRT(null)"), Row(2.0, null))
    checkAnswer(df.selectExpr("sqrt(a)", "sqrt(b)", "sqrt(null)"), Row(1.0, 2.0, null))
  }

  test("negative") {
    checkAnswer(
      sql("SELECT negative(1), negative(0), negative(-1)"),
      Row(-1, 0, 1))
  }

  test("positive") {
    val df = Seq((1, -1, "abc")).toDF("a", "b", "c")
    checkAnswer(df.selectExpr("positive(a)"), Row(1))
    checkAnswer(df.selectExpr("positive(b)"), Row(-1))
  }
}
| gioenn/xSpark | sql/core/src/test/scala/org/apache/spark/sql/MathExpressionsSuite.scala | Scala | apache-2.0 | 13,314 |
package scala.slick.jdbc
import java.sql.ResultSet
/** Represents a result set concurrency mode. */
/** A JDBC result-set concurrency mode.
  *
  * @param intValue the corresponding `java.sql.ResultSet` CONCUR_* constant
  */
sealed abstract class ResultSetConcurrency(val intValue: Int) {
  /** Run a block of code on top of a JDBC session derived from `base`
    * that uses this concurrency mode. */
  def apply[T](base: JdbcBackend#Session)(f: JdbcBackend#Session => T): T =
    f(base.forParameters(rsConcurrency = this))

  /** Run a block of code on top of the dynamic, thread-local JDBC session,
    * parameterized with this concurrency mode. */
  def apply[T](f: => T)(implicit base: JdbcBackend#Session): T =
    apply(base) { session => session.asDynamicSession(f) }

  /** Return this `ResultSetConcurrency`; `Auto` overrides this to fall back
    * to the supplied mode `r` instead. */
  def withDefault(r: ResultSetConcurrency) = this
}
object ResultSetConcurrency {
  /** The current concurrency mode of the JDBC driver.
    * Note: intValue defaults to CONCUR_READ_ONLY even though the effective
    * mode is decided by `withDefault`, which yields the caller's default. */
  case object Auto extends ResultSetConcurrency(ResultSet.CONCUR_READ_ONLY) {
    override def withDefault(r: ResultSetConcurrency) = r
  }

  /** The concurrency mode which indicates that the result set may <em>not</em> be updated. */
  case object ReadOnly extends ResultSetConcurrency(ResultSet.CONCUR_READ_ONLY)

  /** The concurrency mode which indicates that the result set may be updated. */
  case object Updatable extends ResultSetConcurrency(ResultSet.CONCUR_UPDATABLE)
} | nuodb/slick | src/main/scala/scala/slick/jdbc/ResultSetConcurrency.scala | Scala | bsd-2-clause | 1,340 |
package de.htwg.zeta.common.format.project.gdsl.style
import de.htwg.zeta.common.models.project.gdsl.style.Background
import de.htwg.zeta.common.models.project.gdsl.style.Color
import play.api.libs.json.JsSuccess
import play.api.libs.json.Json
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
//noinspection ScalaStyle
/** Round-trip and failure-mode tests for [[BackgroundFormat]]. */
class BackgroundFormatTest extends AnyFreeSpec with Matchers {

  "A BackgroundFormat should" - {
    "write an object" in {
      val background = Background(Color(23, 24, 25, 26))
      val written = BackgroundFormat().writes(background)
      written.toString() shouldBe
        """{"color":{"r":23,"g":24,"b":25,"a":26,"rgb":"rgb(23,24,25)","rgba":"rgba(23,24,25,26.0)","hex":"#171819"}}"""
    }
    "read an object" in {
      val input = Json.parse(
        """{"color":"rgba(23,24,25,26)"}"""
      )
      BackgroundFormat().reads(input) shouldBe JsSuccess(Background(Color(23, 24, 25, 26)))
    }
    "fail in reading an invalid input" in {
      val input = Json.parse(
        """{"invalid":{"r":23}}"""
      )
      BackgroundFormat().reads(input).isSuccess shouldBe false
    }
  }
}
| Zeta-Project/zeta | api/common/src/test/scala/de/htwg/zeta/common/format/project/gdsl/style/BackgroundFormatTest.scala | Scala | bsd-2-clause | 1,128 |
package com.yourtion.TinyWeb
/**
* Created by Yourtion on 9/5/16.
*/
/** A request handler: maps an [[HttpRequest]] to an [[HttpResponse]]. */
trait Controller {
  def handleRequest(httpRequest: HttpRequest): HttpResponse
}
/** A [[Controller]] that delegates request handling to a plain function and
  * renders the resulting model with the given view.
  *
  * @param view      renders the model produced by `doRequest` into a response body
  * @param doRequest computes the model (name -> values) for a request; may throw
  *                  [[ControllerException]] to signal a specific HTTP status code
  */
class FunctionController(view: View, doRequest: (HttpRequest) =>
    Map[String, List[String]] ) extends Controller {

  def handleRequest(request: HttpRequest): HttpResponse =
    try {
      val model = doRequest(request)
      val responseBody = view.render(model)
      HttpResponse(responseBody, 200)
    } catch {
      // A controller-level failure carries its own HTTP status code.
      case e: ControllerException =>
        HttpResponse("", e.getStatusCode)
      // The model was computed but could not be rendered.
      case _: RenderingException =>
        HttpResponse("Exception while rendering.", 500)
      // Anything else is an internal error. Note: the exception is swallowed
      // without logging; `_` makes the unused binding explicit.
      case _: Exception =>
        HttpResponse("", 500)
    }
}
| yourtion/LearningFunctionalProgramming | Scala/src/com/yourtion/TinyWeb/Controller.scala | Scala | mit | 711 |
// Adapted from github:mandubian/kind-polymorphic-semigroup.scala
// Minimal heterogeneous list ADT used by the kind-polymorphism test below.
sealed trait HList
// Cons cell: head of type HD, tail of type TL (typically another HCons or HNil).
case class HCons[+HD, +TL](hd: HD, tl: TL) extends HList
// Empty list terminator.
case object HNil extends HList
/** Compiler test: a kind-polymorphic SemiGroup encoded with `AnyKind`
  * (Scala 3). Adapted from github:mandubian/kind-polymorphic-semigroup.scala. */
object Test {
  type HNil = HNil.type

  // Kind Extractor: recovers the (possibly higher-kinded) type constructor M
  // from a fully-applied type MA.
  trait Kinder[MA] { type M <: AnyKind }
  object Kinder extends KinderLowerImplicits {
    type Aux[MA, M0 <: AnyKind] = Kinder[MA] { type M = M0 }

    // Unary type constructors (e.g. List[A]).
    implicit def kinder1[M0[_], A0]: Kinder.Aux[M0[A0], M0] =
      new Kinder[M0[A0]] { type M[t] = M0[t] }
    // Binary type constructors (e.g. Map[A, B]).
    implicit def kinder2[M0[_, _], A0, B0]: Kinder.Aux[M0[A0, B0], M0] =
      new Kinder[M0[A0, B0]] { type M[t, u] = M0[t, u]; type Args = HCons[A0, HCons[B0, HNil]] }
  }

  trait KinderLowerImplicits {
    // Lowest priority: proper types kind-check as themselves.
    implicit def kinder0[A]: Kinder.Aux[A, A] = new Kinder[A] { type M = A; type Args = HNil }
  }

  // Kind Polymorphic Semigroup using shapeless "Polymorphic function"-style
  trait SemiGroup[M <: AnyKind] {
    // Just a mirror type of itself to ensure the owning of AppendFunction...
    type Self
    // the function accepting only monomorphic type MA allowed by this scoped Semigroup AppendFunction
    def append[MA](m1: MA, m2: MA)(implicit appender: SemiGroup.AppendFunction[Self, MA, M]) = appender(m1, m2)
  }

  object SemiGroup {
    type Aux[M <: AnyKind, Self0] = SemiGroup[M] { type Self = Self0 }

    // the monomorphic append function (yes we need to reify monomorphic types sometimes)
    trait AppendFunction[P, FA, F <: AnyKind] {
      def apply(m1: FA, m2: FA): FA
    }
  }

  // Int SemiGroup instance
  implicit object SemiGroupInt extends SemiGroup[Int] {
    type Self = this.type
    implicit val appender: SemiGroup.AppendFunction[Self, Int, Int]= new SemiGroup.AppendFunction[Self, Int, Int] {
      def apply(m1: Int, m2: Int) = m1 + m2
    }
  }

  // List SemiGroup instance
  implicit object SemiGroupList extends SemiGroup[List] {
    type Self = this.type
    implicit def appender[A]: SemiGroup.AppendFunction[Self, List[A], List] = new {
      def apply(m1: List[A], m2: List[A]) = m1 ++ m2
    }
  }

  // Map SemiGroup instance
  implicit object SemiGroupMap extends SemiGroup[Map] {
    type Self = this.type
    implicit def appender[A, B]: SemiGroup.AppendFunction[Self, Map[A, B], Map] = new {
      def apply(m1: Map[A, B], m2: Map[A, B]) = m1 ++ m2
    }
  }

  // Searching a semigroup and using it
  def semiGroup[M <: AnyKind](implicit sg: SemiGroup[M]): SemiGroup.Aux[M, sg.Self] = sg

  semiGroup[Int].append(5, 8)
  semiGroup[List].append(List(1), List(3))
  semiGroup[Map].append(Map("toto" -> 1L), Map("tata" -> 3L))

  // higher level append function: recovers the kind of MA, then resolves the
  // matching SemiGroup and its monomorphic AppendFunction.
  def append[MA, M <: AnyKind, Self](m1: MA, m2: MA)(
    implicit kinder: Kinder.Aux[MA, M], semiGroup: SemiGroup.Aux[M, Self], appender: SemiGroup.AppendFunction[Self, MA, M]
  ): MA = semiGroup.append(m1, m2)

  import SemiGroupList.appender
  import SemiGroupMap.appender

  val r1: Int = append(5, 8)
  // TODO: Figure out why `M` below cannot be inferred
  val r2: List[Int] = append[M = List](List(1), List(3))
  val r3: Map[String, Long] = append[M = Map](Map("toto" -> 1L), Map("tata" -> 3L))
} | som-snytt/dotty | tests/pos/kindPolySemiGroup.scala | Scala | apache-2.0 | 3,118 |
package com.rasterfoundry.backsplash
import cats.effect.IO
import simulacrum._
import java.util.UUID
/** Type class for stores that can look up a paintable analysis tool.
  * The simulacrum `@typeclass` macro generates the companion with
  * `apply`/`ops` syntax; `@op("read")` exposes `read` as extension syntax. */
@typeclass trait ToolStore[A] {
  // Resolve the tool for an analysis (optionally a specific node) as an IO effect.
  @op("read") def read(self: A,
                       analysisId: UUID,
                       nodeId: Option[UUID]): IO[PaintableTool]
}
| azavea/raster-foundry | app-backend/backsplash-core/src/main/scala/com/rasterfoundry/backsplash/ToolStore.scala | Scala | apache-2.0 | 275 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp.
package scala
/** A function of 21 parameters.
*
*/
/** A function of 21 parameters, contravariant in its inputs and covariant in
 *  its result. (Generated code — see scala.Function0; do not hand-edit logic.)
 */
trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, +R] extends AnyRef { self =>
  /** Apply the body of this function to the arguments.
   *  @return   the result of function application.
   */
  def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21): R

  /** Creates a curried version of this function.
   *
   *  @return   a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
   */
  @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
    // Peel off the first argument, then delegate to Function20.curried.
    (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
  }

  /** Creates a tupled version of this function: instead of 21 arguments,
   *  it accepts a single [[scala.Tuple21]] argument.
   *
   *  @return   a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == f(Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
   */
  @annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21)) => R = {
    case ((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
  }

  override def toString(): String = "<function21>"
}
| scala/scala | src/library/scala/Function21.scala | Scala | apache-2.0 | 2,631 |
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
// COMMAND ----------
// MAGIC %md
// MAGIC # Twitter Streaming Language Classifier
// MAGIC
// MAGIC This is a databricksification of [https://databricks.gitbooks.io/databricks-spark-reference-applications/content/twitter_classifier/index.html](https://databricks.gitbooks.io/databricks-spark-reference-applications/content/twitter_classifier/index.html) by Amendra Shreshta.
// MAGIC
// MAGIC Note that you need to change the fields in background notebook `025_a_extendedTwitterUtils2run` so more fields for `lang` in Tweets are exposed. This is a good example of how one has to go deeper into the java code of `twitter4j` as new needs arise.
// COMMAND ----------
// MAGIC %run "./025_c_extendedTwitterUtils2runWithLangs"
// COMMAND ----------
import org.apache.spark._
import org.apache.spark.storage._
import org.apache.spark.streaming._
import scala.math.Ordering
import twitter4j.auth.OAuthAuthorization
import twitter4j.conf.ConfigurationBuilder
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC **Enter your Twitter API Credentials.**
// MAGIC * Go to https://apps.twitter.com and look up your Twitter API Credentials, or create an app to create them.
// MAGIC * Run the code in a cell to Enter your own credentials.
// MAGIC
// MAGIC ```%scala
// MAGIC // put your own twitter developer credentials below instead of xxx
// MAGIC // instead of the '%run ".../secrets/026_secret_MyTwitterOAuthCredentials"' below
// MAGIC // you need to copy-paste the following code-block with your own Twitter credentials replacing XXXX
// MAGIC
// MAGIC
// MAGIC // put your own twitter developer credentials below
// MAGIC
// MAGIC import twitter4j.auth.OAuthAuthorization
// MAGIC import twitter4j.conf.ConfigurationBuilder
// MAGIC
// MAGIC
// MAGIC // These have been regenerated!!! - need to chane them
// MAGIC
// MAGIC def myAPIKey = "XXXX" // APIKey
// MAGIC def myAPISecret = "XXXX" // APISecretKey
// MAGIC def myAccessToken = "XXXX" // AccessToken
// MAGIC def myAccessTokenSecret = "XXXX" // AccessTokenSecret
// MAGIC
// MAGIC
// MAGIC System.setProperty("twitter4j.oauth.consumerKey", myAPIKey)
// MAGIC System.setProperty("twitter4j.oauth.consumerSecret", myAPISecret)
// MAGIC System.setProperty("twitter4j.oauth.accessToken", myAccessToken)
// MAGIC System.setProperty("twitter4j.oauth.accessTokenSecret", myAccessTokenSecret)
// MAGIC
// MAGIC println("twitter OAuth Credentials loaded")
// MAGIC
// MAGIC ```
// MAGIC
// MAGIC The cell-below will not expose my Twitter API Credentials: `myAPIKey`, `myAPISecret`, `myAccessToken` and `myAccessTokenSecret`. Use the code above to enter your own credentials in a scala cell.
// COMMAND ----------
// MAGIC %run "Users/raazesh.sainudiin@math.uu.se/scalable-data-science/secrets/026_secret_MyTwitterOAuthCredentials"
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 1. Collect Data
// MAGIC Start downloading tweets in order to start building a simple model for language classification.
// COMMAND ----------
// ## Let's create a directory in dbfs for storing tweets in the cluster's distributed file system.
val outputDirectoryRoot = "/datasets/tweetsStreamTmp" // output directory
// COMMAND ----------
// to remove a pre-existing directory and start from scratch uncomment next line and evaluate this cell
dbutils.fs.rm(outputDirectoryRoot, true)
// COMMAND ----------
// ## Capture tweets in every sliding window of slideInterval many milliseconds.
val slideInterval = new Duration(1 * 1000) // 1 * 1000 = 1000 milli-seconds = 1 sec
// COMMAND ----------
// Our goal is to take each RDD in the twitter DStream and write it as a json file in our dbfs.
// Create a Spark Streaming Context.
val ssc = new StreamingContext(sc, slideInterval)
// COMMAND ----------
// Create a Twitter Stream for the input source.
// OAuth credentials are read from the twitter4j system properties set earlier.
val auth = Some(new OAuthAuthorization(new ConfigurationBuilder().build()))
val twitterStream = ExtendedTwitterUtils.createStream(ssc, auth)
// COMMAND ----------
// Let's import google's json library next.
import com.google.gson.Gson
//Let's map the tweets into json formatted string (one tweet per line).
val twitterStreamJson = twitterStream.map(
  x => { val gson = new Gson();
         val xJson = gson.toJson(x)
         xJson
       }
)
// COMMAND ----------
val partitionsEachInterval = 1
val batchInterval = 1 // in minutes
val timeoutJobLength = batchInterval * 5
var newContextCreated = false
var numTweetsCollected = 0L // track number of tweets collected
// For every micro-batch RDD: if it is non-empty, append it as parquet under a
// year/month/day/hour/batch-time directory hierarchy and bump the counter.
twitterStreamJson.foreachRDD((rdd, time) => { // for each filtered RDD in the DStream
  val count = rdd.count()
  if (count > 0) {
    val outputRDD = rdd.repartition(partitionsEachInterval) // repartition as desired
    // to write to parquet directly in append mode in one directory per 'time'------------
    val outputDF = outputRDD.toDF("tweetAsJsonString")
    // get some time fields from current `.Date()`
    val year = (new java.text.SimpleDateFormat("yyyy")).format(new java.util.Date())
    val month = (new java.text.SimpleDateFormat("MM")).format(new java.util.Date())
    val day = (new java.text.SimpleDateFormat("dd")).format(new java.util.Date())
    val hour = (new java.text.SimpleDateFormat("HH")).format(new java.util.Date())
    // write to a file with a clear time-based hierarchical directory structure for example
    outputDF.write.mode(SaveMode.Append)
      .parquet(outputDirectoryRoot+ "/"+ year + "/" + month + "/" + day + "/" + hour + "/" + time.milliseconds)
    // end of writing as parquet file-------------------------------------
    numTweetsCollected += count // update with the latest count
  }
})
// COMMAND ----------
// ## Let's start the spark streaming context we have created next.
ssc.start()
// COMMAND ----------
// total tweets downloaded
numTweetsCollected
// COMMAND ----------
// ## Go to SparkUI and see if a streaming job is already running. If so you need to terminate it before starting a new streaming job. Only one streaming job can be run on the DB CE.
// # let's stop the streaming job next.
// Stop this context but keep the shared SparkContext alive, then stop any
// other active streaming context that may still be registered.
ssc.stop(stopSparkContext = false)
StreamingContext.getActive.foreach { _.stop(stopSparkContext = false) }
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 2: Explore Data
// COMMAND ----------
// MAGIC %run "./025_b_TTTDFfunctions"
// COMMAND ----------
// #Let's examine what was saved in dbfs
display(dbutils.fs.ls(outputDirectoryRoot))
// COMMAND ----------
// Replace the date with current date
val date = "/2020/11/*"
// Load every parquet batch written under the chosen date into one DataFrame.
val rawDF = fromParquetFile2DF(outputDirectoryRoot + date +"/*/*") //.cache()
// COMMAND ----------
// Parse the raw json strings into the structured Tweet-Transmission-Tree DataFrame
// (helpers defined in the 025_b_TTTDFfunctions notebook).
val TTTsDF = tweetsDF2TTTDF(tweetsJsonStringDF2TweetsDF(rawDF)).cache()
// COMMAND ----------
// Creating SQL table
TTTsDF.createOrReplaceTempView("tbl_tweet")
// COMMAND ----------
// MAGIC %md
// MAGIC ## Step 3. Build Model
// MAGIC Let us use the structured data in `tbl_tweet` to build a simple classifier of the language using K-means.
// COMMAND ----------
// Peek at a few rows: declared language, user name, tweet text.
sqlContext.sql("SELECT lang, CPostUserName, CurrentTweet FROM tbl_tweet LIMIT 10").collect.foreach(println)
// COMMAND ----------
// Checking the language of tweets
sqlContext.sql(
  "SELECT lang, COUNT(*) as cnt FROM tbl_tweet " +
    "GROUP BY lang ORDER BY cnt DESC limit 1000")
  .collect.foreach(println)
// COMMAND ----------
// extracting just tweets from the table and converting it to String
// NOTE(review): each row renders as "[<tweet>]" via Row.toString — the
// brackets become part of the text that gets featurized; confirm intended.
val texts = sqlContext
  .sql("SELECT CurrentTweet from tbl_tweet")
  .map(_.toString)
// COMMAND ----------
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.{Vector, Vectors}
// COMMAND ----------
// MAGIC %md
// MAGIC **Featurize as bigrams**
// MAGIC
// MAGIC Create feature vectors by turning each tweet into bigrams of characters (an n-gram model)
// MAGIC and then hashing those to a length-1000 feature vector that we can pass to MLlib.
// COMMAND ----------
/** Hash the character bigrams of `s` into a fixed 1000-dimension frequency
  * vector (hashing-trick n-gram featurizer for the K-means language model).
  * Empty input yields the all-zero sparse vector. */
def featurize(s: String): Vector = {
  val n = 1000
  val result = new Array[Double](n)
  // sliding(2) emits 2-char windows (or a single 1-char window when s has
  // length 1); an empty string produces no windows at all.
  val bigrams = s.sliding(2).toArray
  // Math.floorMod keeps the bucket in [0, n) even if a hashCode were negative
  // (a plain `% n` would give a negative index). For 1- and 2-char strings
  // hashCode = 31*c0 + c1 is always non-negative, so this is behavior-identical
  // on all real bigrams — it is pure hardening.
  for (h <- bigrams.map(b => Math.floorMod(b.hashCode, n))) {
    result(h) += 1.0 / bigrams.length
  }
  Vectors.sparse(n, result.zipWithIndex.filter(_._1 != 0).map(_.swap))
}
// COMMAND ----------
//Cache the vectors RDD since it will be used for all the KMeans iterations.
val vectors = texts.rdd
.map(featurize)
.cache()
// COMMAND ----------
// cache is lazy so count will force the data to store in memory
vectors.count()
// COMMAND ----------
vectors.first()
// COMMAND ----------
// MAGIC %md
// MAGIC **K-Means model** trained with 10 clusters and 10 iterations.
// COMMAND ----------
// Training model with 10 cluster and 10 iteration
val model = KMeans.train(vectors, k=10, maxIterations = 10)
// COMMAND ----------
// Sample 100 of the original set
val some_tweets = texts.take(100)
// COMMAND ----------
// iterate through the 100 samples and show which cluster they are in
for (i <- 0 until 10) {
println(s"\\nCLUSTER $i:")
some_tweets.foreach { t =>
if (model.predict(featurize(t)) == i) {
println(t)
}
}
}
// COMMAND ----------
dbutils.fs.ls("/datasets/model/")
// COMMAND ----------
// to remove a pre-existing model and start from scratch
dbutils.fs.rm("/datasets/model", true)
// COMMAND ----------
// save the model
sc.makeRDD(model.clusterCenters).saveAsObjectFile("/datasets/model")
// COMMAND ----------
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.clustering.KMeansModel
// COMMAND ----------
// Checking if the model works
// Rebuild the KMeansModel from the persisted cluster centers and spot-check
// a few predictions against expected cluster ids.
val clusterNumber = 5
val modelFile = "/datasets/model"
val model: KMeansModel = new KMeansModel(sc.objectFile[Vector](modelFile).collect)
model.predict(featurize("واحد صاحبى لو حد يعرف اكونت وزير التعليم ")) == clusterNumber
// COMMAND ----------
model.predict(featurize("ご参加ありがとうございます❣")) == 2
// COMMAND ----------
model.predict(featurize("واحد صاحبى لو حد يعرف اكونت وزير التعليم ")) == 2
// MAGIC %md
// MAGIC **Loading model and printing tweets that matched the desired cluster.**
// COMMAND ----------
var newContextCreated = false
var num = 0
// Create a Spark Streaming Context.
@transient val ssc = new StreamingContext(sc, slideInterval)
// Create a Twitter Stream for the input source.
@transient val auth = Some(new OAuthAuthorization(new ConfigurationBuilder().build()))
@transient val twitterStream = ExtendedTwitterUtils.createStream(ssc, auth)
//Replace the cluster number as you desire between 0 to 9
val clusterNumber = 2
//model location
val modelFile = "/datasets/model"
// Get tweets from twitter
val Tweet = twitterStream.map(_.getText)
//Tweet.print()
println("Initalizaing the the KMeans model...")
val model: KMeansModel = new KMeansModel(sc.objectFile[Vector](modelFile).collect)
//printing tweets that match our choosen cluster
Tweet.foreachRDD(rdd => {
rdd.collect().foreach(i =>
{
val record = i
if (model.predict(featurize(record)) == clusterNumber) {
println(record)
}
})
})
// Start the streaming computation
println("Initialization complete.")
ssc.start()
ssc.awaitTermination()
// COMMAND ----------
// ## Go to SparkUI and see if a streaming job is already running. If so you need to terminate it before starting a new streaming job. Only one streaming job can be run on the DB CE.
// # let's stop the streaming job next.
ssc.stop(stopSparkContext = false)
StreamingContext.getActive.foreach { _.stop(stopSparkContext = false) } | lamastex/scalable-data-science | dbcArchives/2021/000_3-sds-3-x-st/029_TweetLanguageClassifier.scala | Scala | unlicense | 12,137 |
package com.arcusys.valamis.persistence.impl.scorm.schema
import com.arcusys.valamis.persistence.impl.scorm.model.SequencingModel
import com.arcusys.valamis.persistence.common.DbNameUtils._
import com.arcusys.valamis.persistence.common.{LongKeyTableComponent, SlickProfile, TypeMapper}
import com.arcusys.valamis.util.ToTuple
import com.arcusys.valamis.util.TupleHelpers._
import slick.driver.MySQLDriver
trait SequencingTableComponent extends LongKeyTableComponent with TypeMapper { self: SlickProfile =>
import driver.simple._
// Slick table mapping for SCO_SEQUENCING: SCORM sequencing settings for an
// activity within a package. Column name strings must match the deployed
// database schema and therefore must never be "corrected" here.
class SequencingTable(tag: Tag) extends LongKeyTable[SequencingModel](tag, "SCO_SEQUENCING") {
def packageId = column[Option[Long]]("PACKAGE_ID")
def activityId = column[Option[String]]("ACTIVITY_ID", O.Length(512, true))
def sharedId = column[Option[String]]("SHARED_ID", O.DBType(varCharMax))
def sharedSequencingIdReference = column[Option[String]]("SHARED_SEQ_ID_REFERENCE", O.DBType(varCharMax))
def cAttemptObjectiveProgressChild = column[Boolean]("C_ATTEMPT_OBJ_PROGRESS_CHILD")
// NOTE(review): "AROGRESS" looks like a typo for "PROGRESS", but the name is
// part of the live schema — renaming it would break existing databases.
def cAttemptAttemptProgressChild = column[Boolean]("C_ATTEMPT_ATT_AROGRESS_CHILD")
def attemptLimit = column[Option[Int]]("ATTEMPT_LIMIT")
def durationLimitInMilliseconds = column[Option[Long]]("DURATION_LIMIT_IN_MILLESEC")
def preventChildrenActivation = column[Boolean]("PREVENT_CHILDREN_ACTIVATION")
def constrainChoice = column[Boolean]("CONSTRAIN_CHOICE")
// Full projection including the id, mapped to/from SequencingModel.
def * = (id.?,
packageId,
activityId,
sharedId,
sharedSequencingIdReference,
cAttemptObjectiveProgressChild,
cAttemptAttemptProgressChild,
attemptLimit,
durationLimitInMilliseconds,
preventChildrenActivation,
constrainChoice) <> (SequencingModel.tupled, SequencingModel.unapply)
// Projection without the id, used for updates of existing rows.
def update = (packageId,
activityId,
sharedId,
sharedSequencingIdReference,
cAttemptObjectiveProgressChild,
cAttemptAttemptProgressChild,
attemptLimit,
durationLimitInMilliseconds,
preventChildrenActivation,
constrainChoice) <> (tupleToEntity, entityToTuple)
// Index is skipped on MySQL — presumably due to key-length limits on the
// long ACTIVITY_ID column; TODO confirm.
if (!slickDriver.isInstanceOf[MySQLDriver]) {
def idx = index("SEQUENCING_ACTID_PACKAGEID", (activityId, packageId))
}
def entityToTuple(entity: TableElementType) = {
Some(toTupleWithFilter(entity))
}
}
val sequencingTQ = TableQuery[SequencingTable]
} | igor-borisov/valamis | valamis-slick-persistence/src/main/scala/com/arcusys/valamis/persistence/impl/scorm/schema/SequencingTableComponent.scala | Scala | gpl-3.0 | 2,382 |
package org.jetbrains.sbt
package project
import com.intellij.ide.actions.OpenProjectFileChooserDescriptor
import com.intellij.openapi.vfs.VirtualFile
/**
* @author Pavel Fatin
*/
class SbtOpenProjectDescriptor extends OpenProjectFileChooserDescriptor(true) {

  // Show only what the platform would show, narrowed to directories and sbt build files.
  override def isFileVisible(file: VirtualFile, showHiddenFiles: Boolean): Boolean = {
    if (!super.isFileVisible(file, showHiddenFiles)) false
    else file.isDirectory || language.SbtFileType.isMyFileType(file)
  }

  // A file is selectable only if the platform allows it and sbt can import it.
  override def isFileSelectable(file: VirtualFile): Boolean = {
    val selectableByPlatform = super.isFileSelectable(file)
    selectableByPlatform && SbtProjectImportProvider.canImport(file)
  }
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/project/SbtOpenProjectDescriptor.scala | Scala | apache-2.0 | 616 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.apollo.stomp
import _root_.org.fusesource.hawtbuf._
import org.apache.activemq.apollo.broker._
import org.apache.activemq.apollo.broker.protocol.{MessageCodecFactory, MessageCodec, ProtocolCodecFactory, Protocol}
import Stomp._
import org.apache.activemq.apollo.broker.store._
/**
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
/**
* Creates StompCodec objects that encode/decode the
* <a href="http://activemq.apache.org/stomp/">Stomp</a> protocol.
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
class StompProtocolCodecFactory extends ProtocolCodecFactory.Provider {

  def id = PROTOCOL

  def createProtocolCodec(connector: Connector) = new StompCodec()

  def isIdentifiable() = true

  def maxIdentificaionLength() = CONNECT.length

  // A connection is identified as STOMP when the initial bytes spell out the
  // CONNECT or STOMP command. Until CONNECT.length bytes have been buffered
  // the answer is false, matching the declared identification length above.
  def matchesIdentification(header: Buffer): Boolean =
    header.length >= CONNECT.length &&
      (header.startsWith(CONNECT) || header.startsWith(STOMP))
}
/**
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
object StompProtocol extends StompProtocolCodecFactory with Protocol {
// Pairs the codec factory (wire identification + codec creation) with the
// handler created for each accepted STOMP connection.
def createProtocolHandler = new StompProtocolHandler
}
// Registers the single STOMP message codec with the broker's codec factory SPI.
object StompMessageCodecFactory extends MessageCodecFactory.Provider {
def create = Array[MessageCodec](StompMessageCodec)
}
/**
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
object StompMessageCodec extends MessageCodec {

  def id = "stomp"

  // Only STOMP frame messages ever reach this codec, hence the cast.
  def encode(message: Message): MessageRecord = {
    val frame = message.asInstanceOf[StompFrameMessage]
    StompCodec.encode(frame)
  }

  def decode(message: MessageRecord) = StompCodec.decode(message)
}
| chirino/activemq-apollo | apollo-stomp/src/main/scala/org/apache/activemq/apollo/stomp/StompProtocol.scala | Scala | apache-2.0 | 2,483 |
/* Copyright 2009-2021 EPFL, Lausanne */
import stainless.lang._
import stainless.collection._
import stainless._
object BVDivisionByZero {
// Intentionally invalid verification benchmark (lives under
// benchmarks/verification/invalid): Stainless must report that the
// bit-vector division below can divide by zero. Do NOT "fix" this code.
def divByZero(x: Int): Boolean = {
(x / 0 == 10)
}
}
| epfl-lara/stainless | frontends/benchmarks/verification/invalid/BVDivisionByZero.scala | Scala | apache-2.0 | 207 |
package de.kaufhof.hajobs
import org.joda.time.DateTime
import org.slf4j.LoggerFactory.getLogger
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration.Duration
/**
* JobUpdater is responsible finding running/pending jobs that have lost its lock
* and set them to status failed/dead
* JobUpdater tries to get latest JobStatusData to update status, so one can
* see the latest content of the dead job.
*
* @param lockRepository see which jobs actually have a lock
* @param jobStatusRepository find all the jobStatus
* @param deadJobWaitingTime time by jobType to wait before updating a job to dead failed
*/
class JobUpdater(lockRepository: LockRepository,
jobStatusRepository: JobStatusRepository,
limitByJobType: JobType => Int = JobStatusRepository.defaultLimitByJobType,
deadJobWaitingTime: JobType => Duration = JobUpdater.defaultDeadJobWaitingTime) {
private val logger = getLogger(getClass)
/**
 * Finds pending jobs whose lock has disappeared and marks them DEAD.
 * Returns the list of job statuses that were actually updated.
 */
def updateJobs(): Future[List[JobStatus]] = {
for {
// we *really* want sequential execution here: first read the locks,
// and only after that is finished, read the jobs status (to ensure
// consistency between job state and lock state). So please do not try to
// optimize by moving this code out of the for comprehension.
// we also need to read with quorum to ensure we get the most current
// (and consistent) data
locks <- lockRepository.getAll()
jobs <- jobStatusRepository.getMetadata(readwithQuorum = true, limitByJobType = limitByJobType)
runningJobs = jobs.flatMap(_._2).toList.filter(_.jobResult == JobResult.Pending)
deadJobs = runningJobs.filterNot(job => locks.exists(_.jobId == job.jobId))
updatedJobs <- updateDeadJobState(deadJobs)
} yield {
updatedJobs
}
}
/**
 * Marks the given lock-less jobs as DEAD, but only after the configured
 * per-job-type waiting time has elapsed since the latest status timestamp.
 * Falls back to updating the metadata entry when no detailed status exists.
 */
private[hajobs] def updateDeadJobState(deadJobs: List[JobStatus]): Future[List[JobStatus]] = {
Future.traverse(deadJobs) { jobMeta =>
logger.info("Detected dead job, changing state from {} to DEAD for: {} ({})", jobMeta.jobState, jobMeta.jobId, jobMeta.jobType)
jobStatusRepository.get(jobMeta.jobType, jobMeta.jobId).flatMap {
// Only flip to DEAD once the grace period after the last status update has passed.
case Some(data) => if (data.jobStatusTs.plus(deadJobWaitingTime(data.jobType).toMillis).isBefore(DateTime.now())){
jobStatusRepository.updateJobState(data, JobState.Dead).map(List(_))
}
else{
Future.successful(Nil)
}
// if no latest JobStatusData is found update JobStatusMeta instead
case None => jobStatusRepository.updateJobState(jobMeta, JobState.Dead).map(List(_))
}
}.map(_.flatten)
}
}
object JobUpdater {
import scala.concurrent.duration._
/**
 * Default grace period (5 minutes) to wait before a lock-less job is
 * considered dead, regardless of its job type.
 */
val defaultDeadJobWaitingTime: JobType => Duration = _ => 5.minutes
}
| MarcoPriebe/ha-jobs | ha-jobs-core/src/main/scala/de/kaufhof/hajobs/JobUpdater.scala | Scala | apache-2.0 | 2,942 |
package de.tototec.sbuild.runner
import java.io.File
import java.io.FileWriter
import java.util.Timer
import java.util.TimerTask
// File Locking Mechanism
//
// Problem:
// Independent processed try to create/modify/delete the same file.
// There is no simple way to detect problems.
//
// Solution Idea:
// Use an additional lock file.
//
// Problem with a lock file:
// If the process that created the lockfile dies before deleting the lockfile,
// all other waiting procress will wait forever.
//
// Solution:
// A lock file needs some informations:
// - touch time
// - process id
// If a process finds a lockfile, it checks the age of that lockfile by checking the touch time.
// If that file is to old, it can be assumed that the creating process dies.
// The process that created the lockfile is responsible to refresh the touch time of the lockfile in a multithreaded way, so that it is guaranteed, the touch time is accurate and up-to-date.
//
//
// Details:
// Because of different file system specific time precision, to time interval has to be at least 2 seconds or must be store inside the file.
// If a lockfile of a died process was detected, the wait time should be double the time of the refresh time, e.g. 3-4 seconds.
/**
 * Creates a lock file containing `processInformation` and keeps its
 * last-modified timestamp fresh on a daemon timer, so other processes can
 * distinguish a live lock from one left behind by a dead process.
 *
 * @param file the lock file to create; must not already exist
 * @param updateIntervalMsec how often the touch time is refreshed
 * @param processInformation text written into the lock file (e.g. a PID)
 * @param createDirs whether to create missing parent directories
 */
class FileLock(file: File,
               updateIntervalMsec: Long,
               processInformation: String,
               createDirs: Boolean) {

  // Daemon timer so a forgotten lock never keeps the JVM alive.
  val timer = new Timer("Lock-" + file.getName, true /* isDaemon */ )

  // Refresh the touch time; no-op once the lock file has been deleted.
  def updateLock = if (file.exists) file.setLastModified(System.currentTimeMillis)

  // CREATE THE LOCK
  // init
  {
    require(!file.exists, "Lock file already exists")
    if (createDirs) {
      file.getParentFile() match {
        case null =>
        case parent => parent.mkdirs()
      }
    }
    // Write the owning process information into the lock file. The writer is
    // closed in a finally block so a failed write cannot leak the handle.
    val fw = new FileWriter(file)
    try fw.write(processInformation)
    finally fw.close()
    // Schedule periodic touch-time refreshes (the first one runs immediately).
    val timerTask = new TimerTask() {
      override def run() = updateLock
    }
    timer.scheduleAtFixedRate(timerTask, 0 /* delay */ , updateIntervalMsec /* interval */ )
  }

  // RELEASE THE LOCK
  private[this] var released: Boolean = false

  /**
   * Releases this lock: stops the refresh timer and deletes the lock file.
   * Synchronized so two concurrent callers cannot both pass the `released`
   * check and both attempt the delete (the loser would see delete == false
   * and throw a spurious IllegalStateException). Subsequent calls are no-ops.
   */
  def release = synchronized {
    if (!released) {
      timer.cancel
      val success = file.delete
      released = true
      if (!success) throw new IllegalStateException("Could not delete lock file: " + file)
    }
  }
}
| SBuild-org/sbuild | de.tototec.sbuild.runner/src/main/scala/de/tototec/sbuild/runner/FileLock.scala | Scala | apache-2.0 | 2,411 |
package suzaku.ui.style
// Base class for style classes: every concrete subclass registers itself with
// the global StyleClassRegistry when it is constructed.
abstract class StyleClassBase extends StyleClass {
// register at initialization time
// The registry-assigned id is captured once and reused for the lifetime of the instance.
val id = StyleClassRegistry.register(this, getClass)
}
| suzaku-io/suzaku | core-app/shared/src/main/scala/suzaku/ui/style/StyleClassBase.scala | Scala | apache-2.0 | 170 |
package mimir.adaptive
import mimir.algebra.ID
// Static registry mapping adaptive-lens type identifiers to their Multilens
// implementations. Keys are the names used to request a lens by ID.
object MultilensRegistry
{
val multilenses = Map[ID,Multilens](
ID("DETECT_HEADER") -> CheckHeader,
ID("TYPE_INFERENCE") -> TypeInference,
ID("SCHEMA_MATCHING") -> SchemaMatching,
ID("SHAPE_WATCHER") -> ShapeWatcher,
ID("DATASOURCE_ERRORS") -> DataSourceErrors
)
| UBOdin/mimir | src/main/scala/mimir/adaptive/MultilensRegistry.scala | Scala | apache-2.0 | 353 |
package geotrellis.raster.op.local
import geotrellis._
import geotrellis.process._
// Bitwise XOR operation over rasters and integer constants. The overloads
// dispatch to the appropriate Op implementation below.
object Xor {
def apply(x:Op[Int], y:Op[Int]) = logic.Do2(x, y)(_ ^ _)
def apply(r:Op[Raster], c:Op[Int]) = XorConstant1(r, c)
def apply(c:Op[Int], r:Op[Raster]) = XorConstant2(c, r)
def apply(r1:Op[Raster], r2:Op[Raster]) = XorRaster(r1, r2)
}
// XOR each set (non-NODATA) cell of a raster with a constant.
case class XorConstant1(r:Op[Raster], c:Op[Int]) extends Op2(r, c) ({
(r, c) => Result(r.mapIfSet(_ ^ c))
})
// Same as XorConstant1 with the argument order flipped (XOR is commutative).
case class XorConstant2(c:Op[Int], r:Op[Raster]) extends Op2(c, r) ({
(c, r) => Result(r.mapIfSet(_ ^ c))
})
// Cell-wise XOR of two rasters.
case class XorRaster(r1:Op[Raster], r2:Op[Raster]) extends Op2(r1, r2) ({
(r1, r2) => Result(r1.combine(r2)(_ ^ _))
})
| Tjoene/thesis | Case_Programs/geotrellis-0.7.0/src/main/scala/geotrellis/raster/op/local/Xor.scala | Scala | gpl-2.0 | 683 |
package com.blogspot.nurkiewicz.web
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation.RequestMethod._
import java.net.URI
import org.springframework.web.util.UriTemplate
import org.springframework.beans.factory.annotation.Autowired
import com.blogspot.nurkiewicz.{BookService, Book}
import org.springframework.web.bind.annotation._
import javax.servlet.http.HttpServletRequest
import org.springframework.http.{ResponseEntity, HttpHeaders, HttpStatus}
import org.springframework.data.domain.PageRequest
import scalaj.collection.Implicits._
/**
* @author Tomasz Nurkiewicz
* @since 24.09.11, 23:39
*/
@Controller
@RequestMapping(value = Array("/book"))
class BookController @Autowired()(bookService: BookService) {
// GET /book/{bookId} — return the book or 404 via NotFoundException.
@RequestMapping(value = Array("/{bookId}"), method = Array(GET))
@ResponseBody def read(@PathVariable("bookId") id: Int) = bookService.findBy(id).getOrElse(throw new NotFoundException)
// GET /book?page=&max= — paginated listing; the page parameter is 1-based
// for clients and converted to the 0-based PageRequest below.
@RequestMapping(method = Array(GET))
@ResponseBody def listBooks(
@RequestParam(value = "page", required = false, defaultValue = "1") page: Int,
@RequestParam(value = "max", required = false, defaultValue = "20") max: Int) =
new ResultPage(bookService.listBooks(new PageRequest(page - 1, max)))
// PUT /book/{bookId} — update; the path id wins over any id in the body.
@RequestMapping(value = Array("/{bookId}"), method = Array(PUT))
@ResponseStatus(HttpStatus.NO_CONTENT)
def updateBook(@PathVariable("bookId") id: Int, @RequestBody book: Book) {
book.id = id
bookService update book
}
// POST /book — create; responds 201 with a Location header pointing at the
// new resource. NOTE(review): the template variable is named "{username}"
// but is expanded with the book id — presumably a copy-paste leftover; the
// produced URL is still correct.
@RequestMapping(method = Array(POST)) def createBook(request: HttpServletRequest, @RequestBody book: Book) = {
bookService save book
val uri: URI = new UriTemplate("{requestUrl}/{username}").expand(request.getRequestURL.toString, book.id.toString)
val headers = new HttpHeaders
headers.put("Location", List(uri.toASCIIString).asJava)
new ResponseEntity[String](headers, HttpStatus.CREATED)
}
// DELETE /book/{bookId} — delete and respond 204.
@RequestMapping(value = Array("/{bookId}"), method = Array(DELETE))
@ResponseStatus(HttpStatus.NO_CONTENT) def deleteBook(@PathVariable("bookId") id: Int) {
bookService.deleteBy(id)
}
}
| nurkiewicz/spring-rest-wadl | showcase/src/main/scala/com/blogspot/nurkiewicz/web/BookController.scala | Scala | apache-2.0 | 2,136 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.process.knn
import com.vividsolutions.jts.geom.Point
import org.locationtech.geomesa.utils.geohash._
import org.locationtech.geomesa.utils.geotools.Conversions.RichSimpleFeature
import org.opengis.feature.simple.SimpleFeature
import scala.annotation.tailrec
import scala.collection.mutable
// used for ordering GeoHashes within a PriorityQueue
// Pairs a GeoHash with its precomputed distance (per the supplied distance
// function) from the query point, so ordering never recomputes distances.
case class GeoHashWithDistance(gh: GeoHash, dist: Double)
/**
* Object and Class for the GeoHashSpiral
*
* This provides a Iterator[GeoHash] which generates GeoHashes in order from the geodetic distance from a single POINT.
*/
// Mutable distance cutoff shared by the spiral: the cutoff can only ever
// shrink, which lets a kNN search tighten its search radius as it finds
// closer neighbors.
trait GeoHashDistanceFilter {
// distance in meters used by the filter
var statefulFilterDistance: Double
// modifies statefulFilterDistance if a new distance is smaller (never grows it)
def mutateFilterDistance(theNewMaxDistance: Double) {
if (theNewMaxDistance < statefulFilterDistance) statefulFilterDistance = theNewMaxDistance
}
// removes GeoHashes that are further than a certain distance from a feature or point
def statefulDistanceFilter(x: GeoHashWithDistance): Boolean =
{ x.dist < statefulFilterDistance }
}
trait GeoHashAutoSize {
// find the smallest GeoHash whose minimumSize is larger than the desiredSizeInMeters
// Precisions are tried from finest (40 bits) to coarsest (5 bits); if even the
// coarsest candidate is too small, the finest precision (5-bit step list's last
// element, i.e. 5 bits reversed -> 5) is used as a fallback.
def geoHashToSize(pointInside: Point, desiredSizeInMeters: Double ): GeoHash = {
import org.locationtech.geomesa.utils.geohash.GeohashUtils._
// typically 25 bits are encoded in the Index Key
val allowablePrecisions = (5 to 40 by 5).reverse
allowablePrecisions.map { prec => GeoHash(pointInside,prec) }
.find { gh => getGeohashMinDimensionMeters(gh) > desiredSizeInMeters }
.getOrElse (GeoHash (pointInside, allowablePrecisions.last) )
}
}
object GeoHashSpiral extends GeoHashAutoSize {
// Convenience factory for point features; non-point geometries are rejected.
def apply(centerFeature: SimpleFeature, distanceGuess: Double, maxDistance: Double): GeoHashSpiral = {
centerFeature.point match {
case aPoint: Point => GeoHashSpiral(aPoint, distanceGuess, maxDistance)
case _ => throw new RuntimeException("GeoHashSpiral not implemented for non-point geometries")
}
}
def apply(centerPoint: Point, distanceGuess: Double, maxDistance: Double): GeoHashSpiral = {
// generate the central GeoHash as a seed with precision/size governed by distanceGuess
val seedGH = geoHashToSize(centerPoint, distanceGuess)
val seedWithDistance = GeoHashWithDistance(seedGH, 0.0)
// These are helpers for distance calculations and ordering.
def distanceCalc(gh:GeoHash) =
GeohashUtils.getMinimumGeodeticDistance(gh.bbox, centerPoint, exhaustive = true)
implicit val orderedGH: Ordering[GeoHashWithDistance] = Ordering.by { _.dist}
// Create a new GeoHash PriorityQueue and enqueue with a seed.
// reverse ordering: PriorityQueue is a max-heap, so reversing yields nearest-first.
val ghPQ = new mutable.PriorityQueue[GeoHashWithDistance]()(orderedGH.reverse) { enqueue(seedWithDistance) }
new GeoHashSpiral(ghPQ, distanceCalc, maxDistance)
}
}
/**
 * BufferedIterator that yields GeoHashes in increasing distance from a point,
 * expanding outward ("spiraling") by enqueueing the touching neighbors of each
 * visited GeoHash. The distance cutoff may be tightened mid-iteration via
 * mutateFilterDistance, which is re-checked in head/hasNext.
 */
class GeoHashSpiral(pq: mutable.PriorityQueue[GeoHashWithDistance],
val distance: (GeoHash) => Double,
var statefulFilterDistance: Double) extends GeoHashDistanceFilter with BufferedIterator[GeoHash] {
// running set of GeoHashes which have already been encountered -- used to prevent visiting a GeoHash more than once
val oldGH = new mutable.HashSet[GeoHash] ++= pq.toSet[GeoHashWithDistance].map{ _.gh }
// these are used to setup a modified on-deck pattern: a PriorityQueue backed by a generator
var onDeck: Option[GeoHashWithDistance] = None
var nextGHFromPQ: Option[GeoHashWithDistance] = None
// prime the on deck pattern
loadNextGHFromPQ()
loadNextGHFromTouching()
loadNext()
// method used to find the next element in the PQ that passes a filter
@tailrec
private def loadNextGHFromPQ() {
if (pq.isEmpty) nextGHFromPQ = None
else {
val theHead = pq.dequeue() // removes elements from pq
if (statefulDistanceFilter(theHead)) nextGHFromPQ = Option(theHead)
else loadNextGHFromPQ()
}
}
// method to load the neighbors of the next GeoHash to be visited into the PriorityQueue
private def loadNextGHFromTouching() {
// use the GeoHash already taken from the head of the PriorityQueue as a seed
nextGHFromPQ.foreach { newSeedGH =>
// obtain only *new* GeoHashes that touch
val newTouchingGH = TouchingGeoHashes.touching(newSeedGH.gh).filterNot(oldGH contains)
// enrich the GeoHashes with distances
val withDistance = newTouchingGH.map { aGH => GeoHashWithDistance(aGH, distance(aGH))}
// remove the GeoHashes that are located too far away
val withinDistance = withDistance.filter(statefulDistanceFilter)
// add all GeoHashes which pass the filter to the PQ
withinDistance.foreach { ghWD => pq.enqueue(ghWD)}
// also add the new GeoHashes to the set of old GeoHashes
// note: we add newTouchingGH now, since the cost of having many extra GeoHashes will likely
// be less than that of computing the distance for the same GeoHash multiple times,
// which is what happens if withinDistance is used.
oldGH ++= newTouchingGH
}
}
// loads the head element in the PQ into onDeck, and adds GeoHashes to the PQ
private def loadNext() {
nextGHFromPQ match {
case None => onDeck = None // nothing left in the priorityQueue
case Some(x) => loadNextGHFromPQ(); loadNextGHFromTouching(); onDeck = Some(x)
}
}
// filter applied here to account for mutations in the filter AFTER onDeck is loaded
// NOTE(review): throws a bare Exception when exhausted; a NoSuchElementException
// would be more conventional for an iterator, but callers may depend on this.
def head = onDeck.filter(statefulDistanceFilter) match {
case Some(nextGH) => nextGH.gh
case None => throw new Exception
}
// filter applied here to account for mutations in the filter AFTER onDeck is loaded
def hasNext = onDeck.filter(statefulDistanceFilter).isDefined
def next() = head match {case nextGH:GeoHash => loadNext() ; nextGH }
} | jwkessi/geomesa | geomesa-core/src/main/scala/org/locationtech/geomesa/core/process/knn/GeoHashSpiral.scala | Scala | apache-2.0 | 6,549 |
package org.http4s
package client
package blaze
import cats.effect.IO
import java.nio.ByteBuffer
import org.http4s.blaze.pipeline.{HeadStage, LeafBuilder}
// Test helper that wires a mock pipeline head into a blaze client connection.
// Both parameters are by-name so a *fresh* head/tail is constructed for every
// connection request — do not change them to strict parameters.
private[blaze] object MockClientBuilder {
def builder(
head: => HeadStage[ByteBuffer],
tail: => BlazeConnection[IO]): ConnectionBuilder[IO, BlazeConnection[IO]] = { req =>
IO {
// Build the pipeline (tail stage mounted on the mock head) and hand back the tail.
val t = tail
LeafBuilder(t).base(head)
t
}
}
// Wraps the builder in a basic (non-pooling) connection manager.
def manager(
head: => HeadStage[ByteBuffer],
tail: => BlazeConnection[IO]): ConnectionManager[IO, BlazeConnection[IO]] =
ConnectionManager.basic(builder(head, tail))
}
| reactormonk/http4s | blaze-client/src/test/scala/org/http4s/client/blaze/MockClientBuilder.scala | Scala | apache-2.0 | 608 |
package com.verizon.bda.trapezium.framework.zookeeper
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
/**
* Created by parmana on 12/20/18.
*/
// Round-trip test against an embedded ZooKeeper started fresh for each case.
class ZookeeperTest extends FlatSpec with Matchers with BeforeAndAfter{
var zk: EmbeddedZookeeper = null
before{
zk = new EmbeddedZookeeper("127.0.0.1:2181")
ZooKeeperClient("127.0.0.1:2181")
}
"ZookeperClient" should "be able insert and access data" in {
// setData writes bytes; the assertion compares against a String, so
// presumably getData decodes the stored bytes — TODO confirm its return type.
ZooKeeperClient.setData("/test11/test111", "test data".getBytes)
// println(ZooKeeperClient.getData("/test11/test111"))
ZooKeeperClient.getData("/test11/test111") should be ("test data")
}
after{
zk.shutdown()
}
}
| Verizon/trapezium | framework/src/test/scala/com/verizon/bda/trapezium/framework/zookeeper/ZookeeperTest.scala | Scala | apache-2.0 | 670 |
package org.mbari.vars.varspub
import java.awt.image.BufferedImage
import java.io._
import java.net.URL
import java.nio.file.{ Files, Paths, Path }
import java.sql.{ ResultSet, DriverManager }
import java.text.SimpleDateFormat
import java.util.{ TimeZone, Date }
import javax.imageio.ImageIO
import com.google.inject.Injector
import org.apache.commons.imaging.Imaging
import org.apache.commons.imaging.common.RationalNumber
import org.apache.commons.imaging.formats.jpeg.JpegImageMetadata
import org.apache.commons.imaging.formats.jpeg.exif.ExifRewriter
import org.apache.commons.imaging.formats.tiff.constants.{ GpsTagConstants, TiffTagConstants, TiffEpTagConstants, ExifTagConstants }
import org.apache.commons.imaging.formats.tiff.write.TiffOutputSet
import org.slf4j.LoggerFactory
import vars.annotation.ui.ToolBelt
import vars.knowledgebase.ui.{ Lookup }
import scala.collection.mutable
import scala.math._
import scala.util.{Failure, Success, Try}
import scala.collection.JavaConverters._
/**
* A class that watermarks images for VARS pub and also adds EXIF data to the images.
*
* @param target The base directory to write the processed images into
* @param overlayImageURL The URL to the image that will be used for an overlay
* @param pathKey This is some key in the internal urls that we used to split the url. the part before
* this key is discarded, the part after this key is split into directories.
* The directories will be created in target to store the image
* @param overlayPercentWidth The width of the overlay as percent of the framegrab
* @param toolBelt The VARS toolbelt object used to create DAO objects
*
* @author Brian Schlining
* @since 2014-11-21T11:01:00
*/
class AnnoImageMigrator2(target: Path,
overlayImageURL: URL,
pathKey: String = "framegrabs",
overlayPercentWidth: Double = 0.4)(implicit toolBelt: ToolBelt)
extends Runnable {
private[this] val now = new Date()
private[this] val log = LoggerFactory.getLogger(getClass)
private[this] val timestampFormat = {
val f = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss")
f.setTimeZone(TimeZone.getTimeZone("UTC"))
f
}
private[this] val yearFormat = new SimpleDateFormat("yyyy")
private[this] val overlayImage = ImageIO.read(overlayImageURL)
private[this] val internalConnection = DriverManager.getConnection("jdbc:jtds:sqlserver://equinox.shore.mbari.org:1433/VARS",
"everyone", "guest")
private[this] val externalConnection = DriverManager.getConnection("jdbc:jtds:sqlserver://dione.mbari.org:51001/VARS",
"everyone", "NeWW1stLst")
/**
* This is the method used to execute the class. This processing will take a
* long time as it checks EVERY to see if every image listed in VARS Pub exists
* on the external web server.
*/
def run(): Unit = {
for {
(e, i) <- mapURLs()
p <- toTargetPath(e, i) if i != null
} {
try {
val image = ImageIO.read(new URL(i))
watermark(image, overlayImage).foreach(image => {
val jpegBytes = addExif(i, image)
val os = new BufferedOutputStream(new FileOutputStream(p.toFile))
os.write(jpegBytes)
os.close()
})
log.debug(s"""Prepped image for external release:
| \\tInternal: $i
| \\tExternal: $e
| \\tTarget : $p""".stripMargin)
} catch {
case e: Exception => log.debug(s"Failed to watermark $i", e)
}
}
}
/**
* Maps the URL's to a fully-qualified path that a watermarked image will
* be written to
* @param external The external VARSpub framegrab URL
* @param internal The internal framegrab URL
* @return A path to write an external framegrab too. Some is returned
* if the image is missing and needs to be created. None if
* the image already exists (so there's no need to updated it)
*/
private def toTargetPath(external: String, internal: String): Option[Path] = {
// True if the image alreay exists on the external web server
// If an external URL is bogus this will also return True so that no copy is attempted
val externalImageExists: Boolean = Try {
val externalURL = new URL(external)
WatermarkUtilities.imageExistsAt(externalURL)
} match {
case Success(a) => a
case Failure(_) => true
}
if (externalImageExists) None
else {
Try {
val idx = internal.toLowerCase.indexOf(pathKey.toLowerCase)
val parts = internal.substring(idx + pathKey.size).split("/").map(_.replace("%20", " "))
// TODO: externalTarget doesn't always seem to map correctly
val externalTarget = Paths.get(target.toString, parts: _*)
val externalDir = externalTarget.getParent
if (Files.notExists(externalDir)) {
log.info("Creating {}", externalDir)
Files.createDirectories(externalDir)
}
Some(externalTarget)
} getOrElse(None) // TODO this swalows exceptions. Need to report them
}
}
/**
* Takes a watermarked image and adds EXIF data.
*
* @param internalUrl The internal URL of the framegrab. Used to look up
* VARS information about the framegrab
* @param image The BufferedImage that we will be turning into a JPEG and writing
* EXIF metadata to
* @return A byte array representing the bufferedimage as JPEG data. EXIF info
* will be included in this representation.
*/
private def addExif(internalUrl: String, image: BufferedImage): Array[Byte] = {
// -- Convert BufferedImage to a jpeg in a byte array
val jpegBytes = WatermarkUtilities.toJpegByteArray(image)
// -- Create a TiffOutputSet from byte array
val outputSet = WatermarkUtilities.getOrCreateOutputSet(jpegBytes)
// -- Lookup metadata from VARS
val exifInfo = lookupMetadataFromDatabase(internalUrl).getOrElse(
ExifInfo("No annotation information is available for this image",
now, now, 0F, 0D, 0D))
// -- Add EXIF
val exifDirectory = outputSet.getOrCreateExifDirectory()
// Create Date
exifDirectory.removeField(ExifTagConstants.EXIF_TAG_DATE_TIME_ORIGINAL)
exifDirectory.add(ExifTagConstants.EXIF_TAG_DATE_TIME_ORIGINAL,
timestampFormat.format(exifInfo.dateTimeOriginal))
// DateTimeDigitized
exifDirectory.removeField(ExifTagConstants.EXIF_TAG_DATE_TIME_DIGITIZED)
exifDirectory.add(ExifTagConstants.EXIF_TAG_DATE_TIME_DIGITIZED,
timestampFormat.format(exifInfo.createDate))
// Time Zone offset
exifDirectory.removeField(TiffEpTagConstants.EXIF_TAG_TIME_ZONE_OFFSET)
exifDirectory.add(TiffEpTagConstants.EXIF_TAG_TIME_ZONE_OFFSET, 0.shortValue)
// UserComment
exifDirectory.removeField(ExifTagConstants.EXIF_TAG_USER_COMMENT)
exifDirectory.add(ExifTagConstants.EXIF_TAG_USER_COMMENT, exifInfo.userComment)
// -- Add ROOT to EXIF
val rootDirectory = outputSet.getOrCreateRootDirectory()
rootDirectory.removeField(TiffTagConstants.TIFF_TAG_COPYRIGHT)
rootDirectory.add(TiffTagConstants.TIFF_TAG_COPYRIGHT,
s"Copyright ${yearFormat.format(exifInfo.dateTimeOriginal)} Monterey Bay Aquarium Research Institute")
// -- Add GPS to EXIF
val gpsDirectory = outputSet.getOrCreateGPSDirectory()
val altAsInt = round(exifInfo.gpsAltitude * 10)
gpsDirectory.removeField(GpsTagConstants.GPS_TAG_GPS_ALTITUDE)
gpsDirectory.add(GpsTagConstants.GPS_TAG_GPS_ALTITUDE, new RationalNumber(altAsInt, 10))
gpsDirectory.removeField(GpsTagConstants.GPS_TAG_GPS_ALTITUDE_REF)
gpsDirectory.add(GpsTagConstants.GPS_TAG_GPS_ALTITUDE_REF,
GpsTagConstants.GPS_TAG_GPS_ALTITUDE_REF_VALUE_BELOW_SEA_LEVEL.byteValue)
outputSet.setGPSInDegrees(exifInfo.gpsLongitude, exifInfo.gpsLatitude)
gpsDirectory.removeField(GpsTagConstants.GPS_TAG_GPS_PROCESSING_METHOD)
gpsDirectory.add(GpsTagConstants.GPS_TAG_GPS_PROCESSING_METHOD, "MANUAL")
WatermarkUtilities.addExifAsJPG(jpegBytes, outputSet)
}
/**
 * Retrieves VARS data about an image.
 *
 * @param internalUrl The URL to the framegrab, used as the database key
 * @return An object containing all the info needed to populate the EXIF metadata.
 *         None if the internalUrl wasn't found in VARS
 */
private def lookupMetadataFromDatabase(internalUrl: String): Option[ExifInfo] = {
  // -- Grab parameters via JPA
  val dao = toolBelt.getAnnotationDAOFactory.newCameraDataDAO()
  dao.startTransaction()
  val cd = dao.findByImageReference(internalUrl)
  if (cd == null) {
    // Image not known to VARS; close the transaction before reporting absence.
    dao.endTransaction()
    None
  } else {
    val vf = cd.getVideoFrame
    val pd = vf.getPhysicalData
    val va = vf.getVideoArchive
    val vas = va.getVideoArchiveSet
    val dives = vas.getCameraDeployments.asScala.map(_.getSequenceNumber).mkString(", ")
    val obs = vf.getObservations.asScala
    dao.endTransaction()
    // -- Extract values, substituting placeholders where data is missing.
    // Use the earliest observation date in the videoframe. Try absorbs both an
    // empty observation list and null dates; 'now' is the fallback placeholder.
    val createDate = Try(obs
      .map(_.getObservationDate)
      .sortBy(_.getTime)
      .head)
      .getOrElse(now)
    // Use the recorded date if available, otherwise 'now' as a placeholder
    val dateTimeOriginal = Option(vf.getRecordedDate).getOrElse(now)
    // Physical data fields may be null; 0 / -999 act as sentinel values
    val altitude: Float = Try(pd.getDepth.floatValue()).getOrElse(0F)
    val latitude: Double = Try(pd.getLatitude.doubleValue()).getOrElse(0D)
    val longitude: Double = Try(pd.getLongitude.doubleValue()).getOrElse(0D)
    val temperature: Float = Try(pd.getTemperature.floatValue()).getOrElse(-999F)
    val salinity: Float = Try(pd.getSalinity.floatValue()).getOrElse(-999F)
    val oxygen: Float = Try(pd.getOxygen.floatValue()).getOrElse(-999F)
    val dateTimeStrForComment = Option(vf.getRecordedDate).map("and time " + timestampFormat.format(_)).getOrElse("")
    // Summarize all annotations on this frame as 'concept: associations, ...'
    val conceptStr = obs.map(o => {
      val ass = o.getAssociations.asScala.mkString(", ")
      s"${o.getConceptName}: $ass"
    }).mkString("'", ",", "'")
    Option(ExifInfo("Image captured from a video camera mounted on underwater remotely operated " +
      s"vehicle ${vas.getPlatformName} on dive number $dives. The original MBARI video " +
      s"tape number is ${va.getName}. This image is from timecode ${vf.getTimecode} " +
      s"$dateTimeStrForComment. The recorded edited location and environmental " +
      f"measurements at time of capture are Lat=$latitude%.7f Lon=$longitude%.7f " +
      f"Depth=$altitude%.1f m Temp=$temperature%.3f C Sal=$salinity%.3f PSU " +
      f"Oxy=$oxygen%.3f ml/l. The Video Annotation and Reference system annotations for" +
      s" this image is/are $conceptStr.",
      createDate,
      dateTimeOriginal,
      altitude,
      latitude,
      longitude))
  }
}
/**
 * Create a mapping of [external image URL] -> [internal image URL] for all
 * annotation images.
 *
 * @return Pairs of (external URL, internal URL)
 */
private def mapURLs(): Seq[(String, String)] = {
  log.info("Starting database lookup")
  val urls = new mutable.ArrayBuffer[(String, String)]
  val external = externalConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
  // Prepare the per-id lookup once instead of interpolating a new SQL string
  // (and re-parsing it) for every row of the outer result set.
  val internal = internalConnection.prepareStatement(
    """
      |SELECT
      |  id,
      |  StillImageURL
      |FROM
      |  CameraData
      |WHERE
      |  id = ?
    """.stripMargin,
    ResultSet.TYPE_FORWARD_ONLY,
    ResultSet.CONCUR_READ_ONLY)
  try {
    val ers = external.executeQuery(
      """
        |SELECT
        |  id,
        |  StillImageURL
        |FROM
        |  CameraData
        |WHERE
        |  StillImageURL LIKE 'http%'
        |ORDER BY
        |  id DESC
      """.stripMargin)
    while (ers.next()) {
      val (id, externalURL) = (ers.getLong(1), ers.getString(2))
      internal.setLong(1, id)
      val irs = internal.executeQuery()
      try {
        if (irs.next()) {
          urls += externalURL -> irs.getString(2)
        }
      } finally {
        irs.close()
      }
    }
  } finally {
    // Release JDBC resources even if a query fails mid-iteration
    external.close()
    internal.close()
  }
  log.info("Finished database lookup")
  urls
}
/**
 * Watermark an image.
 *
 * @param image The BufferedImage to modify
 * @param overlay The overlay to use
 * @return The watermarked image; None is returned if the watermarking fails
 */
private def watermark(image: BufferedImage, overlay: BufferedImage): Option[BufferedImage] =
  Try(WatermarkUtilities.addWatermarkImage(image, overlay, overlayPercentWidth)).toOption
}
object AnnoImageMigrator2 {

  private[this] val logger = LoggerFactory.getLogger(getClass)

  /**
   * Command-line entry point.
   *
   * Expects exactly two arguments:
   *   target  = the root directory to write the new images to
   *   overlay = the path to the overlay image to use for watermarking
   */
  def main(args: Array[String]): Unit = {
    if (args.length != 2) {
      println(
        """
          |Process all annotation images, watermark them and add EXIF metadata for
          | VARS Pub.
          |
          | Usage:
          |   AnnoImageMigrator2(Array(target, overlay))
          |
          | Inputs:
          |   target = The root directory to write the new images to
          |   overlay = The path to the overlay image to use for watermarking
        """.stripMargin)
    } else {
      val target = Paths.get(args(0))
      val overlayImageURL = new File(args(1)).toURI.toURL
      // Pull the shared ToolBelt out of the global VARS Guice injector
      implicit val toolbelt = {
        val injector = Lookup.getGuiceInjectorDispatcher.getValueObject.asInstanceOf[Injector]
        injector.getInstance(classOf[ToolBelt])
      }
      val imageMigrator = new AnnoImageMigrator2(target, overlayImageURL)
      imageMigrator.run()
    }
  }
}
case class ExifInfo(userComment: String,
createDate: Date,
dateTimeOriginal: Date,
gpsAltitude: Float,
gpsLatitude: Double,
gpsLongitude: Double) | sanyaade-mediadev/vars | vars-standalone/src/main/scala/org/mbari/vars/varspub/AnnoImageMigrator2.scala | Scala | lgpl-2.1 | 13,907 |
package org.ensime.server
import java.io.File
import org.ensime.config.ProjectConfig
import org.ensime.debug.ProjectDebugInfo
import org.ensime.protocol._
import org.ensime.util._
import scala.actors._
import scala.actors.Actor._
import scala.tools.nsc.{ Settings }
// -- Events exchanged between the Project actor and the protocol layer
case class SendBackgroundMessageEvent(msg: String)
case class RPCResultEvent(value: WireFormat, callId: Int)
case class RPCErrorEvent(value: String, callId: Int)
case class RPCRequestEvent(req: Any, callId: Int)
case class TypeCheckResultEvent(notes: NoteList)
case class AnalyzerReadyEvent()
case class AnalyzerShutdownEvent()

// -- Requests handled by the Analyzer: file lifecycle
case class ReloadFileReq(file: File)
case class ReloadAllReq()
case class RemoveFileReq(file: File)

// -- Requests handled by the Analyzer: completion
case class ScopeCompletionReq(file: File, point: Int, prefix: String, constructor: Boolean)
case class TypeCompletionReq(file: File, point: Int, prefix: String)
case class PackageMemberCompletionReq(path: String, prefix: String)

// -- Requests handled by the Analyzer: symbol/type inspection
case class SymbolAtPointReq(file: File, point: Int)
case class InspectTypeReq(file: File, point: Int)
case class InspectTypeByIdReq(id: Int)
case class InspectPackageByPathReq(path: String)
case class TypeByIdReq(id: Int)
case class TypeByNameReq(name: String)
case class CallCompletionReq(id: Int)
case class TypeAtPointReq(file: File, point: Int)
/**
 * Central coordinator for an ENSIME session. Receives protocol/analyzer
 * events and routes results and errors back out through the wire protocol,
 * delegating compilation to the Analyzer actor and builds to the
 * IncrementalBuilder.
 */
class Project(val protocol: Protocol) extends Actor with RPCTarget {

  // Register this project as the handler for incoming RPC calls
  protocol.setRPCTarget(this)

  // Placeholder actor until initProject installs a real Analyzer
  protected var analyzer: Actor = actor {}
  // Builder is created lazily on first use (see getOrStartBuilder)
  protected var builder: Option[Actor] = None
  protected var config: ProjectConfig = ProjectConfig.nullConfig
  protected var debugInfo: Option[ProjectDebugInfo] = None

  // Main message loop: translate internal events into protocol sends
  def act() {
    println("Project waiting for init...")
    loop {
      try {
        receive {
          case SendBackgroundMessageEvent(msg: String) => {
            protocol.sendBackgroundMessage(msg)
          }
          case IncomingMessageEvent(msg: WireFormat) => {
            protocol.handleIncomingMessage(msg)
          }
          case msg: AnalyzerReadyEvent => {
            protocol.sendCompilerReady
          }
          case result: TypeCheckResultEvent => {
            protocol.sendTypeCheckResult(result.notes)
          }
          case RPCResultEvent(value, callId) => {
            protocol.sendRPCReturn(value, callId)
          }
          case RPCErrorEvent(msg, callId) => {
            protocol.sendRPCError(msg, callId)
          }
        }
      } catch {
        // Keep the message loop alive on any failure: log and continue looping
        case e: Exception => {
          println("Error at Project message loop: " + e + " :\n" + e.getStackTraceString)
        }
      }
    }
  }

  // Install a new configuration, then recycle the analyzer and drop the builder
  protected def initProject(conf: ProjectConfig) {
    this.config = conf;
    restartCompiler
    shutdownBuilder
  }

  // Ask the current analyzer to shut down, then start a fresh one with the
  // current configuration
  protected def restartCompiler() {
    analyzer ! AnalyzerShutdownEvent()
    analyzer = new Analyzer(this, protocol, this.config)
    analyzer.start
  }

  // Return the incremental builder, creating and starting it on first call
  protected def getOrStartBuilder(): Actor = {
    builder match {
      case Some(b) => b
      case None =>
        {
          val b = new IncrementalBuilder(this, protocol, this.config)
          builder = Some(b)
          b.start
          b
        }
    }
  }

  // Signal the builder (if any) to shut down and forget the reference
  protected def shutdownBuilder() {
    for (b <- builder) {
      b ! BuilderShutdownEvent
    }
    builder = None
  }
}
| bbatsov/ensime | src/main/scala/org/ensime/server/Project.scala | Scala | gpl-3.0 | 3,203 |
import stainless.lang._
/**
 * Stainless verification benchmark exercising ADT invariant checks.
 *
 * NOTE(review): functions named `*Failed*` appear intended to be REJECTED by
 * the verifier (the class invariant can be violated on some input), while the
 * `*Ok*` functions should verify. Do not "fix" the failing cases — the
 * failures are the point of the benchmark.
 */
object ADTInvariantCheck {

  // Invariant: x must be nonzero
  case class Foo(x: BigInt) {
    require(x != 0)
  }

  // x is unconstrained, so x == 0 can violate Foo's invariant
  def fooFailed1(x: BigInt) = Foo(x)
  // Directly violates the invariant
  def fooFailed2 = Foo(0)
  // Given x <= 0, the then-branch always runs and x == 0 violates the
  // invariant; the else-branch contradicts the precondition (unreachable)
  def fooFailed3(x: BigInt) = {
    require(x <= 0)
    if (x < 10) Foo(x) else Foo(x - 10)
  }

  def fooOk1 = Foo(1)
  // Precondition x > 0 implies x != 0, so the invariant holds
  def fooOk2(x: BigInt) = {
    require(x > 0)
    Foo(x)
  }

  // Invariant: either arg1 is positive or arg2 carries a value
  case class Bar(arg1: BigInt, arg2: Option[BigInt]) {
    require(arg1 > 0 || arg2.isDefined)
  }

  // Violates the invariant: arg1 <= 0 and arg2 is empty
  def barFailed1: Bar = Bar(-1, None())
  def barOk1: Bar = Bar(-1, Some(1))
  def barOk2: Bar = Bar(12, None())
}
| epfl-lara/stainless | frontends/benchmarks/verification/invalid/ADTInvariantCheck.scala | Scala | apache-2.0 | 571 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset and retrieves a sample of Scala code snippets that match specific criteria, giving a quick overview of the dataset's contents without deeper analysis.