code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package dbpedia.dataparsers.ontology.datatypes
import scala.util.matching.Regex
/**
* Represents an enumeration of literals.
*/
//TODO make immutable
class EnumerationDatatype(name : String) extends Datatype(name)
{
  // Literals in reverse insertion order: the literal added most recently is matched first.
  private var literals = List[Literal]()

  /**
   * Adds a new literal to this enumeration.
   *
   * @param name the canonical name of the literal; also used as a match keyword
   * @param keywords additional keywords that should map to this literal
   */
  def addLiteral(name : String, keywords : List[String] = List.empty): Unit = {
    literals = new Literal(name, keywords) :: literals
  }

  /**
   * Parses the given text against all registered literals.
   *
   * @return the name of the first literal (most recently added checked first)
   *         whose regex matches somewhere in the text, or None if none match
   */
  def parse(text : String) : Option[String] = {
    literals.find(literal => literal.regex.findFirstIn(text).isDefined).map(_.name)
  }

  private class Literal(val name : String, val keywords : List[String])
  {
    // Case-insensitive, Unicode-aware, whole-word match on the name or any keyword.
    val regex = new Regex("(?iu)\\b(?:" + (name :: keywords).mkString("|") + ")\\b")
  }
}
| FnOio/dbpedia-parsing-functions-scala | src/main/scala/dbpedia/dataparsers/ontology/datatypes/EnumerationDatatype.scala | Scala | gpl-2.0 | 864 |
package com.sksamuel.elastic4s.http.search.queries.specialized
import com.sksamuel.elastic4s.json.XContentBuilder
import com.sksamuel.elastic4s.searches.queries.funcscorer._
object ScoreFunctionBuilderFn {
  /**
   * Dispatches a [[ScoreFunction]] to the builder function for its concrete
   * subtype, producing the XContentBuilder (JSON) body for that score function.
   *
   * NOTE(review): the match has no fallback case, so an unhandled ScoreFunction
   * subtype would throw a MatchError at runtime — confirm the set of subtypes
   * is sealed/exhaustive.
   */
  def apply(func: ScoreFunction): XContentBuilder =
    func match {
      case r: RandomScoreFunction => RandomScoreFunctionBuilderFn(r)
      case g: GaussianDecayScore => GaussianDecayScoreBuilderFn(g)
      case s: ScriptScore => ScriptScoreBuilderFn(s)
      case f: FieldValueFactor => FieldValueFactorBuilderFn(f)
      case e: ExponentialDecayScore => ExponentialDecayScoreBuilderFn(e)
      case w: WeightScore => WeightBuilderFn(w)
      case l: LinearDecayScore => LinearDecayScoreBuilderFn(l)
    }
}
| Tecsisa/elastic4s | elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/search/queries/specialized/ScoreFunctionBuilderFn.scala | Scala | apache-2.0 | 756 |
package org.labrad.manager
import io.netty.bootstrap.ServerBootstrap
import io.netty.channel._
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.handler.ssl.{SniHandler, SslContext}
import io.netty.util.DomainNameMapping
import java.util.concurrent.ThreadFactory
import java.util.concurrent.atomic.AtomicLong
import org.labrad.PacketCodec
import org.labrad.util._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success, Try}
/**
 * Manager policies for securing incoming client/server connections with TLS.
 */
sealed trait TlsPolicy

object TlsPolicy {
  /**
   * Initial connection uses TLS.
   */
  case object ON extends TlsPolicy

  /**
   * No TLS; connection is unencrypted and attempts to upgrade with STARTTLS
   * will be rejected. This is provided for testing and for compatibility with
   * the old manager.
   */
  case object OFF extends TlsPolicy

  /**
   * Default STARTTLS behavior. Connection starts unencrypted and we use
   * STARTTLS to upgrade later. STARTTLS is required for remote connections,
   * but optional for connections from localhost.
   */
  case object STARTTLS extends TlsPolicy

  /**
   * STARTTLS is optional for all connections, including from remote hosts.
   */
  case object STARTTLS_OPT extends TlsPolicy

  /**
   * STARTTLS is required for all connection, including from localhost.
   */
  case object STARTTLS_FORCE extends TlsPolicy

  /**
   * Parses a policy name (case-insensitive) into a [[TlsPolicy]].
   *
   * @throws RuntimeException (via sys.error) if s is not a recognized mode
   */
  def fromString(s: String): TlsPolicy = {
    s.toLowerCase match {
      case "on" => ON
      case "off" => OFF
      case "starttls" => STARTTLS
      case "starttls-opt" => STARTTLS_OPT
      case "starttls-force" => STARTTLS_FORCE
      // Error message now lists every accepted mode (the original omitted
      // 'starttls-opt' and 'starttls-force' even though they are parsed above).
      case _ => sys.error(s"Invalid tls mode '$s'. Expected 'on', 'off', 'starttls', 'starttls-opt', or 'starttls-force'.")
    }
  }
}
object Listener {
  // Counters give each event loop group's threads a unique name suffix
  // across repeated Listener instantiations.
  private val bossFactoryCounter = new AtomicLong(0)
  private val workerFactoryCounter = new AtomicLong(0)
  private val loginFactoryCounter = new AtomicLong(0)

  /** Single-threaded group that accepts incoming connections. */
  def newBossGroup(): EventLoopGroup =
    NettyUtil.newEventLoopGroup("LabradManagerBoss", bossFactoryCounter, 1)

  /** Group that performs I/O for established connections. */
  def newWorkerGroup(): EventLoopGroup =
    NettyUtil.newEventLoopGroup("LabradManagerWorker", workerFactoryCounter)

  /** Dedicated group for login handling, which may block on auth requests. */
  def newLoginGroup(): EventLoopGroup =
    NettyUtil.newEventLoopGroup("LabradManagerLogin", loginFactoryCounter)
}
/**
 * Listens on one or more ports for incoming labrad network connections.
 *
 * Binding happens eagerly during construction: one server channel per entry
 * in `listeners`. If any port fails to bind, channels that did bind are shut
 * down and construction fails with an exception.
 */
class Listener(
  auth: AuthService,
  hub: Hub,
  tracker: StatsTracker,
  messager: Messager,
  listeners: Seq[(Int, TlsPolicy)],
  tlsHostConfig: TlsHostConfig,
  authTimeout: Duration,
  registryTimeout: Duration,
  bossGroup: EventLoopGroup,
  workerGroup: EventLoopGroup,
  loginGroup: EventLoopGroup
) extends Logging {

  /**
   * Configures, binds, and returns a netty server channel on the given port.
   *
   * NOTE(review): the catch block calls stop(), which reads the `channels`
   * val — but bootServer is invoked while `channels` is still being
   * initialized, so at that point `channels` is null and stop() would throw
   * an NPE that replaces the original bind failure inside the enclosing Try.
   * Confirm and consider removing the stop() call here, since the caller
   * already cleans up via shutdown(channels) when any listener fails.
   */
  private def bootServer(port: Int, tlsPolicy: TlsPolicy): NioServerSocketChannel = {
    try {
      val b = new ServerBootstrap()
      b.group(bossGroup, workerGroup)
        .channel(classOf[NioServerSocketChannel])
        .childOption[java.lang.Boolean](ChannelOption.TCP_NODELAY, true)
        .childOption[java.lang.Boolean](ChannelOption.SO_KEEPALIVE, true)
        .childHandler(new ChannelInitializer[SocketChannel] {
          override def initChannel(ch: SocketChannel): Unit = {
            val p = ch.pipeline
            // When TLS is on from the first byte, SNI selects the certificate.
            if (tlsPolicy == TlsPolicy.ON) {
              p.addLast(new SniHandler(tlsHostConfig.sslCtxs))
            }
            p.addLast("packetCodec", new PacketCodec())
            // Use a dedicated event loop group with its own thread pool for the
            // login handler, since it may block when making auth requests.
            p.addLast(loginGroup, "loginHandler",
              new LoginHandler(auth, hub, tracker, messager, tlsHostConfig, tlsPolicy,
                authTimeout = authTimeout, registryTimeout = registryTimeout))
          }
        })
      // Bind and start to accept incoming connections.
      val ch = b.bind(port).sync().channel.asInstanceOf[NioServerSocketChannel]
      log.info(s"now accepting labrad connections: port=$port, tlsPolicy=$tlsPolicy")
      ch
    } catch {
      case e: Exception =>
        stop()
        throw e
    }
  }

  // Attempt to bind every configured (port, policy) pair; collect successes
  // and failures separately so partial startup can be rolled back.
  val listenerTrys = listeners.map { case (port, tlsPolicy) => Try(bootServer(port, tlsPolicy)) }
  val channels = listenerTrys.collect { case Success(ch) => ch }
  val failures = listenerTrys.collect { case Failure(e) => e }

  // If any listeners failed to start, shutdown those that _did_ start and fail.
  if (!failures.isEmpty) {
    shutdown(channels)
    throw new Exception(s"some listeners failed to start: ${failures.mkString(", ")}")
  }

  /** Closes all bound server channels, blocking until each close completes. */
  def stop() {
    log.info("shutting down")
    shutdown(channels)
  }

  // Closes each channel in turn and waits synchronously for close to finish.
  private def shutdown(listeners: Seq[Channel]): Unit = {
    for (ch <- listeners) {
      ch.close()
      ch.closeFuture.sync()
    }
  }
}
| labrad/scalabrad | manager/src/main/scala/org/labrad/manager/Listener.scala | Scala | mit | 4,926 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.businessdetails
import models.registrationprogress.{Completed, NotStarted, Section, Started}
import org.joda.time.LocalDate
import org.mockito.Matchers.{any, eq => meq}
import org.mockito.Mockito._
import play.api.libs.json.{JsNull, Json}
import uk.gov.hmrc.http.cache.client.CacheMap
import utils.AmlsSpec
/**
 * Unit tests for [[BusinessDetails]]: JSON round-tripping, the isComplete
 * rules, the change-tracking setter methods, and the registration-progress
 * Section derivation from the cache.
 */
class businessDetailsSpec extends AmlsSpec {

  // Shared fixture values used to assemble the complete model below.
  val previouslyRegistered = PreviouslyRegisteredYes(Some("12345678"))
  val cache = mock[CacheMap]
  val regForVAT = VATRegisteredYes("123456789")
  // scalastyle:off
  val activityStartDate = ActivityStartDate(new LocalDate(1990, 2, 24))
  // NOTE(review): newActivityStartDate appears unused in this spec — confirm before removing.
  val newActivityStartDate = ActivityStartDate(new LocalDate(1990, 2, 24))
  val regForCorpTax = CorporationTaxRegisteredYes("1234567890")
  val contactingYou = ContactingYou(Some("1234567890"), Some("test@test.com"))
  val regOfficeIsUK = RegisteredOfficeIsUK(true)
  val regOfficeOrMainPlaceUK = RegisteredOfficeUK("38B", "line2", None, None, "AA1 1AA")
  val correspondenceAddressIsUk = CorrespondenceAddressIsUk(true)
  val correspondenceAddressUk = CorrespondenceAddressUk("Name",
    "Business Name",
    "address 1",
    "address 2",
    Some("address 3"),
    Some("address 4"),
    "AA11 1AA")
  val correspondenceAddress = CorrespondenceAddress(Some(correspondenceAddressUk), None)

  // A model with every section populated; used as the baseline for copy-based tests.
  val completeModel = BusinessDetails(
    previouslyRegistered = Some(previouslyRegistered),
    activityStartDate = Some(activityStartDate),
    vatRegistered = Some(regForVAT),
    corporationTaxRegistered = Some(regForCorpTax),
    contactingYou = Some(contactingYou),
    registeredOfficeIsUK = Some(regOfficeIsUK),
    registeredOffice = Some(regOfficeOrMainPlaceUK),
    altCorrespondenceAddress = Some(true),
    correspondenceAddress = Some(correspondenceAddress),
    hasAccepted = true
  )

  // Expected JSON form of completeModel, used by both serialise and deserialise tests.
  val completeJson = Json.obj(
    "previouslyRegistered" -> Json.obj("previouslyRegistered" -> true,
      "prevMLRRegNo" -> "12345678"),
    "activityStartDate" -> Json.obj(
      "startDate" -> "1990-02-24"),
    "vatRegistered" -> Json.obj("registeredForVAT" -> true,
      "vrnNumber" -> "123456789"),
    "corporationTaxRegistered" -> Json.obj("registeredForCorporationTax" -> true,
      "corporationTaxReference" -> "1234567890"),
    "contactingYou" -> Json.obj(
      "phoneNumber" -> "1234567890",
      "email" -> "test@test.com"),
    "registeredOfficeIsUK" -> Json.obj(
      "isUK" -> true),
    "registeredOffice" -> Json.obj(
      "addressLine1" -> "38B",
      "addressLine2" -> "line2",
      "addressLine3" -> JsNull,
      "addressLine4" -> JsNull,
      "postCode" -> "AA1 1AA",
      "dateOfChange" -> JsNull),
    "altCorrespondenceAddress" -> true,
    "correspondenceAddress" -> Json.obj(
      "yourName" -> "Name",
      "businessName" -> "Business Name",
      "correspondenceAddressLine1" -> "address 1",
      "correspondenceAddressLine2" -> "address 2",
      "correspondenceAddressLine3" -> "address 3",
      "correspondenceAddressLine4" -> "address 4",
      "correspondencePostCode" -> "AA11 1AA"
    ),
    "hasChanged" -> false,
    "hasAccepted" -> true
  )

  "BusinessDetails Serialisation" must {
    "Serialise as expected" in {
      Json.toJson(completeModel) must
        be(completeJson)
    }
    "Deserialise as expected" in {
      completeJson.as[BusinessDetails] must
        be(completeModel)
    }
    "isComplete must return true" in {
      completeModel.isComplete must be(true)
    }
  }

  it when {
    "hasChanged is missing from the Json" must {
      "Deserialise correctly" in {
        (completeJson - "hasChanged").as[BusinessDetails] must
          be (completeModel)
      }
    }
  }

  // Completeness depends on the combination of previous registration and start date.
  "isComplete" must {
    "return false" when {
      "previously registered but no previous AMLS number and no activity start date" in {
        completeModel.copy(previouslyRegistered = Some(PreviouslyRegisteredYes(None)),
          activityStartDate = None).isComplete must be(false)
      }
      "not previously registered and no activity start date" in {
        completeModel.copy(previouslyRegistered = Some(PreviouslyRegisteredNo),
          activityStartDate = None).isComplete must be(false)
      }
    }
    "return true" when {
      "previously registered with a previous AMLS number but no activity start date" in {
        completeModel.copy(previouslyRegistered = Some(PreviouslyRegisteredYes(Some("12345678"))),
          activityStartDate = None).isComplete must be(true)
      }
      "previously registered with no previous AMLS number but with activity start date" in {
        completeModel.copy(previouslyRegistered = Some(PreviouslyRegisteredYes(None)),
          activityStartDate = Some(ActivityStartDate(LocalDate.now()))).isComplete must be(true)
      }
      "not previously registered and with activity start date" in {
        completeModel.copy(previouslyRegistered = Some(PreviouslyRegisteredNo),
          activityStartDate = Some(ActivityStartDate(LocalDate.now()))).isComplete must be(true)
      }
    }
  }

  "Partially complete BusinessDetails" must {
    val partialJson = Json.obj(
      "previouslyRegistered" -> Json.obj("previouslyRegistered" -> true,
        "prevMLRRegNo" -> "12345678"
      ),
      "hasChanged" -> false,
      "hasAccepted" -> false
    )
    val partialModel = BusinessDetails(Some(previouslyRegistered), None)
    "Serialise as expected" in {
      Json.toJson(partialModel) must
        be(partialJson)
    }
    "Deserialise as expected" in {
      partialJson.as[BusinessDetails] must
        be(partialModel)
    }
    "isComplete must return false" in {
      partialModel.isComplete must be(false)
    }
  }

  "isComplete return false" when {
    "altCorrespondenceAddress is true but correspondenceAddress is not set" in {
      val modelWithMissingCorrespondecneAddress = completeModel.copy(
        altCorrespondenceAddress = Some(true),
        correspondenceAddress = None
      )
      modelWithMissingCorrespondecneAddress.isComplete must be(false)
    }
  }

  // Each setter on Option[BusinessDetails] should create a model with just that field set.
  "'None'" when {
    val initial: Option[BusinessDetails] = None
    "Merged with previously registered with MLR" must {
      "return BusinessDetails with correct previously registered for MLR option" in {
        val result = initial.previouslyRegistered(previouslyRegistered)
        result must be (BusinessDetails(Some(previouslyRegistered), None, None, None, None, None, None, None, None, None, true))
      }
    }
    "Merged with RegisteredForVAT" must {
      "return BusinessDetails with correct VAT Registered option" in {
        val result = initial.vatRegistered(regForVAT)
        result must be (BusinessDetails(None, None, Some(regForVAT), None, None, None, None, None, None, None, true))
      }
    }
    "Merged with CorporationTaxRegistered" must {
      "return BusinessDetails with correct corporation tax registered option" in {
        val result = initial.corporationTaxRegistered(regForCorpTax)
        result must be (BusinessDetails(None, None, None, Some(regForCorpTax), None, None, None, None, None, None, true))
      }
    }
    "Merged with RegisteredOfficeOrMainPlaceOfBusiness" must {
      "return BusinessDetails with correct registeredOfficeOrMainPlaceOfBusiness" in {
        val result = initial.registeredOffice(regOfficeOrMainPlaceUK)
        result must be (BusinessDetails(None, None, None, None, None, None, Some(regOfficeOrMainPlaceUK), None, None, None, true))
      }
    }
    "Merged with CorrespondenceAddressUk" must {
      "return BusinessDetails with correct CorrespondenceAddressUk" in {
        val result = initial.correspondenceAddress(CorrespondenceAddress(Some(correspondenceAddressUk), None))
        result must be (BusinessDetails(None, None, None, None, None, None, None, None, None, Some(CorrespondenceAddress(Some(correspondenceAddressUk), None)), true))
      }
    }
  }

  // Setters must flip hasChanged only when the new value differs from the old one.
  "BusinessDetails class" when {
    "previouslyRegistered value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.previouslyRegistered(previouslyRegistered)
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & previouslyRegisterd Properties" in {
          val res = completeModel.previouslyRegistered(PreviouslyRegisteredNo)
          res.hasChanged must be (true)
          res.previouslyRegistered must be (Some(PreviouslyRegisteredNo))
        }
      }
    }
    "activityStartDate value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.activityStartDate(activityStartDate)
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & activityStartDate Properties" in {
          val res = completeModel.activityStartDate(ActivityStartDate(new LocalDate(1344, 12, 1)))
          res.hasChanged must be (true)
          res.activityStartDate must be (Some(ActivityStartDate(new LocalDate(1344, 12, 1))))
        }
      }
    }
    "vatRegistered value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.vatRegistered(regForVAT)
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & vatRegistered Properties" in {
          val res = completeModel.vatRegistered(VATRegisteredNo)
          res.hasChanged must be (true)
          res.vatRegistered must be (Some(VATRegisteredNo))
        }
      }
    }
    "corporationTaxRegistered value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.corporationTaxRegistered(regForCorpTax)
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & corporationTaxRegistered Properties" in {
          val res = completeModel.corporationTaxRegistered(CorporationTaxRegisteredYes("3333333333"))
          res.hasChanged must be (true)
          res.corporationTaxRegistered must be (Some(CorporationTaxRegisteredYes("3333333333")))
        }
      }
    }
    "contactingYou value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.contactingYou(contactingYou)
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & contactingYou Properties" in {
          val res = completeModel.contactingYou(ContactingYou(Some("0000000000"), Some("new@testvalue.com")))
          res.hasChanged must be (true)
          res.contactingYou must be (Some(ContactingYou(Some("0000000000"), Some("new@testvalue.com"))))
        }
      }
    }
    "registeredOffice value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.registeredOffice(regOfficeOrMainPlaceUK)
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & registeredOffice Properties" in {
          val res = completeModel.registeredOffice(RegisteredOfficeUK("Line 1 New", "Line 2 New", None, None, "NEW CODE"))
          res.hasChanged must be (true)
          res.registeredOffice must be (Some(RegisteredOfficeUK("Line 1 New", "Line 2 New", None, None, "NEW CODE")))
        }
      }
    }
    "correspondenceAddressIsUk value is not set" when {
      "correspondenceIsUk value is then set" must {
        "set the hasChanged & correspondenceAddressIsUk properties" in {
          val res = completeModel.correspondenceAddressIsUk(CorrespondenceAddressIsUk(true))
          res.correspondenceAddressIsUk must be (Some(CorrespondenceAddressIsUk(true)))
          res.hasChanged must be (true)
        }
      }
    }
    "correspondenceAddressIsUk value is set" when {
      "is the same" must {
        "not set the hasChanged & correspondenceAddressIsUk properties" in {
          val model = completeModel.copy(correspondenceAddressIsUk = Some(CorrespondenceAddressIsUk(true)))
          val res = model.correspondenceAddressIsUk(CorrespondenceAddressIsUk(true))
          res.hasChanged must be (false)
          res.correspondenceAddressIsUk must be (Some(CorrespondenceAddressIsUk(true)))
        }
      }
      "is different" must {
        "set the hasChanged & correspondenceAddressIsUk properties" in {
          val model = completeModel.copy(correspondenceAddressIsUk = Some(CorrespondenceAddressIsUk(true)))
          val res = model.correspondenceAddressIsUk(CorrespondenceAddressIsUk(false))
          res.hasChanged must be (true)
          res.correspondenceAddressIsUk must be (Some(CorrespondenceAddressIsUk(false)))
        }
      }
    }
    "correspondenceAddress value is set" which {
      "is the same as before" must {
        "leave the object unchanged" in {
          val res = completeModel.correspondenceAddress(CorrespondenceAddress(Some(correspondenceAddressUk), None))
          res must be (completeModel)
          res.hasChanged must be (false)
        }
      }
      "is different" must {
        "set the hasChanged & correspondenceAddress Properties" in {
          val res = completeModel.correspondenceAddress(CorrespondenceAddress(Some(CorrespondenceAddressUk("name new", "Business name new", "Line 1 New", "Line 2 New", None, None, "NEW CODE")), None))
          res.hasChanged must be (true)
          res.correspondenceAddress must be (Some(CorrespondenceAddress(Some(CorrespondenceAddressUk("name new", "Business name new", "Line 1 New", "Line 2 New", None, None, "NEW CODE")), None)))
        }
      }
    }
  }

  // Registration-progress section derived from whatever model is in the cache.
  "Section" must {
    "return a NotStarted Section when there is no data at all" in {
      val notStartedSection = Section("businessdetails", NotStarted, false, controllers.businessdetails.routes.WhatYouNeedController.get)
      when(cache.getEntry[BusinessDetails](meq("about-the-business"))(any())) thenReturn None
      BusinessDetails.section(cache) must be(notStartedSection)
    }
    "return a Completed Section when model is complete and has not changed" in {
      val complete = completeModel
      val completedSection = Section("businessdetails", Completed, false, controllers.businessdetails.routes.SummaryController.get)
      when(cache.getEntry[BusinessDetails](meq("about-the-business"))(any())) thenReturn Some(complete)
      BusinessDetails.section(cache) must be(completedSection)
    }
    "return a Started Section when model is incomplete" in {
      val incomplete = BusinessDetails(Some(previouslyRegistered), None)
      val startedSection = Section("businessdetails", Started, false, controllers.businessdetails.routes.WhatYouNeedController.get)
      when(cache.getEntry[BusinessDetails](meq("about-the-business"))(any())) thenReturn Some(incomplete)
      BusinessDetails.section(cache) must be(startedSection)
    }
  }
}
} | hmrc/amls-frontend | test/models/businessdetails/businessDetailsSpec.scala | Scala | apache-2.0 | 15,977 |
package com.bio4j.dynamograph.model.go
import com.bio4j.dynamograph.{DynamoEdge, DynamoVertex}
import com.bio4j.dynamograph.model.go.GoSchema._
import com.bio4j.dynamograph.model.go.TableGoImplementation._
import com.bio4j.dynamograph.reader.GoReaders._
/**
 * Concrete graph schema bindings for the Gene Ontology (GO) data: vertex and
 * edge singletons wiring together the schema types, DynamoDB tables, and
 * readers declared in GoSchema / TableGoImplementation / GoReaders.
 */
object GoImplementation {

  // vertices
  case object GoTerm extends DynamoVertex(GoTermType, GoTermTable , goTermVertexReader)
  case object GoNamespaces extends DynamoVertex(GoNamespacesType, GoNamespacesTable, goNamespaceVertexReader)

  // edges (each relates GoTerm to GoTerm, except Namespace which targets GoNamespaces)
  case object HasPart extends DynamoEdge(
    GoTerm, HasPartType, GoTerm, HasPartTables, hasPartEdgeReader)
  case object IsA extends DynamoEdge(
    GoTerm, IsAType, GoTerm, IsATables, isAEdgeReader)
  case object PartOf extends DynamoEdge(
    GoTerm, PartOfType, GoTerm, PartOfTables, partOfEdgeReader)
  case object NegativelyRegulates extends DynamoEdge(
    GoTerm, NegativelyRegulatesType, GoTerm, NegativelyRegulatesTables, negativelyRegulatesEdgeReader)
  case object PositivelyRegulates extends DynamoEdge(
    GoTerm, PositivelyRegulatesType, GoTerm, PositivelyRegulatesTables, positivelyRegulatesEdgeReader)
  case object Regulates extends DynamoEdge(
    GoTerm, RegulatesType, GoTerm, RegulatesTables, regulatesEdgeReader)
  case object Namespace extends DynamoEdge(
    GoTerm, NamespaceType, GoNamespaces, NamespaceTables, namespaceEdgeReader)
}
| bio4j/dynamograph | src/main/scala/com/bio4j/dynamograph/model/go/GoImplementation.scala | Scala | agpl-3.0 | 1,451 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression.features
import org.junit.Ignore
import org.junit.Test
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.BaseIntegrationTestCompanion
import org.scalaide.debug.internal.expression.BaseIntegrationTest
import org.scalaide.debug.internal.expression.TestValues.VarargsTestCase
/**
 * Test uses a class with many overloaded methods containing both constant
 * list of arguments and varargs. Each eval checks that expression evaluation
 * resolves the same overload the Scala compiler would.
 */
class VarargsTest extends BaseIntegrationTest(VarargsTest) {

  @Test
  def simpleVararg(): Unit = {
    eval(""" vararg.f("1", "2") """, "s*", Java.String)
    eval(""" vararg.g(1, "2") """, "i,s*", Java.String)
    eval(""" vararg.h("1", 2, "3") """, "s,i,s*", Java.String)
  }

  @Test
  def simpleVarargWithStrangeName(): Unit = {
    eval(""" vararg.f_+("1", "2") """, "s*", Java.String)
    eval(""" vararg.g_+(1, "2") """, "i,s*", Java.String)
  }

  @Test
  def simpleVarargWithNoArgs(): Unit = eval(""" vararg.f() """, "s*", Java.String)

  @Test
  def varargWithOneArgWithNoArgs(): Unit = eval(""" vararg.g(1) """, "i,s*", Java.String)

  // Ignored tests below track known overload-resolution gaps under ticket O-4581.
  @Ignore("TODO - O-4581 - proper method with varargs should be chosen when erased signature is the same")
  @Test
  def sameErasedSignatureVararg(): Unit = {
    eval(""" sameErasedSignature.f(1, 2, 3) """, 6, Java.boxed.Integer)
    eval(""" sameErasedSignature.f("1", "2", "3") """, "123", Java.String)
  }

  @Test
  def argumentAndVararg(): Unit = {
    eval(""" argumentAndVarArg.f("1") """, "s", Java.String)
    eval(""" argumentAndVarArg.f("1", "2") """, "s,s*", Java.String)
  }

  @Test
  def varargWithSimpleOverloads(): Unit = {
    eval(""" varargWithSimpleOverloads.f() """, s(""), Java.String)
    eval(""" varargWithSimpleOverloads.f("1") """, "s", Java.String)
    eval(""" varargWithSimpleOverloads.f("1", "2") """, "s*", Java.String)
  }

  @Ignore("TODO - O-4581 - proper method with varargs should be chosen with subtypes")
  @Test
  def varargWithSubtyping(): Unit = {
    eval(""" varargsAndSubtyping.f(new A) """, 1, Java.boxed.Integer)
    eval(""" varargsAndSubtyping.f(new B) """, "2", Java.String)
    eval(""" varargsAndSubtyping.f(new B, new A) """, 1, Java.boxed.Integer)
    eval(""" varargsAndSubtyping.f(new A, new B) """, 1, Java.boxed.Integer)
    eval(""" varargsAndSubtyping.f() """, "2", Java.String)
  }

  @Ignore("TODO - O-4581 - proper method with varargs should be chosen with primitives coercion")
  @Test
  def varargWithPrimitiveCoercion(): Unit = {
    eval(""" varargsAndPrimitiveCoercion.f(1) """, 1, Java.boxed.Integer)
    eval(""" varargsAndPrimitiveCoercion.f(1.0) """, 1.0, Java.boxed.Double)
    eval(""" varargsAndPrimitiveCoercion.f(1, 1.0) """, 2.0, Java.boxed.Double)
    expectReflectiveCompilationError(""" varargsAndPrimitiveCoercion.f() """)
  }
}

// Companion supplying the shared test-case fixture to BaseIntegrationTest.
object VarargsTest extends BaseIntegrationTestCompanion(VarargsTestCase)
| Kwestor/scala-ide | org.scala-ide.sdt.debug.expression.tests/src/org/scalaide/debug/internal/expression/features/VarargsTest.scala | Scala | bsd-3-clause | 2,986 |
package cgta
import java.util.concurrent.locks.Lock
import oscala.{OScalaExportsPlat, OScalaExportsShared}
//////////////////////////////////////////////////////////////
// Copyright (c) 2013 Ben Jackman
// All Rights Reserved
// please contact ben@jackman.biz
// for licensing inquiries
// Created by bjackman @ 9/24/13 11:45 PM
//////////////////////////////////////////////////////////////
package object serland extends SerlandExportsShared
  with SerlandExportsPlat
  with OScalaExportsShared
  with OScalaExportsPlat {

  /**
   * Runs blk while holding the given lock, releasing the lock afterwards.
   *
   * The lock is acquired *before* entering the try block (the canonical
   * java.util.concurrent.locks.Lock usage pattern): if lock() itself throws,
   * we must not call unlock() on a lock this thread never acquired, which
   * would raise IllegalMonitorStateException and mask the original failure.
   *
   * @return the result of evaluating blk
   */
  def withLock[A](lock: Lock)(blk: => A): A = {
    lock.lock()
    try {
      blk
    } finally {
      lock.unlock()
    }
  }

  /** Signals an intentionally unsupported operation. */
  def UNSUPPORTED(reason: String): Nothing = throw new UnsupportedOperationException(reason)

  /** Raises a serialization read failure. */
  def READ_ERROR(reason: String): Nothing = throw new SerReadException(reason)
  def READ_ERROR(reason: String, causedBy: Throwable): Nothing = throw new SerReadException(reason, causedBy)

  /** Raises a serialization write failure. */
  def WRITE_ERROR(reason: String): Nothing = throw new SerWriteException(reason)
  def WRITE_ERROR(reason: String, causedBy: Throwable): Nothing = throw new SerWriteException(reason, causedBy)
}
| cgta/open | serland/shared/src/main/scala/cgta/serland/package.scala | Scala | mit | 1,125 |
package me.jeffmay.neo4j.client.cypher
import scala.util.matching.Regex
/**
 * A valid argument to pass into the [[CypherStringContext]] used to insert some literal string or parameter(s)
 * into a Cypher [[CypherStatement]].
 *
 * Implementations in this file: template parts (identifiers, labels),
 * parameter args (fields and objects), and embedded statement fragments.
 */
sealed trait CypherArg {

  /**
   * The string of Cypher text sent to the server as the "statement" argument in the
   * <a href="http://neo4j.com/docs/stable/rest-api-transactional.html">Transaction Endpoint</a>
   *
   * @note This may still contain placeholders (surrounded by {}) that are substituted on the
   *       Neo4j server to avoid Cypher injection attacks.
   */
  def template: String
}
/**
 * A literal string to insert into the Cypher string.
 *
 * @param template the literal value to insert into the [[CypherStatement.template]].
 */
// Extends Proxy so equals/hashCode/toString all delegate to the template string.
sealed abstract class CypherTemplatePart(override val template: String) extends CypherArg with Proxy {
  override def self: Any = template
}
/**
 * An identifier used to refer to nodes, relationships, or paths in a pattern.
 *
 * For example, the following query
 * {{{
 * val e = Cypher.ident("entity")
 * cypher"MATCH $e WHERE $e.id = 1 RETURN $e"
 * }}}
 * would serialize as the following template:
 * {{{
 * "MATCH entity WHERE entity.id = 1 RETURN entity"
 * }}}
 *
 * @note Instances are not created directly; use [[Cypher.ident]]. Successful
 *       construction implies the name is a valid identifier.
 *
 * @see <a href="http://neo4j.com/docs/stable/cypher-identifiers.html">Cypher Identifiers</a>
 * @param name the name of the identifier
 */
final class CypherIdentifier private (name: String) extends CypherTemplatePart(name)

object CypherIdentifier {

  // An identifier must start with a letter, followed by letters, digits, or underscores.
  private[cypher] val ValidChars: String = "[a-zA-Z][a-zA-Z0-9_]*"
  private[cypher] val Valid: Regex = s"^$ValidChars$$".r

  /** True when the given string is a legal Cypher identifier. */
  def isValid(literal: String): Boolean = Valid.findFirstMatchIn(literal).isDefined

  /**
   * Validates the given string, producing either a wrapped identifier or an
   * invalid-format result.
   */
  def apply(literal: String): CypherResult[CypherIdentifier] =
    if (isValid(literal)) CypherResultValid(new CypherIdentifier(literal))
    else CypherResultInvalid(CypherIdentifierInvalidFormat(literal))
}
/**
 * A label to add to either a node or relationship.
 *
 * @see <a href="http://neo4j.com/docs/stable/graphdb-neo4j.html#graphdb-neo4j-labels">Label Documentation</a>
 * @param name the name of the label (without the preceding colon ':')
 */
final class CypherLabel private (name: String) extends CypherTemplatePart(s":$name")

object CypherLabel {

  // Labels are one or more letters, digits, or underscores.
  private[cypher] val ValidChars: String = "[a-zA-Z0-9_]+"
  private[cypher] val Valid: Regex = s"^$ValidChars$$".r

  // Cache of labels already validated, so repeated labels reuse one instance.
  // NOTE(review): this is an unsynchronized mutable cache — concurrent calls to
  // apply may lose cache updates (results remain correct since the Map itself
  // is immutable). Confirm single-threaded use if strict caching matters.
  private[this] var validated: Map[String, CypherLabel] = Map.empty

  /** True when the given string is a legal label name. */
  def isValid(label: String): Boolean = {
    Valid.findFirstMatchIn(label).isDefined
  }

  /**
   * Validates the given label, returning a cached instance when available,
   * otherwise validating, caching, and wrapping it; invalid names produce
   * a CypherResultInvalid.
   */
  def apply(label: String): CypherResult[CypherLabel] = {
    validated.get(label).map(CypherResultValid(_)) getOrElse {
      if (isValid(label)) {
        val valid = new CypherLabel(label)
        validated += label -> valid
        CypherResultValid(valid)
      } else {
        CypherResultInvalid(CypherLabelInvalidFormat(label))
      }
    }
  }
}
/**
 * Marker trait for all [[CypherArg]]s that add [[CypherProps]] to a namespace.
 */
sealed trait CypherParamArg extends CypherArg {

  /**
   * The namespace in which the [[CypherProps]] live.
   */
  def namespace: String

  /**
   * Extract the properties provided by this parameter.
   */
  def toProps: CypherProps
}
/**
 * Holds a single parameter field within one of the [[CypherStatement.parameters]] namespaces.
 *
 * @note This is not constructed directly. Rather, you use the [[Cypher.params]] methods to build this.
 * @param namespace the key of the [[CypherProps]] within which field names are unique
 * @param id the field name within the namespace
 * @param value the value of the parameter object's field
 */
case class CypherParamField private[cypher] (namespace: String, id: String, value: CypherValue) extends CypherParamArg {
  // Renders as a server-side placeholder reference, e.g. "{props}.id".
  override val template: String = s"{$namespace}.$id"
  override def toProps: CypherProps = Map(id -> value)
}
/**
 * Holds a single parameter object as one of the [[CypherStatement.parameters]] namespaces.
 *
 * This is short-hand for a collection of [[CypherParamField]]s and only works in certain circumstances.
 *
 * For example, a collection of fields would serialize into a [[CypherStatement]] that looks like:
 * {{{
 * {
 *   "statement": "CREATE (n { id: {props}.id, name: {props}.name })",
 *   "parameters": {
 *     "props": {
 *       "id": 1,
 *       "name": "myProps"
 *     }
 *   }
 * }
 * }}}
 * And a param object would serialize into a [[CypherStatement]] that looks like:
 * {{{
 * {
 *   "statement": "CREATE (n { props })",
 *   "parameters": {
 *     "props": {
 *       "id": 1,
 *       "name": "myProps"
 *     }
 *   }
 * }
 * }}}
 *
 * @note This does not work when using a MATCH in your query.
 *
 * @note This is not constructed directly. Rather, you use one of the [[Cypher.params]] methods.
 *
 * @param namespace the key of the [[CypherProps]] within which field names are unique
 * @param props the map of fields to unfold as properties in place
 */
case class CypherParamObject private[cypher] (namespace: String, props: CypherProps) extends CypherParamArg {
  // Renders the whole namespace inline, e.g. "{ props }".
  override val template: String = s"{ $namespace }"
  override def toProps: CypherProps = props
}
/**
 * Represents a fragment of cypher query to embed into another [[CypherStatement]].
 *
 * @param statement the fragment of template and any embedded [[CypherParams]].
 */
case class CypherStatementFragment private[cypher] (statement: CypherStatement) extends CypherArg {
  // Delegates to the wrapped statement's template text.
  override def template: String = statement.template
} | jeffmay/neo4j-scala-client | core/src/main/scala/me/jeffmay/neo4j/client/cypher/CypherArg.scala | Scala | apache-2.0 | 5837 |
package cz.kamenitxan.jakon.core.custom_pages
import cz.kamenitxan.jakon.core.Director
import cz.kamenitxan.jakon.logging.Logger
object CustomPageInitializer {

  /**
   * Instantiates and registers each custom page class with the Director.
   *
   * NOTE(review): the `return` inside the foreach lambda is a nonlocal return —
   * it aborts this whole method, so pages after the first invalid one are
   * silently skipped. Confirm whether a per-page skip was intended instead.
   * NOTE(review): `Class.newInstance()` is deprecated since Java 9; prefer
   * `getDeclaredConstructor().newInstance()`.
   */
  def initCustomPages(customPages: Seq[Class[_]]): Unit = {
    Logger.info("Initializing custom pages")
    customPages.foreach(cp => {
      Logger.info("Initializing custom page: " + cp.getSimpleName)
      if (!isChildOf(cp, classOf[AbstractCustomPage])) {
        Logger.error(cp.getSimpleName + " is not child of AbstractCustomPage")
        return
      }
      Director.registerCustomPage(cp.newInstance().asInstanceOf[AbstractCustomPage])
    })
    Logger.info("Initializing custom pages complete")
  }

  /**
   * Instantiates and registers each static page class with the Director.
   * Same caveats as initCustomPages regarding the nonlocal `return` and the
   * deprecated `newInstance()` call.
   */
  def initStaticPages(customPages: Seq[Class[_]]): Unit = {
    Logger.info("Initializing static pages")
    customPages.foreach(cp => {
      Logger.info("Initializing static page: " + cp.getSimpleName)
      if (!isChildOf(cp, classOf[AbstractStaticPage])) {
        Logger.error(cp.getSimpleName + " is not child of AbstractStaticPage")
        return
      }
      Director.registerCustomPage(cp.newInstance().asInstanceOf[AbstractStaticPage])
    })
    Logger.info("Initializing static pages complete")
  }

  /**
   * Walks up child's superclass chain, returning true once a superclass is
   * found from which `parent` is assignable; false when the chain is
   * exhausted (Object's superclass is null).
   */
  @scala.annotation.tailrec
  private def isChildOf(child: Class[_], parent: Class[_]): Boolean = {
    val supperClass = child.getSuperclass
    if (supperClass == null) {
      return false
    }
    if (supperClass.isAssignableFrom(parent)) {
      true
    } else {
      isChildOf(supperClass, parent)
    }
  }
}
| kamenitxan/Jakon | modules/backend/src/main/scala/cz/kamenitxan/jakon/core/custom_pages/CustomPageInitializer.scala | Scala | bsd-3-clause | 1,426 |
package org.rebeam.boxes.core
import org.rebeam.boxes.core._
import org.rebeam.boxes.core.data._
import org.scalacheck.Arbitrary
import org.scalatest._
import org.scalatest.prop.PropertyChecks
import BoxUtils._
import BoxTypes._
import BoxScriptImports._
import scalaz._
import Scalaz._
/**
 * Behavioural specification for [[ListIndexing]]: verifies that selections
 * (kept as a Set, an Option, or an index view) stay consistent with a boxed
 * list while both the list and the selection are mutated inside atomic scripts.
 */
class ListIndexingSpec extends WordSpec with PropertyChecks with ShouldMatchers {
  // Builds a script that asserts running `a` yields `expected`.
  def assertBox[A](a: BoxScript[A], expected: A): BoxScript[Unit] = a.map(x => assert(x == expected))
  "ListIndexing" should {
    "keep selection set within list using setIsInList and no default selection" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create(Set("a", "b", "c", "f"))
          r <- ListIndexing.setIsInList(l, s)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("b", "c", "f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("c", "f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("f"))
          _ <- l() = List("f", "f", "f")
          _ <- assertBox(s, Set("f"))
          _ <- l() = List()
          _ <- assertBox(s, Set[String]())
          _ <- l() = List("f", "f", "f")
          _ <- assertBox(s, Set[String]())
          _ <- s() = Set("f")
          _ <- assertBox(s, Set("f"))
          _ <- l() = List("A", "B", "C")
          _ <- assertBox(s, Set[String]())
          _ <- s() = Set("A", "C", "D")
          _ <- assertBox(s, Set("A", "C"))
        } yield ()
      }
    }
    "keep selection set within list using setIsInList and selecting first by default" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create(Set("a", "b", "c", "f"))
          r <- ListIndexing.setIsInList(l, s, ListIndexing.selectFirstAsSet[String])
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("b", "c", "f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("c", "f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("f"))
          _ <- l() = List("f", "f", "f")
          _ <- assertBox(s, Set("f"))
          _ <- l() = List()
          _ <- assertBox(s, Set[String]())
          _ <- l() = List("f", "f", "f")
          _ <- assertBox(s, Set[String]("f"))
          _ <- s() = Set("f")
          _ <- assertBox(s, Set("f"))
          //Different behaviour here when selecting first by default
          _ <- l() = List("A", "B", "C")
          _ <- assertBox(s, Set("A"))
          _ <- s() = Set("A", "C", "D")
          _ <- assertBox(s, Set("A", "C"))
          //Selecting outside the list should select first
          _ <- s() = Set("X")
          _ <- assertBox(s, Set("A"))
          //Select something valid should work, then selecting nothing should select first instead
          _ <- s() = Set("B")
          _ <- assertBox(s, Set("B"))
          _ <- s() = Set.empty[String]
          _ <- assertBox(s, Set("A"))
          //Selecting something or nothing in an empty list should select nothing
          _ <- l() = List()
          _ <- assertBox(s, Set[String]())
          _ <- s() = Set("B")
          _ <- assertBox(s, Set[String]())
          _ <- s() = Set.empty[String]
          _ <- assertBox(s, Set[String]())
        } yield ()
      }
    }
    "keep selection set within list using setIsInList and selecting all by default" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create(Set("a", "b", "c", "f"))
          r <- ListIndexing.setIsInList(l, s, ListIndexing.selectAllAsSet[String])
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("b", "c", "f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("c", "f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("f"))
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, Set("f"))
          _ <- l() = List("f", "f", "f")
          _ <- assertBox(s, Set("f"))
          _ <- l() = List()
          _ <- assertBox(s, Set[String]())
          _ <- l() = List("f", "f", "f")
          _ <- assertBox(s, Set[String]("f"))
          _ <- s() = Set("f")
          _ <- assertBox(s, Set("f"))
          //Different behaviour here when selecting all by default
          _ <- l() = List("A", "B", "C")
          _ <- assertBox(s, Set("A", "B", "C"))
          _ <- s() = Set("A", "C", "D")
          _ <- assertBox(s, Set("A", "C"))
          //Selecting outside the list should select all
          _ <- s() = Set("X")
          _ <- assertBox(s, Set("A", "B", "C"))
          //Select something valid should work, then selecting nothing should select all instead
          _ <- s() = Set("B")
          _ <- assertBox(s, Set("B"))
          _ <- s() = Set.empty[String]
          _ <- assertBox(s, Set("A", "B", "C"))
          //Selecting something or nothing in an empty list should select nothing
          _ <- l() = List()
          _ <- assertBox(s, Set[String]())
          _ <- s() = Set("B")
          _ <- assertBox(s, Set[String]())
          _ <- s() = Set.empty[String]
          _ <- assertBox(s, Set[String]())
        } yield ()
      }
    }
    "keep selection option within list using optionIsInList and no default selection" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create("a".some)
          r <- ListIndexing.optionIsInList(l, s)
          _ <- assertBox(s, "a".some)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, None)
          _ <- s() = "c".some
          _ <- assertBox(s, "c".some)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, "c".some)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, None)
          _ <- l() = List("c", "c", "c")
          _ <- assertBox(s, None)
          _ <- s() = "c".some
          _ <- assertBox(s, "c".some)
          _ <- s() = "X".some
          _ <- assertBox(s, None)
        } yield ()
      }
    }
    "keep selection option within list using optionIsInList and selecting first by default" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create("a".some)
          r <- ListIndexing.optionIsInList(l, s, ListIndexing.selectFirstAsOption[String])
          _ <- assertBox(s, "a".some)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, "b".some)
          _ <- s() = "c".some
          _ <- assertBox(s, "c".some)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, "c".some)
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(s, "d".some)
          _ <- l() = List("c", "c", "c")
          _ <- assertBox(s, "c".some)
          _ <- l() = List()
          _ <- assertBox(s, None)
          _ <- l() = List("A", "B", "C")
          _ <- assertBox(s, "A".some)
          _ <- s() = "B".some
          _ <- assertBox(s, "B".some)
          _ <- s() = "X".some
          _ <- assertBox(s, "A".some)
          _ <- l() = List()
          _ <- assertBox(s, None)
          _ <- s() = "X".some
          _ <- assertBox(s, None)
          _ <- s() = None
          _ <- assertBox(s, None)
        } yield ()
      }
    }
    "read/write selection as index: Option[Int] using indexFromListAndOption" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create("a".some)
          i = ListIndexing.indexFromListAndOption(l, s)
          _ <- assertBox(i(), 0.some)
          //Remove "a" from list, so selection is no longer in it
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(i(), None)
          //Select "c" and check index
          _ <- s() = "c".some
          _ <- assertBox(i(), 1.some)
          //Try to select some indices outside list, and fail, clearing selection
          _ <- i() = -1.some
          _ <- assertBox(s, None)
          _ <- assertBox(i(), None)
          _ <- i() = 10.some
          _ <- assertBox(s, None)
          _ <- assertBox(i(), None)
          //Reinstate selection
          _ <- s() = "d".some
          _ <- assertBox(i(), 2.some)
          //Another invalid selection
          _ <- s() = "X".some
          _ <- assertBox(i(), None)
          //Now change selection using valid indices
          _ <- i() = 3.some
          _ <- assertBox(s, "e".some)
          _ <- assertBox(i(), 3.some)
          _ <- i() = 4.some
          _ <- assertBox(s, "f".some)
          _ <- assertBox(i(), 4.some)
          //Clear selection directly and check index
          _ <- s() = None
          _ <- assertBox(s, None)
          _ <- assertBox(i(), None)
        } yield ()
      }
    }
    "read/write selection as indices: Set[Int] using indexFromListAndSet" in {
      atomic {
        for {
          l <- create(List("a", "b", "c", "d", "e", "f"))
          s <- create(Set("a"))
          i = ListIndexing.indexFromListAndSet(l, s)
          _ <- assertBox(i(), Set(0))
          //Remove "a" from list, so selection is no longer in it
          _ <- modifyBox(l, (l: List[String]) => l.tail)
          _ <- assertBox(i(), Set.empty[Int])
          //Select "c" and check indices
          _ <- s() = Set("c")
          _ <- assertBox(i(), Set(1))
          //Try to select some indices outside list, and fail, clearing selection
          _ <- i() = Set(-1)
          _ <- assertBox(s, Set.empty[String])
          _ <- assertBox(i(), Set.empty[Int])
          _ <- i() = Set(10, 100)
          _ <- assertBox(s, Set.empty[String])
          _ <- assertBox(i(), Set.empty[Int])
          //Reinstate selection
          _ <- s() = Set("d")
          _ <- assertBox(i(), Set(2))
          //Another invalid selection
          _ <- s() = Set("X")
          _ <- assertBox(i(), Set.empty[Int])
          //Now change selection using valid indices
          _ <- i() = Set(3)
          _ <- assertBox(s, Set("e"))
          _ <- assertBox(i(), Set(3))
          _ <- i() = Set(4)
          _ <- assertBox(s, Set("f"))
          _ <- assertBox(i(), Set(4))
          //Clear selection directly and check indices
          _ <- s() = Set.empty[String]
          _ <- assertBox(s, Set.empty[String])
          _ <- assertBox(i(), Set.empty[Int])
          //Select multiple valid indices and check selected elements
          _ <- i() = Set(3, 4)
          _ <- assertBox(s, Set("e", "f"))
          _ <- assertBox(i(), Set(3, 4))
          //Select multiple valid elements and check selected indices
          _ <- s() = Set("c", "d")
          _ <- assertBox(s, Set("c", "d"))
          _ <- assertBox(i(), Set(1, 2))
          //Select multiple valid indices and some invalid indices and check selected elements
          _ <- i() = Set(-1, 3, 4, 10, 100)
          _ <- assertBox(s, Set("e", "f"))
          _ <- assertBox(i(), Set(3, 4))
          //Select multiple valid elements and some invalid elements and check selected indices
          _ <- s() = Set("A", "B", "c", "d", "X", "Y")
          _ <- assertBox(s, Set("A", "B", "c", "d", "X", "Y")) //Note we are not constraining selection to be in list, so it keeps the invalid selections
          _ <- assertBox(i(), Set(1, 2)) //But the indices only reflect the valid ones
        } yield ()
      }
    }
  }
}
| trepidacious/boxes-core | src/test/scala/org/rebeam/boxes/core/ListIndexingSpec.scala | Scala | gpl-2.0 | 11,978 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.util
import collection.mutable.{SynchronizedMap, WeakHashMap}
trait Observable[T] {
  // Weakly-referenced subscriber set: a callback is dropped automatically once
  // nothing else references it. The SynchronizedMap mixin makes access thread-safe.
  // NOTE(review): WeakHashMap/SynchronizedMap mixins are deprecated in newer Scala
  // versions — revisit when upgrading the Scala version.
  private val subscribers = new WeakHashMap[T => _, Unit]() with SynchronizedMap[T => _, Unit]
  /**
   * Execute a function on every update.
   * Note that the function is stored in a weak hash map i.e. it is removed as soon as it is no longer referenced.
   *
   * @return The provided function
   */
  def onUpdate[U](f: T => U) = {
    // Only the (weak) key matters; the stored value is irrelevant.
    subscribers.update(f, Unit)
    f
  }
  /** Invokes every currently-registered subscriber with the given event. */
  protected def publish(event: T) {
    for(subscriber <- subscribers.keys)
      subscriber(event)
  }
  /** Drops all subscriptions at once. */
  def removeSubscriptions() {
    subscribers.clear()
  }
} | fusepoolP3/p3-silk | silk-core/src/main/scala/de/fuberlin/wiwiss/silk/util/Observable.scala | Scala | apache-2.0 | 1,227 |
package me.alexray.wolfram.impl
import java.net.URLEncoder
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.Materializer
import akka.util.ByteString
import com.lightbend.lagom.scaladsl.api.ServiceCall
import me.alexray.wolfram.api.WolframService
import play.api.Configuration
import scala.concurrent.{ExecutionContext, Future}
class WolframServiceImpl(config: Configuration)
                        (implicit system: ActorSystem, mat: Materializer, ec: ExecutionContext)
  extends WolframService
{
  /** Wolfram|Alpha application id, read once from configuration key `wolfram.appid`. */
  val appID = config.underlying.getString("wolfram.appid")

  /** Base URL of the Wolfram|Alpha v2 REST API. */
  val apiUrl = s"http://api.wolframalpha.com/v2/"

  /**
   * Calls the full-results `query` endpoint and returns the raw response body.
   *
   * On a non-success HTTP status the returned future fails with a descriptive
   * exception; the response entity is explicitly drained so the pooled
   * connection is not left blocked (previously the for-comprehension guard
   * failed with an opaque NoSuchElementException and never consumed the entity).
   */
  override def query(q: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>
    val url = apiUrl + s"query?appid=$appID&input=" + URLEncoder.encode(q, "UTF-8")
    Http().singleRequest(HttpRequest(uri = Uri(url))).flatMap { response =>
      if (response.status.isSuccess()) {
        Unmarshal(response).to[String]
      } else {
        // Drain the unused entity; an unconsumed entity blocks the connection.
        response.entity.discardBytes()
        Future.failed(new RuntimeException(s"Wolfram query request failed with status ${response.status}"))
      }
    }
  }

  /**
   * Calls the `simple` endpoint, which renders the answer as an image, and
   * returns the raw image bytes.
   */
  override def simple(q: String): ServiceCall[NotUsed, Array[Byte]] = ServiceCall { _ =>
    println(s"questions = '$q'") // typo "quetions" fixed
    // The simple endpoint expects spaces encoded as %20 rather than '+'.
    val url = apiUrl + s"simple?appid=$appID&input=" + URLEncoder.encode(q, "UTF-8").replace("+", "%20")
    println(s"url = '$url'")
    Http().singleRequest(HttpRequest(uri = Uri(url))).flatMap { response =>
      if (response.status.isSuccess()) {
        Unmarshal(response).to[ByteString].map { bytes =>
          println(s"received image ${bytes.size} bytes long")
          bytes.toArray
        }
      } else {
        response.entity.discardBytes()
        Future.failed(new RuntimeException(s"Wolfram simple request failed with status ${response.status}"))
      }
    }
  }
}
| AlexanderRay/lagom-on-kube | lagomKubeServices/wolframService_impl/src/main/scala/me/alexray/wolfram/impl/WolframServiceImpl.scala | Scala | apache-2.0 | 1,649 |
package pl.writeonly.jswt.scaladsl
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.jface.dialogs.IPageChangedListener
import org.eclipse.jface.dialogs.IPageChangingListener
import org.eclipse.jface.dialogs.PageChangedEvent
import org.eclipse.jface.dialogs.PageChangingEvent
import org.eclipse.jface.operation.IRunnableWithProgress
import org.eclipse.jface.util.IOpenEventListener
import org.eclipse.jface.util.IPropertyChangeListener
import org.eclipse.jface.util.OpenStrategy
import org.eclipse.jface.util.PropertyChangeEvent
import org.eclipse.jface.viewers.deferred.IConcurrentModel
import org.eclipse.jface.viewers.deferred.IConcurrentModelListener
import org.eclipse.jface.viewers.AbstractTreeViewer
import org.eclipse.jface.viewers.CheckStateChangedEvent
import org.eclipse.jface.viewers.ComboViewer
import org.eclipse.jface.viewers.DoubleClickEvent
import org.eclipse.jface.viewers.IBaseLabelProvider
import org.eclipse.jface.viewers.ICheckStateListener
import org.eclipse.jface.viewers.ICheckable
import org.eclipse.jface.viewers.IDoubleClickListener
import org.eclipse.jface.viewers.ILabelProviderListener
import org.eclipse.jface.viewers.IOpenListener
import org.eclipse.jface.viewers.IPostSelectionProvider
import org.eclipse.jface.viewers.ISelectionChangedListener
import org.eclipse.jface.viewers.ISelectionProvider
import org.eclipse.jface.viewers.ITreeViewerListener
import org.eclipse.jface.viewers.LabelProviderChangedEvent
import org.eclipse.jface.viewers.ListViewer
import org.eclipse.jface.viewers.OpenEvent
import org.eclipse.jface.viewers.SelectionChangedEvent
import org.eclipse.jface.viewers.StructuredViewer
import org.eclipse.jface.viewers.TableViewer
import org.eclipse.jface.viewers.CheckboxTableViewer
import org.eclipse.jface.viewers.TableViewerColumn
import org.eclipse.jface.viewers.TreeExpansionEvent
import org.eclipse.jface.viewers.TreeViewer
import org.eclipse.jface.viewers.TreeViewerColumn
import org.eclipse.jface.viewers.Viewer
import org.eclipse.jface.viewers.ViewerColumn
import org.eclipse.jface.wizard.WizardDialog
import org.eclipse.swt.events.SelectionEvent
import org.eclipse.swt.widgets.Composite
import org.eclipse.swt.SWT
import XScalaWTAPI._
import pl.writeonly.jswt.scaladsl.viewers.TableViewerBuilder
import pl.writeonly.jswt.scaladsl.viewers.CheckboxTableViewerBuilder
import pl.writeonly.jswt.scaladsl.viewers.TreeViewerBuilder
import org.eclipse.jface.action.Action
import org.eclipse.jface.action.IMenuListener
import org.eclipse.jface.action.IMenuManager
import org.eclipse.swt.graphics.Image
import org.eclipse.jface.resource.ImageDescriptor
import org.eclipse.jface.action.ToolBarManager
import org.eclipse.swt.widgets.ToolBar
import org.eclipse.jface.action.IAction
import pl.writeonly.jswt.scaladsl.XScalaWT.toolBar
import pl.writeonly.jswt.scaladsl.XScalaWT.toolBar$
/**
 * XScalaWT-style builder and listener DSL for JFace viewers, dialogs and actions.
 * Provides factory methods returning `parent => widget` functions, plus implicit
 * conversions from Scala functions to the various JFace listener interfaces.
 */
object XJFace {
  // --- enrich JFace viewers/columns with the XScalaWT wrapper ---
  implicit def viewer2XScalaWT[W <: Viewer](viewer: W) = new WidgetX[W](viewer)
  implicit def viewerColumn2XScalaWT[W <: ViewerColumn](viewerColumn: W) = new WidgetX[W](viewerColumn)
  // Action whose run() body is supplied as a by-name block.
  class ExtAction(runFunc: => Unit) extends Action {
    override def run() = runFunc
    def setImage(img: Image) = setImageDescriptor(ImageDescriptor.createFromImage(img))
  }
  def action(setups: (ExtAction => Any)*)(runFunc: => Unit) = {
    setupAndReturn(new ExtAction(runFunc), setups: _*)
  }
  // Builds a ToolBarManager over a freshly created ToolBar, applies setups, adds actions.
  def toolBarManager$[A <: IAction](style: Int = SWT.NONE)(tbmSetups: (ToolBarManager => Any)*)(setups: (ToolBar => Any)*)(actions: A*) = { (parent: Composite) =>
    val tb = toolBar$(style)(setups: _*)(parent)
    val toolbarMan = new ToolBarManager(tb)
    tbmSetups.foreach(_(toolbarMan))
    actions.foreach(toolbarMan.add(_))
    toolbarMan.update(true)
    toolbarMan
  }
  def toolBarManager[A <: IAction](tbmSetups: (ToolBarManager => Any)*)(setups: (ToolBar => Any)*)(actions: A*) =
    toolBarManager$()(tbmSetups: _*)(setups: _*)(actions: _*)
  // --- viewer factories: each returns a function from parent Composite to a configured viewer ---
  def listViewer(setups: (ListViewer => Any)*) = (parent: Composite) =>
    setupAndReturn(new ListViewer(parent, SWT.BORDER), setups: _*)
  def comboViewer(setups: (ComboViewer => Any)*) = (parent: Composite) =>
    setupAndReturn(new ComboViewer(parent, SWT.BORDER), setups: _*)
  def tableViewer(setups: (TableViewer => Any)*) = (parent: Composite) =>
    setupAndReturn(new TableViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER), setups: _*)
  def treeViewer(setups: (TreeViewer => Any)*) = (parent: Composite) =>
    setupAndReturn(new TreeViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER), setups: _*)
  def tableViewerColumn(setups: (TableViewerColumn => Any)*) = (parent: TableViewer) =>
    setupAndReturn(new TableViewerColumn(parent, SWT.LEFT), setups: _*)
  def treeViewerColumn(setups: (TreeViewerColumn => Any)*) = (parent: TreeViewer) =>
    setupAndReturn(new TreeViewerColumn(parent, SWT.LEFT), setups: _*)
  // --- builder factories: apply builder setups, then viewer setups, then return the builder ---
  def tableViewerBuilder$[A](style: Int = SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER)
      (setups: (TableViewerBuilder[A] => Any)*)(viewerSetups: (TableViewer => Any)*) = (parent: Composite) => {
    val builder = setupAndReturn(new TableViewerBuilder[A](parent, style), setups: _*)
    val viewer = builder.viewer
    viewerSetups.foreach(_(viewer))
    builder
  }
  def tableViewerBuilder[A](setups: (TableViewerBuilder[A] => Any)*)(viewerSetups: (TableViewer => Any)*) = tableViewerBuilder$()(setups: _*)(viewerSetups: _*)
  def checkboxTableViewerBuilder$[A](style: Int = SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER)
      (setups: (CheckboxTableViewerBuilder[A] => Any)*)(viewerSetups: (CheckboxTableViewer => Any)*) = (parent: Composite) => {
    val builder = setupAndReturn(new CheckboxTableViewerBuilder[A](parent, style), setups: _*)
    val viewer = builder.viewer
    viewerSetups.foreach(_(viewer))
    builder
  }
  def treeViewerBuilder$[A](style: Int = SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER)
      (setups: (TreeViewerBuilder[A] => Any)*)(viewerSetups: (TreeViewer => Any)*) = (parent: Composite) => {
    val builder = setupAndReturn(new TreeViewerBuilder[A](parent, style), setups: _*)
    val viewer = builder.viewer
    viewerSetups.foreach(_(viewer))
    builder
  }
  def treeViewerBuilder[A](setups: (TreeViewerBuilder[A] => Any)*)(viewerSetups: (TreeViewer => Any)*) = treeViewerBuilder$()(setups: _*)(viewerSetups: _*)
  //  private type AddMenuListener = { def addMenuListener(l: IMenuListener) }
  //
  //  def addMenuListener(l: IMenuListener) =
  //    (subject: AddMenuListener) => subject.addMenuListener(l)
  // --- listener adapters: each group pairs an addXxxListener combinator with an
  // --- implicit function-to-listener conversion and onXxx convenience overloads ---
  implicit def menuListener(func: IMenuManager => Any): IMenuListener =
    new IMenuListener {
      def menuAboutToShow(m: IMenuManager) { func(m) }
    }
  def addSelectionChangedListener(l: ISelectionChangedListener) =
    (subject: ISelectionProvider) => subject.addSelectionChangedListener(l)
  implicit def selectionChangedListener(func: SelectionChangedEvent => Any): ISelectionChangedListener =
    new ISelectionChangedListener {
      def selectionChanged(e: SelectionChangedEvent) { func(e) }
    }
  def onSelectionChange(func: SelectionChangedEvent => Any) =
    addSelectionChangedListener(func)
  // can't be => Any, or we lose type inference
  def onSelectionChange(func: => Unit) =
    addSelectionChangedListener((e: SelectionChangedEvent) => func)
  implicit def onSelectionChangeImplicit(func: SelectionChangedEvent => Any) =
    addSelectionChangedListener(func)
  def addPostSelectionChangedListener(l: ISelectionChangedListener) =
    (subject: IPostSelectionProvider) => subject.addPostSelectionChangedListener(l)
  def postSelectionChange(func: SelectionChangedEvent => Any) =
    addPostSelectionChangedListener(func)
  def postSelectionChange(func: => Unit) =
    addPostSelectionChangedListener((e: SelectionChangedEvent) => func)
  def addTreeListener(l: ITreeViewerListener) =
    (subject: AbstractTreeViewer) => subject.addTreeListener(l)
  def treeViewerListener(collapsed: TreeExpansionEvent => Any = ignore, expanded: TreeExpansionEvent => Any = ignore): ITreeViewerListener =
    new ITreeViewerListener {
      override def treeCollapsed(e: TreeExpansionEvent) = collapsed(e)
      override def treeExpanded(e: TreeExpansionEvent) = expanded(e)
    }
  def onTreeViewer(collapsed: TreeExpansionEvent => Any = ignore, expanded: TreeExpansionEvent => Any = ignore) =
    addTreeListener(treeViewerListener(collapsed, expanded))
  // Structural type: any widget exposing addPropertyChangeListener (reflection-based call).
  private type AddPropertyChangeListener = { def addPropertyChangeListener(l: IPropertyChangeListener) }
  implicit def propertyChangeListener(func: PropertyChangeEvent => Any): IPropertyChangeListener =
    new IPropertyChangeListener {
      def propertyChange(e: PropertyChangeEvent) { func(e) }
    }
  implicit def onPropertyChangeImplicit[T <: AddPropertyChangeListener](func: PropertyChangeEvent => Any) =
    addPropertyChangeListener[T](func)
  def addPropertyChangeListener[T <: AddPropertyChangeListener](l: IPropertyChangeListener) =
    (subject: T) => subject.addPropertyChangeListener(l)
  def onPropertyChange[T <: AddPropertyChangeListener](func: PropertyChangeEvent => Any) =
    addPropertyChangeListener(func)
  def onPropertyChange[T <: AddPropertyChangeListener](func: => Unit) =
    addPropertyChangeListener((e: PropertyChangeEvent) => func)
  def addPageChangingListener(l: IPageChangingListener) =
    (subject: WizardDialog) => subject.addPageChangingListener(l)
  implicit def pageChangingListener(func: PageChangingEvent => Any): IPageChangingListener =
    new IPageChangingListener {
      def handlePageChanging(e: PageChangingEvent) { func(e) }
    }
  def onPageChanging(func: PageChangingEvent => Any) =
    addPageChangingListener(func)
  def onPageChanging(func: => Unit) =
    addPageChangingListener((e: PageChangingEvent) => func)
  implicit def onPageChangingImplicit(func: PageChangingEvent => Any) =
    addPageChangingListener(func)
  def addPageChangedListener(l: IPageChangedListener) =
    (subject: WizardDialog) => subject.addPageChangedListener(l)
  implicit def pageChangedListener(func: PageChangedEvent => Any): IPageChangedListener =
    new IPageChangedListener {
      def pageChanged(e: PageChangedEvent) { func(e) }
    }
  def onPageChanged(func: PageChangedEvent => Any) =
    addPageChangedListener(func)
  // can't be => Any, or we lose type inference
  def onPageChanged(func: => Unit) =
    addPageChangedListener((e: PageChangedEvent) => func)
  implicit def onPageChangedImplicit(func: PageChangedEvent => Any) =
    addPageChangedListener(func)
  def addOpenListener(l: IOpenListener) =
    (subject: StructuredViewer) => subject.addOpenListener(l)
  implicit def openListener(func: OpenEvent => Any): IOpenListener =
    new IOpenListener {
      def open(e: OpenEvent) { func(e) }
    }
  def onOpen(func: OpenEvent => Any) =
    addOpenListener(func)
  def onOpen(func: => Unit) =
    addOpenListener((e: OpenEvent) => func)
  implicit def onOpenImplicit(func: OpenEvent => Any) =
    addOpenListener(func)
  def addOpenEventListener(l: IOpenEventListener) =
    (subject: OpenStrategy) => subject.addOpenListener(l)
  implicit def openEventListener(func: SelectionEvent => Any): IOpenEventListener =
    new IOpenEventListener {
      def handleOpen(e: SelectionEvent) { func(e) }
    }
  def onOpenEvent(func: SelectionEvent => Any) =
    addOpenEventListener(func)
  def onOpenEvent(func: => Unit) =
    addOpenEventListener((e: SelectionEvent) => func)
  implicit def onOpenEventImplicit(func: SelectionEvent => Any) =
    addOpenEventListener(func)
  def addDoubleClickListener(l: IDoubleClickListener) =
    (subject: StructuredViewer) => subject.addDoubleClickListener(l)
  def onDoubleClick(func: DoubleClickEvent => Any) =
    addDoubleClickListener(func)
  // can't be => Any, or we lose type inference
  def onDoubleClick(func: => Unit) =
    addDoubleClickListener((e: DoubleClickEvent) => func)
  implicit def onDoubleClickImplicit(func: DoubleClickEvent => Any) =
    addDoubleClickListener(func)
  implicit def doubleClickListener(func: DoubleClickEvent => Any): IDoubleClickListener =
    new IDoubleClickListener {
      def doubleClick(e: DoubleClickEvent) { func(e) }
    }
  def addConcurrentModelListener(l: IConcurrentModelListener) =
    (subject: IConcurrentModel) => subject.addListener(l)
  def concurrentModelListener(addF: Array[AnyRef] => Any = ignore, removeF: Array[AnyRef] => Any = ignore, updateF: Array[AnyRef] => Any = ignore, setContentsF: Array[AnyRef] => Any = ignore): IConcurrentModelListener =
    new IConcurrentModelListener {
      def add(added: Array[AnyRef]) = addF(added)
      def remove(added: Array[AnyRef]) = removeF(added)
      def update(added: Array[AnyRef]) = updateF(added)
      def setContents(added: Array[AnyRef]) = setContentsF(added)
    }
  def onConcurrentModel(add: Array[AnyRef] => Any = ignore, remove: Array[AnyRef] => Any = ignore, update: Array[AnyRef] => Any = ignore, setContents: Array[AnyRef] => Any = ignore) =
    addConcurrentModelListener(concurrentModelListener(add, remove, update, setContents))
  def addCheckStateListener(l: ICheckStateListener) =
    (subject: ICheckable) => subject.addCheckStateListener(l)
  implicit def checkStateListener(func: CheckStateChangedEvent => Any): ICheckStateListener =
    new ICheckStateListener {
      def checkStateChanged(e: CheckStateChangedEvent) { func(e) }
    }
  def onCheckState(func: CheckStateChangedEvent => Any) =
    addCheckStateListener(func)
  def onCheckState(func: => Unit) =
    addCheckStateListener((e: CheckStateChangedEvent) => func)
  implicit def onCheckStateImplicit(func: CheckStateChangedEvent => Any) =
    addCheckStateListener(func)
  def addLabelProviderListener(l: ILabelProviderListener) =
    (subject: IBaseLabelProvider) => subject.addListener(l)
  implicit def labelProviderListener(func: LabelProviderChangedEvent => Any): ILabelProviderListener =
    new ILabelProviderListener {
      def labelProviderChanged(e: LabelProviderChangedEvent) { func(e) }
    }
  def onLabelProvider(func: LabelProviderChangedEvent => Any) =
    addLabelProviderListener(func)
  def onLabelProvider(func: => Unit) =
    addLabelProviderListener((e: LabelProviderChangedEvent) => func)
  implicit def onLabelProviderImplicit(func: LabelProviderChangedEvent => Any) =
    addLabelProviderListener(func)
  implicit def runnableWithProgress(f: IProgressMonitor => Any) =
    new IRunnableWithProgress { override def run(m: IProgressMonitor) { f(m) } }
} | writeonly/JSWT | src/main/scala/pl/writeonly/jswt/scaladsl/XJFace.scala | Scala | unlicense | 14,560 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spot.utilities
object CountryCodes {

  /**
    * Recognised country-code top-level domain suffixes: ISO 3166-1 alpha-2 codes
    * plus a few legacy/special ccTLD entries (e.g. "ac", "an", "eu", "su", "tp",
    * "uk", "krd"). Used for membership tests on domain-name suffixes.
    *
    * Fix: a stray empty-string member (between "sj" and "sk") has been removed,
    * so an empty suffix is no longer classified as a country code.
    */
  val CountryCodes = Set("ac", "ad", "ae", "af", "ag", "ai", "al", "am", "an", "ao", "aq", "ar", "as", "at", "au",
    "aw", "ax", "az", "ba", "bb", "bd", "be", "bf", "bg", "bh", "bi", "bj", "bm", "bn", "bo", "bq", "br", "bs", "bt",
    "bv", "bw", "by", "bz", "ca", "cc", "cd", "cf", "cg", "ch", "ci", "ck", "cl", "cm", "cn", "co", "cr", "cu", "cv",
    "cw", "cx", "cy", "cz", "de", "dj", "dk", "dm", "do", "dz", "ec", "ee", "eg", "eh", "er", "es", "et", "eu", "fi",
    "fj", "fk", "fm", "fo", "fr", "ga", "gb", "gd", "ge", "gf", "gg", "gh", "gi", "gl", "gm", "gn", "gp", "gq", "gr",
    "gs", "gt", "gu", "gw", "gy", "hk", "hm", "hn", "hr", "ht", "hu", "id", "ie", "il", "im", "in", "io", "iq", "ir",
    "is", "it", "je", "jm", "jo", "jp", "ke", "kg", "kh", "ki", "km", "kn", "kp", "kr", "krd", "kw", "ky", "kz", "la",
    "lb", "lc", "li", "lk", "lr", "ls", "lt", "lu", "lv", "ly", "ma", "mc", "md", "me", "mg", "mh", "mk", "ml", "mm",
    "mn", "mo", "mp", "mq", "mr", "ms", "mt", "mu", "mv", "mw", "mx", "my", "mz", "na", "nc", "ne", "nf", "ng", "ni",
    "nl", "no", "np", "nr", "nu", "nz", "om", "pa", "pe", "pf", "pg", "ph", "pk", "pl", "pm", "pn", "pr", "ps", "pt",
    "pw", "py", "qa", "re", "ro", "rs", "ru", "rw", "sa", "sb", "sc", "sd", "se", "sg", "sh", "si", "sj", "sk",
    "sl", "sm", "sn", "so", "sr", "ss", "st", "su", "sv", "sx", "sy", "sz", "tc", "td", "tf", "tg", "th", "tj", "tk",
    "tl", "tm", "tn", "to", "tp", "tr", "tt", "tv", "tw", "tz", "ua", "ug", "uk", "us", "uy", "uz", "va", "vc", "ve",
    "vg", "vi", "vn", "vu", "wf", "ws", "ye", "yt", "za", "zm", "zw")
}
| brandon-edwards/incubator-spot | spot-ml/src/main/scala/org/apache/spot/utilities/CountryCodes.scala | Scala | apache-2.0 | 2,462 |
package com.peterpotts.snake.coercion
object Max extends Coercion[Any] {
  // Each overload yields the larger operand. When the two compare equal the
  // right-hand value is returned — identical to `if (left > right) left else right`,
  // including NaN handling for doubles (any comparison with NaN is false).
  def boolean(left: Boolean, right: Boolean): Any = if (right < left) left else right
  def int(left: Int, right: Int): Any = if (right < left) left else right
  def long(left: Long, right: Long): Any = if (right < left) left else right
  def double(left: Double, right: Double): Any = if (right < left) left else right
  def string(left: String, right: String): Any = if (right < left) left else right
}
| peterpotts/snake | src/main/scala/com/peterpotts/snake/coercion/Max.scala | Scala | mit | 483 |
package models.db
import models.join.Stage
import scalikejdbc._
// One row of the `cell_info` table: a single cell of a map stage, identified by
// (areaId, infoNo, cell), with its display alphabet and start/boss flags.
case class CellInfo(
  areaId: Int,
  infoNo: Int,
  cell: Int,
  alphabet: String,
  start: Boolean,
  boss: Boolean) {
  // Persists this row via the companion; uses the companion's auto session by default.
  def save()(implicit session: DBSession = CellInfo.autoSession): CellInfo = CellInfo.save(this)(session)
  // Deletes this row via the companion.
  def destroy()(implicit session: DBSession = CellInfo.autoSession): Unit = CellInfo.destroy(this)(session)
  // The stage this cell belongs to, derived from (areaId, infoNo).
  lazy val stage: Stage = Stage(areaId, infoNo)
  // Human-readable labels. NOTE(review): these rely on Stage.toString formatting
  // (presumably "area-info") — confirm against the Stage implementation.
  def pointStr: String = s"$stage-$cell"
  def pointAlpha: String = s"$stage-$alphabet"
  def rawPointStr: String = s"$areaId-$infoNo-$cell"
  def rawPointAlpha: String = s"$areaId-$infoNo-$alphabet"
}
// scalikejdbc companion for CellInfo: SQL mapping, a heap-cached snapshot of all
// rows, and CRUD helpers.
object CellInfo extends SQLSyntaxSupport[CellInfo] {
  override val tableName = "cell_info"
  override val columns = Seq("area_id", "info_no", "cell", "alphabet", "start", "boss")
  def apply(ci: SyntaxProvider[CellInfo])(rs: WrappedResultSet): CellInfo = apply(ci.resultName)(rs)
  def apply(ci: ResultName[CellInfo])(rs: WrappedResultSet): CellInfo = autoConstruct(rs, ci)
  val ci = CellInfo.syntax("ci")
  override val autoSession = AutoSession
  /** Heap Cache */
  // NOTE(review): loaded once on first access and never refreshed — rows written
  // later via create/save/destroy are not reflected in `all`, `find`, `findAll`
  // or `countAll`. Confirm the table is effectively static reference data.
  lazy val all: List[CellInfo] = {
    implicit val session = autoSession
    withSQL(select.from(CellInfo as ci)).map(CellInfo(ci.resultName)).list().apply()
  }
  /** from Heap */
  def find(areaId: Int, infoNo: Int, cell: Int): Option[CellInfo] =
    all.find(c => c.areaId == areaId && c.infoNo == infoNo && c.cell == cell)
  // Falls back to a synthetic row with an empty alphabet when the cell is unknown.
  def findOrDefault(areaId: Int, infoNo: Int, cell: Int): CellInfo =
    find(areaId, infoNo, cell).getOrElse(noAlphabet(areaId, infoNo, cell))
  /** from Heap */
  def findAll(): List[CellInfo] = all
  /** from Heap */
  def countAll(): Long = all.size
  // Queries the database directly (bypasses the heap cache).
  def findAllBy(where: SQLSyntax)(implicit session: DBSession = autoSession): List[CellInfo] = {
    withSQL {
      select.from(CellInfo as ci).where.append(sqls"${where}")
    }.map(CellInfo(ci.resultName)).list().apply()
  }
  def countBy(where: SQLSyntax)(implicit session: DBSession = autoSession): Long = {
    withSQL {
      select(sqls"count(1)").from(CellInfo as ci).where.append(sqls"${where}")
    // .get is safe: a COUNT query always yields exactly one row.
    }.map(_.long(1)).single().apply().get
  }
  // Inserts a row and returns the corresponding entity.
  def create(
    areaId: Int,
    infoNo: Int,
    cell: Int,
    alphabet: String,
    start: Boolean,
    boss: Boolean)(implicit session: DBSession = autoSession): CellInfo = {
    withSQL {
      insert.into(CellInfo).columns(
        column.areaId,
        column.infoNo,
        column.cell,
        column.alphabet,
        column.start,
        column.boss
      ).values(
        areaId,
        infoNo,
        cell,
        alphabet,
        start,
        boss
      )
    }.update().apply()
    CellInfo(
      areaId = areaId,
      infoNo = infoNo,
      cell = cell,
      alphabet = alphabet,
      start = start,
      boss = boss)
  }
  // Updates the row matching the (areaId, cell, infoNo) key and returns the entity.
  def save(entity: CellInfo)(implicit session: DBSession = autoSession): CellInfo = {
    withSQL {
      update(CellInfo).set(
        column.areaId -> entity.areaId,
        column.infoNo -> entity.infoNo,
        column.cell -> entity.cell,
        column.alphabet -> entity.alphabet,
        column.start -> entity.start,
        column.boss -> entity.boss
      ).where.eq(column.areaId, entity.areaId).and.eq(column.cell, entity.cell).and.eq(column.infoNo, entity.infoNo)
    }.update().apply()
    entity
  }
  // Deletes the row matching the (areaId, cell, infoNo) key.
  def destroy(entity: CellInfo)(implicit session: DBSession = autoSession): Unit = {
    withSQL {
      delete.from(CellInfo).where.eq(column.areaId, entity.areaId).and.eq(column.cell, entity.cell).and.eq(column.infoNo, entity.infoNo)
    }.update().apply()
  }
  // Synthetic placeholder row used when a cell has no recorded alphabet.
  def noAlphabet(area: Int, info: Int, cell: Int): CellInfo = CellInfo(area, info, cell, "", false, false)
}
| ttdoda/MyFleetGirls | server/app/models/db/CellInfo.scala | Scala | mit | 3,735 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
import java.io.File
import java.util.regex.{ Pattern, PatternSyntaxException }
import sbt.internal.util.AttributeKey
import sbt.internal.util.complete.Parser
import sbt.internal.util.complete.DefaultParsers._
import sbt.internal.util.Util.nilSeq
import sbt.io.IO
import sbt.io.syntax._
/** Shared helpers for sbt's command layer: reading command files, aligning
  * help output, and regex-based help search. */
object CommandUtil {
  // Reads all lines from the given files, dropping blanks and '#' comments.
  def readLines(files: Seq[File]): Seq[String] =
    files flatMap (IO.readLines(_)) flatMap processLine

  // Trims a line; returns None for blank lines and '#' comments.
  def processLine(s: String): Option[String] = {
    val s2 = s.trim; if (ignoreLine(s2)) None else Some(s2)
  }

  def ignoreLine(s: String): Boolean = s.isEmpty || s.startsWith("#")

  private def canRead = (_: File).canRead
  def notReadable(files: Seq[File]): Seq[File] = files filterNot canRead
  def readable(files: Seq[File]): Seq[File] = files filter canRead

  // slightly better fallback in case of older launcher
  def bootDirectory(state: State): File =
    try state.configuration.provider.scalaProvider.launcher.bootDirectory
    catch { case _: NoSuchMethodError => new File(".").getAbsoluteFile }

  // Renders (label, text) pairs as "<pre><label padded to max width><sep><text>".
  def aligned(pre: String, sep: String, in: Seq[(String, String)]): Seq[String] =
    if (in.isEmpty) nilSeq
    else {
      val width = in.iterator.map(_._1.length).max
      for ((a, b) <- in) yield pre + fill(a, width) + sep + b
    }

  // Right-pads s with spaces to at least `size` characters.
  def fill(s: String, size: Int): String = s + " " * math.max(size - s.length, 0)

  // Runs f with the attribute value, or fails the state with `ifMissing`.
  def withAttribute[T](s: State, key: AttributeKey[T], ifMissing: String)(f: T => State): State =
    s get key match {
      case None =>
        s.log.error(ifMissing); s.fail
      case Some(nav) => f(nav)
    }

  // Parser for one space-prefixed argument, with tab-completion examples.
  def singleArgument(exampleStrings: Set[String]): Parser[String] = {
    val arg = (NotSpaceClass ~ any.*) map { case (ns, s) => (ns +: s).mkString }
    token(Space) ~> token(arg examples exampleStrings)
  }

  // Help lookup: exact topic match first, then regex search over all topics.
  def detail(selected: String, detailMap: Map[String, String]): String =
    detailMap.get(selected) match {
      case Some(exactDetail) => exactDetail
      case None =>
        try {
          val details = searchHelp(selected, detailMap)
          if (details.isEmpty)
            "No matches for regular expression '" + selected + "'."
          else
            layoutDetails(details)
        } catch {
          case pse: PatternSyntaxException =>
            sys.error("Invalid regular expression (java.util.regex syntax).\n" + pse.getMessage)
        }
    }

  // Returns topics whose key or content matches the (case-insensitive) regex,
  // with matches highlighted and keys bolded.
  def searchHelp(selected: String, detailMap: Map[String, String]): Map[String, String] = {
    val pattern = Pattern.compile(selected, HelpPatternFlags)
    detailMap flatMap {
      case (k, v) =>
        val contentMatches = Highlight.showMatches(pattern)(v)
        val keyMatches = Highlight.showMatches(pattern)(k)
        val keyString = Highlight.bold(keyMatches getOrElse k)
        val contentString = contentMatches getOrElse v
        if (keyMatches.isDefined || contentMatches.isDefined)
          Seq((keyString, contentString))
        else
          nilSeq
    }
  }

  // Formats "topic\n\n  body" sections separated by blank lines.
  def layoutDetails(details: Map[String, String]): String =
    details.map { case (k, v) => k + "\n\n  " + v }.mkString("\n", "\n\n", "\n")

  final val HelpPatternFlags = Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE

  // Heuristic: a directory is an sbt build if it has project/ or any *.sbt file.
  private[sbt] def isSbtBuild(baseDir: File) =
    (baseDir / "project").exists() || (baseDir * "*.sbt").get().nonEmpty
}
| sbt/sbt | main-command/src/main/scala/sbt/CommandUtil.scala | Scala | apache-2.0 | 3,434 |
import scala.util.Random
import scala.math
/**
 * Restricted Boltzmann Machine trained with contrastive divergence (CD-k).
 *
 * @param N         number of training examples; scales the gradient updates
 * @param n_visible number of visible (input) units
 * @param n_hidden  number of hidden units
 * @param _W        optional initial weight matrix (n_hidden x n_visible); random when null
 * @param _hbias    optional initial hidden-bias vector; zeros when null
 * @param _vbias    optional initial visible-bias vector; zeros when null
 * @param rng       random source; a fixed-seed Random(1234) is substituted when null
 */
class RBM(val N: Int, val n_visible: Int, val n_hidden: Int,
          _W: Array[Array[Double]]=null, _hbias: Array[Double]=null, _vbias: Array[Double]=null,
          var rng: Random=null) {

  var W: Array[Array[Double]] = Array.ofDim[Double](n_hidden, n_visible)
  var hbias: Array[Double] = new Array[Double](n_hidden)
  var vbias: Array[Double] = new Array[Double](n_visible)

  if(rng == null) rng = new Random(1234)

  if(_W == null) {
    // BUG FIX: the original computed `1 / n_visible` with integer division,
    // which is 0 for any n_visible > 1, so every weight started at exactly 0
    // and the positive/negative phases could never break symmetry. Use
    // floating-point division so weights start uniformly in (-a, a).
    val a: Double = 1.0 / n_visible
    for(i <- 0 until n_hidden; j <- 0 until n_visible)
      W(i)(j) = uniform(-a, a)
  } else {
    W = _W
  }

  // Biases default to zero when not supplied. (New arrays are already
  // zero-filled; the explicit loops are kept for clarity of intent.)
  if(_hbias == null) {
    for(i <- 0 until n_hidden) hbias(i) = 0
  } else {
    hbias = _hbias
  }

  if(_vbias == null) {
    for(i <- 0 until n_visible) vbias(i) = 0
  } else {
    vbias = _vbias
  }

  /** Uniform sample in [min, max). */
  def uniform(min: Double, max: Double): Double = rng.nextDouble() * (max - min) + min

  /** Number of successes in n Bernoulli(p) trials; 0 when p is outside [0, 1]. */
  def binomial(n: Int, p: Double): Int = {
    if(p < 0 || p > 1) return 0

    var c: Int = 0
    for(i <- 0 until n) {
      if(rng.nextDouble() < p) c += 1
    }
    c
  }

  /** Logistic sigmoid 1 / (1 + e^-x). */
  def sigmoid(x: Double): Double = 1.0 / (1.0 + math.pow(math.E, -x))

  /**
   * One CD-k update on a single binary training example. Mutates W, hbias
   * and vbias in place, scaling each gradient by lr / N.
   *
   * @param input binary visible vector of length n_visible
   * @param lr    learning rate
   * @param k     number of Gibbs steps for the negative phase
   */
  def contrastive_divergence(input: Array[Int], lr: Double, k: Int) {
    val ph_mean: Array[Double] = new Array[Double](n_hidden)
    val ph_sample: Array[Int] = new Array[Int](n_hidden)
    val nv_means: Array[Double] = new Array[Double](n_visible)
    val nv_samples: Array[Int] = new Array[Int](n_visible)
    val nh_means: Array[Double] = new Array[Double](n_hidden)
    val nh_samples: Array[Int] = new Array[Int](n_hidden)

    /* CD-k: one positive-phase sample, then k Gibbs steps for the negative phase */
    sample_h_given_v(input, ph_mean, ph_sample)

    for(step <- 0 until k) {
      if(step == 0) {
        gibbs_hvh(ph_sample, nv_means, nv_samples, nh_means, nh_samples)
      } else {
        gibbs_hvh(nh_samples, nv_means, nv_samples, nh_means, nh_samples)
      }
    }

    for(i <- 0 until n_hidden) {
      for(j <- 0 until n_visible) {
        // Weight gradient uses the hidden mean for the positive phase and
        // means/samples for the negative phase (as in the original code).
        W(i)(j) += lr * (ph_mean(i) * input(j) - nh_means(i) * nv_samples(j)) / N
      }
      hbias(i) += lr * (ph_sample(i) - nh_means(i)) / N
    }

    for(i <- 0 until n_visible) {
      vbias(i) += lr * (input(i) - nv_samples(i)) / N
    }
  }

  /** Samples hidden units given a visible sample; fills mean and sample in place. */
  def sample_h_given_v(v0_sample: Array[Int], mean: Array[Double], sample: Array[Int]) {
    for(i <- 0 until n_hidden) {
      mean(i) = propup(v0_sample, W(i), hbias(i))
      sample(i) = binomial(1, mean(i))
    }
  }

  /** Samples visible units given a hidden sample; fills mean and sample in place. */
  def sample_v_given_h(h0_sample: Array[Int], mean: Array[Double], sample: Array[Int]) {
    for(i <- 0 until n_visible) {
      mean(i) = propdown(h0_sample, i, vbias(i))
      sample(i) = binomial(1, mean(i))
    }
  }

  /** P(h = 1 | v) for one hidden unit with weight row w and bias b. */
  def propup(v: Array[Int], w: Array[Double], b: Double): Double = {
    var pre_sigmoid_activation: Double = 0
    for(j <- 0 until n_visible) {
      pre_sigmoid_activation += w(j) * v(j)
    }
    pre_sigmoid_activation += b
    sigmoid(pre_sigmoid_activation)
  }

  /** P(v_i = 1 | h) for visible unit i with bias b. */
  def propdown(h: Array[Int], i: Int, b: Double): Double = {
    var pre_sigmoid_activation: Double = 0
    for(j <- 0 until n_hidden) {
      pre_sigmoid_activation += W(j)(i) * h(j)
    }
    pre_sigmoid_activation += b
    sigmoid(pre_sigmoid_activation)
  }

  /** One Gibbs step h -> v -> h; fills all four output arrays in place. */
  def gibbs_hvh(h0_sample: Array[Int], nv_means: Array[Double], nv_samples: Array[Int], nh_means: Array[Double], nh_samples: Array[Int]) {
    sample_v_given_h(h0_sample, nv_means, nv_samples)
    sample_h_given_v(nv_samples, nh_means, nh_samples)
  }

  /** Deterministic reconstruction of v through the hidden layer (means only, no sampling). */
  def reconstruct(v: Array[Int], reconstructed_v: Array[Double]) {
    val h: Array[Double] = new Array[Double](n_hidden)
    var pre_sigmoid_activation: Double = 0

    for(i <- 0 until n_hidden) {
      h(i) = propup(v, W(i), hbias(i))
    }

    for(i <- 0 until n_visible) {
      pre_sigmoid_activation = 0
      for(j <- 0 until n_hidden) {
        pre_sigmoid_activation += W(j)(i) * h(j)
      }
      pre_sigmoid_activation += vbias(i)

      reconstructed_v(i) = sigmoid(pre_sigmoid_activation)
    }
  }
}
/** Demo driver: trains a small RBM on toy binary patterns and prints the
  * reconstruction probabilities for two test patterns. */
object RBM {
  def test_rbm() {
    val rng: Random = new Random(123)

    // NOTE(review): learning_rate is never reassigned; could be a val.
    var learning_rate: Double = 0.1
    val training_epochs: Int = 1000
    val k: Int = 1

    val train_N: Int = 6;
    val test_N: Int = 2
    val n_visible: Int = 6
    val n_hidden: Int = 3

    // Two clusters of binary patterns: features in the first vs. second half.
    val train_X: Array[Array[Int]] = Array(
			Array(1, 1, 1, 0, 0, 0),
			Array(1, 0, 1, 0, 0, 0),
			Array(1, 1, 1, 0, 0, 0),
			Array(0, 0, 1, 1, 1, 0),
			Array(0, 0, 1, 0, 1, 0),
			Array(0, 0, 1, 1, 1, 0)
    )

    val rbm: RBM = new RBM(train_N, n_visible, n_hidden, rng=rng)

    // NOTE(review): these vars are shadowed by the `for` binders below and unused.
    var i: Int = 0
    var j: Int = 0

    // train
    var epoch: Int = 0
    for(epoch <- 0 until training_epochs) {
      for(i <- 0 until train_N) {
        rbm.contrastive_divergence(train_X(i), learning_rate, k)
      }
    }

    // test data
    val test_X: Array[Array[Int]] = Array(
			Array(1, 1, 0, 0, 0, 0),
			Array(0, 0, 0, 1, 1, 0)
    )

    val reconstructed_X: Array[Array[Double]] = Array.ofDim[Double](test_N, n_visible)
    // Print each test pattern's reconstruction, one row per pattern.
    for(i <- 0 until test_N) {
      rbm.reconstruct(test_X(i), reconstructed_X(i))
      for(j <- 0 until n_visible) {
        printf("%.5f ", reconstructed_X(i)(j))
      }
      println()
    }
  }

  def main(args: Array[String]) {
    test_rbm()
  }
} | Gunaatita/ScalaDeepLearning | RBM.scala | Scala | gpl-2.0 | 5,669 |
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import com.sksamuel.elastic4s.searches.queries.QueryDefinition
import com.sksamuel.exts.OptionImplicits._
/**
 * A "filters" aggregation with named (keyed) buckets: each entry in `filters`
 * maps a bucket key to the query that selects its documents.
 *
 * All builder methods return an updated copy (immutable builder style).
 */
case class KeyedFiltersAggregationDefinition(name: String,
                                             filters: Iterable[(String, QueryDefinition)],
                                             otherBucket: Option[Boolean] = None,
                                             otherBucketKey: Option[String] = None,
                                             pipelines: Seq[PipelineAggregationDefinition] = Nil,
                                             subaggs: Seq[AggregationDefinition] = Nil,
                                             metadata: Map[String, AnyRef] = Map.empty)
  extends AggregationDefinition {

  type T = KeyedFiltersAggregationDefinition

  // Enables/names the bucket collecting documents matched by none of the filters.
  def otherBucket(otherBucket: Boolean): T = copy(otherBucket = otherBucket.some)
  def otherBucketKey(otherBucketKey: String): T = copy(otherBucketKey = otherBucketKey.some)

  override def pipelines(pipelines: Iterable[PipelineAggregationDefinition]): T = copy(pipelines = pipelines.toSeq)
  override def subAggregations(aggs: Iterable[AggregationDefinition]): T = copy(subaggs = aggs.toSeq)
  // BUG FIX: previously `copy(metadata = metadata)` — a self-assignment that
  // silently discarded the caller's map and returned an unchanged copy.
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
| tyth/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/KeyedFiltersAggregationDefinition.scala | Scala | apache-2.0 | 1,383 |
/*
* Copyright (c) 2014. Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.postprocessing
import org.apache.commons.configuration.SubnodeConfiguration
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.avro.ADAMGenotype
import org.bdgenomics.adam.models.ADAMVariantContext
import org.bdgenomics.avocado.stats.AvocadoConfigAndStats
/**
 * A named stage in the variant post-processing pipeline. Implementations
 * transform an RDD of variant contexts (e.g. by filtering genotypes).
 */
private[postprocessing] trait PostprocessingStage {

  // Name used to identify/configure this stage.
  val stageName: String

  /**
   * Runs this stage.
   *
   * @param rdd    input variant contexts
   * @param stats  global avocado configuration and statistics
   * @param config configuration subtree for this stage
   * @return transformed variant contexts
   */
  def apply(rdd: RDD[ADAMVariantContext],
            stats: AvocadoConfigAndStats,
            config: SubnodeConfiguration): RDD[ADAMVariantContext]
}
/**
 * Filters genotypes within each variant context; contexts whose genotypes are
 * all filtered out are dropped from the RDD entirely.
 */
private[postprocessing] trait GenotypeFilter extends Serializable {

  /**
   * Abstract method that must be implemented. Implements basic filtering on genotypes that
   * are inside a single variant context.
   *
   * @param genotypes Genotypes to filter.
   * @return Filtered genotypes.
   */
  def filterGenotypes(genotypes: Seq[ADAMGenotype]): Seq[ADAMGenotype]

  /**
   * Applies filtering and creates a new variant context, if called genotypes still exist.
   * If all genotypes have been filtered out, then an empty option (None) is returned.
   *
   * @param vc Variant context on which to filter.
   * @return If not all genotypes have been filtered out, a new variant context, else none.
   */
  def createNewVC(vc: ADAMVariantContext): Option[ADAMVariantContext] = {
    val filteredGt = filterGenotypes(vc.genotypes)

    if (filteredGt.length > 0) {
      Some(ADAMVariantContext.buildFromGenotypes(filteredGt))
    } else {
      None
    }
  }

  /**
   * Applies the filtering described above across a full RDD.
   *
   * @param rdd RDD of variant contexts.
   * @return An RDD containing variant contexts after filtering.
   */
  def filter(rdd: RDD[ADAMVariantContext]): RDD[ADAMVariantContext] = {
    rdd.flatMap(vc => createNewVC(vc))
  }
}
| hammerlab/avocado | avocado-core/src/main/scala/org/bdgenomics/avocado/postprocessing/PostprocessingStage.scala | Scala | apache-2.0 | 2,423 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.killrweather
import java.io.{BufferedInputStream, FileInputStream, File => JFile}
import java.util.zip.GZIPInputStream
import scala.util.Try
import akka.japi.Util.immutableSeq
import akka.http.scaladsl.model.{ContentTypes, HttpHeader, RequestEntity}
import com.typesafe.config.ConfigFactory
import Weather.Day
/**
 * Mixin with client-side configuration read from `application.conf`
 * (HTTP endpoint, Kafka connection, data-file locations).
 */
private[killrweather] trait ClientHelper {
  import Sources._

  private val config = ConfigFactory.load

  protected val BasePort = 2550
  protected val HttpHost = config.getString("killrweather.http.host")
  protected val HttpPort = config.getInt("killrweather.http.port")
  // Directory scanned for raw data files, and the expected file extension.
  protected val DefaultPath = config.getString("killrweather.data.load.path")
  protected val DefaultExtension = config.getString("killrweather.data.file.extension")
  protected val KafkaHosts = immutableSeq(config.getStringList("kafka.hosts")).toSet
  protected val KafkaTopic = config.getString("kafka.topic.raw")
  protected val KafkaKey = config.getString("kafka.group.id")
  protected val KafkaBatchSendSize = config.getInt("kafka.batch.send.size")

  // Eagerly lists DefaultPath at construction time; files added later are not
  // picked up. The "./" prefix is stripped to produce a clean relative path.
  protected val initialData: Set[FileSource] = new JFile(DefaultPath).list.collect {
    case name if name.endsWith(DefaultExtension) =>
      FileSource(new JFile(s"$DefaultPath/$name".replace("./", "")))
  }.toSet
}
/**
 * Input-source abstractions: weather data can arrive either as file paths
 * named in an "X-DATA-FEED" HTTP header, or inline in the request entity.
 */
private[killrweather] object Sources {

  sealed trait HttpSource extends Serializable {
    def header: HttpHeader
    def entity: RequestEntity
  }

  object HttpSource {
    // NOTE(review): a two-parameter `unapply` is unconventional — this is used
    // as a plain extractor method, not in pattern matches; confirm call sites.
    def unapply[T](headers: Seq[HttpHeader], entity: RequestEntity): Option[HttpSource] =
      headers.collectFirst {
        case header if fileSource(header)         => HeaderSource(header, entity)
        case header if entitySource(header, entity) => EntitySource(header, entity)
      }
  }

  // Data carried inline in the request body.
  case class EntitySource[T](header: HttpHeader, entity: RequestEntity) extends HttpSource {
    def extract: Iterator[T] = Iterator.empty // not supported yet
  }

  // Data named by comma-separated file paths in the header value; only files
  // that exist on disk are yielded.
  case class HeaderSource(header: HttpHeader, entity: RequestEntity, sources: Array[String]) extends HttpSource {
    def extract: Iterator[FileSource] = sources.map(new JFile(_)).filter(_.exists).map(FileSource(_)).toIterator
  }

  object HeaderSource {
    def apply(header: HttpHeader, entity: RequestEntity): HeaderSource =
      HeaderSource(header, entity, header.value.split(","))
  }

  // In-memory contents of one data file, one raw line per element.
  case class FileSource(data: Array[String], name: String) {
    def days: Seq[Day] = data.map(Day(_)).toSeq
  }

  object FileSource {
    // Reads the whole file eagerly (transparently gunzipping *.gz) and closes it.
    def apply(file: JFile): FileSource = {
      val src = file match {
        case f if f.getAbsolutePath endsWith ".gz" =>
          scala.io.Source.fromInputStream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file))), "utf-8")
        case f =>
          scala.io.Source.fromFile(file, "utf-8")
      }

      val read = src.getLines.toList
      Try(src.close())
      FileSource(read.toArray, file.getName)
    }
  }

  private def fileSource(h: HttpHeader): Boolean =
    h.name == "X-DATA-FEED" && h.value.nonEmpty && h.value.contains(JFile.separator) // more validation..

  private def entitySource(h: HttpHeader, e: RequestEntity): Boolean =
    h.name == "X-DATA-FEED" && e.contentType == ContentTypes.`application/json` // more validation..
}
| wgpshashank/killrweather | killrweather-clients/src/main/scala/com/datastax/killrweather/ClientHelper.scala | Scala | apache-2.0 | 4,011 |
package com.github.libsml.test
import java.util.StringTokenizer
import scala.collection.mutable.ArrayBuffer
/**
* Created by huangyu on 15/8/31.
*/
/** Tiny scratch/demo entry point: tokenizes a comma-separated string with
  * java.util.StringTokenizer (which skips empty fields) and prints each token. */
object Submit {

  def main(args: Array[String]): Unit = {
    val value = "a,,,"
    val tokenizer = new StringTokenizer(value, ",")

    // StringTokenizer collapses consecutive delimiters, so "a,,," yields just "a".
    Iterator
      .continually(tokenizer)
      .takeWhile(_.hasMoreTokens)
      .map(_.nextToken())
      .foreach(token => println("s:" + token))
  }
}
| libsml/libsml | aggregation/src/test/scala/com/github/libsml/test/Submit.scala | Scala | apache-2.0 | 469 |
/*
* Sentilab SARE: a Sentiment Analysis Research Environment
* Copyright (C) 2013 Sabanci University Sentilab
* http://sentilab.sabanciuniv.edu
*
* This file is part of SARE.
*
* SARE is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* SARE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with SARE. If not, see <http://www.gnu.org/licenses/>.
*/
package edu.sabanciuniv.sentilab.sare.models.aspect.extraction
import scala.collection.JavaConversions._
import org.apache.commons.lang3.Validate._
import edu.sabanciuniv.sentilab.sare.models.base.document._
import edu.sabanciuniv.sentilab.sare.models.base.documentStore._
import edu.sabanciuniv.sentilab.core.models.UserInaccessibleModel
import edu.sabanciuniv.sentilab.sare.models.aspect.AspectExpression
import edu.sabanciuniv.sentilab.utils.CannedMessages
/**
* @author Mus'ab Husaini
*/
/**
 * Shadow document used for aspect-expression extraction: wraps a base
 * full-text document and, whenever both the base document and store are set,
 * scans the parsed sentences to collect candidate aspect expressions into
 * the owning store.
 *
 * @author Mus'ab Husaini
 */
class AspectExprExtrDocument(
	baseDocument: FullTextDocument = null,
	store: AspectExprExtrDocumentStore = null
)
	extends ShadowFullTextDocument(baseDocument)
	with UserInaccessibleModel {

	setStore(store)
	
	// Harvests candidate aspect expressions from the base document's sentences.
	// Only runs when both base document and store have the expected types.
	private def initialize = {
		(Option(getBaseDocument), Option(getStore)) match {
			case (Some(base: FullTextDocument), Some(store: AspectExprExtrDocumentStore)) => {
				// Skip very long sentences (>= 50 tokens).
				base.getParsedContent.getSentences filter { _.getTokens.size < 50 } foreach { sentence =>
					// Candidate tokens: non-stopword nouns longer than 2 chars,
					// consisting only of letters, apostrophes and hyphens.
					sentence.getTokens filter { token =>
						!getExtractorStore.stopWords.exists { _ equalsIgnoreCase token.getWord } &&
						token.getWord.length > 2 &&
						token.getPosTag.isNoun &&
						token.getWord.matches("[a-zA-Z\\'\\-]+")
					} foreach { token =>
						token.setIsLemmatized(true)
						// Reuse an existing candidate expression for this token, or create one.
						val exp = store.getCandidateExpressions find {
							_ equals token
						} getOrElse new ContextualizedAspectExpression(token, getExtractorStore)
						exp addSentence sentence
					}
				}
			}
			case _ => ()
		}
		
		this
	}
	
	// Re-run extraction whenever the store changes.
	override def setStore(store: PersistentDocumentStore) = if (store != getStore) {
		super.setStore(store)
		initialize
	} else this
	
	// Re-run extraction whenever the base document changes.
	override def setBaseDocument(base: PersistentDocument) = if (base != getBaseDocument) {
		super.setBaseDocument(base)
		initialize
	} else this
	
	// The store downcast to its concrete type, or null if it is of another type.
	def getExtractorStore = getStore match {
		case store: AspectExprExtrDocumentStore => store
		case _ => null
	}
	
	override def getAccessible = getFullTextDocument
} | musabhusaini/sare | sare-lib/modules/sare-alex/src/main/scala/edu/sabanciuniv/sentilab/sare/models/aspect/extraction/AspectExprExtrDocument.scala | Scala | gpl-3.0 | 2,844 |
/*
* Code Pulse: A real-time code coverage testing tool. For more information
* see http://code-pulse.com
*
* Copyright (C) 2014 Applied Visions - http://securedecisions.avi.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
/** sbt meta-build: wires in the local sbt-betterzip plugin from the filesystem
  * and adds compression/IO libraries needed by the build itself. */
object PluginDef extends Build {
	// see http://stackoverflow.com/questions/8568821/in-sbt-how-do-you-add-a-plugin-thats-in-the-local-filesystem
	lazy override val projects = Seq(root)
	lazy val root = Project("plugins", file(".")).dependsOn(betterzipPlugin).settings(
		sbt.Keys.libraryDependencies += "org.apache.commons" % "commons-compress" % "1.6",
		sbt.Keys.libraryDependencies += "commons-io" % "commons-io" % "2.1"
	)
	// Plugin source checked out next to this build definition.
	lazy val betterzipPlugin = RootProject(file("sbt-betterzip"))
} | secdec/codepulse | project/project/Build.scala | Scala | apache-2.0 | 1,250 |
/**
* Created by Irina on 8/13/14.
*/
package org.scalatrain
import org.specs2.ScalaCheck
import org.scalacheck.{Prop, Gen}
import Prop.forAll
import Gen.choose
import org.specs2.mutable._
/** Property-based specs2 suite for Time: construction bounds, fromMinutes,
  * subtraction, and conversion to minutes. */
class TimeSpec extends Specification with ScalaCheck {

  "Calling fromMinutes" should {
    "throw an IllegalArgumentException for negative minutes" in {
      forAll(choose(Int.MinValue, -1)) { (minutes: Int) =>
        Time fromMinutes minutes must throwA[IllegalArgumentException]
      }
    }
    "return a correctly initialized Time instance for minutes within [0, 24*60-1]" in {
      forAll(choose(0, 24 * 60 - 1)) { (minutes: Int) =>
        val time = Time fromMinutes minutes
        // hours/minutes must be the quotient/remainder of division by 60
        time.hours mustEqual minutes / 60
        time.minutes mustEqual minutes % 60
      }
    }
  }

  "Creating Time" should {
    // Constructor must reject out-of-range hours [0, 24) and minutes [0, 60).
    "throw an IllegalArgumentException for negative hours" in {
      forAll(choose(Int.MinValue, -1)) { (hours: Int) =>
        new Time(hours, 0) must throwA[IllegalArgumentException]
      }
    }
    "throw an IllegalArgumentException for hours >= 24" in {
      forAll(choose(24, Int.MaxValue)) { (hours: Int) =>
        new Time(hours, 0) must throwA[IllegalArgumentException]
      }
    }
    "throw an IllegalArgumentException for negative minutes" in {
      forAll(choose(Int.MinValue, -1)) { (minutes: Int) =>
        new Time(0, minutes) must throwA[IllegalArgumentException]
      }
    }
    "throw an IllegalArgumentException for minutes >= 60" in {
      forAll(choose(60, Int.MaxValue)) { (minutes: Int) =>
        new Time(0, minutes) must throwA[IllegalArgumentException]
      }
    }
    "return an instance with correct defaults" in {
      val time = new Time
      time.hours mustEqual 0
      time.minutes mustEqual 0
    }
  }

  "Calling - " should {
    val time1 = new Time(2, 20)
    val time2 = new Time(1, 10)
    "throw an IllegalArgumentException for null that" in {
      time1 - null must throwA[IllegalArgumentException]
    }
    // Difference is signed, expressed in minutes.
    "return the corect time difference" in {
      time1 - time2 mustEqual 70
      time2 - time1 mustEqual -70
    }
  }

  "Calling asMinutes" should {
    "return the correct value" in {
      new Time(0, 10).asMinutes mustEqual 10
      new Time(1, 10).asMinutes mustEqual 70
    }
  }
}
| onoprodum/myScalatrain | src/test/scala/TimeSpec.scala | Scala | gpl-2.0 | 2,269 |
package varys.framework.master
import akka.actor.ActorRef
import java.util.Date
import scala.collection.mutable.{HashSet}
/**
 * Mutable bookkeeping for one client registered with the Varys master:
 * identity, lifetime, and the coflows it owns.
 */
private[varys] class ClientInfo(
    val startTime: Long,
    val id: String,
    val host: String,
    val commPort: Int,
    val submitDate: Date,
    val actor: ActorRef) {

  /** OS user that launched this process. */
  val user = System.getProperty("user.name", "<unknown>")

  /** End-of-life timestamp; -1 marks a client that is still running. */
  var endTime = -1L

  /** Coflows registered by this client. */
  var coflows = new HashSet[CoflowInfo]

  /** Stamps the current wall-clock time as this client's end time. */
  def markFinished() {
    endTime = System.currentTimeMillis()
  }

  /** Elapsed lifetime in ms; keeps growing until markFinished() is called. */
  def duration: Long =
    if (endTime == -1L) System.currentTimeMillis() - startTime
    else endTime - startTime

  /** Associates a coflow with this client. */
  def addCoflow(coflow: CoflowInfo) {
    coflows += coflow
  }

  override def toString: String = s"ClientInfo($id[$host:$commPort])"
}
| frankfzw/varys | core/src/main/scala/varys/framework/master/ClientInfo.scala | Scala | apache-2.0 | 807 |
package org.thp.cortex.models
import java.util.Date
import javax.inject.{ Inject, Singleton }
import scala.collection.immutable
import play.api.libs.json.JsObject
import play.api.{ Configuration, Logger }
import org.elastic4play.models.{ Attribute, AttributeDef, AttributeFormat, BaseModelDef, EntityDef, EnumerationAttributeFormat, ListEnumerationAttributeFormat, ModelDef, MultiAttributeFormat, ObjectAttributeFormat, OptionalAttributeFormat, StringAttributeFormat, AttributeOption ⇒ O }
import org.elastic4play.services.AuditableAction
import org.elastic4play.services.JsonFormat.auditableActionFormat
/** Mixin for models whose changes are recorded in the audit trail; exposes
  * which attributes are audited and projects an update's JSON down to them. */
trait AuditedModel { self: BaseModelDef ⇒
  def attributes: Seq[Attribute[_]]

  // Audited attributes indexed by name (attributes marked unaudited are dropped).
  lazy val auditedAttributes: Map[String, Attribute[_]] =
    attributes
      .collect { case a if !a.isUnaudited ⇒ a.attributeName → a }
      .toMap

  // Keeps only fields whose first path segment (before '.') is audited,
  // rebuilding dotted names like "a.b.c" into nested JSON objects.
  def selectAuditedAttributes(attrs: JsObject) = JsObject {
    attrs.fields.flatMap {
      case (attrName, value) ⇒
        val attrNames = attrName.split("\\.").toSeq
        auditedAttributes.get(attrNames.head).map { _ ⇒
          val reverseNames = attrNames.reverse
          reverseNames.drop(1).foldLeft(reverseNames.head → value)((jsTuple, name) ⇒ name → JsObject(Seq(jsTuple)))
        }
    }
  }
}
/** Attribute declarations shared by the audit model and audit entities:
  * what happened, to which object, when, and within which request. */
trait AuditAttributes { _: AttributeDef ⇒
  def detailsAttributes: Seq[Attribute[_]]

  val operation: A[AuditableAction.Value] = attribute("operation", AttributeFormat.enumFmt(AuditableAction), "Operation", O.readonly)
  val details: A[JsObject] = attribute("details", AttributeFormat.objectFmt(detailsAttributes), "Details", JsObject.empty, O.readonly)
  val otherDetails: A[Option[String]] = optionalAttribute("otherDetails", AttributeFormat.textFmt, "Other details", O.readonly)
  val objectType: A[String] = attribute("objectType", AttributeFormat.stringFmt, "Table affected by the operation", O.readonly)
  val objectId: A[String] = attribute("objectId", AttributeFormat.stringFmt, "Object targeted by the operation", O.readonly)
  val base: A[Boolean] = attribute("base", AttributeFormat.booleanFmt, "Indicates if this operation is the first done for a http query", O.readonly)
  val startDate: A[Date] = attribute("startDate", AttributeFormat.dateFmt, "Date and time of the operation", new Date, O.readonly)
  val rootId: A[String] = attribute("rootId", AttributeFormat.stringFmt, "Root element id (routing id)", O.readonly)
  val requestId: A[String] = attribute("requestId", AttributeFormat.stringFmt, "Id of the request that do the operation", O.readonly)
}
/** Elasticsearch model for audit entries. Its "details" object schema is
  * computed by merging the attribute schemas of every audited model into a
  * single compatible shape (incompatible attributes are logged and dropped). */
@Singleton
class AuditModel(
    auditName: String,
    auditedModels: immutable.Set[AuditedModel]) extends ModelDef[AuditModel, Audit](auditName, "Audit", "/audit") with AuditAttributes {

  @Inject() def this(
      configuration: Configuration,
      auditedModels: immutable.Set[AuditedModel]) =
    this(
      configuration.get[String]("audit.name"),
      auditedModels)

  private[AuditModel] lazy val logger = Logger(getClass)

  // Computes the most general format compatible with both inputs:
  // optionality is stripped, multi-formats merge element-wise, enumerations
  // widen to strings, objects merge field sets; None when irreconcilable.
  def mergeAttributeFormat(context: String, format1: AttributeFormat[_], format2: AttributeFormat[_]): Option[AttributeFormat[_]] = {
    (format1, format2) match {
      case (OptionalAttributeFormat(f1), f2) ⇒ mergeAttributeFormat(context, f1, f2)
      case (f1, OptionalAttributeFormat(f2)) ⇒ mergeAttributeFormat(context, f1, f2)
      case (MultiAttributeFormat(f1), MultiAttributeFormat(f2)) ⇒ mergeAttributeFormat(context, f1, f2).map(MultiAttributeFormat(_))
      case (f1, EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_)) ⇒ mergeAttributeFormat(context, f1, StringAttributeFormat)
      case (EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_), f2) ⇒ mergeAttributeFormat(context, StringAttributeFormat, f2)
      case (ObjectAttributeFormat(subAttributes1), ObjectAttributeFormat(subAttributes2)) ⇒ mergeAttributes(context, subAttributes1 ++ subAttributes2)
      case (f1, f2) if f1 == f2 ⇒ Some(f1)
      case (f1, f2) ⇒
        logger.warn(s"Attribute $f1 != $f2")
        None
    }
  }

  // Groups attributes by name and merges same-named formats; any attribute
  // whose formats cannot be merged makes the whole object merge fail (None).
  def mergeAttributes(context: String, attributes: Seq[Attribute[_]]): Option[ObjectAttributeFormat] = {
    val mergeAttributes: Iterable[Option[Attribute[_]]] = attributes
      .groupBy(_.attributeName)
      .map {
        case (_name, _attributes) ⇒
          _attributes
            .map(a ⇒ Some(a.format))
            .reduce[Option[AttributeFormat[_]]] {
              case (Some(f1), Some(f2)) ⇒ mergeAttributeFormat(context + "." + _name, f1, f2)
              case _                    ⇒ None
            }
            .map {
              // Merged attributes become optional unless already optional/multi.
              case oaf: OptionalAttributeFormat[_] ⇒ oaf: AttributeFormat[_]
              case maf: MultiAttributeFormat[_]    ⇒ maf: AttributeFormat[_]
              case f                               ⇒ OptionalAttributeFormat(f): AttributeFormat[_]
            }
            .map(format ⇒ Attribute("audit", _name, format, Nil, None, ""))
            .orElse {
              logger.error(s"Mapping is not consistent on attribute $context:\n${_attributes.map(a ⇒ a.modelName + "/" + a.attributeName + ": " + a.format.name).mkString("\n")}")
              None
            }
      }
    if (mergeAttributes.exists(_.isEmpty))
      None
    else
      Some(ObjectAttributeFormat(mergeAttributes.flatten.toSeq))
  }

  // Schema of the audit "details" field: the merge of all audited, model-level
  // attributes across all registered audited models (empty on merge failure).
  def detailsAttributes: Seq[Attribute[_]] = {
    mergeAttributes("audit", auditedModels
      .flatMap(_.attributes)
      .filter(a ⇒ a.isModel && !a.isUnaudited)
      .toSeq)
      .map(_.subAttributes)
      .getOrElse(Nil)
  }

  override def apply(attributes: JsObject): Audit = new Audit(this, attributes)
}
/** One persisted audit entry; its details schema lives on the model, so the
  * entity itself declares none. */
class Audit(model: AuditModel, attributes: JsObject) extends EntityDef[AuditModel, Audit](model, attributes) with AuditAttributes {
  def detailsAttributes = Nil
} | CERT-BDF/Cortex | app/org/thp/cortex/models/Audit.scala | Scala | agpl-3.0 | 5,980 |
package elegans
import Cells._
import Model._
import Schedules._
import scala.collection.mutable.ListBuffer
import scala.swing._
import scala.swing.event._
import java.awt.image.BufferedImage
import java.io.File
import javax.imageio.ImageIO
object Visualization {
  /** Accumulates one PNG snapshot of the cell graph per simulation step and
    * shows them in a slider UI. Shelling out requires `dot` (Graphviz),
    * `convert` (ImageMagick, when resizing) and `rm` on the PATH. */
  class Visualizer {
    val images = new ListBuffer[BufferedImage]()
    val macrosteps = new ListBuffer[MacroStep]()
    val microsteps = new ListBuffer[MicroStep]()

    def microSnapshot(cells: List[Cell], schedule: MicroStep) {
      snapshot(cells, Left(schedule))
    }

    def macroSnapshot(cells: List[Cell], schedule: MacroStep) {
      snapshot(cells, Right(schedule))
    }

    // Renders the cells to graph.png via Graphviz, loads the image into
    // memory, records the step, and deletes the temporary files.
    private def snapshot(cells: List[Cell], schedule: Either[MicroStep, MacroStep]) {
      import Graphs._
      val graph = new DotConverter(cells)
      val dotPath = "graph.dot"
      val imagePath = "graph.png"
      graph.writeFile("graph.dot")

      import scala.sys.process._
      ("dot -Tpng -o " + imagePath + " " + dotPath).!!

      // Optionally shrink to a fraction of the screen (4/5 of its height).
      if (Settings.resizeImages) {
        val size = java.awt.Toolkit.getDefaultToolkit().getScreenSize()
        ("convert -resize " + (size.getWidth().toDouble * Settings.resizeRatio) + "x" + (size.getHeight() * 4 / 5) + " " + imagePath + " " + imagePath).!!
      }

      images append ImageIO.read(new File(imagePath))
      schedule match {
        case Left(micro) => microsteps append micro
        case Right(macro) => macrosteps append macro
      }

      ("rm -f " + dotPath + " " + imagePath).!!
    }

    // Opens the slider UI over the recorded snapshots; macro steps take
    // precedence when both kinds were recorded.
    def show() {
      val history: Either[FineSchedule, CoarseSchedule] = if (macrosteps.isEmpty)
        Left(microsteps.toList) else Right(macrosteps.toList)
      new Interface(images.toList, history).main(Array())
    }
  }
  /** Swing panel that paints a single BufferedImage at its top-left corner. */
  class ImagePanel extends Panel {
    private var _bufferedImage: BufferedImage = null

    def bufferedImage = _bufferedImage

    def bufferedImage_=(buf: BufferedImage) {
      _bufferedImage = buf
    }

    // Draws the current image, if any; the panel is blank until one is set.
    override def paintComponent(g: Graphics2D) = {
      if (null != bufferedImage) g.drawImage(bufferedImage, 0, 0, null)
    }
  }
def labelText(schedule: Either[FineSchedule, CoarseSchedule],
highlightIndex: Int): String = {
val listOfEithers: List[Either[MicroStep, MacroStep]] = schedule match {
case Left(fineSch) => fineSch map (Left(_))
case Right(coarseSch) => coarseSch map (Right(_))
}
"<html>" +
(for (i <- 0 until listOfEithers.size) yield {
val str = listOfEithers(i) match {
case Left(microstep) => {
val mapped = microstep.map {
case false => "0"
case true => "1"
}
mapped.mkString("[","","]")
}
case Right(macrostep) => {
val mapped = macrostep.map {
case Disabled => "-"
case EnabledLeft => "<"
case EnabledRight => ">"
}
mapped.mkString("["," ","]")
}
}
val color = if (i == highlightIndex) "red" else "black"
"<font color=" + color + ">" + str + "</font>"
}).toList.mkString("<br/>") +
"<html/>"
}
class Interface(images: List[BufferedImage],
schedule: Either[FineSchedule, CoarseSchedule]) extends SimpleSwingApplication {
def top = new MainFrame {
title = "Visualizer"
val slider = new Slider {
min = 0
max = images.size - 1
value = 0
snapToTicks = true
// paintLabels = true
// labels = ((1 to images.size) map (i => (i, new Label(i.toString)))).toMap
}
val imagePanel = new ImagePanel {
bufferedImage = images.head
val (w, h) = (bufferedImage.getWidth(), bufferedImage.getHeight())
minimumSize = new Dimension(w, h)
maximumSize = new Dimension(w, h)
preferredSize = new Dimension(w, h)
}
val scheduleLabel = new Label(labelText(schedule, 0)) {
minimumSize = new Dimension(200, 800)
maximumSize = new Dimension(200, 800)
preferredSize = new Dimension(200, 800)
}
val schedulePanel = new BorderPanel {
layout(scheduleLabel) = BorderPanel.Position.Center
minimumSize = new Dimension(300, 800)
maximumSize = new Dimension(300, 800)
preferredSize = new Dimension(300, 800)
}
contents = new BorderPanel {
layout(slider) = BorderPanel.Position.North
layout(imagePanel) = BorderPanel.Position.Center
layout(schedulePanel) = BorderPanel.Position.East
listenTo(slider)
reactions += {
case vc: ValueChanged =>
val newIndex = vc.source.asInstanceOf[Slider].value
imagePanel.bufferedImage = images(newIndex)
scheduleLabel.text = labelText(schedule, newIndex)
repaint()
}
}
val dim = new Dimension(imagePanel.size.width + schedulePanel.size.width + 4, imagePanel.size.height + slider.size.height + 20)
minimumSize = dim
maximumSize = dim
preferredSize = dim
override def closeOperation() {
sys.exit(0)
}
}
}
}
| koksal/elegans-popl2013-code | src/main/scala/elegans/Visualizer.scala | Scala | bsd-2-clause | 5,152 |
package nodescala
import scala.language.postfixOps
import scala.util.{Try, Success, Failure}
import scala.collection._
import scala.concurrent._
import ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.async.Async.{async, await}
import org.scalatest._
import NodeScala._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class NodeScalaSuite extends FunSuite {

  test("A Future should always be completed") {
    val always = Future.always(517)
    // `always` is already completed, so a zero-timeout await must succeed.
    assert(Await.result(always, 0 nanos) == 517)
  }
  test("A Future should never be completed") {
    val never = Future.never[Int]
    try {
      Await.result(never, 1 second)
      assert(false)
    } catch {
      case t: TimeoutException => // ok!
    }
  }
  test("any should be success when one future is a success") {
    val always = Future.always(3)
    val never = Future.never[Int]
    val any = Future.any(List(always, never))
    // The only future that ever completes is `always`, so `any` yields 3.
    assert(Await.result(any, 1 second) == 3)
  }
  test("any should be a failure if all futures fail") {
    val never = Future.never[Int]
    val nuver = Future.never[Int]
    val niver = Future.never[Int]
    val any = Future.any(List(never, nuver, niver))
    // None of the inputs ever completes, so the await must time out.
    try {
      Await.result(any, 1 second)
      assert(false)
    } catch {
      case t: TimeoutException => // ok!
    }
  }
  test("any should complete with a failure too") {
    val failure : Future[Int] = Future { throw new Exception }
    val never = Future.never[Int]
    val any = Future.any(List(failure, never))
    // NOTE(review): `case e: Exception` also matches the exception thrown by
    // `assert(false)`, so this test cannot fail through that assert —
    // consider catching only the expected exception type.
    try {
      Await.result(any, 1 second)
      assert(false)
    } catch {
      case e: Exception => // ok!
    }
  }
  // test("A Future should not complete after 1s when using a delay of 3s") {
  //   val a3secs = Future.delay(Duration(3, SECONDS))
  //   assert(! a3secs.isCompleted)
  //   Thread.sleep(1000)
  //   assert(! a3secs.isCompleted)
  //   Thread.sleep(2050)
  //   assert(a3secs.isCompleted)
  // }

  /** In-memory Exchange stub: accumulates writes into `response` and
    * completes `loaded` with the full response text when closed.
    */
  class DummyExchange(val request: Request) extends Exchange {
    @volatile var response = ""
    val loaded = Promise[String]()
    def write(s: String) {
      response += s
    }
    def close() {
      loaded.success(response)
    }
  }

  /** Listener stub: `emit` hands each request straight to the registered
    * handler (if any) wrapped in a DummyExchange.
    */
  class DummyListener(val port: Int, val relativePath: String) extends NodeScala.Listener {
    self =>
    @volatile private var started = false
    var handler: Exchange => Unit = null
    def createContext(h: Exchange => Unit) = this.synchronized {
      assert(started, "is server started?")
      handler = h
    }
    def removeContext() = this.synchronized {
      assert(started, "is server started?")
      handler = null
    }
    def start() = self.synchronized {
      started = true
      // Unsubscribing flips the listener back to the stopped state.
      new Subscription {
        def unsubscribe() = self.synchronized {
          started = false
        }
      }
    }
    def emit(req: Request) = {
      val exchange = new DummyExchange(req)
      if (handler != null) handler(exchange)
      exchange
    }
  }

  /** Server stub that routes emitted requests to per-path DummyListeners. */
  class DummyServer(val port: Int) extends NodeScala {
    self =>
    val listeners = mutable.Map[String, DummyListener]()
    def createListener(relativePath: String) = {
      val l = new DummyListener(port, relativePath)
      listeners(relativePath) = l
      l
    }
    def emit(relativePath: String, req: Request) = this.synchronized {
      val l = listeners(relativePath)
      l.emit(req)
    }
  }
  test("Server should serve requests") {
    val dummy = new DummyServer(8191)
    val dummySubscription = dummy.start("/testDir") {
      request => for (kv <- request.iterator) yield (kv + "\\n").toString
    }
    // wait until server is really installed
    Thread.sleep(500)
    // Emits `req` and checks the served page echoes every key/value pair.
    def test(req: Request) {
      val webpage = dummy.emit("/testDir", req)
      val content = Await.result(webpage.loaded.future, 1 second)
      val expected = (for (kv <- req.iterator) yield (kv + "\\n").toString).mkString
      assert(content == expected, s"'$content' vs. '$expected'")
    }
    test(immutable.Map("StrangeRequest" -> List("Does it work?")))
    test(immutable.Map("StrangeRequest" -> List("It works!")))
    test(immutable.Map("WorksForThree" -> List("Always works. Trust me.")))
    dummySubscription.unsubscribe()
  }
}
| rranelli/rrreacprog | nodescala/src/test/scala/nodescala/tests.scala | Scala | unlicense | 4,243 |
package controllers
import play.api.data.Form
import lila.api.Context
import lila.app._
import lila.common.{ Captcha, LilaCookie, HTTPRequest }
import lila.i18n.{ Translation, TransInfo }
import views._
object I18n extends LilaController {

  private def env = Env.i18n

  /** Sets the visitor's language (persisted for logged-in users) and
    * redirects to the same path on the language-specific subdomain.
    */
  def select = OpenBody { implicit ctx =>
    import play.api.data.Forms._
    import play.api.data._
    implicit val req = ctx.body
    // Only languages present in env.pool are accepted.
    Form(single("lang" -> text.verifying(env.pool contains _))).bindFromRequest.fold(
      _ => notFound,
      lang => (ctx.me ?? { me => lila.user.UserRepo.setLang(me.id, lang) }) inject Redirect {
        s"${Env.api.Net.Protocol}${lang}.${Env.api.Net.Domain}" + {
          // Keep the path of the referring page; fall back to the lobby for
          // a missing or malformed referer.
          HTTPRequest.referer(ctx.req).fold(routes.Lobby.home.url) { str =>
            try {
              new java.net.URL(str).getPath
            }
            catch {
              case e: java.net.MalformedURLException => routes.Lobby.home.url
            }
          }
        }
      }
    )
  }

  /** Lists all translations, surfacing the visitor's Accept-Language matches first. */
  def contribute = Open { implicit ctx =>
    val mines = (ctx.req.acceptLanguages map env.transInfos.get).toList.flatten.distinct
    Ok(html.i18n.contribute(env.transInfos.all, mines)).fuccess
  }

  /** Renders the translation contribution form (with captcha) for `lang`. */
  def translationForm(lang: String) = Auth { implicit ctx =>
    me =>
      OptionFuOk(infoAndContext(lang)) {
        case (info, context) => env.forms.translationWithCaptcha map {
          case (form, captcha) => renderTranslationForm(form, info, captcha, context = context)
        }
      }
  }

  /** Validates and stores a submitted translation, then redirects back to
    * the contribution page with a success flash.
    */
  def translationPost(lang: String) = AuthBody { implicit ctx =>
    me =>
      OptionFuResult(infoAndContext(lang)) {
        case (info, context) =>
          implicit val req = ctx.body
          val data = env.forms.decodeTranslationBody
          // On form errors, re-render with a fresh captcha and the raw data.
          FormFuResult(env.forms.translation) { form =>
            env.forms.anyCaptcha map { captcha =>
              renderTranslationForm(form, info, captcha, data = data, context = context)
            }
          } { metadata =>
            env.forms.process(lang, metadata, data, me.username) inject {
              // NOTE(review): maxAge here is 60 * 24 — if the unit is seconds
              // that is 24 minutes, not a day; confirm the intended duration.
              Redirect(routes.I18n.contribute).flashing("success" -> "1") withCookies
                LilaCookie.cookie(env.hideCallsCookieName, "1", maxAge = Some(60 * 24))
            }
          }
      }
  }

  // Pairs translation metadata with the shared message context for `lang`,
  // or None when the language is unknown.
  private def infoAndContext(lang: String) = env.transInfos.get(lang) ?? { i =>
    env.context.get map (i -> _) map (_.some)
  }

  private def renderTranslationForm(
    form: Form[_],
    info: TransInfo,
    captcha: Captcha,
    context: Map[String, String],
    data: Map[String, String] = Map.empty)(implicit ctx: Context) =
    html.i18n.translationForm(
      info,
      form,
      env.keys,
      env.pool.default,
      env.translator.rawTranslation(info.lang) _,
      captcha,
      data = data,
      context = context)

  /** Returns translation updates since the given version as JSON. */
  def fetch(from: Int) = Open { implicit ctx =>
    JsonOk(env jsonFromVersion from)
  }

  /** Sets the "hide contribution calls" cookie and redirects home. */
  def hideCalls = Open { implicit ctx =>
    implicit val req = ctx.req
    val cookie = LilaCookie.cookie(
      env.hideCallsCookieName,
      "1",
      maxAge = env.hideCallsCookieMaxAge.some)
    fuccess(Redirect(routes.Lobby.home()) withCookies cookie)
  }
}
| Enigmahack/lila | app/controllers/I18n.scala | Scala | mit | 3,141 |
package security.models.services
import java.util.UUID
import javax.inject.Inject
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.impl.providers.CommonSocialProfile
import security.models.User
import security.models.daos.UserDAO
import scala.concurrent.{ExecutionContext, Future}
/**
 * Default [[UserService]] backed by a [[UserDAO]].
 *
 * @param userDAO persistence layer for users.
 * @param ec      execution context used when composing DAO futures.
 */
class UserServiceImpl @Inject() (val userDAO: UserDAO, implicit val ec: ExecutionContext) extends UserService {

  /**
   * Looks up the user registered under the given login info.
   *
   * @param loginInfo the login info identifying the user.
   * @return the matching user, or None when nobody is registered under it.
   */
  def retrieve(loginInfo: LoginInfo): Future[Option[User]] = userDAO.find(loginInfo)

  /**
   * Persists the given user.
   *
   * @param user the user to persist.
   * @return the persisted user.
   */
  def save(user: User) = userDAO.save(user)

  /**
   * Persists a social profile: updates the existing user when one is already
   * registered under the profile's login info, otherwise creates a new user
   * with a freshly generated id.
   *
   * @param profile the social profile to persist.
   * @return the user the profile was stored against.
   */
  def save(profile: CommonSocialProfile) =
    userDAO.find(profile.loginInfo).flatMap { existing =>
      val toPersist = existing match {
        case Some(found) =>
          // Refresh the stored user's profile fields from the provider.
          found.copy(
            firstName = profile.firstName,
            lastName = profile.lastName,
            fullName = profile.fullName,
            email = profile.email,
            avatarURL = profile.avatarURL
          )
        case None =>
          // First time we see this login info: mint a brand-new user.
          User(
            userID = UUID.randomUUID(),
            loginInfo = profile.loginInfo,
            firstName = profile.firstName,
            lastName = profile.lastName,
            fullName = profile.fullName,
            email = profile.email,
            avatarURL = profile.avatarURL
          )
      }
      userDAO.save(toPersist)
    }
}
| unclealex72/west-ham-calendar | app/security/models/services/UserServiceImpl.scala | Scala | apache-2.0 | 1,999 |
import com.typesafe.sbt.pgp.PgpKeys._
import sbt._
import sbtrelease.ReleasePlugin.autoImport._
import sbtrelease.ReleaseStateTransformations._
object common {

  /** sbt-release settings that swap the stock `publishArtifacts` step for one
    * that runs `publishSigned` (PGP-signed artifacts) aggregated over the
    * current project, and disable cross-building on every release step.
    */
  lazy val releaseSignedArtifactsSettings = Seq(
    releaseProcess ~= {
      s: Seq[ReleaseStep] =>
        // Replacement action: publish signed artifacts for this project ref.
        lazy val publishArtifactsAction = { st: State =>
          val extracted = Project.extract(st)
          val ref = extracted.get(Keys.thisProjectRef)
          extracted.runAggregated(publishSigned in Global in ref, st)
        }
        s map {
          // Backquoted pattern: matches the predefined publishArtifacts step.
          case `publishArtifacts` => publishArtifacts.copy(action = publishArtifactsAction)
          case step => step
        } map {
          _.copy(enableCrossBuild = false)
        }
    }
  )

  /** Creates a sub-project rooted in a same-named directory with the
    * signed-release settings applied.
    */
  def bfgProject(name: String) = Project(name, file(name)) settings releaseSignedArtifactsSettings
}
| javabrett/bfg-repo-cleaner | project/common.scala | Scala | gpl-3.0 | 819 |
/*
* Copyright 2014 DataGenerator Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.datagenerator
import java.util
import java.util.concurrent.atomic.AtomicBoolean
import org.finra.datagenerator.engine.Frontier
/**
 * Frontier implementation that produces random-number test data.
 *
 * Created by Brijesh on 5/28/2015.
 */
class RandomNumberFrontier extends Frontier with java.io.Serializable {

  /**
   * Fills the queue with `RandomNumberEngine.numberInEachFrontier` entries,
   * each a single-entry map holding a freshly drawn random number string.
   *
   * @param randomNumberQueue queue receiving the generated maps
   * @param flag atomic flag from the Frontier contract (not consulted here)
   */
  override def searchForScenarios(randomNumberQueue: util.Queue[util.Map[String, String]], flag: AtomicBoolean): Unit =
    this.synchronized {
      (1 to RandomNumberEngine.numberInEachFrontier) foreach { _ =>
        // Draw a value in [0, 100000) and wrap it in a one-entry map.
        val entry = new util.HashMap[String, String]()
        entry.put("Key for Random Number", scala.util.Random.nextInt(100000).toString)
        randomNumberQueue.add(entry)
      }
    }
}
| wnilkamal/DataGenerator | dg-spark/src/main/code/org/finra/datagenerator/RandomNumberFrontier.scala | Scala | apache-2.0 | 1,812 |
// No-op "destructor" for M values: discards its argument and returns Unit.
def destroy(x: M): Unit = () | hmemcpy/milewski-ctfp-pdf | src/content/3.7/code/scala/snippet26.scala | Scala | gpl-3.0 | 28 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package connectors
import javax.inject.{Inject, Singleton}
import play.api.Logger
import play.api.libs.json.JsObject
import uk.gov.hmrc.http.{HeaderCarrier, HttpClient, HttpResponse}
import uk.gov.hmrc.play.bootstrap.config.ServicesConfig
import uk.gov.hmrc.http.HttpReads.Implicits.readRaw
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class ICLConnector @Inject()(val http: HttpClient, config: ServicesConfig)
                            (implicit ec: ExecutionContext) {
  // Connector for the industry-classification-lookup frontend (ICL).
  // All config lookups below fail fast at construction when a key is missing.
  val baseUri: String = config.getConfString("industry-classification-lookup-frontend.uri",
    throw new RuntimeException("[ICLConnector] Could not retrieve config for 'industry-classification-lookup-frontend.uri'"))
  val iclFEurl: String = config.baseUrl("industry-classification-lookup-frontend") + baseUri
  // Internal (service-to-service) base URL used when fetching journey results.
  val IClFEinternal: String = config.baseUrl("industry-classification-lookup-frontend-internal")
  val initialiseJourney: String = config.getConfString("industry-classification-lookup-frontend.initialise-journey", throw new RuntimeException("[ICLConnector] Could not retrieve config for 'industry-classification-lookup-frontend.initialise-journey'"))
  val IClInitialiseUrl: String = iclFEurl + initialiseJourney

  // NOTE(review): `logger` is not defined in this snippet (only play.api.Logger
  // is imported) — presumably provided by a logging trait/member elsewhere in
  // the file; confirm.

  /** Starts a new ICL journey by POSTing `js`; returns the response JSON.
    * Failures are logged and rethrown.
    */
  def iclSetup(js: JsObject)(implicit hc: HeaderCarrier): Future[JsObject] = {
    http.POST[JsObject, HttpResponse](IClInitialiseUrl, js)
      .map(_.json.as[JsObject])
      .recover {
        case ex =>
          logger.error(s"[ICLConnector] [ICLSetup] Threw an exception whilst Posting to initialise a new ICL journey with message: ${ex.getMessage}")
          throw ex
      }
  }

  /** Fetches the results of an ICL journey from the internal frontend URL.
    * Failures are logged and rethrown.
    */
  def iclGetResult(fetchResultsUrl: String)(implicit hc: HeaderCarrier): Future[JsObject] = {
    http.GET[HttpResponse](IClFEinternal + fetchResultsUrl)
      .map(_.json.as[JsObject])
      .recover {
        case ex =>
          logger.error(s"[ICLConnector] [ICLGetResult] Threw an exception while getting ICL journey results with message: ${ex.getMessage}")
          throw ex
      }
  }
} | hmrc/vat-registration-frontend | app/connectors/ICLConnector.scala | Scala | apache-2.0 | 2,609 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen
import org.apache.flink.table.api.TableConfig
import org.apache.flink.table.data.{GenericRowData, RowData}
import org.apache.flink.table.runtime.operators.values.ValuesInputFormat
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.flink.table.types.logical.RowType
import org.apache.calcite.rex.RexLiteral
import java.util
import scala.collection.JavaConversions._
object ValuesCodeGenerator {

  /** Generates a [[ValuesInputFormat]] that emits the given literal rows.
    *
    * Each row in `tuples` is code-generated into a [[GenericRowData]]
    * producer, and all generated record snippets are wrapped into a single
    * input format typed by `outputType`.
    *
    * @param tableConfig planner configuration used by the code generator
    * @param outputType  logical row type of the produced records
    * @param tuples      literal values, one inner list per row
    * @param description operator description embedded in the generated code
    */
  def generatorInputFormat(
      tableConfig: TableConfig,
      outputType: RowType,
      tuples: util.List[util.List[RexLiteral]],
      description: String): ValuesInputFormat = {
    val ctx = CodeGeneratorContext(tableConfig)
    val exprGenerator = new ExprCodeGenerator(ctx, false)

    // generate code for every record
    val generatedRecords = tuples.map { r =>
      exprGenerator.generateResultExpression(
        r.map(exprGenerator.generateExpression), outputType, classOf[GenericRowData])
    }

    // generate input format
    val generatedFunction = InputFormatCodeGenerator.generateValuesInputFormat[RowData](
      ctx,
      description,
      generatedRecords.map(_.code),
      outputType)
    new ValuesInputFormat(generatedFunction, InternalTypeInfo.of(outputType))
  }
}
| apache/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ValuesCodeGenerator.scala | Scala | apache-2.0 | 2,079 |
package poly.collection.cache
import poly.collection._
import poly.collection.mut._
import poly.collection.typeclass._
/**
 * An LRU cache of a function.
 *
 * The backing LinkedHashMap doubles as the recency list: `dummy.next` is the
 * least recently used entry (evicted first), `dummy.prev` is the most
 * recently used.
 *
 * @author Tongfei Chen
 * @since 0.1.0
 */
class LRUCache[K: Hash, +R] private(f: K => R, n: Int) extends CachedFunction[K, R] {

  private[this] val c = LinkedHashMap[K, R]()

  def cache: Map[K, R] = c

  def apply(a: K) = {
    if (c containsKey a) { // cache hit
      val e = c.data.locate(a)
      // Unlink e from its current position in the doubly-linked list...
      e.prev.next = e.next // moves this entry to the MRU location
      e.next.prev = e.prev
      // ...then splice it back in just before the dummy node (the MRU slot).
      c.dummy.prev.next = e
      e.prev = c.dummy.prev
      e.next = c.dummy
      c.dummy.prev = e
      e.value
    }
    else { // cache miss
      if (c.size >= n)
        c.remove_!(c.dummy.next.key) // evict the LRU (least recently used) element
      val b = f(a) // heavyweight computation
      c.add_!(a, b) // caches the computation result
      b
    }
  }

  // Drops every cached result; subsequent calls recompute via f.
  def clearCache_!() = c.clear_!()
}
object LRUCache {

  /**
   * Returns a function with an LRU cache. This is useful for wrapping a
   * high-cost function (e.g. reading from files).
   * @param n Capacity of this cache
   * @param f Function to be cached
   */
  def apply[K: Hash, R](n: Int)(f: K => R) = new LRUCache(f, n)

  /**
   * Creates an LRU cache of a function using the default `hashCode` method on inputs
   * as the hashing function for the keys.
   */
  def byDefaultHashing[K, R](n: Int)(f: K => R) = new LRUCache(f, n)(Hash.default[K])

  /**
   * Creates an LRU cache of a function using the reference (pointer) of the keys for hashing.
   */
  def byRefHashing[K <: AnyRef, R](n: Int)(f: K => R) = new LRUCache(f, n)(Hash.byRef[K])
}
| ctongfei/poly-collection | core/src/main/scala/poly/collection/cache/LRUCache.scala | Scala | mit | 1,645 |
package pages.theme
import util.ImplicitHelpers._
import net.liftweb.http.js.{JsCmds, JsCmd}
import net.liftweb.http.js.JsCmds.Run
import net.liftweb.util.Helpers._
import scala.xml.NodeSeq
/** Mixin exposing an immutable builder DSL around the toastr.js notification
  * library: configure a [[Toastr]] via the `with*`/flag methods (each returns
  * a copy), then emit the JavaScript with [[Toastr#show]].
  */
trait SToastr {

  case class Toastr private[theme](
    protected val clas: String,
    protected val title: String = "",
    protected val mesg: String = "",
    protected val pos: Option[String] = None,
    protected val showEasing: Option[String] = None,
    protected val hideEasing: Option[String] = None,
    protected val showMethod: Option[String] = None,
    protected val hideMethod: Option[String] = None,
    protected val _newestOnTop: Option[Boolean] = None,
    protected val _preventDuplicates: Option[Boolean] = None,
    protected val _closeBtn: Option[Boolean] = None,
    protected val _progressBar: Option[Boolean] = None,
    protected val duration: Option[Long] = None,
    protected val hideDuration: Option[Long] = None,
    protected val timeout: Option[Long] = None,
    protected val extendedTimeout: Option[Long] = None
  ) {
    // `clas` is the toastr method name (info/warning/success/error); only
    // options set to Some(...) are serialized into toastr.options in show().

    // --- content -------------------------------------------------------
    def withTitle(title: String): Toastr = copy(title = title)
    def withMesg(s: String): Toastr = copy(mesg = s)

    // --- position (toastr CSS position classes) ------------------------
    def withPosTopRight: Toastr = copy(pos = Some("toast-top-right"))
    def withPosBottomRight: Toastr = copy(pos = Some("toast-bottom-right"))
    def withPosBottomLeft: Toastr = copy(pos = Some("toast-bottom-left"))
    def withPosTopLeft: Toastr = copy(pos = Some("toast-top-left"))
    def withPosTopFullWidth: Toastr = copy(pos = Some("toast-top-full-width"))
    def withPosBottomFullWidth: Toastr = copy(pos = Some("toast-bottom-full-width"))
    def withPosTopCenter: Toastr = copy(pos = Some("toast-top-center"))
    def withPosBottomCenter: Toastr = copy(pos = Some("toast-bottom-center"))

    // --- boolean behavior flags ----------------------------------------
    def closeBtn(v: Boolean): Toastr = copy(_closeBtn = Some(v))
    def progressBar(v: Boolean): Toastr = copy(_progressBar = Some(v))
    def newestOnTop(v: Boolean): Toastr = copy(_newestOnTop = Some(v))
    def preventDuplicates(v: Boolean): Toastr = copy(_preventDuplicates = Some(v))

    // --- timing (forwarded verbatim to toastr; units per toastr docs) ---
    def withDuration(v: Long): Toastr = copy(duration = Some(v))
    def withTimeout(v: Long): Toastr = copy(timeout = Some(v))
    def withHideDuration(v: Long): Toastr = copy(hideDuration = Some(v))
    def withExtendedTimeout(v: Long): Toastr = copy(extendedTimeout = Some(v))

    // --- show/hide animation easing and method -------------------------
    def showEasingLinear: Toastr = copy(showEasing = Some("linear"))
    def showEasingSwing: Toastr = copy(showEasing = Some("swing"))
    def hideEasingLinear: Toastr = copy(hideEasing = Some("linear"))
    def hideEasingSwing: Toastr = copy(hideEasing = Some("swing"))
    def showMethodFadeIn: Toastr = copy(showMethod = Some("fadeIn"))
    def showMethodSlideDown: Toastr = copy(showMethod = Some("slideDown"))
    def showMethodShow: Toastr = copy(showMethod = Some("show"))
    def hideMethodFadeOut: Toastr = copy(hideMethod = Some("fadeOut"))
    def hideMethodSlideUp: Toastr = copy(hideMethod = Some("slideUp"))
    def hideMethodShow: Toastr = copy(hideMethod = Some("hide"))

    /** Builds the toastr.options object from the set options and invokes
      * the toast; string values are JS-escaped via encJs.
      */
    def show(): JsCmd = {
      Run({
        val options = List(
          _newestOnTop.map(v => "newestOnTop: " + v)
          , _closeBtn.map(v => "closeButton: " + v)
          , _progressBar.map(v => "progressBar: " + v)
          , pos.map(v => "positionClass: " + v.toString.encJs)
          , _preventDuplicates.map(v => "preventDuplicates: " + v)
          , duration.map(v => "showDuration: " + v)
          , hideDuration.map(v => "hideDuration: " + v)
          , timeout.map(v => "timeOut: " + v)
          , extendedTimeout.map(v => "extendedTimeOut: " + v)
          , showEasing.map(v => "showEasing: " + v.toString.encJs)
          , hideEasing.map(v => "hideEasing: " + v.toString.encJs)
          , showMethod.map(v => "showMethod: " + v.toString.encJs)
          , hideMethod.map(v => "hideMethod: " + v.toString.encJs)
        ).flatten.reduceOption(_ + ",\\n" + _).getOrElse("")
        s"""toastr.options = {$options}; toastr[${clas.encJs}](${mesg.encJs},${title.encJs});"""
      })
    }
  }

  /** Factory methods, one per toastr severity level. */
  object Toastr {
    def Info = Toastr("info")
    def Info(title: String, mesg: String) = Toastr("info", title, mesg)
    def Warning = Toastr("warning")
    def Warning(title: String, mesg: String) = Toastr("warning", title, mesg)
    def Success = Toastr("success")
    def Success(title: String, mesg: String) = Toastr("success", title, mesg)
    def Error = Toastr("error")
    def Error(title: String, mesg: String) = Toastr("error", title, mesg)
  }
}
| slynx-fw/slynx-demo | app/pages/theme/SToastr.scala | Scala | apache-2.0 | 5,067 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.sql.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.utils.{StreamTableTestUtil, TableTestBase}
import org.junit.Test
/** Checks that unsupported ORDER BY / LIMIT forms on a streaming table are
  * rejected with a TableException at planning time.
  */
class SortValidationTest extends TableTestBase {

  private val streamUtil: StreamTableTestUtil = streamTestUtil()
  // Table with processing-time and row-time attributes appended.
  streamUtil.addTable[(Int, String, Long)]("MyTable", 'a, 'b, 'c,
    'proctime.proctime, 'rowtime.rowtime)

  // test should fail because time order is descending
  @Test(expected = classOf[TableException])
  def testSortProcessingTimeDesc(): Unit = {
    val sqlQuery = "SELECT a FROM MyTable ORDER BY proctime DESC, c"
    streamUtil.verifySql(sqlQuery, "")
  }

  // test should fail because time is not the primary order field
  @Test(expected = classOf[TableException])
  def testSortProcessingTimeSecondaryField(): Unit = {
    val sqlQuery = "SELECT a FROM MyTable ORDER BY c, proctime"
    streamUtil.verifySql(sqlQuery, "")
  }

  // test should fail because LIMIT is not supported without sorting
  @Test(expected = classOf[TableException])
  def testLimitWithoutSorting(): Unit = {
    val sqlQuery = "SELECT a FROM MyTable LIMIT 3"
    streamUtil.verifySql(sqlQuery, "")
  }
}
| tzulitai/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/sql/validation/SortValidationTest.scala | Scala | apache-2.0 | 2,044 |
import scala.tools.partest._
// a cold run of partest takes about 15s for this test on my laptop
/** Generates a single method whose bytecode exceeds the JVM's 64KB
  * method-size limit, then compiles it — the test passes when the compiler
  * reports the size error gracefully instead of crashing.
  */
object Test extends DirectTest {
  // test that we hit the code size limit and error out gracefully
  // 5958 is the magic number (2^16/11 -- each `a(1,2,3,4,5,6)` is 11 bytes of bytecode)
  override def code
  = s"""
    |class BigEnoughToFail {
    |  def a(a: Int, b: Int, c: Int, d: Int, e: Int, f: Int): Unit = {}
    |  def tooLong: Unit = {
    |    ${(1 to 5958) map (_ => "a(1,2,3,4,5,6)") mkString(";")}
    |  }
    |}""".stripMargin.trim

  // Compiling is the whole test: the expected diagnostic is checked by partest.
  override def show(): Unit = compile()
}
| scala/scala | test/files/run/large_code.scala | Scala | apache-2.0 | 606 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.frs105.validation.OffBalanceSheetArrangementsValidator
import uk.gov.hmrc.ct.box.ValidatableBox.StandardCohoTextFieldLimit
import uk.gov.hmrc.ct.box._
/** Accounts box AC7999: the free-text "off balance sheet disclosure"
  * footnote for FRS 105 accounts.
  */
case class AC7999(value: Option[String])
  extends CtBoxIdentifier(name = "Off balance sheet disclosure footnote")
    with CtOptionalString
    with Input
    with SelfValidatableBox[Frs105AccountsBoxRetriever, Option[String]]
    with OffBalanceSheetArrangementsValidator
  {

  override def validate(
      boxRetriever: Frs105AccountsBoxRetriever): Set[CtValidation] = {
    // Cross-field check against AC7999a plus a length check of
    // 1..StandardCohoTextFieldLimit characters when a value is present.
    collectErrors(
        validateAgainstAC7999a(boxRetriever, this.boxId, value),
        validateOptionalStringByLength(1, StandardCohoTextFieldLimit)
    )
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC7999.scala | Scala | apache-2.0 | 1,445 |
/*
* Copyright 2020 Lenses.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lenses.streamreactor.connect.aws.s3.sink.conversion
import org.apache.kafka.connect.sink.SinkRecord
import scala.collection.JavaConverters._
object HeaderToStringConverter {

  /** Collects a sink record's Kafka Connect headers into an immutable
    * key -> rendered-string map.
    */
  def apply(record: SinkRecord): Map[String, String] = {
    val rendered = for (header <- record.headers().asScala)
      yield header.key() -> headerValueToString(header.value())
    rendered.toMap
  }

  /** Renders a supported primitive header value as its string form; anything
    * else is rejected with an IllegalArgumentException.
    */
  def headerValueToString(value: Any): String = {
    value match {
      case text: String => text
      case i: Int => i.toString
      case s: Short => s.toString
      case f: Float => f.toString
      case d: Double => d.toString
      case b: Byte => b.toString
      case flag: Boolean => flag.toString
      case l: Long => l.toString
      case otherVal => throw new IllegalArgumentException(s"Unsupported header value type $otherVal:${otherVal.getClass.getCanonicalName}. Consider if you need to set the header.converter property in your connector configuration.")
    }
  }
}
| datamountaineer/stream-reactor | kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/conversion/HeaderToStringConverter.scala | Scala | apache-2.0 | 1,669 |
package objektwerks.core
// Immutable configuration for a queue connection (the exchange/routing-key/
// auto-ack fields suggest AMQP — confirm against the consuming connector).
// NOTE(review): units of publishConfirmationTimeout are not visible here.
case class QueueConnectorConf(url: String,
                              exchangeName: String,
                              exchangeType: String,
                              queueName: String,
                              isQueueDurable: Boolean,
                              routingKey: String,
                              autoAck: Boolean,
                              publishConfirmationTimeout: Int) | objektwerks/akka.cluster | core/src/main/scala/objektwerks/core/QueueConnectorConf.scala | Scala | apache-2.0 | 437 |
package mr.merc.economics
import mr.merc.economics.PopulationType._
import mr.merc.economics.Culture._
import mr.merc.economics.Products.Grain
import mr.merc.economics.TaxPolicy.CorporateTax
import mr.merc.politics.{Party, PoliticalViews, Province, State}
import org.scalatest.BeforeAndAfter
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
/** End-to-end check of one production day of a `Farm` resource-gathering
  * enterprise: selling stock, hiring workforce, paying pops, paying corporate
  * tax, and producing new goods.
  *
  * The expected numbers below follow from the fixture: 1000 initial unsold
  * units sold at price 100 (gross 100000), a 10% corporate tax (10000), and
  * the remaining 90000 split 9000/81000 between aristocrats and workers
  * (presumably a fixed owner/worker share -- verify against Farm's payout
  * logic).
  */
class ResourceGatheringTest extends AnyFunSuite with BeforeAndAfter with Matchers {
  // Fixture state, rebuilt before each test in `before`.
  var aristocrats: Population = _
  var workers: Population = _
  var region1:Province = _
  var region2:Province = _
  var region3:Province = _
  var country: State = _
  val zeroPolicy = TaxPolicy.zeroTaxes
  before {
    // A state with all taxes zeroed; individual tests set their own tax rates.
    country = new State("", FrenchHuman, 0, Party.Absolute, 0)
    country.taxPolicy.set(TaxPolicy.zeroTaxes.taxPolicyValues)
    country.budget.refreshTaxPolicy()
    // Three mutually neighbouring provinces; only region1 holds population.
    region1 = new Province("", country, null, null, null) {
      override lazy val neighbours: List[Province] = List(region2, region3)
      override val regionWarriors: RegionWarriors = null
    }
    aristocrats = new Population(LatinHuman, Aristocrats, 1000, 0, 0, PoliticalViews.averagePoliticalViews, region1)
    workers = new Population(LatinHuman, Farmers, 4000, 0, 0, PoliticalViews.averagePoliticalViews, region1)
    aristocrats.newDay(zeroPolicy, 1, aristocrats.buildNewDayPopulationDayRecord())
    workers.newDay(zeroPolicy,1, workers.buildNewDayPopulationDayRecord())
    region1.regionPopulation.addPop(aristocrats)
    region1.regionPopulation.addPop(workers)
    region2 = new Province("", country, null, null, null) {
      override lazy val neighbours: List[Province] = List(region1, region3)
    }
    region3 = new Province("", country, null, null, null) {
      override lazy val neighbours: List[Province] = List(region1, region2)
    }
  }
  test("resource gathering path") {
    // Grain farm in region1 with 1000 units of initial stock, efficiency 40.
    val resourceGathering = new Farm(Grain, region1, 1000d, 40)
    val prices = Map[Products.Product, Double](Grain -> 10)
    // Start the day under a 10% corporate tax.
    resourceGathering.newDay(new TaxPolicy(Map(CorporateTax -> 0.1)), 1,1)
    assert(resourceGathering.componentDemandRequests(prices) === Map())
    assert(resourceGathering.unsoldProducts === 1000)
    // region1 offers the better price (100 vs 0), so all stock goes there.
    val sell = resourceGathering.sellProduct(Map(region1 -> EconomicRegionDemand(1000, 100),
      region2 -> EconomicRegionDemand(1000, 0)))
    assert(sell.view.mapValues(_.count).toMap === Map(region1 -> 1000))
    // All 1000 units sold at price 100 -> 100000 gross income.
    val profit = FulfilledSupplyRequestProfit(
      FulfilledSupplyRequest(1000, 0, sell.head._2), 100)
    resourceGathering.receiveSellingResultAndMoney(region1, profit)
    assert(resourceGathering.unsoldProducts === 0)
    assert(resourceGathering.workforceEfficiencyDemand(prices) === 4000)
    resourceGathering.receiveWorkforceRequest(Map(workers -> 10000))
    resourceGathering.payMoneyToPops()
    // 90000 net after 10% tax is split 9000 / 81000 between the two pops.
    aristocrats.moneyReserves shouldBe 9000d +- 000.1
    workers.moneyReserves shouldBe 81000d +- 000.1
    resourceGathering.payTaxes()
    country.budget.dayReport.income(CorporateTax) shouldBe 10*1000d +- 0.0001
    country.budget.dayReport.grossIncome(CorporateTax) shouldBe 100000d +- 0.0001
    resourceGathering.produce()
    // 10000 workforce units at efficiency 40 -> 400000 produced units.
    assert(resourceGathering.unsoldProducts === 10000 * 40)
    resourceGathering.endOfDay()
  }
}
| RenualdMarch/merc | src/test/scala/mr/merc/economics/ResourceGatheringTest.scala | Scala | gpl-3.0 | 3,248 |
package de.tudresden.inf.lat.tabulas.parser
/** Parser constants: the literal tokens, signs and keywords understood by the
  * Tabulas simple-format parser and renderer.
  */
object ParserConstant {
  // Name and version of the serialization format these constants describe.
  final val SpecificationFormat = "simple format"
  final val SpecificationVersion = "1.0.0"
  // Line prefix that introduces a comment.
  final val CommentSymbol: String = "#"
  // Accepted field/value separators.
  final val ColonFieldSign: String = ":"
  final val EqualsFieldSign: String = "="
  // Sort-direction prefixes used in `order` declarations.
  final val StandardOrderSign: String = "+"
  final val ReverseOrderSign: String = "-"
  // NOTE(review): IdKeyword has exactly the same value as NewLine below,
  // which looks like a copy/paste mistake -- an identifier keyword is
  // normally a word (e.g. "id"). Verify against the code that reads it.
  final val IdKeyword: String = "\\n"
  final val LineContinuationSymbol: String = "\\\\"
  final val NewLine: String = "\\n"
  // Keywords introducing records, prefix maps, sort order and type sections.
  final val NewRecordToken: String = "new"
  final val PrefixMapToken: String = "prefix"
  final val PrefixSign: String = ":"
  final val SortingOrderDeclarationToken: String = "order"
  final val Space: String = " "
  final val TypeSelectionToken: String = "type"
  final val TypeNameToken: String = "name"
  final val TypeDefinitionToken: String = "def"
  final val TypeSign: String = ":"
  // Warning emitted for documents that contain more than one table.
  final val DeprecationOfMultipleTables = "Deprecation of Multiple Tables"
  final val WarningDeprecationOfMultipleTables = "WARNING: Using multiple tables is deprecated and they are no longer supported in newer versions."
}
| julianmendez/tabulas | tabulas-core/src/main/scala/de/tudresden/inf/lat/tabulas/parser/ParserConstant.scala | Scala | apache-2.0 | 1,142 |
// lchannels - session programming in Scala
// Copyright (c) 2016, Alceste Scalas and Imperial College London
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
/** @author Alceste Scalas <alceste.scalas@imperial.ac.uk> */
package lchannels.examples.http.server
import lchannels._
import lchannels.examples.http.protocol.binary
import lchannels.examples.http.protocol.server._
import lchannels.examples.http.protocol.server.{Server => ServerName} // Clash
import lchannels.examples.http.protocol.types._
import scala.concurrent.duration._
import java.net.Socket
import java.nio.file.{Path, Paths}
import java.time.ZonedDateTime;
import com.typesafe.scalalogging.StrictLogging
/** Entry point of the toy HTTP server: binds a loopback-only listening socket
  * on port 8080 and spawns one [[Worker]] (each on its own thread) per
  * accepted connection. Files are served relative to the process working
  * directory.
  */
object Server extends App {
  // Helper method to ease external invocation
  def run(): Unit = main(Array())

  import java.net.{InetAddress, ServerSocket}

  // Root directory for served resources: the current working directory.
  val root = java.nio.file.FileSystems.getDefault().getPath("").toAbsolutePath
  // getByName(null) resolves to the loopback address: we only listen locally.
  val address = InetAddress.getByName(null)
  val port = 8080
  val ssocket = new ServerSocket(port, 0, address)

  println(f"[*] HTTP server listening on: http://${address.getHostAddress}:${port}/")
  println(f"[*] Root directory: ${root}")
  println(f"[*] Press Ctrl+C to terminate")

  // Per-request receive timeout, picked up implicitly by each Worker.
  implicit val timeout: Duration = 30.seconds

  accept(1)

  /** Accepts connections forever; each client gets a Worker whose constructor
    * starts its own thread, so this loop returns to `accept` immediately. */
  @scala.annotation.tailrec
  def accept(nextWorkerId: Int): Unit = {
    val client = ssocket.accept()
    println(f"[*] Connection from ${client.getInetAddress}, spawning worker")
    new Worker(nextWorkerId, client, root)
    accept(nextWorkerId + 1)
  }
}
/** Serves one HTTP client connection on its own dedicated thread.
  *
  * Drives the session-typed server protocol from
  * `lchannels.examples.http.protocol.server`: receives the request line and
  * header choices, then answers with the requested file's contents, a
  * generated directory listing, or a 404. Resources are resolved against
  * `root`; `id` is only informational.
  */
class Worker(id: Int, socket: Socket, root: Path)
            (implicit timeout: Duration)
    extends Runnable with StrictLogging {
  // Convenience wrappers around the scala-logging logger.
  private def logTrace(msg: String) = logger.trace(msg)
  private def logDebug(msg: String) = logger.debug(msg)
  private def logInfo(msg: String) = logger.info(msg)
  private def logWarn(msg: String) = logger.warn(msg)
  private def logError(msg: String) = logger.error(msg)
  private val serverName = "lchannels HTTP server"
  private val pslash = Paths.get("/") // Used to relativize request paths
  // Own thread
  private val thread = { val t = new Thread(this); t.start(); t }
  def join() = thread.join()
  /** Performs the whole request/response exchange, then terminates. */
  override def run(): Unit = {
    logInfo("Started.")
    // Socket manager for the HTTP connection
    val sktmgr = new binary.HttpServerSocketManager(socket, true, logInfo)
    // Create a SocketChannel (with the correct type) from the client socket...
    val c = SocketIn[binary.Request](sktmgr)
    // ...and wrap it with a multiparty (in this case, binary) session object,
    // to hide continuation-passing
    val r = MPRequest(c)
    // Receive the request path; bail out early on disconnect or timeout.
    val (rpath, cont) = {
      try getRequest(r)
      catch {
        case sktmgr.ConnectionClosed(msg) => { logInfo(msg); return }
        case e: java.util.concurrent.TimeoutException => {
          logInfo(f"Timeout error: ${e.getMessage}")
          sktmgr.close()
          logInfo("Terminating.")
          return
        }
      }
    }
    val path = root.resolve(pslash.relativize(Paths.get(rpath)))
    logInfo(f"Resolved request path: ${path}")
    // TODO: we should reject paths like e.g. ../../../../etc/passwd
    val cont2 = cont.send(HttpVersion(Http11))
    val file = path.toFile
    if (!file.exists || !file.canRead) {
      notFound(cont2, rpath)
    } else {
      logInfo("Resource found.")
      val cont3 = cont2.send(Code200("OK"))
                       .send(ServerName(serverName))
                       .send(Date(ZonedDateTime.now))
      if (file.isFile) {
        serveFile(cont3, path)
      } else if (file.isDirectory) {
        serveDirectory(cont3, rpath, file)
      } else {
        throw new RuntimeException(f"BUG: unsupported resource type: ${path}")
      }
    }
    logInfo("Terminating.")
  }
  /** Receives the request and drains its headers; returns the request path
    * together with the continuation used to send the response. */
  private def getRequest(c: MPRequest)(implicit timeout: Duration) = {
    val req = c.receive
    logInfo(f"Method: ${req.p.method}; path: ${req.p.path}; version: ${req.p.version}")
    val cont = choices(req.cont)
    (req.p.path, cont)
  }
  /** Logs each header sent by the client, recursing until `RequestBody`
    * closes the header sequence and yields the response continuation. */
  @scala.annotation.tailrec
  private def choices(c: MPRequestChoice)
                     (implicit timeout: Duration): MPHttpVersion = c.receive match {
    case Accept(p, cont) => {
      logInfo(f"Client accepts: ${p}")
      choices(cont)
    }
    case AcceptEncodings(p, cont) => {
      logInfo(f"Client encodings: ${p}")
      choices(cont)
    }
    case AcceptLanguage(p, cont) => {
      logInfo(f"Client languages: ${p}")
      choices(cont)
    }
    case Connection(p, cont) => {
      logInfo(f"Client connection: ${p}")
      choices(cont)
    }
    case DoNotTrack(p, cont) => {
      logInfo(f"Client Do Not Track flag: ${p}")
      choices(cont)
    }
    case Host(p, cont) => {
      logInfo(f"Client host: ${p}")
      choices(cont)
    }
    case RequestBody(p, cont) => {
      logInfo(f"Client request body: ${p}")
      cont
    }
    case UpgradeIR(p, cont) => {
      logInfo(f"Client upgrade insecure requests: ${p}")
      choices(cont)
    }
    case UserAgent(p, cont) => {
      logInfo(f"Client user agent: ${p}")
      choices(cont)
    }
  }
  /** Sends a plain-text 404 response for the unavailable resource `res`. */
  private def notFound(c: MPCode200OrCode404, res: String) = {
    logInfo(f"Resource not found: ${res}")
    c.send(Code404("Not Found"))
     .send(ServerName(serverName))
     .send(Date(ZonedDateTime.now))
     .send(ResponseBody(
        Body("text/plain", f"Resource ${res} not found".getBytes("UTF-8"))))
  }
  /** Sends the file's raw bytes; content type is guessed from the extension. */
  private def serveFile(c: MPResponseChoice, file: Path) = {
    val filename = file.getFileName().toString()
    val contentType = {
      if (filename.endsWith(".html")) "text/html"
      else if (filename.endsWith(".css")) "text/css"
      else "text/plain" // TODO: we assume content is human-readable
    }
    logInfo(f"Serving file: ${file} (content type: ${contentType})")
    // TODO: for simplicity, we assume all files are UTF-8
    c.send(ResponseBody(
      Body(f"${contentType}; charset=utf-8", java.nio.file.Files.readAllBytes(file))))
  }
  /** Sends a generated HTML listing of `dir`, one link per entry
    * (directories get a trailing slash). */
  private def serveDirectory(c: MPResponseChoice, rpath: String, dir: java.io.File) = {
    logInfo(f"Serving directory: ${dir}")
    val list = dir.listFiles.foldLeft(""){(a,i) =>
      a + f"""|        <li>
              |          <a href="${i.getName}${if (i.isFile) "" else "/"}">
              |            ${i.getName}${if (i.isFile) "" else "/"}
              |          </a>
              |        </li>\\n""".stripMargin
    }
    val html = f"""|<!DOCTYPE html>
                   |<html>
                   |  <head>
                   |    <meta charset="UTF-8">
                   |    <title>Contents of ${rpath}</title>
                   |  </head>
                   |  <body>
                   |    <h1>Contents of ${rpath}</h1>
                   |    <ul>
                   |${list}
                   |    </ul>
                   |    <p><em>Page generated by ${serverName}</em></p>
                   |  </body>
                   |</html>\\n""".stripMargin
    c.send(ResponseBody(
      Body("text/html", html.getBytes("UTF-8"))))
  }
}
| alcestes/lchannels | examples/src/main/scala/lchannels/examples/http/Server.scala | Scala | bsd-2-clause | 8,324 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.Registration
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: eduardo.moreno@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by eduardo.moreno@e-evolution.com , www.e-evolution.com
*/
/**
* Registration Repository
* @param session
* @param executionContext
*/
/** Quill-backed repository for `Registration` entities.
  *
  * NOTE(review): several methods look suspicious -- see the inline notes on
  * getByRegistrationId, getById/getByUUID and selectRegistration below.
  */
class RegistrationRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
  extends api.repository.RegistrationRepository[Registration , Int]
    with RegistrationMapping {
  // NOTE(review): `.headOption.get` throws NoSuchElementException when no row
  // matches, so the returned Future fails with that exception rather than a
  // domain error. Same applies to getByUUID.
  def getById(id: Int): Future[Registration] = {
    Future(run(queryRegistration.filter(_.registrationId == lift(id))).headOption.get)
  }
  def getByUUID(uuid: UUID): Future[Registration] = {
    Future(run(queryRegistration.filter(_.uuid == lift(uuid.toString))).headOption.get)
  }
  // NOTE(review): the `id` parameter is ignored -- this returns ALL rows,
  // identical to getAll(). Probably meant to filter on registrationId as
  // getById does. Verify against callers before fixing.
  def getByRegistrationId(id : Int) : Future[List[Registration]] = {
    Future(run(queryRegistration))
  }
  def getAll() : Future[List[Registration]] = {
    Future(run(queryRegistration))
  }
  // Returns page `page` (0-based) of size `pageSize` plus the total count.
  def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[Registration]] = {
    val offset = page * pageSize
    val limit = (page + 1) * pageSize
    for {
      count <- countRegistration()
      elements <- if (offset > count) Future.successful(Nil)
      else selectRegistration(offset, limit)
    } yield {
      PaginatedSequence(elements, page, pageSize, count)
    }
  }
  private def countRegistration() = {
    Future(run(queryRegistration.size).toInt)
  }
  // NOTE(review): this fetches the WHOLE table and paginates in memory
  // (drop/take run on the materialized list, not in the query). Also, after
  // drop(offset), take(limit) keeps up to (page+1)*pageSize elements -- for
  // any page > 0 that is more than one page; it likely should be
  // take(pageSize) (i.e. limit - offset). Verify expected page sizes.
  private def selectRegistration(offset: Int, limit: Int): Future[Seq[Registration]] = {
    Future(run(queryRegistration).drop(offset).take(limit).toSeq)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/RegistrationRepository.scala | Scala | gpl-3.0 | 2,774 |
package org.pantsbuild.zinc.compiler
import sbt.io.IO
import java.io.File
import java.nio.file.{Files, Path, Paths}
import java.util.jar.{JarInputStream, JarEntry}
import scala.collection.mutable
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import org.scalatest.MustMatchers
@RunWith(classOf[JUnitRunner])
/** Tests for `OutputUtils.createJar`: creating jars from empty, flat and
  * nested class-file inputs, and checking the produced entries are sorted.
  * All fixtures live in temporary directories that `IO.withTemporaryDirectory`
  * removes afterwards.
  */
class JarCreationSpec extends WordSpec with MustMatchers {
  "JarCreationWithoutClasses" should {
    "succeed when input classes are not provided" in {
      IO.withTemporaryDirectory { tempInputDir =>
        // No input files at all: the jar is created but contains no classes.
        val filePaths = new mutable.TreeSet[Path]()
        IO.withTemporaryDirectory { tempOutputDir =>
          val jarOutputPath = Paths.get(tempOutputDir.toString, "spec-empty-output.jar")
          OutputUtils.createJar(tempInputDir.toString, filePaths, jarOutputPath, System.currentTimeMillis())
          OutputUtils.existsClass(jarOutputPath, "NonExistent.class") must be(false)
        }
      }
    }
  }
  "JarCreationWithClasses" should {
    "succeed when input classes are provided" in {
      IO.withTemporaryDirectory { tempInputDir =>
        val tempFile = File.createTempFile("Temp", ".class", tempInputDir)
        val filePaths = mutable.TreeSet(tempFile.toPath)
        IO.withTemporaryDirectory { tempOutputDir =>
          val jarOutputPath = Paths.get(tempOutputDir.toString, "spec-valid-output.jar")
          OutputUtils.createJar(tempInputDir.toString, filePaths, jarOutputPath, System.currentTimeMillis())
          // Entries are stored relative to the input dir, so the absolute
          // path must not be found while the relativized one must be.
          OutputUtils.existsClass(jarOutputPath, tempFile.toString) must be(false)
          OutputUtils.existsClass(jarOutputPath, OutputUtils.relativize(tempInputDir.toString, tempFile.toPath)) must be(true)
        }
      }
    }
    "should result in a sorted jar" in {
      IO.withTemporaryDirectory { tempInputDir =>
        // 101 files with random-suffixed names, inserted via a sorted set.
        val filePaths =
          mutable.TreeSet(
            (0 to 100).map { _ => File.createTempFile("Temp", ".class", tempInputDir).toPath}: _*
          )
        IO.withTemporaryDirectory { tempOutputDir =>
          val jarOutputPath = Paths.get(tempOutputDir.toString, "output.jar")
          OutputUtils.createJar(tempInputDir.toString, filePaths, jarOutputPath, System.currentTimeMillis())
          isSortedJar(jarOutputPath) must be(true)
        }
      }
    }
  }
  /** True iff the jar's entry names are in strictly ascending order. */
  def isSortedJar(jarPath: Path): Boolean = {
    val is = new JarInputStream(Files.newInputStream(jarPath))
    try {
      var nextEntry: JarEntry = null
      // An impossible name for a jar entry.
      var previousName = ""
      while ({nextEntry = is.getNextJarEntry(); nextEntry ne null}) {
        if (nextEntry.getName() <= previousName) {
          return false
        }
        previousName = nextEntry.getName()
      }
      return true
    } finally {
      is.close()
    }
  }
}
| tdyas/pants | tests/scala/org/pantsbuild/zinc/compiler/JarCreationSpec.scala | Scala | apache-2.0 | 3,781 |
/*
* Copyright (C) 2012 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.tools.math
import org.openmole.core.exception.UserBadDataError
import math._
/** Basic descriptive statistics and quantile approximations. */
object Stat {

  /** Median of `sequence`.
    *
    * Returns `Double.NaN` when the sequence is empty, or when it contains at
    * least one NaN (detected by comparing the size of the NaN-filtered series
    * against the original size).
    */
  def median(sequence: Iterable[Double]): Double = {
    val sortedSerie = sequence.toArray.filterNot(_.isNaN).sorted
    val size = sortedSerie.size
    if (size == 0) Double.NaN // empty input previously threw IndexOutOfBoundsException
    else if (size == sequence.size)
      if (size % 2 == 0) (sortedSerie(size / 2) + sortedSerie((size / 2) - 1)) / 2
      else sortedSerie(size / 2)
    else Double.NaN
  }

  /** Median absolute deviation (MAD), a robust dispersion estimator.
    * NaN whenever `median(sequence)` is NaN (empty input or NaN values). */
  def medianAbsoluteDeviation(sequence: Iterable[Double]): Double = {
    val m = median(sequence)
    median(sequence.map(v => math.abs(v - m)))
  }

  /** Arithmetic mean; NaN for an empty sequence (0.0 / 0). */
  def average(sequence: Iterable[Double]): Double = sequence.sum / sequence.size

  /** Population variance: mean of the squared deviations from the mean. */
  def meanSquareError(sequence: Iterable[Double]): Double = {
    val avg = average(sequence)
    average(sequence.map(v => math.pow(v - avg, 2)))
  }

  /** Population standard deviation. */
  def rootMeanSquareError(sequence: Iterable[Double]): Double = sqrt(meanSquareError(sequence))

  /** p-th quantile of the standard normal distribution (inverse CDF).
    *
    * Given p, returns an approximation of the x satisfying p = P{Z <= x},
    * where Z follows the standard normal distribution. Uses Peter J. Acklam's
    * rational minimax approximation; the relative error has absolute value
    * below 1.15e-9.
    *
    * @param p the p-th quantile, e.g., .95 (95%)
    * @throws UserBadDataError if `p` is outside [0, 1]
    */
  def normalInv(p: Double = .95): Double = {
    if (p < 0 || p > 1) throw new UserBadDataError("parameter p must be in the range [0, 1]")

    // Coefficients in rational approximations
    val a = Array(-3.969683028665376e+01, 2.209460984245205e+02,
      -2.759285104469687e+02, 1.383577518672690e+02,
      -3.066479806614716e+01, 2.506628277459239e+00)
    val b = Array(-5.447609879822406e+01, 1.615858368580409e+02,
      -1.556989798598866e+02, 6.680131188771972e+01,
      -1.328068155288572e+01)
    val c = Array(-7.784894002430293e-03, -3.223964580411365e-01,
      -2.400758277161838e+00, -2.549732539343734e+00,
      4.374664141464968e+00, 2.938163982698783e+00)
    val d = Array(7.784695709041462e-03, 3.224671290700398e-01,
      2.445134137142996e+00, 3.754408661907416e+00)

    // Break-points separating the lower/central/upper approximation regions.
    val plow = 0.02425
    val phigh = 1 - plow

    if (p < plow) {
      // Rational approximation for the lower region.
      val q = sqrt(-2 * log(p))
      (((((c(0) * q + c(1)) * q + c(2)) * q + c(3)) * q + c(4)) * q + c(5)) /
        ((((d(0) * q + d(1)) * q + d(2)) * q + d(3)) * q + 1)
    } else if (phigh < p) {
      // Rational approximation for the upper region (by symmetry).
      val q = sqrt(-2 * log(1 - p))
      -(((((c(0) * q + c(1)) * q + c(2)) * q + c(3)) * q + c(4)) * q + c(5)) /
        ((((d(0) * q + d(1)) * q + d(2)) * q + d(3)) * q + 1)
    } else {
      // Rational approximation for the central region.
      val q = p - 0.5
      val r = q * q
      (((((a(0) * r + a(1)) * r + a(2)) * r + a(3)) * r + a(4)) * r + a(5)) * q /
        (((((b(0) * r + b(1)) * r + b(2)) * r + b(3)) * r + b(4)) * r + 1)
    }
  }

  /** Upper p-th quantile of Student's t distribution with `df` degrees of
    * freedom (the t for which the area under the curve from t to +infinity
    * equals p). Transliteration of the 'STUDTP' function from G. S. Fishman,
    * "Principles of Discrete Event Simulation" (Wiley, 1978), via
    * K. Pawlikowski.
    *
    * @param p  the p-th quantile, e.g., .95 (95%)
    * @param df the degrees of freedom, must be positive
    * @throws UserBadDataError if `p` is outside [0, 1] or `df` is not positive
    */
  def studentTInv(p: Double = .95, df: Int = 10): Double = {
    if (p < 0 || p > 1) throw new UserBadDataError("parameter p must be in the range [0, 1]")
    if (df <= 0) throw new UserBadDataError("parameter df must be positive")
    val z1 = abs(normalInv(p))
    val z2 = z1 * z1

    // Correction terms applied iteratively over df (see the STUDTP source).
    val h = Array[Double](
      0.25 * z1 * (z2 + 1.0),
      0.010416667 * z1 * ((5.0 * z2 + 16.0) * z2 + 3.0),
      0.002604167 * z1 * (((3.0 * z2 + 19.0) * z2 + 17.0) * z2 - 15.0),
      0.000010851 * z1 * ((((79.0 * z2 + 776.0) * z2 + 1482.0) * z2 - 1920.0) * z2 - 945.0))

    var x = 0.0
    for (i <- h.length - 1 to 0 by -1) x = (x + h(i)) / df
    if (p >= 0.5) z1 + x else -(z1 + x)
  }

  /** Half-width of the two-sided confidence interval of the mean at
    * confidence level `p`, based on the Student t quantile. Returns 0 when
    * there are fewer than two observations.
    *
    * @param p the confidence level, e.g., .95 (95%)
    */
  def confidenceInterval(sequence: Iterable[Double], p: Double = .95): Double = {
    val n = sequence.size
    val df = n - 1 // degrees of freedom
    if (df < 1) 0.0 // must have at least 2 observations
    else {
      val pp = 1 - (1 - p) / 2.0 // e.g., .95 --> .975 (two tails)
      val t = studentTInv(pp, df)
      t * rootMeanSquareError(sequence) / sqrt(n.toDouble)
    }
  }
}
| ISCPIF/PSEExperiments | openmole-src/openmole/core/org.openmole.core.tools/src/main/scala/org/openmole/core/tools/math/Stat.scala | Scala | agpl-3.0 | 6,497 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.io.File
import com.google.common.io.Files
import org.apache.spark.util.Utils
import org.scalatest.{BeforeAndAfterAll, FunSuite}
/** Tests for `SSLOptions.parse`: reading `spark.ssl.*` settings from a
  * `SparkConf`, inheriting namespace settings from defaults, and overriding
  * defaults per namespace (here `spark.ui.ssl.*`). The keystore/truststore
  * fixtures are loaded from the test classpath.
  */
class SSLOptionsSuite extends FunSuite with BeforeAndAfterAll {

  test("test resolving property file as spark conf ") {
    val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath
    val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath

    val conf = new SparkConf
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ssl.keyStore", keyStorePath)
    conf.set("spark.ssl.keyStorePassword", "password")
    conf.set("spark.ssl.keyPassword", "password")
    conf.set("spark.ssl.trustStore", trustStorePath)
    conf.set("spark.ssl.trustStorePassword", "password")
    conf.set("spark.ssl.enabledAlgorithms", "TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
    conf.set("spark.ssl.protocol", "SSLv3")

    val opts = SSLOptions.parse(conf, "spark.ssl")

    // Every field set above must round-trip through the parsed options.
    assert(opts.enabled === true)
    assert(opts.trustStore.isDefined === true)
    assert(opts.trustStore.get.getName === "truststore")
    assert(opts.trustStore.get.getAbsolutePath === trustStorePath)
    assert(opts.keyStore.isDefined === true)
    assert(opts.keyStore.get.getName === "keystore")
    assert(opts.keyStore.get.getAbsolutePath === keyStorePath)
    assert(opts.trustStorePassword === Some("password"))
    assert(opts.keyStorePassword === Some("password"))
    assert(opts.keyPassword === Some("password"))
    assert(opts.protocol === Some("SSLv3"))
    assert(opts.enabledAlgorithms === Set("TLS_RSA_WITH_AES_128_CBC_SHA", "TLS_RSA_WITH_AES_256_CBC_SHA"))
  }

  test("test resolving property with defaults specified ") {
    val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath
    val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath

    val conf = new SparkConf
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ssl.keyStore", keyStorePath)
    conf.set("spark.ssl.keyStorePassword", "password")
    conf.set("spark.ssl.keyPassword", "password")
    conf.set("spark.ssl.trustStore", trustStorePath)
    conf.set("spark.ssl.trustStorePassword", "password")
    conf.set("spark.ssl.enabledAlgorithms", "TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
    conf.set("spark.ssl.protocol", "SSLv3")

    // Nothing is set under spark.ui.ssl.*, so all values fall through from
    // the spark.ssl defaults.
    val defaultOpts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
    val opts = SSLOptions.parse(conf, "spark.ui.ssl", defaults = Some(defaultOpts))

    assert(opts.enabled === true)
    assert(opts.trustStore.isDefined === true)
    assert(opts.trustStore.get.getName === "truststore")
    assert(opts.trustStore.get.getAbsolutePath === trustStorePath)
    assert(opts.keyStore.isDefined === true)
    assert(opts.keyStore.get.getName === "keystore")
    assert(opts.keyStore.get.getAbsolutePath === keyStorePath)
    assert(opts.trustStorePassword === Some("password"))
    assert(opts.keyStorePassword === Some("password"))
    assert(opts.keyPassword === Some("password"))
    assert(opts.protocol === Some("SSLv3"))
    assert(opts.enabledAlgorithms === Set("TLS_RSA_WITH_AES_128_CBC_SHA", "TLS_RSA_WITH_AES_256_CBC_SHA"))
  }

  test("test whether defaults can be overridden ") {
    val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath
    val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath

    val conf = new SparkConf
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ui.ssl.enabled", "false")
    conf.set("spark.ssl.keyStore", keyStorePath)
    conf.set("spark.ssl.keyStorePassword", "password")
    conf.set("spark.ui.ssl.keyStorePassword", "12345")
    conf.set("spark.ssl.keyPassword", "password")
    conf.set("spark.ssl.trustStore", trustStorePath)
    conf.set("spark.ssl.trustStorePassword", "password")
    conf.set("spark.ssl.enabledAlgorithms", "TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
    conf.set("spark.ui.ssl.enabledAlgorithms", "ABC, DEF")
    conf.set("spark.ssl.protocol", "SSLv3")

    val defaultOpts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
    val opts = SSLOptions.parse(conf, "spark.ui.ssl", defaults = Some(defaultOpts))

    // spark.ui.ssl.* keys take precedence; unset ones inherit the defaults.
    assert(opts.enabled === false)
    assert(opts.trustStore.isDefined === true)
    assert(opts.trustStore.get.getName === "truststore")
    assert(opts.trustStore.get.getAbsolutePath === trustStorePath)
    assert(opts.keyStore.isDefined === true)
    assert(opts.keyStore.get.getName === "keystore")
    assert(opts.keyStore.get.getAbsolutePath === keyStorePath)
    assert(opts.trustStorePassword === Some("password"))
    assert(opts.keyStorePassword === Some("12345"))
    assert(opts.keyPassword === Some("password"))
    assert(opts.protocol === Some("SSLv3"))
    assert(opts.enabledAlgorithms === Set("ABC", "DEF"))
  }

}
| Dax1n/spark-core | core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala | Scala | apache-2.0 | 5,776 |
package scala.meta.internal.parsers
import scala.annotation.tailrec
import scala.meta.Dialect
import scala.meta.classifiers._
import scala.meta.tokens.Token
import scala.meta.tokens.Token._
import scala.meta.tokens.Tokens
object LazyTokenIterator {
  /** Creates an iterator positioned at the first scanner token, with an
    * empty separator-region stack and no previous token (prevPos = -1). */
  def apply(scannerTokens: ScannerTokens)(implicit dialect: Dialect): LazyTokenIterator =
    new LazyTokenIterator(
      scannerTokens,
      Nil,
      TokenRef(scannerTokens(0), 0, 1, 0),
      -1
    )
  // Snapshot of the iterator's current token:
  //   token    - the token itself (possibly a synthetic Indent/Outdent)
  //   pos      - index of the token in the scanner-token array
  //   nextPos  - index to resume scanning from on the next advance
  //   pointPos - index used as the token's anchor point; for synthetic
  //              tokens this may differ from `pos` (see findOutdentPos)
  private case class TokenRef(
      token: Token,
      pos: Int,
      nextPos: Int,
      pointPos: Int
  )
  // Synthesizes an indentation token covering the same offsets as `token`.
  private def mkIndentToken(token: Token): Token =
    new Indentation.Indent(token.input, token.dialect, token.start, token.end)
  private def mkOutdentToken(token: Token): Token =
    new Indentation.Outdent(token.input, token.dialect, token.start, token.end)
  // Number of leading whitespace (space/tab) characters on the *last* line
  // of the comment's text; 0 when the comment text has no trailing indent.
  private def multilineCommentIndent(t: Comment): Int = {
    @tailrec
    def loop(idx: Int, indent: Int, isAfterNewline: Boolean): Int = {
      if (idx == t.value.length) indent
      else {
        t.value.charAt(idx) match {
          case '\\n' => loop(idx + 1, 0, isAfterNewline = true)
          case ' ' | '\\t' if isAfterNewline => loop(idx + 1, indent + 1, isAfterNewline)
          case _ => loop(idx + 1, indent, isAfterNewline = false)
        }
      }
    }
    loop(0, 0, false)
  }
}
private[parsers] class LazyTokenIterator private (
private val scannerTokens: ScannerTokens,
private var sepRegions: List[SepRegion],
private var curr: LazyTokenIterator.TokenRef,
private var prevPos: Int
)(implicit dialect: Dialect)
extends TokenIterator {
import LazyTokenIterator._
import scannerTokens.Classifiers._
import scannerTokens.Implicits._
  /** Advances to the next token (which may be synthetic), updating the
    * separator-region stack and remembering the previous point position. */
  override def next(): Unit = {
    val (newSepRegions, newTokenRef) = nextToken(curr.pos, curr.nextPos, sepRegions)
    prevPos = curr.pointPos
    curr = newTokenRef
    sepRegions = newSepRegions
  }
  /** Shared machinery for observeIndented/observeIndentedEnum: when
    * significant indentation is enabled and the expected indentation exceeds
    * the innermost indented region's, rewrites the region stack via `f` and
    * substitutes a synthetic Indent token at the current position.
    *
    * @return true iff a synthetic Indent was produced
    */
  private def observeIndented0(f: (Int, List[SepRegion]) => List[SepRegion]): Boolean = {
    if (!dialect.allowSignificantIndentation) false
    else {
      // Indentation of the innermost indented region (0 at top level).
      val existingIndent = sepRegions.find(_.isIndented).fold(0)(_.indent)
      val (expected, pointPos) = countIndentAndNewlineIndex(tokenPos)
      if (expected > existingIndent) {
        sepRegions = f(expected, sepRegions)
        val indent = mkIndentToken(token)
        // The Indent replaces the current position; nextPos stays at curr.pos
        // so the real token is re-examined on the next advance.
        curr = TokenRef(indent, curr.pos, curr.pos, pointPos)
        true
      } else false
    }
  }
  /**
   * Deals with different rules for indentation after a self-type arrow:
   * if the current token is a synthetic Indent, consume it and drop the
   * region it (or a subsequent `case`-in-enum arrow) pushed, while keeping
   * any other region that `next()` may have added.
   */
  def undoIndent(): Unit = {
    sepRegions match {
      case region :: others if region.isIndented && curr.token.is[Indentation.Indent] =>
        next()
        sepRegions = sepRegions match {
          // deal with region added by `case` in enum after self type
          case RegionArrow :: _ => others
          // if no region was added
          case `region` :: _ => others
          // keep any added region in `next()`
          case head :: _ => head :: others
          case _ => sepRegions
        }
      case _ =>
    }
  }
  /** Tries to open a regular indentation region at the current position;
    * first undoes a region just pushed by `(`, `{` or `enum` so the new
    * RegionIndent replaces it rather than nesting inside it.
    *
    * @return true iff a synthetic Indent was produced
    */
  def observeIndented(): Boolean = {
    observeIndented0 { (i, prev) =>
      val undoRegionChange =
        prev.headOption match {
          case Some(RegionParen(_)) if token.is[LeftParen] => prev.tail
          case Some(RegionEnumArtificialMark) if token.is[KwEnum] => prev.tail
          case Some(_: RegionBrace) if token.is[LeftBrace] => prev.tail
          case _ => prev
        }
      RegionIndent(i, false) :: undoRegionChange
    }
  }
def observeIndentedEnum(): Boolean = {
observeIndented0((i, prev) => {
val nextPrev = prev match {
case RegionArrow :: RegionEnumArtificialMark :: other => other
case RegionEnumArtificialMark :: other => other
case x => x
}
RegionIndentEnum(i) :: nextPrev
})
}
def currentIndentation: Int = {
val foundIndentation = countIndent(curr.pointPos)
if (foundIndentation < 0)
// empty sepregions means we are at toplevel
sepRegions.headOption.fold(0)(_.indent)
else
foundIndentation
}
def observeOutdented(): Boolean = {
if (!dialect.allowSignificantIndentation) false
else {
def canEndIndentation(token: Token) = token.is[KwElse] || token.is[KwThen] ||
token.is[KwDo] || token.is[KwCatch] || token.is[KwFinally] || token.is[KwYield] ||
token.is[KwMatch]
sepRegions match {
case region :: tail if region.isIndented && canEndIndentation(curr.token) =>
sepRegions = tail
val outdent = mkOutdentToken(curr.token)
val outdentPos = findOutdentPos(prevPos, curr.pos, region)
curr = TokenRef(outdent, curr.pos, curr.pos, outdentPos)
true
case _ => false
}
}
}
private def findOutdentPos(prevPos: Int, currPos: Int, region: SepRegion): Int = {
val outdent = region.indent
@tailrec
def iter(i: Int, pos: Int, indent: Int): Int = {
if (i >= currPos) {
if (pos < currPos) pos else currPos - 1
} else {
scannerTokens(i) match {
case _: EOL =>
iter(i + 1, if (pos == prevPos) i else pos, 0)
case _: HSpace if indent >= 0 =>
iter(i + 1, pos, indent + 1)
case _: Whitespace =>
iter(i + 1, pos, indent)
case _: Comment if indent < 0 || outdent <= indent =>
iter(i + 1, i + 1, -1)
case _ => pos
}
}
}
val iterPos = 1 + prevPos
if (iterPos < currPos) iter(iterPos, prevPos, -1)
else if (scannerTokens(currPos).is[EOF]) currPos
else prevPos
}
@tailrec
private def nextToken(
prevPos: Int,
currPos: Int,
sepRegions: List[SepRegion]
): (List[SepRegion], TokenRef) = {
val prev = if (prevPos >= 0) scannerTokens(prevPos) else null
val curr = scannerTokens(currPos)
val (nextPos, next) = {
@tailrec
def iter(i: Int): (Int, Token) =
if (i == scannerTokens.length) (-1, null)
else
scannerTokens(i) match {
case _: Trivia => iter(i + 1)
case t => (i, t)
}
iter(currPos + 1)
}
def isTrailingComma: Boolean =
dialect.allowTrailingCommas &&
curr.is[Comma] &&
next.is[CloseDelim] &&
next.pos.startLine > curr.pos.endLine
def mkIndent(pointPos: Int): TokenRef =
TokenRef(mkIndentToken(curr), prevPos, currPos, pointPos)
def mkOutdent(region: SepRegion): TokenRef =
mkOutdentTo(region, currPos)
def mkOutdentTo(region: SepRegion, maxPointPos: Int): TokenRef = {
val pointPos = findOutdentPos(prevPos, maxPointPos, region)
TokenRef(mkOutdentToken(curr), prevPos, currPos, pointPos)
}
def currRef: TokenRef = TokenRef(curr, currPos, currPos + 1, currPos)
def indentationWithinParenRegion: Option[SepRegion] = {
def isWithinParenRegion =
sepRegions.tail
.collectFirst {
case RegionParen(_) => true
case other if !other.isIndented => false
}
.contains(true)
sepRegions.headOption.filter(_.isIndented && isWithinParenRegion)
}
def nonTrivial = curr match {
case _: LeftParen => (RegionParen(false) :: sepRegions, currRef)
case _: LeftBracket => (RegionBracket :: sepRegions, currRef)
case _: Comma =>
indentationWithinParenRegion.fold {
(sepRegions, currRef)
} { region =>
(sepRegions.tail, mkOutdent(region))
}
case _: LeftBrace =>
val indentInBrace = if (isAheadNewLine(currPos)) countIndent(nextPos) else -1
// After encountering keyword Enum we add artificial '{' on top of stack.
// Then always after Enum next token is '{'. On token '{' we check if top of stack is '{'
// (which in case of enum is always true) and replace it with '$'.
// Now if we have token 'case' and top of stack is '$' we know it is Enum-case.
// In any other case it is 'match-case' or 'try-case'
val nextRegions =
if (sepRegions.headOption.contains(RegionEnumArtificialMark))
RegionEnum(indentInBrace) :: sepRegions.tail
else {
val indentOnArrow = !(prev.is[KwMatch] || prev.is[KwCatch])
RegionBrace(indentInBrace, indentOnArrow) :: sepRegions
}
(nextRegions, currRef)
case _: KwEnum =>
(RegionEnumArtificialMark :: sepRegions, currRef)
case CaseIntro() =>
val nextRegions = sepRegions.headOption match {
case Some(_: RegionEnum | _: RegionIndentEnum) => sepRegions
case Some(_: RegionCase) => RegionArrow :: sepRegions.tail
case _ => RegionArrow :: sepRegions
}
(nextRegions, currRef)
case _: RightBrace =>
// produce outdent for every indented region before RegionBrace|RegionEnum
@tailrec
def nextRegions(in: List[SepRegion]): (List[SepRegion], TokenRef) = {
in match {
case (_: RegionBrace | _: RegionEnum) :: xs =>
(xs, currRef)
case x :: xs if x.isIndented =>
(xs, mkOutdent(x))
case _ :: xs =>
nextRegions(xs)
case Nil =>
(Nil, currRef)
}
}
nextRegions(sepRegions)
case _: RightBracket =>
val nextRegions =
if (sepRegions.headOption.contains(RegionBracket)) sepRegions.tail
else sepRegions
(nextRegions, currRef)
case _: EOF =>
sepRegions match {
case x :: xs if x.isIndented => (xs, mkOutdent(x))
case other => (other, currRef)
}
case _: RightParen =>
sepRegions match {
case x :: xs if x.isIndented => (xs, mkOutdent(x))
case RegionParen(_) :: xs => (xs, currRef)
case _ => (sepRegions, currRef)
}
case _: LeftArrow =>
val nextRegions =
if (sepRegions.headOption.contains(RegionArrow)) sepRegions.tail
else sepRegions
(nextRegions, currRef)
case _: RightArrow =>
val nextRegions =
if (sepRegions.headOption.contains(RegionArrow)) {
// add case region for `match {` to calculate proper indentation
// for statements in indentation dialects
val newRegions = sepRegions.tail
val shouldNotProduceIndentation =
!dialect.allowSignificantIndentation ||
newRegions.headOption.exists(!_.indentOnArrow)
lazy val indentInCase = if (isAheadNewLine(currPos)) countIndent(nextPos) else -1
if (newRegions.nonEmpty && shouldNotProduceIndentation && indentInCase > 0)
RegionCase(indentInCase) :: newRegions
else
newRegions
} else sepRegions
(nextRegions, currRef)
case _: KwFor if dialect.allowSignificantIndentation =>
val updatedSepRegions = sepRegions match {
case RegionParen(_) :: tail => RegionParen(true) :: tail
case _ => sepRegions
}
(updatedSepRegions, currRef)
case _ =>
(sepRegions, currRef)
}
if (isTrailingComma) nextToken(currPos, currPos + 1, sepRegions)
else if (curr.isNot[Trivia]) nonTrivial
else {
var i = prevPos + 1
var lastNewlinePos = -1
var newlineStreak = false
var newlines = false
var hasMultilineComment = false
while (i < nextPos) {
val token = scannerTokens(i)
if (token.is[LF] || token.is[FF]) {
lastNewlinePos = i
if (newlineStreak) newlines = true
newlineStreak = true
}
hasMultilineComment |= token.is[MultilineComment]
newlineStreak &= token.is[Whitespace]
i += 1
}
def lastWhitespaceToken = {
val token = scannerTokens(lastNewlinePos)
val out =
if (newlines) LFLF(token.input, token.dialect, token.start, token.end) else token
TokenRef(out, lastNewlinePos, lastNewlinePos + 1, lastNewlinePos)
}
def canProduceLF: Boolean = {
lastNewlinePos != -1 &&
prev != null && (prev.is[CanEndStat] || token.is[Indentation.Outdent]) &&
next != null && next.isNot[CantStartStat] && sepRegions.headOption.forall {
case _: RegionBrace | _: RegionCase | _: RegionEnum => true
case _: RegionIndent | _: RegionIndentEnum => true
case x: RegionParen => x.canProduceLF
case _ => false
}
}
def getIfCanProduceLF =
if (canProduceLF) Some((sepRegions, lastWhitespaceToken))
else None
val resOpt = if (dialect.allowSignificantIndentation) {
val hasLF = lastNewlinePos != -1 || hasMultilineComment
if (hasLF && next != null && !next.isLeadingInfixOperator) {
val nextIndent = countIndent(nextPos)
/**
* Outdent is needed in following cases:
* - If indentation on next line is less than current and previous token can't continue
* expr on the next line
* - At the end of `match` block even if indentation level is not changed. Example:
* ```
* x match
* case 1 =>
* case 2 =>
* // <- produce outdent
* foo()
* ```
*/
def getOutdentIfNeeded() = sepRegions.headOption
.filter { r =>
r.isIndented && {
// need to check prev.prev in case of `end match`
if (nextIndent < r.indent)
prev.isNot[CanContinueOnNextLine] || prev.prev.is[soft.KwEnd]
else r.closeOnNonCase && next.isNot[KwCase] && nextIndent == r.indent
}
}
.map { region => (sepRegions.tail, mkOutdentTo(region, nextPos)) }
/**
* Indent is needed in the following cases:
* - Indetation on new line is greater and previous token can start indentation and
* token can start indentation
* - Indentation on the new line is the same and the next token is the first `case`
* clause in match. Example:
* ```
* x match // <- mk indent
* case 1 =>
* ```
*
* Notice: Indentation after `:` isn't hadled here. It's produced manually on the parser
* level.
*/
def getIndentIfNeeded = {
val ok = nextIndent >= 0 && {
val (currIndent, indentOnArrow) =
sepRegions.headOption.fold((0, true))(r => (r.indent, r.indentOnArrow))
// !next.is[RightBrace] - braces can sometimes have -1 and we can start indent on }
if (nextIndent > currIndent && prev.is[RightArrow]) {
indentOnArrow && next.isNot[RightBrace] && next.isNot[EndMarkerIntro]
} else if (nextIndent > currIndent) {
// if does not work with indentation in pattern matches
val shouldNotIndentIf =
prev.is[KwIf] && sepRegions.headOption.contains(RegionArrow)
!shouldNotIndentIf && prev.is[CanStartIndent] && !next.is[RightBrace]
} else
// always add indent for indented `match` block
// check the previous token to avoid infinity loop
((prev.is[KwMatch] || prev.is[KwCatch]) && !prev.prev.is[soft.KwEnd]) &&
next.is[KwCase] && token.isNot[Indentation.Indent]
}
if (ok) Some {
val region = RegionIndent(nextIndent, prev.is[KwMatch])
(region :: sepRegions, mkIndent(lastNewlinePos))
}
else None
}
getOutdentIfNeeded()
.orElse { getIndentIfNeeded }
.orElse { getIfCanProduceLF }
} else None
} else {
getIfCanProduceLF
}
resOpt match {
case Some(res) => res
case _ => nextToken(prevPos, nextPos, sepRegions)
}
}
}
override def prevTokenPos: Int = prevPos
override def tokenPos: Int = curr.pointPos
override def token: Token = curr.token
override def fork: TokenIterator =
new LazyTokenIterator(scannerTokens, sepRegions, curr, prevPos)
/**
* When token on `tokenPosition` is not a whitespace and is a first non-whitespace character in a
* current line then a result is a number of whitespace characters counted. Otherwise
* {{{(-1, -1)}}} is returned.
*
* Returns a tuple2 where:
* - first value is indentation level
* - second is `LF` token index
*/
private def countIndentAndNewlineIndex(tokenPosition: Int): (Int, Int) = {
@tailrec
def countIndentInternal(pos: Int, acc: Int = 0): (Int, Int) = {
if (pos < 0) (acc, pos)
else {
val token = scannerTokens(pos)
token match {
case _: EOL | _: BOF => (acc, pos)
case AsMultilineComment(c) => (multilineCommentIndent(c), pos)
case _: Comment => countIndentInternal(pos - 1)
case _: HSpace => countIndentInternal(pos - 1, acc + 1)
case _ => (-1, -1)
}
}
}
if (scannerTokens(tokenPosition).is[Whitespace]) (-1, -1)
else countIndentInternal(tokenPosition - 1)
}
private def countIndent(tokenPosition: Int): Int =
countIndentAndNewlineIndex(tokenPosition)._1
@tailrec
private def isAheadNewLine(currentPosition: Int): Boolean = {
val nextPos = currentPosition + 1
if (nextPos >= scannerTokens.length) false
else if (scannerTokens(nextPos).is[LF]) true
else scannerTokens(nextPos).is[Trivia] && isAheadNewLine(nextPos)
}
}
| scalameta/scalameta | scalameta/parsers/shared/src/main/scala/scala/meta/internal/parsers/LazyTokenIterator.scala | Scala | bsd-3-clause | 17,824 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.storage
import java.net.URLEncoder
import java.nio.charset.StandardCharsets.UTF_8
import javax.servlet.http.HttpServletRequest
import scala.xml.{Node, Unparsed}
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1.{ExecutorSummary, RDDDataDistribution, RDDPartitionInfo}
import org.apache.spark.ui._
import org.apache.spark.util.Utils
/** Page showing storage details for a given RDD */
private[ui] class RDDPage(parent: SparkUITab, store: AppStatusStore) extends WebUIPage("rdd") {
  /**
   * Renders the RDD storage detail page: a summary of the RDD's storage,
   * a per-executor data-distribution table, and a paged/sortable table of
   * the RDD's cached blocks.
   *
   * Expected request parameters: `id` (required RDD id) plus optional
   * `block.page`, `block.sort`, `block.desc`, `block.pageSize` controlling
   * the block table.
   */
  def render(request: HttpServletRequest): Seq[Node] = {
    val parameterId = request.getParameter("id")
    require(parameterId != null && parameterId.nonEmpty, "Missing id parameter")
    // Pagination/sorting state for the block table, all passed as query parameters.
    val parameterBlockPage = request.getParameter("block.page")
    val parameterBlockSortColumn = request.getParameter("block.sort")
    val parameterBlockSortDesc = request.getParameter("block.desc")
    val parameterBlockPageSize = request.getParameter("block.pageSize")
    val blockPage = Option(parameterBlockPage).map(_.toInt).getOrElse(1)
    val blockSortColumn = Option(parameterBlockSortColumn).getOrElse("Block Name")
    val blockSortDesc = Option(parameterBlockSortDesc).map(_.toBoolean).getOrElse(false)
    val blockPageSize = Option(parameterBlockPageSize).map(_.toInt).getOrElse(100)
    val rddId = parameterId.toInt
    val rddStorageInfo = try {
      store.rdd(rddId)
    } catch {
      case _: NoSuchElementException =>
        // Rather than crashing, render an "RDD Not Found" page
        return UIUtils.headerSparkPage(request, "RDD Not Found", Seq.empty[Node], parent)
    }
    // Worker table
    val workerTable = UIUtils.listingTable(workerHeader, workerRow,
      rddStorageInfo.dataDistribution.get, id = Some("rdd-storage-by-worker-table"))
    // Block table; invalid sort columns / page numbers surface as an inline error box.
    val blockTableHTML = try {
      val _blockTable = new BlockPagedTable(
        UIUtils.prependBaseUri(request, parent.basePath) + s"/storage/rdd/?id=${rddId}",
        rddStorageInfo.partitions.get,
        blockPageSize,
        blockSortColumn,
        blockSortDesc,
        store.executorList(true))
      _blockTable.table(blockPage)
    } catch {
      case e @ (_ : IllegalArgumentException | _ : IndexOutOfBoundsException) =>
        <div class="alert alert-error">{e.getMessage}</div>
    }
    // After a sort click, scroll the browser back down to the block table.
    val jsForScrollingDownToBlockTable =
      <script>
        {
          Unparsed {
            """
              |$(function() {
              |  if (/.*&block.sort=.*$/.test(location.search)) {
              |    var topOffset = $("#blocks-section").offset().top;
              |    $("html,body").animate({scrollTop: topOffset}, 200);
              |  }
              |});
            """.stripMargin
          }
        }
      </script>
    val content =
      <div class="row">
        <div class="col-12">
          <ul class="list-unstyled">
            <li>
              <strong>Storage Level:</strong>
              {rddStorageInfo.storageLevel}
            </li>
            <li>
              <strong>Cached Partitions:</strong>
              {rddStorageInfo.numCachedPartitions}
            </li>
            <li>
              <strong>Total Partitions:</strong>
              {rddStorageInfo.numPartitions}
            </li>
            <li>
              <strong>Memory Size:</strong>
              {Utils.bytesToString(rddStorageInfo.memoryUsed)}
            </li>
            <li>
              <strong>Disk Size:</strong>
              {Utils.bytesToString(rddStorageInfo.diskUsed)}
            </li>
          </ul>
        </div>
      </div>
      <div class="row">
        <div class="col-12">
          <h4>
            Data Distribution on {rddStorageInfo.dataDistribution.map(_.size).getOrElse(0)}
            Executors
          </h4>
          {workerTable}
        </div>
      </div>
      <div>
        <h4 id="blocks-section">
          {rddStorageInfo.partitions.map(_.size).getOrElse(0)} Partitions
        </h4>
        {blockTableHTML ++ jsForScrollingDownToBlockTable}
      </div>;
    UIUtils.headerSparkPage(
      request, "RDD Storage Info for " + rddStorageInfo.name, content, parent)
  }
  /** Header fields for the worker table */
  private def workerHeader = Seq(
    "Host",
    "On Heap Memory Usage",
    "Off Heap Memory Usage",
    "Disk Usage")
  /** Render an HTML row representing a worker */
  private def workerRow(worker: RDDDataDistribution): Seq[Node] = {
    <tr>
      <td>{worker.address}</td>
      <td>
        {Utils.bytesToString(worker.onHeapMemoryUsed.getOrElse(0L))}
        ({Utils.bytesToString(worker.onHeapMemoryRemaining.getOrElse(0L))} Remaining)
      </td>
      <td>
        {Utils.bytesToString(worker.offHeapMemoryUsed.getOrElse(0L))}
        ({Utils.bytesToString(worker.offHeapMemoryRemaining.getOrElse(0L))} Remaining)
      </td>
      <td>{Utils.bytesToString(worker.diskUsed)}</td>
    </tr>
  }
}
/**
 * One row of the paged "blocks" table on the RDD storage page.
 *
 * @param blockName name of the cached block
 * @param storageLevel storage level the block is cached at
 * @param memoryUsed bytes of memory used by the block
 * @param diskUsed bytes of disk used by the block
 * @param executors space-separated, sorted executor addresses holding the block
 */
private[ui] case class BlockTableRowData(
    blockName: String,
    storageLevel: String,
    memoryUsed: Long,
    diskUsed: Long,
    executors: String)
/**
 * Data source backing the paged block table: converts partition infos into
 * table rows once, pre-sorted by the requested column and direction.
 */
private[ui] class BlockDataSource(
    rddPartitions: Seq[RDDPartitionInfo],
    pageSize: Int,
    sortColumn: String,
    desc: Boolean,
    executorIdToAddress: Map[String, String]) extends PagedDataSource[BlockTableRowData](pageSize) {
  /** All rows, converted once and sorted eagerly at construction time. */
  private val sortedRows: Seq[BlockTableRowData] = rddPartitions.map(toRow).sorted(rowOrdering)
  override def dataSize: Int = sortedRows.length
  override def sliceData(from: Int, to: Int): Seq[BlockTableRowData] = sortedRows.slice(from, to)
  /** Flattens one partition's info into a table row, resolving executor ids to addresses. */
  private def toRow(partition: RDDPartitionInfo): BlockTableRowData = {
    val executorAddresses = partition.executors
      .map(id => executorIdToAddress.getOrElse(id, id))
      .sorted
    BlockTableRowData(
      partition.blockName,
      partition.storageLevel,
      partition.memoryUsed,
      partition.diskUsed,
      executorAddresses.mkString(" "))
  }
  /**
   * Ordering for the requested sort column, reversed for descending sorts.
   * Throws IllegalArgumentException for an unknown column name.
   */
  private def rowOrdering: Ordering[BlockTableRowData] = {
    val ascending: Ordering[BlockTableRowData] = sortColumn match {
      case "Block Name" => Ordering.by(_.blockName)
      case "Storage Level" => Ordering.by(_.storageLevel)
      case "Size in Memory" => Ordering.by(_.memoryUsed)
      case "Size on Disk" => Ordering.by(_.diskUsed)
      case "Executors" => Ordering.by(_.executors)
      case unknownColumn => throw new IllegalArgumentException(s"Unknown column: $unknownColumn")
    }
    if (desc) ascending.reverse else ascending
  }
}
/**
 * Paged, sortable HTML table of an RDD's cached blocks. Sorting and paging
 * state round-trips through the `block.*` query parameters embedded in every
 * generated link.
 */
private[ui] class BlockPagedTable(
    basePath: String,
    rddPartitions: Seq[RDDPartitionInfo],
    pageSize: Int,
    sortColumn: String,
    desc: Boolean,
    executorSummaries: Seq[ExecutorSummary]) extends PagedTable[BlockTableRowData] {
  override def tableId: String = "rdd-storage-by-block-table"
  override def tableCssClass: String =
    "table table-bordered table-sm table-striped table-head-clickable"
  override def pageSizeFormField: String = "block.pageSize"
  override def pageNumberFormField: String = "block.page"
  // Maps executor ids to host:port addresses for display in the Executors column.
  override val dataSource: BlockDataSource = new BlockDataSource(
    rddPartitions,
    pageSize,
    sortColumn,
    desc,
    executorSummaries.map { ex => (ex.id, ex.hostPort) }.toMap)
  // URL for a given page number, preserving the current sort settings.
  override def pageLink(page: Int): String = {
    val encodedSortColumn = URLEncoder.encode(sortColumn, UTF_8.name())
    basePath +
      s"&$pageNumberFormField=$page" +
      s"&block.sort=$encodedSortColumn" +
      s"&block.desc=$desc" +
      s"&$pageSizeFormField=$pageSize"
  }
  override def goButtonFormPath: String = {
    val encodedSortColumn = URLEncoder.encode(sortColumn, UTF_8.name())
    s"$basePath&block.sort=$encodedSortColumn&block.desc=$desc"
  }
  // Header row: each column is a link that re-sorts by that column; clicking the
  // currently sorted column flips the direction and shows an arrow marker.
  override def headers: Seq[Node] = {
    val blockHeaders = Seq(
      "Block Name",
      "Storage Level",
      "Size in Memory",
      "Size on Disk",
      "Executors")
    if (!blockHeaders.contains(sortColumn)) {
      throw new IllegalArgumentException(s"Unknown column: $sortColumn")
    }
    val headerRow: Seq[Node] = {
      blockHeaders.map { header =>
        if (header == sortColumn) {
          val headerLink = Unparsed(
            basePath +
              s"&block.sort=${URLEncoder.encode(header, UTF_8.name())}" +
              s"&block.desc=${!desc}" +
              s"&block.pageSize=$pageSize")
          val arrow = if (desc) "▾" else "▴" // UP or DOWN
          <th>
            <a href={headerLink}>
              {header}
              <span> {Unparsed(arrow)}</span>
            </a>
          </th>
        } else {
          val headerLink = Unparsed(
            basePath +
              s"&block.sort=${URLEncoder.encode(header, UTF_8.name())}" +
              s"&block.pageSize=$pageSize")
          <th>
            <a href={headerLink}>
              {header}
            </a>
          </th>
        }
      }
    }
    <thead>{headerRow}</thead>
  }
  override def row(block: BlockTableRowData): Seq[Node] = {
    <tr>
      <td>{block.blockName}</td>
      <td>{block.storageLevel}</td>
      <td>{Utils.bytesToString(block.memoryUsed)}</td>
      <td>{Utils.bytesToString(block.diskUsed)}</td>
      <td>{block.executors}</td>
    </tr>
  }
}
| matthewfranglen/spark | core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala | Scala | mit | 10,095 |
package ch.bsisa.hyperbird.patman.simulations.messages
/**
 * Message requesting that a running simulation be stopped.
 *
 * @param cause human-readable reason for stopping the simulation
 */
case class StopSimulationRequest(cause: String)
package org.jetbrains.plugins.scala.lang.psi.dataFlow
import org.jetbrains.plugins.scala.lang.psi.controlFlow.Instruction
/**
 * A data-flow analysis instance: the transfer function applied to lattice
 * elements of type `E` as control-flow instructions are traversed.
 *
 * @author ilyas
 */
trait DfaInstance[E] {
  // Direction of the analysis: true for forward, false for backward.
  def isForward: Boolean
  //val fun: Instruction => E => E
  // Transfer function: given an instruction, maps the incoming element to the outgoing one.
  def fun(i: Instruction)(e: E): E
}
/** A join-semilattice over elements of type `E`, used to merge data-flow facts. */
trait Semilattice[E] {
  // Equality of lattice elements (used e.g. to detect a fixed point).
  def eq(e1: E, e2: E): Boolean
  // Least upper bound (join) of all the given elements.
  def join(ins: Iterable[E]): E
  // Bottom element of the lattice.
  val bottom: E
}
package breeze.linalg.operators
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import breeze.generic.UFunc.UImpl2
import breeze.generic.{UFunc, MMRegistry2/*, Multimethod2*/}
import breeze.linalg.support.CanCopy
import scala.annotation.unchecked.uncheckedVariance
import scala.reflect.ClassTag
object BinaryOp {
  /**
   * Derives a pure (non-mutating) binary operation from an in-place update:
   * the left operand is copied, the in-place op is applied to the copy, and
   * the copy is returned, leaving the original operand untouched.
   */
  def fromCopyAndUpdate[A, B, Op](implicit op: UFunc.InPlaceImpl2[Op, A, B], copy: CanCopy[A]):UFunc.UImpl2[Op, A, B, A] =
    new UFunc.UImpl2[Op, A, B, A] {
      def apply(a: A, b: B): A = {
        val result = copy(a)
        op(result, b)
        result
      }
    }
}
/**
 * This is a special kind of BinaryOp that supports registration
 * of specialized implementations for a given operation, resolved at runtime
 * by the concrete classes of the two arguments. Lookups go through a
 * per-thread single-entry cache, then a shared cache, then full resolution.
 * @author dlwh
 */
// This trait could reuse code from Multimethod2, but not doing so allows us to reduce code size a lot
// because we don't need BinaryOp's to inherit from Function2, which has a lot of @specialized cruft.
trait BinaryRegistry[A, B, Op, +R] extends UFunc.UImpl2[Op, A, B, R] with MMRegistry2[UFunc.UImpl2[Op, _ <: A, _ <: B, _ <: (R @uncheckedVariance)]] {
  // Called when no implementation is registered for the runtime classes of (a, b).
  protected def bindingMissing(a: A, b: B):R = throw new UnsupportedOperationException("Types not found!" + a + b + " " + ops)
  // Called when several registered implementations are equally specific and ambiguous.
  protected def multipleOptions(a: A, b: B, m: Map[(Class[_],Class[_]),UImpl2[Op, _ <: A, _ <: B, _ <: R @uncheckedVariance]]) = {
    throw new RuntimeException("Multiple bindings for method: " + m)
  }
  // Thread-local one-entry cache of the last resolved (classA, classB) -> impl lookup.
  // None means "resolved before and known to be missing".
  private val l1cache: ThreadLocal[((Class[_], Class[_]), Option[UImpl2[Op, _ <: A, _ <: B, _ <: R @uncheckedVariance]])] = {
    new ThreadLocal[((Class[_], Class[_]), Option[UImpl2[Op, _ <: A, _ <: B, _ <: R]])]
  }
  // Dispatch on the runtime classes of both arguments, trying the thread-local
  // cache first and falling back to the slower shared-cache/resolution path.
  def apply(a: A, b: B): R = {
    val ac = a.asInstanceOf[AnyRef].getClass
    val bc = b.asInstanceOf[AnyRef].getClass
    val pair = (ac, bc)
    val firstLevelCached = l1cache.get()
    if (firstLevelCached != null && pair == firstLevelCached._1) {
      firstLevelCached._2 match {
        case None => bindingMissing(a, b)
        case some@Some(m) =>
          m.asInstanceOf[UImpl2[Op, A, B, R]].apply(a, b)
      }
    } else {
      slowPath(a, b, ac, bc, pair)
    }
  }
  // Shared-cache lookup, then full resolution over the registered implementations;
  // successful resolutions are written back to both cache levels.
  private def slowPath(a: A, b: B, ac: Class[_ <: AnyRef], bc: Class[_ <: AnyRef], pair: (Class[_ <: AnyRef], Class[_ <: AnyRef])): R = {
    val cached: Option[UImpl2[Op, _ <: A, _ <: B, _ <: R@uncheckedVariance]] = cache.get(pair)
    if (cached != null) {
      cached match {
        case None => bindingMissing(a, b)
        case some@Some(m) =>
          l1cache.set(pair -> some)
          m.asInstanceOf[UImpl2[Op, A, B, R]].apply(a, b)
      }
    } else {
      val options = resolve(ac, bc.asInstanceOf[Class[_ <: B]])
      options.size match {
        case 0 =>
          cache.put(ac -> bc, None)
          bindingMissing(a, b)
        case 1 =>
          val method = options.values.head
          cache.put(ac -> bc, Some(method))
          method.asInstanceOf[UImpl2[Op, A, B, R]].apply(a, b)
        case _ =>
          // Ambiguous: narrow down to the most specific implementation(s).
          val selected = selectBestOption(options)
          if (selected.size != 1)
            multipleOptions(a, b, options)
          else {
            val method = selected.values.head
            val some = Some(method)
            l1cache.set(pair -> some)
            cache.put(pair, some)
            method.asInstanceOf[UImpl2[Op, A, B, R]].apply(a, b)
          }
      }
    }
  }
  // Register an implementation for the argument classes captured by the ClassTags.
  def register[AA<:A, BB<:B](op: UImpl2[Op, AA, BB, _ <: R @uncheckedVariance])(implicit cA: ClassTag[AA], cB: ClassTag[BB]) = {
    super.register(cA.runtimeClass, cB.runtimeClass, op)
    op
  }
}
| claydonkey/breeze | math/src/main/scala/breeze/linalg/operators/BinaryOp.scala | Scala | apache-2.0 | 4,062 |
package com.sksamuel.elastic4s.searches.queries
import com.sksamuel.elastic4s.searches.QueryDefinition
import org.apache.lucene.search.join.ScoreMode
import org.elasticsearch.index.query.{NestedQueryBuilder, QueryBuilders}
/**
 * Definition of an Elasticsearch `nested` query over documents at `path`,
 * scoring matches with the given `scoreMode`. Optional settings (boost,
 * ignoreUnmapped, inner hits, query name) are applied only when present.
 */
case class NestedQueryDefinition(path: String,
                                 query: QueryDefinition,
                                 scoreMode: ScoreMode,
                                 boost: Option[Double] = None,
                                 ignoreUnmapped: Option[Boolean] = None,
                                 inner: Option[InnerHitDefinition] = None,
                                 queryName: Option[String] = None) extends QueryDefinition {
  require(query != null, "must specify query for nested score query")

  /** Builds the underlying [[NestedQueryBuilder]], applying only the optional settings that are set. */
  def builder: NestedQueryBuilder = {
    val builder = QueryBuilders.nestedQuery(path, query.builder, scoreMode)
    // Use foreach for all side-effecting setters (previously `.map` was used here,
    // discarding its result — foreach states the intent and is consistent with the rest).
    boost.map(_.toFloat).foreach(builder.boost)
    inner.map(_.builder).foreach(builder.innerHit)
    queryName.foreach(builder.queryName)
    ignoreUnmapped.foreach(builder.ignoreUnmapped)
    builder
  }

  // Fluent copies returning a new immutable definition with the given option set.
  def boost(b: Double): NestedQueryDefinition = copy(boost = Option(b))
  def ignoreUnmapped(ignoreUnmapped: Boolean): NestedQueryDefinition = copy(ignoreUnmapped = Option(ignoreUnmapped))
  def inner(inner: InnerHitDefinition): NestedQueryDefinition = copy(inner = Option(inner))
  def queryName(queryName: String): NestedQueryDefinition = copy(queryName = Option(queryName))
}
| ulric260/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/NestedQueryDefinition.scala | Scala | apache-2.0 | 1,456 |
package com.hackdfw.backend
import com.github.tminglei.slickpg.{PgDateSupport, PgJson4sSupport, ExPostgresDriver}
import org.json4s.JsonAST.JValue
import slick.driver.PostgresDriver.api._
import org.json4s.jackson.JsonMethods
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Custom slick-pg driver profile adding json4s-backed JSON support and
 * date/time support on top of the stock extended Postgres driver.
 */
trait MyPostgresDriver extends ExPostgresDriver
  with PgJson4sSupport with PgDateSupport {
  // Use the plain "json" column type (as opposed to "jsonb").
  override val pgjson = "json"
  // Document AST type used by PgJson4sSupport.
  type DOCType = JValue
  // json4s (de)serialization backend used for JSON columns.
  val jsonMethods = JsonMethods
  override val api = MyAPI
  // API bundle exposing the JSON and date/time implicits together.
  object MyAPI extends API
    with JsonImplicits with DateTimeImplicits
}
// Singleton instance for application code to import.
object MyPostgresDriver extends MyPostgresDriver
| simplyianm/hackdfw-backend-old | src/main/scala/com/hackdfw/backend/MyPostgresDriver.scala | Scala | isc | 618 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.optimization
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.mllib.linalg.{DenseVector, Vector, Vectors}
import org.apache.spark.mllib.linalg.BLAS.{axpy, dot, scal}
import org.apache.spark.mllib.util.MLUtils
/**
 * :: DeveloperApi ::
 * Class used to compute the gradient for a loss function, given a single data point.
 */
@DeveloperApi
abstract class Gradient extends Serializable {
  /**
   * Compute the gradient and loss given the features of a single data point.
   *
   * @param data features for one data point
   * @param label label for this data point
   * @param weights weights/coefficients corresponding to features
   *
   * @return (gradient: Vector, loss: Double)
   */
  def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
    // Delegate to the accumulating overload, starting from a fresh zero vector.
    val gradient = Vectors.zeros(weights.size)
    val loss = compute(data, label, weights, gradient)
    (gradient, loss)
  }
  /**
   * Compute the gradient and loss given the features of a single data point,
   * add the gradient to a provided vector to avoid creating new objects, and return loss.
   *
   * @param data features for one data point
   * @param label label for this data point
   * @param weights weights/coefficients corresponding to features
   * @param cumGradient the computed gradient will be added to this vector
   *
   * @return loss
   */
  def compute(data: Vector, label: Double, weights: Vector, cumGradient: Vector): Double
}
/**
* :: DeveloperApi ::
* Compute gradient and loss for a multinomial logistic loss function, as used
* in multi-class classification (it is also used in binary logistic regression).
*
* In `The Elements of Statistical Learning: Data Mining, Inference, and Prediction, 2nd Edition`
* by Trevor Hastie, Robert Tibshirani, and Jerome Friedman, which can be downloaded from
* http://statweb.stanford.edu/~tibs/ElemStatLearn/ , Eq. (4.17) on page 119 gives the formula of
* multinomial logistic regression model. A simple calculation shows that
*
* {{{
* P(y=0|x, w) = 1 / (1 + \\sum_i^{K-1} \\exp(x w_i))
* P(y=1|x, w) = exp(x w_1) / (1 + \\sum_i^{K-1} \\exp(x w_i))
* ...
* P(y=K-1|x, w) = exp(x w_{K-1}) / (1 + \\sum_i^{K-1} \\exp(x w_i))
* }}}
*
* for K classes multiclass classification problem.
*
* The model weights w = (w_1, w_2, ..., w_{K-1})^T becomes a matrix which has dimension of
* (K-1) * (N+1) if the intercepts are added. If the intercepts are not added, the dimension
* will be (K-1) * N.
*
* As a result, the loss of objective function for a single instance of data can be written as
* {{{
* l(w, x) = -log P(y|x, w) = -\\alpha(y) log P(y=0|x, w) - (1-\\alpha(y)) log P(y|x, w)
* = log(1 + \\sum_i^{K-1}\\exp(x w_i)) - (1-\\alpha(y)) x w_{y-1}
* = log(1 + \\sum_i^{K-1}\\exp(margins_i)) - (1-\\alpha(y)) margins_{y-1}
* }}}
*
* where \\alpha(i) = 1 if i != 0, and
* \\alpha(i) = 0 if i == 0,
* margins_i = x w_i.
*
* For optimization, we have to calculate the first derivative of the loss function, and
* a simple calculation shows that
*
* {{{
* \\frac{\\partial l(w, x)}{\\partial w_{ij}}
* = (\\exp(x w_i) / (1 + \\sum_k^{K-1} \\exp(x w_k)) - (1-\\alpha(y)\\delta_{y, i+1})) * x_j
* = multiplier_i * x_j
* }}}
*
* where \\delta_{i, j} = 1 if i == j,
* \\delta_{i, j} = 0 if i != j, and
* multiplier =
* \\exp(margins_i) / (1 + \\sum_k^{K-1} \\exp(margins_i)) - (1-\\alpha(y)\\delta_{y, i+1})
*
* If any of margins is larger than 709.78, the numerical computation of multiplier and loss
* function will be suffered from arithmetic overflow. This issue occurs when there are outliers
* in data which are far away from hyperplane, and this will cause the failing of training once
* infinity / infinity is introduced. Note that this is only a concern when max(margins) > 0.
*
* Fortunately, when max(margins) = maxMargin > 0, the loss function and the multiplier can be
* easily rewritten into the following equivalent numerically stable formula.
*
* {{{
* l(w, x) = log(1 + \\sum_i^{K-1}\\exp(margins_i)) - (1-\\alpha(y)) margins_{y-1}
* = log(\\exp(-maxMargin) + \\sum_i^{K-1}\\exp(margins_i - maxMargin)) + maxMargin
* - (1-\\alpha(y)) margins_{y-1}
* = log(1 + sum) + maxMargin - (1-\\alpha(y)) margins_{y-1}
* }}}
*
* where sum = \\exp(-maxMargin) + \\sum_i^{K-1}\\exp(margins_i - maxMargin) - 1.
*
* Note that each term, (margins_i - maxMargin) in \\exp is smaller than zero; as a result,
* overflow will not happen with this formula.
*
* For multiplier, similar trick can be applied as the following,
*
* {{{
* multiplier = \\exp(margins_i) / (1 + \\sum_k^{K-1} \\exp(margins_i)) - (1-\\alpha(y)\\delta_{y, i+1})
* = \\exp(margins_i - maxMargin) / (1 + sum) - (1-\\alpha(y)\\delta_{y, i+1})
* }}}
*
* where each term in \\exp is also smaller than zero, so overflow is not a concern.
*
* For the detailed mathematical derivation, see the reference at
* http://www.slideshare.net/dbtsai/2014-0620-mlor-36132297
*
* @param numClasses the number of possible outcomes for k classes classification problem in
* Multinomial Logistic Regression. By default, it is binary logistic regression
* so numClasses will be set to 2.
*/
@DeveloperApi
class LogisticGradient(numClasses: Int) extends Gradient {

  // Default constructor yields binary logistic regression (two classes).
  def this() = this(2)

  // Computes (gradient, loss) for one example by delegating to the
  // accumulating overload with a freshly zeroed gradient vector.
  override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
    val gradient = Vectors.zeros(weights.size)
    val loss = compute(data, label, weights, gradient)
    (gradient, loss)
  }

  // Adds this example's gradient contribution into `cumGradient` in place and
  // returns the example's loss. In the multinomial case `weights` is the
  // flattened (numClasses - 1) x dataSize coefficient matrix.
  override def compute(
      data: Vector,
      label: Double,
      weights: Vector,
      cumGradient: Vector): Double = {
    val dataSize = data.size

    // (weights.size / dataSize + 1) is number of classes
    require(weights.size % dataSize == 0 && numClasses == weights.size / dataSize + 1)
    numClasses match {
      case 2 =>
        /**
         * For Binary Logistic Regression.
         *
         * Although the loss and gradient calculation for multinomial one is more generalized,
         * and multinomial one can also be used in binary case, we still implement a specialized
         * binary version for performance reason.
         */
        val margin = -1.0 * dot(data, weights)
        val multiplier = (1.0 / (1.0 + math.exp(margin))) - label
        axpy(multiplier, data, cumGradient)
        if (label > 0) {
          // The following is equivalent to log(1 + exp(margin)) but more numerically stable.
          MLUtils.log1pExp(margin)
        } else {
          MLUtils.log1pExp(margin) - margin
        }
      case _ =>
        /**
         * For Multinomial Logistic Regression.
         */
        // The backing arrays are read/written in place below, so only dense
        // vectors are supported here.
        val weightsArray = weights match {
          case dv: DenseVector => dv.values
          case _ =>
            throw new IllegalArgumentException(
              s"weights only supports dense vector but got type ${weights.getClass}.")
        }
        val cumGradientArray = cumGradient match {
          case dv: DenseVector => dv.values
          case _ =>
            throw new IllegalArgumentException(
              s"cumGradient only supports dense vector but got type ${cumGradient.getClass}.")
        }

        // marginY is margins(label - 1) in the formula.
        var marginY = 0.0
        var maxMargin = Double.NegativeInfinity
        var maxMarginIndex = 0

        // margins(i) = x . w_i for each of the K-1 non-pivot classes; also
        // records the labeled class's margin and the running maximum (needed
        // for the overflow-safe rewrite described in the class comment).
        val margins = Array.tabulate(numClasses - 1) { i =>
          var margin = 0.0
          data.foreachActive { (index, value) =>
            if (value != 0.0) margin += value * weightsArray((i * dataSize) + index)
          }
          if (i == label.toInt - 1) marginY = margin
          if (margin > maxMargin) {
            maxMargin = margin
            maxMarginIndex = i
          }
          margin
        }

        /**
         * When maxMargin > 0, the original formula will cause overflow as we discuss
         * in the previous comment.
         * We address this by subtracting maxMargin from all the margins, so it's guaranteed
         * that all of the new margins will be smaller than zero to prevent arithmetic overflow.
         */
        val sum = {
          var temp = 0.0
          if (maxMargin > 0) {
            for (i <- 0 until numClasses - 1) {
              margins(i) -= maxMargin
              if (i == maxMarginIndex) {
                temp += math.exp(-maxMargin)
              } else {
                temp += math.exp(margins(i))
              }
            }
          } else {
            for (i <- 0 until numClasses - 1) {
              temp += math.exp(margins(i))
            }
          }
          temp
        }

        // Accumulate multiplier_i * x_j into row i of the flattened gradient.
        for (i <- 0 until numClasses - 1) {
          val multiplier = math.exp(margins(i)) / (sum + 1.0) - {
            if (label != 0.0 && label == i + 1) 1.0 else 0.0
          }
          data.foreachActive { (index, value) =>
            if (value != 0.0) cumGradientArray(i * dataSize + index) += multiplier * value
          }
        }

        // loss = log(1 + sum) - marginY (marginY only for non-pivot labels);
        // add maxMargin back when it was subtracted above.
        val loss = if (label > 0.0) math.log1p(sum) - marginY else math.log1p(sum)

        if (maxMargin > 0) {
          loss + maxMargin
        } else {
          loss
        }
    }
  }
}
/**
* :: DeveloperApi ::
* Compute gradient and loss for a Least-squared loss function, as used in linear regression.
* This is correct for the averaged least squares loss function (mean squared error)
* L = 1/2n ||A weights-y||^2
* See also the documentation for the precise formulation.
*/
@DeveloperApi
class LeastSquaresGradient extends Gradient {

  /**
   * Returns the (gradient, loss) pair for one example under the averaged
   * least-squares loss L = 1/2 * (w . x - y)^2.
   */
  override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
    val residual = dot(data, weights) - label
    // d/dw of 1/2 * residual^2 is residual * x.
    val gradient = data.copy
    scal(residual, gradient)
    (gradient, residual * residual / 2.0)
  }

  /** Accumulates this example's gradient into cumGradient and returns its loss. */
  override def compute(
      data: Vector,
      label: Double,
      weights: Vector,
      cumGradient: Vector): Double = {
    val residual = dot(data, weights) - label
    axpy(residual, data, cumGradient)
    residual * residual / 2.0
  }
}
/**
* :: DeveloperApi ::
* Compute gradient and loss for a Hinge loss function, as used in SVM binary classification.
* See also the documentation for the precise formulation.
* NOTE: This assumes that the labels are {0,1}
*/
@DeveloperApi
class HingeGradient extends Gradient {

  /**
   * Returns the (gradient, loss) pair for one example under the hinge loss.
   * Labels are assumed to be in {0, 1} and are remapped to {-1, +1} so the
   * loss reads max(0, 1 - (2y - 1) * f_w(x)).
   */
  override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
    val score = dot(data, weights)
    val signedLabel = 2 * label - 1.0
    val hinge = 1.0 - signedLabel * score
    if (hinge > 0) {
      // Subgradient inside the margin: -(2y - 1) * x.
      val gradient = data.copy
      scal(-signedLabel, gradient)
      (gradient, hinge)
    } else {
      // Correctly classified with margin: zero gradient, zero loss.
      (Vectors.sparse(weights.size, Array.empty, Array.empty), 0.0)
    }
  }

  /** Accumulates this example's subgradient into cumGradient and returns its loss. */
  override def compute(
      data: Vector,
      label: Double,
      weights: Vector,
      cumGradient: Vector): Double = {
    val score = dot(data, weights)
    val signedLabel = 2 * label - 1.0
    val hinge = 1.0 - signedLabel * score
    if (hinge > 0) {
      axpy(-signedLabel, data, cumGradient)
      hinge
    } else {
      0.0
    }
  }
}
| practice-vishnoi/dev-spark-1 | mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala | Scala | apache-2.0 | 12,187 |
package vonsim.webapp
import org.scalajs.dom.raw.HTMLElement
import vonsim.simulator.Simulator
import vonsim.simulator.InstructionInfo
import vonsim.simulator.DWord
import vonsim.simulator.Word
import vonsim.assembly.Compiler.CompilationResult
import org.scalajs.dom
import vonsim.simulator.SimulatorStoppedState
import vonsim.simulator.SimulatorExecutionFinished
import vonsim.simulator.SimulatorExecutionStopped
import vonsim.simulator.SimulatorExecutionError
import vonsim.webapp.i18n.UILanguage
import vonsim.webapp.i18n.English
import vonsim.webapp.i18n.UILanguage
// Base trait for UI components: exposes the component's root DOM element and
// a helper for binding keyboard shortcuts.
abstract class HTMLUI {
  def root: HTMLElement

  // Binds `key` as a keydown shortcut via Mousetrap.
  // NOTE(review): `element` is currently unused — the binding is global, not
  // scoped to the element; confirm whether per-element binding was intended.
  def bindkey(element:HTMLElement,key:String,f:Function0[Boolean]){
    Mousetrap.bindGlobal(key, f, "keydown")
    // Previous hand-rolled key handling, kept for reference:
//    element.onkeydown = (e: dom.KeyboardEvent) => {
//      //println("Pressed " + e.keyCode + " " + e.ctrlKey)
//      e.key
//      //      var keyCode = e.keyCode;
//      //      if ((e.ctrlKey || e.metaKey) && e.keyCode == 83) {
//      if (!(e.ctrlKey || e.metaKey || e.altKey || e.shiftKey)) {
//        if (e.key == key) {
//          e.stopPropagation()
//          e.preventDefault()
//          f.apply()
//        }
//      }
//
//    }
  }
}
/** Mutable UI state bundle: current simulator, latest compilation result and UI language. */
class VonSimState(var s: Simulator, var c: CompilationResult, var uil: UILanguage) {

  /** True when the simulator is not running: finished, stopped, or halted by an error. */
  def simulatorStopped() = s.state match {
    case SimulatorExecutionFinished | SimulatorExecutionStopped => true
    case _: SimulatorExecutionError                             => true
    case _                                                      => false
  }

  /** Loading or quick-running requires a stopped simulator and a successful compilation. */
  def canLoadOrQuickRun() = simulatorStopped() && c.isRight

  /** Negation of [[simulatorStopped]]. */
  def isSimulatorExecuting() = !simulatorStopped()
}
/**
 * Base class for UI components rendered from simulator state. Subclasses
 * react to simulator and compilation events; the whole component can be
 * enabled/disabled via a CSS class on its root element. Also provides hex
 * formatting helpers for addresses and machine words.
 */
abstract class VonSimUI(val s: VonSimState) extends HTMLUI {

  /** Called after arbitrary changes to the simulator state. */
  def simulatorEvent()

  /** Called after the simulator executed instruction `i`. */
  def simulatorEvent(i: InstructionInfo)

  /** Called after a (re)compilation of the source code. */
  def compilationEvent()

  def disable() {
    root.classList.add("disabledElement")
  }

  def enable() {
    root.classList.remove("disabledElement")
  }

  def setDisabled(disabled: Boolean) {
    if (disabled) {
      disable()
    } else {
      enable()
    }
  }

  /** Formats an I/O address as a two-digit uppercase hex string. */
  def formatIOAddress(a: Int) = {
    // Fix: the previous version also invoked s.uil.formatIOAddress(a) and
    // discarded the result; that dead call has been removed.
    "%02X".format(a)
  }

  /** Formats a memory address as a four-digit uppercase hex string. */
  def formatAddress(a: Int) = {
    "%04X".format(a)
  }

  /** Formats an 8-bit word as a two-digit uppercase hex string. */
  def formatWord(a: Word) = {
    "%02X".format(a.toUnsignedInt)
  }

  /** Formats a 16-bit word as a four-digit uppercase hex string. */
  def formatDWord(a: DWord) = {
    "%04X".format(a.toUnsignedInt)
  }
}
| facundoq/vonsim | src/main/scala/vonsim/webapp/VonSimUI.scala | Scala | agpl-3.0 | 2,354 |
/*
Copyright 2018 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package ml.dmlc.xgboost4j.scala.example.spark
import ml.dmlc.xgboost4j.scala.Booster
import ml.dmlc.xgboost4j.scala.spark.XGBoost
import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.col
import scala.util.parsing.json.JSON
/** A distributed XGBoost trainer program running in spark cluster.
* Args:
* train-conf: GCS path of the training config json file for xgboost training.
* num-of-rounds: number of rounds to train.
* num-workers: number of spark worker node used for training.
* analysis-path: GCS path of analysis results directory.
* target-name: column name of the prediction target.
* training-path: GCS path of training libsvm file patterns.
* eval-path: GCS path of eval libsvm file patterns.
* output-path: GCS path to store the trained model.
*/
object XGBoostTrainer {

  /**
   * Number of feature-vector slots one column's statistics contribute:
   * the prediction target contributes 0, categorical columns contribute
   * their vocabulary size, numeric columns contribute 1.
   */
  def column_feature_size(stats: (String, Any), target: String): Double = {
    // BUG FIX: the original wrote `if (stats._1 == target) 0.0` without an
    // `else`, so the 0.0 was discarded and the target column was still
    // counted as a feature.
    if (stats._1 == target) {
      0.0
    } else {
      val statsMap = stats._2.asInstanceOf[Map[String, Any]]
      if (statsMap.contains("vocab_size")) statsMap("vocab_size").asInstanceOf[Double]
      else if (statsMap.contains("max")) 1.0
      else 0.0
    }
  }

  /** Total libsvm feature-vector width derived from the analysis stats file. */
  def get_feature_size(statsPath: String, target: String): Int = {
    val sparkSession = SparkSession.builder().getOrCreate()
    val schema_string = sparkSession.sparkContext.wholeTextFiles(
      statsPath).map(tuple => tuple._2).collect()(0)
    val column_stats = JSON.parseFull(schema_string).get.asInstanceOf[Map[String, Any]](
      "column_stats").asInstanceOf[Map[String, Any]]
    // Sum the per-column contributions.
    column_stats.iterator.map(column_feature_size(_, target)).sum.toInt
  }

  /**
   * Parses the JSON training config from GCS into a Map. Numbers are parsed
   * as Int when possible (falling back to Double) so integral xgboost
   * parameters are not widened; the global number parser is restored after.
   */
  def read_config(configFile: String): Map[String, Any] = {
    val sparkSession = SparkSession.builder().getOrCreate()
    val confString = sparkSession.sparkContext.wholeTextFiles(
      configFile).map(tuple => tuple._2).collect()(0)
    // Avoid parsing "500" to "500.0"
    val originNumberParser = JSON.perThreadNumberParser
    JSON.perThreadNumberParser = {
      in => try in.toInt catch { case _: NumberFormatException => in.toDouble}
    }
    try JSON.parseFull(confString).get.asInstanceOf[Map[String, Any]] finally {
      JSON.perThreadNumberParser = originNumberParser
    }
  }

  /**
   * Looks up the target column in the schema file: CATEGORY targets mean a
   * classification task, NUMBER targets a regression task.
   */
  def isClassificationTask(schemaFile: String, targetName: String): Boolean = {
    val sparkSession = SparkSession.builder().getOrCreate()
    val schemaString = sparkSession.sparkContext.wholeTextFiles(
      schemaFile).map(tuple => tuple._2).collect()(0)
    val schema = JSON.parseFull(schemaString).get.asInstanceOf[List[Map[String, String]]]
    val targetList = schema.filter(x => x("name") == targetName)
    if (targetList.isEmpty) {
      throw new IllegalArgumentException("target cannot be found.")
    }
    val targetType = targetList(0)("type")
    if (targetType == "CATEGORY") true
    else if (targetType == "NUMBER") false
    else throw new IllegalArgumentException("invalid target type.")
  }

  def main(args: Array[String]): Unit = {
    if (args.length != 8) {
      println(
        "usage: program train-conf num-of-rounds num-workers analysis-path " +
          "target-name training-path eval-path output-path")
      sys.exit(1)
    }
    val sparkSession = SparkSession.builder().getOrCreate()
    val trainConf = args(0)
    val numRounds = args(1).toInt
    val numWorkers = args(2).toInt
    val analysisPath = args(3)
    val targetName = args(4)
    val inputTrainPath = args(5)
    val inputTestPath = args(6)
    val outputPath = args(7)

    // Build train/eval datasets with the feature width from the analysis stats.
    val feature_size = get_feature_size(analysisPath + "/stats.json", targetName)
    val trainDF = sparkSession.sqlContext.read.format("libsvm").option(
      "numFeatures", feature_size.toString).load(inputTrainPath)
    val testDF = sparkSession.sqlContext.read.format("libsvm").option(
      "numFeatures", feature_size.toString).load(inputTestPath)

    // Train the distributed XGBoost model.
    val paramMap = read_config(trainConf)
    val xgboostModel = XGBoost.trainWithDataFrame(
      trainDF, paramMap, numRounds, nWorkers = numWorkers, useExternalMemory = true)
    println("training summary -------\\n")
    println(xgboostModel.summary)

    // xgboost-spark appends the column containing prediction results
    val predictionDF = xgboostModel.transform(testDF)
    val classification = isClassificationTask(analysisPath + "/schema.json", targetName)
    implicit val sc = SparkContext.getOrCreate()

    if (classification) {
      // Report overall accuracy on the eval set. (The previous per-label
      // correct/total counts were computed but never used; removed.)
      val accuracyAll = (predictionDF.filter(col("prediction") === col("label")).count /
        predictionDF.count.toDouble)
      print("\\naccuracy: " + accuracyAll + "\\n")
    } else {
      // Report RMSE on the eval set.
      predictionDF.createOrReplaceTempView("prediction")
      val rmseDF = sparkSession.sql(
        "SELECT SQRT(AVG((prediction - label) * (prediction - label))) FROM prediction")
      val rmse = rmseDF.collect()(0).getDouble(0)
      print("RMSE: " + rmse + "\\n")
    }
    xgboostModel.saveModelAsHadoopFile(outputPath)
    print("Done")
  }
}
| kubeflow/kfp-tekton-backend | components/deprecated/dataproc/train/src/XGBoostTrainer.scala | Scala | apache-2.0 | 5,926 |
package latis.ops
import latis.dm.Function
import latis.dm.Sample
import latis.dm.Scalar
import latis.dm.Tuple
import latis.dm.Variable
import latis.util.StringUtils
import latis.util.iterator.MappingIterator
import latis.util.iterator.PeekIterator
import latis.metadata.Metadata
/**
* Used for domains that represent a range of values but have only one explicit value.
* The domain will be mapped to a Tuple with 'start_name' and 'end_name' Variables.
* The parameter 'knownValues' indicates whether the known values for each bin are the
* start or the end of the bin. The parameter 'fillVal' is used to fill in the
* unknown start/end value at the start/end of the function.
*/
class DomainBinner(knownValues: String, fillVal: String) extends Operation {

  // Current bin bounds; mutable because the "end" mode carries the previous
  // sample's value forward as the next bin's start.
  var (binStart, binStop): (Variable, Variable) = (null, null)
  // Peekable sample iterator, installed by applyToFunction so applyToScalar
  // can look ahead at the next domain value.
  var pit: PeekIterator[Sample] = null

  /**
   * Returns a Tuple containing the bounds of this Variable
   */
  override def applyToScalar(scalar: Scalar): Option[Tuple] = {
    val name = scalar.getName
    knownValues match {
      case "start" => {
        // The scalar is the bin start; the next sample's domain (or fillVal
        // at the end of the function) supplies the bin stop.
        binStart = scalar.updatedMetadata("name" -> s"start_$name")
        binStop = pit.peek match {
          case null => scalar(StringUtils.parseStringValue(fillVal, scalar)).
            asInstanceOf[Scalar].updatedMetadata("name" -> s"stop_$name")
          case Sample(d: Scalar, r) => d.updatedMetadata("name" -> s"stop_$name")
        }
      }
      case "end" => {
        // The scalar is the bin stop; the previous stop becomes this bin's start.
        binStart = binStop.asInstanceOf[Scalar].updatedMetadata("name" -> s"start_$name")
        binStop = scalar.updatedMetadata("name" -> s"stop_$name")
      }
    }
    Some(Tuple(List(binStart, binStop), Metadata("bounds")))
  }

  // Appends the bounds tuple of the sample's domain to the sample's range.
  override def applyToSample(sample: Sample): Option[Sample] = {
    applyToVariable(sample.domain) match {
      case Some(t:Tuple) => sample.range match{
        case Tuple(vars) => Some(Sample(sample.domain, Tuple(vars :+ t)))
        case other => Some(Sample(sample.domain, Tuple(other, t)))
      }
      case _ => None
    }
  }

  // Seeds binStop with the fill value (used as the first bin's start in "end"
  // mode) and wraps the sample iterator so applyToScalar can peek ahead.
  override def applyToFunction(function: Function): Option[Variable] = {
    val temp = function.getDomain
    val name = temp.getName
    pit = PeekIterator(function.iterator)
    binStop = temp(StringUtils.parseStringValue(fillVal, temp)).
      asInstanceOf[Scalar].updatedMetadata("name" -> s"stop_$name")
    super.applyToFunction(Function(function, pit))
  }
}
/** Factory for [[DomainBinner]] instances. */
object DomainBinner extends OperationFactory {

  /** Builds a binner from explicit arguments. */
  def apply(knownValue: String, fillVal: String): DomainBinner =
    new DomainBinner(knownValue, fillVal)

  /** Builds a binner from positional arguments: (knownValues, fillVal). */
  override def apply(args: Seq[String]): DomainBinner = apply(args(0), args(1))
}
package ca.uqam.euler.nicolas
object Problem025 {

  /**
   * Endless Fibonacci sequence 1, 1, 2, 3, 5, ... — a fresh iterator is
   * produced on every access, matching the original hand-rolled version.
   */
  def fib: Iterator[BigInt] =
    Iterator.iterate((BigInt(1), BigInt(1))) { case (a, b) => (b, a + b) }.map(_._1)

  /** 1-based index of the first Fibonacci number with exactly n decimal digits. */
  def findFirstWithNDigits(n: Int) =
    fib.indexWhere(_.toString.length == n) + 1

  def main(args: Array[String]) = Answer {
    findFirstWithNDigits(1000)
  }
}
package com.twitter.finagle.netty4.channel
import com.twitter.finagle.stats.StatsReceiver
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.{ChannelHandlerContext, ChannelInboundHandlerAdapter}
/**
* An inbound channel handler that reports receive buffers sizes in a `receive_buffer_bytes`
* histogram on a given [[StatsReceiver]].
*/
@Sharable
private[netty4] class RecvBufferSizeStatsHandler(stats: StatsReceiver)
  extends ChannelInboundHandlerAdapter {

  // Histogram of the readable size, in bytes, of each inbound ByteBuf.
  private[this] val receiveBufferBytes = stats.stat("receive_buffer_bytes")

  override def channelRead(ctx: ChannelHandlerContext, msg: Any): Unit = {
    msg match {
      case buf: ByteBuf => receiveBufferBytes.add(buf.readableBytes().toFloat)
      case _            => () // non-ByteBuf messages pass through unrecorded
    }
    ctx.fireChannelRead(msg)
  }
}
| spockz/finagle | finagle-netty4/src/main/scala/com/twitter/finagle/netty4/channel/RecvBufferSizeStatsHandler.scala | Scala | apache-2.0 | 827 |
/*
* Copyright 2007-2008 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb.mapper
import _root_.org.specs._
import _root_.org.specs.runner.JUnit3
import _root_.org.specs.runner.ConsoleRunner
import _root_.net.liftweb.util._
import Helpers._
import _root_.java.sql.{Connection, DriverManager}
//import _root_.net.liftweb.mapper.DBVendors.{MySqlRunner, DerbyRunner}
// JUnit 3 adapter so the specification can run under JUnit tooling.
class MapperSpecsAsTest extends JUnit3(MapperSpecs)
// Console runner entry point for the same specification.
object MapperSpecsRunner extends ConsoleRunner(MapperSpecs)
object MapperSpecs extends Specification {

  // All database providers configured for the test environment.
  def providers = DBProviders.asList

  providers.foreach(provider => {
    ("Mapper for " + provider.name) should {

      "schemify" in {
        // Skip (rather than fail) the example when the database is unavailable.
        try { provider.setupDB } catch { case e => skip(e.getMessage) }

        // Recreate the schema from scratch; the dbAddTable hooks repopulate data.
        Schemifier.destroyTables_!!(ignoreLogger _, SampleModel, SampleTag)
        Schemifier.schemify(true, ignoreLogger _, SampleModel, SampleTag)

        val elwood = SampleModel.find(By(SampleModel.firstName, "Elwood")).open_!
        val madeline = SampleModel.find(By(SampleModel.firstName, "Madeline")).open_!
        val archer = SampleModel.find(By(SampleModel.firstName, "Archer")).open_!

        elwood.firstName.is must_== "Elwood"
        madeline.firstName.is must_== "Madeline"
        archer.firstName.is must_== "Archer"

        val meow = SampleTag.find(By(SampleTag.tag, "Meow")).open_!

        meow.tag.is must_== "Meow"

        // Insertion order is reflected in the generated primary keys.
        elwood.id.is must be_<(madeline.id.is)
      }

      "Like works" in {
        try { provider.setupDB } catch { case e => skip(e.getMessage) }

        Schemifier.destroyTables_!!(ignoreLogger _, SampleModel, SampleTag)
        Schemifier.schemify(true, ignoreLogger _, SampleModel, SampleTag)

        // Substring match.
        val oo = SampleTag.findAll(Like(SampleTag.tag, "%oo%"))

        (oo.length > 0) must beTrue

        for (t <- oo)
          (t.tag.is.indexOf("oo") >= 0) must beTrue

        // Foreign-key targets are lazy: not cached until obj is accessed.
        for (t <- oo)
          t.model.cached_? must beFalse

        // Prefix match.
        val mm = SampleTag.findAll(Like(SampleTag.tag, "M%"))

        (mm.length > 0) must beTrue

        for (t <- mm)
          (t.tag.is.startsWith("M")) must beTrue

        for (t <- mm) {
          t.model.cached_? must beFalse
          t.model.obj
          t.model.cached_? must beTrue
        }
      }

      "Precache works" in {
        try { provider.setupDB } catch { case e => skip(e.getMessage) }

        Schemifier.destroyTables_!!(ignoreLogger _, SampleModel, SampleTag)
        Schemifier.schemify(true, ignoreLogger _, SampleModel, SampleTag)

        // PreCache eagerly fetches the referenced models with the query.
        val oo = SampleTag.findAll(By(SampleTag.tag, "Meow"),
                                   PreCache(SampleTag.model))

        (oo.length > 0) must beTrue

        for (t <- oo)
          t.model.cached_? must beTrue
      }
    }
  })

  // Swallows Schemifier log output during the tests.
  private def ignoreLogger(f: => AnyRef): Unit = ()
}
// Meta-mapper for SampleTag; on table creation, seeds the table with the
// cross product of all sample models and a fixed list of tags.
object SampleTag extends SampleTag with LongKeyedMetaMapper[SampleTag] {
  override def dbAddTable = Full(populate _)

  private def populate {
    val samp = SampleModel.findAll()
    val tags = List("Hello", "Moose", "Frog", "WooHoo", "Sloth",
                    "Meow", "Moof")
    for (t <- tags;
         m <- samp) SampleTag.create.tag(t).model(m).save
  }
}

// A tag row: a short string plus a foreign key to the model it tags.
class SampleTag extends LongKeyedMapper[SampleTag] with IdPK {
  def getSingleton = SampleTag // what's the "meta" server

  object tag extends MappedString(this, 32)

  object model extends MappedLongForeignKey(this, SampleModel)
}
// Meta-mapper for SampleModel; seeds three known rows on table creation.
object SampleModel extends SampleModel with KeyedMetaMapper[Long, SampleModel] {
  override def dbAddTable = Full(populate _)

  private def populate {
    create.firstName("Elwood").save
    create.firstName("Madeline").save
    create.firstName("Archer").save
  }
}

// A sample entity with a generated long primary key and a first name.
class SampleModel extends KeyedMapper[Long, SampleModel] {
  def getSingleton = SampleModel // what's the "meta" server
  def primaryKeyField = id

  object id extends MappedLongIndex(this)
  object firstName extends MappedString(this, 32)
}
| andreum/liftweb | lift-mapper/src/test/scala/net/liftweb/mapper/MapperSpecs.scala | Scala | apache-2.0 | 4,267 |
/**
* Created by Variant on 16/3/16.
*/
// Base class; its body runs once during construction.
class Human{
  println("Human")
}

trait TTeacher extends Human{
  println("TTeacher")
  def teach
  def eat:Unit = println("I like eat apple teacher")
}

trait PianoPlayer extends Human{
  println("PianoPlayer")
  def playPiano=println("I am a PianoPlayer,Playing Piano")
  def eat:Unit= {
    println("I like eat blanna piano")
  }
}

// Mixed in as a whole: Human with TTeacher with PianoPlayer.
// Linearization runs left to right; once Human has been constructed it is not
// constructed again, even though TTeacher also extends Human.
class PianoTeacher extends Human with TTeacher with PianoPlayer{
  override def teach ={
    println("I'm training students.")
  }
  // When inherited traits declare the same method, the class must override it.
  override def eat ={
    println("I LIKE TOMATO")
  }
}
object TraitImpro extends App{
//  val t1 = new PianoTeacher
//  t1.playPiano
//  t1.teach
//  t1.eat
  // With multiple stacked traits, the overridden methods are invoked from the
  // right-most mixin to the left (here: TAfter, then TbeforeAfter, then Work).
  val work = new Work with TbeforeAfter with TAfter
  work.doAction
}
// AOP-style behavior stacking via the stackable-trait pattern.
trait Action{
  def doAction
}

trait TbeforeAfter extends Action{
  abstract override def doAction{
    println("Initialization")
    super.doAction
    println("Destroyed")
  }
}

trait TAfter extends Action{
  abstract override def doAction{
    println("Initialization2")
    // Without this super call, TbeforeAfter's doAction would not be executed.
    super.doAction
    println("Destroyed2")
  }
}

class Work extends Action{
  override def doAction{
    println("do some work")
  }
}
| sparkLiwei/ProgrammingNote | scalaLearning/scalaOOP/TraitImpro.scala | Scala | cc0-1.0 | 1,546 |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.parse.code.languages
import cats.data.NonEmptyList
import laika.ast.{CodeSpan, ~}
import laika.bundle.SyntaxHighlighter
import laika.parse.code.common.StringLiteral
import laika.parse.code.{CodeCategory, CodeSpanParser}
import laika.parse.text.PrefixedParser
import laika.parse.builders._
import laika.parse.implicits._
import laika.parse.code.implicits._
/**
* @author Jens Halm
*/
object MarkdownSyntax extends SyntaxHighlighter {

  // Inline span delimited on both sides by `delim` (e.g. **strong**, `code`);
  // the opening delimiter must not be followed by a space, the closing one
  // must not be preceded by one.
  def span (category: CodeCategory, delim: String): CodeSpanParser =
    StringLiteral
      .multiLine(delimiter(delim).nextNot(' '), delimiter(delim).prevNot(' '))
      .withCategory(category)

  // Single-line span from `start` up to the `end` character.
  def singleLine (category: CodeCategory, start: String, end: Char): CodeSpanParser =
    StringLiteral
      .singleLine(start, end.toString)
      .withCategory(category)

  // Parses a markdown link or image starting with `prefix` ("[" or "!["),
  // optionally followed by an inline "(url)" target or a "[ref]" target.
  private def linkParser (prefix: String): PrefixedParser[Seq[CodeSpan]] = {

    val url = ("(" ~> delimitedBy(')').nonEmpty.failOn('\\n')).source.asCode(CodeCategory.Markup.LinkTarget)
    val ref = ("[" ~> delimitedBy(']').failOn('\\n')).source.asCode(CodeCategory.Markup.LinkTarget)

    val link = (literal(prefix) ~ delimitedBy(']').failOn('\\n')).source

    (link ~ opt(url | ref)).map {
      // "[text][]" is a shortcut reference; the whole span acts as the target.
      case linkText ~ Some(target) if target.content == "[]" => Seq(CodeSpan(linkText + "[]", CodeCategory.Markup.LinkTarget))
      case linkText ~ Some(target) => Seq(CodeSpan(linkText, CodeCategory.Markup.LinkText), target)
      case linkText ~ None => Seq(CodeSpan(linkText, CodeCategory.Markup.LinkTarget))
    }
  }

  // Inline link: [text](url), [text][ref] or [text].
  val link: CodeSpanParser = CodeSpanParser(linkParser("["))

  // Image: ![alt](url), ![alt][ref] or ![alt].
  val image: CodeSpanParser = CodeSpanParser(linkParser("!["))

  // Link definition at line start, e.g. "[id]: http://target".
  val linkTarget: CodeSpanParser = CodeSpanParser.onLineStart {
    ("[" ~> delimitedBy("]:").failOn('\\n') ~ restOfLine).map {
      case ref ~ target => Seq(
        CodeSpan(s"[$ref]:", CodeCategory.Identifier),
        CodeSpan(target, CodeCategory.Markup.LinkTarget),
        CodeSpan("\\n"),
      )
    }
  }

  // ATX headline: one to six '#' characters at line start plus the title.
  val atxHeader: CodeSpanParser = CodeSpanParser.onLineStart(CodeCategory.Markup.Headline) {
    (someOf('#').max(6) ~ anyNot('\\n')).source
  }

  // Setext headline: a line of text underlined with '=' or '-'.
  val setexHeader: CodeSpanParser = CodeSpanParser.onLineStart(CodeCategory.Markup.Headline) {
    val deco = (someOf('=') | someOf('-')) <~ lookAhead(wsEol)
    (restOfLine ~ deco).source
  }

  // Code fence: exactly three backticks plus an optional language identifier.
  val codeFence: CodeSpanParser = CodeSpanParser.onLineStart(CodeCategory.Markup.Fence) {
    (anyOf('`').take(3) ~ anyNot('\\n')).source
  }

  // Horizontal rule: three or more '*', '-' or '_', optionally space-separated.
  val rules: CodeSpanParser = CodeSpanParser.onLineStart(CodeCategory.Markup.Fence) {
    Seq('*', '-', '_').map { decoChar =>
      (oneOf(decoChar) ~ (anyOf(' ') ~ oneOf(decoChar)).rep.min(2) ~ ws ~ "\\n").source
    }.reduceLeft(_ | _)
  }

  // Blockquote prefix: one or more '>' characters at line start.
  val quoteChars: CodeSpanParser = CodeSpanParser.onLineStart(CodeCategory.Markup.Quote) {
    someOf('>').source
  }

  // Emphasis and inline-code spans; longer delimiters are listed first so
  // they win over their single-character variants.
  val mdSpans: CodeSpanParser =
    span(CodeCategory.Markup.Emphasized, "**") ++
    span(CodeCategory.Markup.Emphasized, "*") ++
    span(CodeCategory.Markup.Emphasized, "__") ++
    span(CodeCategory.Markup.Emphasized, "_") ++
    span(CodeCategory.StringLiteral, "``") ++
    span(CodeCategory.StringLiteral, "`")

  val language: NonEmptyList[String] = NonEmptyList.of("markdown", "md")

  // Order matters: earlier parsers take precedence at the same input position.
  val spanParsers: Seq[CodeSpanParser] = Seq(mdSpans,
    singleLine(CodeCategory.Markup.LinkTarget, "<", '>'),
    image,
    link,
    StringLiteral.Escape.char,
    linkTarget,
    codeFence,
    atxHeader,
    setexHeader,
    rules,
    quoteChars)

}
| planet42/Laika | core/shared/src/main/scala/laika/parse/code/languages/MarkdownSyntax.scala | Scala | apache-2.0 | 4,119 |
package wzk.akkalogger.util
import java.net.InetAddress
import akka.actor.ActorSystem
import akka.io.Inet
import com.typesafe.config.ConfigFactory
object RemoteRelatedUtil {

  /**
   * Builds a Typesafe config that enables Akka remoting over netty.tcp bound
   * to the given host and port (port 0 lets Akka pick a random free port).
   */
  // BUG FIX: the transports key was misspelled "enable-transports"; Akka only
  // reads "akka.remote.enabled-transports", so the setting was ignored.
  def remotingConfig(host: String, port: Int) = ConfigFactory.parseString(
    s"""
    |akka {
    | actor.provider = "akka.remote.RemoteActorRefProvider"
    | remote {
    |  enabled-transports = ["akka.remote.netty.tcp"]
    |  netty.tcp {
    |   hostname = $host
    |   port = $port
    |  }
    | }
    | loglevel = "ERROR"
    |}
    """.stripMargin)

  /** Creates an actor system with remoting enabled on host:port. */
  def remotingSystem(name: String, host: String, port: Int): ActorSystem =
    ActorSystem(name, remotingConfig(host, port))

  /** Creates an actor system on the local host with a random port. */
  def createLocalSystem(name: String): ActorSystem =
    ActorSystem(name, remotingConfig(getLocalNodeHostname, 0)) // we only need random port for local system

  /** Hostname of the local node. */
  def getLocalNodeHostname: String = InetAddress.getLocalHost.getHostName
}
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.jadapter.internal
import java.time.Duration
import java.util.concurrent.CompletionStage
import io.rdbc.jadapter.internal.Conversions._
import io.rdbc.japi._
import io.rdbc.japi.util.ThrowingSupplier
import io.rdbc.sapi
import io.rdbc.util.Preconditions.checkNotNull
import scala.compat.java8.FutureConverters._
import scala.concurrent.ExecutionContext
/**
 * Adapts the Scala-API [[sapi.Connection]] to the Java-API [[Connection]]:
 * arguments are null-checked, Scala Futures are exposed as CompletionStages,
 * and exceptions are translated via the implicit [[ExceptionConversion]].
 */
private[jadapter] class ConnectionAdapter(val underlying: sapi.Connection)
                                         (implicit ec: ExecutionContext,
                                          exConversion: ExceptionConversion)
  extends Connection {

  import exConversion._

  // --- Transaction control; the no-timeout overloads use InfiniteTimeout. ---

  def beginTx(timeout: Duration): CompletionStage[Void] = {
    checkNotNull(timeout)
    convertExceptionsFut {
      underlying.beginTx()(timeout.asScala).map[Void](_ => null).toJava
    }
  }

  def beginTx(): CompletionStage[Void] = {
    beginTx(InfiniteTimeout)
  }

  def commitTx(timeout: Duration): CompletionStage[Void] = {
    checkNotNull(timeout)
    convertExceptionsFut {
      underlying.commitTx()(timeout.asScala).map[Void](_ => null).toJava
    }
  }

  def commitTx(): CompletionStage[Void] = {
    commitTx(InfiniteTimeout)
  }

  def rollbackTx(timeout: Duration): CompletionStage[Void] = {
    checkNotNull(timeout)
    convertExceptionsFut {
      underlying.rollbackTx()(timeout.asScala).map[Void](_ => null).toJava
    }
  }

  def rollbackTx(): CompletionStage[Void] = {
    rollbackTx(InfiniteTimeout)
  }

  // Runs `body` inside a transaction; commit/rollback handling is delegated
  // to the underlying Scala connection.
  def withTransaction[T](body: ThrowingSupplier[CompletionStage[T]]): CompletionStage[T] = {
    checkNotNull(body)
    withTransaction(InfiniteTimeout, body)
  }

  def withTransaction[T](timeout: Duration,
                         body: ThrowingSupplier[CompletionStage[T]]): CompletionStage[T] = {
    checkNotNull(timeout)
    checkNotNull(body)
    convertExceptionsFut {
      underlying.withTransaction {
        body.supply().toScala
      }(timeout.asScala).toJava
    }
  }

  // --- Connection lifecycle; semantics follow the underlying sapi.Connection. ---

  def release(): CompletionStage[Void] = convertExceptionsFut {
    underlying.release().map[Void](_ => null).toJava
  }

  def forceRelease(): CompletionStage[Void] = convertExceptionsFut {
    underlying.forceRelease().map[Void](_ => null).toJava
  }

  def validate(timeout: Duration): CompletionStage[Void] = {
    checkNotNull(timeout)
    convertExceptionsFut {
      underlying.validate()(timeout.asScala)
        .map(_ => null: Void).toJava
    }
  }

  // --- Statement creation. ---

  def statement(sql: String): Statement = {
    checkNotNull(sql)
    convertExceptions {
      underlying.statement(sql).asJava
    }
  }

  def statement(sql: String, options: StatementOptions): Statement = {
    checkNotNull(sql)
    checkNotNull(options)
    convertExceptions {
      underlying.statement(sql, options.asScala).asJava
    }
  }

  // Completes when the connection becomes idle, yielding this adapter.
  def watchForIdle: CompletionStage[Connection] = convertExceptionsFut {
    underlying.watchForIdle.map[Connection](_ => this).toJava
  }

  override def toString: String = underlying.toString
}
| rdbc-io/rdbc | rdbc-java-adapter/src/main/scala/io/rdbc/jadapter/internal/ConnectionAdapter.scala | Scala | apache-2.0 | 3,576 |
package sbt
// Dependency descriptors and sbt setting keys exposed to Play projects.
trait PlayKeys {
  // Optional Play runtime modules, all pinned to the current Play version.
  val jdbc = "play" %% "play-jdbc" % play.core.PlayVersion.current
  val anorm = "play" %% "anorm" % play.core.PlayVersion.current
  val javaCore = "play" %% "play-java" % play.core.PlayVersion.current
  val javaJdbc = "play" %% "play-java-jdbc" % play.core.PlayVersion.current
  val javaEbean = "play" %% "play-java-ebean" % play.core.PlayVersion.current
  val javaJpa = "play" %% "play-java-jpa" % play.core.PlayVersion.current

  // Descriptor for an arbitrary Play component artifact by id.
  def component(id: String) = "play" %% id % play.core.PlayVersion.current

  val filters = "play" %% "filters-helpers" % play.core.PlayVersion.current

  // Core settings.
  val playVersion = SettingKey[String]("play-version")
  val playDefaultPort = SettingKey[Int]("play-default-port")

  // RequireJS integration.
  val requireJs = SettingKey[Seq[String]]("play-require-js")
  val requireJsFolder = SettingKey[String]("play-require-js-folder")
  val requireJsShim = SettingKey[String]("play-require-js-shim")
  val requireNativePath = SettingKey[Option[String]]("play-require-native-path")

  // Application lifecycle hooks.
  val playOnStarted = SettingKey[Seq[(java.net.InetSocketAddress) => Unit]]("play-onStarted")
  val playOnStopped = SettingKey[Seq[() => Unit]]("play-onStopped")

  // Packaging (dist) settings.
  val distDirectory = SettingKey[File]("play-dist")
  val distExcludes = SettingKey[Seq[String]]("dist-excludes")

  // Asset and configuration locations.
  val playAssetsDirectories = SettingKey[Seq[File]]("play-assets-directories")
  val playExternalAssets = SettingKey[Seq[(File, File => PathFinder, String)]]("play-external-assets")
  val confDirectory = SettingKey[File]("play-conf")

  // Code generation (templates / routes) settings.
  val templatesImport = SettingKey[Seq[String]]("play-templates-imports")
  val routesImport = SettingKey[Seq[String]]("play-routes-imports")
  val ebeanEnabled = SettingKey[Boolean]("play-ebean-enabled")
  val templatesTypes = SettingKey[PartialFunction[String, (String, String)]]("play-templates-formats")

  // Asset compiler options and entry points.
  val closureCompilerOptions = SettingKey[Seq[String]]("play-closure-compiler-options")
  val lessOptions = SettingKey[Seq[String]]("play-less-options")
  val coffeescriptOptions = SettingKey[Seq[String]]("play-coffeescript-options")
  val lessEntryPoints = SettingKey[PathFinder]("play-less-entry-points")
  val coffeescriptEntryPoints = SettingKey[PathFinder]("play-coffeescript-entry-points")
  val javascriptEntryPoints = SettingKey[PathFinder]("play-javascript-entry-points")

  val playPlugin = SettingKey[Boolean]("play-plugin")
  val devSettings = SettingKey[Seq[(String,String)]]("play-dev-settings")
}
object PlayKeys extends PlayKeys | noel-yap/setter-for-catan | play-2.1.1/framework/src/sbt-plugin/src/main/scala/PlayKeys.scala | Scala | apache-2.0 | 2,511 |
import org.scalatest._
import loaders.CifarLoader
import libs.CaffeLibrary
import libs.CaffeNet
import libs.ProtoLoader
import libs.Net
import com.sun.jna.Pointer
import com.sun.jna.Memory
// for this test to work, $SPARKNET_HOME/caffe should be the caffe root directory
// and you need to run $SPARKNET_HOME/caffe/data/cifar10/get_cifar10.sh
class CifarFeaturizationSpec extends FlatSpec {
  // End-to-end smoke test: build CifarNet through the native Caffe bridge on a
  // randomly initialized net, run one forward pass, and verify that the expected
  // blobs are exposed with the expected shapes.
  "CifarNet" should "get chance digits right on randomly initialized net" in {
    // Requires $SPARKNET_HOME to point at the SparkNet checkout (see file header notes).
    val sparkNetHome = sys.env("SPARKNET_HOME")
    val loader = new CifarLoader(sparkNetHome + "/caffe/data/cifar10/")
    // Load the native C wrapper around Caffe before touching the JNA interface.
    System.load(sparkNetHome + "/build/libccaffe.so")
    val caffeLib = CaffeLibrary.INSTANCE
    caffeLib.set_basepath(sparkNetHome + "/caffe/")
    // val net = caffeLib.make_solver_from_prototxt(sparkNetHome + "/caffe/examples/cifar10/cifar10_full_java_solver.prototxt")
    val state = caffeLib.create_state()
    // Parse the solver prototxt on the JVM side, serialize it into native memory,
    // and hand the raw protobuf bytes to Caffe.
    val solver = ProtoLoader.loadSolverPrototxt(sparkNetHome + "/caffe/examples/cifar10/cifar10_full_java_solver.prototxt")
    val byteArr = solver.toByteArray()
    val ptr = new Memory(byteArr.length);
    ptr.write(0, byteArr, 0, byteArr.length)
    caffeLib.load_solver_from_protobuf(state, ptr, byteArr.length)
    // Element sizes (in bytes) of Caffe's dtype and int, used for the manual
    // pointer arithmetic in the callbacks below.
    val dtypeSize = caffeLib.get_dtype_size()
    val intSize = caffeLib.get_int_size()
    // Builds a native callback that copies `images` batch-by-batch into Caffe's
    // input blob, wrapping around to the first image once the data set is exhausted.
    def makeImageCallback(images: Array[Array[Byte]]) : CaffeLibrary.java_callback_t = {
      return new CaffeLibrary.java_callback_t() {
        var currImage = 0
        def invoke(data: Pointer, batch_size: Int, num_dims: Int, shape: Pointer) {
          // Number of elements per image, computed as the product of the blob's dims.
          var size = 1
          for(i <- 0 to num_dims - 1) {
            val dim = shape.getInt(i * intSize)
            size *= dim
          }
          for(j <- 0 to batch_size - 1) {
            assert(size == images(currImage).length)
            for(i <- 0 to size - 1) {
              // `& 0xFF` widens the signed byte to its unsigned pixel value before
              // converting to float.
              data.setFloat((j * size + i) * dtypeSize, 1F * (images(currImage)(i) & 0xFF))
            }
            currImage += 1
            if(currImage == images.length) {
              currImage = 0
            }
          }
        }
      };
    }
    // Same idea as makeImageCallback, but for the scalar label blob (one float per
    // batch element; asserts the label blob is 1-dimensional).
    def makeLabelCallback(labels: Array[Int]) : CaffeLibrary.java_callback_t = {
      return new CaffeLibrary.java_callback_t() {
        var currImage = 0
        def invoke(data: Pointer, batch_size: Int, num_dims: Int, shape: Pointer) {
          for(j <- 0 to batch_size - 1) {
            assert(shape.getInt(0) == 1)
            data.setFloat(j * dtypeSize, 1F * labels(currImage))
            currImage += 1
            if(currImage == labels.length) {
              currImage = 0
            }
          }
        }
      };
    }
    // Wire the train/test data feeds: slot 0 carries images, slot 1 carries labels.
    val loadTrainImageFn = makeImageCallback(loader.trainImages)
    val loadTrainLabelFn = makeLabelCallback(loader.trainLabels)
    caffeLib.set_train_data_callback(state, 0, loadTrainImageFn)
    caffeLib.set_train_data_callback(state, 1, loadTrainLabelFn)
    val loadTestImageFn = makeImageCallback(loader.testImages)
    val loadTestLabelFn = makeLabelCallback(loader.testLabels)
    caffeLib.set_test_data_callback(state, 0, loadTestImageFn)
    caffeLib.set_test_data_callback(state, 1, loadTestLabelFn)
    val net = new CaffeNet(state, caffeLib)
    net.forward()
    // After one forward pass, the net must expose exactly these blobs (sorted by name).
    val data = net.getData
    val sortedKeys = data.keys.toArray.sorted
    assert(sortedKeys(0) == "conv1")
    assert(sortedKeys(1) == "conv2")
    assert(sortedKeys(2) == "conv3")
    assert(sortedKeys(3) == "data")
    assert(sortedKeys(4) == "ip1")
    assert(sortedKeys(5) == "label")
    assert(sortedKeys(6) == "loss")
    assert(sortedKeys(7) == "norm1")
    assert(sortedKeys(8) == "norm2")
    // conv1 output: batch of 100, 32 channels, 32x32 spatial dims.
    val shape = data("conv1").shape.deep
    assert(shape(0) == 100)
    assert(shape(1) == 32)
    assert(shape(2) == 32)
    assert(shape(3) == 32)
  }
}
| rahulbhalerao001/SparkNet | src/test/scala/libs/CifarFeaturizationSpec.scala | Scala | mit | 3,784 |
package pureconfig
import com.typesafe.config.{ Config, ConfigFactory, ConfigValueType }
import org.scalatest._
import java.io.PrintWriter
import java.nio.file.Files
import scala.collection.JavaConversions._
import pureconfig.conf.typesafeConfigToConfig
import PureconfSuite.withTempFile
class RawConfigSuite extends FlatSpec with Matchers {
  // Writes a fixture file covering every ConfigValueType of interest, then checks
  // that the whole Typesafe Config tree flattens into a string-valued RawConfig map.
  "typesafeConfigToConfig" should "be able to convert any Typesafe Config instance into a RawConfig" in {
    withTempFile { configFile =>
      // Fixture lines, one per value type under test.
      val fixtureLines = Seq(
        "object = { a: 43 }",
        "string = 'foobar'",
        "list = [1, 2, 3]",
        "number = 1",
        "boolean = true")
      val writer = new PrintWriter(Files.newOutputStream(configFile))
      fixtureLines.foreach(line => writer.println(line))
      writer.close()

      val config = ConfigFactory.parseFile(configFile.toFile)

      // Sanity-check that parsing really produced the expected value types.
      val expectedTypes = Seq(
        "object" -> ConfigValueType.OBJECT,
        "number" -> ConfigValueType.NUMBER,
        "string" -> ConfigValueType.STRING,
        "boolean" -> ConfigValueType.BOOLEAN,
        "list" -> ConfigValueType.LIST)
      expectedTypes.foreach { case (path, tpe) =>
        config.getValue(path).valueType shouldEqual tpe
      }

      // Nested objects flatten to dotted keys; every value renders as a string.
      typesafeConfigToConfig(config) shouldEqual Map(
        "number" -> "1",
        "list" -> "1, 2, 3",
        "string" -> "'foobar'",
        "boolean" -> "true",
        "object.a" -> "43"
      )
    }
  }
}
| 13h3r/pureconfig | src/test/scala/pureconfig/RawConfigSuite.scala | Scala | mpl-2.0 | 1,443 |
package org.jetbrains.plugins.scala.lang.typeConformance
package generated
class TypeConformanceBasicTest extends TypeConformanceTestBase {
  //This class was generated by build script, please don't change this
  // Each one-liner loads the identically named fixture under folderPath and runs
  // the conformance check via TypeConformanceTestBase.doTest().
  override def folderPath: String = super.folderPath + "basic/"
  def testAbstractType() {doTest()}
  def testAliasBounds() {doTest()}
  def testAliasBounds2() {doTest()}
  def testAliasBounds3() {doTest()}
  def testAnyRef() {doTest()}
  def testAnyVal1() {doTest()}
  def testAnyVal2() {doTest()}
  def testAnyVal3() {doTest()}
  def testAnyVal4() {doTest()}
  def testAnyVal5() {doTest()}
  def testBasicConformance() {doTest()}
  def testBasicNonConformance() {doTest()}
  def testDependentEnumBaseTypes() {doTest()}
  def testFloat() {doTest()}
  def testFunctionFalseConformance() {doTest()}
  def testFunctionPlaceholderMatch() {doTest()}
  def testFunctionRightConformance() {doTest()}
  def testHashSetIsSet() {doTest()}
  def testImplicitInixParamClause() {doTest()}
  def testIsDefinedAt() {doTest()}
  def testNullConformance() {doTest()}
  def testStringLiteralToObject() {doTest()}
  def testTupleConformance() {doTest()}
  def testTupleRightConformance() {doTest()}
  def testTupleToConformance() {doTest()}
  def testTupleToProductNoConformance() {doTest()}
  // Inline regression test for SCL-9506. The trailing "//false" marker tells
  // doTest that the declaration at the caret must NOT type-conform.
  def testSCL9506(): Unit = {
    doTest(
      s"""object TicketTester {
        |  trait A {
        |    private type This = A
        |
        |    def something: This = this
        |  }
        |
        |  class B extends A {
        |    private type This = B
        |
        |    ${caretMarker}val aThis: This = super.something
        |  }
        |}
        |//false
      """.stripMargin)
  }
}
import org.specs2._
import java.util.UUID
import pgentity.pg_entity._
import anorm._
import anorm.SqlParser._
import anorm.SqlStatementParser.parse
import scala.util.{Success,Failure}
// Test fixture: a minimal entity plus its PgEntity type-class instance.
object Values {
  // Row model for the "dummy_table" table used by the specs below.
  case class DummyTable(
    id: UUID,
    name: String,
    number: Int)
  // PgEntity instance mapping DummyTable to its table name, column list
  // (with an explicit UUID cast on the key column) and anorm row parser.
  implicit val DummyTablePgEntity = new PgEntity[DummyTable] {
    val tableName = "dummy_table"
    val columns = List(PgField("dummy_table_id", Some("UUID")), PgField("name"), PgField("number"))
    // Parses one row; `prefix` lets callers disambiguate joined column names.
    def parser(prefix: String) = {
      get[UUID](prefix + "dummy_table_id") ~
      str(prefix + "name") ~
      int(prefix + "number") map { case (id ~ name ~ number) => DummyTable(id, name, number) }
    }
  }
}
class PgEntitySpec extends mutable.Specification with ScalaCheck {

  /** Splits a rendered column list into its individual, trimmed column expressions. */
  private def renderedColumns(prefix: Option[String]): List[String] =
    columnList[Values.DummyTable](prefix).split(",").toList.map(_.trim)

  /** Column names declared on the DummyTable entity, in declaration order. */
  private def declaredColumnNames: List[String] =
    Values.DummyTablePgEntity.columns.map(_.name)

  "columnList" should {
    "automatically prefix fields with the table name" in {
      // Without an explicit prefix, each column is qualified by the quoted table name.
      renderedColumns(None).forall { col =>
        col must startWith(""""dummy_table".""")
      }
    }
    "prefix fields with the given prefix" in {
      renderedColumns(Some("renamed_column_")).forall { col =>
        col must startWith("renamed_column_")
      }
    }
    "include all fields in insert statement" in {
      // Parse the generated SQL and compare its named placeholders to the entity's columns.
      parse(insertSQL[Values.DummyTable]) match {
        case Success(stmt) => stmt.names must containTheSameElementsAs(declaredColumnNames)
        case Failure(err) => throw err
      }
    }
    "include all fields in update statement" in {
      parse(updateSQL[Values.DummyTable]()) match {
        case Success(stmt) => stmt.names must containTheSameElementsAs(declaredColumnNames)
        case Failure(err) => throw err
      }
    }
  }
}
| divarvel/anorm-pg-entity | src/test/scala/pgentity/PgEntities.scala | Scala | mit | 1,898 |
package pl.touk.nussknacker.engine.util.metrics
import pl.touk.nussknacker.engine.api.Lifecycle
import pl.touk.nussknacker.engine.api.runtimecontext.EngineRuntimeContext
/**
 * Mixin giving a component access to the scenario's metrics provider.
 * The provider is captured from the [[EngineRuntimeContext]] in `open`;
 * it is `@transient` so it is not serialized with the component and is
 * null until `open` has been called.
 */
trait WithMetrics extends Lifecycle {
  @transient protected var metricsProvider: MetricsProviderForScenario = _
  // Chains to super.open first so other Lifecycle mixins initialize in order.
  override def open(context: EngineRuntimeContext): Unit = {
    super.open(context)
    this.metricsProvider = context.metricsProvider
  }
}
| TouK/nussknacker | utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/metrics/WithMetrics.scala | Scala | apache-2.0 | 430 |
/**
* Copyright (C) 2016 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.util
import org.orbeon.oxf.util.CoreUtils._
import scala.collection.generic.CanBuildFrom
import scala.collection.{AbstractIterator, TraversableLike, mutable}
import scala.language.{implicitConversions, reflectiveCalls}
import scala.reflect.ClassTag
/** Grab-bag of collection/iterator helpers and enrichments used across the codebase. */
object CollectionUtils {
  // Combine the second values of each tuple that have the same name
  // The caller can specify the type of the resulting values, e.g.:
  // - combineValues[String, AnyRef, Array]
  // - combineValues[String, String, List]
  // First-occurrence order of keys is preserved (backed by a LinkedHashMap).
  def combineValues[Key, U, T[_]](parameters: Seq[(Key, U)])(implicit cbf: CanBuildFrom[Nothing, U, T[U]]): Seq[(Key, T[U])] = {
    val result = mutable.LinkedHashMap[Key, mutable.Builder[U, T[U]]]()
    for ((name, value) ← parameters)
      result.getOrElseUpdate(name, cbf()) += value
    result map { case (k, v) ⇒ k → v.result } toList
  }
  // Extensions on Iterator[T]
  implicit class IteratorWrapper[T](val i: Iterator[T]) extends AnyVal {
    // Next element if the iterator is non-empty; advances the iterator.
    def nextOption(): Option[T] = i.hasNext option i.next()
    // Drains the iterator and returns its final element, or None if it was empty.
    def lastOption(): Option[T] = {
      var n = nextOption()
      while (n.isDefined) {
        val nextN = nextOption()
        if (nextN.isEmpty)
          return n
        n = nextN
      }
      None
    }
  }
  // Extensions on Iterator object
  object IteratorExt {
    // Iterator of start, gen(start), gen(gen(start)), ... until gen returns None.
    def iterateFrom[T](start: T, gen: T ⇒ Option[T]): Iterator[T] = {
      var next: Option[T] = Some(start)
      iterateWhileDefined {
        val result = next
        next = next.flatMap(gen)
        result
      }
    }
    // Repeatedly evaluates `elem` while `cond` holds (both are by-name).
    def iterateWhile[T](cond: ⇒ Boolean, elem: ⇒ T): Iterator[T] =
      iterateWhileDefined(cond option elem)
    // Repeatedly evaluates `elemOpt` until it yields None.
    def iterateWhileDefined[T](elemOpt: ⇒ Option[T]): Iterator[T] =
      Iterator.continually(elemOpt).takeWhile(_.isDefined).flatten
    // Like iterateFrom, but hand-rolled so `f` is invoked lazily on each next().
    def iterateOpt[T <: AnyRef](start: T)(f: T ⇒ Option[T]): Iterator[T] = new AbstractIterator[T] {
      private[this] var _next: Option[T] = Some(start)
      def hasNext: Boolean = _next.isDefined
      def next(): T =
        _next match {
          case Some(result) ⇒
            // Advance on `next()` for simplicity
            _next = _next flatMap f
            result
          case None ⇒
            throw new NoSuchElementException("next on empty iterator")
        }
    }
  }
  // Makes the IteratorExt factories callable as if they were members of `Iterator`.
  implicit def fromIteratorExt(i: Iterator.type): IteratorExt.type = IteratorExt
  // WARNING: Remember that type erasure takes place! collectByErasedType[T[U1]] will work even if the underlying type was T[U2]!
  // NOTE: `case t: T` works with `ClassTag` only since Scala 2.10.
  def collectByErasedType[T: ClassTag](value: Any): Option[T] = Option(value) collect { case t: T ⇒ t }
  implicit class TraversableLikeOps[A, Repr](val t: TraversableLike[A, Repr]) extends AnyVal {
    // Like groupBy, but the result preserves the order in which keys first appear.
    def groupByKeepOrder[K](f: A ⇒ K)(implicit cbf: CanBuildFrom[Nothing, A, Repr]): List[(K, Repr)] = {
      val m = mutable.LinkedHashMap.empty[K, mutable.Builder[A, Repr]]
      for (elem ← t) {
        val key = f(elem)
        val bldr = m.getOrElseUpdate(key, cbf())
        bldr += elem
      }
      val b = List.newBuilder[(K, Repr)]
      for ((k, v) ← m)
        b += ((k, v.result()))
      b.result()
    }
    // Keeps only the first element for each distinct key, preserving order.
    def keepDistinctBy[K, U](key: A ⇒ K): List[A] = {
      val result = mutable.ListBuffer[A]()
      val seen = mutable.Set[K]()
      for (x ← t) {
        val k = key(x)
        if (! seen(k)) {
          result += x
          seen += k
        }
      }
      result.to[List]
    }
    // Return duplicate values in the order in which they appear
    // A duplicate value is returned only once
    def findDuplicates: List[A] = {
      val result = mutable.LinkedHashSet[A]()
      val seen = mutable.HashSet[A]()
      for (x ← t) {
        if (seen(x))
          result += x
        else
          seen += x
      }
      result.to[List]
    }
  }
  // Enables `x collect { case ... }` on any single value, returning Option.
  implicit class anyToCollectable[A](val a: A) extends AnyVal {
    def collect[B](pf: PartialFunction[A, B]): Option[B] =
      pf.isDefinedAt(a) option pf(a)
  }
  // Interprets the Int array as Unicode code points and builds a String from them.
  implicit class IntArrayOps(val a: Array[Int]) extends AnyVal {
    def codePointsToString = new String(a, 0, a.length)
  }
  // Same as IntArrayOps, but drains an iterator of code points first.
  implicit class IntIteratorOps(val i: Iterator[Int]) extends AnyVal {
    def codePointsToString = {
      val a = i.to[Array]
      new String(a, 0, a.length)
    }
  }
  // Where to place an inserted element relative to the given index.
  sealed trait InsertPosition
  case object InsertBefore extends InsertPosition
  case object InsertAfter extends InsertPosition
  implicit class VectorOps[T](val values: Vector[T]) extends AnyVal {
    // Inserts a single value before/after `index` without mutating the original.
    def insertAt(index: Int, value: T, position: InsertPosition): Vector[T] =
      position match {
        case InsertBefore ⇒ (values.take(index) :+ value) ++ values.drop(index)
        case InsertAfter ⇒ (values.take(index + 1) :+ value) ++ values.drop(index + 1)
      }
    // Inserts a whole collection before/after `index`.
    def insertAt(index: Int, newValues: Traversable[T], position: InsertPosition): Vector[T] =
      position match {
        case InsertBefore ⇒ values.take(index) ++ newValues ++ values.drop(index)
        case InsertAfter ⇒ values.take(index + 1) ++ newValues ++ values.drop(index + 1)
      }
    // Returns a copy with the element at `index` removed.
    def removeAt(index: Int): Vector[T] =
      values.take(index) ++ values.drop(index + 1)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core
import java.io.File
import scala.io.Source
import org.apache.openwhisk.common.{Config, Logging}
/**
* A set of properties which might be needed to run a whisk microservice implemented
* in scala.
*
* @param requiredProperties a Map whose keys define properties that must be bound to
* a value, and whose values are default values. A null value in the Map means there is
* no default value specified, so it must appear in the properties file.
* @param optionalProperties a set of optional properties (which may not be defined).
* @param propertiesFile a File object, the whisk.properties file, which if given contains the property values.
* @param env an optional environment to initialize from.
*/
class WhiskConfig(requiredProperties: Map[String, String],
                  optionalProperties: Set[String] = Set.empty,
                  propertiesFile: File = null,
                  env: Map[String, String] = sys.env)(implicit logging: Logging)
    extends Config(requiredProperties, optionalProperties)(env) {

  /**
   * Loads the properties as specified above.
   *
   * Values from the environment (via the Config superclass) are overlaid with
   * values from the whisk.properties file — the explicit `propertiesFile` if
   * given, otherwise the one discovered by walking up from the working directory.
   *
   * @return a pair which is the Map defining the properties, and a boolean indicating whether validation succeeded.
   */
  override protected def getProperties() = {
    val properties = super.getProperties()
    WhiskConfig.readPropertiesFromFile(properties, Option(propertiesFile) getOrElse (WhiskConfig.whiskPropertiesFile))
    properties
  }

  // Resolved configuration values. Each `this(key)` lookup below throws if a
  // required key is missing (behavior inherited from Config).
  val servicePort = this(WhiskConfig.servicePort)
  val dockerEndpoint = this(WhiskConfig.dockerEndpoint)
  val dockerPort = this(WhiskConfig.dockerPort)

  // Fully assembled API endpoint, e.g. "https://host:443".
  val wskApiHost = this(WhiskConfig.wskApiProtocol) + "://" + this(WhiskConfig.wskApiHostname) + ":" + this(
    WhiskConfig.wskApiPort)
  // Fraction of invokers reserved for blackbox actions; defaults to 10%.
  val controllerBlackboxFraction = this.getAsDouble(WhiskConfig.controllerBlackboxFraction, 0.10)
  val controllerInstances = this(WhiskConfig.controllerInstances)

  val edgeHost = this(WhiskConfig.edgeHostName) + ":" + this(WhiskConfig.edgeHostApiPort)
  val kafkaHosts = this(WhiskConfig.kafkaHostList)

  val edgeHostName = this(WhiskConfig.edgeHostName)

  val invokerHosts = this(WhiskConfig.invokerHostsList)
  val zookeeperHosts = this(WhiskConfig.zookeeperHostList)

  val dbPrefix = this(WhiskConfig.dbPrefix)
  val mainDockerEndpoint = this(WhiskConfig.mainDockerEndpoint)

  val runtimesManifest = this(WhiskConfig.runtimesManifest)
  val actionInvokePerMinuteLimit = this(WhiskConfig.actionInvokePerMinuteLimit)
  val actionInvokeConcurrentLimit = this(WhiskConfig.actionInvokeConcurrentLimit)
  val triggerFirePerMinuteLimit = this(WhiskConfig.triggerFirePerMinuteLimit)
  val actionSequenceLimit = this(WhiskConfig.actionSequenceMaxLimit)
  val controllerSeedNodes = this(WhiskConfig.controllerSeedNodes)
}
object WhiskConfig {

  /**
   * Reads a key from system environment as if it was part of WhiskConfig.
   */
  def readFromEnv(key: String): Option[String] = sys.env.get(asEnvVar(key))

  /**
   * Locates whisk.properties by walking up the directory tree starting from the
   * current working directory; returns null when no such file is found.
   */
  private def whiskPropertiesFile: File = {
    def propfile(dir: String, recurse: Boolean = false): File =
      if (dir != null) {
        val base = new File(dir)
        val file = new File(base, "whisk.properties")
        if (file.exists())
          file
        else if (recurse)
          propfile(base.getParent, true)
        else null
      } else null

    val dir = sys.props.get("user.dir")
    if (dir.isDefined) {
      propfile(dir.get, true)
    } else {
      null
    }
  }

  /**
   * Reads key-value pairs from the given properties file and stores them in the
   * mutable properties map, but only for keys already present in the map.
   *
   * Each line is split on the FIRST '=' only, so values may themselves contain
   * '=' (e.g. base64 strings or URLs with query parameters). The previous
   * unbounded split silently replaced any such value with the empty string.
   */
  def readPropertiesFromFile(properties: scala.collection.mutable.Map[String, String], file: File)(
    implicit logging: Logging) = {
    if (file != null && file.exists) {
      logging.info(this, s"reading properties from file $file")
      val source = Source.fromFile(file)
      try {
        for (line <- source.getLines if line.trim != "") {
          // Limit 2: keep everything after the first '=' as the value.
          val parts = line.split("=", 2)
          if (parts.length >= 1 && parts(0).trim.nonEmpty) {
            val p = parts(0).trim
            val v = if (parts.length == 2) parts(1).trim else ""
            if (properties.contains(p)) {
              properties += p -> v
              logging.debug(this, s"properties file set value for $p")
            }
          } else {
            logging.warn(this, s"ignoring properties $line")
          }
        }
      } finally {
        source.close()
      }
    }
  }

  /**
   * Maps a dotted property key to its environment variable form, e.g.
   * "edge.host.apiport" -> "EDGE_HOST_APIPORT". Uses Locale.ROOT so the mapping
   * is stable regardless of the JVM default locale (e.g. Turkish dotless i).
   */
  def asEnvVar(key: String): String = {
    if (key != null)
      key.replace('.', '_').toUpperCase(java.util.Locale.ROOT)
    else null
  }

  val servicePort = "port"
  val dockerPort = "docker.port"

  val dockerEndpoint = "main.docker.endpoint"

  val dbPrefix = "db.prefix"
  // these are not private because they are needed
  // in the invoker (they are part of the environment
  // passed to the user container)
  val edgeHostName = "edge.host"

  val wskApiProtocol = "whisk.api.host.proto"
  val wskApiPort = "whisk.api.host.port"
  val wskApiHostname = "whisk.api.host.name"
  val wskApiHost = Map(wskApiProtocol -> "https", wskApiPort -> 443.toString, wskApiHostname -> null)

  val mainDockerEndpoint = "main.docker.endpoint"

  val controllerBlackboxFraction = "controller.blackboxFraction"
  val controllerInstances = "controller.instances"
  val dbInstances = "db.instances"

  val kafkaHostList = "kafka.hosts"
  val zookeeperHostList = "zookeeper.hosts"

  private val edgeHostApiPort = "edge.host.apiport"

  val invokerHostsList = "invoker.hosts"
  val dbHostsList = "db.hostsList"

  val edgeHost = Map(edgeHostName -> null, edgeHostApiPort -> null)
  val invokerHosts = Map(invokerHostsList -> null)
  val kafkaHosts = Map(kafkaHostList -> null)
  val zookeeperHosts = Map(zookeeperHostList -> null)

  val runtimesManifest = "runtimes.manifest"

  val actionSequenceMaxLimit = "limits.actions.sequence.maxLength"
  val actionInvokePerMinuteLimit = "limits.actions.invokes.perMinute"
  val actionInvokeConcurrentLimit = "limits.actions.invokes.concurrent"
  val triggerFirePerMinuteLimit = "limits.triggers.fires.perMinute"
  val controllerSeedNodes = "akka.cluster.seed.nodes"
}
/**
 * Central registry of Typesafe-config (pureconfig) key paths used across the
 * controller and invoker. Compound keys are built from their parent path so a
 * rename only has to happen in one place.
 */
object ConfigKeys {
  val cluster = "whisk.cluster"
  val loadbalancer = "whisk.loadbalancer"
  val buildInformation = "whisk.info"

  // Datastore backends.
  val couchdb = "whisk.couchdb"
  val cosmosdb = "whisk.cosmosdb"
  // Kafka client and topic configuration.
  val kafka = "whisk.kafka"
  val kafkaCommon = s"$kafka.common"
  val kafkaProducer = s"$kafka.producer"
  val kafkaConsumer = s"$kafka.consumer"
  val kafkaTopics = s"$kafka.topics"

  // Per-action resource limits.
  val memory = "whisk.memory"
  val timeLimit = "whisk.time-limit"
  val logLimit = "whisk.log-limit"
  val concurrencyLimit = "whisk.concurrency-limit"
  val activation = "whisk.activation"
  val userEvents = "whisk.user-events"

  // Action runtime images and whitelists.
  val runtimes = "whisk.runtimes"
  val runtimesWhitelists = s"$runtimes.whitelists"

  val db = "whisk.db"

  // Container orchestration backends (docker / runc / kubernetes / mesos / yarn).
  val docker = "whisk.docker"
  val dockerClient = s"$docker.client"
  val dockerContainerFactory = s"$docker.container-factory"
  val runc = "whisk.runc"
  val runcTimeouts = s"$runc.timeouts"
  val tracing = "whisk.tracing"

  val containerFactory = "whisk.container-factory"
  val containerArgs = s"$containerFactory.container-args"
  val runtimesRegistry = s"$containerFactory.runtimes-registry"
  val containerPool = "whisk.container-pool"
  val blacklist = "whisk.blacklist"

  val kubernetes = "whisk.kubernetes"
  val kubernetesTimeouts = s"$kubernetes.timeouts"

  val transactions = "whisk.transactions"

  // Log collection backends.
  val logStore = "whisk.logstore"
  val splunk = s"$logStore.splunk"
  val logStoreElasticSearch = s"$logStore.elasticsearch"

  val mesos = "whisk.mesos"
  val yarn = "whisk.yarn"

  val containerProxy = "whisk.container-proxy"
  val containerProxyTimeouts = s"$containerProxy.timeouts"

  val s3 = "whisk.s3"
  val query = "whisk.query-limit"
  val execSizeLimit = "whisk.exec-size-limit"

  val controller = s"whisk.controller"
  val controllerActivation = s"$controller.activation"

  // Activation persistence.
  val activationStore = "whisk.activation-store"
  val activationStoreWithFileStorage = s"$activationStore.with-file-storage"

  val metrics = "whisk.metrics"
  val featureFlags = "whisk.feature-flags"
}
| cbickel/openwhisk | common/scala/src/main/scala/org/apache/openwhisk/core/WhiskConfig.scala | Scala | apache-2.0 | 8,957 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.metrics.sink
import java.util.Properties
import java.util.concurrent.TimeUnit
import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.ganglia.GangliaReporter
import info.ganglia.gmetric4j.gmetric.GMetric
import org.apache.spark.metrics.MetricsSystem
/**
 * Metrics sink that reports the registry to a Ganglia gmond endpoint.
 *
 * Required properties: 'host' and 'port'. Optional: 'period'/'unit' (polling
 * interval, default 10 seconds), 'mode' (UDP addressing mode, default
 * MULTICAST) and 'ttl' (multicast hop count, default 1).
 */
class GangliaSink(val property: Properties, val registry: MetricRegistry) extends Sink {
  val GANGLIA_KEY_PERIOD = "period"
  val GANGLIA_DEFAULT_PERIOD = 10

  val GANGLIA_KEY_UNIT = "unit"
  val GANGLIA_DEFAULT_UNIT = TimeUnit.SECONDS

  val GANGLIA_KEY_MODE = "mode"
  val GANGLIA_DEFAULT_MODE = GMetric.UDPAddressingMode.MULTICAST

  // TTL for multicast messages. If listeners are X hops away in network, must be at least X.
  val GANGLIA_KEY_TTL = "ttl"
  val GANGLIA_DEFAULT_TTL = 1

  val GANGLIA_KEY_HOST = "host"
  val GANGLIA_KEY_PORT = "port"

  /** Looks up a sink property, mapping an absent (null) entry to None. */
  def propertyToOption(prop: String): Option[String] = Option(property.getProperty(prop))

  // 'host' and 'port' have no sensible defaults; fail fast when missing.
  if (propertyToOption(GANGLIA_KEY_HOST).isEmpty) {
    throw new Exception("Ganglia sink requires 'host' property.")
  }

  if (propertyToOption(GANGLIA_KEY_PORT).isEmpty) {
    throw new Exception("Ganglia sink requires 'port' property.")
  }

  val host = propertyToOption(GANGLIA_KEY_HOST).get
  val port = propertyToOption(GANGLIA_KEY_PORT).get.toInt
  val ttl = propertyToOption(GANGLIA_KEY_TTL).map(_.toInt).getOrElse(GANGLIA_DEFAULT_TTL)
  // Upper-case with Locale.ROOT: a locale-sensitive toUpperCase would mangle
  // values such as "multicast"/"millis" under e.g. a Turkish default locale,
  // making valueOf fail even though the configuration is correct.
  val mode = propertyToOption(GANGLIA_KEY_MODE)
    .map(u => GMetric.UDPAddressingMode.valueOf(u.toUpperCase(java.util.Locale.ROOT)))
    .getOrElse(GANGLIA_DEFAULT_MODE)
  val pollPeriod = propertyToOption(GANGLIA_KEY_PERIOD).map(_.toInt)
    .getOrElse(GANGLIA_DEFAULT_PERIOD)
  val pollUnit = propertyToOption(GANGLIA_KEY_UNIT)
    .map(u => TimeUnit.valueOf(u.toUpperCase(java.util.Locale.ROOT)))
    .getOrElse(GANGLIA_DEFAULT_UNIT)

  // Reject polling periods shorter than the metrics system allows.
  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val ganglia = new GMetric(host, port, mode, ttl)
  val reporter: GangliaReporter = GangliaReporter.forRegistry(registry)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .convertRatesTo(TimeUnit.SECONDS)
      .build(ganglia)

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }
}
| sryza/spark | core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala | Scala | apache-2.0 | 2,986 |
import java.io.{ File, FileReader }
import org.slf4j.LoggerFactory
import at.logic.gapt.utils.executionModels.timeout._
import at.logic.gapt.formats.veriT._
/**
* Usage:
*
* gapt> :load testing/testVeriTImport.scala
* gapt> testVeriTImport("testing/veriT-SMT-LIB", 60)
*/
// Dedicated logger for recording per-file veriT import outcomes.
val VeriTImportLogger = LoggerFactory.getLogger("VeriTImportLogger")
object testVeriTImport {
  // Counters accumulate across calls so repeated invocations report running totals.
  var fail = 0
  var syntax_error = 0
  var unfold_error = 0
  var success = 0
  val nLine = sys.props("line.separator")

  /**
   * Attempts to import every *.proof_flat file under the given directory and
   * logs per-category success/failure counts.
   *
   * @param str     root directory to scan for veriT proof files
   * @param timeout per-file timeout in seconds
   */
  def apply( str: String, timeout: Int ) = {
    val top_dir = new File( str )
    val proof_files = getAllProofFiles( top_dir )

    proof_files.foreach { f =>
      try { withTimeout( timeout * 1000 ) {
        // Any result (including None) counts as a successful parse; failures
        // surface as exceptions and are classified below.
        VeriTParser.getExpansionProof( new FileReader( f ) ) match {
          case _ => success += 1
        }
      } } catch {
        case e: VeriTParserException =>
          syntax_error += 1;
          VeriTImportLogger.warn( "File: " + f.getPath + nLine + e )
        case e: VeriTUnfoldingTransitivityException =>
          unfold_error += 1;
          VeriTImportLogger.warn( "File: " + f.getPath + nLine + e )
        case e: Throwable =>
          fail += 1;
          VeriTImportLogger.error( "File: " + f.getPath + nLine + e )
      }
    }

    VeriTImportLogger.info( "==========================" )
    VeriTImportLogger.info( "VeriT import results:" )
    VeriTImportLogger.info( "success      " + success )
    VeriTImportLogger.info( "unfold_error " + unfold_error )
    VeriTImportLogger.info( "syntax_error " + syntax_error )
    VeriTImportLogger.info( "failure      " + fail )
    VeriTImportLogger.info( "==========================" )
  }

  /**
   * Recursively collects all files ending in ".proof_flat" under `file`.
   * `File.listFiles` returns null for unreadable directories or on I/O error;
   * that case is treated as an empty directory instead of throwing an NPE.
   */
  def getAllProofFiles( file: File ): List[File] = {
    if ( file.isDirectory ) {
      Option( file.listFiles ).map( _.toList ).getOrElse( Nil ).flatMap( f => getAllProofFiles( f ) )
    }
    else {
      if ( file.getName.endsWith( ".proof_flat" ) ) List( file )
      else List()
    }
  }
}
| loewenheim/gapt | testing/testVeriTImport.scala | Scala | gpl-3.0 | 1,855 |
package org.intracer.wmua.cmd
import controllers.Global.commons
import db.scalikejdbc.{ContestJuryJdbc, ImageJdbc, Round, SelectionJdbc, User}
import org.intracer.wmua._
import org.scalawiki.dto.Namespace
import play.api.Logger
import spray.util.pimpFuture
import scala.concurrent.duration._
/**
 * Assigns a round's images to its jurors and persists the resulting selections
 * (plus empty per-criteria rates when the round uses criteria).
 */
case class DistributeImages(round: Round, images: Seq[Image], jurors: Seq[User]) {
  // Stable juror ordering makes the round-robin assignment deterministic.
  val sortedJurors = jurors.sorted
  // Computes the assignment and writes it (and any criteria rate rows) to the DB.
  def apply() = {
    val selection: Seq[Selection] = newSelection
    Logger.logger.debug("saving selection: " + selection.size)
    SelectionJdbc.batchInsert(selection)
    Logger.logger.debug(s"saved selection")
    addCriteriaRates(selection)
  }
  // Builds the (image, juror) assignment without touching the database.
  // distribution == 0: every juror rates every image.
  // distribution == x > 0: each image goes to x consecutive jurors, round-robin.
  // NOTE(review): a negative round.distribution is unhandled and would throw a
  // MatchError — confirm it cannot occur upstream.
  def newSelection = {
    val selection: Seq[Selection] = round.distribution match {
      case 0 =>
        sortedJurors.flatMap { juror =>
          images.map(img => Selection(img, juror, round))
        }
      case x if x > 0 =>
        images.zipWithIndex.flatMap {
          case (img, i) =>
            (0 until x).map(j =>
              Selection(img, sortedJurors((i + j) % sortedJurors.size), round)
            )
        }
    }
    selection
  }
  // For rounds with criteria, seeds a zero-valued rate row per selection and criterion.
  def addCriteriaRates(selection: Seq[Selection]): Unit = {
    if (round.hasCriteria) {
      val criteriaIds = Seq(1, 2, 3, 4) // TODO load form DB
      val rates = selection.flatMap { s =>
        criteriaIds.map(id => new CriteriaRate(0, s.getId, id, 0))
      }
      CriteriaRate.batchInsert(rates)
    }
  }
}
object DistributeImages {
def distributeImages(round: Round,
jurors: Seq[User],
prevRound: Option[Round],
removeUnrated: Boolean = false): Unit = {
if (removeUnrated) {
SelectionJdbc.removeUnrated(round.getId)
}
val images = getFilteredImages(round, jurors, prevRound)
distributeImages(round, jurors, images)
}
def getFilteredImages(round: Round, jurors: Seq[User], prevRound: Option[Round]): Seq[Image] = {
getFilteredImages(round, jurors, prevRound, selectedAtLeast = round.prevSelectedBy,
selectMinAvgRating = round.prevMinAvgRate,
selectTopByRating = round.topImages,
includeCategory = round.category,
excludeCategory = round.excludeCategory,
includeRegionIds = round.regionIds.toSet,
includeMonumentIds = round.monumentIds.toSet
)
}
def distributeImages(round: Round, jurors: Seq[User], images: Seq[Image]): Unit = {
DistributeImages(round, images, jurors).apply()
}
def getFilteredImages(
round: Round,
jurors: Seq[User],
prevRound: Option[Round],
includeRegionIds: Set[String] = Set.empty,
excludeRegionIds: Set[String] = Set.empty,
includeMonumentIds: Set[String] = Set.empty,
includePageIds: Set[Long] = Set.empty,
excludePageIds: Set[Long] = Set.empty,
includeTitles: Set[String] = Set.empty,
excludeTitles: Set[String] = Set.empty,
selectMinAvgRating: Option[Int] = None,
selectTopByRating: Option[Int] = None,
selectedAtLeast: Option[Int] = None,
includeJurorId: Set[Long] = Set.empty,
excludeJurorId: Set[Long] = Set.empty,
includeCategory: Option[String] = None,
excludeCategory: Option[String] = None
): Seq[Image] = {
val includeFromCats = includeCategory.filter(_.trim.nonEmpty).map { category =>
val pages = commons.page(category).imageInfoByGenerator("categorymembers", "cm", Set(Namespace.FILE)).await(5.minutes)
pages.flatMap(_.id)
}.getOrElse(Nil)
val excludeFromCats = excludeCategory.filter(_.trim.nonEmpty).map { category =>
val pages = commons.page(category).imageInfoByGenerator("categorymembers", "cm", Set(Namespace.FILE)).await(5.minutes)
pages.flatMap(_.id)
}.getOrElse(Nil)
val currentImages = ImageJdbc.byRoundMerged(round.getId, rated = None).filter(iwr => iwr.selection.nonEmpty).toSet
val existingImageIds = currentImages.map(_.pageId)
val existingJurorIds = currentImages.flatMap(_.jurors)
val mpxAtLeast = round.minMpx
val sizeAtLeast = round.minImageSize.map(_ * 1024 * 1024)
val contest = ContestJuryJdbc.findById(round.contestId).get
val imagesAll = prevRound.fold[Seq[ImageWithRating]](
ImageJdbc.findByContest(contest).map(i =>
new ImageWithRating(i, Seq.empty)
)
)(r =>
ImageJdbc.byRoundMerged(r.getId, rated = selectedAtLeast.map(_ > 0))
)
Logger.logger.debug("Total images: " + imagesAll.size)
val funGens = ImageWithRatingSeqFilter.funGenerators(prevRound,
includeRegionIds = includeRegionIds,
excludeRegionIds = excludeRegionIds,
includeMonumentIds = includeMonumentIds,
includePageIds = includePageIds ++ includeFromCats.toSet,
excludePageIds = excludePageIds ++ existingImageIds ++ excludeFromCats.toSet,
includeTitles = includeTitles,
excludeTitles = excludeTitles,
includeJurorId = includeJurorId,
excludeJurorId = excludeJurorId /*++ existingJurorIds*/ ,
selectMinAvgRating = prevRound.flatMap(_ => selectMinAvgRating.filter(x => !prevRound.exists(_.isBinary))),
selectTopByRating = prevRound.flatMap(_ => selectTopByRating),
selectedAtLeast = prevRound.flatMap(_ => selectedAtLeast),
mpxAtLeast = mpxAtLeast,
sizeAtLeast = sizeAtLeast,
specialNomination = round.specialNomination
)
val filterChain = ImageWithRatingSeqFilter.makeFunChain(funGens)
val images = filterChain(imagesAll).map(_.image)
Logger.logger.debug("Images after filtering: " + images.size)
images
}
case class Rebalance(newSelections: Seq[Selection], removedSelections: Seq[Selection])
val NoRebalance = Rebalance(Nil, Nil)
/**
 * Computes the selection changes needed to distribute `images` among `jurors` for `round`.
 *
 * Only the initial distribution is currently implemented: when no selections exist yet a
 * fresh distribution is generated; otherwise no changes are proposed (NoRebalance).
 *
 * @param round            the round whose images are distributed
 * @param jurors           jurors receiving the images
 * @param images           images to distribute
 * @param currentSelection the selections that already exist for this round
 * @return a [[Rebalance]] describing selections to add and remove
 */
def rebalanceImages(round: Round, jurors: Seq[User], images: Seq[Image], currentSelection: Seq[Selection]): Rebalance = {
  if (currentSelection.isEmpty) {
    // First distribution for this round: assign all images from scratch.
    Rebalance(DistributeImages(round, images, jurors).newSelection, Nil)
  } else {
    // Rebalancing an existing distribution is not implemented yet.
    NoRebalance
  }
}
}
| intracer/wlxjury | app/org/intracer/wmua/cmd/DistributeImages.scala | Scala | apache-2.0 | 6,331 |
package at.logic.gapt.expr
/**
* Helper class for logical constants.
*
* A logical constant describes a propositional connective, or a quantifier, etc. This is different from an expression
* consisting of only this logical constant, as the expression is an object of type LambdaExpression and needs to have
* a definite type.
*
* A logical constant consists of a name (e.g. "∀"), and a set of possible types, (e.g. (Ti->To)->To,
* ((Ti->Ti)->To)->To, ...). Subclasses need to implement the function matchType, which matches these possible types.
* This way we can handle the parametric types of the quantifiers.
*
* @param name The name of this logical constant, e.g. "∀"
*/
abstract class LogicalC( val name: String ) {
  val symbol = StringSymbol( name )

  protected type MatchResult
  protected def matchType( exptype: TA ): MatchResult
  protected def noMatch: MatchResult

  /** Matches an expression iff it is a constant with this name and an admissible type. */
  def unapply( exp: LambdaExpression ): MatchResult = exp match {
    case Const( n, exptype ) if n == name => matchType( exptype )
    case _                                => noMatch
  }

  /** Matches a (symbol, type) pair against this constant's symbol and admissible types. */
  private[expr] def unapply( pair: ( SymbolA, TA ) ): MatchResult =
    if ( pair._1 == symbol ) matchType( pair._2 ) else noMatch
}
/**
* Logical constant with a fixed type.
*
* @param name The name of this logical constant, e.g. "∧"
* @param ty The fixed type of this logical constant, e.g. To->To->To
*/
class MonomorphicLogicalC( name: String, val ty: TA ) extends LogicalC( name ) {
  /** Builds the (unique) constant expression for this logical constant. */
  def apply() = Const( symbol, ty )

  protected type MatchResult = Boolean
  // A monomorphic constant admits exactly one type, so matching is a plain equality test.
  protected override def matchType( exptype: TA ) = ty == exptype
  protected override def noMatch = false
}
/**
* A logical constant describing a quantifier, which is of type (α->To)->To.
*
* @param name The name of this logical constant, e.g. "∀"
*/
class QuantifierC( name: String ) extends LogicalC( name ) {
  /** Builds the quantifier constant binding a variable of type `qtype`, of type (qtype->o)->o. */
  def apply( qtype: TA ) = Const( symbol, ( qtype -> To ) -> To )

  protected type MatchResult = Option[TA]
  // Extracts the quantified variable's type from an instance type of the shape (α->o)->o.
  protected override def matchType( exptype: TA ) = exptype match {
    case ( q -> To ) -> To => Some( q )
    case _                 => None
  }
  protected override def noMatch = None
}
// Propositional connectives: binary connectives of type o->o->o, negation of type o->o,
// and the nullary constants ⊥ and ⊤ of type o.
object AndC extends MonomorphicLogicalC( "∧", To -> ( To -> To ) )
object OrC extends MonomorphicLogicalC( "∨", To -> ( To -> To ) )
object ImpC extends MonomorphicLogicalC( "⊃", To -> ( To -> To ) )
object NegC extends MonomorphicLogicalC( "¬", To -> To )
object BottomC extends MonomorphicLogicalC( "⊥", To )
object TopC extends MonomorphicLogicalC( "⊤", To )
// Quantifiers: polymorphic in the type of the bound variable, of type (α->o)->o.
object ExistsC extends QuantifierC( "∃" )
object ForallC extends QuantifierC( "∀" )
object EqC extends LogicalC( "=" ) {
  /** Builds the equality constant comparing two terms of type `ty`, of type ty->ty->o. */
  def apply( ty: TA ) = Const( symbol, ty -> ( ty -> To ) )

  protected type MatchResult = Option[TA]
  // Equality is polymorphic: accept any type of the shape α->α->o and extract α.
  protected override def matchType( exptype: TA ) = exptype match {
    case a -> ( b -> To ) if a == b => Some( a )
    case _                          => None
  }
  protected override def noMatch = None
}
//package schematic {
//
// object BigAndC extends MonomorphicLogicalC( "⋀", ( Tindex -> To ) -> ( Tindex -> ( Tindex -> To ) ) )
// object BigOrC extends MonomorphicLogicalC( "⋁", ( Tindex -> To ) -> ( Tindex -> ( Tindex -> To ) ) )
//
// object ZeroC extends MonomorphicLogicalC( "0", Tindex )
// object SuccC extends MonomorphicLogicalC( "s", Tindex -> Tindex )
//
// object PlusC extends MonomorphicLogicalC( "+", Tindex -> ( Tindex -> Tindex ) )
// object TimesC extends MonomorphicLogicalC( "×", Tindex -> ( Tindex -> Tindex ) )
//
// object BiggerThanC extends MonomorphicLogicalC( ">", Tindex -> ( Tindex -> To ) )
// object SimC extends MonomorphicLogicalC( "~", Tindex -> ( Tindex -> To ) )
// object LessThanC extends MonomorphicLogicalC( "<", Tindex -> ( Tindex -> To ) )
// object LeqC extends MonomorphicLogicalC( "≤", Tindex -> ( Tindex -> To ) )
//
//} | gisellemnr/gapt | src/main/scala/at/logic/gapt/expr/logicalConstants.scala | Scala | gpl-3.0 | 3,924 |
package com.mz.training.common.rest
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.{HttpHeader, HttpMethods, HttpResponse}
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{Directive0, MethodRejection, RejectionHandler}
/**
 * Mix-in adding CORS (Cross-Origin Resource Sharing) support to akka-http routes.
 *
 * Wrap a route in [[cors]] to (a) answer OPTIONS pre-flight requests with the
 * appropriate Access-Control-* headers and (b) echo the allow-origin and
 * allow-credentials headers on ordinary cross-origin requests.
 */
trait CorsSupport {
  // Origins allowed to make cross-origin requests; "*" means any origin.
  protected val corsAllowOrigins: List[String] = List("*")
  // Request headers the client is allowed to send in a cross-origin request.
  protected val corsAllowedHeaders: List[String] = List("Origin", "X-Requested-With", "Content-Type", "Accept", "Accept-Encoding", "Accept-Language", "Host", "Referer", "User-Agent")
  // Whether cookies/credentials may accompany cross-origin requests.
  protected val corsAllowCredentials: Boolean = true
  // Extra headers attached to OPTIONS pre-flight responses.
  protected val optionsCorsHeaders: List[HttpHeader] = List[HttpHeader](
    `Access-Control-Allow-Methods`(HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE),
    `Access-Control-Allow-Headers`(corsAllowedHeaders.mkString(", ")),
    `Access-Control-Max-Age`(60 * 60 * 24 * 20), // cache pre-flight response for 20 days
    `Access-Control-Allow-Credentials`(corsAllowCredentials)
  )
  // Rejection handler for pre-flight requests: converts a MethodRejection into a
  // successful response advertising the supported methods plus the CORS headers.
  protected def corsRejectionHandler(allowOrigin: `Access-Control-Allow-Origin`) = RejectionHandler
    .newBuilder().handle {
    case MethodRejection(supported) =>
      complete(HttpResponse().withHeaders(
        `Access-Control-Allow-Methods`(OPTIONS, supported) ::
          allowOrigin ::
          optionsCorsHeaders
      ))
  }
    .result()
  // Maps the request's Origin header to an Access-Control-Allow-Origin header,
  // but only if that origin is in the allow-list (or the allow-list contains "*").
  private def originToAllowOrigin(origin: Origin): Option[`Access-Control-Allow-Origin`] =
    if (corsAllowOrigins.contains("*") || corsAllowOrigins.contains(origin.value))
      origin.origins.headOption.map(`Access-Control-Allow-Origin`.apply)
    else
      None
  // Directive wrapping an inner route with CORS handling.
  // NOTE(review): the type parameter T is unused — confirm whether it can be dropped.
  def cors[T]: Directive0 = mapInnerRoute { route => context =>
    ((context.request.method, context.request.header[Origin].flatMap(originToAllowOrigin)) match {
      // Pre-flight request from an allowed origin: answer via the CORS rejection handler.
      case (OPTIONS, Some(allowOrigin)) =>
        handleRejections(corsRejectionHandler(allowOrigin)) {
          respondWithHeaders(allowOrigin, `Access-Control-Allow-Credentials`(corsAllowCredentials)) {
            route
          }
        }
      // Ordinary request from an allowed origin: attach the CORS response headers.
      case (_, Some(allowOrigin)) =>
        respondWithHeaders(allowOrigin, `Access-Control-Allow-Credentials`(corsAllowCredentials)) {
          route
        }
      // Same-origin request or disallowed origin: pass through unchanged.
      case (_, _) =>
        route
    })(context)
  }
}
| michalzeman/angular2-training | akka-http-server/src/main/scala/com/mz/training/common/rest/AkkaHttpCorsSupport.scala | Scala | mit | 2,319 |
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.memstore2.column
import org.apache.hadoop.io.NullWritable
import shark.memstore2.buffer.ByteBufferReader
object VoidColumnIterator {

  /**
   * Iterator over a void (null-typed) column: advancing is a no-op and every
   * element is the shared NullWritable singleton.
   */
  sealed class Default(private val _bytesReader: ByteBufferReader) extends ColumnIterator {
    override def next(): Unit = ()
    // NullWritable.get() always returns the same singleton instance.
    override def current = NullWritable.get()
  }
}
| sameeragarwal/blinkdb_dev | src/main/scala/shark/memstore2/column/VoidColumnIterator.scala | Scala | apache-2.0 | 1,016 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.alvsanand.sgc.ftp.secure
import es.alvsanand.sgc.ftp.FTPCredentials
import org.scalatest._
/**
 * Unit tests for FTPSSgcConnectorFactory parameter validation: the factory must
 * reject parameter sets missing mandatory fields and accept fully-specified ones.
 */
class FTPSSgcConnectorFactoryTest extends FlatSpec with Matchers with OptionValues
  with Inside with Inspectors with BeforeAndAfterAll {

  it should "fail with obligatory parameters" in {
    // Each call below leaves at least one mandatory field null, so construction must fail.
    a[IllegalArgumentException] shouldBe thrownBy(FTPSSgcConnectorFactory
      .get(FTPSParameters(null, 21, null, null)))
    a[IllegalArgumentException] shouldBe thrownBy(FTPSSgcConnectorFactory
      .get(FTPSParameters("host", 21, null, null)))
    a[IllegalArgumentException] shouldBe thrownBy(FTPSSgcConnectorFactory
      .get(FTPSParameters("host", 21, "dir", null)))
    a[IllegalArgumentException] shouldBe thrownBy(FTPSSgcConnectorFactory
      .get(FTPSParameters("host", 21, "dir", FTPCredentials(null))))
    a[IllegalArgumentException] shouldBe thrownBy(FTPSSgcConnectorFactory
      .get(FTPSParameters("host", 21, "dir", FTPCredentials(null), Option(KeystoreConfig(null)))))
    a[IllegalArgumentException] shouldBe thrownBy(FTPSSgcConnectorFactory
      .get(FTPSParameters("host", 21, "dir", FTPCredentials(null), Option(KeystoreConfig(null)),
        Option(KeystoreConfig(null)))))
  }

  it should "work with obligatory parameters" in {
    // A keystore alone, or keystore plus truststore, are both valid configurations.
    noException should be thrownBy(
      FTPSSgcConnectorFactory.get(FTPSParameters("host", 21, "dir", FTPCredentials("user"),
        Option(KeystoreConfig("kstore"))))
      )
    noException should be thrownBy(
      FTPSSgcConnectorFactory.get(FTPSParameters("host", 21, "dir", FTPCredentials("user"),
        Option(KeystoreConfig("kstore")), Option(KeystoreConfig("tstore"))))
      )
  }
}
| alvsanand/spark-generic-connector | sgc-ftp/src/test/scala/es/alvsanand/sgc/ftp/secure/FTPSSgcConnectorFactoryTest.scala | Scala | apache-2.0 | 2,454 |
package com.github.kompot.play2sec
import bootstrap.Global.Injector
import com.github.kompot.play2sec.authentication.providers.oauth2.facebook
.FacebookAuthProvider
import com.github.kompot.play2sec.authentication.user.AuthUserIdentity
import java.util.concurrent.TimeUnit
import mock.MailServer
import play.api.Play._
import play.api.test.{PlaySpecification, WithBrowser}
import util.StringUtils
import play.api.Logger
import com.github.kompot.play2sec.authentication.providers.password
.UsernamePasswordAuthProvider
import play.api.mvc.Session
import com.github.kompot.play2sec.authentication
/**
 * Browser-driven integration tests for account merging: a user who signed up with
 * email/password and separately via Facebook is asked (because auto-merge is off,
 * see FakeAppNoAutoMerge) whether to merge the two accounts, and can answer yes or no.
 *
 * Requires real Facebook test credentials in configuration
 * (test.facebook.login / test.facebook.password / test.facebook.id).
 */
class MergeTest extends PlaySpecification {
  sequential
  // Start from empty token and user stores.
  step {
    Injector.tokenStore.clearStore()
    Injector.userStore.clearStore()
  }
  "Should be asked for merging accounts and answer yes" in new WithBrowser(FIREFOX, new FakeAppNoAutoMerge) {
    val facebookLogin = current.configuration.getString("test.facebook.login")
    val facebookPassword = current.configuration.getString("test.facebook.password")
    val facebookUserId = current.configuration.getString("test.facebook.id")
    assert(facebookLogin.isDefined && !facebookLogin.get.isEmpty,
      "Key test.facebook.login is not defined in configuration.")
    assert(facebookPassword.isDefined && !facebookPassword.get.isEmpty,
      "Key test.facebook.password is not defined in configuration.")
    assert(facebookUserId.isDefined && !facebookUserId.get.isEmpty,
      "Key test.facebook.id is not defined in configuration.")
    Injector.userStore.getStore.size mustEqual 0
    // Sign up with email/password first.
    browser.goTo("/auth/signup")
    browser.fill("input#email").`with`("kompotik@gmail.com")
    browser.$("#password").text("123")
    browser.click("input[type = 'submit']")
    Injector.userStore.getStore.size mustEqual 1
    // Follow the email-verification link sent to the mock mail server.
    val link = StringUtils.getFirstLinkByContent(
      MailServer.boxes("kompotik@gmail.com").findByContent("verify-email")(0).body, "verify-email").get
    val emailVerificationLink = StringUtils.getRequestPathFromString(link)
    browser.goTo(emailVerificationLink)
    Injector.userStore.getStore.size mustEqual 1
    browser.goTo("/auth/logout")
    // Then sign up via Facebook (the browser is redirected to Facebook's login page).
    browser.goTo("/auth/external/facebook")
    browser.waitUntil(10, TimeUnit.SECONDS) {
      browser.url.contains("www.facebook.com/login")
    }
    if (browser.url.contains("www.facebook.com/login")) {
      browser.fill("input#email").`with`(facebookLogin.get)
      browser.fill("input#pass").`with`(facebookPassword.get)
      browser.click("input[type = 'submit'][name = 'login']")
    }
    val user = await(Injector.userStore.getByAuthUserIdentity(new AuthUserIdentity {
      def provider = FacebookAuthProvider.PROVIDER_KEY
      def id = facebookUserId.get
    }))
    // A separate Facebook user must exist, since auto-merge is disabled.
    user mustNotEqual None
    Injector.userStore.getStore.size mustEqual 2
    // At this point there are 2 separate users in storage and we are logged in as
    // the Facebook user. Logging in with email/password must trigger the merge prompt.
    browser.goTo("/auth/login")
    browser.fill("input#email").`with`("kompotik@gmail.com")
    browser.$("#password").text("123")
    browser.click("input[type = 'submit']")
    browser.waitUntil(1, TimeUnit.SECONDS) {
      browser.url.startsWith("/auth/ask-merge")
    }
    // Tick the merge checkbox and confirm.
    browser.click("input#merge")
    browser.click("input[type = 'submit']")
    val userAfterMerge = await(Injector.userStore.getByAuthUserIdentity(new AuthUserIdentity {
      def provider = FacebookAuthProvider.PROVIDER_KEY
      def id = facebookUserId.get
    }))
    // The checkbox was checked, so the two accounts are merged into one user
    // with two remote (provider) identities.
    userAfterMerge.get.remoteUsers.size mustEqual 2
    Injector.userStore.getStore.size mustEqual 1
  }
  // Reset stores between the two scenarios.
  step {
    Injector.tokenStore.clearStore()
    Injector.userStore.clearStore()
  }
  "Should be asked for merging accounts and answer no" in new WithBrowser(FIREFOX, new FakeAppNoAutoMerge) {
    val facebookLogin = current.configuration.getString("test.facebook.login")
    val facebookPassword = current.configuration.getString("test.facebook.password")
    val facebookUserId = current.configuration.getString("test.facebook.id")
    assert(facebookLogin.isDefined && !facebookLogin.get.isEmpty,
      "Key test.facebook.login is not defined in configuration.")
    assert(facebookPassword.isDefined && !facebookPassword.get.isEmpty,
      "Key test.facebook.password is not defined in configuration.")
    assert(facebookUserId.isDefined && !facebookUserId.get.isEmpty,
      "Key test.facebook.id is not defined in configuration.")
    Injector.userStore.getStore.size mustEqual 0
    // Sign up with email/password and verify the address, as in the previous scenario.
    browser.goTo("/auth/signup")
    browser.fill("input#email").`with`("kompotik@gmail.com")
    browser.$("#password").text("123")
    browser.click("input[type = 'submit']")
    Injector.userStore.getStore.size mustEqual 1
    val link = StringUtils.getFirstLinkByContent(
      MailServer.boxes("kompotik@gmail.com").findByContent("verify-email")(0).body, "verify-email").get
    val emailVerificationLink = StringUtils.getRequestPathFromString(link)
    browser.goTo(emailVerificationLink)
    Injector.userStore.getStore.size mustEqual 1
    browser.goTo("/auth/logout")
    // Then sign up via Facebook.
    browser.goTo("/auth/external/facebook")
    browser.waitUntil(10, TimeUnit.SECONDS) {
      browser.url.contains("www.facebook.com/login")
    }
    if (browser.url.contains("www.facebook.com/login")) {
      browser.fill("input#email").`with`(facebookLogin.get)
      browser.fill("input#pass").`with`(facebookPassword.get)
      browser.click("input[type = 'submit'][name = 'login']")
    }
    val user = await(Injector.userStore.getByAuthUserIdentity(new AuthUserIdentity {
      def provider = FacebookAuthProvider.PROVIDER_KEY
      def id = facebookUserId.get
    }))
    user mustNotEqual None
    Injector.userStore.getStore.size mustEqual 2
    // 2 separate users in storage, logged in as the Facebook user; logging in with
    // email/password must again trigger the merge prompt.
    browser.goTo("/auth/login")
    browser.fill("input#email").`with`("kompotik@gmail.com")
    browser.$("#password").text("123")
    browser.click("input[type = 'submit']")
    browser.waitUntil(1, TimeUnit.SECONDS) {
      browser.url.startsWith("/auth/ask-merge")
    }
    // Submit WITHOUT ticking the merge checkbox: accounts must stay separate.
    browser.click("input[type = 'submit']")
    // Still two separate accounts; we are now logged in under the email account.
    val userAsBeforeMerge = await(Injector.userStore.getByAuthUserIdentity(new AuthUserIdentity {
      def provider = UsernamePasswordAuthProvider.PROVIDER_KEY
      def id = "kompotik@gmail.com"
    }))
    // The email account still has only its own remote identity.
    userAsBeforeMerge.get.remoteUsers.size mustEqual 1
    Injector.userStore.getStore.size mustEqual 2
    // Decode the Play session cookie and check the logged-in provider.
    val value = browser.getCookie("PLAY_SESSION").getValue
    val playSession1 = Session(Session.decode(value.substring(1, value.length - 1)))
    authentication.isLoggedIn(playSession1) mustEqual true
    authentication.getUser(playSession1) mustNotEqual None
    authentication.getUser(playSession1).get.provider mustEqual UsernamePasswordAuthProvider.PROVIDER_KEY
  }
  // Clean up after the suite.
  step {
    Injector.tokenStore.clearStore()
    Injector.userStore.clearStore()
  }
}
| kompot/play2sec | test/com/github/kompot/play2sec/MergeTest.scala | Scala | apache-2.0 | 7,429 |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.onlinetechvision.spark.hazelcast.connector.config
/**
* Spark Hazelcast Config
*
* @author Eren Avsarogullari
* @since 1.0.0
*/
object SparkHazelcastConfig {
  // Property key: file name of the Hazelcast XML configuration.
  val HazelcastXMLConfigFileName = "hazelcast.xml.config.file.name"
  // Property key: name of the Hazelcast distributed object to connect to.
  val HazelcastDistributedObjectName = "hazelcast.distributed.object.name"
  // Property key: type of the Hazelcast distributed object (map, list, queue, ...).
  val HazelcastDistributedObjectType = "hazelcast.distributed.object.type"
}
| erenavsarogullari/spark-hazelcast-connector | src/main/scala/com/onlinetechvision/spark/hazelcast/connector/config/SparkHazelcastConfig.scala | Scala | apache-2.0 | 1,015 |
////////////////////////////////////////////////////////////////////////////////
// //
// OpenSolid is a generic library for the representation and manipulation //
// of geometric objects such as points, curves, surfaces, and volumes. //
// //
// Copyright 2007-2015 by Ian Mackenzie //
// ian.e.mackenzie@gmail.com //
// //
// This Source Code Form is subject to the terms of the Mozilla Public //
// License, v. 2.0. If a copy of the MPL was not distributed with this file, //
// you can obtain one at http://mozilla.org/MPL/2.0/. //
// //
////////////////////////////////////////////////////////////////////////////////
package org.opensolid.core
/** An axis in 2D space: a line through `originPoint` along `direction`. */
final case class Axis2d(originPoint: Point2d, direction: Direction2d)
  extends Transformable2d[Axis2d] {

  /** Direction perpendicular to this axis' direction. */
  def normalDirection: Direction2d =
    direction.normalDirection

  /** Axis through the same origin point, pointing along the normal direction. */
  def normalAxis: Axis2d =
    copy(direction = normalDirection)

  /** Axis through the same origin point, pointing the opposite way. */
  def reversed: Axis2d =
    copy(direction = -direction)

  /** Applies `transformation` to both the origin point and the direction. */
  def transformedBy(transformation: Transformation2d): Axis2d =
    Axis2d(
      originPoint.transformedBy(transformation),
      direction.transformedBy(transformation)
    )

  /** This axis moved so that it passes through `point`; the direction is unchanged. */
  def translatedTo(point: Point2d): Axis2d =
    copy(originPoint = point)

  /** Embeds this 2D axis into 3D space on the given plane. */
  def placedOnto(plane: Plane3d): Axis3d =
    Axis3d(originPoint.placedOnto(plane), direction.placedOnto(plane))

  /** Signed perpendicular distance from this axis to `point` (component along the normal). */
  def signedDistanceTo(point: Point2d): Double =
    originPoint.vectorTo(point).componentIn(normalDirection)

  /** Signed perpendicular distance from this axis to a parametric point expression. */
  def signedDistanceTo[P](expression: Expression2d[P]): Expression1d[P] =
    Expression2d.Constant[P](originPoint).vectorTo(expression).componentIn(normalDirection)
}
object Axis2d {
  // The global X axis: through the origin, pointing in the +X direction.
  val X = Axis2d(Point2d.Origin, Direction2d.X)
  // The global Y axis: through the origin, pointing in the +Y direction.
  val Y = Axis2d(Point2d.Origin, Direction2d.Y)
}
| ianmackenzie/opensolid-core | src/main/scala/org/opensolid/core/Axis2d.scala | Scala | mpl-2.0 | 2,133 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import scala.collection.immutable.TreeSet
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode, FalseLiteral, GenerateSafeProjection, GenerateUnsafeProjection, Predicate => BasePredicate}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.types._
object InterpretedPredicate {
  /** Binds `expression` against `inputSchema`, then wraps it in an interpreted predicate. */
  def create(expression: Expression, inputSchema: Seq[Attribute]): InterpretedPredicate = {
    val bound = BindReferences.bindReference(expression, inputSchema)
    create(bound)
  }

  /** Wraps an already-bound expression in an interpreted predicate. */
  def create(expression: Expression): InterpretedPredicate = InterpretedPredicate(expression)
}
/** A predicate evaluated by directly interpreting the expression tree (no code generation). */
case class InterpretedPredicate(expression: Expression) extends BasePredicate {
  // Assumes the bound expression evaluates to a non-null Boolean for every input row.
  override def eval(r: InternalRow): Boolean = expression.eval(r).asInstanceOf[Boolean]
}
/**
* An [[Expression]] that returns a boolean value.
*/
trait Predicate extends Expression {
  // Every predicate evaluates to a SQL BOOLEAN.
  override def dataType: DataType = BooleanType
}
/** Helper methods for decomposing, rewriting and analyzing predicate expressions. */
trait PredicateHelper {
  // Recursively flattens nested ANDs into the list of conjuncts:
  // (a AND b) AND c  =>  Seq(a, b, c). Any non-And node is its own singleton conjunct.
  protected def splitConjunctivePredicates(condition: Expression): Seq[Expression] = {
    condition match {
      case And(cond1, cond2) =>
        splitConjunctivePredicates(cond1) ++ splitConjunctivePredicates(cond2)
      case other => other :: Nil
    }
  }
  // Recursively flattens nested ORs into the list of disjuncts:
  // (a OR b) OR c  =>  Seq(a, b, c). Any non-Or node is its own singleton disjunct.
  protected def splitDisjunctivePredicates(condition: Expression): Seq[Expression] = {
    condition match {
      case Or(cond1, cond2) =>
        splitDisjunctivePredicates(cond1) ++ splitDisjunctivePredicates(cond2)
      case other => other :: Nil
    }
  }
  // Substitute any known alias from a map.
  protected def replaceAlias(
      condition: Expression,
      aliases: AttributeMap[Expression]): Expression = {
    // Use transformUp to prevent infinite recursion when the replacement expression
    // redefines the same ExprId,
    condition.transformUp {
      case a: Attribute =>
        aliases.getOrElse(a, a)
    }
  }

  /**
   * Returns true if `expr` can be evaluated using only the output of `plan`. This method
   * can be used to determine when it is acceptable to move expression evaluation within a query
   * plan.
   *
   * For example consider a join between two relations R(a, b) and S(c, d).
   *
   * - `canEvaluate(EqualTo(a,b), R)` returns `true`
   * - `canEvaluate(EqualTo(a,c), R)` returns `false`
   * - `canEvaluate(Literal(1), R)` returns `true` as literals CAN be evaluated on any plan
   */
  protected def canEvaluate(expr: Expression, plan: LogicalPlan): Boolean =
    expr.references.subsetOf(plan.outputSet)

  /**
   * Returns true iff `expr` could be evaluated as a condition within join.
   *
   * Note: the order of the cases below is significant — the non-deterministic check must
   * precede the subquery checks, and ListQuery/Exists must be handled before the generic
   * SubqueryExpression case.
   */
  protected def canEvaluateWithinJoin(expr: Expression): Boolean = expr match {
    // Non-deterministic expressions are not allowed as join conditions.
    case e if !e.deterministic => false
    case _: ListQuery | _: Exists =>
      // A ListQuery defines the query which we want to search in an IN subquery expression.
      // Currently the only way to evaluate an IN subquery is to convert it to a
      // LeftSemi/LeftAnti/ExistenceJoin by `RewritePredicateSubquery` rule.
      // It cannot be evaluated as part of a Join operator.
      // An Exists shouldn't be push into a Join operator too.
      false
    case e: SubqueryExpression =>
      // non-correlated subquery will be replaced as literal
      e.children.isEmpty
    case a: AttributeReference => true
    case e: Unevaluable => false
    // An expression is evaluable within a join iff all of its children are.
    case e => e.children.forall(canEvaluateWithinJoin)
  }
}
}
@ExpressionDescription(
usage = "_FUNC_ expr - Logical not.")
/** Logical NOT. Null-intolerant: a null input produces a null output. */
case class Not(child: Expression)
  extends UnaryExpression with Predicate with ImplicitCastInputTypes with NullIntolerant {
  override def toString: String = s"NOT $child"
  // Implicit casts coerce the single input to BOOLEAN.
  override def inputTypes: Seq[DataType] = Seq(BooleanType)
  // Called only for non-null input (NullIntolerant handles the null case).
  protected override def nullSafeEval(input: Any): Any = !input.asInstanceOf[Boolean]
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Generates Java `!(child)`; parentheses keep precedence correct for compound children.
    defineCodeGen(ctx, ev, c => s"!($c)")
  }
  override def sql: String = s"(NOT ${child.sql})"
}
/**
* Evaluates to `true` if `list` contains `value`.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "expr1 _FUNC_(expr2, expr3, ...) - Returns true if `expr` equals to any valN.",
arguments = """
Arguments:
* expr1, expr2, expr3, ... - the arguments must be same type.
""",
examples = """
Examples:
> SELECT 1 _FUNC_(1, 2, 3);
true
> SELECT 1 _FUNC_(2, 3, 4);
false
> SELECT named_struct('a', 1, 'b', 2) _FUNC_(named_struct('a', 1, 'b', 1), named_struct('a', 1, 'b', 3));
false
> SELECT named_struct('a', 1, 'b', 2) _FUNC_(named_struct('a', 1, 'b', 2), named_struct('a', 1, 'b', 3));
true
""")
// scalastyle:on line.size.limit
/**
 * SQL IN: evaluates to true if `value` equals any element of `list`.
 * Follows SQL three-valued logic: if no element matches and at least one element
 * (or the value itself) is null, the result is null rather than false.
 */
case class In(value: Expression, list: Seq[Expression]) extends Predicate {

  require(list != null, "list should not be null")

  // Validates that every list element is structurally type-compatible with `value`.
  // The ListQuery case produces detailed errors for IN-subquery column mismatches.
  override def checkInputDataTypes(): TypeCheckResult = {
    val mismatchOpt = list.find(l => !DataType.equalsStructurally(l.dataType, value.dataType,
      ignoreNullability = true))
    if (mismatchOpt.isDefined) {
      list match {
        case ListQuery(_, _, _, childOutputs) :: Nil =>
          val valExprs = value match {
            case cns: CreateNamedStruct => cns.valExprs
            case expr => Seq(expr)
          }
          if (valExprs.length != childOutputs.length) {
            TypeCheckResult.TypeCheckFailure(
              s"""
                 |The number of columns in the left hand side of an IN subquery does not match the
                 |number of columns in the output of subquery.
                 |#columns in left hand side: ${valExprs.length}.
                 |#columns in right hand side: ${childOutputs.length}.
                 |Left side columns:
                 |[${valExprs.map(_.sql).mkString(", ")}].
                 |Right side columns:
                 |[${childOutputs.map(_.sql).mkString(", ")}].""".stripMargin)
          } else {
            val mismatchedColumns = valExprs.zip(childOutputs).flatMap {
              case (l, r) if l.dataType != r.dataType =>
                s"(${l.sql}:${l.dataType.catalogString}, ${r.sql}:${r.dataType.catalogString})"
              case _ => None
            }
            TypeCheckResult.TypeCheckFailure(
              s"""
                 |The data type of one or more elements in the left hand side of an IN subquery
                 |is not compatible with the data type of the output of the subquery
                 |Mismatched columns:
                 |[${mismatchedColumns.mkString(", ")}]
                 |Left side:
                 |[${valExprs.map(_.dataType.catalogString).mkString(", ")}].
                 |Right side:
                 |[${childOutputs.map(_.dataType.catalogString).mkString(", ")}].""".stripMargin)
          }
        case _ =>
          TypeCheckResult.TypeCheckFailure(s"Arguments must be same type but were: " +
            s"${value.dataType.simpleString} != ${mismatchOpt.get.dataType.simpleString}")
      }
    } else {
      TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")
    }
  }

  override def children: Seq[Expression] = value +: list
  // True when every list element is a literal, allowing conversion to the faster InSet.
  lazy val inSetConvertible = list.forall(_.isInstanceOf[Literal])
  private lazy val ordering = TypeUtils.getInterpretedOrdering(value.dataType)

  // Nullable if the value or any list element is nullable (three-valued logic).
  override def nullable: Boolean = children.exists(_.nullable)
  override def foldable: Boolean = children.forall(_.foldable)

  override def toString: String = s"$value IN ${list.mkString("(", ",", ")")}"

  // Interpreted evaluation: short-circuits on the first match; remembers whether a
  // null element was seen so a non-match can yield null instead of false.
  override def eval(input: InternalRow): Any = {
    val evaluatedValue = value.eval(input)
    if (evaluatedValue == null) {
      null
    } else {
      var hasNull = false
      list.foreach { e =>
        val v = e.eval(input)
        if (v == null) {
          hasNull = true
        } else if (ordering.equiv(v, evaluatedValue)) {
          return true
        }
      }
      if (hasNull) {
        null
      } else {
        false
      }
    }
  }

  // Code generation: encodes the three-valued result in a single byte so that the
  // per-element checks can be split into helper methods without losing short-circuiting.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val javaDataType = CodeGenerator.javaType(value.dataType)
    val valueGen = value.genCode(ctx)
    val listGen = list.map(_.genCode(ctx))
    // inTmpResult has 3 possible values:
    // -1 means no matches found and there is at least one value in the list evaluated to null
    val HAS_NULL = -1
    // 0 means no matches found and all values in the list are not null
    val NOT_MATCHED = 0
    // 1 means one value in the list is matched
    val MATCHED = 1
    val tmpResult = ctx.freshName("inTmpResult")
    val valueArg = ctx.freshName("valueArg")
    // All the blocks are meant to be inside a do { ... } while (false); loop.
    // The evaluation of variables can be stopped when we find a matching value.
    val listCode = listGen.map(x =>
      s"""
         |${x.code}
         |if (${x.isNull}) {
         |  $tmpResult = $HAS_NULL; // ${ev.isNull} = true;
         |} else if (${ctx.genEqual(value.dataType, valueArg, x.value)}) {
         |  $tmpResult = $MATCHED; // ${ev.isNull} = false; ${ev.value} = true;
         |  continue;
         |}
       """.stripMargin)

    // Split the element checks into multiple methods if the generated code is too long;
    // the fold re-checks for MATCHED after each call to preserve short-circuiting.
    val codes = ctx.splitExpressionsWithCurrentInputs(
      expressions = listCode,
      funcName = "valueIn",
      extraArguments = (javaDataType, valueArg) :: (CodeGenerator.JAVA_BYTE, tmpResult) :: Nil,
      returnType = CodeGenerator.JAVA_BYTE,
      makeSplitFunction = body =>
        s"""
           |do {
           |  $body
           |} while (false);
           |return $tmpResult;
         """.stripMargin,
      foldFunctions = _.map { funcCall =>
        s"""
           |$tmpResult = $funcCall;
           |if ($tmpResult == $MATCHED) {
           |  continue;
           |}
         """.stripMargin
      }.mkString("\n"))

    ev.copy(code =
      s"""
         |${valueGen.code}
         |byte $tmpResult = $HAS_NULL;
         |if (!${valueGen.isNull}) {
         |  $tmpResult = $NOT_MATCHED;
         |  $javaDataType $valueArg = ${valueGen.value};
         |  do {
         |    $codes
         |  } while (false);
         |}
         |final boolean ${ev.isNull} = ($tmpResult == $HAS_NULL);
         |final boolean ${ev.value} = ($tmpResult == $MATCHED);
       """.stripMargin)
  }

  override def sql: String = {
    val childrenSQL = children.map(_.sql)
    val valueSQL = childrenSQL.head
    val listSQL = childrenSQL.tail.mkString(", ")
    s"($valueSQL IN ($listSQL))"
  }
}
/**
* Optimized version of In clause, when all filter values of In clause are
* static.
*/
/**
 * Optimized version of the In clause for the case where all filter values are literals:
 * membership is tested against a pre-built set instead of re-evaluating expressions.
 */
case class InSet(child: Expression, hset: Set[Any]) extends UnaryExpression with Predicate {

  require(hset != null, "hset could not be null")

  override def toString: String = s"$child INSET ${hset.mkString("(", ",", ")")}"

  // Whether the literal set contains a null; needed for three-valued logic below.
  @transient private[this] lazy val hasNull: Boolean = hset.contains(null)

  override def nullable: Boolean = child.nullable || hasNull

  // Non-match yields null (not false) when the set contains a null element.
  protected override def nullSafeEval(value: Any): Any = {
    if (set.contains(value)) {
      true
    } else if (hasNull) {
      null
    } else {
      false
    }
  }

  @transient lazy val set: Set[Any] = child.dataType match {
    case _: AtomicType => hset
    case _: NullType => hset
    case _ =>
      // for structs use interpreted ordering to be able to compare UnsafeRows with non-UnsafeRows
      TreeSet.empty(TypeUtils.getInterpretedOrdering(child.dataType)) ++ hset
  }

  // Code generation: embeds the set as a reference object and calls contains() at runtime.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val setTerm = ctx.addReferenceObj("set", set)
    val childGen = child.genCode(ctx)
    // If the set contains null, a failed lookup must produce null instead of false.
    val setIsNull = if (hasNull) {
      s"${ev.isNull} = !${ev.value};"
    } else {
      ""
    }
    ev.copy(code =
      s"""
         |${childGen.code}
         |${CodeGenerator.JAVA_BOOLEAN} ${ev.isNull} = ${childGen.isNull};
         |${CodeGenerator.JAVA_BOOLEAN} ${ev.value} = false;
         |if (!${ev.isNull}) {
         |  ${ev.value} = $setTerm.contains(${childGen.value});
         |  $setIsNull
         |}
       """.stripMargin)
  }

  override def sql: String = {
    val valueSQL = child.sql
    val listSQL = hset.toSeq.map(Literal(_).sql).mkString(", ")
    s"($valueSQL IN ($listSQL))"
  }
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Logical AND.")
/**
 * Logical AND with SQL three-valued semantics: false if either side is a known false
 * (even if the other side is null); true only if both sides are known true; null otherwise.
 */
case class And(left: Expression, right: Expression) extends BinaryOperator with Predicate {

  override def inputType: AbstractDataType = BooleanType

  override def symbol: String = "&&"

  override def sqlOperator: String = "AND"

  // Short-circuits: the right side is not evaluated when the left is a known false.
  override def eval(input: InternalRow): Any = {
    val input1 = left.eval(input)
    if (input1 == false) {
       false
    } else {
      val input2 = right.eval(input)
      if (input2 == false) {
        false
      } else {
        if (input1 != null && input2 != null) {
          true
        } else {
          null
        }
      }
    }
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val eval1 = left.genCode(ctx)
    val eval2 = right.genCode(ctx)

    // The result should be `false`, if any of them is `false` whenever the other is null or not.

    // Fast path: neither side can be null, so the result is never null either.
    if (!left.nullable && !right.nullable) {
      ev.copy(code = s"""
        ${eval1.code}
        boolean ${ev.value} = false;

        if (${eval1.value}) {
          ${eval2.code}
          ${ev.value} = ${eval2.value};
        }""", isNull = FalseLiteral)
    } else {
      // General path: tracks nullness explicitly to implement three-valued logic.
      ev.copy(code = s"""
        ${eval1.code}
        boolean ${ev.isNull} = false;
        boolean ${ev.value} = false;

        if (!${eval1.isNull} && !${eval1.value}) {
        } else {
          ${eval2.code}
          if (!${eval2.isNull} && !${eval2.value}) {
          } else if (!${eval1.isNull} && !${eval2.isNull}) {
            ${ev.value} = true;
          } else {
            ${ev.isNull} = true;
          }
        }
      """)
    }
  }
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Logical OR.")
// Logical disjunction with SQL 3-valued semantics: true dominates NULL
// (true OR NULL = true), and NULL results only when neither side is true
// but at least one side is NULL.
case class Or(left: Expression, right: Expression) extends BinaryOperator with Predicate {
override def inputType: AbstractDataType = BooleanType
override def symbol: String = "||"
override def sqlOperator: String = "OR"
// Interpreted path: short-circuits on a true left operand without evaluating the right.
override def eval(input: InternalRow): Any = {
val input1 = left.eval(input)
if (input1 == true) {
true
} else {
val input2 = right.eval(input)
if (input2 == true) {
true
} else {
if (input1 != null && input2 != null) {
false
} else {
null
}
}
}
}
// Codegen path: dual of And.doGenCode (fast non-nullable branch plus general branch).
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval1 = left.genCode(ctx)
val eval2 = right.genCode(ctx)
// The result should be `true`, if any of them is `true` whenever the other is null or not.
if (!left.nullable && !right.nullable) {
// NOTE(review): this mutation looks redundant — the copy() below already sets
// isNull = FalseLiteral (the matching branch in And omits it); confirm before removing.
ev.isNull = FalseLiteral
ev.copy(code = s"""
${eval1.code}
boolean ${ev.value} = true;
if (!${eval1.value}) {
${eval2.code}
${ev.value} = ${eval2.value};
}""", isNull = FalseLiteral)
} else {
ev.copy(code = s"""
${eval1.code}
boolean ${ev.isNull} = false;
boolean ${ev.value} = true;
if (!${eval1.isNull} && ${eval1.value}) {
} else {
${eval2.code}
if (!${eval2.isNull} && ${eval2.value}) {
} else if (!${eval1.isNull} && !${eval2.isNull}) {
${ev.value} = false;
} else {
${ev.isNull} = true;
}
}
""")
}
}
}
// Base class for the ordered comparison operators (<, <=, >, >=, =).
// Input types are validated at analysis time rather than via inputType, because
// the set of orderable types cannot be finitely enumerated.
abstract class BinaryComparison extends BinaryOperator with Predicate {
// Note that we need to give a superset of allowable input types since orderable types are not
// finitely enumerable. The allowable types are checked below by checkInputDataTypes.
override def inputType: AbstractDataType = AnyDataType
override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match {
case TypeCheckResult.TypeCheckSuccess =>
TypeUtils.checkForOrderingExpr(left.dataType, this.getClass.getSimpleName)
case failure => failure
}
// Primitive non-boolean, non-floating types compare with the raw Java operator;
// everything else funnels through genComp (floats excluded here — presumably for
// NaN ordering handled by genComp; confirm against CodeGenerator).
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
if (CodeGenerator.isPrimitiveType(left.dataType)
&& left.dataType != BooleanType // java boolean doesn't support > or < operator
&& left.dataType != FloatType
&& left.dataType != DoubleType) {
// faster version
defineCodeGen(ctx, ev, (c1, c2) => s"$c1 $symbol $c2")
} else {
defineCodeGen(ctx, ev, (c1, c2) => s"${ctx.genComp(left.dataType, c1, c2)} $symbol 0")
}
}
// Interpreted ordering used by the eval() paths of the concrete subclasses.
protected lazy val ordering: Ordering[Any] = TypeUtils.getInterpretedOrdering(left.dataType)
}
// Extractor matching any binary comparison, yielding its two operands.
object BinaryComparison {
def unapply(e: BinaryComparison): Option[(Expression, Expression)] = Some((e.left, e.right))
}
/** An extractor that matches both standard 3VL equality and null-safe equality. */
object Equality {
def unapply(e: BinaryComparison): Option[(Expression, Expression)] = e match {
case EqualTo(l, r) => Some((l, r))
case EqualNullSafe(l, r) => Some((l, r))
case _ => None
}
}
// TODO: although map type is not orderable, technically map type should be able to be used
// in equality comparison
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` equals `expr2`, or false otherwise.",
arguments = """
Arguments:
* expr1, expr2 - the two expressions must be same type or can be casted to a common type,
and must be a type that can be used in equality comparison. Map type is not supported.
For complex types such array/struct, the data types of fields must be orderable.
""",
examples = """
Examples:
> SELECT 2 _FUNC_ 2;
true
> SELECT 1 _FUNC_ '1';
true
> SELECT true _FUNC_ NULL;
NULL
> SELECT NULL _FUNC_ NULL;
NULL
""")
// Standard 3-valued equality: NULL on either side yields NULL (via NullIntolerant).
case class EqualTo(left: Expression, right: Expression)
extends BinaryComparison with NullIntolerant {
override def symbol: String = "="
protected override def nullSafeEval(left: Any, right: Any): Any = ordering.equiv(left, right)
// Overrides the base comparison codegen to use type-aware equality directly.
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
defineCodeGen(ctx, ev, (c1, c2) => ctx.genEqual(left.dataType, c1, c2))
}
}
// TODO: although map type is not orderable, technically map type should be able to be used
// in equality comparison
@ExpressionDescription(
usage = """
expr1 _FUNC_ expr2 - Returns same result as the EQUAL(=) operator for non-null operands,
but returns true if both are null, false if one of the them is null.
""",
arguments = """
Arguments:
* expr1, expr2 - the two expressions must be same type or can be casted to a common type,
and must be a type that can be used in equality comparison. Map type is not supported.
For complex types such array/struct, the data types of fields must be orderable.
""",
examples = """
Examples:
> SELECT 2 _FUNC_ 2;
true
> SELECT 1 _FUNC_ '1';
true
> SELECT true _FUNC_ NULL;
false
> SELECT NULL _FUNC_ NULL;
true
""")
// Null-safe equality (<=>): never returns NULL. Two NULLs compare equal,
// NULL versus non-NULL compares unequal.
case class EqualNullSafe(left: Expression, right: Expression) extends BinaryComparison {
override def symbol: String = "<=>"
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
val input1 = left.eval(input)
val input2 = right.eval(input)
if (input1 == null && input2 == null) {
true
} else if (input1 == null || input2 == null) {
false
} else {
ordering.equiv(input1, input2)
}
}
// Codegen: result is (both null) OR (both non-null AND values equal); isNull is
// statically false, matching nullable above.
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval1 = left.genCode(ctx)
val eval2 = right.genCode(ctx)
val equalCode = ctx.genEqual(left.dataType, eval1.value, eval2.value)
ev.copy(code = eval1.code + eval2.code + s"""
boolean ${ev.value} = (${eval1.isNull} && ${eval2.isNull}) ||
(!${eval1.isNull} && !${eval2.isNull} && $equalCode);""", isNull = FalseLiteral)
}
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is less than `expr2`.",
arguments = """
Arguments:
* expr1, expr2 - the two expressions must be same type or can be casted to a common type,
and must be a type that can be ordered. For example, map type is not orderable, so it
is not supported. For complex types such array/struct, the data types of fields must
be orderable.
""",
examples = """
Examples:
> SELECT 1 _FUNC_ 2;
true
> SELECT 1.1 _FUNC_ '1';
false
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52');
false
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52');
true
> SELECT 1 _FUNC_ NULL;
NULL
""")
// Strict less-than; NULL operands propagate NULL via NullIntolerant.
case class LessThan(left: Expression, right: Expression)
extends BinaryComparison with NullIntolerant {
override def symbol: String = "<"
protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.lt(input1, input2)
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is less than or equal to `expr2`.",
arguments = """
Arguments:
* expr1, expr2 - the two expressions must be same type or can be casted to a common type,
and must be a type that can be ordered. For example, map type is not orderable, so it
is not supported. For complex types such array/struct, the data types of fields must
be orderable.
""",
examples = """
Examples:
> SELECT 2 _FUNC_ 2;
true
> SELECT 1.0 _FUNC_ '1';
true
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52');
true
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52');
true
> SELECT 1 _FUNC_ NULL;
NULL
""")
// Less-than-or-equal; NULL operands propagate NULL via NullIntolerant.
case class LessThanOrEqual(left: Expression, right: Expression)
extends BinaryComparison with NullIntolerant {
override def symbol: String = "<="
protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.lteq(input1, input2)
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is greater than `expr2`.",
arguments = """
Arguments:
* expr1, expr2 - the two expressions must be same type or can be casted to a common type,
and must be a type that can be ordered. For example, map type is not orderable, so it
is not supported. For complex types such array/struct, the data types of fields must
be orderable.
""",
examples = """
Examples:
> SELECT 2 _FUNC_ 1;
true
> SELECT 2 _FUNC_ '1.1';
true
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52');
false
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52');
false
> SELECT 1 _FUNC_ NULL;
NULL
""")
// Strict greater-than; NULL operands propagate NULL via NullIntolerant.
case class GreaterThan(left: Expression, right: Expression)
extends BinaryComparison with NullIntolerant {
override def symbol: String = ">"
protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.gt(input1, input2)
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is greater than or equal to `expr2`.",
arguments = """
Arguments:
* expr1, expr2 - the two expressions must be same type or can be casted to a common type,
and must be a type that can be ordered. For example, map type is not orderable, so it
is not supported. For complex types such array/struct, the data types of fields must
be orderable.
""",
examples = """
Examples:
> SELECT 2 _FUNC_ 1;
true
> SELECT 2.0 _FUNC_ '2.1';
false
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52');
true
> SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52');
false
> SELECT 1 _FUNC_ NULL;
NULL
""")
// Greater-than-or-equal; NULL operands propagate NULL via NullIntolerant.
case class GreaterThanOrEqual(left: Expression, right: Expression)
extends BinaryComparison with NullIntolerant {
override def symbol: String = ">="
protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.gteq(input1, input2)
}
| ddna1021/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala | Scala | apache-2.0 | 25,314 |
package sri
import sri.core.{ReactElement, ReactElementNode, ReactNode}
import sri.universal.apis.Dimensions
import sri.universal.components.{
ListViewDataSource,
ListViewDataSourceInput,
View,
ViewC
}
import scala.collection.GenTraversableOnce
import scala.scalajs.js
import scala.scalajs.js.Dynamic.{literal => json}
import scala.scalajs.js.JSConverters.genTravConvertible2JSRichGenTrav
import scala.scalajs.js.`|`
package object universal {

  /** Conditional rendering helper: `cond ?= element` yields the element when the
    * condition is true, otherwise a null ReactElement (rendered as nothing).
    */
  implicit class Boolean_Ext_Methods(val value: Boolean) extends AnyVal {
    def ?=(elm: => ReactNode): ReactElement =
      if (value) elm.asInstanceOf[ReactElement] else null
  }

  /** Null/undefined checks for values crossing the JavaScript boundary. */
  implicit class UndefOr_Ext_Methods(val value: js.UndefOr[_]) extends AnyVal {
    def isUndefinedOrNull: Boolean = value.isEmpty || value == null
    def isDefinedAndNotNull: Boolean = value.isDefined && value != null
  }

  // Implicit bridges between Scala collections / js.Arrays of elements and
  // React's node types (all zero-cost casts at the Scala.js level).
  implicit def genTravarsableToJSArrayReactNode[I <: js.Object](
      elm: GenTraversableOnce[ReactElement { type Instance = I }]): ReactNode =
    elm.toJSArray.asInstanceOf[ReactNode]

  implicit def jsArrayReactElementToReactNode[T](
      in: js.Array[ReactElement { type Instance = T }]): ReactNode =
    in.asInstanceOf[ReactNode]

  implicit def childrenTypeToReactElementNode(
      in: ReactNode): ReactElementNode = in.asInstanceOf[ReactElementNode]

  implicit def genTravarsableToJSArrayReactElementNode[I <: js.Object](
      elm: GenTraversableOnce[ReactElement { type Instance = I }])
    : ReactElementNode =
    elm.toJSArray.asInstanceOf[ReactElementNode]

  implicit def reactElementNodeArrayToReactNodeArray(
      elm: js.Array[ReactElementNode]): js.Array[ReactNode] =
    elm.asInstanceOf[js.Array[ReactNode]]

  /** Builds a ListView data source from the given change-detection and accessor
    * callbacks. Optional callbacks left at their `null` defaults are translated
    * to `js.undefined` so the underlying JS API sees them as absent.
    */
  def createListViewDataSource[R, H](
      rowHasChanged: js.Function2[R, R, Boolean],
      sectionHeaderHasChanged: js.Function2[H, H, Boolean] = null,
      getRowData: js.Function3[_, String | Int, String | Int, _] = null,
      getSectionHeaderData: js.Function2[_, String | Int, _] = null)
    : ListViewDataSource[R, H] = {
    new ListViewDataSource[R, H](new ListViewDataSourceInput[R, H] {
      rowHasChangedF = rowHasChanged;
      sectionHeaderHasChangedF =
        if (sectionHeaderHasChanged != null) sectionHeaderHasChanged
        else js.undefined;
      // Fixed: previously this compared the field `getRowDataF` (not yet set from
      // the parameter) against null, so a caller-supplied `getRowData` callback
      // was effectively ignored. Test the parameter, like the other callbacks.
      getRowDataF = if (getRowData != null) getRowData else js.undefined;
      getSectionHeaderDataF =
        if (getSectionHeaderData != null) getSectionHeaderData
        else js.undefined
    })
  }

  /** Current window dimensions as reported by the Dimensions API. */
  @inline def windowDimensions = Dimensions.get("window")
}
| scalajs-react-interface/universal | src/main/scala/sri/universal/package.scala | Scala | apache-2.0 | 2,536 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.blaze.http.http2
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets.UTF_8
import scala.collection.mutable
import scala.util.control.NoStackTrace
// Root of the HTTP/2 error hierarchy: carries an RFC 7540 error code and a
// message, and converts freely between stream-level and session-level forms.
// NoStackTrace keeps these cheap, since they model protocol events.
sealed abstract class Http2Exception(msg: String)
extends Exception(msg)
with NoStackTrace
with Product
with Serializable {
// A 32-bit unsigned Integer
def code: Long
// Human-readable RFC name for the code (e.g. "PROTOCOL_ERROR").
final def name: String = Http2Exception.errorName(code)
/** Convert this exception to a stream exception
*
* @note
* If this is already a stream exception but with a different stream id, the id will be changed
*/
final def toStreamException(streamId: Int): Http2StreamException =
this match {
case ex: Http2StreamException if ex.stream == streamId => ex
case ex => new Http2StreamException(streamId, ex.code, ex.getMessage)
}
/** Convert this exception to a session exception */
final def toSessionException(): Http2SessionException =
this match {
case Http2StreamException(_, code, msg) => Http2SessionException(code, msg)
case ex: Http2SessionException => ex
}
/** Was the exception due to refusal by the peer.
*
* These exceptions are safe to automatically retry even if the HTTP method is not an idempotent
* method. See https://tools.ietf.org/html/rfc7540#section-8.1.4 for more details.
*/
final def isRefusedStream: Boolean =
code == Http2Exception.REFUSED_STREAM.code
/** serialize the message as a `ByteBuffer` */
final def msgBuffer(): ByteBuffer = ByteBuffer.wrap(msg.getBytes(UTF_8))
}
// Error scoped to a single stream (maps to a RST_STREAM frame).
final case class Http2StreamException(stream: Int, code: Long, msg: String)
extends Http2Exception(msg)
// Error scoped to the whole connection (maps to a GOAWAY frame).
final case class Http2SessionException(code: Long, msg: String) extends Http2Exception(msg)
///////////////////// HTTP/2.0 Errors //////////////////////////////
// Registry of the RFC 7540 §7 error codes. Each code gets an ErrorGenerator that
// builds session (GOAWAY) or stream (RST) exceptions and doubles as an extractor.
object Http2Exception {
final class ErrorGenerator private[http2] (val code: Long, val name: String) {
/** Create a Http2Exception with stream id 0 */
def goaway(): Http2Exception = Http2SessionException(code, name)
/** Create a Http2Exception with stream id 0 */
def goaway(msg: String): Http2SessionException =
Http2SessionException(code, name + ": " + msg)
/** Create a Http2Exception with the requisite stream id */
def rst(stream: Int): Http2StreamException = rst(stream, name)
/** Create a Http2Exception with the requisite stream id */
def rst(stream: Int, msg: String): Http2StreamException =
Http2StreamException(stream, code, msg)
/** Extract the optional stream id and the exception message */
def unapply(ex: Http2Exception): Option[(Option[Int], String)] =
if (ex.code == code) {
val stream = ex match {
case ex: Http2StreamException => Some(ex.stream)
case _ => None
}
Some(stream -> ex.getMessage)
} else None
def unapply(code: Int): Boolean = code == this.code
override val toString: String =
s"$name(0x${Integer.toHexString(code.toInt)})"
}
// Falls back to a synthetic UNKNOWN(0x..) generator for unregistered codes.
def errorGenerator(code: Long): ErrorGenerator =
exceptionsMap.get(code) match {
case Some(gen) => gen
case None =>
new ErrorGenerator(code, s"UNKNOWN(0x${Integer.toHexString(code.toInt)})")
}
/** Get the name associated with the error code */
def errorName(code: Long): String = errorGenerator(code).name
// Populated at class-initialization time by the mkErrorGen calls below; the vals
// must stay below mkErrorGen so registration happens in declaration order.
private[this] val exceptionsMap = new mutable.HashMap[Long, ErrorGenerator]()
private def mkErrorGen(code: Long, name: String): ErrorGenerator = {
val g = new ErrorGenerator(code, name)
exceptionsMap += ((code, g))
g
}
val NO_ERROR = mkErrorGen(0x0, "NO_ERROR")
val PROTOCOL_ERROR = mkErrorGen(0x1, "PROTOCOL_ERROR")
val INTERNAL_ERROR = mkErrorGen(0x2, "INTERNAL_ERROR")
val FLOW_CONTROL_ERROR = mkErrorGen(0x3, "FLOW_CONTROL_ERROR")
val SETTINGS_TIMEOUT = mkErrorGen(0x4, "SETTINGS_TIMEOUT")
val STREAM_CLOSED = mkErrorGen(0x5, "STREAM_CLOSED")
val FRAME_SIZE_ERROR = mkErrorGen(0x6, "FRAME_SIZE_ERROR")
val REFUSED_STREAM = mkErrorGen(0x7, "REFUSED_STREAM")
val CANCEL = mkErrorGen(0x8, "CANCEL")
val COMPRESSION_ERROR = mkErrorGen(0x9, "COMPRESSION_ERROR")
val CONNECT_ERROR = mkErrorGen(0xa, "CONNECT_ERROR")
val ENHANCE_YOUR_CALM = mkErrorGen(0xb, "ENHANCE_YOUR_CALM")
val INADEQUATE_SECURITY = mkErrorGen(0xc, "INADEQUATE_SECURITY")
val HTTP_1_1_REQUIRED = mkErrorGen(0xd, "HTTP_1_1_REQUIRED")
}
| http4s/blaze | http/src/main/scala/org/http4s/blaze/http/http2/Http2Exception.scala | Scala | apache-2.0 | 5,022 |
package org.genericConfig.admin.models.json.dependency
import play.api.libs.json.Json
/**
* Copyright (C) 2016 Gennadi Heimann genaheimann@gmail.com
*
* Created by Gennadi Heimann 14.05.2017
*/
// Inbound JSON envelope for a dependency request; carries only the request parameters.
case class JsonDependencyIn (
params: JsonDependencyParams
)
// Companion supplies the Play-JSON Reads used to deserialize incoming requests.
object JsonDependencyIn {
implicit val format = Json.reads[JsonDependencyIn]
} | gennadij/admin | shared/src/main/scala/org/genericConfig/admin/shared/dependency/JsonDependencyIn.scala | Scala | apache-2.0 | 348 |
/*
* Copyright (C) 2011-2013 org.bayswater
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bayswater.musicrest.model
import com.mongodb.casbah.Imports._
import com.mongodb.ServerAddress
/* Utility to get a Mongo Casbah client connection */
object MongoCasbahUtil {

  /**
   * Builds a Casbah [[MongoClient]] that authenticates `dbLogin` against the
   * `dbName` database on `dbHost:dbPort`.
   *
   * @param poolSize when present, caps the connection pool via
   *                 `MongoClientOptions(connectionsPerHost = ...)`; when absent,
   *                 driver defaults apply.
   */
  def buildMongoClient (dbHost: String, dbPort: Int, dbLogin: String, dbPassword: String, dbName: String, poolSize: Option[Int]= None) : MongoClient = {
    val address = new ServerAddress(dbHost, dbPort)
    val credentialList = List(MongoCredential.createCredential(dbLogin, dbName, dbPassword.toCharArray))
    poolSize.fold(MongoClient(address, credentialList)) { size =>
      MongoClient(address, credentialList, MongoClientOptions(connectionsPerHost = size))
    }
  }
}
| newlandsvalley/musicrest | src/main/scala/org/bayswater/musicrest/model/MongoCasbahUtil.scala | Scala | apache-2.0 | 1,384 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tracking
import cmwell.util.string.Base64.encodeBase64URLSafeString
import org.scalatest.{FlatSpec, Matchers, Succeeded}
/**
* Created by yaakov on 3/15/17.
*/
class TrackingSpec extends FlatSpec with Matchers {

  "TrackingId encode decode" should "be successful" in {
    val actorId = "myAwesomeActor"
    val originalTid = TrackingId(actorId)
    originalTid.token match {
      case TrackingId(extractedTid) => extractedTid should equal(originalTid)
      // fail() throws TestFailedException, so a non-matching token now actually
      // fails the test. The previous `Failed` was just returned as a value,
      // which ScalaTest ignores, letting the negative branch pass silently.
      case _ => fail("token could not be decoded back into a TrackingId")
    }
  }

  "Malformed token (not base64)" should "be None when unapplied" in
    assertNotValid("bla bla")

  "Malformed token (without createTime)" should "be None when unapplied" in
    assertNotValid(encodeBase64URLSafeString("id"))

  "Malformed token (malformed createTime)" should "be None when unapplied" in
    assertNotValid(encodeBase64URLSafeString("id|not-a-long"))

  // Asserts that the given token payload is rejected by the TrackingId extractor.
  private def assertNotValid(payload: String) = payload match {
    case TrackingId(_) => fail(s"payload '$payload' unexpectedly parsed as a TrackingId")
    case _ => Succeeded
  }
}
| dudi3001/CM-Well | server/cmwell-tracking/src/test/scala/cmwell/tracking/TrackingSpec.scala | Scala | apache-2.0 | 1,634 |
package com.twitter.util
/**
* A "stack" with a bounded size. If you push a new element on the top
* when the stack is full, the oldest element gets dropped off the bottom.
*
* Implemented as a ring buffer over a fixed array: `top` is the array index of
* the newest element and logical index i maps to array slot (top + i) % maxSize.
*/
class BoundedStack[A: ClassManifest](val maxSize: Int) extends Seq[A] {
private val array = new Array[A](maxSize)
// Array index of the newest element (logical index 0).
private var top = 0
// Number of elements currently held; never exceeds maxSize via +=.
private var count_ = 0
def length = count_
override def size = count_
def clear() {
top = 0
count_ = 0
}
/**
* Gets the element from the specified index in constant time.
* Index 0 is the most recently pushed element.
*/
def apply(i: Int): A = {
if (i >= count_) throw new IndexOutOfBoundsException(i.toString)
else array((top + i) % maxSize)
}
/**
* Pushes an element, possibly forcing out the oldest element in the stack.
*/
def +=(elem: A) {
// Move top backwards (wrapping) so the new element lands at logical index 0.
top = if (top == 0) maxSize - 1 else top - 1
array(top) = elem
if (count_ < maxSize) count_ += 1
}
/**
* Inserts an element 'i' positions down in the stack. An 'i' value
* of 0 is the same as calling this += elem. This is a O(n) operation
* as elements need to be shifted around.
*
* NOTE(review): when the stack is already full, the i == count_ branch writes
* past the logical end (wrapping onto the newest slot) and bumps count_ beyond
* maxSize — confirm whether inserting into a full stack is meant to be allowed.
*/
def insert(i: Int, elem: A) {
if (i == 0) this += elem
else if (i > count_) throw new IndexOutOfBoundsException(i.toString)
else if (i == count_) {
array((top + i) % maxSize) = elem
count_ += 1
} else {
// Overwrite slot i, then recursively shift the displaced element toward the top.
val swapped = this(i)
this(i) = elem
insert(i - 1, swapped)
}
}
/**
* Replaces an element in the stack.
*/
def update(index: Int, elem: A) {
array((top + index) % maxSize) = elem
}
/**
* Adds multiple elements, possibly overwriting the oldest elements in
* the stack. If the given iterable contains more elements that this
* stack can hold, then only the last maxSize elements will end up in
* the stack.
*/
def ++=(iter: Iterable[A]) {
for (elem <- iter) this += elem
}
/**
* Removes the top element in the stack.
*/
def pop: A = {
if (count_ == 0) throw new NoSuchElementException
else {
val res = array(top)
top = (top + 1) % maxSize
count_ -= 1
res
}
}
// Iterates newest-to-oldest (logical index order).
override def iterator = new Iterator[A] {
var idx = 0
def hasNext = idx != count_
def next = {
val res = apply(idx)
idx += 1
res
}
}
}
| travisbrown/util | util-core/src/main/scala/com/twitter/util/BoundedStack.scala | Scala | apache-2.0 | 2,264 |
package io.github.loustler.collection
import io.github.loustler.BaseTest
/**
* @author loustler
* @since 11/02/2018
*/
// Behavioral tests for ImplLogic's hand-rolled collection operations, checking
// them against the semantics of the standard TraversableLike equivalents.
class ImplLogictest extends BaseTest {
"ImplLogic" should "be able to partition like TraversableLike" in {
val x: List[Int] = List(1, 2, 3, 4, 5, 6, 7, 8, 9)
val f: Int => Boolean = a => a < 5
// partition splits into (elements satisfying f, elements failing f).
val (success: List[Int], fail: List[Int]) = ImplLogic.partition(x)(f)
success should contain allOf (1, 2, 3, 4)
fail should contain allOf (5, 6, 7, 8, 9)
}
it should "be able to return flatten list start from left" in {
val list1: List[Int] = 1 to 5 toList
val list2: List[Int] = 6 to 10 toList
val list3: List[Int] = 11 to 15 toList
val list4: List[Int] = 16 to 20 toList
val x: List[List[Int]] = List(list1, list2, list3, list4)
val expectedList: List[Int] = 1 to 20 toList
val result: List[Int] = ImplLogic.flattenLeft(x)
result should not be empty
result should contain allElementsOf expectedList
}
it should "be able to return flatten list start from right" in {
val list1: List[Int] = 1 to 5 toList
val list2: List[Int] = 6 to 10 toList
val list3: List[Int] = 11 to 15 toList
val list4: List[Int] = 16 to 20 toList
val x: List[List[Int]] = List(list1, list2, list3, list4)
val expectedList: List[Int] = 1 to 20 toList
val result: List[Int] = ImplLogic.flattenRight(x)
result should not be empty
result should contain allElementsOf expectedList
}
it should "be able to reverse a list start from left" in {
val x: List[Char] = "Hello World!".toCharArray.toList
val expectedList: List[Char] = List('!', 'd', 'l', 'r', 'o', 'W', ' ', 'o', 'l', 'l', 'e', 'H')
val result: List[Char] = ImplLogic.reverseLeft(x)
result should not be empty
result should contain allElementsOf expectedList
}
it should "be able to reverse a list start from right" in {
val x: List[Char] = "Hello World!".toCharArray.toList
val expectedList: List[Char] = List('!', 'd', 'l', 'r', 'o', 'W', ' ', 'o', 'l', 'l', 'e', 'H')
val result: List[Char] = ImplLogic.reverseRight(x)
result should not be empty
result should contain allElementsOf expectedList
}
it should "be able to create map from two list" in {
// Keys and values are zipped positionally into a Map.
val keys: List[String] = List("CP", "LeBRON", "KD", "Kyrie")
val values: List[Int] = List(3, 23, 35, 11)
val nba: Map[String, Int] = ImplLogic.toMap(keys, values)
nba should not be empty
nba should have size 4
}
}
| loustler/sKaLa | src/test/scala/io/github/loustler/collection/ImplLogictest.scala | Scala | apache-2.0 | 2,598 |
// spark-shell script: trains a KMeans model on the iris dataset, exports it as
// PMML, and probes two training rows against the learned clusters.
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.Vectors
// Load and parse the data
val data = sc.textFile("../datasets/iris_kmeans.csv")
// Each CSV line holds the four numeric iris features.
val parsedData = data.map(s => Vectors.dense(s.split(',').map(_.toDouble)))
// Cluster the data into three classes using KMeans
val numIterations = 20
val numClusters = 3
val kmeansModel = KMeans.train(parsedData, numClusters, numIterations)
// Export clustering model to PMML
kmeansModel.toPMML("../exported_pmml_models/kmeans.xml")
// Test model on training data
// Show cluster centers
val centers = kmeansModel.clusterCenters
// First from iris_kmeans.csv associated to Iris-setosa
var predictedCluster = kmeansModel.predict(Vectors.dense(5.1,3.5,1.4,0.2))
// Last from iris_kmeans.csv associated to Iris-virginica
predictedCluster = kmeansModel.predict(Vectors.dense(5.9,3.0,5.1,1.8))
| rahuldhote/spark-pmml-exporter-validator | src/main/resources/spark_shell_exporter/kmeans_iris.scala | Scala | agpl-3.0 | 868 |
import org.scalatest.FunSuite
import org.scalatest.Matchers._
// Unit tests for Account: initial equity and profit realisation.
class AccountTest extends FunSuite {
test("equity equals balance on start") {
val acc = Account(BigDecimal("100.0"), leverage = 100)
BigDecimal("100.0") should equal(acc.equity)
}
test("registerProfit") {
val acc = Account(BigDecimal("100.0"), leverage = 100)
// NOTE(review): Float literals are compared against what is presumably a
// BigDecimal balance — relies on Scala's cooperative numeric equality; confirm.
acc.realiseFloating(5F)
105F shouldEqual(acc.balance)
}
}
| AttilaForgacs/sfx2 | backtest/src/test/scala/AccountTest.scala | Scala | mit | 409 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript
import slamdata.Predef._
import quasar.Planner.NoFilesFound
import quasar.contrib.pathy._
import quasar.contrib.scalaz._
import quasar.fp._
import quasar.fp.ski._
import quasar.fs._
import quasar.qscript.MapFuncsCore._
import matryoshka._
import matryoshka.data._
import matryoshka.implicits._
import pathy.Path.{dir1, file1}
import scalaz._, Scalaz._
/** Converts any {Shifted}Read containing a directory to a union of all the
* files in that directory.
*/
// Typeclass: rewrites one layer of IN (a QScript node over directories) into the
// output functor OUT, expanding directory reads into unions of file reads.
// IT is the fixed-point type constructor; OUT the target instruction set.
trait ExpandDirs[IN[_]] {
type IT[F[_]]
type OUT[A]
def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
(OutToF: OUT ~> F, g: DiscoverPath.ListContents[M])
: IN[IT[F]] => M[F[IT[F]]]
}
// Companion: Aux pattern exposes the type members as type parameters so
// instances can be summoned with all three types pinned.
object ExpandDirs extends ExpandDirsInstances {
type Aux[T[_[_]], IN[_], F[_]] = ExpandDirs[IN] {
type IT[F[_]] = T[F]
type OUT[A] = F[A]
}
def apply[T[_[_]], IN[_], OUT[_]](implicit ev: ExpandDirs.Aux[T, IN, OUT]) =
ev
}
// Instance derivations: directory reads get real expansion (via ExpandDirsPath),
// branching nodes recurse into their branches (via ExpandDirsBranch), and every
// other node is passed through unchanged (via `default`).
abstract class ExpandDirsInstances {
def expandDirsPath[T[_[_]]: BirecursiveT, O[_]: Functor](
implicit
FI: Injectable.Aux[O, QScriptTotal[T, ?]],
QC: QScriptCore[T, ?] :<: O
): ExpandDirsPath[T, O] =
new ExpandDirsPath[T, O]
def expandDirsBranch[T[_[_]]: BirecursiveT]: ExpandDirsBranch[T] =
new ExpandDirsBranch[T]
// real instances
implicit def readDir[T[_[_]]: BirecursiveT, F[_]: Functor]
(implicit
R: Const[Read[AFile], ?] :<: F,
QC: QScriptCore[T, ?] :<: F,
FI: Injectable.Aux[F, QScriptTotal[T, ?]])
: ExpandDirs.Aux[T, Const[Read[ADir], ?], F] =
expandDirsPath[T, F].readDir
implicit def shiftedReadDir[T[_[_]]: BirecursiveT, F[_]: Functor]
(implicit
SR: Const[ShiftedRead[AFile], ?] :<: F,
QC: QScriptCore[T, ?] :<: F,
FI: Injectable.Aux[F, QScriptTotal[T, ?]])
: ExpandDirs.Aux[T, Const[ShiftedRead[ADir], ?], F] =
expandDirsPath[T, F].shiftedReadDir
// branch handling
implicit def qscriptCore[T[_[_]]: BirecursiveT, F[_]](implicit QC: QScriptCore[T, ?] :<: F)
: ExpandDirs.Aux[T, QScriptCore[T, ?], F] =
expandDirsBranch[T].qscriptCore[F]
implicit def thetaJoin[T[_[_]]: BirecursiveT, F[_]](implicit TJ: ThetaJoin[T, ?] :<: F)
: ExpandDirs.Aux[T, ThetaJoin[T, ?], F] =
expandDirsBranch[T].thetaJoin[F]
implicit def equiJoin[T[_[_]]: BirecursiveT, F[_]](implicit EJ: EquiJoin[T, ?] :<: F)
: ExpandDirs.Aux[T, EquiJoin[T, ?], F] =
expandDirsBranch[T].equiJoin[F]
// Coproducts dispatch to whichever side the node inhabits.
implicit def coproduct[T[_[_]], F[_], G[_], H[_]]
(implicit F: ExpandDirs.Aux[T, F, H], G: ExpandDirs.Aux[T, G, H])
: ExpandDirs.Aux[T, Coproduct[F, G, ?], H] =
new ExpandDirs[Coproduct[F, G, ?]] {
type IT[F[_]] = T[F]
type OUT[A] = H[A]
def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
(OutToF: OUT ~> F, g: DiscoverPath.ListContents[M])
= _.run.fold(F.expandDirs(OutToF, g), G.expandDirs(OutToF, g))
}
// Identity instance: injects the node unchanged into the output functor.
def default[T[_[_]], F[_], G[_]](implicit F: F :<: G)
: ExpandDirs.Aux[T, F, G] =
new ExpandDirs[F] {
type IT[F[_]] = T[F]
type OUT[A] = G[A]
def expandDirs[M[_]: Monad: MonadFsErr, H[_]: Functor]
(OutToF: OUT ~> H, g: DiscoverPath.ListContents[M]) =
fa => OutToF(F.inj(fa)).point[M]
}
implicit def deadEnd[T[_[_]], F[_]](implicit DE: Const[DeadEnd, ?] :<: F)
: ExpandDirs.Aux[T, Const[DeadEnd, ?], F] =
default
implicit def readFile[T[_[_]], F[_]](implicit R: Const[Read[AFile], ?] :<: F)
: ExpandDirs.Aux[T, Const[Read[AFile], ?], F] =
default
implicit def shiftedReadFile[T[_[_]], F[_]]
(implicit SR: Const[ShiftedRead[AFile], ?] :<: F)
: ExpandDirs.Aux[T, Const[ShiftedRead[AFile], ?], F] =
default
implicit def projectBucket[T[_[_]], F[_]]
(implicit PB: ProjectBucket[T, ?] :<: F)
: ExpandDirs.Aux[T, ProjectBucket[T, ?], F] =
default
}
// Does the actual expansion work: lists a directory recursively, wraps each file
// in a read node tagged with its name, and unions the results.
private[qscript] final class ExpandDirsPath[T[_[_]]: BirecursiveT, O[_]: Functor](
implicit FI: Injectable.Aux[O, QScriptTotal[T, ?]], QC: QScriptCore[T, ?] :<: O
) extends TTypes[T] {
// Right-fold the nodes into nested binary Unions (rooted at Unreferenced).
def union(elems: NonEmptyList[O[T[O]]]): O[T[O]] =
elems.foldRight1(
(elem, acc) => QC.inj(Union(QC.inj(Unreferenced[T, T[O]]()).embed,
elem.embed.cata[Free[QScriptTotal, Hole]](g => Free.roll(FI.inject(g))),
acc.embed.cata[Free[QScriptTotal, Hole]](g => Free.roll(FI.inject(g))))))
// Wrap a node's output in a singleton map keyed by the path segment name.
def wrapDir(name: String, d: O[T[O]]): O[T[O]] =
QC.inj(Map(d.embed, Free.roll(MFC(MakeMap(StrLit(name), HoleF)))))
// Recursively lists `dir`, producing one wrapped read node per reachable file.
// Listing errors on a subdirectory are swallowed, yielding an empty result.
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def allDescendents[M[_]: Monad: MonadFsErr]
(listContents: DiscoverPath.ListContents[M], wrapFile: AFile => O[T[O]])
: ADir => M[List[O[T[O]]]] =
dir => (listContents(dir) >>=
(ps => ISet.fromList(ps.toList).toList.traverseM(_.fold(
d => allDescendents[M](listContents, wrapFile).apply(dir </> dir1(d)) ∘ (_ ∘ (wrapDir(d.value, _))),
f => List(wrapDir(f.value, wrapFile(dir </> file1(f)))).point[M]))))
.handleError(κ(List.empty[O[T[O]]].point[M]))
// None when the directory tree contains no files at all.
def unionDirs[M[_]: Monad: MonadFsErr]
(g: DiscoverPath.ListContents[M], wrapFile: AFile => O[T[O]])
: ADir => M[Option[NonEmptyList[O[T[O]]]]] =
allDescendents[M](g, wrapFile).apply(_) ∘ {
case Nil => None
case h :: t => NonEmptyList.nel(h, t.toIList).some
}
// Expands a directory into the union of its files, or raises NoFilesFound.
def unionAll[M[_]: Monad: MonadFsErr, F[_]: Functor]
(OutToF: O ~> F, g: DiscoverPath.ListContents[M], wrapFile: AFile => O[T[O]])
: ADir => M[F[T[F]]] =
dir => unionDirs[M](g, wrapFile).apply(dir) >>= (_.fold[M[F[T[F]]]](
MonadError_[M, FileSystemError].raiseError(FileSystemError.qscriptPlanningFailed(NoFilesFound(List(dir)))))(
nel => OutToF(union(nel) ∘ (_.transAna[T[F]](OutToF))).point[M]))
// Instance for plain directory reads: each file becomes a Read node.
def readDir(implicit R: Const[Read[AFile], ?] :<: O): ExpandDirs.Aux[T, Const[Read[ADir], ?], O] =
new ExpandDirs[Const[Read[ADir], ?]] {
type IT[F[_]] = T[F]
type OUT[A] = O[A]
def wrapRead[F[_]](file: AFile) =
R.inj(Const[Read[AFile], T[F]](Read(file)))
def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
(OutToF: OUT ~> F, g: DiscoverPath.ListContents[M]) =
r => unionAll(OutToF, g, wrapRead[OUT]) apply r.getConst.path
}
// Instance for shifted directory reads: preserves the original IdStatus.
def shiftedReadDir(implicit SR: Const[ShiftedRead[AFile], ?] :<: O): ExpandDirs.Aux[T, Const[ShiftedRead[ADir], ?], O] =
new ExpandDirs[Const[ShiftedRead[ADir], ?]] {
type IT[F[_]] = T[F]
type OUT[A] = O[A]
def wrapRead[F[_]](file: AFile, idStatus: IdStatus) =
SR.inj(Const[ShiftedRead[AFile], T[F]](ShiftedRead(file, idStatus)))
def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
(OutToF: OUT ~> F, g: DiscoverPath.ListContents[M]) =
r => unionAll(OutToF, g, wrapRead[OUT](_, r.getConst.idStatus)) apply r.getConst.path
}
}
/** [[ExpandDirs]] instances for constructors that hold recursive branches
  * (`FreeQS`): directory expansion is applied to each branch via the total
  * `QScriptTotal` instance, while the node itself passes through unchanged.
  */
private[qscript] final class ExpandDirsBranch[T[_[_]]: BirecursiveT] extends TTypes[T] {
  // The fully-general instance, used to expand within embedded branches.
  private def ExpandDirsTotal = ExpandDirs[T, QScriptTotal, QScriptTotal]
  /** Expands directory reads inside a single branch, threading the effect `M`
    * through the `CoEnv` layers and converting back to `FreeQS` afterwards.
    */
  def applyToBranch[M[_]: Monad: MonadFsErr]
    (listContents: DiscoverPath.ListContents[M], branch: FreeQS)
      : M[FreeQS] =
    branch.transCataM[M, T[CoEnvQS], CoEnvQS](
      liftCoM[T, M, QScriptTotal, Hole, T[CoEnvQS]](
        ExpandDirsTotal.expandDirs(
          coenvPrism[QScriptTotal, Hole].reverseGet,
          listContents))
    ) ∘ (_.convertTo[FreeQS])
  /** Only `Union` and `Subset` carry branches in `QScriptCore`; all other
    * constructors are returned untouched.
    */
  def qscriptCore[O[_]](implicit QC: QScriptCore :<: O)
      : ExpandDirs.Aux[T, QScriptCore, O] =
    new ExpandDirs[QScriptCore] {
      type IT[F[_]] = T[F]
      type OUT[A] = O[A]
      def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
        (OutToF: OUT ~> F, g: DiscoverPath.ListContents[M]) =
        fa => (fa match {
          case Union(src, lb, rb) =>
            (applyToBranch(g, lb) ⊛ applyToBranch(g, rb))(Union(src, _, _))
          case Subset(src, lb, o, rb) =>
            (applyToBranch(g, lb) ⊛ applyToBranch(g, rb))(Subset(src, _, o, _))
          case x => x.point[M]
        }) ∘ (OutToF.compose(QC))
    }
  /** Expands both branches of a theta-join, leaving the join condition,
    * join type, and combiner untouched.
    */
  def thetaJoin[O[_]](implicit TJ: ThetaJoin :<: O)
      : ExpandDirs.Aux[T, ThetaJoin, O] =
    new ExpandDirs[ThetaJoin] {
      type IT[F[_]] = T[F]
      type OUT[A] = O[A]
      def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
        (OutToF: OUT ~> F, g: DiscoverPath.ListContents[M]) =
        fa => (fa match {
          case ThetaJoin(src, lb, rb, on, jType, combine) =>
            (applyToBranch(g, lb) ⊛ applyToBranch(g, rb))(
              ThetaJoin(src, _, _, on, jType, combine))
        }) ∘ (OutToF.compose(TJ))
    }
  /** Expands both branches of an equi-join, leaving keys, join type, and
    * combiner untouched.
    */
  def equiJoin[O[_]](implicit EJ: EquiJoin :<: O)
      : ExpandDirs.Aux[T, EquiJoin, O] =
    new ExpandDirs[EquiJoin] {
      type IT[F[_]] = T[F]
      type OUT[A] = O[A]
      def expandDirs[M[_]: Monad: MonadFsErr, F[_]: Functor]
        (OutToF: OUT ~> F, g: DiscoverPath.ListContents[M]) =
        fa => (fa match {
          case EquiJoin(src, lb, rb, k, jType, combine) =>
            (applyToBranch(g, lb) ⊛ applyToBranch(g, rb))(
              EquiJoin(src, _, _, k, jType, combine))
        }) ∘ (OutToF.compose(EJ))
    }
}
| drostron/quasar | connector/src/main/scala/quasar/qscript/ExpandDirs.scala | Scala | apache-2.0 | 9,764 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.createTable
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
/**
* Test functionality of create table with page size
*/
/**
 * Test functionality of create table with page size.
 *
 * Verifies that invalid values for the 'table_page_size_inmb' table property
 * (non-numeric, zero, negative, and above the supported maximum) are rejected
 * with a MalformedCarbonCommandException.
 */
class TestCreateTableWithPageSizeInMb extends QueryTest with BeforeAndAfterAll {

  override def beforeAll {
    sql("use default")
    sql("drop table if exists source")
  }

  /**
   * Asserts that creating a table with the given 'table_page_size_inmb' value
   * fails validation with the expected error message.
   */
  private def assertInvalidPageSize(value: String): Unit = {
    val ex = intercept[MalformedCarbonCommandException] {
      sql(
        "CREATE TABLE T1(name String) STORED AS CARBONDATA TBLPROPERTIES" +
          s"('table_page_size_inmb'='$value')")
    }
    assert(ex.getMessage.toLowerCase.contains("invalid table_page_size_inmb"))
  }

  test("test create table with invalid page size") {
    // non-numeric value
    assertInvalidPageSize("3X")
    // zero is below the allowed minimum
    assertInvalidPageSize("0")
    // negative values are rejected
    assertInvalidPageSize("-1")
    // values above the supported maximum are rejected
    assertInvalidPageSize("1999")
  }

  override def afterAll {
    sql("use default")
    sql("drop table if exists source")
  }
}
| zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithPageSizeInMb.scala | Scala | apache-2.0 | 2,514 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.api
import java.util.concurrent.TimeUnit
import com.github.benmanes.caffeine.cache.{CacheLoader, Caffeine}
import org.locationtech.geomesa.index.geotools.GeoMesaDataStore
import org.locationtech.geomesa.index.metadata.TableBasedMetadata
import org.locationtech.geomesa.utils.cache.CacheKeyGenerator
import org.locationtech.geomesa.utils.index.IndexMode
import org.locationtech.geomesa.utils.index.IndexMode.IndexMode
import org.opengis.feature.simple.SimpleFeatureType
import scala.util.control.NonFatal
/**
* Manages available indices and versions. @see GeoMesaFeatureIndex
*
* @param ds data store
*/
/**
 * Manages available indices and versions. @see GeoMesaFeatureIndex
 *
 * Index instances are created per feature type and cached with a configurable
 * time-to-live, keyed by an encoding of the feature type.
 *
 * @param ds data store
 */
class IndexManager(ds: GeoMesaDataStore[_]) {

  import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType

  // cache entry time-to-live, taken from the table metadata expiry configuration
  private val expiryMillis = TableBasedMetadata.Expiry.toDuration.get.toMillis

  // per-feature-type cache holding the indices both as an ordered sequence and keyed by identifier
  private val indexCache = Caffeine.newBuilder().expireAfterWrite(expiryMillis, TimeUnit.MILLISECONDS).build(
    new CacheLoader[String, (Seq[GeoMesaFeatureIndex[_, _]], Map[String, GeoMesaFeatureIndex[_, _]])]() {
      override def load(key: String): (Seq[GeoMesaFeatureIndex[_, _]], Map[String, GeoMesaFeatureIndex[_, _]]) = {
        val sft = CacheKeyGenerator.restore(key)
        val created = GeoMesaFeatureIndexFactory.create(ds, sft, sft.getIndices)
        val byIdentifier = created.map(i => i.identifier -> i).toMap
        (created, byIdentifier)
      }
    }
  )

  /**
   * Gets configured indices for this sft
   *
   * @param sft simple feature type
   * @param mode read/write mode
   * @return
   */
  def indices(sft: SimpleFeatureType, mode: IndexMode = IndexMode.Any): Seq[GeoMesaFeatureIndex[_, _]] = {
    try {
      val all = indexCache.get(CacheKeyGenerator.cacheKey(sft))._1
      all.filter(_.mode.supports(mode))
    } catch {
      case NonFatal(e) =>
        throw new IllegalArgumentException(s"Invalid indices for simple feature type '${sft.getTypeName}':", e)
    }
  }

  /**
   * Return an index with the specified identifier
   *
   * @param sft simple feature type
   * @param identifier identifier
   * @return
   */
  def index[T, U](sft: SimpleFeatureType, identifier: String, mode: IndexMode = IndexMode.Any): GeoMesaFeatureIndex[T, U] = {
    val byIdentifier = indexCache.get(CacheKeyGenerator.cacheKey(sft))._2
    val found = byIdentifier.getOrElse(identifier,
      throw new IllegalArgumentException(s"No index exists with identifier '$identifier'"))
    if (!found.mode.supports(mode)) {
      throw new IllegalArgumentException(s"Index '$identifier' does not support mode $mode")
    }
    found.asInstanceOf[GeoMesaFeatureIndex[T, U]]
  }
}
| aheyne/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/api/IndexManager.scala | Scala | apache-2.0 | 3,033 |
//
// SupportForSynchronousExecution.scala -- Scala trait SupportForSynchronousExecution
// Project OrcScala
//
// Created by dkitchin on Jul 10, 2010.
//
// Copyright (c) 2016 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.run.extensions
import orc.OrcRuntime
import orc.OrcEvent
import orc.HaltedOrKilledEvent
import orc.OrcExecutionOptions
import orc.ast.oil.nameless.Expression
import orc.error.runtime.ExecutionException
import orc.util.LatchingSignal
/** @author dkitchin
*/
/** Adds blocking (synchronous) execution support to an [[OrcRuntime]]: the
  * caller's thread waits until the program halts or is killed, rather than
  * returning immediately after dispatch.
  *
  * @author dkitchin
  */
trait SupportForSynchronousExecution extends OrcRuntime {
  // The thread currently blocked in runSynchronous, or null when idle.
  // Guarded by `synchronized`; at most one synchronous run is permitted at a time.
  protected var runSyncThread: Thread = null

  /** Wait for execution to complete, rather than dispatching asynchronously.
    * The continuation takes only values, not events.
    *
    * Wraps `eventHandler` so that a HaltedOrKilledEvent additionally releases
    * the internal latch, unblocking this method. Throws IllegalStateException
    * if a synchronous run is already in progress on this engine.
    */
  @throws(classOf[ExecutionException])
  @throws(classOf[InterruptedException])
  def runSynchronous(node: Expression, eventHandler: OrcEvent => Unit, options: OrcExecutionOptions) {
    synchronized {
      if (runSyncThread != null) throw new IllegalStateException("runSynchronous on an engine that is already running synchronously")
      runSyncThread = Thread.currentThread()
    }

    val doneSignal = new LatchingSignal()
    // Forward every event to the caller's handler, but also latch on halt/kill.
    def syncAction(event: OrcEvent) {
      event match {
        case HaltedOrKilledEvent => { doneSignal.signal() }
        case _ => {}
      }
      eventHandler(event)
    }

    try {
      this.run(node, syncAction, options)
      doneSignal.await()
    } finally {
      // Important: runSyncThread must be null before calling stop
      // (stop() interrupts runSyncThread when non-null — see override below,
      // and we must not interrupt ourselves here)
      synchronized {
        runSyncThread = null
      }
      this.stop()
    }
  }

  /** If no continuation is given, discard published values and run silently to completion. */
  @throws(classOf[ExecutionException])
  @throws(classOf[InterruptedException])
  def runSynchronous(node: Expression, options: OrcExecutionOptions) {
    runSynchronous(node, { _: OrcEvent => }, options)
  }

  /** Stops the engine and interrupts any thread blocked in runSynchronous. */
  abstract override def stop() = {
    super.stop()
    synchronized {
      if (runSyncThread != null) runSyncThread.interrupt()
    }
  }
}
| orc-lang/orc | OrcScala/src/orc/run/extensions/SupportForSynchronousExecution.scala | Scala | bsd-3-clause | 2,266 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset for code snippets that meet specific criteria and retrieves a sample of them, giving a basic overview of the dataset's contents without surfacing deeper insights.