code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package works.weave.socks.aws.orders
import com.fasterxml.jackson.annotation.JsonAutoDetect
import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.annotation.PropertyAccessor
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.SerializationFeature
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
object ProjectDefaultJacksonMapper {
  /**
   * Builds an [[ObjectMapper]] preconfigured with the project-wide defaults:
   * lenient deserialization (unknown properties ignored), all fields serialized
   * and visible regardless of accessors, pretty-printed output, ISO-8601 dates,
   * and the JSR-310 plus Scala modules registered.
   */
  def build(): ObjectMapper =
    new ObjectMapper()
      .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
      .setSerializationInclusion(JsonInclude.Include.ALWAYS)
      .setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY)
      .enable(SerializationFeature.INDENT_OUTPUT)
      .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
      .registerModule(new JavaTimeModule)
      .registerModule(new DefaultScalaModule())
}
| Compositional/orders-aws | src/main/scala/works.weave.socks.aws.orders/ProjectDefaultJacksonMapper.scala | Scala | apache-2.0 | 1,142 |
/**
* Copyright 2014-2015 Martin Cooper
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.martincooper.datatable.DataRowSpecs
import com.github.martincooper.datatable.{ DataColumn, DataRow, DataTable }
import org.scalatest.{ FlatSpec, Matchers }
class DataRowSpec extends FlatSpec with Matchers {

  /** Creates an 11-row test table with one Int, one String and one Boolean column. */
  private def buildTestTable(): DataTable = {
    val intColumn = new DataColumn[Int]("ColOne", (0 to 10).map(identity))
    val textColumn = new DataColumn[String]("ColTwo", (0 to 10).map("Value : " + _))
    val flagColumn = new DataColumn[Boolean]("ColThree", (0 to 10).map(_ => true))
    DataTable("TestTable", Seq(intColumn, textColumn, flagColumn)).get
  }

  "A new DataRow" can "be created with a valid table and index" in {
    val table = buildTestTable()
    val row = DataRow(table, 5)
    row.isSuccess shouldBe true
    row.get.rowIndex shouldBe 5
    row.get.table shouldBe table
  }

  "Creating a new DataRow" should "fail with an invalid index" in {
    val table = buildTestTable()
    val row = DataRow(table, 500)
    row.isSuccess shouldBe false
    row.failed.get.getMessage shouldBe "Invalid row index for DataRow."
  }
}
| martincooper/scala-datatable | src/test/scala/com/github/martincooper/datatable/DataRowSpecs/DataRowSpec.scala | Scala | apache-2.0 | 1,761 |
package freeslick.profile.utils
import slick.driver.JdbcDriver
// Mixin that builds ANSI-style CREATE/DROP SEQUENCE DDL for a JDBC driver profile.
trait FreeslickSequenceDDLBuilder {
// Self-type: may only be mixed into a JdbcDriver, which supplies
// quoteIdentifier, DDL and Sequence.
jdbcDriver: JdbcDriver =>
// Builds the "create sequence ..." / "drop sequence ..." statement pair for `seq`.
// Optional clauses (increment by / minvalue / maxvalue / start with / cycle)
// are appended only when the corresponding option is set on the sequence.
def buildSeqDDL(seq: Sequence[_]): DDL = {
val b = new StringBuilder append "create sequence " append quoteIdentifier(seq.name)
seq._increment.foreach { i =>
b append " increment by " append i
}
seq._minValue.foreach { m =>
b append " minvalue " append m
}
seq._maxValue.foreach { m =>
b append " maxvalue " append m
}
seq._start.foreach { s =>
b append " start with " append s
}
if (seq._cycle) {
b append " cycle"
//TODO Sue add nocache/cache size option
val cacheSize = 20 // Oracle default http://www.dba-oracle.com/t_sequence_caching.htm
// For cycling sequences the cache must not exceed the cycle length, so shrink
// it when the min..max span is smaller than the default cache size. Both bounds
// must be present for this to apply (hence the for-comprehension over Options).
for {
maxValue <- seq._maxValue
minValue <- seq._minValue
} yield {
try {
// Bounds are typed generically; round-trip through String to obtain Ints.
val cycleSize = math.abs(maxValue.toString.toInt - minValue.toString.toInt)
if (cacheSize > cycleSize) b append " cache " append cycleSize
} catch {
case _: Exception => //if max and min aren't convertible to ints, nothing to put here
}
}
}
DDL(b.toString, "drop sequence " + quoteIdentifier(seq.name))
}
}
| fommil/freeslick | src/main/scala/freeslick/profile/utils/FreeslickSequenceDDLBuilder.scala | Scala | lgpl-3.0 | 1,236 |
package org.kokho.scheduling.multicritical.system
import org.kokho.scheduling.{PeriodicTask, PeriodicTaskBehavior, TaskBehavior}
import org.scalatest.FlatSpec
/**
* Created with IntelliJ IDEA on 5/28/15.
* @author: Mikhail Kokho
*/
// Spec for HiCriticalTask; reuses the shared task/periodic-task behaviour suites.
class HiCriticalTaskTestSuite extends FlatSpec with TaskBehavior with PeriodicTaskBehavior{
// Fresh task per access: period 10, WCETs 4 (lo) and 6 (hi); the predicate
// selects which job releases run with the high execution budget.
def hiCriticalTask = HiCriticalTask(10, 4, 6, isOdd(_))
def isOdd(x: Int) = x % 2 == 1
"A high critical task" should behave like aTask(hiCriticalTask)
it should behave like aPeriodicTask(hiCriticalTask)
// NOTE(review): despite the "low WCET" title, job0 is asserted to take
// hiExecution and job1 loExecution — presumably the predicate argument marks
// hi-budget releases and isOdd(0) == false flips the mapping; confirm against
// HiCriticalTask's semantics before renaming the test.
it should "return jobs that take low WCET" in {
val jobs = hiCriticalTask.jobs()
val job0 = jobs.next()
val job1 = jobs.next()
assert(job0.length == hiCriticalTask.hiExecution)
assert(job1.length == hiCriticalTask.loExecution)
}
}
| mkokho/dynoslack | src/test/scala/org/kokho/scheduling/multicritical/system/HiCriticalTaskTestSuite.scala | Scala | apache-2.0 | 801 |
/*
* This file is a part of the "sur la plaque" toolkit for cycling
* data analytics and visualization.
*
* Copyright (c) 2013--2014 William C. Benton and Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.freevariable.surlaplaque.geometry
/**
 * A value of type T that can carry key/value annotations.
 *
 * @tparam T the concrete annotatable type returned by [[annotate]]
 * @tparam K the annotation key type
 * @tparam V the annotation value type
 */
trait GenericAnnotatable[T,K,V] {
type AnnotationKey = K
type AnnotationValue = V
// Returns a copy of this value with the (k, v) annotation attached.
def annotate(k: K, v: V): T
}
trait Annotatable[T] extends GenericAnnotatable[T, String, String] {} | willb/sur-la-plaque | analysis/src/main/scala/com/freevariable/surlaplaque/geometry/annotatable.scala | Scala | apache-2.0 | 978 |
package de.kappmeier.asnarc
import de.kappmeier.asnarc.board.{AsnarcBoard, Point}
import de.kappmeier.asnarc.render.{AsnarcJSEditorRenderer, AsnarcJSRenderer}
import de.kappmeier.asnarc.render.localization.AsnarcLocalizationDe
import org.scalajs.dom
import org.scalajs.dom.html
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
/**
* The ScalaJS export for Asnarc editor. Renders the board, waits for user input and updates the board.
*
* The visualization consists of a rectangular field.
*/
@JSExportTopLevel("AsnarcJSEditor")
object AsnarcJSEditor {
  /**
   * Entry point of the Asnarc level editor. Renders the given level onto the
   * canvas and installs a click handler that highlights the clicked board cell
   * and shows its element in the details canvas.
   *
   * @param canvas the main canvas the board is drawn on
   * @param level  serialized level description used to build the board
   */
  @JSExport
  def main(canvas: html.Canvas, level: String): Unit = {
    val board: AsnarcBoard = new AsnarcBoard(level)
    val localization = new AsnarcLocalizationDe
    val detailsCanvas: html.Canvas = dom.document.getElementById("canvas-details").asInstanceOf[html.Canvas]
    val renderer: AsnarcJSEditorRenderer = new AsnarcJSEditorRenderer(canvas, detailsCanvas, localization)
    renderer.renderBoard(board, "")
    canvas.onclick = (e: dom.MouseEvent) => {
      // clientX/clientY are Doubles; truncate with toInt. The previous
      // asInstanceOf[Int] on a Double is undefined behaviour in Scala.js
      // for fractional values (and fails under -Xcheck semantics).
      val x: Int = e.clientX.toInt / AsnarcJSRenderer.Size
      val y: Int = e.clientY.toInt / AsnarcJSRenderer.Size
      if (x < board.cols && y < board.rows) {
        renderer.renderBoard(board, "Click: " + board.elementAt(Point(x, y)) + " at " + x + "," + y)
        renderer.highlight(x, y)
        renderer.highlightElement(board.elementAt(Point(x, y)))
      }
    }
    // No keyboard interaction in the editor yet; kept as an explicit no-op handler.
    canvas.onkeydown = (e: dom.KeyboardEvent) => {
    }
  }
}
| kappmeier/asnarc | src/main/scala/de/kappmeier/asnarc/AsnarcJSEditor.scala | Scala | apache-2.0 | 1,510 |
import scala.tools.partest._
import java.io.File
// used to crash with an assertion failure in flatten because the type symbol created for the missing
// package was a ClassSymbol, not a PackageClassSymbol
// - isFlattenablePrefix(vanishingPackage) was true (wrongly)
// - therefore flatten tried to flatten the class defined in the package, but the class is
// top-level, vanishingClass.enclosingTopLevelClass is NoSymbol
object Test extends StoreReporterDirectTest {
  // Unused: each compilation round below supplies its own source text.
  def code = ???

  /** Compiles `code` with the partest library and the test output dir on the classpath. */
  def compileCode(code: String) = {
    val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
    compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
  }

  def show(): Unit = {
    // Round 1: compile a top-level class whose nested class refers to package `vanishing`.
    compileCode("""
class Outer {
class Nested extends vanishing.Vanishing
}
package vanishing {
class Vanishing
}
""")
    assert(filteredInfos.isEmpty, filteredInfos)
    // Make the package's class files disappear, then recompile code that only
    // references Outer; this must not crash even though `vanishing` is gone.
    deletePackage("vanishing")
    compileCode("""
class Test {
def f(o: Outer): Outer = o
}
""")
    assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) // Included a MissingRequirementError before.
  }

  /**
   * Deletes every class file of package `name` under the test output directory,
   * then the (now empty) directory itself.
   * Note: uses explicit `: Unit =` instead of the deprecated procedure syntax.
   */
  def deletePackage(name: String): Unit = {
    val directory = new File(testOutput.path, name)
    for (f <- directory.listFiles()) {
      assert(f.getName.endsWith(".class"))
      assert(f.delete())
    }
    assert(directory.listFiles().isEmpty)
    assert(directory.delete())
  }
}
| felixmulder/scala | test/files/run/t8502b.scala | Scala | bsd-3-clause | 1,488 |
/*
* This software is licensed under the GNU Affero General Public License, quoted below.
*
* This file is a part of PowerAPI.
*
* Copyright (C) 2011-2016 Inria, University of Lille 1.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI.
*
* If not, please consult http://www.gnu.org/licenses/agpl-3.0.html.
*/
package org.powerapi.core
import scala.concurrent.duration.DurationInt
import akka.actor.SupervisorStrategy.{Directive, Resume}
import akka.actor.{Actor, ActorLogging, OneForOneStrategy, SupervisorStrategy, SupervisorStrategyConfigurator}
/**
* Base trait for components which use Actor.
*
* @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
*/
trait ActorComponent extends Actor with ActorLogging {
/**
* Default behavior when a received message is unknown.
* Throwing (rather than silently dropping) lets the supervisor strategy
* decide how to handle the unexpected message.
*/
def default: Actor.Receive = {
case unknown => throw new UnsupportedOperationException(s"unable to process message $unknown")
}
}
/**
* Base trait for API components. A marker over [[ActorComponent]] that
* identifies actors belonging to the public PowerAPI API surface.
*
* @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
*/
trait APIComponent extends ActorComponent
/**
* Supervisor strategy.
*
* @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
*/
trait Supervisor extends ActorComponent {
// Implementors map specific failures to supervision directives (Resume, Restart, ...).
def handleFailure: PartialFunction[Throwable, Directive]
// One-for-one: only the failing child is affected; at most 10 restarts per second.
// Failures not covered by handleFailure fall back to Akka's default decider.
override def supervisorStrategy: SupervisorStrategy =
OneForOneStrategy(10, 1.seconds)(handleFailure orElse SupervisorStrategy.defaultStrategy.decider)
}
/**
* This class is used for defining a default supervisor strategy for the Guardian Actor.
* The Guardian Actor is the main actor used when system.actorOf(...) is used.
*
* @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
*/
class GuardianFailureStrategy extends SupervisorStrategyConfigurator {
// Invoked by Akka (via configuration) to build the guardian's strategy:
// one-for-one, at most 10 restarts within 1 second, default decider fallback.
def create(): SupervisorStrategy = {
OneForOneStrategy(10, 1.seconds)(handleFailure orElse SupervisorStrategy.defaultStrategy.decider)
}
// Resume on UnsupportedOperationException (thrown by ActorComponent.default
// for unknown messages) so the actor keeps its state and continues running.
def handleFailure: PartialFunction[Throwable, Directive] = {
case _: UnsupportedOperationException => Resume
}
}
| Spirals-Team/powerapi | powerapi-core/src/main/scala/org/powerapi/core/Component.scala | Scala | agpl-3.0 | 2,650 |
/**
* Created by Mathieu Leclaire on 19/04/18.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package org.openmole.gui.plugin.wizard.r
import org.openmole.core.services._
import org.openmole.core.workspace.Workspace
import org.openmole.gui.ext.data._
import org.openmole.gui.ext.data.DataUtils._
import org.openmole.tool.file._
import org.openmole.gui.ext.server._
import org.openmole.gui.ext.server.utils._
class RWizardApiImpl(s: Services) extends RWizardAPI {
import s._
import org.openmole.gui.ext.data.ServerFileSystemContext.project
// Generates an OpenMOLE RTask script for the wizard: builds the prototype
// declarations, wraps the R source file in an RTask, writes the resulting
// script to `target` and returns it wrapped in WizardToTask.
def toTask(
target: SafePath,
executableName: String,
command: String,
inputs: Seq[ProtoTypePair],
outputs: Seq[ProtoTypePair],
libraries: Option[String],
resources: Resources,
data: RWizardData): WizardToTask = {
// NOTE(review): "ouputs" looks like a typo of "outputs" — confirm whether
// WizardUtils.wizardModelData expects this exact key before fixing it.
val modelData = WizardUtils.wizardModelData(inputs, outputs, resources.all.map {
_.safePath.name
} :+ executableName, Some("inputs"), Some("ouputs"))
// Task variable name derived from the script's base name, e.g. model.R -> modelTask.
val task = s"${executableName.split('.').head.toLowerCase}Task"
// Assemble the script: prototype vals, the RTask sourcing the R file with its
// expanded wizard settings, and a ToStringHook on the task.
val content = modelData.vals +
s"""\nval $task = RTask(\"\"\"\n source("$executableName")\n \"\"\") set(\n""".stripMargin +
WizardUtils.expandWizardData(modelData) +
s""")\n\n$task hook ToStringHook()"""
target.toFile.content = content
WizardToTask(target)
}
// Parsing a launching command out of an R script is not supported by this wizard.
def parse(safePath: SafePath): Option[LaunchingCommand] = Option.empty[LaunchingCommand]
} | openmole/openmole | openmole/gui/plugins/org.openmole.gui.plugin.wizard.r/src/main/scala/org/openmole/gui/plugin/wizard/r/RWizardApiImpl.scala | Scala | agpl-3.0 | 2,062 |
import sbt._
import Keys._
object Quiet {
  // Workaround for noisy "... differs from Scala binary version in project" warnings
  // emitted during Ivy resolution; see https://github.com/scala/scala-dev/issues/100
  //
  // Wraps the Ivy configuration's logger so that only this specific warning is
  // downgraded to debug level; every other message is forwarded unchanged.
  // (Also removes two dead locals of the original: `message0`, which force-evaluated
  // the by-name message an extra time, and `newLevel`, a val bound to Unit.)
  def silenceScalaBinaryVersionWarning = ivyConfiguration := {
    ivyConfiguration.value match {
      case c: InlineIvyConfiguration =>
        val delegate = c.log
        val logger = new Logger {
          override def trace(t: => Throwable): Unit = delegate.trace(t)
          override def log(level: sbt.Level.Value, message: => String): Unit = {
            level match {
              case sbt.Level.Warn =>
                // Downgrade only the binary-version mismatch warning.
                if (message.contains("differs from Scala binary version in project"))
                  delegate.log(sbt.Level.Debug, message)
                else
                  delegate.log(level, message)
              case _ => delegate.log(level, message)
            }
          }
          override def success(message: => String): Unit = delegate.success(message)
        }
        // Rebuild the configuration with the filtering logger swapped in.
        new InlineIvyConfiguration(c.paths, c.resolvers, c.otherResolvers, c.moduleConfigurations, c.localOnly, c.lock, c.checksums, c.resolutionCacheDir, c.updateOptions, logger)
      case x => x
    }
  }
}
| felixmulder/scala | project/Quiet.scala | Scala | bsd-3-clause | 1,204 |
package com.github.pedrovgs.haveaniceday.smiles
import com.github.pedrovgs.haveaniceday.smiles.model.Smile
import generators.smiles._
import extensions.futures._
import com.github.pedrovgs.haveaniceday.smiles.storage.SmilesRepository
import com.github.pedrovgs.haveaniceday.utils.model.ItemNotFound
import org.scalacheck.Arbitrary._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.prop.PropertyChecks
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import specs.InMemoryDatabase
// Property-based spec for GetSmileById, backed by an in-memory database that is
// wiped before and after every test.
class GetSmileByIdSpec
extends FlatSpec
with Matchers
with InMemoryDatabase
with BeforeAndAfter
with PropertyChecks
with MockitoSugar {
private val repository = new SmilesRepository(database)
private val getSmile = new GetSmileById(repository)
before {
resetDatabase()
}
after {
resetDatabase()
}
// Arbitrary ids against an empty database must all miss.
"GetSmileById" should "return item not found error if the smile does not exist" in {
forAll(arbitrary[Long]) { id =>
val result = getSmile(id).awaitForResult
result shouldBe Left(ItemNotFound(id.toString))
}
}
// Stored but never-sent smiles are treated as absent by the use case.
it should "return item not found if the smile exist but it was not sent" in {
forAll(arbitraryNotSentSmile) { smile =>
val id = saveSmile(smile).id
val result = getSmile(id).awaitForResult
result shouldBe Left(ItemNotFound(id.toString))
}
}
it should "return the smile associated with the id passed as parameter" in {
forAll(arbitrarySentSmile) { smile =>
val savedSmile = saveSmile(smile)
val result = getSmile(savedSmile.id).awaitForResult
result shouldBe Right(savedSmile)
}
}
// Persists a single smile and returns it with its database-assigned id.
private def saveSmile(smile: Smile): Smile = repository.saveSmiles(Seq(smile)).awaitForResult.head
}
| pedrovgs/HaveANiceDay | src/test/scala/com/github/pedrovgs/haveaniceday/smiles/GetSmileByIdSpec.scala | Scala | gpl-3.0 | 1,739 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib
import java.lang.{Short => JShort}
import org.scalajs.jasminetest.JasmineTest
/**
* tests the implementation of the java standard library Short
*/
/**
 * tests the implementation of the java standard library Short
 */
object ShortTest extends JasmineTest {
describe("java.lang.Short") {
it("should provide `compareTo`") {
def compare(x: Short, y: Short): Int =
new JShort(x).compareTo(new JShort(y))
expect(compare(0.toShort, 5.toShort)).toBeLessThan(0)
expect(compare(10.toShort, 9.toShort)).toBeGreaterThan(0)
expect(compare(-2.toShort, -1.toShort)).toBeLessThan(0)
expect(compare(3.toShort, 3.toShort)).toEqual(0)
}
// Same comparisons, but dispatched through the erased Comparable interface.
it("should be a Comparable") {
def compare(x: Any, y: Any): Int =
x.asInstanceOf[Comparable[Any]].compareTo(y)
expect(compare(0.toShort, 5.toShort)).toBeLessThan(0)
expect(compare(10.toShort, 9.toShort)).toBeGreaterThan(0)
expect(compare(-2.toShort, -1.toShort)).toBeLessThan(0)
expect(compare(3.toShort, 3.toShort)).toEqual(0)
}
// All three parsing entry points must agree: parseShort, valueOf, constructor.
it("should parse strings") {
def test(s: String, v: Short): Unit = {
expect(JShort.parseShort(s)).toEqual(v)
expect(JShort.valueOf(s).shortValue()).toEqual(v)
expect(new JShort(s).shortValue()).toEqual(v)
}
test("0", 0)
test("5", 5)
test("127", 127)
test("-100", -100)
test("30000", 30000)
}
// Malformed or out-of-range input must throw (NumberFormatException).
it("should reject invalid strings when parsing") {
def test(s: String): Unit =
expect(() => JShort.parseShort(s)).toThrow
test("abc")
test("")
test("60000") // out of range
test("-90000") // out of range
}
}
}
| jmnarloch/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/javalib/ShortTest.scala | Scala | bsd-3-clause | 2,174 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.types
import org.apache.hadoop.hbase.util.Bytes
import org.apache.s2graph.core.mysqls.LabelMeta
// Constants shared by the HBase storage-format (de)serializers.
object HBaseType {
// Known on-disk format versions.
val VERSION4 = "v4"
val VERSION3 = "v3"
val VERSION2 = "v2"
val VERSION1 = "v1"
// val DEFAULT_VERSION = VERSION2
val DEFAULT_VERSION = VERSION3
// val EMPTY_SEQ_BYTE = Byte.MaxValue
val DEFAULT_COL_ID = 0
// Number of bits reserved for the edge direction in packed bytes.
val bitsForDir = 2
val maxBytes = Bytes.toBytes(Int.MaxValue)
// NOTE(review): the meaning of this sentinel (-5) is not evident from this
// file — confirm against the serializers that consume it.
val toSeqByte = -5.toByte
val defaultTgtVertexId = null
}
// Low-level decoders for the property/key-value byte layouts stored in HBase.
// Each decoder returns the parsed result together with the position just past
// the consumed bytes, so callers can continue scanning the same array.
object HBaseDeserializable {
import HBaseType._
// 6 bits is used for index sequence so total index per label is limited to 2^6
// Layout of the single byte at `offset`: bit 0 = isInverted flag, bits 1..7 = label order seq.
def bytesToLabelIndexSeqWithIsInverted(bytes: Array[Byte], offset: Int): (Byte, Boolean) = {
val byte = bytes(offset)
val isInverted = if ((byte & 1) != 0) true else false
val labelOrderSeq = byte >> 1
(labelOrderSeq.toByte, isInverted)
}
// Decodes [count:1 byte][count * (key:1 byte, value:variable)] into key/value pairs.
// NOTE(review): the `length` parameter is unused; the element count comes from
// the leading byte.
def bytesToKeyValues(bytes: Array[Byte],
offset: Int,
length: Int,
version: String = DEFAULT_VERSION): (Array[(Byte, InnerValLike)], Int) = {
var pos = offset
val len = bytes(pos)
pos += 1
val kvs = new Array[(Byte, InnerValLike)](len)
var i = 0
while (i < len) {
val k = bytes(pos)
pos += 1
// InnerVal.fromBytes reports how many bytes it consumed; advance by that amount.
val (v, numOfBytesUsed) = InnerVal.fromBytes(bytes, pos, 0, version)
pos += numOfBytesUsed
kvs(i) = (k -> v)
i += 1
}
val ret = (kvs, pos)
// logger.debug(s"bytesToProps: $ret")
ret
}
// Same layout as bytesToKeyValues, but values carry a timestamp (InnerValLikeWithTs).
def bytesToKeyValuesWithTs(bytes: Array[Byte],
offset: Int,
version: String = DEFAULT_VERSION): (Array[(Byte, InnerValLikeWithTs)], Int) = {
var pos = offset
val len = bytes(pos)
pos += 1
val kvs = new Array[(Byte, InnerValLikeWithTs)](len)
var i = 0
while (i < len) {
val k = bytes(pos)
pos += 1
val (v, numOfBytesUsed) = InnerValLikeWithTs.fromBytes(bytes, pos, 0, version)
pos += numOfBytesUsed
kvs(i) = (k -> v)
i += 1
}
val ret = (kvs, pos)
// logger.debug(s"bytesToProps: $ret")
ret
}
// Decodes [count:1 byte][count * value] — values only, no key bytes on disk
// (mirrors HBaseSerializable.propsToBytes). Every pair gets the placeholder
// key LabelMeta.emptySeq.
def bytesToProps(bytes: Array[Byte],
offset: Int,
version: String = DEFAULT_VERSION): (Array[(Byte, InnerValLike)], Int) = {
var pos = offset
val len = bytes(pos)
pos += 1
val kvs = new Array[(Byte, InnerValLike)](len)
var i = 0
while (i < len) {
val k = LabelMeta.emptySeq
val (v, numOfBytesUsed) = InnerVal.fromBytes(bytes, pos, 0, version)
pos += numOfBytesUsed
kvs(i) = (k -> v)
i += 1
}
// logger.error(s"bytesToProps: $kvs")
val ret = (kvs, pos)
ret
}
}
// Encoders matching the layouts decoded by HBaseDeserializable.
// All encoders emit a leading 1-byte element count, so at most 126 properties
// are supported (enforced by the asserts below).
object HBaseSerializable {
// Values only — keys are dropped (mirrors bytesToProps, which restores a
// placeholder key on read).
def propsToBytes(props: Seq[(Byte, InnerValLike)]): Array[Byte] = {
val len = props.length
assert(len < Byte.MaxValue)
var bytes = Array.fill(1)(len.toByte)
for ((k, v) <- props) bytes = Bytes.add(bytes, v.bytes)
bytes
}
// [count][count * (key byte, value bytes)].
def propsToKeyValues(props: Seq[(Byte, InnerValLike)]): Array[Byte] = {
val len = props.length
assert(len < Byte.MaxValue)
var bytes = Array.fill(1)(len.toByte)
for ((k, v) <- props) bytes = Bytes.add(bytes, Array.fill(1)(k), v.bytes)
bytes
}
// Same as propsToKeyValues, but values include a timestamp.
def propsToKeyValuesWithTs(props: Seq[(Byte, InnerValLikeWithTs)]): Array[Byte] = {
val len = props.length
assert(len < Byte.MaxValue)
var bytes = Array.fill(1)(len.toByte)
for ((k, v) <- props) bytes = Bytes.add(bytes, Array.fill(1)(k), v.bytes)
bytes
}
// Packs the 6-bit label order seq and the inverted flag into one byte:
// bit 0 = isInverted, bits 1..6 = labelOrderSeq (hence the < 2^6 assert).
def labelOrderSeqWithIsInverted(labelOrderSeq: Byte, isInverted: Boolean): Array[Byte] = {
assert(labelOrderSeq < (1 << 6))
val byte = labelOrderSeq << 1 | (if (isInverted) 1 else 0)
Array.fill(1)(byte.toByte)
}
}
// Anything that can be written to HBase as a raw byte array.
trait HBaseSerializable {
def bytes: Array[Byte]
}
// Companion-style decoder interface: parses a value out of `bytes` starting at
// `offset` and returns it with the number of bytes consumed.
trait HBaseDeserializable {
import HBaseType._
def fromBytes(bytes: Array[Byte],
offset: Int,
len: Int,
version: String = DEFAULT_VERSION): (HBaseSerializable, Int)
// def fromBytesWithIndex(bytes: Array[Byte],
// offset: Int,
// len: Int,
// version: String = DEFAULT_VERSION): (HBaseSerializable, Int)
// Standard failure for encodings this decoder does not understand.
def notSupportedEx(version: String) = new RuntimeException(s"not supported version, $version")
}
// Variant of HBaseDeserializable whose decoding differs for vertex ids
// (selected via the extra `isVertexId` flag).
trait HBaseDeserializableWithIsVertexId {
import HBaseType._
def fromBytes(bytes: Array[Byte],
offset: Int,
len: Int,
version: String = DEFAULT_VERSION,
isVertexId: Boolean = false): (HBaseSerializable, Int)
// Standard failure for encodings this decoder does not understand.
def notSupportedEx(version: String) = new RuntimeException(s"not supported version, $version")
}
| daewon/incubator-s2graph | s2core/src/main/scala/org/apache/s2graph/core/types/HBaseType.scala | Scala | apache-2.0 | 5,539 |
package com.arcusys.valamis.web.listener
import java.util.UUID
import com.arcusys.learn.liferay.LiferayClasses.{LBaseModelListener, LSocialActivity}
import com.arcusys.learn.liferay.LogFactoryHelper
import com.arcusys.learn.liferay.services.{CompanyHelper, UserLocalServiceHelper}
import com.arcusys.learn.liferay.util.PortalUtilHelper
import com.arcusys.valamis.certificate.model.{CertificateActivityType, CertificateStateFilter, CertificateStateType, CertificateStatuses}
import com.arcusys.valamis.certificate.service.CertificateStatusChecker
import com.arcusys.valamis.certificate.storage.CertificateStateRepository
import com.arcusys.valamis.lrs.service.util.TincanHelper._
import com.arcusys.valamis.lrs.service.{LrsClientManager, LrsRegistration}
import com.arcusys.valamis.lrs.tincan._
import com.arcusys.valamis.settings.storage.ActivityToStatementStorage
import com.arcusys.valamis.web.configuration.ioc.Configuration
import com.escalatesoft.subcut.inject.Injectable
import org.joda.time.DateTime
import scala.collection.JavaConverters._
// Liferay model listener that reacts to newly created social activities:
// re-checks in-progress certificates for the acting user and, for newly created
// assets, forwards a Tincan (xAPI) statement to the configured LRS.
class ActivityListener extends LBaseModelListener[LSocialActivity] with Injectable {
implicit lazy val bindingModule = Configuration
val logger = LogFactoryHelper.getLog(getClass)
private lazy val activityToStatementStorage = inject[ActivityToStatementStorage]
private lazy val lrsReader = inject[LrsClientManager]
private lazy val lrsRegistration = inject[LrsRegistration]
private lazy val certificateStatusChecker = inject[CertificateStatusChecker]
private lazy val certificateStateRepository = inject[CertificateStateRepository]
// Activity class names that must not trigger certificate re-checking
// (they are themselves produced by the certificate machinery).
val unsupportedForChecking = Set(
CertificateStateType.getClass.getName,
CertificateActivityType.getClass.getName
)
// This method must never let an exception escape: that would break the Liferay
// socialActivity entity being persisted. All work is wrapped in try/catch.
override def onAfterCreate(socialActivity: LSocialActivity) {
val userId = socialActivity.getUserId
// we need to setup company id for ModelListener
if (CompanyHelper.getCompanyId == 0L) {
CompanyHelper.setCompanyId(socialActivity.getCompanyId)
}
// Re-evaluate the user's in-progress certificates unless the activity comes
// from the certificate subsystem itself.
if (!unsupportedForChecking.contains(socialActivity.getClassName)) {
try {
certificateStateRepository.getBy(userId, CertificateStatuses.InProgress).foreach (
certificateStatusChecker.updateActivityGoalState(_, userId)
)
certificateStatusChecker.checkAndGetStatus(new CertificateStateFilter(Some(userId), statuses = Set(CertificateStatuses.InProgress)))
}
catch {
case e: Throwable => logger.error(e)
}
}
if (socialActivity.getAssetEntry != null) {
val asset = socialActivity.getAssetEntry
// check if new: a modified date differing from the create date means this is
// an update of an existing asset, so no statement is sent.
if (asset.getCreateDate != null
&& asset.getModifiedDate != null
&& asset.getCreateDate.compareTo(asset.getModifiedDate) != 0) return
try {
sendStatement(socialActivity, userId, CompanyHelper.getCompanyId)
}
catch {
case e: Throwable => logger.error(e)
}
}
}
// Builds and sends the xAPI statement for this activity, if the activity's
// class is mapped to a supported verb for the course; otherwise does nothing.
def sendStatement(socialActivity: LSocialActivity, userId: Long, companyId: Long)
{
val user = UserLocalServiceHelper().getUser(userId)
val courseId = socialActivity.getGroupId
// Verb configured per (course, activity class); None means "do not report".
val verbName = activityToStatementStorage
.getBy(courseId, socialActivity.getClassNameId)
.map(_.verb)
if (verbName.isDefined) {
val verb = verbName.get match {
case "completed" =>
Verb("http://adlnet.gov/expapi/verbs/completed", Map("en-US" -> "completed"))
case "attempted" =>
Verb("http://adlnet.gov/expapi/verbs/attempted", Map("en-US" -> "attempted"))
case "interacted" =>
Verb("http://adlnet.gov/expapi/verbs/interacted", Map("en-US" -> "interacted"))
case "experienced" =>
Verb("http://adlnet.gov/expapi/verbs/experienced", Map("en-US" -> "experienced"))
case _ => return
}
// Localized asset title/description maps, keyed by language code; empty
// translations are dropped.
val titleMap = socialActivity.getAssetEntry.getTitleMap.asScala.filter(!_._2.isEmpty)
.map(titleTuple => (titleTuple._1.getLanguage, titleTuple._2)).toMap[String, String]
val descriptionMap = socialActivity.getAssetEntry.getDescriptionMap.asScala.filter(!_._2.isEmpty)
.map(titleTuple => (titleTuple._1.getLanguage, titleTuple._2)).toMap[String, String]
val url = PortalUtilHelper.getLocalHostUrl(companyId, false)
val statement = Statement(
Option(UUID.randomUUID),
UserLocalServiceHelper().getUser(userId).getAgentByUuid,
verb,
Activity(
id = s"${url}/SocialActivity/${socialActivity.getPrimaryKey}",
name = Some(titleMap),
description = Some(descriptionMap)),
timestamp = DateTime.now,
stored = DateTime.now
)
val lrsAuth = lrsRegistration.getLrsEndpointInfo(AuthorizationScope.All).auth
lrsReader.statementApi(_.addStatement(statement), Some(lrsAuth), Seq(statement))
}
}
}
| igor-borisov/JSCORM | valamis-portlets/src/main/scala/com/arcusys/valamis/web/listener/ActivityListener.scala | Scala | gpl-3.0 | 4,934 |
/**
* Exercise 2:
*
* Write a loop that swaps adjacent elements of an array of integers. For example
* , Array(1, 2, 3, 4, 5) becomes Array(2, 1, 4, 3, 5).
*
**/
// Solution 1: imperative in-place swap. Iterates the even indices and swaps each
// element with its right neighbour; the guard skips a trailing unpaired element.
// Note: `result` is bound to Unit — a for-loop without `yield` produces no value.
val a = Array(1, 2, 3, 4, 5)
val result = for(i <- 0 until a.length by 2 if (i < a.length -1)) {
val temp = a(i)
a(i) = a(i + 1)
a(i + 1) = temp
}
a //=> Array(2, 1, 4, 3, 5): Array[Int]
// Solution 2: functional — split into pairs and reverse each pair. A trailing
// singleton group is left unchanged. Builds a new array.
val a = Array(1, 2, 3, 4, 5)
a.grouped(2).flatMap(_.reverse).toArray // => Array(2, 1, 4, 3, 5): Array[Int]
// Solution 3: same idea expressed as a for-comprehension over the pair groups.
val a = Array(1, 2, 3, 4, 5)
val result = for { b <- a.grouped(2); c <- b.reverse } yield c
result.toArray // => Array(2, 1, 4, 3, 5): Array[Int]
// Solution 4: explicit pattern match on each group — swaps full pairs and
// passes a trailing singleton through unchanged.
val a = Array(1, 2, 3, 4, 5)
a.grouped(2)
.flatMap{
case Array(x, y) => Array(y, x)
case Array(x) => Array(x)
}.toArray
// => Array(2, 1, 4, 3, 5): Array[Int]
| ragmha/scala-impatient | solutions/working-with-arrays/ex2.scala | Scala | mit | 813 |
package main.scala.org.cc.asm
import main.scala.org.cc.asm.Pos._
import main.scala.org.cc.asm.Pruner._
import scala.util.parsing.combinator.PackratParsers
import main.scala.org.cc.asm.Tokens._
import main.scala.org.cc.asm.AST._
import collection.immutable.PagedSeq
// Packrat parser for UASM expressions over a pre-tokenized, position-annotated
// token stream.
object UASMParser extends PackratParsers {
type Elem = Positioned[Token]
type PParser[T] = PackratParser[Positioned[T]]
// Accepts any token satisfying `p`; the error message only fires at end of input.
def tokenIf(p : Token => Boolean) = acceptIf((e : Elem) => p(e.value))(_ => "End of input")
def token(t : Token) = tokenIf(_ == t)
val anyToken = acceptIf(_ => true)(_ => "End of input")
def lparen = token(TLParen)
def rparen = token(TRParen)
def comma = token(TComma)
// Matches an operator token, preserving its source position.
def op : PParser[Op] = anyToken >> { (e : Elem) => e.value match {
case TOp(op) => success(e.map(_ => op))
case _ => failure("operator expexted at " + e.toString)
}}
// NOTE(review): the failure messages below say "integer expexted" (sic) even in
// the identifier/keyword parsers — copy-paste leftovers; they are runtime
// strings, so left untouched here.
def ident : PParser[String] = anyToken >> { (e : Elem) => e.value match {
case TIdent(n) => success(e.map(_ => n))
case _ => failure("integer expexted at " + e.toString)
}}
// Distinguishes the .text (true) and .asci (false) section directives.
def text : PParser[Boolean] = anyToken >> { (e : Elem) => e.value match {
case TIdent(".text") => success(e.map(_ => true ))
case TIdent(".asci") => success(e.map(_ => false))
case _ => failure("integer expexted at " + e.toString)
}}
def kwmacro : PParser[Null] = anyToken >> { (e : Elem) => e.value match {
case TIdent(".macro") => success(e.map(_ => null))
case _ => failure("integer expexted at " + e.toString)
}}
def align : PParser[Null] = anyToken >> { (e : Elem) => e.value match {
case TIdent(".align") => success(e.map(_ => null))
case _ => failure("integer expexted at " + e.toString)
}}
// Expression grammar: term = "." | call/identifier | int literal | parenthesized;
// then binary (term op term) is tried first, then unary (op term), then a bare term.
// NOTE(review): binary expressions parse a single operator application — there is
// no precedence climbing here; nesting requires parentheses.
lazy val expr : PParser[Expr] = {
val pdot : PParser[Expr] = token(TDot) ^^ ((e : Elem) => e.map(_ => Dot : Expr))
// Identifier optionally followed by an argument list: none, empty (), or comma-separated.
val pfun : PParser[Expr] = (ident ~ (lparen ~> (expr ~ (comma ~> expr).*).? <~ rparen).?) ^^ {
case e ~ None => fun(e , None)
case e ~ Some(None) => fun(e , Some(Nil))
case e ~ Some(Some(x ~ l)) => fun(e , Some(x :: l))
}
val paren : PParser[Expr] = lparen ~> expr <~ rparen
def int : PParser[Expr] = anyToken >> { (e : Elem) => e.value match {
case TInt(i) => success(e.map(_ => EInt(i) : Expr))
case _ => failure("integer expexted at " + e.toString)
}}
val term : PParser[Expr] = pdot | pfun | int | paren
val puni : PParser[Expr] = (op ~ term) ^^ { case o ~ e => uni(o, e) }
val pbin : PParser[Expr] = (term ~ op ~ term) ^^ { case l ~ o ~ r => bin(l,o,r) }
pbin | puni | term
}
/*lazy val statement : PParser[Statement] = {
case class Text(value : String, istext : Boolean) extends Statement
case class Align(value : Int) extends Statement
case class Def(name: String, args : Option[List[String]], body : AST) extends Statement
} */
// Runs `expr` over the token iterator (wrapped in a packrat reader for
// memoization); returns None on any parse failure.
def parse(i : Iterator[Positioned[Token]]) : Option[PExpr] = {
type T = Positioned[Token]
var reader : Input = { val ps = PagedSeq.fromIterator(i)
val read = new PagedSeqReaderAny[Elem](ps, null)
new PackratReader[Elem](read)
}
expr(reader) match {
case Success(e , _) => Some(e)
case _ => None
}
}
}
| christophe-calves/alpha | src/main/scala/org/cc/asm/UASMParser.scala | Scala | gpl-3.0 | 3,429 |
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.wegtam.tensei.agent.writers
import java.io.File
import java.net.InetAddress
import akka.actor.{ Actor, ActorLogging, ActorRef, ActorSystem, FSM, Props, Terminated }
import akka.cluster.pubsub.{ DistributedPubSub, DistributedPubSubMediator }
import akka.event.{ DiagnosticLoggingAdapter, Logging }
import akka.stream.alpakka.ftp.{ FtpSettings, FtpsSettings }
import akka.stream.alpakka.ftp.scaladsl._
import akka.stream.alpakka.ftp.{ FtpCredentials, SftpSettings }
import akka.stream.scaladsl.{ Flow, Sink, Source }
import akka.stream.{ ActorMaterializer, IOResult, OverflowStrategy }
import akka.util.ByteString
import com.wegtam.tensei.adt.{ ConnectionInformation, DFASDL }
import com.wegtam.tensei.agent.adt.ConnectionTypeFileFromNetwork
import com.wegtam.tensei.agent.helpers.{ LoggingHelpers, NetworkFileWriterHelper, URIHelpers }
import com.wegtam.tensei.agent.processor.UniqueValueBuffer
import com.wegtam.tensei.agent.processor.UniqueValueBuffer.UniqueValueBufferMessages
import com.wegtam.tensei.agent.writers.BaseWriter.BaseWriterMessages.{ AreYouReady, ReadyToWork }
import com.wegtam.tensei.agent.writers.BaseWriter.State.{ Closing, Initializing, Working }
import com.wegtam.tensei.agent.writers.BaseWriter.{
BaseWriterMessages,
DEFAULT_CHARSET,
DEFAULT_STOP_SIGN,
SKIP_STOP_SIGN_OPTION
}
import com.wegtam.tensei.agent.writers.FileWriterActor.FileWriterActorMessages.CloseResources
import com.wegtam.tensei.agent.writers.NetworkFileWriterActor.NetworkConnectionType.{
FtpConnection,
FtpsConnection,
SftpConnection
}
import com.wegtam.tensei.agent.writers.NetworkFileWriterActor.{
NetworkConnectionType,
NetworkFileWriterData
}
import org.dfasdl.utils.{ AttributeNames, DocumentHelpers }
import org.w3c.dom.traversal.{ DocumentTraversal, NodeFilter }
import org.w3c.dom.{ Document, Element }
import scala.collection.SortedSet
import scala.concurrent.Future
import scala.concurrent.duration.{ FiniteDuration, MILLISECONDS }
import scalaz.Scalaz._
object NetworkFileWriterActor {

  /**
    * Helper method to create a network file writer actor.
    *
    * @param target The connection information for the target data sink.
    * @param dfasdl The dfasdl describing the target file. It is needed to write sequence columns in the correct order.
    * @param agentRunIdentifier An optional agent run identifier which is usually an uuid.
    * @return The props to generate the actor.
    */
  def props(target: ConnectionInformation,
            dfasdl: DFASDL,
            agentRunIdentifier: Option[String]): Props =
    Props(new NetworkFileWriterActor(target, dfasdl, agentRunIdentifier))

  // Messages understood only by this writer (in addition to the base writer protocol).
  sealed trait NetworkFileWriterActorMessages

  object NetworkFileWriterActorMessages {
    // Requests that the writer complete its stream and release the remote connection.
    case object CloseResources extends NetworkFileWriterActorMessages
  }

  /**
    * A class that buffers the state of the file writer.
    *
    * @param closeRequester An option to the actor ref that requested the closing of the writer.
    * @param messages The message buffer with the already received writer messages.
    * @param readyRequests A list of actor refs that have asked if we are ready to work.
    * @param writer An actor that writes the messages to the target.
    */
  final case class NetworkFileWriterData(
      closeRequester: Option[ActorRef],
      messages: SortedSet[BaseWriterMessages.WriteData],
      readyRequests: List[ActorRef],
      writer: Option[ActorRef]
  )

  // The remote file transfer protocols supported by this writer.
  sealed trait NetworkConnectionType

  object NetworkConnectionType {
    case object FtpConnection extends NetworkConnectionType
    case object FtpsConnection extends NetworkConnectionType
    case object SftpConnection extends NetworkConnectionType
  }
}
class NetworkFileWriterActor(target: ConnectionInformation,
dfasdl: DFASDL,
agentRunIdentifier: Option[String])
extends BaseWriter(target = target)
with Actor
with FSM[BaseWriter.State, NetworkFileWriterData]
with ActorLogging
with BaseWriterFunctions
with DocumentHelpers
with NetworkFileWriterHelper {
// Create a distributed pub sub mediator.
import DistributedPubSubMediator.Publish
val mediator: ActorRef = DistributedPubSub(context.system).mediator
  override val log
    : DiagnosticLoggingAdapter = Logging(this) // Override the standard logger to be able to add stuff via MDC.
  log.mdc(LoggingHelpers.generateMdcEntryForRunIdentifier(agentRunIdentifier))

  // The actor system and materialiser are required by the alpakka ftp streams.
  implicit val actorSystem: ActorSystem = context.system
  implicit val materializer: ActorMaterializer = ActorMaterializer()

  // Interval between periodic flushes of buffered writer messages, read from config.
  lazy val writeTriggerInterval = FiniteDuration(
    context.system.settings.config
      .getDuration("tensei.agents.writers.network.write-interval", MILLISECONDS),
    MILLISECONDS
  )
  // Repeating timer that triggers flushing of buffered data to the target.
  setTimer("writeTrigger",
           BaseWriterMessages.WriteBufferedData,
           writeTriggerInterval,
           repeat = true)
lazy val dfasdlDocument: Document = createNormalizedDocument(dfasdl.content)
lazy val orderedDataElementIds: Vector[String] =
try {
val traversal = dfasdlDocument.asInstanceOf[DocumentTraversal]
val iterator = traversal.createNodeIterator(dfasdlDocument.getDocumentElement,
NodeFilter.SHOW_ELEMENT,
new DataElementFilter(),
true)
val builder = Vector.newBuilder[String]
var nextNode = iterator.nextNode()
while (nextNode != null) {
builder += nextNode.asInstanceOf[Element].getAttribute("id")
nextNode = iterator.nextNode()
}
builder.result()
} catch {
case e: Throwable =>
log.error(
e,
"An error occurred while trying to calculate the ordered target data element ids!"
)
Vector.empty[String]
}
  // Ids of all data elements marked as unique in the target DFASDL; their
  // values are published to the unique value buffer after writing.
  lazy val uniqueDataElementIds: Set[String] =
    getUniqueDataElements(dfasdlDocument).map(_.getAttribute("id"))

  @throws[Exception](classOf[Exception])
  override def postStop(): Unit = {
    // Clear the logging MDC and stop the periodic write trigger before stopping.
    log.clearMDC()
    cancelTimer("writeTrigger")
    super.postStop()
  }
  // The FSM starts in Initializing with an empty writer state.
  startWith(
    Initializing,
    NetworkFileWriterData(closeRequester = None,
                          messages = SortedSet.empty[BaseWriterMessages.WriteData],
                          readyRequests = List.empty[ActorRef],
                          writer = None)
  )

  when(Initializing) {
    // Build the streaming connection to the remote file and move to Working.
    case Event(BaseWriterMessages.InitializeTarget, data) =>
      val ftpWriter: Option[ActorRef] =
        URIHelpers.connectionType(target.uri) match {
          case ConnectionTypeFileFromNetwork =>
            val connectionType = getConnectionType(target)
            val credentials = getFtpCredentials(target)
            val host = InetAddress.getByName(target.uri.getHost)
            val port = target.uri.getPort
            // Fall back to the default FTP port if the URI specifies none.
            val takePort =
              if (port > 0) port
              else 21
            // An actor-backed source: every ByteString sent to the returned
            // ActorRef is streamed into the remote file sink.
            val byteSource: Source[ByteString, ActorRef] =
              Source.actorRef[ByteString](Int.MaxValue, OverflowStrategy.fail)
            val ftpConnection: Sink[ByteString, Future[IOResult]] =
              defineFtpConnection(connectionType, target, host, takePort, credentials)
            Option(
              Flow[ByteString].to(ftpConnection).runWith(byteSource)
            )
          case _ =>
            log.error("NetworkFileWriter not implemented for connection: {}", target.uri)
            None
        }
      goto(Working) using data.copy(
        writer = ftpWriter
      )
    // Remember readiness requests; they are answered on the transition to Working.
    case Event(AreYouReady, data) =>
      stay() using data.copy(readyRequests = sender() :: data.readyRequests)
  }
  when(Working) {
    // Buffer single write requests; they are flushed by the write trigger timer.
    case Event(msg: BaseWriterMessages.WriteData, data) =>
      log.debug("Got write request.")
      stay() using data.copy(messages = data.messages + msg)
    // Buffer batched write requests.
    case Event(msg: BaseWriterMessages.WriteBatchData, data) =>
      log.debug("Got bulk write request containing {} messages.", msg.batch.size)
      stay() using data.copy(messages = data.messages ++ msg.batch)
    // Flush all buffered messages to the stream writer and clear the buffer.
    case Event(BaseWriterMessages.WriteBufferedData, data) =>
      log.debug("Received write buffered data request.")
      data.writer.fold(log.error("No network file writer defined!"))(
        w => writeMessages(w, data.messages)
      )
      stay() using data.copy(messages = SortedSet.empty[BaseWriterMessages.WriteData])
    case Event(AreYouReady, data) =>
      sender() ! ReadyToWork
      stay() using data
    // Flush remaining messages, then close our resources and remember who asked.
    case Event(BaseWriterMessages.CloseWriter, data) =>
      log.debug("Got close request for NetworkFileWriter.")
      data.writer.fold(log.error("No network file writer defined!"))(
        w => writeMessages(w, data.messages)
      )
      self ! CloseResources
      goto(Closing) using data.copy(closeRequester = Option(sender()))
  }
when(Closing) {
case Event(CloseResources, data) =>
data.writer.foreach { w =>
w ! akka.actor.Status.Success("Success".getBytes)
context.watch(w)
}
stay() using data
case Event(Terminated(ref), data) =>
if (data.closeRequester.isDefined)
data.closeRequester.get ! BaseWriterMessages.WriterClosed("".right[String])
stay() using data
}
  // Once we become ready to work, answer every buffered readiness request.
  onTransition {
    case _ -> Working => nextStateData.readyRequests foreach (a => a ! ReadyToWork)
  }

  // Write requests received outside of the Working state are dropped with a warning.
  whenUnhandled {
    case Event(msg: BaseWriterMessages.WriteData, data) =>
      log.warning("Got unhandled network writer message!")
      stay() using data
    case Event(msg: BaseWriterMessages.WriteBatchData, data) =>
      log.warning("Got unhandled bulk network writer message!")
      stay() using data
  }

  // Start the FSM.
  initialize()
/**
* Initialize the target.
*
* @return Returns `true` upon success and `false` if an error occurred.
*/
override def initializeTarget: Boolean = {
val file = new File(target.uri.getSchemeSpecificPart)
file.createNewFile()
}
  /**
    * Create a sink that appends incoming bytes to the remote file located at
    * the path of the target URI, using the requested transfer protocol.
    *
    * @param connectionType The protocol to use (FTP, FTPS or SFTP).
    * @param target The connection information for the target data sink.
    * @param host The resolved address of the remote host.
    * @param port The port to connect to.
    * @param credentials The credentials for the remote server.
    * @return A sink consuming `ByteString`s and materialising an IO result.
    */
  private def defineFtpConnection(
      connectionType: NetworkConnectionType,
      target: ConnectionInformation,
      host: InetAddress,
      port: Int,
      credentials: FtpCredentials
  ): Sink[ByteString, Future[IOResult]] = {
    val path = target.uri.getPath
    connectionType match {
      case FtpConnection =>
        val settings =
          FtpSettings(
            host,
            port,
            credentials,
            binary = true,
            passiveMode = false
          )
        Ftp.toPath(path, settings, append = true)
      case FtpsConnection =>
        val settings =
          FtpsSettings(
            host,
            port,
            credentials,
            binary = true,
            passiveMode = false
          )
        Ftps.toPath(path, settings, append = true)
      case SftpConnection =>
        val settings =
          SftpSettings(
            host,
            port,
            credentials,
            strictHostKeyChecking = false
          )
        Sftp.toPath(path, settings, append = true)
      // Unreachable as long as NetworkConnectionType remains sealed with
      // exactly the three cases handled above; kept as a defensive guard.
      case _ => throw new RuntimeException(s"Connection type not implemented $connectionType")
    }
  }
  /**
    * Analyze the stopSign and write specific characters not from the variable but as defined String.
    *
    * Scans the stop sign character by character. A backslash starts a two
    * character escape sequence; only the tab escape is translated, every other
    * escape sequence is written out literally. Plain characters are written
    * one at a time.
    *
    * @param stopSign The given stopSign of the data element.
    * @param writer The network file writer.
    * @param charset The name of the charset to use.
    */
  private def analyzeStopSign(stopSign: String, writer: ActorRef, charset: String): Unit = {
    var part = ""
    for (i <- 1 to stopSign.length) {
      part = part + stopSign.charAt(i - 1)
      if (part.startsWith("\\\\")) {
        // An escape sequence is complete once it has two characters.
        if (part.length == 2) {
          // Tab
          if (part.equals("\\\\t"))
            writer ! ByteString("\\t".getBytes(charset))
          else
            writer ! ByteString(part.getBytes(charset))
          part = ""
        }
      }
      // Stop sign that does not start with a backslash
      else {
        writer ! ByteString(part.getBytes(charset))
        part = ""
      }
    }
  }
/**
* Process a batch of `WriterMessage`s and write them to the target filewriter.
*
* @param target The filewriter that should consume the data.
* @param messages A list of `WriterMessage`s.
*/
private def writeMessages(target: ActorRef,
messages: SortedSet[BaseWriterMessages.WriteData]): Unit = {
val uniqueValues = getUniqueMessageValues(dfasdl, messages, uniqueDataElementIds)
messages.foreach(msg => writeMessage(target, msg))
uniqueValues.foreach(
p =>
mediator ! Publish(UniqueValueBuffer.UNIQUE_VALUE_BUFFER_CHANNEL,
UniqueValueBufferMessages.StoreS(p._1, p._2))
)
}
  /**
    * Pass the given data into a filewrite.
    *
    * The payload is serialised to bytes depending on its runtime type and
    * sent to the stream writer, followed by the stop sign unless the message
    * options request skipping it.
    *
    * @param target The filewriter that should consume the data.
    * @param message The `WriterMessage` containing the data and possible options.
    */
  private def writeMessage(target: ActorRef, message: BaseWriterMessages.WriteData): Unit = {
    val stopSign: Option[String] = getOption(AttributeNames.STOP_SIGN, message.options)
    val charset: Option[String] = getOption(AttributeNames.ENCODING, message.options)
    // Serialise the payload into raw bytes depending on its runtime type.
    val bs: Array[Byte] =
      message.data match {
        case binary: Array[Byte] => binary
        case byteString: ByteString =>
          byteString.utf8String.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case date: java.sql.Date =>
          date.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case date: java.time.LocalDate =>
          date.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case decimal: java.math.BigDecimal =>
          decimal.toPlainString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case number: Number =>
          number.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case string: String => string.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case time: java.sql.Time =>
          time.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case time: java.time.LocalTime =>
          time.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case timestamp: java.sql.Timestamp =>
          timestamp.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        case timestamp: java.time.OffsetDateTime =>
          timestamp.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
        // `None` payloads produce no bytes (only the stop sign may be written).
        case None => Array.empty[Byte]
        case _ =>
          log.warning("Using generic writer algorithm for unsupported data format {}!",
                      message.data.getClass)
          message.data.toString.getBytes(charset.getOrElse(DEFAULT_CHARSET))
      }
    target ! ByteString(bs)
    // Append the stop sign unless the message explicitly asks to skip it.
    if (getOption(SKIP_STOP_SIGN_OPTION, message.options).isEmpty)
      analyzeStopSign(stopSign.getOrElse(DEFAULT_STOP_SIGN),
                      target,
                      charset.getOrElse(DEFAULT_CHARSET))
  }
}
| Tensei-Data/tensei-agent | src/main/scala/com/wegtam/tensei/agent/writers/NetworkFileWriterActor.scala | Scala | agpl-3.0 | 15,719 |
package dispatch.spec
import org.scalacheck._
object RetrySpecification
extends Properties("Retry Handling")
with DispatchCleanup {
  import Prop.{forAll,AnyOperators}
  import Gen._

  // Local test server that echoes the value of the `echo` request parameter.
  val server = {
    import unfiltered.netty
    import unfiltered.response._
    import unfiltered.request._
    object Echo extends Params.Extract("echo", Params.first)
    netty.Http.anylocal.handler(netty.cycle.Planify {
      case Params(Echo(echo)) =>
        PlainTextContent ~> ResponseString(echo)
    }).start()
  }

  import dispatch._
  import scala.concurrent.duration.Duration
  import java.util.concurrent.TimeUnit
  import io.netty.util.{Timer, HashedWheelTimer}

  // We're using a very fine grained timer, and short retry intervals,
  // to keep the tests fast. These are unlikely to be good settings
  // for an application.
  implicit val timer: Timer =
    new HashedWheelTimer(1, TimeUnit.MILLISECONDS)

  val localhost = host("127.0.0.1", server.port)

  // Keep the retry counts small so each property stays fast.
  val smallNums = Gen.choose(0, 10)

  // Issues one request per attempt. The produced future is Right(num) only
  // when the current attempt number equals `successRetry`, Left(num) otherwise.
  class RetryCounter {
    private val retried = new java.util.concurrent.atomic.AtomicInteger
    def succeedOn(successRetry: Int)() = {
      Http(localhost << Map("echo" -> retried.getAndIncrement.toString)
        OK as.String).either.map { eth =>
        eth.right.flatMap { numstr =>
          val num = numstr.toInt
          if (num == successRetry)
            Right(num)
          else
            Left(num)
        }
      }
    }
  }

  property("succeed on the first request") = forAll(smallNums) { maxRetries =>
    val rc = new RetryCounter
    val p = retry.Backoff(maxRetries)(rc.succeedOn(0))
    p() ?= Right(0)
  }

  property("succeed on the max retry") = forAll(smallNums) { maxRetries =>
    val rc = new RetryCounter
    val p = retry.Directly(maxRetries)(rc.succeedOn(maxRetries))
    p() ?= Right(maxRetries)
  }

  // One attempt past the allowed retries must surface the last failure.
  property("fail after max retries") = forAll(smallNums) { maxRetries =>
    val rc = new RetryCounter
    val p = retry.Directly(maxRetries)(rc.succeedOn(maxRetries + 1))
    p() ?= Left(maxRetries)
  }

  property("succeed on the max backoff retry") = forAll(smallNums) { max =>
    val rc = new RetryCounter
    val p = retry.Backoff(
      max,
      Duration(2, TimeUnit.MICROSECONDS)
    )(rc.succeedOn(max))
    p() ?= Right(max)
  }

  property("fail after max pause retry") = forAll(smallNums) { max =>
    val rc = new RetryCounter
    val p = retry.Pause(
      max,
      Duration(500, TimeUnit.MICROSECONDS)
    )(rc.succeedOn(max + 1))
    p() ?= Left(max)
  }
}
| maiflai/reboot | core/src/test/scala/retry.scala | Scala | lgpl-3.0 | 2,615 |
package almhirt.configuration
import scala.concurrent.duration.FiniteDuration
/** Settings that control how a failed operation is retried.
  * `pause` is the delay between attempts; `infiniteLoopPause`, when set,
  * is an additional delay used when retrying without a limit. */
sealed trait RetrySettings { def pause: FiniteDuration; def infiniteLoopPause: Option[FiniteDuration] }

/** Retry until a total amount of time (`maxTime`) has been spent. */
final case class TimeLimitedRetrySettings(pause: FiniteDuration, maxTime: FiniteDuration, infiniteLoopPause: Option[FiniteDuration]) extends RetrySettings

/** Retry at most `maxAttempts` times. */
final case class AttemptLimitedRetrySettings(pause: FiniteDuration, maxAttempts: Int, infiniteLoopPause: Option[FiniteDuration]) extends RetrySettings
| chridou/almhirt | almhirt-common/src/main/scala/almhirt/configuration/RetrySettings.scala | Scala | apache-2.0 | 492 |
package net.rrm.ehour.ui.common.panel.multiselect
import java.{util => ju}
import com.google.common.collect.Lists
import net.rrm.ehour.domain.User
import net.rrm.ehour.ui.common.border.GreyBlueRoundedBorder
import net.rrm.ehour.ui.common.panel.AbstractBasePanel
import net.rrm.ehour.ui.common.wicket.WicketDSL._
import net.rrm.ehour.ui.common.wicket.{Container, Event, NonEmptyLabel}
import net.rrm.ehour.user.service.UserService
import org.apache.wicket.AttributeModifier
import org.apache.wicket.ajax.AjaxRequestTarget
import org.apache.wicket.event.Broadcast
import org.apache.wicket.markup.head.{CssHeaderItem, IHeaderResponse}
import org.apache.wicket.markup.html.list.{ListItem, ListView}
import org.apache.wicket.model.util.ListModel
import org.apache.wicket.model.{IModel, PropertyModel}
import org.apache.wicket.request.resource.CssResourceReference
import org.apache.wicket.spring.injection.annot.SpringBean
import scala.collection.mutable.{Map => MMap}
// Panel presenting all active users on the left and the selected users on the
// right. Clicking a user toggles the selection; every change is broadcast as a
// SelectionUpdatedEvent so parent components can react.
class MultiUserSelect(id: String, model: IModel[ju.List[User]] = new ListModel[User](Lists.newArrayList())) extends AbstractBasePanel(id, model) with Filterable with Highlights {
  val Css = new CssResourceReference(classOf[MultiUserSelect], "multiUserSelect.css")

  val SelectedContainerId = "selectedContainer"
  val SelectedUsersListId = "selectedUsers"
  val AllUsersBorderId = "allBorder"

  // Maps a user to the markup id of its entry in the "all users" list so the
  // entry can be visually deselected when the user is removed from selection.
  val userToItemId: MMap[User, String] = MMap.empty

  val self = this

  override def listFilterId = "#filterUserInput"

  override def listId = "#allUsers"

  @SpringBean
  var userService: UserService = _

  override def onInitialize() {
    super.onInitialize()

    userToItemId.clear()

    val users = userService.getActiveUsers
    ju.Collections.sort(users)

    val allBorder = new GreyBlueRoundedBorder(AllUsersBorderId)
    addOrReplace(allBorder)

    val selectedContainer = new Container(SelectedContainerId)
    addOrReplace(selectedContainer)
    selectedContainer.setOutputMarkupId(true)

    selectedContainer.addOrReplace(createSelectedUserView(SelectedUsersListId, model))
    allBorder.addOrReplace(createAllUserView("users", users))
  }

  private def selectedContainer = get(SelectedContainerId)

  // The model backing the "selected users" list view.
  def selectedUsers = selectedContainer.get(SelectedUsersListId).getDefaultModel.asInstanceOf[IModel[ju.List[User]]]

  // List of all active users; clicking an entry toggles its membership in the
  // selection and updates the highlight state on the client side.
  def createAllUserView(id: String, users: ju.List[User]): ListView[User] = {
    val selected = selectedUsers.getObject

    new ListView[User](id, users) {
      override def populateItem(item: ListItem[User]) {
        val itemModel = item.getModel
        val user = itemModel.getObject

        item.add(ajaxClick({
          target => {
            val users = selectedUsers.getObject

            val markupId = item.getMarkupId

            if (users.contains(user)) {
              target.appendJavaScript("listHighlight.deselect('%s')" format markupId)
              users.remove(user)
            } else {
              target.appendJavaScript("listHighlight.select('%s')" format markupId)
              users.add(user)
              ju.Collections.sort(users)
              userToItemId.put(user, markupId)
            }

            target.add(selectedContainer)
            sendEvent(target)
          }
        }))

        // Pre-highlight entries that are already part of the selection.
        if (selected.contains(user)) {
          item.add(AttributeModifier.append("class", "selected"))
        }

        item.add(new NonEmptyLabel("name", new PropertyModel(itemModel, "fullName")))
      }
    }
  }

  // List of currently selected users; clicking an entry removes it from the
  // selection and clears its highlight in the "all users" list.
  def createSelectedUserView(id: String, users: IModel[ju.List[User]]): ListView[User] = {
    new ListView[User](id, users) {
      override def populateItem(item: ListItem[User]) {
        val itemModel = item.getModel

        item.add(ajaxClick({
          target => {
            val users = selectedUsers.getObject
            val user = itemModel.getObject
            users.remove(user)

            target.add(selectedContainer)

            userToItemId.get(user) match {
              case Some(itemId) => target.appendJavaScript("listHighlight.deselect('%s')" format itemId)
              case None =>
            }

            sendEvent(target)
          }
        }))

        item.add(new NonEmptyLabel("name", new PropertyModel(itemModel, "fullName")))
      }
    }
  }

  // Bubble a selection-change event up the component hierarchy.
  def sendEvent(target: AjaxRequestTarget) {
    send(self, Broadcast.BUBBLE, SelectionUpdatedEvent(target))
  }

  override def renderHead(response: IHeaderResponse) {
    super.renderHead(response)

    response.render(CssHeaderItem.forReference(Css))
  }
}
case class SelectionUpdatedEvent(override val target: AjaxRequestTarget) extends Event(target) | momogentoo/ehour | eHour-wicketweb/src/main/scala/net/rrm/ehour/ui/common/panel/multiselect/MultiUserSelect.scala | Scala | gpl-2.0 | 4,601 |
package play.api.libs.json.ops
import play.api.libs.json._
import scala.language.experimental.macros
import scala.reflect.macros.Context
object JsonMacroOps {

  /**
   * Creates a [[Format]] at compile time with Play's [[Json.format]] macro, but returns it as an [[OFormat]].
   *
   * [[OFormat]]s have writes that return [[JsObject]] instead of [[JsValue]], which will always be correct
   * for any macro-based case class [[Format]].
   *
   * Play will always actually return an [[OFormat]] instance, but for some reason they chose to
   * return it as [[Format]] instead (not sure why). This macro basically just hides the ugly task of
   * calling .asInstanceOf[OFormat], which you can safely assume is always valid here.
   */
  def oformat[A]: OFormat[A] = macro JsonMacroImpl.oformatImpl[A]

  /**
   * Creates a [[Writes]] at compile time with Play's [[Json.writes]] macro, but returns it as an [[OWrites]].
   *
   * [[OWrites]]s have writes that return [[JsObject]] instead of [[JsValue]], which will always be correct
   * for any macro-based case class [[Writes]].
   *
   * Play will always actually return an [[OWrites]] instance, but for some reason they chose to
   * return it as [[Writes]] instead (not sure why). This macro basically just hides the ugly task of
   * calling .asInstanceOf[OWrites], which you can safely assume is always valid here.
   */
  def owrites[A]: OWrites[A] = macro JsonMacroImpl.owritesImpl[A]
}
object JsonMacroImpl {
  /**
   * A simple macro that just calls .asInstanceOf on the [[Json.format]] generated format.
   *
   * The cast only recovers a more precise static type; it relies on Play's
   * Json.format macro materialising an OFormat instance at runtime.
   */
  def oformatImpl[A: c.WeakTypeTag](c: Context): c.Expr[OFormat[A]] = {
    import c.universe._
    val expFormatA = JsMacroImpl.formatImpl[A](c)
    reify {
      expFormatA.splice.asInstanceOf[OFormat[A]]
    }
  }
  /**
   * A simple macro that just calls .asInstanceOf on the [[Json.writes]] generated writer.
   *
   * The cast only recovers a more precise static type; it relies on Play's
   * Json.writes macro materialising an OWrites instance at runtime.
   */
  def owritesImpl[A: c.WeakTypeTag](c: Context): c.Expr[OWrites[A]] = {
    import c.universe._
    val expFormatA = JsMacroImpl.writesImpl[A](c)
    reify {
      expFormatA.splice.asInstanceOf[OWrites[A]]
    }
  }
} | jeffmay/play-json-ops | play23-json-ops/src/main/scala/play/api/libs/json/ops/JsonMacroOps.scala | Scala | apache-2.0 | 2,109 |
package aug.profile
import java.awt.Component
import java.io.File
import java.lang.Boolean
import java.util
import java.util.concurrent.PriorityBlockingQueue
import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong}
import javax.swing.{BorderFactory, JSplitPane, SwingUtilities}
import aug.gui.text.{ConsoleTextArea, HasHighlight, SplittableTextArea}
import aug.gui.{MainWindow, ProfilePanel}
import aug.io.{Mongo, PrefixSystemLog, Telnet}
import aug.script.framework._
import aug.script.framework.tools.ScalaUtils
import aug.script.{Client, ClientCaller, ClientTimeoutException, ScriptLoader}
import aug.util.Util
import com.typesafe.scalalogging.Logger
import org.mongodb.scala.{MongoClient, MongoDatabase}
import org.slf4j.LoggerFactory
import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}
/**
 * An event processed by the profile's event thread. Events are ordered by
 * `priority` first (lower runs first) and by `subPriority` (creation order)
 * within the same priority, matching the PriorityBlockingQueue they go into.
 */
sealed trait ProfileEvent extends Comparable[ProfileEvent] {
  def priority : Int
  def subPriority : Long

  override def compareTo(other: ProfileEvent) : Int = {
    // Use Integer.compare / Long.compare instead of subtraction: with the
    // previous `priority - other.priority` form, `0 - Int.MinValue` overflowed
    // to a negative value, inverting the ordering between normal-priority
    // events and the Int.MinValue-priority control events.
    val byPriority = Integer.compare(priority, other.priority)
    if (byPriority != 0) byPriority
    else java.lang.Long.compare(subPriority, other.subPriority)
  }
}

/** Convenience base class fixing priority and sub priority at construction. */
abstract class AbstractProfileEvent(major: Int, minor : Long) extends ProfileEvent {
  override def priority: Int = major
  override def subPriority: Long = minor
}
private[profile] object EventId {
  // Monotonic counter providing a unique sub priority per event so that events
  // of equal priority are processed in creation (FIFO) order.
  private val next = new AtomicLong(0)
  def nextId: Long = next.incrementAndGet()
}

// Highest priority control events (processed before everything else).
case class CloseProfile() extends AbstractProfileEvent(Int.MinValue, EventId.nextId)

case class TelnetConnect(id: Long, url: String, port: Int) extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)
case class TelnetDisconnect(id: Long) extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)

case class UserCommand(data: String) extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)
case class SendData(data: String, silent: Boolean = false) extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)

case class ProfileLog(on: Boolean, color: Boolean) extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)

case class MongoStart() extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)
case class MongoInit(client: MongoClient, db: MongoDatabase) extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)
case class MongoStop() extends AbstractProfileEvent(Int.MinValue + 1, EventId.nextId)

case class ClientEvent(event: ClientCaller) extends AbstractProfileEvent(Int.MinValue + 2, EventId.nextId)

// Normal priority events (telnet traffic, lifecycle requests).
case class TelnetError(data: String) extends AbstractProfileEvent(0, EventId.nextId)
case class TelnetRecv(data: String, ga: Boolean) extends AbstractProfileEvent(0, EventId.nextId)
case class TelnetGMCP(data: String) extends AbstractProfileEvent(0, EventId.nextId)

case class ProfileConnect() extends AbstractProfileEvent(0, EventId.nextId)
case class ProfileDisconnect() extends AbstractProfileEvent(0, EventId.nextId)

case class ClientStart() extends AbstractProfileEvent(0, EventId.nextId)
case class ClientStop() extends AbstractProfileEvent(0, EventId.nextId)
class Profile(private var profileConfig: ProfileConfig, mainWindow: MainWindow) extends AutoCloseable
with HasHighlight {
import Profile.log
import Util.closeQuietly
  val profilePanel = new ProfilePanel(mainWindow, this)
  val name: String = profileConfig.name
  val slog = new PrefixSystemLog(s"[$name]: ", mainWindow.slog)

  mainWindow.tabbedPane.addProfile(name, profilePanel)

  val logDir = new File(ConfigManager.getProfileDir(name), "log")
  logDir.mkdir()

  // Single event thread: all mutable profile state below is only touched from it.
  private val thread = new Thread(() => threadLoop(), "ProfileThread: " + name)
  private val windows = scala.collection.mutable.Map[String, SplittableTextArea]()
  private val threadQueue = new PriorityBlockingQueue[ProfileEvent]()
  private val running = new AtomicBoolean(true)

  private var telnet : Option[Telnet] = None
  private var client : Option[Client] = None
  private var mongo : Option[Mongo] = None
  private var db : Option[(MongoClient, MongoDatabase)] = None
  // Data handed back by a stopped client so a restarted client can resume.
  private var clientReloadData = new ReloadData
  private var schedulerState = List.empty[String]

  val console = new ConsoleTextArea(profileConfig, this)
  windows("console") = console

  console.addLine("profile: " + profileConfig.name)

  thread.setUncaughtExceptionHandler((t: Thread, e: Throwable) => {
    e.printStackTrace()
  })

  thread.start()

  profilePanel.setProfileConfig(profileConfig)
  profilePanel.setContents(console)
  setProfileConfig(profileConfig)

  // Honour the configured autostart / autolog options.
  if (profileConfig.javaConfig.clientMode == "autostart") {
    offer(ClientStart())
  }

  if (profileConfig.mongoConfig.enabled) {
    offer(MongoStart())
  }

  if (profileConfig.autoLog == "without color" || profileConfig.autoLog == "both") {
    offer(ProfileLog(true, false))
  }

  if (profileConfig.autoLog == "with color" || profileConfig.autoLog == "both") {
    offer(ProfileLog(true, true))
  }
  // Replace the active configuration and propagate it to the panel and all
  // text windows (fonts etc.). Synchronized because it can be called from the
  // UI thread as well as during construction.
  def setProfileConfig(profileConfig: ProfileConfig): Unit = synchronized {
    this.profileConfig = profileConfig
    profilePanel.setProfileConfig(profileConfig)
    windows.values.foreach({ w =>
      w.setProfileConfig(profileConfig)
      w.setActiveFont(profileConfig.consoleWindow.font.toFont)
      w.repaint()
    })
  }

  // Connection control: these only enqueue events; the event thread does the work.
  def connect(): Unit = offer(ProfileConnect())

  def reconnect(): Unit = {
    offer(ProfileDisconnect())
    offer(ProfileConnect())
  }

  def disconnect(): Unit = offer(ProfileDisconnect())
  /**
   * Main event loop: takes events from the priority queue (lowest priority
   * value first, FIFO within equal priority) and dispatches them until the
   * profile shuts down. All mutable profile state is only touched here.
   */
  private def threadLoop() : Unit = {
    while(running.get()) {
      try {
        val event = threadQueue.take()

        event match {
          case TelnetConnect(id, url, port) =>
            console.addLine(ScalaUtils.encodeColor("0") + "--connected--")
            slog.info(s"connected $telnet")
            withClient(_.onConnect(id, url, port))

          case TelnetError(data) =>
            slog.info(s"telnet error: $data")

          case TelnetDisconnect(id) =>
            onDisconnect(id)
            slog.info(s"disconnected $id")

          case TelnetRecv(data, ga) =>
            console.processText(data, ga)

          case TelnetGMCP(data) =>
            withClient(_.handleGmcp(data))

          // A user command is first offered to the client script; only when the
          // script does not consume it is it sent to the server.
          case UserCommand(data) =>
            client match {
              case Some(c) =>
                if(!c.handleCommand(data)) {
                  sendNow(data, false)
                }
              case None => sendNow(data, false)
            }

          case CloseProfile() =>
            closeQuietly(telnet.foreach(_.close()))
            closeQuietly(client.foreach(_.shutdown()))
            closeQuietly(mongo.foreach(_.close()))
            mainWindow.tabbedPane.remove(profilePanel)

          case ProfileConnect() =>
            telnet match {
              case Some(_) => slog.error(s"already connected")
              case None =>
                telnet = Some(new Telnet(this, profileConfig))
                slog.info(f"starting connection to ${profileConfig.telnetConfig.host}:" +
                  f"${profileConfig.telnetConfig.port}")
                telnet.foreach(_.connect())
            }

          case ProfileDisconnect() =>
            telnet.foreach {t =>
              onDisconnect(t.id)
              t.close()
            }

          case ClientEvent(clientCaller) =>
            clientCaller.callOnClient()

          // Two-phase client start: construct the script, then initialise it;
          // a failed init schedules an immediate stop.
          case ClientStart() =>
            Try {
              client match {
                case Some(_) => throw new RuntimeException(s"failed to init client, already has a client")
                case None => ScriptLoader.constructScript(this, profileConfig)
              }
            } match {
              case Failure(e) =>
                slog.error(f"failed to init script: ${e.getMessage}")
                log.error(f"failed to init script", e)
              case Success(script) =>
                this.client = Some(script)

                Try {
                  script.init(new ProfileProxy(this), clientReloadData)
                  db.foreach(d=> script.initDB(d._1, d._2))
                } match {
                  case Failure(e) =>
                    slog.error(s"failed to init client, won't autostart, ${e.getMessage}")
                    offer(ClientStop())
                  case Success(_) => slog.info(s"started client successfully")
                }
            }

          // Stopping a client preserves its scheduler state and reload data so
          // a later ClientStart can resume where it left off.
          case ClientStop() =>
            client match {
              case Some(scr) =>
                client = None
                schedulerState = scr.schedulerState
                clientReloadData = scr.shutdown()
              case None =>
                slog.info(f"no client to shutdown")
            }

          case SendData(cmds, silent) =>
            sendNow(cmds, silent)

          case ProfileLog(on, color) =>
            console.log(on, color)

          case MongoStart() =>
            mongo = Some(new Mongo(this, profileConfig))
            db = None

          case MongoStop() =>
            closeQuietly(mongo.foreach(_.close()))
            db = None
            mongo = None

          // Only accept the DB handle if mongo has not been stopped meanwhile.
          case MongoInit(mongoClient: MongoClient, db: MongoDatabase) =>
            if (mongo.isDefined) {
              this.db = Some((mongoClient, db))
              withClient(_.initDB(mongoClient, db))
            }

          case unhandledEvent =>
            log.error(s"unhandled event $unhandledEvent")
        }
      } catch {
        case to: ClientTimeoutException => clientTimedOut(to)
        // NOTE(review): catching Throwable keeps the loop alive but also
        // swallows fatal VM errors here — consider scala.util.control.NonFatal.
        case e: Throwable =>
          log.error("event handling failure", e)
          slog.error(s"event handling failure: ${e.getMessage}")
      }
    }

    slog.info(s"event thread exiting")
  }
  // Collapse every split text window back into a single view.
  def unsplitAll(): Unit = windows.values.foreach(_.unsplit())

  // Enqueue an event for the event thread; failures are logged (the queue is
  // unbounded, so offer should not normally fail).
  def offer(event: ProfileEvent): Unit = {
    if (!threadQueue.offer(event)) {
      slog.error(f"failed to offer event $event")
      log.error(f"failed to offer event $event")
    }
  }

  // Mongo / client lifecycle helpers: all of them only enqueue events.
  def mongoStart(): Unit = offer(MongoStart())
  def mongoStop(): Unit = offer(MongoStop())

  def mongoRestart(): Unit = {
    offer(MongoStop())
    offer(MongoStart())
  }

  def clientStart(): Unit = offer(ClientStart())

  def clientStop(): Unit = offer(ClientStop())

  def clientRestart(): Unit = {
    offer(ClientStop())
    offer(ClientStart())
  }
  // Shut the profile down exactly once: enqueue CloseProfile and wait for the
  // event thread to drain (bounded by the client timeout plus a grace period).
  override def close(): Unit = {
    if(running.compareAndSet(true, false)) {
      slog.info(s"closing profile")
      offer(CloseProfile())
      thread.join(profileConfig.javaConfig.clientTimeout + 500)
    }
  }

  // Surface an exception raised inside the client script to the system log.
  def handleClientException(throwable: Throwable): Unit = {
    slog.error(s"received exception from client", throwable)
  }
def withClient[RT](f: (Client) => RT): Option[RT] = {
try {
client.map(f)
} catch {
case to: ClientTimeoutException =>
clientTimedOut(to)
None
case _ : Throwable =>
None
}
}
/**
* <p>Handle disconnect whether by server or client.</p>
*
* <p><STRONG>This should only be called by the event thread!</STRONG></p>
*
*/
private def onDisconnect(id: Long): Unit = {
telnet.foreach{ t=>
if (t.id == id) {
console.addLine(ScalaUtils.encodeColor("0") + "--disconnected--")
withClient(_.onDisconnect(id))
}
telnet = None
}
}
/**
* <p>Handle client timing out.</p>
*
* <p><STRONG>This should only be called by the event thread!</STRONG></p>
*/
  private def clientTimedOut(clientTimeoutException: ClientTimeoutException) : Unit = {
    log.error(s"script ran out of time to respond\\n${clientTimeoutException.tinfo}")
    slog.error(s"script ran out of time to respond\\n${clientTimeoutException.tinfo}")
    // Stop the wedged client; in autostart mode immediately queue a restart.
    offer(ClientStop())
    if (profileConfig.javaConfig.clientMode == "autostart") {
      offer(ClientStart())
    }
  }
/**
* <p>Send text now, without using event loop.</p>
*
* <p><STRONG>This should only be called by the event thread!</STRONG></p>
*
*/
  private def sendNow(cmds: String, silent: Boolean) : Unit = {
    telnet match {
      case Some(t) =>
        // Send each line as its own command; echo to the console unless silenced.
        cmds.split("\\n").foreach { cmd =>
          t.send(cmd + "\\n")
          if (!silent) console.echoCommand(cmd)
        }
      // No connection: drop the command but leave a trace in the profile log.
      case None => slog.info(s"command ignored: $cmds")
    }
  }
/**
* <p><STRONG>This should *only* be called by the client.</STRONG></p>
*/
private[profile] def setWindowGraph(windowReference: WindowReference): java.lang.Boolean = {
@tailrec
def getNames(windows: List[WindowReference], names: List[String] = List.empty): List[String] = {
if (windows.isEmpty) {
names
} else {
val newNames = windows.map(_.getName).filter(!_.isEmpty)
val newWindows = windows.filter(_.isInstanceOf[SplitWindow])
.map(_.asInstanceOf[SplitWindow])
.flatMap(sw => List(sw.getTopLeft, sw.getBotRight))
getNames(newWindows, names ++ newNames)
}
}
val names = getNames(List(windowReference))
if (names.exists(windows.get(_).isEmpty)) {
slog.error(s"not every name in $names existed in ${windows.keys}")
return false
}
if (!names.contains("console")) {
slog.error(s"window graph did not contain windows console")
return false
}
def convertToComponents(windowReference: WindowReference): (Component, List[(JSplitPane, Float)]) = {
windowReference match {
case sw: SplitWindow =>
val (c1, l1) = convertToComponents(sw.getTopLeft)
val (c2, l2) = convertToComponents(sw.getBotRight)
val splitPanel = new JSplitPane()
splitPanel.setDividerSize(2)
splitPanel.setBorder(BorderFactory.createEmptyBorder())
if (sw.isHorizontal) {
splitPanel.setOrientation(JSplitPane.HORIZONTAL_SPLIT)
splitPanel.setLeftComponent(c1)
splitPanel.setRightComponent(c2)
} else {
splitPanel.setOrientation(JSplitPane.VERTICAL_SPLIT)
splitPanel.setTopComponent(c1)
splitPanel.setRightComponent(c2)
}
(splitPanel, l1 ++ l2 :+ (splitPanel, sw.getDividerLocation))
case _ => (windows(windowReference.getName), List.empty)
}
}
val (component, dividerLocations) = convertToComponents(windowReference)
profilePanel.setContents(component)
// really terrible hack
Util.invokeLater(10, () => SwingUtilities.invokeLater(() => {
dividerLocations.foreach(s => s._1.setDividerLocation(s._2))
}))
true
}
/**
* <p><STRONG>This should *only* be called by the client.</STRONG></p>
*/
  private[profile] def getWindowNames: util.List[String] = {
    import scala.collection.JavaConverters._
    // Expose the registered window names as a Java list for the scripting client.
    windows.keys.toList.asJava
  }
/**
* <p><STRONG>This should *only* be called by the client.</STRONG></p>
*/
private[profile] def createTextWindow(name: String): TextWindowInterface = {
windows.getOrElseUpdate(name, {
val sta = new SplittableTextArea(profileConfig, this)
sta.setActiveFont(profileConfig.consoleWindow.font.toFont)
sta
})
}
/**
* <p><STRONG>This should *only* be called by the client.</STRONG></p>
*/
private[profile] def getTextWindow(name: String): TextWindowInterface = {
windows.getOrElse(name, throw new RuntimeException(s"no window found with name $name"))
}
/**
* <p><STRONG>This should *only* be called by the client.</STRONG></p>
*/
def getScheduler(reloaders: Seq[RunnableReloader[_ <: Runnable]]): SchedulerInterface = {
withClient(_.getScheduler(schedulerState, reloaders)).getOrElse(throw new RuntimeException("client not found"))
}
}
object Profile {
  // Logger shared by all Profile instances.
  val log = Logger(LoggerFactory.getLogger(Profile.getClass))
}
| austinmiller/augustmc | src/main/scala/aug/profile/Profile.scala | Scala | apache-2.0 | 15,685 |
package org.usagram.clarify.validator
import org.usagram.clarify.error.ShouldBeGreaterThanOrEqualTo
import org.scalatest._
import org.scalatest.OptionValues._
/** Spec for the GreaterThanOrEqualTo validator and its factory methods. */
class GreaterThanOrEqualToSpec extends FunSpec {
  import Matchers._
  describe(".apply") {
    it("returns a GreaterThanOrEqualTo with given that") {
      val gte = GreaterThanOrEqualTo(10)
      val result = gte.validate(9)
      result.value.asInstanceOf[ShouldBeGreaterThanOrEqualTo[Int]].value shouldBe 10
    }
  }
  describe(".zero") {
    it("returns a GreaterThanOrEqualTo with that = 0") {
      val gte = GreaterThanOrEqualTo.zero[Int]
      val result = gte.validate(-1)
      result.value.asInstanceOf[ShouldBeGreaterThanOrEqualTo[Int]].value shouldBe 0
    }
  }
  describe(".one") {
    it("returns a GreaterThanOrEqualTo with that = 1") {
      val gte = GreaterThanOrEqualTo.one[Int]
      val result = gte.validate(0)
      result.value.asInstanceOf[ShouldBeGreaterThanOrEqualTo[Int]].value shouldBe 1
    }
  }
  describe("#validate") {
    val gte = GreaterThanOrEqualTo.zero[Int]
    describe("when be greater than or equal to that") {
      it("returns no errors") {
        val result = gte.validate(0)
        result shouldBe empty
      }
    }
    describe("when be not greater than or equal to that") {
      it("returns ShouldBeGreaterThanOrEqualTo") {
        val result = gte.validate(-1)
        result shouldBe defined
        result.value.asInstanceOf[ShouldBeGreaterThanOrEqualTo[Int]].value shouldBe 0
      }
    }
  }
}
| takkkun/clarify | core/src/test/scala/org/usagram/clarify/validator/GreaterThanOrEqualToSpec.scala | Scala | mit | 1,581 |
/* Copyright 2009-2011 Jay Conrod
*
* This file is part of Tungsten.
*
* Tungsten is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2 of
* the License, or (at your option) any later version.
*
* Tungsten is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Tungsten. If not, see
* <http://www.gnu.org/licenses/>.
*/
package tungsten.llvm
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.combinator.lexical.Lexical
import scala.util.parsing.input._
import scala.util.matching.Regex
import Utilities._
/**
 * Tokenizer for LLVM assembly. Produces reserved words/operators, labels,
 * strings, numeric literals, integer types (iN), and %/@-prefixed symbols.
 * NOTE(review): alternation order in `token` matters — e.g. `label` must be
 * tried before `string`/`integer` patterns that could match a prefix.
 */
object Lexer extends Lexical with RegexParsers {
  override type Elem = Char
  // Reserved punctuation recognized as operator tokens.
  val reservedOperators = Set("=", ":", "{", "}", "(", ")", "[", "]", "*", ",")
  // Reserved LLVM keywords: directives, types, instructions, comparison
  // predicates, and parameter/function attributes.
  val reservedWords = Set("datalayout", "define", "nounwind", "target", "triple", "to",
                          "align", "label", "void", "float", "double",
                          "add", "alloca", "and", "asr", "bitcast", "br", "extractvalue",
                          "fadd", "fcmp", "fdiv", "fmul", "fpext", "fptosi", "fptoui", "frem",
                          "fsub", "fptrunc", "getelementptr", "icmp", "insertvalue",
                          "inttoptr", "load", "lsr", "mul", "or", "phi", "ptrtoint", "ret",
                          "sdiv", "sext", "shl", "sitofp", "srem", "store", "sub", "trunc",
                          "uitofp", "unreachable", "udiv", "urem", "xor", "zext",
                          "false", "oeq", "ogt", "oge", "olt", "ole", "ord", "ueq", "ugt", "uge",
                          "ult", "ule", "une", "uno", "true", "eq", "ne", "sgt", "sge", "slt",
                          "sle",
                          "zeroext", "signext", "inreg", "byval", "sret", "noalias",
                          "nocapture", "nest",
                          "alwaysinline", "inlinehint", "optsize", "noreturn", "nounwind",
                          "readnone", "readonly", "ssp", "sspreq", "noredzone",
                          "noimplicithint", "naked")
  // Whitespace is spaces/tabs/newlines; comments run from ';' to end of line
  // and are skipped along with whitespace.
  override def whitespaceChar: Parser[Elem] = elem(' ') | elem('\\t') | elem('\\n') | elem('\\r')
  def comment: Parser[Any] = elem(';') ~ rep(chrExcept('\\n')) ~ elem('\\n')
  def whitespace: Parser[Any] = rep(whitespaceChar | comment)
  override def errorToken(message: String): Token = {
    ErrorToken(message)
  }
  // An identifier-like word: reserved words become ReservedToken; any other
  // word must be followed by ':' and becomes a LabelToken.
  def word: Parser[Token] = {
    def checkReserved(s: String): Parser[Token] = {
      if (reservedWords(s))
        success(ReservedToken(s))
      else
        elem(':') ^^^ LabelToken(s)
    }
    regex(new Regex("[A-Za-z._$][A-Za-z0-9._$]*")) >> checkReserved
  }
  // Builds one alternation over all reserved operators: the sorted array is
  // folded so later (lexicographically larger) entries are tried first.
  def operator: Parser[Token] = {
    def parseOperator(r: String): Parser[ReservedToken] = {
      accept(r.toList) ^^ { s => ReservedToken(s.mkString) }
    }
    val reservedArray = new Array[String](reservedOperators.size)
    reservedOperators.copyToArray(reservedArray, 0)
    scala.util.Sorting.quickSort(reservedArray)
    val reservedParsers = reservedArray.toList.map(parseOperator _)
    val fail: Parser[ReservedToken] = failure("no matching reserved string")
    (fail /: reservedParsers) {(x, y) => y | x}
  }
  // Uppercase-only hex digit (as emitted in LLVM string escapes).
  def hexChar: Parser[Char] = {
    elem("hex character", { c => ('0' <= c && c <= '9') || ('A' <= c && c <= 'F') })
  }
  // One character inside a quoted string: either a backslash escape with two
  // hex digits, or any single character except '"' and newline.
  def stringChar: Parser[List[Char]] = {
    (elem('\\\\') ~ repN(2, hexChar) ^^ { case bs ~ hex => bs :: hex }) |
    (chrExcept('"', '\\n') ^^ { (c: Char) => List(c) })
  }
  // Quoted string literal; the surrounding quotes are kept in the token value.
  def string: Parser[StringToken] = {
    elem('"') ~ rep(stringChar) ~ elem('"') ^^ {
      case q1 ~ s ~ q2 => StringToken(q1 + s.flatten.mkString + q2)
    }
  }
  // Optionally-negated decimal integer literal.
  def integer: Parser[IntToken] = {
    opt('-') ~ rep1(digit) ^^ {
      case sign ~ digits => {
        val signStr = sign.map(_.toString).getOrElse("")
        IntToken(signStr + digits.mkString)
      }
    }
  }
  // Floating-point literal in one of three shapes: "1.", "1.5", optional
  // exponent ("1e9", ".5e-2"), or digits with a mandatory exponent.
  def float: Parser[FloatToken] = {
    val optSign = opt(elem('-') | elem('+')) ^^ { c => c.map(_.toString).getOrElse("") }
    val num = rep1(digit) ^^ { _.mkString }
    val optNum = opt(num) ^^ { _.getOrElse("") }
    val exp = (elem('e') | elem('E')) ~ optSign ~ num ^^ {
      case e ~ s ~ n => e + s + n
    }
    val optExp = opt(exp) ^^ { _.getOrElse("") }
    val float1 = optSign ~ (num <~ '.') ~ optNum ~ optExp ^^ {
      case s ~ n ~ f ~ e => s + n + '.' + f + e
    }
    val float2 = (optSign <~ '.') ~ num ~ optExp ^^ {
      case s ~ f ~ e => s + '.' + f + e
    }
    val float3 = optSign ~ num ~ exp ^^ {
      case s ~ n ~ e => s + n + e
    }
    (float1 | float2 | float3) ^^ { case s => FloatToken(s) }
  }
  // Integer type such as i1, i32, i64.
  def intType: Parser[IntTypeToken] = {
    elem('i') ~ rep1(digit) ^^ { case i ~ n => IntTypeToken(i + n.mkString) }
  }
  // Local (%) or global (@) symbol: a plain identifier, a quoted name, or a
  // purely numeric (anonymous) name; the prefix is kept in the token value.
  def symbol: Parser[SymbolToken] = {
    def normalSymbol = identifier
    def quotedSymbol = elem('"') ~ rep1(chrExcept('"')) ~ elem('"') ^^ {
      case q1 ~ s ~ q2 => q1 + s.mkString + q2
    }
    def numericSymbol = rep1(digit) ^^ { _.mkString }
    (elem('%') | elem('@')) ~ (normalSymbol | quotedSymbol | numericSymbol) ^^ {
      case prefix ~ sym => SymbolToken(prefix + sym) }
  }
  // Block label: a bare or quoted name immediately followed by ':'.
  def label: Parser[LabelToken] = {
    (identifier <~ ':' ^^ { case s => LabelToken(s) }) |
    (string <~ ':' ^^ { case s => LabelToken(s.value) })
  }
  def identifier: Parser[String] = regex(identifierRegex)
  // Top-level token alternation; order is significant (see object scaladoc).
  def token: Parser[Token] = {
    operator |
    word |
    label |
    string |
    float |
    integer |
    intType |
    symbol
  }
  // Test helper: lex exactly one token from the input or throw on failure.
  def test(input: String) = {
    val reader = new CharArrayReader(input.toArray)
    phrase(token)(reader) match {
      case Success(tok, _) => tok
      case error: NoSuccess => throw new RuntimeException(error.msg)
    }
  }
}
| jayconrod/tungsten | llvm/src/main/scala/tungsten/llvm/Lexer.scala | Scala | gpl-2.0 | 6,106 |
package com.darkknight.analytics.textsummary.textmodel
/**
* Created by apple on 1/20/17.
*/
object Main {
def main(args: Array[String]): Unit = {
val stories = new CSVFileReader(args(0))
//each story --> break into sentences --> phrases
//this is a list of stories from the csv file
val corpus = stories.getData().take(20)
// this is a list of stories in the form of basic components from which
//features shall be built
val storyModels = corpus.map(c => new StoryModel(c))
//execute the tile extractor for each story
storyModels.foreach(s => {
println("-----------------------------------------------")
println("Story number:"+s.index)
val (shortTitle,longTitle) = Extractor.titleExtractor(s)
println("short title candidates")
shortTitle.foreach(t => {println("\\t----- "+t)})
println("\\n longer title candidates")
longTitle.foreach(t => {println("--- "+t)})
val extract = Extractor.summaryExtractor(s)
println("\\n ------------- Extract ----------")
extract.foreach(e => println(e))
println("*********************************")
})
}
} | raviguntur/TextAnalytics | src/main/scala/com/darkknight/analytics/textsummary/textmodel/Main.scala | Scala | apache-2.0 | 1,158 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this thing except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import enablers.Definition
import exceptions.TestFailedException
import org.scalactic.Prettifier
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
/**
 * Spec for the `defined` matcher combined with `or`, driven by an implicit
 * [[Definition]] instance for a custom `Thing` type.
 *
 * NOTE(review): the assertions rely on `thisLineNumber - N` offsets relative
 * to the `intercept` calls a fixed number of lines above them. Do not insert
 * or remove lines between an `intercept` block and its corresponding
 * `failedCodeLineNumber` assertion.
 */
class ShouldBeDefinedLogicalOrImplicitSpec extends AnyFunSpec {
  private val prettifier = Prettifier.default
  val fileName: String = "ShouldBeDefinedLogicalOrImplicitSpec.scala"
  // Helpers that build the exact failure-message fragments ScalaTest produces.
  def wasEqualTo(left: Any, right: Any): String =
    FailureMessages.wasEqualTo(prettifier, left, right)
  def wasNotEqualTo(left: Any, right: Any): String =
    FailureMessages.wasNotEqualTo(prettifier, left, right)
  def equaled(left: Any, right: Any): String =
    FailureMessages.equaled(prettifier, left, right)
  def didNotEqual(left: Any, right: Any): String =
    FailureMessages.didNotEqual(prettifier, left, right)
  def wasNotDefined(left: Any): String =
    FailureMessages.wasNotDefined(prettifier, left)
  def wasDefined(left: Any): String =
    FailureMessages.wasDefined(prettifier, left)
  // Builds the expected aggregate message for all(xs)-style failures,
  // embedding the file name and failing line number.
  def allError(message: String, lineNumber: Int, left: Any): String = {
    val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(prettifier, 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
    FailureMessages.allShorthandFailed(prettifier, messageWithIndex, left)
  }
  // Minimal type with its own notion of "defined", plus one defined and one
  // undefined instance used throughout the spec.
  trait Thing {
    def isDefined: Boolean
  }
  val something = new Thing {
    val isDefined = true
  }
  val nothing = new Thing {
    val isDefined = false
  }
  // The implicit Definition instance that enables `be (defined)` for Thing.
  implicit def definitionOfThing[T <: Thing]: Definition[T] =
    new Definition[T] {
      def isDefined(thing: T): Boolean = thing.isDefined
    }
  describe("Defined matcher") {
    describe("when work with 'thing should be (defined)'") {
      it("should do nothing when thing is defined") {
        something should (be (defined) or be (something))
        nothing should (be (defined) or be (nothing))
        something should (be (defined) or be (nothing))
        something should (be (something) or be (defined))
        something should (be (nothing) or be (defined))
        nothing should (be (nothing) or be (defined))
        something should (be (defined) or equal (something))
        nothing should (be (defined) or equal (nothing))
        something should (be (defined) or equal (nothing))
        something should (equal (something) or be (defined))
        something should (equal (nothing) or be (defined))
        nothing should (equal (nothing) or be (defined))
      }
      it("should throw TestFailedException with correct stack depth when thing is not defined") {
        val caught1 = intercept[TestFailedException] {
          nothing should (be (defined) or be (something))
        }
        assert(caught1.message === Some(wasNotDefined(nothing) + ", and " + wasNotEqualTo(nothing, something)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught2 = intercept[TestFailedException] {
          nothing should (be (something) or be (defined))
        }
        assert(caught2.message === Some(wasNotEqualTo(nothing, something) + ", and " + wasNotDefined(nothing)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught3 = intercept[TestFailedException] {
          nothing should (be (defined) or equal (something))
        }
        assert(caught3.message === Some(wasNotDefined(nothing) + ", and " + didNotEqual(nothing, something)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught4 = intercept[TestFailedException] {
          nothing should (equal (something) or be (defined))
        }
        assert(caught4.message === Some(didNotEqual(nothing, something) + ", and " + wasNotDefined(nothing)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }
    describe("when work with 'thing should not be defined'") {
      it("should do nothing when thing is not defined") {
        nothing should (not be defined or not be something)
        something should (not be defined or not be nothing)
        nothing should (not be defined or not be nothing)
        nothing should (not be something or not be defined)
        nothing should (not be nothing or not be defined)
        something should (not be nothing or not be defined)
        nothing should (not be defined or not equal something)
        something should (not be defined or not equal nothing)
        nothing should (not be defined or not equal nothing)
        nothing should (not equal something or not be defined)
        nothing should (not equal nothing or not be defined)
        something should (not equal nothing or not be defined)
      }
      it("should throw TestFailedException with correct stack depth when thing is defined") {
        val caught1 = intercept[TestFailedException] {
          something should (not be defined or not be something)
        }
        assert(caught1.message === Some(wasDefined(something) + ", and " + wasEqualTo(something, something)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught2 = intercept[TestFailedException] {
          something should (not be something or not be defined)
        }
        assert(caught2.message === Some(wasEqualTo(something, something) + ", and " + wasDefined(something)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught3 = intercept[TestFailedException] {
          something should (not be defined or not equal something)
        }
        assert(caught3.message === Some(wasDefined(something) + ", and " + equaled(something, something)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught4 = intercept[TestFailedException] {
          something should (not equal something or not be defined)
        }
        assert(caught4.message === Some(equaled(something, something) + ", and " + wasDefined(something)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }
    describe("when work with 'all(xs) should be (defined)'") {
      it("should do nothing when all(xs) is defined") {
        all(List(something)) should (be (defined) or be (something))
        all(List(nothing)) should (be (defined) or be (nothing))
        all(List(something)) should (be (defined) or be (nothing))
        all(List(something)) should (be (something) or be (defined))
        all(List(something)) should (be (nothing) or be (defined))
        all(List(nothing)) should (be (nothing) or be (defined))
        all(List(something)) should (be (defined) or equal (something))
        all(List(nothing)) should (be (defined) or equal (nothing))
        all(List(something)) should (be (defined) or equal (nothing))
        all(List(something)) should (equal (something) or be (defined))
        all(List(something)) should (equal (nothing) or be (defined))
        all(List(nothing)) should (equal (nothing) or be (defined))
      }
      it("should throw TestFailedException with correct stack depth when xs is not defined") {
        val left1 = List(nothing)
        val caught1 = intercept[TestFailedException] {
          all(left1) should (be (something) or be (defined))
        }
        assert(caught1.message === Some(allError(wasNotEqualTo(nothing, something) + ", and " + wasNotDefined(nothing), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left2 = List(nothing)
        val caught2 = intercept[TestFailedException] {
          all(left2) should (be (defined) or be (something))
        }
        assert(caught2.message === Some(allError(wasNotDefined(nothing) + ", and " + wasNotEqualTo(nothing, something), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left3 = List(nothing)
        val caught3 = intercept[TestFailedException] {
          all(left3) should (equal (something) or be (defined))
        }
        assert(caught3.message === Some(allError(didNotEqual(nothing, something) + ", and " + wasNotDefined(nothing), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left4 = List(nothing)
        val caught4 = intercept[TestFailedException] {
          all(left4) should (be (defined) or equal (something))
        }
        assert(caught4.message === Some(allError(wasNotDefined(nothing) + ", and " + didNotEqual(nothing, something), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }
    describe("when work with 'all(xs) should not be defined'") {
      it("should do nothing when xs is not defined") {
        all(List(nothing)) should (not be defined or not be something)
        all(List(something)) should (not be defined or not be nothing)
        all(List(nothing)) should (not be defined or not be nothing)
        all(List(nothing)) should (not be something or not be defined)
        all(List(nothing)) should (not be nothing or not be defined)
        all(List(something)) should (not be nothing or not be defined)
        all(List(nothing)) should (not be defined or not equal something)
        all(List(something)) should (not be defined or not equal nothing)
        all(List(nothing)) should (not be defined or not equal nothing)
        all(List(nothing)) should (not equal something or not be defined)
        all(List(nothing)) should (not equal nothing or not be defined)
        all(List(something)) should (not equal nothing or not be defined)
      }
      it("should throw TestFailedException with correct stack depth when xs is not defined") {
        val left1 = List(something)
        val caught1 = intercept[TestFailedException] {
          all(left1) should (not be something or not be defined)
        }
        assert(caught1.message === Some(allError(wasEqualTo(something, something) + ", and " + wasDefined(something), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left2 = List(something)
        val caught2 = intercept[TestFailedException] {
          all(left2) should (not be defined or not be something)
        }
        assert(caught2.message === Some(allError(wasDefined(something) + ", and " + wasEqualTo(something, something), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left3 = List(something)
        val caught3 = intercept[TestFailedException] {
          all(left3) should (not equal something or not be defined)
        }
        assert(caught3.message === Some(allError(equaled(something, something) + ", and " + wasDefined(something), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left4 = List(something)
        val caught4 = intercept[TestFailedException] {
          all(left4) should (not be defined or not equal something)
        }
        assert(caught4.message === Some(allError(wasDefined(something) + ", and " + equaled(something, something), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }
  }
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/ShouldBeDefinedLogicalOrImplicitSpec.scala | Scala | apache-2.0 | 13,299 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.planner.logical.steps
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.plans.LogicalPlan
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.{LogicalPlanningContext, PlanTransformer}
import org.neo4j.cypher.internal.compiler.v2_3.planner.{CantHandleQueryException, PlannerQuery}
import org.neo4j.cypher.internal.compiler.v2_3.spi.PlanContext
import org.neo4j.cypher.internal.frontend.v2_3.ast._
import org.neo4j.cypher.internal.frontend.v2_3.notification.{IndexHintUnfulfillableNotification, JoinHintUnfulfillableNotification}
import org.neo4j.cypher.internal.frontend.v2_3.{IndexHintException, JoinHintException}
/**
 * Verifies that the produced logical plan solves the expected query, modulo
 * hints that cannot be fulfilled; unfulfillable hints are either reported as
 * errors or logged as notifications depending on configuration.
 */
object verifyBestPlan extends PlanTransformer[PlannerQuery] {
  def apply(plan: LogicalPlan, expected: PlannerQuery)(implicit context: LogicalPlanningContext): LogicalPlan = {
    val actual = plan.solved
    if (expected != actual) {
      val unfulfillableIndexHints = findUnfulfillableIndexHints(expected, context.planContext)
      val unfulfillableJoinHints = findUnfulfillableJoinHints(expected, context.planContext)
      val expectedSansUnfulfillable = expected.withoutHints(unfulfillableIndexHints ++ unfulfillableJoinHints)
      if (expectedSansUnfulfillable != actual) {
        if (expected.withoutHints(expected.allHints) != actual.withoutHints(actual.allHints)) {
          // unknown planner issue failed to find plan (without regard for differences in hints)
          throw new CantHandleQueryException(s"Expected \\n$expected \\n\\n\\nInstead, got: \\n$actual")
        } else {
          // unknown planner issue failed to find plan matching hints (i.e. "implicit hints")
          throw new CantHandleQueryException(s"Expected \\n${expected.allHints} \\n\\n\\nInstead, got: \\n${actual.allHints}")
        }
      } else {
        // The only difference was the unfulfillable hints: surface them.
        processUnfulfilledIndexHints(context, unfulfillableIndexHints)
        processUnfulfilledJoinHints(context, unfulfillableJoinHints)
      }
    }
    plan
  }
  private def processUnfulfilledIndexHints(context: LogicalPlanningContext, hints: Set[UsingIndexHint]) = {
    if (hints.nonEmpty) {
      // hints referred to non-existent indexes ("explicit hints")
      if (context.useErrorsOverWarnings) {
        val first = hints.head
        throw new IndexHintException(first.identifier.name, first.label.name, first.property.name, "No such index")
      } else {
        for (hint <- hints)
          context.notificationLogger.log(IndexHintUnfulfillableNotification(hint.label.name, hint.property.name))
      }
    }
  }
  private def processUnfulfilledJoinHints(context: LogicalPlanningContext, hints: Set[UsingJoinHint]) = {
    if (hints.nonEmpty) {
      // we were unable to plan hash join on some requested nodes
      if (context.useErrorsOverWarnings) {
        val first = hints.head
        throw new JoinHintException(first.identifiers.map(_.name).reduceLeft(_ + ", " + _), "Unable to plan hash join")
      } else {
        for (hint <- hints)
          context.notificationLogger.log(JoinHintUnfulfillableNotification(hint.identifiers.map(_.name).toSeq))
      }
    }
  }
  // An index hint is unfulfillable when neither a regular nor a unique index
  // exists for its label/property combination. Other hint kinds are ignored.
  private def findUnfulfillableIndexHints(query: PlannerQuery, planContext: PlanContext): Set[UsingIndexHint] = {
    query.allHints.collect {
      case hint @ UsingIndexHint(Identifier(_), LabelName(label), PropertyKeyName(property))
        if planContext.getIndexRule(label, property).isEmpty &&
           planContext.getUniqueIndexRule(label, property).isEmpty => hint
    }
  }
  // Every join hint present in the query is a candidate for being unfulfilled.
  private def findUnfulfillableJoinHints(query: PlannerQuery, planContext: PlanContext): Set[UsingJoinHint] = {
    query.allHints.flatMap {
      case joinHint: UsingJoinHint => Some(joinHint)
      case _ => None
    }
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/planner/logical/steps/verifyBestPlan.scala | Scala | apache-2.0 | 4,855 |
package com.mz.training.jdbc
import java.sql.ResultSet
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import com.mz.training.common.jdbc.JDBCConnectionActor
import com.mz.training.common.jdbc.JDBCConnectionActor.{JdbcSelect, JdbcSelectResult}
import com.mz.training.common.supervisors.DataSourceSupervisorActor
import com.mz.training.domains.user.User
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, FunSuiteLike, Matchers}
import scala.concurrent.duration._
/**
* Created by zemo on 04/10/15.
*/
/** Integration-style test for JDBCConnectionActor running inside a TestKit actor system. */
class JDBCConnectionActorTest extends TestKit(ActorSystem("test-jdbc-demo-JDBCConnectionActorTest")) with FunSuiteLike
  with BeforeAndAfterAll
  with Matchers
  with ImplicitSender {
  implicit val timeOut: akka.util.Timeout = 2000.millisecond
  override protected def beforeAll(): Unit = {
    super.beforeAll()
    // The JDBC actors resolve their data source through this supervisor.
    system.actorOf(DataSourceSupervisorActor.props, DataSourceSupervisorActor.actorName)
  }
  override def afterAll(): Unit = {
    system.terminate()
  }
//  test("GetConnection timeout") {
//    val jdbcActor = system.actorOf(JDBCConnectionActor.props)
//    val query = "Select from users where id = 0"
//    def mapper (resultSet: ResultSet): Option[User] = {None}
//    jdbcActor ! JdbcSelect(query, mapper)
//    expectNoMsg(2 seconds)
//  }
  test("select operation") {
    val connectionActor = system.actorOf(JDBCConnectionActor.props)
    val sql = "Select from users where id = 0"
    // The mapper deliberately maps every row to None for this test.
    def mapper(resultSet: ResultSet): Option[User] = None
    // Issue the same select twice against the one actor.
    connectionActor ! JdbcSelect(sql, mapper)
    expectMsgAnyOf(JdbcSelectResult(None))
    connectionActor ! JdbcSelect(sql, mapper)
    expectMsgAnyOf(JdbcSelectResult(None))
  }
}
| michalzeman/angular2-training | akka-http-server/src/test/scala/com/mz/training/jdbc/JDBCConnectionActorTest.scala | Scala | mit | 1,704 |
package repositories
import com.google.inject.Inject
import common.slick.SchemaInitializer
import models.MessageAttachmentTable
import models.MessageAttachmentTable.MessageAttachmentTable
import scala.concurrent.ExecutionContext
// Schema initializer for the message-attachment table; wired up by Guice.
class MessageAttachmentSchemaInitializer @Inject()(implicit val executionContext: ExecutionContext)
  extends SchemaInitializer[MessageAttachmentTable] {
  import driver.api._
  // Table name, taken from the table's companion definition.
  override val name: String = MessageAttachmentTable.name
  // Slick TableQuery handed to SchemaInitializer via the overridden member.
  override val table = TableQuery[MessageAttachmentTable]
}
| sysgears/apollo-universal-starter-kit | modules/chat/server-scala/src/main/scala/repositories/MessageAttachmentSchemaInitializer.scala | Scala | mit | 528 |
package me.axiometry.blocknet.entity
trait FishingBob extends Projectile | Axiometry/Blocknet | blocknet-api/src/main/scala/me/axiometry/blocknet/entity/FishingBob.scala | Scala | bsd-2-clause | 73 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.communication.socket
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.{Matchers, BeforeAndAfter, OneInstancePerTest, FunSpec}
import org.scalatestplus.mockito.MockitoSugar
import org.mockito.Mockito._
import org.zeromq.ZMsg
/**
 * Unit tests for [[JeroMQSocket]] using a mocked [[ZeroMQSocketRunnable]].
 *
 * The runnable's `run()` is stubbed to spin until `running` is flipped, so the
 * socket's background thread stays alive exactly as long as each test needs it.
 */
class JeroMQSocketSpec extends FunSpec with MockitoSugar
  with OneInstancePerTest with BeforeAndAfter with Matchers
{
  private val runnable = mock[ZeroMQSocketRunnable]
  // @volatile: flag is read by the socket thread and written by the test thread.
  @volatile private var running = true
  // Mock the running of the runnable for the tests
  doAnswer(new Answer[Unit] {
    override def answer(invocation: InvocationOnMock): Unit = while (running) {
      Thread.sleep(1)
    }
  }).when(runnable).run()
  // Mock the close of the runnable to shutdown
  doAnswer(new Answer[Unit] {
    override def answer(invocation: InvocationOnMock): Unit = running = false
  }).when(runnable).close()
  private val socket: JeroMQSocket = new JeroMQSocket(runnable)
  after {
    // Safety net: make sure the spinning thread terminates even if a test fails early.
    running = false
  }
  describe("JeroMQSocket") {
    describe("#send") {
      it("should offer a message to the runnable") {
        val message: String = "Some Message"
        val expected = ZMsg.newStringMsg(message)
        socket.send(message.getBytes)
        verify(runnable).offer(expected)
      }
      // Fixed typo in the test description ("should thrown and" -> "should throw an").
      it("should throw an AssertionError when socket is no longer alive") {
        socket.close()
        intercept[AssertionError] {
          socket.send("".getBytes)
        }
      }
    }
    describe("#close") {
      it("should close the runnable") {
        socket.close()
        verify(runnable).close()
      }
      it("should close the socket thread") {
        socket.close()
        socket.isAlive should be (false)
      }
    }
    describe("#isAlive") {
      it("should evaluate to true when the socket thread is alive") {
        socket.isAlive should be (true)
      }
      it("should evaluate to false when the socket thread is dead") {
        socket.close()
        socket.isAlive should be (false)
      }
    }
  }
}
| lresende/incubator-toree | communication/src/test/scala/org/apache/toree/communication/socket/JeroMQSocketSpec.scala | Scala | apache-2.0 | 2,878 |
package com.mesosphere.universe
/**
 * Conforms to: https://github.com/mesosphere/universe/blob/version-2.x/repo/meta/schema/resource-schema.json
 *
 * Both sections are optional per the linked schema.
 */
case class Resource(
  assets: Option[Assets] = None,
  images: Option[Images] = None
)
| movicha/cosmos | cosmos-model/src/main/scala/com/mesosphere/universe/Resource.scala | Scala | apache-2.0 | 241 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.text.SimpleDateFormat
import java.util.{Calendar, TimeZone}
import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import scala.util.Try
/**
 * Returns the current date at the start of query evaluation.
 * All calls of current_date within the same query return the same value.
 *
 * There is no code generation since this expression should get constant folded by the optimizer.
 */
case class CurrentDate() extends LeafExpression with CodegenFallback {
  override def foldable: Boolean = true
  override def nullable: Boolean = false
  override def dataType: DataType = DateType
  // NOTE(review): each eval() call reads the wall clock again; the
  // "same value within a query" contract relies on the optimizer constant-folding
  // this expression (foldable = true) before execution — confirm against the planner.
  override def eval(input: InternalRow): Any = {
    DateTimeUtils.millisToDays(System.currentTimeMillis())
  }
}
/**
 * Returns the current timestamp at the start of query evaluation.
 * All calls of current_timestamp within the same query return the same value.
 *
 * There is no code generation since this expression should get constant folded by the optimizer.
 */
case class CurrentTimestamp() extends LeafExpression with CodegenFallback {
  override def foldable: Boolean = true
  override def nullable: Boolean = false
  override def dataType: DataType = TimestampType
  // Internal timestamp representation is microseconds, hence the * 1000L on millis.
  override def eval(input: InternalRow): Any = {
    System.currentTimeMillis() * 1000L
  }
}
/**
 * Adds a number of days to startdate.
 */
case class DateAdd(startDate: Expression, days: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = startDate
  override def right: Expression = days
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, IntegerType)
  override def dataType: DataType = DateType
  // DateType values are days since epoch, so the addition is plain Int arithmetic.
  override def nullSafeEval(start: Any, d: Any): Any = {
    start.asInstanceOf[Int] + d.asInstanceOf[Int]
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    nullSafeCodeGen(ctx, ev, (sd, d) => {
      s"""${ev.value} = $sd + $d;"""
    })
  }
}
/**
 * Subtracts a number of days to startdate.
 */
case class DateSub(startDate: Expression, days: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = startDate
  override def right: Expression = days
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, IntegerType)
  override def dataType: DataType = DateType
  // Mirror image of DateAdd: subtract the day count from the epoch-day value.
  override def nullSafeEval(start: Any, d: Any): Any = {
    start.asInstanceOf[Int] - d.asInstanceOf[Int]
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    nullSafeCodeGen(ctx, ev, (sd, d) => {
      s"""${ev.value} = $sd - $d;"""
    })
  }
}
/** Extracts the hour component of a timestamp (delegates to DateTimeUtils.getHours). */
case class Hour(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(timestamp: Any): Any = {
    DateTimeUtils.getHours(timestamp.asInstanceOf[Long])
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    // stripSuffix("$") turns the Scala object's class name into the name the
    // generated Java code can call statically.
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getHours($c)")
  }
}
/** Extracts the minute component of a timestamp (delegates to DateTimeUtils.getMinutes). */
case class Minute(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(timestamp: Any): Any = {
    DateTimeUtils.getMinutes(timestamp.asInstanceOf[Long])
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getMinutes($c)")
  }
}
/** Extracts the second component of a timestamp (delegates to DateTimeUtils.getSeconds). */
case class Second(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(timestamp: Any): Any = {
    DateTimeUtils.getSeconds(timestamp.asInstanceOf[Long])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getSeconds($c)")
  }
}
/** Extracts the day-of-year of a date (delegates to DateTimeUtils.getDayInYear). */
case class DayOfYear(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = IntegerType
  // DateType values arrive as Int days-since-epoch.
  override protected def nullSafeEval(date: Any): Any = {
    DateTimeUtils.getDayInYear(date.asInstanceOf[Int])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getDayInYear($c)")
  }
}
/** Extracts the year of a date (delegates to DateTimeUtils.getYear). */
case class Year(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(date: Any): Any = {
    DateTimeUtils.getYear(date.asInstanceOf[Int])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getYear($c)")
  }
}
/** Extracts the quarter of a date (delegates to DateTimeUtils.getQuarter). */
case class Quarter(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(date: Any): Any = {
    DateTimeUtils.getQuarter(date.asInstanceOf[Int])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getQuarter($c)")
  }
}
/** Extracts the month of a date (delegates to DateTimeUtils.getMonth). */
case class Month(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(date: Any): Any = {
    DateTimeUtils.getMonth(date.asInstanceOf[Int])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getMonth($c)")
  }
}
/** Extracts the day-of-month of a date (delegates to DateTimeUtils.getDayOfMonth). */
case class DayOfMonth(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = IntegerType
  override protected def nullSafeEval(date: Any): Any = {
    DateTimeUtils.getDayOfMonth(date.asInstanceOf[Int])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, c => s"$dtu.getDayOfMonth($c)")
  }
}
/**
 * Extracts the week-of-year of a date using ISO-8601-style week rules
 * (weeks start on Monday; the first week has at least 4 days).
 */
case class WeekOfYear(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = IntegerType
  // Shared, reusable Calendar in UTC configured for ISO-style weeks.
  // NOTE(review): Calendar is mutable and not thread-safe; this presumably relies
  // on an expression instance not being evaluated from multiple threads — confirm.
  @transient private lazy val c = {
    val c = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
    c.setFirstDayOfWeek(Calendar.MONDAY)
    c.setMinimalDaysInFirstWeek(4)
    c
  }
  override protected def nullSafeEval(date: Any): Any = {
    // Convert epoch days to epoch millis before handing to Calendar.
    c.setTimeInMillis(date.asInstanceOf[Int] * 1000L * 3600L * 24L)
    c.get(Calendar.WEEK_OF_YEAR)
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    nullSafeCodeGen(ctx, ev, time => {
      val cal = classOf[Calendar].getName
      val c = ctx.freshName("cal")
      // Mirror of the interpreted path: a mutable Calendar field is created once
      // per generated class and reused for every row.
      ctx.addMutableState(cal, c,
        s"""
          $c = $cal.getInstance(java.util.TimeZone.getTimeZone("UTC"));
          $c.setFirstDayOfWeek($cal.MONDAY);
          $c.setMinimalDaysInFirstWeek(4);
         """)
      s"""
        $c.setTimeInMillis($time * 1000L * 3600L * 24L);
        ${ev.value} = $c.get($cal.WEEK_OF_YEAR);
      """
    })
  }
}
/**
 * Formats a timestamp as a string using a SimpleDateFormat pattern (SQL date_format).
 */
case class DateFormatClass(left: Expression, right: Expression) extends BinaryExpression
  with ImplicitCastInputTypes {
  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, StringType)
  override protected def nullSafeEval(timestamp: Any, format: Any): Any = {
    // A fresh SimpleDateFormat per row: SimpleDateFormat is mutable/not thread-safe.
    val sdf = new SimpleDateFormat(format.toString)
    // Internal timestamps are microseconds; java.util.Date wants milliseconds.
    UTF8String.fromString(sdf.format(new java.util.Date(timestamp.asInstanceOf[Long] / 1000)))
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val sdf = classOf[SimpleDateFormat].getName
    defineCodeGen(ctx, ev, (timestamp, format) => {
      s"""UTF8String.fromString((new $sdf($format.toString()))
          .format(new java.util.Date($timestamp / 1000)))"""
    })
  }
  override def prettyName: String = "date_format"
}
/**
 * Converts time string with given pattern.
 * Deterministic version of [[UnixTimestamp]], must have at least one parameter.
 */
case class ToUnixTimestamp(timeExp: Expression, format: Expression) extends UnixTime {
  override def left: Expression = timeExp
  override def right: Expression = format
  // Single-argument form defaults the pattern, matching Hive semantics.
  def this(time: Expression) = {
    this(time, Literal("yyyy-MM-dd HH:mm:ss"))
  }
}
/**
 * Converts time string with given pattern.
 * (see [http://docs.oracle.com/javase/tutorial/i18n/format/simpleDateFormat.html])
 * to Unix time stamp (in seconds), returns null if fail.
 * Note that hive Language Manual says it returns 0 if fail, but in fact it returns null.
 * If the second parameter is missing, use "yyyy-MM-dd HH:mm:ss".
 * If no parameters provided, the first parameter will be current_timestamp.
 * If the first parameter is a Date or Timestamp instead of String, we will ignore the
 * second parameter.
 */
case class UnixTimestamp(timeExp: Expression, format: Expression) extends UnixTime {
  override def left: Expression = timeExp
  override def right: Expression = format
  def this(time: Expression) = {
    this(time, Literal("yyyy-MM-dd HH:mm:ss"))
  }
  // Zero-argument form: unix_timestamp() of the current time (non-deterministic).
  def this() = {
    this(CurrentTimestamp())
  }
}
/**
 * Shared implementation for unix_timestamp/to_unix_timestamp: converts a
 * String/Date/Timestamp (left) to seconds since epoch, using the SimpleDateFormat
 * pattern in right when left is a String. Returns null on parse failure.
 */
abstract class UnixTime extends BinaryExpression with ExpectsInputTypes {
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(StringType, DateType, TimestampType), StringType)
  override def dataType: DataType = LongType
  // Cached format for the common case of a literal (foldable) pattern.
  // May be null if the literal itself evaluates to null.
  private lazy val constFormat: UTF8String = right.eval().asInstanceOf[UTF8String]
  override def eval(input: InternalRow): Any = {
    val t = left.eval(input)
    if (t == null) {
      null
    } else {
      // Dispatch on the *static* type of the left child; the format is only
      // consulted for string inputs.
      left.dataType match {
        case DateType =>
          // days -> millis -> seconds
          DateTimeUtils.daysToMillis(t.asInstanceOf[Int]) / 1000L
        case TimestampType =>
          // micros -> seconds
          t.asInstanceOf[Long] / 1000000L
        case StringType if right.foldable =>
          if (constFormat != null) {
            // Parse failures (and bad patterns) yield null rather than an error.
            Try(new SimpleDateFormat(constFormat.toString).parse(
              t.asInstanceOf[UTF8String].toString).getTime / 1000L).getOrElse(null)
          } else {
            null
          }
        case StringType =>
          // Non-literal pattern: evaluate it per row.
          val f = right.eval(input)
          if (f == null) {
            null
          } else {
            val formatString = f.asInstanceOf[UTF8String].toString
            Try(new SimpleDateFormat(formatString).parse(
              t.asInstanceOf[UTF8String].toString).getTime / 1000L).getOrElse(null)
          }
      }
    }
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    left.dataType match {
      case StringType if right.foldable =>
        val sdf = classOf[SimpleDateFormat].getName
        val fString = if (constFormat == null) null else constFormat.toString
        val formatter = ctx.freshName("formatter")
        if (fString == null) {
          // Null literal pattern: the result is statically null.
          s"""
            boolean ${ev.isNull} = true;
            ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          """
        } else {
          val eval1 = left.gen(ctx)
          // Pattern string is inlined into the generated source; parse errors
          // are mapped to null via the catch block, matching eval().
          s"""
            ${eval1.code}
            boolean ${ev.isNull} = ${eval1.isNull};
            ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
            if (!${ev.isNull}) {
              try {
                $sdf $formatter = new $sdf("$fString");
                ${ev.value} =
                  $formatter.parse(${eval1.value}.toString()).getTime() / 1000L;
              } catch (java.lang.Throwable e) {
                ${ev.isNull} = true;
              }
            }
          """
        }
      case StringType =>
        val sdf = classOf[SimpleDateFormat].getName
        nullSafeCodeGen(ctx, ev, (string, format) => {
          s"""
            try {
              ${ev.value} =
                (new $sdf($format.toString())).parse($string.toString()).getTime() / 1000L;
            } catch (java.lang.Throwable e) {
              ${ev.isNull} = true;
            }
          """
        })
      case TimestampType =>
        val eval1 = left.gen(ctx)
        s"""
          ${eval1.code}
          boolean ${ev.isNull} = ${eval1.isNull};
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = ${eval1.value} / 1000000L;
          }
        """
      case DateType =>
        val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
        val eval1 = left.gen(ctx)
        s"""
          ${eval1.code}
          boolean ${ev.isNull} = ${eval1.isNull};
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = $dtu.daysToMillis(${eval1.value}) / 1000L;
          }
        """
    }
  }
}
/**
 * Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string
 * representing the timestamp of that moment in the current system time zone in the given
 * format. If the format is missing, using format like "1970-01-01 00:00:00".
 * Note that hive Language Manual says it returns 0 if fail, but in fact it returns null.
 */
case class FromUnixTime(sec: Expression, format: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = sec
  override def right: Expression = format
  // Single-argument form defaults the output pattern.
  def this(unix: Expression) = {
    this(unix, Literal("yyyy-MM-dd HH:mm:ss"))
  }
  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(LongType, StringType)
  // Cached pattern for the literal-format fast path; null if the literal is null.
  private lazy val constFormat: UTF8String = right.eval().asInstanceOf[UTF8String]
  override def eval(input: InternalRow): Any = {
    val time = left.eval(input)
    if (time == null) {
      null
    } else {
      if (format.foldable) {
        if (constFormat == null) {
          null
        } else {
          // Formatting failures (e.g. malformed pattern) return null.
          Try(UTF8String.fromString(new SimpleDateFormat(constFormat.toString).format(
            new java.util.Date(time.asInstanceOf[Long] * 1000L)))).getOrElse(null)
        }
      } else {
        // Non-literal pattern: evaluate per row.
        val f = format.eval(input)
        if (f == null) {
          null
        } else {
          Try(UTF8String.fromString(new SimpleDateFormat(
            f.asInstanceOf[UTF8String].toString).format(new java.util.Date(
              time.asInstanceOf[Long] * 1000L)))).getOrElse(null)
        }
      }
    }
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val sdf = classOf[SimpleDateFormat].getName
    if (format.foldable) {
      if (constFormat == null) {
        // Null literal pattern: result is statically null.
        s"""
          boolean ${ev.isNull} = true;
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
        """
      } else {
        val t = left.gen(ctx)
        s"""
          ${t.code}
          boolean ${ev.isNull} = ${t.isNull};
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            try {
              ${ev.value} = UTF8String.fromString(new $sdf("${constFormat.toString}").format(
                new java.util.Date(${t.value} * 1000L)));
            } catch (java.lang.Throwable e) {
              ${ev.isNull} = true;
            }
          }
        """
      }
    } else {
      nullSafeCodeGen(ctx, ev, (seconds, f) => {
        s"""
        try {
          ${ev.value} = UTF8String.fromString((new $sdf($f.toString())).format(
            new java.util.Date($seconds * 1000L)));
        } catch (java.lang.Throwable e) {
          ${ev.isNull} = true;
        }""".stripMargin
      })
    }
  }
}
/**
 * Returns the last day of the month which the date belongs to.
 */
case class LastDay(startDate: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def child: Expression = startDate
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = DateType
  // Delegates entirely to DateTimeUtils; input/output are epoch-day Ints.
  override def nullSafeEval(date: Any): Any = {
    DateTimeUtils.getLastDayOfMonth(date.asInstanceOf[Int])
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, sd => s"$dtu.getLastDayOfMonth($sd)")
  }
  override def prettyName: String = "last_day"
}
/**
 * Returns the first date which is later than startDate and named as dayOfWeek.
 * For example, NextDay(2015-07-27, Sunday) would return 2015-08-02, which is the first
 * Sunday later than 2015-07-27.
 *
 * Allowed "dayOfWeek" is defined in [[DateTimeUtils.getDayOfWeekFromString]].
 */
case class NextDay(startDate: Expression, dayOfWeek: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = startDate
  override def right: Expression = dayOfWeek
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, StringType)
  override def dataType: DataType = DateType
  override def nullSafeEval(start: Any, dayOfW: Any): Any = {
    val dow = DateTimeUtils.getDayOfWeekFromString(dayOfW.asInstanceOf[UTF8String])
    // -1 signals an unrecognized day name; the result is null in that case.
    if (dow == -1) {
      null
    } else {
      val sd = start.asInstanceOf[Int]
      DateTimeUtils.getNextDateForDayOfWeek(sd, dow)
    }
  }
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    nullSafeCodeGen(ctx, ev, (sd, dowS) => {
      val dateTimeUtilClass = DateTimeUtils.getClass.getName.stripSuffix("$")
      val dayOfWeekTerm = ctx.freshName("dayOfWeek")
      if (dayOfWeek.foldable) {
        // Literal day name: resolve it at codegen time and inline the result
        // (or a constant-null when the name is invalid/null).
        val input = dayOfWeek.eval().asInstanceOf[UTF8String]
        if ((input eq null) || DateTimeUtils.getDayOfWeekFromString(input) == -1) {
          s"""
             |${ev.isNull} = true;
           """.stripMargin
        } else {
          val dayOfWeekValue = DateTimeUtils.getDayOfWeekFromString(input)
          s"""
             |${ev.value} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekValue);
           """.stripMargin
        }
      } else {
        // Non-literal day name: parse it per row in the generated code.
        s"""
           |int $dayOfWeekTerm = $dateTimeUtilClass.getDayOfWeekFromString($dowS);
           |if ($dayOfWeekTerm == -1) {
           |  ${ev.isNull} = true;
           |} else {
           |  ${ev.value} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekTerm);
           |}
         """.stripMargin
      }
    })
  }
  override def prettyName: String = "next_day"
}
/**
 * Adds an interval to timestamp.
 */
case class TimeAdd(start: Expression, interval: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = start
  override def right: Expression = interval
  override def toString: String = s"$left + $right"
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType)
  override def dataType: DataType = TimestampType
  // CalendarInterval splits into a months part and a microseconds part;
  // both are applied by DateTimeUtils.timestampAddInterval.
  override def nullSafeEval(start: Any, interval: Any): Any = {
    val itvl = interval.asInstanceOf[CalendarInterval]
    DateTimeUtils.timestampAddInterval(
      start.asInstanceOf[Long], itvl.months, itvl.microseconds)
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, (sd, i) => {
      s"""$dtu.timestampAddInterval($sd, $i.months, $i.microseconds)"""
    })
  }
}
/**
 * Assumes given timestamp is UTC and converts to given timezone.
 */
case class FromUTCTimestamp(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, StringType)
  override def dataType: DataType = TimestampType
  override def prettyName: String = "from_utc_timestamp"
  override def nullSafeEval(time: Any, timezone: Any): Any = {
    DateTimeUtils.fromUTCTime(time.asInstanceOf[Long],
      timezone.asInstanceOf[UTF8String].toString)
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    if (right.foldable) {
      val tz = right.eval()
      if (tz == null) {
        // Null literal timezone: statically null result.
        s"""
           |boolean ${ev.isNull} = true;
           |long ${ev.value} = 0;
         """.stripMargin
      } else {
        // Literal timezone: cache a TimeZone field in the generated class and
        // inline the offset arithmetic (adds the zone offset to the UTC micros)
        // instead of calling DateTimeUtils.fromUTCTime per row.
        // NOTE(review): the tz string is spliced directly into generated Java source.
        val tzTerm = ctx.freshName("tz")
        val tzClass = classOf[TimeZone].getName
        ctx.addMutableState(tzClass, tzTerm, s"""$tzTerm = $tzClass.getTimeZone("$tz");""")
        val eval = left.gen(ctx)
        s"""
           |${eval.code}
           |boolean ${ev.isNull} = ${eval.isNull};
           |long ${ev.value} = 0;
           |if (!${ev.isNull}) {
           |  ${ev.value} = ${eval.value} +
           |   ${tzTerm}.getOffset(${eval.value} / 1000) * 1000L;
           |}
         """.stripMargin
      }
    } else {
      defineCodeGen(ctx, ev, (timestamp, format) => {
        s"""$dtu.fromUTCTime($timestamp, $format.toString())"""
      })
    }
  }
}
/**
 * Subtracts an interval from timestamp.
 */
case class TimeSub(start: Expression, interval: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = start
  override def right: Expression = interval
  override def toString: String = s"$left - $right"
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType)
  override def dataType: DataType = TimestampType
  // Implemented as TimeAdd with both interval components negated.
  override def nullSafeEval(start: Any, interval: Any): Any = {
    val itvl = interval.asInstanceOf[CalendarInterval]
    DateTimeUtils.timestampAddInterval(
      start.asInstanceOf[Long], 0 - itvl.months, 0 - itvl.microseconds)
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, (sd, i) => {
      s"""$dtu.timestampAddInterval($sd, 0 - $i.months, 0 - $i.microseconds)"""
    })
  }
}
/**
 * Returns the date that is num_months after start_date.
 */
case class AddMonths(startDate: Expression, numMonths: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = startDate
  override def right: Expression = numMonths
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, IntegerType)
  override def dataType: DataType = DateType
  // Month arithmetic (end-of-month handling etc.) lives in DateTimeUtils.dateAddMonths.
  override def nullSafeEval(start: Any, months: Any): Any = {
    DateTimeUtils.dateAddMonths(start.asInstanceOf[Int], months.asInstanceOf[Int])
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, (sd, m) => {
      s"""$dtu.dateAddMonths($sd, $m)"""
    })
  }
}
/**
 * Returns number of months between dates date1 and date2.
 */
case class MonthsBetween(date1: Expression, date2: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = date1
  override def right: Expression = date2
  // NOTE(review): despite the "date" parameter names, inputs are timestamps
  // (micros); implicit casts coerce DateType arguments.
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, TimestampType)
  override def dataType: DataType = DoubleType
  override def nullSafeEval(t1: Any, t2: Any): Any = {
    DateTimeUtils.monthsBetween(t1.asInstanceOf[Long], t2.asInstanceOf[Long])
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    defineCodeGen(ctx, ev, (l, r) => {
      s"""$dtu.monthsBetween($l, $r)"""
    })
  }
}
/**
 * Assumes given timestamp is in given timezone and converts to UTC.
 */
case class ToUTCTimestamp(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, StringType)
  override def dataType: DataType = TimestampType
  override def prettyName: String = "to_utc_timestamp"
  override def nullSafeEval(time: Any, timezone: Any): Any = {
    DateTimeUtils.toUTCTime(time.asInstanceOf[Long],
      timezone.asInstanceOf[UTF8String].toString)
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    if (right.foldable) {
      val tz = right.eval()
      if (tz == null) {
        // Null literal timezone: statically null result.
        s"""
           |boolean ${ev.isNull} = true;
           |long ${ev.value} = 0;
         """.stripMargin
      } else {
        // Literal timezone: cache a TimeZone field and inline the inverse of
        // FromUTCTimestamp's arithmetic (subtract the zone offset).
        val tzTerm = ctx.freshName("tz")
        val tzClass = classOf[TimeZone].getName
        ctx.addMutableState(tzClass, tzTerm, s"""$tzTerm = $tzClass.getTimeZone("$tz");""")
        val eval = left.gen(ctx)
        s"""
           |${eval.code}
           |boolean ${ev.isNull} = ${eval.isNull};
           |long ${ev.value} = 0;
           |if (!${ev.isNull}) {
           |  ${ev.value} = ${eval.value} -
           |   ${tzTerm}.getOffset(${eval.value} / 1000) * 1000L;
           |}
         """.stripMargin
      }
    } else {
      defineCodeGen(ctx, ev, (timestamp, format) => {
        s"""$dtu.toUTCTime($timestamp, $format.toString())"""
      })
    }
  }
}
/**
 * Returns the date part of a timestamp or string.
 */
case class ToDate(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  // Implicit casting of spark will accept string in both date and timestamp format, as
  // well as TimestampType.
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
  override def dataType: DataType = DateType
  // Identity at runtime: the implicit cast above has already converted the child
  // to DateType, so both eval and codegen simply pass the value through.
  override def eval(input: InternalRow): Any = child.eval(input)
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    defineCodeGen(ctx, ev, d => d)
  }
}
/**
 * Returns date truncated to the unit specified by the format (SQL trunc).
 * An unrecognized format yields null.
 */
case class TruncDate(date: Expression, format: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = date
  override def right: Expression = format
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, StringType)
  override def dataType: DataType = DateType
  override def prettyName: String = "trunc"
  // Cached truncation level for the literal-format fast path; -1 = unknown format.
  private lazy val truncLevel: Int =
    DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
  override def eval(input: InternalRow): Any = {
    val level = if (format.foldable) {
      truncLevel
    } else {
      // BUG FIX: evaluate the format against the current row. The previous code
      // called format.eval() (empty row), which is wrong for any non-literal
      // format such as a column reference; the codegen path below already
      // evaluates the format per row.
      DateTimeUtils.parseTruncLevel(format.eval(input).asInstanceOf[UTF8String])
    }
    if (level == -1) {
      // unknown format
      null
    } else {
      val d = date.eval(input)
      if (d == null) {
        null
      } else {
        DateTimeUtils.truncDate(d.asInstanceOf[Int], level)
      }
    }
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
    if (format.foldable) {
      if (truncLevel == -1) {
        // Statically-unknown format: result is a constant null.
        s"""
          boolean ${ev.isNull} = true;
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
        """
      } else {
        // Literal format: inline the pre-computed truncation level.
        val d = date.gen(ctx)
        s"""
          ${d.code}
          boolean ${ev.isNull} = ${d.isNull};
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = $dtu.truncDate(${d.value}, $truncLevel);
          }
        """
      }
    } else {
      // Non-literal format: parse the level per row in the generated code.
      nullSafeCodeGen(ctx, ev, (dateVal, fmt) => {
        val form = ctx.freshName("form")
        s"""
          int $form = $dtu.parseTruncLevel($fmt);
          if ($form == -1) {
            ${ev.isNull} = true;
          } else {
            ${ev.value} = $dtu.truncDate($dateVal, $form);
          }
        """
      })
    }
  }
}
/**
 * Returns the number of days from startDate to endDate.
 */
case class DateDiff(endDate: Expression, startDate: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = endDate
  override def right: Expression = startDate
  override def inputTypes: Seq[AbstractDataType] = Seq(DateType, DateType)
  override def dataType: DataType = IntegerType
  // Both operands are epoch-day Ints, so the difference is plain subtraction.
  override def nullSafeEval(end: Any, start: Any): Any = {
    end.asInstanceOf[Int] - start.asInstanceOf[Int]
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    defineCodeGen(ctx, ev, (end, start) => s"$end - $start")
  }
}
| chenc10/Spark-PAF | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala | Scala | apache-2.0 | 30,473 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import org.apache.accumulo.core.data.{ByteSequence, Key, Value, Range => AccRange}
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
import org.apache.hadoop.io.Text
import org.locationtech.geomesa.index.filters.RowFilter
import org.locationtech.geomesa.index.filters.RowFilter.RowFilterFactory
/**
* Abstract base class for filtering iterators. Implementations must have a no-arg default constructor, used
* when deep copying an instance.
*
* @param factory filter factory
* @tparam T filter type bound
*/
abstract class RowFilterIterator[T <: RowFilter](factory: RowFilterFactory[T])
    extends SortedKeyValueIterator[Key, Value] {

  import scala.collection.JavaConverters._

  // Wrapped source iterator; set in init()/deepCopy().
  private var source: SortedKeyValueIterator[Key, Value] = _

  // Deserialized row filter and the byte offset at which it is applied.
  private var filter: T = _
  private var offset: Int = -1

  // Current top entry, or null when the iterator is exhausted / not yet positioned.
  private var topKey: Key = _
  private var topValue: Value = _

  // Reusable Text buffer for reading row bytes without per-entry allocation.
  private val row = new Text()

  override def init(
      source: SortedKeyValueIterator[Key, Value],
      options: java.util.Map[String, String],
      env: IteratorEnvironment): Unit = {
    this.source = source
    offset = options.get(RowFilterIterator.RowOffsetKey).toInt
    filter = factory.deserializeFromStrings(options.asScala)
  }

  override def next(): Unit = {
    source.next()
    findTop()
  }

  // Advances the source until an entry passes the row filter (or the source is
  // exhausted), caching the match in topKey/topValue.
  private def findTop(): Unit = {
    topKey = null
    topValue = null
    while (source.hasTop) {
      source.getTopKey.getRow(row)
      if (filter.inBounds(row.getBytes, offset)) {
        topKey = source.getTopKey
        topValue = source.getTopValue
        return
      } else {
        source.next()
      }
    }
  }

  override def seek(range: AccRange, columnFamilies: java.util.Collection[ByteSequence], inclusive: Boolean): Unit = {
    source.seek(range, columnFamilies, inclusive)
    findTop()
  }

  override def getTopValue: Value = topValue
  override def getTopKey: Key = topKey
  override def hasTop: Boolean = topKey != null

  override def deepCopy(env: IteratorEnvironment): SortedKeyValueIterator[Key, Value] = {
    // Round-trip the filter through its string form so the copy re-initializes
    // exactly as if Accumulo had configured it from scratch.
    val opts = factory.serializeToStrings(filter) + (RowFilterIterator.RowOffsetKey -> offset.toString)
    val iter = getClass.newInstance() // note: requires default (zero-arg) constructor
    iter.init(source.deepCopy(env), opts.asJava, env)
    iter
  }
}
object RowFilterIterator {
  // Option key under which the row byte-offset is passed to init().
  val RowOffsetKey = "zo"
}
| aheyne/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/RowFilterIterator.scala | Scala | apache-2.0 | 2,899 |
package exsplay.examples.sprayactor
import akka.actor.{Props, ActorSystem}
import akka.io.IO
import spray.can.Http
import exsplay.tools.IF
/**
* User: wert
* Date: 21.07.13
* Time: 16:06
*/
object SimpleActorServer extends App {
  // Actor system hosting both the spray HTTP server and the service actor
  implicit val system = ActorSystem()
  // Actor that handles incoming HTTP requests
  val serviceActor = system.actorOf(Props[SimpleActor])
  // Bind the spray-can HTTP server on all interfaces, port 8080
  IO(Http) ! Http.Bind(serviceActor, interface = "0.0.0.0", port=8080)
  // Development-mode convenience: block until Enter is pressed, then shut the
  // actor system down so the app can be restarted quickly
  IF develop {
    println("press Enter to reload")
    readLine()
    system.shutdown()
  }
}
| wertlex/exsplay | examples/src/main/scala/exsplay/examples/sprayactor/SimpleActorServer.scala | Scala | apache-2.0 | 498 |
/*
We group all exceptions in a single file.
See the Scala style guide: http://docs.scala-lang.org/style/files.html
*/
package at.forsyte.apalache.tla.imp
/**
 * Base class for all errors raised whenever a call to SANY results in an error.
 * For more detailed causes, see the exceptions that inherit from SanyException.
 *
 * @author konnov
 */
class SanyException(message: String) extends Exception(message)
/**
 * This exception is thrown when our SanyImporter meets something unexpected,
 * i.e., an internal importer error rather than a problem in the user's input.
 *
 * @author konnov
 */
class SanyImporterException(message: String) extends SanyException(message)
/**
 * This exception is thrown when SANY aborts processing entirely.
 *
 * @author konnov
 */
class SanyAbortException(message: String) extends SanyException(message)
/**
 * This exception is thrown when SANY reports a syntax error in the input.
 *
 * @author konnov
 */
class SanySyntaxException(message: String) extends SanyException(message)
/**
 * This exception is thrown when SANY reports a semantic error in the input.
 *
 * @author konnov
 */
class SanySemanticException(message: String) extends SanyException(message)
| konnov/apalache | tla-import/src/main/scala/at/forsyte/apalache/tla/imp/exceptions.scala | Scala | apache-2.0 | 1,101 |
package io.scrapeyard
import akka.actor._
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import io.scrapeyard.Models._
import org.joda.time.{Duration, DateTime}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.duration._
import scala.util.Success
class DispatcherTest extends TestKit(ActorSystem("TestSys"))
  with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll {
  "A dispatcher" when {
    "request message received" should {
      "dispatch request to all scrapers and send mail with results" in {
        // two fake scrapers, each answering with a fixed price/currency
        val scrapers = Map(
          "fake" -> system.actorOf(Props(new FakeScraperActor("100", "EUR"))),
          "fake2" -> system.actorOf(Props(new FakeScraperActor("200", "USD")))
        )
        // probe standing in for the mailer actor
        val mailer = TestProbe()
        val mailerProps = Props(new Forwarder(mailer.ref))
        val dispatcher = system.actorOf(
          Props(new Dispatcher(scrapers, mailerProps)), "dispatcher")
        val dep = DateTime.parse("2015-05-20T00:00:00Z")
        val ret = DateTime.parse("2015-07-20T00:00:00Z")
        val stayDays = new Duration(dep, ret).toStandardDays.getDays
        // create batch search criteria with fixed departure airport,
        // departure and return dates, and two possible destination
        // airports
        val criteria = BatchSearchCriteria(
          Set("ZAG"),
          Set("BRU", "OST"),
          dep,
          dep,
          ret,
          ret,
          stayDays,
          stayDays
        )
        val req = SearchRequest("user@mail.com", criteria)
        dispatcher ! req
        // 2 scrapers x 2 destinations = 4 results expected in the mail
        val params1 = SearchParams("ZAG", "BRU", dep, ret)
        val params2 = SearchParams("ZAG", "OST", dep, ret)
        val results = Set(
          SearchResult(params1, SearchYield(100, "EUR", "url")),
          SearchResult(params2, SearchYield(100, "EUR", "url")),
          SearchResult(params1, SearchYield(200, "USD", "url")),
          SearchResult(params2, SearchYield(200, "USD", "url"))
        )
        val expected = SendResults(
          "user@mail.com",
          "Search results",
          results
        )
        mailer.expectMsg(expected)
        import scala.concurrent.duration._
        import scala.language.postfixOps
        val dispWatch = TestProbe()
        dispWatch watch dispatcher
        // verify that actor is stopped
        dispWatch.expectMsgPF(2 seconds) { case Terminated(_) => true }
      }
      "dispatch request only to selected scrapers" in {
        val scraperActors = (1 to 3).map(_ => TestProbe())
        val Seq(momondo, airHr, qatar) = scraperActors
        val scrapers = Map(
          "momondo" -> momondo.ref,
          "airHr" -> airHr.ref,
          "qatar" -> qatar.ref
        )
        // mailer props are unused on this path, hence null
        val dispatcher = system.actorOf(
          Props(new Dispatcher(scrapers, null)), "dispatcher")
        val dep = DateTime.parse("2015-05-20T00:00:00Z")
        val ret = DateTime.parse("2015-07-20T00:00:00Z")
        val stayDays = new Duration(dep, ret).toStandardDays.getDays
        // create batch search criteria with fixed departure airport,
        // departure and return dates, and one possible destination
        // airport
        val criteria = BatchSearchCriteria(
          Set("ZAG"),
          Set("BRU"),
          dep,
          dep,
          ret,
          ret,
          stayDays,
          stayDays
        )
        // only momondo and qatar are whitelisted in the request
        val req = SearchRequest("user@mail.com", criteria, Some(Set("momondo", "qatar")))
        dispatcher ! req
        val params = SearchParams("ZAG", "BRU", dep, ret)
        momondo.expectMsg(1.seconds, params)
        qatar.expectMsg(1.seconds, params)
        airHr.expectNoMsg(1.seconds)
      }
    }
  }
  override protected def afterAll(): Unit = system.shutdown()
}
/** Test helper: an actor that relays every incoming message to `target`. */
class Forwarder(target: ActorRef) extends Actor {
  def receive = {
    case message => target.forward(message)
  }
}
/**
 * Test helper: replies to any SearchParams with the original params paired
 * with a fixed successful SearchYield built from `amount` and `currency`.
 */
class FakeScraperActor(amount: String, currency: String) extends Actor {
  def receive = {
    case params: SearchParams =>
      val result = Success(SearchYield(amount.toDouble, currency, "url"))
      sender ! ((params, result))
  }
}
| zoltanmaric/scrapeyard | server/src/test/scala/io/scrapeyard/DispatcherTest.scala | Scala | gpl-2.0 | 4,094 |
/*
Copyright (c) 2009-2012, The Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the University of California nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.cdlib.was.weari.webgraph;
import org.joda.time.DateTime;
/**
 * A single hyperlink extracted from a crawled page.
 *
 * Converted from a plain class with `val` parameters to a case class: the
 * constructor signature and public accessors are unchanged, while structural
 * equality, hashCode, toString, copy and pattern matching come for free —
 * the idiomatic form for an immutable data holder.
 *
 * @param from URL of the page on which the link was found
 * @param to   target URL of the link
 * @param date capture date associated with the link
 * @param text anchor text of the link
 */
case class Outlink(from : String,
                   to : String,
                   date : DateTime,
                   text : String)
| cdlib/weari | src/main/scala/org/cdlib/was/weari/webgraph/Outlink.scala | Scala | bsd-3-clause | 1,742 |
package scala.tools.nsc
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(classOf[JUnit4])
class ScriptRunnerTest {

  /** Running an empty script must succeed, both with and without -save. */
  @Test
  def testEmptyScriptSucceeds: Unit = {
    val settings = new GenericRunnerSettings(_ => ())
    settings.nc.value = true
    settings.usejavacp.value = true
    // scala -nc -e ''
    assertTrue(ScriptRunner.runCommand(settings, "", Nil))
    // scala -nc -save -e ''
    settings.save.value = true
    assertTrue(ScriptRunner.runCommand(settings, "", Nil))
  }
}
| felixmulder/scala | test/junit/scala/tools/nsc/ScriptRunnerTest.scala | Scala | bsd-3-clause | 523 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}
// CT600 v3 form box 735: an optional integer supplied as user input.
// NOTE(review): the box identifier is registered with an empty name ("") —
// confirm this is intentional and consistent with the other v3 box definitions.
case class B735(value: Option[Int]) extends CtBoxIdentifier("") with CtOptionalInteger with Input
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B735.scala | Scala | apache-2.0 | 807 |
package com.madgag.git.bfg.model
import java.nio.charset.Charset
import com.madgag.git._
import com.madgag.git.bfg.cleaner._
import org.eclipse.jgit.lib.Constants.OBJ_COMMIT
import org.eclipse.jgit.lib._
import java.nio.charset.IllegalCharsetNameException
import org.eclipse.jgit.revwalk.{RevWalk, RevCommit}
/*
* Copyright (c) 2012, 2013 Roberto Tyley
*
* This file is part of 'BFG Repo-Cleaner' - a tool for removing large
* or troublesome blobs from Git repositories.
*
* BFG Repo-Cleaner is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BFG Repo-Cleaner is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/ .
*/
object Commit {
  // Splits a JGit RevCommit into its content (CommitNode) and its graph
  // connections (CommitArcs); `arcs` comes from an enrichment imported above.
  def apply(revCommit: RevCommit): Commit = Commit(CommitNode(revCommit), revCommit.arcs)
}
case class Commit(node: CommitNode, arcs: CommitArcs) {
  // Serialises this commit into the raw Git commit object format using JGit's
  // CommitBuilder.
  def toBytes: Array[Byte] = {
    import scala.collection.convert.wrapAsJava._
    val c = new CommitBuilder
    c.setParentIds(arcs.parents)
    c.setTreeId(arcs.tree)
    c.setAuthor(node.author)
    c.setCommitter(node.committer)
    c.setEncoding(node.encoding)
    c.setMessage(node.message)
    c.toByteArray
  }
  // Object id of the serialised commit, computed without writing to a repository
  lazy val id = new ObjectInserter.Formatter().idFor(OBJ_COMMIT, toBytes)
  // e.g. commit[1a2b3c4 'first 50 chars of subject']
  override lazy val toString = s"commit[${id.shortName}${node.subject.map(s=> s" '${s.take(50)}'").getOrElse("")}]"
}
case class CommitArcs(parents: Seq[ObjectId], tree: ObjectId) {
  // Maps parents and tree through the cleaner; parents cleaned to the zero id
  // (i.e. removed from history) are dropped entirely
  def cleanWith(cleaner: ObjectIdCleaner) = CommitArcs(parents.map(cleaner.cleanCommit).filterNot(_ == ObjectId.zeroId()), cleaner.cleanTree(tree))
  // True when this commit introduces no tree change: a parentless commit with
  // an empty tree, or a single-parent commit whose tree equals its parent's
  def isEmptyCommit(implicit revWalk: RevWalk) = parents match {
    case Seq() => tree == Tree.Empty.objectId
    case Seq(singleParent) => singleParent.asRevCommit.getTree == tree
    case _ => false
  }
}
object CommitNode {
  // Extracts author/committer/message/encoding from a RevCommit, falling back
  // to the default charset when the commit declares an invalid encoding name
  def apply(c: RevCommit): CommitNode = CommitNode(c.getAuthorIdent, c.getCommitterIdent, c.getFullMessage,
    try c.getEncoding catch {case e: IllegalCharsetNameException => Constants.CHARSET})
}
case class CommitNode(author: PersonIdent, committer: PersonIdent, message: String, encoding: Charset = Constants.CHARSET) {
  // First line of the commit message, if any
  lazy val subject = message.lines.toStream.headOption
  // Index of the last paragraph break; -1 when the message is a single paragraph.
  // NOTE(review): when -1, drop(-1) below scans the whole message for footers
  // and take(-1) yields "" — confirm single-paragraph messages containing
  // footer-like lines are handled as intended.
  lazy val lastParagraphBreak = message.lastIndexOf("\\n\\n")
  // Message body with the trailing footer paragraph stripped (unchanged when
  // there are no footers)
  lazy val messageWithoutFooters = if (footers.isEmpty) message else (message take lastParagraphBreak)
  // "Key: value" footer lines from the last paragraph (Signed-off-by etc.)
  lazy val footers: List[Footer] = message.drop(lastParagraphBreak).lines.collect {
    case Footer.FooterPattern(key, value) => Footer(key, value)
  }.toList
  // Appends a footer, inserting a blank-line separator when this is the first one
  def add(footer: Footer) = copy(message = message + "\\n" + (if (footers.isEmpty) "\\n" else "") + footer.toString)
}
| javabrett/bfg-repo-cleaner | bfg-library/src/main/scala/com/madgag/git/bfg/model/Commit.scala | Scala | gpl-3.0 | 3,086 |
package com.thoughtworks.datacommons.prepbuddy.surrogateKeyGeneration
import com.thoughtworks.datacommons.prepbuddy.SparkTestCase
import com.thoughtworks.datacommons.prepbuddy.rdds.TransformableRDD
import org.apache.spark.rdd.RDD
/**
 * Verifies that TransformableRDD.addSurrogateKey prepends a key column to
 * every record: either an incrementing integer above a given offset, or a
 * generated UUID when no offset is supplied.
 */
class SurrogateKeyGenerationTest extends SparkTestCase {
    test("should add surrogate key at the beginning of the row with incremental value greater than the offset") {
        val records: RDD[String] = sparkContext.parallelize(Array(
            "One,Two,Three",
            "Four,Five,Six",
            "Seven,Eight,Nine",
            "Ten,Eleven,Twelve"
        ), 3)
        val transformable: TransformableRDD = new TransformableRDD(records)
        // The surrogate key is inserted as column 0 of every record.
        val actualKeys: Set[String] = transformable.addSurrogateKey(100).select(0).collect().toSet
        val expectedKeys: Set[String] = (101 to 104).map(_.toString).toSet
        assertResult(4)(actualKeys.size)
        assertResult(expectedKeys)(actualKeys)
    }
    test("should add UUID as surrogate key at the beginning of the row") {
        val records: RDD[String] = sparkContext.parallelize(Array(
            "One,Two,Three",
            "Four,Five,Six",
            "Seven,Eight,Nine",
            "Ten,Eleven,Twelve"
        ), 3)
        val transformable: TransformableRDD = new TransformableRDD(records)
        val generatedKeys: Array[String] = transformable.addSurrogateKey().select(0).collect()
        // All four generated keys must be distinct.
        assertResult(4)(generatedKeys.distinct.length)
    }
}
| data-commons/prep-buddy | src/test/scala/com/thoughtworks/datacommons/prepbuddy/surrogateKeyGeneration/SurrogateKeyGenerationTest.scala | Scala | apache-2.0 | 1,479 |
/*
* Copyright [2014] [Jason Nerothin]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jasonnerothin.project.hrmodel
/**
* Created by IntelliJ IDEA.
* User: jason
* Date: 2/20/14
* Time: 10:18 AM
* Provides...
*/
/**
 * Placeholder entity in the HR model; no fields or behaviour defined yet.
 */
class Employee {
}
| jasonnerothin/projectit | src/main/scala/com/jasonnerothin/project/hrmodel/Employee.scala | Scala | apache-2.0 | 782 |
package com.sksamuel.scapegoat.inspections.string
import com.sksamuel.scapegoat._
/**
 * Scapegoat inspection that flags calls to `String.contains` whose argument is
 * neither a `CharSequence` nor a `Char`. Because `contains` accepts `Any`,
 * such calls type-check but compare the string against an unrelated type.
 *
 * Fix: corrected the typo "af any type" -> "of any type" in the user-facing
 * explanation string.
 *
 * @author
 *   Zack Grannan
 */
class UnsafeStringContains
    extends Inspection(
      text = "Unsafe string contains",
      defaultLevel = Levels.Error,
      description = "Checks for String.contains(value) for invalid types.",
      explanation =
        "String.contains() accepts arguments of any type, which means you might be checking if your string contains an element of an unrelated type."
    ) {
  def inspector(context: InspectionContext): Inspector =
    new Inspector(context) {
      override def postTyperTraverser: context.Traverser =
        new context.Traverser {
          import context.global._
          import treeInfo.Applied
          private val Contains = TermName("contains")
          // True when the tree's widened type is (a subtype of) Char
          private def isChar(tree: Tree) = tree.tpe.widen.baseClasses.contains(typeOf[Char].typeSymbol)
          // True when the tree is a CharSequence, or a String lifted by one of
          // Predef's implicit string enrichments
          private def isString(tree: Tree): Boolean = {
            tree.tpe.widen.baseClasses.contains(typeOf[CharSequence].typeSymbol) || (tree match {
              case Apply(left, _) =>
                Set("scala.LowPriorityImplicits.wrapString", "scala.Predef.augmentString")(
                  left.symbol.fullName
                )
              case _ => false
            })
          }
          // contains on a String is only meaningful for Char or CharSequence arguments
          private def isCompatibleType(value: Tree) = isString(value) || isChar(value)
          override def inspect(tree: Tree): Unit = {
            tree match {
              // explicit type application: s.contains[T](arg) — check the type argument
              case Applied(Select(lhs, Contains), targ :: Nil, (_ :: Nil) :: Nil)
                  if isString(lhs) && !isCompatibleType(targ) =>
                context.warn(tree.pos, self, tree.toString.take(300))
              // plain application: s.contains(arg) — check the argument itself
              case Applied(Select(lhs, Contains), _, (arg :: Nil) :: Nil)
                  if isString(lhs) && !isCompatibleType(arg) =>
                context.warn(tree.pos, self, tree.toString.take(300))
              case _ =>
                continue(tree)
            }
          }
        }
    }
}
| sksamuel/scapegoat | src/main/scala/com/sksamuel/scapegoat/inspections/string/UnsafeStringContains.scala | Scala | apache-2.0 | 1,996 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.controllers.registration.deceased
import iht.config.AppConfig
import iht.connector.{CachingConnector, IhtConnector}
import iht.controllers.ControllerHelper.Mode
import iht.controllers.registration.applicant.{routes => applicantRoutes}
import iht.forms.registration.DeceasedForms._
import iht.models.{DeceasedDetails, RegistrationDetails}
import iht.utils.{CommonHelper, DeceasedInfoHelper}
import iht.views.html.registration.deceased.deceased_address_details_uk
import javax.inject.Inject
import play.api.data.Form
import play.api.i18n.Messages
import play.api.mvc.{AnyContent, Call, MessagesControllerComponents, Request}
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
/**
 * Concrete wiring of [[DeceasedAddressDetailsUKController]]: connectors, auth,
 * the view template and app config are supplied by dependency injection.
 */
class DeceasedAddressDetailsUKControllerImpl @Inject()(val ihtConnector: IhtConnector,
                                                       val cachingConnector: CachingConnector,
                                                       val authConnector: AuthConnector,
                                                       val deceasedAddressDetailsUkView: deceased_address_details_uk,
                                                       implicit val appConfig: AppConfig,
                                                       val cc: MessagesControllerComponents) extends FrontendController(cc) with DeceasedAddressDetailsUKController {
}
/**
 * Registration page for the deceased's last contact address when it is in the
 * UK, supporting both initial entry and edit mode, with a link to switch to
 * the "address outside UK" variant.
 */
trait DeceasedAddressDetailsUKController extends RegistrationDeceasedControllerWithEditMode {
  // NOTE(review): the parameter name "messsages" is misspelt; renaming could
  // break callers using named arguments, so it is left unchanged here.
  def form(implicit messsages: Messages) = deceasedAddressDetailsUKForm
  override def guardConditions: Set[Predicate] = guardConditionsDeceasedLastContactAddress
  override val storageFailureMessage = "Storage of registration details fails during deceased address details UK"
  // Submission routes for normal and edit mode, plus the routes that switch
  // the user to the "address outside UK" page in each mode
  lazy val submitRoute: Call = routes.DeceasedAddressDetailsUKController.onSubmit
  lazy val editSubmitRoute: Call = routes.DeceasedAddressDetailsUKController.onEditSubmit
  lazy val switchToUkRoute: Call = routes.DeceasedAddressDetailsOutsideUKController.onPageLoad
  lazy val switchToUkEditRoute: Call = routes.DeceasedAddressDetailsOutsideUKController.onEditPageLoad
  // Injected Twirl template rendering the page
  val deceasedAddressDetailsUkView: deceased_address_details_uk
  // Renders the page for initial load (normal mode)
  def okForPageLoad(form: Form[DeceasedDetails], name: Option[String])(implicit request: Request[AnyContent]) =
    Ok(deceasedAddressDetailsUkView(form,
      DeceasedInfoHelper.getDeceasedNameOrDefaultString(name),
      submitRoute,
      switchToUkRoute))
  // Renders the page for initial load (edit mode, with a cancel link back to the summary)
  def okForEditPageLoad(form: Form[DeceasedDetails], name: Option[String])(implicit request: Request[AnyContent]) =
    Ok(deceasedAddressDetailsUkView(form,
      DeceasedInfoHelper.getDeceasedNameOrDefaultString(name),
      editSubmitRoute,
      switchToUkEditRoute,
      cancelToRegSummary))
  // Re-renders the page with validation errors (normal mode)
  def badRequestForSubmit(form: Form[DeceasedDetails], name: Option[String])(implicit request: Request[AnyContent]) =
    BadRequest(deceasedAddressDetailsUkView(form,
      DeceasedInfoHelper.getDeceasedNameOrDefaultString(name),
      submitRoute,
      switchToUkRoute))
  // Re-renders the page with validation errors (edit mode)
  def badRequestForEditSubmit(form: Form[DeceasedDetails], name: Option[String])(implicit request: Request[AnyContent]) =
    BadRequest(deceasedAddressDetailsUkView(form,
      DeceasedInfoHelper.getDeceasedNameOrDefaultString(name),
      editSubmitRoute,
      switchToUkEditRoute,
      cancelToRegSummary))
  // Pre-populates the form only when the stored address is a UK address;
  // otherwise the user starts from an empty form
  override def fillForm(rd: RegistrationDetails)(implicit request: Request[_]) = {
    val dd = CommonHelper.getOrException(rd.deceasedDetails)
    if (CommonHelper.getOrException(dd.isAddressInUK)) {
      deceasedAddressDetailsUKForm.fill(dd)
    } else {
      deceasedAddressDetailsUKForm
    }
  }
  // Copies the submitted UK address into the stored deceased details, forcing
  // isAddressInUK to true
  def applyChangesToRegistrationDetails(rd: RegistrationDetails, dd: DeceasedDetails, mode: Mode.Value) = {
    val x = rd.deceasedDetails.map( _ copy(isAddressInUK = Some(true), ukAddress = dd.ukAddress ))
    rd copy (deceasedDetails = x)
  }
  // Next page in the registration journey after a successful submit
  def onwardRoute(rd: RegistrationDetails) = applicantRoutes.ApplyingForProbateController.onPageLoad
}
| hmrc/iht-frontend | app/iht/controllers/registration/deceased/DeceasedAddressDetailsUKController.scala | Scala | apache-2.0 | 4,617 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.apache.spark.geomesa
import java.io.Serializable
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEnv, RpcTimeout}
import org.apache.spark.util.RpcUtils
import org.apache.spark.{SparkContext, SparkEnv}
import org.locationtech.geomesa.spark.GeoMesaSparkKryoRegistrator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes._
import org.opengis.feature.simple.SimpleFeatureType
import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}
/**
 * RPC endpoint that lets Spark executors exchange SimpleFeatureType schemas
 * with the driver's Kryo registry: the driver hosts the endpoint, executors
 * query/update it through [[Client]].
 */
object GeoMesaSparkKryoRegistratorEndpoint extends LazyLogging {
  // Set this Spark property to "false" to disable the endpoint entirely
  val EnablePropertyKey = "spark.geomesa.kryo.rpc.enable"
  val EndpointName = "kryo-schema"
  private lazy val Timeout = RpcUtils.askRpcTimeout(SparkEnv.get.conf)
  // Reference to the endpoint registered on the driver
  private lazy val EndpointRef = RpcUtils.makeDriverRef(EndpointName, SparkEnv.get.conf, SparkEnv.get.rpcEnv)
  // RPC client for this JVM: executors get the real client; the driver itself,
  // a missing SparkEnv, or a disabled endpoint yield the no-op client
  lazy val Client: KryoClient = Option(SparkEnv.get)
    .filterNot(_.executorId == SparkContext.DRIVER_IDENTIFIER)
    .filter(endpointEnabled)
    .map(_ => ExecutorKryoClient).getOrElse(NoOpKryoClient)
  private def endpointEnabled(sparkEnv: SparkEnv) =
    !sparkEnv.conf.get(EnablePropertyKey, "true").equalsIgnoreCase("false")
  // Driver: registers the RPC endpoint (an IllegalArgumentException is expected
  // when it is already registered and only logged at debug).
  // Executor: seeds the local registry with all types known to the driver.
  def init(): Unit = {
    Option(SparkEnv.get).foreach {
      sparkEnv =>
        if (endpointEnabled(sparkEnv)) {
          sparkEnv.executorId match {
            case SparkContext.DRIVER_IDENTIFIER =>
              val rpcEnv = sparkEnv.rpcEnv
              Try(rpcEnv.setupEndpoint(EndpointName, new KryoEndpoint(rpcEnv))) match {
                case Success(ref) =>
                  logger.info(s"$EndpointName rpc endpoint registered on driver ${ref.address}")
                case Failure(e: IllegalArgumentException) =>
                  logger.debug(s"$EndpointName rpc endpoint registration failed, may have been already registered", e)
                case Failure(e: Exception) =>
                  logger.warn(s"$EndpointName rpc endpoint registration failed", e)
              }
            case _ => GeoMesaSparkKryoRegistrator.putTypes(Client.getTypes())
          }
        } else {
          logger.debug(s"$EndpointName rpc endpoint disabled")
        }
    }
  }
  // Driver-side endpoint: evaluates each incoming KryoMessage against the
  // driver's registry and replies with the result
  class KryoEndpoint(val rpcEnv: RpcEnv) extends RpcEndpoint {
    override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
      case message: KryoMessage[_] =>
        logger.info(s"$message received via rpc from ${context.senderAddress}")
        context.reply(message.reply)
    }
  }
  // A request/reply message: `reply` computes the answer on the driver,
  // `ask` sends this message from an executor and awaits the result
  trait KryoMessage[R] {
    def reply: R
    def ask(timeout: RpcTimeout = Timeout)(implicit c: ClassTag[R]): R = {
      logger.info(s"$this sent via rpc to ${EndpointRef.address}")
      val start = System.nanoTime()
      val result = timeout.awaitResult[R](EndpointRef.ask[R](this, timeout))
      val delta = (System.nanoTime() - start) / 1000000L
      logger.info(s"$this response via rpc, $delta ms")
      result
    }
  }
  // Looks up a single schema by registry id
  class KryoGetTypeMessage(id: Int) extends KryoMessage[Option[(String, String)]] with Serializable {
    def reply: Option[(String, String)] = Option(GeoMesaSparkKryoRegistrator.getType(id))
    override def toString: String = s"getType(id=$id)"
  }
  // Fetches all schemas currently registered on the driver
  class KryoGetTypesMessage() extends KryoMessage[Seq[(String, String)]] with Serializable {
    def reply: Seq[(String, String)] = GeoMesaSparkKryoRegistrator.getTypes.map(encodeSchema)
    override def toString: String = s"getTypes()"
  }
  // Registers a schema (sent as name + spec string) on the driver
  class KryoPutTypeMessage(id: Int, name: String, spec: String) extends KryoMessage[Int] with Serializable {
    def this(id: Int, sft: SimpleFeatureType) = this(id, sft.getTypeName, encodeType(sft))
    def reply: Int = GeoMesaSparkKryoRegistrator.putType(createType(name, spec))
    override def toString: String = s"putType(id=$id, name=$name, spec=...)"
  }
  // Operations an executor may perform against the driver's schema registry
  trait KryoClient {
    def getTypes(): Seq[SimpleFeatureType]
    def getType(id: Int): Option[SimpleFeatureType]
    def putType(id: Int, schema: SimpleFeatureType): Int
  }
  // Real client used on executors: each call becomes an RPC to the driver
  protected object ExecutorKryoClient extends KryoClient {
    def getTypes(): Seq[SimpleFeatureType] = new KryoGetTypesMessage().ask().map(decodeSchema)
    def getType(id: Int): Option[SimpleFeatureType] = new KryoGetTypeMessage(id).ask()
    def putType(id: Int, sft: SimpleFeatureType): Int = new KryoPutTypeMessage(id, sft).ask()
  }
  // Used on the driver or when the endpoint is disabled: no remote calls
  protected object NoOpKryoClient extends KryoClient {
    def getTypes(): Seq[SimpleFeatureType] = Seq.empty
    def getType(id: Int) = None
    def putType(id: Int, sft: SimpleFeatureType): Int = id
  }
  // Conversions between SimpleFeatureType and its (name, spec) wire format
  implicit def encodeSchema(t: SimpleFeatureType): (String, String) = (t.getTypeName, encodeType(t))
  implicit def decodeSchema(t: (String, String)): SimpleFeatureType = createType(t._1, t._2)
  implicit def optionSchema(t: Option[(String, String)]): Option[SimpleFeatureType] = t.map(decodeSchema)
}
| MutahirKazmi/geomesa | geomesa-spark/geomesa-spark-core/src/main/scala/org/apache/spark/geomesa/GeoMesaSparkKryoRegistratorEndpoint.scala | Scala | apache-2.0 | 5,298 |
package ooyala.common.akka.metrics
import com.yammer.metrics.Metrics
import com.yammer.metrics.core.{Histogram, Meter, Gauge}
import java.util.concurrent.TimeUnit
/**
* Utility trait to make metrics creation slightly less verbose
*/
trait YammerMetrics {
  // Per-second meter registered under the mixing-in class's name
  def meter(name: String, eventType: String): Meter =
    Metrics.newMeter(getClass, name, eventType, TimeUnit.SECONDS)
  // Gauge whose by-name `metric` argument is re-evaluated on every read
  def gauge[T](name: String, metric: => T, scope: String = null): Gauge[T] =
    Metrics.newGauge(getClass, name, scope, new Gauge[T] {
      override def value(): T = metric
    })
  // Biased histogram (the `true` flag favours recent samples)
  def histogram(name: String): Histogram = Metrics.newHistogram(getClass, name, true)
}
| nachiketa-shukla/spark-jobserver | akka-app/src/ooyala.common.akka/metrics/YammerMetrics.scala | Scala | apache-2.0 | 651 |
import play.api.mvc.WithFilters
// Play global settings: wraps every request in the statsd reporting filter.
object Global extends WithFilters(new play.modules.statsd.api.StatsdFilter()) {
}
| relrod/fedakka | app/Global.scala | Scala | apache-2.0 | 115 |
// Compiler regression test (scala/scala, sd455): pattern-binding a variable to
// an alternative of two constant patterns. The odd typing (the match returns
// Any, mixing Boolean and String branches) is intentional — do not "fix".
object Const { final val VAL = 1 ; final val VAR = 2 }
import Const._
object Test {
  // Matches VAL or VAR, binding the matched constant to `v`; other values
  // fall through to the String "default".
  def test(i: Int) = i match { case v @ (VAR | VAL) => v == VAR case _ => "default" }
  def main(args: Array[String]): Unit = {
    println(test(VAR))
    println(test(VAL))
    println(test(-1))
  }
}
| scala/scala | test/files/run/sd455.scala | Scala | apache-2.0 | 286 |
package org.transkop.ast
import org.objectweb.asm.MethodVisitor
import org.objectweb.asm.Opcodes._
import org.transkop.SymbolTable
/**
 * AST node for the program entry point: emits JVM bytecode for a `main`
 * method whose body is the given statements followed by a RETURN.
 */
case class MainMethodNode(statements: List[StatementNode]) extends AbstractMethodNode {
  val methodName: String = "main"
  val arguments = Nil
  val returnsValue = false

  def generate(mv: MethodVisitor, symbolTable: SymbolTable) = {
    mv.visitCode()
    // Emit each statement's bytecode in program order.
    for (statement <- statements) {
      statement.generate(mv, symbolTable)
    }
    mv.visitInsn(RETURN)
    // Fixed, generous max stack/locals sizes, as in the original implementation.
    mv.visitMaxs(100, 100)
    mv.visitEnd()
  }
}
| dzinot/transkOP | src/main/scala/org/transkop/ast/MainMethodNode.scala | Scala | mit | 518 |
package com.example.http4s
package jetty
import javax.servlet._
import com.codahale.metrics.MetricRegistry
import org.http4s.server.ServerApp
import org.http4s.server.jetty.JettyBuilder
import org.http4s.server.metrics._
object JettyExample extends ServerApp {
  // Registry backing the metrics service mounted below
  val metrics = new MetricRegistry
  def server(args: List[String]) = JettyBuilder
    .bindHttp(8080)
    // Main example service under /http4s
    .mountService(ExampleService.service, "/http4s")
    // Codahale metrics report exposed under /metrics
    .mountService(metricsService(metrics), "/metrics/*")
    // Servlet filter demo: blocks requests to the black-knight path
    .mountFilter(NoneShallPass, "/http4s/science/black-knight/*")
    .start
}
| m4dc4p/http4s | examples/jetty/src/main/scala/com/example/http4s/jetty/JettyExample.scala | Scala | apache-2.0 | 557 |
package eva4s
import language.higherKinds
package object util {

  /**
   * Enriches any container `M[A]` with sorting, shuffling and statistics
   * helpers, each delegating to an implicitly supplied type-class instance
   * (`Sorter`, `Mixer` or `Statistics`).
   *
   * Consistency fix: adds `quadraticMeanBy`, so the quadratic mean follows the
   * same `...By` naming convention as `arithmeticMeanBy`, `geometricMeanBy`,
   * `harmonicMeanBy` and `medianBy`. The old `quadraticMean[B](f)` overload is
   * kept for backward compatibility.
   */
  implicit class MA[M[_],A](val value: M[A]) extends AnyVal {

    // ---------------------------------------------------------------------------------------------
    // sorting
    // ---------------------------------------------------------------------------------------------

    /** Sorts using the given less-than predicate. */
    def sortWith(lt: (A,A) => Boolean)(implicit sorter: Sorter[M]): M[A] =
      sorted(Ordering fromLessThan lt, sorter)

    /** Sorts by the sort key computed by `f`. */
    def sortBy[B](f: A => B)(implicit ord: Ordering[B], sorter: Sorter[M]): M[A] =
      sorted(ord on f, sorter)

    /** Sorts by the implicit ordering on the elements. */
    def sorted(implicit ord: Ordering[A], sorter: Sorter[M]): M[A] =
      sorter.sort(value)

    // ---------------------------------------------------------------------------------------------
    // shuffling
    // ---------------------------------------------------------------------------------------------

    /** Returns the elements in random order. */
    def shuffle(implicit mixer: Mixer[M]): M[A] =
      mixer.shuffle(value)

    /** Randomly chooses `n` elements. */
    def choose(n: Int)(implicit mixer: Mixer[M]): M[A] =
      mixer.choose(value, n)

    /** Randomly chooses a pair of elements. */
    def choosePair(implicit mixer: Mixer[M]): (A,A) =
      mixer.choosePair(value)

    // ---------------------------------------------------------------------------------------------
    // statistical
    // ---------------------------------------------------------------------------------------------

    def arithmeticMean(implicit stats: Statistics[M], int: Integral[A]): A =
      stats.arithmeticMean(value)

    def arithmeticMeanBy[B](f: A => B)(implicit stats: Statistics[M], int: Integral[B]): B =
      stats.arithmeticMeanBy(value)(f)

    /** Alias for [[arithmeticMean]]. */
    def average(implicit stats: Statistics[M], int: Integral[A]): A =
      stats.arithmeticMean(value)

    /** Alias for [[arithmeticMeanBy]]. */
    def averageBy[B](f: A => B)(implicit stats: Statistics[M], int: Integral[B]): B =
      stats.arithmeticMeanBy(value)(f)

    def geometricMean(implicit stats: Statistics[M], num: Numeric[A]): Double =
      stats.geometricMean(value)

    def geometricMeanBy[B](f: A => B)(implicit stats: Statistics[M], num: Numeric[B]): Double =
      stats.geometricMeanBy(value)(f)

    def harmonicMean(implicit stats: Statistics[M], int: Integral[A]): A =
      stats.harmonicMean(value)

    def harmonicMeanBy[B](f: A => B)(implicit stats: Statistics[M], int: Integral[B]): B =
      stats.harmonicMeanBy(value)(f)

    def quadraticMean(implicit stats: Statistics[M], num: Numeric[A]): Double =
      stats.quadraticMean(value)

    /** Quadratic mean of the values computed by `f`.
      * Kept for source compatibility; prefer [[quadraticMeanBy]]. */
    def quadraticMean[B](f: A => B)(implicit stats: Statistics[M], num: Numeric[B]): Double =
      stats.quadraticMeanBy(value)(f)

    /** Quadratic mean of the values computed by `f` (consistent `...By` name). */
    def quadraticMeanBy[B](f: A => B)(implicit stats: Statistics[M], num: Numeric[B]): Double =
      stats.quadraticMeanBy(value)(f)

    def median(implicit stats: Statistics[M], sorter: Sorter[M], int: Integral[A]): A =
      stats.median(value)

    def medianBy[B](f: A => B)(implicit stats: Statistics[M], sorter: Sorter[M], int: Integral[B]): B =
      stats.medianBy(value)(f)
  }

}
| wookietreiber/eva4s-old | core/main/scala/util/package.scala | Scala | gpl-3.0 | 2,840 |
import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
import org.apache.spark.mllib.feature.StandardScalerModel
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.{Before, Test}
/**
* Created by Christian on 05.06.2016.
*/
@Test
object NeuralNetworkTest {
  @Before
  def prepare(): Unit = {
    // Windows-only workaround: point Hadoop at a local install containing
    // winutils.exe. NOTE(review): machine-specific absolute path.
    System.setProperty("hadoop.home.dir", "C:\\\\Users\\\\Christian\\\\Dev\\\\hadoop-2.6.0")
  }
def scaleValue(min: Double, max: Double, value: Double): Double = {
(value - min) / max - min
}
  // Wrapper around an MLlib feature vector.
  // NOTE(review): appears unused within this test — confirm before removal.
  case class Feature(v: Vector)
  def main(args: Array[String]) {
    // Local 4-thread Spark context with Kryo serialization
    val conf = new SparkConf().setAppName("Neural_network_titanic")
    conf.set("spark.master", "local[4]")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.kryoserializer.buffer.max", "512m")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._
    val trainingDf: DataFrame = Util.getTrainingDf(sqlContext, true)
    // Standardise the numeric features with mean/stddev taken from the
    // precomputed training summary (columns 0 and 1)
    val summary = Util.summary
    val stddev = Vectors.dense(math.sqrt(summary.variance(0)), math.sqrt(summary.variance(1)))
    val mean = Vectors.dense(summary.mean(0), summary.mean(1))
    val scaler = new StandardScalerModel(stddev, mean)
    // Build (label, scaled feature vector) pairs from the training rows
    val scaledData = trainingDf.map { row =>
      (row.getAs[Int]("Survived").toDouble,
        Util.getScaledVector(row.getAs[Double]("Fare"), row.getAs[Double]("Age"), row.getAs[Int]("Pclass"), row.getAs[Int]("Sex"), row.getAs[Int]("Embarked"), scaler))
    }
    val data: DataFrame = scaledData.toDF("label", "features")
    // Topology: 10 inputs -> 1 hidden neuron -> 2 output classes.
    // NOTE(review): a single hidden neuron is unusually small — confirm intended.
    val layers = Array[Int](10, 1, 2)
    // create the trainer and set its parameters
    val trainer = new MultilayerPerceptronClassifier()
      .setLayers(layers)
      .setBlockSize(64)
      .setSeed(1234L)
      .setTol(1E-4)
      .setMaxIter(1000)
    val model = trainer.fit(data)
    //scaledData.saveAsTextFile("results/vectors")
    // Score the validation set with the same scaling and write predictions
    val validationDf: DataFrame = Util.getValidationDf(sqlContext)
    val vectors = validationDf.map { row =>
      (row.getAs[Int]("PassengerId"), Util.getScaledVector(row.getAs[Double]("Fare"), row.getAs[Double]("Age"), row.getAs[Int]("Pclass"), row.getAs[Int]("Sex"), row.getAs[Int]("Embarked"), scaler))
    }.toDF("PassengerId", "features")
    val predicted: DataFrame = model.transform(vectors)
    Util.saveResult("NeuralNetwork", sqlContext, predicted.select(predicted("PassengerId"), predicted("prediction").alias("Survived").cast(IntegerType)).rdd)
    //    predicted.write.format("com.databricks.spark.csv")
    //      .option("header", "true") // Use first line of all files as header
    //      .option("inferSchema", "true") // Automatically infer data types
    //      .save("results/NeuralNetwork_" + System.currentTimeMillis())
  }
} | iXeption/spark-titanic | src/test/scala/NeuralNetworkTest.scala | Scala | apache-2.0 | 2,952 |
package services.support
import akka.pattern.FutureTimeoutSupport
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfter, FunSuiteLike}
import org.specs2.mock.Mockito
import play.api.inject._
import play.api.inject.guice.GuiceApplicationBuilder
import services.EnvironmentHelper
/**
 * Shared base for unit tests: combines ScalaTest's FunSuite style with
 * specs2 mocking support, before/after hooks and future timeout helpers.
 */
trait TestBase extends FunSuiteLike with Mockito with BeforeAndAfter with FutureTimeoutSupport {

  /** Builds a Guice application whose [[EnvironmentHelper]] reports test mode. */
  def getTestGuiceApplicationBuilder = {
    val testEnvironment = mock[EnvironmentHelper]
    when(testEnvironment.isInTest).thenReturn(true)
    val testBinding = bind[EnvironmentHelper].toInstance(testEnvironment)
    new GuiceApplicationBuilder().overrides(testBinding)
  }
}
| phdezann/connectus | connectus-backend/test/services/support/TestBase.scala | Scala | mit | 640 |
package sylvestris.example.service
import akka.actor.{ Actor, ActorSystem, Props }
import spray.can.Http
import akka.io.IO
import scalaz.EitherT
import sylvestris._, core._, Graph._, example.model._
/** Example entry point: seeds an in-memory graph with sample data and
  * starts the spray HTTP service for the node routes.
  */
object boot {
  // Builds the sample data set: three organizations, one customer, and the
  // relationships between them. `.run` unwraps the resulting EitherT into a
  // graph state action executed later by `populate.run(...)`.
  def populate = {
    for {
      o1 <- addNode(Node[Organization](Id("org1"), Organization("Org 1")))
      o2 <- addNode(Node[Organization](Id("org2"), Organization("Org 2")))
      o3 <- addNode(Node[Organization](Id("org3"), Organization("Org 3")))
      c <- addNode(Node[Customer](Id("cust1"), Customer("Dave Corp.")))
      _ <- o1.toOne[Customer](Some(c))
      _ <- o2.tree.children(Set(o1))
      _ <- o3.tree.parent(Option(o1))
    }
    yield {}
  }.run
  def main(args: Array[String]): Unit = {
    // Populate a fresh in-memory graph, then bind the service on port 8080.
    populate.run(InMemoryGraph())
    implicit val actorSystem = ActorSystem("service")
    val service = actorSystem.actorOf(
      Props(classOf[ServiceActor], NodeRoutes.nodeRoutes, NodeRoutes.nodeWithRelationshipsOps))
    IO(Http) ! Http.Bind(service, interface = "0.0.0.0", port = 8080)
  }
}
/*
*
* For toMany relationships
* def apply[T](nodes: Set[T]) = clears if empty, else replaces
*
* For toOne relationships
* def apply[T](nodes: Option[T]) = clears if empty, else replaces
*
* To relationships:
* * OneToOne
* * OneToMany
* * ManyToOne
* * ManyToMany
*
* Operations:
*
* Update
* Replace
* AddOrUpdate
* Remove
*
*
*/
| drostron/sylvestris | example/src/main/scala/sylvestris/example/service/boot.scala | Scala | mit | 1,366 |
package com.sksamuel.elastic4s.requests.searches.aggs.builders
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s.requests.searches.aggs.{AggMetaDataFn, NestedAggregation, SubAggsBuilderFn}
/** Renders a [[NestedAggregation]] into the JSON body expected by Elasticsearch. */
object NestedAggregationBuilder {

  def apply(agg: NestedAggregation): XContentBuilder = {
    val json = XContentFactory.obj().startObject("nested")
    json.field("path", agg.path).endObject()
    // Append any sub-aggregations and metadata after the nested block.
    SubAggsBuilderFn(agg, json)
    AggMetaDataFn(agg, json)
    json
  }
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/builders/NestedAggregationBuilder.scala | Scala | apache-2.0 | 539 |
/**********************************************************************************************************************
* This file is part of Scrupal, a Scalable Reactive Web Application Framework for Content Management *
* *
* Copyright (c) 2015, Reactific Software LLC. All Rights Reserved. *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed *
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for *
* the specific language governing permissions and limitations under the License. *
**********************************************************************************************************************/
package scrupal.utils
import org.specs2.mutable.Specification
/** Created by reid on 11/11/14.
*/
// Placeholder specification for ClassHelpers; no behaviour is asserted yet.
class ClassHelpersSpec extends Specification {
  "ClassHelpers" should {
    "perform some tests" in {
      pending // marked pending until real assertions are written
    }
  }
}
| scrupal/scrupal | scrupal-utils/src/test/scala/scrupal/utils/ClassHelpersSpec.scala | Scala | apache-2.0 | 1,914 |
package net.hasor.little.db
import java.io.{InputStreamReader, BufferedReader}
import java.net.Socket
import java.util.Map
import com.alibaba.fastjson.JSON
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/**
 * Custom Spark Streaming receiver that reads newline-delimited UTF-8 records
 * from a TCP socket at `host`:`port` and stores each line into Spark.
 */
class UserDataCustomReceiver(host: String, port: Int)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) {
  //
  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Socket Receiver") {
      override def run() {
        receive()
      }
    }.start()
  }

  def onStop() {
    // There is nothing much to do as the thread calling receive()
    // is designed to stop by itself isStopped() returns false
  }

  /** Create a socket connection and receive data until receiver is stopped */
  private def receive() {
    import scala.util.control.NonFatal
    var socket: Socket = null
    var userInput: String = null
    try {
      // Connect to host:port
      socket = new Socket(host, port)
      val reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"))
      try {
        // Until stopped or connection broken continue reading
        userInput = reader.readLine()
        while (!isStopped && userInput != null) {
          store(userInput)
          userInput = reader.readLine()
        }
      } finally {
        // Fix: previously the reader was only closed on the success path,
        // leaking it (and the socket stream) when an exception was thrown.
        reader.close()
      }
      // Restart in an attempt to connect again when server is active again
      restart("Trying to connect again")
    } catch {
      case e: java.net.ConnectException =>
        // restart if could not connect to server
        restart("Error connecting to " + host + ":" + port, e)
      case t: Throwable =>
        // restart if there is any other error
        restart("Error receiving data", t)
    } finally {
      // Always release the socket; a second close after reader.close() is a
      // harmless no-op, and close failures must not mask the real error.
      if (socket != null) {
        try socket.close() catch { case NonFatal(_) => () }
      }
    }
  }
} | zycgit/configuration | hasor-garbage/demo-all/demo-Scala/src/main/scala/net/hasor/little/db/UserDataCustomReceiver.scala | Scala | apache-2.0 | 1,920 |
import sbt._
// sbt (0.7.x-style) plugin definition for this build.
class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
  // sbt-eclipsify: generates Eclipse project files.
  lazy val eclipse = "de.element34" % "sbt-eclipsify" % "0.6.1"
  // sbt-proguard-plugin: ProGuard shrinking/obfuscation of the packaged artifact.
  val proguard = "org.scala-tools.sbt" % "sbt-proguard-plugin" % "0.0.5"
}
| spendchart/banksync | project/plugins/Plugins.scala | Scala | apache-2.0 | 219 |
package com.pwootage.sor1k.registers
import com.pwootage.sor1k.IllegalSRStateException
/**
 * A special-purpose register fixed at construction time: reads always yield
 * the configured value and any write attempt raises an
 * [[IllegalSRStateException]].
 */
class ReadOnlySPR(val value: Int) extends SpecialPurposeRegister {
  /** Returns the immutable register value. */
  def get: Int = value

  /** Writing is not permitted on a read-only SPR. */
  def set(v: Int): Unit = throw new IllegalSRStateException("Attempted to write to read-only SPR")
}
| Pwootage/sor1k | src/main/scala/com/pwootage/sor1k/registers/ReadOnlySPR.scala | Scala | mit | 301 |
/*
* Copyright (c) 2012-2017 by its authors. Some rights reserved.
* See the project homepage at: https://github.com/monix/shade
*
* Licensed under the MIT License (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy
* of the License at:
*
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
package shade.tests
import org.scalatest.FunSuite
import shade.inmemory.InMemoryCache
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
/**
 * Behavioural suite for [[InMemoryCache]]: basic get/set/add/delete,
 * compare-and-set and transform operations, per-key expiration, and the
 * background maintenance task. Several tests sleep past a one-second TTL,
 * so this suite is timing-sensitive.
 */
class InMemoryCacheVer2Suite extends FunSuite {
  test("get(), set()") {
    withInstance { cache =>
      assert(cache.get[String]("hello") === None)
      cache.set("hello", "world")
      assert(cache.get[String]("hello") === Some("world"))
    }
  }
  test("add()") {
    withInstance { cache =>
      assert(cache.get[String]("hello") === None)
      assert(cache.add("hello", "world"), "value should be added successfully")
      assert(cache.get[String]("hello") === Some("world"))
      // add() must not overwrite an existing key; set() does.
      assert(!cache.add("hello", "world version 2"), "value already exists")
      assert(cache.get[String]("hello") === Some("world"))
      cache.set("hello", "world version 2")
      assert(cache.get[String]("hello") === Some("world version 2"))
    }
  }
  test("getOrElse()") {
    withInstance { cache =>
      assert(cache.getOrElse("hello", "default") === "default")
      cache.set("hello", "world")
      assert(cache.getOrElse("hello", "world") === "world")
    }
  }
  test("delete()") {
    withInstance { cache =>
      assert(cache.get[String]("hello") === None)
      cache.set("hello", "world")
      assert(cache.get[String]("hello") === Some("world"))
      assert(cache.delete("hello"), "item should be deleted")
      assert(cache.get[String]("hello") === None)
      assert(!cache.delete("hello"), "item should not be there anymore")
    }
  }
  test("cachedFuture()") {
    withInstance { cache =>
      assert(cache.get[String]("hello") === None)
      // The slow future should be computed once and served from cache after.
      def future() = cache.cachedFuture("hello", 1.minute) {
        Future {
          Thread.sleep(1000)
          "world"
        }
      }
      for (idx <- 0 until 10000)
        assert(Await.result(future(), 4.seconds) === "world")
    }
  }
  test("compareAndSet()") {
    withInstance { cache =>
      assert(cache.compareAndSet("hello", None, "world"), "first CAS should succeed")
      assert(cache.compareAndSet("hello", Some("world"), "world updated"), "second CAS should succeed")
      assert(cache.get[String]("hello") === Some("world updated"))
      // CAS with a stale expected value must fail.
      assert(!cache.compareAndSet("hello", Some("bollocks"), "world"), "third CAS should fail")
    }
  }
  test("transformAndGet() (with expiry)") {
    withInstance { cache =>
      def incr() = cache.transformAndGet[Int]("number", 1.second) {
        case Some(nr) => nr + 1
        case None => 0
      }
      for (idx <- 0 until 100)
        assert(incr() === idx)
      // After the TTL elapses the counter starts over from the None branch.
      Thread.sleep(1000)
      assert(incr() === 0)
    }
  }
  test("getAndTransform() (with expiry)") {
    withInstance { cache =>
      def incr() = cache.getAndTransform[Int]("number", 1.second) {
        case Some(nr) => nr + 1
        case None => 1
      }
      // getAndTransform returns the previous value, hence None on first call.
      for (idx <- 0 until 100)
        if (idx == 0)
          assert(incr() === None)
        else
          assert(incr() === Some(idx))
      Thread.sleep(1000)
      assert(incr() === None)
    }
  }
  test("add() expiration") {
    withInstance { cache =>
      assert(cache.add("hello", "world", 1.second), "add() should work")
      assert(cache.get[String]("hello") === Some("world"))
      Thread.sleep(1000)
      assert(cache.get[String]("hello") === None)
    }
  }
  test("set() expiration") {
    withInstance { cache =>
      cache.set("hello", "world", 1.second)
      assert(cache.get[String]("hello") === Some("world"))
      Thread.sleep(1000)
      assert(cache.get[String]("hello") === None)
    }
  }
  test("delete() expiration") {
    withInstance { cache =>
      cache.set("hello", "world", 1.second)
      assert(cache.get[String]("hello") === Some("world"))
      Thread.sleep(1000)
      assert(!cache.delete("hello"), "delete() should return false")
    }
  }
  test("cachedFuture() expiration") {
    withInstance { cache =>
      val result = Await.result(cache.cachedFuture("hello", 1.second) { Future("world") }, 1.second)
      assert(result === "world")
      val size = cache.realSize
      assert(size === 1)
      Thread.sleep(1000)
      assert(cache.get[String]("hello") === None)
    }
  }
  test("compareAndSet() expiration") {
    withInstance { cache =>
      assert(cache.compareAndSet("hello", None, "world", 1.second), "CAS should succeed")
      assert(cache.get[String]("hello") === Some("world"))
      Thread.sleep(1000)
      assert(cache.get[String]("hello") === None)
    }
  }
  test("maintenance / scheduler") {
    withInstance { cache =>
      // Verifies the periodic maintenance run evicts expired entries and
      // fires on its expected cadence (~3-7s windows).
      val startTS = System.currentTimeMillis()
      cache.set("hello", "world", 1.second)
      cache.set("hello2", "world2")
      assert(cache.realSize === 2)
      val diff = Await.result(cache.maintenance, 20.seconds)
      val m1ts = System.currentTimeMillis()
      assert(diff === 1)
      assert(cache.realSize === 1)
      val timeWindow1 = math.round((m1ts - startTS) / 1000.0)
      assert(timeWindow1 >= 3 && timeWindow1 <= 7, "scheduler should run at no less than 3 secs and no more than 7 secs")
      val diff2 = Await.result(cache.maintenance, 20.seconds)
      val m2ts = System.currentTimeMillis()
      assert(diff2 === 0)
      assert(cache.realSize === 1)
      val timeWindow2 = math.round((m2ts - m1ts) / 1000.0)
      assert(timeWindow2 >= 3 && timeWindow2 <= 7, "scheduler should run at no less than 3 secs and no more than 7 secs")
    }
  }
  // Runs `cb` against a fresh cache instance and always closes it afterwards.
  def withInstance[T](cb: InMemoryCache => T) = {
    val instance = InMemoryCache(global)
    try cb(instance) finally {
      instance.close()
    }
  }
}
| zaneli/shade | src/test/scala/shade/tests/InMemoryCacheVer2Suite.scala | Scala | mit | 6,006 |
package org.openurp.edu.eams.teach.program.majorapply.service
import org.beangle.security.blueprint.User
import org.openurp.edu.eams.teach.program.majorapply.exception.MajorPlanAuditException
import org.openurp.edu.eams.teach.program.majorapply.model.MajorCourseGroupModifyBean
//remove if not needed
// Audit workflow for major-course-group modification requests.
trait MajorCourseGroupModifyAuditService {

  /** Records that `assessor` approved the given modification request. */
  def approved(apply: MajorCourseGroupModifyBean, assessor: User): Unit

  /** Records that `assessor` rejected the given modification request. */
  def rejected(apply: MajorCourseGroupModifyBean, assessor: User): Unit
}
| openurp/edu-eams-webapp | plan/src/main/scala/org/openurp/edu/eams/teach/program/majorapply/service/MajorPlanCourseGroupModifyAuditService.scala | Scala | gpl-3.0 | 495 |
package com.crealytics.spark.excel
import java.sql.Timestamp
import java.text.SimpleDateFormat
import com.norbitltd.spoiwo.natures.xlsx.Model2XlsxConversions._
import com.norbitltd.spoiwo.model._
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.DataFrame
import scala.collection.JavaConverters._
// Defaults used by ExcelFileSaver.save when the caller does not override them.
object ExcelFileSaver {
  final val DEFAULT_SHEET_NAME = "Sheet1"
  // Excel number-format pattern applied to java.sql.Date cells.
  final val DEFAULT_DATE_FORMAT = "yy-m-d h:mm"
  // Excel number-format pattern applied to java.sql.Timestamp cells.
  final val DEFAULT_TIMESTAMP_FORMAT = "yyyy-mm-dd hh:mm:ss.000"
}
/** Writes a DataFrame as a single-sheet XLSX workbook onto a Hadoop FileSystem. */
class ExcelFileSaver(fs: FileSystem) {
  import ExcelFileSaver._

  /**
   * Saves `dataFrame` at `location` as one XLSX sheet.
   *
   * @param location        target path on the configured FileSystem
   * @param dataFrame       data to write; rows are streamed to the driver
   * @param sheetName       name of the single sheet
   * @param useHeader       whether to prepend a row of column names
   * @param dateFormat      Excel format applied to date cells
   * @param timestampFormat Excel format applied to timestamp cells
   */
  def save(
    location: Path,
    dataFrame: DataFrame,
    sheetName: String = DEFAULT_SHEET_NAME,
    useHeader: Boolean = true,
    dateFormat: String = DEFAULT_DATE_FORMAT,
    timestampFormat: String = DEFAULT_TIMESTAMP_FORMAT
  ): Unit = {
    val headerRow = Row(dataFrame.schema.fields.map(f => Cell(f.name)))
    // toLocalIterator pulls one partition at a time to the driver; the whole
    // data set is still materialized here before writing.
    val dataRows = dataFrame.toLocalIterator().asScala.map { row =>
      Row(row.toSeq.map(toCell(_, dateFormat, timestampFormat)))
    }.toList
    val rows = if (useHeader) headerRow :: dataRows else dataRows
    val workbook = Sheet(name = sheetName, rows = rows).convertAsXlsx
    val outputStream = fs.create(location)
    try {
      workbook.write(outputStream)
      outputStream.hflush()
    } finally {
      // Fix: previously the stream leaked when write()/hflush() threw.
      outputStream.close()
    }
  }

  /** Builds a date-styled cell from epoch millis rendered with `format`. */
  def dateCell(time: Long, format: String): Cell = {
    Cell(new java.util.Date(time), style = CellStyle(dataFormat = CellDataFormat(format)))
  }

  /**
   * Converts a single DataFrame value into a spreadsheet cell.
   * Unsupported types (e.g. BigDecimal from DecimalType) still throw a
   * MatchError; extend the match if such schemas must be supported.
   */
  def toCell(a: Any, dateFormat: String, timestampFormat: String): Cell = a match {
    case t: java.sql.Timestamp => dateCell(t.getTime, timestampFormat)
    case d: java.sql.Date => dateCell(d.getTime, dateFormat)
    case s: String => Cell(s)
    case d: Double => Cell(d)
    case f: Float => Cell(f.toDouble) // FloatType columns previously crashed
    case b: Boolean => Cell(b)
    case b: Byte => Cell(b.toInt)
    case s: Short => Cell(s.toInt)
    case i: Int => Cell(i)
    case l: Long => Cell(l)
    case null => Cell.Empty
  }
}
| 3Dragan/spark-excel | src/main/scala/com/crealytics/spark/excel/ExcelFileSaver.scala | Scala | apache-2.0 | 1,923 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.contrib.activity.file
import java.io.{File, FilenameFilter}
import java.nio.file._
import com.krux.hyperion.contrib.activity.file.CompressionFormat.CompressionFormat
import scopt.OptionParser
/**
 * Hyperion activity entry point that repartitions a set of input files into
 * a target number of files, lines per file, or bytes per file, optionally
 * compressing the output and adding headers. CLI options fall back to the
 * INPUT*_STAGING_DIR / OUTPUT*_STAGING_DIR environment variables.
 */
object RepartitionFile {
  /** Wraps a possibly-null path string into an Option[File]. */
  def stringToOptionalFile(s: String): Option[File] = Option(s).map(Paths.get(_)).map(_.toFile)
  /** Defaults the temporary directory to $TMPDIR, falling back to /tmp. */
  def applyDefaultTemporaryDirectory(options: Options): Options =
    options.copy(temporaryDirectory = options.temporaryDirectory
      .orElse(stringToOptionalFile(System.getenv("TMPDIR")))
      .orElse(stringToOptionalFile("/tmp"))
    )
  /** A single output file needs no numeric suffix. */
  def applyDefaultNumberOfFilesCalculation(options: Options): Options =
    options.numberOfFiles.map { numberOfFiles =>
      if (numberOfFiles == 1) {
        options.copy(suffixLength = 0)
      } else {
        options
      }
    }.getOrElse(options)
  /** Expands input directories into matching files and drops unreadable entries. */
  def applyDefaultFileChecks(options: Options): Options = options.copy(inputs = options.inputs.flatMap { k =>
    if (k.getName == "-") {
      Seq(k)
    } else if (k.isDirectory) {
      k.listFiles(new FilenameFilter {
        val matcher = options.pattern.map(pattern => FileSystems.getDefault.getPathMatcher(s"glob:$pattern"))
        override def accept(dir: File, name: String): Boolean =
          !name.startsWith("_") && matcher.forall(_.matches(Paths.get(name)))
      }).toSeq
    } else if (k.isFile) {
      if (k.canRead) {
        Seq(k.getAbsoluteFile)
      } else {
        System.err.println(s"ERROR: Cannot read $k")
        Seq()
      }
    } else {
      System.err.println(s"ERROR: Cannot access $k")
      Seq()
    }
  })
  /** If no split criterion was given at all, default to a single output file. */
  def applyDefaultNumberOfFiles(options: Options): Options =
    if (options.numberOfFiles.isEmpty && options.numberOfLinesPerFile.isEmpty && options.numberOfBytesPerFile.isEmpty) {
      options.copy(numberOfFiles = Option(1))
    } else {
      options
    }
  /** Defaults outputs to $OUTPUT1..10_STAGING_DIR, else the current directory. */
  def applyDefaultDirectory(options: Options): Options =
    if (options.outputDirectory.isEmpty) {
      if (System.getenv().containsKey("OUTPUT1_STAGING_DIR")) {
        options.copy(outputDirectory = options.outputDirectory ++ (1 until 11).flatMap(n => stringToOptionalFile(System.getenv(s"OUTPUT${n}_STAGING_DIR"))))
      } else {
        options.copy(outputDirectory = options.outputDirectory ++ stringToOptionalFile(System.getProperty("user.dir")))
      }
    } else {
      options
    }
  /** Defaults inputs to $INPUT1..10_STAGING_DIR when none were supplied. */
  def applyDefaultInputs(options: Options): Options =
    if (options.inputs.isEmpty) {
      if (System.getenv().containsKey("INPUT1_STAGING_DIR")) {
        options.copy(inputs = options.inputs ++ (1 until 11).flatMap(n => stringToOptionalFile(System.getenv(s"INPUT${n}_STAGING_DIR"))))
      } else {
        options
      }
    } else {
      options
    }
  /** Appends the compression extension to the output name when compression is on. */
  def applyDefaultCompression(options: Options): Options =
    if (options.compressed && options.compressionFormat == CompressionFormat.GZ)
      options.copy(output = s"${options.output}.gz")
    else if (options.compressed && options.compressionFormat == CompressionFormat.BZ2)
      options.copy(output = s"${options.output}.bz2")
    else
      options
  /** Applies all defaulting steps in order; later steps see earlier results. */
  def applyDefaults(options: Options): Options =
    Seq(
      applyDefaultTemporaryDirectory _,
      applyDefaultInputs _,
      applyDefaultDirectory _,
      applyDefaultNumberOfFiles _,
      applyDefaultFileChecks _,
      applyDefaultNumberOfFilesCalculation _,
      applyDefaultCompression _
    ).foldLeft(options)((acc, handler) => handler(acc))
  /** Validates the fully-defaulted options; None aborts the run. */
  def checkOptions(options: Options): Option[Options] = if (options.inputs.isEmpty && !options.ignoreEmptyInput) {
    System.err.println("ERROR: No inputs specified.")
    None
  } else if (options.outputDirectory.isEmpty) {
    System.err.println("ERROR: No outputs specified.")
    None
  } else {
    Option(options)
  }
  def main(args: Array[String]): Unit = {
    // scopt CLI definition; see the embedded notes for environment fallbacks.
    val parser = new OptionParser[Options](s"hyperion-file-repartition-activity") {
      override def showUsageOnError = Option(false)
      implicit val compressionFormatRead: scopt.Read[CompressionFormat.Value] =
        scopt.Read.reads(CompressionFormat withName _)
      note(
        """Repartitions a set of files into either a given number of files, lines per file or bytes per file.
          |Options including compressing the output and adding header to each file.
        """.stripMargin)
      help("help").text("prints this usage text")
      opt[Unit]('z', "compressed").optional().action((_, c) => c.copy(compressed = true))
        .text("gzip the output file (ext will have .gz added at the end")
      opt[Unit]("skip-first-line").optional().action((_, c) => c.copy(skipFirstLine = true))
        .text("assume the input files have a header in the first line and skip it")
      opt[Unit]('L', "link").optional().action((_, c) => c.copy(link = true))
        .text("link the output files instead of copying into position")
      opt[Unit]("ignore-empty-input").optional().action((_, c) => c.copy(ignoreEmptyInput = true))
        .text("Ignores empty inputs")
      opt[Unit]("mark-successful-jobs").optional().action((_, c) => c.copy(markSuccessfulJobs = true))
        .text("Creates a _SUCCESS file to mark the successful completion of the job")
      opt[String]('H', "header").valueName("HEAD").optional().action((x, c) => c.copy(header = Option(s"$x\\n")))
        .text("prepend header HEAD to each file")
      opt[Int]('a', "suffix-length").valueName("N").optional().action((x, c) => c.copy(suffixLength = x))
        .text("use suffixes of length N (default: 5)").validate(x => if (1 <= x && x <= 10) success else failure("Suffix length must be between 1 and 10"))
      opt[Int]('n', "files").valueName("N").optional().action((x, c) => c.copy(numberOfFiles = Option(x)))
        .text("create N of files of roughly equal size").validate(x => if (x > 0) success else failure("Files must be positive"))
      opt[Long]('l', "lines").valueName("N").optional().action((x, c) => c.copy(numberOfLinesPerFile = Option(x)))
        .text("create smaller files than N number of lines").validate(x => if (x > 0) success else failure("Lines must be positive"))
      opt[String]('C', "line-bytes").valueName("N").optional().action((x, c) => c.copy(numberOfBytesPerFile = Option(StorageUnit.parse(x))))
        .text("create smaller files than N number of bytes")
      opt[String]('S', "buffer-size").valueName("N").optional().action((x, c) => c.copy(bufferSize = StorageUnit.parse(x)))
        .text("use N bytes for main memory buffer (default: 8192)")
      opt[File]('i', "input").valueName("PATH").optional().unbounded().action((x, c) => c.copy(inputs = c.inputs :+ x))
        .text("Use PATH as input. If PATH is a directory, then all files within the directory are used as inputs.")
      opt[File]('o', "output").valueName("DIR").optional().unbounded().action((x, c) => c.copy(outputDirectory = c.outputDirectory :+ x))
        .text("create the output files in DIR").validate(x => if (x.exists) success else failure("Directory must exist"))
      opt[File]('T', "temporary-directory").valueName("DIR").optional().action((x, c) => c.copy(temporaryDirectory = Option(x)))
        .text(s"use DIR for temporaries, not $$TMPDIR or /tmp").validate(x => if (x.exists) success else failure("Directory must exist"))
      opt[String]("name").valueName("PATTERN").optional().action((x, c) => c.copy(pattern = Option(x)))
        .text("Base of input file names (the path with the leading directories removed) matches shell pattern PATTERN.")
      arg[String]("NAME").required().action((x, c) => c.copy(output = x))
        .text("use NAME for the output filename. The actual files will have suffixes of suffix-length")
      opt[CompressionFormat]('k', "compressionFormat").optional().action((x, c) => c.copy(compressionFormat = x))
        .text("specify the compression format required for merging and splitting files")
      note(s"\\nIf --input PATH is not specified, then directories specified by $${INPUT1_STAGING_DIR}..$${INPUT10_STAGING_DIR} are searched for files.\\n")
      note(s"If --output PATH is not specified, then directories specified by $${OUTPUT1_STAGING_DIR}..$${OUTPUT10_STAGING_DIR} are used.")
      note("If those directories are not specified, then the current directory is used.")
      checkConfig { c =>
        // The three split criteria are mutually exclusive.
        if (c.numberOfLinesPerFile.nonEmpty && c.numberOfBytesPerFile.nonEmpty) {
          failure("cannot specify both number of lines and number of bytes")
        } else if (c.numberOfFiles.nonEmpty && (c.numberOfLinesPerFile.nonEmpty || c.numberOfBytesPerFile.nonEmpty)) {
          failure("cannot specify both number of files and number of lines/bytes")
        } else {
          success
        }
      }
    }
    // Exit non-zero when parsing, validation or the repartition itself fails.
    if (
      !parser
        .parse(args, Options())
        .map(applyDefaults)
        .flatMap(checkOptions)
        .exists(FileRepartitioner(_).repartition())
    ) {
      System.exit(3)
    }
  }
}
| realstraw/hyperion | contrib/activity/file/src/main/scala/com/krux/hyperion/contrib/activity/file/RepartitionFile.scala | Scala | bsd-3-clause | 9,121 |
package com.stovokor.editor.state
import com.jme3.app.Application
import com.jme3.app.state.AppState
import com.jme3.app.state.AppStateManager
import com.jme3.scene.Spatial.CullHint
import com.stovokor.util.EditModeSwitch
import com.stovokor.util.EditorEvent
import com.stovokor.util.EditorEventListener
import com.stovokor.util.EventBus
import com.stovokor.util.SelectionChange
import com.stovokor.editor.input.Modes.EditMode
import com.simsilica.lemur.FillMode
import com.stovokor.editor.input.InputFunction
import com.simsilica.lemur.input.FunctionId
import com.simsilica.lemur.input.StateFunctionListener
import com.simsilica.lemur.input.InputState
import com.stovokor.util.SelectionModeSwitch
import com.stovokor.editor.input.Modes.SelectionMode
/**
 * App state coordinating the editor's 2D edit modes (select, draw, fill).
 * Listens for EditModeSwitch events and key bindings, attaching/detaching
 * the mode-specific app states on every mode change.
 */
class Edit2DModeState extends BaseState
    with EditorEventListener
    with CanMapInput
    with StateFunctionListener {

  // Mode currently in effect; the editor starts in selection mode.
  var modeKey = EditMode.Select

  // Strategy per mode: `enter` attaches the mode's app states, `exit` detaches them.
  val modes: Map[EditMode, EditModeStrategy] = Map(
    (EditMode.Select, SelectingMode),
    (EditMode.Draw, DrawingMode),
    (EditMode.Fill, FillMode))

  /** Strategy of the currently active mode. */
  def mode = modes(modeKey)

  override def initialize(stateManager: AppStateManager, simpleApp: Application) {
    super.initialize(stateManager, simpleApp)
    EventBus.subscribeByType(this, classOf[EditModeSwitch])
    // Make sure no drawing state is left attached, then enter the default mode.
    modes(EditMode.Draw).exit
    mode.enter
    setupInput
  }

  override def cleanup() {
    super.cleanup
    EventBus.removeFromAll(this)
    // Unregister every listener added in setupInput. Fix: the `cancel`
    // listener was registered there but never removed here, leaking this
    // state in the input mapper after detach.
    inputMapper.removeStateListener(this, InputFunction.cancel)
    inputMapper.removeStateListener(this, InputFunction.clickKey)
    inputMapper.removeStateListener(this, InputFunction.insertMode)
    inputMapper.removeStateListener(this, InputFunction.selectPoints)
    inputMapper.removeStateListener(this, InputFunction.selectLines)
    inputMapper.removeStateListener(this, InputFunction.selectSectors)
    inputMapper.removeStateListener(this, InputFunction.fillMode)
  }

  def setupInput {
    inputMapper.addStateListener(this, InputFunction.cancel)
    inputMapper.addStateListener(this, InputFunction.clickKey)
    inputMapper.addStateListener(this, InputFunction.insertMode)
    inputMapper.addStateListener(this, InputFunction.selectPoints)
    inputMapper.addStateListener(this, InputFunction.selectLines)
    inputMapper.addStateListener(this, InputFunction.selectSectors)
    inputMapper.addStateListener(this, InputFunction.fillMode)
    inputMapper.activateGroup(InputFunction.general)
  }

  /** Maps key presses (key-down only) to mode/selection switch events. */
  def valueChanged(func: FunctionId, value: InputState, tpf: Double) {
    if (value == InputState.Positive) func match {
      case InputFunction.insertMode => {
        EventBus.trigger(EditModeSwitch(EditMode.Draw))
        EventBus.trigger(SelectionModeSwitch(SelectionMode.None))
      }
      case InputFunction.fillMode => {
        EventBus.trigger(EditModeSwitch(EditMode.Fill))
        EventBus.trigger(SelectionModeSwitch(SelectionMode.None))
      }
      case InputFunction.selectPoints => {
        EventBus.trigger(EditModeSwitch(EditMode.Select))
        EventBus.trigger(SelectionModeSwitch(SelectionMode.Point))
      }
      case InputFunction.selectLines => {
        EventBus.trigger(EditModeSwitch(EditMode.Select))
        EventBus.trigger(SelectionModeSwitch(SelectionMode.Line))
      }
      case InputFunction.selectSectors => {
        EventBus.trigger(EditModeSwitch(EditMode.Select))
        EventBus.trigger(SelectionModeSwitch(SelectionMode.Sector))
      }
      case _ =>
    }
  }

  def onEvent(event: EditorEvent) = event match {
    case EditModeSwitch(m) => setMode(m)
    case _ =>
  }

  /** Switches modes: exits the old one, clears the selection, enters the new one. */
  def setMode(newMode: EditMode) {
    if (newMode != modeKey) {
      println(s"edit mode $newMode")
      mode.exit
      EventBus.trigger(SelectionChange(Set()))
      modeKey = newMode
      mode.enter
    }
  }

  // Lifecycle of a mode: `enter` when it becomes active, `exit` when it stops.
  abstract class EditModeStrategy(val id: String) {
    def enter
    def exit
  }

  object SelectingMode extends EditModeStrategy("selecting") {
    def exit {
      println("exiting selection")
      disableStates(classOf[SelectionState], classOf[ModifyingState])
      removeStates(classOf[SelectionState], classOf[ModifyingState])
    }
    def enter {
      println("entering selection")
      stateManager.attach(new SelectionState)
      stateManager.attach(new ModifyingState)
    }
  }

  object DrawingMode extends EditModeStrategy("drawing") {
    def exit {
      println("exiting drawing")
      disableStates(classOf[DrawingState])
      removeStates(classOf[DrawingState])
    }
    def enter {
      println("entering drawing")
      stateManager.attach(new DrawingState)
    }
  }

  // Note: intentionally shadows com.simsilica.lemur.FillMode inside this class.
  object FillMode extends EditModeStrategy("fill") {
    def exit {
      println("exiting fill mode")
      disableStates(classOf[FillHoleState])
      removeStates(classOf[FillHoleState])
    }
    def enter {
      println("entering fill mode")
      stateManager.attach(new FillHoleState)
    }
  }
} | jcfandino/leveleditor | src/main/scala/com/stovokor/editor/state/Edit2DModeState.scala | Scala | bsd-3-clause | 4,838 |
package org.elastic.rest.scala.driver.json.tests
import io.circe._
import io.circe.parser.parse
import org.elastic.rest.scala.driver.RestBase
import org.elastic.rest.scala.driver.RestBase._
import org.elastic.rest.scala.driver.RestBaseImplicits._
import org.elastic.rest.scala.driver.RestResources._
import org.elastic.rest.scala.driver.utils.MockRestDriver
import org.elastic.rest.scala.driver.json.CirceJsonModule._
import org.elastic.rest.scala.driver.json.flexible_typing.CirceTypeModule._
import utest._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/** Register all concrete output types here, note has to be at the top of the file */
object ConcreteTypes {
  // Implicit registrations that make these response types visible to the REST macros.
  implicit val RegisterTestRead = new RegisterType[TestDataModel.TestRead] {}
  implicit val RegisterTestWrapperRead = new RegisterType[TestDataModel.TestWrapperRead] {}
}
/** Exercises the typed CIRCE (de)serialization paths against a mock REST driver. */
object CirceTypeModuleTests extends TestSuite {
  val tests = this {
    // Mock driver responses for the macro-derived `/typed` resource.
    val macroHandler: PartialFunction[BaseDriverOp, Future[String]] = {
      case BaseDriverOp(TestApiTyped.`/typed`(), RestBase.PUT,
        Some("""{"testWrite":"write"}"""), List(), List()) =>
        Future.successful("""{ "test": "written" }""")
      case BaseDriverOp(TestApiTyped.`/typed`(), RestBase.GET, _, List(), List()) =>
        Future.successful("""{ "testRead": "get" }""")
      case x @ _ =>
        Future.failed(new Exception(s"Unexpected request: $x"))
    }
    // Mock driver responses for the custom-typed `/custom_typed` resource.
    val customHandler: PartialFunction[BaseDriverOp, Future[String]] = {
      case BaseDriverOp(TestApiTyped.`/custom_typed`(), RestBase.PUT,
        Some("""{"testWrite":"write"}"""), List(), List()) =>
        Future.successful("""{ "test": "written" }""")
      case BaseDriverOp(TestApiTyped.`/custom_typed`(), RestBase.GET, _, List(), List()) =>
        Future.successful("""{ "testRead": "get" }""")
      case x @ _ =>
        Future.failed(new Exception(s"Unexpected request: $x"))
    }
    "Test macro version of typed (read)" - {
      implicit val mockDriver = new MockRestDriver(macroHandler)
      import ConcreteTypes._
      TestApiTyped.`/typed`().read().exec().map { result =>
        result ==> TestDataModel.TestRead("get")
      }
    }
    "Test macro version of typed (write)" - {
      implicit val mockDriver = new MockRestDriver(macroHandler)
      TestApiTyped.`/typed`().write(TestDataModel.TestWrite("write")).execJ().map { result =>
        result ==> parse("""{ "test": "written" }""").right.getOrElse(Json.Null)
      }
    }
    "Test custom typed extensions (read)" - {
      implicit val mockDriver = new MockRestDriver(customHandler)
      import ConcreteTypes._
      // Custom read wrappers receive the raw JSON string, not a decoded object.
      TestApiTyped.`/custom_typed`().read().exec().map { result =>
        result ==> TestDataModel.TestWrapperRead("""{ "testRead": "get" }""")
      }
    }
    "Test custom typed extensions (write)" - {
      implicit val mockDriver = new MockRestDriver(customHandler)
      TestApiTyped.`/custom_typed`().write(TestDataModel.TestWrapperWrite("write")).execJ().map { result =>
        result ==> parse("""{ "test": "written" }""").right.getOrElse(Json.Null)
      }
    }
  }
}
/** Test object containing example data model for `TestApiTyped`
* (sidenote: annotating `TestDataModel` doesn't make `TestDataModelComponent` visible)
*/
object TestDataModel extends TestDataModelComponent{
  // Typed response body for reads of `/typed`.
  case class TestRead(testRead: String)
  // Typed request body for writes to `/typed`.
  case class TestWrite(testWrite: String)
}
/**Illustrates the case where sub-components are used to partition
* the code
*/
trait TestDataModelComponent {
  // Typed read/write models for `/data_model`.
  case class OtherTestRead(testRead: String)
  case class OtherTestWrite(testWrite: String)
  // Custom-serialized write wrapper: renders itself directly to raw JSON.
  case class TestWrapperWrite(s: String) extends CustomTypedToString {
    def fromTyped: String = s"""{"testWrite":"$s"}"""
  }
  // Custom-deserialized read wrapper: constructed from the raw JSON string.
  case class TestWrapperRead(s: String) extends CustomStringToTyped
}
/** Sample API for testing CIRCE integration
*/
object TestApiTyped extends TestApiTypedExtensions {
  // Resource with macro-derived typed read/write bodies.
  case class `/typed`()
    extends RestReadableT[Modifier, TestDataModel.TestRead]
    with RestWritableTU[Modifier, TestDataModel.TestWrite]
    with RestResource
}
/** Additional sample resources mixed into [[TestApiTyped]]:
  * `/data_model` uses the component-level case classes, while
  * `/custom_typed` exercises the custom string<->typed wrappers.
  */
trait TestApiTypedExtensions {
  case class `/data_model`()
    extends RestReadableT[Modifier, TestDataModel.OtherTestRead]
    with RestWritableTU[Modifier, TestDataModel.OtherTestWrite]
    with RestResource
  case class `/custom_typed`()
    extends RestReadableT[Modifier, TestDataModel.TestWrapperRead]
    with RestWritableTU[Modifier, TestDataModel.TestWrapperWrite]
    with RestResource
}
| Alex-At-Home/rest_client_library | rest_json_circe_module/shared/src/test/scala/org/elastic/rest/scala/driver/json/tests/CirceTypeModuleTests.scala | Scala | apache-2.0 | 4,630 |
package org.lanyard.dist.cont
import org.lanyard._
import org.lanyard.dist.Distribution
import org.lanyard.random.RNG
import org.lanyard.util.LogGamma
/** The beta distribution is a continuous probability distribution. It
* is defined over the range [0,1] so it distributes other
* probabilities.
*
* @constructor Creates a beta distribution with given parameters.
* @param alpha alpha parameter of the beta distribution
* @param beta beta parameter of the beta distribution
*/
/** The beta distribution is a continuous probability distribution. It
  * is defined over the range [0,1] so it distributes other
  * probabilities.
  *
  * @constructor Creates a beta distribution with given parameters.
  * @param alpha alpha parameter of the beta distribution (strictly positive)
  * @param beta  beta parameter of the beta distribution (strictly positive)
  */
case class Beta(val alpha: Double, val beta: Double) extends Distribution[Double] {

  import math._

  require(alpha > 0, s"Beta distribution parameter alpha needs to be stricly positive. Found value: ${alpha}")
  require(beta > 0, s"Beta distribution parameter beta needs to be stricly positive. Found value: ${beta}")

  /** Gamma distributions used for sampling:
    * if X ~ Gamma(alpha, 1) and Y ~ Gamma(beta, 1) then X / (X + Y) ~ Beta(alpha, beta).
    */
  private lazy val alphaGamma = Gamma(alpha, 1)
  private lazy val betaGamma = Gamma(beta, 1)

  /** Precomputes the constant term -log B(alpha, beta) used in the probability density function. */
  private lazy val constantTerm = LogGamma(alpha + beta) - LogGamma(alpha) - LogGamma(beta)

  /** Computes the mean. */
  def mean = alpha / (alpha + beta)

  /** Computes the variance. */
  def variance = (alpha * beta) / ((alpha + beta) * (alpha + beta) * (alpha + beta + 1))

  /** Computes the mode.
    * NOTE(review): only meaningful for alpha > 1 and beta > 1; for other
    * parameters the density has no interior maximum and this formula is
    * undefined or misleading — confirm callers respect that.
    */
  def mode = (alpha - 1) / (alpha + beta - 2)

  /** Computes the logarithm of the probability density function.
    *
    * @param value value to compute the log probability for
    * @return logarithm of the probability if value is in [0,1], negative infinity otherwise
    */
  override def logLike(value: Double): LogLike =
    if (0 <= value && value <= 1) {
      // Guard the exponent terms: under IEEE arithmetic 0.0 * log(0.0) evaluates
      // to 0.0 * -Infinity == NaN, but the density's factor x^0 is 1, i.e. a log
      // contribution of 0. Without these guards Beta(1, 1) — the uniform
      // distribution — returned NaN at the boundaries instead of constantTerm.
      val alphaTerm = if (alpha == 1.0) 0.0 else (alpha - 1.0) * log(value)
      val betaTerm = if (beta == 1.0) 0.0 else (beta - 1.0) * log(1.0 - value)
      constantTerm + alphaTerm + betaTerm
    } else
      Double.NegativeInfinity

  /** Draws a random sample from this beta distribution.
    *
    * @param source source of randomness
    * @return pair of a beta sample and the updated RNG
    */
  def random(source: RNG): (Double, RNG) = {
    val (d1, rng1) = alphaGamma.random(source)
    val (d2, rng2) = betaGamma.random(rng1)
    val draw = d1 / (d1 + d2)
    assume(0 <= draw && draw <= 1, "Draw of the beta distribution was not in [0,1]. Parameters alpha: " + alpha + " beta: " + beta)
    (draw, rng2)
  }
}
| perian/Lanyard | src/main/scala/org/lanyard/dist/cont/Beta.scala | Scala | gpl-2.0 | 2,354 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.admin
import java.net.URLEncoder
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model._
import org.json4s.JsonAST.{JObject, JString}
import org.json4s.jackson.JsonMethods._
import org.squbs.unicomplex.{RouteDefinition, WebContext}
import org.squbs.util.ConfigUtil._
/** HTTP admin endpoint that exposes the JVM's JMX MBeans as JSON.
  * The root page lists links to every non-excluded MBean; each MBean is then
  * served under `<prefix>/<encoded-name>`. Exclusions come from configuration.
  */
class AdminSvc extends RouteDefinition with WebContext {

  // Base path for individual bean resources, honoring the web context if set.
  val prefix = if (webContext == "") "/bean" else s"/$webContext/bean"

  // Configured exclusions: plain entries exclude whole beans,
  // "beanName::fieldName" entries exclude a single field of a bean.
  val exclusions = context.system.settings.config.get[Seq[String]]("squbs.admin.exclusions", Seq.empty[String]).toSet
  val (exBeans, exFieldSet) = exclusions partition { !_.contains("::") }

  // Bean name -> set of excluded field names, parsed from the "bean::field" specs.
  val exFields = exFieldSet
    .map { fieldSpec =>
      val fields = fieldSpec split "::"
      fields(0) -> fields(1)
    }
    .groupBy(_._1)
    .map { case (k, v) => k -> v.map(_._2) }

  val route =
    get {
      pathEndOrSingleSlash {
        extractUri { uri =>
          complete {
            // Index page: map every non-excluded MBean name to the URL of its
            // detail resource. '=' is swapped for '~' before URL-encoding so
            // bean names survive as a single path segment.
            val kv = MBeanUtil.allObjectNames collect {
              case name if !(exBeans contains name) =>
                val resource = Path(s"$prefix/${URLEncoder.encode(name.replace('=', '~'), "UTF-8")}")
                name -> JString(uri.withPath(resource).toString())
            }
            HttpResponse(entity = HttpEntity(ContentTypes.`application/json`, pretty(render(JObject(kv)))))
          }
        }
      } ~
      path("bean" / Segment) { encName =>
        complete {
          // Undo the '~' -> '=' substitution made when building the links above.
          // NOTE(review): replace('%', '/') looks like a hand-rolled decode of an
          // encoded '/' but it does not remove the following hex digits — verify
          // against how the routing layer decodes segments before relying on it.
          val name = encName.replace('~', '=').replace('%', '/')
          val response: HttpResponse =
            if (exBeans contains name) HttpResponse(StatusCodes.NotFound, entity = StatusCodes.NotFound.defaultMessage)
            else MBeanUtil.asJSON(name, exFields getOrElse (name, Set.empty))
              .map { json => HttpResponse(entity = json) }
              .getOrElse (HttpResponse(StatusCodes.NotFound, entity = StatusCodes.NotFound.defaultMessage))
          response
        }
      }
    }
}
| paypal/squbs | squbs-admin/src/main/scala/org/squbs/admin/AdminSvc.scala | Scala | apache-2.0 | 2,518 |
package com.typesafe.akka.http.benchmark.handlers
import akka.http.scaladsl.server.Directives._
import com.typesafe.akka.http.benchmark.Infrastructure
import com.typesafe.akka.http.benchmark.datastore.DataStore
import com.typesafe.akka.http.benchmark.entity.World
import com.typesafe.akka.http.benchmark.util.RandomGenerator
import scala.concurrent.Future
import scala.util.Try
/** Implements the benchmark "updates" endpoint: reads N random worlds,
  * re-rolls each one's random number, persists the change and returns the
  * updated worlds as JSON. N comes from the optional `queries` parameter,
  * clamped into [1, 500] with a default of 1.
  */
trait UpdatesHandler { _: Infrastructure with DataStore with RandomGenerator =>
  import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
  import spray.json.DefaultJsonProtocol._

  def updatesEndpoint =
    get {
      path("updates") {
        parameter('queries.?) { rawQueries =>
          // Default to 1 when the parameter is absent or malformed, then clamp.
          val parsed = Try(rawQueries.getOrElse("1").toInt).getOrElse(1)
          val queryCount = math.min(500, math.max(1, parsed))
          // Re-roll one world's random number and persist it, yielding the new state.
          def refreshWorld(id: Int): Future[World] =
            requireWorldById(id).flatMap { world =>
              val refreshed = world.copy(randomNumber = nextRandomIntBetween1And10000)
              // The boolean persistence result is intentionally not surfaced.
              updateWorld(refreshed).map(_ => refreshed)
            }
          complete {
            Future.traverse(Seq.fill(queryCount)(nextRandomIntBetween1And10000))(refreshWorld)
          }
        }
      }
    }
}
| actframework/FrameworkBenchmarks | frameworks/Scala/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/handlers/UpdatesHandler.scala | Scala | bsd-3-clause | 1,227 |
package mesosphere.marathon.core.task.tracker.impl
import akka.actor.{ Status, ActorRef }
import akka.event.EventStream
import akka.testkit.TestProbe
import com.codahale.metrics.MetricRegistry
import com.google.inject.Provider
import mesosphere.marathon.core.CoreGuiceModule
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.core.task.bus.TaskChangeObservables.TaskChanged
import mesosphere.marathon.core.task.bus.{ MarathonTaskStatus, MarathonTaskStatusTestHelper, TaskStatusEmitter }
import mesosphere.marathon.core.task.tracker.TaskUpdater
import mesosphere.marathon.core.task.update.impl.steps.{ NotifyHealthCheckManagerStepImpl, NotifyLaunchQueueStepImpl, NotifyRateLimiterStepImpl, PostToEventStreamStepImpl, ScaleAppUpdateStepImpl, TaskStatusEmitterPublishStepImpl }
import mesosphere.marathon.core.task.{ TaskStateChangeException, Task, TaskStateChange, TaskStateOp }
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.state.{ AppRepository, PathId, TaskRepository, Timestamp }
import mesosphere.marathon.test.{ CaptureLogEvents, MarathonActorSupport, Mockito }
import mesosphere.marathon.{ MarathonSpec, MarathonTestHelper }
import org.apache.mesos.SchedulerDriver
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ GivenWhenThen, Matchers }
import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
/** Unit tests for `TaskOpProcessorImpl`.
  *
  * Each test wires a fresh [[Fixture]] of mocks (task repository, state-op
  * resolver) plus actor probes (task tracker, op sender), drives one
  * `TaskOpProcessor.Operation` through the processor, and verifies both the
  * message sent to the tracker actor and the repository interactions —
  * including the recovery paths taken when `store`/`expunge` fail and the
  * processor re-reads the repository to decide the final outcome.
  */
class TaskOpProcessorImplTest
    extends MarathonActorSupport with MarathonSpec with Mockito with GivenWhenThen with ScalaFutures with Matchers {

  import scala.concurrent.ExecutionContext.Implicits.global

  // ignored by the TaskOpProcessorImpl
  val deadline = Timestamp.zero

  // Happy path: resolver yields an Update, store succeeds, tracker is notified.
  test("process update with success") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskProto = TaskSerializer.toProto(task)
    val stateOp = f.stateOpUpdate(task, MarathonTaskStatusTestHelper.runningHealthy)
    val mesosStatus = stateOp.status.mesosStatus.get
    val expectedChange = TaskStateChange.Update(task, Some(task))
    val taskChanged = TaskChanged(stateOp, expectedChange)
    val ack = TaskTrackerActor.Ack(f.opSender.ref, expectedChange)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.task(task.taskId.idString) returns Future.successful(Some(taskProto))
    f.taskRepository.store(taskProto) returns Future.successful(taskProto)
    f.taskUpdater.statusUpdate(appId, mesosStatus).asInstanceOf[Future[Unit]] returns Future.successful(())
    When("the processor processes an update")
    val result = f.processor.process(
      TaskOpProcessor.Operation(deadline, f.opSender.ref, task.taskId, stateOp)
    )
    And("the taskTracker replies immediately")
    f.taskTrackerProbe.expectMsg(TaskTrackerActor.StateChanged(taskChanged, ack))
    f.taskTrackerProbe.reply(())
    And("the processor replies with unit accordingly")
    result.futureValue should be(()) // first wait for the call to complete
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    And("it calls store")
    verify(f.taskRepository).store(taskProto)
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // Store fails but the repository still holds the task: the processor treats
  // the stored state as authoritative, logs a warning and acks successfully.
  test("process update with failing taskRepository.store but successful load of existing task") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository and existing task")
    val task = MarathonTestHelper.stagedTaskForApp(appId)
    val taskProto = TaskSerializer.toProto(task)
    val stateOp = f.stateOpUpdate(task, MarathonTaskStatusTestHelper.running)
    val expectedChange = TaskStateChange.Update(task, Some(task))
    val taskChanged = TaskChanged(stateOp, expectedChange)
    val ack = TaskTrackerActor.Ack(f.opSender.ref, expectedChange)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.store(taskProto) returns Future.failed(new RuntimeException("fail"))
    f.taskRepository.task(taskProto.getId) returns Future.successful(Some(taskProto))
    When("the processor processes an update")
    var result: Try[Unit] = Failure(new RuntimeException("test executing failed"))
    // CaptureLogEvents records the warnings emitted on the store failure.
    val logs = CaptureLogEvents.forBlock {
      val resultF = f.processor.process(
        TaskOpProcessor.Operation(deadline, f.opSender.ref, task.taskId, stateOp)
      )
      f.taskTrackerProbe.expectMsg(TaskTrackerActor.StateChanged(taskChanged, ack))
      f.taskTrackerProbe.reply(())
      result = Try(resultF.futureValue) // we need to complete the future here to get all the logs
    }
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    Then("it calls store")
    verify(f.taskRepository).store(taskProto)
    And("logs a warning after detecting the error")
    logs should have size 1
    logs.head.getLevel should be(ch.qos.logback.classic.Level.WARN)
    logs.head.getMessage should include(s"[${taskProto.getId}]")
    And("loads the task")
    verify(f.taskRepository).task(taskProto.getId)
    And("it replies with unit immediately because the task is as expected")
    result should be(Success(()))
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // Store fails and the task is gone from the repository: the processor
  // propagates the store failure to the tracker as a TaskStateChange.Failure.
  test("process update with failing taskRepository.store and successful load of non-existing task") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository and no task")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskProto = TaskSerializer.toProto(task)
    val stateOp = f.stateOpUpdate(task, MarathonTaskStatusTestHelper.running)
    val resolvedStateChange = TaskStateChange.Update(task, Some(task))
    val storeException: RuntimeException = new scala.RuntimeException("fail")
    val expectedTaskChanged = TaskChanged(stateOp, TaskStateChange.Failure(storeException))
    val ack = TaskTrackerActor.Ack(f.opSender.ref, expectedTaskChanged.stateChange)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(resolvedStateChange)
    f.taskRepository.store(taskProto) returns Future.failed(storeException)
    f.taskRepository.task(taskProto.getId) returns Future.successful(None)
    When("the processor processes an update")
    var result: Try[Unit] = Failure(new RuntimeException("test executing failed"))
    val logs = CaptureLogEvents.forBlock {
      val resultF = f.processor.process(
        TaskOpProcessor.Operation(deadline, f.opSender.ref, task.taskId, stateOp)
      )
      f.taskTrackerProbe.expectMsg(TaskTrackerActor.StateChanged(expectedTaskChanged, ack))
      f.taskTrackerProbe.reply(())
      result = Try(resultF.futureValue) // we need to complete the future here to get all the logs
    }
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    Then("it calls store")
    verify(f.taskRepository).store(taskProto)
    And("logs a warning after detecting the error")
    logs should have size 1
    logs.head.getLevel should be(ch.qos.logback.classic.Level.WARN)
    logs.head.getMessage should include(s"[${taskProto.getId}]")
    And("loads the task")
    verify(f.taskRepository).task(taskProto.getId)
    And("it replies with unit immediately because the task is as expected")
    result should be(Success(()))
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // Both store and the recovery read fail: the original store error is
  // surfaced to the caller and both failures are logged.
  test("process update with failing taskRepository.store and load also fails") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository and existing task")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskProto = TaskSerializer.toProto(task)
    val storeFailed: RuntimeException = new scala.RuntimeException("store failed")
    val stateOp = f.stateOpUpdate(task, MarathonTaskStatusTestHelper.running)
    val expectedChange = TaskStateChange.Update(task, Some(task))
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.store(taskProto) returns Future.failed(storeFailed)
    f.taskRepository.task(taskProto.getId) returns Future.failed(new RuntimeException("task failed"))
    When("the processor processes an update")
    var result: Try[Unit] = Failure(new RuntimeException("test executing failed"))
    val logs = CaptureLogEvents.forBlock {
      result = Try(f.processor.process(
        TaskOpProcessor.Operation(deadline, f.opSender.ref, task.taskId, stateOp)
      ).futureValue) // we need to complete the future here to get all the logs
    }
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    Then("it calls store")
    verify(f.taskRepository).store(taskProto)
    And("loads the task")
    verify(f.taskRepository).task(taskProto.getId)
    And("it replies with the original error")
    result.isFailure shouldBe true
    result.failed.get.getCause.getMessage should be(storeFailed.getMessage)
    And("logs a two warnings, for store and for task")
    logs should have size 2
    logs.head.getLevel should be(ch.qos.logback.classic.Level.WARN)
    logs.head.getMessage should include(s"[${taskProto.getId}]")
    logs(1).getLevel should be(ch.qos.logback.classic.Level.WARN)
    logs(1).getMessage should include(s"[${taskProto.getId}]")
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // Happy-path expunge: repository expunge succeeds, tracker is notified.
  test("process expunge with success") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskId = task.taskId
    val taskIdString = taskId.idString
    val stateOp = f.stateOpExpunge(task)
    val expectedChange = TaskStateChange.Expunge(task)
    val taskChanged = TaskChanged(stateOp, expectedChange)
    val ack = TaskTrackerActor.Ack(f.opSender.ref, expectedChange)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.expunge(taskIdString) returns Future.successful(Iterable(true))
    When("the processor processes an update")
    val result = f.processor.process(
      TaskOpProcessor.Operation(deadline, f.opSender.ref, taskId, TaskStateOp.ForceExpunge(taskId))
    )
    f.taskTrackerProbe.expectMsg(TaskTrackerActor.StateChanged(taskChanged, ack))
    f.taskTrackerProbe.reply(())
    Then("it replies with unit immediately")
    result.futureValue should be(())
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    And("it calls expunge")
    verify(f.taskRepository).expunge(taskIdString)
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // Expunge fails but the recovery read shows the task is already gone:
  // treated as success.
  test("process expunge, expunge fails but task reload confirms that task is gone") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskId = task.taskId
    val stateOp = f.stateOpExpunge(task)
    val expectedChange = TaskStateChange.Expunge(task)
    val taskChanged = TaskChanged(stateOp, expectedChange)
    val ack = TaskTrackerActor.Ack(f.opSender.ref, expectedChange)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.expunge(taskId.idString) returns Future.failed(new RuntimeException("expunge fails"))
    f.taskRepository.task(taskId.idString) returns Future.successful(None)
    When("the processor processes an update")
    val result = f.processor.process(
      TaskOpProcessor.Operation(deadline, f.opSender.ref, taskId, TaskStateOp.ForceExpunge(taskId))
    )
    f.taskTrackerProbe.expectMsg(TaskTrackerActor.StateChanged(taskChanged, ack))
    f.taskTrackerProbe.reply(())
    Then("it replies with unit immediately")
    result.futureValue should be(())
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    And("it calls expunge")
    verify(f.taskRepository).expunge(taskId.idString)
    And("it reloads the task")
    verify(f.taskRepository).task(taskId.idString)
    And("the taskTracker gets the update")
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // Expunge fails and the task is still present: the failure is forwarded to
  // the tracker as a TaskStateChange.Failure.
  test("process expunge, expunge fails and task reload suggests that task is still there") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a taskRepository")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskProto = TaskSerializer.toProto(task)
    val taskId = taskProto.getId
    val expungeException: RuntimeException = new scala.RuntimeException("expunge fails")
    val stateOp = f.stateOpExpunge(task)
    val resolvedStateChange = TaskStateChange.Expunge(task)
    val expectedTaskChanged = TaskChanged(stateOp, TaskStateChange.Failure(expungeException))
    val ack = TaskTrackerActor.Ack(f.opSender.ref, expectedTaskChanged.stateChange)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(resolvedStateChange)
    f.taskRepository.expunge(taskId) returns Future.failed(expungeException)
    f.taskRepository.task(taskId) returns Future.successful(Some(taskProto))
    When("the processor processes an update")
    val result = f.processor.process(
      TaskOpProcessor.Operation(deadline, f.opSender.ref, task.taskId, TaskStateOp.ForceExpunge(task.taskId))
    )
    f.taskTrackerProbe.expectMsg(TaskTrackerActor.StateChanged(expectedTaskChanged, ack))
    f.taskTrackerProbe.reply(())
    Then("it replies with unit immediately")
    result.futureValue should be(()) // first we make sure that the call completes
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    And("it calls expunge")
    verify(f.taskRepository).expunge(taskId)
    And("it reloads the task")
    verify(f.taskRepository).task(taskId)
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // A NoChange resolution skips the repository entirely and acks the sender.
  test("process statusUpdate with NoChange") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a statusUpdateResolver and an update")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskProto = TaskSerializer.toProto(task)
    val stateOp = f.stateOpUpdate(task, MarathonTaskStatusTestHelper.running)
    val expectedChange = TaskStateChange.NoChange(task.taskId)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.task(taskProto.getId) returns Future.successful(Some(taskProto))
    When("the processor processes an update")
    val result = f.processor.process(
      TaskOpProcessor.Operation(deadline, testActor, task.taskId, stateOp)
    )
    Then("it replies with unit immediately")
    result.futureValue should be(())
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    And("the initiator gets its ack")
    expectMsg(expectedChange)
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  // A Failure resolution is reported back to the initiator as Status.Failure.
  test("process statusUpdate with Failure") {
    val f = new Fixture
    val appId = PathId("/app")
    Given("a statusUpdateResolver and an update")
    val task = MarathonTestHelper.mininimalTask(appId)
    val taskProto = TaskSerializer.toProto(task)
    val stateOp = f.stateOpReservationTimeout(task)
    val exception = TaskStateChangeException("ReservationTimeout on LaunchedEphemeral is unexpected")
    val expectedChange = TaskStateChange.Failure(exception)
    f.stateOpResolver.resolve(stateOp) returns Future.successful(expectedChange)
    f.taskRepository.task(taskProto.getId) returns Future.successful(Some(taskProto))
    When("the processor processes an update")
    val result = f.processor.process(
      TaskOpProcessor.Operation(deadline, testActor, task.taskId, stateOp)
    )
    Then("it replies with unit immediately")
    result.futureValue should be(())
    Then("The StateOpResolver is called")
    verify(f.stateOpResolver).resolve(stateOp)
    And("the initiator gets its ack")
    expectMsg(Status.Failure(exception))
    And("no more interactions")
    f.verifyNoMoreInteractions()
  }

  /** Per-test wiring: mocks, actor probes, TaskStateOp factory helpers, the
    * status-update step pipeline, and the processor under test.
    */
  class Fixture {
    lazy val config = MarathonTestHelper.defaultConfig()
    lazy val taskTrackerProbe = TestProbe()
    lazy val opSender = TestProbe()
    lazy val taskRepository = mock[TaskRepository]
    lazy val stateOpResolver = mock[TaskOpProcessorImpl.TaskStateOpResolver]
    lazy val metrics = new Metrics(new MetricRegistry)
    lazy val now = Timestamp(0)

    // Convenience factories for the TaskStateOps used by the tests above.
    def stateOpLaunch(task: Task) = TaskStateOp.LaunchEphemeral(task)
    def stateOpUpdate(task: Task, status: MarathonTaskStatus, now: Timestamp = now) = TaskStateOp.MesosUpdate(task, status, now)
    def stateOpExpunge(task: Task) = TaskStateOp.ForceExpunge(task.taskId)
    def stateOpLaunchOnReservation(task: Task, status: Task.Status) = TaskStateOp.LaunchOnReservation(task.taskId, now, status, Seq.empty)
    def stateOpReservationTimeout(task: Task) = TaskStateOp.ReservationTimeout(task.taskId)
    def stateOpReserve(task: Task) = TaskStateOp.Reserve(task.asInstanceOf[Task.Reserved])

    lazy val healthCheckManager: HealthCheckManager = mock[HealthCheckManager]
    lazy val schedulerActor: TestProbe = TestProbe()
    lazy val schedulerActorProvider = new Provider[ActorRef] {
      override def get(): ActorRef = schedulerActor.ref
    }
    lazy val appRepository: AppRepository = mock[AppRepository]
    lazy val appRepositoryProvider: Provider[AppRepository] = new Provider[AppRepository] {
      override def get(): AppRepository = appRepository
    }
    lazy val launchQueue: LaunchQueue = mock[LaunchQueue]
    lazy val launchQueueProvider: Provider[LaunchQueue] = new Provider[LaunchQueue] {
      override def get(): LaunchQueue = launchQueue
    }
    lazy val schedulerDriver: SchedulerDriver = mock[SchedulerDriver]
    lazy val eventBus: EventStream = mock[EventStream]
    lazy val taskUpdater: TaskUpdater = mock[TaskUpdater]
    lazy val taskStatusEmitter: TaskStatusEmitter = mock[TaskStatusEmitter]
    lazy val taskStatusEmitterProvider: Provider[TaskStatusEmitter] = new Provider[TaskStatusEmitter] {
      override def get(): TaskStatusEmitter = taskStatusEmitter
    }
    lazy val guiceModule = new CoreGuiceModule
    // Use module method to ensure that we keep the list of steps in sync with the test.
    lazy val statusUpdateSteps = guiceModule.taskStatusUpdateSteps(
      notifyHealthCheckManager,
      notifyRateLimiter,
      notifyLaunchQueue,
      emitUpdate,
      postToEventStream,
      scaleApp
    )
    // task status update steps
    lazy val notifyHealthCheckManager = new NotifyHealthCheckManagerStepImpl(healthCheckManager)
    lazy val notifyRateLimiter = new NotifyRateLimiterStepImpl(launchQueueProvider, appRepositoryProvider)
    lazy val postToEventStream = new PostToEventStreamStepImpl(eventBus)
    lazy val notifyLaunchQueue = new NotifyLaunchQueueStepImpl(launchQueueProvider)
    lazy val emitUpdate = new TaskStatusEmitterPublishStepImpl(taskStatusEmitterProvider)
    lazy val scaleApp = new ScaleAppUpdateStepImpl(schedulerActorProvider)

    // The unit under test.
    lazy val processor = new TaskOpProcessorImpl(taskTrackerProbe.ref, taskRepository, stateOpResolver, config)

    // Asserts that neither the tracker probe nor the mocks saw anything beyond
    // the interactions already verified by the calling test.
    def verifyNoMoreInteractions(): Unit = {
      taskTrackerProbe.expectNoMsg(0.seconds)
      noMoreInteractions(taskRepository)
      noMoreInteractions(stateOpResolver)
    }

    // Applies a LaunchOnReservation and returns the resulting launched task,
    // failing loudly if the op did not produce an Update.
    def toLaunched(task: Task, taskStateOp: TaskStateOp.LaunchOnReservation): Task =
      task.update(taskStateOp) match {
        case TaskStateChange.Update(updatedTask, _) => updatedTask
        case _ => throw new scala.RuntimeException("taskStateOp did not result in a launched task")
      }
  }
}
| ss75710541/marathon | src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskOpProcessorImplTest.scala | Scala | apache-2.0 | 19,761 |
package com.optrak.testakka.impl
import com.lightbend.lagom.scaladsl.server.LocalServiceLocator
import com.lightbend.lagom.scaladsl.testkit.ServiceTest
import com.optrak.testakka.api.{GreetingMessage, PersistentService}
import org.scalatest.{AsyncWordSpec, BeforeAndAfterAll, Matchers}
/** Integration test for the persistent service: runs the shared test cases
  * from `PersistentServiceBase` against a full Lagom test server backed by an
  * embedded Cassandra instance.
  */
class PersistentServiceSpec extends PersistentServiceBase {

  // Start the application under a Lagom test server with Cassandra enabled.
  private val server = ServiceTest.startServer(
    ServiceTest.defaultSetup
      .withCassandra(true)
  ) { ctx =>
    new PersistentApplication(ctx) with LocalServiceLocator
  }

  // Client bound to the running test server; consumed by the base spec's tests.
  override lazy val client: PersistentService = server.serviceClient.implement[PersistentService]

  // Tear the server down after the whole suite has run.
  override protected def afterAll() = server.stop()
}
| Optrak/lagom-testbed | test-akka-integration/persistent/impl/src/test/scala/com/optrak/testakka/impl/PersistentServiceSpec.scala | Scala | apache-2.0 | 684 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.nisp.services
import com.google.inject.Inject
import uk.gov.hmrc.domain.Nino
import uk.gov.hmrc.http.{HeaderCarrier, UpstreamErrorResponse}
import uk.gov.hmrc.nisp.connectors.NationalInsuranceConnectorImpl
import uk.gov.hmrc.nisp.models.StatePensionExclusion._
import uk.gov.hmrc.nisp.models._
import uk.gov.hmrc.nisp.utils.ExclusionHelper
import scala.concurrent.{ExecutionContext, Future}
/** Fetches a customer's National Insurance record and normalises the
  * connector's response into the shape the frontend expects: either an
  * upstream error, a filtered exclusion, or a cleaned-up record.
  */
class NationalInsuranceService @Inject()(nationalInsuranceConnector: NationalInsuranceConnectorImpl)
                                        (implicit executor: ExecutionContext) {

  /** Retrieves and post-processes the NI record for the given NINO.
    *
    * Successful records have their tax years sorted newest-first and their
    * pre-1975 qualifying years derived; exclusion responses are mapped onto
    * the frontend's filtered exclusion types.
    */
  def getSummary(nino: Nino)(implicit hc: HeaderCarrier): Future[Either[UpstreamErrorResponse, Either[StatePensionExclusionFilter, NationalInsuranceRecord]]] =
    nationalInsuranceConnector.getNationalInsurance(nino).map {
      // A married women's reduced-rate election makes the record unusable.
      case Right(Right(record)) if record.reducedRateElection =>
        Right(Left(StatePensionExclusionFiltered(Exclusion.MarriedWomenReducedRateElection)))
      case Right(Right(record)) =>
        val orderedYears = record.taxYears.sortBy(_.taxYear)(Ordering[String].reverse)
        val pre1975Years = record.qualifyingYears - record.taxYears.count(_.qualifying)
        Right(Right(record.copy(taxYears = orderedYears, qualifyingYearsPriorTo1975 = pre1975Years)))
      case Right(Left(OkStatePensionExclusion(exclusions, _, _, _))) =>
        Right(Left(StatePensionExclusionFiltered(ExclusionHelper.filterExclusions(exclusions))))
      case Right(Left(ForbiddenStatePensionExclusion(exclusion, _))) =>
        Right(Left(StatePensionExclusionFiltered(exclusion)))
      case Right(Left(CopeStatePensionExclusion(exclusion, copeAvailableDate, previousDate))) =>
        Right(Left(StatePensionExclusionFilteredWithCopeDate(exclusion, copeAvailableDate, previousDate)))
      case Left(errorResponse) =>
        Left(errorResponse)
    }
}
| hmrc/nisp-frontend | app/uk/gov/hmrc/nisp/services/NationalInsuranceService.scala | Scala | apache-2.0 | 2,447 |
import scala.quoted.*
opaque type EmailAddress = String
object EmailAddress extends EmailAddressOps[EmailAddress]:
  /** At runtime an EmailAddress is just a String, so the String ToExpr applies as-is. */
  given (using underlying: ToExpr[String]): ToExpr[EmailAddress] = underlying

  /** Accepts `s` as an email address iff it contains an '@' character. */
  def parse(s: String): Either[String, EmailAddress] =
    Either.cond(s.contains("@"), s, "No @ symbol")
| dotty-staging/dotty | tests/run/opaque-inline/EmailAddress.scala | Scala | apache-2.0 | 295 |
package org.pfcoperez.dailyalgorithm.applications
import scala.collection.immutable.Queue
import org.pfcoperez.dailyalgorithm.datastructures.graphs.directed.trees.binary._
object BFSAndApps extends App {

  /**
   * Level in-order traversal: First last level, from left to right,...
   * ... then second to last, from left to right, third to last ...
   * up to first element.
   *
   * O(n)
   *
   */
  def levelOrderTreeTraversal[T](tree: BinaryTree[T]): Seq[T] = {
    // Breadth-first visit that prepends each value; enqueueing right before
    // left (the swapping inLevelOrder function) combined with the implicit
    // reversal of prepending yields bottom-up, left-to-right order.
    val res = bfsWithAccFunction(List.empty[T])(Queue(tree)) {
      (prev, level, v) =>
        v :: prev
    } { (a, b) => (b, a) }
    res
  }

  /**
   * Breadth-First traversal with accumulation function.
   * O(n), n = no elements
   *
   * @param acc accumulated result so far
   * @param h counter passed as the second argument to `update`.
   *          NOTE(review): it is incremented once per dequeued node, not once
   *          per level, so it counts visited nodes rather than the depth —
   *          callers treating it as a level index should verify (the only
   *          caller here ignores it).
   * @param toVisit queue of subtrees still to visit
   * @param update folds a node value (with the counter) into the accumulator
   * @param inLevelOrder decides the enqueue order of a node's two children
   */
  def bfsWithAccFunction[T, R](acc: R, h: Int = 0)(
    toVisit: Queue[BinaryTree[T]])(update: (R, Int, T) => R)(
    inLevelOrder: (BinaryTree[T], BinaryTree[T]) => (BinaryTree[T], BinaryTree[T])): R =
    if (toVisit.isEmpty) acc
    else {
      val (currentNode, remToVisit) = toVisit.dequeue
      val (newToVisit: Queue[BinaryTree[T]], newAcc) = currentNode match {
        case Node(left, v, right) =>
          val (a, b) = inLevelOrder(left, right)
          (remToVisit ++ Seq(a, b), update(acc, h, v))
        case _ => remToVisit -> acc // Empty subtree: nothing to accumulate
      }
      bfsWithAccFunction[T, R](newAcc, h + 1)(newToVisit)(update)(inLevelOrder)
    }

  // Demo fixture: a complete binary tree holding the values 1..15.
  val o = Node(Empty, 15, Empty)
  val n = Node(Empty, 14, Empty)
  val m = Node(Empty, 13, Empty)
  val l = Node(Empty, 12, Empty)
  val k = Node(Empty, 11, Empty)
  val j = Node(Empty, 10, Empty)
  val i = Node(Empty, 9, Empty)
  val h = Node(Empty, 8, Empty)
  val g = Node(n, 7, o)
  val f = Node(l, 6, m)
  val e = Node(j, 5, k)
  val d = Node(h, 4, i)
  val b = Node(d, 2, e)
  val c = Node(f, 3, g)
  val a = Node(b, 1, c)
  println(levelOrderTreeTraversal(a))
}
| pfcoperez/algorithmaday | src/main/scala/org/pfcoperez/dailyalgorithm/applications/BFSAndApps.scala | Scala | gpl-3.0 | 1,833 |
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.engine.scalate
import org.eknet.publet.engine.PubletEngine
import org.eknet.publet.Publet
import org.fusesource.scalate.{TemplateSource, TemplateEngine}
import org.eknet.publet.vfs.{Content, ContentResource}
/**
 * A publet engine that renders content through a Scalate [[TemplateEngine]].
 *
 * @author Eike Kettner eike.kettner@gmail.com
 * @since 07.10.12 01:22
 */
trait ScalateEngine extends PubletEngine {

  // Extra attributes made available to every template rendering.
  def attributes: Map[String, Any]
  def attributes_=(m: Map[String, Any])

  // The underlying Scalate template engine.
  def engine: TemplateEngine

  // Sets the layout template wrapped around rendered pages.
  def setDefaultLayoutUri(uri: String)

  // Disables layout rendering entirely.
  def disableLayout()

  // Renders the template addressed by `uri`, optionally against a data
  // resource and per-call attributes, producing the rendered content.
  def processUri(uri: String, data: Option[ContentResource], attributes: Map[String, Any] = Map()): Content

  // Same as processUri but for an already-resolved template source.
  def processSource(source: TemplateSource, data: Option[ContentResource], attributes: Map[String, Any] = Map()): Content
}
/** Factory for [[ScalateEngine]] instances backed by a Scalate `TemplateEngine`. */
object ScalateEngine {

  /** Builds an engine whose template lookups go through the publet virtual file system. */
  def apply(name: Symbol, publet: Publet): ScalateEngine = {
    val templateEngine = new TemplateEngine
    // Route template resolution through publet's VFS before wrapping the engine.
    VfsResourceLoader.install(templateEngine, publet)
    new ScalateEngineImpl(name, templateEngine)
  }
}
} | eikek/publet | scalate/src/main/scala/org/eknet/publet/engine/scalate/ScalateEngine.scala | Scala | apache-2.0 | 1,568 |
package howitworks.cats
import cats.Id
import cats.data.{Reader, ReaderT}
/** Demonstrates the Reader monad from cats: building a composite
  * String => String analysis first by hand, then as a for-comprehension over
  * `Reader`, and checks both yield the same output.
  */
class ReaderDemo extends wp.Spec {
  "hello reader" in {
    //Let's say there are some functions from
    // f: String => Whatever
    val size: String => Int = _.size
    val ovalLettersCount: String => Int = _.count(List('q', 'Q', 'o', 'O', 'p', 'P', 'd', 'D', 'R', 'a','A','g','b').contains(_))
    // Histogram of characters in the input string.
    val lettersDistribution: String => Map[Char, Int] = _.foldLeft(Map[Char, Int]())((acc, curr) => acc.updated(curr, 1 + acc.getOrElse(curr, 0)))
    //and you would like to create 3rd function based on 'returned values' of these above functions
    //standard approach is:
    val stringAnalysis: String => String = {s =>
      val sSize = size(s)
      val sOvalLettersSize = ovalLettersCount(s)
      val sLetterDistribution = lettersDistribution(s).toList.sortBy(x => (x._1.toUpper, x._1)).mkString(", ")
      s"The string '${s}' contains $sSize letters, $sOvalLettersSize oval letters and in general here this is distribution of letters: $sLetterDistribution"
    }
    //let's test it
    val exampleIn = "what are reader monads all about"
    val expectedOut = "The string 'what are reader monads all about' contains 32 letters, 11 oval letters and in general here this is distribution of letters: ( ,5), (a,6), (b,1), (d,2), (e,3), (h,1), (l,2), (m,1), (n,1), (o,2), (r,3), (s,1), (t,2), (u,1), (w,1)"
    //it works
    stringAnalysis(exampleIn) mustBe expectedOut
    //using writer monad you can create stringAnalysis it in different way:
    //Reader[A, B] represents function A => B
    //flatMap and map works on returned value B
    val stringAnalysis2: Reader[String, String] = for {
      input <- Reader[String, String](identity)
      size <- Reader(size)
      ovalCount <- Reader(ovalLettersCount)
      dist <- Reader(lettersDistribution)
      distFormatted = dist.toList.sortBy(x => (x._1.toUpper, x._1)).mkString(", ")
    } yield s"The string '$input' contains $size letters, $ovalCount oval letters and in general here this is distribution of letters: $distFormatted"
    //running
    stringAnalysis2.run(exampleIn) mustBe expectedOut
    //or even simler (apply deletagets to run)
    stringAnalysis2(exampleIn) mustBe expectedOut
    //and if you're interested in unwrapped function:
    val stringAnalysis2Fun = stringAnalysis2.run
    stringAnalysis2Fun(exampleIn) mustBe expectedOut
    //Let's obtain Monad for reader
    // Reader[A, B] is ReaderT[Id, A, B]; summoning the Monad instance proves it exists.
    def m[A] = implicitly[cats.Monad[ReaderT[Id, A, ?]]]
    m[String]
  }
}
| jawp/wicked-playground | modules/server/src/test/scala/howitworks/cats/ReaderDemo.scala | Scala | mit | 2,519 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTestingUtils}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.Row
/**
 * Unit tests for MinMaxScaler: rescaling into a [min, max] range, parameter
 * validation, ML persistence round trips, and NaN pass-through behaviour.
 */
class MinMaxScalerSuite extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
  import testImplicits._
  // Dense and sparse vectors (including Long extremes, which widen to Double)
  // must all be rescaled into the requested [-5, 5] range.
  test("MinMaxScaler fit basic case") {
    val data = Array(
      Vectors.dense(1, 0, Long.MinValue),
      Vectors.dense(2, 0, 0),
      Vectors.sparse(3, Array(0, 2), Array(3, Long.MaxValue)),
      Vectors.sparse(3, Array(0), Array(1.5)))
    val expected: Array[Vector] = Array(
      Vectors.dense(-5, 0, -5),
      Vectors.dense(0, 0, 0),
      Vectors.sparse(3, Array(0, 2), Array(5, 5)),
      Vectors.sparse(3, Array(0), Array(-2.5)))
    val df = data.zip(expected).toSeq.toDF("features", "expected")
    val scaler = new MinMaxScaler()
      .setInputCol("features")
      .setOutputCol("scaled")
      .setMin(-5)
      .setMax(5)
    val model = scaler.fit(df)
    model.transform(df).select("expected", "scaled").collect()
      .foreach { case Row(vector1: Vector, vector2: Vector) =>
        assert(vector1.equals(vector2), "Transformed vector is different with expected.")
    }
    MLTestingUtils.checkCopyAndUids(scaler, model)
  }
  // transformSchema must reject min >= max before any fitting happens.
  test("MinMaxScaler arguments max must be larger than min") {
    withClue("arguments max must be larger than min") {
      val dummyDF = Seq((1, Vectors.dense(1.0, 2.0))).toDF("id", "features")
      intercept[IllegalArgumentException] {
        val scaler = new MinMaxScaler().setMin(10).setMax(0).setInputCol("features")
        scaler.transformSchema(dummyDF.schema)
      }
      intercept[IllegalArgumentException] {
        val scaler = new MinMaxScaler().setMin(0).setMax(0).setInputCol("features")
        scaler.transformSchema(dummyDF.schema)
      }
    }
  }
  // Estimator params survive a default save/load round trip.
  test("MinMaxScaler read/write") {
    val t = new MinMaxScaler()
      .setInputCol("myInputCol")
      .setOutputCol("myOutputCol")
      .setMax(1.0)
      .setMin(-1.0)
    testDefaultReadWrite(t)
  }
  // Model params and the fitted originalMin/originalMax survive persistence.
  test("MinMaxScalerModel read/write") {
    val instance = new MinMaxScalerModel(
        "myMinMaxScalerModel", Vectors.dense(-1.0, 0.0), Vectors.dense(1.0, 10.0))
      .setInputCol("myInputCol")
      .setOutputCol("myOutputCol")
      .setMin(-1.0)
      .setMax(1.0)
    val newInstance = testDefaultReadWrite(instance)
    assert(newInstance.originalMin === instance.originalMin)
    assert(newInstance.originalMax === instance.originalMax)
  }
  // NaN entries are expected to pass through unscaled (see the expected vectors).
  test("MinMaxScaler should remain NaN value") {
    val data = Array(
      Vectors.dense(1, Double.NaN, 2.0, 2.0),
      Vectors.dense(2, 2.0, 0.0, 3.0),
      Vectors.dense(3, Double.NaN, 0.0, 1.0),
      Vectors.dense(6, 2.0, 2.0, Double.NaN))
    val expected: Array[Vector] = Array(
      Vectors.dense(-5.0, Double.NaN, 5.0, 0.0),
      Vectors.dense(-3.0, 0.0, -5.0, 5.0),
      Vectors.dense(-1.0, Double.NaN, -5.0, -5.0),
      Vectors.dense(5.0, 0.0, 5.0, Double.NaN))
    val df = data.zip(expected).toSeq.toDF("features", "expected")
    val scaler = new MinMaxScaler()
      .setInputCol("features")
      .setOutputCol("scaled")
      .setMin(-5)
      .setMax(5)
    val model = scaler.fit(df)
    model.transform(df).select("expected", "scaled").collect()
      .foreach { case Row(vector1: Vector, vector2: Vector) =>
        assert(vector1.equals(vector2), "Transformed vector is different with expected.")
    }
  }
}
| minixalpha/spark | mllib/src/test/scala/org/apache/spark/ml/feature/MinMaxScalerSuite.scala | Scala | apache-2.0 | 4,353 |
package com.blockcypher.api.events
import com.blockcypher.api.config.{BlockCypherTestNet, ActorSystemConfig}
import com.blockcypher.api.util.TestUtil
import org.scalacoin.protocol.BitcoinAddress
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{FlatSpec, MustMatchers}
import scala.concurrent.Future
import scala.concurrent.duration.DurationInt
import spray.json._
import DefaultJsonProtocol._
import spray.httpx.SprayJsonSupport._
/**
* Created by chris on 12/2/15.
*/
/** Integration-style tests for [[BlockCypherEventApi]] event registration.
  * NOTE(review): these appear to exercise a live BlockCypher endpoint via
  * futures with a 5 second timeout -- confirm they are not run in CI blindly. */
class BlockCypherEventApiTest extends FlatSpec with MustMatchers
  with ScalaFutures with BlockCypherEventApi with BlockCypherTestNet with ActorSystemConfig {
  // Sample tx-confirmation webhook payload.
  // NOTE(review): not referenced by the tests below -- presumably kept as
  // reference data; confirm whether it can be removed.
  val txConfirmationResponse =
    """
      |{"block_hash":"00000000000dccabccac0d795b3d3faf11023e01a2de1fd94c6f30db80aba5e8",
      |"block_height":626996,"block_index":10,"hash":"fbce5568e05cccf5e2caf25f85ed5a61e8899f2d382cd41d8a949132719ef3e4",
      |"addresses":["mhgDFW3fh6QTm5j1BtUEaUFjregYkqqcNh","mokS58yDd4DwC4AESQavBfP6EVg9TX6jjq",
      |"mtQLgLiqmytKkgE9sVGwypAFsLvkxBQ6XX"],"total":7554642,"fees":22679,"size":225,"preference":"high",
      |"relayed_by":"148.251.92.108:18333","confirmed":"2015-12-19T20:06:58Z","received":"2015-12-19T19:47:36.956Z",
      |"ver":1,"lock_time":626985,"double_spend":false,"vin_sz":1,"vout_sz":2,"confirmations":2,
      |"confidence":0,"inputs":[{"prev_hash":"679efcfecadfbcca283c8f307ba703199662de48c3f1fbf8bd4489d05b403f40",
      |"output_index":1,
      |"script":"47304402204b8e1053639878b579442ab0a5e8f060fffe8752774f775bde6b481ebbe5d80e0220684b85a0cbe0950017900f014b8b29cafba3e8e3e92602d7aeda2e64064cba5701210381c82dc267a958be06f1c920dc635bcd191d698c167e67a45a882a551c57ce1d",
      |"output_value":7577321,"sequence":4294967294,"addresses":["mokS58yDd4DwC4AESQavBfP6EVg9TX6jjq"],
      |"script_type":"pay-to-pubkey-hash"}],"outputs":[{"value":7454642,
      |"script":"76a9148d5968ad26f9e277849ff9f8f39920f28944467388ac",
      |"addresses":["mtQLgLiqmytKkgE9sVGwypAFsLvkxBQ6XX"],"script_type":"pay-to-pubkey-hash"},
      |{"value":100000,"script":"76a91417b07ad7f6303e381b856034001bacf2c750e9ac88ac",
      |"addresses":["mhgDFW3fh6QTm5j1BtUEaUFjregYkqqcNh"],"script_type":"pay-to-pubkey-hash"}]}
    """.stripMargin
  import actorSystem._
  // Registers the example event from BlockCypher's docs and checks the echo.
  "BlockCypherEventApi" must "query the example from blockcypher's api docs" in {
    //'{"event": "unconfirmed-tx",
    // "address": "15qx9ug952GWGTNn7Uiv6vode4RcGrRemh", "url": "https://my.domain.com/callbacks/new-tx"}'
    val event = BlockCypherEventImpl(None,"unconfirmed-tx",None, None, None,
      Some(BitcoinAddress("15qx9ug952GWGTNn7Uiv6vode4RcGrRemh")),None,None,None,Some("https://my.domain.com/callbacks/new-tx"),0)
    val result : Future[BlockCypherEvent] = sendEvent(event)
    whenReady(result, timeout( 5 seconds), interval(5 millis)) { event =>
      event.address must be (Some(TestUtil.bitcoinAddress))
      event.url must be (Some("https://my.domain.com/callbacks/new-tx"))
      event.event must be ("unconfirmed-tx")
    }
  }
  // Registers a tx-confirmation webhook and expects exactly 3 confirmations.
  // NOTE(review): a mainnet-style address is used while the suite mixes in
  // BlockCypherTestNet, and the confirmation count is environment dependent --
  // this looks flaky against a live API; confirm intent.
  it must "notify us when an address transaction receives another confirmation" in {
    val callBackUrl = "https://my.domain.com/callbacks/new-tx"
    val address = BitcoinAddress("15qx9ug952GWGTNn7Uiv6vode4RcGrRemh")
    val result : Future[BlockCypherEvent] = txConfirmation(address,callBackUrl)
    whenReady(result, timeout(5 seconds), interval(5 millis)) { event =>
      event.address must be (Some(address))
      event.confirmations must be (Some(3))
    }
  }
}
| Christewart/blockcypher-api | src/test/scala/com/blockcypher/api/events/BlockCypherEventApiTest.scala | Scala | mit | 3,483 |
import java.{util, io}
import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}
import kafka.serializer.StringDecoder
import scala.collection.mutable.ArrayBuffer
import scala.util.matching.Regex
//import SQLContextSingleton
import com.typesafe.config.ConfigFactory
import kafka.producer.{KeyedMessage, Producer, ProducerConfig}
import org.apache.spark.SparkConf
import org.apache.spark.examples.sql.hive.HiveFromSpark.Record
import org.apache.spark.rdd.RDD
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext, Time}
/**
* Created by 801762473 on 27/10/2015.
*/
object SparkStreamingNetflow extends Serializable {
def sendToKafka(enrichKafkaLine: DStream[String]): Unit = {
// ********** Start of write to Apache Kafka **********
println("In the sendToKafka DStream method")
enrichKafkaLine.foreachRDD { rdd =>
rdd.foreachPartition { partitionOfRecords =>
val props = new Properties()
// props.put("metadata.broker.list", "bow-grd-res-01.bowdev.net:9092,bow-grd-res-02.bowdev.net:9092,bow-grd-res-03.bowdev.net:9092")
// props.put("metadata.broker.list", "vm-cluster-node2:9092,vm-cluster-node3:9092,vm-cluster-node4:9092")
props.put("metadata.broker.list", "localhost:9092")
props.put("serializer.class", "kafka.serializer.StringEncoder")
// some properties we might wish to set commented out below
// props.put("compression.codec", codec.toString)
// props.put("producer.type", "sync")
// props.put("batch.num.messages", BatchSize.toString)
// props.put("message.send.max.retries", maxRetries.toString)
// props.put("request.required.acks", "-1")
val config = new ProducerConfig(props)
val producer = new Producer[String, String](config)
partitionOfRecords.foreach(row => {
val msg = row.toString
println("DStream : " + msg)
this.synchronized {
producer.send(new KeyedMessage[String, String]("proxy-output", msg))
}
})
producer.close()
}
}
// ********** End of write to Apache Kafka **********
}
  /** Kafka sink for a single RDD of alert rows: writes each element to the
    * "proxy-output" topic, using one async producer per partition. */
  def sendToKafka(enrichKafkaLine: RDD[String]): Unit = {
    println("In the sendToKafka RDD method")
    // ********** Start of write to Apache Kafka **********
    enrichKafkaLine.foreachPartition { partitionOfRecords =>
      println("In the sendToKafka RDD partitionOfRecords")
      val props = new Properties()
      // props.put("metadata.broker.list", "bow-grd-res-01.bowdev.net:9092,bow-grd-res-02.bowdev.net:9092,bow-grd-res-03.bowdev.net:9092")
      // props.put("metadata.broker.list", "vm-cluster-node2:9092,vm-cluster-node3:9092,vm-cluster-node4:9092")
      props.put("metadata.broker.list", "localhost:9092")
      props.put("serializer.class", "kafka.serializer.StringEncoder")
      props.put("producer.type", "async")
      // some properties we might wish to set commented out below
      // props.put("compression.codec", codec.toString)
      // props.put("producer.type", "sync")
      // props.put("batch.num.messages", BatchSize.toString)
      // props.put("message.send.max.retries", maxRetries.toString)
      // props.put("request.required.acks", "-1")
      val config = new ProducerConfig(props)
      val producer = new Producer[String, String](config)
      partitionOfRecords.foreach(row => {
        // val msg = row.toString
        // println("About to send message")
        // val msg = "Hello Paul"
        val msg = row.toString
        // println("RDD proxy-output : " + msg)
        // NOTE(review): synchronized looks unnecessary (the producer is local
        // to this partition task) -- kept as-is; confirm before removing.
        this.synchronized {
          producer.send(new KeyedMessage[String, String]("proxy-output", msg))
        }
      })
      producer.close()
    }
    // NOTE(review): this empty DStream overload is nested *inside* the RDD
    // variant as a local def that is never called -- it looks misplaced and is
    // dead code; confirm whether it should be removed or moved to object scope.
    def sendToKafka(enrichLine: DStream[_ >: String with (String, String) <: io.Serializable]): Unit = {
    }
    // ********** End of write to Apache Kafka **********
  }
// def lineParse(pStrWholeLine: String): String = {
// // construct an Array[String] with the correct parsing of the space delimted fields to take into account the "" quoted fields
// // val patternStart = new Regex("^\\".*")
// // val patternEnd = new Regex("$\\"")
//
// // println(pArrayWholeLine.deep.mkString("\\n"))
//
// var replaceArrayWholeLine = ArrayBuffer[String]()
// var replaceStr: String = ""
// var startFlag: Boolean = false
// val EoLPattern = """.*\\"$""".r
//
// for(myString <- pArrayWholeLine) {
// // println("myString is : " + myString)
// if (myString.matches("^\\".*")) {
// // println("myString found is : " + myString)
// replaceStr += myString
// // replaceArray :+ myString
// startFlag = true
// }
// // else if (myString.matches(".*12.0\\"$")) {
// else if (myString.matches(".*\\"$")) {
// // println("eol myString found is : " + myString)
// // replaceArray :+ myString
// replaceStr += myString
// // also add to replaceArrayWholeLine as an elemnt in the array because we have detected the end of the quoted
// // string
// replaceArrayWholeLine += replaceStr
// replaceStr = "" // reset the replace string
// startFlag = false
// }
// else {
// if (startFlag) {
// replaceStr += myString
// }
// else {
// replaceArrayWholeLine += myString
// }
// }
// }
// return pArrayWholeLine
// }
// def lineParse(pArrayWholeLine: Array[String]): Array[String] = {
  // Re-splits a whitespace-tokenised proxy log line so that fields that were
  // quoted (and therefore contain spaces) are glued back together into single
  // tokens. A token that both starts and ends with a quote passes through as
  // one field; a token that only starts a quote begins an accumulation which
  // ends at the next token closing the quote.
  // NOTE(review): every emitted token gets a trailing " " appended -- presumably
  // so later concatenation reproduces the original spacing; confirm callers
  // expect that.
  // NOTE(review): the quote-matching string literals below show doubled
  // backslashes in this copy of the file; verify they compile/match as intended.
  def lineParse(pArrayWholeLine: Array[String]): Array[String] = {
    // construct an Array[String] with the correct parsing of the space delimted fields to take into account the "" quoted fields
    // val patternStart = new Regex("^\\".*")
    // val patternEnd = new Regex("$\\"")
    // println(pArrayWholeLine.deep.mkString(" "))
    var replaceArrayWholeLine = ArrayBuffer[String]()
    var replaceStr: String = ""
    var startFlag: Boolean = false
    // NOTE(review): EoLPattern is never used -- candidate for removal.
    val EoLPattern = """.*\\"$""".r
    for(myString <- pArrayWholeLine) {
      // println("myString is : " + myString)
      // fully quoted token: emit as-is
      if (myString.matches("^\\".*\\"$")) {
        replaceArrayWholeLine += myString + " "
      }
      // opening quote: start accumulating
      else if (myString.matches("^\\".*")) {
        // println("myString found is : " + myString)
        replaceStr += myString + " "
        // replaceArray :+ myString
        startFlag = true
      }
      // else if (myString.matches(".*12.0\\"$")) {
      // closing quote: finish the accumulated quoted field
      else if (myString.matches(".*\\"$")) {
        // println("eol myString found is : " + myString)
        // replaceArray :+ myString
        replaceStr += myString + " "
        // also add to replaceArrayWholeLine as an elemnt in the array because we have detected the end of the quoted
        // string
        replaceArrayWholeLine += replaceStr
        replaceStr = "" // reset the replace string
        startFlag = false
      }
      else {
        // inside a quoted run keep accumulating, otherwise emit directly
        if (startFlag) {
          replaceStr += myString + " "
        }
        else {
          replaceArrayWholeLine += myString + " "
        }
      }
    }
    // NOTE(review): debug printlns on the hot path -- consider a logger at
    // debug level, these run once per input line.
    println("Return array length is : " + replaceArrayWholeLine.length)
    println(replaceArrayWholeLine)
    println(pArrayWholeLine.deep.mkString(" "))
    return replaceArrayWholeLine.toArray
    // return pArrayWholeLine
  }
def main(args: Array[String]) {
if (args.length < 5) {
System.err.println("Usage: SparkStreamingNetflow <zkQuorum> <group> <topics> <numThreads> <countryEnrichment>")
System.exit(1)
}
val conf = ConfigFactory.load()
val alertSQL = conf.getString("netflow-streaming.alertSql")
val alertSQLList = conf.getStringList("netflow-streaming.alertSqlList")
val argsCountryEnrichment = args(4)
val format = new SimpleDateFormat("d/MM/y/hh/mm")
val formatESIndexDate = new SimpleDateFormat("YYYY.MM.dd")
// elasticsearch date format for automatic housekeeping with python curator is YYYY.MM.DD e.g. 2015.03.23
val hdfsPartitionDir = format.format(Calendar.getInstance().getTime())
val ESIndexDate = formatESIndexDate.format(Calendar.getInstance().getTime())
val elasticResource = "netflow-" + ESIndexDate + "/docs"
val Array(zkQuorum, group, topics, numThreads, countryEnrichment) = args
// val sparkConf = new SparkConf().setMaster("local[2]").setAppName("netflowkafka")
// the jars array below is only needed when running on an IDE when the IDE points to a spark master
// i.e. when the spark conf is something like this sparkConf.setMaster("spark://an-ip-address-or-hostname:7077")
// val jars = Array("C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\org\\\\apache\\\\spark\\\\spark-streaming-kafka_2.10\\\\1.3.0-cdh5.4.5\\\\spark-streaming-kafka_2.10-1.3.0-cdh5.4.5.jar",
// // "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\org\\\\apache\\\\kafka\\\\kafka_2.10\\\\0.8.0\\\\kafka_2.10-0.8.0.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\org\\\\apache\\\\kafka\\\\kafka_2.10\\\\0.8.2.0\\\\kafka_2.10-0.8.2.0.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\org\\\\apache\\\\spark\\\\spark-core_2.10\\\\1.3.0-cdh5.4.5\\\\spark-core_2.10-1.3.0-cdh5.4.5.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\com\\\\101tec\\\\zkclient\\\\0.3\\\\zkclient-0.3.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\com\\\\yammer\\\\metrics\\\\metrics-core\\\\2.2.0\\\\metrics-core-2.2.0.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\com\\\\esotericsoftware\\\\kryo\\\\kryo\\\\2.21\\\\kryo-2.21.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\org\\\\elasticsearch\\\\elasticsearch-spark_2.10\\\\2.1.0.Beta3\\\\elasticsearch-spark_2.10-2.1.0.Beta3.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\com\\\\maxmind\\\\db\\\\maxmind-db\\\\1.0.0\\\\maxmind-db-1.0.0.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\com\\\\maxmind\\\\geoip2\\\\geoip2\\\\2.1.0\\\\geoip2-2.1.0.jar",
// "C:\\\\Users\\\\801762473\\\\.m2\\\\repository\\\\org\\\\apache\\\\spark\\\\spark-hive_2.10\\\\1.3.0-cdh5.4.5\\\\spark-hive_2.10-1.3.0-cdh5.4.5.jar",
// "D:\\\\Bowen_Raw_Source\\\\IntelijProjects\\\\KafkaStreamingPOC\\\\target\\\\netflow-streaming-0.0.1-SNAPSHOT-jar-with-dependencies.jar")
// setup Spark
val sparkConf = new SparkConf()
// sparkConf.setJars(jars)
sparkConf.set("spark.serializer", classOf[KryoSerializer].getName) // Enable the Kryo serialization support with Spark for ES
sparkConf.set("es.index.auto.create", "true") // set to auto create the ES index
sparkConf.set("es.nodes", "192.168.160.72") // note, for multiple elastisearch nodes specify a csv list
sparkConf.setMaster("local[4]") // this specifies the master to be run in this IDe i.e. locally with 2 threads
// sparkConf.setMaster("spark://vm-cluster-node2:7077")
sparkConf.set("spark.executor.memory", "512m")
sparkConf.set("spark.driver.memory", "512m")
sparkConf.set("spark.cores.max", "4")
// Below line is the hostname or IP address for the driver to listen on. This is used for communicating with the executors and the standalone Master.
// sparkConf.set("spark.driver.host", "192.168.56.1")
// sparkConf.setJars(jars)
// sparkConf.setMaster("spark://bow-grd-nn-02.bowdev.net:7077")
// sparkConf.setMaster("spark://quickstart.cloudera:7077")
sparkConf.setAppName("netflowkafka")
// sparkConf.set("spark.executor.memory", "16g")
// sparkConf.set("spark.driver.memory", "4g")
// sparkConf.set("spark.driver.maxResultSize", "1g") // this is the default
// sparkConf.setJars(jars)
val ssc = new StreamingContext(sparkConf, Seconds(1))
// ssc.checkpoint("hdfs://bow-grd-nn-01.bowdev.net/user/faganp/spark_checkpoint") // specify an hdfs directory if working on an hadoop platform
ssc.checkpoint("spark_checkpoint") // specify an hdfs directory if working on an hadoop platform
// start to process the lines
// val topicMap = topics.split(",").map((_, numThreads.toInt)).toMap
// val lines = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap).map(_._2) // we may need to set the storage policy here
// val topicsSet = topics.split(",").toSet
val topicsSet = Set("netflow-input")
val kafkaParams = Map[String, String]("metadata.broker.list" -> "localhost:9092")
val lines = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
ssc, kafkaParams, topicsSet)
// val filteredLinesByLength = lines.filter(_.length > 1)
// val enrichLine = lines.map(line => line.split(",")(0).trim + line + "," + MaxMindSingleton.getInstance().getCountry(line.split(",")(3).trim))
// set the enrichLine val based on if we want Contry Enrichment or not
val enrichLine = {
if (countryEnrichment == true) {
lines.map(line => line + "," + MaxMindSingleton.getInstance().getCountry(line._2.split(",")(3).trim))
// lines.map(line => line)
}
else {
lines.map(line => line._2)
}
}
// sendToKafka(enrichLine._1)
/* Below only save rdd's with actual data in them and avoid the - java.lang.UnsupportedOperationException: empty collection
exception being raised */
// enrichLine.saveAsTextFiles("hdfs://bow-grd-nn-01.bowdev.net/user/faganp/spark-streaming/netflow_records", "/" + hdfsPartitionDir)
// enrichLine.foreachRDD( rdd => {
// if(!rdd.partitions.isEmpty)
// enrichLine.saveAsTextFiles("netflow_records", "/" + hdfsPartitionDir)
// })
// ********** Start of rules based engine **********
// convert csv RDD to space based RDD for below code
// val spaceEnrichLine = enrichLine.map(x => x.split(" "))
// enrichLine.foreachRDD((rdd: RDD[String], time: Time) => {
// // Get the singleton instance of SQLContext
// val sqlContext = SQLContextSingleton.getInstance(rdd.sparkContext)
// import sqlContext.implicits._
//
// // Convert RDD[String] to RDD[case class] to DataFrame
// // val wordsDataFrame = rdd.map(w => Record(w.split(",").toString)).toDF()
// val wordsDataFrame = rdd.map(w => Record(w)).toDF()
//
// // Register as table
// wordsDataFrame.registerTempTable("alerts")
//
// // Do word count on table using SQL and print it
// val wordCountsDataFrame =
// sqlContext.sql("select count(*) from alerts")
// println(s"========= $time =========")
// wordCountsDataFrame.show()
//
// alertSQLList.toArray().foreach( sqlToRun => {
// println("Running SQL Alert: " + sqlToRun)
// val results = sqlContext.sql(sqlToRun.toString)
// val alert = results.map(r => r.toString())
// println("Number of results in alert is : " + alert.count())
// // alert.saveAsTextFile("alert")
// sendToKafka(alert)
// })
//
// })
// sc is an existing SparkContext.
// val sqlContext = new org.apache.spark.sql.SQLContext(ssc)
// Start of Comment Out
enrichLine.foreachRDD((rdd: RDD[String], time: Time) => {
val sqlContext = SQLContextSingletonNetFlow.getInstance(rdd.sparkContext)
// The schema is encoded in a string, tak eoff the dashes
val schemaString = {
if (countryEnrichment == true) {
"starttime duration protocol srcaddr dir dstaddr dport state stos dtos totpkts totbytes country"
}
else {
"date time time-taken c-ip sc-status s-action sc-bytes cs-bytes cs-method cs-uri-scheme cs-host cs-uri-port cs-uri-path cs-uri-query cs-username cs-auth-group s-supplier-name rs(Content-Type) cs(Referer) cs(User-Agent) sc-filter-result cscategories x-virus-id s-ip s-action x-exception-id r-ip"
}
}
// Import Row.
import org.apache.spark.sql.Row;
// Import Spark SQL data types
import org.apache.spark.sql.types.{StringType, StructField, StructType};
// Generate the schema based on the string of schema
val schema =
StructType(
schemaString.split(" ").map(fieldName => StructField(fieldName, StringType, true)))
// Convert records of the RDD (people) to Rows.
// below the implicit conversion from datatypes can be done e.g. p(0).toInt if needed
val rowRDD = rdd.map(_.split(" ")).map(p => {
// val rowRDD = rdd.map(p => {
val p1 = lineParse(p)
// Row(p(0), p(1).trim, p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11), p(12), p(13), p(14), p(15), p(16), p(17), p(18), p(19), p(20), p(21), p(22), p(23), p(24), p(25), p(26))
Row(p1(0), p1(1).trim, p1(2), p1(3), p1(4), p1(5), p1(6), p1(7), p1(8), p1(9), p1(10), p1(11), p1(12), p1(13), p1(14), p1(15), p1(16), p1(17), p1(18), p1(19), p1(20), p1(21), p1(22), p1(23), p1(24), p1(25), p1(26))
}
)
//
// // Apply the schema to the RDD.
val alertsDataFrame = sqlContext.createDataFrame(rowRDD, schema)
// Convert RDD[String] to DataFrame
import sqlContext.implicits._
// val sscDataFrame = rdd.toDF("word")
// Register the DataFrames as a table.
alertsDataFrame.registerTempTable("alerts")
// alertsDataFrame.show(10)
// SQL statements can be run by using the sql methods provided by sqlContext.
// val results = sqlContext.sql("SELECT dir FROM people where dir = '->'")
alertSQLList.toArray().foreach( sqlToRun => {
println("Running SQL Alert: " + sqlToRun)
val results = sqlContext.sql(sqlToRun.toString)
val alert = results.map(r => r.toString())
println("Number of results in alert is : " + alert.count())
sendToKafka(alert)
})
//
//// alertSQLList.toList.foreach(sqlToRun => {
//// println("sqlToRun is : " + sqlToRun)
//// val results = sqlContext.sql(sqlToRun)
//// // convert the DataFrame to RDD[String]
//// val alert = results.map(r => r.toString())
//// alert.saveAsTextFile("alert")
//// // iterate around the RDD[String] and send to kafka
//// sendToKafka(alert)
//// }
// // The results of SQL queries are DataFrames and support all the normal RDD operations.
// // The columns of a row in the result can be accessed by field index or by field name.
// // results.map(t => "Name: " + t(0)).collect().foreach(println)
//
//// )
// // val results = sqlContext.sql(alertSQL)
// // // convert the DataFrame to RDD[String]
// // val alert = results.map(r => r.toString())
// // alert.saveAsTextFile("alert")
// // // iterate around the RDD[String] and send to kafka
// // sendToKafka(alert)
// //
// // // The results of SQL queries are DataFrames and support all the normal RDD operations.
// // // The columns of a row in the result can be accessed by field index or by field name.
// // results.map(t => "Name: " + t(0)).collect().foreach(println)
//
})
// End of Comment Out
// ********** End of rules based engine **********
// ********** Start of write to Elasticsearch **********
// prepare the elasticsearch DStream[String] with json formated data, p represents the whole enriched
// netflow line, note: if you click or highlight a word in the code Ctrl-j in Intelij will tell you
// the type of Scala object
// val enrichLineES = enrichLine.map(p => "{\\"StartTime\\" : " + "\\"" + p.split(",")(0) + "\\"" + " , " +
// "\\"Dur\\" : " + "\\"" + p.split(",")(1) + "\\"" + " , " +
// "\\"Proto\\" : " + "\\"" + p.split(",")(2) + "\\"" + " , " +
// "\\"SrcAddr\\" : " + "\\"" + p.split(",")(3) + "\\"" + " , " +
// "\\"Dir\\" : " + "\\"" + p.split(",")(5) + "\\"" + " , " +
// "\\"DstAddr\\" : " + "\\"" + p.split(",")(6) + "\\"" + " , " +
// "\\"Dport\\" : " + "\\"" + p.split(",")(7) + "\\"" + " , " +
// "\\"State\\" : " + "\\"" + p.split(",")(8) + "\\"" + " , " +
// "\\"sTos\\" : " + "\\"" + p.split(",")(9) + "\\"" + " , " +
// "\\"sTos\\" : " + "\\"" + p.split(",")(10) + "\\"" + " , " +
// "\\"dTos\\" : " + "\\"" + p.split(",")(11) + "\\"" + " , " +
// "\\"TotPkts\\" : " + "\\"" + p.split(",")(12) + "\\"" + " , " +
// "\\"TotBytes\\" : " + "\\"" + p.split(",")(13) + "\\"" + " , " +
// "\\"Label\\" : " + "\\"" + p.split(",")(14) + "\\"" + " , " +
// "\\"Country\\" : " + "\\"" + p.split(",")(3) + "\\"}")
//
// enrichLineES.foreachRDD { rdd =>
// if(!rdd.partitions.isEmpty) {
// val sparkConf = rdd.context
// val sqlContext = new SQLContext(sparkConf)
// val sendToEs = sqlContext.jsonRDD(rdd)
// sendToEs.saveToEs(elasticResource)
// }
// }
// ********** End of write to Elasticsearch **********
// ********** Start of write to Apache Kafka **********
// enrichLine.foreachRDD { rdd =>
// rdd.foreachPartition { partitionOfRecords =>
// val props = new Properties()
//// props.put("metadata.broker.list", "bow-grd-res-01.bowdev.net:9092,bow-grd-res-02.bowdev.net:9092,bow-grd-res-03.bowdev.net:9092")
// props.put("metadata.broker.list", "vm-cluster-node2:9092,vm-cluster-node3:9092,vm-cluster-node4:9092")
// props.put("serializer.class", "kafka.serializer.StringEncoder")
//
// // some properties we might wish to set commented out below
// // props.put("compression.codec", codec.toString)
// // props.put("producer.type", "sync")
// // props.put("batch.num.messages", BatchSize.toString)
// // props.put("message.send.max.retries", maxRetries.toString)
// // props.put("request.required.acks", "-1")
//
// val config = new ProducerConfig(props)
// val producer = new Producer[String, String](config)
// partitionOfRecords.foreach(row => {
// val msg = row.toString
// this.synchronized {
// producer.send(new KeyedMessage[String, String]("netflow-output2", msg))
// }
// })
// producer.close()
// }
// }
// ********** End of write to Apache Kafka **********
ssc.start()
ssc.awaitTermination()
}
} | faganpe/KafkaStreamingPOC | src/main/scala/SparkStreamingNetflow.scala | Scala | apache-2.0 | 22,928 |
package controllers
import org.specs2.specification.Scope
import org.specs2.matcher.JsonMatchers
import play.api.mvc.Result
import scala.concurrent.Future
import controllers.auth.AuthorizedRequest
import controllers.backend.{DocumentNodeBackend,SelectionBackend}
import models.InMemorySelection
class DocumentNodeControllerSpec extends ControllerSpecification with JsonMatchers {
trait BaseScope extends Scope {
val selection = InMemorySelection(Array(2L, 3L, 4L)) // override for a different Selection
val mockDocumentNodeBackend = smartMock[DocumentNodeBackend]
val mockSelectionBackend = smartMock[SelectionBackend]
// We assume this controller doesn't care about onProgress because the user
// recently cached a Selection. That's not necessarily true, but it should
// hold true most of the time.
mockSelectionBackend.findOrCreate(any, any, any, any) returns Future { selection }
val controller = new DocumentNodeController(
mockDocumentNodeBackend,
mockSelectionBackend,
fakeControllerComponents
)
}
"#countByNode" should {
trait CountByNodeScope extends BaseScope {
val documentSetId = 123L
mockDocumentNodeBackend.countByNode(any, any) returns Future.successful(Map())
val requestBody: Vector[(String,String)] = Vector("countNodes" -> "1,2,3", "tags" -> "3")
lazy val request = fakeAuthorizedRequest("POST", "/count").withFormUrlEncodedBody(requestBody: _*)
lazy val result = controller.countByNode(documentSetId)(request)
}
"return counts as a JsObject" in new CountByNodeScope {
mockDocumentNodeBackend.countByNode(any, any) returns Future.successful(Map(1L -> 2, 3L -> 4))
h.status(result) must beEqualTo(h.OK)
h.contentType(result) must beSome("application/json")
val json = h.contentAsString(result)
json must /("1" -> 2)
json must /("3" -> 4)
}
"pass Selection and nodes to documentNodeBackend" in new CountByNodeScope {
override val requestBody = Vector("countNodes" -> "1,2,3", "tags" -> "3")
h.status(result)
there was one(mockDocumentNodeBackend).countByNode(selection, Vector(1L, 2L, 3L))
}
"succeed if countNodes are not specified" in new CountByNodeScope {
override val requestBody = Vector("tags" -> "3")
h.status(result) must beEqualTo(h.OK)
there was one(mockDocumentNodeBackend).countByNode(selection, Vector())
}
}
}
| overview/overview-server | web/test/controllers/DocumentNodeControllerSpec.scala | Scala | agpl-3.0 | 2,449 |
/*
* Copyright (c) 2014 - 2015 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression
package proxies.phases
import Names.Debugger
import scala.reflect.NameTransformer
import scala.reflect.runtime.universe
import scala.util.Success
import org.scalaide.debug.internal.expression.Names.Debugger
/**
* Transforms all operations on primitives (both unary and binary) into method calls on newly created proxies.
*
* Also takes care of if-then-else control expression.
*
* Transforms:
* {{{
* int.unary_-.^(2)./(1).+(double).-(float)
* }}}
* into:
* {{{
* __context.proxy(
* __context.proxy(
* __context.proxy(
* __context.proxy(
* __context.proxy(int.__value[Int].unary_$minus).__value[Int]
* .$up(2)
* ).__value[Int]
* .$div(1)
* ).__value[Int]
* .$plus(double.__value[Double])
* ).__value[Double]
* .$minus(float.__value[Float])
* ).__value[Double]
* }}}
*/
class MockPrimitivesOperations
  extends AstTransformer[AfterTypecheck]
  with PrimitivesCommons {

  import universe._

  /**
   * Rewrites three tree shapes so primitive operations run through proxies:
   *  - binary operators whose operands are both primitives (except ==/!=),
   *  - unary operators (unary_!, unary_~, unary_-, unary_+) on a primitive,
   *  - `if` expressions, whose condition must be unwrapped to a real Boolean.
   * Everything else is delegated to `transformFurther`.
   */
  override final def transformSingleTree(tree: Tree, transformFurther: Tree => Tree): Tree = tree match {
    case Apply(Select(on, name), List(arg)) if isPrimitiveOperation(name) && isPrimitive(on) && isPrimitive(arg) =>
      repackTwoPrimitives(on, name, arg, transformFurther)
    case Select(on, name) if unaryOperations.contains(name.toString) && isPrimitive(on) =>
      repackUnaryOperation(on, name, transformFurther)
    // Branches are transformed recursively; only the condition is unwrapped.
    case If(nested, thenExpr, elseExpr) => If(
      obtainPrimitive(nested, transformFurther),
      transformSingleTree(thenExpr, transformFurther),
      transformSingleTree(elseExpr, transformFurther))
    case other => transformFurther(other)
  }

  /**
   * Produces a tree evaluating to the raw primitive behind `on`: a literal is
   * reused directly, otherwise `__value[T]` is called on the transformed proxy.
   */
  private def obtainPrimitive(on: Tree, transformFurther: Tree => Tree): Tree =
    on match {
      case ProxifiedPrimitive(literal) => literal
      case _ =>
        val typeForPrimitiveGetter = mirrorMethodType(on)
        TypeApply(
          Select(transformSingleTree(on, transformFurther), TermName(Debugger.primitiveValueOfProxyMethodName)),
          List(typeForPrimitiveGetter))
    }

  /** Builds the type argument for the `__value[T]` call from the tree's type name. */
  private def mirrorMethodType(on: Tree): Tree = {
    val typeName = TypeNames.getFromTree(on)
    Ident(TypeName(typeName))
  }

  /** Unwraps both operands, applies the operator, and re-wraps the result in a proxy. */
  private def repackTwoPrimitives(on: Tree, name: Name, arg: Tree, transformFurther: Tree => Tree): Tree = {
    val l = obtainPrimitive(on, transformFurther)
    val r = obtainPrimitive(arg, transformFurther)
    packPrimitive(Apply(Select(l, name), List(r)))
  }

  /** Unwraps the single operand, applies the unary operator, and re-wraps the result. */
  private def repackUnaryOperation(on: Tree, operationName: Name, transformFurther: Tree => Tree) =
    packPrimitive(Select(obtainPrimitive(on, transformFurther), operationName))

  // ==/!= are excluded from rewriting (encoded as $eq$eq / $bang$eq).
  private val notPrimitiveOperation = Set("==", "!=").map(NameTransformer.encode)

  // Encoded method names of the supported unary operators, e.g. unary_$minus.
  private val unaryOperations = Set("!", "~", "-", "+").map(name => s"unary_${NameTransformer.encode(name)}")

  private def isPrimitiveOperation(name: Name): Boolean = !notPrimitiveOperation.contains(name.toString)
}
| Kwestor/scala-ide | org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/expression/proxies/phases/MockPrimitivesOperations.scala | Scala | bsd-3-clause | 3,085 |
/*
* Copyright (c) 2011-2012, Alex McGuire, Louis Botterill
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package maker.utils.os
import maker.MakerProps
import scala.util.Properties
/** Small helpers for detecting the host operating system and probing ports. */
object OsUtils {
  // Normalized OS name; evaluated on every call, as in the original.
  private def osNameLower: String = Properties.osName.toLowerCase

  def isLinux = osNameLower.contains("linux")
  def isOSX = osNameLower.contains("os x")
  def isUnix = isLinux || isOSX

  /** True when `fuser` reports a process bound to the port over TCP or UDP. */
  def isPortUsed(port: Int) = {
    val protocols = List("tcp", "udp")
    protocols.exists { protocol =>
      Command("fuser", s"$port/$protocol").withNoOutput.exec == 0
    }
  }
}
| syl20bnr/maker | utils/src/maker/utils/os/OsUtils.scala | Scala | bsd-2-clause | 1,789 |
package akka.persistence.cassandra.journal
import java.util.concurrent.Executors
import akka.actor.{ActorRef, ActorSystem, PoisonPill, Props}
import akka.persistence._
import akka.persistence.cassandra.{CassandraLifecycle, CassandraPluginConfig}
import akka.testkit.{ImplicitSender, TestKit}
import com.datastax.driver.core.Session
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import org.scalatest.{MustMatchers, WordSpecLike}
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
object CassandraConfigCheckerSpec {
  // Test configuration: `cassandra-journal.target-partition-size = 5` is the
  // property whose persistence is under test; ports 9142 target the embedded
  // Cassandra started by CassandraLifecycle.
  val config = ConfigFactory.parseString(
    """
      |akka.persistence.snapshot-store.plugin = "cassandra-snapshot-store"
      |akka.persistence.journal.plugin = "cassandra-journal"
      |akka.persistence.journal.max-deletion-batch-size = 3
      |akka.persistence.publish-confirmations = on
      |akka.persistence.publish-plugin-commands = on
      |akka.test.single-expect-default = 10s
      |cassandra-journal.target-partition-size = 5
      |cassandra-journal.max-result-size = 3
      |cassandra-journal.port = 9142
      |cassandra-snapshot-store.port = 9142
    """.stripMargin)

  // Minimal persistent actor used only to force journal initialization:
  // it persists every String command and echoes "Received <msg>" to `receiver`.
  class DummyActor(val persistenceId: String, receiver: ActorRef) extends PersistentActor {
    // Recovery is a no-op; replayed events are ignored.
    def receiveRecover: Receive = {
      case x => ()
    }

    def receiveCommand: Receive = {
      case x: String => persist(x) { msg => receiver ! s"Received $msg" }
    }
  }
}
import akka.persistence.cassandra.journal.CassandraConfigCheckerSpec._
class CassandraConfigCheckerSpec extends TestKit(ActorSystem("test", config)) with ImplicitSender with WordSpecLike with MustMatchers with CassandraLifecycle {
  // Journal section of the config, resolved against the actor-system defaults.
  implicit val cfg = config.withFallback(system.settings.config).getConfig("cassandra-journal")
  implicit val pluginConfig = new CassandraPluginConfig(cfg)

  "CassandraConfigChecker" should {
    "persist value in cassandra" in {
      waitForPersistenceInitialization()
      val underTest = createCassandraConfigChecker
      // Start from an empty config table so initialization must write the property.
      underTest.session.execute(s"TRUNCATE ${pluginConfig.keyspace}.${pluginConfig.configTable}")
      val persistentConfig = underTest.initializePersistentConfig
      persistentConfig.get(CassandraJournalConfig.TargetPartitionProperty) must be(defined)
      persistentConfig.get(CassandraJournalConfig.TargetPartitionProperty).get must be("5")
      getTargetSize(underTest) must be("5")
    }

    "multiple persistence should keep the same value" in {
      waitForPersistenceInitialization()
      val underTest = createCassandraConfigChecker
      underTest.session.execute(s"TRUNCATE ${pluginConfig.keyspace}.${pluginConfig.configTable}")
      // Re-initializing repeatedly with the same size must be idempotent.
      (1 to 5).foreach(i => {
        val underTest = createCassandraConfigChecker(pluginConfig, cfg.withValue("target-partition-size", ConfigValueFactory.fromAnyRef("5")))
        val persistentConfig = underTest.initializePersistentConfig
        persistentConfig.get(CassandraJournalConfig.TargetPartitionProperty).get must be("5")
        assert(persistentConfig.contains(CassandraJournalConfig.TargetPartitionProperty))
        getTargetSize(underTest) must be("5")
      })
    }

    "throw exception when starting with wrong value" in {
      waitForPersistenceInitialization()
      val underTest = createCassandraConfigChecker
      underTest.session.execute(s"TRUNCATE ${pluginConfig.keyspace}.${pluginConfig.configTable}")
      underTest.initializePersistentConfig
      // A second journal configured with a conflicting size (3 vs persisted 5)
      // must refuse to start, and the stored value must stay untouched.
      val try3Size = createCassandraConfigChecker(pluginConfig, cfg.withValue("target-partition-size", ConfigValueFactory.fromAnyRef("3")))
      intercept[IllegalArgumentException] {
        try3Size.initializePersistentConfig
      }
      getTargetSize(underTest) must be("5")
    }

    "concurrent calls keep consistent value" in {
      waitForPersistenceInitialization()
      createCassandraConfigChecker.session.execute(s"TRUNCATE ${pluginConfig.keyspace}.${pluginConfig.configTable}")
      // Race 10 initializations with distinct sizes 1..10: exactly one may win;
      // all others must fail with IllegalArgumentException.
      implicit val ec = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(10))
      val resultFuture = Future.sequence((1 to 10).map(i => Future { (i, Try {
        val underTest = createCassandraConfigChecker(pluginConfig, cfg.withValue("target-partition-size", ConfigValueFactory.fromAnyRef(i.toString)))
        underTest.initializePersistentConfig
      }) }))
      val result = Await.result(resultFuture, 5.seconds)
      val firstSize = getTargetSize(createCassandraConfigChecker)
      // Partition by whether the attempted size matches the winner's size.
      val (success, failure) = result.partition(_._1 == firstSize.toInt)
      success.size must be(1)
      success.head._2.isSuccess must be(true)
      success.head._2.get.get(CassandraJournalConfig.TargetPartitionProperty).get must be(firstSize)
      failure.foreach(_._2 match {
        case Success(_) => fail("instance should fail due to wrong target-partition-size")
        case Failure(e) => e.isInstanceOf[IllegalArgumentException] must be(true)
      })
    }
  }

  // Builds a CassandraConfigChecker bound to a fresh cluster session and the
  // given journal configuration.
  def createCassandraConfigChecker(implicit pluginConfig: CassandraPluginConfig, cfg: Config): CassandraConfigChecker = {
    val clusterSession = pluginConfig.clusterBuilder.build.connect()
    new CassandraConfigChecker {
      override def session: Session = clusterSession
      override def config: CassandraJournalConfig = new CassandraJournalConfig(cfg)
    }
  }

  // Spins up a throwaway persistent actor and waits for a round-trip, ensuring
  // the journal plugin (and its schema) is fully initialized before a test runs.
  def waitForPersistenceInitialization() = {
    val actor = system.actorOf(Props(classOf[DummyActor], "p1", self))
    actor ! "Hi"
    expectMsg("Received Hi")
    actor ! PoisonPill
  }

  // Reads the persisted target-partition-size value straight from the config table.
  def getTargetSize(checker: CassandraConfigChecker): String = {
    checker.session.execute(s"${checker.selectConfig} WHERE property='${CassandraJournalConfig.TargetPartitionProperty}'").one().getString("value")
  }
}
| jparkie/akka-persistence-cassandra | src/test/scala/akka/persistence/cassandra/journal/CassandraConfigCheckerSpec.scala | Scala | apache-2.0 | 5,749 |
package at.logic.gapt.provers.maxsat
import at.logic.gapt.proofs.resolution.FClause
/**
* Created by frain on 3/31/15.
*/
/**
 * MaxSAT solver backend that shells out to the external `toysat` binary
 * (available at https://github.com/msakai/toysolver).
 */
class ToySAT extends MaxSATSolverBinary {
  /** Input format understood by toysat. */
  def format() = Format.ToySAT

  /** Warning shown when the binary is not found on PATH. */
  def noBinaryWarn() = "Please put the toysat binary (available at https://github.com/msakai/toysolver) into PATH"

  /** Command line to run; toysat reads the problem from `in` and prints to stdout (`out` unused). */
  def command( in: String, out: String ) = List( "toysat", "--maxsat", in )

  /**
   * Solves a weighted partial MaxSAT instance.
   *
   * @param hard clauses that must be satisfied
   * @param soft (clause, weight) pairs to be satisfied if possible
   */
  def solve( hard: List[FClause], soft: List[(FClause, Int)] ) =
    getFromMaxSATBinary( hard, soft )
}
| gisellemnr/gapt | src/main/scala/at/logic/gapt/provers/maxsat/ToySAT.scala | Scala | gpl-3.0 | 501 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.sources
import java.io.ByteArrayOutputStream
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{StreamTest, Trigger}
class ConsoleWriteSupportSuite extends StreamTest {
  import testImplicits._

  // Each test captures what the console sink prints for one streaming query and
  // compares it with the exact batch-by-batch output, so the expected strings
  // below (including blank lines) must match byte-for-byte.
  test("microbatch - default") {
    val input = MemoryStream[Int]
    val captured = new ByteArrayOutputStream()
    Console.withOut(captured) {
      val query = input.toDF().writeStream.format("console").start()
      try {
        input.addData(1, 2, 3)
        query.processAllAvailable()
        input.addData(4, 5, 6)
        query.processAllAvailable()
        // An empty batch still prints a header and an empty table.
        input.addData()
        query.processAllAvailable()
      } finally {
        query.stop()
      }
    }

    assert(captured.toString() ==
      """-------------------------------------------
        |Batch: 0
        |-------------------------------------------
        |+-----+
        ||value|
        |+-----+
        ||    1|
        ||    2|
        ||    3|
        |+-----+
        |
        |-------------------------------------------
        |Batch: 1
        |-------------------------------------------
        |+-----+
        ||value|
        |+-----+
        ||    4|
        ||    5|
        ||    6|
        |+-----+
        |
        |-------------------------------------------
        |Batch: 2
        |-------------------------------------------
        |+-----+
        ||value|
        |+-----+
        |+-----+
        |
        |""".stripMargin)
  }

  // The numRows option (case-insensitive: "NUMROWS") limits how many rows are shown.
  test("microbatch - with numRows") {
    val input = MemoryStream[Int]
    val captured = new ByteArrayOutputStream()
    Console.withOut(captured) {
      val query = input.toDF().writeStream.format("console").option("NUMROWS", 2).start()
      try {
        input.addData(1, 2, 3)
        query.processAllAvailable()
      } finally {
        query.stop()
      }
    }

    assert(captured.toString() ==
      """-------------------------------------------
        |Batch: 0
        |-------------------------------------------
        |+-----+
        ||value|
        |+-----+
        ||    1|
        ||    2|
        |+-----+
        |only showing top 2 rows
        |
        |""".stripMargin)
  }

  // With truncate enabled, cell values longer than 20 characters are cut to
  // 17 characters plus "...".
  test("microbatch - truncation") {
    val input = MemoryStream[String]
    val captured = new ByteArrayOutputStream()
    Console.withOut(captured) {
      val query = input.toDF().writeStream.format("console").option("TRUNCATE", true).start()
      try {
        input.addData("123456789012345678901234567890")
        query.processAllAvailable()
      } finally {
        query.stop()
      }
    }

    assert(captured.toString() ==
      """-------------------------------------------
        |Batch: 0
        |-------------------------------------------
        |+--------------------+
        ||               value|
        |+--------------------+
        ||12345678901234567...|
        |+--------------------+
        |
        |""".stripMargin)
  }

  // Continuous mode: only checks that a console-sink query starts and stops
  // cleanly under a continuous trigger; output content is not asserted.
  test("continuous - default") {
    val captured = new ByteArrayOutputStream()
    Console.withOut(captured) {
      val input = spark.readStream
        .format("rate")
        .option("numPartitions", "1")
        .option("rowsPerSecond", "5")
        .load()
        .select('value)

      val query = input.writeStream.format("console").trigger(Trigger.Continuous(200)).start()
      assert(query.isActive)
      query.stop()
    }
  }
}
| darionyaphet/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ConsoleWriteSupportSuite.scala | Scala | apache-2.0 | 4,263 |
package java.nio
// DoubleBuffer backed by (a slice of) a heap ByteBuffer's byte array, reading
// and writing 8-byte doubles with the given endianness. Most operations
// forward to the shared Gen* helpers; @noinline/@inline annotations are kept
// as-is since they matter for Scala.js code generation.
private[nio] final class HeapByteBufferDoubleView private (
    _capacity: Int,
    override private[nio] val _byteArray: Array[Byte],
    override private[nio] val _byteArrayOffset: Int,
    _initialPosition: Int, _initialLimit: Int,
    _readOnly: Boolean, override private[nio] val isBigEndian: Boolean)
  extends DoubleBuffer(_capacity, null, -1) {

  position(_initialPosition)
  limit(_initialLimit)

  // Factory used by the generic helpers to create further views of this kind.
  private[this] implicit def newHeapDoubleBufferView =
    HeapByteBufferDoubleView.NewHeapByteBufferDoubleView

  def isReadOnly(): Boolean = _readOnly

  def isDirect(): Boolean = false

  @noinline
  def slice(): DoubleBuffer =
    GenHeapBufferView(this).generic_slice()

  @noinline
  def duplicate(): DoubleBuffer =
    GenHeapBufferView(this).generic_duplicate()

  @noinline
  def asReadOnlyBuffer(): DoubleBuffer =
    GenHeapBufferView(this).generic_asReadOnlyBuffer()

  @noinline
  def get(): Double =
    GenBuffer(this).generic_get()

  @noinline
  def put(c: Double): DoubleBuffer =
    GenBuffer(this).generic_put(c)

  @noinline
  def get(index: Int): Double =
    GenBuffer(this).generic_get(index)

  @noinline
  def put(index: Int, c: Double): DoubleBuffer =
    GenBuffer(this).generic_put(index, c)

  @noinline
  override def get(dst: Array[Double], offset: Int, length: Int): DoubleBuffer =
    GenBuffer(this).generic_get(dst, offset, length)

  @noinline
  override def put(src: Array[Double], offset: Int, length: Int): DoubleBuffer =
    GenBuffer(this).generic_put(src, offset, length)

  @noinline
  def compact(): DoubleBuffer =
    GenHeapBufferView(this).generic_compact()

  @noinline
  def order(): ByteOrder =
    GenHeapBufferView(this).generic_order()

  // Private API

  // Raw element access at a logical double index, delegating the byte-level
  // load/store (and endianness handling) to byteArrayBits.
  @inline
  private[nio] def load(index: Int): Double =
    GenHeapBufferView(this).byteArrayBits.loadDouble(index)

  @inline
  private[nio] def store(index: Int, elem: Double): Unit =
    GenHeapBufferView(this).byteArrayBits.storeDouble(index, elem)
}
private[nio] object HeapByteBufferDoubleView {

  // Typeclass-style factory consumed by GenHeapBufferView to build views of
  // this concrete type; a Double occupies 8 bytes.
  private[nio] implicit object NewHeapByteBufferDoubleView
      extends GenHeapBufferView.NewHeapBufferView[DoubleBuffer] {
    def bytesPerElem: Int = 8

    def apply(capacity: Int, byteArray: Array[Byte], byteArrayOffset: Int,
        initialPosition: Int, initialLimit: Int, readOnly: Boolean,
        isBigEndian: Boolean): DoubleBuffer = {
      new HeapByteBufferDoubleView(capacity, byteArray, byteArrayOffset,
          initialPosition, initialLimit, readOnly, isBigEndian)
    }
  }

  // Entry point for ByteBuffer.asDoubleBuffer() on heap byte buffers.
  @inline
  private[nio] def fromHeapByteBuffer(byteBuffer: HeapByteBuffer): DoubleBuffer =
    GenHeapBufferView.generic_fromHeapByteBuffer(byteBuffer)
}
| colinrgodsey/scala-js | javalib/src/main/scala/java/nio/HeapByteBufferDoubleView.scala | Scala | bsd-3-clause | 2,689 |
/* sbt -- Simple Build Tool
* Copyright 2009, 2010, 2011 Mark Harrah
*/
package xsbt.boot
import Pre._
import java.io.{File, FileFilter}
import java.net.{URL, URLClassLoader}
import java.util.concurrent.Callable
// Lazily retrieves (downloading if necessary) a set of jars and exposes them
// through a class loader. Retrieval runs under an optional file lock so that
// concurrent launcher instances do not corrupt the download.
trait Provider
{
  def configuration: UpdateConfiguration
  // Directories whose *.jar files form the base classpath.
  def baseDirectories: List[File]
  // Classes that must be loadable for the retrieved artifact to be considered intact.
  def testLoadClasses: List[String]
  def extraClasspath: Array[File]
  def target: UpdateTarget
  // Human-readable label used in retrieval error messages.
  def failLabel: String
  def parentLoader: ClassLoader
  // When Some, retrieval/loading is serialized on this lock file.
  def lockFile: Option[File]
  def classpath: Array[File] = Provider.getJars(baseDirectories)
  def fullClasspath: Array[File] = concat(classpath, extraClasspath)
  def reason: String = ""

  def retrieveFailed: Nothing = fail("")
  def retrieveCorrupt(missing: Iterable[String]): Nothing = fail(": missing " + missing.mkString(", "))
  private def fail(extra: String) =
    throw new xsbti.RetrieveException(versionString, "Could not retrieve " + failLabel + extra)
  // Version reported in errors: the Scala version for a Scala update, the
  // application version otherwise.
  private def versionString: String = target match { case _: UpdateScala => configuration.scalaVersion; case a: UpdateApp => Value.get(a.id.version) }

  // Eagerly initialized at construction: runs `initialize` under the lock (if any).
  val (jars, loader) = Locks(orNull(lockFile), new initialize)
  private[this] def orNull[T >: Null](opt: Option[T]): T = opt match { case None => null; case Some(x) => x }

  // Builds the class loader; if any test class is missing, triggers an update
  // and retries once, failing with retrieveFailed/retrieveCorrupt as appropriate.
  private final class initialize extends Callable[(Array[File], ClassLoader)]
  {
    def call =
    {
      val (existingJars, existingLoader) = createLoader
      if(Provider.getMissing(existingLoader, testLoadClasses).isEmpty)
        (existingJars, existingLoader)
      else
      {
        val retrieveSuccess = ( new Update(configuration) )(target, reason)
        if(retrieveSuccess)
        {
          val (newJars, newLoader) = createLoader
          val missing = Provider.getMissing(newLoader, testLoadClasses)
          if(missing.isEmpty) (newJars, newLoader) else retrieveCorrupt(missing)
        }
        else
          retrieveFailed
      }
    }
    def createLoader =
    {
      val full = fullClasspath
      (full, new URLClassLoader(Provider.toURLs(full), parentLoader) )
    }
  }
}
object Provider
{
  /** All jar files found directly inside the given directories. */
  def getJars(directories: List[File]): Array[File] =
    toArray(directories.flatMap(directory => wrapNull(directory.listFiles(JarFilter))))

  /** Guards against File.listFiles returning null (missing or unreadable directory). */
  def wrapNull(a: Array[File]): Array[File] =
    if (a eq null) new Array[File](0) else a

  object JarFilter extends FileFilter
  {
    def accept(candidate: File) = !candidate.isDirectory && candidate.getName.endsWith(".jar")
  }

  /** Names from `classes` that `loader` cannot load (classes are not initialized). */
  def getMissing(loader: ClassLoader, classes: Iterable[String]): Iterable[String] =
  {
    def canLoad(className: String) =
      try { Class.forName(className, false, loader); true }
      catch { case _: ClassNotFoundException => false }
    classes.toList.filterNot(canLoad)
  }

  def toURLs(files: Array[File]): Array[URL] =
    for (file <- files) yield file.toURI.toURL
}
package applicant.etl
import applicant.nlp._
import applicant.ml.score._
import applicant.ml.regression._
import java.text.DecimalFormat
import java.net.{URL, HttpURLConnection}
import scala.io._
import scala.util._
import scala.collection.mutable.{ListBuffer, Map, LinkedHashMap}
import org.apache.spark.mllib.feature.{Word2Vec, Word2VecModel}
import scala.collection.JavaConversions._
import org.apache.commons.lang3.text.WordUtils
// Mutable bag of everything extracted from one resume. Scalar fields default
// to "" (meaning "not found"); list fields accumulate every tagged occurrence.
class ApplicantData {
  var name, recentTitle, recentLocation, recentOrganization, degree, school, email, phone, linkedin, indeed, github, fullText, applicantid: String = ""
  var languageList: ListBuffer[String] = new ListBuffer[String]()
  var bigDataList: ListBuffer[String] = new ListBuffer[String]()
  var etlList: ListBuffer[String] = new ListBuffer[String]()
  var databaseList: ListBuffer[String] = new ListBuffer[String]()
  var webappList: ListBuffer[String] = new ListBuffer[String]()
  var mobileList: ListBuffer[String] = new ListBuffer[String]()
  var urlList: ListBuffer[String] = new ListBuffer[String]()
  // Positions other than the most recent one (titles/locations/orgs are
  // parallel lists populated in tagging order).
  var otherTitleList: ListBuffer[String] = new ListBuffer[String]()
  var otherLocationList: ListBuffer[String] = new ListBuffer[String]()
  var otherOrganizationList: ListBuffer[String] = new ListBuffer[String]()
  // Two-decimal formatter; not used by toMap (presumably used by callers — verify).
  var df: DecimalFormat = new DecimalFormat("#.##")
  // Per-feature score components keyed by feature name.
  var featureScores = Map[String,Double]()
  // Raw GitHub API fields for this applicant, keyed by API field name.
  var githubData = Map[String,String]()
  // Overall ML score; -1.0 until scored.
  var score = -1.0
  var gpa = 0.0

  /**
   * Converts the ApplicantData object into a map for saving to Elasticsearch
   *
   * @return A map representation of this ApplicantData object
   */
  def toMap(): Map[String, Any] = {
    // Keys below define the Elasticsearch document schema — do not rename.
    val map: Map[String, Any] = Map(
      "id" -> applicantid,
      "name" -> name,
      "score" -> score,
      "currentLocation" -> Map(
        "title" -> recentTitle,
        "location" -> recentLocation,
        "organization" -> recentOrganization
      ),
      "skills" -> Map(
        "language" -> languageList,
        "bigdata" -> bigDataList,
        "etl" -> etlList,
        "database" -> databaseList,
        "webapp" -> webappList,
        "mobile" -> mobileList
      ),
      "education" -> Map(
        "degree" -> degree,
        "school" -> school,
        "gpa" -> gpa
      ),
      "contact" -> Map(
        "indeed" -> indeed,
        "linkedin" -> linkedin,
        "github" -> github,
        "email" -> email,
        "phone" -> phone
      ),
      "additionalInfo" -> Map(
        "pastPositions" -> Map(
          "title" -> otherTitleList,
          "location" -> otherLocationList,
          "organization" -> otherOrganizationList
        ),
        "url" -> urlList,
        "githubData" -> githubData,
        "resume" -> fullText
      ),
      // 150-unit summary generated from the full resume text.
      "summary" -> ResumeSummarizer.summarize(fullText, 150),
      "features" -> featureScores
    )
    return map
  }
}
object ApplicantData {
  /**
   * Creates a new ApplicantData object and loads variables
   *
   * @param taggedEntities A LinkedHashSet object from the EntityGrabber class
   * @param applicantID A String to be used as the applicant's unique ID
   * @param fullText A String of the full parsed resume from extractText
   */
  def apply(taggedEntities: LinkedHashMap[(String, String),(String,String)], applicantid: String, fullText: String): ApplicantData = {
    //degree, location, organization, person, school, title, bigdata, database, etl, webapp, mobile, language, gpa, email, phone, url
    val app = new ApplicantData()
    val notFound : String = ""
    app.applicantid = applicantid
    app.fullText = fullText

    // Route each tagged (type, value) pair into the matching field. Scalar
    // fields use first-wins semantics (the `== notFound` guards); list fields
    // accumulate every occurrence. URL-like fields are normalized to include
    // a scheme (github forced to https).
    taggedEntities.values.foreach { pair =>
      pair match {
        case ("degree", _) if (app.degree == notFound) => (app.degree = pair._2)
        case ("location", _) => if (app.recentLocation == notFound) {app.recentLocation = pair._2 }
          app.otherLocationList += pair._2
        case ("organization", _) => if (app.recentOrganization == notFound) {app.recentOrganization = pair._2 }
          app.otherOrganizationList += pair._2
        case ("person", _) if (app.name == notFound) => app.name = pair._2
        case ("school", _) if (app.school == notFound) => app.school = pair._2
        case ("title", _) => if (app.recentTitle == notFound) {app.recentTitle = pair._2 }
          app.otherTitleList += pair._2
        case ("bigdata", _) => (app.bigDataList += pair._2)
        case ("database", _) => (app.databaseList += pair._2)
        case ("etl", _) => (app.etlList += pair._2)
        case ("webapp", _) => (app.webappList += pair._2)
        case ("mobile", _) => (app.mobileList += pair._2)
        case ("language", _) => (app.languageList += pair._2)
        // More than one '.' means a malformed GPA token; fall back to 0.0.
        case ("gpa", _) if (app.gpa == 0.0) => app.gpa = if(pair._2.count(_ == '.') > 1) 0.0 else pair._2.toDouble
        case ("url", _) => (app.urlList += pair._2)
        case ("indeed", _) if (app.indeed == notFound && pair._2.startsWith("http")) => app.indeed = pair._2
        case ("indeed", _) if (app.indeed == notFound && pair._2.startsWith("www")) => app.indeed = "http://" + pair._2
        case ("indeed", _) if (app.indeed == notFound) => app.indeed = "http://www." + pair._2
        case ("linkedin", _) if (app.linkedin == notFound && pair._2.startsWith("http")) => app.linkedin = pair._2
        case ("linkedin", _) if (app.linkedin == notFound && pair._2.startsWith("www")) => app.linkedin = "http://" + pair._2
        case ("linkedin", _) if (app.linkedin == notFound) => app.linkedin = "http://www." + pair._2
        case ("github", _) if (app.github == notFound && pair._2.startsWith("https")) => app.github = pair._2
        case ("github", _) if (app.github == notFound && pair._2.startsWith("http")) => app.github = "https" + pair._2.substring(4)
        case ("github", _) if (app.github == notFound && pair._2.startsWith("www")) => app.github = "https://" + pair._2.substring(3)
        case ("github", _) if (app.github == notFound) => app.github = "https://" + pair._2
        case ("email", _) if (app.email == notFound) => app.email = pair._2
        case ("phone", _) if (app.phone == notFound) => app.phone = pair._2
        case _ =>
      }
    }

    // Enrich with the applicant's GitHub profile (empty map if no/invalid URL).
    app.githubData = collection.mutable.Map(ApiMapper.githubAPI(app.github).toSeq: _*)

    // Name fallback chain: tagged person -> GitHub display name -> GitHub login
    // -> first two whitespace-separated tokens of the resume text.
    if (app.name == notFound) {
      if (app.githubData != collection.mutable.Map() && app.githubData("name") != null && app.githubData("name") != "") {
        app.name = app.githubData("name")
      }
      else if (app.githubData != collection.mutable.Map() && app.githubData("login") != null && app.githubData("login") != "") {
        app.name = app.githubData("login")
      }
      else {
        val textArr = fullText.trim().split("\\s+")
        app.name = textArr(0) + " " + textArr(1)
      }
    }
    app.name = WordUtils.capitalizeFully(app.name)

    return app
  }

  /**
   * Creates a new ApplicantData object and loads variables
   *
   * @param elasticMap A map structure returned from querying on the elasticsearch applicant index
   * @return A new ApplicantData object
   */
  def apply(elasticMap: scala.collection.Map[String, AnyRef]): ApplicantData = {
    // Inverse of ApplicantData.toMap: rebuilds the object from an Elasticsearch
    // document. Every optional section is matched with Option so missing keys
    // simply leave the corresponding fields at their defaults.
    val app = new ApplicantData()
    app.applicantid = EsUtils.getString(elasticMap("id"))
    app.name = EsUtils.getString(elasticMap("name"))
    app.score = EsUtils.getDouble(elasticMap("score"))

    elasticMap.get("features") match {
      case Some(features) =>
        app.featureScores = features.asInstanceOf[Map[String, Double]]
      case None =>
    }

    elasticMap.get("currentLocation") match {
      case Some(any) =>
        val locMap = any.asInstanceOf[Map[String, String]]
        app.recentTitle = EsUtils.getString(locMap("title"))
        app.recentLocation = EsUtils.getString(locMap("location"))
        app.recentOrganization = EsUtils.getString(locMap("organization"))
      case None =>
    }

    elasticMap.get("skills") match {
      case Some(any) =>
        val skillMap = any.asInstanceOf[Map[String, JListWrapper[String]]]
        app.languageList = EsUtils.getList(skillMap("language"))
        app.bigDataList = EsUtils.getList(skillMap("bigdata"))
        app.etlList = EsUtils.getList(skillMap("etl"))
        app.databaseList = EsUtils.getList(skillMap("database"))
        app.webappList = EsUtils.getList(skillMap("webapp"))
        app.mobileList = EsUtils.getList(skillMap("mobile"))
      case None =>
    }

    elasticMap.get("education") match {
      case Some(any) =>
        val eduMap = any.asInstanceOf[Map[String, String]]
        app.degree = EsUtils.getString(eduMap("degree"))
        app.school = EsUtils.getString(eduMap("school"))
        app.gpa = EsUtils.getDouble(eduMap("gpa"))
      case None =>
    }

    elasticMap.get("contact") match {
      case Some(any) =>
        val contactMap = any.asInstanceOf[Map[String, String]]
        app.indeed = EsUtils.getString(contactMap("indeed"))
        app.linkedin = EsUtils.getString(contactMap("linkedin"))
        app.github = EsUtils.getString(contactMap("github"))
        app.email = EsUtils.getString(contactMap("email"))
        app.phone = EsUtils.getString(contactMap("phone"))
      case None =>
    }

    elasticMap.get("additionalInfo") match {
      case Some(any) =>
        val infoMap = any.asInstanceOf[Map[String, AnyRef]]

        infoMap.get("pastPositions") match {
          case Some(anyPos) =>
            val pastPosMap = anyPos.asInstanceOf[Map[String, JListWrapper[String]]]
            app.otherTitleList = EsUtils.getList(pastPosMap("title"))
            app.otherLocationList = EsUtils.getList(pastPosMap("location"))
            app.otherOrganizationList = EsUtils.getList(pastPosMap("organization"))
          case None =>
        }

        infoMap.get("url") match {
          case Some(any) =>
            app.urlList = EsUtils.getList(any.asInstanceOf[JListWrapper[String]])
          case None =>
        }

        infoMap.get("githubData") match {
          case Some(anyGit) =>
            app.githubData = (anyGit.asInstanceOf[Map[String, String]])
          case None =>
        }

        app.fullText = EsUtils.getString(infoMap("resume"))
      case None =>
    }

    return app
  }
}
| dataworks/internship-2016 | etl/src/scala/applicant/etl/ApplicantData.scala | Scala | apache-2.0 | 10,151 |
package jp.opap.material.resource
import java.util.UUID
import akka.Done
import akka.actor.ActorRef
import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling.toEventStream
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.sse.ServerSentEvent
import akka.http.scaladsl.model.{HttpEntity, MediaTypes, StatusCodes}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.stream.scaladsl.Source
import akka.stream.{CompletionStrategy, OverflowStrategy}
import ch.megard.akka.http.cors.scaladsl.CorsDirectives.cors
import ch.megard.akka.http.cors.scaladsl.settings.CorsSettings
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import io.circe.generic.auto._
import io.circe.syntax._
import jp.opap.material.MaterialExplorer.ServiceBundle
import jp.opap.material.dao.{MongoComponentDao, MongoRepositoryDao, MongoThumbnailDao}
import jp.opap.material.facade.RepositoryDataEventEmitter
import jp.opap.material.facade.RepositoryDataEventEmitter.{Progress, ProgressListener}
import jp.opap.material.resource.AppResources.CORS_SETTINGS
import scala.concurrent.ExecutionContext
// HTTP resource layer: defines the REST routes (repositories, images,
// thumbnails) and a Server-Sent-Events endpoint streaming repository-update
// progress from the RepositoryDataEventEmitter.
class AppResources(val services: ServiceBundle, val eventEmitter: RepositoryDataEventEmitter)
    (implicit executionContext: ExecutionContext)
  extends /* SprayJsonSupport with */ FailFastCirceSupport {

  val projectDao: MongoRepositoryDao = services.repositoryDao
  val componentDao: MongoComponentDao = services.componentDao
  val thumbnailDao: MongoThumbnailDao = services.thumbnailDao

  val route: Route = cors(CORS_SETTINGS) {
    concat(
      // GET /repositories — all repositories as {"items": [...]}.
      pathPrefix("repositories") {
        get {
          val items = this.projectDao.find()
          val data = Map("items" -> items)
          complete(data.asJson)
        }
      },
      // GET /images — all image components as {"items": [...]}.
      path("images") {
        get {
          val items = this.componentDao.findImages()
          val data = Map("items" -> items)
          complete(data.asJson)
        }
      },
      // GET /thumbnail/{fileId} — PNG thumbnail for the file's blob;
      // 404 when either the file or its thumbnail is absent.
      path("thumbnail" / Segment) { fileId =>
        get {
          val id = UUID.fromString(fileId)
          try {
            val blobId = this.componentDao.findFileById(id).get.blobId
            val thumbnail = this.thumbnailDao.findData(blobId).get
            complete(HttpEntity(MediaTypes.`image/png`, thumbnail))
          } catch {
            case _: NoSuchElementException => complete(StatusCodes.NotFound)
          }
        }
      },
      // GET /progress — SSE stream of update progress (see doProgress).
      path("progress") {
        get {
          complete {
            // TODO: this needs tests
            Source
              .actorRef(
                completionMatcher = { case Done => CompletionStrategy.immediately },
                failureMatcher = PartialFunction.empty,
                bufferSize = 100,
                overflowStrategy = OverflowStrategy.dropHead,
              ).mapMaterializedValue(doProgress)
          }
        }
      },
    )
  }

  // Wires one SSE client to the emitter: while an update runs, progress events
  // are forwarded and the stream is closed with a "close" event when done;
  // if no update is running, a single "negative" event is sent and the stream ends.
  def doProgress(actorRef: ActorRef): Unit = {
    if (eventEmitter.getRunning) {
      eventEmitter.subscribe(new ProgressListener {
        override def onUpdate(progress: Progress): Unit = {
          actorRef ! ServerSentEvent(progress.asJson.toString)
        }

        override def onFinish(): Unit = {
          actorRef ! ServerSentEvent("close", "close")
          actorRef ! Done
        }
      })
    } else {
      actorRef ! ServerSentEvent("negative", "negative")
      actorRef ! Done
    }
  }
}
object AppResources {
  // CORS settings shared by all routes; defaults plus the REST verbs used here.
  val CORS_SETTINGS: CorsSettings = CorsSettings.defaultSettings
    .withAllowedMethods(Seq(GET, POST, PUT, DELETE, OPTIONS).to)
}
| opap-jp/material-explorer | rest/src/main/scala/jp/opap/material/resource/AppResources.scala | Scala | mit | 3,575 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.junit.{Assert, Test}
/**
 * Tests for the allowed-lateness configuration of
 * [[org.apache.flink.streaming.api.scala.CoGroupedStreams]].
 */
class CoGroupedStreamsTest {
  private val env = StreamExecutionEnvironment.getExecutionEnvironment
  private val dataStream1 = env.fromElements("a1", "a2", "a3")
  private val dataStream2 = env.fromElements("a1", "a2")
  private val keySelector: String => String = s => s
  private val tsAssigner = TumblingEventTimeWindows.of(Time.milliseconds(1))

  @Test
  def testSetAllowedLateness(): Unit = {
    val expectedLateness = Time.milliseconds(42)

    // Build the windowed co-group first, then apply the lateness setting.
    val windowedCoGroup = dataStream1
      .coGroup(dataStream2)
      .where(keySelector)
      .equalTo(keySelector)
      .window(tsAssigner)
    val withLateness = windowedCoGroup.allowedLateness(expectedLateness)

    // The configured lateness must be reported back verbatim.
    Assert.assertEquals(
      expectedLateness.toMilliseconds,
      withLateness.allowedLateness.toMilliseconds)
  }
}
| tzulitai/flink | flink-streaming-scala/src/test/scala/org/apache/flink/streaming/api/scala/CoGroupedStreamsTest.scala | Scala | apache-2.0 | 1,816 |
package scwebapp.header
import scutil.core.implicits.*
import scwebapp.HeaderType
// TODO not typesafe: directives are carried around as raw strings
object CacheControl extends HeaderType[CacheControl] {
	val key = "Cache-Control"

	/** Parsing never fails: the raw header value is simply split on commas. */
	def parse(it: String): Option[CacheControl] =
		Some(CacheControl(it.splitAroundChar(',')))

	/** Renders the directives back into a single comma-separated header value. */
	def unparse(it: CacheControl): String =
		it.directives.mkString(", ")
}

/** A parsed Cache-Control header value: one entry per directive. */
final case class CacheControl(directives: Seq[String])
| ritschwumm/scwebapp | modules/core/src/main/scala/scwebapp/header/CacheControl.scala | Scala | bsd-2-clause | 407 |
package skidbladnir
import scala.collection.mutable.{Map => MutMap}
/**
 * Mix-in DSL for declaring and wiring components of an assembly.
 *
 * The implicit conversions (`C2NB`, `C2NC`, `S2F`) and the small builder
 * classes below enable phrases such as `base named "x"`,
 * `compo connected "pin" from "other" named "x"` and
 * `"jack" from "compoA" connect "compoB"`. All registration and wiring is
 * delegated to the private [[Runtime]]; wiring is only legal during the
 * construction phase (before `go()`/`gowait()`/`console` starts the runtime).
 */
trait Assembly {
  //Fields
  // Shared runtime owning component registration, wiring and lifecycle.
  private val runtime = new Runtime
  //Components construction
  // Pivot so a Base can be registered via `base named "name"`.
  implicit def C2NB (b:Base):NB = {new NB(b)}
  protected final class NB(val b:Base) {
    def named(n:String) = {
      runtime.newStaticBase(b, n)
    }
  }
  // Pivot so a Compo can start the `connected ... from ... named ...` chain.
  implicit def C2NC (c:Compo):NC = {new NC(c)}
  protected final class NC(val p:Compo) {
    def connected (pin:(String)) = {
      new CF(p, pin)
    }
  }
  // Second step of the chain: remembers the pin and waits for the source name.
  protected final class CF(c:Compo, pin:(String)) {
    def from(pcn:String) = {
      new NCC(c, pin, pcn)
    }
  }
  // Final step: registers the component with its static connection.
  protected final class NCC(c:Compo, pin:String, pcn:String) {
    def named(n:String) = {
      runtime.newStaticCompo(c, n, pin, pcn)
    }
  }
  //Components connection
  // Pivot so a plain jack-name string can start `"jack" from "compo" connect ...`.
  implicit def S2F (jin:String):FC = {new FC(jin)}
  protected final class FC(val jin:String) {
    def from(jcn:String) = {
      new CC(jin, jcn)
    }
  }
  protected final class CC(jin:String, jcn:String) {
    // If `n` names an already-registered component, wire immediately and
    // return null; otherwise return a CIC so the caller can finish with
    // `from(pcn)`. Only valid while the assembly is still under construction.
    def connect(n:String):CIC = {
      if(runtime.compoList.static.contains(n)){
        if(! runtime.workMutex.construction){throw new CompoException("You can not connect components after start")}
        // NOTE(review): `jin` is passed for BOTH pin-name arguments here,
        // while CIC.from below passes a distinct `pin` — confirm intended.
        runtime.staticConnect(jin, runtime.getShadowForName(jcn), jin, runtime.getShadowForName(n))
        null}
      else{
        new CIC(jin, jcn, n)}
    }
  }
  protected final class CIC(jin:String, jcn:String, pin:String) {
    // Completes a deferred connection once the source component name is known.
    def from(pcn:String) = {
      if(! runtime.workMutex.construction){throw new CompoException("You can not connect components after start")}
      runtime.staticConnect(jin, runtime.getShadowForName(jcn), pin, runtime.getShadowForName(pcn))
    }
  }
  //Dev tools
  // Attaches the runtime visualization (development aid).
  protected def visualization = {
    runtime.visualization = new Visualization(runtime)
  }
  // Starts the assembly with an interactive console attached; blocks in the
  // console work loop.
  protected def console = {
    //Get current pack name
    val cp = getClass().getName().split("\\\\.").init.mkString(".")
    //Create console and run assembly
    runtime.console = new Console(runtime)
    runtime.go() //Run assembly
    runtime.console.work(cp)
  }
  //Start mains / end
  // Starts the assembly; fails fast if no component was registered.
  protected def go() = {
    if(runtime.compoList.base.isEmpty){throw new CompoException("No not one component")}
    runtime.go() //Run assembly
  }
  // Starts the assembly and blocks the calling thread until the runtime
  // signals the end of work via waitMutex.
  protected def gowait() = {
    if(runtime.compoList.base.isEmpty){throw new CompoException("No not one component")}
    runtime.go() //Run assembly
    runtime.waitMutex.synchronized{ //Wait for end work
      if(runtime.waitMutex.wr){
        runtime.waitMutex.wt = true
        runtime.waitMutex.wait()}}
  }
  // Stops the runtime and terminates the JVM.
  protected def end() = {
    runtime.end()
    System.exit(0)
  }
  // Stops the runtime without exiting the JVM.
  protected def breakdown() = {
    runtime.end()
  }
}
| AlexCAB/CompoDev | skidbladnir/Assembly.scala | Scala | mit | 2,710 |
package scalaz.stream.mongodb.userguide
import org.specs2.Specification
import scalaz.stream.mongodb.aggregate.{PipelineSpec, MapReduceSpec, BasicAggregationSpec}
/**
 * User-guide specification (specs2 `s2` interpolator) giving an overview of
 * the aggregation-framework support; the `~/` links navigate to the child
 * specifications that contain the runnable examples. The interpolated text
 * below is parsed by specs2 and must not be reformatted casually.
 */
class AggregationUsageSpec extends Specification{
  def is =
    s2"""
      ${"Aggregation framework".title}
      Mongo Streams has support for all aggregation framework operations that are supported by mongodb.
      Please take look into following examples to see more:
      ### Simple Aggregation operations
      These are simple operations to return count of elements or distinct values. For more please look ${ "here" ~/ new BasicAggregationSpec() }.
      ### Map-Reduce aggregation operations
      Mongo Streams allows you to create mongo's mapreduce javascript aggregation operations. For syntax and examples look ${ "here" ~/ new MapReduceSpec() }.
      ### Aggregation Pipieline operations
      Mongo Streams has limitted support for aggregation pipeline commands. All the aggregation pipeline commands are now
      supported, but only basic expressions are supported now for $$project operation. For list and exmaples of supported
      functionality, please look ${ "here" ~/ new PipelineSpec() }.
    """
}
| Spinoco/scalaz-stream-mongodb | core/src/test/scala/scalaz/stream/mongodb/userguide/AggregationUsageSpec.scala | Scala | mit | 1,208 |
package engine
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import play.modules.reactivemongo.ReactiveMongoPlugin
import reactivemongo.api._
import reactivemongo.api.collections.default.BSONCollectionProducer
import reactivemongo.bson._
import play.api.Play.current
import models.{ Hacker, Project }
import controllers.Ctx
/**
 * Data-access layer for the `projects` MongoDB collection (ReactiveMongo).
 * All operations are asynchronous and return `Future`s.
 */
object Projects {
  // Resolved lazily on each call so the plugin's connection is current.
  def db = ReactiveMongoPlugin.db
  def collection = db("projects")(BSONCollectionProducer)
  // True when the collection contains no documents at all.
  def isEmpty: Future[Boolean] = {
    collection.find(BSONDocument()).one[BSONDocument].map(_.isEmpty)
  }
  // Inserts the project and echoes it back once the write completes.
  def insert(project: Project): Future[Project] = {
    for {
      _ <- collection.insert(project)
    } yield project
  }
  def findById(id: BSONObjectID): Future[Option[Project]] = {
    collection.find(BSONDocument("_id" -> id)).one[Project]
  }
  def findAll(): Future[Seq[Project]] = {
    collection.find(BSONDocument()).cursor[Project].collect[Seq]()
  }
  // All projects plus a lookup map of every hacker referenced by any team.
  def findAllWithHackers(): Future[(Seq[Project], Map[BSONObjectID, Hacker])] = {
    for {
      projects <- findAll
      hackers <- findHackersOf(projects)
      hackersMap = hackers.map(h => (h.oid, h)).toMap
    } yield (projects, hackersMap)
  }
  // Hackers belonging to any of the given projects' teams.
  def findHackersOf(projects: Seq[Project]): Future[Seq[Hacker]] = {
    Hackers.findAllById(projects.flatMap(_.team))
  }
  def findHackersOf(project: Project): Future[Seq[Hacker]] = {
    Hackers.findAllById(project.team)
  }
  def findAllById(ids: Seq[BSONObjectID]): Future[Seq[Project]] = {
    collection.find(BSONDocument("_id" -> BSONDocument("$in" -> ids)))
      .cursor[Project]
      .collect[Seq]()
  }
  // Same contract as findAllWithHackers but restricted to the given ids.
  def findAllByIdWithHackers(ids: Seq[BSONObjectID]): Future[(Seq[Project], Map[BSONObjectID, Hacker])] = {
    findAllById(ids).flatMap { projects =>
      val hackerIds = projects.flatMap(_.team)
      Hackers.findAllById(hackerIds).map { hackers =>
        (projects, hackers.map(h => (h.oid -> h)).toMap)
      }
    }
  }
  def findByName(name: String): Future[Option[Project]] = {
    collection.find(BSONDocument("name" -> name)).one[Project]
  }
  // Adds the hacker to the project's team ($addToSet: no duplicates), then
  // re-reads the team field and returns the project with the fresh team.
  // Not atomic as a whole: another writer may interleave between the update
  // and the read-back.
  def addTeammate(project: Project, hacker: Hacker): Future[Project] = {
    collection.update(
      BSONDocument("_id" -> project.oid),
      BSONDocument("$addToSet" -> BSONDocument("team" -> hacker.oid))
    ).flatMap { _ =>
      collection.find(
        BSONDocument("_id" -> project.oid),
        BSONDocument("team" -> 1)
      ).one[BSONDocument].map { doc =>
        val team = doc.flatMap(_.getAs[Seq[BSONObjectID]]("team")).getOrElse {
          // NOTE(review): message says "player" but the id is a project's —
          // likely copied from Hackers; consider rewording.
          throw new java.lang.RuntimeException(s"Can't find player ${project.oid}'s ‘team’ field!")
        }
        project.copy(team = team)
      }
    }
  }
  // The single project led by the given hacker within the current event scope.
  def ofZentrepreneur(leader: Hacker)(implicit ctx: Ctx): Future[Option[Project]] = {
    collection.find(BSONDocument(
      "leaderId" -> leader.oid,
      "_id" -> BSONDocument("$in"-> ctx.event.map(_.projects).getOrElse(Nil))
    )).one[Project]
  }
  // BSON (de)serialization for Project, used implicitly by the queries above.
  implicit val projectHandler = Macros.handler[Project]
}
| dohzya/Hackinder | app/engine/Projects.scala | Scala | agpl-3.0 | 3,028 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream
import org.apache.flink.api.java.tuple.{Tuple5 => JTuple5}
import org.apache.flink.api.java.typeutils.TupleTypeInfo
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.{StreamExecutionEnvironment => JStreamExecEnv}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.Expressions.$
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.java.internal.{StreamTableEnvironmentImpl => JStreamTableEnvironmentImpl}
import org.apache.flink.table.api.bridge.java.{StreamTableEnvironment => JStreamTableEnv}
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.catalog.FunctionCatalog
import org.apache.flink.table.executor.StreamExecutor
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.planner.StreamPlanner
import org.apache.flink.table.runtime.utils.StreamTestData
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{CatalogManagerMocks, TableTestBase}
import org.apache.flink.types.Row
import org.junit.Assert.{assertEquals, assertFalse, assertTrue, fail}
import org.junit.Test
import org.mockito.Mockito.{mock, when}

import java.lang.{Integer => JInt, Long => JLong}

import scala.util.control.NonFatal
/**
 * Tests for the Scala [[StreamTableEnvironment]] of the legacy planner:
 * time-attribute definition on tables, SQL without prior catalog
 * registration, and the `executeSql` / `explainSql` / `Table.explain`
 * entry points.
 */
class StreamTableEnvironmentTest extends TableTestBase {

  @Test
  def testSqlWithoutRegistering(): Unit = {
    val util = streamTestUtil()
    val table = util.addTable[(Long, Int, String)]("tableName", 'a, 'b, 'c)

    // Tables can be referenced in SQL via string interpolation without
    // registering them in the catalog first.
    val sqlTable = util.tableEnv.sqlQuery(s"SELECT a, b, c FROM $table WHERE b > 12")

    val expected = unaryNode(
      "DataStreamCalc",
      streamTableNode(table),
      term("select", "a, b, c"),
      term("where", ">(b, 12)"))

    util.verifyTable(sqlTable, expected)

    val table2 = util.addTable[(Long, Int, String)]('d, 'e, 'f)

    val sqlTable2 = util.tableEnv.sqlQuery(s"SELECT d, e, f FROM $table2 " +
      s"UNION ALL SELECT a, b, c FROM $table")

    val expected2 = binaryNode(
      "DataStreamUnion",
      streamTableNode(table2),
      streamTableNode(table),
      term("all", "true"),
      term("union all", "d, e, f"))

    util.verifyTable(sqlTable2, expected2)
  }

  @Test
  def testToAppendSinkOnUpdatingTable(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Table is not an append-only table. Use the toRetractStream()" +
      " in order to handle add and retract messages.")

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
    val tEnv = StreamTableEnvironment.create(env, settings)
    val t = StreamTestData.get3TupleDataStream(env).toTable(tEnv, 'id, 'num, 'text)

    t.groupBy('text)
      .select('text, 'id.count, 'num.sum)
      .toAppendStream[Row]

    // must fail because table is not append-only
    env.execute()
  }

  @Test
  def testProctimeAttributeWithAtomicInput(): Unit = {
    val util = streamTestUtil()
    // cannot replace an attribute with proctime
    util.addTable[String]('s, 'pt.proctime)
  }

  @Test
  def testReplacingRowtimeAttributeWithAtomicInput(): Unit = {
    val util = streamTestUtil()
    util.addTable[Long]('rt.rowtime)
  }

  @Test
  def testAppendedRowtimeAttributeWithAtomicInput(): Unit = {
    val util = streamTestUtil()
    util.addTable[String]('s, 'rt.rowtime)
  }

  @Test
  def testRowtimeAndProctimeAttributeWithAtomicInput1(): Unit = {
    val util = streamTestUtil()
    util.addTable[String]('s, 'rt.rowtime, 'pt.proctime)
  }

  @Test
  def testRowtimeAndProctimeAttributeWithAtomicInput2(): Unit = {
    val util = streamTestUtil()
    util.addTable[String]('s, 'pt.proctime, 'rt.rowtime)
  }

  @Test
  def testRowtimeAndProctimeAttributeWithAtomicInput3(): Unit = {
    val util = streamTestUtil()
    util.addTable[Long]('rt.rowtime, 'pt.proctime)
  }

  @Test
  def testProctimeAttribute(): Unit = {
    val util = streamTestUtil()
    // cannot replace an attribute with proctime
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'pt.proctime)
  }

  @Test
  def testReplacedRowtimeAttribute(): Unit = {
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('rt.rowtime, 'b, 'c, 'd, 'e)
  }

  @Test
  def testAppendedRowtimeAttribute(): Unit = {
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'rt.rowtime)
  }

  @Test
  def testRowtimeAndProctimeAttribute1(): Unit = {
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'rt.rowtime, 'pt.proctime)
  }

  @Test
  def testRowtimeAndProctimeAttribute2(): Unit = {
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('a, 'b, 'c, 'd, 'e, 'pt.proctime, 'rt.rowtime)
  }

  @Test
  def testRowtimeAndProctimeAttribute3(): Unit = {
    val util = streamTestUtil()
    util.addTable[(Long, Int, String, Int, Long)]('rt.rowtime, 'b, 'c, 'd, 'e, 'pt.proctime)
  }

  @Test
  def testProctimeAttributeParsed(): Unit = {
    val (jTEnv, ds) = prepareSchemaExpressionParser
    jTEnv.fromDataStream(ds, $("a"), $("b"), $("c"), $("d"), $("e"), $("pt").proctime())
  }

  @Test
  def testReplacingRowtimeAttributeParsed(): Unit = {
    val (jTEnv, ds) = prepareSchemaExpressionParser
    jTEnv.fromDataStream(ds, $("a").rowtime(), $("b"), $("c"), $("d"), $("e"))
  }

  @Test
  def testAppedingRowtimeAttributeParsed(): Unit = {
    val (jTEnv, ds) = prepareSchemaExpressionParser
    jTEnv.fromDataStream(ds, $("a"), $("b"), $("c"), $("d"), $("e"), $("rt").rowtime())
  }

  @Test
  def testRowtimeAndProctimeAttributeParsed1(): Unit = {
    val (jTEnv, ds) = prepareSchemaExpressionParser
    jTEnv.fromDataStream(
      ds,
      $("a"),
      $("b"),
      $("c"),
      $("d"),
      $("e"),
      $("pt").proctime(),
      $("rt").rowtime())
  }

  @Test
  def testRowtimeAndProctimeAttributeParsed2(): Unit = {
    val (jTEnv, ds) = prepareSchemaExpressionParser
    jTEnv.fromDataStream(ds, $("rt").rowtime(), $("b"), $("c"), $("d"), $("e"), $("pt").proctime())
  }

  @Test
  def testExecuteSqlWithExplainSelect(): Unit = {
    val util = streamTestUtil()
    val createTableStmt =
      """
        |CREATE TABLE MyTable (
        |  a bigint,
        |  b int,
        |  c varchar
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult1 = util.tableEnv.executeSql(createTableStmt)
    assertEquals(ResultKind.SUCCESS, tableResult1.getResultKind)

    val tableResult2 = util.tableEnv.executeSql(
      "explain plan for select * from MyTable where a > 10")
    assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult2.getResultKind)
    val it = tableResult2.collect()
    assertTrue(it.hasNext)
    val row = it.next()
    assertEquals(1, row.getArity)
    val actual = row.getField(0).toString
    val expected = readFromResource("testExecuteSqlWithExplainSelect0.out")
    assertEquals(replaceStageId(expected), replaceStageId(actual))
    assertFalse(it.hasNext)
  }

  @Test
  def testExecuteSqlWithExplainInsert(): Unit = {
    val util = streamTestUtil()
    val createTableStmt1 =
      """
        |CREATE TABLE MyTable (
        |  a bigint,
        |  b int,
        |  c varchar
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult1 = util.tableEnv.executeSql(createTableStmt1)
    assertEquals(ResultKind.SUCCESS, tableResult1.getResultKind)

    val createTableStmt2 =
      """
        |CREATE TABLE MySink (
        |  d bigint,
        |  e int
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult2 = util.tableEnv.executeSql(createTableStmt2)
    assertEquals(ResultKind.SUCCESS, tableResult2.getResultKind)

    val tableResult3 = util.tableEnv.executeSql(
      "explain plan for insert into MySink select a, b from MyTable where a > 10")
    assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult3.getResultKind)
    val it = tableResult3.collect()
    assertTrue(it.hasNext)
    val row = it.next()
    assertEquals(1, row.getArity)
    val actual = row.getField(0).toString
    val expected = readFromResource("testExecuteSqlWithExplainInsert0.out")
    assertEquals(replaceStageId(expected), replaceStageId(actual))
    assertFalse(it.hasNext)
  }

  @Test
  def testExecuteSqlWithUnsupportedExplain(): Unit = {
    val util = streamTestUtil()
    val createTableStmt =
      """
        |CREATE TABLE MyTable (
        |  a bigint,
        |  b int,
        |  c varchar
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult1 = util.tableEnv.executeSql(createTableStmt)
    assertEquals(ResultKind.SUCCESS, tableResult1.getResultKind)

    // TODO we can support them later
    testUnsupportedExplain(util.tableEnv,
      "explain plan excluding attributes for select * from MyTable")
    testUnsupportedExplain(util.tableEnv,
      "explain plan including all attributes for select * from MyTable")
    testUnsupportedExplain(util.tableEnv,
      "explain plan with type for select * from MyTable")
    testUnsupportedExplain(util.tableEnv,
      "explain plan without implementation for select * from MyTable")
    testUnsupportedExplain(util.tableEnv,
      "explain plan as xml for select * from MyTable")
    testUnsupportedExplain(util.tableEnv,
      "explain plan as json for select * from MyTable")
  }

  /**
   * Asserts that the given EXPLAIN variant is rejected with a
   * [[TableException]] carrying the expected message.
   */
  private def testUnsupportedExplain(tableEnv: StreamTableEnvironment, explain: String): Unit = {
    try {
      tableEnv.executeSql(explain)
      fail("This should not happen")
    } catch {
      case e: TableException =>
        assertTrue(e.getMessage.contains("Only default behavior is supported now"))
      case e: AssertionError =>
        // Re-throw the error raised by fail() above. The previous bare
        // `case e =>` swallowed it and re-failed with a doubled message.
        throw e
      case NonFatal(e) =>
        // Only non-fatal throwables are converted into test failures;
        // fatal errors (OOM etc.) must propagate.
        fail("This should not happen, " + e.getMessage)
    }
  }

  @Test
  def testExplainSqlWithSelect(): Unit = {
    val util = streamTestUtil()
    val createTableStmt =
      """
        |CREATE TABLE MyTable (
        |  a bigint,
        |  b int,
        |  c varchar
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult1 = util.tableEnv.executeSql(createTableStmt)
    assertEquals(ResultKind.SUCCESS, tableResult1.getResultKind)

    val actual = util.tableEnv.explainSql("select * from MyTable where a > 10")
    val expected = readFromResource("testExplainSqlWithSelect0.out")
    assertEquals(replaceStageId(expected), replaceStageId(actual))
  }

  @Test
  def testExplainSqlWithInsert(): Unit = {
    val util = streamTestUtil()
    val createTableStmt1 =
      """
        |CREATE TABLE MyTable (
        |  a bigint,
        |  b int,
        |  c varchar
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult1 = util.tableEnv.executeSql(createTableStmt1)
    assertEquals(ResultKind.SUCCESS, tableResult1.getResultKind)

    val createTableStmt2 =
      """
        |CREATE TABLE MySink (
        |  d bigint,
        |  e int
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult2 = util.tableEnv.executeSql(createTableStmt2)
    assertEquals(ResultKind.SUCCESS, tableResult2.getResultKind)

    val actual = util.tableEnv.explainSql(
      "insert into MySink select a, b from MyTable where a > 10")
    val expected = readFromResource("testExplainSqlWithInsert0.out")
    assertEquals(replaceStageId(expected), replaceStageId(actual))
  }

  @Test
  def testTableExplain(): Unit = {
    val util = streamTestUtil()
    val createTableStmt =
      """
        |CREATE TABLE MyTable (
        |  a bigint,
        |  b int,
        |  c varchar
        |) with (
        |  'connector' = 'COLLECTION',
        |  'is-bounded' = 'false'
        |)
      """.stripMargin
    val tableResult1 = util.tableEnv.executeSql(createTableStmt)
    assertEquals(ResultKind.SUCCESS, tableResult1.getResultKind)

    val actual = util.tableEnv.sqlQuery("select * from MyTable where a > 10").explain()
    val expected = readFromResource("testExplainSqlWithSelect0.out")
    assertEquals(replaceStageId(expected), replaceStageId(actual))
  }

  /**
   * Builds a Java [[JStreamTableEnv]] wired to a mocked execution
   * environment plus a mocked 5-field tuple DataStream, used by the
   * schema-expression-parser tests above.
   */
  private def prepareSchemaExpressionParser:
    (JStreamTableEnv, DataStream[JTuple5[JLong, JInt, String, JInt, JLong]]) = {

    // Event-time characteristic is required for the rowtime() tests.
    val jStreamExecEnv = mock(classOf[JStreamExecEnv])
    when(jStreamExecEnv.getStreamTimeCharacteristic).thenReturn(TimeCharacteristic.EventTime)

    val config = new TableConfig
    val catalogManager = CatalogManagerMocks.createEmptyCatalogManager()
    val moduleManager: ModuleManager = new ModuleManager
    val executor: StreamExecutor = new StreamExecutor(jStreamExecEnv)
    val functionCatalog = new FunctionCatalog(config, catalogManager, moduleManager)
    val streamPlanner = new StreamPlanner(executor, config, functionCatalog, catalogManager)
    val jTEnv = new JStreamTableEnvironmentImpl(
      catalogManager,
      moduleManager,
      functionCatalog,
      config,
      jStreamExecEnv,
      streamPlanner,
      executor,
      true,
      Thread.currentThread().getContextClassLoader)

    val sType = new TupleTypeInfo(Types.LONG, Types.INT, Types.STRING, Types.INT, Types.LONG)
      .asInstanceOf[TupleTypeInfo[JTuple5[JLong, JInt, String, JInt, JLong]]]
    val ds = mock(classOf[DataStream[JTuple5[JLong, JInt, String, JInt, JLong]]])
    when(ds.getType).thenReturn(sType)

    (jTEnv, ds)
  }
}
| tzulitai/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/StreamTableEnvironmentTest.scala | Scala | apache-2.0 | 14,761 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import scala.language.implicitConversions
/**
 * Deprecated implicit Scala-to-Java conversions. This trait only re-exposes
 * the conversions of [[LowPriorityWrapAsJava]] under backtick-quoted names
 * that do not clash with `JavaConverters`, so both imports can coexist.
 */
@deprecated("use JavaConverters or consider ToJavaImplicits", since="2.12.0")
trait WrapAsJava extends LowPriorityWrapAsJava {
  // provide higher-priority implicits with names that don't exist in JavaConverters for the case
  // when importing both JavaConverters._ and JavaConversions._. otherwise implicit conversions
  // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789
  implicit def `deprecated asJavaIterator`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it)
  implicit def `deprecated asJavaEnumeration`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it)
  implicit def `deprecated asJavaIterable`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i)
  implicit def `deprecated asJavaCollection`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it)
  implicit def `deprecated bufferAsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b)
  implicit def `deprecated mutableSeqAsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq)
  implicit def `deprecated seqAsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq)
  implicit def `deprecated mutableSetAsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s)
  implicit def `deprecated setAsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s)
  implicit def `deprecated mutableMapAsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m)
  implicit def `deprecated asJavaDictionary`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m)
  implicit def `deprecated mapAsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m)
  implicit def `deprecated mapAsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m)
}
/**
 * The actual Scala-to-Java conversions. Every conversion follows the same
 * three-way pattern: `null` passes through as `null`; a value that is itself
 * a wrapper around a Java collection is unwrapped (so round-trips return the
 * original Java object rather than double-wrapping); anything else is
 * wrapped in the corresponding view from [[Wrappers]].
 */
private[convert] trait LowPriorityWrapAsJava {
  import Wrappers._

  /**
   * Implicitly converts a Scala Iterator to a Java Iterator.
   * The returned Java Iterator is backed by the provided Scala
   * Iterator and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Iterator was previously obtained from an implicit or
   * explicit call of `asIterator(java.util.Iterator)` then the original
   * Java Iterator will be returned.
   *
   * @param it The Iterator to be converted.
   * @return A Java Iterator view of the argument.
   */
  implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
    case null                      => null // null maps to null, per Java interop convention
    case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] // unwrap instead of re-wrapping
    case _                         => IteratorWrapper(it)
  }

  /**
   * Implicitly converts a Scala Iterator to a Java Enumeration.
   * The returned Java Enumeration is backed by the provided Scala
   * Iterator and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Iterator was previously obtained from an implicit or
   * explicit call of `asIterator(java.util.Enumeration)` then the
   * original Java Enumeration will be returned.
   *
   * @param it The Iterator to be converted.
   * @return A Java Enumeration view of the argument.
   */
  implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
    case null                         => null
    case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
    case _                            => IteratorWrapper(it)
  }

  /**
   * Implicitly converts a Scala Iterable to a Java Iterable.
   * The returned Java Iterable is backed by the provided Scala
   * Iterable and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Iterable was previously obtained from an implicit or
   * explicit call of `asIterable(java.lang.Iterable)` then the original
   * Java Iterable will be returned.
   *
   * @param i The Iterable to be converted.
   * @return A Java Iterable view of the argument.
   */
  implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
    case null                      => null
    case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
    case _                         => IterableWrapper(i)
  }

  /**
   * Implicitly converts a Scala Iterable to an immutable Java
   * Collection.
   *
   * If the Scala Iterable was previously obtained from an implicit or
   * explicit call of `asSizedIterable(java.util.Collection)` then the original
   * Java Collection will be returned.
   *
   * @param it The SizedIterable to be converted.
   * @return A Java Collection view of the argument.
   */
  implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
    case null                        => null
    case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
    case _                           => new IterableWrapper(it)
  }

  /**
   * Implicitly converts a Scala mutable Buffer to a Java List.
   * The returned Java List is backed by the provided Scala
   * Buffer and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Buffer was previously obtained from an implicit or
   * explicit call of `asBuffer(java.util.List)` then the original
   * Java List will be returned.
   *
   * @param b The Buffer to be converted.
   * @return A Java List view of the argument.
   */
  implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
    case null                  => null
    case JListWrapper(wrapped) => wrapped
    case _                     => new MutableBufferWrapper(b)
  }

  /**
   * Implicitly converts a Scala mutable Seq to a Java List.
   * The returned Java List is backed by the provided Scala
   * Seq and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Seq was previously obtained from an implicit or
   * explicit call of `asSeq(java.util.List)` then the original
   * Java List will be returned.
   *
   * @param seq The Seq to be converted.
   * @return A Java List view of the argument.
   */
  implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
    case null                  => null
    case JListWrapper(wrapped) => wrapped
    case _                     => new MutableSeqWrapper(seq)
  }

  /**
   * Implicitly converts a Scala Seq to a Java List.
   * The returned Java List is backed by the provided Scala
   * Seq and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Seq was previously obtained from an implicit or
   * explicit call of `asSeq(java.util.List)` then the original
   * Java List will be returned.
   *
   * @param seq The Seq to be converted.
   * @return A Java List view of the argument.
   */
  implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
    case null                  => null
    case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
    case _                     => new SeqWrapper(seq)
  }

  /**
   * Implicitly converts a Scala mutable Set to a Java Set.
   * The returned Java Set is backed by the provided Scala
   * Set and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Set was previously obtained from an implicit or
   * explicit call of `asSet(java.util.Set)` then the original
   * Java Set will be returned.
   *
   * @param s The Set to be converted.
   * @return A Java Set view of the argument.
   */
  implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
    case null                 => null
    case JSetWrapper(wrapped) => wrapped
    case _                    => new MutableSetWrapper(s)
  }

  /**
   * Implicitly converts a Scala Set to a Java Set.
   * The returned Java Set is backed by the provided Scala
   * Set and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Set was previously obtained from an implicit or
   * explicit call of asSet(java.util.Set) then the original
   * Java Set will be returned.
   *
   * @param s The Set to be converted.
   * @return A Java Set view of the argument.
   */
  implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
    case null                 => null
    case JSetWrapper(wrapped) => wrapped
    case _                    => new SetWrapper(s)
  }

  /**
   * Implicitly converts a Scala mutable Map to a Java Map.
   * The returned Java Map is backed by the provided Scala
   * Map and any side-effects of using it via the Java interface will
   * be visible via the Scala interface and vice versa.
   *
   * If the Scala Map was previously obtained from an implicit or
   * explicit call of `asMap(java.util.Map)` then the original
   * Java Map will be returned.
   *
   * @param m The Map to be converted.
   * @return A Java Map view of the argument.
   */
  implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
    case null                 => null
    case JMapWrapper(wrapped) => wrapped
    case _                    => new MutableMapWrapper(m)
  }

  /**
   * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
   *
   * The returned Java `Dictionary` is backed by the provided Scala
   * `Dictionary` and any side-effects of using it via the Java interface
   * will be visible via the Scala interface and vice versa.
   *
   * If the Scala `Dictionary` was previously obtained from an implicit or
   * explicit call of `asMap(java.util.Dictionary)` then the original
   * Java Dictionary will be returned.
   *
   * @param m The `Map` to be converted.
   * @return A Java `Dictionary` view of the argument.
   */
  implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
    case null                        => null
    case JDictionaryWrapper(wrapped) => wrapped
    case _                           => new DictionaryWrapper(m)
  }

  /**
   * Implicitly converts a Scala `Map` to a Java `Map`.
   *
   * The returned Java `Map` is backed by the provided Scala `Map` and
   * any side-effects of using it via the Java interface will be visible
   * via the Scala interface and vice versa.
   *
   * If the Scala `Map` was previously obtained from an implicit or
   * explicit call of `asMap(java.util.Map)` then the original
   * Java `Map` will be returned.
   *
   * @param m The `Map` to be converted.
   * @return A Java `Map` view of the argument.
   */
  implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
    case null                 => null
    case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
    case _                    => new MapWrapper(m)
  }

  /**
   * Implicitly converts a Scala mutable `concurrent.Map` to a Java
   * `ConcurrentMap`.
   *
   * The returned Java `ConcurrentMap` is backed by the provided Scala
   * `concurrent.Map` and any side-effects of using it via the Java interface
   * will be visible via the Scala interface and vice versa.
   *
   * If the Scala `concurrent.Map` was previously obtained from an implicit or
   * explicit call of `mapAsScalaConcurrentMap(java.util.concurrent.ConcurrentMap)`
   * then the original Java ConcurrentMap will be returned.
   *
   * @param m The Scala `concurrent.Map` to be converted.
   * @return A Java `ConcurrentMap` view of the argument.
   */
  implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
    case null                           => null
    case JConcurrentMapWrapper(wrapped) => wrapped
    case _                              => new ConcurrentMapWrapper(m)
  }
}
/** Module mixing in the `WrapAsJava` conversions; deprecated in favour of `JavaConverters`. */
@deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0")
object WrapAsJava extends WrapAsJava
| felixmulder/scala | src/library/scala/collection/convert/WrapAsJava.scala | Scala | bsd-3-clause | 12,844 |
package com.cds.learnscala.test.numeric
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable.ArrayBuffer
object NumericTest {

  /**
   * Exercises pattern matching over an Option. Every branch yields None,
   * so the result is always None regardless of the argument.
   */
  def OptionTest(option: Option[Long]) = {
    // The original also had a discarded `if (option.isEmpty) { None }`
    // statement before the match; it had no effect and was removed.
    option match {
      case None    => None
      case Some(_) => None
    }
  }

  /** Shows that boxed Double/Float values can be viewed as an Array[Object]. */
  def testDoubleFload(): Unit = {
    val row = ArrayBuffer[Any]()
    row += 252.99  // boxed as java.lang.Double
    row += 252.99f // boxed as java.lang.Float
    // Every element is a boxed primitive, so the cast to Array[Object] is safe.
    val test1 = row.toArray.asInstanceOf[Array[Object]]
    println(test1)
  }

  val Log = LoggerFactory.getLogger(NumericTest.getClass)

  def main(args: Array[String]): Unit = {
    OptionTest(None)

    val f = 252.99
    println(f.asInstanceOf[Object]) // boxes the Double
    println(f.toString.toFloat)
    // println(f.toString.toInt) // would throw NumberFormatException: "252.99"

    testDoubleFload()

    val test = null
    Log.info(test) // SLF4J logs a null message without complaint
    println(test)

    val value: Integer = Integer.valueOf(100) // `new Integer(100)` is deprecated
    // BUG FIX: `value.asInstanceOf[Long]` unboxes via unboxToLong, which casts
    // the boxed java.lang.Integer to java.lang.Long and threw a
    // ClassCastException at runtime (see the commented-out attempt below).
    // Convert explicitly through longValue instead.
    println(value.longValue)
    // val value1 = value.asInstanceOf[Long]
    val value1 = value.asInstanceOf[Int] // unboxToInt on an Integer is safe
    println(value1)
  }
}
| anancds/scala-project | learn-scala/src/main/scala/com/cds/learnscala/test/numeric/NumericTest.scala | Scala | mit | 1,027 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2007-2014, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.swing.examples
import java.awt.{Color, Font}
import scala.swing._
import scala.swing.event._
import scala.swing.Swing._
import scala.swing.BorderPanel._
/**
* Demo for ColorChooser.
* Based on http://download.oracle.com/javase/tutorial/uiswing/components/colorchooser.html
*
* @author andy@hicks.net
*/
object ColorChooserDemo extends SimpleSwingApplication {
def top = new MainFrame {
title = "ColorChooser Demo"
size = new Dimension(400, 400)
contents = ui
}
val banner = new Label("Welcome to Scala Swing") {
horizontalAlignment = Alignment.Center
foreground = Color.yellow
background = Color.blue
opaque = true
font = new Font("SansSerif", Font.BOLD, 24)
}
def ui = new BorderPanel {
val colorChooser = new ColorChooser {
reactions += {
case ColorChanged(_, c) =>
banner.foreground = c
}
}
colorChooser.border = TitledBorder(EtchedBorder, "Choose Text Color")
val bannerArea = new BorderPanel {
layout(banner) = Position.Center
border = TitledBorder(EtchedBorder, "Banner")
}
// Display a color selection dialog when button pressed
val selectColor = new Button("Choose Background Color") {
reactions += {
case ButtonClicked(_) =>
ColorChooser.showDialog(this, "Test", Color.red) match {
case Some(c) => banner.background = c
case None =>
}
}
}
layout(bannerArea) = Position.North
layout(colorChooser) = Position.Center
layout(selectColor) = Position.South
}
} | SethTisue/scala-swing | examples/src/main/scala/scala/swing/examples/ColorChooserDemo.scala | Scala | bsd-3-clause | 2,112 |
package org.machine.engine.graph
import org.scalatest._
import org.scalatest.mock._
import java.io.File;
import java.io.IOException;
import org.neo4j.graphdb._
import org.neo4j.graphdb.factory.GraphDatabaseFactory
import org.neo4j.io.fs.FileUtils
import org.machine.engine.Engine
import org.machine.engine.TestUtils
import org.machine.engine.graph._
import org.machine.engine.graph.utilities.DynamicCmdLoader
import org.machine.engine.graph.decisions._
import org.machine.engine.graph.commands._
import org.machine.engine.exceptions._
import org.machine.engine.graph.nodes._
class EngineStatementBuilderSpec extends FunSpec
    with Matchers
    with EasyMockSugar
    with BeforeAndAfterAll {
  import Neo4JHelper._
  import TestUtils._

  var engine: Engine = null
  private var activeUserId: String = null

  override def beforeAll(): Unit = {
    engine = Engine.getInstance
    perge
    // Every command in this spec runs on behalf of this registered user.
    activeUserId = Engine.getInstance
      .createUser
      .withFirstName("Bob")
      .withLastName("Grey")
      .withEmailAddress("onebadclown@derry-maine.com")
      .withUserName("pennywise")
      .withUserPassword("You'll float too...")
      .end
  }

  override def afterAll(): Unit = {
    perge
  }

  /*
  TODO: Test all 41 options
  */
  describe("Engine Statement Builder"){
    it ("should find all datasets"){
      Seq("Dataset A", "Dataset B", "Dataset C").foreach { name =>
        engine.forUser(activeUserId).createDataSet(name, "")
      }

      val result: EngineCmdResult = engine.forUser(activeUserId)
        .setScope(CommandScopes.UserSpaceScope)
        .setActionType(ActionTypes.Retrieve)
        .setEntityType(EntityTypes.DataSet)
        .setFilter(Filters.All)
        .run

      result shouldBe a [QueryCmdResult[_]]
      result.asInstanceOf[QueryCmdResult[DataSet]].results.length should equal(3)
    }

    it("should use reflection"){
      // DynamicCmdLoader should resolve the command class from its name alone.
      val cmd = DynamicCmdLoader.provision("ListDataSets", null, null, null)
      cmd shouldBe a [ListDataSets]
    }
  }
}
| sholloway/graph-engine | src/test/scala/org/machine/engine/graph/EngineStatementBuilderSpec.scala | Scala | mit | 2,024 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
// Accounts box AC135: prior-year trade debtors figure, entered by the user.
case class AC135(value: Option[Int]) extends CtBoxIdentifier(name = "Debtors - Trade debtors (PY)")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs102AccountsBoxRetriever]
  with Validators {

  // A present value must be a non-negative monetary amount.
  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] =
    collectErrors(validateMoney(value, min = 0))
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC135.scala | Scala | apache-2.0 | 1,120 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets matching specific criteria, giving a quick overview of the dataset's contents.