code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package week3
/**
* Created by marlanbar on 19/5/2017.
*/
abstract class IntSet {
  /** Returns a new set containing `x` together with every element of this set. */
  def incl(x: Int): IntSet
  /** Tests whether `x` is an element of this set. */
  def contains(x: Int): Boolean
  /** Returns a set containing the elements of both this set and `other`. */
  def union(other: IntSet): IntSet
}
class NonEmpty(val elem: Int, val left: IntSet, val right: IntSet) extends IntSet {

  /** Binary-search membership test: compare against `elem` and descend one subtree. */
  def contains(x: Int): Boolean =
    if (x == elem) true
    else if (x < elem) left contains x
    else right contains x

  /** Persistent insert: returns a new tree with `x` added; this tree is unchanged. */
  def incl(x: Int): IntSet =
    if (x == elem) this
    else if (x < elem) new NonEmpty(elem, left incl x, right)
    else new NonEmpty(elem, left, right incl x)

  /** Union by folding both subtrees into `other`, then re-inserting this element. */
  def union(other: IntSet) =
    ((left union right) union other) incl elem

  override def toString = s"{$left$elem$right}"
}
class Empty extends IntSet {
  // Every lookup misses: the empty set has no elements.
  def contains(x: Int): Boolean = false

  // Adding x produces a singleton node whose children are both empty.
  def incl(x: Int): IntSet = new NonEmpty(x, new Empty, new Empty)

  // The empty set is the identity element of union.
  def union(other: IntSet): IntSet = other

  override def toString: String = "."
}
| marlanbar/scala-coursera | session/src/week3/IntSet.scala | Scala | unlicense | 895 |
package org.raisercostin.util.gps
import org.raisercostin.jedi._
case class Distance(meters: Double) {
  /**
   * Human-readable rendering: whole metres below one kilometre, kilometres
   * (one decimal) at or above.
   *
   * Fix: the original compared `meters >= 1.0` and swapped the conversions, so a
   * 500 m distance printed as "500.0 km" and a 0.5 m one as "0 m" via a /1000.
   */
  def toInternational =
    if (meters >= 1000.0)
      f"${meters / 1000}%.1f km"
    else
      f"$meters%.0f m"
}
object Gps {
  //http://download.geonames.org/export/dump/cities1000.zip
  // GeoNames "cities1000" dump, unzipped from the classpath and loaded once on demand.
  lazy val locations = fromFile(Locations.classpath("cities1000.zip").unzip)

  /** Parses the tab-separated GeoNames dump inside `src` into Gps points. */
  def fromFile(src: NavigableInputLocation): Seq[Gps] = {
    // 0 1 2 3 4 5 6 7 8 9 10 11 12 13
    //3039154 El Tarter El Tarter Ehl Tarter,Эл Тартер 42.57952 1.65362 P PPL AD 02 1052 1721 Europe/Andorra 2012-11-03
    src.child("cities1000.txt").readLines.map { line =>
      val fields = line.split("\\t")
      // field 4 = latitude, field 5 = longitude, field 1 = name.
      // NOTE(review): hemisphere refs ("N"/"E") and altitude fields ("12"/"0") are
      // hardcoded here; southern/western coordinates would be mislabeled — confirm intent.
      Gps(fields(4), "N", fields(5), "E", "12", "0", Some(fields(1)))
    }.toSeq
  }

  /** A few hand-picked reference locations, handy for tests and experiments. */
  def custom = Seq(
    Gps("44.860046", "N", "24.867838", "E", "13.0", "0", Some("pitesti")),
    Gps("44.4378258", "N", "26.0946376", "E", "12", "0", Some("bucuresti")),
    Gps("50.854975", "N", "4.3753899", "E", "12", "0", Some("brussels")))

  /** Builds a Gps point from signed decimal coordinates, deriving the hemisphere refs. */
  def apply(GPSLatitude: String, GPSLongitude: String) =
    new Gps(GPSLatitude, if (GPSLatitude.toDouble >= 0) "N" else "S", GPSLongitude, if (GPSLongitude.toDouble >= 0) "E" else "W", "0", "0", None)
}
//https://www.google.com/maps/place/@44.85597,24.8735028,13z
//https://www.google.com/maps/place/Pite%C8%99ti,+Romania/@44.85597,24.8735028,13z
//https://www.google.com/maps/place/44%C2%B051'21.5%22N+24%C2%B052'24.6%22E/@44.85597,24.8735028,17z/data=!3m1!4b1!4m2!3m1!1s0x0:0x0
//44.860046, 24.867838
//44°51'21.5"N 24°52'24.6"E
case class Gps(GPSLatitude: String, GPSLatitudeRef: String, GPSLongitude: String, GPSLongitudeRef: String, GPSAltitude: String, GPSAltitudeRef: String, name: Option[String] = None) {
  /** Latitude in decimal degrees, parsed from the raw string field. */
  def latitude = GPSLatitude.toDouble
  /** Longitude in decimal degrees, parsed from the raw string field. */
  def longitude = GPSLongitude.toDouble

  /** Great-circle distance from this point to `to` (haversine formula). */
  def distanceTo(to: Gps) = distance(latitude, longitude, to.latitude, to.longitude)

  // Haversine formula. All intermediates are immutable (the original used `var`s
  // needlessly); R is the mean Earth radius in kilometres.
  private def distance(lat1: Double, lon1: Double, lat2: Double, lon2: Double) = {
    val R = 6371 // km
    val dLat = toRad(lat2 - lat1)
    val dLon = toRad(lon2 - lon1)
    val lat1R = toRad(lat1)
    val lat2R = toRad(lat2)
    val a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
      Math.sin(dLon / 2) * Math.sin(dLon / 2) * Math.cos(lat1R) * Math.cos(lat2R)
    val c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
    val d = R * c
    Distance(d * 1000) // km -> metres
  }

  /** Degrees to radians. */
  private def toRad(value: Double): Double = value * Math.PI / 180

  /** Google Maps link centered on this point. */
  def mapHref = s"https://www.google.com/maps/@${GPSLatitude},${GPSLongitude},14z"

  /**
   * The closest entry in Gps.locations. Candidates are pre-filtered to a ~10 km
   * latitude band before exact distances are computed, avoiding a haversine
   * evaluation against the whole dataset.
   */
  def closestLocation: Gps = {
    val candidates = Gps.locations.toIterator.filter(location => near(location.latitude, 10))
    candidates.minBy(place => distanceTo(place).meters)
  }

  //Each degree of latitude is approximately 69 miles (111 kilometers) apart.
  /** Whether this point's latitude is within `delta` km of `newLatitude`. */
  def near(newLatitude: Double, delta: Double) = (latitude - newLatitude).abs * 111 <= delta
}
| raisercostin/ownit | src/main/scala/org/raisercostin/util/Gps.scala | Scala | apache-2.0 | 3,202 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import scala.math.{Integral, Numeric, Ordering}
import scala.reflect.runtime.universe.typeTag
import org.apache.spark.annotation.Stable
/**
 * The data type representing `Long` values. Please use the singleton `DataTypes.LongType`.
 *
 * @since 1.3.0
 */
@Stable
class LongType private() extends IntegralType {
  // The companion object and this class is separated so the companion object also subclasses
  // this type. Otherwise, the companion object would be of type "LongType$" in byte code.
  // Defined with a private constructor so the companion object is the only possible instantiation.
  private[sql] type InternalType = Long
  // @transient + lazy: the tag is not serialized with the object and is recomputed on first use.
  @transient private[sql] lazy val tag = typeTag[InternalType]
  // Standard-library arithmetic/ordering instances for the underlying Long.
  private[sql] val numeric = implicitly[Numeric[Long]]
  private[sql] val integral = implicitly[Integral[Long]]
  private[sql] val ordering = implicitly[Ordering[InternalType]]
  // Exact arithmetic variant (LongExactNumeric, defined elsewhere) — presumably
  // overflow-checked; confirm against its definition.
  override private[sql] val exactNumeric = LongExactNumeric

  /**
   * The default size of a value of the LongType is 8 bytes.
   */
  override def defaultSize: Int = 8

  // SQL-facing name of this type.
  override def simpleString: String = "bigint"

  // A LongType is its own nullable form; no separate variant exists.
  private[spark] override def asNullable: LongType = this
}
/**
 * The singleton instance of [[LongType]]; extends the class so the visible type is
 * `LongType` rather than `LongType$`.
 *
 * @since 1.3.0
 */
@Stable
case object LongType extends LongType
| mahak/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala | Scala | apache-2.0 | 2,066 |
/**
*
* Copyright (C) 2013-2014 Pivotal Software, Inc.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the under the Apache License,
* Version 2.0 (the "License”); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gopivotal.sutils
import com.fasterxml.jackson.annotation.JsonSubTypes.Type
import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo, JsonTypeName}
import org.slf4j.LoggerFactory
object SerializeTest {

  /** Top-level orchestration document: a module with a list of actions and roles. */
  case class Orchestration(module: String,
                           version: String,
                           description: String,
                           actions: List[Action],
                           roles: List[String])

  /** A named action made up of an ordered list of commands. */
  case class Action(name: String,
                    description: String,
                    commands: List[Command])

  // Jackson polymorphic (de)serialization: the concrete subtype is carried in a "type"
  // property whose value is the subtype's @JsonTypeName ("service" or "puppet").
  @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
  @JsonSubTypes(Array(
    new Type(classOf[ServiceCommand]),
    new Type(classOf[PuppetCommand])
  ))
  trait Command

  /** A service invocation, filtered by puppet class and fact. */
  @JsonTypeName("service")
  case class ServiceCommand(name: String,
                            args: String,
                            classFilter: String,
                            factFilter: String) extends Command

  /** A puppet resource application, filtered by puppet class and fact. */
  @JsonTypeName("puppet")
  case class PuppetCommand(resource: String,
                           classFilter: String,
                           factFilter: String) extends Command
}
// Exercises the Serialize type classes over JSON and YAML, for single case classes and
// lists of them, as both Strings and byte arrays, plus the failure paths
// (serializeValidation / serializeTry) on a type Jackson cannot handle.
class SerializeTest extends BaseTest {

  import SerializeTest._
  import com.gopivotal.sutils.Serialize._
  import com.gopivotal.sutils.Format._
  import com.gopivotal.sutils.JacksonSerialize._
  import com.gopivotal.sutils.syntax.serialize._

  val logger = LoggerFactory.getLogger(getClass.getName)

  "json serialize on case class should return json response" in {
    val command = PuppetCommand("hadoop::namenode", "hadoop::namenode", "")
    val json = command.serialize[StringJSON]
    logger.info("Command JSON: {}", json)
    val expectedJson = """{"type":"puppet","resource":"hadoop::namenode","classFilter":"hadoop::namenode","factFilter":""}"""
    //TODO this is order dependent, but JSON isn't
    json shouldBe expectedJson
  }

  "json serialize on bytes on case class should return json response in bytes" in {
    val command = PuppetCommand("hadoop::namenode", "hadoop::namenode", "")
    val json = command.serialize[BytesJSON]
    logger.info("Command JSON: {}", new String(json))
    val expectedJson = """{"type":"puppet","resource":"hadoop::namenode","classFilter":"hadoop::namenode","factFilter":""}"""
    //TODO this is order dependent, but JSON isn't
    json shouldBe expectedJson.getBytes()
  }

  "yaml serialize on case class should return yaml response" in {
    val command = PuppetCommand("hadoop::namenode", "hadoop::namenode", "")
    val yaml = command.serialize[StringYAML]
    logger.info("Command YAML: {}", yaml)
    // "!<puppet>" is the YAML type tag carrying the Jackson subtype name.
    val expectedYaml =
      """--- !<puppet>
        |resource: "hadoop::namenode"
        |classFilter: "hadoop::namenode"
        |factFilter: ""
      """.stripMargin.trim
    //TODO this is order dependent, but YAML isn't
    yaml shouldBe expectedYaml
  }

  "yaml serialize on bytes on case class should return yaml response in bytes" in {
    val command = PuppetCommand("hadoop::namenode", "hadoop::namenode", "")
    val yaml = command.serialize[BytesYAML]
    logger.info("Command YAML: {}", new String(yaml))
    val expectedYaml =
      """--- !<puppet>
        |resource: "hadoop::namenode"
        |classFilter: "hadoop::namenode"
        |factFilter: ""
      """.stripMargin.trim
    //TODO this is order dependent, but YAML isn't
    yaml shouldBe expectedYaml.getBytes()
  }

  "json serialize on list of case class should return json response" in {
    val command = List[Command](PuppetCommand("hadoop::namenode", "hadoop::namenode", ""), PuppetCommand("hadoop::namenode", "hadoop::namenode", ""))
    val json = command.serialize[StringJSON]
    logger.info("Command JSON: {}", json)
    val expectedJson = """[{"type":"puppet","resource":"hadoop::namenode","classFilter":"hadoop::namenode","factFilter":""},{"type":"puppet","resource":"hadoop::namenode","classFilter":"hadoop::namenode","factFilter":""}]"""
    //TODO this is order dependent, but JSON isn't
    json shouldBe expectedJson
  }

  "json serialize on bytes on list of case class should return json response in bytes" in {
    val command = List[Command](PuppetCommand("hadoop::namenode", "hadoop::namenode", ""), PuppetCommand("hadoop::namenode", "hadoop::namenode", ""))
    val json = command.serialize[BytesJSON]
    logger.info("Command JSON: {}", new String(json))
    val expectedJson = """[{"type":"puppet","resource":"hadoop::namenode","classFilter":"hadoop::namenode","factFilter":""},{"type":"puppet","resource":"hadoop::namenode","classFilter":"hadoop::namenode","factFilter":""}]"""
    //TODO this is order dependent, but JSON isn't
    json shouldBe expectedJson.getBytes()
  }

  "yaml serialize on list of case class should return yaml response" in {
    val command = List(PuppetCommand("hadoop::namenode", "hadoop::namenode", ""), PuppetCommand("hadoop::namenode", "hadoop::namenode", ""))
    val yaml = command.serialize[StringYAML]
    logger.info("Command YAML: {}", yaml)
    val expectedYaml =
      """---
        |- !<puppet>
        |  resource: "hadoop::namenode"
        |  classFilter: "hadoop::namenode"
        |  factFilter: ""
        |- !<puppet>
        |  resource: "hadoop::namenode"
        |  classFilter: "hadoop::namenode"
        |  factFilter: ""
      """.stripMargin.trim
    //TODO this is order dependent, but YAML isn't
    yaml shouldBe expectedYaml
  }

  "yaml serialize on bytes on list of case class should return yaml response in bytes" in {
    val command = List(PuppetCommand("hadoop::namenode", "hadoop::namenode", ""), PuppetCommand("hadoop::namenode", "hadoop::namenode", ""))
    val yaml = command.serialize[BytesYAML]
    logger.info("Command YAML: {}", new String(yaml))
    val expectedYaml =
      """---
        |- !<puppet>
        |  resource: "hadoop::namenode"
        |  classFilter: "hadoop::namenode"
        |  factFilter: ""
        |- !<puppet>
        |  resource: "hadoop::namenode"
        |  classFilter: "hadoop::namenode"
        |  factFilter: ""
      """.stripMargin.trim
    //TODO this is order dependent, but YAML isn't
    yaml shouldBe expectedYaml.getBytes()
  }

  // A plain `object` has no Jackson-visible properties, so serialization fails;
  // serializeValidation must surface that as a Failure rather than throwing.
  "serializing a object that cant convert to json while using safe should return failure" in {
    object HardTypeForJackson
    val json = HardTypeForJackson.serializeValidation[StringJSON]
    logger.info("Failed JSON {}", json)
    json.isFailure shouldBe true
  }

  // Same failure path, expressed through scala.util.Try.
  "serialize with try" in {
    object HardTypeForJackson
    val json = HardTypeForJackson.serializeTry[StringJSON]
    logger.info("Try JSON {}", json)
    json.isFailure shouldBe true
  }
}
| pivotalsoftware/sutils | sutils-jackson/src/test/scala/com/gopivotal/sutils/SerializeTest.scala | Scala | apache-2.0 | 7,385 |
package example
import org.scalatra.test.scalatest.ScalatraFlatSpec
import skinny.micro._
import scala.concurrent.Future
object HelloServlet extends SingleApp {

  /** Builds the greeting from the optional "name" request parameter. */
  def message(implicit ctx: Context) = {
    val name = params(ctx).getOrElse("name", "Anonymous")
    s"Hello, $name"
  }

  // Synchronous actions: the greeting is computed on the request thread.
  get("/hello")(message)
  post("/hello")(message)

  // Asynchronous action: capture the request context before leaving the request thread,
  // then compute the greeting inside a Future.
  get("/hello/async") {
    implicit val ctx = context
    Future { message(ctx) }
  }
}
// End-to-end HTTP tests for HelloServlet: default and parameterized greetings over
// GET, POST, and the Future-returning async route.
class HelloServletSpec extends ScalatraFlatSpec {

  // Mount the servlet under test at the root path.
  addServlet(HelloServlet, "/*")

  it should "work fine with GET Requests" in {
    get("/hello") {
      status should equal(200)
      body should equal("Hello, Anonymous")
    }
    get("/hello?name=Martin") {
      status should equal(200)
      body should equal("Hello, Martin")
    }
  }

  it should "work fine with POST Requests" in {
    post("/hello", Map()) {
      status should equal(200)
      body should equal("Hello, Anonymous")
    }
    post("/hello", Map("name" -> "Martin")) {
      status should equal(200)
      body should equal("Hello, Martin")
    }
  }

  // The async route must resolve the Future before responding.
  it should "work fine with AsyncResult" in {
    get("/hello/async") {
      status should equal(200)
      body should equal("Hello, Anonymous")
    }
    get("/hello/async?name=Martin") {
      status should equal(200)
      body should equal("Hello, Martin")
    }
  }
}
| xerial/skinny-micro | micro/src/test/scala/example/HelloServletSpec.scala | Scala | bsd-2-clause | 1,370 |
/*
,i::,
:;;;;;;;
;:,,::;.
1ft1;::;1tL
t1;::;1,
:;::; _____ __ ___ __
fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_
CLft11 :,, i1tffLi \\__ \\ ____ / /|_/ // __ `// ___// __ \\
1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / /
CLt1i :,: .1tfL. /____/ /_/ /_/ \\__,_/ \\___//_/ /_/
Lft1,:;: , 1tfL:
;it1i ,,,:::;;;::1tti s_mach.concurrent
.t1i .,::;;; ;1tt Copyright (c) 2017 S-Mach, Inc.
Lft11ii;::;ii1tfL: Author: lance.gatlin@gmail.com
.L1 1tt1ttt,,Li
...1LLLL...
*/
package s_mach.concurrent
/* WARNING: Generated code. To modify see s_mach.concurrent.codegen.TupleAsyncTaskRunnerTestCodeGen */
import scala.util.{Random, Success, Failure}
import org.scalatest.{FlatSpec, Matchers}
import s_mach.concurrent.TestBuilder._
import util._
// Generated code (see the WARNING header): prefer regenerating via
// TupleAsyncTaskRunnerTestCodeGen over hand-editing this file.
class Tuple12AsyncTaskRunnerTest extends FlatSpec with Matchers with ConcurrentTestCommon {

  // Twelve independent futures must run concurrently and merge into a 12-tuple.
  "Tuple12AsyncTaskRunner-t0" must "wait on all Futures to complete concurrently" in {
    val results =
      test repeat TEST_COUNT run {
        implicit val ctc = mkConcurrentTestContext()
        import ctc._

        sched.addEvent("start")
        val items = IndexedSeq.fill(12)(Random.nextInt)
        val fa = success(items(0))
        val fb = success(items(1))
        val fc = success(items(2))
        val fd = success(items(3))
        val fe = success(items(4))
        val ff = success(items(5))
        val fg = success(items(6))
        val fh = success(items(7))
        val fi = success(items(8))
        val fj = success(items(9))
        val fk = success(items(10))
        val fl = success(items(11))
        val result = async.par.run(fa,fb,fc,fd,fe,ff,fg,fh,fi,fj,fk,fl)

        waitForActiveExecutionCount(0)
        sched.addEvent("end")

        result.awaitTry should be(Success((items(0),items(1),items(2),items(3),items(4),items(5),items(6),items(7),items(8),items(9),items(10),items(11))))
        isConcurrentSchedule(Vector(items(0),items(1),items(2),items(3),items(4),items(5),items(6),items(7),items(8),items(9),items(10),items(11)), sched)
      }
    // Concurrency is probabilistic; only a minimum fraction of runs must interleave.
    val concurrentPercent = results.count(_ == true) / results.size.toDouble
    concurrentPercent should be >= MIN_CONCURRENCY_PERCENT
  }

  "TupleAsyncTaskRunner-t1" must "complete immediately after any Future fails" in {
    test repeat TEST_COUNT run {
      implicit val ctc = mkConcurrentTestContext()
      import ctc._

      sched.addEvent("start")
      val endLatch = Latch()
      val fb = fail(2)
      // Note1: without hooking the end latch here there would be a race condition here between success 1,3,4,5,6
      // and end. The latch is used to create a serialization schedule that can be reliably tested
      // Note2: Due to this design, a bug in merge that does not complete immediately on failure will cause a
      // deadlock here instead of a failing test
      val fa = endLatch happensBefore success(1)
      val fc = endLatch happensBefore success(3)
      val fd = endLatch happensBefore success(4)
      val fe = endLatch happensBefore success(5)
      val ff = endLatch happensBefore success(6)
      val fg = endLatch happensBefore success(7)
      val fh = endLatch happensBefore success(8)
      val fi = endLatch happensBefore success(9)
      val fj = endLatch happensBefore success(10)
      val fk = endLatch happensBefore success(11)
      val fl = endLatch happensBefore success(12)
      val result = async.par.run(fa,fb,fc,fd,fe,ff,fg,fh,fi,fj,fk,fl)

      waitForActiveExecutionCount(0)
      sched.addEvent("end")
      endLatch.set()
      waitForActiveExecutionCount(0)

      result.awaitTry shouldBe a [Failure[_]]
      result.awaitTry.failed.get shouldBe a [AsyncParThrowable]
      // The failure must be observed between "start" and "end", before any success.
      sched.happensBefore("start","fail-2") should equal(true)
      sched.happensBefore("fail-2","end") should equal(true)
      (1 to 12).filter(_ != 2).foreach { i =>
        sched.happensBefore("end", s"success-$i") should equal(true)
      }
    }
  }

  "TupleAsyncTaskRunner-t2" must "throw AsyncParThrowable which can wait for all failures" in {
    test repeat TEST_COUNT run {
      implicit val ctc = mkConcurrentTestContext()
      import ctc._

      // Pick two of the twelve tasks at random to fail.
      val failures = Random.shuffle(Seq(1,2,3,4,5,6,7,8,9,10,11,12)).take(2)
      def call(i: Int) = if(failures.contains(i)) {
        fail(i)
      } else {
        success(i)
      }
      val result = async.par.run(call(1),call(2),call(3),call(4),call(5),call(6),call(7),call(8),call(9),call(10),call(11),call(12))

      waitForActiveExecutionCount(0)

      val thrown = result.failed.await.asInstanceOf[AsyncParThrowable]
      // Even though there are two worker threads, it technically is a race condition to see which failure happens
      // first. This actually happens in about 1/1000 runs where it appears worker one while processing fail-1 stalls
      // and worker 2 is able to complete success-2, success-3 and fail-4 before fail-1 finishes
      thrown.firstFailure.toString.startsWith("java.lang.RuntimeException: fail-") should equal(true)
      thrown.allFailure.await.map(_.toString) should contain theSameElementsAs(
        failures.map(failIdx => new RuntimeException(s"fail-$failIdx").toString)
      )
    }
  }
}
| S-Mach/s_mach.concurrent | src/test/scala/s_mach/concurrent/Tuple12AsyncTaskRunnerTest.scala | Scala | mit | 5,451 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.granturing.spark.powerbi
import org.apache.spark.sql.{DataFrame, SaveMode, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider}
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.duration.Duration
/**
 * Spark SQL data-source entry point for the Power BI sink. Only writing is exposed
 * here: `createRelation` pushes the DataFrame's rows into a Power BI dataset/table.
 */
class DefaultSource extends CreatableRelationProvider with PowerBISink {

  /**
   * Writes `data` to Power BI and returns a minimal BaseRelation describing it.
   *
   * Required options: "dataset", "table". Optional: "group" (workspace id lookup),
   * "batchSize" (rows per REST call, defaults to the client config).
   */
  override def createRelation(
      sqlContext: SQLContext,
      mode: SaveMode,
      parameters: Map[String, String],
      data: DataFrame): BaseRelation = {
    val conf = ClientConf.fromSparkConf(sqlContext.sparkContext.getConf)
    implicit val client = new Client(conf)

    val dataset = parameters.getOrElse("dataset", sys.error("'dataset' must be specified"))
    val table = parameters.getOrElse("table", sys.error("'table' must be specified"))
    val batchSize = parameters.getOrElse("batchSize", conf.batchSize.toString).toInt
    val group = parameters.get("group")

    // Resolve the workspace, then the target dataset (created/replaced per SaveMode).
    val step = for {
      groupId <- getGroupId(group)
      ds <- getOrCreateDataset(mode, groupId, dataset, table, data.schema)
    } yield (groupId, ds)

    val result = step map { case (groupId, ds) =>
      val fields = data.schema.fieldNames.zipWithIndex
      // Captured into locals so the closure below does not serialize `this`/`client`;
      // the driver-side token is shipped to executors to build per-partition clients.
      val _conf = conf
      val _token = Some(client.currentToken)
      val _table = table
      val _batchSize = batchSize

      // Cap partition count to bound the number of concurrent REST writers.
      val coalesced = data.rdd.partitions.size > _conf.maxPartitions match {
        case true => data.coalesce(_conf.maxPartitions)
        case false => data
      }

      coalesced foreachPartition { p =>
        // Materialize each row as a fieldName -> value map, as the REST API expects.
        val rows = p map { r =>
          fields map { case(name, index) => (name -> r(index)) } toMap
        } toSeq

        val _client = new Client(_conf, _token)

        // Post batches strictly one after another by chaining futures through foldLeft.
        val submit = rows.
          sliding(_batchSize, _batchSize).
          foldLeft(future()) { (fAccum, batch) =>
            fAccum flatMap { _ => _client.addRows(ds.id, _table, batch, groupId) } }

        submit.onComplete { _ => _client.shutdown() }

        Await.result(submit, _conf.timeout)
      }
    }

    result.onComplete { _ => client.shutdown() }

    // The data-source API is synchronous, so block here at the edge.
    Await.result(result, Duration.Inf)

    new BaseRelation {
      val sqlContext = data.sqlContext
      val schema = data.schema
    }
  }
}
| granturing/spark-power-bi | src/main/scala/com/granturing/spark/powerbi/DefaultSource.scala | Scala | apache-2.0 | 2,804 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.binaryfile
import java.io.{File, IOException}
import java.nio.file.{Files, StandardOpenOption}
import java.sql.Timestamp
import scala.collection.JavaConverters._
import com.google.common.io.{ByteStreams, Closeables}
import org.apache.hadoop.fs.{FileStatus, FileSystem, GlobFilter, Path}
import org.mockito.Mockito.{mock, when}
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.{DataFrame, QueryTest, Row}
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.execution.datasources.PartitionedFile
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.internal.SQLConf.SOURCES_BINARY_FILE_MAX_LENGTH
import org.apache.spark.sql.sources._
import org.apache.spark.sql.test.{SQLTestUtils, SharedSparkSession}
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
class BinaryFileFormatSuite extends QueryTest with SharedSparkSession {
import BinaryFileFormat._
// Suite-wide SparkConf: enables the Intel OAP columnar plugin with off-heap memory
// and disables most columnar codegen features for these tests.
override def sparkConf: SparkConf =
  super.sparkConf
    .setAppName("test")
    .set("spark.sql.parquet.columnarReaderBatchSize", "4096")
    .set("spark.sql.sources.useV1SourceList", "avro")
    .set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
    .set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
    //.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
    .set("spark.memory.offHeap.enabled", "true")
    .set("spark.memory.offHeap.size", "50m")
    .set("spark.sql.join.preferSortMergeJoin", "false")
    .set("spark.sql.columnar.codegen.hashAggregate", "false")
    .set("spark.oap.sql.columnar.wholestagecodegen", "false")
    .set("spark.sql.columnar.window", "false")
    .set("spark.unsafe.exceptionOnMemoryLeak", "false")
    //.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
    .set("spark.sql.columnar.sort.broadcastJoin", "true")
    .set("spark.oap.sql.columnar.preferColumnar", "true")
// Scratch directory (local path + Hadoop Path view), its FileSystem, and the status
// of the first test file; all populated in beforeAll.
private var testDir: String = _
private var fsTestDir: Path = _
private var fs: FileSystem = _
private var file1Status: FileStatus = _
// Lays out a small partitioned tree so tests can exercise partition columns and globs:
//   testDir/year=2014/data.txt, data2.bin
//   testDir/year=2015/bool.csv, data.bin
override def beforeAll(): Unit = {
  super.beforeAll()
  testDir = Utils.createTempDir().getAbsolutePath
  fsTestDir = new Path(testDir)
  fs = fsTestDir.getFileSystem(sparkContext.hadoopConfiguration)

  val year2014Dir = new File(testDir, "year=2014")
  year2014Dir.mkdir()
  val year2015Dir = new File(testDir, "year=2015")
  year2015Dir.mkdir()

  val file1 = new File(year2014Dir, "data.txt")
  Files.write(
    file1.toPath,
    Seq("2014-test").asJava,
    StandardOpenOption.CREATE, StandardOpenOption.WRITE
  )
  // Kept so filter tests can reference this file's real length/mtime.
  file1Status = fs.getFileStatus(new Path(file1.getPath))

  val file2 = new File(year2014Dir, "data2.bin")
  Files.write(
    file2.toPath,
    "2014-test-bin".getBytes,
    StandardOpenOption.CREATE, StandardOpenOption.WRITE
  )

  val file3 = new File(year2015Dir, "bool.csv")
  Files.write(
    file3.toPath,
    Seq("bool", "True", "False", "true").asJava,
    StandardOpenOption.CREATE, StandardOpenOption.WRITE
  )

  val file4 = new File(year2015Dir, "data.bin")
  Files.write(
    file4.toPath,
    "2015-test".getBytes,
    StandardOpenOption.CREATE, StandardOpenOption.WRITE
  )
}
// Sanity checks on the format's static properties and its fixed four-column schema.
test("BinaryFileFormat methods") {
  val format = new BinaryFileFormat
  assert(format.shortName() === "binaryFile")
  assert(format.isSplitable(spark, Map.empty, new Path("any")) === false)
  assert(format.inferSchema(spark, Map.empty, Seq.empty) === Some(BinaryFileFormat.schema))
  assert(BinaryFileFormat.schema === StructType(Seq(
    StructField("path", StringType, false),
    StructField("modificationTime", TimestampType, false),
    StructField("length", LongType, false),
    StructField("content", BinaryType, true))))
}
// Loads testDir through the binary-file source (optionally with a pathGlobFilter) and
// checks every column, including the "year" partition column, against the files on disk.
def testBinaryFileDataSource(pathGlobFilter: String): Unit = {
  val dfReader = spark.read.format(BINARY_FILE)
  if (pathGlobFilter != null) {
    dfReader.option("pathGlobFilter", pathGlobFilter)
  }
  val resultDF = dfReader.load(testDir).select(
    col(PATH),
    col(MODIFICATION_TIME),
    col(LENGTH),
    col(CONTENT),
    col("year") // this is a partition column
  )

  // Build the expected rows by walking the partition directories via the FileSystem.
  val expectedRowSet = new collection.mutable.HashSet[Row]()

  val globFilter = if (pathGlobFilter == null) null else new GlobFilter(pathGlobFilter)
  for (partitionDirStatus <- fs.listStatus(fsTestDir)) {
    val dirPath = partitionDirStatus.getPath
    val partitionName = dirPath.getName.split("=")(1)
    val year = partitionName.toInt // partition column "year" value which is `Int` type
    for (fileStatus <- fs.listStatus(dirPath)) {
      if (globFilter == null || globFilter.accept(fileStatus.getPath)) {
        val fpath = fileStatus.getPath.toString
        val flen = fileStatus.getLen
        val modificationTime = new Timestamp(fileStatus.getModificationTime)
        val fcontent = {
          val stream = fs.open(fileStatus.getPath)
          val content = try {
            ByteStreams.toByteArray(stream)
          } finally {
            Closeables.close(stream, true)
          }
          content
        }
        val row = Row(fpath, modificationTime, flen, fcontent, year)
        expectedRowSet.add(row)
      }
    }
  }

  checkAnswer(resultDF, expectedRowSet.toSeq)
}
// Exercises several glob patterns, including ones matching no files at all (*.json).
test("binary file data source test") {
  testBinaryFileDataSource(null)
  testBinaryFileDataSource("*.*")
  testBinaryFileDataSource("*.bin")
  testBinaryFileDataSource("*.txt")
  testBinaryFileDataSource("*.{txt,csv}")
  testBinaryFileDataSource("*.json")
}

// The source is read-only: any attempt to write must fail with a clear message.
test("binary file data source do not support write operation") {
  val df = spark.read.format(BINARY_FILE).load(testDir)
  withTempDir { tmpDir =>
    val thrown = intercept[UnsupportedOperationException] {
      df.write
        .format(BINARY_FILE)
        .save(tmpDir + "/test_save")
    }
    assert(thrown.getMessage.contains("Write is not supported for binary file data source"))
  }
}
// Builds a Mockito FileStatus stub exposing only length and modification time —
// the only attributes createFilterFunction inspects. toString is stubbed for
// readable assertion messages.
def mockFileStatus(length: Long, modificationTime: Long): FileStatus = {
  val status = mock(classOf[FileStatus])
  when(status.getLen).thenReturn(length)
  when(status.getModificationTime).thenReturn(modificationTime)
  when(status.toString).thenReturn(
    s"FileStatus($LENGTH=$length, $MODIFICATION_TIME=$modificationTime)")
  status
}
// Translates each source filter into a FileStatus predicate via
// BinaryFileFormat.createFilterFunction, then checks that the conjunction of all
// predicates matches the expected outcome for every test case.
def testCreateFilterFunction(
    filters: Seq[Filter],
    testCases: Seq[(FileStatus, Boolean)]): Unit = {
  val predicates = filters.map(BinaryFileFormat.createFilterFunction)
  for ((status, expected) <- testCases) {
    val actual = predicates.forall(p => p(status))
    assert(actual === expected,
      s"$filters applied to $status should be $expected.")
  }
}
// Covers every supported pushed-down filter shape (comparisons, Not, And, Or, and
// nesting) against both filterable columns, length and modificationTime.
test("createFilterFunction") {
  // test filter applied on `length` column
  val l1 = mockFileStatus(1L, 0L)
  val l2 = mockFileStatus(2L, 0L)
  val l3 = mockFileStatus(3L, 0L)
  testCreateFilterFunction(
    Seq(LessThan(LENGTH, 2L)),
    Seq((l1, true), (l2, false), (l3, false)))
  testCreateFilterFunction(
    Seq(LessThanOrEqual(LENGTH, 2L)),
    Seq((l1, true), (l2, true), (l3, false)))
  testCreateFilterFunction(
    Seq(GreaterThan(LENGTH, 2L)),
    Seq((l1, false), (l2, false), (l3, true)))
  testCreateFilterFunction(
    Seq(GreaterThanOrEqual(LENGTH, 2L)),
    Seq((l1, false), (l2, true), (l3, true)))
  testCreateFilterFunction(
    Seq(EqualTo(LENGTH, 2L)),
    Seq((l1, false), (l2, true), (l3, false)))
  testCreateFilterFunction(
    Seq(Not(EqualTo(LENGTH, 2L))),
    Seq((l1, true), (l2, false), (l3, true)))
  testCreateFilterFunction(
    Seq(And(GreaterThan(LENGTH, 1L), LessThan(LENGTH, 3L))),
    Seq((l1, false), (l2, true), (l3, false)))
  testCreateFilterFunction(
    Seq(Or(LessThanOrEqual(LENGTH, 1L), GreaterThanOrEqual(LENGTH, 3L))),
    Seq((l1, true), (l2, false), (l3, true)))

  // test filter applied on `modificationTime` column
  val t1 = mockFileStatus(0L, 1L)
  val t2 = mockFileStatus(0L, 2L)
  val t3 = mockFileStatus(0L, 3L)
  testCreateFilterFunction(
    Seq(LessThan(MODIFICATION_TIME, new Timestamp(2L))),
    Seq((t1, true), (t2, false), (t3, false)))
  testCreateFilterFunction(
    Seq(LessThanOrEqual(MODIFICATION_TIME, new Timestamp(2L))),
    Seq((t1, true), (t2, true), (t3, false)))
  testCreateFilterFunction(
    Seq(GreaterThan(MODIFICATION_TIME, new Timestamp(2L))),
    Seq((t1, false), (t2, false), (t3, true)))
  testCreateFilterFunction(
    Seq(GreaterThanOrEqual(MODIFICATION_TIME, new Timestamp(2L))),
    Seq((t1, false), (t2, true), (t3, true)))
  testCreateFilterFunction(
    Seq(EqualTo(MODIFICATION_TIME, new Timestamp(2L))),
    Seq((t1, false), (t2, true), (t3, false)))
  testCreateFilterFunction(
    Seq(Not(EqualTo(MODIFICATION_TIME, new Timestamp(2L)))),
    Seq((t1, true), (t2, false), (t3, true)))
  testCreateFilterFunction(
    Seq(And(GreaterThan(MODIFICATION_TIME, new Timestamp(1L)),
      LessThan(MODIFICATION_TIME, new Timestamp(3L)))),
    Seq((t1, false), (t2, true), (t3, false)))
  testCreateFilterFunction(
    Seq(Or(LessThanOrEqual(MODIFICATION_TIME, new Timestamp(1L)),
      GreaterThanOrEqual(MODIFICATION_TIME, new Timestamp(3L)))),
    Seq((t1, true), (t2, false), (t3, true)))

  // test filters applied on both columns
  testCreateFilterFunction(
    Seq(And(GreaterThan(LENGTH, 2L), LessThan(MODIFICATION_TIME, new Timestamp(2L)))),
    Seq((l1, false), (l2, false), (l3, true), (t1, false), (t2, false), (t3, false)))

  // test nested filters
  testCreateFilterFunction(
    // NOT (length > 2 OR modificationTime < 2)
    Seq(Not(Or(GreaterThan(LENGTH, 2L), LessThan(MODIFICATION_TIME, new Timestamp(2L))))),
    Seq((l1, false), (l2, false), (l3, false), (t1, false), (t2, true), (t3, true)))
}
// Verifies pushed-down filters short-circuit the reader: when a filter rejects the
// file's status the reader yields an empty iterator instead of opening the file.
test("buildReader") {
  def testBuildReader(fileStatus: FileStatus, filters: Seq[Filter], expected: Boolean): Unit = {
    val format = new BinaryFileFormat
    val reader = format.buildReaderWithPartitionValues(
      sparkSession = spark,
      dataSchema = schema,
      partitionSchema = StructType(Nil),
      requiredSchema = schema,
      filters = filters,
      options = Map.empty,
      hadoopConf = spark.sessionState.newHadoopConf())
    val partitionedFile = mock(classOf[PartitionedFile])
    when(partitionedFile.filePath).thenReturn(fileStatus.getPath.toString)
    assert(reader(partitionedFile).nonEmpty === expected,
      s"Filters $filters applied to $fileStatus should be $expected.")
  }
  // No filters: the single file row is produced.
  testBuildReader(file1Status, Seq.empty, true)
  // Filters contradicting the real length/mtime must suppress the row.
  testBuildReader(file1Status, Seq(LessThan(LENGTH, file1Status.getLen)), false)
  testBuildReader(file1Status, Seq(
    LessThan(MODIFICATION_TIME, new Timestamp(file1Status.getModificationTime))
  ), false)
  // Filters matching the real metadata keep the row.
  testBuildReader(file1Status, Seq(
    EqualTo(LENGTH, file1Status.getLen),
    EqualTo(MODIFICATION_TIME, file1Status.getModificationTime)
  ), true)
}
// Reads a single file through BinaryFileFormat and deserializes the first
// produced internal row into an external Row of the requested schema.
private def readBinaryFile(file: File, requiredSchema: StructType): Row = {
  val reader = new BinaryFileFormat().buildReaderWithPartitionValues(
    sparkSession = spark,
    dataSchema = schema,
    partitionSchema = StructType(Nil),
    requiredSchema = requiredSchema,
    filters = Seq.empty,
    options = Map.empty,
    hadoopConf = spark.sessionState.newHadoopConf())
  val mockedFile = mock(classOf[PartitionedFile])
  when(mockedFile.filePath).thenReturn(file.getPath)
  val deserializer = RowEncoder(requiredSchema).resolveAndBind().createDeserializer()
  deserializer(reader(mockedFile).next())
}
test("column pruning") {
  withTempPath { file =>
    val content = "123".getBytes
    Files.write(file.toPath, content, StandardOpenOption.CREATE, StandardOpenOption.WRITE)
    // Request only the last three columns of the binary-file schema.
    val prunedSchema = StructType(schema.takeRight(3))
    val actual = readBinaryFile(file, prunedSchema)
    assert(actual === Row(new Timestamp(file.lastModified()), content.length, content))
  }
}
// Disabled (`ignore`): presumably because File.setReadable(false) is not
// effective on all platforms/users (e.g. running as root, or on Windows) —
// TODO confirm before re-enabling.
ignore("column pruning - non-readable file") {
  withTempPath { file =>
    val content = "abc".getBytes
    Files.write(file.toPath, content, StandardOpenOption.CREATE, StandardOpenOption.WRITE)
    file.setReadable(false)
    // If content is selected, it throws an exception because it's not readable.
    intercept[IOException] {
      readBinaryFile(file, StructType(schema(CONTENT) :: Nil))
    }
    // Otherwise, it should be able to read.
    assert(
      readBinaryFile(file, StructType(schema(LENGTH) :: Nil)) === Row(content.length),
      "Get length should not read content.")
    assert(
      spark.read.format(BINARY_FILE).load(file.getPath).count() === 1,
      "Count should not read content.")
  }
}
test("fail fast and do not attempt to read if a file is too big") {
  // The default max length is unlimited (Int.MaxValue).
  assert(spark.conf.get(SOURCES_BINARY_FILE_MAX_LENGTH) === Int.MaxValue)
  withTempPath { file =>
    val path = file.getPath
    val content = "123".getBytes
    Files.write(file.toPath, content, StandardOpenOption.CREATE, StandardOpenOption.WRITE)
    def readContent(): DataFrame = {
      spark.read.format(BINARY_FILE)
        .load(path)
        .select(CONTENT)
    }
    val expected = Seq(Row(content))
    checkAnswer(readContent(), expected)
    // Reading still succeeds when the file length equals the configured maximum.
    withSQLConf(SOURCES_BINARY_FILE_MAX_LENGTH.key -> content.length.toString) {
      checkAnswer(readContent(), expected)
    }
    // Disable read. If the implementation attempts to read, the exception would be different.
    file.setReadable(false)
    // With the limit one byte below the file size, the source must fail fast
    // on the length check before ever touching the (now unreadable) file.
    val caught = intercept[SparkException] {
      withSQLConf(SOURCES_BINARY_FILE_MAX_LENGTH.key -> (content.length - 1).toString) {
        checkAnswer(readContent(), expected)
      }
    }
    assert(caught.getMessage.contains("exceeds the max length allowed"))
  }
}
test("SPARK-28030: support chars in file names that require URL encoding") {
  withTempDir { tempDir =>
    val file = new File(tempDir, "test space.txt")
    val content = "123".getBytes
    Files.write(file.toPath, content, StandardOpenOption.CREATE, StandardOpenOption.WRITE)
    // Load the whole directory and take the first (only) row.
    val firstRow = spark.read.format(BINARY_FILE)
      .load(tempDir.getPath)
      .select(col(PATH), col(CONTENT))
      .first()
    firstRow match {
      case Row(p: String, c: Array[Byte]) =>
        assert(p.endsWith(file.getAbsolutePath), "should support space in file name")
        assert(c === content, "should read file with space in file name")
    }
  }
}
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/binaryfile/BinaryFileFormatSuite.scala | Scala | apache-2.0 | 15,573 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
/**
 * :: DeveloperApi ::
 * The data type for Maps. Keys in a map are not allowed to have `null` values.
 *
 * Please use [[DataTypes.createMapType()]] to create a specific instance.
 *
 * @param keyType The data type of map keys.
 * @param valueType The data type of map values.
 * @param valueContainsNull Indicates if map values have `null` values.
 */
case class MapType(
    keyType: DataType,
    valueType: DataType,
    valueContainsNull: Boolean) extends DataType {

  /** No-arg constructor for kryo. */
  def this() = this(null, null, false)

  // Appends a human-readable, tree-style description of the key and value
  // types to `builder` (used when printing schemas).
  private[sql] def buildFormattedString(prefix: String, builder: StringBuilder): Unit = {
    builder.append(s"$prefix-- key: ${keyType.typeName}\\n")
    builder.append(s"$prefix-- value: ${valueType.typeName} " +
      s"(valueContainsNull = $valueContainsNull)\\n")
    DataType.buildFormattedString(keyType, s"$prefix |", builder)
    DataType.buildFormattedString(valueType, s"$prefix |", builder)
  }

  // JSON representation of this type, used for schema (de)serialization.
  override private[sql] def jsonValue: JValue =
    ("type" -> typeName) ~
      ("keyType" -> keyType.jsonValue) ~
      ("valueType" -> valueType.jsonValue) ~
      ("valueContainsNull" -> valueContainsNull)

  /**
   * The default size of a value of the MapType is
   * 100 * (the default size of the key type + the default size of the value type).
   * (We assume that there are 100 elements).
   */
  override def defaultSize: Int = 100 * (keyType.defaultSize + valueType.defaultSize)

  override def simpleString: String = s"map<${keyType.simpleString},${valueType.simpleString}>"

  // Same map type, but with nullable key/value types and nullable values.
  override private[spark] def asNullable: MapType =
    MapType(keyType.asNullable, valueType.asNullable, valueContainsNull = true)

  // True if `f` holds for this type or, recursively, for the key or value type.
  override private[spark] def existsRecursively(f: (DataType) => Boolean): Boolean = {
    f(this) || keyType.existsRecursively(f) || valueType.existsRecursively(f)
  }
}
object MapType extends AbstractDataType {

  // The concrete type used when only "some map" is required.
  override private[sql] def defaultConcreteType: DataType = apply(NullType, NullType)

  // Accepts any MapType, regardless of its key/value types.
  override private[sql] def acceptsType(other: DataType): Boolean = other match {
    case _: MapType => true
    case _ => false
  }

  override private[sql] def simpleString: String = "map"

  /**
   * Construct a [[MapType]] object with the given key type and value type.
   * The `valueContainsNull` is true.
   */
  def apply(keyType: DataType, valueType: DataType): MapType =
    MapType(keyType, valueType, valueContainsNull = true)
}
| tophua/spark1.52 | sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala | Scala | apache-2.0 | 3,531 |
package edu.gemini.spModel.gemini.nici
import InstNICI.calcWavelength
import NICIParams.{ImagingMode, Channel1FW, Channel2FW}
import org.junit.Test
import org.junit.Assert._
/**
 * Test cases for the observing wavelength calculation.
 */
class ObsWavelengthCalcTest {

  // Regardless of explicit red or blue channel FW, the imaging mode takes
  // precedence.
  @Test def testImagingMode() {
    val wls = for {
      red <- Channel1FW.values
      blue <- Channel2FW.values
    } yield calcWavelength(ImagingMode.H1SLA, red, blue)

    val meta = NICIParams.ImagingModeMetaconfig.getMetaConfig(ImagingMode.H1SLA);
    wls.foreach(wl => assertEquals(meta.getChannel1Fw.centralWavelength, wl, 0.000001))
  }

  // Explicit red takes precedence when in manual mode.
  @Test def testRed() {
    val wls = for {
      blue <- Channel2FW.values
    } yield calcWavelength(ImagingMode.MANUAL, Channel1FW.KS, blue)

    wls.foreach(wl => assertEquals(Channel1FW.KS.centralWavelength, wl, 0.000001))
  }

  // Explicit blue is used when all else fails.
  @Test def testBlue() {
    val wl = calcWavelength(ImagingMode.MANUAL, Channel1FW.BLOCK, Channel2FW.J)
    assertEquals(Channel2FW.J.centralWavelength, wl, 0.000001)
  }

  // When there is nothing to go by, use the default.
  @Test def testDefault() {
    val wl = calcWavelength(ImagingMode.MANUAL, Channel1FW.BLOCK, Channel2FW.BLOCK)
    assertEquals(InstNICI.DEF_CENTRAL_WAVELENGTH, wl, 0.000001);
  }
}
} | arturog8m/ocs | bundle/edu.gemini.pot/src/test/scala/edu/gemini/spModel/gemini/nici/ObsWavelengthCalcTest.scala | Scala | bsd-3-clause | 1,460 |
package scalanlp.graphs
/*
Copyright 2010 David Hall
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Provides transformations on graphs.
 * @author dlwh
 */
trait Transformations {

  /**
   * Returns a digraph with every edge of `g` reversed.
   *
   * NOTE(review): the edges are reversed eagerly up front, yet `endPoints`
   * swaps the endpoints of an *already reversed* edge, which looks like it
   * may undo the reversal — TODO confirm against the Digraph contract.
   * Also note the `endPoints` vs `g.endpoints` casing difference; verify
   * both methods exist on Digraph.
   */
  def reverse[Node,Edge](g: Digraph[Node,Edge])(implicit reverser: EdgeReverser[Edge]):Digraph[Node,Edge] = {
    val reversedEdges = g.edges.map(reverser).toIndexedSeq;
    // Group the reversed edges by source node for O(1) edgesFrom lookups.
    val groupedBySource = reversedEdges.groupBy(g.source _);
    new Digraph[Node,Edge] {
      def edges = reversedEdges.iterator;
      def nodes = g.nodes;
      def endPoints(e: Edge) = g.endpoints(e).swap;
      def edgesFrom(n: Node) = groupedBySource.getOrElse(n, Seq.empty).iterator;
      def successors(n: Node) = groupedBySource.getOrElse(n, Seq.empty).map(sink).toSet.iterator;
      def getEdge(n: Node, n2: Node) = groupedBySource.get(n).flatMap(_.find(e => sink(e) == n2));
      // edges are already reversed, so use actual source/sink
      def sink(e: Edge) = g.sink(e);
      def source(e: Edge) = g.source(e);
    }
  }

  /**
   * Weighted variant of [[reverse]]: same construction, but each reversed
   * edge keeps its original weight.
   */
  def reverseWeighted[Node,Edge,W](g: Weighted[Node,Edge,W] with Digraph[Node,Edge])(implicit reverser: EdgeReverser[Edge]): WeightedDigraph[Node,Edge,W] = {
    val reversedEdges = g.edges.map(reverser).toIndexedSeq;
    val groupedBySource = reversedEdges.groupBy(g.source _);
    new Digraph[Node,Edge] with Weighted[Node,Edge,W] {
      def edges = reversedEdges.iterator;
      def nodes = g.nodes;
      def endPoints(e: Edge) = g.endpoints(e).swap;
      def edgesFrom(n: Node) = groupedBySource.getOrElse(n, Seq.empty).iterator;
      def successors(n: Node) = groupedBySource.getOrElse(n, Seq.empty).map(sink).toSet.iterator;
      def getEdge(n: Node, n2: Node) = groupedBySource.get(n).flatMap(_.find(e => sink(e) == n2));
      // edges are already reversed, so use actual source/sink
      def sink(e: Edge) = g.sink(e);
      def source(e: Edge) = g.source(e);
      def weight(e: Edge) = g.weight(e);
    }
  }
}
}
/** A function producing the reversed form of an edge. */
trait EdgeReverser[Edge] extends (Edge => Edge)

object EdgeReverser {

  /** Reverses a plain `(source, sink)` pair edge. */
  implicit def revPair[N]: EdgeReverser[(N, N)] = new EdgeReverser[(N, N)] {
    def apply(e: (N, N)) = e.swap
  }

  /** Reverses a weighted `(source, sink, weight)` edge, keeping its weight. */
  implicit def revWeightedEdge[N, W]: EdgeReverser[(N, N, W)] = new EdgeReverser[(N, N, W)] {
    def apply(e: (N, N, W)) = (e._2, e._1, e._3)
  }
}
| MLnick/scalanlp-core | graphs/src/main/scala/scalanlp/graphs/Transformations.scala | Scala | apache-2.0 | 2,724 |
package io.findify.sqsmock.actions
import akka.actor.ActorSystem
import akka.event.slf4j.Logger
import akka.http.scaladsl.model.{HttpResponse, StatusCodes}
import io.findify.sqsmock.messages.{DeleteMessageResponse, ErrorResponse, SendMessageResponse}
import io.findify.sqsmock.model.{Message, QueueCache}
import scala.collection.mutable
/**
* Created by shutty on 3/30/16.
*/
class DeleteMessageWorker(account: Long, queues: mutable.Map[String, QueueCache], system: ActorSystem) extends Worker {
  val log = Logger(this.getClass, "delete_message_worker")

  // Deletes the message identified by ReceiptHandle from the queue named by
  // QueueUrl. Answers 200 on success, 400 if any required field is missing or
  // the queue is unknown.
  def process(fields: Map[String, String]) = {
    val deleted = for {
      queueUrl <- fields.get("QueueUrl")
      handle <- fields.get("ReceiptHandle")
      queue <- queues.get(queueUrl)
    } yield {
      log.debug("deleting message from queue")
      queue.delete(handle)
      HttpResponse(StatusCodes.OK, entity = DeleteMessageResponse.toXML.toString())
    }
    deleted getOrElse {
      log.warn("cannot send message: possibly, some request parameter is missing")
      HttpResponse(StatusCodes.BadRequest, entity = ErrorResponse("Sender", "InvalidParameterValue", "oops").toXML.toString())
    }
  }
}
| findify/sqsmock | src/main/scala/io/findify/sqsmock/actions/DeleteMessageWorker.scala | Scala | mit | 1,172 |
package slinky.core.facade
import scala.scalajs.js
import slinky.core.ExternalComponent
import slinky.core.BuildingComponent
import slinky.readwrite.Writer
import slinky.core.ExternalPropsWriterProvider
/**
 * Slinky facade for React's `Profiler` component.
 *
 * A hand-written props writer is supplied (instead of a derived one) so that
 * `onRender` crosses the JS boundary as a plain js.Function7; the cast to
 * ExternalPropsWriterProvider adapts it to the ExternalComponent constructor.
 */
object Profiler
    extends ExternalComponent()(new Writer[Profiler.Props] {
      override def write(value: Profiler.Props): js.Object =
        js.Dynamic.literal(
          id = value.id,
          onRender = value.onRender: js.Function7[String, String, Double, Double, Double, Double, js.Object, Unit]
        )
    }.asInstanceOf[ExternalPropsWriterProvider]) {

  // onRender arguments: presumably (id, phase, actualDuration, baseDuration,
  // startTime, commitTime, interactions) per React's Profiler API — TODO confirm.
  case class Props(id: String, onRender: (String, String, Double, Double, Double, Double, js.Object) => Unit)

  override val component = ReactRaw.Profiler

  def apply(
      id: String,
      onRender: (String, String, Double, Double, Double, Double, js.Object) => Unit
  ): BuildingComponent[Nothing, js.Object] = apply(Props(id, onRender))
}
| shadaj/slinky | core/src/main/scala/slinky/core/facade/Profiler.scala | Scala | mit | 913 |
import scala.reflect.runtime.universe._
import scala.reflect.ClassTag
object Test extends App {
  // Negative compilation test (neg suite): `manifest[T]` needs a Manifest,
  // which cannot be materialized from a TypeTag context bound alone — each
  // call below is expected to be rejected by the compiler. Do not "fix".
  def typeTagWithoutClassTagIsnotManifest[T: TypeTag] = {
    println(manifest[T])
  }

  typeTagWithoutClassTagIsnotManifest[Int]
  typeTagWithoutClassTagIsnotManifest[String]
  typeTagWithoutClassTagIsnotManifest[Array[Int]]
}
| yusuke2255/dotty | tests/untried/neg/interop_typetags_without_classtags_arenot_manifests.scala | Scala | bsd-3-clause | 326 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.express.flow
import com.twitter.scalding.RichXHandler
import com.twitter.scalding.Tool
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.util.ToolRunner
import org.kiji.annotations.ApiAudience
import org.kiji.annotations.ApiStability
/**
* The main-method entry point for running an Express job. Functionally the same as Scalding's
* [[com.twitter.scalding.Tool.main]], but uses HBaseConfiguration to create the configuration,
* so properties in any `hbase-site.xml` on the classpath will be included.
*/
@ApiAudience.Public
@ApiStability.Stable
object ExpressTool {
  // Entry point: runs Scalding's Tool with an HBase-aware configuration so any
  // hbase-site.xml on the classpath is honored.
  def main(args: Array[String]) {
    try {
      ToolRunner.run(HBaseConfiguration.create(), new Tool, args)
    } catch {
      case t: Throwable => {
        //create the exception URL link in GitHub wiki
        val gitHubLink = RichXHandler.createXUrl(t)
        // If a known handler recognizes this exception type, prefix its mapped
        // explanation; otherwise just append the contribution hint.
        val extraInfo = (if(RichXHandler().handlers.exists(h => h(t))) {
            RichXHandler.mapping(t.getClass) + "\\n"
          }
          else {
            ""
          }) +
          "If you know what exactly caused this error, please consider contributing to" +
          " GitHub via following link.\\n" + gitHubLink
        //re-throw the exception with extra info
        throw new Throwable(extraInfo, t)
      }
    }
  }
}
| kijiproject/kiji-express | kiji-express/src/main/scala/org/kiji/express/flow/ExpressTool.scala | Scala | apache-2.0 | 2,030 |
package spark
import scala.collection.mutable
import scala.collection.immutable
import org.scalatest.FunSuite
import com.esotericsoftware.kryo._
import SparkContext._
class KryoSerializerSuite extends FunSuite {

  // Each test round-trips values through a fresh KryoSerializer instance and
  // asserts that deserialization reproduces the original value.
  test("basic types") {
    val ser = (new KryoSerializer).newInstance()
    def check[T](t: T): Unit =
      assert(ser.deserialize[T](ser.serialize(t)) === t)
    check(1)
    check(1L)
    check(1.0f)
    check(1.0)
    check(1.toByte)
    check(1.toShort)
    check("")
    check("hello")
    check(Integer.MAX_VALUE)
    check(Integer.MIN_VALUE)
    check(java.lang.Long.MAX_VALUE)
    check(java.lang.Long.MIN_VALUE)
    check[String](null)
    check(Array(1, 2, 3))
    check(Array(1L, 2L, 3L))
    check(Array(1.0, 2.0, 3.0))
    check(Array(1.0f, 2.9f, 3.9f))
    check(Array("aaa", "bbb", "ccc"))
    check(Array("aaa", "bbb", null))
    check(Array(true, false, true))
    check(Array('a', 'b', 'c'))
    check(Array[Int]())
  }

  // Tuples mixing primitive widths and strings.
  test("pairs") {
    val ser = (new KryoSerializer).newInstance()
    def check[T](t: T): Unit =
      assert(ser.deserialize[T](ser.serialize(t)) === t)
    check((1, 1))
    check((1, 1L))
    check((1L, 1))
    check((1L, 1L))
    check((1.0, 1))
    check((1, 1.0))
    check((1.0, 1.0))
    check((1.0, 1L))
    check((1L, 1.0))
    check((1.0, 1L))
    check(("x", 1))
    check(("x", 1.0))
    check(("x", 1L))
    check((1, "x"))
    check((1.0, "x"))
    check((1L, "x"))
    check(("x", "x"))
  }

  // Standard Scala collections and Option values, including nesting.
  test("Scala data structures") {
    val ser = (new KryoSerializer).newInstance()
    def check[T](t: T): Unit =
      assert(ser.deserialize[T](ser.serialize(t)) === t)
    check(List[Int]())
    check(List[Int](1, 2, 3))
    check(List[String]())
    check(List[String]("x", "y", "z"))
    check(None)
    check(Some(1))
    check(Some("hi"))
    check(mutable.ArrayBuffer(1, 2, 3))
    check(mutable.ArrayBuffer("1", "2", "3"))
    check(mutable.Map())
    check(mutable.Map(1 -> "one", 2 -> "two"))
    check(mutable.Map("one" -> 1, "two" -> 2))
    check(mutable.HashMap(1 -> "one", 2 -> "two"))
    check(mutable.HashMap("one" -> 1, "two" -> 2))
    check(List(Some(mutable.HashMap(1->1, 2->2)), None, Some(mutable.HashMap(3->4))))
  }

  // Installs MyRegistrator (from the `test` package in this file) via the
  // spark.kryo.registrator system property, then round-trips the custom
  // classes it registers. The property is cleared at the end so other tests
  // are unaffected.
  test("custom registrator") {
    import spark.test._
    System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)

    val ser = (new KryoSerializer).newInstance()
    def check[T](t: T): Unit =
      assert(ser.deserialize[T](ser.serialize(t)) === t)

    check(CaseClass(17, "hello"))

    val c1 = new ClassWithNoArgConstructor
    c1.x = 32
    check(c1)

    val c2 = new ClassWithoutNoArgConstructor(47)
    check(c2)

    val hashMap = new java.util.HashMap[String, String]
    hashMap.put("foo", "bar")
    check(hashMap)

    System.clearProperty("spark.kryo.registrator")
  }
}
package test {
  // Fixture types exercised by the "custom registrator" test above.
  case class CaseClass(i: Int, s: String) {}

  class ClassWithNoArgConstructor {
    var x: Int = 0
    override def equals(other: Any) = other match {
      case c: ClassWithNoArgConstructor => x == c.x
      case _ => false
    }
    // Keep the equals/hashCode contract: equal instances must hash identically.
    override def hashCode = x
  }

  class ClassWithoutNoArgConstructor(val x: Int) {
    override def equals(other: Any) = other match {
      case c: ClassWithoutNoArgConstructor => x == c.x
      case _ => false
    }
    // Keep the equals/hashCode contract: equal instances must hash identically.
    override def hashCode = x
  }

  // Registers the fixture classes with Kryo; referenced by name through the
  // spark.kryo.registrator system property.
  class MyRegistrator extends KryoRegistrator {
    override def registerClasses(k: Kryo) {
      k.register(classOf[CaseClass])
      k.register(classOf[ClassWithNoArgConstructor])
      k.register(classOf[ClassWithoutNoArgConstructor])
      k.register(classOf[java.util.HashMap[_, _]])
    }
  }
}
| javelinjs/spark | core/src/test/scala/spark/KryoSerializerSuite.scala | Scala | bsd-3-clause | 3,573 |
package model.dao
import javax.inject.Inject
import model.{Album, Artist, Rating}
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.driver.JdbcProfile
import slick.lifted.TableQuery
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by lukasz on 06.11.16.
*/
// Slick-backed data-access object for albums and artists. The large comment
// blocks below are earlier iterations kept by the author for reference.
class AlbumDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider)
  extends HasDatabaseConfigProvider[JdbcProfile] {

  lazy val AlbumTable = TableQuery[AlbumTable]
  lazy val ArtistTable = TableQuery[ArtistTable]

  import driver.api._

  // Maps the Rating ADT to/from an INT column.
  implicit val columnType: BaseColumnType[Rating] = MappedColumnType.base[Rating, Int](Rating.toInt, Rating.fromInt)

  // Slick mapping for the "albums" table.
  class AlbumTable(tag: Tag) extends Table[Album](tag, "albums") {
    def artistId = column[Long]("artist_id")
    def title = column[String]("title")
    def year = column[Int]("year")
    def rating = column[Rating]("rating")
    def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
    def * = (artistId, title, year, rating, id) <> (Album.tupled, Album.unapply)
  }

  // Slick mapping for the "artists" table.
  class ArtistTable(tag: Tag) extends Table[Artist](tag, "artists") {
    def name = column[String]("name")
    def id = column[Long]("id", O.AutoInc, O.PrimaryKey)
    def * = (name, id) <> (Artist.tupled, Artist.unapply)
  }

  // All albums, newest first.
  def selectAlbumsAction = db.run(AlbumTable.sortBy(_.year.desc).result)

  // Album/artist pairs via an implicit (for-comprehension) join.
  val selectAlbumsArtistsImplicitJoin = (for {
    album <- AlbumTable
    artist <- ArtistTable
    if album.artistId === artist.id
  } yield (album, artist)).result

  def selectAlbumsArtistsAction = db.run(selectAlbumsArtistsImplicitJoin)

  // Artist/album pairs via an explicit join, sorted by artist name.
  val selectAlbumsArtistsExplicitJoin =
    ArtistTable.join(AlbumTable).on {
      case (artist, album) => artist.id === album.artistId
    }.sortBy {
      case (artist, album) => artist.name.asc
    }.result

  def selectAlbumsArtistActionExplicit = db.run(selectAlbumsArtistsExplicitJoin)

  // Seeds three artists with one album each in a single DBIO composition,
  // threading the generated artist ids into the album rows.
  def insertAllAction = db.run {
    for {
      rollingStonesId <- ArtistTable returning ArtistTable.map(_.id) += Artist("Rolling Stones")
      burzumId <- ArtistTable returning ArtistTable.map(_.id) += Artist("Burzum")
      michaelJacksonId <- ArtistTable returning ArtistTable.map(_.id) += Artist("Michael Jackson")
      _ <- AlbumTable ++= Seq(
        Album(rollingStonesId, "Rolling Stones Greatests Hits",1987, Rating.Awesome),
        Album(burzumId, "Det Some Engang War", 1995, Rating.Good),
        Album(michaelJacksonId, "Stranger in the Moscow", 1998, Rating.Awesome)
      )
    } yield ()
  }

  /* def insertAllAction = db.run {
    for {
      keyboardCatId <- ArtistTable returning ArtistTable.map(_.id) += Artist("Keyboard Cat")
      spiceGirlsId <- ArtistTable returning ArtistTable.map(_.id) += Artist("Spice Girls")
      _ <- AlbumTable ++= Seq(
        Album(keyboardCatId, "Keyboard Cat's Greatests Hits", 2009, Rating.Awesome),
        Album(spiceGirlsId, "SpiceGirls hgraetest hits", 2008, Rating.NotBad)
      )
    } yield ()
  }*/

  /* def selectSpiceGirls(authorName: String) = db.run(AlbumTable.filter(_.artist === authorName).result)

  def albumsReleasedAfter1990WithNotBad(): Future[Seq[Album]] = db.run(AlbumTable
    .filter(_.year > 1990)
    .filter(_.rating >= (Rating.NotBad: Rating))
    .sortBy(_.artist.desc).result)

  def updateCatsHits() = db.run(AlbumTable
    .filter(_.artist === "Keyboard cat")
    .map(_.title)
    .update("Even greater hits"))

  def insertAction() = db.run(AlbumTable ++= Seq(
    Album("Pink Floyd", "Dark Side of the Moon", 1973, Rating.Awesome),
    Album("Pink Floyd", "Jakaś inna płyta z muzykom ;) łężćńóżźćę", 1974, Rating.Meh))
  )

  val selectionAction: DBIOAction[Seq[String], NoStream, Effect.Read] = AlbumTable.filter(_.artist === "Keyboard cat").map(_.title).result

  val insertAllAction: DBIOAction[Option[Int], NoStream, Effect.Write] = AlbumTable ++= Seq(
    Album("Pink Floyd", "Dark Side of the Moon", 1973, Rating.Awesome),
    Album("Pink Floyd", "Jakaś inna płyta z muzykom ;) łężćńóżźćę", 1974, Rating.Meh))

  def monadicInsertAction(artist: String, title: String, year: Int) = db.run {
    for {
      existing <- AlbumTable.filter { e => e.artist === artist && e.year < year }.result
      rating = existing.length match {
        case 0 => Rating.Awesome
        case _ => Rating.Meh
      }
      _ <- AlbumTable += Album(artist, title, year, rating)
    } yield ()
  }*/

  /*
  val a: Query[AlbumTable, Album, Seq] = AlbumTable
    .filter(_.year > 1990)
    .filter(_.rating >= (Rating.NotBad: Rating))
    .sortBy(_.artist.desc)

  val b: Query[Rep[Rating], Rating, Seq] = AlbumTable
    .filter(_.year > 1990)
    .filter(_.rating >= (Rating.NotBad: Rating))
    .map(_.rating)
  */
}
| lszku/ProductDatabase | app/model/dao/AlbumDAO.scala | Scala | bsd-3-clause | 4,835 |
package co.uproot.abandon
import org.scalatest.matchers.Matcher
import org.scalatest.Inside
import java.lang.Exception
import org.scalatest.StreamlinedXmlEquality._
import TestHelper._
import ParserHelper._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class ComplexProcessTest extends AnyFlatSpec with Matchers with Inside {

  "Abandon" should "handle simple xml test case without configuration" in {
    val quiet = true
    // Parse the sample ledger and process it with default (mostly empty) settings.
    val (parseError, scope, processedFiles) = Processor.parseAll(Seq("tests/small.ledger"), quiet)
    assert(!parseError)
    val xmlBalSettings = BalanceExportSettings(XMLType, None, Seq("not-used.xml"), None, true, Nil)
    val xmlJournalSettings = JournalExportSettings(XMLType, None, Seq("not-used.xml"), None)
    val settings = Settings(Nil, Nil, Nil, Nil, ReportOptions(Nil), Seq(xmlBalSettings), None, quiet, None, None)
    val appState = Processor.process(scope,settings.accounts, None)
    //TODO: Processor.checkConstaints(appState, settings.eodConstraints)
    // Export balance and journal reports and compare against the reference
    // files using streamlined XML equality (whitespace-insensitive).
    val xmlBalance = Reports.xmlExport(appState, xmlBalSettings, settings.txnFilters)
    val xmlJournal = Reports.xmlExport(appState, xmlJournalSettings, settings.txnFilters)
    val refXMLBalance = scala.xml.XML.loadFile("tests/refSmallBalance.xml")
    val refXMLJournal = scala.xml.XML.loadFile("tests/refSmallJournal.xml")
    //val prettyPrinter = new scala.xml.PrettyPrinter(1024,2)
    //println(prettyPrinter.format(xmlJournal))
    //println(prettyPrinter.format(refXMLJournal))
    assert(xmlBalance === refXMLBalance)
    assert(xmlJournal === refXMLJournal)
  }
}
}
| hrj/abandon | base/src/test/scala/co/uproot/abandon/ComplexProcessTest.scala | Scala | apache-2.0 | 1,603 |
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
* add your integration spec here.
* An integration test will fire up a whole play application in a real (or headless) browser
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {

  "Application" should {

    // Boots the whole Play application and drives it through a (headless)
    // browser on a random test port.
    "work from within a browser" in new WithBrowser {
      browser.goTo("http://localhost:" + port)
      browser.pageSource must contain("trackerApp")
    }
  }
}
| PaulKeeble/TVTracker | test/IntegrationSpec.scala | Scala | gpl-2.0 | 544 |
/**
* Copyright (C) 2017-2018 Koddi Inc
* See the LICENSE file distributed with this work for additional
* information regarding copyright ownership.
*/
package com.koddi.geocoder
package test
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{Success,Failure}
/**
 * Specs for AsyncGeocoder backed by canned XML responses.
 *
 * NOTE(review): assertions run inside `onComplete` callbacks; each test must
 * block on its future (`Await.ready`) or the test body can return before the
 * callback executes and the assertions are never evaluated.
 */
class AsyncGeocoderSpec extends TestSpec {

  import scala.concurrent.ExecutionContext.Implicits.global

  "An AsyncGeocoder" should "lookup a given address and trigger a success" in {
    val geocoder = new AsyncGeocoder(new MockGeocoder("api_response_address.xml"))
    val query = geocoder.lookup("2821 West 7th St., Dallas, TX 76107, US")

    query onComplete {
      case Success(results) => {
        val location = results.head.geometry.location
        loseAccuracy(location.latitude) should be(33)
        loseAccuracy(location.longitude) should be(-97)
      }
      case Failure(_) => fail
    }

    Await.ready(query, 5.seconds)
  }

  it should "reverse lookup lat/lng values and trigger a success" in {
    val geocoder = new AsyncGeocoder(new MockGeocoder("api_response_latlng.xml"))
    val query = geocoder.lookup(32.7505842, -97.3574015)

    query onComplete {
      case Success(results) => {
        val address = results.head.formattedAddress
        address should be("2821 W 7th St, Fort Worth, TX 76107, USA")
      }
      case Failure(_) => fail
    }

    Await.ready(query, 5.seconds)
  }

  it should "lookup an address by component objects" in {
    val geocoder = new AsyncGeocoder(new MockGeocoder("api_response_address.xml"))
    val query = geocoder.lookup(Seq(
      PostalCodeComponent("76107"),
      CountryComponent("us")
    ))

    query onComplete {
      case Success(results) => {
        val location = results.head.geometry.location
        loseAccuracy(location.latitude) should be(33)
        loseAccuracy(location.longitude) should be(-97)
      }
      case Failure(_) => fail
    }

    // Fix: this await was missing, so the test could complete before the
    // callback ran and the assertions above were never evaluated. All the
    // sibling tests already block on their future.
    Await.ready(query, 5.seconds)
  }

  it should "return an empty result sequence when the response has zero results" in {
    val geocoder = new AsyncGeocoder(new MockGeocoder("api_response_zero_results.xml"))
    val query = geocoder.lookup("NONE INVALID ADDRESS")

    query onComplete {
      case Success(results) => results should be(Seq.empty[Result])
      case Failure(error) => fail
    }

    Await.ready(query, 5.seconds)
  }

  it should "trigger a failure when an invalid lat/lng is given" in {
    val geocoder = new AsyncGeocoder(new MockGeocoder("api_response_invalid.xml"))
    val query = geocoder.lookup(-900d, -900d)

    query onComplete {
      case Success(_) => fail
      case Failure(error) => error should not be(null)
    }

    Await.ready(query, 5.seconds)
  }

  // Rounds a coordinate to the nearest integer for tolerant comparison.
  private def loseAccuracy(value: Double): Long = Math.round(value)
}
| mcross1882/geocoder | src/test/scala/com/koddi/geocoder/AsyncGeocoderSpec.scala | Scala | mit | 3,042 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.portlet.liferay
import java.{util ⇒ ju}
import javax.portlet.filter.PortletRequestWrapper
import javax.portlet.{PortletRequest, PortletSession}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.mockito.{Matchers, Mockito}
import org.orbeon.oxf.http.Headers
import org.orbeon.oxf.portlet.liferay.LiferayAPI.RoleFacade
import org.orbeon.oxf.test.ResourceManagerSupport
import org.scalatest.FunSpecLike
import org.scalatest.mockito.MockitoSugar
import scala.collection.JavaConverters._
import scala.collection.immutable.TreeMap
import scala.collection.mutable
class FormRunnerRequestFilterTest extends ResourceManagerSupport with FunSpecLike with MockitoSugar {

  describe("The portlet filter's `amendRequest()` function") {

    // Initial properties
    val initialProperties = Map("p1" → List("v1a", "v1b"))

    // Session: mock backed by a local mutable map so get/setAttribute behave
    // like a real portlet session.
    val sessionAttributes = mutable.Map[String, AnyRef]()
    val mockSession = mock[PortletSession]

    Mockito when mockSession.getAttribute(Matchers.anyString) thenAnswer new Answer[AnyRef] {
      def answer(invocation: InvocationOnMock) =
        sessionAttributes.get(invocation.getArguments()(0).asInstanceOf[String]).orNull
    }

    Mockito when mockSession.setAttribute(Matchers.anyString, Matchers.anyObject) thenAnswer new Answer[Unit] {
      def answer(invocation: InvocationOnMock) =
        sessionAttributes += invocation.getArguments()(0).asInstanceOf[String] → invocation.getArguments()(1)
    }

    // Request with initial properties
    val mockRequest = new PortletRequestWrapper(mock[PortletRequest]) {
      override def getProperty(name: String) = initialProperties.get(name) map (_.head) orNull
      override def getProperties(name: String) =
        (initialProperties.get(name) map (_.iterator) getOrElse Iterator.empty).asJavaEnumeration
      override def getPropertyNames = initialProperties.keysIterator.asJavaEnumeration
      override def getPortletSession = mockSession
      override def getPortletSession(create: Boolean) = mockSession
    }

    // Minimal stand-ins for the Liferay group/role/user/company facades.
    class MyGroup {
      def getGroupId = 42L
      def getName = "universe"
      def getDescriptiveName = getName
    }

    case class MyRole(getName: String) {
      def getType = LiferayAPI.LiferayRegularRoleType.value
      override def toString() = s"MyRole($getName)"
    }

    class MyUser {
      def getUserId = 123L
      def getScreenName = "jsmith"
      def getFullName = "John Paul Smith"
      def getFirstName = "John"
      def getMiddleName = "Paul"
      def getLastName = "Smith"
      def getEmailAddress = "test@orbeon.com"
      def getGroup = new MyGroup
      def getRoles = ju.Arrays.asList(MyRole("manager"): RoleFacade, MyRole("employee"): RoleFacade)
    }

    class MyCompany {
      def getAuthType = LiferayAPI.LiferayEmailAddressAuthType.name
    }

    import org.orbeon.oxf.portlet.liferay.FormRunnerAuthFilter._

    // Apply both wrapping stages to the mock request.
    val amendedRequest =
      wrapWithOrbeonAuthHeaders(wrapWithLiferayUserHeaders(mockRequest, new LiferayUser {
        override def userHeaders = LiferaySupport.userHeaders(new MyUser, new MyCompany, tests = true)
      }))

    // Headers expected on top of the initial properties after wrapping.
    val expectedProperties =
      initialProperties ++ Map(
        "orbeon-liferay-user-id" → List("123"),
        "orbeon-liferay-user-screen-name" → List("jsmith"),
        "orbeon-liferay-user-full-name" → List("John Paul Smith"),
        "orbeon-liferay-user-first-name" → List("John"),
        "orbeon-liferay-user-middle-name" → List("Paul"),
        "orbeon-liferay-user-last-name" → List("Smith"),
        "orbeon-liferay-user-email" → List("test@orbeon.com"),
        "orbeon-liferay-user-group-id" → List("42"),
        "orbeon-liferay-user-group-name" → List("universe"),
        "orbeon-liferay-user-roles" → List("manager", "employee"),
        Headers.OrbeonUsernameLower → List("test@orbeon.com"),
        Headers.OrbeonGroupLower → List("universe"),
        Headers.OrbeonRolesLower → List("manager", "employee"),
        Headers.OrbeonCredentialsLower → List("""{"username":"test%40orbeon.com","groups":["universe"],"roles":[{"name":"manager"},{"name":"employee"}],"organizations":[]}""")
      )

    // NOTE: Don't use Array for comparison, because Array's == doesn't work as expected in Scala
    val actualProperties =
      amendedRequest.getPropertyNames.asScala map (n ⇒ n → amendedRequest.getProperties(n).asScala.toList) toMap

    // Compare using TreeMap to get a reliable order
    def toTreeMap[K, V](map: Map[K, V])(implicit ord: Ordering[K]) = TreeMap[K, V]() ++ map

    it ("must set authentication headers based on incoming headers") {
      assert(toTreeMap(expectedProperties) === toTreeMap(actualProperties))
    }
  }
}
} | brunobuzzi/orbeon-forms | form-runner/jvm/src/test/scala/org/orbeon/oxf/portlet/liferay/FormRunnerRequestFilterTest.scala | Scala | lgpl-2.1 | 5,533 |
package spatial.interpreter
import argon.core._
import argon.nodes._
import spatial.aliases._
import spatial.nodes._
import argon.interpreter.{Interpreter => AInterpreter}
trait Arrays extends AInterpreter {
  // Extends the interpreter's node dispatch with Array-related IR nodes;
  // the superclass handlers are consulted first via orElse.
  override def matchNode(lhs: Sym[_]) = super.matchNode(lhs).orElse {
    // Indexing into an already-evaluated array; the index is narrowed to Int.
    case ArrayApply(EArray(array), EInt(i)) =>
      array(i.toInt)
    // Program inputs are taken from the Spatial configuration.
    case InputArguments() => spatialConfig.inputs
    // Not implemented: interpreting MapIndices fails fast with NotImplementedError.
    case MapIndices(_, _, _) =>
      ???
  }
}
| stanford-ppl/spatial-lang | spatial/core/src/spatial/interpreter/Arrays.scala | Scala | mit | 462 |
package test.testing
import src.main.scala.courses.parsing._
import org.scalatest.FeatureSpec
class DataControllerSpec extends FeatureSpec {
  // Representative HarvardX SQL dump path; each scenario parses one facet of it
  // (path, course name, file name, extension, year).
  // Changed from `var` to `val`: the fixture is never reassigned anywhere in this
  // spec, and an immutable fixture cannot be corrupted between scenarios.
  val testFilePath = "/cs91r-harvardx-project/src/main/resources/courses/harvardx-2013-02-25/ER22/HarvardX-ER22x-2013_Spring-auth_userprofile.sql"
  feature("Basic information extraction from filepath") {
    scenario("check the path of a DataController") {
      val testData = DataController(testFilePath)
      assert(testData.path === testFilePath)
    }
    scenario("check the course name of a DataController") {
      val testData = DataController(testFilePath)
      assert(testData.courseName === "ER22")
    }
    scenario("check the file name of a DataController") {
      val testData = DataController(testFilePath)
      assert(testData.fileName === "HarvardX-ER22x-2013_Spring-auth_userprofile")
    }
    scenario("check the file extension of a DataController") {
      val testData = DataController(testFilePath)
      // The extension must be the SQLFile case; any other variant fails the assert.
      assert(testData.fileExtension match {
        case SQLFile => true
        case _ => false
      })
    }
    scenario("check the year of a DataController") {
      val testData = DataController(testFilePath)
      assert(testData.year === "2013")
    }
  }
} | jimwaldo/HarvardX-Tools | src/test/scala/testing/DataControllerTest.scala | Scala | bsd-3-clause | 1,265 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.Properties
import scala.util.Random
import org.apache.spark._
import org.apache.spark.internal.config._
import org.apache.spark.memory.{TaskMemoryManager, TestMemoryManager}
import org.apache.spark.sql.{RandomDataGenerator, Row}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.{InterpretedOrdering, UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.map.BytesToBytesMap
/**
 * Test suite for [[UnsafeKVExternalSorter]], with randomly generated test data.
 */
class UnsafeKVExternalSorterSuite extends SparkFunSuite with SharedSparkSession {
  // Field types sampled when building the random key/value schemas below.
  private val keyTypes = Seq(IntegerType, FloatType, DoubleType, StringType)
  private val valueTypes = Seq(IntegerType, FloatType, DoubleType, StringType)
  // Degenerate-schema cases: empty key and/or empty value rows.
  testKVSorter(new StructType, new StructType, spill = true)
  testKVSorter(new StructType().add("c1", IntegerType), new StructType, spill = true)
  testKVSorter(new StructType, new StructType().add("c1", IntegerType), spill = true)
  // Fixed seed keeps the generated schemas (and hence the test names) stable across runs.
  private val rand = new Random(42)
  for (i <- 0 until 6) {
    val keySchema = RandomDataGenerator.randomSchema(rand, rand.nextInt(10) + 1, keyTypes)
    val valueSchema = RandomDataGenerator.randomSchema(rand, rand.nextInt(10) + 1, valueTypes)
    // Only the last two iterations exercise the spilling path.
    testKVSorter(keySchema, valueSchema, spill = i > 3)
  }
  /**
   * Create a test case using randomly generated data for the given key and value schema.
   *
   * The approach works as follows:
   *
   * - Create input by randomly generating data based on the given schema
   * - Run [[UnsafeKVExternalSorter]] on the generated data
   * - Collect the output from the sorter, and make sure the keys are sorted in ascending order
   * - Sort the input by both key and value, and sort the sorter output also by both key and value.
   *   Compare the sorted input and sorted output together to make sure all the key/values match.
   *
   * If spill is set to true, the sorter will spill probabilistically roughly every 100 records.
   */
  private def testKVSorter(keySchema: StructType, valueSchema: StructType, spill: Boolean): Unit = {
    // Create the data converters
    val kExternalConverter = CatalystTypeConverters.createToCatalystConverter(keySchema)
    val vExternalConverter = CatalystTypeConverters.createToCatalystConverter(valueSchema)
    val kConverter = UnsafeProjection.create(keySchema)
    val vConverter = UnsafeProjection.create(valueSchema)
    val keyDataGen = RandomDataGenerator.forType(keySchema, nullable = false).get
    val valueDataGen = RandomDataGenerator.forType(valueSchema, nullable = false).get
    // Each generated row is copied because the projections reuse a mutable row buffer.
    val inputData = Seq.fill(1024) {
      val k = kConverter(kExternalConverter.apply(keyDataGen.apply()).asInstanceOf[InternalRow])
      val v = vConverter(vExternalConverter.apply(valueDataGen.apply()).asInstanceOf[InternalRow])
      (k.asInstanceOf[InternalRow].copy(), v.asInstanceOf[InternalRow].copy())
    }
    val keySchemaStr = keySchema.map(_.dataType.simpleString).mkString("[", ",", "]")
    val valueSchemaStr = valueSchema.map(_.dataType.simpleString).mkString("[", ",", "]")
    test(s"kv sorting key schema $keySchemaStr and value schema $valueSchemaStr") {
      testKVSorter(
        keySchema,
        valueSchema,
        inputData,
        pageSize = 16 * 1024 * 1024,
        spill
      )
    }
  }
  /**
   * Create a test case using the given input data for the given key and value schema.
   *
   * The approach works as follows:
   *
   * - Create input by randomly generating data based on the given schema
   * - Run [[UnsafeKVExternalSorter]] on the input data
   * - Collect the output from the sorter, and make sure the keys are sorted in ascending order
   * - Sort the input by both key and value, and sort the sorter output also by both key and value.
   *   Compare the sorted input and sorted output together to make sure all the key/values match.
   *
   * If spill is set to true, the sorter will spill probabilistically roughly every 100 records.
   */
  private def testKVSorter(
      keySchema: StructType,
      valueSchema: StructType,
      inputData: Seq[(InternalRow, InternalRow)],
      pageSize: Long,
      spill: Boolean): Unit = {
    val memoryManager =
      new TestMemoryManager(new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"))
    val taskMemMgr = new TaskMemoryManager(memoryManager, 0)
    // A TaskContext must be installed because the sorter acquires task memory.
    TaskContext.setTaskContext(new TaskContextImpl(
      stageId = 0,
      stageAttemptNumber = 0,
      partitionId = 0,
      taskAttemptId = 98456,
      attemptNumber = 0,
      taskMemoryManager = taskMemMgr,
      localProperties = new Properties,
      metricsSystem = null))
    val sorter = new UnsafeKVExternalSorter(
      keySchema, valueSchema, SparkEnv.get.blockManager, SparkEnv.get.serializerManager,
      pageSize, SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD.defaultValue.get)
    // Insert the keys and values into the sorter
    inputData.foreach { case (k, v) =>
      sorter.insertKV(k.asInstanceOf[UnsafeRow], v.asInstanceOf[UnsafeRow])
      // 1% chance we will spill
      if (rand.nextDouble() < 0.01 && spill) {
        memoryManager.markExecutionAsOutOfMemoryOnce()
        sorter.closeCurrentPage()
      }
    }
    // Collect the sorted output
    val out = new scala.collection.mutable.ArrayBuffer[(InternalRow, InternalRow)]
    val iter = sorter.sortedIterator()
    while (iter.next()) {
      out += Tuple2(iter.getKey.copy(), iter.getValue.copy())
    }
    sorter.cleanupResources()
    val keyOrdering = InterpretedOrdering.forSchema(keySchema.map(_.dataType))
    val valueOrdering = InterpretedOrdering.forSchema(valueSchema.map(_.dataType))
    // Lexicographic (key, value) ordering used to canonicalize both sides for comparison.
    val kvOrdering = new Ordering[(InternalRow, InternalRow)] {
      override def compare(x: (InternalRow, InternalRow), y: (InternalRow, InternalRow)): Int = {
        keyOrdering.compare(x._1, y._1) match {
          case 0 => valueOrdering.compare(x._2, y._2)
          case cmp => cmp
        }
      }
    }
    // Testing to make sure output from the sorter is sorted by key
    var prevK: InternalRow = null
    out.zipWithIndex.foreach { case ((k, v), i) =>
      if (prevK != null) {
        assert(keyOrdering.compare(prevK, k) <= 0,
          s"""
             |key is not in sorted order:
             |previous key: $prevK
             |current key : $k
           """.stripMargin)
      }
      prevK = k
    }
    // Testing to make sure the key/value in output matches input
    assert(out.sorted(kvOrdering) === inputData.sorted(kvOrdering))
    // Make sure there is no memory leak
    assert(0 === taskMemMgr.cleanUpAllAllocatedMemory)
    TaskContext.unset()
  }
  test("kv sorting with records that exceed page size") {
    val pageSize = 128
    val schema = StructType(StructField("b", BinaryType) :: Nil)
    val externalConverter = CatalystTypeConverters.createToCatalystConverter(schema)
    val converter = UnsafeProjection.create(schema)
    val rand = new Random()
    // Binary payloads up to pageSize bytes force the sorter's large-record path.
    val inputData = Seq.fill(1024) {
      val kBytes = new Array[Byte](rand.nextInt(pageSize))
      val vBytes = new Array[Byte](rand.nextInt(pageSize))
      rand.nextBytes(kBytes)
      rand.nextBytes(vBytes)
      val k = converter(externalConverter.apply(Row(kBytes)).asInstanceOf[InternalRow])
      val v = converter(externalConverter.apply(Row(vBytes)).asInstanceOf[InternalRow])
      (k.asInstanceOf[InternalRow].copy(), v.asInstanceOf[InternalRow].copy())
    }
    testKVSorter(
      schema,
      schema,
      inputData,
      pageSize,
      spill = true
    )
  }
  test("SPARK-23376: Create UnsafeKVExternalSorter with BytesToByteMap having duplicated keys") {
    val memoryManager = new TestMemoryManager(new SparkConf())
    val taskMemoryManager = new TaskMemoryManager(memoryManager, 0)
    val map = new BytesToBytesMap(taskMemoryManager, 64, taskMemoryManager.pageSizeBytes())
    // Key/value are a unsafe rows with a single int column
    val schema = new StructType().add("i", IntegerType)
    val key = new UnsafeRow(1)
    key.pointTo(new Array[Byte](32), 32)
    key.setInt(0, 1)
    val value = new UnsafeRow(1)
    value.pointTo(new Array[Byte](32), 32)
    value.setInt(0, 2)
    // 65 appends of the same key overflow the map's initial capacity of 64.
    for (_ <- 1 to 65) {
      val loc = map.lookup(key.getBaseObject, key.getBaseOffset, key.getSizeInBytes)
      loc.append(
        key.getBaseObject, key.getBaseOffset, key.getSizeInBytes,
        value.getBaseObject, value.getBaseOffset, value.getSizeInBytes)
    }
    // Make sure we can successfully create a UnsafeKVExternalSorter with a `BytesToBytesMap`
    // which has duplicated keys and the number of entries exceeds its capacity.
    try {
      val context = new TaskContextImpl(0, 0, 0, 0, 0, taskMemoryManager, new Properties(), null)
      TaskContext.setTaskContext(context)
      new UnsafeKVExternalSorter(
        schema,
        schema,
        sparkContext.env.blockManager,
        sparkContext.env.serializerManager,
        taskMemoryManager.pageSizeBytes(),
        Int.MaxValue,
        map)
    } finally {
      TaskContext.unset()
    }
  }
}
| ConeyLiu/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeKVExternalSorterSuite.scala | Scala | apache-2.0 | 10,008 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl
import org.junit.Test
import builder.RouteBuilder
/**
 * Content Based Router test driven by header predicates
 * (NOTE(review): the original doc said "XPath expressions", but the route below
 * uses header() predicates; the method name retains the old "XPath" wording).
 */
class HeaderContentBasedRouterTest extends ScalaTestSupport {
  @Test
  def testXPathContentBasedRouter = {
    // Each mock endpoint should see exactly the one body routed to it.
    "mock:foo" expect {_.expectedBodiesReceived("Hello Foo")}
    "mock:bar" expect {_.expectedBodiesReceived("Hello Bar")}
    "mock:other" expect {_.expectedBodiesReceived("Hello World")}
    test {
      // Presence of the "foo"/"bar" header drives the choice; no header -> otherwise.
      template.sendBodyAndHeader("direct:a", "Hello Foo", "foo", 123)
      template.sendBodyAndHeader("direct:a", "Hello Bar", "bar", 456)
      template.sendBody("direct:a", "Hello World")
    }
    assertMockEndpointsSatisfied()
  }
  val builder = new RouteBuilder {
    //START SNIPPET: cbr
    "direct:a" ==> {
      choice {
        when (header("foo")) to ("mock:foo")
        when (header("bar")) to ("mock:bar")
        otherwise to ("mock:other")
      }
    }
    //END SNIPPET: cbr
  }
} | aaronwalker/camel | components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/HeaderContentBasedRouterTest.scala | Scala | apache-2.0 | 1,766 |
/*-
* #%L
* Core runtime for OOXOO
* %%
* Copyright (C) 2006 - 2017 Open Design Flow
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package com.idyria.osi.ooxoo.core.buffers.structural.io.sax
import java.io.File
import java.io.FileOutputStream
import com.idyria.osi.ooxoo.core.buffers.structural.Buffer
import com.idyria.osi.ooxoo.core.buffers.structural.ElementBuffer
import java.io.PrintStream
import java.net.URL
import java.io.ByteArrayOutputStream
import org.odfi.tea.files.FileWatcherAdvanced
import java.io.OutputStream
import java.io.InputStream
import java.io.FileInputStream
import java.lang.ref.WeakReference
import java.io.ByteArrayInputStream
import javax.xml.parsers.DocumentBuilderFactory
import org.w3c.dom.Element
import org.w3c.dom.Node
/**
 * Mixes file/stream/DOM (de)serialization and optional file-watching into an
 * [[ElementBuffer]]: `toFile`/`fromFile` keep track of the backing file, and a
 * [[FileWatcherAdvanced]] (if installed) notifies listeners on external changes
 * while suppressing notifications caused by this buffer's own writes.
 *
 * @author zm4632
 */
trait STAXSyncTrait extends ElementBuffer {

  /** Last file this buffer was read from / written to; set by fromFile and toFile. */
  var staxPreviousFile: Option[File] = None

  /** Optional watcher used to monitor [[staxPreviousFile]] for external changes. */
  var __staxFileWatcher: Option[FileWatcherAdvanced] = None

  /** Reload listeners, weakly keyed so collected owners are pruned at dispatch time. */
  var staxWatchListeners = Map[WeakReference[Any], (File => Any)]()

  /** Installs the watcher and immediately tries to start monitoring the current file. */
  def staxFileWatcher_=(w: FileWatcherAdvanced) = {
    this.__staxFileWatcher = Some(w)
    staxTryWatchStart
  }

  def staxFileWatcher = __staxFileWatcher

  /**
   * Starts monitoring the current file if both a watcher and a file are set and
   * monitoring is not already active. On change, notifies all registered listeners
   * (pruning entries whose weak owner was garbage-collected) unless the next reload
   * was flagged as self-inflicted by [[toFile]].
   */
  def staxTryWatchStart = (__staxFileWatcher, staxPreviousFile) match {
    case (None, _) =>
    case (_, None) =>
    case (Some(watcher), Some(file)) if (watcher.isMonitoredBy(this, file)) =>
    case (Some(watcher), Some(file)) =>
      // Monitor
      watcher.onFileChange(this, file) {
        f =>
          staxIgnoreNextReload match {
            case false =>
              // Call all listeners, and clean weak ones
              staxWatchListeners.foreach {
                case (ref, cl) if (ref.get() == null) =>
                  staxWatchListeners = staxWatchListeners - ref
                case (ref, cl) =>
                  cl(f)
              }
            case true =>
              staxIgnoreNextReload = false
              null
          }
      }
  }

  /** When true, the next watcher notification is swallowed (set before self-writes). */
  var staxIgnoreNextReload = false

  /**
   * Registers a reload listener for the current file.
   * Fails fast when no watcher or no file has been configured.
   * NOTE(review): the staxIgnoreNextReload guard below is kept verbatim from the
   * original; its evaluation time depends on FileWatcherAdvanced.onFileChange's
   * expected argument type — confirm before restructuring.
   */
  def onFileReload(listener: Any)(cl: File => Any) = (__staxFileWatcher, staxPreviousFile) match {
    case (None, _) => throw new IllegalArgumentException("Cannot watch file reload without a defined file watcher")
    case (_, None) => throw new IllegalArgumentException("Cannot watch file reload without a defined file")
    case (watcher, file) =>
      watcher.get.onFileChange(listener, file.get) {
        staxIgnoreNextReload match {
          case false => cl(_)
          case true => null
        }
      }
  }

  /** Streams this buffer as XML onto `os`; the stream is left open for the caller. */
  def toOutputStream(os: OutputStream, prefixes: Map[String, String] = Map[String, String]()) = {
    // The byte count returned by writeToOutputStream is not needed here
    // (the original bound it to an unused var).
    StAXIOBuffer.writeToOutputStream(this, os, true, prefixes)
    this
  }

  /**
   * Writes this buffer to the given file and remembers it for resyncToFile.
   * Parent of File is created by default.
   * Please check for validity before calling this method to ensure no useless
   * folders are created.
   */
  def toFile(f: File, prefixes: Map[String, String] = Map[String, String]()) = {
    this.synchronized {
      val sourceFile = f.getCanonicalFile
      sourceFile.getParentFile.mkdirs()
      // A watcher may be active: the change we are about to make must not retrigger listeners.
      this.__staxFileWatcher match {
        case Some(watcher) =>
          staxIgnoreNextReload = true
        case None =>
      }
      // Write out; close the stream even when streaming fails (fix: was leaked on error).
      val fos = new FileOutputStream(sourceFile)
      try toOutputStream(fos, prefixes)
      finally fos.close
      staxPreviousFile = Some(sourceFile)
      staxTryWatchStart
      this
    }
  }

  /** Populates this buffer by stream-parsing the XML at `url`. */
  def fromURL(url: URL) = {
    // Set Stax Parser and streamIn
    val io = com.idyria.osi.ooxoo.core.buffers.structural.io.sax.StAXIOBuffer(url)
    this.appendBuffer(io)
    io.streamIn
    this
  }

  /** Populates this buffer from `is`; the stream is always closed (fix: was leaked on error). */
  def fromInputStream(is: InputStream) = {
    val io = com.idyria.osi.ooxoo.core.buffers.structural.io.sax.StAXIOBuffer(is)
    this.appendBuffer(io)
    try io.streamIn
    finally is.close
    this
  }

  /** Parses the given XML string into this buffer (platform default charset, as before). */
  def fromString(s: String) = fromInputStream(new ByteArrayInputStream(s.getBytes))

  /** Populates this buffer from an existing DOM node. */
  def fromNode(node: Node) = {
    val io = com.idyria.osi.ooxoo.core.buffers.structural.io.sax.StAXIOBuffer(node)
    this.appendBuffer(io)
    io.streamIn
    this
  }

  /**
   * Reads this buffer from `f` if it exists and remembers the file for resyncToFile.
   * Parent of File is created by default.
   * Please check for validity before calling this method to ensure no useless
   * folders are created.
   */
  def fromFile(f: File) = {
    import scala.util.control.NonFatal
    val sourceFile = f.getCanonicalFile
    sourceFile.getParentFile.mkdirs()
    try {
      sourceFile.exists() match {
        case true => this.fromInputStream(new FileInputStream(f))
        case false =>
      }
    } catch {
      // Fix: only swallow non-fatal errors (was `case e: Throwable`) so VM errors and
      // interrupts still propagate; parse failures remain best-effort as before.
      case NonFatal(e) =>
        e.printStackTrace()
    }
    this.staxPreviousFile = Some(sourceFile)
    this
  }

  /** Intentionally empty in the original implementation — TODO upstream. */
  def fromElement(elt: Element) = {

  }

  /** Rewrites this buffer to the file it was last synced with; fails if none is set. */
  def resyncToFile = staxPreviousFile match {
    case Some(file) => this.toFile(file)
    case None => throw new IllegalAccessException(s"Cannot Resync Class ${getClass.getCanonicalName} to file because none has been set. Use the fromFile method first to set the source file")
  }

  /** Serializes this buffer and re-parses it into a namespace-aware DOM Document. */
  def toXMLDocument = {
    val resStr = toXMLStringNoIndenting
    // Parse Back
    //----------------
    val factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true)
    val builder = factory.newDocumentBuilder();
    builder.parse(new ByteArrayInputStream(resStr.getBytes))
  }

  /** Pretty-printed (indented) XML rendering of this buffer. */
  def toXMLString: String = {
    val res = StAXIOBuffer(this, indenting = true)
    val bout = new ByteArrayOutputStream()
    val out = new PrintStream(bout)
    out.append(res)
    out.close()
    new String(bout.toByteArray())
  }

  /** XML rendering with the given prefix -> namespace bindings, optionally indented. */
  def toXMLStringWithNamespaces(ns: Map[String, String], indenting: Boolean = false): String = {
    val res = StAXIOBuffer(this, indenting = indenting, ns)
    val bout = new ByteArrayOutputStream()
    val out = new PrintStream(bout)
    out.append(res)
    out.close()
    new String(bout.toByteArray())
  }

  /** Compact (non-indented) XML rendering of this buffer. */
  def toXMLStringNoIndenting = {
    val res = StAXIOBuffer(this, indenting = false)
    val bout = new ByteArrayOutputStream()
    val out = new PrintStream(bout)
    out.append(res)
    out.close()
    new String(bout.toByteArray())
  }

}
| richnou/ooxoo-core | ooxoo-core/src/main/scala/com/idyria/osi/ooxoo/core/buffers/structural/io/sax/STAXSyncTrait.scala | Scala | agpl-3.0 | 8,018 |
package sxr
import scala.tools.nsc.{ast, plugins, symtab, util, Global}
import ast.parser.Tokens
import plugins.Plugin
import symtab.Flags
import reflect.internal.util.SourceFile
object TokenUtils {
  // Re-export scalac's token-class predicates under local names so the wrappers
  // below can forward to them without clashing.
  import Tokens.{COMMENT, USCORE, isBrace => _isBrace, isKeyword => _isKeyword, isIdentifier => _isIdentifier, isLiteral => _isLiteral}
  def isBrace( code : Int ) = _isBrace( code )
  def isKeyword( code : Int ) = _isKeyword( code )
  def isIdentifier( code : Int ) = _isIdentifier( code )
  def isLiteral( code : Int ) = _isLiteral( code )
  // Human-readable name for a scalac token code; falls back to category names
  // (OPEN_BRACE, IDENTIFIER(n), ...) and finally UNK(n) for unknown codes.
  def toStr( code : Int ) = {
    import Tokens._
    code match {
      case ABSTRACT => "ABSTRACT"
      case ARROW => "ARROW"
      case AT => "AT"
      case BACKQUOTED_IDENT => "BACKQUOTED_IDENT"
      case CASE => "CASE"
      case CASECLASS => "CASECLASS"
      case CASEOBJECT => "CASEOBJECT"
      case CATCH => "CATCH"
      case CLASS => "CLASS"
      case COLON => "COLON"
      case COMMA => "COMMA"
      case COMMENT => "COMMENT"
      case DEF => "DEF"
      case DO => "DO"
      case DOT => "DOT"
      case ELSE => "ELSE"
      case EQUALS => "EQUALS"
      case ESCAPE => "ESCAPE"
      case EXTENDS => "EXTENDS"
      case FALSE => "FALSE"
      case FINAL => "FINAL"
      case FINALLY => "FINALLY"
      case FOR => "FOR"
      case FORSOME => "FORSOME"
      case HASH => "HASH"
      case IDENTIFIER => "IDENTIFIER"
      case IF => "IF"
      case IGNORE => "IGNORE"
      case IMPLICIT => "IMPLICIT"
      case IMPORT => "IMPORT"
      case INTERPOLATIONID => "INTERPOLATIONID"
      case LARROW => "LARROW"
      case LAZY => "LAZY"
      case LBRACE => "LBRACE"
      case LBRACKET => "LBRACKET"
      case LPAREN => "LPAREN"
      case MACRO => "MACRO"
      case MATCH => "MATCH"
      case NEW => "NEW"
      case NEWLINE => "NEWLINE"
      case NEWLINES => "NEWLINES"
      case NULL => "NULL"
      case OBJECT => "OBJECT"
      case OVERRIDE => "OVERRIDE"
      case PACKAGE => "PACKAGE"
      case PRIVATE => "PRIVATE"
      case PROTECTED => "PROTECTED"
      case RBRACE => "RBRACE"
      case RBRACKET => "RBRACKET"
      case RETURN => "RETURN"
      case RPAREN => "RPAREN"
      case SEALED => "SEALED"
      case SEMI => "SEMI"
      case STRINGPART => "STRINGPART"
      case SUBTYPE => "SUBTYPE"
      case SUPER => "SUPER"
      case SUPERTYPE => "SUPERTYPE"
      case SYMBOLLIT => "SYMBOLLIT"
      case THEN => "THEN"
      case THIS => "THIS"
      case THROW => "THROW"
      case TRAIT => "TRAIT"
      case TRUE => "TRUE"
      case TRY => "TRY"
      case TYPE => "TYPE"
      case USCORE => "USCORE"
      case VAL => "VAL"
      case VAR => "VAR"
      case VIEWBOUND => "VIEWBOUND"
      case WHILE => "WHILE"
      case WHITESPACE => "WHITESPACE"
      case WITH => "WITH"
      case XMLSTART => "XMLSTART"
      case YIELD => "YIELD"
      case _ =>
        // No dedicated name: classify by token category instead.
        if(isOpenBrace(code))
          "OPEN_BRACE"
        else if(isCloseBrace(code))
          "CLOSE_BRACE"
        else if(this.isIdentifier(code))
          s"IDENTIFIER($code)"
        else if(this.isLiteral(code))
          s"LITERAL($code)"
        else if(this.isKeyword(code))
          s"KEYWORD($code)"
        else if(isSymbol(code))
          s"SYMBOL($code)"
        else
          "UNK(" + code + ")"
    }
  }
}
// Base for the sxr compiler plugin: provides the token filter and a scanner
// subclass that records the tokens sxr annotates.
abstract class BrowseBase extends Plugin {
  val global : Global
  import global._
  /** Filters out unwanted tokens such as whitespace and commas. Braces are currently
   * included because () is annotated as Unit, and a partial function created by
   * { case ... } is associated with the opening brace. */
  private def includeToken(code: Int) =
  {
    import Tokens.{COMMENT, USCORE, isBrace, isKeyword, isIdentifier, isLiteral}
    code match
    {
      case COMMENT | USCORE => true
      case _ => isKeyword(code) || isIdentifier(code) || isLiteral(code) || isBrace(code)
    }
  }
  // Scanner that collects every interesting token (with position and length) into `tokens`.
  class Scan(unit : CompilationUnit) extends syntaxAnalyzer.UnitScanner(unit)
  {
    private[sxr] val tokens = wrap.Wrappers.treeSet[Token]
    // Comment length is end - start + 1 because `end` is inclusive here.
    def addComment(start: Int, end: Int) { tokens += new Token(start, end - start + 1, Tokens.COMMENT) }
    // Diagnostics are deliberately swallowed: this scanner only collects tokens,
    // the real compile run reports errors.
    override def deprecationWarning(off: Int, msg: String) {}
    override def error(off: Int, msg: String) {}
    override def incompleteInputError(off: Int, msg: String) {}
    override def foundComment(value: String, start: Int, end: Int) {
      addComment(start, end)
      super.foundComment(value, start, end)
    }
    override def foundDocComment(value: String, start: Int, end: Int) {
      addComment(start, end)
      super.foundDocComment(value, start, end)
    }
    override def nextToken() {
      // Capture offset/code before advancing; lastOffset is only valid afterwards.
      val offset0 = offset
      val code = token
      super.nextToken()
      if(includeToken(code)) {
        // Zero-length tokens are recorded with length 1 so they remain clickable.
        val length = (lastOffset - offset0) max 1
        tokens += new Token(offset0, length, code)
      }
    }
  }
}
| randompearl/browse | src/main/scala-2.10/BrowseBase.scala | Scala | bsd-3-clause | 4,842 |
package uk.gov.gds.ier.service
import uk.gov.gds.ier.test.MockingTestSuite
import uk.gov.gds.ier.config.Config
import uk.gov.gds.ier.client.IerApiClient
import uk.gov.gds.ier.model._
import org.joda.time.DateTime
import uk.gov.gds.ier.digest.ShaHashProvider
import uk.gov.gds.ier.model.Success
import uk.gov.gds.ier.model.Fail
import uk.gov.gds.ier.service.apiservice.{EroAuthorityDetails, IerApiApplicationResponse, IerApiService, ConcreteIerApiService}
import uk.gov.gds.ier.transaction.ordinary.InprogressOrdinary
class IerApiServiceTests extends MockingTestSuite {
  // Helper mixin supplying fakeServiceCall and the complete* application fixtures.
  val testHelper = new IerApiServiceTestsHelper {}
  // Canned successful API payload; the suites assert its deserialization into
  // IerApiApplicationResponse / EroAuthorityDetails.
  val successMessage = Success(s"""
    {
      "id": "5360fe69036424d9ec0a1657",
      "localAuthority": {
        "name": "Local authority name",
        "urls": ["url1", "url2"],
        "email": "some@email.com",
        "phone": "0123456789",
        "addressLine1": "line one",
        "addressLine2": "line two",
        "addressLine3": "line three",
        "addressLine4": "line four",
        "postcode": "WR26NJ"
      }
    }
  """, 0)
  behavior of "submitOrdinaryApplication"
  // The fake service inspects the serialized request JSON, then returns the canned
  // success payload, which must round-trip into the expected response object.
  it should "deserialize result correctly and return expected response" in {
    val application = completeOrdinaryApplication
    val r = testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("applicationType\":\"ordinary\"")
        requestJson should include("John")
        requestJson should include("Smith")
        successMessage
      }
    ).submitOrdinaryApplication(None, application, None, Some("1234"), "en")
    r should be(IerApiApplicationResponse(
      id = Some("5360fe69036424d9ec0a1657"),
      localAuthority = EroAuthorityDetails(
        name = "Local authority name",
        urls = "url1" :: "url2" :: Nil,
        email = Some("some@email.com"),
        phone = Some("0123456789"),
        addressLine1 = Some("line one"),
        addressLine2 = Some("line two"),
        addressLine3 = Some("line three"),
        addressLine4 = Some("line four"),
        postcode = Some("WR26NJ")
      )
    ))
  }
  // Same as above but with a client IP and reference number supplied.
  "submitOrdinaryApplication with specified IP and refNum" should
    "deserialize result correctly and return expected response" in {
    val application = completeOrdinaryApplication
    val r = testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("applicationType\":\"ordinary\"")
        requestJson should include("John")
        requestJson should include("Smith")
        successMessage
      }
    ).submitOrdinaryApplication(Some("127.0.0.1"), application, Some("55631D"), Some("1234"), "en")
    r should be(IerApiApplicationResponse(
      id = Some("5360fe69036424d9ec0a1657"),
      localAuthority = EroAuthorityDetails(
        name = "Local authority name",
        urls = "url1" :: "url2" :: Nil,
        email = Some("some@email.com"),
        phone = Some("0123456789"),
        addressLine1 = Some("line one"),
        addressLine2 = Some("line two"),
        addressLine3 = Some("line three"),
        addressLine4 = Some("line four"),
        postcode = Some("WR26NJ")
      )
    ))
  }
  // The session id from the in-progress application must be forwarded as webHash.
  it should "submit application with web hash from the application payload" in {
    val application = completeOrdinaryApplication.copy(sessionId = Some("test session id"))
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("webHash\":\"test session id\"")
        successMessage
      }
    ).submitOrdinaryApplication(None, application, None, None, "en")
  }
  behavior of "submitOverseasApplication"
  // Overseas variant of the deserialization round-trip check.
  it should "deserialize result correctly and return expected response" in {
    val application = completeOverseasApplication
    val r = testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("applicationType\":\"overseas\"")
        requestJson should include("John")
        requestJson should include("Smith")
        successMessage
      }
    ).submitOverseasApplication(None, application, None, Some("1234"))
    r should be(IerApiApplicationResponse(
      id = Some("5360fe69036424d9ec0a1657"),
      localAuthority = EroAuthorityDetails(
        name = "Local authority name",
        urls = "url1" :: "url2" :: Nil,
        email = Some("some@email.com"),
        phone = Some("0123456789"),
        addressLine1 = Some("line one"),
        addressLine2 = Some("line two"),
        addressLine3 = Some("line three"),
        addressLine4 = Some("line four"),
        postcode = Some("WR26NJ")
      )
    ))
  }
  // Session id forwarded as webHash, as in the ordinary suite.
  it should "submit application with web hash from the application payload" in {
    val application = completeOverseasApplication.copy(sessionId = Some("test session id"))
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("webHash\":\"test session id\"")
        successMessage
      }
    ).submitOverseasApplication(None, application, None, None)
  }
  behavior of "submitCrownApplication"
  // Crown variant of the deserialization round-trip check.
  it should "deserialize result correctly and return expected response" in {
    val application = completeCrownApplication
    val r = testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("applicationType\":\"crown\"")
        requestJson should include("John")
        requestJson should include("Smith")
        successMessage
      }
    ).submitCrownApplication(None, application, None, Some("1234"))
    r should be(IerApiApplicationResponse(
      id = Some("5360fe69036424d9ec0a1657"),
      localAuthority = EroAuthorityDetails(
        name = "Local authority name",
        urls = "url1" :: "url2" :: Nil,
        email = Some("some@email.com"),
        phone = Some("0123456789"),
        addressLine1 = Some("line one"),
        addressLine2 = Some("line two"),
        addressLine3 = Some("line three"),
        addressLine4 = Some("line four"),
        postcode = Some("WR26NJ")
      )
    ))
  }
  it should "submit application with web hash from the application payload" in {
    val application = completeCrownApplication.copy(sessionId = Some("test session id"))
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("webHash\":\"test session id\"")
        successMessage
      }
    ).submitCrownApplication(None, application, None, None)
  }
  // The three tests below pin the HasAddressOption -> ukAddr mapping in the API payload.
  it should "have ukAddr:resident when hasAddress:YesAndLivingThere" in {
    val application = completeCrownApplication.copy(
      address = Some(LastAddress(
        hasAddress = Some(HasAddressOption.YesAndLivingThere),
        address = Some(PartialAddress(
          Some("123 Fake Street, Fakerton"),
          Some("123456789"), "WR26NJ", None
        ))
      ))
    )
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("ukAddr\":\"resident\"")
        successMessage
      }
    ).submitCrownApplication(None, application, None, None)
  }
  it should "have ukAddr:not-resident when hasAddress:YesAndNotLivingThere" in {
    val application = completeCrownApplication.copy(
      address = Some(LastAddress(
        hasAddress = Some(HasAddressOption.YesAndNotLivingThere),
        address = Some(PartialAddress(
          Some("123 Fake Street, Fakerton"),
          Some("123456789"), "WR26NJ", None
        ))
      ))
    )
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("ukAddr\":\"not-resident\"")
        successMessage
      }
    ).submitCrownApplication(None, application, None, None)
  }
  it should "have ukAddr:no-connection when hasAddress:No" in {
    val application = completeCrownApplication.copy(
      address = Some(LastAddress(
        hasAddress = Some(HasAddressOption.No),
        address = Some(PartialAddress(
          Some("123 Fake Street, Fakerton"),
          Some("123456789"), "WR26NJ", None
        ))
      ))
    )
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("ukAddr\":\"no-connection\"")
        successMessage
      }
    ).submitCrownApplication(None, application, None, None)
  }
  behavior of "submitForcesApplication"
  // Forces variant of the deserialization round-trip check.
  it should "deserialize result correctly and return expected response" in {
    val application = completeForcesApplication
    val r = testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("applicationType\":\"forces\"")
        requestJson should include("John")
        requestJson should include("Smith")
        successMessage
      }
    ).submitForcesApplication(None, application, None, Some("1234"))
    r should be(IerApiApplicationResponse(
      id = Some("5360fe69036424d9ec0a1657"),
      localAuthority = EroAuthorityDetails(
        name = "Local authority name",
        urls = "url1" :: "url2" :: Nil,
        email = Some("some@email.com"),
        phone = Some("0123456789"),
        addressLine1 = Some("line one"),
        addressLine2 = Some("line two"),
        addressLine3 = Some("line three"),
        addressLine4 = Some("line four"),
        postcode = Some("WR26NJ")
      )
    ))
  }
  it should "submit application with web hash from the application payload" in {
    val application = completeForcesApplication.copy(sessionId = Some("test session id"))
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("webHash\":\"test session id\"")
        successMessage
      }
    ).submitForcesApplication(None, application, None, None)
  }
  // HasAddressOption -> ukAddr mapping, mirrored from the crown suite.
  it should "have ukAddr:resident when hasAddress:YesAndLivingThere" in {
    val application = completeForcesApplication.copy(
      address = Some(LastAddress(
        hasAddress = Some(HasAddressOption.YesAndLivingThere),
        address = Some(PartialAddress(
          Some("123 Fake Street, Fakerton"),
          Some("123456789"), "WR26NJ", None
        ))
      ))
    )
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("ukAddr\":\"resident\"")
        successMessage
      }
    ).submitForcesApplication(None, application, None, None)
  }
  it should "have ukAddr:not-resident when hasAddress:YesAndNotLivingThere" in {
    val application = completeForcesApplication.copy(
      address = Some(LastAddress(
        hasAddress = Some(HasAddressOption.YesAndNotLivingThere),
        address = Some(PartialAddress(
          Some("123 Fake Street, Fakerton"),
          Some("123456789"), "WR26NJ", None
        ))
      ))
    )
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("ukAddr\":\"not-resident\"")
        successMessage
      }
    ).submitForcesApplication(None, application, None, None)
  }
  it should "have ukAddr:no-connection when hasAddress:No" in {
    val application = completeForcesApplication.copy(
      address = Some(LastAddress(
        hasAddress = Some(HasAddressOption.No),
        address = Some(PartialAddress(
          Some("123 Fake Street, Fakerton"),
          Some("123456789"), "WR26NJ", None
        ))
      ))
    )
    testHelper.fakeServiceCall(
      requestJson => {
        requestJson should include("ukAddr\":\"no-connection\"")
        successMessage
      }
    ).submitForcesApplication(None, application, None, None)
  }
behavior of "submitCrownApplication address hack"

// When the applicant has no UK address the service must drop the structured
// `nat` field entirely and instead embed a human-readable description of the
// nationality in `nonat`, so IER staff handle the record as an 'other' elector.
it should "cause nat being resetted and explanation with nationality inserted as nonat" in {
  val application = completeCrownApplication.copy(
    nationality = Some(PartialNationality(
      british = Some(true),
      irish = Some(true),
      hasOtherCountry = Some(true),
      otherCountries = List("Czech"),
      noNationalityReason = None
    )),
    address = Some(LastAddress(
      hasAddress = Some(HasAddressOption.No),
      address = Some(PartialAddress(
        Some("123 Fake Street, Fakerton"), Some("123456789"), "WR26NJ", None
      ))
    ))
  )
  testHelper.fakeServiceCall(
    requestJson => {
      requestJson should include("applicationType\":\"crown\"")
      requestJson should not include("\"nat\"")
      requestJson should include("\"nonat\":\"Nationality is British, Irish and Czech. " +
        "This person has no UK address so needs to be set as an 'other' elector: IER-DS.\"")
      successMessage
    }
  ).submitCrownApplication(None, application, None, None)
}
"submitCrownApplication address hack with no nationality" should "should cause nat being resetted and explanation with nationality appended to nonat" in {
val application = completeCrownApplication.copy(
nationality = Some(PartialNationality(
british = Some(false),
irish = Some(false),
hasOtherCountry = Some(false),
otherCountries = Nil,
noNationalityReason = Some("Where I was born is a mystery to me.")
)),
address = Some(LastAddress(
hasAddress = Some(HasAddressOption.No),
address = Some(PartialAddress(
Some("123 Fake Street, Fakerton"), Some("123456789"), "WR26NJ", None
))
))
)
testHelper.fakeServiceCall(
requestJson => {
requestJson should include("applicationType\":\"crown\"")
requestJson should not include("\"nat\"")
requestJson should include("\"nonat\":\"Where I was born is a mystery to me.\\n" +
"Nationality is unspecified. " +
"This person has no UK address so needs to be set as an 'other' elector: IER-DS.\"")
successMessage
}
).submitCrownApplication(None, application, None, None)
}
behavior of "submitForcesApplication address hack"

// Forces variant of the no-UK-address hack: `nat` must be dropped and the
// nationality description moved into `nonat` (alongside ukAddr:no-connection).
it should "cause 'nat' being resetted and explanation with nationality inserted as 'nonat'" in {
  val application = completeForcesApplication.copy(
    nationality = Some(PartialNationality(
      british = Some(true),
      irish = Some(true),
      hasOtherCountry = Some(true),
      otherCountries = List("Czech"),
      noNationalityReason = None
    )),
    address = Some(LastAddress(
      hasAddress = Some(HasAddressOption.No),
      address = Some(PartialAddress(
        Some("123 Fake Street, Fakerton"), Some("123456789"), "WR26NJ", None
      ))
    ))
  )
  testHelper.fakeServiceCall(
    requestJson => {
      requestJson should include("applicationType\":\"forces\"")
      requestJson should include("ukAddr\":\"no-connection\"")
      requestJson should not include("\"nat\"")
      requestJson should include("\"nonat\":\"Nationality is British, Irish and Czech. " +
        "This person has no UK address so needs to be set as an 'other' elector: IER-DS.\"")
      successMessage
    }
  ).submitForcesApplication(None, application, None, None)
}

// When no nationality is supplied either, the applicant's own reason is kept
// and the explanation is appended to it in `nonat`.
"submitForcesApplication address hack with no nationality" should "cause 'nat' being resetted and explanation with nationality appended to 'nonat'" in {
  val application = completeForcesApplication.copy(
    nationality = Some(PartialNationality(
      british = Some(false),
      irish = Some(false),
      hasOtherCountry = Some(false),
      otherCountries = Nil,
      noNationalityReason = Some("Where I was born is a mystery to me.")
    )),
    address = Some(LastAddress(
      hasAddress = Some(HasAddressOption.No),
      address = Some(PartialAddress(
        Some("123 Fake Street, Fakerton"), Some("123456789"), "WR26NJ", None
      ))
    ))
  )
  testHelper.fakeServiceCall(
    requestJson => {
      requestJson should include("applicationType\":\"forces\"")
      requestJson should not include("\"nat\"")
      requestJson should include("\"nonat\":\"Where I was born is a mystery to me.\\n" +
        "Nationality is unspecified. " +
        "This person has no UK address so needs to be set as an 'other' elector: IER-DS.\"")
      successMessage
    }
  ).submitForcesApplication(None, application, None, None)
}
// Fix: the behaviour label misspelled the method under test
// ("getLocalAuthroityByGssCode"); the call below is getLocalAuthorityByGssCode.
behavior of "getLocalAuthorityByGssCode"

// Optional ERO contact fields (address lines 2-4, urls, ...) may be absent
// from the IER payload; deserialisation must still succeed and populate only
// the fields that are present.
it should "support missing fields for local authority contact info" in {
  val json = """
  {
    "gssCode": "E09000030",
    "contactDetails": {
      "addressLine1": "address_line_1",
      "postcode": "a11aa",
      "emailAddress": "email@address.com",
      "phoneNumber": "0123456789",
      "name": "Tower Hamlets"
    },
    "eroIdentifier": "tower-hamlets",
    "eroDescription": "Tower Hamlets"
  }
  """
  val mockApiClient = mock[IerApiClient]
  val mockConfig = mock[Config]
  val mockAddressService = mock[AddressService]
  val mockShaHashProvider = mock[ShaHashProvider] // renamed: was mockSharHashProvider (typo)
  val mockIsoCountryService = mock[IsoCountryService]
  val ierApiService = new ConcreteIerApiService(mockApiClient, jsonSerialiser, mockConfig,
    mockAddressService, mockShaHashProvider, mockIsoCountryService)

  // Explicit tuple: relying on auto-tupling of (json, 0) is deprecated.
  when(mockApiClient.get(any[String], any[(String, String)])).thenReturn(Success((json, 0)))

  val authority = ierApiService.getLocalAuthorityByGssCode("123")

  authority should have (
    'gssCode (Some("E09000030")),
    'eroIdentifier (Some("tower-hamlets")),
    'eroDescription (Some("Tower Hamlets")),
    'contactDetails (Some(LocalAuthorityContactDetails(
      addressLine1 = Some("address_line_1"),
      postcode = Some("a11aa"),
      emailAddress = Some("email@address.com"),
      phoneNumber = Some("0123456789"),
      name = Some("Tower Hamlets"))))
  )
}
}
| michaeldfallen/ier-frontend | test/uk/gov/gds/ier/service/IerApiServiceTests.scala | Scala | mit | 17,386 |
package com.theseventhsense.utils.collections
import scala.collection.JavaConverters._
import scala.util.Try
/**
* Created by erik on 2/12/16.
*/
/**
 * In-memory implementation of OffHeapMap backed by a java.util.HashMap.
 *
 * NOTE(review): the `sorted` flag is accepted but not used anywhere in this
 * implementation — iteration order is undefined regardless of its value.
 */
class OnHeapMap[K, V](sorted: Boolean = false) extends OffHeapMap[K, V] {
  private var underlying = new java.util.HashMap[K, V]()

  /** Iterates over (key, value) pairs in the map's internal order. */
  override def entryIterator: Iterator[(K, V)] =
    underlying.entrySet().iterator.asScala.map(e => e.getKey -> e.getValue)

  /** Iterates over the values only. */
  override def iterator: Iterator[V] = underlying.values().iterator().asScala

  override def size: Int = underlying.size()

  // Any lookup failure (e.g. after close() nulls the map) is treated as
  // "not present" rather than propagated.
  override def contains(key: K): Boolean =
    Try(underlying.containsKey(key)).getOrElse(false)

  // Best effort: a failing put is silently ignored (matches contains/get).
  override def set(key: K, value: V): Unit = {
    Try(underlying.put(key, value))
    ()
  }

  /** None when the key is absent (Option() maps Java's null to None). */
  override def get(key: K): Option[V] = Option(underlying.get(key))

  /** Removes and returns the previous value, if any. */
  override def remove(key: K): Option[V] = Option(underlying.remove(key))

  /** Releases the backing map; subsequent reads report absence, not errors. */
  override def close(): Unit = {
    underlying = null // scalastyle:ignore
  }
}
| 7thsense/utils-collections | core/src/main/scala/com/theseventhsense/utils/collections/OnHeapMap.scala | Scala | mit | 1,164 |
import collection._
object Test {
  // Buffer subclass that reports every sizeHint it receives. The expected
  // output of this regression test depends on exactly when/how often the
  // collections library forwards hints, so the body must stay untouched.
  class AlarmingBuffer[T] extends mutable.ArrayBuffer[T] {
    override def sizeHint(x: Int): Unit = {
      println("Received a size hint: " + x)
      super.sizeHint(x)
    }
  }

  def main(args: Array[String]): Unit = {
    // mapResult keeps AlarmingBuffer as the underlying builder but maps the
    // final result to an iterator; sizeHint calls must be delegated to it.
    val iteratorBuilder = (new AlarmingBuffer[Int]) mapResult {
      res => res.iterator
    }

    iteratorBuilder.sizeHint(10)
    iteratorBuilder ++= (0 until 10)
    iteratorBuilder.result().foreach(println)
  }
}
| som-snytt/dotty | tests/pos/t5577.scala | Scala | apache-2.0 | 485 |
package com.sksamuel.elastic4s.mappings
import com.sksamuel.elastic4s.{ElasticApi, JsonSugar}
import com.sksamuel.elastic4s.analyzers.{EnglishLanguageAnalyzer, SpanishLanguageAnalyzer}
import com.sksamuel.elastic4s.http.index.CreateIndexContentBuilder
import org.scalatest.{Matchers, WordSpec}
/**
 * Tests for the mapping-definition DSL: each case builds a MappingDefinition,
 * renders it via MappingBuilderFn and asserts on fragments of the JSON output.
 */
class MappingDefinitionDslTest extends WordSpec with Matchers with JsonSugar with ElasticApi {

  "mapping definition" should {
    // _source excludes render under "_source"."excludes".
    "insert source exclusion directives when set" in {
      val mapping = MappingDefinition("test").sourceExcludes("excludeMe1", "excludeMe2")
      val output = MappingBuilderFn.build(mapping).string()
      output should include(""""_source":{"excludes":["excludeMe1","excludeMe2"]}""")
    }
    // Setting excludes after source(true) must win: no "enabled" flag emitted.
    "insert source exclusion directives when set and override enabled directive" in {
      val mapping = MappingDefinition("test").sourceExcludes("excludeMe1", "excludeMe2").source(true)
      val output = MappingBuilderFn.build(mapping).string()
      output should include(""""_source":{"excludes":["excludeMe1","excludeMe2"]}""")
      output should not include """"enabled":true"""
    }
    "insert source enabling" in {
      val mapping = MappingDefinition("test").source(false)
      val output = MappingBuilderFn.build(mapping).string()
      output should not include """"_source":{"excludes":["excludeMe1","excludeMe2"]}"""
      output should include(""""enabled":false""")
    }
    // date_detection / numeric_detection: omitted unless explicitly set,
    // and rendered for both true and false once set.
    "not insert date detection by default" in {
      val mapping = MappingDefinition("type")
      val output = MappingBuilderFn.build(mapping).string()
      output should not include "date"
    }
    "insert date detection when set to true" in {
      val mapping = MappingDefinition("type").dateDetection(true)
      val output = MappingBuilderFn.build(mapping).string()
      output should include("""date_detection":true""")
    }
    "insert date detection when set to false" in {
      val mapping = MappingDefinition("type").dateDetection(false)
      val output = MappingBuilderFn.build(mapping).string()
      output should include("""date_detection":false""")
    }
    "not insert numeric detection by default" in {
      val mapping = MappingDefinition("type")
      val output = MappingBuilderFn.build(mapping).string()
      output should not include "numeric"
    }
    "insert numeric detection when set to true" in {
      val mapping = MappingDefinition("type").numericDetection(true)
      val output = MappingBuilderFn.build(mapping).string()
      output should include("""numeric_detection":true""")
    }
    "insert numeric detection when set to false" in {
      val mapping = MappingDefinition("type").numericDetection(false)
      val output = MappingBuilderFn.build(mapping).string()
      output should include("""numeric_detection":false""")
    }
    // Dynamic templates: rendered output is compared against a JSON fixture.
    "include dynamic templates" in {
      val req = createIndex("docsAndTags").mappings(
        mapping("my_type") templates(
          dynamicTemplate("es", textField("") analyzer SpanishLanguageAnalyzer) matchPattern "regex" matching "*_es" matchMappingType "string",
          dynamicTemplate("en", textField("") analyzer EnglishLanguageAnalyzer) matching "*" matchMappingType "string"
        )
      )
      CreateIndexContentBuilder(req).string() should matchJsonResource("/json/mappings/mappings_with_dyn_templates.json")
    }
  }
}
| Tecsisa/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/mappings/MappingDefinitionDslTest.scala | Scala | apache-2.0 | 3,338 |
/*
* Copyright 2012-2013 Stephane Godbillon (@sgodbillon) and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.core.protocol
import akka.actor.ActorRef
import java.nio.ByteOrder
import org.jboss.netty.buffer._
import org.jboss.netty.bootstrap._
import org.jboss.netty.channel._
import org.jboss.netty.channel.socket.nio._
import org.jboss.netty.handler.codec.oneone._
import org.jboss.netty.handler.codec.frame.FrameDecoder
import reactivemongo.core.actors.{ ChannelConnected, ChannelClosed, ChannelDisconnected }
import reactivemongo.api.SerializationPack
import reactivemongo.api.commands.GetLastError
import reactivemongo.core.errors._
import reactivemongo.core.netty._
import reactivemongo.utils.LazyLogger
import BufferAccessors._
import reactivemongo.api.ReadPreference
object `package` {
  /** Enriches netty's ChannelBuffer with MongoDB wire-protocol string helpers. */
  implicit class RichBuffer(val buffer: ChannelBuffer) extends AnyVal {
    import scala.collection.mutable.ArrayBuffer

    /** Write a UTF-8 encoded C-Style String. */
    def writeCString(s: String): ChannelBuffer = {
      val bytes = s.getBytes("utf-8")
      buffer writeBytes bytes
      buffer writeByte 0 // NUL terminator
      buffer
    }

    /** Write a UTF-8 encoded String. */
    def writeString(s: String): ChannelBuffer = {
      val bytes = s.getBytes("utf-8")
      // The Int32 length prefix counts the payload plus the trailing NUL.
      buffer writeInt (bytes.size + 1)
      buffer writeBytes bytes
      buffer writeByte 0
      buffer
    }

    /** Write the contents of the given [[reactivemongo.core.protocol.ChannelBufferWritable]]. */
    def write(writable: ChannelBufferWritable) = writable writeTo buffer

    /** Reads a UTF-8 String. */
    def readString(): String = {
      // Mirrors writeString: prefix includes the NUL, so the payload is
      // (prefix - 1) bytes; the readByte skips the terminator.
      val bytes = new Array[Byte](buffer.readInt - 1)
      buffer.readBytes(bytes)
      buffer.readByte
      new String(bytes, "UTF-8")
    }

    /**
     * Reads an array of Byte of the given length.
     *
     * @param length Length of the newly created array.
     */
    def readArray(length: Int): Array[Byte] = {
      val bytes = new Array[Byte](length)
      buffer.readBytes(bytes)
      bytes
    }

    /** Reads a UTF-8 C-Style String. */
    def readCString(): String = {
      // Accumulates bytes until the NUL terminator; tail-recursive, so safe
      // for arbitrarily long strings.
      @scala.annotation.tailrec
      def readCString(array: ArrayBuffer[Byte]): String = {
        val byte = buffer.readByte
        if (byte == 0x00)
          new String(array.toArray, "UTF-8")
        else readCString(array += byte)
      }
      readCString(new ArrayBuffer[Byte](16))
    }
  }
}
// traits
/**
* Something that can be written into a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]].
*/
trait ChannelBufferWritable {
  /** Write this instance into the given [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]]. */
  def writeTo: ChannelBuffer => Unit

  /** Size of the content that would be written. */
  // `size` is used to pre-compute wire lengths before serialisation
  // (e.g. Request.size feeds MessageHeader.messageLength), so it must match
  // exactly what writeTo emits.
  def size: Int
}
/**
* A constructor of T instances from a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]].
*
* @tparam T type which instances can be constructed with this.
*/
trait ChannelBufferReadable[T] {
  /** Makes an instance of T from the data from the given buffer. */
  // Implementations read via the buffer's reader index (see
  // MessageHeader.readFrom), consuming the bytes they parse.
  def readFrom(buffer: ChannelBuffer): T

  /** @see readFrom */
  def apply(buffer: ChannelBuffer): T = readFrom(buffer)
}
// concrete classes
/**
* Header of a Mongo Wire Protocol message.
*
* @param messageLength length of this message.
* @param requestID id of this request (> 0 for request operations, else 0).
* @param responseTo id of the request that the message including this a response to (> 0 for reply operation, else 0).
* @param opCode operation code of this message.
*/
case class MessageHeader(
    messageLength: Int,
    requestID: Int,
    responseTo: Int,
    opCode: Int) extends ChannelBufferWritable {
  // Serialised as four consecutive little-endian Int32s, in field order.
  override val writeTo = writeTupleToBuffer4((messageLength, requestID, responseTo, opCode)) _
  // Fixed wire size of the header: four 4-byte integers.
  override def size = 4 + 4 + 4 + 4
}
/** Header deserializer from a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]]. */
/** Header deserializer from a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]]. */
object MessageHeader extends ChannelBufferReadable[MessageHeader] {
  override def readFrom(buffer: ChannelBuffer) = {
    // Fields are consumed in the exact order MessageHeader.writeTo emits them.
    val length = buffer.readInt
    val id = buffer.readInt
    val to = buffer.readInt
    val code = buffer.readInt
    MessageHeader(length, id, to, code)
  }
}
/**
* Request message.
*
* @param requestID id of this request, so that the response may be identifiable. Should be strictly positive.
* @param op request operation.
* @param documents body of this request, a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]] containing 0, 1, or many documents.
* @param channelIdHint a hint for sending this request on a particular channel.
*/
case class Request(
    requestID: Int,
    responseTo: Int, // TODO remove, nothing to do here.
    op: RequestOp,
    documents: BufferSequence,
    readPreference: ReadPreference = ReadPreference.primary,
    channelIdHint: Option[Int] = None) extends ChannelBufferWritable {
  // Serialises header, then operation, then the document payload — wire order.
  override val writeTo = { buffer: ChannelBuffer =>
    buffer write header
    buffer write op
    buffer writeBytes documents.merged
  }

  // 16 = MessageHeader.size (four Int32 fields).
  override def size = 16 + op.size + documents.merged.writerIndex

  /** Header of this request */
  lazy val header = MessageHeader(size, requestID, responseTo, op.code)
}
/**
* A helper to build write request which result needs to be checked (by sending a [[reactivemongo.core.commands.GetLastError]] command after).
*
* @param op write operation.
* @param documents body of this request, a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]] containing 0, 1, or many documents.
* @param getLastError a [[reactivemongo.core.commands.GetLastError]] command message.
*/
case class CheckedWriteRequest(
    op: WriteRequestOp,
    documents: BufferSequence,
    getLastError: GetLastError) {
  /**
   * Builds the pair of request makers to send: the write itself, followed by
   * the getLastError command that verifies its outcome on the same database.
   */
  def apply(): (RequestMaker, RequestMaker) = {
    import reactivemongo.api.BSONSerializationPack
    import reactivemongo.api.commands.Command
    import reactivemongo.api.commands.bson.BSONGetLastErrorImplicits.GetLastErrorWriter
    val gleRequestMaker = Command.requestMaker(BSONSerializationPack).onDatabase(op.db, getLastError, ReadPreference.primary)(GetLastErrorWriter).requestMaker
    RequestMaker(op, documents) -> gleRequestMaker
  }
}
/**
* A helper to build requests.
*
* @param op write operation.
* @param documents body of this request, a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]] containing 0, 1, or many documents.
* @param channelIdHint a hint for sending this request on a particular channel.
*/
/**
 * Deferred request: carries everything except the request id, which is only
 * known once the connection actor allocates one.
 */
case class RequestMaker(
    op: RequestOp,
    documents: BufferSequence = BufferSequence.empty,
    readPreference: ReadPreference = ReadPreference.primary,
    channelIdHint: Option[Int] = None) {

  /** Materialises the [[Request]] for the allocated request id. */
  def apply(id: Int): Request =
    Request(id, 0, op, documents, readPreference, channelIdHint)
}
/**
* @define requestID id of this request, so that the response may be identifiable. Should be strictly positive.
* @define op request operation.
* @define documentsA body of this request, an Array containing 0, 1, or many documents.
* @define documentsC body of this request, a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]] containing 0, 1, or many documents.
*/
object Request {
  /**
   * Create a request.
   *
   * @param requestID $requestID
   * @param op $op
   * @param documents $documentsA
   */
  def apply(requestID: Int, responseTo: Int, op: RequestOp, documents: Array[Byte]): Request = {
    // Wrap the raw bytes little-endian, as required by the BSON wire format.
    val body = BufferSequence(
      ChannelBuffers.wrappedBuffer(ByteOrder.LITTLE_ENDIAN, documents))
    Request(requestID, responseTo, op, body)
  }

  /**
   * Create a request replying to nothing (responseTo = 0).
   *
   * @param requestID $requestID
   * @param op $op
   * @param documents $documentsA
   */
  def apply(requestID: Int, op: RequestOp, documents: Array[Byte]): Request =
    apply(requestID, 0, op, documents)

  /**
   * Create a request with an empty body.
   *
   * @param requestID $requestID
   * @param op $op
   */
  def apply(requestID: Int, op: RequestOp): Request =
    apply(requestID, op, Array.empty[Byte])
}
/**
* A Mongo Wire Protocol Response messages.
*
* @param header header of this response.
* @param reply the reply operation contained in this response.
* @param documents body of this response, a [[http://static.netty.io/3.5/api/org/jboss/netty/buffer/ChannelBuffer.html ChannelBuffer]] containing 0, 1, or many documents.
* @param info some meta information about this response, see [[reactivemongo.core.protocol.ResponseInfo]].
*/
case class Response(
    header: MessageHeader,
    reply: Reply,
    documents: ChannelBuffer,
    info: ResponseInfo) {
  /**
   * If this response is in error, explain this error.
   *
   * Lazily parses the first document of the reply as the error payload;
   * None when the reply is not flagged as in error or carries no document.
   */
  lazy val error: Option[DatabaseException] = {
    if (reply.inError) {
      val bson = Response.parse(this)
      //val bson = ReplyDocumentIterator(reply, documents)
      if (bson.hasNext)
        Some(ReactiveMongoException(bson.next))
      else None
    } else None
  }
}
object Response {
  import reactivemongo.api.BSONSerializationPack
  import reactivemongo.bson.BSONDocument
  import reactivemongo.bson.DefaultBSONHandlers.BSONDocumentIdentity
  //import reactivemongo.api.collections.default.BSONDocumentReaderAsBufferReader

  /** Lazily parses every document of the response as a raw BSONDocument. */
  def parse(response: Response): Iterator[BSONDocument] =
    ReplyDocumentIterator(BSONSerializationPack)(response.reply, response.documents)(BSONDocumentIdentity)
}
/**
* Response meta information.
*
* @param channelId the id of the channel that carried this response.
*/
case class ResponseInfo(channelId: Int)
/** Known MongoDB wire-protocol versions, ordered by their numeric value. */
sealed trait MongoWireVersion extends Ordered[MongoWireVersion] {
  /** Raw wire-protocol number (maxWireVersion). */
  def value: Int

  final def compare(x: MongoWireVersion): Int =
    if (value < x.value) -1
    else if (value > x.value) 1
    else 0
}

object MongoWireVersion {
  /*
   * Original meaning of MongoWireVersion is more about protocol features.
   *
   * - RELEASE_2_4_AND_BEFORE (0)
   * - AGG_RETURNS_CURSORS (1)
   * - BATCH_COMMANDS (2)
   *
   * But wireProtocol=1 is virtually non-existent; Mongo 2.4 was 0 and Mongo 2.6 is 2.
   */
  object V24AndBefore extends MongoWireVersion { val value = 0 }
  object V26 extends MongoWireVersion { val value = 2 }
  object V30 extends MongoWireVersion { val value = 3 }

  /** Maps a raw protocol number onto the closest known version (rounding down). */
  def apply(v: Int): MongoWireVersion = v match {
    case n if n >= V30.value => V30
    case n if n >= V26.value => V26
    case _                   => V24AndBefore
  }

  def unapply(v: MongoWireVersion): Option[Int] = Some(v.value)
}
// protocol handlers for netty.
private[reactivemongo] class RequestEncoder extends OneToOneEncoder {
  import RequestEncoder._
  // Encodes a Request into a little-endian buffer sized exactly to the
  // message; any other object is logged and passed through unchanged.
  def encode(ctx: ChannelHandlerContext, channel: Channel, obj: Object) =
    obj match {
      case message: Request => {
        val buffer: ChannelBuffer = ChannelBuffers.buffer(ByteOrder.LITTLE_ENDIAN, message.size) //ChannelBuffers.dynamicBuffer(ByteOrder.LITTLE_ENDIAN, 1000)
        message writeTo buffer
        buffer
      }
      case _ => {
        logger.error("weird... do not know how to encode this object: " + obj)
        obj
      }
    }
}
object ReplyDocumentIterator {
  /**
   * Lazily deserialises the documents of a reply straight from the channel
   * buffer. Each BSON document starts with its own Int32 length, so every
   * step slices getInt(readerIndex) bytes and hands them to the reader.
   */
  def apply[P <: SerializationPack, A](pack: P)(reply: Reply, buffer: ChannelBuffer)(implicit reader: pack.Reader[A]): Iterator[A] = new Iterator[A] {
    override def hasNext = buffer.readable
    override def next =
      try {
        val cbrb = ChannelBufferReadableBuffer(buffer.readBytes(buffer.getInt(buffer.readerIndex)))
        pack.readAndDeserialize(cbrb, reader)
      } catch {
        case e: IndexOutOfBoundsException =>
          /*
           * If this happens, the buffer is exhausted, and there is probably a bug.
           * It may happen if an enumerator relying on it is concurrently applied to many iteratees – which should not be done!
           */
          throw new ReplyDocumentIteratorExhaustedException(e)
      }
  }
}
/*private[reactivemongo] case class ReplyDocumentIterator[P <: SerializationPack, T](pack: P, private val reply: Reply, private val buffer: ChannelBuffer)(implicit reader: P#Reader[T]) extends Iterator[T] {
def hasNext = buffer.readable
def next =
try {
val cbrb = ChannelBufferReadableBuffer(buffer.readBytes(buffer.getInt(buffer.readerIndex)))
pack.readAndDeserialize(cbrb, reader)
reader.read(ChannelBufferReadableBuffer(buffer.readBytes(buffer.getInt(buffer.readerIndex))))
} catch {
case e: IndexOutOfBoundsException =>
/*
* If this happens, the buffer is exhausted, and there is probably a bug.
* It may happen if an enumerator relying on it is concurrently applied to many iteratees – which should not be done!
*/
throw new ReplyDocumentIteratorExhaustedException(e)
}
}*/
/**
 * Signals that the reply buffer was exhausted while documents were still
 * expected (see [[ReplyDocumentIterator]]); wraps the underlying cause.
 */
case class ReplyDocumentIteratorExhaustedException(cause: Exception)
  extends Exception(cause)
private[reactivemongo] object RequestEncoder {
  // Shared logger for the encoder pipeline stage.
  val logger = LazyLogger("reactivemongo.core.protocol.RequestEncoder")
}
private[reactivemongo] class ResponseFrameDecoder extends FrameDecoder {
  // Splits the inbound byte stream into whole MongoDB messages: the first
  // Int32 of a message is its total length, so we wait until both the prefix
  // and the full message are readable, returning null (netty's "need more
  // data" signal) otherwise.
  override def decode(context: ChannelHandlerContext, channel: Channel, buffer: ChannelBuffer) = {
    val readableBytes = buffer.readableBytes
    if (readableBytes < 4) null
    else {
      buffer.markReaderIndex
      val length = buffer.readInt
      // Rewind so the length prefix stays part of the emitted frame.
      buffer.resetReaderIndex
      if (length <= readableBytes && length > 0)
        buffer.readBytes(length)
      else null
    }
  }
}
private[reactivemongo] class ResponseDecoder extends OneToOneDecoder {
  import java.net.InetSocketAddress

  // Parses one framed buffer into a Response: header then reply are consumed,
  // and the remaining readable bytes are kept as the (unparsed) documents.
  def decode(ctx: ChannelHandlerContext, channel: Channel, obj: Object) = {
    val buffer = obj.asInstanceOf[ChannelBuffer]
    val header = MessageHeader(buffer)
    val reply = Reply(buffer)

    Response(header, reply, buffer, ResponseInfo(channel.getId))
  }
}
/**
 * Terminal netty handler: forwards decoded responses and channel lifecycle
 * events to the connection actor.
 */
private[reactivemongo] class MongoHandler(receiver: ActorRef) extends SimpleChannelHandler {
  import MongoHandler._

  override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
    val response = e.getMessage.asInstanceOf[Response]
    log(e, "messageReceived " + response + " will be send to " + receiver)
    receiver ! response
    super.messageReceived(ctx, e)
  }

  override def writeComplete(ctx: ChannelHandlerContext, e: WriteCompletionEvent) {
    log(e, "a write is complete!")
    super.writeComplete(ctx, e)
  }

  override def writeRequested(ctx: ChannelHandlerContext, e: MessageEvent) {
    log(e, "a write is requested!")
    super.writeRequested(ctx, e)
  }

  override def channelConnected(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
    log(e, "connected")
    receiver ! ChannelConnected(e.getChannel.getId)
    super.channelConnected(ctx, e)
  }

  // NOTE(review): unlike the handlers above, the two callbacks below do not
  // call super — presumably to stop further propagation once the actor is
  // notified; confirm this is intentional.
  override def channelDisconnected(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
    log(e, "disconnected")
    receiver ! ChannelDisconnected(e.getChannel.getId)
  }

  override def channelClosed(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
    log(e, "closed")
    receiver ! ChannelClosed(e.getChannel.getId)
  }

  // Errors are only logged here; the actor reacts to the disconnect/close
  // events that follow on the same channel.
  override def exceptionCaught(ctx: org.jboss.netty.channel.ChannelHandlerContext, e: org.jboss.netty.channel.ExceptionEvent) {
    log(e, "CHANNEL ERROR: " + e.getCause)
  }

  def log(e: ChannelEvent, s: String) = logger.trace("(channel=" + e.getChannel.getId + ") " + s)
}

private[reactivemongo] object MongoHandler {
  private val logger = LazyLogger("reactivemongo.core.protocol.MongoHandler")
}
| bfil/ReactiveMongo | driver/src/main/scala/core/protocol/protocol.scala | Scala | apache-2.0 | 16,113 |
package com.twitter.finagle.factory
import com.twitter.conversions.time._
import com.twitter.finagle.{ClientConnection, MockTimer, Service, ServiceFactory,
ServiceTimeoutException, TimeoutException}
import com.twitter.util.{Await, Future, Promise, Return, Time}
import org.mockito.Matchers.any
import org.mockito.Mockito.{verify, when}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import scala.language.reflectiveCalls
@RunWith(classOf[JUnitRunner])
class TimeoutFactoryTest extends FunSuite with MockitoSugar {

  /**
   * Shared fixture: a TimeoutFactory over a mocked underlying factory whose
   * service-acquisition future never completes on its own, plus a mock timer
   * so tests control the passage of time.
   */
  trait TimeoutFactoryHelper {
    val timer = new MockTimer
    val underlying = mock[ServiceFactory[String, String]]
    when(underlying.close(any[Time])).thenReturn(Future.Done)

    // Records any interrupt raised on the pending service acquisition.
    val promise = new Promise[Service[String, String]] {
      @volatile var interrupted: Option[Throwable] = None
      setInterruptHandler { case exc => interrupted = Some(exc) }
    }
    when(underlying(any[ClientConnection])).thenReturn(promise)

    val timeout = 1.second
    val exception = new ServiceTimeoutException(timeout)
    val factory = new TimeoutFactory(underlying, 1.second, exception, timer)
  }

  /** Fixture that advances frozen time past the timeout before assertions. */
  trait AfterHelper extends TimeoutFactoryHelper {
    val res = factory()
    Time.withCurrentTimeFrozen { tc =>
      verify(underlying)(any[ClientConnection])
      assert(promise.interrupted === None)
      assert(!res.isDefined)
      tc.advance(5.seconds)
      timer.tick()
    }
  }

  test("TimeoutFactory after the timeout should fail the service acquisition") {
    new AfterHelper {
      assert(res.isDefined)
      val e = intercept[TimeoutException] {
        Await.result(res)
      }
      assert(e === exception)
    }
  }

  test("TimeoutFactory after the timeout should interrupt the underlying promise with a TimeoutException") {
    new AfterHelper {
      // Bug fix: the original used `forall`, which is vacuously true when the
      // promise was never interrupted (None.forall == true), so the test could
      // not fail. `exists` requires that an interrupt actually happened AND
      // that it was a TimeoutException.
      assert(promise.interrupted.exists {
        case _: java.util.concurrent.TimeoutException => true
        case _ => false
      })
    }
  }

  test("TimeoutFactory before the timeout should pass the successfully created service through") {
    new TimeoutFactoryHelper {
      val res = factory()
      assert(!res.isDefined)

      val service = mock[Service[String, String]]
      when(service.close(any[Time])).thenReturn(Future.Done)
      promise() = Return(service)

      assert(res.isDefined)
      assert(res.poll === Some(Return(service)))
    }
  }
}
| LithiumTD/finagle | finagle-core/src/test/scala/com/twitter/finagle/factory/TimeoutFactoryTest.scala | Scala | apache-2.0 | 2,447 |
import sbt._
import Keys._
package systemz {

  /** Shared project coordinates and compiler flags applied to every module. */
  object Parent {
    def version = "0.1.0"
    def organization = "functionalops"
    def name = "systemz"
    def scalaVersion = "2.11.6"
    // Strict compilation: warnings fatal, lint on, UTF-8 sources.
    def scalacOptions = Seq(
      "-feature",
      "-unchecked",
      "-deprecation",
      "-Xfatal-warnings",
      "-Xlint",
      "-encoding",
      "utf8"
    )
    def license = ("BSD", url("https://github.com/functionalops/systemz/blob/master/LICENSE"))
  }

  /** Pinned dependency versions referenced by the module build definitions. */
  object Versions {
    def scalaz = "7.1.1"
    def parboiled = "1.1.6"
    def scalatest = "2.2.1"
    def scalazStream = "0.7a"
    def scodecCore = "1.6.0"
  }

  /** Multi-module build: `core` is depended upon by every other module. */
  object Build extends Build {

    /* default options at parent level */
    // NOTE(review): Defaults.defaultSettings and the Build trait are
    // deprecated in newer sbt releases; fine for the sbt version this build
    // targets — revisit on upgrade.
    lazy val defaultSettings =
      Defaults.defaultSettings ++
        Seq(
          version := Parent.version,
          organization := Parent.organization,
          scalaVersion := Parent.scalaVersion,
          scalacOptions := Parent.scalacOptions,
          licenses += Parent.license,
          publishTo := Some("Systemz Bintray Repo" at "https://dl.bintray.com/functionalops/systemz"),
          resolvers := Seq("Scalaz Bintray Repo" at "http://dl.bintray.com/scalaz/releases")
        )

    /* aggregate/subproject spec */
    // The parent aggregates every module so common tasks fan out across them.
    lazy val parent = Project("systemz",
      file("."),
      settings = defaultSettings
    )
      .aggregate(core, cloud, management, examples)

    lazy val core = Project("systemz-core",
      file("core"),
      settings = defaultSettings)

    lazy val cloud = Project("systemz-cloud",
      file("cloud"),
      settings = defaultSettings).dependsOn(core)

    lazy val management = Project("systemz-management",
      file("management"),
      settings = defaultSettings).dependsOn(core)

    lazy val examples = Project("systemz-examples",
      file("examples"),
      settings = defaultSettings).dependsOn(core)
  }
}
| functionalops/systemz | project/Systemz.scala | Scala | bsd-3-clause | 1,925 |
package com.rasterfoundry.database
import com.rasterfoundry.common.Generators.Implicits._
import com.rasterfoundry.datamodel._
import doobie.implicits._
import org.scalacheck.Prop.forAll
import org.scalatest._
import org.scalatestplus.scalacheck.Checkers
/** Property-based integration tests for ThumbnailDao (doobie + ScalaCheck).
  * Each test builds the prerequisite user/org/platform/scene rows via
  * `insertUserOrgPlatScene`, then exercises one DAO operation inside a
  * single transaction against the test transactor `xa`.
  */
class ThumbnailDaoSpec
    extends FunSuite
    with Matchers
    with Checkers
    with DBTestConfig
    with PropTestHelpers {

  // Smoke test: listing must succeed even when the table is empty.
  test("list thumbnails") {
    ThumbnailDao.query.list.transact(xa).unsafeRunSync.length should be >= 0
  }

  // Insert round-trip: the caller-visible fields survive persistence.
  test("insert a thumbnail") {
    check {
      forAll {
        (org: Organization.Create,
         user: User.Create,
         platform: Platform,
         scene: Scene.Create,
         thumbnail: Thumbnail) =>
          {
            // Attach the generated thumbnail to the freshly persisted scene
            // (fixupThumbnail rewrites the foreign keys) before inserting.
            val thumbnailInsertIO = insertUserOrgPlatScene(user,
                                                           org,
                                                           platform,
                                                           scene) flatMap {
              case (_: Organization,
                    _: User,
                    _: Platform,
                    dbScene: Scene.WithRelated) => {
                ThumbnailDao.insert(fixupThumbnail(dbScene, thumbnail))
              }
            }
            val insertedThumbnail =
              thumbnailInsertIO.transact(xa).unsafeRunSync
            insertedThumbnail.widthPx == thumbnail.widthPx &&
            insertedThumbnail.heightPx == thumbnail.heightPx &&
            insertedThumbnail.url == thumbnail.url &&
            insertedThumbnail.thumbnailSize == thumbnail.thumbnailSize
          }
      }
    }
  }

  // Bulk insert: insertMany reports one affected row per input thumbnail.
  test("insert many thumbnails") {
    check {
      forAll {
        (org: Organization.Create,
         user: User.Create,
         platform: Platform,
         scene: Scene.Create,
         thumbnails: List[Thumbnail]) =>
          {
            val thumbnailsInsertIO = insertUserOrgPlatScene(user,
                                                            org,
                                                            platform,
                                                            scene) flatMap {
              case (_: Organization,
                    _: User,
                    _: Platform,
                    dbScene: Scene.WithRelated) => {
                ThumbnailDao.insertMany(thumbnails map {
                  fixupThumbnail(dbScene, _)
                })
              }
            }
            thumbnailsInsertIO.transact(xa).unsafeRunSync == thumbnails.length
          }
      }
    }
  }

  // Update: exactly one row is affected and the re-read row carries the new values.
  test("update a thumbnail") {
    check {
      forAll {
        (org: Organization.Create,
         user: User.Create,
         platform: Platform,
         scene: Scene.Create,
         insertThumbnail: Thumbnail,
         updateThumbnail: Thumbnail) =>
          {
            val thumbnailInsertIO = insertUserOrgPlatScene(user,
                                                           org,
                                                           platform,
                                                           scene) flatMap {
              case (_: Organization,
                    _: User,
                    _: Platform,
                    dbScene: Scene.WithRelated) => {
                ThumbnailDao.insert(fixupThumbnail(dbScene, insertThumbnail))
              }
            }
            val thumbnailUpdateWithThumbnailIO = thumbnailInsertIO flatMap {
              (dbThumbnail: Thumbnail) =>
                {
                  // Preserve the id and scene FK of the inserted row; only the
                  // mutable fields come from the generated update record.
                  val withFks = updateThumbnail.copy(
                    id = dbThumbnail.id,
                    sceneId = dbThumbnail.sceneId
                  )
                  ThumbnailDao.update(withFks, dbThumbnail.id) flatMap {
                    (affectedRows: Int) =>
                      {
                        ThumbnailDao.unsafeGetThumbnailById(dbThumbnail.id) map {
                          (affectedRows, _)
                        }
                      }
                  }
                }
            }
            val (affectedRows, updatedThumbnail) =
              thumbnailUpdateWithThumbnailIO.transact(xa).unsafeRunSync
            affectedRows == 1 &&
            updatedThumbnail.widthPx == updateThumbnail.widthPx &&
            updatedThumbnail.heightPx == updateThumbnail.heightPx &&
            updatedThumbnail.url == updateThumbnail.url &&
            updatedThumbnail.thumbnailSize == updateThumbnail.thumbnailSize
          }
      }
    }
  }
}
| azavea/raster-foundry | app-backend/db/src/test/scala/com/azavea/rf/database/ThumbnailDaoSpec.scala | Scala | apache-2.0 | 4,493 |
/*
* Copyright 2015 University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.utils
import java.net.URLDecoder
import scalismo.ScalismoTestSuite
import scalismo.geometry._2D
import scalismo.io.{ImageIO, MeshIO}
/** Round-trip tests for scalismo <-> VTK data-structure conversions.
  * Each test loads a fixture from the test classpath (URL-decoded because the
  * path may contain percent-escaped characters), converts to the VTK
  * representation and back, and checks the result equals the original.
  */
class ConversionTests extends ScalismoTestSuite {
  describe("a Mesh ") {
    it("can be converted to and from vtk") {
      val path = getClass.getResource("/facemesh.stl").getPath
      val origmesh = MeshIO.readMesh(new java.io.File(URLDecoder.decode(path, "UTF-8"))).get
      val vtkpd = MeshConversion.meshToVtkPolyData(origmesh)
      val restoredMesh = MeshConversion.vtkPolyDataToTriangleMesh(vtkpd).get
      origmesh should equal(restoredMesh)

      // test conversion with template: reusing an existing vtkPolyData must
      // produce the same round-trip result as converting from scratch
      val vtkpd2 = MeshConversion.meshToVtkPolyData(origmesh, Some(vtkpd))
      val restoredMesh2 = MeshConversion.vtkPolyDataToTriangleMesh(vtkpd2).get
      origmesh should equal(restoredMesh2)
    }
  }

  describe("an 2D image") {
    it("can be converted to and from vtk") {
      val path = getClass.getResource("/lena.vtk").getPath
      val origimg = ImageIO.read2DScalarImage[Short](new java.io.File(URLDecoder.decode(path, "UTF-8"))).get
      val vtksp = ImageConversion.imageToVtkStructuredPoints(origimg)
      val restoredImg = ImageConversion.vtkStructuredPointsToScalarImage[_2D, Short](vtksp).get
      origimg should equal(restoredImg)
    }
  }

  describe("a tetrahedral mesh ") {
    it("can be converted to and from vtk") {
      val path = getClass.getResource("/tetraMesh.vtk").getPath
      val origmesh = MeshIO.readTetrahedralMesh(new java.io.File(URLDecoder.decode(path, "UTF-8"))).get
      val vtkug = TetrahedralMeshConversion.tetrahedralMeshToVTKUnstructuredGrid(origmesh)
      val restoredMesh = TetrahedralMeshConversion.vtkUnstructuredGridToTetrahedralMesh(vtkug).get
      origmesh should equal(restoredMesh)

      // test conversion with template (same reuse check as for triangle meshes)
      val vtkug2 = TetrahedralMeshConversion.tetrahedralMeshToVTKUnstructuredGrid(origmesh, Some(vtkug))
      val restoredMesh2 = TetrahedralMeshConversion.vtkUnstructuredGridToTetrahedralMesh(vtkug2).get
      origmesh should equal(restoredMesh2)
    }
  }
}
| unibas-gravis/scalismo | src/test/scala/scalismo/utils/ConversionTests.scala | Scala | apache-2.0 | 2,744 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.optimize
import cc.factorie._
import cc.factorie.la._
import cc.factorie.model.WeightsSet
import cc.factorie.util.{FastLogging, _}
/**
 * Learns the parameters of a Model by processing the gradients and values from a collection of Examples.
 * @author Alexandre Passos
 */
trait Trainer {
  /**
   * Process the examples once.
   * @param examples Examples to be processed
   */
  def processExamples(examples: Iterable[Example]): Unit
  /** Would more training help? */
  def isConverged: Boolean
  /** Repeatedly process the examples until training has converged.
    * Makes repeated passes over the same collection; convergence is
    * whatever the concrete trainer's isConverged reports. */
  def trainFromExamples(examples: Iterable[Example]): Unit = while (!isConverged) processExamples(examples)
}
/**
 * Learns the parameters of a Model by summing the gradients and values of all Examples,
 * and passing them to a GradientOptimizer (such as ConjugateGradient or LBFGS).
 * @param weightsSet The parameters to be optimized
 * @param optimizer The optimizer
 * @author Alexandre Passos
 */
class BatchTrainer(val weightsSet: WeightsSet, val optimizer: GradientOptimizer = new LBFGS with L2Regularization, val maxIterations: Int = -1) extends Trainer with FastLogging {
  var iteration = 0
  // Accumulators are allocated once and zeroed at the start of each pass.
  val gradientAccumulator = new LocalWeightsMapAccumulator(weightsSet.blankDenseMap)
  val valueAccumulator = new LocalDoubleAccumulator(0.0)
  // TODO This is sad: The optimizer determines which of gradient/value/margin it needs, but we don't know here
  // so we create them all, possibly causing the Example to do more work.
  def processExamples(examples: Iterable[Example]): Unit = {
    iteration += 1
    if (isConverged) return
    gradientAccumulator.tensorSet.zero()
    valueAccumulator.value = 0.0
    val startTime = System.currentTimeMillis
    // Sum value and gradient over the full batch, then take a single optimizer step.
    examples.foreach(example => example.accumulateValueAndGradient(valueAccumulator, gradientAccumulator))
    val ellapsedTime = System.currentTimeMillis - startTime
    logger.info(TrainerHelpers.getBatchTrainerStatus(gradientAccumulator.tensorSet.oneNorm, valueAccumulator.value, ellapsedTime))
    optimizer.step(weightsSet, gradientAccumulator.tensorSet, valueAccumulator.value)
  }
  // maxIterations == -1 means "no iteration cap"; defer entirely to the optimizer.
  def isConverged = (maxIterations != -1 && iteration >= maxIterations) || optimizer.isConverged
}
/**
 * Learns the parameters of a model by computing the gradient and calling the
 * optimizer one example at a time.
 * @param weightsSet The parameters to be optimized
 * @param optimizer The optimizer
 * @param maxIterations The maximum number of iterations until reporting convergence
 * @param logEveryN After this many examples a log will be printed. If set to -1 10 logs will be printed.
 * @author Alexandre Passos
 */
class OnlineTrainer(val weightsSet: WeightsSet, val optimizer: GradientOptimizer = new AdaGrad, val maxIterations: Int = 3, var logEveryN: Int = -1) extends Trainer with util.FastLogging {
  var iteration = 0
  val valueAccumulator = new LocalDoubleAccumulator
  override def processExamples(examples: Iterable[Example]): Unit = {
    // logEveryN == -1 (the default) resolves to roughly 10 log lines per pass.
    if (logEveryN == -1) logEveryN = math.max(100, examples.size / 10)
    iteration += 1
    var valuesSeenSoFar = 0.0
    var timePerIteration = 0L
    var i = 0
    val iter = examples.iterator
    while (iter.hasNext) {
      val example = iter.next()
      // A fresh gradient accumulator per example; the value accumulator is reused.
      val gradientAccumulator = new SmartGradientAccumulator
      if ((logEveryN != 0) && (i % logEveryN == 0) && (i != 0)) {
        logger.info(TrainerHelpers.getOnlineTrainerStatus(i, logEveryN, timePerIteration, valuesSeenSoFar))
        valuesSeenSoFar = 0.0
        timePerIteration = 0
      }
      val t0 = System.currentTimeMillis()
      gradientAccumulator.clear()
      valueAccumulator.value = 0
      example.accumulateValueAndGradient(valueAccumulator, gradientAccumulator)
      valuesSeenSoFar += valueAccumulator.value
      // One optimizer step per example (stochastic updates).
      optimizer.step(weightsSet, gradientAccumulator.getMap, valueAccumulator.value)
      timePerIteration += System.currentTimeMillis() - t0
      i+=1
    }
  }
  def isConverged = iteration >= maxIterations
}
/** Train using one trainer until it has converged, and then switch to a second trainer.
    Typical use is to first train with an online stochastic gradient ascent such as
    OnlineTrainer and AdaGrad, and then a batch trainer, like BatchTrainer and LBFGS.
    @author Alexandre Passos */
class TwoStageTrainer(firstTrainer: Trainer, secondTrainer: Trainer) {
  /** Forwards the examples to the first trainer while it is still making
      progress, and to the second trainer once the first has converged. */
  def processExamples(examples: Iterable[Example]) {
    val stage = if (firstTrainer.isConverged) secondTrainer else firstTrainer
    stage.processExamples(examples)
  }
  /** Converged only when both stages report convergence. */
  def isConverged = firstTrainer.isConverged && secondTrainer.isConverged
}
/** This parallel batch trainer keeps a single gradient in memory and locks accesses to it.
    It is useful when computing the gradient in each example is more expensive than
    adding this gradient to the accumulator.
    If it performs slowly then mini-batches should help, or the ThreadLocalBatchTrainer.
    @author Alexandre Passos */
class ParallelBatchTrainer(val weightsSet: WeightsSet, val optimizer: GradientOptimizer = new LBFGS with L2Regularization, val nThreads: Int = Runtime.getRuntime.availableProcessors(), val maxIterations: Int = -1)
  extends Trainer with FastLogging {
  var iteration = 0
  // Synchronized accumulators: many threads add into one shared gradient/value.
  val gradientAccumulator = new SynchronizedWeightsMapAccumulator(weightsSet.blankDenseMap)
  val valueAccumulator = new SynchronizedDoubleAccumulator
  def processExamples(examples: Iterable[Example]): Unit = {
    iteration += 1
    if (isConverged) return
    gradientAccumulator.l.tensorSet.zero()
    valueAccumulator.l.value = 0
    val startTime = System.currentTimeMillis
    // Examples are processed in parallel; each accumulation acquires the shared lock.
    util.Threading.parForeach(examples.toSeq, nThreads)(_.accumulateValueAndGradient(valueAccumulator, gradientAccumulator))
    val ellapsedTime = System.currentTimeMillis - startTime
    logger.info(TrainerHelpers.getBatchTrainerStatus(gradientAccumulator.l.tensorSet.oneNorm, valueAccumulator.l.value, ellapsedTime))
    optimizer.step(weightsSet, gradientAccumulator.tensorSet, valueAccumulator.l.value)
  }
  // maxIterations == -1 means no cap; defer to the optimizer's own convergence test.
  def isConverged = (maxIterations != -1 && iteration >= maxIterations) || optimizer.isConverged
}
/** This parallel batch trainer keeps a per-thread gradient to which examples add weights.
    It is useful when there is a very large number of examples, processing each example is
    fast, and the weights are not too big, as it has to keep one copy of the weights per thread.
    @author Alexandre Passos */
class ThreadLocalBatchTrainer(val weightsSet: WeightsSet, val optimizer: GradientOptimizer = new LBFGS with L2Regularization, numThreads: Int = Runtime.getRuntime.availableProcessors()) extends Trainer with FastLogging {
  def processExamples(examples: Iterable[Example]): Unit = {
    if (isConverged) return
    // One accumulator pair per worker thread; no locking during accumulation.
    val gradientAccumulator = new ThreadLocal(new LocalWeightsMapAccumulator(weightsSet.blankDenseMap))
    val valueAccumulator = new ThreadLocal(new LocalDoubleAccumulator)
    val startTime = System.currentTimeMillis
    util.Threading.parForeach(examples, numThreads)(example => example.accumulateValueAndGradient(valueAccumulator.get, gradientAccumulator.get))
    // Merge the per-thread partial sums into a single gradient/value before the step.
    val grad = gradientAccumulator.instances.reduce((l, r) => { l.combine(r); l }).tensorSet
    val value = valueAccumulator.instances.reduce((l, r) => { l.combine(r); l }).value
    val ellapsedTime = System.currentTimeMillis - startTime
    logger.info(TrainerHelpers.getBatchTrainerStatus(grad.oneNorm, value, ellapsedTime))
    optimizer.step(weightsSet, grad, value)
  }
  def isConverged = optimizer.isConverged
}
/** This uses read-write locks on the tensors to ensure consistency while doing
    parallel online training.
    The guarantee is that while the examples read each tensor they will see a consistent
    state, but this might not be the state the gradients will get applied to.
    The optimizer, however, has no consistency guarantees across tensors.
    @author Alexandre Passos */
class ParallelOnlineTrainer(weightsSet: WeightsSet, val optimizer: GradientOptimizer, val maxIterations: Int = 3, var logEveryN: Int = -1, val nThreads: Int = Runtime.getRuntime.availableProcessors())
  extends Trainer with FastLogging {
  var iteration = 0
  var initialized = false
  var examplesProcessed = 0
  var accumulatedValue = 0.0
  var t0 = 0L

  // Runs on worker threads; only the logging bookkeeping is synchronized.
  private def processExample(e: Example) {
    val gradientAccumulator = new SmartGradientAccumulator
    val value = new LocalDoubleAccumulator()
    e.accumulateValueAndGradient(value, gradientAccumulator)
    // The following line will effectively call makeReadable on all the sparse tensors before acquiring the lock
    val gradient = gradientAccumulator.getMap
    gradient.tensors.foreach({ case t: SparseIndexedTensor => t.apply(0); case _ => })
    optimizer.step(weightsSet, gradient, value.value)
    this synchronized {
      examplesProcessed += 1
      accumulatedValue += value.value
      if (logEveryN != 0 && examplesProcessed % logEveryN == 0) {
        val accumulatedTime = System.currentTimeMillis() - t0
        logger.info(TrainerHelpers.getOnlineTrainerStatus(examplesProcessed, logEveryN, accumulatedTime, accumulatedValue))
        t0 = System.currentTimeMillis()
        accumulatedValue = 0
      }
    }
  }

  def processExamples(examples: Iterable[Example]) {
    // Lazily wrap all weight tensors in read-write-locked proxies on first use.
    if (!initialized) replaceTensorsWithLocks()
    t0 = System.currentTimeMillis()
    examplesProcessed = 0
    accumulatedValue = 0.0
    if (logEveryN == -1) logEveryN = math.max(100, examples.size / 10)
    iteration += 1
    util.Threading.parForeach(examples.toSeq, nThreads)(processExample(_))
  }

  def isConverged = iteration >= maxIterations

  // NOTE(review): this match has no default case, so a weight tensor of any other
  // rank/type would throw a MatchError here -- confirm all weights are Tensor1-4.
  def replaceTensorsWithLocks() {
    for (key <- weightsSet.keys) {
      key.value match {
        case t: Tensor1 => weightsSet(key) = new LockingTensor1(t)
        case t: Tensor2 => weightsSet(key) = new LockingTensor2(t)
        case t: Tensor3 => weightsSet(key) = new LockingTensor3(t)
        case t: Tensor4 => weightsSet(key) = new LockingTensor4(t)
      }
    }
    initialized = true
  }

  // NOTE(review): non-exhaustive match; calling this before replaceTensorsWithLocks()
  // (or calling it twice) would throw a MatchError on the first unwrapped tensor.
  def removeLocks() {
    for (key <- weightsSet.keys) {
      key.value match {
        case t: LockingTensor => weightsSet(key) = t.base
      }
    }
  }

  // Proxy that guards every read of `base` with a read lock and every mutation
  // with a write lock. Only per-tensor consistency is provided (see class doc).
  private trait LockingTensor extends Tensor with SparseDoubleSeq {
    val base: Tensor
    def activeDomainSize = lock.withReadLock { base.activeDomainSize }
    override def foreachActiveElement(f: (Int, Double) => Unit) { lock.withReadLock(base.foreachActiveElement(f)) }
    val lock = new util.RWLock
    def activeDomain = base.activeDomain
    def isDense = base.isDense
    def zero() { lock.withWriteLock(base.zero())}
    def +=(i: Int, incr: Double) { lock.withWriteLock( base.+=(i,incr))}
    override def +=(i: DoubleSeq, v: Double) = lock.withWriteLock(base.+=(i,v))
    def dot(ds: DoubleSeq) = lock.withReadLock(base.dot(ds))
    def update(i: Int, v: Double) { lock.withWriteLock(base.update(i,v)) }
    def apply(i: Int) = lock.withReadLock(base.apply(i))
    override def *=(d:Double): Unit = lock.withWriteLock { base *= d}
    override def *=(ds:DoubleSeq): Unit = lock.withWriteLock { base *= ds }
    override def /=(ds:DoubleSeq): Unit = lock.withWriteLock { base /= ds }
  }
  private class LockingTensor1(val base: Tensor1) extends Tensor1 with LockingTensor {
    def dim1 = base.dim1
    override def copy = lock.withReadLock { base.copy }
  }
  private class LockingTensor2(val base: Tensor2) extends Tensor2 with LockingTensor {
    def dim1 = base.dim1
    def dim2 = base.dim2
    def activeDomain1 = lock.withReadLock(base.activeDomain1)
    def activeDomain2 = lock.withReadLock(base.activeDomain2)
    override def *(other: Tensor1) = lock.withReadLock(base * other)
    override def leftMultiply(other: Tensor1) = lock.withReadLock(base leftMultiply other)
    override def copy = lock.withReadLock { base.copy }
  }
  private class LockingTensor3(val base: Tensor3) extends Tensor3 with LockingTensor {
    def dim1 = base.dim1
    def dim2 = base.dim2
    def dim3 = base.dim3
    def activeDomain1 = lock.withReadLock(base.activeDomain1)
    def activeDomain2 = lock.withReadLock(base.activeDomain2)
    def activeDomain3 = lock.withReadLock(base.activeDomain3)
    override def copy = lock.withReadLock { base.copy }
  }
  private class LockingTensor4(val base: Tensor4) extends Tensor4 with LockingTensor {
    def dim1 = base.dim1
    def dim2 = base.dim2
    def dim3 = base.dim3
    def dim4 = base.dim4
    def activeDomain1 = lock.withReadLock(base.activeDomain1)
    def activeDomain2 = lock.withReadLock(base.activeDomain2)
    def activeDomain3 = lock.withReadLock(base.activeDomain3)
    def activeDomain4 = lock.withReadLock(base.activeDomain4)
    override def copy = lock.withReadLock { base.copy }
  }
}
/** This online trainer synchronizes only on the optimizer, so reads on the weights
    can be done while they are being written to.
    It provides orthogonal guarantees than the ParallelOnlineTrainer, as the examples can have
    inconsistent reads from the same tensor but the optimizer will always
    have a consistent view of all tensors.
    @author Alexandre Passos */
class SynchronizedOptimizerOnlineTrainer(val weightsSet: WeightsSet, val optimizer: GradientOptimizer, val nThreads: Int = Runtime.getRuntime.availableProcessors(), val maxIterations: Int = 3, var logEveryN : Int = -1)
  extends Trainer with FastLogging {
  var examplesProcessed = 0
  var accumulatedValue = 0.0
  var t0 = System.currentTimeMillis()
  private def processExample(e: Example): Unit = {
    val gradientAccumulator = new SmartGradientAccumulator
    val value = new LocalDoubleAccumulator()
    e.accumulateValueAndGradient(value, gradientAccumulator)
    // The following line will effectively call makeReadable on all the sparse tensors before acquiring the lock
    val gradient = gradientAccumulator.getMap
    gradient.tensors.foreach({ case t: SparseIndexedTensor => t.apply(0); case _ => })
    // The optimizer step and the logging bookkeeping share one monitor.
    // NOTE(review): unlike OnlineTrainer, there is no logEveryN != 0 guard here,
    // so an explicit logEveryN of 0 would divide by zero -- confirm callers never pass 0.
    optimizer synchronized {
      optimizer.step(weightsSet, gradient, value.value)
      examplesProcessed += 1
      accumulatedValue += value.value
      if (examplesProcessed % logEveryN == 0) {
        val accumulatedTime = System.currentTimeMillis() - t0
        logger.info(TrainerHelpers.getOnlineTrainerStatus(examplesProcessed, logEveryN, accumulatedTime, accumulatedValue))
        t0 = System.currentTimeMillis()
        accumulatedValue = 0
      }
    }
  }
  var iteration = 0
  def processExamples(examples: Iterable[Example]): Unit = {
    if (logEveryN == -1) logEveryN = math.max(100, examples.size / 10)
    iteration += 1
    t0 = System.currentTimeMillis()
    examplesProcessed = 0
    accumulatedValue = 0.0
    util.Threading.parForeach(examples.toSeq, nThreads)(processExample(_))
  }
  def isConverged = iteration >= maxIterations
}
/**
 * A parallel online trainer which has no locks or synchronization.
 * Only use this if you know what you're doing.
 * @param weightsSet The parameters to optimize
 * @param optimizer The optimizer
 * @param nThreads How many threads to use
 * @param maxIterations The maximum number of iterations
 * @param logEveryN How often to log.
 * @param locksForLogging Whether to lock around logging. Disabling this might make logging not work at all.
 * @author Alexandre Passos
 */
class HogwildTrainer(val weightsSet: WeightsSet, val optimizer: GradientOptimizer, val nThreads: Int = Runtime.getRuntime.availableProcessors(), val maxIterations: Int = 3, var logEveryN : Int = -1, val locksForLogging: Boolean = true)
  extends Trainer with FastLogging {
  var examplesProcessed = 0
  var accumulatedValue = 0.0
  var t0 = System.currentTimeMillis()
  // Used only to serialize the logging counters (when locksForLogging is set);
  // weight updates themselves are deliberately unsynchronized (Hogwild!-style).
  val lock = new util.RWLock
  private def processExample(e: Example): Unit = {
    val gradientAccumulator = new SmartGradientAccumulator
    val value = new LocalDoubleAccumulator()
    e.accumulateValueAndGradient(value, gradientAccumulator)
    // No lock around the step: concurrent, possibly-conflicting writes are accepted.
    optimizer.step(weightsSet, gradientAccumulator.getMap, value.value)
    if (locksForLogging) lock.writeLock()
    try {
      examplesProcessed += 1
      accumulatedValue += value.value
      if (examplesProcessed % logEveryN == 0) {
        val accumulatedTime = System.currentTimeMillis() - t0
        logger.info(TrainerHelpers.getOnlineTrainerStatus(examplesProcessed, logEveryN, accumulatedTime, accumulatedValue))
        t0 = System.currentTimeMillis()
        accumulatedValue = 0
      }
    } finally {
      if (locksForLogging) lock.writeUnlock()
    }
  }
  var iteration = 0
  def processExamples(examples: Iterable[Example]): Unit = {
    if (logEveryN == -1) logEveryN = math.max(100, examples.size / 10)
    iteration += 1
    t0 = System.currentTimeMillis()
    examplesProcessed = 0
    accumulatedValue = 0.0
    util.Threading.parForeach(examples.toSeq, nThreads)(processExample(_))
  }
  def isConverged = iteration >= maxIterations
}
/** Shared string-formatting helpers used by the trainers' progress logging. */
object TrainerHelpers {
  /** Renders a millisecond duration in the coarsest readable unit:
      milliseconds up to 5s, whole seconds up to 2 minutes, whole minutes beyond. */
  def getTimeString(ms: Long): String =
    if (ms <= 5000) s"$ms milliseconds"
    else if (ms <= 120000) f"${ms/1000}%d seconds"
    else f"${ms/60000}%d minutes"
  /** One-line status for batch trainers: gradient norm, objective value, elapsed time. */
  def getBatchTrainerStatus(gradNorm: => Double, value: => Double, ms: => Long) =
    f"GradientNorm: $gradNorm%-10g value $value%-10g ${getTimeString(ms)}%s"
  /** One-line status for online trainers: throughput and average objective per window. */
  def getOnlineTrainerStatus(examplesProcessed: Int, logEveryN: Int, accumulatedTime: Long, accumulatedValue: Double) =
    f"$examplesProcessed%20s examples at ${1000.0*logEveryN/accumulatedTime}%5.2f examples/sec. Average objective: ${accumulatedValue / logEveryN}%5.5f"
}
/** A collection of convenience methods for creating Trainers and running them with recommended default values.
    @author Alexandre Passos */
object Trainer {
  /**
   * Convenient function for training. Creates a trainer, trains until convergence, and evaluates after every iteration.
   * @param parameters The parameters to be optimized
   * @param examples The examples to train on
   * @param maxIterations The maximum number of iterations for training
   * @param evaluate The function for evaluation
   * @param optimizer The optimizer
   * @param useParallelTrainer Whether to use parallel training
   * @param useOnlineTrainer Whether to use online training
   * @param logEveryN How often to log, if using online training
   */
  def train(parameters: WeightsSet, examples: Seq[Example], maxIterations: Int, evaluate: () => Unit, optimizer: GradientOptimizer, useParallelTrainer: Boolean, useOnlineTrainer: Boolean, logEveryN: Int = -1, nThreads: Int = Runtime.getRuntime.availableProcessors(), miniBatch: Int)(implicit random: scala.util.Random) {
    parameters.keys.foreach(_.value) // make sure we initialize the values in a single thread
    optimizer.initializeWeights(parameters)
    // miniBatch == -1 disables mini-batching; otherwise examples are grouped.
    val actualEx: Seq[Example] = if (miniBatch == -1) examples else MiniBatchExample(miniBatch, examples).toSeq
    // Pick one of the four trainer variants from the two boolean flags.
    val trainer = if (useOnlineTrainer && useParallelTrainer) new ParallelOnlineTrainer(parameters, optimizer=optimizer, maxIterations=maxIterations, logEveryN=logEveryN, nThreads=nThreads)
    else if (useOnlineTrainer && !useParallelTrainer) new OnlineTrainer(parameters, optimizer=optimizer, maxIterations=maxIterations, logEveryN=logEveryN)
    else if (!useOnlineTrainer && useParallelTrainer) new ParallelBatchTrainer(parameters, optimizer=optimizer, maxIterations=maxIterations, nThreads=nThreads)
    else new BatchTrainer(parameters, optimizer=optimizer, maxIterations=maxIterations)
    trainer match { case t: ParallelOnlineTrainer => t.replaceTensorsWithLocks(); case _ => }
    try {
      while (!trainer.isConverged) {
        // Reshuffle every pass so online trainers see a new example order.
        trainer.processExamples(actualEx.shuffle)
        // Evaluate against the averaged weights, then restore the raw weights.
        optimizer match { case o: ParameterAveraging => o.setWeightsToAverage(parameters); case _ => }
        evaluate()
        optimizer match { case o: ParameterAveraging => o.unSetWeightsToAverage(parameters); case _ => }
      }
    } finally {
      trainer match { case t: ParallelOnlineTrainer => t.removeLocks(); case _ => }
      optimizer.finalizeWeights(parameters)
    }
  }
  /**
   * A convenient way to call Trainer.train() for online trainers.
   * @param parameters The parameters to be optimized
   * @param examples The examples
   * @param evaluate The evaluation function
   * @param useParallelTrainer Whether to train in parallel
   * @param maxIterations The maximum number of iterations
   * @param optimizer The optimizer
   * @param logEveryN How often to log
   */
  def onlineTrain(parameters: WeightsSet, examples: Seq[Example], evaluate: () => Unit = () => (), useParallelTrainer: Boolean=false, maxIterations: Int = 3, optimizer: GradientOptimizer = new AdaGrad with ParameterAveraging, logEveryN: Int = -1 ,nThreads: Int = Runtime.getRuntime.availableProcessors(), miniBatch: Int = -1)(implicit random: scala.util.Random) {
    train(parameters, examples, maxIterations, evaluate, optimizer, useParallelTrainer=useParallelTrainer, useOnlineTrainer=true, logEveryN=logEveryN, nThreads=nThreads, miniBatch)
  }
  /**
   * A convenient way to call Trainer.train() for batch training.
   * @param parameters The parameters to be optimized
   * @param examples The examples
   * @param evaluate The evaluation function
   * @param useParallelTrainer Whether to use a parallel trainer
   * @param maxIterations The maximum number of iterations
   * @param optimizer The optimizer
   */
  def batchTrain(parameters: WeightsSet, examples: Seq[Example], evaluate: () => Unit = () => (), useParallelTrainer: Boolean=true, maxIterations: Int = 200, optimizer: GradientOptimizer = new LBFGS with L2Regularization, nThreads: Int = Runtime.getRuntime.availableProcessors())(implicit random: scala.util.Random) {
    train(parameters, examples, maxIterations, evaluate, optimizer, useParallelTrainer=useParallelTrainer, useOnlineTrainer=false, nThreads=nThreads, miniBatch= -1)
  }
}
| patverga/factorie | src/main/scala/cc/factorie/optimize/Trainer.scala | Scala | apache-2.0 | 22,542 |
/*
* Copyright 2014 Claude Mamo
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package controllers
import play.api.mvc.{Controller, Action}
import play.api.libs.json._
import play.api.data.{Forms, Form}
import play.api.libs.concurrent.Akka
import common.Message
import play.api.data.Forms._
import scala.Some
import models.Setting
import play.api.Play.current
import play.Logger
/** Play controller exposing the settings collection as JSON. */
object Settings extends Controller {

  /** Accepts a JSON array of {key, value} objects and applies each update.
      Responds 200 on a well-formed array, 400 for any other body. */
  def update() = Action { request =>
    request.body.asJson match {
      case Some(JsArray(settings)) => {
        updateSettings(settings)
        Ok
      }
      case _ => BadRequest
    }
  }

  /** Returns every persisted setting serialized as a JSON array. */
  def index() = Action {
    Ok(Json.toJson(Setting.findAll))
  }

  /** Persists each setting and notifies the router actor after every update.
      NOTE(review): `\\` is play-json's *recursive* lookup (returning all matches);
      a single-field `\` lookup may have been intended here (the doubled backslash
      could also be an escaping artifact of how this source was captured) -- confirm. */
  def updateSettings(settings : Seq[JsValue]) {
    settings.map { s =>
      Setting.update(Setting(s.\\("key").as[String], s.\\("value").as[String]))
      Akka.system.actorSelection("akka://application/user/router") ! Message.SettingsUpdateNotification
    }
  }
}
| hivefans/kafka-web-console | app/controllers/Settings.scala | Scala | apache-2.0 | 1,465 |
package breeze.stats
import org.scalatest.FunSuite
import breeze.linalg._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**Tests for breeze.linalg.max.scala
 * Test for clip is currently located in "DenseVectorTest.scala"
 * @author ktakagaki
 * @date 3/13/14.
 */
@RunWith(classOf[JUnitRunner])
class histogramTest extends FunSuite {

  // Fixture: five values on [0, 5] and matching per-value weights.
  val testDV = DenseVector(0.0, 0.1, 2.8, 2.9, 5)
  val testWeights = DenseVector(0.5, 0.5, 1.0, 3.0, 7.0)

  test("histogram returns correct values") {
    // Three equal-width bins spanning [min, max] of the data.
    val result = hist(testDV, 3)
    assert( result.hist == DenseVector(2.0,2.0,1.0) )
    assert( result.binEdges == DenseVector(0.0, 5.0/3.0, 2*5.0/3.0, 5.0) )
  }

  test("histogram respects range argument") {
    // Values outside the explicit (0, 3) range are dropped from the counts.
    val result = hist(testDV, 3, (0.0, 3.0))
    assert( result.hist == DenseVector(2.0,0.0,2.0) )
    assert( result.binEdges == DenseVector(0.0, 1.0, 2.0, 3.0) )
  }

  test("histogram handles weights") {
    // Each bin accumulates the weights of its members rather than counts.
    val result = hist(testDV, 3, testWeights)
    assert( result.hist == DenseVector(1.0,4.0,7.0) )
    assert( result.binEdges == DenseVector(0.0, 5.0/3.0, 2*5.0/3.0, 5.0) )
  }

  test("fails for empty array") {
    intercept[IllegalArgumentException] {
      hist( DenseVector[Int]())
    }
  }

  test("negative values") {
    // Binning must be translation-invariant: shifting the data by +4
    // yields the same bin counts.
    val v_neg = DenseVector( -4,-3,-4, 1,1,1, 4,3,4 )
    val h_neg = hist(v_neg,3)
    assert(h_neg.hist == DenseVector(3,3,3))
    val v_ok = v_neg+4
    val h_ok = hist(v_ok,3)
    assert(h_ok.hist == DenseVector(3,3,3))
  }
}
| nikdon/breeze | math/src/test/scala/breeze/stats/histogramTest.scala | Scala | apache-2.0 | 1,502 |
package lila.monitor
import akka.actor._
import com.typesafe.config.Config
import lila.common.PimpedConfig._
/** Wires together the monitoring module: a websocket actor, its handler,
  * and a `Reporting` actor that is pinged every second to refresh stats. */
final class Env(
    config: Config,
    db: lila.db.Env,
    hub: lila.hub.Env,
    system: ActorSystem,
    scheduler: lila.common.Scheduler) {

  private val ActorName = config getString "actor.name"
  private val SocketName = config getString "socket.name"
  private val RpsIntervall = config duration "rps.interval"
  private val SocketUidTtl = config duration "socket.uid.ttl"

  // Lazy: the socket actor is only spawned when first referenced
  // (here via `reporting`, which is eager).
  private lazy val socket = system.actorOf(
    Props(new Socket(timeout = SocketUidTtl)), name = SocketName)

  lazy val socketHandler = new SocketHandler(socket, hub)

  val reporting = system.actorOf(
    Props(new Reporting(
      rpsProvider = rpsProvider,
      mpsProvider = mpsProvider,
      socket = socket,
      db = db,
      hub = hub
    )), name = ActorName)

  {
    import scala.concurrent.duration._
    // Ask the reporting actor to refresh its metrics once per second.
    scheduler.message(1 seconds) {
      reporting -> lila.hub.actorApi.monitor.Update
    }
  }

  // requests per second
  private lazy val rpsProvider = new RpsProvider(RpsIntervall)

  // moves per second
  private lazy val mpsProvider = new RpsProvider(RpsIntervall)
}
/** Application-wide singleton access point for the monitor module,
  * constructed lazily from the "monitor" section of the Play config. */
object Env {

  lazy val current = "[boot] monitor" describes new Env(
    config = lila.common.PlayApp loadConfig "monitor",
    db = lila.db.Env.current,
    hub = lila.hub.Env.current,
    system = lila.common.PlayApp.system,
    scheduler = lila.common.PlayApp.scheduler)
}
| danilovsergey/i-bur | modules/monitor/src/main/Env.scala | Scala | mit | 1,469 |
/*
* Happy Melly Teller
* Copyright (C) 2013 - 2016, Happy Melly http://www.happymelly.com
*
* This file is part of the Happy Melly Teller.
*
* Happy Melly Teller is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Happy Melly Teller is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Happy Melly Teller. If not, see <http://www.gnu.org/licenses/>.
*
* If you have questions concerning this license or the applicable additional
* terms, you may contact by email Sergey Kotlov, sergey.kotlov@happymelly.com or
* in writing Happy Melly One, Handelsplein 37, Rotterdam, The Netherlands, 3071 PR
*/
package models.repository.core.payment
import models.core.payment.Charge
import models.database.core.payment.ChargeTable
import play.api.Application
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfig}
import slick.driver.JdbcProfile
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/** Provides operations with database related to charges */
class ChargeRepository(app: Application) extends HasDatabaseConfig[JdbcProfile]
    with ChargeTable {

  val dbConfig = DatabaseConfigProvider.get[JdbcProfile](app)

  import driver.api._

  private val charges = TableQuery[Charges]

  /**
   * Checks whether a charge with the given remote id exists.
   *
   * @param chargeId Remote charge id
   */
  def exists(chargeId: String): Future[Boolean] = {
    val existsQuery = charges.filter(_.remoteId === chargeId).exists
    db.run(existsQuery.result)
  }

  /** Returns all charges that belong to the given customer. */
  def findByCustomer(customerId: Long): Future[Seq[Charge]] = {
    val byCustomer = charges.filter(_.customerId === customerId)
    db.run(byCustomer.result)
  }

  /**
   * Inserts the given record into the database.
   *
   * @param charge Object to insert
   * @return the same charge with its database-generated id filled in
   */
  def insert(charge: Charge): Future[Charge] = {
    val insertQuery =
      charges returning charges.map(_.id) into ((record, id) => record.copy(id = Some(id)))
    db.run(insertQuery += charge)
  }
}
| HappyMelly/teller | app/models/repository/core/payment/ChargeRepository.scala | Scala | gpl-3.0 | 2,369 |
package troy
package meta
import scala.annotation.compileTimeOnly
trait ParsingOps {
  /** Result type produced by the `as` overloads; bound by concrete DSL implementations. */
  type ParseAs[R]

  // The `as` overloads below are compile-time-only placeholders: their bodies
  // are `???` and, per the @compileTimeOnly message, calls are presumably
  // rewritten by the `troy.dsl.withSchema` macro — confirm in the macro code.
  // One overload exists per mapper arity, 0 through 22 (the generator
  // expression is kept in the comment below).

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[R](mapper: () => R): ParseAs[R] = ???

  // (1 to 22).map(1 to _).map(_.map(i => s"T$i").mkString(", ")).map(tstr => s"def as[$tstr, R](mapper: ($tstr) => R): MacroDSL[M[R]]").foreach(println)
  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, R](mapper: (T1) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, R](mapper: (T1, T2) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, R](mapper: (T1, T2, T3) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, R](mapper: (T1, T2, T3, T4) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, R](mapper: (T1, T2, T3, T4, T5) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, R](mapper: (T1, T2, T3, T4, T5, T6) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, R](mapper: (T1, T2, T3, T4, T5, T6, T7) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21) => R): ParseAs[R] = ???

  @compileTimeOnly("as can be called only inside troy.dsl.withSchema block")
  def as[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R](mapper: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22) => R): ParseAs[R] = ???
}
| schemasafe/troy | troy-meta/src/main/scala/troy/meta/ParsingOps.scala | Scala | apache-2.0 | 5,276 |
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.scala
import com.querydsl.codegen.utils.model.TypeCategory
import com.querydsl.codegen.TypeMappings
import com.querydsl.core.types.{Expression, Path}
/**
* Factory for TypeMappings creation
*
* @author tiwe
*
*/
object ScalaTypeMappings {
  /** Builds a fresh Scala-specific [[TypeMappings]] instance. */
  def create: TypeMappings = new ScalaTypeMappings
}
class ScalaTypeMappings extends TypeMappings {
  // Each call maps a querydsl type category to the Scala expression, path and
  // template classes used when generating query types for that category.
  // Categories with no dedicated Scala type (collections, maps, entities, …)
  // fall back to the generic Expression/SimplePath/SimpleTemplate classes.
  register(TypeCategory.STRING, classOf[StringExpression], classOf[StringPath], classOf[StringTemplate])
  register(TypeCategory.BOOLEAN, classOf[BooleanExpression], classOf[BooleanPath], classOf[BooleanTemplate])
  register(TypeCategory.COMPARABLE, classOf[ComparableExpression[_]], classOf[ComparablePath[_]], classOf[ComparableTemplate[_]])
  register(TypeCategory.ENUM, classOf[EnumExpression[_]], classOf[EnumPath[_]], classOf[EnumTemplate[_]])
  register(TypeCategory.DATE, classOf[DateExpression[_]], classOf[DatePath[_]], classOf[DateTemplate[_]])
  register(TypeCategory.DATETIME, classOf[DateTimeExpression[_]], classOf[DateTimePath[_]], classOf[DateTimeTemplate[_]])
  register(TypeCategory.TIME, classOf[TimeExpression[_]], classOf[TimePath[_]], classOf[TimeTemplate[_]])
  register(TypeCategory.NUMERIC, classOf[NumberExpression[_]], classOf[NumberPath[_]], classOf[NumberTemplate[_]])
  register(TypeCategory.SIMPLE, classOf[Expression[_]], classOf[SimplePath[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.ARRAY, classOf[Expression[_]], classOf[ArrayPath[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.COLLECTION, classOf[Expression[_]], classOf[SimplePath[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.SET, classOf[Expression[_]], classOf[SimplePath[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.LIST, classOf[Expression[_]], classOf[SimplePath[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.MAP, classOf[Expression[_]], classOf[SimplePath[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.CUSTOM, classOf[Expression[_]], classOf[Path[_]], classOf[SimpleTemplate[_]])
  register(TypeCategory.ENTITY, classOf[Expression[_]], classOf[Path[_]], classOf[SimpleTemplate[_]])
}
package com.danielwestheide.kontextfrei.rdd
import com.danielwestheide.kontextfrei.DCollectionOps
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
/** Mixin that supplies the RDD-backed [[DCollectionOps]] type-class instance. */
trait RDDOpsSupport {

  /** Builds DCollection operations for RDDs from the ambient [[SparkContext]]. */
  implicit def rddCollectionOps(
      implicit sparkContext: SparkContext): DCollectionOps[RDD] = {
    new RDDOps(sparkContext)
  }
}
object RDDOpsSupport extends RDDOpsSupport
| dwestheide/kontextfrei | core/src/main/scala/com/danielwestheide/kontextfrei/rdd/RDDOpsSupport.scala | Scala | apache-2.0 | 366 |
package common.db
import slick.lifted.CanBeQueryCondition
/**
* Created by Fedor Chekhonadskikh on 9/8/2016.
*/
object DBHelpers {

  /** Wraps a Slick query so optional filter criteria can be chained fluently. */
  case class MaybeFilter[X, Y](query: slick.lifted.Query[X, Y, Seq]) {

    /**
     * Narrows the wrapped query with `f` when `data` is present;
     * returns this wrapper unchanged when `data` is `None`.
     */
    def filter[T, R: CanBeQueryCondition](data: Option[T])(f: T => X => R) =
      data match {
        case Some(value) => MaybeFilter(query.withFilter(f(value)))
        case None => this
      }
  }
}
| astrofed/akka-rest-slick-flyway-auth-token | common/src/main/scala/common/db/DBHelpers.scala | Scala | mit | 373 |
package org.scalajs.openui5.sap.ui.ux3
import org.scalajs.openui5.sap.ui.core.Control
import scala.scalajs.js
import scala.scalajs.js.annotation.JSName
@JSName("sap.ui.ux3.NotificationBar")
@js.native
class NotificationBar(id: js.UndefOr[String] = js.native,
                      settings: js.UndefOr[js.Any] = js.native)
  extends Control {

  def this(id: String) = this(id, js.undefined)

  def this(settings: js.Any) = this(js.undefined, settings)

  /** Sets a new value for property `visibleStatus`.
    *
    * This property displays the bar corresponding to the given status.
    *
    * When called with a value of null or undefined, the default value of the
    * property is restored.
    *
    * Default value is [[NotificationBarStatus.Default]].
    *
    * @param visibleStatus new value for property `visibleStatus`
    * @return reference to this in order to allow method chaining
    */
  def setVisibleStatus(visibleStatus: NotificationBarStatus): this.type = js.native

  /** Sets the aggregated `messageNotifier`.
    *
    * @param messageNotifier the message notifier to set
    * @return reference to this in order to allow method chaining
    */
  def setMessageNotifier(messageNotifier: Notifier): this.type = js.native
}
| lastsys/scalajs-openui5 | src/main/scala/org/scalajs/openui5/sap/ui/ux3/NotificationBar.scala | Scala | mit | 1,245 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.random
import java.util.Random
import scala.reflect.ClassTag
import scala.collection.mutable.ArrayBuffer
import org.apache.commons.math3.distribution.PoissonDistribution
import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
* A pseudorandom sampler. It is possible to change the sampled item type. For example, we might
* want to add weights for stratified sampling or importance sampling. Should only use
* transformations that are tied to the sampler and cannot be applied after sampling.
*
* @tparam T item type
* @tparam U sampled item type
*/
@DeveloperApi
trait RandomSampler[T, U] extends Pseudorandom with Cloneable with Serializable {

  /** Take a random sample of the input items. */
  def sample(items: Iterator[T]): Iterator[U]

  /** Return a copy of the RandomSampler object. Base implementation always throws;
    * concrete samplers must override. */
  override def clone: RandomSampler[T, U] =
    throw new NotImplementedError("clone() is not implemented.")
}
private[spark]
object RandomSampler {

  /** Default random number generator used by random samplers. */
  def newDefaultRNG: Random = new XORShiftRandom

  /**
   * Default maximum gap-sampling fraction.
   * For sampling fractions <= this value, the gap sampling optimization will be applied.
   * Above this value, it is assumed that "traditional" Bernoulli sampling is faster. The
   * optimal value for this will depend on the RNG. More expensive RNGs will tend to make
   * the optimal value higher. The most reliable way to determine this value for a new RNG
   * is to experiment. When tuning for a new RNG, I would expect a value of 0.5 to be close
   * in most cases, as an initial guess.
   */
  val defaultMaxGapSamplingFraction = 0.4

  /**
   * Default epsilon for floating point numbers sampled from the RNG.
   * The gap-sampling compute logic requires taking log(x), where x is sampled from an RNG.
   * To guard against errors from taking log(0), a positive epsilon lower bound is applied.
   * A good value for this parameter is at or near the minimum positive floating
   * point value returned by "nextDouble()" (or equivalent), for the RNG being used.
   */
  val rngEpsilon = 5e-11

  /**
   * Sampling fraction arguments may be results of computation, and subject to floating
   * point jitter. I check the arguments with this epsilon slop factor to prevent spurious
   * warnings for cases such as summing some numbers to get a sampling fraction of 1.000000001
   */
  val roundingEpsilon = 1e-6
}
/**
* :: DeveloperApi ::
* A sampler based on Bernoulli trials for partitioning a data sequence.
*
* @param lb lower bound of the acceptance range
* @param ub upper bound of the acceptance range
* @param complement whether to use the complement of the range specified, default to false
* @tparam T item type
*/
@DeveloperApi
class BernoulliCellSampler[T](lb: Double, ub: Double, complement: Boolean = false)
  extends RandomSampler[T, T] {

  /** epsilon slop to avoid failure from floating point jitter. */
  require(
    lb <= (ub + RandomSampler.roundingEpsilon),
    s"Lower bound ($lb) must be <= upper bound ($ub)")
  require(
    lb >= (0.0 - RandomSampler.roundingEpsilon),
    s"Lower bound ($lb) must be >= 0.0")
  require(
    ub <= (1.0 + RandomSampler.roundingEpsilon),
    s"Upper bound ($ub) must be <= 1.0")

  private val rng: Random = new XORShiftRandom

  override def setSeed(seed: Long): Unit = rng.setSeed(seed)

  /** One uniform draw is consumed per input item; an item is kept when its
    * draw falls inside [lb, ub) — or outside that range when `complement`. */
  override def sample(items: Iterator[T]): Iterator[T] = {
    if (ub - lb <= 0.0) {
      // Empty acceptance range: complement keeps everything, otherwise nothing.
      if (complement) items else Iterator.empty
    } else if (complement) {
      items.filter { _ =>
        val draw = rng.nextDouble()
        draw < lb || draw >= ub
      }
    } else {
      items.filter { _ =>
        val draw = rng.nextDouble()
        draw >= lb && draw < ub
      }
    }
  }

  /**
   * Return a sampler that is the complement of the range specified of the current sampler.
   */
  def cloneComplement(): BernoulliCellSampler[T] =
    new BernoulliCellSampler[T](lb, ub, !complement)

  override def clone: BernoulliCellSampler[T] = new BernoulliCellSampler[T](lb, ub, complement)
}
/**
* :: DeveloperApi ::
* A sampler based on Bernoulli trials.
*
* @param fraction the sampling fraction, aka Bernoulli sampling probability
* @tparam T item type
*/
@DeveloperApi
class BernoulliSampler[T: ClassTag](fraction: Double) extends RandomSampler[T, T] {

  /** epsilon slop to avoid failure from floating point jitter */
  require(
    fraction >= (0.0 - RandomSampler.roundingEpsilon)
      && fraction <= (1.0 + RandomSampler.roundingEpsilon),
    s"Sampling fraction ($fraction) must be on interval [0, 1]")

  private val rng: Random = RandomSampler.newDefaultRNG

  override def setSeed(seed: Long): Unit = rng.setSeed(seed)

  /** Bernoulli sampling without replacement. The guards below are mutually
    * exclusive, so their order does not affect behavior. */
  override def sample(items: Iterator[T]): Iterator[T] = {
    if (fraction >= 1.0) {
      items
    } else if (fraction <= 0.0) {
      Iterator.empty
    } else if (fraction <= RandomSampler.defaultMaxGapSamplingFraction) {
      // Low fraction: gap sampling skips runs of rejected items in one step.
      new GapSamplingIterator(items, fraction, rng, RandomSampler.rngEpsilon)
    } else {
      items.filter(_ => rng.nextDouble() <= fraction)
    }
  }

  override def clone: BernoulliSampler[T] = new BernoulliSampler[T](fraction)
}
/**
* :: DeveloperApi ::
* A sampler for sampling with replacement, based on values drawn from Poisson distribution.
*
* @param fraction the sampling fraction (with replacement)
* @param useGapSamplingIfPossible if true, use gap sampling when sampling ratio is low.
* @tparam T item type
*/
@DeveloperApi
class PoissonSampler[T: ClassTag](
    fraction: Double,
    useGapSamplingIfPossible: Boolean) extends RandomSampler[T, T] {

  def this(fraction: Double) = this(fraction, useGapSamplingIfPossible = true)

  /** Epsilon slop to avoid failure from floating point jitter. */
  require(
    fraction >= (0.0 - RandomSampler.roundingEpsilon),
    s"Sampling fraction ($fraction) must be >= 0")

  // PoissonDistribution throws an exception when fraction <= 0.
  // If fraction is <= 0, Iterator.empty is used below, so any placeholder works.
  private val rng = new PoissonDistribution(if (fraction > 0.0) fraction else 1.0)
  private val rngGap = RandomSampler.newDefaultRNG

  override def setSeed(seed: Long): Unit = {
    rng.reseedRandomGenerator(seed)
    rngGap.setSeed(seed)
  }

  /** Sampling with replacement: each item is emitted a Poisson-distributed
    * number of times. */
  override def sample(items: Iterator[T]): Iterator[T] = {
    if (fraction <= 0.0) {
      Iterator.empty
    } else {
      val useGapSampling = useGapSamplingIfPossible &&
        fraction <= RandomSampler.defaultMaxGapSamplingFraction
      if (useGapSampling) {
        new GapSamplingReplacementIterator(items, fraction, rngGap, RandomSampler.rngEpsilon)
      } else {
        items.flatMap { item =>
          rng.sample() match {
            case 0 => Iterator.empty
            case n => Iterator.fill(n)(item)
          }
        }
      }
    }
  }

  override def clone: PoissonSampler[T] = new PoissonSampler[T](fraction, useGapSamplingIfPossible)
}
/**
 * Bernoulli "gap sampling" without replacement: instead of drawing one uniform
 * per element, the size of the gap to the next accepted element is drawn from a
 * geometric distribution and that many elements are skipped in a single step.
 */
private[spark]
class GapSamplingIterator[T: ClassTag](
    var data: Iterator[T],
    f: Double,
    rng: Random = RandomSampler.newDefaultRNG,
    epsilon: Double = RandomSampler.rngEpsilon) extends Iterator[T] {

  require(f > 0.0 && f < 1.0, s"Sampling fraction ($f) must reside on open interval (0, 1)")
  require(epsilon > 0.0, s"epsilon ($epsilon) must be > 0")

  /** implement efficient linear-sequence drop until Scala includes fix for jira SI-8835. */
  private val iterDrop: Int => Unit = {
    val arrayClass = Array.empty[T].iterator.getClass
    val arrayBufferClass = ArrayBuffer.empty[T].iterator.getClass
    data.getClass match {
      // Array/ArrayBuffer iterators drop efficiently; match on their exact class.
      case `arrayClass` =>
        (n: Int) => { data = data.drop(n) }
      case `arrayBufferClass` =>
        (n: Int) => { data = data.drop(n) }
      case _ =>
        // Generic fallback: consume n elements manually (avoids SI-8835 wrapper chains).
        (n: Int) => {
          var j = 0
          while (j < n && data.hasNext) {
            data.next()
            j += 1
          }
        }
    }
  }

  override def hasNext: Boolean = data.hasNext

  override def next(): T = {
    val r = data.next()
    // Skip ahead to the next sampled element so hasNext stays accurate.
    advance()
    r
  }

  // log(1 - f), precomputed for the geometric-gap computation below.
  private val lnq = math.log1p(-f)

  /** skip elements that won't be sampled, according to geometric dist P(k) = (f)(1-f)^k. */
  private def advance(): Unit = {
    // Clamp the draw away from 0 so log(u) is finite.
    val u = math.max(rng.nextDouble(), epsilon)
    // k = floor(log(u) / log(1-f)): number of elements to skip.
    val k = (math.log(u) / lnq).toInt
    iterDrop(k)
  }

  /** advance to first sample as part of object construction. */
  advance()
  // Attempting to invoke this closer to the top with other object initialization
  // was causing it to break in strange ways, so I'm invoking it last, which seems to
  // work reliably.
}
/**
 * Gap sampling with replacement: skips ahead geometrically to the next element
 * with a non-zero replication count, then emits that element a Poisson(>=1)
 * number of times.
 */
private[spark]
class GapSamplingReplacementIterator[T: ClassTag](
    var data: Iterator[T],
    f: Double,
    rng: Random = RandomSampler.newDefaultRNG,
    epsilon: Double = RandomSampler.rngEpsilon) extends Iterator[T] {

  require(f > 0.0, s"Sampling fraction ($f) must be > 0")
  require(epsilon > 0.0, s"epsilon ($epsilon) must be > 0")

  /** implement efficient linear-sequence drop until scala includes fix for jira SI-8835. */
  private val iterDrop: Int => Unit = {
    val arrayClass = Array.empty[T].iterator.getClass
    val arrayBufferClass = ArrayBuffer.empty[T].iterator.getClass
    data.getClass match {
      // Array/ArrayBuffer iterators drop efficiently; match on their exact class.
      case `arrayClass` =>
        (n: Int) => { data = data.drop(n) }
      case `arrayBufferClass` =>
        (n: Int) => { data = data.drop(n) }
      case _ =>
        // Generic fallback: consume n elements manually (avoids SI-8835 wrapper chains).
        (n: Int) => {
          var j = 0
          while (j < n && data.hasNext) {
            data.next()
            j += 1
          }
        }
    }
  }

  /** current sampling value, and its replication factor, as we are sampling with replacement. */
  private var v: T = _
  private var rep: Int = 0

  override def hasNext: Boolean = data.hasNext || rep > 0

  override def next(): T = {
    val r = v
    rep -= 1
    // Once the current value's replication count is exhausted, move to the next one.
    if (rep <= 0) advance()
    r
  }

  /**
   * Skip elements with replication factor zero (i.e. elements that won't be sampled).
   * Samples 'k' from geometric distribution P(k) = (1-q)(q)^k, where q = e^(-f), that is
   * q is the probability of Poisson(0; f)
   */
  private def advance(): Unit = {
    // Clamp the draw away from 0 so log(u) is finite.
    val u = math.max(rng.nextDouble(), epsilon)
    val k = (math.log(u) / (-f)).toInt
    iterDrop(k)
    // set the value and replication factor for the next value
    if (data.hasNext) {
      v = data.next()
      rep = poissonGE1
    }
  }

  // q = P(Poisson(f) == 0), precomputed for the conditional sampler below.
  private val q = math.exp(-f)

  /**
   * Sample from Poisson distribution, conditioned such that the sampled value is >= 1.
   * This is an adaptation from the algorithm for Generating Poisson distributed random variables:
   * http://en.wikipedia.org/wiki/Poisson_distribution
   */
  private def poissonGE1: Int = {
    // simulate that the standard poisson sampling
    // gave us at least one iteration, for a sample of >= 1
    var pp = q + ((1.0 - q) * rng.nextDouble())
    var r = 1

    // now continue with standard poisson sampling algorithm
    pp *= rng.nextDouble()
    while (pp > q) {
      r += 1
      pp *= rng.nextDouble()
    }
    r
  }

  /** advance to first sample as part of object construction. */
  advance()
  // Attempting to invoke this closer to the top with other object initialization
  // was causing it to break in strange ways, so I'm invoking it last, which seems to
  // work reliably.
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala | Scala | apache-2.0 | 12,088 |
package is.hail.expr.ir
import is.hail.types.virtual.TStream
import is.hail.utils.HailException
object FoldConstants {

  /**
   * Constant-folds `ir` bottom-up, interpreting inside a fresh scoped region
   * so memory allocated while evaluating subtrees is reclaimed afterwards.
   */
  def apply(ctx: ExecuteContext, ir: BaseIR): BaseIR =
    ExecuteContext.scopedNewRegion(ctx) { ctx =>
      foldConstants(ctx, ir)
    }

  // Replaces interpretable nodes whose children are all constants with a
  // Literal holding the evaluated value; all other nodes are left unchanged.
  private def foldConstants(ctx: ExecuteContext, ir: BaseIR): BaseIR =
    RewriteBottomUp(ir, {
      // Nodes that are never folded: bindings/inputs resolved elsewhere,
      // nondeterministic ops (seeded randomness, UUIDs), aggregations,
      // NDArray ops, and Die, which must fail at runtime rather than here.
      case _: Ref |
           _: In |
           _: RelationalRef |
           _: RelationalLet |
           _: ApplySeeded |
           _: UUID4 |
           _: ApplyAggOp |
           _: ApplyScanOp |
           _: AggLet |
           _: Begin |
           _: MakeNDArray |
           _: NDArrayShape |
           _: NDArrayReshape |
           _: NDArrayConcat |
           _: NDArraySlice |
           _: NDArrayFilter |
           _: NDArrayMap |
           _: NDArrayMap2 |
           _: NDArrayReindex |
           _: NDArrayAgg |
           _: NDArrayWrite |
           _: NDArrayMatMul |
           _: Die => None
      // Streams are handled by later lowering; never materialize them here.
      case ir: IR if ir.typ.isInstanceOf[TStream] => None
      // Fold an interpretable, non-constant node whose children are all constants.
      case ir: IR if !IsConstant(ir) &&
        Interpretable(ir) &&
        ir.children.forall {
          case c: IR => IsConstant(c)
          case _ => false
        } =>
        try {
          Some(Literal.coerce(ir.typ, Interpret.alreadyLowered(ctx, ir)))
        } catch {
          // Evaluation failed (e.g. a runtime error surfaced); keep the node as-is.
          case _: HailException => None
        }
      case _ => None
    })
}
| danking/hail | hail/src/main/scala/is/hail/expr/ir/FoldConstants.scala | Scala | mit | 1,401 |
/* Copyright 2009-2021 EPFL, Lausanne */
/** Verification benchmark exercising tuple construction and the `_1`/`_2`/`_3` accessors. */
object MyTuple1 {

  def foo(): Int = {
    val triple = (1, true, 3)
    val first = triple._1
    val second = triple._2
    val third = triple._3
    third
  } ensuring (result => result == 3)
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/MyTuple1.scala | Scala | apache-2.0 | 193 |
package com.szadowsz.cadisainmduit.ships
import java.io.{File, StringReader}
import com.szadowsz.common.io.read.CsvReader
import com.szadowsz.spark.ml.{Lineage, LocalDataframeIO}
import com.szadowsz.spark.ml.feature.{CsvColumnExtractor, StringMapper, StringStatistics}
import net.sf.extjwnl.data.PointerUtils
import net.sf.extjwnl.dictionary.Dictionary
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import scala.collection.JavaConverters._
import org.apache.spark.sql.functions._
import scala.util.Try
/**
* Created on 05/06/2017.
*/
// Joins scraped warship data (Royal Navy, US Navy, uboat.net) into a single
// per-name dataset with WordNet hypernyms, power ratings and role counts.
object ShipPreparer extends LocalDataframeIO {

  // WordNet dictionary used for hypernym lookups.
  val dictionary = Dictionary.getDefaultResourceInstance()

  // Column names extracted from the raw "fields" CSV column, in order.
  val schema = Array("name", "type", "class", "navy", "country", "startDate", "endDate", "daysActive")

  // Returns the distinct direct-hypernym lemmas of `s` from WordNet, across
  // all parts of speech and senses; empty when the lookup fails.
  // NOTE(review): the lambda parameter `s` below shadows the method parameter `s`.
  def getHypernyms(s: String): Array[String] = {
    val indexes = Try(dictionary.lookupAllIndexWords(s).getIndexWordArray)
    val senses = indexes.map(_.flatMap(iw => iw.getSenses.asScala))
    val hypernyms = senses.map(_.map(s => PointerUtils.getDirectHypernyms(s)))
    val pointers = hypernyms.map(_.flatMap(list => list.iterator().asScala.toList))
    val words = pointers.map(_.flatMap(node => node.getSynset.getWords.asScala.map(_.getLemma)).distinct)
    // Try failure (e.g. word not in WordNet) degrades to an empty result.
    val results = words.getOrElse(Array())
    results
  }

  def main(args: Array[String]): Unit = {
    // Local 4-thread Spark session for the whole pipeline.
    val sess = SparkSession.builder()
      .config("spark.driver.host", "localhost")
      .master("local[4]")
      .getOrCreate()

    // Raw scraped inputs, one dataframe per source site.
    val dfRN = extractFile(sess, new File("./data/web/rn/rnInfo.csv"), true)
    val dfUSN = extractFile(sess, new File("./data/web/usn/usnInfo.csv"), true)
    val dfUboat = extractFile(sess, new File("./data/web/uboat/uboatInfo.csv"), true)

    // Stage 1: split the single "fields" column into the named schema columns.
    val pipe = new Lineage("ship")
    pipe.addStage(classOf[CsvColumnExtractor], "inputCol" -> "fields", "outputCols" -> schema, "size" -> schema.length)

    val model = pipe.fit(dfRN)
    val rn = model.transform(dfRN)
    val uboat = model.transform(dfUboat)
    val usa = model.transform(dfUSN)

    // All sources share the schema, so union + distinct merges them.
    val ships = uboat.union(rn).union(usa).distinct()

    // powerRatings.csv maps ship type -> (role, rating); header row dropped.
    // Assumes columns: type, role, rating — TODO confirm against the file.
    val typeData = new CsvReader("./archives/dict/ships/powerRatings.csv")
    val typeRows = typeData.readAll().drop(1)
    val ratings = typeRows.map(s => s.head.trim -> s.last.trim).toMap
    val classify = typeRows.map(s => s.head.trim -> s(1).trim).toMap

    // Stage 2: map type -> role and type -> rating; dump debug statistics.
    val pipe2 = new Lineage("ship2")
    pipe2.addStage(classOf[StringMapper], Map("mapping" -> classify, "inputCol" -> "type", "outputCol" -> "role"))
    pipe2.addStage(classOf[StringMapper], Map("mapping" -> ratings, "inputCol" -> "type", "outputCol" -> "rating"))
    pipe2.addPassThroughTransformer(classOf[StringStatistics], Map("isDebug" -> true, "debugPath" -> "./data/debug/ships/"))

    val model2 = pipe2.fit(ships)
    // Unmapped roles become "unknown"; unmapped ratings default to 12.0.
    val idiResults = model2.transform(ships).na.fill("unknown", List("role")).withColumn("rating", col("rating").cast(DoubleType)).na.fill(12.0)

    // Hypernym lookup for single-word names only (null for multi-word/hyphenated).
    val dictFunct = (name: String) => {
      if (!(name.contains(" ") || name.contains("-"))) {
        //dictionary.lookupAllIndexWords(name).getValidPOSSet.asScala.map(_.toString).mkString(",")
        //  Relationship.findRelationships()
        getHypernyms(name) mkString (",")
      } else {
        null
      }
    }
    val dictUDF = udf[String, String](dictFunct)

    // Aggregate per ship name: hypernyms, average power, per-country and
    // per-role usage counts, first commissioning date, average service days.
    val results = idiResults.filter(col("type") =!= "Starship").groupBy("name").agg(
      dictUDF(col("name")).alias("pos"),
      round(avg(col("rating")), 2).alias("power"),
      count(when(col("country") === "Commonwealth", true)).alias("commonwealth"),
      count(when(col("country") =!= "Commonwealth", true)).alias("usa"),
      min(col("startDate")).alias("firstUsed"),
      round(avg(col("daysActive"))).cast(IntegerType).alias("avgDaysUsed"),
      count(when(col("role") === "capital", true)).alias("capital"),
      count(when(col("role") === "battle", true)).alias("battle"),
      count(when(col("role") === "stealth", true)).alias("stealth"),
      count(when(col("role") === "bombardment", true)).alias("bombardment"),
      count(when(col("role") === "assault", true)).alias("assault"),
      count(when(col("role") === "escort", true)).alias("escort"),
      count(when(col("role") === "patrol", true)).alias("patrol"),
      count(when(col("role") === "scout", true)).alias("scout"),
      count(when(col("role") === "cargo", true)).alias("cargo"),
      count(when(col("role") === "resupply", true)).alias("resupply"),
      count(when(col("role") === "auxiliary", true)).alias("auxiliary"),
      count(when(col("role") === "unknown", true)).alias("unknown")
    )

    // Write rows sorted by ship name; keep every row (predicate always true).
    val finalOrd: Ordering[Seq[String]] = Ordering.by(seq => seq.head)
    writeDF(results, "./data/web/ships.csv", "UTF-8", (s: Seq[String]) => true, finalOrd)
  }
}
| zakski/project-cadisainmduit | module/dataprep-people/src/main/scala/com/szadowsz/cadisainmduit/ships/ShipPreparer.scala | Scala | apache-2.0 | 4,769 |
/*
* Loops.scala
*
* Copyright (c) 2013 Lonnie Pryor III
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fulcrum.code
/**
* Definitions of the looping constructs.
*
* @author Lonnie Pryor III (lonnie@pryor.us.com)
*/
trait Loops { self: Macro with Basics =>
import c.universe._
/**
* Factory for while expressions.
*/
object $while {
/** Creates a while expression using the specified condition and body. */
def apply[T](condition: Expr[Boolean])(body: => Expr[T]): $expr[Unit] =
apply("while", condition)(body)
/** Creates a freshly-named while expression using the specified condition and body. */
def apply[T](name: String, condition: Expr[Boolean])(body: => Expr[T]): $expr[Unit] =
apply(newTermName(c.fresh(name)), condition)(body)
/** Creates a named while expression using the specified condition and body. */
def apply[T](name: TermName, condition: Expr[Boolean])(body: => Expr[T]): $expr[Unit] = {
val block = $block($expr(body))
$expr(LabelDef(name, List(),
If(condition.tree,
Block(List(block.tree), Apply(Ident(name), List())),
Literal(Constant(()))
)
), weakTypeOf[Unit])
}
}
/**
* Factory for do/while expressions.
*/
object $do {
/** Creates a factory for do/while expressions using the specified body. */
def apply[T](body: => Expr[T]): Condition =
apply("do")(body)
/** Creates a factory for freshly-named do/while expressions using the specified body. */
def apply[T](name: String)(body: => Expr[T]): Condition =
apply(newTermName(c.fresh(name)))(body)
/** Creates a factory for named do/while expressions from the specified body. */
def apply[T](name: TermName)(body: => Expr[T]): Condition = new Condition {
override def $while(condition: Expr[Boolean]) = {
val block = $block($expr(body))
$expr(LabelDef(name, List(),
Block(List(block.tree),
If(condition.tree,
Apply(Ident(name), List()),
Literal(Constant(())))
)
), weakTypeOf[Unit])
}
}
/**
* Factory that completes the construction of a do/while expression.
*/
trait Condition {
/** Completes the construction of a do/while expression using the specified condition. */
def $while(condition: Expr[Boolean]): $expr[Unit]
}
}
} | lpryor/fulcrum-spike | code/src/main/scala/fulcrum/code/Loops.scala | Scala | apache-2.0 | 2,934 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.sql.execution.exchange.ReusedExchangeExec
import org.apache.spark.sql.execution.metric.SQLMetricInfo
import org.apache.spark.util.Utils
/**
 * :: DeveloperApi ::
 * Stores information about a SQL SparkPlan.
 *
 * Instances form a tree mirroring the physical plan: each node carries its
 * name, its pretty-printed description, its child nodes, and the metadata
 * and metric descriptors attached to the corresponding SparkPlan node.
 */
@DeveloperApi
class SparkPlanInfo(
    val nodeName: String,
    val simpleString: String,
    val children: Seq[SparkPlanInfo],
    val metadata: Map[String, String],
    val metrics: Seq[SQLMetricInfo]) {

  // Hashing on simpleString alone is enough to distinguish plans within a
  // query, and any two nodes that compare equal necessarily share the same
  // simpleString, so the equals/hashCode contract is preserved.
  override def hashCode(): Int = simpleString.hashCode

  // Equality compares only the name, the description and the children;
  // metadata and metrics are not consulted.
  override def equals(other: Any): Boolean = other match {
    case that: SparkPlanInfo =>
      this.nodeName == that.nodeName &&
        this.simpleString == that.simpleString &&
        this.children == that.children
    case _ =>
      false
  }
}
private[execution] object SparkPlanInfo {

  /**
   * Recursively converts a physical [[SparkPlan]] tree into its
   * [[SparkPlanInfo]] summary.
   */
  def fromSparkPlan(plan: SparkPlan): SparkPlanInfo = {
    // A reused exchange reports the exchange it points to as its only child;
    // every other node reports its children together with its subqueries.
    val childPlans = plan match {
      case ReusedExchangeExec(_, child) => Seq(child)
      case _ => plan.children ++ plan.subqueries
    }
    // One SQLMetricInfo per metric; fall back to the map key when the metric
    // itself carries no name.
    val metricInfos = plan.metrics.toSeq.map { case (key, metric) =>
      new SQLMetricInfo(metric.name.getOrElse(key), metric.id, metric.metricType)
    }
    new SparkPlanInfo(
      plan.nodeName,
      plan.simpleString,
      childPlans.map(fromSparkPlan),
      plan.metadata,
      metricInfos)
  }
}
| wangyixiaohuihui/spark2-annotation | sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala | Scala | apache-2.0 | 2,301 |
package com.catinthedark.gban.network
import java.util.concurrent.ConcurrentLinkedQueue
import com.catinthedark.gban.common.Const
import com.catinthedark.lib.Pipe
import org.zeromq.ZMQ
import org.zeromq.ZMQ.{Socket, Poller, PollItem}
/**
 * Bidirectional game-state transport over a ZeroMQ push/pull socket pair.
 *
 * Messages are plain strings of the form "PREFIX:payload" where the payload
 * fields are ';'-separated. Outgoing messages are queued in [[buffer]] by the
 * game thread and drained by the network thread inside [[work]]; incoming
 * messages are parsed on the network thread and queued as deferred callbacks
 * in [[bufferIn]], to be dispatched on the caller's thread via [[processIn]].
 */
trait NetworkControl extends Runnable {
  // Some(()) once a HELLO handshake has been received; None when disconnected.
  var isConnected: Option[Unit] = None
  val MOVE_PREFIX = "MOVE"
  val SHOOT_PREFIX = "SHOOT"
  val ILOOSE_PREFIX = "ILOOSE"
  val IWON_PREFIX = "IWON"
  val HELLO_PREFIX = "HELLO"
  val PROGRESS_PREFIX = "PROGRESS"
  val ALIVE_PREFIX = "ALIVE"
  // Outgoing wire messages, produced by the game thread, consumed in work().
  val buffer = new ConcurrentLinkedQueue[String]()
  // Incoming events wrapped as thunks; executed on the game thread by processIn().
  val bufferIn = new ConcurrentLinkedQueue[() => Unit]()
  val onMovePipe = new Pipe[(Float, Boolean)]()
  val onShootPipe = new Pipe[Boolean]()
  val onILoosePipe = new Pipe[Unit]()
  val onIWonPipe = new Pipe[Unit]()
  val onProgressPipe = new Pipe[Int]()
  val onAlivePipe = new Pipe[Unit]()
  // The on* handlers only enqueue; the pipes fire later from processIn().
  def onMove(msg: (Float, Boolean)) = bufferIn.add(() => onMovePipe(msg))
  def onShoot(msg: Boolean) = bufferIn.add(() => onShootPipe(msg))
  def onILoose() = bufferIn.add(() => onILoosePipe())
  def onIWon() = bufferIn.add(() => onIWonPipe())
  def onProgress(msg: Int) = bufferIn.add(() => onProgressPipe(msg))
  def onAlive() = bufferIn.add(() => onAlivePipe())
  def onHello(pushSocket: Socket) = println("Received hello package")
  /** Queues a MOVE message carrying the x position and stand-up flag. */
  def move(x: Float, standUp: Boolean): Unit = {
    buffer.add(s"$MOVE_PREFIX:$x;$standUp")
  }
  /** Queues a SHOOT message; `exactly` marks whether the shot was on target. */
  def shoot(exactly: Boolean): Unit = {
    buffer.add(s"$SHOOT_PREFIX:$exactly")
  }
  def iLoose(): Unit = {
    buffer.add(s"$ILOOSE_PREFIX:")
  }
  def iWon(): Unit = {
    buffer.add(s"$IWON_PREFIX:")
  }
  def progress(progress: Int): Unit = {
    buffer.add(s"$PROGRESS_PREFIX:$progress")
  }
  def iAlive(): Unit = {
    buffer.add(s"$ALIVE_PREFIX:")
  }
  /** Dispatches all queued incoming events on the calling thread. */
  def processIn() = {
    while(!bufferIn.isEmpty)
      bufferIn.poll()()
  }
  def isServer: Boolean
  /**
   * Main network loop: polls the pull socket for incoming messages and the
   * push socket for writability, shuttling data between the sockets and the
   * two queues until the game ends or the thread is interrupted.
   */
  def work(pushSocket: Socket, pullSocket: Socket): Unit = {
    // Index 0: readable incoming data; index 1: outgoing socket writable.
    val pollItems = Array(new PollItem(pullSocket, Poller.POLLIN), new PollItem(pushSocket, Poller.POLLOUT))
    var shouldStop: Boolean = false
    var detectedGameEnd: Boolean = false
    while (!shouldStop && !Thread.currentThread().isInterrupted) {
      try {
        ZMQ.poll(pollItems, Const.pollTimeout)
        if (pollItems(0).isReadable) {
          // Parse "PREFIX:payload" and enqueue the matching event.
          val rawData = pullSocket.recvStr()
          println(s"Received data $rawData")
          val data = rawData.split(":")
          data(0) match {
            case MOVE_PREFIX =>
              val attrs = data(1).split(";")
              val x = attrs(0).toFloat
              val standUp = attrs(1).toBoolean
              onMove(x, standUp)
            case SHOOT_PREFIX =>
              val attrs = data(1).split(";")
              val exactly = attrs(0).toBoolean
              onShoot(exactly)
            case ILOOSE_PREFIX =>
              detectedGameEnd = true
              onILoose()
            case IWON_PREFIX =>
              detectedGameEnd = true
              onIWon()
            case HELLO_PREFIX =>
              onHello(pushSocket)
              isConnected = Some()
            case PROGRESS_PREFIX =>
              val attrs = data(1).split(";")
              val progress = attrs(0).toInt
              onProgress(progress)
            case ALIVE_PREFIX =>
              println("enemy alive")
              onAlive()
            case _ => println(s"UPS, wrong prefix $rawData")
          }
        }
        if (!buffer.isEmpty && pollItems(1).isWritable) {
          // Drain one outgoing message per iteration; a win/lose message
          // terminates the loop after being delivered.
          val message = buffer.poll()
          pushSocket.send(message)
          if (message.startsWith(IWON_PREFIX) || message.startsWith(ILOOSE_PREFIX)) {
            detectedGameEnd = true
            shouldStop = true
          }
        }
      } catch {
        case e: InterruptedException =>
          println("Interrupted network thread")
          shouldStop = true
      }
    }
    if (!detectedGameEnd) {
      // Loop ended without a game result: send IWON so the peer does not
      // hang waiting for one. NOTE(review): confirm the intended semantics
      // of claiming victory on disconnect.
      pushSocket.send(s"$IWON_PREFIX:")
    }
    buffer.clear()
    bufferIn.clear()
    pullSocket.close()
    pushSocket.close()
    isConnected = None
    println("Connection closed")
  }
}
| cat-in-the-dark/old48_34_game | src/main/scala/com/catinthedark/gban/network/NetworkControl.scala | Scala | mit | 4,109 |
object Solution {

  /** Tallies how often each decimal digit 0-9 occurs in the given characters. */
  def summarize(xsRaw : Array[Char]) : Array[Int] = {
    val tally = Array.fill(10)(0)
    for (ch <- xsRaw) tally(ch - '0') += 1
    tally
  }

  /**
   * Bulls-and-cows hint: bulls are digits matching in value and position,
   * cows are digits present on both sides but at different positions.
   * Returns the hint formatted as "<bulls>A<cows>B".
   */
  def getHint(secretRaw: String, guessRaw: String): String = {
    // Pair the digits position-wise and separate exact (bull) positions
    // from the mismatched remainder.
    val paired = secretRaw.toCharArray.zip(guessRaw.toCharArray)
    val (exact, mismatched) = paired.partition { case (s, g) => s == g }
    val (leftSecret, leftGuess) = mismatched.unzip
    // A digit contributes min(secret count, guess count) cows among the
    // mismatched positions.
    val secretFreq = summarize(leftSecret)
    val guessFreq = summarize(leftGuess)
    val cows = secretFreq.zip(guessFreq).map { case (a, b) => math.min(a, b) }.sum
    s"${exact.length}A${cows}B"
  }

  def main(args: Array[String]) = {
    // Each case prints "true" when getHint produces the expected hint.
    val cases = Seq(
      ("1123", "0111", "1A1B"),
      ("1807", "7810", "1A3B"),
      ("", "", "0A0B"),
      ("114514", "123456", "1A2B"))
    for ((secret, guess, expected) <- cases)
      println(getHint(secret, guess) == expected)
  }
}
| Javran/leetcode | bulls-and-cows/Solution.scala | Scala | mit | 1,136 |
/*
* The definition of Indexed proposition is taken from:
* A Schemata Calculus for Propositional Logic by Vincent Aravantinos, Ricardo Caferra, and Nicolas Peltier
*
*/
package at.logic.gapt.expr.schema
import at.logic.gapt.expr._
import at.logic.gapt.expr.hol._
import at.logic.gapt.expr.schema.logicSymbols._
/** The base type ω of schema indices. */
object Tindex extends TBase( "ω" )
/******************** SPECIAL INTEGERS ************************************/
/** Constructor/extractor for index variables (variables of type ω). */
object IntVar {
  def apply( name: String ) = Var( StringSymbol( name ), Tindex )
  def unapply( t: IntegerTerm ) = t match {
    case Var( name, Tindex ) => Some( name )
    case _ => None
  }
}
/** Constructor/extractor for index constants (constants of type ω). */
object IntConst {
  def apply( sym: SymbolA ): IntConst = Const( sym, Tindex )
  def unapply( t: IntegerTerm ) = t match {
    case c @ Const( name, Tindex ) => Some( c.asInstanceOf[Const].sym )
    case _ => None
  }
}
/** The numeral zero at type ω. */
object IntZero extends MonomorphicLogicalC( "0", Tindex )
/**************************************************************************/
/**
 * Predicates indexed by a schema term, e.g. P(i).
 * NOTE(review): both list-taking apply overloads only use indexTerms.head —
 * any further index terms are silently dropped; confirm single-index
 * predicates are the intended scope.
 */
object IndexedPredicate {
  def apply( name: String, indexTerms: List[SchemaExpression] ): SchemaFormula = {
    val pred = Const( name, FunctionType( To, indexTerms.head.exptype :: Nil ) )
    App( pred, indexTerms.head :: Nil ).asInstanceOf[SchemaFormula]
  }
  def apply( sym: SymbolA, indexTerms: List[SchemaExpression] ): SchemaFormula = {
    val pred = Const( sym, FunctionType( To, indexTerms.head.exptype :: Nil ) )
    App( pred, indexTerms.head :: Nil ).asInstanceOf[SchemaFormula]
  }
  def apply( name: String, indexTerm: IntegerTerm ): SchemaFormula = apply( name, indexTerm :: Nil )
  def unapply( expression: SchemaExpression ): Option[( Const, List[SchemaExpression] )] = expression match {
    case HOLAtom( head: Const, args ) if args.forall( t => t.exptype == Tindex ) =>
      Some( ( head, args ) )
    case _ => None
  }
}
/** A variable symbol carrying its index term, rendered as name(index). */
case class IndexedVarSymbol( name: String, val index: SchemaExpression ) extends SymbolA {
  override def toString = name + "(" + index + ")"
}
/** Indexed first-order variable x(i) of individual type ι. */
object indexedFOVar {
  def apply( name: String, i: SchemaExpression ) = Var( IndexedVarSymbol( name, i ), Ti )
  def unapply( s: SchemaExpression ) = s match {
    case Var( _, Ti ) => s.asInstanceOf[Var].sym match {
      case IndexedVarSymbol( name, index ) => Some( name, index )
      case _ => None
    }
    case _ => None
  }
}
/** Indexed variable x(i) of index type ω. */
object indexedOmegaVar {
  def apply( name: String, i: SchemaExpression ) = Var( IndexedVarSymbol( name, i ), Tindex )
  def unapply( s: SchemaExpression ) = s match {
    case Var( _, Tindex ) => s.asInstanceOf[Var].sym match {
      case IndexedVarSymbol( name, index ) => Some( name, index )
      case _ => None
    }
    case _ => None
  }
}
/** Plain first-order variable (delegates to FOLVar). */
object foVar {
  def apply( name: String ) = FOLVar( name )
  def unapply( t: FOLVar ) = FOLVar.unapply( t )
}
//indexed second-order variable of type: ind->i
object fo2Var {
  def apply( name: String ) = Var( name, Tindex -> Ti )
  def unapply( s: SchemaExpression ) = s match {
    case Var( name, Tindex -> Ti ) => Some( name )
    case _ => None
  }
}
//first-order constant
object foConst {
  def apply( name: String ) = FOLConst( name )
  def unapply( t: FOLConst ) = FOLConst.unapply( t )
}
//first-order variable of type ω
object fowVar {
  def apply( name: String ) = Var( name, Tindex )
  def unapply( t: SchemaExpression ) = t match {
    case Var( name, Tindex ) => Some( name, Tindex )
    case _ => None
  }
}
/**
 * Constructor/extractor for curried function applications head(a1)...(an)
 * whose result type is not a formula (exptype != To).
 */
object SchemaFunction {
  /*
  def apply(head: Var, args: List[SchemaExpression]): SchemaExpression = apply_(head, args)
  def apply(head: Const, args: List[SchemaExpression]): SchemaExpression = apply_(head, args)
  */
  // I added the following method to replace the ones above to avoid case distinctions
  // in user code. Maybe better: Add a trait "AtomHead" or something, and add it to
  // both Const and Var. Then, use SchemaExpression with AtomHead instead
  // of SchemaExpression below.
  //
  // The above methods are not so good since the unapply method returns SchemaExpressions,
  // which cannot directly be fed to the above apply methods without casting/case distinction.
  //
  def apply( head: SchemaExpression, args: List[SchemaExpression] ): SchemaExpression = {
    // Only variables and constants are allowed as the function head.
    require( head.isInstanceOf[Var] || head.isInstanceOf[Const] )
    apply_( head, args )
  }
  // Left-folds the arguments into a chain of applications.
  private def apply_( head: SchemaExpression, args: List[SchemaExpression] ): SchemaExpression = args match {
    case Nil => head
    case t :: tl => apply_( App( head, t ), tl )
  }
  def unapply( expression: SchemaExpression ) = expression match {
    // Applications headed by a logical constant are not functions.
    case App( c: Const, _ ) if isLogicalConstant( c ) => None
    case App( App( c: Const, _ ), _ ) if isLogicalConstant( c ) => None
    case App( _, _ ) if ( expression.exptype != To ) =>
      val t = unapply_( expression )
      Some( ( t._1, t._2, expression.exptype ) )
    case _ => None
  }
  // Recursive unapply to get the head and args
  private def unapply_( e: SchemaExpression ): ( SchemaExpression, List[SchemaExpression] ) = e match {
    case v: Var => ( v, Nil )
    case c: Const => ( c, Nil )
    case App( e1, e2 ) =>
      val t = unapply_( e1 )
      ( t._1, t._2 :+ e2 )
  }
}
/*************** OPERATORS *****************/
// Logical constants for the schema operators; the type annotations encode
// their curried signatures over ω and formulas.
object BigAndC extends MonomorphicLogicalC( BigAndSymbol.toString, ( Tindex -> To ) -> ( Tindex -> ( Tindex -> To ) ) )
object BigOrC extends MonomorphicLogicalC( BigOrSymbol.toString, ( ( Tindex -> To ) -> ( Tindex -> ( Tindex -> To ) ) ) )
object BiggerThanC extends MonomorphicLogicalC( BiggerThanSymbol.toString, ( Tindex -> ( Tindex -> To ) ) )
object LessThanC extends MonomorphicLogicalC( LessThanSymbol.toString, ( Tindex -> ( Tindex -> To ) ) )
object LeqC extends MonomorphicLogicalC( LeqSymbol.toString, ( Tindex -> ( Tindex -> To ) ) )
object SuccC extends MonomorphicLogicalC( "s", ( Tindex -> Tindex ) )
/** Iterated conjunction ⋀_{i=init}^{end} iter(i). */
object BigAnd {
  def apply( i: IntVar, iter: SchemaFormula, init: IntegerTerm, end: IntegerTerm ): SchemaFormula =
    apply( Abs( i, iter ), init, end )
  def apply( iter: Abs, init: IntegerTerm, end: IntegerTerm ): SchemaFormula =
    App( BigAndC(), iter :: init :: end :: Nil ).asInstanceOf[SchemaFormula]
  def unapply( expression: SchemaExpression ) = expression match {
    case App( App( App( BigAndC(), Abs( v, formula ) ), init: IntegerTerm ), end: IntegerTerm ) =>
      Some( v, formula.asInstanceOf[SchemaFormula], init, end )
    case _ => None
  }
}
/** Iterated disjunction ⋁_{i=init}^{end} iter(i). */
object BigOr {
  def apply( i: IntVar, iter: SchemaFormula, init: IntegerTerm, end: IntegerTerm ): SchemaFormula =
    apply( Abs( i, iter ), init, end )
  def apply( iter: Abs, init: IntegerTerm, end: IntegerTerm ): SchemaFormula =
    App( BigOrC(), iter :: init :: end :: Nil ).asInstanceOf[SchemaFormula]
  def unapply( expression: SchemaExpression ) = expression match {
    case App( App( App( BigOrC(), Abs( v, formula ) ), init: IntegerTerm ), end: IntegerTerm ) =>
      Some( v, formula.asInstanceOf[SchemaFormula], init, end )
    case _ => None
  }
}
/** Strict order l > r on index terms. */
object BiggerThan {
  def apply( l: IntegerTerm, r: IntegerTerm ) = App( App( BiggerThanC(), l ), r )
  def unapply( e: SchemaExpression ) = e match {
    case App( App( BiggerThanC(), l ), r ) => Some( ( l, r ) )
    case _ => None
  }
}
/** Successor s(t) on index terms. */
object Succ {
  def apply( t: IntegerTerm ): IntegerTerm = App( SuccC(), t ).asInstanceOf[IntegerTerm]
  //  def apply( t: SchemaExpression ): SchemaExpression = App( SuccC, t )
  def unapply( p: SchemaExpression ) = p match {
    case App( SuccC(), t: IntegerTerm ) => Some( t )
    case _ => None
  }
}
/**
 * Syntactic predecessor: only defined on terms of the form s(t).
 * Throws on anything else (in particular on zero).
 */
object Pred {
  def apply( t: IntegerTerm ): IntegerTerm = t match {
    case Succ( t1 ) => t1
    case _ => throw new Exception( "ERROR in Predecessor" )
  }
}
//object representing a schematic atom: P(i:ω, args)
/**
 * Constructor/extractor for atoms (applications of a Var/Const head whose
 * overall type is To, i.e. a formula).
 */
object SchemaAtom {
  /*
  def apply(head: Var, args: List[SchemaExpression]): SchemaFormula = apply_(head, args).asInstanceOf[SchemaFormula]
  def apply(head: Const, args: List[SchemaExpression]): SchemaFormula = apply_(head, args).asInstanceOf[SchemaFormula]
  */
  // I added the following method to replace the ones above to avoid case distinctions
  // in user code. Maybe better: Add a trait "AtomHead" or something, and add it to
  // both Const and Var. Then, use SchemaExpression with AtomHead instead
  // of SchemaExpression below.
  //
  // The above methods are not so good since the unapply method returns SchemaExpressions,
  // which cannot directly be fed to the above apply methods without casting/case distinction.
  //
  def apply( head: SchemaExpression, args: List[SchemaExpression] ): SchemaFormula = {
    // Only variables and constants are allowed as the atom's head.
    require( head.isInstanceOf[Var] || head.isInstanceOf[Const] )
    apply_( head, args ).asInstanceOf[SchemaFormula]
  }
  // Left-folds the arguments into a chain of applications.
  private def apply_( head: SchemaExpression, args: List[SchemaExpression] ): SchemaExpression = args match {
    case Nil => head
    case t :: tl => apply_( App( head, t ), tl )
  }
  def unapply( expression: SchemaExpression ) = expression match {
    // Applications headed by a logical constant are not atoms.
    case App( c: Const, _ ) if isLogicalConstant( c ) => None
    case App( App( c: Const, _ ), _ ) if isLogicalConstant( c ) => None
    case App( _, _ ) if ( expression.exptype == To ) => Some( unapply_( expression ) )
    case Const( _, _ ) if ( expression.exptype == To ) => Some( ( expression, Nil ) )
    case Var( _, _ ) if ( expression.exptype == To ) => Some( ( expression, Nil ) )
    case _ => None
  }
  // Recursive unapply to get the head and args
  private def unapply_( e: SchemaExpression ): ( SchemaExpression, List[SchemaExpression] ) = e match {
    case v: Var => ( v, Nil )
    case c: Const => ( c, Nil )
    case App( e1, e2 ) =>
      val t = unapply_( e1 )
      ( t._1, t._2 :+ e2 )
  }
}
/** Atom left < right; both sides must have the same type. */
object lessThan {
  def apply( left: SchemaExpression, right: SchemaExpression ) = {
    require( left.exptype == right.exptype )
    App( App( LessThanC(), left ), right ).asInstanceOf[SchemaFormula]
  }
  def unapply( expression: SchemaExpression ) = expression match {
    case App( App( LessThanC(), left ), right ) => Some( left, right )
    case _ => None
  }
}
/** Atom left <= right; both sides must have the same type. */
object leq {
  def apply( left: SchemaExpression, right: SchemaExpression ) = {
    require( left.exptype == right.exptype )
    App( App( LeqC(), left ), right ).asInstanceOf[SchemaFormula]
  }
  def unapply( expression: SchemaExpression ) = expression match {
    case App( App( LeqC(), left ), right ) => Some( left, right )
    case _ => None
  }
}
/** Applies a constant to a single index term, yielding an index term. */
object aTerm {
  def apply( name: Const, ind: IntegerTerm ): IntegerTerm = {
    App( name, ind ).asInstanceOf[IntegerTerm]
  }
}
// Create a var or const????
/**
 * First-order term f(a) of type ι.
 * NOTE(review): both apply overloads use only args.head — additional
 * arguments are silently dropped; confirm unary terms are intended.
 */
object foTerm {
  def apply( name: String, args: List[SchemaExpression] ): SchemaExpression = {
    val v = Var( name, args.head.exptype -> Ti )
    App( v, args.head )
  }
  def apply( v: SchemaExpression, args: List[SchemaExpression] ): SchemaExpression = {
    App( v, args.head )
  }
  def unapply( s: SchemaExpression ) = s match {
    case a: App if a.arg.exptype == Ti && a.function.exptype == Ti -> Ti => Some( a.function.asInstanceOf[SchemaExpression], a.arg.asInstanceOf[SchemaExpression] )
    case _ => None
  }
}
// TODO: this seems to be hardcoded for a a single parameter
// plus 0 or 1 arguments. Generalize to simplify the code!
/** Schematic term f(i) or f(i, a) with index parameter i : ω. */
object sTerm {
  //the i should be of type Tindex !
  def apply( f: String, i: SchemaExpression, l: List[SchemaExpression] ): SchemaExpression = {
    require( i.exptype == Tindex )
    // Arity is fixed by whether an extra argument is supplied: f(i) : ι
    // when l is empty, otherwise f(i)(l.head) : ι.
    if ( l.isEmpty ) {
      val func = Const( f, Tindex -> Ti )
      return App( func, i )
    } else {
      val func = Const( f, Tindex -> ( Ti -> Ti ) )
      return App( App( func, i ), l.head )
    }
  }
  def apply( f: Const, i: SchemaExpression, l: List[SchemaExpression] ): SchemaExpression = {
    require( i.exptype == Tindex )
    if ( l.isEmpty ) App( f, i )
    else App( App( f, i ), l.head )
  }
  def unapply( s: SchemaExpression ) = s match {
    case App( App( func: Const, i ), arg ) if i.exptype == Tindex => Some( ( func, i, arg :: Nil ) )
    case App( func: Const, i ) if i.exptype == Tindex => Some( ( func, i, Nil ) )
    case _ => None
  }
}
//indexed s-term of type ω->ω
/** Schematic index-valued term f(i) : ω. */
object sIndTerm {
  //the i should be of type Tindex !
  def apply( f: String, i: IntegerTerm ): SchemaExpression = {
    val func = Const( f, Tindex -> Tindex )
    return App( func, i )
  }
  def unapply( s: SchemaExpression ) = s match {
    case App( func: Const, i ) if i.exptype == Tindex => Some( ( func, i ) )
    case _ => None
  }
}
//This version of the function is used specifically to find the highest level subterms
//within atoms and satoms. Terms within terms are not located within the set.
object SchemaSubTerms {
  // Recurses through the logical structure; atoms recurse into their
  // arguments, while functions and bare applications are returned whole.
  // NOTE(review): in the SchemaFunction case `args` is bound but unused —
  // confirm that stopping at whole function terms is intended.
  def apply( f: LambdaExpression ): Seq[LambdaExpression] = f match {
    case Var( _, _ ) => List( f )
    case SchemaAtom( _, args ) => args.map( a => apply( a.asInstanceOf[SchemaExpression] ) ).flatten
    case SchemaFunction( _, args, _ ) => {
      List( f ).toSeq
    }
    case And( x, y ) => apply( x.asInstanceOf[SchemaExpression] ) ++ apply( y.asInstanceOf[LambdaExpression] )
    case Or( x, y ) => apply( x.asInstanceOf[SchemaExpression] ) ++ apply( y.asInstanceOf[LambdaExpression] )
    case Imp( x, y ) => apply( x.asInstanceOf[SchemaExpression] ) ++ apply( y.asInstanceOf[LambdaExpression] )
    case Neg( x ) => apply( x.asInstanceOf[SchemaExpression] )
    case Ex( v, x ) => apply( x.asInstanceOf[SchemaExpression] )
    case All( v, x ) => apply( x.asInstanceOf[SchemaExpression] )
    case Abs( _, x ) => apply( x.asInstanceOf[SchemaExpression] )
    case App( x, y ) => List( f ).toSeq
  }
}
//object representing a schematic atom: P(i:ω, args)
/**
 * NOTE(review): unlike SchemaAtom/IndexedPredicate, the symbol-based apply
 * builds the predicate head as a Var rather than a Const — confirm this
 * asymmetry is intentional.
 */
object sAtom {
  def apply( sym: SymbolA, args: List[SchemaExpression] ): SchemaFormula = {
    val pred: Var = Var( sym, FunctionType( To, args.map( a => a.exptype ) ) )
    apply( pred, args )
  }
  def unapply( s: SchemaExpression ) = s match {
    case App( func: Const, i ) if i.exptype == Tindex => Some( ( func, i ) )
    case _ => None
  }
  def apply( head: Var, args: List[SchemaExpression] ): SchemaFormula = {
    App( head, args ).asInstanceOf[SchemaFormula]
  }
}
//database for trs
/**
 * Mutable registry mapping a defined function symbol to its (base, step)
 * rewrite-rule pairs, each pair given as (left-hand side, right-hand side).
 */
object dbTRS extends Iterable[( Const, ( ( SchemaExpression, SchemaExpression ), ( SchemaExpression, SchemaExpression ) ) )] {
  val map = new scala.collection.mutable.HashMap[Const, ( ( SchemaExpression, SchemaExpression ), ( SchemaExpression, SchemaExpression ) )]
  def get( name: Const ) = map( name )
  def getOption( name: Const ) = map.get( name )
  def clear = map.clear
  def add( name: Const, base: ( SchemaExpression, SchemaExpression ), step: ( SchemaExpression, SchemaExpression ) ): Unit = {
    map.put( name, ( base, step ) )
  }
  def iterator = map.iterator
}
/** The similarity relation constant ~ of type ι -> ι -> o. */
object SimsC extends MonomorphicLogicalC( simSymbol.toString, Ti -> ( Ti -> To ) )
/** A term rewriting system for one schematic function: base and step case. */
class sTermRewriteSys( val func: Const, val base: SchemaExpression, val rec: SchemaExpression )
object sTermRewriteSys {
  def apply( f: Const, base: SchemaExpression, step: SchemaExpression ) = new sTermRewriteSys( f, base, step )
}
/** Atom left ~ right; both sides must have the same type. */
object sims {
  def apply( left: SchemaExpression, right: SchemaExpression ) = {
    require( left.exptype == right.exptype )
    App( App( SimsC(), left ), right ).asInstanceOf[SchemaFormula]
  }
  def unapply( expression: SchemaExpression ) = expression match {
    case App( App( SimsC(), left ), right ) => Some( left.asInstanceOf[SchemaExpression], right.asInstanceOf[SchemaExpression] )
    case _ => None
  }
}
/** Mutable registry of sTermRewriteSys instances, keyed by function symbol. */
object sTermDB extends Iterable[( Const, sTermRewriteSys )] with TraversableOnce[( Const, sTermRewriteSys )] {
  val terms = new scala.collection.mutable.HashMap[Const, sTermRewriteSys]
  def clear = terms.clear
  def get( func: Const ) = terms( func )
  def put( sterm: sTermRewriteSys ) = terms.put( sterm.func, sterm )
  def iterator = terms.iterator
}
| loewenheim/gapt | src/main/scala/at/logic/gapt/expr/schema/schema.scala | Scala | gpl-3.0 | 16,081 |
/*
* Copyright (c) 2013 David Soergel <dev@davidsoergel.com>
* Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package worldmake.lib.vcs
import worldmake.{ManagedPath, TypedPathReference, Recipe, WorldMakeConfig}
import scalax.file.Path
import scala.sys.process.{Process, ProcessLogger}
import com.typesafe.scalalogging.slf4j.Logging
/**
 * @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
 */
trait VcsWorkspaces extends Logging {

  /** Name of the branch used when the caller does not specify one. */
  def defaultBranchName : String

  /** Resolves a repository id to the URL it can be fetched from. */
  def toUrl(id: String): String

  //def toLocalRepo(id: String): Path

  /**
   * A Derivation that extracts the requested version from the VCS repository to the output path.
   * @param id
   * @param requestVersion
   * @return
   */
  def get(id: String, requestVersion: String = "latest"): Recipe[ManagedPath]

  // NOTE(review): presumably keyed by branch name, valued by the latest
  // version id on that branch — confirm against the implementations.
  def getLatestVersions(id: String): Map[String, String]

  /**
   * Runs an external command in the given working directory, forwarding its
   * stdout to debug-level and its stderr to warn-level logging.
   *
   * Fix: the exit code used to be computed and silently discarded; a failing
   * command now leaves a warning in the log instead of passing unnoticed.
   */
  def executeWithLog(command: Seq[String], workingDir: Path) {
    val pb = Process(command, workingDir.toAbsolute.fileOption) //, environment.toArray: _*)
    logger.debug("in " + workingDir.toAbsolute.path + ", executing " + command.mkString(" "))
    // any successful output should be written to a file in the output directory, so anything on stdout or stderr is
    // logging output and should be combined for easier debugging
    val pbLogger = ProcessLogger(
      (o: String) => logger.debug(o),
      (e: String) => logger.warn(e))
    val exitCode = pb ! pbLogger
    if (exitCode != 0)
      logger.warn("command '" + command.mkString(" ") + "' exited with non-zero code " + exitCode)
  }
}
| davidsoergel/worldmake | src/main/scala/worldmake/lib/vcs/VcsWorkspaces.scala | Scala | apache-2.0 | 1,500 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.SchemaPruningTest
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.types.{StringType, StructField, StructType}
/**
 * Tests for nested-column pruning: verifies that projections of individual
 * struct fields are pushed down through (or blocked by) various operators,
 * so that only the accessed nested fields survive optimization.
 */
class NestedColumnAliasingSuite extends SchemaPruningTest {

  import NestedColumnAliasingSuite._

  // Minimal optimizer with just the rules under test.
  object Optimize extends RuleExecutor[LogicalPlan] {
    val batches = Batch("Nested column pruning", FixedPoint(100),
      ColumnPruning,
      CollapseProject,
      RemoveNoopOperators) :: Nil
  }

  // Shared test schemas: a struct-heavy "contact" relation exercising
  // structs, arrays of structs, maps of structs, and nested structs.
  private val name = StructType.fromDDL("first string, middle string, last string")
  private val employer = StructType.fromDDL("id int, company struct<name:string, address:string>")

  private val contact = LocalRelation(
    'id.int,
    'name.struct(name),
    'address.string,
    'friends.array(name),
    'relatives.map(StringType, name),
    'employer.struct(employer))

  test("Pushing a single nested field projection") {
    def testSingleFieldPushDown(op: LogicalPlan => LogicalPlan): Unit = {
      val middle = GetStructField('name, 1, Some("middle"))
      val query = op(contact).select(middle).analyze
      val optimized = Optimize.execute(query)
      val expected = op(contact.select(middle)).analyze
      comparePlans(optimized, expected)
    }

    testSingleFieldPushDown((input: LogicalPlan) => input.limit(5))
    testSingleFieldPushDown((input: LogicalPlan) => input.repartition(1))
    testSingleFieldPushDown((input: LogicalPlan) => Sample(0.0, 0.6, false, 11L, input))
  }

  test("Pushing multiple nested field projection") {
    val first = GetStructField('name, 0, Some("first"))
    val last = GetStructField('name, 2, Some("last"))

    val query = contact
      .limit(5)
      .select('id, first, last)
      .analyze

    val optimized = Optimize.execute(query)

    val expected = contact
      .select('id, first, last)
      .limit(5)
      .analyze

    comparePlans(optimized, expected)
  }

  test("function with nested field inputs") {
    val first = GetStructField('name, 0, Some("first"))
    val last = GetStructField('name, 2, Some("last"))

    val query = contact
      .limit(5)
      .select('id, ConcatWs(Seq(first, last)))
      .analyze

    val optimized = Optimize.execute(query)

    // The pushed-down fields are materialized under generated alias names.
    val aliases = collectGeneratedAliases(optimized)

    val expected = contact
      .select('id, first.as(aliases(0)), last.as(aliases(1)))
      .limit(5)
      .select(
        'id,
        ConcatWs(Seq($"${aliases(0)}", $"${aliases(1)}")).as("concat_ws(name.first, name.last)"))
      .analyze
    comparePlans(optimized, expected)
  }

  test("multi-level nested field") {
    val field1 = GetStructField(GetStructField('employer, 1, Some("company")), 0, Some("name"))
    val field2 = GetStructField('employer, 0, Some("id"))

    val query = contact
      .limit(5)
      .select(field1, field2)
      .analyze

    val optimized = Optimize.execute(query)

    val expected = contact
      .select(field1, field2)
      .limit(5)
      .analyze
    comparePlans(optimized, expected)
  }

  test("Push original case-sensitive names") {
    val first1 = GetStructField('name, 0, Some("first"))
    val first2 = GetStructField('name, 1, Some("FIRST"))

    val query = contact
      .limit(5)
      .select('id, first1, first2)
      .analyze

    val optimized = Optimize.execute(query)

    val expected = contact
      .select('id, first1, first2)
      .limit(5)
      .analyze
    comparePlans(optimized, expected)
  }

  test("Pushing a single nested field projection - negative") {
    // Operators that consume the whole struct (ordering, distribution,
    // union) must not have the field projection pushed through them.
    val ops = Seq(
      (input: LogicalPlan) => input.distribute('name)(1),
      (input: LogicalPlan) => input.distribute($"name.middle")(1),
      (input: LogicalPlan) => input.orderBy('name.asc),
      (input: LogicalPlan) => input.orderBy($"name.middle".asc),
      (input: LogicalPlan) => input.sortBy('name.asc),
      (input: LogicalPlan) => input.sortBy($"name.middle".asc),
      (input: LogicalPlan) => input.union(input)
    )

    val queries = ops.map { op =>
      op(contact.select('name))
        .select(GetStructField('name, 1, Some("middle")))
        .analyze
    }

    val optimizedQueries :+ optimizedUnion = queries.map(Optimize.execute)
    val expectedQueries = queries.init
    optimizedQueries.zip(expectedQueries).foreach { case (optimized, expected) =>
      comparePlans(optimized, expected)
    }
    val expectedUnion =
      contact.select('name).union(contact.select('name.as('name)))
        .select(GetStructField('name, 1, Some("middle"))).analyze
    comparePlans(optimizedUnion, expectedUnion)
  }

  test("Pushing a single nested field projection through filters - negative") {
    val ops = Array(
      (input: LogicalPlan) => input.where('name.isNotNull),
      (input: LogicalPlan) => input.where($"name.middle".isNotNull)
    )

    val queries = ops.map { op =>
      op(contact)
        .select(GetStructField('name, 1, Some("middle")))
        .analyze
    }

    val optimizedQueries = queries.map(Optimize.execute)
    val expectedQueries = queries

    optimizedQueries.zip(expectedQueries).foreach { case (optimized, expected) =>
      comparePlans(optimized, expected)
    }
  }

  test("Do not optimize when parent field is used") {
    val query = contact
      .limit(5)
      .select('id, GetStructField('name, 0, Some("first")), 'name)
      .analyze

    val optimized = Optimize.execute(query)

    // Since the whole 'name struct is selected too, only column pruning
    // applies; the nested field cannot be aliased away.
    val expected = contact
      .select('id, 'name)
      .limit(5)
      .select('id, GetStructField('name, 0, Some("first")), 'name)
      .analyze
    comparePlans(optimized, expected)
  }

  test("Some nested column means the whole structure") {
    val nestedRelation = LocalRelation('a.struct('b.struct('c.int, 'd.int, 'e.int)))

    // 'a.b covers the entire relation output, so there is nothing to prune.
    val query = nestedRelation
      .limit(5)
      .select(GetStructField('a, 0, Some("b")))
      .analyze

    val optimized = Optimize.execute(query)

    comparePlans(optimized, query)
  }

  test("nested field pruning for getting struct field in array of struct") {
    val field1 = GetArrayStructFields(child = 'friends,
      field = StructField("first", StringType),
      ordinal = 0,
      numFields = 3,
      containsNull = true)
    val field2 = GetStructField('employer, 0, Some("id"))

    val query = contact
      .limit(5)
      .select(field1, field2)
      .analyze

    val optimized = Optimize.execute(query)

    val expected = contact
      .select(field1, field2)
      .limit(5)
      .analyze
    comparePlans(optimized, expected)
  }

  test("nested field pruning for getting struct field in map") {
    val field1 = GetStructField(GetMapValue('relatives, Literal("key")), 0, Some("first"))
    val field2 = GetArrayStructFields(child = MapValues('relatives),
      field = StructField("middle", StringType),
      ordinal = 1,
      numFields = 3,
      containsNull = true)

    val query = contact
      .limit(5)
      .select(field1, field2)
      .analyze

    val optimized = Optimize.execute(query)

    val expected = contact
      .select(field1, field2)
      .limit(5)
      .analyze
    comparePlans(optimized, expected)
  }

  test("Nested field pruning for Project and Generate") {
    val query = contact
      .generate(Explode('friends.getField("first")), outputNames = Seq("explode"))
      .select('explode, 'friends.getField("middle"))
      .analyze

    val optimized = Optimize.execute(query)

    val aliases = collectGeneratedAliases(optimized)

    val expected = contact
      .select(
        'friends.getField("middle").as(aliases(0)),
        'friends.getField("first").as(aliases(1)))
      .generate(Explode($"${aliases(1)}"),
        unrequiredChildIndex = Seq(1),
        outputNames = Seq("explode"))
      .select('explode, $"${aliases(0)}".as("friends.middle"))
      .analyze
    comparePlans(optimized, expected)
  }

  test("Nested field pruning for Generate") {
    val query = contact
      .generate(Explode('friends.getField("first")), outputNames = Seq("explode"))
      .select('explode)
      .analyze

    val optimized = Optimize.execute(query)

    val aliases = collectGeneratedAliases(optimized)

    val expected = contact
      .select('friends.getField("first").as(aliases(0)))
      .generate(Explode($"${aliases(0)}"),
        unrequiredChildIndex = Seq(0),
        outputNames = Seq("explode"))
      .analyze
    comparePlans(optimized, expected)
  }

  test("Nested field pruning for Project and Generate: not prune on generator output") {
    val companies = LocalRelation(
      'id.int,
      'employers.array(employer))

    val query = companies
      .generate(Explode('employers.getField("company")), outputNames = Seq("company"))
      .select('company.getField("name"))
      .analyze

    val optimized = Optimize.execute(query)

    val aliases = collectGeneratedAliases(optimized)

    val expected = companies
      .select('employers.getField("company").as(aliases(0)))
      .generate(Explode($"${aliases(0)}"),
        unrequiredChildIndex = Seq(0),
        outputNames = Seq("company"))
      .select('company.getField("name").as("company.name"))
      .analyze
    comparePlans(optimized, expected)
  }

  test("Nested field pruning for Generate: not prune on required child output") {
    val query = contact
      .generate(
        Explode('friends.getField("first")),
        outputNames = Seq("explode"))
      .select('explode, 'friends)
      .analyze

    val optimized = Optimize.execute(query)

    // 'friends itself is required downstream, so no nested pruning happens.
    val expected = contact
      .select('friends)
      .generate(Explode('friends.getField("first")),
        outputNames = Seq("explode"))
      .select('explode, 'friends)
      .analyze
    comparePlans(optimized, expected)
  }
}
object NestedColumnAliasingSuite {

  /**
   * Walks every expression in the plan and collects, in encounter order, the
   * names of aliases generated by nested-column pruning (those starting with
   * "_gen_alias_"). The plan itself is left unchanged.
   */
  def collectGeneratedAliases(query: LogicalPlan): ArrayBuffer[String] = {
    val found = ArrayBuffer.empty[String]
    query.transformAllExpressions {
      case alias @ Alias(_, aliasName) if aliasName.startsWith("_gen_alias_") =>
        found += aliasName
        alias
    }
    found
  }
}
| matthewfranglen/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NestedColumnAliasingSuite.scala | Scala | mit | 11,075 |
package com.roundeights.shnappy.admin
import scala.concurrent.Promise
import scala.concurrent.ExecutionContext.Implicits.global
import com.roundeights.skene.{Provider, Bundle, Registry}
import com.roundeights.tubeutil.BodyData
import com.roundeights.attempt._
import com.roundeights.scalon.{nParser, nObject, nTypeMismatch}
import com.roundeights.vfunk.{Validate, InvalidValueException}
import dispatch._
/** Companion for [[Persona]]: holds Persona-related error types. */
object Persona {

    /** General Persona errors (malformed or unexpected verifier responses). */
    class Error( message: String ) extends Exception( message )
}
/**
 * The result of a successful Persona verification request.
 */
trait Persona {

    /** The verified email address */
    def email: String

    /** {@inheritDoc} */
    override def toString = s"Persona($email)"
}
/**
 * Executes a Mozilla Persona (BrowserID) verification request.
 *
 * When `live` is false and the assertion already looks like a plain email
 * address, the remote verifier is skipped and the assertion is treated as a
 * pre-verified email — a development-mode shortcut.
 */
class PersonaProvider (
    private val audience: String,
    private val live: Boolean
) extends Provider[Persona] {

    /** {@inheritDoc} */
    override def dependencies: Set[Class[_]] = Set( classOf[BodyData] )

    // Validates an email address
    private val emailValid = Validate.email

    // The URL to send verification requests to
    private val verifyURL = "https://verifier.login.persona.org/verify"

    /** Sends an auth request off to the persona verification URL */
    private def verify ( assertion: String ): Future[nObject] = {
        if ( live || !emailValid.validate(assertion).isValid ) {
            // Dispatch requests are immutable: `<<` returns a NEW request with
            // the POST parameters attached, so the result must be kept. The
            // previous code discarded it, posting an empty body to the verifier.
            val request = dispatch.url( verifyURL ) << Map(
                "assertion" -> assertion, "audience" -> audience
            )
            Http( request.OK(as.String) ).map( nParser.jsonObj _ )
        }
        else {
            // Dev-mode shortcut: treat a syntactically valid email as verified
            Future.successful(nObject("status" -> "okay", "email" -> assertion))
        }
    }

    /** {@inheritDoc} */
    override def build( bundle: Bundle, next: Promise[Persona] ): Unit = {
        val obj = bundle.get[BodyData].json.asObject_?.getOrElse( nObject() )

        // Each step either extracts a value or fails the promise with a
        // specific error; `OnFail` short-circuits the remaining steps.
        for {

            // The client must post an `assertion` key in the JSON body
            assertion <- obj.str_?("assertion") :: OnFail {
                next.failure( new MissingKey("assertion") )
            }

            json <- verify(assertion) :: OnFail.alsoFail( next )

            status <- json.str_?("status") :: OnFail {
                next.failure( new Persona.Error(
                    "Persona response missing status: %s".format( json )
                ))
            }

            // Any status other than "okay" is an authentication failure
            _ <- ( status == "okay" ) :: OnFail {
                next.failure( new Unauthorized(
                    "Persona response status not ok: %s".format( json )
                ))
            }

            emailAddr <- json.str_?("email") :: OnFail {
                next.failure( new Persona.Error(
                    "Persona response missing email: %s: ".format( json )
                ))
            }

            // Defensive check: the verifier should only return valid emails
            _ <- TryTo.except {
                emailValid.validate( emailAddr ).require
            } onFailMatch {
                case err: InvalidValueException => next.failure(
                    new Persona.Error(
                        "Invalid Email returned from Persona: %s, %s".format(
                            emailAddr, err.firstError
                        )
                    )
                )
            }

        } next.success( new Persona {
            override val email = emailAddr
        } )
    }
}
| Nycto/Shnappy | src/main/scala/admin/Persona.scala | Scala | mit | 3,308 |
package jsky.app.ot.gemini.inst
import edu.gemini.spModel.core.Angle
import edu.gemini.spModel.gemini.gmos.GmosCommonType.{FPUnitMode, FPUnit}
import edu.gemini.spModel.gemini.gmos.GmosCommonType.DetectorManufacturer.HAMAMATSU
import edu.gemini.spModel.gemini.gmos.GmosScienceAreaGeometry.{ImagingFov, imagingFov, mosFov}
import edu.gemini.spModel.gemini.gmos.{InstGmosSouth, InstGmosNorth, GmosScienceAreaGeometry}
import edu.gemini.spModel.inst.FeatureGeometry
import edu.gemini.spModel.obscomp.SPInstObsComp
import jsky.app.ot.tpe.TpeImageInfo
import java.awt.{Graphics2D, Graphics}
import java.awt.geom.Point2D
import scalaz._
import Scalaz._
/** Plot feature for GMOS, which adds CCD labels to the basic science area plot. */
object GmosSciAreaPlotFeature extends SciAreaPlotFeature(GmosScienceAreaGeometry) {

  // Draws the base science area, then overlays CCD labels when we have a 2D context.
  override def draw(g: Graphics, tii: TpeImageInfo): Unit = {
    super.draw(g, tii)

    g match {
      case g2d: Graphics2D => drawCcdLabels(g2d)
      case _ => // do nothing
    }
  }

  // Field of view for the given instrument: imaging FOV for built-in imaging
  // FPUs, MOS FOV for custom masks, none otherwise (e.g. spectroscopy).
  def fov(obsComp: SPInstObsComp): Option[ImagingFov] = {
    def go(m: FPUnitMode, f: FPUnit): Option[ImagingFov] =
      (((m == FPUnitMode.BUILTIN) && f.isImaging) option imagingFov) orElse
      ((m == FPUnitMode.CUSTOM_MASK) option mosFov)

    obsComp match {
      case gn: InstGmosNorth => go(gn.getFPUnitMode, gn.getFPUnit)
      case gs: InstGmosSouth => go(gs.getFPUnitMode, gs.getFPUnit)
      case _ => none
    }
  }

  // Labels for the three CCDs, left-to-right on the plot.
  case class CcdLabels(left: String, center: String, right: String) {
    def toList: List[String] = List(left, center, right)
  }

  val hamamatsuLabels = CcdLabels("CCDr", "CCDg", "CCDb")
  val e2vLabels = CcdLabels("CCD1", "CCD2", "CCD3")

  // Picks the label set according to the detector manufacturer, when the
  // component is a GMOS instrument.
  def labels(obsComp: SPInstObsComp): Option[CcdLabels] = {
    val man = obsComp match {
      case gn: InstGmosNorth => some(gn.getDetectorManufacturer)
      case gs: InstGmosSouth => some(gs.getDetectorManufacturer)
      case _ => none
    }
    man.map(m => if (m == HAMAMATSU) hamamatsuLabels else e2vLabels)
  }

  // Draws labels only when both an FOV and a label set are available.
  def drawCcdLabels(g: Graphics2D): Unit = {
    val inst = _iw.getContext.instrument.get
    for {
      f <- fov(inst)
      l <- labels(inst)
    } drawCcdLabels(g, f, l)
  }

  def drawCcdLabels(g2d: Graphics2D, fov: ImagingFov, labels: CcdLabels): Unit = {
    g2d.setFont(SciAreaPlotFeature.PosAngleFont)
    val fm = g2d.getFontMetrics

    // Compose screen transform with the position-angle rotation so labels
    // follow the instrument orientation.
    val posAngle = Angle.fromDegrees(_iw.getContext.instrument.posAngleOrZero)
    val posAngleXform = FeatureGeometry.posAngleTransform(posAngle)
    val xform = _tii.toScreen <| (_.concatenate(posAngleXform))

    fov.toList.zip(labels.toList).foreach { case (shape, label) =>
      val w = fm.stringWidth(label)
      val b = shape.getBounds2D
      // Anchor each label horizontally centered, a quarter of the way down
      // the CCD's bounding box.
      val x = b.getCenterX
      val y = b.getY + b.getHeight / 4
      val p0 = new Point2D.Double(x, y)
      val p1 = new Point2D.Double()
      xform.transform(p0, p1)
      g2d.drawString(label, (p1.getX - w/2).toInt, p1.getY.toInt)
    }
  }
}
package debop4s.timeperiod
import java.lang.{Iterable => JIterable}
import debop4s.core.NotSupportedException
import debop4s.core.conversions.jodatime._
import debop4s.timeperiod.TimeSpec._
import debop4s.timeperiod.utils.Times
import org.joda.time.{DateTime, Duration}
import org.slf4j.LoggerFactory
import scala.annotation.varargs
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
trait ITimePeriodContainer extends mutable.Buffer[ITimePeriod] with ITimePeriod {

  /** The mutable backing store that holds the contained periods. */
  def periods: mutable.ArrayBuffer[ITimePeriod]

  /** Prepends an element (no flattening/deduplication, unlike [[add]]). */
  override def +=:(elem: ITimePeriod): this.type = {
    elem +=: periods
    this
  }

  /** Appends via [[add]], which flattens containers and skips duplicates. */
  override def +=(elem: ITimePeriod): this.type = {
    add(elem)
    this
  }

  override def length: Int = periods.length

  override def size = periods.size

  override def isEmpty = periods.isEmpty

  // def contains(elem: Any): Boolean = periods.contains(elem)

  /** Sets the start time (implementations shift all contained periods). */
  def start_=(x: DateTime)

  /** Sets the end time (implementations shift all contained periods). */
  def end_=(x: DateTime)

  /** Whether the container is read-only. */
  override def isReadonly: Boolean

  def apply(index: Int) = periods(index)

  def get(index: Int) = periods(index)

  def clear() {
    periods.clear()
  }

  /** True when `target` is a non-null member of this container. */
  def containsPeriod(target: ITimePeriod): Boolean = {
    (target != null) && periods.contains(target)
  }

  /** Adds a period; nested containers are flattened, duplicates skipped. */
  def add(x: ITimePeriod) {
    x match {
      case container: ITimePeriodContainer => container.foreach(add)
      case _ => if (!periods.contains(x)) periods += x
    }
  }

  def addAll(elems: JIterable[_ <: ITimePeriod]) {
    elems.asScala.foreach(add)
  }

  @varargs
  def addAll(elems: ITimePeriod*) {
    elems.foreach(add)
  }

  @varargs
  override def insert(n: Int, elems: ITimePeriod*) {
    periods.insert(n, elems: _*)
  }

  override def insertAll(n: Int, elems: Traversable[ITimePeriod]) {
    periods.insertAll(n, elems)
  }

  override def iterator = periods.iterator

  def insertAll(n: Int, elems: Iterable[ITimePeriod]) = {
    periods.insert(n, elems.toSeq: _*)
  }

  /**
   * True when every [[ITimePeriod]] in `elems` is contained here.
   * NOTE(review): non-`ITimePeriod` elements are silently ignored instead of
   * forcing a `false` result — confirm this matches callers' expectations.
   */
  def containsAll(elems: JIterable[_]): Boolean = {
    elems.asScala.filter(_.isInstanceOf[ITimePeriod]).forall(x => periods.contains(x))
  }

  /** Removes `x` if it is a contained period; returns whether it was removed. */
  def remove(x: Any): Boolean = {
    x match {
      case period: ITimePeriod if periods.contains(period) =>
        periods -= period
        true
      case _ => false
    }
  }

  /**
   * Removes every contained element of `elems`.
   * Fixed: the previous partial-function `foreach` threw a `MatchError` for
   * any element that was not contained (or was not a period at all).
   */
  def removeAll(elems: JIterable[_]): Boolean = {
    elems.asScala.foreach {
      case elem: ITimePeriod if periods.contains(elem) => remove(elem)
      case _ => // not contained or not a period: nothing to remove
    }
    true
  }

  /**
   * Retains only the elements that are present in `elems`.
   * Fixed: the previous implementation cleared the container and then re-added
   * the elements of `elems` (adding periods that were never contained), and
   * threw a `MatchError` on non-period elements. `retainAll` must intersect
   * with the current contents, never add.
   */
  def retainAll(elems: JIterable[_]): Boolean = {
    val keep = elems.asScala.collect { case p: ITimePeriod => p }.toSet
    val retained = periods.filter(keep.contains)
    periods.clear()
    periods ++= retained
    true
  }

  override def update(n: Int, newelem: ITimePeriod) {
    periods.update(n, newelem)
  }

  /** Replaces the element at `index` and returns the new value. */
  def set(index: Int, elem: ITimePeriod) = {
    periods.update(index, elem)
    periods(index)
  }

  override def remove(index: Int) = periods.remove(index)

  override def indexOf[T >: ITimePeriod](o: T): Int = periods.indexOf(o)

  override def lastIndexOf[T >: ITimePeriod](o: T): Int = periods.lastIndexOf(o)

  /** Sorts the contained periods in place by start time. */
  def sortByStart(sortDir: OrderDirection) {
    val sorted =
      if (sortDir == OrderDirection.ASC) periods.sortBy(_.start)
      else periods.sortBy(-_.start.getMillis)
    periods.clear()
    periods ++= sorted
  }

  /** Sorts the contained periods in place by end time. */
  def sortByEnd(sortDir: OrderDirection) {
    val sorted =
      if (sortDir == OrderDirection.ASC) periods.sortBy(_.end)
      else periods.sortBy(-_.end.getMillis)
    periods.clear()
    periods ++= sorted
  }

  /** Sorts the contained periods in place by duration. */
  def sortByDuration(sortDir: OrderDirection) {
    val sorted =
      if (sortDir == OrderDirection.ASC) periods.sortBy(_.duration)
      else periods.sortBy(-_.duration.getMillis)
    periods.clear()
    periods ++= sorted
  }

  def subList(fromIndex: Int, toIndex: Int): Seq[ITimePeriod] = {
    periods.slice(fromIndex, toIndex)
  }

  /** Orders periods by their start time. */
  def compare(x: ITimePeriod, y: ITimePeriod): Int = x.start.compareTo(y.start)

  override protected def buildStringHelper =
    super.buildStringHelper
    .add("periods", periods)
}
@SerialVersionUID(-7112720659283751048L)
class TimePeriodContainer extends ITimePeriodContainer {

  private[this] lazy val log = LoggerFactory.getLogger(getClass)

  implicit val dateTimeOrdering = new DateTimeOrdering()

  val _periods = ArrayBuffer[ITimePeriod]()

  def periods = _periods

  /** Earliest start of the contained periods, or `MinPeriodTime` when empty. */
  override def start: DateTime = {
    if (periods.isEmpty) MinPeriodTime
    else periods.par.minBy(x => x.start).start
  }

  /** Latest end of the contained periods, or `MaxPeriodTime` when empty. */
  override def end: DateTime = {
    if (periods.isEmpty) MaxPeriodTime
    else periods.par.maxBy(x => x.end).end
  }

  /** Shifts all periods so that the earliest one starts at `x`. */
  override def start_=(x: DateTime) {
    if (size > 0)
      move(new Duration(start, x))
  }

  /** Shifts all periods so that the latest one ends at `x`. */
  override def end_=(x: DateTime) {
    if (size > 0) {
      move(new Duration(end, x))
    }
  }

  def duration: Duration = if (hasPeriod) new Duration(start, end) else MaxDuration

  def hasStart = start != MinPeriodTime

  def hasEnd = end != MaxPeriodTime

  def hasPeriod = hasStart && hasEnd

  // Fixed: was `start eq end` (reference identity), which is almost never
  // true for independently computed DateTime instances; value equality is
  // what "moment" (zero-length period) means here.
  def isMoment = hasStart && (start == end)

  def isAnytime = !hasStart && !hasEnd

  def isReadonly = false

  def setup(ns: DateTime, ne: DateTime) {
    throw new NotSupportedException("TimePeriodContainer에서는 setup 메소드를 지원하지 않습니다.")
  }

  def copy(offset: Duration): ITimePeriod = {
    // Fixed: the message previously referred to `setup` instead of `copy`.
    throw new NotSupportedException("TimePeriodContainer에서는 copy 메소드를 지원하지 않습니다.")
  }

  /** Shifts every contained period by `offset` (no-op for null/zero offset). */
  def move(offset: Duration) {
    if (offset != null && offset.getMillis != 0) {
      log.trace(s"모든 기간을 offset=[$offset] 만큼 이동합니다.")
      _periods.par.foreach(_.move(offset))
    }
  }

  // Fixed: was reference identity (`eq`) on the boundary DateTimes; use
  // value equality so equal instants from different computations compare true.
  def isSamePeriod(other: ITimePeriod) = (other != null) && (start == other.start) && (end == other.end)

  def hasInside(moment: DateTime) = Times.hasInside(this, moment)

  def hasInside(other: ITimePeriod) = Times.hasInside(this, other)

  def intersectsWith(other: ITimePeriod) = Times.intersectWith(this, other)

  def overlapsWith(other: ITimePeriod) = Times.overlapsWith(this, other)

  def reset() = periods.clear()

  def relation(other: ITimePeriod) = Times.relation(this, other)

  def intersection(other: ITimePeriod) = Times.intersectRange(this, other)

  def union(other: ITimePeriod) = Times.unionRange(this, other)
}
object TimePeriodContainer {

  /** Builds a container holding every period of the given Java iterable. */
  def apply(collection: JIterable[_ <: ITimePeriod]): TimePeriodContainer = {
    val result = new TimePeriodContainer()
    result.addAll(collection)
    result
  }

  /** Builds a container holding the given periods. */
  @varargs
  def apply(periods: ITimePeriod*): TimePeriodContainer = {
    val result = new TimePeriodContainer()
    result.addAll(periods: _*)
    result
  }
}
| debop/debop4s | debop4s-timeperiod/src/main/scala/debop4s/timeperiod/TimePeriodContainer.scala | Scala | apache-2.0 | 7,132 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.time.ZoneId
import java.util.Locale
import java.util.concurrent.TimeUnit._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.Cast.{forceNullable, resolvableNullability}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeConstants._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.UTF8StringBuilder
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.unsafe.types.UTF8String.{IntWrapper, LongWrapper}
object Cast {

  /**
   * A tag to decide if a CAST is specified by user.
   */
  val USER_SPECIFIED_CAST = new TreeNodeTag[Boolean]("user_specified_cast")

  /**
   * Returns true iff we can cast `from` type to `to` type.
   * This is the most permissive of the three lattices defined here; see
   * `canUpCast` and `canANSIStoreAssign` for the stricter variants.
   */
  def canCast(from: DataType, to: DataType): Boolean = (from, to) match {
    case (fromType, toType) if fromType == toType => true

    case (NullType, _) => true

    case (_, StringType) => true

    case (StringType, BinaryType) => true
    case (_: IntegralType, BinaryType) => true

    case (StringType, BooleanType) => true
    case (DateType, BooleanType) => true
    case (TimestampType, BooleanType) => true
    case (_: NumericType, BooleanType) => true

    case (StringType, TimestampType) => true
    case (BooleanType, TimestampType) => true
    case (DateType, TimestampType) => true
    case (_: NumericType, TimestampType) => true

    case (StringType, DateType) => true
    case (TimestampType, DateType) => true

    case (StringType, CalendarIntervalType) => true

    case (StringType, _: NumericType) => true
    case (BooleanType, _: NumericType) => true
    case (DateType, _: NumericType) => true
    case (TimestampType, _: NumericType) => true
    case (_: NumericType, _: NumericType) => true

    // For complex types, both the element types and the nullability must be castable.
    case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
      canCast(fromType, toType) &&
        resolvableNullability(fn || forceNullable(fromType, toType), tn)

    // Map keys may never be null, so a key cast that could produce null is rejected.
    case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
      canCast(fromKey, toKey) &&
        (!forceNullable(fromKey, toKey)) &&
        canCast(fromValue, toValue) &&
        resolvableNullability(fn || forceNullable(fromValue, toValue), tn)

    case (StructType(fromFields), StructType(toFields)) =>
      fromFields.length == toFields.length &&
        fromFields.zip(toFields).forall {
          case (fromField, toField) =>
            canCast(fromField.dataType, toField.dataType) &&
              resolvableNullability(
                fromField.nullable || forceNullable(fromField.dataType, toField.dataType),
                toField.nullable)
        }

    case (udt1: UserDefinedType[_], udt2: UserDefinedType[_]) if udt2.acceptsType(udt1) => true

    case _ => false
  }

  /**
   * Return true if we need to use the `timeZone` information casting `from` type to `to` type.
   * The patterns matched reflect the current implementation in the Cast node.
   * c.f. usage of `timeZone` in:
   * * Cast.castToString
   * * Cast.castToDate
   * * Cast.castToTimestamp
   */
  def needsTimeZone(from: DataType, to: DataType): Boolean = (from, to) match {
    case (StringType, TimestampType | DateType) => true
    case (TimestampType | DateType, StringType) => true
    case (DateType, TimestampType) => true
    case (TimestampType, DateType) => true
    // Complex types need a time zone iff any nested field cast does.
    case (ArrayType(fromType, _), ArrayType(toType, _)) => needsTimeZone(fromType, toType)
    case (MapType(fromKey, fromValue, _), MapType(toKey, toValue, _)) =>
      needsTimeZone(fromKey, toKey) || needsTimeZone(fromValue, toValue)
    case (StructType(fromFields), StructType(toFields)) =>
      fromFields.length == toFields.length &&
        fromFields.zip(toFields).exists {
          case (fromField, toField) =>
            needsTimeZone(fromField.dataType, toField.dataType)
        }
    case _ => false
  }

  /**
   * Returns true iff we can safely up-cast the `from` type to `to` type without any truncating or
   * precision lose or possible runtime failures. For example, long -> int, string -> int are not
   * up-cast.
   */
  def canUpCast(from: DataType, to: DataType): Boolean = (from, to) match {
    case _ if from == to => true
    case (from: NumericType, to: DecimalType) if to.isWiderThan(from) => true
    case (from: DecimalType, to: NumericType) if from.isTighterThan(to) => true
    case (f, t) if legalNumericPrecedence(f, t) => true
    case (DateType, TimestampType) => true
    case (_: AtomicType, StringType) => true
    case (_: CalendarIntervalType, StringType) => true
    case (NullType, _) => true

    // Spark supports casting between long and timestamp, please see `longToTimestamp` and
    // `timestampToLong` for details.
    case (TimestampType, LongType) => true
    case (LongType, TimestampType) => true

    case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
      resolvableNullability(fn, tn) && canUpCast(fromType, toType)

    case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
      resolvableNullability(fn, tn) && canUpCast(fromKey, toKey) && canUpCast(fromValue, toValue)

    case (StructType(fromFields), StructType(toFields)) =>
      fromFields.length == toFields.length &&
        fromFields.zip(toFields).forall {
          case (f1, f2) =>
            resolvableNullability(f1.nullable, f2.nullable) && canUpCast(f1.dataType, f2.dataType)
        }

    case (from: UserDefinedType[_], to: UserDefinedType[_]) if to.acceptsType(from) => true

    case _ => false
  }

  /**
   * Returns true iff we can cast the `from` type to `to` type as per the ANSI SQL.
   * In practice, the behavior is mostly the same as PostgreSQL. It disallows certain unreasonable
   * type conversions such as converting `string` to `int` or `double` to `boolean`.
   */
  def canANSIStoreAssign(from: DataType, to: DataType): Boolean = (from, to) match {
    case _ if from == to => true
    case (NullType, _) => true
    case (_: NumericType, _: NumericType) => true
    case (_: AtomicType, StringType) => true
    case (_: CalendarIntervalType, StringType) => true
    case (DateType, TimestampType) => true
    case (TimestampType, DateType) => true

    case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
      resolvableNullability(fn, tn) && canANSIStoreAssign(fromType, toType)

    case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
      resolvableNullability(fn, tn) && canANSIStoreAssign(fromKey, toKey) &&
        canANSIStoreAssign(fromValue, toValue)

    case (StructType(fromFields), StructType(toFields)) =>
      fromFields.length == toFields.length &&
        fromFields.zip(toFields).forall {
          case (f1, f2) =>
            resolvableNullability(f1.nullable, f2.nullable) &&
              canANSIStoreAssign(f1.dataType, f2.dataType)
        }

    case _ => false
  }

  // True when `from` precedes `to` in the numeric widening order (safe widening).
  private def legalNumericPrecedence(from: DataType, to: DataType): Boolean = {
    val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(from)
    val toPrecedence = TypeCoercion.numericPrecedence.indexOf(to)
    fromPrecedence >= 0 && fromPrecedence < toPrecedence
  }

  // True when a cast from `from` to decimal `to` can never overflow (so never yields null).
  def canNullSafeCastToDecimal(from: DataType, to: DecimalType): Boolean = from match {
    case from: BooleanType if to.isWiderThan(DecimalType.BooleanDecimal) => true
    case from: NumericType if to.isWiderThan(from) => true
    case from: DecimalType =>
      // truncating or precision lose
      (to.precision - to.scale) > (from.precision - from.scale)
    case _ => false  // overflow
  }

  /**
   * Returns `true` if casting non-nullable values from `from` type to `to` type
   * may return null. Note that the caller side should take care of input nullability
   * first and only call this method if the input is not nullable.
   */
  def forceNullable(from: DataType, to: DataType): Boolean = (from, to) match {
    case (NullType, _) => false // empty array or map case
    case (_, _) if from == to => false

    case (StringType, BinaryType) => false
    case (StringType, _) => true
    case (_, StringType) => false

    case (FloatType | DoubleType, TimestampType) => true
    case (TimestampType, DateType) => false
    case (_, DateType) => true
    case (DateType, TimestampType) => false
    case (DateType, _) => true
    case (_, CalendarIntervalType) => true

    case (_, to: DecimalType) if !canNullSafeCastToDecimal(from, to) => true
    case (_: FractionalType, _: IntegralType) => true // NaN, infinity
    case _ => false
  }

  // A non-nullable slot can accept a nullable source only if `to` is nullable.
  def resolvableNullability(from: Boolean, to: Boolean): Boolean = !from || to

  /**
   * We process literals such as 'Infinity', 'Inf', '-Infinity' and 'NaN' etc in case
   * insensitive manner to be compatible with other database systems such as PostgreSQL and DB2.
   */
  def processFloatingPointSpecialLiterals(v: String, isFloat: Boolean): Any = {
    v.trim.toLowerCase(Locale.ROOT) match {
      case "inf" | "+inf" | "infinity" | "+infinity" =>
        if (isFloat) Float.PositiveInfinity else Double.PositiveInfinity
      case "-inf" | "-infinity" =>
        if (isFloat) Float.NegativeInfinity else Double.NegativeInfinity
      case "nan" =>
        if (isFloat) Float.NaN else Double.NaN
      case _ => null
    }
  }
}
abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression with NullIntolerant {
  /** The expression whose value is being cast. */
  def child: Expression

  /** The target type of the cast. */
  def dataType: DataType

  /**
   * Returns true iff we can cast `from` type to `to` type.
   */
  def canCast(from: DataType, to: DataType): Boolean

  /**
   * Returns the error message if casting from one type to another one is invalid.
   */
  def typeCheckFailureMessage: String
override def toString: String = {
val ansi = if (ansiEnabled) "ansi_" else ""
s"${ansi}cast($child as ${dataType.simpleString})"
}
override def checkInputDataTypes(): TypeCheckResult = {
if (canCast(child.dataType, dataType)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(typeCheckFailureMessage)
}
}
  // Nullable if the child is nullable or the cast itself can produce null
  // (e.g. an unparseable string).
  override def nullable: Boolean = child.nullable || Cast.forceNullable(child.dataType, dataType)

  /** When true, invalid casts throw instead of returning null (ANSI SQL mode). */
  protected def ansiEnabled: Boolean

  // When this cast involves TimeZone, it's only resolved if the timeZoneId is set;
  // Otherwise behave like Expression.resolved.
  override lazy val resolved: Boolean =
    childrenResolved && checkInputDataTypes().isSuccess && (!needsTimeZone || timeZoneId.isDefined)

  /** Whether this particular from/to type pair requires time zone information. */
  def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType)

  // [[func]] assumes the input is no longer null because eval already does the null check.
  @inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
  // Formatters for rendering dates/timestamps in the session time zone.
  private lazy val dateFormatter = DateFormatter(zoneId)
  private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId)

  // Legacy (pre-3.0) string rendering of complex types: `[...]` brackets and
  // omitted `null` entries.
  private val legacyCastToStr = SQLConf.get.getConf(SQLConf.LEGACY_COMPLEX_TYPES_TO_STRING)
  // The brackets that are used in casting structs and maps to strings
  private val (leftBracket, rightBracket) = if (legacyCastToStr) ("[", "]") else ("{", "}")
  // UDFToString: builds a converter rendering any supported type as UTF8String.
  // Complex types are rendered recursively; `legacyCastToStr` controls whether
  // null entries are printed and which brackets are used.
  private[this] def castToString(from: DataType): Any => Any = from match {
    case CalendarIntervalType =>
      buildCast[CalendarInterval](_, i => UTF8String.fromString(i.toString))
    case BinaryType => buildCast[Array[Byte]](_, UTF8String.fromBytes)
    case DateType => buildCast[Int](_, d => UTF8String.fromString(dateFormatter.format(d)))
    case TimestampType => buildCast[Long](_,
      t => UTF8String.fromString(timestampFormatter.format(t)))
    case ArrayType(et, _) =>
      buildCast[ArrayData](_, array => {
        val builder = new UTF8StringBuilder
        builder.append("[")
        if (array.numElements > 0) {
          val toUTF8String = castToString(et)
          // First element rendered without a leading separator.
          if (array.isNullAt(0)) {
            if (!legacyCastToStr) builder.append("null")
          } else {
            builder.append(toUTF8String(array.get(0, et)).asInstanceOf[UTF8String])
          }
          var i = 1
          while (i < array.numElements) {
            builder.append(",")
            if (array.isNullAt(i)) {
              if (!legacyCastToStr) builder.append(" null")
            } else {
              builder.append(" ")
              builder.append(toUTF8String(array.get(i, et)).asInstanceOf[UTF8String])
            }
            i += 1
          }
        }
        builder.append("]")
        builder.build()
      })
    case MapType(kt, vt, _) =>
      buildCast[MapData](_, map => {
        val builder = new UTF8StringBuilder
        builder.append(leftBracket)
        if (map.numElements > 0) {
          val keyArray = map.keyArray()
          val valueArray = map.valueArray()
          val keyToUTF8String = castToString(kt)
          val valueToUTF8String = castToString(vt)
          // Map keys are never null, so only values get the null treatment.
          builder.append(keyToUTF8String(keyArray.get(0, kt)).asInstanceOf[UTF8String])
          builder.append(" ->")
          if (valueArray.isNullAt(0)) {
            if (!legacyCastToStr) builder.append(" null")
          } else {
            builder.append(" ")
            builder.append(valueToUTF8String(valueArray.get(0, vt)).asInstanceOf[UTF8String])
          }
          var i = 1
          while (i < map.numElements) {
            builder.append(", ")
            builder.append(keyToUTF8String(keyArray.get(i, kt)).asInstanceOf[UTF8String])
            builder.append(" ->")
            if (valueArray.isNullAt(i)) {
              if (!legacyCastToStr) builder.append(" null")
            } else {
              builder.append(" ")
              builder.append(valueToUTF8String(valueArray.get(i, vt))
                .asInstanceOf[UTF8String])
            }
            i += 1
          }
        }
        builder.append(rightBracket)
        builder.build()
      })
    case StructType(fields) =>
      buildCast[InternalRow](_, row => {
        val builder = new UTF8StringBuilder
        builder.append(leftBracket)
        if (row.numFields > 0) {
          val st = fields.map(_.dataType)
          // One converter per field, resolved once up front.
          val toUTF8StringFuncs = st.map(castToString)
          if (row.isNullAt(0)) {
            if (!legacyCastToStr) builder.append("null")
          } else {
            builder.append(toUTF8StringFuncs(0)(row.get(0, st(0))).asInstanceOf[UTF8String])
          }
          var i = 1
          while (i < row.numFields) {
            builder.append(",")
            if (row.isNullAt(i)) {
              if (!legacyCastToStr) builder.append(" null")
            } else {
              builder.append(" ")
              builder.append(toUTF8StringFuncs(i)(row.get(i, st(i))).asInstanceOf[UTF8String])
            }
            i += 1
          }
        }
        builder.append(rightBracket)
        builder.build()
      })
    case pudt: PythonUserDefinedType => castToString(pudt.sqlType)
    case udt: UserDefinedType[_] =>
      buildCast[Any](_, o => UTF8String.fromString(udt.deserialize(o).toString))
    case _ => buildCast[Any](_, o => UTF8String.fromString(o.toString))
  }
  // BinaryConverter: strings use UTF-8 bytes; integral types use their
  // big-endian byte representation. Intentionally partial — `canCast`
  // guarantees no other source type reaches this converter.
  private[this] def castToBinary(from: DataType): Any => Any = from match {
    case StringType => buildCast[UTF8String](_, _.getBytes)
    case ByteType => buildCast[Byte](_, NumberConverter.toBinary)
    case ShortType => buildCast[Short](_, NumberConverter.toBinary)
    case IntegerType => buildCast[Int](_, NumberConverter.toBinary)
    case LongType => buildCast[Long](_, NumberConverter.toBinary)
  }
  // UDFToBoolean: strings are matched against known true/false literals
  // (anything else yields null); numeric types are true iff non-zero.
  private[this] def castToBoolean(from: DataType): Any => Any = from match {
    case StringType =>
      buildCast[UTF8String](_, s => {
        if (StringUtils.isTrueString(s)) {
          true
        } else if (StringUtils.isFalseString(s)) {
          false
        } else {
          null
        }
      })
    case TimestampType =>
      buildCast[Long](_, t => t != 0)
    case DateType =>
      // Hive would return null when cast from date to boolean
      buildCast[Int](_, d => null)
    case LongType =>
      buildCast[Long](_, _ != 0)
    case IntegerType =>
      buildCast[Int](_, _ != 0)
    case ShortType =>
      buildCast[Short](_, _ != 0)
    case ByteType =>
      buildCast[Byte](_, _ != 0)
    case DecimalType() =>
      buildCast[Decimal](_, !_.isZero)
    case DoubleType =>
      buildCast[Double](_, _ != 0)
    case FloatType =>
      buildCast[Float](_, _ != 0)
  }
  // TimestampConverter: numeric sources are interpreted as *seconds* since
  // the epoch and converted to microseconds; strings are parsed in the
  // session time zone (throwing under ANSI mode on parse failure).
  private[this] def castToTimestamp(from: DataType): Any => Any = from match {
    case StringType =>
      buildCast[UTF8String](_, utfs => {
        if (ansiEnabled) {
          DateTimeUtils.stringToTimestampAnsi(utfs, zoneId)
        } else {
          DateTimeUtils.stringToTimestamp(utfs, zoneId).orNull
        }
      })
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1L else 0)
    case LongType =>
      buildCast[Long](_, l => longToTimestamp(l))
    case IntegerType =>
      buildCast[Int](_, i => longToTimestamp(i.toLong))
    case ShortType =>
      buildCast[Short](_, s => longToTimestamp(s.toLong))
    case ByteType =>
      buildCast[Byte](_, b => longToTimestamp(b.toLong))
    case DateType =>
      buildCast[Int](_, d => daysToMicros(d, zoneId))
    // TimestampWritable.decimalToTimestamp
    case DecimalType() =>
      buildCast[Decimal](_, d => decimalToTimestamp(d))
    // TimestampWritable.doubleToTimestamp
    case DoubleType =>
      buildCast[Double](_, d => doubleToTimestamp(d))
    // TimestampWritable.floatToTimestamp
    case FloatType =>
      buildCast[Float](_, f => doubleToTimestamp(f.toDouble))
  }
private[this] def decimalToTimestamp(d: Decimal): Long = {
(d.toBigDecimal * MICROS_PER_SECOND).longValue
}
private[this] def doubleToTimestamp(d: Double): Any = {
if (d.isNaN || d.isInfinite) null else (d * MICROS_PER_SECOND).toLong
}
// converting seconds to us
private[this] def longToTimestamp(t: Long): Long = SECONDS.toMicros(t)
// converting us to seconds
private[this] def timestampToLong(ts: Long): Long = {
Math.floorDiv(ts, MICROS_PER_SECOND)
}
// converting us to seconds in double
private[this] def timestampToDouble(ts: Long): Double = {
ts / MICROS_PER_SECOND.toDouble
}
  // DateConverter: strings are parsed in the session time zone (throwing
  // under ANSI mode on parse failure); timestamps are truncated to days.
  private[this] def castToDate(from: DataType): Any => Any = from match {
    case StringType =>
      if (ansiEnabled) {
        buildCast[UTF8String](_, s => DateTimeUtils.stringToDateAnsi(s, zoneId))
      } else {
        buildCast[UTF8String](_, s => DateTimeUtils.stringToDate(s, zoneId).orNull)
      }
    case TimestampType =>
      // throw valid precision more than seconds, according to Hive.
      // Timestamp.nanos is in 0 to 999,999,999, no more than a second.
      buildCast[Long](_, t => microsToDays(t, zoneId))
  }
  // IntervalConverter: only string sources are supported; unparseable
  // interval strings yield null (safeStringToInterval never throws).
  private[this] def castToInterval(from: DataType): Any => Any = from match {
    case StringType =>
      buildCast[UTF8String](_, s => IntervalUtils.safeStringToInterval(s))
  }
  // LongConverter: under ANSI mode invalid strings and numeric overflow
  // throw; otherwise they yield null (strings) or wrap/truncate (numerics).
  private[this] def castToLong(from: DataType): Any => Any = from match {
    case StringType if ansiEnabled =>
      buildCast[UTF8String](_, _.toLongExact())
    case StringType =>
      val result = new LongWrapper()
      buildCast[UTF8String](_, s => if (s.toLong(result)) result.value else null)
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1L else 0L)
    case DateType =>
      // Dates cannot be cast to numerics (Hive semantics): always null.
      buildCast[Int](_, d => null)
    case TimestampType =>
      // Timestamps convert to whole seconds since the epoch.
      buildCast[Long](_, t => timestampToLong(t))
    case x: NumericType if ansiEnabled =>
      b => x.exactNumeric.asInstanceOf[Numeric[Any]].toLong(b)
    case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toLong(b)
  }
  // IntConverter: like castToLong, but under ANSI mode additionally checks
  // that the seconds value of a timestamp fits in an Int.
  private[this] def castToInt(from: DataType): Any => Any = from match {
    case StringType if ansiEnabled =>
      buildCast[UTF8String](_, _.toIntExact())
    case StringType =>
      val result = new IntWrapper()
      buildCast[UTF8String](_, s => if (s.toInt(result)) result.value else null)
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1 else 0)
    case DateType =>
      // Dates cannot be cast to numerics (Hive semantics): always null.
      buildCast[Int](_, d => null)
    case TimestampType if ansiEnabled =>
      buildCast[Long](_, t => {
        val longValue = timestampToLong(t)
        // Round-trip comparison detects overflow without extra branching.
        if (longValue == longValue.toInt) {
          longValue.toInt
        } else {
          throw QueryExecutionErrors.castingCauseOverflowError(t, Int.getClass.getName)
        }
      })
    case TimestampType =>
      buildCast[Long](_, t => timestampToLong(t).toInt)
    case x: NumericType if ansiEnabled =>
      b => x.exactNumeric.asInstanceOf[Numeric[Any]].toInt(b)
    case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b)
  }
  // ShortConverter
  // Interpreted cast to ShortType. ANSI numeric casts go through exactNumeric's
  // toInt (Numeric has no toShort) and then range check the int result.
  private[this] def castToShort(from: DataType): Any => Any = from match {
    case StringType if ansiEnabled =>
      buildCast[UTF8String](_, _.toShortExact())
    case StringType =>
      // IntWrapper is reused; UTF8String.toShort reports its result through it.
      val result = new IntWrapper()
      buildCast[UTF8String](_, s => if (s.toShort(result)) {
        result.value.toShort
      } else {
        null
      })
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1.toShort else 0.toShort)
    case DateType =>
      // Date never casts to an integral type (Hive-compatible).
      buildCast[Int](_, d => null)
    case TimestampType if ansiEnabled =>
      buildCast[Long](_, t => {
        val longValue = timestampToLong(t)
        if (longValue == longValue.toShort) {
          longValue.toShort
        } else {
          // NOTE(review): Short.getClass.getName is "scala.Short$"; codegen uses "short".
          throw QueryExecutionErrors.castingCauseOverflowError(t, Short.getClass.getName)
        }
      })
    case TimestampType =>
      buildCast[Long](_, t => timestampToLong(t).toShort)
    case x: NumericType if ansiEnabled =>
      b =>
        // toInt itself may overflow for Long/Decimal sources; rethrow with the
        // Short-specific overflow error either way.
        val intValue = try {
          x.exactNumeric.asInstanceOf[Numeric[Any]].toInt(b)
        } catch {
          case _: ArithmeticException =>
            throw QueryExecutionErrors.castingCauseOverflowError(b, Short.getClass.getName)
        }
        if (intValue == intValue.toShort) {
          intValue.toShort
        } else {
          throw QueryExecutionErrors.castingCauseOverflowError(b, Short.getClass.getName)
        }
    case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toShort
  }
  // ByteConverter
  // Interpreted cast to ByteType; structurally identical to castToShort with
  // the narrower Byte range check.
  private[this] def castToByte(from: DataType): Any => Any = from match {
    case StringType if ansiEnabled =>
      buildCast[UTF8String](_, _.toByteExact())
    case StringType =>
      // IntWrapper is reused; UTF8String.toByte reports its result through it.
      val result = new IntWrapper()
      buildCast[UTF8String](_, s => if (s.toByte(result)) {
        result.value.toByte
      } else {
        null
      })
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1.toByte else 0.toByte)
    case DateType =>
      // Date never casts to an integral type (Hive-compatible).
      buildCast[Int](_, d => null)
    case TimestampType if ansiEnabled =>
      buildCast[Long](_, t => {
        val longValue = timestampToLong(t)
        if (longValue == longValue.toByte) {
          longValue.toByte
        } else {
          // NOTE(review): Byte.getClass.getName is "scala.Byte$"; codegen uses "byte".
          throw QueryExecutionErrors.castingCauseOverflowError(t, Byte.getClass.getName)
        }
      })
    case TimestampType =>
      buildCast[Long](_, t => timestampToLong(t).toByte)
    case x: NumericType if ansiEnabled =>
      b =>
        // toInt may itself overflow (Long/Decimal sources); map either failure
        // to the Byte-specific overflow error.
        val intValue = try {
          x.exactNumeric.asInstanceOf[Numeric[Any]].toInt(b)
        } catch {
          case _: ArithmeticException =>
            throw QueryExecutionErrors.castingCauseOverflowError(b, Byte.getClass.getName)
        }
        if (intValue == intValue.toByte) {
          intValue.toByte
        } else {
          throw QueryExecutionErrors.castingCauseOverflowError(b, Byte.getClass.getName)
        }
    case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte
  }
/**
* Change the precision / scale in a given decimal to those set in `decimalType` (if any),
* modifying `value` in-place and returning it if successful. If an overflow occurs, it
* either returns null or throws an exception according to the value set for
* `spark.sql.ansi.enabled`.
*
* NOTE: this modifies `value` in-place, so don't call it on external data.
*/
private[this] def changePrecision(value: Decimal, decimalType: DecimalType): Decimal = {
if (value.changePrecision(decimalType.precision, decimalType.scale)) {
value
} else {
if (!ansiEnabled) {
null
} else {
throw QueryExecutionErrors.cannotChangeDecimalPrecisionError(
value, decimalType.precision, decimalType.scale)
}
}
}
  /**
   * Create new `Decimal` with precision and scale given in `decimalType` (if any).
   * If overflow occurs, if `spark.sql.ansi.enabled` is false, null is returned;
   * otherwise, an `ArithmeticException` is thrown.
   *
   * Unlike `changePrecision` this does NOT mutate its input, and it rounds with
   * HALF_UP; `nullOnOverflow` is the inverse of the ANSI flag.
   */
  private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal =
    value.toPrecision(
      decimalType.precision, decimalType.scale, Decimal.ROUND_HALF_UP, !ansiEnabled)
  // Interpreted cast to DecimalType. Strings parse via Decimal.fromString /
  // fromStringANSI; integral types go through Long exactly; fractional types go
  // through Double (precision loss possible) and map NumberFormatException
  // (e.g. NaN/Infinity) to null.
  private[this] def castToDecimal(from: DataType, target: DecimalType): Any => Any = from match {
    case StringType if !ansiEnabled =>
      buildCast[UTF8String](_, s => {
        val d = Decimal.fromString(s)
        if (d == null) null else changePrecision(d, target)
      })
    case StringType if ansiEnabled =>
      buildCast[UTF8String](_, s => changePrecision(Decimal.fromStringANSI(s), target))
    case BooleanType =>
      buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
    case DateType =>
      buildCast[Int](_, d => null) // date can't cast to decimal in Hive
    case TimestampType =>
      // Note that we lose precision here.
      buildCast[Long](_, t => changePrecision(Decimal(timestampToDouble(t)), target))
    case dt: DecimalType =>
      b => toPrecision(b.asInstanceOf[Decimal], target)
    case t: IntegralType =>
      b => changePrecision(Decimal(t.integral.asInstanceOf[Integral[Any]].toLong(b)), target)
    case x: FractionalType =>
      b => try {
        changePrecision(Decimal(x.fractional.asInstanceOf[Fractional[Any]].toDouble(b)), target)
      } catch {
        case _: NumberFormatException => null
      }
  }
  // DoubleConverter
  // Interpreted cast to DoubleType. Strings are first tried with
  // String.toDouble; on failure the special literals ("NaN", "Infinity", ...)
  // are attempted, and a still-unparsable input is an error in ANSI mode or
  // null otherwise.
  private[this] def castToDouble(from: DataType): Any => Any = from match {
    case StringType =>
      buildCast[UTF8String](_, s => {
        val doubleStr = s.toString
        try doubleStr.toDouble catch {
          case _: NumberFormatException =>
            val d = Cast.processFloatingPointSpecialLiterals(doubleStr, false)
            if(ansiEnabled && d == null) {
              throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s)
            } else {
              d
            }
        }
      })
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1d else 0d)
    case DateType =>
      // Date never casts to a numeric type (Hive-compatible).
      buildCast[Int](_, d => null)
    case TimestampType =>
      buildCast[Long](_, t => timestampToDouble(t))
    case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toDouble(b)
  }
  // FloatConverter
  // Interpreted cast to FloatType; mirrors castToDouble (the `true` flag asks
  // the special-literal parser for Float results).
  private[this] def castToFloat(from: DataType): Any => Any = from match {
    case StringType =>
      buildCast[UTF8String](_, s => {
        val floatStr = s.toString
        try floatStr.toFloat catch {
          case _: NumberFormatException =>
            val f = Cast.processFloatingPointSpecialLiterals(floatStr, true)
            if (ansiEnabled && f == null) {
              throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s)
            } else {
              f
            }
        }
      })
    case BooleanType =>
      buildCast[Boolean](_, b => if (b) 1f else 0f)
    case DateType =>
      // Date never casts to a numeric type (Hive-compatible).
      buildCast[Int](_, d => null)
    case TimestampType =>
      buildCast[Long](_, t => timestampToDouble(t).toFloat)
    case x: NumericType =>
      b => x.numeric.asInstanceOf[Numeric[Any]].toFloat(b)
  }
  // Interpreted cast for array elements: applies the element-level cast to each
  // non-null element and preserves nulls, producing a new GenericArrayData.
  private[this] def castArray(fromType: DataType, toType: DataType): Any => Any = {
    val elementCast = cast(fromType, toType)
    // TODO: Could be faster?
    buildCast[ArrayData](_, array => {
      val values = new Array[Any](array.numElements())
      array.foreach(fromType, (i, e) => {
        if (e == null) {
          values(i) = null
        } else {
          values(i) = elementCast(e)
        }
      })
      new GenericArrayData(values)
    })
  }
private[this] def castMap(from: MapType, to: MapType): Any => Any = {
val keyCast = castArray(from.keyType, to.keyType)
val valueCast = castArray(from.valueType, to.valueType)
buildCast[MapData](_, map => {
val keys = keyCast(map.keyArray()).asInstanceOf[ArrayData]
val values = valueCast(map.valueArray()).asInstanceOf[ArrayData]
new ArrayBasedMapData(keys, values)
})
}
  // Interpreted cast for structs: fields are cast positionally (field i of
  // `from` to field i of `to`); `from` and `to` are assumed to have the same
  // arity, which the cast analysis guarantees.
  private[this] def castStruct(from: StructType, to: StructType): Any => Any = {
    val castFuncs: Array[(Any) => Any] = from.fields.zip(to.fields).map {
      case (fromField, toField) => cast(fromField.dataType, toField.dataType)
    }
    // TODO: Could be faster?
    buildCast[InternalRow](_, row => {
      val newRow = new GenericInternalRow(from.fields.length)
      var i = 0
      while (i < row.numFields) {
        newRow.update(i,
          if (row.isNullAt(i)) null else castFuncs(i)(row.get(i, from.apply(i).dataType)))
        i += 1
      }
      newRow
    })
  }
  // Resolves the interpreted cast function for a (from, to) type pair by
  // dispatching on the target type. Kept in sync with nullSafeCastFunction,
  // the codegen counterpart below.
  private[this] def cast(from: DataType, to: DataType): Any => Any = {
    // If the cast does not change the structure, then we don't really need to cast anything.
    // We can return what the children return. Same thing should happen in the codegen path.
    if (DataType.equalsStructurally(from, to)) {
      identity
    } else if (from == NullType) {
      // According to `canCast`, NullType can be casted to any type.
      // For primitive types, we don't reach here because the guard of `nullSafeEval`.
      // But for nested types like struct, we might reach here for nested null type field.
      // We won't call the returned function actually, but returns a placeholder.
      _ => throw QueryExecutionErrors.cannotCastFromNullTypeError(to)
    } else {
      to match {
        case dt if dt == from => identity[Any]
        case StringType => castToString(from)
        case BinaryType => castToBinary(from)
        case DateType => castToDate(from)
        case decimal: DecimalType => castToDecimal(from, decimal)
        case TimestampType => castToTimestamp(from)
        case CalendarIntervalType => castToInterval(from)
        case BooleanType => castToBoolean(from)
        case ByteType => castToByte(from)
        case ShortType => castToShort(from)
        case IntegerType => castToInt(from)
        case FloatType => castToFloat(from)
        case LongType => castToLong(from)
        case DoubleType => castToDouble(from)
        case array: ArrayType =>
          castArray(from.asInstanceOf[ArrayType].elementType, array.elementType)
        case map: MapType => castMap(from.asInstanceOf[MapType], map)
        case struct: StructType => castStruct(from.asInstanceOf[StructType], struct)
        case udt: UserDefinedType[_] if udt.acceptsType(from) =>
          identity[Any]
        case _: UserDefinedType[_] =>
          throw QueryExecutionErrors.cannotCastError(from, to)
      }
    }
  }
  // Interpreted cast closure, resolved lazily from the child's type to this
  // expression's target type.
  private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
  // Null inputs are short-circuited by the caller; only non-null values reach here.
  protected override def nullSafeEval(input: Any): Any = cast(input)
  // Codegen entry point: skip code generation entirely for structurally-equal
  // types, matching the identity shortcut in the interpreted `cast` above.
  override def genCode(ctx: CodegenContext): ExprCode = {
    // If the cast does not change the structure, then we don't really need to cast anything.
    // We can return what the children return. Same thing should happen in the interpreted path.
    if (DataType.equalsStructurally(child.dataType, dataType)) {
      child.genCode(ctx)
    } else {
      super.genCode(ctx)
    }
  }
  // Emits the child's code followed by a null-guarded cast of its result into
  // this expression's output variables.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val eval = child.genCode(ctx)
    val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)
    ev.copy(code = eval.code +
      castCode(ctx, eval.value, eval.isNull, ev.value, ev.isNull, dataType, nullSafeCast))
  }
  // The function arguments are: `input`, `result` and `resultIsNull`. We don't need `inputIsNull`
  // in parameter list, because the returned code will be put in null safe evaluation region.
  private[this] type CastFunction = (ExprValue, ExprValue, ExprValue) => Block
  // Codegen analogue of the interpreted `cast(from, to)` dispatcher: returns
  // the code-emitting function for a (from, to) pair. The two dispatch tables
  // must stay in sync.
  private[this] def nullSafeCastFunction(
      from: DataType,
      to: DataType,
      ctx: CodegenContext): CastFunction = to match {
    case _ if from == NullType => (c, evPrim, evNull) => code"$evNull = true;"
    case _ if to == from => (c, evPrim, evNull) => code"$evPrim = $c;"
    case StringType => castToStringCode(from, ctx)
    case BinaryType => castToBinaryCode(from)
    case DateType => castToDateCode(from, ctx)
    case decimal: DecimalType => castToDecimalCode(from, decimal, ctx)
    case TimestampType => castToTimestampCode(from, ctx)
    case CalendarIntervalType => castToIntervalCode(from)
    case BooleanType => castToBooleanCode(from)
    case ByteType => castToByteCode(from, ctx)
    case ShortType => castToShortCode(from, ctx)
    case IntegerType => castToIntCode(from, ctx)
    case FloatType => castToFloatCode(from, ctx)
    case LongType => castToLongCode(from, ctx)
    case DoubleType => castToDoubleCode(from, ctx)
    case array: ArrayType =>
      castArrayCode(from.asInstanceOf[ArrayType].elementType, array.elementType, ctx)
    case map: MapType => castMapCode(from.asInstanceOf[MapType], map, ctx)
    case struct: StructType => castStructCode(from.asInstanceOf[StructType], struct, ctx)
    case udt: UserDefinedType[_] if udt.acceptsType(from) =>
      (c, evPrim, evNull) => code"$evPrim = $c;"
    case _: UserDefinedType[_] =>
      throw QueryExecutionErrors.cannotCastError(from, to)
  }
  // Since we need to cast input expressions recursively inside ComplexTypes, such as Map's
  // Key and Value, Struct's field, we need to name out all the variable names involved in a cast.
  // Wraps a CastFunction's emitted code in the standard null-propagation
  // skeleton: result defaults to the type's default value and the cast body
  // only runs when the input is non-null.
  private[this] def castCode(ctx: CodegenContext, input: ExprValue, inputIsNull: ExprValue,
    result: ExprValue, resultIsNull: ExprValue, resultType: DataType, cast: CastFunction): Block = {
    val javaType = JavaCode.javaType(resultType)
    code"""
      boolean $resultIsNull = $inputIsNull;
      $javaType $result = ${CodeGenerator.defaultValue(resultType)};
      if (!$inputIsNull) {
        ${cast(input, result, resultIsNull)}
      }
    """
  }
  // Emits code appending `s` (typically a "null" placeholder) to the string
  // buffer — unless legacy cast-to-string mode is on, in which case legacy
  // output omits it and we emit nothing.
  private def appendIfNotLegacyCastToStr(buffer: ExprValue, s: String): Block = {
    if (!legacyCastToStr) code"""$buffer.append("$s");""" else EmptyBlock
  }
  // Emits code rendering an array as "[e0, e1, ...]" into `buffer`. Element
  // formatting is delegated to a generated per-element helper function; null
  // elements print as "null" except in legacy mode (see appendIfNotLegacyCastToStr).
  private def writeArrayToStringBuilder(
      et: DataType,
      array: ExprValue,
      buffer: ExprValue,
      ctx: CodegenContext): Block = {
    val elementToStringCode = castToStringCode(et, ctx)
    val funcName = ctx.freshName("elementToString")
    val element = JavaCode.variable("element", et)
    val elementStr = JavaCode.variable("elementStr", StringType)
    val elementToStringFunc = inline"${ctx.addNewFunction(funcName,
      s"""
         |private UTF8String $funcName(${CodeGenerator.javaType(et)} $element) {
         |  UTF8String $elementStr = null;
         |  ${elementToStringCode(element, elementStr, null /* resultIsNull won't be used */)}
         |  return elementStr;
         |}
       """.stripMargin)}"
    val loopIndex = ctx.freshVariable("loopIndex", IntegerType)
    code"""
       |$buffer.append("[");
       |if ($array.numElements() > 0) {
       |  if ($array.isNullAt(0)) {
       |    ${appendIfNotLegacyCastToStr(buffer, "null")}
       |  } else {
       |    $buffer.append($elementToStringFunc(${CodeGenerator.getValue(array, et, "0")}));
       |  }
       |  for (int $loopIndex = 1; $loopIndex < $array.numElements(); $loopIndex++) {
       |    $buffer.append(",");
       |    if ($array.isNullAt($loopIndex)) {
       |      ${appendIfNotLegacyCastToStr(buffer, " null")}
       |    } else {
       |      $buffer.append(" ");
       |      $buffer.append($elementToStringFunc(${CodeGenerator.getValue(array, et, loopIndex)}));
       |    }
       |  }
       |}
       |$buffer.append("]");
     """.stripMargin
  }
  // Emits code rendering a map as "{k0 -> v0, k1 -> v1, ...}" (brackets come
  // from the surrounding leftBracket/rightBracket members) into `buffer`,
  // generating one helper function each for key and value formatting.
  private def writeMapToStringBuilder(
      kt: DataType,
      vt: DataType,
      map: ExprValue,
      buffer: ExprValue,
      ctx: CodegenContext): Block = {

    // Generates and registers a helper converting one key or value to UTF8String.
    def dataToStringFunc(func: String, dataType: DataType) = {
      val funcName = ctx.freshName(func)
      val dataToStringCode = castToStringCode(dataType, ctx)
      val data = JavaCode.variable("data", dataType)
      val dataStr = JavaCode.variable("dataStr", StringType)
      val functionCall = ctx.addNewFunction(funcName,
        s"""
           |private UTF8String $funcName(${CodeGenerator.javaType(dataType)} $data) {
           |  UTF8String $dataStr = null;
           |  ${dataToStringCode(data, dataStr, null /* resultIsNull won't be used */)}
           |  return dataStr;
           |}
         """.stripMargin)
      inline"$functionCall"
    }

    val keyToStringFunc = dataToStringFunc("keyToString", kt)
    val valueToStringFunc = dataToStringFunc("valueToString", vt)
    val loopIndex = ctx.freshVariable("loopIndex", IntegerType)
    val mapKeyArray = JavaCode.expression(s"$map.keyArray()", classOf[ArrayData])
    val mapValueArray = JavaCode.expression(s"$map.valueArray()", classOf[ArrayData])
    val getMapFirstKey = CodeGenerator.getValue(mapKeyArray, kt, JavaCode.literal("0", IntegerType))
    val getMapFirstValue = CodeGenerator.getValue(mapValueArray, vt,
      JavaCode.literal("0", IntegerType))
    val getMapKeyArray = CodeGenerator.getValue(mapKeyArray, kt, loopIndex)
    val getMapValueArray = CodeGenerator.getValue(mapValueArray, vt, loopIndex)
    // Keys are never null; values may be, hence the isNullAt checks on values only.
    code"""
       |$buffer.append("$leftBracket");
       |if ($map.numElements() > 0) {
       |  $buffer.append($keyToStringFunc($getMapFirstKey));
       |  $buffer.append(" ->");
       |  if ($map.valueArray().isNullAt(0)) {
       |    ${appendIfNotLegacyCastToStr(buffer, " null")}
       |  } else {
       |    $buffer.append(" ");
       |    $buffer.append($valueToStringFunc($getMapFirstValue));
       |  }
       |  for (int $loopIndex = 1; $loopIndex < $map.numElements(); $loopIndex++) {
       |    $buffer.append(", ");
       |    $buffer.append($keyToStringFunc($getMapKeyArray));
       |    $buffer.append(" ->");
       |    if ($map.valueArray().isNullAt($loopIndex)) {
       |      ${appendIfNotLegacyCastToStr(buffer, " null")}
       |    } else {
       |      $buffer.append(" ");
       |      $buffer.append($valueToStringFunc($getMapValueArray));
       |    }
       |  }
       |}
       |$buffer.append("$rightBracket");
     """.stripMargin
  }
  // Emits code rendering a struct's fields, comma-separated and bracketed,
  // into `buffer`. Per-field snippets are passed through splitExpressions so
  // very wide structs don't exceed the JVM's 64KB method limit.
  private def writeStructToStringBuilder(
      st: Seq[DataType],
      row: ExprValue,
      buffer: ExprValue,
      ctx: CodegenContext): Block = {
    val structToStringCode = st.zipWithIndex.map { case (ft, i) =>
      val fieldToStringCode = castToStringCode(ft, ctx)
      val field = ctx.freshVariable("field", ft)
      val fieldStr = ctx.freshVariable("fieldStr", StringType)
      val javaType = JavaCode.javaType(ft)
      code"""
         |${if (i != 0) code"""$buffer.append(",");""" else EmptyBlock}
         |if ($row.isNullAt($i)) {
         |  ${appendIfNotLegacyCastToStr(buffer, if (i == 0) "null" else " null")}
         |} else {
         |  ${if (i != 0) code"""$buffer.append(" ");""" else EmptyBlock}
         |
         |  // Append $i field into the string buffer
         |  $javaType $field = ${CodeGenerator.getValue(row, ft, s"$i")};
         |  UTF8String $fieldStr = null;
         |  ${fieldToStringCode(field, fieldStr, null /* resultIsNull won't be used */)}
         |  $buffer.append($fieldStr);
         |}
       """.stripMargin
    }

    val writeStructCode = ctx.splitExpressions(
      expressions = structToStringCode.map(_.code),
      funcName = "fieldToString",
      arguments = ("InternalRow", row.code) ::
        (classOf[UTF8StringBuilder].getName, buffer.code) :: Nil)

    code"""
       |$buffer.append("$leftBracket");
       |$writeStructCode
       |$buffer.append("$rightBracket");
     """.stripMargin
  }
  // Codegen cast to StringType. Dates/timestamps use the expression's
  // formatters (referenced as generated-class globals); complex types build
  // their textual form into a UTF8StringBuilder; everything else falls back to
  // Java String.valueOf.
  private[this] def castToStringCode(from: DataType, ctx: CodegenContext): CastFunction = {
    from match {
      case BinaryType =>
        (c, evPrim, evNull) => code"$evPrim = UTF8String.fromBytes($c);"
      case DateType =>
        val df = JavaCode.global(
          ctx.addReferenceObj("dateFormatter", dateFormatter),
          dateFormatter.getClass)
        (c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString(${df}.format($c));"""
      case TimestampType =>
        val tf = JavaCode.global(
          ctx.addReferenceObj("timestampFormatter", timestampFormatter),
          timestampFormatter.getClass)
        (c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString($tf.format($c));"""
      case CalendarIntervalType =>
        (c, evPrim, _) => code"""$evPrim = UTF8String.fromString($c.toString());"""
      case ArrayType(et, _) =>
        (c, evPrim, evNull) => {
          val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
          val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder])
          val writeArrayElemCode = writeArrayToStringBuilder(et, c, buffer, ctx)
          code"""
             |$bufferClass $buffer = new $bufferClass();
             |$writeArrayElemCode;
             |$evPrim = $buffer.build();
           """.stripMargin
        }
      case MapType(kt, vt, _) =>
        (c, evPrim, evNull) => {
          val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
          val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder])
          val writeMapElemCode = writeMapToStringBuilder(kt, vt, c, buffer, ctx)
          code"""
             |$bufferClass $buffer = new $bufferClass();
             |$writeMapElemCode;
             |$evPrim = $buffer.build();
           """.stripMargin
        }
      case StructType(fields) =>
        (c, evPrim, evNull) => {
          val row = ctx.freshVariable("row", classOf[InternalRow])
          val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
          val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder])
          val writeStructCode = writeStructToStringBuilder(fields.map(_.dataType), row, buffer, ctx)
          code"""
             |InternalRow $row = $c;
             |$bufferClass $buffer = new $bufferClass();
             |$writeStructCode
             |$evPrim = $buffer.build();
           """.stripMargin
        }
      case pudt: PythonUserDefinedType => castToStringCode(pudt.sqlType, ctx)
      case udt: UserDefinedType[_] =>
        // UDTs are deserialized to their user-space object before toString.
        val udtRef = JavaCode.global(ctx.addReferenceObj("udt", udt), udt.sqlType)
        (c, evPrim, evNull) => {
          code"$evPrim = UTF8String.fromString($udtRef.deserialize($c).toString());"
        }
      case _ =>
        (c, evPrim, evNull) => code"$evPrim = UTF8String.fromString(String.valueOf($c));"
    }
  }
  // Codegen cast to BinaryType: strings expose raw bytes, integral values go
  // through NumberConverter.toBinary; the match is intentionally partial.
  private[this] def castToBinaryCode(from: DataType): CastFunction = from match {
    case StringType =>
      (c, evPrim, evNull) =>
        code"$evPrim = $c.getBytes();"
    case _: IntegralType =>
      (c, evPrim, evNull) =>
        // stripSuffix("$") turns the Scala object's class name into the
        // statically-callable Java name.
        code"$evPrim = ${NumberConverter.getClass.getName.stripSuffix("$")}.toBinary($c);"
  }
  // Codegen cast to DateType: string parsing is ANSI-aware (throwing vs
  // null-on-failure), timestamps truncate to days in the session zone, and any
  // other source type produces null.
  private[this] def castToDateCode(
      from: DataType,
      ctx: CodegenContext): CastFunction = {
    // Registers the session ZoneId as a generated-class global.
    def getZoneId() = {
      val zoneIdClass = classOf[ZoneId]
      JavaCode.global(
        ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName),
        zoneIdClass)
    }
    from match {
      case StringType =>
        val intOpt = ctx.freshVariable("intOpt", classOf[Option[Integer]])
        val zid = getZoneId()
        (c, evPrim, evNull) =>
          if (ansiEnabled) {
            code"""
              $evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDateAnsi($c, $zid);
            """
          } else {
            code"""
              scala.Option<Integer> $intOpt =
                org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDate($c, $zid);
              if ($intOpt.isDefined()) {
                $evPrim = ((Integer) $intOpt.get()).intValue();
              } else {
                $evNull = true;
              }
            """
          }
      case TimestampType =>
        val zid = getZoneId()
        (c, evPrim, evNull) =>
          code"""$evPrim =
            org.apache.spark.sql.catalyst.util.DateTimeUtils.microsToDays($c, $zid);"""
      case _ =>
        (c, evPrim, evNull) => code"$evNull = true;"
    }
  }
  // Codegen analogue of the interpreted changePrecision. When the cast is
  // provably lossless (canNullSafeCast) the generated code skips the overflow
  // branch entirely; otherwise overflow sets null or throws per the ANSI flag.
  private[this] def changePrecision(d: ExprValue, decimalType: DecimalType,
      evPrim: ExprValue, evNull: ExprValue, canNullSafeCast: Boolean): Block = {
    if (canNullSafeCast) {
      code"""
         |$d.changePrecision(${decimalType.precision}, ${decimalType.scale});
         |$evPrim = $d;
       """.stripMargin
    } else {
      val overflowCode = if (!ansiEnabled) {
        s"$evNull = true;"
      } else {
        s"""
           |throw QueryExecutionErrors.cannotChangeDecimalPrecisionError(
           |  $d, ${decimalType.precision}, ${decimalType.scale});
         """.stripMargin
      }
      code"""
         |if ($d.changePrecision(${decimalType.precision}, ${decimalType.scale})) {
         |  $evPrim = $d;
         |} else {
         |  $overflowCode
         |}
       """.stripMargin
    }
  }
  // Codegen cast to DecimalType; mirrors the interpreted castToDecimal case by
  // case (string parse, boolean 0/1, null for date, double-precision path for
  // timestamps and fractional types, clone-then-rescale for decimals).
  private[this] def castToDecimalCode(
      from: DataType,
      target: DecimalType,
      ctx: CodegenContext): CastFunction = {
    val tmp = ctx.freshVariable("tmpDecimal", classOf[Decimal])
    val canNullSafeCast = Cast.canNullSafeCastToDecimal(from, target)
    from match {
      case StringType if !ansiEnabled =>
        (c, evPrim, evNull) =>
          code"""
              Decimal $tmp = Decimal.fromString($c);
              if ($tmp == null) {
                $evNull = true;
              } else {
                ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
              }
          """
      case StringType if ansiEnabled =>
        (c, evPrim, evNull) =>
          code"""
              Decimal $tmp = Decimal.fromStringANSI($c);
              ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
          """
      case BooleanType =>
        (c, evPrim, evNull) =>
          code"""
            Decimal $tmp = $c ? Decimal.apply(1) : Decimal.apply(0);
            ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
          """
      case DateType =>
        // date can't cast to decimal in Hive
        (c, evPrim, evNull) => code"$evNull = true;"
      case TimestampType =>
        // Note that we lose precision here.
        (c, evPrim, evNull) =>
          code"""
            Decimal $tmp = Decimal.apply(
              scala.math.BigDecimal.valueOf(${timestampToDoubleCode(c)}));
            ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
          """
      case DecimalType() =>
        // Clone before rescaling: changePrecision mutates in place.
        (c, evPrim, evNull) =>
          code"""
            Decimal $tmp = $c.clone();
            ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
          """
      case x: IntegralType =>
        (c, evPrim, evNull) =>
          code"""
            Decimal $tmp = Decimal.apply((long) $c);
            ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
          """
      case x: FractionalType =>
        // All other numeric types can be represented precisely as Doubles
        (c, evPrim, evNull) =>
          code"""
            try {
              Decimal $tmp = Decimal.apply(scala.math.BigDecimal.valueOf((double) $c));
              ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
            } catch (java.lang.NumberFormatException e) {
              $evNull = true;
            }
          """
    }
  }
  // Codegen cast to TimestampType (microseconds since epoch). Strings are
  // ANSI-aware; booleans map to 0L/1L; integrals/decimals/floats scale seconds
  // to micros; NaN/Infinity floating inputs become null.
  private[this] def castToTimestampCode(
      from: DataType,
      ctx: CodegenContext): CastFunction = from match {
    case StringType =>
      val zoneIdClass = classOf[ZoneId]
      val zid = JavaCode.global(
        ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName),
        zoneIdClass)
      val longOpt = ctx.freshVariable("longOpt", classOf[Option[Long]])
      (c, evPrim, evNull) =>
        if (ansiEnabled) {
          code"""
            $evPrim =
              org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestampAnsi($c, $zid);
          """
        } else {
          code"""
            scala.Option<Long> $longOpt =
              org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestamp($c, $zid);
            if ($longOpt.isDefined()) {
              $evPrim = ((Long) $longOpt.get()).longValue();
            } else {
              $evNull = true;
            }
          """
        }
    case BooleanType =>
      (c, evPrim, evNull) => code"$evPrim = $c ? 1L : 0L;"
    case _: IntegralType =>
      (c, evPrim, evNull) => code"$evPrim = ${longToTimeStampCode(c)};"
    case DateType =>
      val zoneIdClass = classOf[ZoneId]
      val zid = JavaCode.global(
        ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName),
        zoneIdClass)
      (c, evPrim, evNull) =>
        code"""$evPrim =
          org.apache.spark.sql.catalyst.util.DateTimeUtils.daysToMicros($c, $zid);"""
    case DecimalType() =>
      (c, evPrim, evNull) => code"$evPrim = ${decimalToTimestampCode(c)};"
    case DoubleType =>
      (c, evPrim, evNull) =>
        code"""
          if (Double.isNaN($c) || Double.isInfinite($c)) {
            $evNull = true;
          } else {
            $evPrim = (long)($c * $MICROS_PER_SECOND);
          }
        """
    case FloatType =>
      (c, evPrim, evNull) =>
        code"""
          if (Float.isNaN($c) || Float.isInfinite($c)) {
            $evNull = true;
          } else {
            $evPrim = (long)($c * $MICROS_PER_SECOND);
          }
        """
  }
  // Codegen cast to CalendarIntervalType: string-only, with unparsable input
  // becoming null (mirrors the interpreted castToInterval).
  private[this] def castToIntervalCode(from: DataType): CastFunction = from match {
    case StringType =>
      val util = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
      (c, evPrim, evNull) =>
        code"""$evPrim = $util.safeStringToInterval($c);
           if(${evPrim} == null) {
             ${evNull} = true;
           }
         """.stripMargin

  }
  // Micro codegen helpers converting between epoch-seconds-style values and
  // the internal microsecond timestamp representation.
  // Seconds (as Decimal) -> micros, via exact BigDecimal multiplication.
  private[this] def decimalToTimestampCode(d: ExprValue): Block = {
    val block = inline"new java.math.BigDecimal($MICROS_PER_SECOND)"
    code"($d.toBigDecimal().bigDecimal().multiply($block)).longValue()"
  }
  // Seconds (as long) -> micros.
  private[this] def longToTimeStampCode(l: ExprValue): Block = code"$l * (long)$MICROS_PER_SECOND"
  // Micros -> whole seconds; floorDiv rounds toward negative infinity so
  // pre-epoch timestamps truncate consistently.
  private[this] def timestampToLongCode(ts: ExprValue): Block =
    code"java.lang.Math.floorDiv($ts, $MICROS_PER_SECOND)"
  // Micros -> fractional seconds as double.
  private[this] def timestampToDoubleCode(ts: ExprValue): Block =
    code"$ts / (double)$MICROS_PER_SECOND"
  // Codegen cast to BooleanType: strings via StringUtils true/false literal
  // sets (anything else -> null), numerics/timestamps via != 0, date -> null.
  private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
    case StringType =>
      val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}"
      (c, evPrim, evNull) =>
        code"""
          if ($stringUtils.isTrueString($c)) {
            $evPrim = true;
          } else if ($stringUtils.isFalseString($c)) {
            $evPrim = false;
          } else {
            $evNull = true;
          }
        """
    case TimestampType =>
      (c, evPrim, evNull) => code"$evPrim = $c != 0;"
    case DateType =>
      // Hive would return null when cast from date to boolean
      (c, evPrim, evNull) => code"$evNull = true;"
    case DecimalType() =>
      (c, evPrim, evNull) => code"$evPrim = !$c.isZero();"
    case n: NumericType =>
      (c, evPrim, evNull) => code"$evPrim = $c != 0;"
  }
  // Emits timestamp -> integral cast code: seconds value range-checked with an
  // overflow error under ANSI, plain narrowing cast otherwise. `integralType`
  // is the Java primitive name ("byte"/"short"/"int"/"long").
  private[this] def castTimestampToIntegralTypeCode(
      ctx: CodegenContext,
      integralType: String): CastFunction = {
    if (ansiEnabled) {
      val longValue = ctx.freshName("longValue")
      (c, evPrim, evNull) =>
        code"""
          long $longValue = ${timestampToLongCode(c)};
          if ($longValue == ($integralType) $longValue) {
            $evPrim = ($integralType) $longValue;
          } else {
            throw QueryExecutionErrors.castingCauseOverflowError($c, "$integralType");
          }
        """
    } else {
      (c, evPrim, evNull) => code"$evPrim = ($integralType) ${timestampToLongCode(c)};"
    }
  }
  // Emits decimal -> integral cast code: Decimal.roundToX (overflow-checked)
  // under ANSI, Decimal.toX (truncating) otherwise.
  private[this] def castDecimalToIntegralTypeCode(
      ctx: CodegenContext,
      integralType: String): CastFunction = {
    if (ansiEnabled) {
      (c, evPrim, evNull) => code"$evPrim = $c.roundTo${integralType.capitalize}();"
    } else {
      (c, evPrim, evNull) => code"$evPrim = $c.to${integralType.capitalize}();"
    }
  }
  // Emits narrowing integral -> integral cast code for ANSI mode only: the
  // round-trip comparison detects loss and raises the overflow error.
  private[this] def castIntegralTypeToIntegralTypeExactCode(integralType: String): CastFunction = {
    assert(ansiEnabled)
    (c, evPrim, evNull) =>
      code"""
        if ($c == ($integralType) $c) {
          $evPrim = ($integralType) $c;
        } else {
          throw QueryExecutionErrors.castingCauseOverflowError($c, "$integralType");
        }
      """
  }
private[this] def lowerAndUpperBound(integralType: String): (String, String) = {
val (min, max, typeIndicator) = integralType.toLowerCase(Locale.ROOT) match {
case "long" => (Long.MinValue, Long.MaxValue, "L")
case "int" => (Int.MinValue, Int.MaxValue, "")
case "short" => (Short.MinValue, Short.MaxValue, "")
case "byte" => (Byte.MinValue, Byte.MaxValue, "")
}
(min.toString + typeIndicator, max.toString + typeIndicator)
}
  // Emits float/double -> integral cast code for ANSI mode only: a floor/ceil
  // range check guards the narrowing cast and raises overflow otherwise.
  private[this] def castFractionToIntegralTypeCode(integralType: String): CastFunction = {
    assert(ansiEnabled)
    val (min, max) = lowerAndUpperBound(integralType)
    val mathClass = classOf[Math].getName
    // When casting floating values to integral types, Spark uses the method `Numeric.toInt`
    // Or `Numeric.toLong` directly. For positive floating values, it is equivalent to `Math.floor`;
    // for negative floating values, it is equivalent to `Math.ceil`.
    // So, we can use the condition `Math.floor(x) <= upperBound && Math.ceil(x) >= lowerBound`
    // to check if the floating value x is in the range of an integral type after rounding.
    // Note: NaN fails both comparisons, so it also lands in the overflow branch.
    (c, evPrim, evNull) =>
      code"""
        if ($mathClass.floor($c) <= $max && $mathClass.ceil($c) >= $min) {
          $evPrim = ($integralType) $c;
        } else {
          throw QueryExecutionErrors.castingCauseOverflowError($c, "$integralType");
        }
      """
  }
  // Codegen cast to ByteType; the ANSI-mode guards route wider sources through
  // the exact/range-checked helpers above.
  private[this] def castToByteCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
    case StringType if ansiEnabled =>
      (c, evPrim, evNull) => code"$evPrim = $c.toByteExact();"
    case StringType =>
      val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
      (c, evPrim, evNull) =>
        // Wrapper is nulled after use so the reference doesn't linger in the
        // generated method.
        code"""
          UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
          if ($c.toByte($wrapper)) {
            $evPrim = (byte) $wrapper.value;
          } else {
            $evNull = true;
          }
          $wrapper = null;
        """
    case BooleanType =>
      (c, evPrim, evNull) => code"$evPrim = $c ? (byte) 1 : (byte) 0;"
    case DateType =>
      (c, evPrim, evNull) => code"$evNull = true;"
    case TimestampType => castTimestampToIntegralTypeCode(ctx, "byte")
    case DecimalType() => castDecimalToIntegralTypeCode(ctx, "byte")
    case ShortType | IntegerType | LongType if ansiEnabled =>
      castIntegralTypeToIntegralTypeExactCode("byte")
    case FloatType | DoubleType if ansiEnabled =>
      castFractionToIntegralTypeCode("byte")
    case x: NumericType =>
      (c, evPrim, evNull) => code"$evPrim = (byte) $c;"
  }
  // Codegen cast to ShortType; same structure as castToByteCode with the
  // "short" range helpers.
  private[this] def castToShortCode(
      from: DataType,
      ctx: CodegenContext): CastFunction = from match {
    case StringType if ansiEnabled =>
      (c, evPrim, evNull) => code"$evPrim = $c.toShortExact();"
    case StringType =>
      val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
      (c, evPrim, evNull) =>
        code"""
          UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
          if ($c.toShort($wrapper)) {
            $evPrim = (short) $wrapper.value;
          } else {
            $evNull = true;
          }
          $wrapper = null;
        """
    case BooleanType =>
      (c, evPrim, evNull) => code"$evPrim = $c ? (short) 1 : (short) 0;"
    case DateType =>
      (c, evPrim, evNull) => code"$evNull = true;"
    case TimestampType => castTimestampToIntegralTypeCode(ctx, "short")
    case DecimalType() => castDecimalToIntegralTypeCode(ctx, "short")
    case IntegerType | LongType if ansiEnabled =>
      castIntegralTypeToIntegralTypeExactCode("short")
    case FloatType | DoubleType if ansiEnabled =>
      castFractionToIntegralTypeCode("short")
    case x: NumericType =>
      (c, evPrim, evNull) => code"$evPrim = (short) $c;"
  }
  // Codegen cast to IntegerType; only LongType needs the exact narrowing
  // helper under ANSI since Byte/Short widen losslessly.
  private[this] def castToIntCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
    case StringType if ansiEnabled =>
      (c, evPrim, evNull) => code"$evPrim = $c.toIntExact();"
    case StringType =>
      val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
      (c, evPrim, evNull) =>
        code"""
          UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
          if ($c.toInt($wrapper)) {
            $evPrim = $wrapper.value;
          } else {
            $evNull = true;
          }
          $wrapper = null;
        """
    case BooleanType =>
      (c, evPrim, evNull) => code"$evPrim = $c ? 1 : 0;"
    case DateType =>
      (c, evPrim, evNull) => code"$evNull = true;"
    case TimestampType => castTimestampToIntegralTypeCode(ctx, "int")
    case DecimalType() => castDecimalToIntegralTypeCode(ctx, "int")
    case LongType if ansiEnabled => castIntegralTypeToIntegralTypeExactCode("int")
    case FloatType | DoubleType if ansiEnabled =>
      castFractionToIntegralTypeCode("int")
    case x: NumericType =>
      (c, evPrim, evNull) => code"$evPrim = (int) $c;"
  }
  // Codegen cast to LongType; no integral narrowing helper needed since long
  // is the widest integral type.
  private[this] def castToLongCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
    case StringType if ansiEnabled =>
      (c, evPrim, evNull) => code"$evPrim = $c.toLongExact();"
    case StringType =>
      val wrapper = ctx.freshVariable("longWrapper", classOf[UTF8String.LongWrapper])
      (c, evPrim, evNull) =>
        code"""
          UTF8String.LongWrapper $wrapper = new UTF8String.LongWrapper();
          if ($c.toLong($wrapper)) {
            $evPrim = $wrapper.value;
          } else {
            $evNull = true;
          }
          $wrapper = null;
        """
    case BooleanType =>
      (c, evPrim, evNull) => code"$evPrim = $c ? 1L : 0L;"
    case DateType =>
      (c, evPrim, evNull) => code"$evNull = true;"
    case TimestampType =>
      // The (long) cast is redundant (timestampToLongCode already yields long)
      // but harmless in the generated Java.
      (c, evPrim, evNull) => code"$evPrim = (long) ${timestampToLongCode(c)};"
    case DecimalType() => castDecimalToIntegralTypeCode(ctx, "long")
    case FloatType | DoubleType if ansiEnabled =>
      castFractionToIntegralTypeCode("long")
    case x: NumericType =>
      (c, evPrim, evNull) => code"$evPrim = (long) $c;"
  }
  // Codegen cast to FloatType; generated-code twin of the interpreted
  // castToFloat, including the special-literal fallback for strings.
  private[this] def castToFloatCode(from: DataType, ctx: CodegenContext): CastFunction = {
    from match {
      case StringType =>
        val floatStr = ctx.freshVariable("floatStr", StringType)
        (c, evPrim, evNull) =>
          val handleNull = if (ansiEnabled) {
            s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c);"
          } else {
            s"$evNull = true;"
          }
          code"""
          final String $floatStr = $c.toString();
          try {
            $evPrim = Float.valueOf($floatStr);
          } catch (java.lang.NumberFormatException e) {
            final Float f = (Float) Cast.processFloatingPointSpecialLiterals($floatStr, true);
            if (f == null) {
              $handleNull
            } else {
              $evPrim = f.floatValue();
            }
          }
        """
      case BooleanType =>
        (c, evPrim, evNull) => code"$evPrim = $c ? 1.0f : 0.0f;"
      case DateType =>
        (c, evPrim, evNull) => code"$evNull = true;"
      case TimestampType =>
        (c, evPrim, evNull) => code"$evPrim = (float) (${timestampToDoubleCode(c)});"
      case DecimalType() =>
        (c, evPrim, evNull) => code"$evPrim = $c.toFloat();"
      case x: NumericType =>
        (c, evPrim, evNull) => code"$evPrim = (float) $c;"
    }
  }
  /**
   * Returns the codegen function for casting `from` to `double`.
   *
   * Mirrors castToFloatCode: Double.valueOf first, then the special
   * floating-point literals; ANSI mode throws on unparsable strings,
   * otherwise the result is null.
   */
  private[this] def castToDoubleCode(from: DataType, ctx: CodegenContext): CastFunction = {
    from match {
      case StringType =>
        val doubleStr = ctx.freshVariable("doubleStr", StringType)
        (c, evPrim, evNull) =>
          val handleNull = if (ansiEnabled) {
            s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c);"
          } else {
            s"$evNull = true;"
          }
          code"""
          final String $doubleStr = $c.toString();
          try {
            $evPrim = Double.valueOf($doubleStr);
          } catch (java.lang.NumberFormatException e) {
            final Double d = (Double) Cast.processFloatingPointSpecialLiterals($doubleStr, false);
            if (d == null) {
              $handleNull
            } else {
              $evPrim = d.doubleValue();
            }
          }
        """
      case BooleanType =>
        (c, evPrim, evNull) => code"$evPrim = $c ? 1.0d : 0.0d;"
      case DateType =>
        // Date-to-numeric casts are undefined: always null.
        (c, evPrim, evNull) => code"$evNull = true;"
      case TimestampType =>
        (c, evPrim, evNull) => code"$evPrim = ${timestampToDoubleCode(c)};"
      case DecimalType() =>
        (c, evPrim, evNull) => code"$evPrim = $c.toDouble();"
      case x: NumericType =>
        (c, evPrim, evNull) => code"$evPrim = (double) $c;"
    }
  }
  /**
   * Generates code that casts every element of an array from `fromType` to
   * `toType` (null elements pass through as null) and wraps the result in a
   * new GenericArrayData.
   */
  private[this] def castArrayCode(
      fromType: DataType, toType: DataType, ctx: CodegenContext): CastFunction = {
    val elementCast = nullSafeCastFunction(fromType, toType, ctx)
    val arrayClass = JavaCode.javaType(classOf[GenericArrayData])
    // Fresh variable names so nested casts of the same types don't collide
    // in the generated Java source.
    val fromElementNull = ctx.freshVariable("feNull", BooleanType)
    val fromElementPrim = ctx.freshVariable("fePrim", fromType)
    val toElementNull = ctx.freshVariable("teNull", BooleanType)
    val toElementPrim = ctx.freshVariable("tePrim", toType)
    val size = ctx.freshVariable("n", IntegerType)
    val j = ctx.freshVariable("j", IntegerType)
    val values = ctx.freshVariable("values", classOf[Array[Object]])
    val javaType = JavaCode.javaType(fromType)

    (c, evPrim, evNull) =>
      code"""
        final int $size = $c.numElements();
        final Object[] $values = new Object[$size];
        for (int $j = 0; $j < $size; $j ++) {
          if ($c.isNullAt($j)) {
            $values[$j] = null;
          } else {
            boolean $fromElementNull = false;
            $javaType $fromElementPrim =
              ${CodeGenerator.getValue(c, fromType, j)};
            ${castCode(ctx, fromElementPrim,
              fromElementNull, toElementPrim, toElementNull, toType, elementCast)}
            if ($toElementNull) {
              $values[$j] = null;
            } else {
              $values[$j] = $toElementPrim;
            }
          }
        }
        $evPrim = new $arrayClass($values);
      """
  }
  /**
   * Generates code casting a map by casting its key array and value array
   * separately (reusing castArrayCode) and rebuilding an ArrayBasedMapData.
   */
  private[this] def castMapCode(from: MapType, to: MapType, ctx: CodegenContext): CastFunction = {
    val keysCast = castArrayCode(from.keyType, to.keyType, ctx)
    val valuesCast = castArrayCode(from.valueType, to.valueType, ctx)
    val mapClass = JavaCode.javaType(classOf[ArrayBasedMapData])
    val keys = ctx.freshVariable("keys", ArrayType(from.keyType))
    val convertedKeys = ctx.freshVariable("convertedKeys", ArrayType(to.keyType))
    val convertedKeysNull = ctx.freshVariable("convertedKeysNull", BooleanType)
    val values = ctx.freshVariable("values", ArrayType(from.valueType))
    val convertedValues = ctx.freshVariable("convertedValues", ArrayType(to.valueType))
    val convertedValuesNull = ctx.freshVariable("convertedValuesNull", BooleanType)

    (c, evPrim, evNull) =>
      // FalseLiteral null-flags: the key/value arrays of a map are never null.
      code"""
        final ArrayData $keys = $c.keyArray();
        final ArrayData $values = $c.valueArray();
        ${castCode(ctx, keys, FalseLiteral,
          convertedKeys, convertedKeysNull, ArrayType(to.keyType), keysCast)}
        ${castCode(ctx, values, FalseLiteral,
          convertedValues, convertedValuesNull, ArrayType(to.valueType), valuesCast)}

        $evPrim = new $mapClass($convertedKeys, $convertedValues);
      """
  }
  /**
   * Generates code casting a struct field-by-field into a fresh
   * GenericInternalRow. A null source field stays null; a field cast that
   * produces null sets null in the result. The per-field snippets are handed
   * to ctx.splitExpressions, which may split them into helper methods.
   */
  private[this] def castStructCode(
      from: StructType, to: StructType, ctx: CodegenContext): CastFunction = {

    val fieldsCasts = from.fields.zip(to.fields).map {
      case (fromField, toField) => nullSafeCastFunction(fromField.dataType, toField.dataType, ctx)
    }
    val tmpResult = ctx.freshVariable("tmpResult", classOf[GenericInternalRow])
    val rowClass = JavaCode.javaType(classOf[GenericInternalRow])
    val tmpInput = ctx.freshVariable("tmpInput", classOf[InternalRow])

    val fieldsEvalCode = fieldsCasts.zipWithIndex.map { case (cast, i) =>
      val fromFieldPrim = ctx.freshVariable("ffp", from.fields(i).dataType)
      val fromFieldNull = ctx.freshVariable("ffn", BooleanType)
      val toFieldPrim = ctx.freshVariable("tfp", to.fields(i).dataType)
      val toFieldNull = ctx.freshVariable("tfn", BooleanType)
      val fromType = JavaCode.javaType(from.fields(i).dataType)
      val setColumn = CodeGenerator.setColumn(tmpResult, to.fields(i).dataType, i, toFieldPrim)
      code"""
        boolean $fromFieldNull = $tmpInput.isNullAt($i);
        if ($fromFieldNull) {
          $tmpResult.setNullAt($i);
        } else {
          $fromType $fromFieldPrim =
            ${CodeGenerator.getValue(tmpInput, from.fields(i).dataType, i.toString)};
          ${castCode(ctx, fromFieldPrim,
            fromFieldNull, toFieldPrim, toFieldNull, to.fields(i).dataType, cast)}
          if ($toFieldNull) {
            $tmpResult.setNullAt($i);
          } else {
            $setColumn;
          }
        }
       """
    }
    val fieldsEvalCodes = ctx.splitExpressions(
      expressions = fieldsEvalCode.map(_.code),
      funcName = "castStruct",
      arguments = ("InternalRow", tmpInput.code) :: (rowClass.code, tmpResult.code) :: Nil)

    (input, result, resultIsNull) =>
      code"""
        final $rowClass $tmpResult = new $rowClass(${fieldsCasts.length});
        final InternalRow $tmpInput = $input;
        $fieldsEvalCodes
        $result = $tmpResult;
      """
  }
override def sql: String = dataType match {
// HiveQL doesn't allow casting to complex types. For logical plans translated from HiveQL, this
// type of casting can only be introduced by the analyzer, and can be omitted when converting
// back to SQL query string.
case _: ArrayType | _: MapType | _: StructType => child.sql
case _ => s"CAST(${child.sql} AS ${dataType.sql})"
}
}
/**
* Cast the child expression to the target data type.
*
* When cast from/to timezone related types, we need timeZoneId, which will be resolved with
* session local timezone by an analyzer [[ResolveTimeZone]].
*/
@ExpressionDescription(
  usage = "_FUNC_(expr AS type) - Casts the value `expr` to the target data type `type`.",
  examples = """
    Examples:
      > SELECT _FUNC_('10' as int);
       10
  """,
  since = "1.0.0",
  group = "conversion_funcs")
case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None)
  extends CastBase {

  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
    copy(timeZoneId = Option(timeZoneId))

  // Captured once at construction from the active session conf, so this
  // expression's semantics are fixed even if the conf changes afterwards.
  override protected val ansiEnabled: Boolean = SQLConf.get.ansiEnabled

  // ANSI mode restricts allowed casts to AnsiCast's (stricter) rule set.
  override def canCast(from: DataType, to: DataType): Boolean = if (ansiEnabled) {
    AnsiCast.canCast(from, to)
  } else {
    Cast.canCast(from, to)
  }

  // In ANSI mode the message suggests disabling spark.sql.ansi.enabled as a fallback.
  override def typeCheckFailureMessage: String = if (ansiEnabled) {
    AnsiCast.typeCheckFailureMessage(child.dataType, dataType, SQLConf.ANSI_ENABLED.key, "false")
  } else {
    s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}"
  }
}
/**
* Cast the child expression to the target data type as per ANSI SQL standard.
* A runtime exception will be thrown on casting failure such as converting an out-of-range value
* to an integral type.
*
* When cast from/to timezone related types, we need timeZoneId, which will be resolved with
* session local timezone by an analyzer [[ResolveTimeZone]].
*/
case class AnsiCast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None)
  extends CastBase {

  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
    copy(timeZoneId = Option(timeZoneId))

  // Unlike Cast, ANSI semantics are unconditional for this expression.
  override protected val ansiEnabled: Boolean = true

  override def canCast(from: DataType, to: DataType): Boolean = AnsiCast.canCast(from, to)

  // For now, this expression is only used in table insertion.
  // If there are more scenarios for this expression, we should update the error message on type
  // check failure.
  override def typeCheckFailureMessage: String =
    AnsiCast.typeCheckFailureMessage(child.dataType, dataType,
      SQLConf.STORE_ASSIGNMENT_POLICY.key, SQLConf.StoreAssignmentPolicy.LEGACY.toString)
}
object AnsiCast {
  /**
   * As per section 6.13 "cast specification" in "Information technology — Database languages " +
   * "- SQL — Part 2: Foundation (SQL/Foundation)":
   * If the <cast operand> is a <value expression>, then the valid combinations of TD and SD
   * in a <cast specification> are given by the following table. “Y” indicates that the
   * combination is syntactically valid without restriction; “M” indicates that the combination
   * is valid subject to other Syntax Rules in this Sub- clause being satisfied; and “N” indicates
   * that the combination is not valid:
   * SD TD
   * EN AN C D T TS YM DT BO UDT B RT CT RW
   * EN Y Y Y N N N M M N M N M N N
   * AN Y Y Y N N N N N N M N M N N
   * C Y Y Y Y Y Y Y Y Y M N M N N
   * D N N Y Y N Y N N N M N M N N
   * T N N Y N Y Y N N N M N M N N
   * TS N N Y Y Y Y N N N M N M N N
   * YM M N Y N N N Y N N M N M N N
   * DT M N Y N N N N Y N M N M N N
   * BO N N Y N N N N N Y M N M N N
   * UDT M M M M M M M M M M M M M N
   * B N N N N N N N N N M Y M N N
   * RT M M M M M M M M M M M M N N
   * CT N N N N N N N N N M N N M N
   * RW N N N N N N N N N N N N N M
   *
   * Where:
   * EN = Exact Numeric
   * AN = Approximate Numeric
   * C = Character (Fixed- or Variable-Length, or Character Large Object)
   * D = Date
   * T = Time
   * TS = Timestamp
   * YM = Year-Month Interval
   * DT = Day-Time Interval
   * BO = Boolean
   * UDT = User-Defined Type
   * B = Binary (Fixed- or Variable-Length or Binary Large Object)
   * RT = Reference type
   * CT = Collection type
   * RW = Row type
   *
   * Spark's ANSI mode follows the syntax rules, except it specially allow the following
   * straightforward type conversions which are disallowed as per the SQL standard:
   *   - Numeric <=> Boolean
   *   - String <=> Binary
   */
  def canCast(from: DataType, to: DataType): Boolean = (from, to) match {
    // Identity cast is always allowed.
    case (fromType, toType) if fromType == toType => true

    case (NullType, _) => true

    // Spark extension to the standard: String <=> Binary.
    case (StringType, _: BinaryType) => true

    case (StringType, BooleanType) => true
    // Spark extension to the standard: Numeric <=> Boolean.
    case (_: NumericType, BooleanType) => true

    case (StringType, TimestampType) => true
    case (DateType, TimestampType) => true

    case (StringType, _: CalendarIntervalType) => true

    case (StringType, DateType) => true
    case (TimestampType, DateType) => true

    case (_: NumericType, _: NumericType) => true
    case (StringType, _: NumericType) => true
    case (BooleanType, _: NumericType) => true

    case (_: NumericType, StringType) => true
    case (_: DateType, StringType) => true
    case (_: TimestampType, StringType) => true
    case (_: CalendarIntervalType, StringType) => true
    case (BooleanType, StringType) => true
    case (BinaryType, StringType) => true

    // Element cast must be legal, and must not force nulls the target forbids.
    case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
      canCast(fromType, toType) &&
        resolvableNullability(fn || forceNullable(fromType, toType), tn)

    // Map keys may never become nullable through the cast.
    case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
      canCast(fromKey, toKey) &&
        (!forceNullable(fromKey, toKey)) &&
        canCast(fromValue, toValue) &&
        resolvableNullability(fn || forceNullable(fromValue, toValue), tn)

    // Structs cast field-by-field; arity must match exactly.
    case (StructType(fromFields), StructType(toFields)) =>
      fromFields.length == toFields.length &&
        fromFields.zip(toFields).forall {
          case (fromField, toField) =>
            canCast(fromField.dataType, toField.dataType) &&
              resolvableNullability(
                fromField.nullable || forceNullable(fromField.dataType, toField.dataType),
                toField.nullable)
        }

    case (udt1: UserDefinedType[_], udt2: UserDefinedType[_]) if udt2.acceptsType(udt1) => true

    case _ => false
  }

  // Show suggestion on how to complete the disallowed explicit casting with built-in type
  // conversion functions.
  private def suggestionOnConversionFunctions (
      from: DataType,
      to: DataType,
      functionNames: String): String = {
    // scalastyle:off line.size.limit
    s"""cannot cast ${from.catalogString} to ${to.catalogString}.
       |To convert values from ${from.catalogString} to ${to.catalogString}, you can use $functionNames instead.
       |""".stripMargin
    // scalastyle:on line.size.limit
  }

  /**
   * Error message for a cast rejected under ANSI rules, including the
   * conversion function or fallback configuration the user can apply.
   */
  def typeCheckFailureMessage(
      from: DataType,
      to: DataType,
      fallbackConfKey: String,
      fallbackConfValue: String): String =
    (from, to) match {
      case (_: NumericType, TimestampType) =>
        suggestionOnConversionFunctions(from, to,
          "functions TIMESTAMP_SECONDS/TIMESTAMP_MILLIS/TIMESTAMP_MICROS")

      case (TimestampType, _: NumericType) =>
        suggestionOnConversionFunctions(from, to, "functions UNIX_SECONDS/UNIX_MILLIS/UNIX_MICROS")

      case (_: NumericType, DateType) =>
        suggestionOnConversionFunctions(from, to, "function DATE_FROM_UNIX_DATE")

      case (DateType, _: NumericType) =>
        suggestionOnConversionFunctions(from, to, "function UNIX_DATE")

      // scalastyle:off line.size.limit
      case (_: ArrayType, StringType) =>
        s"""
           | cannot cast ${from.catalogString} to ${to.catalogString} with ANSI mode on.
           | If you have to cast ${from.catalogString} to ${to.catalogString}, you can use the function ARRAY_JOIN or set $fallbackConfKey as $fallbackConfValue.
           |""".stripMargin

      // Legal under legacy rules: point at the fallback configuration.
      case _ if Cast.canCast(from, to) =>
        s"""
           | cannot cast ${from.catalogString} to ${to.catalogString} with ANSI mode on.
           | If you have to cast ${from.catalogString} to ${to.catalogString}, you can set $fallbackConfKey as $fallbackConfValue.
           |""".stripMargin

      case _ => s"cannot cast ${from.catalogString} to ${to.catalogString}"
      // scalastyle:on line.size.limit
    }
}
/**
* Cast the child expression to the target data type, but will throw error if the cast might
* truncate, e.g. long -> int, timestamp -> data.
*
* Note: `target` is `AbstractDataType`, so that we can put `object DecimalType`, which means
* we accept `DecimalType` with any valid precision/scale.
*/
case class UpCast(child: Expression, target: AbstractDataType, walkedTypePath: Seq[String] = Nil)
  extends UnaryExpression with Unevaluable {
  // Never resolved: combined with Unevaluable, this node must be rewritten
  // (presumably by the analyzer — see the scaladoc above) before execution.
  override lazy val resolved = false

  def dataType: DataType = target match {
    // `object DecimalType` means "any decimal"; report the system default precision/scale.
    case DecimalType => DecimalType.SYSTEM_DEFAULT
    case _ => target.asInstanceOf[DataType]
  }
}
| witgo/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala | Scala | apache-2.0 | 77,307 |
package org.joda.time.format
import org.joda.time.DateTime
/**
 * Minimal stand-in for Joda-Time's DateTimeFormatter: it only carries the
 * pattern string and delegates both parsing and printing to [[DateTime]].
 */
class DateTimeFormatter(val pattern:String) {
  // Parsing is delegated to DateTime.parse, which receives this formatter
  // (and therefore the pattern) as an argument.
  def parseDateTime(text: String): DateTime = DateTime.parse(text, this)
  // Printing delegates to DateTime's formatter-aware toString overload.
  def print(instant: DateTime): String = instant.toString(this)
}
| easel/play-json-extra | scalajs-joda-time/src/main/scala/org/joda/time/format/DateTimeFormatter.scala | Scala | apache-2.0 | 247 |
package fpinscala.parsing
import java.util.regex._
import scala.util.matching.Regex
import fpinscala.testing._
import fpinscala.testing.Prop
import scala.language.higherKinds
import scala.language.implicitConversions
import fpinscala.monads.Monad
// object Parser {
// type Parser[+A] = Location => Result[A]
// }
/*
All Parsers in this example take a String as input. The parametric type A is the type "measured": a count, a string, a char.
Parser[Int] that counts the number of chars "x" will require a Parser[Char] to function.
*/
// parametric type ParseError no longer needed -- made concrete
//trait Parsers[ParseError, Parser[+_]] { self => // so
// Keep parametric type Parser in Parsers signature -- part of lesson
/**
 * Algebra of parser combinators (FP in Scala, chapter 9). The representation
 * `Parser[+_]` stays abstract: concrete implementations supply the primitives
 * (`string`, `regex`, `run`, `flatMap`, `or`, `slice`, `label`, ...) and
 * inherit the derived combinators defined here.
 */
trait Parsers[Parser[+_]] { self => // so inner inner classes may call methods of trait

  implicit def string(s: String): Parser[String]

  // not an implicit convesion String => Regex?
  // Regex => Parser[String]??
  //implicit def regex(r: Regex): Parser[String]

  // connects methods below to instances of Parser
  implicit def operators[A](p: Parser[A]): ParserOps[A] = ParserOps[A](p)
  implicit def asStringParser[A](a: A)(
    implicit f: A => Parser[String]): ParserOps[String] =
    ParserOps(f(a))

  implicit def regex(r: Regex): Parser[String]

  // Monad instance built from this algebra; map/listOfN delegate to it.
  val parserMonad: Monad[Parser] = Monad.parserMonad(this)
  //val parserMonad = Monad.parserMonad[Parser](self)

  // trait Result[+A]
  // case class Success[+A](get: A, charsConsumed: Int) extends
  //   Result[A]
  // case class Failure(get: ParseError) extends
  //   Result[Nothing]

  // from 9.6.2
  /* Parsers has knowledge of Result, but not of Location
  So concrete type Parser is restricted to Something => Result
  In other words, all Parsers use Result, but not all Parsers use Location.
  */
  import Parsers.Result

  // Primitive: run `p` against `input` from the beginning.
  def run[A](p: Parser[A])(input: String): Result[A]

  // Primitive: sequencing where the second parser may depend on the first's value.
  def flatMap[A,B](p: Parser[A])(f: A=>Parser[B]): Parser[B]

  // Primitive: alternation; `p2` is by-name so it is only built if needed.
  def or[A](p1: Parser[A], p2: => Parser[A]): Parser[A]

  def map[A,B](p: Parser[A])(f: A=>B): Parser[B] =
    parserMonad.map(p)(f)
  // {
  //   // verify that 'succeed' serves as 'unit'
  //   val g: A => Parser[B] = (a: A) => succeed(f(a))
  //   p.flatMap(g)
  // }

  // def map2[A,B,C](p: Parser[A], p2: => Parser[B])(f: (A,=>B)=>C): Parser[C] =
  //   parserMonad.map2(p, p2)(f)

  // Derived from flatMap + map; second parser evaluated only after the first succeeds.
  def map2[A,B,C](parserA: Parser[A], parserB: => Parser[B])(f: (A,=>B)=>C): Parser[C] =
    flatMap(parserA) { a =>
      map(parserB) { b => f(a, b) }
    }

  // def product[A,B](p: Parser[A], p2: => Parser[B]): Parser[(A,B)] =
  //   parserMonad.product(p, p2)

  def product[A,B](parserA: Parser[A], parserB: => Parser[B]): Parser[(A,B)] = {
    def f(a: A, b: => B): Tuple2[A,B] = (a, b)
    map2(parserA, parserB)(f)
  }

  // akin to unit?
  def succeed[A](a: A): Parser[A] =
    string("").map((s: String) => a)

  // parser returned recognized char c
  def char(c: Char): Parser[Char] =
    string(c.toString).map((s: String)=>s.charAt(0))

  // parser returned recognizes string s
  //def string(s: String): Parser[String]

  // equivalent to * (zero or more repetitions of p)
  def many[A](p: Parser[A]): Parser[List[A]] = {
    val empty: Parser[List[A]] = succeed(List[A]())
    val combined: Parser[List[A]] = many1(p).or(empty)
    combined
  }

  /*
  With many1 , we can now implement the parser for zero or more 'a' followed by one or more 'b' as follows:
  char('a').many.slice.map(_.size) ** char('b').many1.slice.map(_.size)
  */
  // One or more repetitions of p.
  def many1[A](p: Parser[A]): Parser[List[A]] = {
    def combiner(a: A, la: => List[A]): List[A] = a::la
    map2(p,many(p))(combiner)
  }
  // section 9.2.1 infers many1 is to be implemented with product
  // product(p, many(p))

  // "see what portion of the input string" is examined
  // need an example...
  def slice[A](p: Parser[A]): Parser[String]

  //def scope[A]
  def label[A](msg: String)(p: Parser[A]): Parser[A]

  def errorLocation(e: ParseError): Option[Location]
  def errorMessage(e: ParseError): Option[String]

  // parser returned recognizes either p1 or p2
  /*
  run(or(string("abra"),string("cadabra")))("abra") == Right("abra")
  run(or(string("abra"),string("cadabra")))("cadabra") == Right("cadabra")
  */
  // use map2 and succeed
  /*
  run(listOfN(3, "ab" | "cad"))("ababcad") == Right(List("ab","ab","cad"))
  run(listOfN(3, "ab" | "cad"))("cadabab") == Right(List("cad","ab","ab"))
  run(listOfN(3, "ab" | "cad"))("ababab") == Right(List("ab","ab","ab"))
  */
  // Exactly n repetitions of p, via the monad's replicateM.
  def listOfN[A](n: Int, p: Parser[A]): Parser[List[A]] =
    parserMonad.replicateM(n, p)
  // {
  //   if(n<=1){
  //     p.map((a: A)=>List(a))
  //   } else {
  //     val pla: Parser[List[A]] = listOfN(n-1, p)
  //     p.map2(pla)((a: A, la: List[A]) => a::la)
  //   }
  // }

  // from answers
  /** A parser that succeeds when given empty input. */
  def eof: Parser[String] =
    regex("\\z".r)

  // from answers
  /** The root of the grammar, expects no further input following `p`. */
  def root[A](p: Parser[A]): Parser[A] =
    skipR(p, eof)

  // from answers
  /** Sequences two parsers, ignoring the result of the first.
    * We wrap the ignored half in slice, since we don't care about its result. */
  def skipL[B](p: Parser[Any], p2: => Parser[B]): Parser[B] =
    map2(slice(p), p2)((_,b) => b)

  // from answers
  /** Sequences two parsers, ignoring the result of the second.
    * We wrap the ignored half in slice, since we don't care about its result. */
  def skipR[A](p: Parser[A], p2: => Parser[Any]): Parser[A] =
    map2(p, slice(p2))((a,b) => a)

  // [error] Note: implicit method operators is not applicable here because it comes after the application point and it lacks an explicit result type
  // implicit def string(s: String): Parser[String]
  // implicit def operators[A](p: Parser[A]) = ParserOps[A](p)
  // implicit def asStringParser[A](a: A)(
  //   implicit f: A => Parser[String]): ParserOps[String] =
  //   ParserOps(f(a))

  /** Infix syntax for Parser[A], enabled by the `operators` implicit conversion. */
  case class ParserOps[A](p: Parser[A]) {
    def map[B](f: A=>B): Parser[B] = self.map(p)(f)
    def map2[B,C](p2: Parser[B])(f: (A, =>B)=>C): Parser[C] =
      self.map2(p, p2)(f)
    def flatMap[B](f: A => Parser[B]): Parser[B] =
      self.flatMap(p)(f)
    def product[B](p2: Parser[B]): Parser[(A,B)] =
      self.product(p, p2)
    def **[B](p2: Parser[B]): Parser[(A,B)] = this.product(p2)
    def many: Parser[List[A]] = self.many(p)
    def slice: Parser[String] = self.slice(p)
    def or[B>:A](p2: => Parser[B]): Parser[B] = self.or(p, p2)
    def |[B>:A](p2: => Parser[B]): Parser[B] = this.or(p2)
    // We expect that, for instance,
    // run(numA)("aaa") gives Right(3) ,
    // and run(numA)("b") gives Right(0) .
    // val numberOfA: Parser[Int] = char('a').many.map(
    //   (s: String) => s.size)
    // val numberOfA: Parser[Int] = char('a').many.slice.map(
    //   (s: String) => s.size)
  }

  /** Algebraic laws the combinators should satisfy, expressed as ScalaCheck-style properties. */
  object Laws {
    /*
    These laws test the concrete type of Parser,
    the concrete implementation of trait Parsers,
    and the various instances of Parser[A]: Parser[String],
    Parser[Int], etc.
    What about Parsers[JSON]?
    */
    //run(char(c))(c.toString) == Right(c)
    // map(p)(a => a) == p
    def equal[A](p1: Parser[A], p2: Parser[A])(in: Gen[String]): Prop =
      Prop.forAll(in)((s: String) => run(p1)(s) == run(p2)(s))
    def mapLaw[A](p: Parser[A])(in: Gen[String]): Prop =
      equal(p, p.map(a => a))(in)

    // run(succeed(a))(s) == Right(s)
    def succeedLaw[A](genString: Gen[String], genA: Gen[A]): Prop = {
      val genStringAndA: Gen[(String, A)] =
        genString.**(genA)
      Prop.forAll(genStringAndA)((tup: (String, A)) => {
        val string: String = tup._1
        val a: A = tup._2
        val sucA: Parser[A] = succeed(a)
        run(sucA)(string) == Right(a)
      }
      )
    }
    // listing 9.2
    def labelLaw[A](p: Parser[A], inputs: Gen[String]): Prop =
      Prop.forAll(inputs.product(Gen.string)) { case (input, msg) => {
        // ^ make explicit
        // http://stackoverflow.com/questions/754166/how-is-pattern-matching-in-scala-implemented-at-the-bytecode-level
        /*
        While type Parser is still abstract, we have restricted its
        concrete implementations to returning a Result (so it's only
        partially abstract...)
        */
        import Parsers.Failure
        val resultA: Result[A] = run(label(msg)(p))(input)
        resultA match {
          case Failure(parseErr, optionLabel) => {
            // check embedded error message equals generated
            // error message. Failed Parser is intentional.
            // NOTE(review): errorMessage returns Option[String], so comparing
            // it to msg (a String) is always false — confirm intended.
            errorMessage(parseErr) == msg
          }
          case _ => true
        }
      }
      }
    // check the behavior of product ---> Monad Laws
    // def productLaw[A,B](pa: Parser[A], pb: Parser[B])(
    //   in: Gen[String]): Prop = {
    //   val pab: Parser[(A,B)] = pa.product(pb)
    //   /*
    //   What needs to be shown for Parser[(A,B)]
    //   and Parser[A], Parser[B]?
    //   */
    // }
  }
}
object Parsers {
// section 9.6.1
// replaced by section 9.6.2
// type Parser[+A] = String => Either[ParseError, A]
  /** Outcome of running a [[LocationResultParser]] at some input [[Location]]. */
  trait Result[+A]
  /** Successful parse: `get` is the value, `charsConsumed` how far the input advanced. */
  case class Success[+A](get: A, charsConsumed: Int) extends
    Result[A]
  /** Failed parse: `get` accumulates error data; `failLabel` is an optional label set via `label`. */
  case class Failure(get: ParseError,
    failLabel: Option[String] = None) extends Result[Nothing]

  /** Concrete parser representation: a function from input position to Result. */
  type LocationResultParser[+A] = Location => Result[A]
/*
Parametric type Err no longer gives any benefit in signature
of Parsers. The concrete type of Err is now baked into the
concrete type of Parser. We are not limited in what
concrete Err type we can use, then.
*/
object SimpleParser extends Parsers[LocationResultParser]{
implicit def string(s: String): LocationResultParser[String] =
(in: Location) => {
val strIn: String = in.currentLine
println("string loc: "+in)
if(strIn.startsWith(s))
Success(strIn, s.length) // why is strIn.length necessary?
else Failure(
ParseError(
List((in, strIn))
)
)
}
implicit def regex(r: Regex): LocationResultParser[String] =
string(r.regex) // sure???
def run[A](p: LocationResultParser[A])(input: String): Result[A] =
p(Location(input))
// def flatMap[A,B](p: LocationResultParser[A])(f: A=>LocationResultParser[B]): LocationResultParser[B] =
// (locIn: Location) => {
// val resultA: Result[A] = p(locIn)
// //val resultB: Result[B] = f(resultA)
// // not Result[B]! Parser[B].
// val parserB: LocationResultParser[B] = resultA match {
// case Success(a: A, charsConsumed: Int) => f(a)
// case Failure(err: ParseError) =>
// (failLoc: Location) => Failure(err: ParseError)
// }
// parserB(locIn) // sure the same Location is used twice?
// }
// improved flatMap from Listing 9.3
def flatMap[A,B](lrpa: LocationResultParser[A])(
alrpb: A => LocationResultParser[B]): LocationResultParser[B] =
(locIn: Location) => {
println("flatMap loc: "+locIn)
lrpa(locIn) match {
case Success(a: A, charsConsumed: Int) => {
val parserB: LocationResultParser[B] = alrpb(a)
val advancedLocation: Location =
locIn.advanceBy(charsConsumed)
println("advanced location: "+advancedLocation)
val resultB: Result[B] = parserB(advancedLocation)
resultB
}
case fail@Failure(_,_) => fail
}
}
def or[A](p1: LocationResultParser[A], p2: => LocationResultParser[A]): LocationResultParser[A] =
(locIn: Location) => {
val result1: Result[A] = p1(locIn)
lazy val result2: Result[A] = p2(locIn)
result1 match {
case suc1: Success[A] => suc1
case Failure(err1: ParseError, optionLabel1) => result2 match {
case suc2: Success[A] => suc2
case Failure(err2: ParseError, optionLabel2) => {
val combinedErr: ParseError =
ParseError(err1.stack,
err2 :: err1.otherFailures)
val combinedFailure: Failure = Failure(combinedErr)
combinedFailure
}
}
}
}
// returns first error only -- should be improved!
// Shouldn't need to return a non-existent location
// def errorLocation(e: ParseError): Location =
// e.stack match {
// case List(firstTup, tail) => firstTup._1
// case Nil => Location("")
// }
// def errorMessage(e: ParseError): String =
// e.stack match {
// case List(firstTup, tail) => firstTup._2
// case Nil => "no error; parse error is empty"
// }
def errorLocation(e: ParseError): Option[Location] =
e.stack match {
case List(firstTup, tail) => Some(firstTup._1)
case Nil => None
}
def errorMessage(e: ParseError): Option[String] =
e.stack match {
case List(firstTup, tail) => Some(firstTup._2)
case Nil => None
}
// label shows up if p: Parser fails
def label[A](msg: String)(p: LocationResultParser[A]):
LocationResultParser[A] =
(loc: Location) => {
val result0: Result[A] = p(loc)
val result1: Result[A] = result0 match {
case suc@Success(_,_) => suc
case Failure(parseErr, priorOptionMsg) =>
Failure(parseErr, Some(msg))
}
result1
}
/*
Return the portion of the input string examined by the parser.
*/
def slice[A](p: LocationResultParser[A]):
LocationResultParser[String] =
(loc: Location) => {
val result0: Result[A] = p(loc)
val result1: Result[String] = result0 match {
case suc@Success(a, charsConsumed) => {
val inputSlice: String =
loc.input.substring(charsConsumed)
Success(inputSlice, charsConsumed)
}
case fail@Failure(_,_) => fail
}
result1
}
}
}
/**
 * A position within `input`: `offset` characters have already been consumed.
 * Fix: the leftover debug printlns are removed — constructing a Location
 * must not write to stdout.
 */
case class Location(input: String, offset: Int = 0) {
  // 1-based line number of the current offset.
  lazy val line = input.slice(0, offset + 1).count(_ == '\n') + 1
  // Distance from the previous newline within the consumed prefix
  // (-1 when the offset is still on the first line).
  lazy val col = input.slice(0, offset + 1).reverse.indexOf('\n')

  /** Wraps `msg` into a single-entry ParseError anchored at this location. */
  def toError(msg: String): ParseError =
    ParseError(List((this, msg)))

  /** Advances the position by `n` consumed characters. */
  def advanceBy(n: Int): Location = copy(offset = offset + n)

  /* Returns the line corresponding to this location */
  def currentLine: String =
    if (input.length > 1) input.lines.drop(line - 1).next
    else ""
}
/**
 * Accumulated parse-failure information: `stack` pairs each failing location
 * with a message, and `otherFailures` collects the errors of alternative
 * branches tried by `or`.
 */
case class ParseError(
    stack: List[(Location, String)] = List(),
    otherFailures: List[ParseError] = List())
/**
 * Runnable demo exercising SimpleParser on small inputs, printing each result.
 * NOTE(review): uses the App trait (delayed initialization); a plain main
 * method would avoid initialization-order pitfalls.
 */
object SimpleParserExample extends App {
  import Parsers._

  // Literal-string parsers used throughout the demo.
  val detectFoo: LocationResultParser[String] = SimpleParser.string("foo")
  val detectBar: LocationResultParser[String] = SimpleParser.string("bar")
  val detectWord: LocationResultParser[String] = SimpleParser.string("word")
  // val operators = SimpleParser.operators(LocationResultParser[String])

  println("document")
  val document = "foobar"
  println(document)

  println("parse 'foo'")
  val resultFoo: Result[String] = SimpleParser.run(detectFoo)(document)
  println(resultFoo)

  println("parse 'bar'")
  // Expected to fail: "bar" is not a prefix of "foobar".
  val resultBar: Result[String] = SimpleParser.run(detectBar)(document)
  println(resultBar)

  println("parse 'word'")
  // Expected to fail: "word" is not a prefix of "foobar".
  val resultWord: Result[String] = SimpleParser.run(detectWord)(document)
  println(resultWord)

  val documentFoo = "foo"
  val detectFooBar = SimpleParser.product(detectFoo, detectBar)
  val resultFoo2: Result[String] = SimpleParser.run(detectFoo)(documentFoo)
  println(resultFoo2)

  println("document")
  val documentFooFoo = "foofoofoofoofoofoofoofoofoofoofoofoo"
  println(documentFooFoo)

  // many(foo) followed by end-of-input.
  val detectFooMany: LocationResultParser[Tuple2[List[String], String]] =
    SimpleParser.product(SimpleParser.many(detectFoo), SimpleParser.eof)
  val resultFoo3 = SimpleParser.run(detectFooMany)(documentFooFoo)
  println(resultFoo3)
}
| peterbecich/fpinscala | exercises/src/main/scala/fpinscala/parsing/Parsers.scala | Scala | mit | 16,105 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate
import java.lang.Iterable
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.windowing.RichWindowFunction
import org.apache.flink.streaming.api.windowing.windows.Window
import org.apache.flink.table.runtime.types.CRow
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
/**
* Computes the final aggregate value from incrementally computed aggregates.
*
* @param numGroupingKey The number of grouping keys.
* @param numAggregates The number of aggregates.
* @param finalRowArity The arity of the final output row.
*/
class IncrementalAggregateWindowFunction[W <: Window](
    private val numGroupingKey: Int,
    private val numAggregates: Int,
    private val finalRowArity: Int)
  extends RichWindowFunction[Row, CRow, Row, W] {

  // Output row reused across apply() invocations to avoid a per-record
  // allocation; its change flag is fixed to true.
  private var output: CRow = _

  override def open(parameters: Configuration): Unit = {
    output = new CRow(new Row(finalRowArity), true)
  }

  /**
   * Calculate aggregated values output by aggregate buffer, and set them into output
   * Row based on the mapping relation between intermediate aggregate data and output data.
   *
   * Emitted-row layout: fields [0, numGroupingKey) are copied from the grouping
   * key; fields [numGroupingKey, numGroupingKey + numAggregates) from the first
   * record. Emits nothing when `records` is empty.
   */
  override def apply(
      key: Row,
      window: W,
      records: Iterable[Row],
      out: Collector[CRow]): Unit = {

    val iterator = records.iterator

    if (iterator.hasNext) {
      // NOTE(review): only the first record is read; presumably upstream
      // incremental aggregation yields a single row per key/window — confirm.
      val record = iterator.next()
      var i = 0
      // Copy the grouping key fields into the leading positions.
      while (i < numGroupingKey) {
        output.row.setField(i, key.getField(i))
        i += 1
      }
      i = 0
      // Copy the aggregate values after the grouping keys.
      while (i < numAggregates) {
        output.row.setField(numGroupingKey + i, record.getField(i))
        i += 1
      }
      out.collect(output)
    }
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateWindowFunction.scala | Scala | apache-2.0 | 2,533 |
/*
* Copyright (c) 2014-2016
* nonblocking.at gmbh [http://www.nonblocking.at]
*
* This file is part of Cliwix.
*
* Cliwix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.nonblocking.cliwix.core.util
import java.io.Serializable
import java.{util => jutil}
import at.nonblocking.cliwix.core.validation.CliwixValidationException
import at.nonblocking.cliwix.model.LiferayEntity
import org.mapdb.HTreeMap
import scala.collection.JavaConversions._
/**
* Wraps the map values for serialization.
* Only iterator() is implemented!
*
* @param underlyingMap java.util.Map[K, V]
* @tparam String Key
* @tparam V Value
*/
/**
 * Wraps the values of a map behind the java.util.List interface for serialization.
 * Only iterator(), size()/isEmpty, add/remove/removeAll and clear() are implemented;
 * all positional operations throw NotImplementedError.
 *
 * NOTE(review): the type parameter is literally named `String`, shadowing
 * java.lang.String; kept for source compatibility with existing callers.
 *
 * @param underlyingMap java.util.Map[K, V]
 * @tparam String Key
 * @tparam V Value
 */
private[core] class MapValuesListWrapper[String, V <: LiferayEntity](val underlyingMap: jutil.Map[String, V]) extends jutil.List[V] with Serializable {

  override def iterator(): jutil.Iterator[V] =
    // MapDB-backed maps need their entries re-put after mutation so that
    // changes are persisted; plain in-memory maps do not.
    if (underlyingMap.isInstanceOf[HTreeMap[_, _]]) new AutoUpdateIterator(underlyingMap.values.iterator)
    else underlyingMap.values.iterator

  override def isEmpty: Boolean = underlyingMap.isEmpty

  override def size(): Int = underlyingMap.size

  override def remove(elem: scala.Any): Boolean = {
    assert(elem != null, "elem != null")
    val key = elem.asInstanceOf[LiferayEntity].identifiedBy()
    if (key == null) throw new CliwixValidationException(s"Invalid element of type ${elem.getClass.getName} found with no identifier!")
    // java.util.List contract: return true only if the collection changed.
    // Map.remove returns null when no mapping existed for the key.
    underlyingMap.remove(key) != null
  }

  override def removeAll(c: jutil.Collection[_]): Boolean = {
    // Contract: true iff at least one element was actually removed.
    var changed = false
    c.foreach { e => if (remove(e)) changed = true }
    changed
  }

  override def add(elem: V): Boolean = {
    assert(elem != null, "elem != null")
    val key = elem.asInstanceOf[LiferayEntity].identifiedBy()
    if (key == null) throw new CliwixValidationException(s"Invalid element of type ${elem.getClass.getName} found with no identifier!")
    underlyingMap.put(key.asInstanceOf[String], elem)
    true
  }

  override def clear(): Unit = underlyingMap.clear()

  // Not implemented
  override def contains(o: scala.Any): Boolean = ???
  override def subList(fromIndex: Int, toIndex: Int): jutil.List[V] = ???
  override def listIterator(index: Int): jutil.ListIterator[V] = ???
  override def listIterator(): jutil.ListIterator[V] = ???
  override def lastIndexOf(o: scala.Any): Int = ???
  override def indexOf(o: scala.Any): Int = ???
  override def remove(index: Int): V = ???
  override def add(index: Int, element: V): Unit = ???
  override def set(index: Int, element: V): V = ???
  override def get(index: Int): V = ???
  override def retainAll(c: jutil.Collection[_]): Boolean = ???
  override def addAll(index: Int, c: jutil.Collection[_ <: V]): Boolean = ???
  override def addAll(c: jutil.Collection[_ <: V]): Boolean = ???
  override def containsAll(c: jutil.Collection[_]): Boolean = ???
  override def toArray[T](a: Array[T with Object]): Array[T with Object] = ???
  override def toArray: Array[AnyRef] = ???

  /**
   * Iterator that writes the previously returned element back into the
   * underlying map before advancing, so mutations made by the consumer
   * are persisted (required for MapDB's HTreeMap).
   */
  private class AutoUpdateIterator(val underlyingIterator: jutil.Iterator[V]) extends jutil.Iterator[V] {

    var last: V = _

    override def hasNext: Boolean = underlyingIterator.hasNext

    override def next(): V = {
      if (last != null) {
        underlyingMap.put(last.identifiedBy().asInstanceOf[String], last)
      }
      last = underlyingIterator.next()
      last
    }

    override def remove(): Unit = ???
  }
}
//Companion
//Companion
private[core] object MapValuesListWrapper {
  /** Factory mirroring the primary constructor. */
  def apply[JavaString, V <: LiferayEntity](map: jutil.Map[JavaString, V]): MapValuesListWrapper[JavaString, V] =
    new MapValuesListWrapper[JavaString, V](map)
}
| nonblocking/cliwix | cliwix-core/src/main/scala/at/nonblocking/cliwix/core/util/MapValuesListWrapper.scala | Scala | agpl-3.0 | 4,149 |
package auctionsniper.xmpp
import org.jivesoftware.smack.{Chat, XMPPConnection, XMPPException}
import auctionsniper.{Auction, AuctionEventListener, PriceSource}
import auctionsniper.util.Announcer
class XMPPAuction(
    connection: XMPPConnection,
    auctionJID: String,
    failureReporter: XMPPFailureReporter) extends Auction {

  import XMPPAuction._

  /** Listeners notified of auction events decoded from incoming chat messages. */
  private val auctionEventListeners = Announcer.to[AuctionEventListener]
  // NOTE: initialization order matters — the translator must exist before
  // the chat is created with it as the message listener.
  private val messageTranslator = translatorFor(connection)
  private val auctionChat = connection.getChatManager.createChat(auctionJID, messageTranslator)

  addAuctionEventListener(chatDisconnectorFor(messageTranslator))

  def bid(amount: Int) {
    sendMessage(BID_COMMAND_FORMAT.format(amount))
  }

  def join() {
    sendMessage(JOIN_COMMAND_FORMAT)
  }

  final def addAuctionEventListener(listener: AuctionEventListener) {
    auctionEventListeners += listener
  }

  private def translatorFor(connection: XMPPConnection) =
    new AuctionMessageTranslator(connection.getUser, auctionEventListeners.announce(), failureReporter)

  /** Stops listening to chat messages once the auction has failed. */
  private def chatDisconnectorFor(translator: AuctionMessageTranslator) =
    new AuctionEventListener() {
      def auctionFailed() { auctionChat.removeMessageListener(translator) }
      def auctionClosed() { }
      def currentPrice(price: Int, increment: Int, priceSource: PriceSource) { }
    }

  private def sendMessage(message: String) {
    try auctionChat.sendMessage(message)
    catch {
      case e: XMPPException => e.printStackTrace()
    }
  }
}
object XMPPAuction {
  // Wire-protocol command strings sent over the chat.
  // JOIN takes no arguments; BID takes the bid amount via %d (see bid()).
  val JOIN_COMMAND_FORMAT = "SOLVersion: 1.1; Command: JOIN;"
  val BID_COMMAND_FORMAT = "SOLVersion: 1.1; Command: BID; Price: %d;"
}
| sptz45/goos-scala | src/auctionsniper/xmpp/XMPPAuction.scala | Scala | apache-2.0 | 1,662 |
package com.github.mrpowers.spark.examples.chaining
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
/** Example of chaining custom DataFrame transformations with `transform`. */
object TransformMethod {

  /** Adds a constant "greeting" column with the value "hello world". */
  def withGreeting(df: DataFrame): DataFrame =
    df.withColumn("greeting", lit("hello world"))

  /** Adds a constant "farewell" column with the value "goodbye". */
  def withFarewell(df: DataFrame): DataFrame =
    df.withColumn("farewell", lit("goodbye"))

  /** Applies both transformations in order: greeting, then farewell. */
  def withHiBye(df: DataFrame): DataFrame =
    df.transform(withGreeting)
      .transform(withFarewell)
}
| MrPowers/spark-examples | src/main/scala/com/github/mrpowers/spark/examples/chaining/TransformMethod.scala | Scala | mit | 484 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.util
import java.io._
import java.nio.ByteBuffer
import java.util.{Iterator => JIterator}
import java.util.concurrent.{CountDownLatch, RejectedExecutionException, ThreadPoolExecutor, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.concurrent._
import scala.concurrent.duration._
import scala.language.{implicitConversions, postfixOps}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.mockito.ArgumentCaptor
import org.mockito.Matchers.{eq => meq, _}
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfter, BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.Eventually._
import org.scalatest.mock.MockitoSugar
import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
import org.apache.spark.streaming.scheduler._
import org.apache.spark.util.{CompletionIterator, ManualClock, ThreadUtils, Utils}
/** Common tests for WriteAheadLogs that we would like to test with different configurations. */
/**
 * Shared test cases for WriteAheadLog implementations.
 *
 * @param allowBatching whether records are aggregated into batches before writing
 * @param closeFileAfterWrite whether the log closes its file after every write
 * @param testTag prefix used in test names to identify the configuration
 */
abstract class CommonWriteAheadLogTests(
    allowBatching: Boolean,
    closeFileAfterWrite: Boolean,
    testTag: String = "")
  extends SparkFunSuite with BeforeAndAfter {
  import WriteAheadLogSuite._
  protected val hadoopConf = new Configuration()
  protected var tempDir: File = null
  protected var testDir: String = null
  protected var testFile: String = null
  protected var writeAheadLog: WriteAheadLog = null
  protected def testPrefix = if (testTag != "") testTag + " - " else testTag
  // Fresh temp directory per test; also closes any log left open by the previous test.
  before {
    tempDir = Utils.createTempDir()
    testDir = tempDir.toString
    testFile = new File(tempDir, "testFile").toString
    if (writeAheadLog != null) {
      writeAheadLog.close()
      writeAheadLog = null
    }
  }
  after {
    Utils.deleteRecursively(tempDir)
  }
  test(testPrefix + "read all logs") {
    // Write data manually for testing reading through WriteAheadLog
    val writtenData = (1 to 10).flatMap { i =>
      val data = generateRandomData()
      val file = testDir + s"/log-$i-$i"
      writeDataManually(data, file, allowBatching)
      data
    }
    val logDirectoryPath = new Path(testDir)
    val fileSystem = HdfsUtils.getFileSystemForPath(logDirectoryPath, hadoopConf)
    assert(fileSystem.exists(logDirectoryPath) === true)
    // Read data using manager and verify
    val readData = readDataUsingWriteAheadLog(testDir, closeFileAfterWrite, allowBatching)
    assert(readData === writtenData)
  }
  test(testPrefix + "write logs") {
    // Write data with rotation using WriteAheadLog class
    val dataToWrite = generateRandomData()
    writeDataUsingWriteAheadLog(testDir, dataToWrite, closeFileAfterWrite = closeFileAfterWrite,
      allowBatching = allowBatching)
    // Read data manually to verify the written data
    val logFiles = getLogFilesInDirectory(testDir)
    assert(logFiles.size > 1)
    val writtenData = readAndDeserializeDataManually(logFiles, allowBatching)
    assert(writtenData === dataToWrite)
  }
  test(testPrefix + "read all logs after write") {
    // Write data with manager, recover with new manager and verify
    val dataToWrite = generateRandomData()
    writeDataUsingWriteAheadLog(testDir, dataToWrite, closeFileAfterWrite, allowBatching)
    val logFiles = getLogFilesInDirectory(testDir)
    assert(logFiles.size > 1)
    val readData = readDataUsingWriteAheadLog(testDir, closeFileAfterWrite, allowBatching)
    assert(dataToWrite === readData)
  }
  test(testPrefix + "clean old logs") {
    logCleanUpTest(waitForCompletion = false)
  }
  test(testPrefix + "clean old logs synchronously") {
    logCleanUpTest(waitForCompletion = true)
  }
  // Cleans logs older than half the clock time; with waitForCompletion = false the
  // deletion is asynchronous, so the assertion is retried with `eventually`.
  private def logCleanUpTest(waitForCompletion: Boolean): Unit = {
    // Write data with manager, recover with new manager and verify
    val manualClock = new ManualClock
    val dataToWrite = generateRandomData()
    writeAheadLog = writeDataUsingWriteAheadLog(testDir, dataToWrite, closeFileAfterWrite,
      allowBatching, manualClock, closeLog = false)
    val logFiles = getLogFilesInDirectory(testDir)
    assert(logFiles.size > 1)
    writeAheadLog.clean(manualClock.getTimeMillis() / 2, waitForCompletion)
    if (waitForCompletion) {
      assert(getLogFilesInDirectory(testDir).size < logFiles.size)
    } else {
      eventually(Eventually.timeout(1 second), interval(10 milliseconds)) {
        assert(getLogFilesInDirectory(testDir).size < logFiles.size)
      }
    }
  }
  test(testPrefix + "handling file errors while reading rotating logs") {
    // Generate a set of log files
    val manualClock = new ManualClock
    val dataToWrite1 = generateRandomData()
    writeDataUsingWriteAheadLog(testDir, dataToWrite1, closeFileAfterWrite, allowBatching,
      manualClock)
    val logFiles1 = getLogFilesInDirectory(testDir)
    assert(logFiles1.size > 1)
    // Recover old files and generate a second set of log files
    val dataToWrite2 = generateRandomData()
    manualClock.advance(100000)
    writeDataUsingWriteAheadLog(testDir, dataToWrite2, closeFileAfterWrite, allowBatching,
      manualClock)
    val logFiles2 = getLogFilesInDirectory(testDir)
    assert(logFiles2.size > logFiles1.size)
    // Read the files and verify that all the written data can be read
    val readData1 = readDataUsingWriteAheadLog(testDir, closeFileAfterWrite, allowBatching)
    assert(readData1 === (dataToWrite1 ++ dataToWrite2))
    // Corrupt the first set of files so that they are basically unreadable
    logFiles1.foreach { f =>
      val raf = new FileOutputStream(f, true).getChannel()
      raf.truncate(1)
      raf.close()
    }
    // Verify that the corrupted files do not prevent reading of the second set of data
    val readData = readDataUsingWriteAheadLog(testDir, closeFileAfterWrite, allowBatching)
    assert(readData === dataToWrite2)
  }
  test(testPrefix + "do not create directories or files unless write") {
    val nonexistentTempPath = File.createTempFile("test", "")
    nonexistentTempPath.delete()
    assert(!nonexistentTempPath.exists())
    val writtenSegment = writeDataManually(generateRandomData(), testFile, allowBatching)
    val wal = createWriteAheadLog(testDir, closeFileAfterWrite, allowBatching)
    assert(!nonexistentTempPath.exists(), "Directory created just by creating log object")
    if (allowBatching) {
      // Batched logs do not support random reads of individual segments.
      intercept[UnsupportedOperationException](wal.read(writtenSegment.head))
    } else {
      wal.read(writtenSegment.head)
    }
    assert(!nonexistentTempPath.exists(), "Directory created just by attempting to read segment")
  }
  test(testPrefix + "parallel recovery not enabled if closeFileAfterWrite = false") {
    // write some data
    val writtenData = (1 to 10).flatMap { i =>
      val data = generateRandomData()
      val file = testDir + s"/log-$i-$i"
      writeDataManually(data, file, allowBatching)
      data
    }
    val wal = createWriteAheadLog(testDir, closeFileAfterWrite, allowBatching)
    // create iterator but don't materialize it
    val readData = wal.readAll().asScala.map(byteBufferToString)
    wal.close()
    if (closeFileAfterWrite) {
      // the threadpool is shutdown by the wal.close call above, therefore we shouldn't be able
      // to materialize the iterator with parallel recovery
      intercept[RejectedExecutionException](readData.toArray)
    } else {
      assert(readData.toSeq === writtenData)
    }
  }
}
/**
 * Tests specific to FileBasedWriteAheadLog: the seqToParIterator parallel-recovery
 * helper, and the writer/reader classes used by the log.
 */
class FileBasedWriteAheadLogSuite
  extends CommonWriteAheadLogTests(false, false, "FileBasedWriteAheadLog") {
  import WriteAheadLogSuite._
  test("FileBasedWriteAheadLog - seqToParIterator") {
    /*
     If the setting `closeFileAfterWrite` is enabled, we start generating a very large number of
     files. This causes recovery to take a very long time. In order to make it quicker, we
     parallelized the reading of these files. This test makes sure that we limit the number of
     open files to the size of the number of threads in our thread pool rather than the size of
     the list of files.
     */
    val numThreads = 8
    val fpool = ThreadUtils.newForkJoinPool("wal-test-thread-pool", numThreads)
    val executionContext = ExecutionContext.fromExecutorService(fpool)
    // Tracks the current and the maximum-ever number of concurrently open iterators.
    class GetMaxCounter {
      private val value = new AtomicInteger()
      @volatile private var max: Int = 0
      def increment(): Unit = synchronized {
        val atInstant = value.incrementAndGet()
        if (atInstant > max) max = atInstant
      }
      def decrement(): Unit = synchronized { value.decrementAndGet() }
      def get(): Int = synchronized { value.get() }
      def getMax(): Int = synchronized { max }
    }
    try {
      // If Jenkins is slow, we may not have a chance to run many threads simultaneously. Having
      // a latch will make sure that all the threads can be launched altogether.
      val latch = new CountDownLatch(1)
      val testSeq = 1 to 1000
      val counter = new GetMaxCounter()
      // Each handled element counts as an "open iterator" until its CompletionIterator finishes.
      def handle(value: Int): Iterator[Int] = {
        new CompletionIterator[Int, Iterator[Int]](Iterator(value)) {
          counter.increment()
          // block so that other threads also launch
          latch.await(10, TimeUnit.SECONDS)
          override def completion() { counter.decrement() }
        }
      }
      @volatile var collected: Seq[Int] = Nil
      val t = new Thread() {
        override def run() {
          // run the calculation on a separate thread so that we can release the latch
          val iterator = FileBasedWriteAheadLog.seqToParIterator[Int, Int](executionContext,
            testSeq, handle)
          collected = iterator.toSeq
        }
      }
      t.start()
      eventually(Eventually.timeout(10.seconds)) {
        // make sure we are doing a parallel computation!
        assert(counter.getMax() > 1)
      }
      latch.countDown()
      t.join(10000)
      assert(collected === testSeq)
      // make sure we didn't open too many Iterators
      assert(counter.getMax() <= numThreads)
    } finally {
      fpool.shutdownNow()
    }
  }
  test("FileBasedWriteAheadLogWriter - writing data") {
    val dataToWrite = generateRandomData()
    val segments = writeDataUsingWriter(testFile, dataToWrite)
    val writtenData = readDataManually(segments)
    assert(writtenData === dataToWrite)
  }
  test("FileBasedWriteAheadLogWriter - syncing of data by writing and reading immediately") {
    val dataToWrite = generateRandomData()
    val writer = new FileBasedWriteAheadLogWriter(testFile, hadoopConf)
    dataToWrite.foreach { data =>
      // Each written segment must be readable immediately, i.e. the writer flushes/syncs.
      val segment = writer.write(stringToByteBuffer(data))
      val dataRead = readDataManually(Seq(segment)).head
      assert(data === dataRead)
    }
    writer.close()
  }
  test("FileBasedWriteAheadLogReader - sequentially reading data") {
    val writtenData = generateRandomData()
    writeDataManually(writtenData, testFile, allowBatching = false)
    val reader = new FileBasedWriteAheadLogReader(testFile, hadoopConf)
    val readData = reader.toSeq.map(byteBufferToString)
    assert(readData === writtenData)
    assert(reader.hasNext === false)
    // Reading past the end must fail rather than return stale data.
    intercept[Exception] {
      reader.next()
    }
    reader.close()
  }
  test("FileBasedWriteAheadLogReader - sequentially reading data written with writer") {
    val dataToWrite = generateRandomData()
    writeDataUsingWriter(testFile, dataToWrite)
    val readData = readDataUsingReader(testFile)
    assert(readData === dataToWrite)
  }
  test("FileBasedWriteAheadLogReader - reading data written with writer after corrupted write") {
    // Write data manually for testing the sequential reader
    val dataToWrite = generateRandomData()
    writeDataUsingWriter(testFile, dataToWrite)
    val fileLength = new File(testFile).length()
    // Append some garbage data to get the effect of a corrupted write
    val fw = new FileWriter(testFile, true)
    fw.append("This line appended to file!")
    fw.close()
    // Verify the data can be read and is same as the one correctly written
    assert(readDataUsingReader(testFile) === dataToWrite)
    // Corrupt the last correctly written file
    val raf = new FileOutputStream(testFile, true).getChannel()
    raf.truncate(fileLength - 1)
    raf.close()
    // Verify all the data except the last can be read
    assert(readDataUsingReader(testFile) === (dataToWrite.dropRight(1)))
  }
  test("FileBasedWriteAheadLogReader - handles errors when file doesn't exist") {
    // Write data manually for testing the sequential reader
    val dataToWrite = generateRandomData()
    writeDataUsingWriter(testFile, dataToWrite)
    val tFile = new File(testFile)
    assert(tFile.exists())
    // Verify the data can be read and is same as the one correctly written
    assert(readDataUsingReader(testFile) === dataToWrite)
    tFile.delete()
    assert(!tFile.exists())
    val reader = new FileBasedWriteAheadLogReader(testFile, hadoopConf)
    assert(!reader.hasNext)
    reader.close()
    // Verify that no exception is thrown if file doesn't exist
    assert(readDataUsingReader(testFile) === Nil)
  }
  test("FileBasedWriteAheadLogRandomReader - reading data using random reader") {
    // Write data manually for testing the random reader
    val writtenData = generateRandomData()
    val segments = writeDataManually(writtenData, testFile, allowBatching = false)
    // Get a random order of these segments and read them back
    val writtenDataAndSegments = writtenData.zip(segments).toSeq.permutations.take(10).flatten
    val reader = new FileBasedWriteAheadLogRandomReader(testFile, hadoopConf)
    writtenDataAndSegments.foreach { case (data, segment) =>
      assert(data === byteBufferToString(reader.read(segment)))
    }
    reader.close()
  }
  test("FileBasedWriteAheadLogRandomReader- reading data using random reader written with writer") {
    // Write data using writer for testing the random reader
    val data = generateRandomData()
    val segments = writeDataUsingWriter(testFile, data)
    // Read a random sequence of segments and verify read data
    val dataAndSegments = data.zip(segments).toSeq.permutations.take(10).flatten
    val reader = new FileBasedWriteAheadLogRandomReader(testFile, hadoopConf)
    dataAndSegments.foreach { case (data, segment) =>
      assert(data === byteBufferToString(reader.read(segment)))
    }
    reader.close()
  }
}
/**
 * Extra test for configurations with closeFileAfterWrite = true: every write
 * must produce its own log file (one file per record).
 */
abstract class CloseFileAfterWriteTests(allowBatching: Boolean, testTag: String)
  extends CommonWriteAheadLogTests(allowBatching, closeFileAfterWrite = true, testTag) {
  import WriteAheadLogSuite._
  test(testPrefix + "close after write flag") {
    // Write data with rotation using WriteAheadLog class
    val numFiles = 3
    val dataToWrite = Seq.tabulate(numFiles)(_.toString)
    // total advance time is less than 1000, therefore log shouldn't be rolled, but manually closed
    writeDataUsingWriteAheadLog(testDir, dataToWrite, closeLog = false, clockAdvanceTime = 100,
      closeFileAfterWrite = true, allowBatching = allowBatching)
    // Read data manually to verify the written data
    val logFiles = getLogFilesInDirectory(testDir)
    // One file per written record, since the file is closed after each write.
    assert(logFiles.size === numFiles)
    val writtenData: Seq[String] = readAndDeserializeDataManually(logFiles, allowBatching)
    assert(writtenData === dataToWrite)
  }
}
// Runs the close-file-after-write tests against the plain (non-batched) file-based log.
class FileBasedWriteAheadLogWithFileCloseAfterWriteSuite
  extends CloseFileAfterWriteTests(allowBatching = false, "FileBasedWriteAheadLog")
/**
 * Tests specific to BatchedWriteAheadLog: record aggregation/deaggregation,
 * error propagation from the wrapped log, batching while a write is in flight,
 * and failing queued writes on shutdown. Uses a mocked wrapped WriteAheadLog.
 */
class BatchedWriteAheadLogSuite extends CommonWriteAheadLogTests(
    allowBatching = true,
    closeFileAfterWrite = false,
    "BatchedWriteAheadLog")
  with MockitoSugar
  with BeforeAndAfterEach
  with Eventually
  with PrivateMethodTester {
  import BatchedWriteAheadLog._
  import WriteAheadLogSuite._
  private var wal: WriteAheadLog = _
  private var walHandle: WriteAheadLogRecordHandle = _
  private var walBatchingThreadPool: ThreadPoolExecutor = _
  private var walBatchingExecutionContext: ExecutionContextExecutorService = _
  private val sparkConf = new SparkConf()
  // Accessor for the private getQueueLength method of BatchedWriteAheadLog.
  private val queueLength = PrivateMethod[Int]('getQueueLength)
  override def beforeEach(): Unit = {
    super.beforeEach()
    wal = mock[WriteAheadLog]
    walHandle = mock[WriteAheadLogRecordHandle]
    walBatchingThreadPool = ThreadUtils.newDaemonFixedThreadPool(8, "wal-test-thread-pool")
    walBatchingExecutionContext = ExecutionContext.fromExecutorService(walBatchingThreadPool)
  }
  override def afterEach(): Unit = {
    try {
      if (walBatchingExecutionContext != null) {
        walBatchingExecutionContext.shutdownNow()
      }
    } finally {
      super.afterEach()
    }
  }
  test("BatchedWriteAheadLog - serializing and deserializing batched records") {
    val events = Seq(
      BlockAdditionEvent(ReceivedBlockInfo(0, None, None, null)),
      BatchAllocationEvent(null, null),
      BatchCleanupEvent(Nil)
    )
    // aggregate followed by deaggregate must round-trip the original events.
    val buffers = events.map(e => Record(ByteBuffer.wrap(Utils.serialize(e)), 0L, null))
    val batched = BatchedWriteAheadLog.aggregate(buffers)
    val deaggregate = BatchedWriteAheadLog.deaggregate(batched).map(buffer =>
      Utils.deserialize[ReceivedBlockTrackerLogEvent](buffer.array()))
    assert(deaggregate.toSeq === events)
  }
  test("BatchedWriteAheadLog - failures in wrappedLog get bubbled up") {
    when(wal.write(any[ByteBuffer], anyLong)).thenThrow(new RuntimeException("Hello!"))
    // the BatchedWriteAheadLog should bubble up any exceptions that may have happened during writes
    val batchedWal = new BatchedWriteAheadLog(wal, sparkConf)
    val e = intercept[SparkException] {
      val buffer = mock[ByteBuffer]
      batchedWal.write(buffer, 2L)
    }
    assert(e.getCause.getMessage === "Hello!")
  }
  // we make the write requests in separate threads so that we don't block the test thread
  private def writeAsync(wal: WriteAheadLog, event: String, time: Long): Promise[Unit] = {
    val p = Promise[Unit]()
    p.completeWith(Future {
      val v = wal.write(event, time)
      assert(v === walHandle)
    }(walBatchingExecutionContext))
    p
  }
  test("BatchedWriteAheadLog - name log with the highest timestamp of aggregated entries") {
    val blockingWal = new BlockingWriteAheadLog(wal, walHandle)
    val batchedWal = new BatchedWriteAheadLog(blockingWal, sparkConf)
    val event1 = "hello"
    val event2 = "world"
    val event3 = "this"
    val event4 = "is"
    val event5 = "doge"
    // The queue.take() immediately takes the 3, and there is nothing left in the queue at that
    // moment. Then the promise blocks the writing of 3. The rest get queued.
    writeAsync(batchedWal, event1, 3L)
    eventually(timeout(1 second)) {
      assert(blockingWal.isBlocked)
      assert(batchedWal.invokePrivate(queueLength()) === 0)
    }
    // rest of the records will be batched while it takes time for 3 to get written
    writeAsync(batchedWal, event2, 5L)
    writeAsync(batchedWal, event3, 8L)
    // we would like event 5 to be written before event 4 in order to test that they get
    // sorted before being aggregated
    writeAsync(batchedWal, event5, 12L)
    eventually(timeout(1 second)) {
      assert(blockingWal.isBlocked)
      assert(batchedWal.invokePrivate(queueLength()) === 3)
    }
    writeAsync(batchedWal, event4, 10L)
    eventually(timeout(1 second)) {
      assert(walBatchingThreadPool.getActiveCount === 5)
      assert(batchedWal.invokePrivate(queueLength()) === 4)
    }
    blockingWal.allowWrite()
    val buffer = wrapArrayArrayByte(Array(event1))
    val queuedEvents = Set(event2, event3, event4, event5)
    eventually(timeout(1 second)) {
      assert(batchedWal.invokePrivate(queueLength()) === 0)
      verify(wal, times(1)).write(meq(buffer), meq(3L))
      // the file name should be the timestamp of the last record, as events should be naturally
      // in order of timestamp, and we need the last element.
      val bufferCaptor = ArgumentCaptor.forClass(classOf[ByteBuffer])
      verify(wal, times(1)).write(bufferCaptor.capture(), meq(12L))
      val records = BatchedWriteAheadLog.deaggregate(bufferCaptor.getValue).map(byteBufferToString)
      assert(records.toSet === queuedEvents)
    }
  }
  test("BatchedWriteAheadLog - shutdown properly") {
    val batchedWal = new BatchedWriteAheadLog(wal, sparkConf)
    batchedWal.close()
    verify(wal, times(1)).close()
    // Writes after close must be rejected.
    intercept[IllegalStateException](batchedWal.write(mock[ByteBuffer], 12L))
  }
  test("BatchedWriteAheadLog - fail everything in queue during shutdown") {
    val blockingWal = new BlockingWriteAheadLog(wal, walHandle)
    val batchedWal = new BatchedWriteAheadLog(blockingWal, sparkConf)
    val event1 = "hello"
    val event2 = "world"
    val event3 = "this"
    // The queue.take() immediately takes the 3, and there is nothing left in the queue at that
    // moment. Then the promise blocks the writing of 3. The rest get queued.
    val promise1 = writeAsync(batchedWal, event1, 3L)
    eventually(timeout(1 second)) {
      assert(blockingWal.isBlocked)
      assert(batchedWal.invokePrivate(queueLength()) === 0)
    }
    // rest of the records will be batched while it takes time for 3 to get written
    val promise2 = writeAsync(batchedWal, event2, 5L)
    val promise3 = writeAsync(batchedWal, event3, 8L)
    eventually(timeout(1 second)) {
      assert(walBatchingThreadPool.getActiveCount === 3)
      assert(blockingWal.isBlocked)
      assert(batchedWal.invokePrivate(queueLength()) === 2) // event1 is being written
    }
    val writePromises = Seq(promise1, promise2, promise3)
    batchedWal.close()
    eventually(timeout(1 second)) {
      assert(writePromises.forall(_.isCompleted))
      assert(writePromises.forall(_.future.value.get.isFailure)) // all should have failed
    }
  }
}
// Runs the close-file-after-write tests against the batched log.
class BatchedWriteAheadLogWithCloseFileAfterWriteSuite
  extends CloseFileAfterWriteTests(allowBatching = true, "BatchedWriteAheadLog")
/** Shared helpers for reading/writing WAL data in the suites above. */
object WriteAheadLogSuite {
  private val hadoopConf = new Configuration()
  /** Write data to a file directly and return an array of the file segments written. */
  def writeDataManually(
      data: Seq[String],
      file: String,
      allowBatching: Boolean): Seq[FileBasedWriteAheadLogSegment] = {
    val segments = new ArrayBuffer[FileBasedWriteAheadLogSegment]()
    val writer = HdfsUtils.getOutputStream(file, hadoopConf)
    // Record layout: 4-byte length prefix followed by the serialized bytes.
    // The segment offset points at the length prefix.
    def writeToStream(bytes: Array[Byte]): Unit = {
      val offset = writer.getPos
      writer.writeInt(bytes.size)
      writer.write(bytes)
      segments += FileBasedWriteAheadLogSegment(file, offset, bytes.size)
    }
    if (allowBatching) {
      writeToStream(wrapArrayArrayByte(data.toArray[String]).array())
    } else {
      data.foreach { item =>
        writeToStream(Utils.serialize(item))
      }
    }
    writer.close()
    segments
  }
  /**
   * Write data to a file using the writer class and return an array of the file segments written.
   */
  def writeDataUsingWriter(
      filePath: String,
      data: Seq[String]): Seq[FileBasedWriteAheadLogSegment] = {
    val writer = new FileBasedWriteAheadLogWriter(filePath, hadoopConf)
    val segments = data.map {
      item => writer.write(item)
    }
    writer.close()
    segments
  }
  /** Write data to rotating files in log directory using the WriteAheadLog class. */
  def writeDataUsingWriteAheadLog(
      logDirectory: String,
      data: Seq[String],
      closeFileAfterWrite: Boolean,
      allowBatching: Boolean,
      manualClock: ManualClock = new ManualClock,
      closeLog: Boolean = true,
      clockAdvanceTime: Int = 500): WriteAheadLog = {
    if (manualClock.getTimeMillis() < 100000) manualClock.setTime(10000)
    val wal = createWriteAheadLog(logDirectory, closeFileAfterWrite, allowBatching)
    // Ensure that 500 does not get sorted after 2000, so put a high base value.
    data.foreach { item =>
      manualClock.advance(clockAdvanceTime)
      wal.write(item, manualClock.getTimeMillis())
    }
    if (closeLog) wal.close()
    wal
  }
  /** Read data from a segments of a log file directly and return the list of byte buffers. */
  def readDataManually(segments: Seq[FileBasedWriteAheadLogSegment]): Seq[String] = {
    segments.map { segment =>
      val reader = HdfsUtils.getInputStream(segment.path, hadoopConf)
      try {
        reader.seek(segment.offset)
        val bytes = new Array[Byte](segment.length)
        reader.readInt()
        reader.readFully(bytes)
        // Fix: the reader was previously closed here AND in the finally
        // block; a single close in finally is sufficient.
        Utils.deserialize[String](bytes)
      } finally {
        reader.close()
      }
    }
  }
  /** Read all the data from a log file directly and return the list of byte buffers. */
  def readDataManually[T](file: String): Seq[T] = {
    val reader = HdfsUtils.getInputStream(file, hadoopConf)
    val buffer = new ArrayBuffer[T]
    try {
      while (true) {
        // Read till EOF is thrown
        val length = reader.readInt()
        val bytes = new Array[Byte](length)
        // Fix: use readFully instead of read, which may return fewer than
        // `length` bytes and silently corrupt the deserialized record.
        reader.readFully(bytes)
        buffer += Utils.deserialize[T](bytes)
      }
    } catch {
      case ex: EOFException =>
    } finally {
      reader.close()
    }
    buffer
  }
  /** Read all the data from a log file using reader class and return the list of byte buffers. */
  def readDataUsingReader(file: String): Seq[String] = {
    val reader = new FileBasedWriteAheadLogReader(file, hadoopConf)
    val readData = reader.toList.map(byteBufferToString)
    reader.close()
    readData
  }
  /** Read all the data in the log file in a directory using the WriteAheadLog class. */
  def readDataUsingWriteAheadLog(
      logDirectory: String,
      closeFileAfterWrite: Boolean,
      allowBatching: Boolean): Seq[String] = {
    val wal = createWriteAheadLog(logDirectory, closeFileAfterWrite, allowBatching)
    val data = wal.readAll().asScala.map(byteBufferToString).toArray
    wal.close()
    data
  }
  /** Get the log files in a directory, sorted by the start time encoded in the name. */
  def getLogFilesInDirectory(directory: String): Seq[String] = {
    val logDirectoryPath = new Path(directory)
    val fileSystem = HdfsUtils.getFileSystemForPath(logDirectoryPath, hadoopConf)
    if (fileSystem.exists(logDirectoryPath) &&
        fileSystem.getFileStatus(logDirectoryPath).isDirectory) {
      fileSystem.listStatus(logDirectoryPath).map { _.getPath() }.sortBy {
        // File names are of the form "log-<startTime>-<endTime>".
        _.getName().split("-")(1).toLong
      }.map {
        _.toString.stripPrefix("file:")
      }
    } else {
      Seq.empty
    }
  }
  /** Create a file-based WAL, optionally wrapped in a BatchedWriteAheadLog. */
  def createWriteAheadLog(
      logDirectory: String,
      closeFileAfterWrite: Boolean,
      allowBatching: Boolean): WriteAheadLog = {
    val sparkConf = new SparkConf
    val wal = new FileBasedWriteAheadLog(sparkConf, logDirectory, hadoopConf, 1, 1,
      closeFileAfterWrite)
    if (allowBatching) new BatchedWriteAheadLog(wal, sparkConf) else wal
  }
  /** Deterministic "random" payload: the strings "1" to "100". */
  def generateRandomData(): Seq[String] = {
    (1 to 100).map { _.toString }
  }
  /** Read records from the given files, unwrapping batches when allowBatching is set. */
  def readAndDeserializeDataManually(logFiles: Seq[String], allowBatching: Boolean): Seq[String] = {
    if (allowBatching) {
      logFiles.flatMap { file =>
        val data = readDataManually[Array[Array[Byte]]](file)
        data.flatMap(byteArray => byteArray.map(Utils.deserialize[String]))
      }
    } else {
      logFiles.flatMap { file => readDataManually[String](file)}
    }
  }
  implicit def stringToByteBuffer(str: String): ByteBuffer = {
    ByteBuffer.wrap(Utils.serialize(str))
  }
  implicit def byteBufferToString(byteBuffer: ByteBuffer): String = {
    Utils.deserialize[String](byteBuffer.array)
  }
  /** Serialize each record and wrap the array of serialized records in a ByteBuffer. */
  def wrapArrayArrayByte[T](records: Array[T]): ByteBuffer = {
    ByteBuffer.wrap(Utils.serialize[Array[Array[Byte]]](records.map(Utils.serialize[T])))
  }
  /**
   * A wrapper WriteAheadLog that blocks the write function to allow batching with the
   * BatchedWriteAheadLog.
   */
  class BlockingWriteAheadLog(
      wal: WriteAheadLog,
      handle: WriteAheadLogRecordHandle) extends WriteAheadLog {
    @volatile private var isWriteCalled: Boolean = false
    @volatile private var blockWrite: Boolean = true
    override def write(record: ByteBuffer, time: Long): WriteAheadLogRecordHandle = {
      isWriteCalled = true
      // Spin (via eventually) until the test calls allowWrite().
      eventually(Eventually.timeout(2 second)) {
        assert(!blockWrite)
      }
      wal.write(record, time)
      isWriteCalled = false
      handle
    }
    override def read(segment: WriteAheadLogRecordHandle): ByteBuffer = wal.read(segment)
    override def readAll(): JIterator[ByteBuffer] = wal.readAll()
    override def clean(threshTime: Long, waitForCompletion: Boolean): Unit = {
      wal.clean(threshTime, waitForCompletion)
    }
    override def close(): Unit = wal.close()
    def allowWrite(): Unit = {
      blockWrite = false
    }
    def isBlocked: Boolean = isWriteCalled
  }
}
| Panos-Bletsos/spark-cost-model-optimizer | streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala | Scala | apache-2.0 | 29,665 |
// Classes: Access Modification
// Properties and methods can have access modifiers
// similar to those in Java.
class Point(xc: Int, yc: Int) {
  // Mutable coordinates, hidden from callers; seeded from the constructor args.
  private var x: Int = xc
  private var y: Int = yc

  /** Translates this point in place by the given deltas. */
  def move(dx: Int, dy: Int): Unit = {
    x += dx
    y += dy
  }

  /** Renders the point as "(x, y)". */
  override def toString(): String = s"($x, $y)"
}
// Classes can support inheritance, and
// access modification becomes more relevant.
| agconti/scala-school | 01-intro-to-scala/slides/slide021.scala | Scala | mit | 435 |
/*
* Copyright 2001-2009 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
/**
* <strong>FixtureFunSuite has been deprecated and will be removed in a future
* release of ScalaTest. Please change any uses of <code>org.scalatest.fixture.FixtureFunSuite</code>
* to a corresponding use of <a href="FunSuite.html"><code>org.scalatest.fixture.FunSuite</code></a>.</strong>
*
* <p>
* <strong> This is just
* a rename, so the only thing you need to do is change the name. However, the recommended way to
* write it is to import <code>org.scalatest.fixture</code> and then write <code>fixture.FunSuite</code> when
* you use it, to differentiate it more clearly from <code>org.scalatest.FunSuite</code>. For example:
* </strong>
* </p>
*
* <pre>
* import org.scalatest.fixture
*
* class ExampleSpec extends fixture.FunSuite {
* // ...
* }
* </pre>
*/
@deprecated("Please use org.scalatest.fixture.FunSuite instead.")
trait FixtureFunSuite extends FunSuite { thisSuite =>
  // Overridden so the reported source file name matches this deprecated alias
  // rather than FunSuite.scala — presumably consumed by ScalaTest's failure
  // location reporting; confirm against Suite's use of sourceFileName.
  override private[scalatest] val sourceFileName = "FixtureFunSuite.scala"
}
| svn2github/scalatest | src/main/scala/org/scalatest/fixture/FixtureFunSuite.scala | Scala | apache-2.0 | 1,619 |
package org.jetbrains.plugins.scala.lang.parser.parsing.expressions
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
* Date: 13.02.2008
*/
/*
* SelfInvocation ::= 'this' ArgumentExprs {ArgumentExprs}
*/
object SelfInvocation extends SelfInvocation {
  // Wires the concrete ArgumentExprs parser into the reusable trait below.
  override protected def argumentExprs = ArgumentExprs
}
trait SelfInvocation {
  // Parser used for each ArgumentExprs group following 'this'.
  protected def argumentExprs: ArgumentExprs

  /**
   * Parses `'this' ArgumentExprs {ArgumentExprs}`.
   *
   * Always returns true: when the leading 'this' is absent the marker is
   * simply dropped and nothing is consumed (error reporting is deferred to
   * ScalaAnnotator — see the comment below).
   */
  def parse(builder: ScalaPsiBuilder): Boolean = {
    val selfMarker = builder.mark
    builder.getTokenType match {
      case ScalaTokenTypes.kTHIS =>
        builder.advanceLexer() //Ate this
      case _ =>
        //error moved to ScalaAnnotator to differentiate with compiled files
        selfMarker.drop()
        return true
    }
    // At least one argument list is expected; if it is missing we still close
    // the SELF_INVOCATION node around the bare 'this'.
    if (!argumentExprs.parse(builder)) {
      selfMarker.done(ScalaElementTypes.SELF_INVOCATION)
      return true
    }
    // Consume further argument lists on the same line (a newline terminates
    // the self invocation).
    while (!builder.newlineBeforeCurrentToken && argumentExprs.parse(builder)) {}
    selfMarker.done(ScalaElementTypes.SELF_INVOCATION)
    true
  }
}
package com.azavea.math
import scala.math.{abs, min, max, pow}
import annotation.implicitNotFound
/**
* @author Erik Osheim
*/
/**
* Numeric typeclass for doing operations on generic types.
*
* Importantly, this package does not deliver classes for you to instantiate.
* Rather, it gives you a trait to associated with your generic types, which
* allows actual uses of your generic code with concrete types (e.g. Int) to
* link up with concrete implementations (e.g. IntIsNumeric) of Numeric's
* method for that type.
*
* @example {{{
* import demo.Numeric
* import demo.Numeric.FastImplicits._
*
* def pythagoreanTheorem[T:Numeric](a:T, b:T): Double = {
* val c = (a * a) + (b * b)
* math.sqrt(c.toDouble)
* }
*
 *
* }}}
*
*/
//@implicitNotFound(msg = "Cannot find Numeric type class for ${A}")
trait Numeric[@specialized(Int,Long,Float,Double) A]
extends ConvertableFrom[A] with ConvertableTo[A] {
  /**
   * Computes the absolute value of `a`.
   *
   * @return the absolute value of `a`
   */
  def abs(a:A):A
  /**
   * Returns an integer whose sign denotes the relationship between
   * `a` and `b`. If `a` < `b` it returns -1, if `a` == `b` it returns
   * 0 and if `a` > `b` it returns 1.
   *
   * @return -1, 0 or 1
   *
   * @see math.abs
   */
  def compare(a:A, b:A):Int = if (lt(a, b)) -1 else if (gt(a, b)) 1 else 0
  /**
   * Divides `a` by `b`.
   *
   * This method maintains the type of the arguments (`A`). If this
   * method is used with `Int` or `Long` then the quotient is returned
   * (as in integer division). Otherwise (with `Float` and `Double`) a
   * fractional result is returned.
   *
   * @return `a` / `b`
   */
  def div(a:A, b:A):A
  /**
   * Tests if `a` and `b` are equivalent.
   *
   * @return `a` == `b`
   */
  def equiv(a:A, b:A):Boolean
  /**
   * Tests if `a` and `b` are not equivalent.
   *
   * @return `a` != `b`
   */
  def nequiv(a:A, b:A):Boolean
  /**
   * Tests if `a` is greater than `b`.
   *
   * @return `a` > `b`
   */
  def gt(a:A, b:A):Boolean
  /**
   * Tests if `a` is greater than or equal to `b`.
   *
   * @return `a` >= `b`
   */
  def gteq(a:A, b:A):Boolean
  /**
   * Tests if `a` is less than `b`.
   *
   * @return `a` < `b`
   */
  def lt(a:A, b:A):Boolean
  /**
   * Tests if `a` is less than or equal to `b`.
   *
   * @return `a` <= `b`
   */
  def lteq(a:A, b:A):Boolean
  /**
   * Returns the larger of `a` and `b`.
   *
   * @return max(`a`, `b`)
   *
   * @see math.max
   */
  def max(a:A, b:A):A
  /**
   * Returns the smaller of `a` and `b`.
   *
   * @return min(`a`, `b`)
   *
   * @see math.min
   */
  def min(a:A, b:A):A
  /**
   * Returns `a` minus `b`.
   *
   * @return `a` - `b`
   */
  def minus(a:A, b:A):A
  /**
   * Returns `a` modulo `b`.
   *
   * @return `a` % `b`
   */
  def mod(a:A, b:A):A
  /**
   * Returns the additive inverse `a`.
   *
   * @return -`a`
   */
  def negate(a:A):A
  /**
   * Returns one.
   *
   * @return 1
   */
  def one:A
  /**
   * Returns `a` plus `b`.
   *
   * @return `a` + `b`
   */
  def plus(a:A, b:A):A
  /**
   * Returns `a` to the `b`th power.
   *
   * Note that with large numbers this method will overflow and
   * return Infinity, which becomes MaxValue for whatever type
   * is being used. This behavior is inherited from `math.pow`.
   *
   * @return pow(`a`, `b`)
   *
   * @see math.pow
   */
  def pow(a:A, b:A):A
  /**
   * Returns an integer whose sign denotes the sign of `a`.
   * If `a` is negative it returns -1, if `a` is zero it
   * returns 0 and if `a` is positive it returns 1.
   *
   * @return -1, 0 or 1
   */
  def signum(a:A):Int = compare(a, zero)
  /**
   * Returns `a` times `b`.
   *
   * @return `a` * `b`
   */
  def times(a:A, b:A):A
  /**
   * Returns zero.
   *
   * @return 0
   */
  def zero:A
  /**
   * Convert a value `b` of type `B` to type `A`.
   *
   * This method can be used to coerce one generic numeric type to
   * another, to allow operations on them jointly.
   *
   * @example {{{
   *   def foo[A:Numeric,B:Numeric](a:A, b:B) = {
   *     val n = implicitly[Numeric[A]]
   *     n.add(a, n.fromType(b))
   *   }
   * }}}
   *
   * Note that `b` may lose precision when represented as an `A`
   * (e.g. if B is Long and A is Int).
   *
   * @return the value of `b` encoded in type `A`
   */
  def fromType[@specialized(Int, Long, Float, Double) B](b:B)(implicit c:ConvertableFrom[B]): A
  def toType[@specialized(Int, Long, Float, Double) B](a:A)(implicit c:ConvertableTo[B]): B
  /**
   * Used to get an Ordering[A] instance.
   */
  def getOrdering():Ordering[A] = new NumericOrdering(this)
}
/**
* This is a little helper class that allows us to support the Ordering trait.
*
* If Numeric extended Ordering directly then we'd have to override all of
* the comparison operators, losing specialization and other performance
* benefits.
*/
class NumericOrdering[A](n: Numeric[A]) extends Ordering[A] {
  /** Delegates straight to the wrapped Numeric's comparison. */
  override def compare(a: A, b: A): Int = n.compare(a, b)
}
/** Numeric evidence for Int. */
trait IntIsNumeric
extends Numeric[Int] with ConvertableFromInt with ConvertableToInt {
  // Constants
  override def zero: Int = 0
  override def one: Int = 1
  // Arithmetic
  override def plus(a: Int, b: Int): Int = a + b
  override def minus(a: Int, b: Int): Int = a - b
  override def times(a: Int, b: Int): Int = a * b
  override def div(a: Int, b: Int): Int = a / b
  override def mod(a: Int, b: Int): Int = a % b
  override def negate(a: Int): Int = -a
  override def abs(a: Int): Int = scala.math.abs(a)
  // Computed in Double space, then truncated back to Int (overflow possible).
  override def pow(a: Int, b: Int): Int = scala.math.pow(a, b).toInt
  // Comparisons
  override def equiv(a: Int, b: Int): Boolean = a == b
  override def nequiv(a: Int, b: Int): Boolean = a != b
  override def lt(a: Int, b: Int): Boolean = a < b
  override def lteq(a: Int, b: Int): Boolean = a <= b
  override def gt(a: Int, b: Int): Boolean = a > b
  override def gteq(a: Int, b: Int): Boolean = a >= b
  override def min(a: Int, b: Int): Int = scala.math.min(a, b)
  override def max(a: Int, b: Int): Int = scala.math.max(a, b)
  // Cross-type conversions
  override def fromType[@specialized(Int, Long, Float, Double) B](b: B)(implicit c: ConvertableFrom[B]) = c.toInt(b)
  override def toType[@specialized(Int, Long, Float, Double) B](a: Int)(implicit c: ConvertableTo[B]) = c.fromInt(a)
}
/** Numeric evidence for Long. */
trait LongIsNumeric
extends Numeric[Long] with ConvertableFromLong with ConvertableToLong {
  // Constants
  override def zero: Long = 0L
  override def one: Long = 1L
  // Arithmetic
  override def plus(a: Long, b: Long): Long = a + b
  override def minus(a: Long, b: Long): Long = a - b
  override def times(a: Long, b: Long): Long = a * b
  override def div(a: Long, b: Long): Long = a / b
  override def mod(a: Long, b: Long): Long = a % b
  override def negate(a: Long): Long = -a
  override def abs(a: Long): Long = scala.math.abs(a)
  // Computed in Double space, then truncated back to Long (precision loss possible).
  override def pow(a: Long, b: Long): Long = scala.math.pow(a, b).toLong
  // Comparisons
  override def equiv(a: Long, b: Long): Boolean = a == b
  override def nequiv(a: Long, b: Long): Boolean = a != b
  override def lt(a: Long, b: Long): Boolean = a < b
  override def lteq(a: Long, b: Long): Boolean = a <= b
  override def gt(a: Long, b: Long): Boolean = a > b
  override def gteq(a: Long, b: Long): Boolean = a >= b
  override def min(a: Long, b: Long): Long = scala.math.min(a, b)
  override def max(a: Long, b: Long): Long = scala.math.max(a, b)
  // Cross-type conversions
  override def fromType[@specialized(Int, Long, Float, Double) B](b: B)(implicit c: ConvertableFrom[B]) = c.toLong(b)
  override def toType[@specialized(Int, Long, Float, Double) B](a: Long)(implicit c: ConvertableTo[B]) = c.fromLong(a)
}
/** Numeric evidence for Float. */
trait FloatIsNumeric
extends Numeric[Float] with ConvertableFromFloat with ConvertableToFloat {
  // Constants
  override def zero: Float = 0.0F
  override def one: Float = 1.0F
  // Arithmetic
  override def plus(a: Float, b: Float): Float = a + b
  override def minus(a: Float, b: Float): Float = a - b
  override def times(a: Float, b: Float): Float = a * b
  override def div(a: Float, b: Float): Float = a / b
  override def mod(a: Float, b: Float): Float = a % b
  override def negate(a: Float): Float = -a
  override def abs(a: Float): Float = scala.math.abs(a)
  override def pow(a: Float, b: Float): Float = scala.math.pow(a, b).toFloat
  // Comparisons
  override def equiv(a: Float, b: Float): Boolean = a == b
  override def nequiv(a: Float, b: Float): Boolean = a != b
  override def lt(a: Float, b: Float): Boolean = a < b
  override def lteq(a: Float, b: Float): Boolean = a <= b
  override def gt(a: Float, b: Float): Boolean = a > b
  override def gteq(a: Float, b: Float): Boolean = a >= b
  override def min(a: Float, b: Float): Float = scala.math.min(a, b)
  override def max(a: Float, b: Float): Float = scala.math.max(a, b)
  // Cross-type conversions
  override def fromType[@specialized(Int, Long, Float, Double) B](b: B)(implicit c: ConvertableFrom[B]) = c.toFloat(b)
  override def toType[@specialized(Int, Long, Float, Double) B](a: Float)(implicit c: ConvertableTo[B]) = c.fromFloat(a)
}
/** Numeric evidence for Double. */
trait DoubleIsNumeric
extends Numeric[Double] with ConvertableFromDouble with ConvertableToDouble {
  // Constants
  override def zero: Double = 0.0
  override def one: Double = 1.0
  // Arithmetic
  override def plus(a: Double, b: Double): Double = a + b
  override def minus(a: Double, b: Double): Double = a - b
  override def times(a: Double, b: Double): Double = a * b
  override def div(a: Double, b: Double): Double = a / b
  override def mod(a: Double, b: Double): Double = a % b
  override def negate(a: Double): Double = -a
  override def abs(a: Double): Double = scala.math.abs(a)
  override def pow(a: Double, b: Double): Double = scala.math.pow(a, b)
  // Comparisons
  override def equiv(a: Double, b: Double): Boolean = a == b
  override def nequiv(a: Double, b: Double): Boolean = a != b
  override def lt(a: Double, b: Double): Boolean = a < b
  override def lteq(a: Double, b: Double): Boolean = a <= b
  override def gt(a: Double, b: Double): Boolean = a > b
  override def gteq(a: Double, b: Double): Boolean = a >= b
  override def min(a: Double, b: Double): Double = scala.math.min(a, b)
  override def max(a: Double, b: Double): Double = scala.math.max(a, b)
  // Cross-type conversions
  override def fromType[@specialized(Int, Long, Float, Double) B](b: B)(implicit c: ConvertableFrom[B]) = c.toDouble(b)
  override def toType[@specialized(Int, Long, Float, Double) B](a: Double)(implicit c: ConvertableTo[B]) = c.fromDouble(a)
}
/** Numeric evidence for BigInt. */
trait BigIntIsNumeric
extends Numeric[BigInt] with ConvertableFromBigInt with ConvertableToBigInt {
  // Constants
  override def zero: BigInt = BigInt(0)
  override def one: BigInt = BigInt(1)
  // Arithmetic (delegates to BigInt's own methods)
  override def plus(a: BigInt, b: BigInt): BigInt = a + b
  override def minus(a: BigInt, b: BigInt): BigInt = a - b
  override def times(a: BigInt, b: BigInt): BigInt = a * b
  override def div(a: BigInt, b: BigInt): BigInt = a / b
  override def mod(a: BigInt, b: BigInt): BigInt = a % b
  override def negate(a: BigInt): BigInt = -a
  override def abs(a: BigInt): BigInt = a.abs
  override def pow(a: BigInt, b: BigInt): BigInt = a.pow(b)
  // Comparisons
  override def equiv(a: BigInt, b: BigInt): Boolean = a == b
  override def nequiv(a: BigInt, b: BigInt): Boolean = a != b
  override def lt(a: BigInt, b: BigInt): Boolean = a < b
  override def lteq(a: BigInt, b: BigInt): Boolean = a <= b
  override def gt(a: BigInt, b: BigInt): Boolean = a > b
  override def gteq(a: BigInt, b: BigInt): Boolean = a >= b
  override def min(a: BigInt, b: BigInt): BigInt = a.min(b)
  override def max(a: BigInt, b: BigInt): BigInt = a.max(b)
  // Cross-type conversions
  override def fromType[@specialized(Int, Long, Float, Double) B](b: B)(implicit c: ConvertableFrom[B]) = c.toBigInt(b)
  override def toType[@specialized(Int, Long, Float, Double) B](a: BigInt)(implicit c: ConvertableTo[B]) = c.fromBigInt(a)
}
/** Numeric evidence for BigDecimal. */
trait BigDecimalIsNumeric
extends Numeric[BigDecimal] with ConvertableFromBigDecimal with ConvertableToBigDecimal {
  // Constants
  override def zero: BigDecimal = BigDecimal(0.0)
  override def one: BigDecimal = BigDecimal(1.0)
  // Arithmetic (delegates to BigDecimal's own methods)
  override def plus(a: BigDecimal, b: BigDecimal): BigDecimal = a + b
  override def minus(a: BigDecimal, b: BigDecimal): BigDecimal = a - b
  override def times(a: BigDecimal, b: BigDecimal): BigDecimal = a * b
  override def div(a: BigDecimal, b: BigDecimal): BigDecimal = a / b
  override def mod(a: BigDecimal, b: BigDecimal): BigDecimal = a % b
  override def negate(a: BigDecimal): BigDecimal = -a
  override def abs(a: BigDecimal): BigDecimal = a.abs
  override def pow(a: BigDecimal, b: BigDecimal): BigDecimal = a.pow(b)
  // Comparisons
  override def equiv(a: BigDecimal, b: BigDecimal): Boolean = a == b
  override def nequiv(a: BigDecimal, b: BigDecimal): Boolean = a != b
  override def lt(a: BigDecimal, b: BigDecimal): Boolean = a < b
  override def lteq(a: BigDecimal, b: BigDecimal): Boolean = a <= b
  override def gt(a: BigDecimal, b: BigDecimal): Boolean = a > b
  override def gteq(a: BigDecimal, b: BigDecimal): Boolean = a >= b
  override def min(a: BigDecimal, b: BigDecimal): BigDecimal = a.min(b)
  override def max(a: BigDecimal, b: BigDecimal): BigDecimal = a.max(b)
  // Cross-type conversions
  override def fromType[@specialized(Int, Long, Float, Double) B](b: B)(implicit c: ConvertableFrom[B]) = c.toBigDecimal(b)
  override def toType[@specialized(Int, Long, Float, Double) B](a: BigDecimal)(implicit c: ConvertableTo[B]) = c.fromBigDecimal(a)
}
/**
 * This companion object provides the instances (e.g. IntIsNumeric)
 * associating the type class (Numeric) with its member type (Int).
 *
 * Being in the companion, these instances sit in implicit scope and are
 * found automatically wherever a Numeric[Int], Numeric[Long], etc. is needed.
 */
object Numeric {
  implicit object IntIsNumeric extends IntIsNumeric
  implicit object LongIsNumeric extends LongIsNumeric
  implicit object FloatIsNumeric extends FloatIsNumeric
  implicit object DoubleIsNumeric extends DoubleIsNumeric
  implicit object BigIntIsNumeric extends BigIntIsNumeric
  implicit object BigDecimalIsNumeric extends BigDecimalIsNumeric
  // Convenience summoner: Numeric.numeric[A] == implicitly[Numeric[A]].
  def numeric[@specialized(Int, Long, Float, Double) A:Numeric]:Numeric[A] = implicitly[Numeric[A]]
}
// Import-site bundle of implicit enrichments wrapping values in
// FastNumericOps plus per-literal-type ops wrappers (defined elsewhere in
// this package). The "Fast" vs "Easy" distinction is carried by the ops
// classes themselves — TODO confirm their semantics there.
object FastImplicits {
  implicit def infixOps[@specialized(Int, Long, Float, Double) A:Numeric](a:A) = new FastNumericOps(a)
  implicit def infixIntOps(i:Int) = new LiteralIntOps(i)
  implicit def infixLongOps(l:Long) = new LiteralLongOps(l)
  implicit def infixFloatOps(f:Float) = new LiteralFloatOps(f)
  implicit def infixDoubleOps(d:Double) = new LiteralDoubleOps(d)
  implicit def infixBigIntOps(f:BigInt) = new LiteralBigIntOps(f)
  implicit def infixBigDecimalOps(d:BigDecimal) = new LiteralBigDecimalOps(d)
  // Convenience summoner, mirroring Numeric.numeric.
  def numeric[@specialized(Int, Long, Float, Double) A:Numeric]:Numeric[A] = implicitly[Numeric[A]]
}
// Same shape as FastImplicits, but generic values are wrapped in
// EasyNumericOps instead of FastNumericOps (see those classes for the
// difference).
object EasyImplicits {
  implicit def infixOps[@specialized(Int, Long, Float, Double) A:Numeric](a:A) = new EasyNumericOps(a)
  implicit def infixIntOps(i:Int) = new LiteralIntOps(i)
  implicit def infixLongOps(l:Long) = new LiteralLongOps(l)
  implicit def infixFloatOps(f:Float) = new LiteralFloatOps(f)
  implicit def infixDoubleOps(d:Double) = new LiteralDoubleOps(d)
  implicit def infixBigIntOps(f:BigInt) = new LiteralBigIntOps(f)
  implicit def infixBigDecimalOps(d:BigDecimal) = new LiteralBigDecimalOps(d)
  // Convenience summoner, mirroring Numeric.numeric.
  def numeric[@specialized(Int, Long, Float, Double) A:Numeric]:Numeric[A] = implicitly[Numeric[A]]
}
| azavea/numeric | src/main/scala/com/azavea/math/Numeric.scala | Scala | mit | 13,738 |
package com.zhranklin.homepage.notice
import com.zhranklin.homepage.RouteService
trait NoticeRoute extends RouteService {
  // NoticeServiceObjects.serviceList.map(_.getUrls.take(3) mkString "\\n").foreach(println)
  // Fetched once on first access: up to 5 articles per notice source, keyed
  // by source name.
  lazy val news = NoticeServiceObjects.serviceList.map(s ⇒ (s.source, s.notices().map(s.toArticle).take(5).toList)).toList.toMap
  abstract override def myRoute = super.myRoute ~
    path("notice") {
      // Index page: one (name, link) pair per known source.
      lazy val sources = news.keys.toList.sorted.map(s ⇒ (s, "/notice/" + encode(s)))
      complete {
        html.notice.render(sources)
      }
    } ~
    path("notice" / Segment) { sourceRaw ⇒
      parameter('url) { url ⇒
        val source = decode(sourceRaw)
        complete {
          // NOTE(review): news(source) throws for an unknown source and .head
          // throws when no article matches the url — both surface as a 500.
          // Confirm whether a rejection/404 would be preferable.
          html.noticeArticle.render(news(source).filter(a ⇒ decode(a.itemLink).contains(url)).head)
        }
      } ~
      pathEnd {
        val source = decode(sourceRaw)
        // NOTE(review): Option.get throws NoSuchElementException for an
        // unknown source (500 instead of 404) — confirm this is intended.
        news.get(source).map(notices ⇒ complete {
          html.index.render(source, "notice", notices)
        }).get
      }
    }
}
| zhranklin/Private_Blog | server/src/main/scala/com/zhranklin/homepage/notice/NoticeRoute.scala | Scala | gpl-3.0 | 1,024 |
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird.util.summer
import org.scalatest.{ PropSpec, Matchers }
import org.scalatest.prop.PropertyChecks
class SyncSummingQueueProperties extends PropSpec with PropertyChecks with Matchers {
  import AsyncSummerLaws._

  property("Summing with and without the summer should match") {
    forAll { (inputs: List[List[(Int, Long)]],
              flushFrequency: FlushFrequency,
              bufferSize: BufferSize,
              memoryFlushPercent: MemoryFlushPercent) =>
      // Instrumentation counters required by SyncSummingQueue's constructor.
      val timeOutCounter = Counter("timeOut")
      val sizeCounter = Counter("size")
      val memoryCounter = Counter("memory")
      val tuplesIn = Counter("tuplesIn")
      val tuplesOut = Counter("tuplesOut")
      val putCounter = Counter("put")

      val summer = new SyncSummingQueue[Int, Long](
        bufferSize,
        flushFrequency,
        memoryFlushPercent,
        memoryCounter,
        timeOutCounter,
        sizeCounter,
        putCounter,
        tuplesIn,
        tuplesOut)

      // The law: pushing through the summer must yield the same sums as a
      // direct semigroup fold of the raw inputs.
      assert(summingWithAndWithoutSummerShouldMatch(summer, inputs))
    }
  }
}
| avibryant/algebird | algebird-util/src/test/scala/com/twitter/algebird/util/summer/SyncSummingQueueProperties.scala | Scala | apache-2.0 | 1,612 |
/* Generated File */
package models.table.store
import com.kyleu.projectile.services.database.slick.SlickQueryService.imports._
import java.time.ZonedDateTime
import models.store.StaffRow
import models.table.address.AddressRowTable
import scala.language.higherKinds
object StaffRowTable {
  // Base Slick query over the "staff" table.
  val query = TableQuery[StaffRowTable]
  // Single-row and batched lookups by primary key.
  def getByPrimaryKey(staffId: Int) = query.filter(_.staffId === staffId).result.headOption
  def getByPrimaryKeySeq(staffIdSeq: Seq[Int]) = query.filter(_.staffId.inSet(staffIdSeq)).result
  // Lookups by the store_id and address_id foreign keys.
  def getByStoreId(storeId: Int) = query.filter(_.storeId === storeId).result
  def getByStoreIdSeq(storeIdSeq: Seq[Int]) = query.filter(_.storeId.inSet(storeIdSeq)).result
  def getByAddressId(addressId: Int) = query.filter(_.addressId === addressId).result
  def getByAddressIdSeq(addressIdSeq: Seq[Int]) = query.filter(_.addressId.inSet(addressIdSeq)).result
  // Join helpers: inner ("with") and left-outer ("withOpt") joins to the
  // referenced address and store tables.
  implicit class StaffRowTableExtensions[C[_]](q: Query[StaffRowTable, StaffRow, C]) {
    def withAddressRow = q.join(AddressRowTable.query).on(_.addressId === _.addressId)
    def withAddressRowOpt = q.joinLeft(AddressRowTable.query).on(_.addressId === _.addressId)
    def withStoreRow = q.join(StoreRowTable.query).on(_.storeId === _.storeId)
    def withStoreRowOpt = q.joinLeft(StoreRowTable.query).on(_.storeId === _.storeId)
  }
}
// Slick table mapping for "staff"; generated code (see file header) — column
// definitions mirror the database schema.
class StaffRowTable(tag: slick.lifted.Tag) extends Table[StaffRow](tag, "staff") {
  val staffId = column[Int]("staff_id", O.PrimaryKey, O.AutoInc)
  val firstName = column[String]("first_name")
  val lastName = column[String]("last_name")
  val addressId = column[Int]("address_id")
  val email = column[Option[String]]("email")
  val storeId = column[Int]("store_id")
  val active = column[Boolean]("active")
  val username = column[String]("username")
  val password = column[Option[String]]("password")
  val lastUpdate = column[ZonedDateTime]("last_update")
  val picture = column[Option[Array[Byte]]]("picture")
  // Default projection: maps the full column tuple to/from StaffRow.
  override val * = (staffId, firstName, lastName, addressId, email, storeId, active, username, password, lastUpdate, picture) <> (
    (StaffRow.apply _).tupled,
    StaffRow.unapply
  )
}
| KyleU/boilerplay | app/models/table/store/StaffRowTable.scala | Scala | cc0-1.0 | 2,136 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.input
import java.io.File
import com.bwsw.sj.engine.core.testutils.Constants
/**
* @author Pavel Tomskikh
*/
object SjInputModuleBenchmarkConstants {
  // Host the benchmarked input instance is expected to listen on.
  val instanceHost = "localhost"
  // Pre-built stub input-streaming module jar; version is taken from Constants.sjVersion.
  val inputModule = new File(s"../../contrib/stubs/sj-stub-input-streaming/target/scala-2.12/" +
    s"sj-stub-input-streaming_2.12-${Constants.sjVersion}.jar")
  // Elements between checkpoints, and how many deliberate duplicates to send.
  val checkpointInterval = 10
  val numberOfDuplicates = 10
  val totalInputElements = 2 * checkpointInterval + numberOfDuplicates // increase/decrease a constant to change the number of input elements
}
| bwsw/sj-platform | core/sj-input-streaming-engine/src/test/scala/com/bwsw/sj/engine/input/SjInputModuleBenchmarkConstants.scala | Scala | apache-2.0 | 1,390 |
trait Semigroup[A] {
  // Binary combine operation, exposed as an extension method on A.
  extension (x: A) def combine(y: A): A
}
// Anonymous given instances with unimplemented bodies — presumably this file
// only needs to type-check (it reads like a compiler positive test).
given Semigroup[Int] = ???
given [A, B](using Semigroup[A], Semigroup[B]): Semigroup[(A, B)] = ???
object Test extends App {
((1, 1)) combine ((2, 2)) // doesn't compile
((1, 1): (Int, Int)) combine (2, 2) // compiles
//the error that compiler spat out was "value combine is not a member of ((Int, Int)) => (Int, Int)". what's
} | lampepfl/dotty | tests/pos/combine.scala | Scala | apache-2.0 | 398 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package model.report
import play.api.libs.json.{Json, OFormat}
// One row of the analytical-schemes report; every field except userId is
// optional because the underlying data may be absent for a candidate.
case class ApplicationForAnalyticalSchemesReport(userId: String,
                                                 firstName: Option[String],
                                                 lastName: Option[String],
                                                 firstSchemePreference: Option[String],
                                                 guaranteedInterviewScheme: Option[Boolean],
                                                 behaviouralTScore: Option[Double],
                                                 situationalTScore: Option[Double],
                                                 etrayTScore: Option[Double],
                                                 overallVideoScore: Option[Double]
                                                )
object ApplicationForAnalyticalSchemesReport {
  // Explicit type on the implicit definition: required by Scala 3 and avoids
  // the "implicit definition should have explicit type" warning in 2.13.
  // Json.format derives both Reads and Writes, i.e. an OFormat.
  implicit val applicationForAnalyticalSchemesReportFormat: OFormat[ApplicationForAnalyticalSchemesReport] =
    Json.format[ApplicationForAnalyticalSchemesReport]
}
| hmrc/fset-faststream | app/model/report/ApplicationForAnalyticalSchemesReport.scala | Scala | apache-2.0 | 1,597 |
package entities
import java.time._
import java.time.format.DateTimeFormatter
import play.api.libs.json._
/**
 * Common play-json serializers for java.time values.
 *
 * Instants are written as epoch milliseconds; Durations as milliseconds.
 */
object CommonJsonReadWrite {
  /** Writes an Instant as its epoch-millisecond value. */
  implicit val inWrites = new Writes[Instant] {
    override def writes(in: Instant): JsValue =
      JsNumber(in.toEpochMilli)
  }
  /** Writes a Duration as its length in milliseconds. */
  implicit val duWrites = new Writes[Duration] {
    override def writes(d: Duration): JsValue =
      JsNumber(d.toMillis)
  }
  /** Reads an Instant from a JSON number of epoch seconds. */
  val instantInSecondsReads = new Reads[Instant] {
    override def reads(json: JsValue) = {
      try {
        // Parse as Long (was Integer.parseInt): epoch-second values past
        // 2038-01-19 do not fit in an Int.
        JsSuccess(Instant.ofEpochSecond(java.lang.Long.parseLong(json.toString())))
      } catch {
        case _: Exception => JsError(s"${json.toString()} is not Instant")
      }
    }
  }
  /** Reads an Instant from a "yyyy-MM-dd HH:mm:ss" local time in Europe/Prague. */
  val instantInIso = new Reads[Instant] {
    //2017-09-10 11:10:45
    override def reads(json: JsValue) = {
      try {
        val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
        val parsedDate = LocalDateTime.parse(json.as[String], formatter)
        val systemZone = ZoneId.of("Europe/Prague")
        // Resolve the UTC offset that was in effect at the parsed date itself.
        // The previous code took the offset at Instant.now, which is wrong for
        // dates on the other side of a daylight-saving transition.
        JsSuccess(parsedDate.atZone(systemZone).toInstant)
      } catch {
        case _: Exception => JsError(s"${json.toString()} is not Instant")
      }
    }
  }
  /** Reads a Duration from a JSON number of milliseconds. */
  implicit val duReads = new Reads[Duration] {
    override def reads(json: JsValue) = {
      try {
        // Long, not Int: millisecond durations easily exceed Int.MaxValue (~24.8 days).
        JsSuccess(Duration.ofMillis(java.lang.Long.parseLong(json.toString())))
      } catch {
        case _: Exception => JsError(s"${json.toString()} is not Duration")
      }
    }
  }
}
| vavravl1/home_center | app/entities/CommonJsonReadWrite.scala | Scala | mit | 1,656 |
package uk.co.turingatemyhamster.shortbol
package ops
package rewriteRule
import monocle.Monocle._
import sharedAst._
import sharedAst.sugar._
import longhandAst.{InstanceExp, PropertyExp}
import longhandAst.sugar._
import RewriteRule.allElements
import pragma.DefaultPrefixPragma
import terms.RDF
import terms.SBOL.displayId
/**
*
*
* @author Matthew Pocock
*/
object RepairIdentities extends InstanceRewriter {
  // Guards: a rule tagged "at noDisplayId"/"at noAbout" only fires when the
  // property list lacks the corresponding property.
  final private val noDisplayId = (_: List[PropertyExp]).forall(_.property != displayId)
  final private val noAbout = (_: List[PropertyExp]).forall(_.property != RDF.about)
  import optics.longhand.InstanceExp._
  import optics.longhand.ConstructorApp._
  import optics.longhand.SBFile._
  import optics.longhand.PropertyValue._
  import optics.longhand.PropertyExp._
  import Nested.{value => nestedValue}
  // Prepends a freshly generated displayId property when none is present.
  lazy val bodyRequiresDisplayId: RewriteRule[List[PropertyExp]] = RewriteRule { (ps: List[PropertyExp]) =>
    for {
      id <- Eval.nextIdentifier
    } yield (displayId := slLit(id.name)) ::: ps
  } at noDisplayId
  // Derives an rdf:about for a child from its parent's identifier plus the
  // child's displayId ("<parent>/<displayId>"). Bare LocalName parents have
  // no namespace to extend, so no about is added in that case.
  def bodyRequiresAbout(parentId: Identifier): RewriteRule[List[PropertyExp]] = RewriteRule { (ps: List[PropertyExp]) =>
    for {
      longhandAst.PropertyExp(_, longhandAst.PropertyValue.Literal(StringLiteral(s, _, _))) <- ps find (_.property == displayId)
      about <- parentId match {
        case LocalName(_) =>
          None
        case QName(pfx, LocalName(ln)) =>
          Some(pfx :# s"$ln/${s.asString}")
        case Url(url) =>
          Some(Url(s"$url/${s.asString}"))
      }
    } yield {
      (RDF.about := about) ::: ps
    }
  } at noAbout
  // Recurses into nested constructor-app property values, threading the
  // enclosing rdf:about down as the parent identifier for each child.
  lazy val recurseOverBody: RewriteRule[List[PropertyExp]] = RewriteRule { (ps: List[PropertyExp]) =>
    for {
      longhandAst.PropertyExp(_, longhandAst.PropertyValue.Reference(about)) <- ps find (_.property == RDF.about)
    } yield
      (bodyRequiresDisplayId andThen bodyRequiresAbout(about) andThen recurseOverBody) at
        body at
        nestedValue at
        asNested at
        value at
        allElements
  }
  lazy val recursefromInstanceExp = recurseOverBody at body at cstrApp
  lazy val cstrAppRequiresDisplayId = bodyRequiresDisplayId at body
  // Top-level repair: seeds rdf:about from the instance's own identifier
  // (resolved through the default-prefix pragma) and, when that identifier
  // has a local name, a matching displayId.
  lazy val instanceExpRequiersDisplayIdAndAbout: RewriteRule[InstanceExp] = RewriteRule { (ie: InstanceExp) =>
    RewriteRule { (bdy: List[PropertyExp]) =>
      for {
        id <- DefaultPrefixPragma.rewrite(ie.identifier)
      } yield {
        val withAbout = (RDF.about := id) ::: bdy
        id match {
          case LocalName(ln) =>
            (displayId := slLit(ln)) ::: withAbout
          case QName(_, LocalName(ln)) =>
            (displayId := slLit(ln)) ::: withAbout
          case _ =>
            withAbout
        }
      }
    } at noDisplayId at noAbout at body at cstrApp
  }
  // Fallback: if still missing, use the unresolved instance identifier as about.
  lazy val instanceExpRequiresAbout: RewriteRule[InstanceExp] = RewriteRule { (ie: InstanceExp) =>
    (cstrApp composeLens body) modify
      ((RDF.about := ie.identifier) ::: _) apply
      ie
  } at { (ie: InstanceExp) =>
    ie.cstrApp.body.collectFirst{ case PropertyExp(RDF.about, _) => () }.isEmpty
  }
  // Full pipeline: repair the top-level instance, then recurse into children.
  lazy val instanceRewrite =
    instanceExpRequiersDisplayIdAndAbout andThen instanceExpRequiresAbout andThen recursefromInstanceExp
}
| drdozer/shortbol | shortbol/core/shared/src/main/scala/uk/co/turingatemyhamster/shortbol/ops/rewriteRule/RepairIdentities.scala | Scala | apache-2.0 | 3,245 |
/*
* Copyright (C) 2014-2015 by Nokia.
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package wookie.yql.common
import argonaut._
import Argonaut._
object CodecsUtils {
  /**
   * Walks an argonaut array cursor from `curs` rightwards, decoding each
   * element with `func` and collecting the successfully decoded values.
   *
   * Notes (behavior preserved from the original):
   *  - values are prepended, so the returned list is in reverse array order;
   *  - elements whose decode yields no value are silently skipped.
   *
   * Annotated @tailrec so the compiler guarantees constant stack usage for
   * arbitrarily long arrays (the recursion was already in tail position).
   *
   * @param curs  cursor positioned on the current array element (failed cursor ends the walk)
   * @param func  decoder applied at each cursor position
   * @param accum results accumulated so far
   */
  @annotation.tailrec
  def loopOverArray[A](curs: ACursor, func: ACursor => DecodeResult[A], accum: DecodeResult[List[A]]): DecodeResult[List[A]] = {
    if (curs.succeeded) {
      val currentDecodedValues = func(curs)
      val updatedAccum = accum.map {
        a =>
          currentDecodedValues.value match {
            case Some(b) => b :: a
            case None => a
          }
      }
      loopOverArray(curs.right, func, updatedAccum)
    } else {
      accum
    }
  }
}
| elyast/wookie | examples/src/main/scala/wookie/yql/common/CodecsUtils.scala | Scala | apache-2.0 | 1,274 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model
import org.openapitools.client.core.ApiModel
// Generated model (see file header): a Jenkins queue item that has left the
// build queue. All fields are optional — presumably because the remote API
// may omit any of them.
case class QueueLeftItem (
  `class`: Option[String] = None,
  actions: Option[Seq[CauseAction]] = None,
  blocked: Option[Boolean] = None,
  buildable: Option[Boolean] = None,
  id: Option[Int] = None,
  inQueueSince: Option[Int] = None,
  params: Option[String] = None,
  stuck: Option[Boolean] = None,
  task: Option[FreeStyleProject] = None,
  url: Option[String] = None,
  why: Option[String] = None,
  cancelled: Option[Boolean] = None,
  executable: Option[FreeStyleBuild] = None
) extends ApiModel
| cliffano/swaggy-jenkins | clients/scala-akka/generated/src/main/scala/org/openapitools/client/model/QueueLeftItem.scala | Scala | mit | 939 |
package com.databricks.spark.sql.perf.mllib
import com.databricks.spark.sql.perf._
import com.typesafe.scalalogging.slf4j.{LazyLogging => Logging}
import org.apache.spark.sql._
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.ml.Transformer
/**
 * Runs one ML benchmark: trains the algorithm's estimator on generated data,
 * scores the resulting model on both training and test sets, and packages the
 * timings/metrics into a BenchmarkResult.
 */
class MLTransformerBenchmarkable(
    params: MLParams,
    test: BenchmarkAlgorithm,
    sqlContext: SQLContext)
  extends Benchmarkable with Serializable {
  import MLTransformerBenchmarkable._
  // Populated by beforeBenchmark(); null until then.
  private var testData: DataFrame = null
  private var trainingData: DataFrame = null
  private var testDataCount: Option[Long] = None
  private val param = MLBenchContext(params, sqlContext)
  override val name = test.name
  override protected val executionMode: ExecutionMode = ExecutionMode.SparkPerfResults
  override protected def beforeBenchmark(): Unit = {
    logger.info(s"$this beforeBenchmark")
    try {
      // Materialize and cache both datasets up front so dataset generation
      // does not pollute the timed sections below.
      testData = test.testDataSet(param)
      testData.cache()
      testDataCount = Some(testData.count())
      trainingData = test.trainingDataSet(param)
      trainingData.cache()
      trainingData.count()
    } catch {
      case e: Throwable =>
        println(s"$this error in beforeBenchmark: ${e.getStackTraceString}")
        throw e
    }
  }
  override protected def doBenchmark(
      includeBreakdown: Boolean,
      description: String,
      messages: ArrayBuffer[String]): BenchmarkResult = {
    try {
      // Timed fit of the estimator; measureTime returns (duration, result).
      val (trainingTime, model: Transformer) = measureTime {
        logger.info(s"$this: train: trainingSet=${trainingData.schema}")
        val estimator = test.getEstimator(param)
        estimator.fit(trainingData)
      }
      logger.info(s"model: $model")
      val (_, scoreTraining) = measureTime {
        test.score(param, trainingData, model)
      }
      val (scoreTestTime, scoreTest) = measureTime {
        test.score(param, testData, model)
      }
      // NOTE(review): testDataCount.get assumes beforeBenchmark ran first;
      // test metric is normalized per test row, training metric is not.
      val ml = MLResult(
        trainingTime = Some(trainingTime.toMillis),
        trainingMetric = Some(scoreTraining),
        testTime = Some(scoreTestTime.toMillis),
        testMetric = Some(scoreTest / testDataCount.get))
      BenchmarkResult(
        name = name,
        mode = executionMode.toString,
        parameters = Map.empty,
        executionTime = Some(trainingTime.toMillis),
        mlParams = Some(params),
        mlResult = Some(ml))
    } catch {
      case e: Exception =>
        // Failures are reported in the result rather than rethrown.
        BenchmarkResult(
          name = name,
          mode = executionMode.toString,
          parameters = Map.empty,
          mlParams = Some(params),
          failure = Some(Failure(e.getClass.getSimpleName,
            e.getMessage + ":\\n" + e.getStackTraceString)))
    } finally {
      Option(testData).map(_.unpersist())
      Option(trainingData).map(_.unpersist())
    }
  }
  /** Human-readable description: the algorithm plus its parameter listing. */
  def prettyPrint: String = {
    val paramString = pprint(params).mkString("\\n")
    s"$test\\n$paramString"
  }
}
object MLTransformerBenchmarkable {
  /**
   * Renders the Some-valued fields of a case class as "  key=value" lines,
   * dropping fields that are None (unset parameters).
   */
  private def pprint(p: AnyRef): Seq[String] = {
    getCCParams(p).collect {
      case (key, Some(value)) => s"  $key=$value"
    }.toSeq
  }
  /**
   * Reflectively converts a case class instance into a field-name -> value
   * map. Adapted from
   * http://stackoverflow.com/questions/1226555/case-class-to-map-in-scala
   * (rewritten with foldLeft; the original `/:` operator is deprecated).
   */
  private def getCCParams(cc: AnyRef): Map[String, Any] =
    cc.getClass.getDeclaredFields.foldLeft(Map[String, Any]()) { (a, f) =>
      f.setAccessible(true)
      a + (f.getName -> f.get(cc))
    }
}
| josiahsams/spark-sql-perf-spark2.0.0 | src/main/scala/com/databricks/spark/sql/perf/mllib/MLTransformerBenchmarkable.scala | Scala | apache-2.0 | 3,368 |
package com.peterparameter.ecm.basic
import com.peterparameter.ecm.common.Alias.Num
import com.peterparameter.ecm.common.CurveUtils._
import com.peterparameter.ecm.common._
import spire.math._
import spire.random._
import scala.util.Try
/**
*/
/**
 * Montgomery-form elliptic curve generation and X/Z-coordinate arithmetic,
 * used for the elliptic curve method (ECM) of factorisation.
 *
 * Curve generation follows the u = sigma^2 - 5, v = 4*sigma construction
 * (Suyama's parameterisation); when a modular inverse needed during setup
 * does not exist, a non-trivial factor of n has been found as a by-product.
 */
object Montgomery {
  import SafeLong._
  /** Builds an arithmetic context for `curve` anchored at `initialPoint`. */
  def arithmetic(curve: MontgomeryCurve, initialPoint: MontgomeryPoint): MontgomeryArithmetic =
    new MontgomeryArithmetic(curve, initialPoint)
  /*
   * Projective point on the elliptic curve, with Y-coordinate omitted (x = X / (Z ^ 2), y = X / (Z ^ 3))
   * NOTE(review): for Montgomery X/Z coordinates the affine x is usually
   * X / Z (the Jacobian X/Z^2 form above looks copied from elsewhere) —
   * confirm against the ladder formulas below.
   */
  case class MontgomeryPoint(x: Num, z: Num)
  /*
   * Curve B * y ^ 2 = x ^ 3 + A * x ^ 2 + x
   * `characteristic` is the modulus n all arithmetic is reduced by.
   */
  case class MontgomeryCurve(a: Num, b: Num, characteristic: Num)
  /** Picks a random sigma and attempts to build a curve/point pair modulo n. */
  def generate(n: Num, rng: Generator = spire.random.GlobalRng): CurveResult =
    genCurve(n, getSigma(n, rng))
  // Either a factor of n discovered during curve construction, or a usable
  // curve together with its starting point.
  type CurveResult = Either[Factor, (MontgomeryCurve, MontgomeryPoint)]
  private def genCurve(n: Num, sigma: Num): CurveResult = {
    // Modular inverse mod n; None when gcd(number, n) != 1.
    def modInv(number: Num): Option[Num] = Try(SafeLong(number.toBigInt.modInverse(n.toBigInt))).toOption
    val u = sigma * sigma - five
    val v = four * sigma
    val x = u * u * u % n
    val z = v * v * v % n
    val candidate = four * x * v % n
    // The inverse of `candidate` does not exist: gcd(candidate, n) is either
    // n itself (retry with a fresh sigma) or a proper factor of n (success).
    def degenerate(candidate: Num): CurveResult = {
      val gcd = candidate.gcd(n)
      if (gcd === n) generate(n)
      else Left(Factor(gcd))
    }
    // The inverse t1 exists: finish computing the curve coefficient A.
    def nonDegenerate(t1: Num): CurveResult = {
      val t2 = (v - u + n) % n
      val a = (t2 * t2 * t2 * (three * u + v) * t1 - two) % n
      Right((MontgomeryCurve(a, one, n), MontgomeryPoint(x % n, z % n)))
    }
    val inverse = modInv(candidate)
    inverse.fold(degenerate(candidate))(nonDegenerate)
  }
  /** Point arithmetic (negate, double, differential add, scalar multiply) modulo the curve's characteristic. */
  class MontgomeryArithmetic(curve: MontgomeryCurve, val initialPoint: MontgomeryPoint) {
    import Utils._
    // Shadows the `four` imported at the object level; kept local on purpose.
    private val four = SafeLong(4L)
    val infinity: MontgomeryPoint = MontgomeryPoint(SafeLong.zero, SafeLong.zero)
    private val n = curve.characteristic
    def neg(p: MontgomeryPoint): MontgomeryPoint = MontgomeryPoint(-p.x, p.z)
    /** Point doubling in X/Z coordinates, reduced mod n. */
    def double(p: MontgomeryPoint): MontgomeryPoint = {
      val xx = p.x * p.x
      val zz = p.z * p.z
      val xz = p.x * p.z
      val diff = (xx - zz + n) % n
      val x = (diff * diff) % n
      val y = (four * xz * (xx + curve.a * xz + zz)) % n
      MontgomeryPoint(x, y)
    }
    /**
     * Differential addition: computes p1 + p2 given `origin`, which (as used
     * by the ladder below) is the difference p1 - p2.
     */
    def add(p1: MontgomeryPoint, p2: MontgomeryPoint)(origin: MontgomeryPoint): MontgomeryPoint = {
      val d1 = (p1.x * p2.x - p1.z * p2.z) % n
      val d2 = (p1.x * p2.z - p1.z * p2.x) % n
      val x = (origin.z * d1 * d1) % n
      val z = (origin.x * d2 * d2) % n
      MontgomeryPoint(x, z)
    }
    // Montgomery ladder over the bits of `multiple`; maintains the invariant
    // t - u = p so `add` can use p as the difference point. Assumes
    // toBitVector yields most-significant bit first — TODO confirm in Utils.
    private def multiplicationLadder(p: MontgomeryPoint, multiple: Num): MontgomeryPoint = {
      var u: MontgomeryPoint = p
      var t: MontgomeryPoint = double(p)
      val bv = multiple.toBitVector
      val range = 1 until bv.length
      for (i <- range) {
        if (bv(i)) {
          u = add(t, u)(p)
          t = double(t)
        } else {
          t = add(u, t)(p)
          u = double(u)
        }
      }
      u
    }
    /** Scalar multiplication: [multiple]p, with fast paths for 0, 1 and 2. */
    def mul(p: MontgomeryPoint, multiple: Num): MontgomeryPoint = multiple match {
      case SafeLong.zero => infinity
      case SafeLong.one => p
      case SafeLong.two => double(p)
      case _ => multiplicationLadder(p, multiple)
    }
  }
}
| pnosko/spire-ecm | src/main/scala/com/peterparameter/ecm/basic/Montgomery.scala | Scala | mit | 3,331 |
/**
* This file is part of mycollab-web.
*
* mycollab-web is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-web is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-web. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.crm.events
import com.esofthead.mycollab.eventmanager.ApplicationEvent
/**
* @author MyCollab Ltd.
* @since 5.0.3
*/
object OpportunityEvent {
  // Each event pairs the raising component (`source`) with an opaque payload
  // (`data`); the class name carries the requested action.
  class GotoAdd(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
  class GotoEdit(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
  class GotoList(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
  class GotoRead(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
  class Save(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
  class Search(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
  class GotoContactRoleEdit(source: AnyRef, data: AnyRef) extends ApplicationEvent(source, data)
}
| maduhu/mycollab | mycollab-web/src/main/scala/com/esofthead/mycollab/module/crm/events/OpportunityEvent.scala | Scala | agpl-3.0 | 1,532 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.rdbms.fs.postgres
import slamdata.Predef._
import quasar.physical.rdbms.fs.RdbmsCreate
import quasar.physical.rdbms.common.{CustomSchema, DefaultSchema, TablePath}
import doobie.syntax.string._
import doobie.free.connection.ConnectionIO
import doobie.util.fragment.Fragment
import scalaz.syntax.monad._
import scalaz.syntax.show._
trait PostgresCreate extends RdbmsCreate {
  /** Issues a `CREATE SCHEMA IF NOT EXISTS` statement for the given schema. */
  override def createSchema(schema: CustomSchema): ConnectionIO[Unit] = {
    val ddl = fr"CREATE SCHEMA IF NOT EXISTS" ++ Fragment.const(schema.shows)
    ddl.update.run.void
  }
  /**
   * Creates the backing table for `tablePath`, first ensuring its schema
   * exists when the path points at a custom (non-default) schema.
   */
  override def createTable(tablePath: TablePath): ConnectionIO[Unit] = {
    val ensureSchema: ConnectionIO[Unit] = tablePath.schema match {
      case DefaultSchema => ().point[ConnectionIO]
      case c: CustomSchema => createSchema(c)
    }
    val tableDdl =
      fr"CREATE TABLE IF NOT EXISTS" ++ Fragment.const(tablePath.shows) ++ fr"(data json NOT NULL)"
    (ensureSchema *> tableDdl.update.run).void
  }
}
| drostron/quasar | rdbms/src/main/scala/quasar/physical/rdbms/fs/postgres/PostgresCreate.scala | Scala | apache-2.0 | 1,574 |
package eu.timepit.crjdt.circe
import org.scalacheck.Properties
import org.scalacheck.Prop._
import eu.timepit.crjdt.circe.RegNodeConflictResolver.LWW
import eu.timepit.crjdt.circe.syntax._
import eu.timepit.crjdt.core.Replica
import eu.timepit.crjdt.core.syntax._
import io.circe.Json
object NodeToJsonDeleteSpec extends Properties("NodeToJsonDeleteSpec") {
  // Regression test for bug #19: a deleted list element must not reappear
  // when the document is rendered to JSON.
  property("delete list items") = secure {
    val listField = doc.downField("list")
    val emptyList = Replica.empty("p").applyCmd(listField := `[]`)
    val withFirst = emptyList.applyCmd(listField.iter.insert("1"))
    val withSecond = withFirst.applyCmd(listField.iter.next.insert("2"))
    val afterDelete = withSecond.applyCmd(listField.iter.next.delete)
    val expected = Json.obj(
      "list" -> Json.arr(Json.fromString("2"))
    )
    afterDelete.document.toJson ?= expected
  }
}
| fthomas/crjdt | modules/circe/src/test/scala/eu/timepit/crjdt/circe/NodeToJsonDeleteSpec.scala | Scala | apache-2.0 | 742 |
package pt.org.apec.services.books.db
import godiva.slick._
import slick.driver.PostgresDriver
import scala.concurrent.ExecutionContext
import java.util.UUID
import org.joda.time.DateTime
import scala.concurrent.Future
import org.postgresql.util.PSQLException
import pt.org.apec.services.books.common._
import pt.org.apec.services.books.common.PublicationSorting._
import godiva.core.pagination.PaginationRequest
import godiva.core.pagination.PaginatedResult
import com.github.tototoshi.slick.PostgresJodaSupport._
import com.github.tminglei.slickpg.TsVector
/**
* @author ragb
*/
/**
 * Persistence layer for the books service: CRUD, listing, pagination and
 * full-text search over categories, authors, publication statuses,
 * publications and their files, implemented with Slick on PostgreSQL.
 *
 * Unique-constraint violations (PostgreSQL SQLSTATE 23505) surface as
 * [[DuplicateFound]] failures via `mapDuplicateException`.
 *
 * NOTE(review): full-text search hard-codes the "portuguese" text-search
 * configuration — confirm it is installed in the target database.
 */
trait PublicationsStore extends SchemaManagement with TablesSchema with TablesComponent with Pagination {
  this: DriverComponent[CustomPostgresDriver] with DatabaseComponent[PostgresDriver] with DefaultExecutionContext =>
  import driver.api._
  import MaybeUtils.MaybeExtensionMethods
  override def tables = super[TablesComponent].tables
  /** Inserts a new category with a fresh GUID; fails with [[DuplicateFound]] on slug/name collisions. */
  def createCategory(category: NewCategoryRequest): Future[Category] = database.run(Queries.insertCategory(Category(createGUID, category.name, category.slug)))
    .recoverWith(mapDuplicateException)
  def getCategories: Future[Seq[Category]] = database.run(Queries.listCategories.result)
  def getCategoryBySlug(slug: String): Future[Option[Category]] = database.run(Queries.getCategoryBySlug(slug).result.headOption)
  /** Lists authors; when `query` is given, ranks them by full-text match instead. */
  def getAuthors(query: Option[String] = None): Future[Seq[Author]] = database.run(Queries.listAuthors(query).result)
  def createAuthor(newAuthor: NewAuthorRequest): Future[Author] = database.run(Queries.insertAuthor(Author(createGUID, newAuthor.name, newAuthor.slug))).recoverWith(mapDuplicateException)
  def getAuthorBySlug(slug: String): Future[Option[Author]] = database.run(Queries.getAuthorBySlug(slug).result.headOption)
  def getAuthorByGUID(guid: UUID): Future[Option[Author]] = database.run(Queries.getAuthorByGUID(guid).result.headOption)
  def createPublicationStatus(newPublicationStatus: NewPublicationStatusRequest): Future[PublicationStatus] = database.run(Queries.insertPublicationStatus(PublicationStatus(createGUID, newPublicationStatus.name, newPublicationStatus.slug, newPublicationStatus.score))).recoverWith(mapDuplicateException)
  def getPublicationStatuses: Future[Seq[PublicationStatus]] = database.run(Queries.listPublicationStatuses.result)
  def getPublicationStatusBySlug(slug: String): Future[Option[PublicationStatus]] = database.run(Queries.getPublicationStatusBySlug(slug).result.headOption)
  /**
   * Creates a publication plus its author/category links and search index row,
   * all in one transaction, and returns the assembled [[PublicationInfo]].
   */
  def createPublication(newPublication: NewPublicationRequest): Future[PublicationInfo] = {
    val guid = createGUID
    val action = (for {
      p <- Queries.insertPublication(Publication(guid, newPublication.title, newPublication.slug, newPublication.publicationYear, DateTime.now(), None, newPublication.notes, newPublication.publicationStatusGUID))
      _ <- Queries.insertPublicationAuthors(newPublication.authors)(p)
      _ <- Queries.insertPublicationCategories(newPublication.categories)(p)
      s <- newPublication.publicationStatusGUID
        .map { guid => Queries.getPublicationStatusByGUID(guid).result.headOption }
        .getOrElse(DBIO.successful(None))
      _ <- Queries.insertPublicationSearch(p.guid, p.title, "portuguese")
      result <- mkPublicationInfo(p, s)
    } yield (result)).transactionally
    database.run(action) recoverWith (mapDuplicateException)
  }
  /** Pages through publications (optionally filtered), sorted by the requested attribute. */
  def getPublications(paginationRequest: PaginationRequest, order: PublicationOrder = PublicationOrder(CreatedAt, Desc), filters: PublicationFilters = PublicationFilters()): Future[PaginatedResult[PublicationInfo]] = {
    val q = for {
      (p, s) <- Queries.getPublications(filters) joinLeft publicationStatuses on (_.publicationStatusGUID === _.guid)
    } yield (p, s)
    // TODO: use the direction attribute.
    val sortedQ = order.attribute match {
      case `Title` => q.sortBy(_._1.title.asc)
      case `CreatedAt` => q.sortBy(_._1.createdAt.desc)
      case `UpdatedAt` => q.sortBy(_._1.updatedAt.desc)
    }
    val publications = sortedQ.paginated(paginationRequest)
    // Resolve each row's authors/categories before returning the page.
    val actions = publications flatMap {
      case PaginatedResult(ps, page, totals) =>
        val infos: Seq[DBIO[PublicationInfo]] = ps map { case (p, s) => mkPublicationInfo(p, s) }
        DBIO.sequence(infos).map(PaginatedResult(_, page, totals))
    }
    database.run(actions)
  }
  /** Full-text search over the publication search table, ordered by ts_rank. */
  def searchPublications(query: String, pagination: PaginationRequest) = {
    val q = (for {
      ((p, (_, _, rank, _)), s) <- Queries.getPublications(PublicationFilters()) join publicationSearches.map(p => (p.publicationGUID, p.vector, tsRank(p.vector, toTsQuery(query, Some("portuguese"))), p.languageConfig)).filter(p => p._2 @@ toTsQuery(query, Some("portuguese"))) on (_.guid === _._1) joinLeft publicationStatuses on (_._1.publicationStatusGUID === _.guid)
    } yield (p, s, rank)).sortBy(_._3)
    val publications = q.paginated(pagination)
    val actions = publications flatMap {
      case PaginatedResult(ps, page, totals) =>
        val infos: Seq[DBIO[PublicationInfo]] = ps map { case (p, s, _) => mkPublicationInfo(p, s) }
        DBIO.sequence(infos).map(PaginatedResult(_, page, totals))
    }
    database.run(actions)
  }
  def getPublicationGUIDFromSlug(slug: String): Future[Option[UUID]] = database.run(publications.filter(_.slug === slug).map(_.guid).result.headOption)
  /** Looks up one publication by GUID, including its (optional) status, authors and categories. */
  def getPublicationByGUID(guid: UUID): Future[Option[PublicationInfo]] = {
    // This gets confusing using a for compreension
    val q = for {
      (p, s) <- publications joinLeft publicationStatuses on (_.publicationStatusGUID === _.guid) if p.guid === guid
    } yield (p, s)
    val action = q.result.headOption
      .flatMap {
        case Some((p, s)) => mkPublicationInfo(p, s).map(Some.apply)
        case _ => DBIO.successful(None)
      }
    database.run(action)
  }
  /** Same as [[getPublicationByGUID]] but keyed by slug. */
  def getPublicationBySlug(slug: String): Future[Option[PublicationInfo]] = {
    // This gets confusing using a for compreension
    val q = for {
      (p, s) <- publications joinLeft publicationStatuses on (_.publicationStatusGUID === _.guid) if p.slug === slug
    } yield (p, s)
    val action = q.result.headOption
      .flatMap {
        case Some((p, s)) => mkPublicationInfo(p, s).map(Some.apply)
        case _ => DBIO.successful(None)
      }
    database.run(action)
  }
  // Combines a publication row with its authors, categories and status into
  // the API-facing PublicationInfo value.
  private def mkPublicationInfo(publication: Publication, status: Option[PublicationStatus]): DBIO[PublicationInfo] = {
    val authors = Queries.getPublicationAuthors(publication.guid).result
    val categories = Queries.getPublicationCategories(publication.guid).result
    (authors zip categories) map {
      case (a, c) =>
        PublicationInfo(publication.guid, a, c, publication.title, publication.slug, publication.publicationYear, publication.createdAt, publication.updatedAt, publication.notes, status)
    }
  }
  // Translates PostgreSQL unique-violation errors (SQLSTATE 23505) into the
  // service-level DuplicateFound failure.
  private val mapDuplicateException: PartialFunction[Throwable, Future[Nothing]] = {
    case e: PSQLException if e.getSQLState == "23505" => {
      Future.failed(new DuplicateFound())
    }
  }
  def createPublicationFile(publicationGUID: UUID, request: NewPublicationFileRequest): Future[PublicationFile] = database.run(Queries.insertPublicationFile(publicationGUID, request)).recoverWith(mapDuplicateException)
  /** Only files flagged `available` are returned. */
  def getPublicationFiles(publicationGUID: UUID): Future[Seq[PublicationFile]] = database.run(Queries.getPublicationFiles(publicationGUID).result)
  def getCategoryCounts: Future[Seq[WithPublicationCount[Category]]] = database.run(Queries.getCategoryCounts.result.map(s => s.map(t => WithPublicationCount(t._1, t._2))))
  private def createGUID = UUID.randomUUID()
  /** Reusable Slick query fragments; kept together so the table joins stay in one place. */
  object Queries {
    def insertCategory(category: Category) = (categories returning categories.map(_.guid) into ((c, guid) => c)) += category
    def listCategories = categories
    val getCategoryByGUID = categories.findBy(_.guid)
    val getCategoryBySlug = categories.findBy(_.slug)
    // With a query: rank authors by full-text relevance; without: plain listing.
    def listAuthors(query: Option[String] = None) = query map { query =>
      authorsComplete.filter(_.vector @@ toTsQuery(query, Some("pg_catalog.portuguese"))).sortBy(t => tsRank(t.vector, toTsQuery(query, Some("pg_catalog.portuguese")))).map(_.forSelect)
    } getOrElse (authors)
    def insertAuthor(author: Author) = (authors returning authorsComplete.map(_.guid) into ((a, guid) => a)) += author
    val getAuthorBySlug = (slug: String) => authorsComplete.filter(_.slug === slug).map(_.forSelect)
    val getAuthorByGUID = (guid: UUID) => authorsComplete.filter(_.guid === guid).map(_.forSelect)
    def insertPublicationStatus(publicationStatus: PublicationStatus) = (publicationStatuses returning publicationStatuses.map(_.guid) into ((p, guid) => p)) += publicationStatus
    val getPublicationStatusBySlug = publicationStatuses.findBy(_.slug)
    val getPublicationStatusByGUID = publicationStatuses.findBy(_.guid)
    val listPublicationStatuses = publicationStatuses.sortBy(_.score)
    def insertPublication(publication: Publication) = (publications returning publications.map(_.guid) into ((p, guid) => p)) += publication
    def insertPublicationAuthors(authorGUIDs: Seq[UUID])(publication: Publication) = publicationAuthors ++= authorGUIDs.map(guid => (guid, publication.guid))
    def insertPublicationCategories(categoryGUIDs: Seq[UUID])(publication: Publication) = publicationCategories ++= categoryGUIDs.map(guid => (guid, publication.guid))
    val getPublicationByGUID = publications.findBy(_.guid)
    def getPublicationAuthors(publicationGUID: UUID) = for {
      (_, a) <- publicationAuthors.filter(_.publicationGUID === publicationGUID) join authorsComplete on (_.authorGUID === _.guid)
    } yield (a.forSelect)
    def getPublicationCategories(publicationGUID: UUID) = for {
      (_, category) <- publicationCategories.filter(_.publicationGUID === publicationGUID) join categories on (_.categoryGUID === _.guid)
    } yield (category)
    // Each maybeFilter adds an EXISTS / equality predicate only when the
    // corresponding Option in PublicationFilters is defined.
    def getPublications(filters: PublicationFilters) = publications
      .maybeFilter(filters.authorGUID) { authorGUID => t => publicationAuthors.filter(pa => pa.authorGUID === authorGUID && pa.publicationGUID === t.guid).exists }
      .maybeFilter(filters.categoryGUID) { categoryGUID => t => publicationCategories.filter(pc => pc.categoryGUID === categoryGUID && pc.publicationGUID === t.guid).exists }
      .maybeFilter(filters.publicationStatusGUID) { statusGUID => t => t.publicationStatusGUID === statusGUID }
    def getPublicationFiles(publicationGUID: UUID) = publicationFiles.filter(_.publicationGUID === publicationGUID).filter(_.available === true)
    def insertPublicationFile(publicationGUID: UUID, request: NewPublicationFileRequest) = (publicationFiles returning publicationFiles.map(_.guid) into ((p, guid) => p)) += PublicationFile(createGUID, publicationGUID, request.name, request.contentType, request.size, request.url)
    // Indexes a publication for full-text search: title (weight a) plus the
    // concatenated author names (weight d).
    def insertPublicationSearch(publicationGUID: UUID, title: String, language: String) = {
      // Slick's sql interpolator does not splice collections, so the author
      // names are aggregated with a hand-written subquery instead.
      val names = sql"select coalesce(string_agg(a.name, ' '), '') from authors as a where a.guid in (select p.author_guid from publication_authors as p where p.publication_guid = ${publicationGUID.toString}::uuid)".as[String].head
      names flatMap { n =>
        publicationSearches.forceInsertExpr((publicationGUID, toTsVector(title, Some(language)).setWeight('a') @+ toTsVector(n, Some(language)).setWeight('d'), language))
      }
    }
    // Categories with their distinct publication counts (left join keeps
    // categories that have no publications, counted as 0).
    def getCategoryCounts = {
      val q = (for {
        (c, pcs) <- listCategories joinLeft publicationCategories on (_.guid === _.categoryGUID)
      } yield (c, pcs)).groupBy(_._1)
      val q2 = q map {
        case (c, p) => (c, p.map(_._2.map(_.publicationGUID)).countDistinct)
      }
      q2
    }
  }
}
/** Base type for persistence-layer failures carrying a machine-readable error code. */
class DatabaseException(val errorCode: String, message: String) extends Exception(message)
/** Raised when a unique constraint is violated (PostgreSQL SQLSTATE 23505). */
class DuplicateFound extends DatabaseException("error.duplicateFound", "Entry already exists")
/** Optional publication-listing filters; a None field means "do not filter on this attribute". */
case class PublicationFilters(authorGUID: Option[UUID] = None, categoryGUID: Option[UUID] = None, publicationStatusGUID: Option[UUID] = None)
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.controllers
import iht.config.AppConfig
import iht.connector.CachingConnector
import iht.views.html.{deadlines_application, deadlines_registration}
import javax.inject.Inject
import play.api.i18n.I18nSupport
import play.api.mvc.{Action, AnyContent, MessagesControllerComponents}
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import scala.concurrent.Future
/** Concrete [[DeadlinesController]] with its views, connector and config provided by dependency injection. */
class DeadlinesControllerImpl @Inject()(val cachingConnector: CachingConnector,
                                        val cc: MessagesControllerComponents,
                                        val deadlinesApplicationView: deadlines_application,
                                        val deadlinesRegistrationView: deadlines_registration,
                                        implicit val appConfig: AppConfig) extends FrontendController(cc) with DeadlinesController
/**
 * Serves the static "deadlines" guidance pages for the registration and
 * application journeys. Both actions render immediately with no lookups.
 */
trait DeadlinesController extends FrontendController with I18nSupport {
  implicit val appConfig: AppConfig
  def cachingConnector: CachingConnector
  val deadlinesApplicationView: deadlines_application
  val deadlinesRegistrationView: deadlines_registration
  /** Renders the registration-journey deadlines page. */
  def onPageLoadRegistration: Action[AnyContent] = Action.async { implicit request =>
    Future.successful(Ok(deadlinesRegistrationView.apply))
  }
  /** Renders the application-journey deadlines page. */
  def onPageLoadApplication: Action[AnyContent] = Action.async { implicit request =>
    Future.successful(Ok(deadlinesApplicationView.apply))
  }
}
| hmrc/iht-frontend | app/iht/controllers/DeadlinesController.scala | Scala | apache-2.0 | 2,040 |
/*
* Copyright 2014 Kevin Herron
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpetri.modbus
/** Base type for Modbus exception responses; `exceptionCode` is the raw wire value. */
sealed abstract class ExceptionCode(val exceptionCode: Int)
object ExceptionCode {
  // All codes with a fixed wire value, in protocol order; checked first-match
  // just like the original cascade.
  private val knownCodes: Seq[ExceptionCode] = Seq(
    IllegalFunction,
    IllegalDataAddress,
    IllegalDataValue,
    ServerDeviceFailure,
    Acknowledge,
    ServerDeviceBusy,
    MemoryParityError,
    GatewayPathUnavailable,
    GatewayTargetDeviceFailedToResponse)
  /** Decodes a raw exception byte, falling back to [[UnsupportedException]] for unknown values. */
  def fromByte(code: Byte): ExceptionCode =
    knownCodes.find(_.exceptionCode == code).getOrElse(UnsupportedException(code))
}
/**
 * Wire value 0x01.
 *
 * The function code received in the query is not an allowable action for the server. This may be because the function
 * code is only applicable to newer devices, and was not implemented in the unit selected. It could also indicate that
 * the server is in the wrong state to process a request of this type, for example because it is un-configured and is
 * being asked to return register values.
 */
case object IllegalFunction extends ExceptionCode(0x01)
/**
 * Wire value 0x02.
 *
 * The data address received in the query is not an allowable address for the server. More specifically, the combination
 * of reference number and transfer length is invalid. For a controller with 100 registers, the PDU addresses the first
 * register as 0, and the last one as 99. If a request is submitted with a starting register address of 96 and a
 * quantity of registers of 4, then this request will successfully operate (address-wise at least) on registers 96, 97,
 * 98, 99. If a request is submitted with a starting register address of 96 and a quantity of registers of 5, then this
 * request will fail with Exception Code 0x02 “Illegal Data Address” since it attempts to operate on registers 96, 97,
 * 98, 99 and 100, and there is no register with address 100.
 */
case object IllegalDataAddress extends ExceptionCode(0x02)
/**
 * Wire value 0x03.
 *
 * A value contained in the query data field is not an allowable value for server. This indicates a fault in the
 * structure of the remainder of a complex request, such as that the implied length is incorrect. It specifically does
 * NOT mean that a data item submitted for storage in a register has a value outside the expectation of the application
 * program, since the MODBUS protocol is unaware of the significance of any particular value of any particular register.
 */
case object IllegalDataValue extends ExceptionCode(0x03)
/**
 * Wire value 0x04.
 *
 * An unrecoverable error occurred while the server was attempting to perform the requested action.
 */
case object ServerDeviceFailure extends ExceptionCode(0x04)
/**
 * Wire value 0x05. Specialized use in conjunction with programming commands.
 * <p>
 * The server has accepted the request and is processing it, but a long duration of time will be required to do so. This
 * response is returned to prevent a timeout error from occurring in the client. The client can next issue a Poll Program Complete message to determine if processing is completed.
 */
// Declared as `case object` for consistency with every other exception code
// in this file (it was previously a plain `object`, which lacked the
// case-object toString/equality conveniences its siblings have).
case object Acknowledge extends ExceptionCode(0x05)
/**
 * Wire value 0x06. Specialized use in conjunction with programming commands.
 * <p>
 * The server is engaged in processing a long– duration program command. The client should retransmit the message later
 * when the server is free.
 */
case object ServerDeviceBusy extends ExceptionCode(0x06)
/**
 * Wire value 0x08. Specialized use in conjunction with function codes 20 and 21 and reference type 6, to indicate that the extended file
 * area failed to pass a consistency check.
 * <p>
 * The server attempted to read record file, but detected a parity error in the memory. The client can retry the
 * request, but service may be required on the server device.
 */
case object MemoryParityError extends ExceptionCode(0x08)
/**
 * Wire value 0x0A. Specialized use in conjunction with gateways, indicates that the gateway was unable to allocate an internal
 * communication path from the input port to the output port for processing the request. Usually means that the gateway
 * is mis-configured or overloaded.
 */
case object GatewayPathUnavailable extends ExceptionCode(0x0A)
/**
 * Wire value 0x0B. Specialized use in conjunction with gateways, indicates that no response was obtained from the target device. Usually
 * means that the device is not present on the network.
 */
case object GatewayTargetDeviceFailedToResponse extends ExceptionCode(0x0B)
/** A catch-all for unsupported or invalid exception codes; preserves the raw value received. */
case class UnsupportedException(code: Int) extends ExceptionCode(code)
| digitalpetri/scala-modbus-tcp | modbus-core/src/main/scala/com/digitalpetri/modbus/ExceptionCodes.scala | Scala | apache-2.0 | 5,476 |
// Copyright 2015 Willem Meints
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package nl.fizzylogic.reactivepi
import akka.actor.{ActorRef, ActorRefFactory}
/**
* Provides general access to the GPIO ports on your raspberry PI
*/
/**
 * Provides general access to the GPIO ports on your raspberry PI.
 *
 * Message protocol: send [[GPIO.Read]] or [[GPIO.Write]] to a pin actor;
 * [[GPIO.Data]] carries a pin value (presumably the reply to Read and the
 * payload pushed to subscribers — confirm in the actor implementations).
 */
object GPIO {
  // Request the current value of a pin.
  case object Read
  // Set a pin to the given value.
  case class Write(data: Byte)
  // A pin value, as produced by the input/subscription actors.
  case class Data(value: Byte)
  /**
   * Starts an actor for a specific input pin
   *
   * @param pinNumber Pin number to use
   * @param actorRefFactory actorRefFactory to use
   * @return Returns the created actor
   */
  def input(pinNumber: Int)(implicit actorRefFactory: ActorRefFactory): ActorRef = {
    actorRefFactory.actorOf(GPIOInputActor.props(pinNumber))
  }
  /**
   * Subscribes to a specific input pin. When the value changes, the target actor will
   * receive a GPIO.Data message containing the new value
   *
   * @param pinNumber Pin number to subscribe to
   * @param target Actor that receives GPIO.Data messages on value changes
   * @param actorRefFactory actorRefFactory to use
   * @return Returns the created actor
   */
  def subscribe(pinNumber: Int, target: ActorRef)(implicit actorRefFactory: ActorRefFactory): ActorRef = {
    actorRefFactory.actorOf(GPIOSubscriptionActor.props(pinNumber, target))
  }
  /**
   * Starts an actor for a specific output pin
   *
   * @param pinNumber Pin number to use
   * @param actorRefFactory actorRefFactory to use
   * @return Returns the created actor
   */
  def output(pinNumber: Int)(implicit actorRefFactory: ActorRefFactory): ActorRef = {
    actorRefFactory.actorOf(GPIOOutputActor.props(pinNumber))
  }
}
| wmeints/ReactivePI | actors/src/main/scala/nl/fizzylogic/reactivepi/GPIO.scala | Scala | apache-2.0 | 2,056 |
package aqua.http.performance
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Success, Failure}
/**
 * Demo entry point: sends a single SMS through SprayClientSample and exits
 * once the response (or failure) arrives.
 *
 * NOTE(review): `extends App` relies on DelayedInit and is discouraged for
 * non-trivial entry points; consider a plain `def main` if this grows.
 */
object Main extends App {

  // A stable, explicitly-typed value; the previous `implicit def` without a
  // result type re-evaluated the accessor on every implicit lookup.
  implicit val ec: ExecutionContext = ExecutionContext.Implicits.global

  // SECURITY(review): the application id and API key are hard-coded below —
  // move them to configuration or the environment before shipping.
  val message: Future[SmsMtResp] = SprayClientSample.
    sendMessage(SmsMtReq("APP_000952", "b96e26ca39fa38e9f9bd867ab3701e43", "hello world", List("tel:94776177400"), Some(1)))

  message onComplete {
    case Success(resp) =>
      println(resp)
      sys.exit()
    case Failure(e) =>
      val err: String = e.getMessage
      println(s"Error occurred $err")
      e.printStackTrace()
      sys.exit()
  }
}
| isuruanu/http-performance-simulator | src/main/scala/aqua/http/performance/Main.scala | Scala | gpl-2.0 | 631 |
package suggestions
package gui
import scala.language.postfixOps
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Try, Success, Failure }
import rx.subscriptions.CompositeSubscription
import rx.lang.scala.Observable
import observablex._
import search._
trait WikipediaApi {

  /** Returns a `Future` with a list of possible completions for a search `term`. */
  def wikipediaSuggestion(term: String): Future[List[String]]

  /** Returns a `Future` with the contents of the Wikipedia page for the given search `term`. */
  def wikipediaPage(term: String): Future[String]

  /** Returns an `Observable` with a list of possible completions for a search `term`. */
  def wikiSuggestResponseStream(term: String): Observable[List[String]] = ObservableEx(wikipediaSuggestion(term))

  /** Returns an `Observable` with the contents of the Wikipedia page for the given search `term`. */
  def wikiPageResponseStream(term: String): Observable[String] = ObservableEx(wikipediaPage(term))

  implicit class StringObservableOps(obs: Observable[String]) {
    /** Given a stream of search terms, returns a stream of search terms with spaces replaced by underscores.
     *
     * E.g. `"erik", "erik meijer", "martin` should become `"erik", "erik_meijer", "martin"`
     */
    // Implemented (was ???): a plain element-wise map over the stream.
    def sanitized: Observable[String] = obs.map(_.replaceAll(" ", "_"))
  }

  implicit class ObservableOps[T](obs: Observable[T]) {

    /** Given an observable that can possibly be completed with an error, returns a new observable
     * with the same values wrapped into `Success` and the potential error wrapped into `Failure`.
     *
     * E.g. `1, 2, 3, !Exception!` should become `Success(1), Success(2), Success(3), Failure(Exception), !TerminateStream!`
     */
    // Implemented (was ???): wrap values in Success; a terminal error becomes
    // one final Failure element followed by normal completion.
    def recovered: Observable[Try[T]] =
      obs.map(v => Success(v): Try[T]).onErrorReturn(t => Failure(t))

    /** Emits the events from the `obs` observable, until `totalSec` seconds have elapsed.
     *
     * After `totalSec` seconds, if `obs` is not yet completed, the result observable becomes completed.
     *
     * Note: uses the existing combinators on observables.
     */
    // Implemented (was ???): take(duration) completes the stream when the
    // window elapses, as required.
    def timedOut(totalSec: Long): Observable[T] =
      obs.take(totalSec.seconds)

    /** Given a stream of events `obs` and a method `requestMethod` to map a request `T` into
     * a stream of responses `S`, returns a stream of all the responses wrapped into a `Try`.
     * The elements of the response stream should reflect the order of their corresponding events in `obs`.
     *
     * E.g. given a request stream:
     *
     * 1, 2, 3, 4, 5
     *
     * And a request method:
     *
     * num => if (num != 4) Observable.just(num) else Observable.error(new Exception)
     *
     * We should, for example, get:
     *
     * Success(1), Success(2), Success(3), Failure(new Exception), Success(5)
     */
    // Implemented (was ???): `recovered` confines each inner stream's error to
    // a Failure element, and `concat` (not flatMap/merge) preserves the order
    // of the originating events.
    def concatRecovered[S](requestMethod: T => Observable[S]): Observable[Try[S]] =
      obs.map(t => requestMethod(t).recovered).concat
  }
}
| msulima/reactive | suggestions/src/main/scala/suggestions/gui/WikipediaApi.scala | Scala | mit | 3,287 |
package org.bitcoins.core.protocol.script
import org.bitcoins.testkitcore.gen.ScriptGenerators
import org.scalacheck.{Prop, Properties}
/** Created by tom on 8/23/16.
*/
class CLTVScriptPubKeySpec extends Properties("CLTVScriptPubKeySpec") {

  // Round-trip property: re-parsing a generated CLTV scriptPubKey from its
  // hex serialization must yield the original value.
  property("Serialization symmetry") =
    Prop.forAll(ScriptGenerators.cltvScriptPubKey) { generated =>
      val (cltvSpk, _) = generated
      CLTVScriptPubKey(cltvSpk.hex) == cltvSpk
    }
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/protocol/script/CLTVScriptPubKeySpec.scala | Scala | mit | 449 |
object Test {
  // Minimized compiler test: a receiver type that defines `filter` but not
  // `withFilter`, plus an implicit enrichment that adds `withFilter` (gated
  // behind an unsatisfiable implicit, so the enrichment never applies).
  // Exercises the type-checker's `withFilter` -> `filter` fallback; see the
  // inline notes on the final `locally` block for the failure mode.
  def BrokenMethod(): HasFilter[(Int, String)] = ???

  trait HasFilter[B] {
    def filter(p: B => Boolean) = ???
  }

  trait HasWithFilter {
    def withFilter = ???
  }

  object addWithFilter {
    trait NoImplicit
    implicit def enrich(v: Any)
                       (implicit F0: NoImplicit): HasWithFilter = ???
  }

  BrokenMethod().withFilter(_ => true) // okay
  BrokenMethod().filter(_ => true) // okay

  locally {
    import addWithFilter._
    BrokenMethod().withFilter((_: (Int, String)) => true) // okay
  }

  locally {
    import addWithFilter._
    // adaptToMemberWithArgs sets the type of the tree `x`
    // to ErrorType (while in silent mode, so the error is not
    // reported. Later, when the fallback from `withFilter`
    // to `filter` is attempted, the closure is taken to have
    // have the type `<error> => Boolean`, which conforms to
    // `(B => Boolean)`. Only later during pickling does the
    // defensive check for erroneous types in the tree pick up
    // the problem.
    BrokenMethod().withFilter(x => true) // erroneous or inaccessible type.
  }
}
| yusuke2255/dotty | tests/pending/pos/t7239.scala | Scala | bsd-3-clause | 1,116 |
object Solution {

  /** Largest product of `slide` consecutive digits in the digit string `x`.
    *
    * Digits are 0-9, so every window product is >= 0 and 0 is a safe fold
    * seed. If `x` is shorter than `slide`, the single partial window is used
    * (same behavior as the original sliding-based implementation).
    */
  def getMaxProd(x: String, slide: Int): Int =
    x.iterator
      .map(_.asDigit) // clearer than the original `_ - 48` ASCII arithmetic
      .sliding(slide)
      .foldLeft(0)((best, window) => math.max(best, window.product))

  def main(args: Array[String]): Unit = {
    val sc = new java.util.Scanner(System.in)
    val t = sc.nextInt()
    // BUG FIX: the original looped `0 to t`, i.e. t + 1 iterations, reading
    // one test case too many. `0 until t` runs exactly t times.
    for (_ <- 0 until t) {
      sc.nextInt() // length of the digit string; not needed
      val k = sc.nextInt()
      val num = sc.next()
      println(getMaxProd(num, k))
    }
  }
}
| pavithranrao/projectEuler | projectEulerScala/problem008.scala | Scala | mit | 534 |
package io.skyfii.mandrill.model
// See https://mandrillapp.com/api/docs/messages.JSON.html
/** Payload describing one outbound Mandrill message.
  *
  * Field names are snake_case on purpose: they mirror the JSON keys of
  * Mandrill's messages API one-to-one (see the link above) — presumably the
  * JSON codec maps fields by name; verify against the serializer in use.
  * `html`, `subject`, `from_email` and `to` are required; everything else
  * defaults to an empty/absent value so callers only set what they need.
  *
  * @param html       full HTML body of the message
  * @param text       optional plain-text body
  * @param from_email sender address; `from_name` is the optional display name
  * @param to         recipient list (see [[Recipient]])
  */
case class Message(html: String,
                   text: Option[String],
                   subject: String,
                   from_email: String,
                   from_name: Option[String],
                   to: Vector[Recipient],
                   headers: Map[String, String] = Map(),
                   important: Boolean = false,
                   track_opens: Boolean = true,
                   track_clicks: Boolean = true,
                   auto_text: Boolean = true,
                   auto_html: Boolean = true,
                   inline_css: Boolean = false,
                   url_strip_qs: Boolean = false,
                   preserve_recipients: Boolean = true,
                   view_content_link: Boolean = true,
                   bcc_address: Option[String] = None,
                   tracking_domain: Option[String] = None,
                   signing_domain: String = "",
                   return_path_domain: String = "",
                   merge: Boolean = false,
                   merge_language: Option[String] = None,
                   global_merge_vars: Vector[MergeVar] = Vector(),
                   merge_vars: Vector[RecipientMergeVars] = Vector(),
                   tags: Vector[String] = Vector(),
                   subaccount: Option[String] = None,
                   google_analytics_domains: Vector[String] = Vector(),
                   google_analytics_campaign: Option[String] = None,
                   metadata: Map[String, String] = Map(),
                   recipient_metadata: Vector[RecipientMetadata] = Vector(),
                   attachments: Vector[Attachment] = Vector(),
                   images: Vector[Image] = Vector())
| skyfii/skyfii-mandrill | src/main/scala/io/skyfii/mandrill/model/Message.scala | Scala | apache-2.0 | 1,781 |
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
object Test extends dotty.runtime.LegacyApp {
  // Reify an expression that captures the local `x = 2` as a free variable.
  val outer = {
    val x = 2
    reify{x}
  }
  // Splice `outer` inside a block that declares its own `x = 42`. Due to
  // reify/splice hygiene the spliced tree should presumably still refer to
  // the captured outer x (2), not the shadowing local — the printed result
  // below makes that observable.
  val code = reify{
    val x = 42
    outer.splice
  };
  // Compile and evaluate the reified tree at runtime via a toolbox.
  val toolbox = cm.mkToolBox()
  val evaluated = toolbox.eval(code.tree)
  println("evaluated = " + evaluated)
}
| yusuke2255/dotty | tests/disabled/macro/run/reify_nested_outer_refers_to_local.scala | Scala | bsd-3-clause | 437 |
package com.twitter.finagle.example.stream
import com.twitter.concurrent.{Broker, Offer}
import com.twitter.finagle.builder.{Server, ServerBuilder}
import com.twitter.finagle.Service
import com.twitter.finagle.stream.{Stream, StreamResponse}
import com.twitter.util.{Future, Timer, Time, JavaTimer}
import com.twitter.conversions.time._
import java.net.InetSocketAddress
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.buffer.ChannelBuffers.copiedBuffer
import org.jboss.netty.handler.codec.http.{DefaultHttpResponse, HttpRequest, HttpResponseStatus}
import org.jboss.netty.util.CharsetUtil
import scala.util.Random
/**
* An example of a streaming server using HTTP Chunking. The Stream
* Codec uses HTTP Chunks and newline delimited items.
*/
object StreamServer {
  // "tee" messages across all of the registered brokers.
  // Registration/unregistration requests and the message feed all flow
  // through brokers so `tee` below can serialize them with Offer.select.
  val addBroker = new Broker[Broker[ChannelBuffer]]
  val remBroker = new Broker[Broker[ChannelBuffer]]
  val messages = new Broker[ChannelBuffer]

  // Recursive event loop over the current subscriber set: each Offer.select
  // handles exactly one event (add, remove, or broadcast) and then recurses
  // with the updated set. When there are no subscribers, the message offer is
  // Offer.never, so messages are not consumed until someone registers.
  private[this] def tee(receivers: Set[Broker[ChannelBuffer]]) {
    Offer.select(
      addBroker.recv { b => tee(receivers + b) },
      remBroker.recv { b => tee(receivers - b) },
      if (receivers.isEmpty) Offer.never else {
        messages.recv { m =>
          // Broadcast to every subscriber; only resume the loop once all
          // sends have completed.
          Future.join(receivers map { _ ! m } toSeq) ensure tee(receivers)
        }
      }
    )
  }

  // Emits one random integer (newline-terminated) per second into `messages`,
  // rescheduling itself after each send completes.
  private[this] def produce(r: Random, t: Timer) {
    t.schedule(1.second.fromNow) {
      val m = copiedBuffer(r.nextInt.toString + "\\n", CharsetUtil.UTF_8)
      messages.send(m) andThen produce(r, t)
    }
  }
  // start the two processes.
  tee(Set())
  produce(new Random, new JavaTimer)

  def main(args: Array[String]) {
    val myService = new Service[HttpRequest, StreamResponse] {
      // Each incoming request gets its own subscriber broker, registered
      // with the tee loop above.
      def apply(request: HttpRequest) = Future {
        val subscriber = new Broker[ChannelBuffer]
        addBroker ! subscriber
        new StreamResponse {
          val httpResponse = new DefaultHttpResponse(
            request.getProtocolVersion, HttpResponseStatus.OK)
          // Chunks for this client come from its private subscriber broker.
          def messages = subscriber.recv
          // No error channel: an offer on a fresh broker never fires.
          def error = new Broker[Throwable].recv
          def release() = {
            remBroker ! subscriber
            // sink any existing messages, so they
            // don't hold up the upstream.
            subscriber.recv foreach { _ => () }
          }
        }
      }
    }
    val server: Server = ServerBuilder()
      .codec(Stream())
      .bindTo(new InetSocketAddress(8080))
      .name("streamserver")
      .build(myService)
  }
} | enachb/finagle_2.9_durgh | finagle-example/src/main/scala/com/twitter/finagle/example/stream/StreamServer.scala | Scala | apache-2.0 | 2,525 |
/**
* Copyright 2016 Dropbox, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package djinni
import djinni.ast.Record.DerivingType
import djinni.ast._
import djinni.generatorTools._
import djinni.meta._
import djinni.syntax.Error
import djinni.writer.IndentWriter
import scala.collection.mutable
import scala.collection.parallel.immutable
abstract class BaseObjcGenerator(spec: Spec) extends Generator(spec) {

  val marshal = new ObjcMarshal(spec)

  /** How a constant is rendered in Objective-C: either as a file-scope
    * `const` variable or as a class method returning a cached value. */
  object ObjcConstantType extends Enumeration {
    val ConstVariable, ConstMethod = Value
  }

  /** Writes the declaration (type, qualifiers and name) of the `const`
    * variable for constant `c`; `s` is the name prefix (the enclosing
    * type's name). */
  def writeObjcConstVariableDecl(w: IndentWriter, c: Const, s: String): Unit = {
    val nullability = marshal.nullability(c.ty.resolved).fold("")(" __" + _)
    val td = marshal.fqFieldType(c.ty) + nullability
    // MBinary | MList | MSet | MMap are not allowed for constants.
    w.w(s"${td} const $s${idObjc.const(c.ident)}")
  }

  /** Generate the definition of Objc constants of the requested kind.
    * The two branches filter on `canBeConstVariable` in opposite ways, so
    * the two kinds can be emitted in separate passes without overlap. */
  def generateObjcConstants(w: IndentWriter, consts: Seq[Const], selfName: String,
                            genType: ObjcConstantType.Value): Unit = {

    // "@" turns a primitive literal into a boxed (NSNumber-style) literal
    // when the Objc mapping of `ty` needs a reference type.
    def boxedPrimitive(ty: TypeRef): String = {
      val (_, needRef) = marshal.toObjcType(ty)
      if (needRef) "@" else ""
    }

    // Recursively writes the Objc literal for constant value `v` of type `ty`.
    def writeObjcConstValue(w: IndentWriter, ty: TypeRef, v: Any): Unit = v match {
      case l: Long => w.w(boxedPrimitive(ty) + l.toString)
      case d: Double if marshal.fieldType(ty) == "float" => w.w(boxedPrimitive(ty) + d.toString + "f")
      case d: Double => w.w(boxedPrimitive(ty) + d.toString)
      case b: Boolean => w.w(boxedPrimitive(ty) + (if (b) "YES" else "NO"))
      case s: String => w.w("@" + s)
      case e: EnumValue => w.w(marshal.typename(ty) + idObjc.enum(e.name))
      case v: ConstRef => w.w(selfName + idObjc.const(v.name))
      case z: Map[_, _] => { // Value is a record: emit a call to its initializer.
        val recordMdef = ty.resolved.base.asInstanceOf[MDef]
        val record = recordMdef.body.asInstanceOf[Record]
        val vMap = z.asInstanceOf[Map[String, Any]]
        val head = record.fields.head
        w.w(s"[[${marshal.typename(ty)} alloc] initWith${IdentStyle.camelUpper(head.ident)}:")
        writeObjcConstValue(w, head.ty, vMap.apply(head.ident))
        w.nestedN(2) {
          // SkipFirst: the head field's value was already written above.
          val skipFirst = SkipFirst()
          for (f <- record.fields) skipFirst {
            w.wl
            w.w(s"${idObjc.field(f.ident)}:")
            writeObjcConstValue(w, f.ty, vMap.apply(f.ident))
          }
        }
        w.w("]")
      }
    }

    // Emits a `+ (Type)name` class method returning a static cached value.
    def writeObjcConstMethImpl(c: Const, w: IndentWriter): Unit = {
      val label = "+"
      val nullability = marshal.nullability(c.ty.resolved).fold("")(" __" + _)
      val ret = marshal.fqFieldType(c.ty) + nullability
      val decl = s"$label ($ret)${idObjc.method(c.ident)}"
      writeAlignedObjcCall(w, decl, List(), "", p => ("", ""))
      w.wl
      w.braced {
        // val (was var): the binding is never reassigned.
        val staticVar = s"s_${idObjc.method(c.ident)}"
        w.w(s"static ${marshal.fqFieldType(c.ty)} const ${staticVar} = ")
        writeObjcConstValue(w, c.ty, c.value)
        w.wl(";")
        w.wl(s"return $staticVar;")
      }
    }

    genType match {
      case ObjcConstantType.ConstVariable =>
        for (c <- consts if marshal.canBeConstVariable(c)) {
          w.wl
          writeObjcConstVariableDecl(w, c, selfName)
          w.w(s" = ")
          writeObjcConstValue(w, c.ty, c.value)
          w.wl(";")
        }
      case ObjcConstantType.ConstMethod =>
        for (c <- consts if !marshal.canBeConstVariable(c)) {
          writeObjcConstMethImpl(c, w)
          w.wl
        }
    }
  }
}
| dropbox/djinni | src/source/BaseObjcGenerator.scala | Scala | apache-2.0 | 4,132 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.