code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package org.sisioh.aws4s.dynamodb.model
import com.amazonaws.services.dynamodbv2.model.{ AttributeDefinition, ScalarAttributeType }
import org.sisioh.aws4s.PimpedType
object AttributeDefinitionFactory {

  /** Creates an empty [[AttributeDefinition]]. */
  def create(): AttributeDefinition =
    new AttributeDefinition()

  /** Creates an [[AttributeDefinition]] from a name and a raw type string. */
  def create(attributeName: String, attributeType: String): AttributeDefinition =
    new AttributeDefinition(attributeName, attributeType)

  /** Creates an [[AttributeDefinition]] from a name and a [[ScalarAttributeType]]. */
  def create(attributeName: String, attributeType: ScalarAttributeType): AttributeDefinition =
    new AttributeDefinition(attributeName, attributeType)
}
class RichAttributeDefinition(val underlying: AttributeDefinition)
    extends AnyVal with PimpedType[AttributeDefinition] {

  /** Attribute name, or `None` when unset on the underlying definition. */
  def attributeNameOpt: Option[String] =
    Option(underlying.getAttributeName)

  /** Sets the attribute name; `None` clears it (null is passed through). */
  def attributeNameOpt_=(value: Option[String]): Unit =
    underlying.setAttributeName(value.orNull)

  /** Fluent variant of the name setter; returns the underlying definition. */
  def withAttributeNameOpt(value: Option[String]): AttributeDefinition =
    underlying.withAttributeName(value.orNull)

  /** Attribute type, or `None` when unset on the underlying definition. */
  def attributeTypeOpt: Option[String] =
    Option(underlying.getAttributeType)

  /** Sets the attribute type; `None` clears it (null is passed through). */
  def attributeTypeOpt_=(value: Option[String]): Unit =
    underlying.setAttributeType(value.orNull)

  /** Fluent variant of the type setter; returns the underlying definition. */
  def withAttributeTypeOpt(value: Option[String]): AttributeDefinition =
    underlying.withAttributeType(value.orNull)
}
| everpeace/aws4s | aws4s-dynamodb/src/main/scala/org/sisioh/aws4s/dynamodb/model/RichAttributeDefinition.scala | Scala | mit | 1,307 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker
import com.rackspace.cloud.api.wadl.Converters._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ValidatorWADLPlainParamSuite extends BaseValidatorSuite {
//
// validator_XSDElementContentPlain allows:
//
//
// PUT /a/b with json and xml support
// POST /a/b with xml support
//
// POST /c with json support
// GET /c
//
// The validator checks for wellformness in XML and grammar checks
// XSD requests. It also checks the element type, and it checks
// constraints against required plain params. You can PUT an a in
// /a/b and POST an e in /a/b
//
// The validator is used in the following tests.
//
val validator_XSDElementContentPlain = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// NOTE(review): TestConfig args are positional; the `1` selects the XPath 1
// engine (the "...Plain2" variant passes 2 for XPath 2 functions). The other
// flags' meanings are not visible from this file — see TestConfig.
, TestConfig(false, false, true, true, true, 1, true))
//
// Like validator_XSDElementContentPlain, but with custom rax:message
//
val validator_XSDElementContentPlainMsg = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:message="No stepType attribute on a"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:message="no stepType on e"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// The rax:message values above replace the default "Expecting ..." error text
// when a required plain param is missing (asserted in the failure tests below).
, TestConfig(false, false, true, true, true, 1, true, false, false, "Xalan",
false, false, false, true))
//
// Like validator_XSDElementContentPlain, but with custom rax:message, rax:code
//
val validator_XSDElementContentPlainMsgCode = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:message="No stepType attribute on a" rax:code="500"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:message="no stepType on e" rax:code="501"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// rax:message and rax:code above override both the error text and the response
// status (500 for the missing tst:a attribute, 501 for the missing tst:e child),
// as asserted in the failure tests below.
, TestConfig(false, false, true, true, true, 1, true, false, false, "Xalan",
false, false, false, true))
//
// Like validator_XSDElementContentPlain, but with custom rax:code
//
val validator_XSDElementContentPlainCode = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:code="500"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:code="501"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// rax:code above overrides only the response status for missing plain params
// (500/501); the default error text is kept (asserted in the failure tests below).
, TestConfig(false, false, true, true, true, 1, true, false, false, "Xalan",
false, false, false, true))
//
// Like XSDElementContentPlain but with joinopt
//
val validator_XSDElementContentPlainOpt = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Same WADL as validator_XSDElementContentPlain; the trailing `true` after the
// "XalanC" engine string enables join-optimization ("joinopt" per the comment above).
, TestConfig(false, false, true, true, true, 1, true, false , false, "XalanC", true))
//
// Like validator_XSDElementContentPlainOpt but with custom rax:message
//
val validator_XSDElementContentPlainOptMsg = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:message="No stepType attribute on a"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:message="no stepType on e"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Join-optimized ("XalanC" + trailing flags) with rax:message overriding the
// default missing-plain-param error text.
, TestConfig(false, false, true, true, true, 1,
true, false , false, "XalanC", true,
false, false, true))
//
// Like validator_XSDElementContentPlainOpt but with custom rax:message, rax:code
//
val validator_XSDElementContentPlainOptMsgCode = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:message="No stepType attribute on a" rax:code="500"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:message="no stepType on e" rax:code="501"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Join-optimized variant with both rax:message (custom error text) and
// rax:code (custom 500/501 status) on the required plain params.
, TestConfig(false, false, true, true, true, 1,
true, false , false, "XalanC", true,
false, false, true))
//
// Like validator_XSDElementContentPlainOptMsgCode but using Xalan
//
val validator_XSDElementContentPlainOptMsgCodeX = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:message="No stepType attribute on a" rax:code="500"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:message="no stepType on e" rax:code="501"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Identical WADL to validator_XSDElementContentPlainOptMsgCode; only the
// engine string differs ("Xalan" instead of "XalanC").
, TestConfig(false, false, true, true, true, 1,
true, false , false, "Xalan", true,
false, false, true))
//
// Like validator_XSDElementContentPlainOptMsgCode but using Saxon
//
val validator_XSDElementContentPlainOptMsgCodeS = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:message="No stepType attribute on a" rax:code="500"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:message="no stepType on e" rax:code="501"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Identical WADL to validator_XSDElementContentPlainOptMsgCode; only the
// engine string differs ("SaxonHE").
, TestConfig(false, false, true, true, true, 1,
true, false , false, "SaxonHE", true,
false, false, true))
//
// Like validator_XSDElementContentPlainOpt but with custom rax:code
//
val validator_XSDElementContentPlainOptCode = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"
xmlns:rax="http://docs.rackspace.com/api">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true" rax:code="500"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true" rax:code="501"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Join-optimized variant with rax:code only: custom 500/501 statuses for
// missing plain params, default error text.
, TestConfig(false, false, true, true, true, 1,
true, false , false, "XalanC", true,
false, false, true))
//
// Like validator_XSDElementContentPlain, but using an XPath 2 engine.
//
val validator_XSDElementContentPlain2 = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true"/>
<!-- a silly xpath assertion that will always return true -->
<param style="plain" path="string(current-dateTime())" required="true"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Passes 2 as the XPath version so XPath 2 functions such as
// current-dateTime() (used in the extra param above) are available.
, TestConfig(false, false, true, true, true, 2, true))
//
// Like validator_XSDElementContentPlainOpt, but using an XPath 2 engine.
//
val validator_XSDElementContentPlainOpt2 = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test">
<grammars>
<include href="src/test/resources/xsd/test-urlxsd.xsd"/>
</grammars>
<resources base="https://test.api.openstack.com">
<resource path="/a/b">
<method name="PUT">
<request>
<representation mediaType="application/xml" element="tst:a">
<param style="plain" path="tst:a/@stepType" required="true"/>
</representation>
<representation mediaType="application/json"/>
</request>
</method>
<method name="POST">
<request>
<representation mediaType="application/xml" element="tst:e">
<param style="plain" path="tst:e/tst:stepType" required="true"/>
<!-- a silly xpath assertion that will always return true -->
<param style="plain" path="string(current-dateTime())" required="true"/>
</representation>
</request>
</method>
</resource>
<resource path="/c">
<method name="POST">
<request>
<representation mediaType="application/json"/>
</request>
</method>
<method name="GET"/>
</resource>
</resources>
</application>)
// Join-optimized ("XalanC" + trailing `true`) with XPath version 2, so
// current-dateTime() in the extra param above is available.
, TestConfig(false, false, true, true, true, 2, true, false , false, "XalanC", true))
// A tst:e element missing the required tst:stepType child — used to trip the
// POST /a/b plain-param check ("Expecting tst:e/tst:stepType").
val badXML_Plain1 = <e xmlns="http://www.rackspace.com/repose/wadl/checker/step/test">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<even>22</even>
</e>
// A tst:a element missing the required stepType attribute — used to trip the
// PUT /a/b plain-param check ("Expecting tst:a/@stepType").
val badXML_Plain2 = <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test"
id="21f1fcf6-bf38-11e1-878e-133ab65fcec3"
even="22"/>
// --- Success cases: each validator accepts requests that satisfy the WADL
// --- above (correct root element, required plain params present, or
// --- well-formed JSON where only JSON support is declared).
test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlain with valid XML1") {
validator_XSDElementContentPlain.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
}
test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainMsg with valid XML1") {
validator_XSDElementContentPlainMsg.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
}
test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainCode with valid XML1") {
validator_XSDElementContentPlainCode.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
}
test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainMsgCode with valid XML1") {
validator_XSDElementContentPlainMsgCode.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
}
test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlain2 with valid XML1") {
validator_XSDElementContentPlain2.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
}
test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlain with valid XML1") {
validator_XSDElementContentPlain.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
}
test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainMsg with valid XML1") {
validator_XSDElementContentPlainMsg.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
}
test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainCode with valid XML1") {
validator_XSDElementContentPlainCode.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
}
test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainMsgCode with valid XML1") {
validator_XSDElementContentPlainMsgCode.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
}
test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlain2 with valid XML1") {
validator_XSDElementContentPlain2.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
}
test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlain with well formed JSON") {
validator_XSDElementContentPlain.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
}
test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainMsg with well formed JSON") {
validator_XSDElementContentPlainMsg.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
}
test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainCode with well formed JSON") {
validator_XSDElementContentPlainCode.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
}
test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainMsgCode with well formed JSON") {
validator_XSDElementContentPlainMsgCode.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
}
test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlain2 with well formed JSON") {
validator_XSDElementContentPlain2.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
}
test ("POST on /c with application/json should succeed on validator_XSDElementContentPlain with well formed JSON") {
validator_XSDElementContentPlain.validate(request("POST","/c","application/json", goodJSON),response,chain)
}
test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainMsg with well formed JSON") {
validator_XSDElementContentPlainMsg.validate(request("POST","/c","application/json", goodJSON),response,chain)
}
test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainCode with well formed JSON") {
validator_XSDElementContentPlainCode.validate(request("POST","/c","application/json", goodJSON),response,chain)
}
test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainMsgCode with well formed JSON") {
validator_XSDElementContentPlainMsgCode.validate(request("POST","/c","application/json", goodJSON),response,chain)
}
test ("POST on /c with application/json should succeed on validator_XSDElementContentPlain2 with well formed JSON") {
validator_XSDElementContentPlain2.validate(request("POST","/c","application/json", goodJSON),response,chain)
}
// Fixed test-name typo: "GOT" -> "GET" (the request issued is a GET).
// A bodyless GET on /c carries no representation, so no plain-param checks apply.
test ("GET on /c should succeed on validator_XSDElementContentPlain") {
validator_XSDElementContentPlain.validate(request("GET","/c"),response,chain)
}
test ("GET on /c should succeed on validator_XSDElementContentPlainMsg") {
validator_XSDElementContentPlainMsg.validate(request("GET","/c"),response,chain)
}
test ("GET on /c should succeed on validator_XSDElementContentPlainCode") {
validator_XSDElementContentPlainCode.validate(request("GET","/c"),response,chain)
}
test ("GET on /c should succeed on validator_XSDElementContentPlainMsgCode") {
validator_XSDElementContentPlainMsgCode.validate(request("GET","/c"),response,chain)
}
test ("GET on /c should succeed on validator_XSDElementContentPlain2") {
validator_XSDElementContentPlain2.validate(request("GET","/c"),response,chain)
}
// --- Failure cases: wrong root element, missing required plain params (the
// --- Msg/Code variants assert the overridden rax:message text and rax:code
// --- 500/501 statuses), and well-formed XML that fails XSD validation.
test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlain") {
assertResultFailed(validator_XSDElementContentPlain.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainMsg") {
assertResultFailed(validator_XSDElementContentPlainMsg.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainCode") {
assertResultFailed(validator_XSDElementContentPlainCode.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainMsgCode") {
assertResultFailed(validator_XSDElementContentPlainMsgCode.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlain2") {
assertResultFailed(validator_XSDElementContentPlain2.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlain") {
assertResultFailed(validator_XSDElementContentPlain.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 400, "Bad Content: Expecting tst:a/@stepType")
}
test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainMsg") {
assertResultFailed(validator_XSDElementContentPlainMsg.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 400, "Bad Content: No stepType attribute on a")
}
test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainCode") {
assertResultFailed(validator_XSDElementContentPlainCode.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 500, "Expecting tst:a/@stepType")
}
test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainMsgCode") {
assertResultFailed(validator_XSDElementContentPlainMsgCode.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 500, "No stepType attribute on a")
}
test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlain2") {
assertResultFailed(validator_XSDElementContentPlain2.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 400, "Bad Content: Expecting tst:a/@stepType")
}
test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlain") {
assertResultFailed(validator_XSDElementContentPlain.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
}
test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainMsg") {
assertResultFailed(validator_XSDElementContentPlainMsg.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
}
test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainCode") {
assertResultFailed(validator_XSDElementContentPlainCode.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
}
test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainMsgCode") {
assertResultFailed(validator_XSDElementContentPlainMsgCode.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
}
test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlain2") {
assertResultFailed(validator_XSDElementContentPlain2.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
}
test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlain") {
assertResultFailed(validator_XSDElementContentPlain.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 400, "Bad Content: Expecting tst:e/tst:stepType")
}
test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainMsg") {
assertResultFailed(validator_XSDElementContentPlainMsg.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 400, "Bad Content: no stepType on e")
}
test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainCode") {
assertResultFailed(validator_XSDElementContentPlainCode.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 501, "Expecting tst:e/tst:stepType")
}
test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainMsgCode") {
assertResultFailed(validator_XSDElementContentPlainMsgCode.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 501, "no stepType on e")
}
test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlain2") {
assertResultFailed(validator_XSDElementContentPlain2.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 400, "Bad Content: Expecting tst:e/tst:stepType")
}
test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlain") {
assertResultFailed(validator_XSDElementContentPlain.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainMsg") {
assertResultFailed(validator_XSDElementContentPlainMsg.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainCode") {
assertResultFailed(validator_XSDElementContentPlainCode.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainMsgCode") {
assertResultFailed(validator_XSDElementContentPlainMsgCode.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlain2") {
assertResultFailed(validator_XSDElementContentPlain2.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
}
test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlain") {
assertResultFailed(validator_XSDElementContentPlain.validate(request("PUT","/a/b", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<stepType>URL_FAIL</stepType>
<even>22</even>
</a>
),response,chain), 400)
}
test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainMsg") {
assertResultFailed(validator_XSDElementContentPlainMsg.validate(request("PUT","/a/b", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<stepType>URL_FAIL</stepType>
<even>22</even>
</a>
),response,chain), 400)
}
test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainCode") {
assertResultFailed(validator_XSDElementContentPlainCode.validate(request("PUT","/a/b", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<stepType>URL_FAIL</stepType>
<even>22</even>
</a>
),response,chain), 400)
}
test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainMsgCode") {
assertResultFailed(validator_XSDElementContentPlainMsgCode.validate(request("PUT","/a/b", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<stepType>URL_FAIL</stepType>
<even>22</even>
</a>
),response,chain), 400)
}
test ("PUT on /a/b should fail with well formed XML, correct element, butdoes not validate against the schema in validator_XSDElementContentPlain2") {
assertResultFailed(validator_XSDElementContentPlain2.validate(request("PUT","/a/b", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<stepType>URL_FAIL</stepType>
<even>22</even>
</a>
),response,chain), 400)
}
  // Happy-path checks: each Opt* validator variant must accept a valid XML payload
  // for the method being exercised; validate(...) simply must not report a failure.
  // PUT on /a/b expects the XSD2 document, POST on /a/b expects the XSD1 document.
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOpt with valid XML1") {
    validator_XSDElementContentPlainOpt.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsg with valid XML1") {
    validator_XSDElementContentPlainOptMsg.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptCode with valid XML1") {
    validator_XSDElementContentPlainOptCode.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsgCode with valid XML1") {
    validator_XSDElementContentPlainOptMsgCode.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsgCodeX with valid XML1") {
    validator_XSDElementContentPlainOptMsgCodeX.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsgCodeS with valid XML1") {
    validator_XSDElementContentPlainOptMsgCodeS.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  test ("PUT on /a/b with application/xml should succeed on validator_XSDElementContentPlainOpt2 with valid XML1") {
    validator_XSDElementContentPlainOpt2.validate(request("PUT","/a/b","application/xml", goodXML_XSD2),response,chain)
  }
  // POST variant of the same happy-path checks, using the XSD1 document.
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOpt with valid XML1") {
    validator_XSDElementContentPlainOpt.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsg with valid XML1") {
    validator_XSDElementContentPlainOptMsg.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptCode with valid XML1") {
    validator_XSDElementContentPlainOptCode.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsgCode with valid XML1") {
    validator_XSDElementContentPlainOptMsgCode.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsgCodeX with valid XML1") {
    validator_XSDElementContentPlainOptMsgCodeX.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOptMsgCodeS with valid XML1") {
    validator_XSDElementContentPlainOptMsgCodeS.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  test ("POST on /a/b with application/xml should succeed on validator_XSDElementContentPlainOpt2 with valid XML1") {
    validator_XSDElementContentPlainOpt2.validate(request("POST","/a/b","application/xml", goodXML_XSD1),response,chain)
  }
  // JSON happy-path checks: the plain-parameter rules only apply to XML payloads,
  // so a well-formed JSON body must pass through every Opt* validator variant.
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOpt with well formed JSON") {
    validator_XSDElementContentPlainOpt.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOptMsg with well formed JSON") {
    validator_XSDElementContentPlainOptMsg.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOptCode with well formed JSON") {
    validator_XSDElementContentPlainOptCode.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOptMsgCode with well formed JSON") {
    validator_XSDElementContentPlainOptMsgCode.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOptMsgCodeX with well formed JSON") {
    validator_XSDElementContentPlainOptMsgCodeX.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOptMsgCodeS with well formed JSON") {
    validator_XSDElementContentPlainOptMsgCodeS.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  test ("PUT on /a/b with application/json should succeed on validator_XSDElementContentPlainOpt2 with well formed JSON") {
    validator_XSDElementContentPlainOpt2.validate(request("PUT","/a/b","application/json", goodJSON),response,chain)
  }
  // Same JSON happy-path checks on the POST /c resource.
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOpt with well formed JSON") {
    validator_XSDElementContentPlainOpt.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOptMsg with well formed JSON") {
    validator_XSDElementContentPlainOptMsg.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOptCode with well formed JSON") {
    validator_XSDElementContentPlainOptCode.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOptMsgCode with well formed JSON") {
    validator_XSDElementContentPlainOptMsgCode.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOptMsgCodeX with well formed JSON") {
    validator_XSDElementContentPlainOptMsgCodeX.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOptMsgCodeS with well formed JSON") {
    validator_XSDElementContentPlainOptMsgCodeS.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
  test ("POST on /c with application/json should succeed on validator_XSDElementContentPlainOpt2 with well formed JSON") {
    validator_XSDElementContentPlainOpt2.validate(request("POST","/c","application/json", goodJSON),response,chain)
  }
test ("GOT on /c should succeed on validator_XSDElementContentPlainOpt") {
validator_XSDElementContentPlainOpt.validate(request("GET","/c"),response,chain)
}
test ("GOT on /c should succeed on validator_XSDElementContentPlainOptMsg") {
validator_XSDElementContentPlainOptMsg.validate(request("GET","/c"),response,chain)
}
test ("GOT on /c should succeed on validator_XSDElementContentPlainOptCode") {
validator_XSDElementContentPlainOptCode.validate(request("GET","/c"),response,chain)
}
test ("GOT on /c should succeed on validator_XSDElementContentPlainOptMsgCode") {
validator_XSDElementContentPlainOptMsgCode.validate(request("GET","/c"),response,chain)
}
test ("GOT on /c should succeed on validator_XSDElementContentPlainOptMsgCodeX") {
validator_XSDElementContentPlainOptMsgCodeX.validate(request("GET","/c"),response,chain)
}
test ("GOT on /c should succeed on validator_XSDElementContentPlainOptMsgCodeS") {
validator_XSDElementContentPlainOptMsgCodeS.validate(request("GET","/c"),response,chain)
}
test ("GOT on /c should succeed on validator_XSDElementContentPlainOpt2") {
validator_XSDElementContentPlainOpt2.validate(request("GET","/c"),response,chain)
}
  // PUT failure checks: the XSD1 document is valid for POST but has the wrong root
  // element for PUT, so each Opt* variant must return a 400 with the root-element message.
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOpt") {
    assertResultFailed(validator_XSDElementContentPlainOpt.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOptMsg") {
    assertResultFailed(validator_XSDElementContentPlainOptMsg.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOptCode") {
    assertResultFailed(validator_XSDElementContentPlainOptCode.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOptMsgCode") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCode.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOptMsgCodeX") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeX.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOptMsgCodeS") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeS.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT in the wrong location in validator_XSDElementContentPlainOpt2") {
    assertResultFailed(validator_XSDElementContentPlainOpt2.validate(request("PUT","/a/b", "application/xml", goodXML_XSD1),response,chain), 400, "Bad Content: Expecting the root element to be: tst:a")
  }
  // badXML_Plain2 lacks the required tst:a/@stepType plain parameter.  The expected
  // status/message vary per variant: Msg variants carry the custom message, Code
  // variants carry the custom 500 status code.
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOpt") {
    assertResultFailed(validator_XSDElementContentPlainOpt.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 400, "Bad Content: Expecting tst:a/@stepType")
  }
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOptMsg") {
    assertResultFailed(validator_XSDElementContentPlainOptMsg.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 400, "Bad Content: No stepType attribute on a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOptCode") {
    assertResultFailed(validator_XSDElementContentPlainOptCode.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 500, "Expecting tst:a/@stepType")
  }
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOptMsgCode") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCode.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 500, "No stepType attribute on a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOptMsgCodeX") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeX.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 500, "No stepType attribute on a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOptMsgCodeS") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeS.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 500, "No stepType attribute on a")
  }
  test ("PUT on /a/b should fail with well formed XML PUT with missing required plain params on validator_XSDElementContentPlainOpt2") {
    assertResultFailed(validator_XSDElementContentPlainOpt2.validate(request("PUT","/a/b", "application/xml", badXML_Plain2),response,chain), 400, "Bad Content: Expecting tst:a/@stepType")
  }
  // POST failure checks: the XSD2 document is valid for PUT but has the wrong root
  // element (tst:e expected) for POST, so each Opt* variant must return a 400.
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOpt") {
    assertResultFailed(validator_XSDElementContentPlainOpt.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOptMsg") {
    assertResultFailed(validator_XSDElementContentPlainOptMsg.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOptCode") {
    assertResultFailed(validator_XSDElementContentPlainOptCode.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOptMsgCode") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCode.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOptMsgCodeX") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeX.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOptMsgCodeS") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeS.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  test ("POST on /a/b should fail with well formed XML POST in the wrong location in validator_XSDElementContentPlainOpt2") {
    assertResultFailed(validator_XSDElementContentPlainOpt2.validate(request("POST","/a/b", "application/xml", goodXML_XSD2),response,chain), 400, "Bad Content: Expecting the root element to be: tst:e")
  }
  // badXML_Plain1 lacks the required tst:e/tst:stepType plain parameter.  Msg
  // variants carry the custom message, Code variants the custom 501 status code.
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOpt") {
    assertResultFailed(validator_XSDElementContentPlainOpt.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 400, "Bad Content: Expecting tst:e/tst:stepType")
  }
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOptMsg") {
    assertResultFailed(validator_XSDElementContentPlainOptMsg.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 400, "Bad Content: no stepType on e")
  }
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOptCode") {
    assertResultFailed(validator_XSDElementContentPlainOptCode.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 501, "Expecting tst:e/tst:stepType")
  }
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOptMsgCode") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCode.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 501, "no stepType on e")
  }
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOptMsgCodeX") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeX.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 501, "no stepType on e")
  }
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOptMsgCodeS") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeS.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 501, "no stepType on e")
  }
  test ("POST on /a/b should fail with well formed XML POST with missing required plain params on validator_XSDElementContentPlainOpt2") {
    assertResultFailed(validator_XSDElementContentPlainOpt2.validate(request("POST","/a/b", "application/xml", badXML_Plain1),response,chain), 400, "Bad Content: Expecting tst:e/tst:stepType")
  }
  // goodXML is well-formed but does not conform to the schema; for the Opt* variants
  // only the 400 status is asserted (the message comes from the XSD processor).
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOpt") {
    assertResultFailed(validator_XSDElementContentPlainOpt.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOptMsg") {
    assertResultFailed(validator_XSDElementContentPlainOptMsg.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOptCode") {
    assertResultFailed(validator_XSDElementContentPlainOptCode.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOptMsgCode") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCode.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOptMsgCodeX") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeX.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOptMsgCodeS") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeS.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML that does not match schema on validator_XSDElementContentPlainOpt2") {
    assertResultFailed(validator_XSDElementContentPlainOpt2.validate(request("PUT","/a/b", "application/xml", goodXML),response,chain), 400)
  }
  // Inline payload with the correct root element but an invalid stepType attribute
  // value ("foo"): XSD validation must fail with a 400 for each Opt* variant.
  test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainOpt") {
    assertResultFailed(validator_XSDElementContentPlainOpt.validate(request("PUT","/a/b", "application/xml",
                         <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
                           <id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
                           <stepType>URL_FAIL</stepType>
                           <even>22</even>
                         </a>
                       ),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainOptMsg") {
    assertResultFailed(validator_XSDElementContentPlainOptMsg.validate(request("PUT","/a/b", "application/xml",
                         <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
                           <id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
                           <stepType>URL_FAIL</stepType>
                           <even>22</even>
                         </a>
                       ),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainOptCode") {
    assertResultFailed(validator_XSDElementContentPlainOptCode.validate(request("PUT","/a/b", "application/xml",
                         <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
                           <id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
                           <stepType>URL_FAIL</stepType>
                           <even>22</even>
                         </a>
                       ),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainOptMsgCode") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCode.validate(request("PUT","/a/b", "application/xml",
                         <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
                           <id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
                           <stepType>URL_FAIL</stepType>
                           <even>22</even>
                         </a>
                       ),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainOptMsgCodeX") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeX.validate(request("PUT","/a/b", "application/xml",
                         <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
                           <id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
                           <stepType>URL_FAIL</stepType>
                           <even>22</even>
                         </a>
                       ),response,chain), 400)
  }
  test ("PUT on /a/b should fail with well formed XML, correct element, but does not validate against the schema in validator_XSDElementContentPlainOptMsgCodeS") {
    assertResultFailed(validator_XSDElementContentPlainOptMsgCodeS.validate(request("PUT","/a/b", "application/xml",
                         <a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
                           <id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
                           <stepType>URL_FAIL</stepType>
                           <even>22</even>
                         </a>
                       ),response,chain), 400)
  }
test ("PUT on /a/b should fail with well formed XML, correct element, butdoes not validate against the schema in validator_XSDElementContentPlainOpt2") {
assertResultFailed(validator_XSDElementContentPlainOpt2.validate(request("PUT","/a/b", "application/xml",
<a xmlns="http://www.rackspace.com/repose/wadl/checker/step/test" stepType="foo">
<id>21f1fcf6-bf38-11e1-878e-133ab65fcec3</id>
<stepType>URL_FAIL</stepType>
<even>22</even>
</a>
),response,chain), 400)
}
}
| tylerroyal/api-checker | core/src/test/scala/com/rackspace/com/papi/components/checker/ValidatorWADLPlainParamSuite.scala | Scala | apache-2.0 | 67,105 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import com.google.inject.{Inject, Singleton}
import config.{ApplicationConfig, GmpContext, GmpSessionCache}
import controllers.auth.AuthAction
import play.api.Logging
import play.api.mvc.MessagesControllerComponents
import services.SessionService
import uk.gov.hmrc.auth.core.AuthConnector
import scala.concurrent.ExecutionContext
@Singleton
class SessionCacheController @Inject()(authAction: AuthAction,
                                       override val authConnector: AuthConnector,
                                       ac:ApplicationConfig,
                                       sessionService: SessionService,
                                       implicit val config:GmpContext,
                                       override val messagesControllerComponents: MessagesControllerComponents,
                                       implicit val executionContext: ExecutionContext,
                                       implicit val gmpSessionCache: GmpSessionCache
                                      ) extends GmpPageFlow(authConnector,sessionService,config,messagesControllerComponents,ac) with Logging {

  /**
   * Resets the cached single-calculation session (keeping the SCON) and
   * redirects to the start of the calculation journey.
   */
  def newCalculation = authAction.async {
    implicit request => {
      logger.debug(s"[SessionCacheController][newCalculation][GET] : $request")
      sessionService.resetGmpSessionWithScon map {
        // The cached value itself is not needed; a defined result just confirms the reset worked.
        case Some(_) => Redirect(controllers.routes.PensionDetailsController.get)
        case None => throw new RuntimeException("Failed to reset GMP session for new calculation")
      }
    }
  }

  /**
   * Resets the cached bulk-calculation session and redirects to the
   * file-upload step that starts a new bulk calculation.
   */
  def newBulkCalculation = authAction.async {
    implicit request => {
      logger.debug(s"[SessionCacheController][newBulkCalculation][GET] : $request")
      sessionService.resetGmpBulkSession() map {
        case Some(_) => Redirect(controllers.routes.FileUploadController.get)
        case None => throw new RuntimeException("Failed to reset GMP bulk session for new bulk calculation")
      }
    }
  }
}
| hmrc/gmp-frontend | app/controllers/SessionCacheController.scala | Scala | apache-2.0 | 2,479 |
package scala.meta
package internal
package trees
import scala.meta.prettyprinters._
import scala.meta.inputs._
import scala.meta.tokens._
import scala.meta.tokens.Token._
import scala.meta.tokenizers._
// NOTE: Methods that start with "private" are NOT intended to be called outside scala.meta.
// Calling these methods from hosts will compile (because hosts are in meta), but is strongly discouraged.
trait InternalTree extends Product {
  self: Tree =>

  // ==============================================================
  // Pieces of internal state of scala.meta ASTs.
  // Some nodes define all of them as fields, some nodes define only a subset.
  // However, all nodes have corresponding defs to simplify uniform treatment.
  // Everyone except for scala.meta's core should be using "private"-less versions of these methods,
  // because those are only available on appropriate trees.
  // ==============================================================

  private[meta] def privatePrototype: Tree
  private[meta] def privateParent: Tree
  private[meta] def privateOrigin: Origin
  private[meta] def privateCopy(
      prototype: Tree = this,
      parent: Tree = privateParent,
      destination: String = null,
      origin: Origin = privateOrigin
  ): Tree

  // ==============================================================
  // Getters for pieces of internal state defined above.
  // ==============================================================

  /** The immediate parent of this tree, or `None` for an unattached tree. */
  def parent: Option[Tree] = Option(privateParent)

  // NOTE: InternalTree inherits traditional productXXX methods from Product
  // and also adds a new method called productFields.
  // def productPrefix: String
  // def productArity: Int
  // def productElement(n: Int): Any
  // def productIterator: Iterator[Any]
  def productFields: List[String]

  /** The origin of this tree, falling back to `Origin.None` when unset. */
  private[meta] def origin: Origin = {
    if (privateOrigin != null) privateOrigin else Origin.None
  }

  /**
   * The position of this tree in its original input.
   *
   * Only trees parsed from an input carry a position; trees obtained some
   * other way (e.g. constructed manually) return `Position.None`.
   */
  def pos: Position = {
    origin match {
      // NOTE: `dialect` and `pos` bound here come from the parsed origin and
      // deliberately shadow the same-named members of this trait.
      case Origin.Parsed(input, dialect, pos) =>
        val tokens = dialect(input).tokenize.get
        val startToken = tokens(pos.start)
        val endToken = tokens(pos.end - 1)
        Position.Range(input, startToken.start, endToken.end)
      case _ =>
        Position.None
    }
  }

  /**
   * The tokens of this tree.
   *
   * For parsed trees this slices the tokenization of the original input (using
   * the dialect the tree was parsed with — the matched `dialect` intentionally
   * shadows the implicit parameter); otherwise the tree is prettyprinted and
   * re-tokenized with the implicit dialect.
   */
  def tokens(implicit dialect: Dialect): Tokens = {
    origin match {
      case Origin.Parsed(input, dialect, pos) =>
        val tokens = dialect(input).tokenize.get
        tokens.slice(pos.start, pos.end)
      case _ =>
        dialect(Input.VirtualFile("<InternalTrees.tokens>", this.syntax)).tokenize.get
    }
  }

  // ==============================================================
  // Setters for pieces of internal state defined above.
  // Everyone except for scala.meta's core should be using "private"-less versions of these methods,
  // because those are only available on appropriate trees.
  // ==============================================================

  private[meta] def privateWithOrigin(origin: Origin): Tree = {
    this.privateCopy(origin = origin)
  }

  // ==============================================================
  // Intellij-friendly stubs.
  // See https://github.com/scalameta/scalameta/pull/907#discussion_r120090447.
  // ==============================================================

  protected def checkFields(x: Any): Unit = ()
  protected def checkParent(x: Any): Unit = ()
}
/** Mixin providing origin-related extension methods on trees. */
trait InternalTreeXtensions {

  /** Adds `origin` / `withOrigin` syntax to any concrete tree subtype `T`. */
  private[meta] implicit class XtensionOriginTree[T <: Tree](tree: T) {

    /** The tree's recorded origin, defaulting to `Origin.None` when absent. */
    def origin: Origin = Option(tree.privateOrigin).getOrElse(Origin.None)

    /** A copy of the tree carrying the given origin; the cast is safe because
      * `privateCopy` preserves the node's concrete type. */
    def withOrigin(origin: Origin): T = tree.privateWithOrigin(origin).asInstanceOf[T]
  }
}
| scalameta/scalameta | scalameta/trees/shared/src/main/scala/scala/meta/internal/trees/InternalTrees.scala | Scala | bsd-3-clause | 3,731 |
package mesosphere.marathon.state
import mesosphere.marathon.Protos
import mesosphere.marathon.state.PathId._
import mesosphere.mesos.protos.Implicits.slaveIDToProto
import mesosphere.mesos.protos.SlaveID
import org.apache.mesos.{ Protos => mesos }
/**
 * Record of a task failure for an app, persisted so that the most recent
 * failure can be surfaced via the API.
 *
 * @param appId     id of the app the failed task belonged to.
 * @param taskId    Mesos id of the failed task.
 * @param state     terminal Mesos task state that was reported.
 * @param message   status message accompanying the failure, if any.
 * @param host      agent host the task ran on.
 * @param version   app version the task was started from.
 * @param timestamp time the failure was observed.
 * @param slaveId   Mesos agent id, when known.
 */
case class TaskFailure(
  appId: PathId,
  taskId: mesos.TaskID,
  state: mesos.TaskState,
  message: String = "",
  host: String = "",
  version: Timestamp = Timestamp.now,
  timestamp: Timestamp = Timestamp.now,
  slaveId: Option[mesos.SlaveID] = None)
    extends MarathonState[Protos.TaskFailure, TaskFailure] {

  override def mergeFromProto(proto: Protos.TaskFailure): TaskFailure =
    TaskFailure(proto)

  override def mergeFromProto(bytes: Array[Byte]): TaskFailure = {
    val proto = Protos.TaskFailure.parseFrom(bytes)
    mergeFromProto(proto)
  }

  override def toProto: Protos.TaskFailure = {
    val taskFailureBuilder = Protos.TaskFailure.newBuilder
      .setAppId(appId.toString)
      .setTaskId(taskId)
      .setState(state)
      .setMessage(message)
      .setHost(host)
      .setVersion(version.toString)
      .setTimestamp(timestamp.toString)
    // Only set the optional field when present; avoids the Option#get anti-pattern.
    slaveId.foreach(id => taskFailureBuilder.setSlaveId(id))
    taskFailureBuilder.build
  }
}
object TaskFailure {

  import mesosphere.marathon.event.MesosStatusUpdateEvent

  // Rebuilds a TaskFailure from its protobuf representation; the optional
  // slaveId field is only read when the proto actually carries it.
  def apply(proto: Protos.TaskFailure): TaskFailure =
    TaskFailure(
      appId = proto.getAppId.toPath,
      taskId = proto.getTaskId,
      state = proto.getState,
      message = proto.getMessage,
      host = proto.getHost,
      version = Timestamp(proto.getVersion),
      timestamp = Timestamp(proto.getTimestamp),
      slaveId = if (proto.hasSlaveId) Some(proto.getSlaveId) else None
    )

  // Extractor: converts a Mesos status update event into a TaskFailure, but
  // only when the reported task state is terminal-with-failure (see
  // isFailureState); non-failure updates yield None.
  object FromMesosStatusUpdateEvent {
    def unapply(statusUpdate: MesosStatusUpdateEvent): Option[TaskFailure] =
      apply(statusUpdate)

    def apply(statusUpdate: MesosStatusUpdateEvent): Option[TaskFailure] = {

      // NOTE: this positional destructuring must match the field order of
      // MesosStatusUpdateEvent exactly; the two ignored slots are fields the
      // failure record does not need.
      val MesosStatusUpdateEvent(
        slaveId, taskId, taskStateStr, message,
        appId, host, _, version, _, ts
        ) = statusUpdate

      val state = taskState(taskStateStr)

      if (isFailureState(state))
        Some(TaskFailure(
          appId,
          mesos.TaskID.newBuilder.setValue(taskId).build,
          state,
          message,
          host,
          Timestamp(version),
          Timestamp(ts),
          Option(slaveIDToProto(SlaveID(slaveId)))
        ))
      else None
    }
  }

  // Parses the textual Mesos task state carried by the event (e.g. "TASK_FAILED").
  protected[this] def taskState(s: String): mesos.TaskState =
    mesos.TaskState.valueOf(s)

  // A state counts as a failure when the task terminated abnormally.
  protected[this] def isFailureState(state: mesos.TaskState): Boolean = {
    import mesos.TaskState._
    state match {
      case TASK_FAILED | TASK_LOST | TASK_ERROR => true
      case _ => false
    }
  }
}
| sepiroth887/marathon | src/main/scala/mesosphere/marathon/state/TaskFailure.scala | Scala | apache-2.0 | 2,808 |
package mesosphere.marathon
package storage.migration
import akka.actor.Scheduler
import java.net.URI
import akka.Done
import akka.stream.Materializer
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.Protos.StorageVersion
import scala.concurrent.ExecutionContext.Implicits.global
import mesosphere.marathon.core.storage.backup.PersistentStoreBackup
import mesosphere.marathon.core.storage.store.PersistenceStore
import mesosphere.marathon.storage.StorageConfig
import mesosphere.marathon.storage.repository._
import mesosphere.marathon.util.toRichFuture
import scala.async.Async.{ async, await }
import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
import scala.util.control.NonFatal
import mesosphere.marathon.raml.RuntimeConfiguration
import mesosphere.marathon.storage.migration.Migration.MigrationAction
import scala.concurrent.ExecutionContext
import scala.util.{ Failure, Success }
/**
 * Base trait of a migration step.
 *
 * Implementations are created lazily by the factories registered in
 * [[Migration.steps]] and are executed in version order.
 */
trait MigrationStep {

  /**
   * Apply migration step.
   *
   * @param ctx execution context the step's asynchronous work runs on
   * @param mat stream materializer for steps that process data as streams
   * @return Future for the running migration.
   */
  def migrate()(implicit ctx: ExecutionContext, mat: Materializer): Future[Done]
}
/**
 * Orchestrates storage migrations: optionally backs up / restores the
 * persistent store, then applies all migration steps newer than the
 * currently stored version, and finally records the new target version.
 *
 * @param persistenceStore Optional "new" PersistenceStore for new migrations, the repositories
 *                         are assumed to be in the new format.
 */
@SuppressWarnings(Array("UnusedMethodParameter")) // materializer will definitely be used in the future.
class Migration(
    private[migration] val availableFeatures: Set[String],
    private[migration] val defaultNetworkName: Option[String],
    private[migration] val mesosBridgeName: String,
    private[migration] val persistenceStore: PersistenceStore[_, _, _],
    private[migration] val appRepository: AppRepository,
    private[migration] val podRepository: PodRepository,
    private[migration] val groupRepository: GroupRepository,
    private[migration] val deploymentRepository: DeploymentRepository,
    private[migration] val instanceRepo: InstanceRepository,
    private[migration] val taskFailureRepo: TaskFailureRepository,
    private[migration] val frameworkIdRepo: FrameworkIdRepository,
    private[migration] val serviceDefinitionRepo: ServiceDefinitionRepository,
    private[migration] val runtimeConfigurationRepository: RuntimeConfigurationRepository,
    private[migration] val backup: PersistentStoreBackup,
    private[migration] val config: StorageConfig,
    private[migration] val steps: List[MigrationAction] = Migration.steps
)(implicit mat: Materializer, scheduler: Scheduler) extends StrictLogging {

  import StorageVersions.OrderedStorageVersion
  import Migration.statusLoggingInterval

  // Oldest storage layout this code can still migrate from.
  private[migration] val minSupportedStorageVersion = StorageVersions(1, 4, 0, StorageVersion.StorageFormat.PERSISTENCE_STORE)

  // Highest version among the configured steps; where migration ends up.
  val targetVersion = StorageVersions(steps)

  // Periodic progress log emitted while a single step is still running.
  protected def notifyMigrationInProgress(from: StorageVersion, migrateVersion: StorageVersion) = {
    logger.info(
      s"Migration for storage: ${from.str} to current: ${targetVersion.str}: " +
        s"application of the change for version ${migrateVersion.str} is still in progress"
    )
  }

  /**
   * Applies, strictly in ascending version order and one after another, every
   * step whose version is newer than `from`.
   *
   * @param from the storage version currently persisted
   * @return the versions of all steps that were applied
   */
  def applyMigrationSteps(from: StorageVersion): Future[Seq[StorageVersion]] = {
    steps
      .filter { case (version, _) => version > from }
      .sortBy { case (version, _) => version }
      .foldLeft(Future.successful(Seq.empty[StorageVersion])) {
        // Chaining via flatMap guarantees steps run sequentially, not in parallel.
        case (resultsFuture, (migrateVersion, change)) => resultsFuture.flatMap { res =>
          logger.info(
            s"Migration for storage: ${from.str} to target: ${targetVersion.str}: apply change for version: ${migrateVersion.str} "
          )
          // Log progress periodically until this step finishes.
          val migrationInProgressNotification = scheduler.schedule(statusLoggingInterval, statusLoggingInterval) {
            notifyMigrationInProgress(from, migrateVersion)
          }
          val step = change.apply(this)
          step.migrate().recover {
            // Cancellation passes through untouched; anything else is wrapped.
            case e: MigrationCancelledException => throw e
            case NonFatal(e) =>
              throw new MigrationFailedException(s"while migrating storage to $migrateVersion", e)
          }.map { _ =>
            res :+ migrateVersion
          }.andThen {
            case _ =>
              migrationInProgressNotification.cancel()
          }
        }
      }
  }

  /**
   * Full migration pipeline: honor any requested backup/restore from the
   * runtime configuration, then migrate the storage.
   */
  @SuppressWarnings(Array("all")) // async/await
  def migrateAsync(): Future[Seq[StorageVersion]] = async {
    val config = await(runtimeConfigurationRepository.get()).getOrElse(RuntimeConfiguration())
    // before backup/restore called, reset the runtime configuration
    await(runtimeConfigurationRepository.store(RuntimeConfiguration(None, None)))
    // step 1: backup current zk state
    await(config.backup.map(uri => backup.backup(new URI(uri))).getOrElse(Future.successful(Done)))
    // step 2: restore state from given backup
    await(config.restore.map(uri => backup.restore(new URI(uri))).getOrElse(Future.successful(Done)))
    // last step: run the migration, to ensure we can operate on the zk state
    val result = await(migrateStorage(backupCreated = config.backup.isDefined || config.restore.isDefined))
    logger.info(s"Migration successfully applied for version ${targetVersion.str}")
    result
  }

  // Blocking entry point; intended to be called once during startup.
  def migrate(): Seq[StorageVersion] =
    Await.result(migrateAsync(), Duration.Inf)

  /**
   * Runs the migration steps for `version`, taking a safety backup first
   * unless one was already created by the caller.
   */
  @SuppressWarnings(Array("all")) // async/await
  def runMigrations(version: Protos.StorageVersion, backupCreated: Boolean = false): Future[Seq[StorageVersion]] =
    async {
      if (!backupCreated && config.backupLocation.isDefined) {
        logger.info("Making a backup of the current state")
        await(backup.backup(config.backupLocation.get))
        logger.info(s"The backup has been saved to ${config.backupLocation}")
        logger.info("Going to apply the migration steps.")
      }
      val result = await(applyMigrationSteps(version))
      await(storeCurrentVersion())
      result
    }

  /**
   * Decides, based on the persisted storage version, whether migration is
   * needed, forbidden (too old / too new) or a no-op, and brackets the actual
   * migration with start/end markers in the persistence store.
   */
  @SuppressWarnings(Array("all")) // async/await
  def migrateStorage(backupCreated: Boolean = false): Future[Seq[StorageVersion]] = {
    async {
      val currentVersion = await(getCurrentVersion)
      val migrations = currentVersion match {
        case Some(version) if version < minSupportedStorageVersion =>
          val msg = s"Migration from versions < ${minSupportedStorageVersion.str} are not supported. Your version: ${version.str}"
          throw new MigrationFailedException(msg)
        case Some(version) if version > targetVersion =>
          val msg = s"Migration from ${version.str} is not supported as it is newer than ${targetVersion.str}."
          throw new MigrationFailedException(msg)
        case Some(version) if version < targetVersion =>
          // mark migration as started
          await(persistenceStore.startMigration())
          await(runMigrations(version, backupCreated).asTry) match {
            case Success(result) =>
              // mark migration as completed
              await(persistenceStore.endMigration())
              result
            case Failure(ex: MigrationCancelledException) =>
              // mark migration as completed
              await(persistenceStore.endMigration())
              throw ex
            case Failure(ex) =>
              // NOTE(review): the migration-in-progress marker is intentionally
              // left in place here, presumably to block a restart on a
              // half-migrated store — confirm against persistenceStore semantics.
              throw ex
          }
        case Some(version) if version == targetVersion =>
          logger.info(s"No migration necessary, already at the target version ${targetVersion.str}")
          Nil
        case _ =>
          logger.info("No migration necessary, no version stored")
          // mark migration as started
          await(persistenceStore.startMigration())
          await(storeCurrentVersion())
          // mark migration as completed
          await(persistenceStore.endMigration())
          Nil
      }
      migrations
    }.recover {
      case ex: MigrationCancelledException => throw ex
      case ex: MigrationFailedException => throw ex
      case NonFatal(ex) =>
        throw new MigrationFailedException(s"Migration Failed: ${ex.getMessage}", ex)
    }
  }

  // Version currently persisted in the store, if any.
  private def getCurrentVersion: Future[Option[StorageVersion]] =
    persistenceStore.storageVersion()

  // Persists the target version, marking migration as done.
  private def storeCurrentVersion(): Future[Done] =
    persistenceStore.setStorageVersion(targetVersion)
}
object Migration {
  // Key under which the storage version is persisted.
  val StorageVersionName = "internal:storage:version"

  // Maximum parallelism available to individual migration steps.
  val maxConcurrency = 8

  // How often a still-running step logs a progress message.
  val statusLoggingInterval = 10.seconds

  // A factory producing a step from the Migration (gives steps access to all repositories).
  type MigrationStepFactory = Migration => MigrationStep
  // A step's target version paired with its factory.
  type MigrationAction = (StorageVersion, MigrationStepFactory)

  /**
   * All the migration steps, that have to be applied.
   * They get applied after the master has been elected.
   */
  lazy val steps: List[MigrationAction] =
    List(
      StorageVersions(1, 4, 2, StorageVersion.StorageFormat.PERSISTENCE_STORE) -> { migration =>
        new MigrationTo142(migration.appRepository)
      },
      StorageVersions(1, 4, 6, StorageVersion.StorageFormat.PERSISTENCE_STORE) -> { (migration) =>
        new MigrationTo146(migration.appRepository, migration.podRepository)
      },
      StorageVersions(1, 5, 0, StorageVersion.StorageFormat.PERSISTENCE_STORE) -> { (migration) =>
        MigrationTo15(migration)
      },
      StorageVersions(1, 5, 2, StorageVersion.StorageFormat.PERSISTENCE_STORE) -> { (migration) =>
        new MigrationTo152(migration.instanceRepo)
      },
      StorageVersions(1, 6, 0, StorageVersion.StorageFormat.PERSISTENCE_STORE) -> { (migration) =>
        new MigrationTo160(migration.instanceRepo, migration.persistenceStore)
      }
    // From here onwards we are not bound to the build version anymore.
    //StorageVersions(200) -> { (migration) => new MigrationTo200(...) }
    )
}
object StorageVersions {

  /** Builds a [[StorageVersion]] proto from its components. */
  def apply(major: Int, minor: Int = 0, patch: Int = 0,
    format: StorageVersion.StorageFormat = StorageVersion.StorageFormat.PERSISTENCE_STORE): StorageVersion = {
    StorageVersion
      .newBuilder()
      .setMajor(major)
      .setMinor(minor)
      .setPatch(patch)
      .setFormat(format)
      .build()
  }

  /**
   * Get the migration target version from a list of migration steps.
   *
   * @param steps The steps of a migration.
   * @return The target version of the migration steps.
   */
  def apply(steps: List[MigrationAction]): StorageVersion = steps.map { case (version, _) => version }.max

  /**
   * Makes [[StorageVersion]] ordered: compared first by storage format,
   * then major, minor and patch. Also used (via `Ordering.ordered`) by the
   * `.max` / `.sortBy` calls elsewhere in this file.
   */
  implicit class OrderedStorageVersion(val version: StorageVersion) extends AnyVal with Ordered[StorageVersion] {
    override def compare(that: StorageVersion): Int = {
      // Lazily falls through to `fn` only when left == right.
      def by(left: Int, right: Int, fn: => Int): Int = if (left.compareTo(right) != 0) left.compareTo(right) else fn
      by(version.getFormat.getNumber, that.getFormat.getNumber,
        by(version.getMajor, that.getMajor,
          by(version.getMinor, that.getMinor,
            by(version.getPatch, that.getPatch, 0))))
    }

    /** Human-readable rendering used in log messages. */
    def str: String = s"Version(${version.getMajor}, ${version.getMinor}, ${version.getPatch}, ${version.getFormat})"

    def nonEmpty: Boolean = !version.equals(empty)
  }

  /** The zero version: 0.0.0 in the legacy storage format. */
  def empty: StorageVersion = StorageVersions(0, 0, 0, StorageVersion.StorageFormat.LEGACY)
}
| guenter/marathon | src/main/scala/mesosphere/marathon/storage/migration/Migration.scala | Scala | apache-2.0 | 11,103 |
/*************************************************************************
* *
* This file is part of the 20n/act project. *
* 20n/act enables DNA prediction for synthetic biology/bioengineering. *
* Copyright (C) 2017 20n Labs, Inc. *
* *
* Please direct all queries to act@20n.com. *
* *
* This program is free software: you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
* *
*************************************************************************/
package com.act.biointerpretation.rsmiles.processing
import act.server.MongoDB
import chemaxon.formats.MolFormatException
import com.act.analysis.chemicals.molecules.{MoleculeExporter, MoleculeFormat, MoleculeImporter}
import com.act.biointerpretation.rsmiles.chemicals.JsonInformationTypes.{ChemicalInformation, ReactionInformation}
import com.act.workflow.tool_manager.workflow.workflow_mixins.mongo.{ChemicalKeywords, MongoWorkflowUtilities, ReactionKeywords}
import com.mongodb.{BasicDBList, BasicDBObject, DBObject}
import org.apache.logging.log4j.LogManager
import scala.collection.JavaConversions._
import scala.collection.parallel.immutable.ParMap
object ReactionProcessing {
  val logger = LogManager.getLogger(getClass)

  /**
   * Converts a raw reaction document into a [[ReactionInformation]], resolving
   * every substrate and product to a molecule string.
   *
   * @param mongoDb              Database used to resolve non-abstract chemicals.
   * @param moleculeFormat       Format used when importing/exporting molecules.
   * @param previousChemicals    Already-resolved (abstract) chemicals keyed by chemical ID.
   * @param substrateCountFilter When > 0, only reactions with exactly this many substrates pass.
   * @param ob                   The reaction document to convert.
   * @return Some(reaction) when fully resolved and all filters pass, None otherwise
   *         (also None when any molecule fails to parse).
   */
  def constructDbReaction(mongoDb: MongoDB, moleculeFormat: MoleculeFormat.MoleculeFormatType)
                         (previousChemicals: ParMap[Long, ChemicalInformation], substrateCountFilter: Int = -1)
                         (ob: DBObject): Option[ReactionInformation] = {
    // Parse the objects from the database and ensure that they exist.
    // (Hoisted the ENZ_SUMMARY lookup, previously fetched twice.)
    val enzSummary = ob.get(s"${ReactionKeywords.ENZ_SUMMARY}").asInstanceOf[BasicDBObject]
    val substrates = enzSummary.get(s"${ReactionKeywords.SUBSTRATES}").asInstanceOf[BasicDBList]
    val products = enzSummary.get(s"${ReactionKeywords.PRODUCTS}").asInstanceOf[BasicDBList]
    val reactionId = ob.get(ReactionKeywords.ID.toString).asInstanceOf[Int]

    // Fix: short-circuiting || instead of the non-short-circuiting bitwise |.
    if (substrates == null || products == null) return None

    val productList = products.toList
    val substrateList = substrates.toList

    // Not really a reaction if nothing is happening.
    if (substrateList.isEmpty || productList.isEmpty) return None

    // Ensure that the substrate number is the same as the number of substrates we are looking for.
    if (substrateCountFilter > 0 && substrateList.length != substrateCountFilter) return None

    // Partially applied loader so the import settings carry over to each entry.
    val moleculeLoader: (DBObject) => List[ChemicalInformation] = loadMolecule(mongoDb, moleculeFormat)(previousChemicals)

    try {
      val substrateMoleculeList: List[ChemicalInformation] = substrateList.flatMap(x => moleculeLoader(x.asInstanceOf[DBObject]))
      val productMoleculeList: List[ChemicalInformation] = productList.flatMap(x => moleculeLoader(x.asInstanceOf[DBObject]))

      /*
        Check if the Substrates == Products.
        This probably means a stereo change is occurring that our import settings could strip off the original molecule.
       */
      val uniqueSubstrates = substrateMoleculeList.map(_.getString).toSet
      val uniqueProducts = productMoleculeList.map(_.getString).toSet
      if (uniqueSubstrates.equals(uniqueProducts)) {
        logger.debug(s"Reaction with ID $reactionId has the same substrates as products. " +
          s"Likely is a stereo change. Skipping.")
        return None
      }

      Option(new ReactionInformation(reactionId, substrateMoleculeList, productMoleculeList))
    } catch {
      // A molecule that cannot be parsed renders the whole reaction unusable.
      case e: MolFormatException => None
    }
  }

  /**
   * Resolves a single substrate/product entry to its molecule representation,
   * repeated `coefficient` times. Abstract chemicals are served from
   * `previousChemicals`; everything else is looked up in Mongo by InChI.
   */
  private def loadMolecule(mongoDb: MongoDB, moleculeFormat: MoleculeFormat.MoleculeFormatType)
                          (previousChemicals: ParMap[Long, ChemicalInformation])(dbObj: DBObject): List[ChemicalInformation] = {
    // Read the chemical ID once (was previously fetched twice from the DBObject).
    val chemicalId = dbObj.get(ReactionKeywords.PUBCHEM.toString).asInstanceOf[Long]
    val coefficient = dbObj.get(ReactionKeywords.COEFFICIENT.toString).asInstanceOf[Int]

    val hitAbstractChem: Option[ChemicalInformation] = previousChemicals.get(chemicalId)
    // We only get past this if we are dealing with non-abstract chemicals.
    if (hitAbstractChem.isDefined) return List.fill(coefficient)(hitAbstractChem.get)

    // Try to look for real molecules if we can't find it in our abstract stack.
    val query = Mongo.createDbObject(ChemicalKeywords.ID, chemicalId)
    val inchi = Mongo.mongoQueryChemicals(mongoDb)(query, null).next().get(ChemicalKeywords.INCHI.toString).asInstanceOf[String]

    if (!moleculeFormat.toString.toLowerCase.contains("inchi"))
      logger.warn("Trying to import InChIs with a non InChI setting.")

    val molecule = MoleculeImporter.importMolecule(inchi, moleculeFormat)
    List.fill(coefficient)(new ChemicalInformation(chemicalId.toInt, MoleculeExporter.exportMolecule(molecule, moleculeFormat)))
  }

  object Mongo extends MongoWorkflowUtilities {}
}
| 20n/act | reachables/src/main/scala/com/act/biointerpretation/rsmiles/processing/ReactionProcessing.scala | Scala | gpl-3.0 | 6,154 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spot.utilities
object TimeUtilities {

  /**
   * Converts an "HH:MM:SS" time string into its total number of seconds.
   *
   * @param timeStr time formatted as "HH:MM:SS"
   * @return the same time expressed in seconds, as a Double
   */
  def getTimeAsDouble(timeStr: String): Double = {
    val fields = timeStr.split(":")
    val totalSeconds = fields(0).toInt * 3600 + fields(1).toInt * 60 + fields(2).toInt
    totalSeconds.toDouble
  }

  /**
   * Extracts only the hour component from an "HH:MM:SS" time string.
   *
   * @param timeStr time formatted as "HH:MM:SS"
   * @return the hour component as an Int
   */
  def getTimeAsHour(timeStr: String): Int =
    timeStr.takeWhile(_ != ':').toInt
}
| NathanSegerlind/incubator-spot | spot-ml/src/main/scala/org/apache/spot/utilities/TimeUtilities.scala | Scala | apache-2.0 | 1,550 |
package org.scalaide.core
package quickassist
import org.eclipse.jdt.internal.core.util.SimpleDocument
import org.junit.AfterClass
import org.junit.Assert
import org.junit.BeforeClass
import org.junit.Test
import org.scalaide.core.internal.quickassist.explicit.ExplicitReturnType
import scala.util.control.Exception
object ExplicitTypeAssistTest extends QuickAssistTest {
  // Suite-wide test project: created once before all tests, deleted afterwards.
  @BeforeClass
  def createProject() = create("assist")

  @AfterClass
  def deleteProject() = delete()
}
/** This test suite requires the UI. */
class ExplicitTypeAssistTest extends QuickAssistTestHelper {
  import ExplicitTypeAssistTest._

  // Quick assist under test: adds an explicit return type at the caret (`^`).
  val quickAssist = new ExplicitReturnType

  def createSource(packageName: String, unitName: String)(contents: String) = createSourceFile(packageName, unitName)(contents)

  // Runs the quick assist at the `^` marker in `contents` and asserts the
  // document afterwards equals `expected` (the `^` itself is stripped first).
  def assistsFor(contents: String, expected: String): Unit =
    runQuickAssistWith(contents) { p =>
      Assert.assertTrue("Add explicit type proposal not found", p.nonEmpty)
      val doc = new SimpleDocument(contents.filterNot(_ == '^'))
      p.head.apply(doc)
      Assert.assertEquals("Changes unexpected", expected, doc.get())
    }

  @Test
  def assistVal(): Unit = {
    assistsFor("""
      class Test {
        val foo = ^42
      }
      """.stripMargin, """
      class Test {
        val foo: Int = 42
      }
      """.stripMargin)
  }

  @Test
  def assistDef(): Unit = {
    assistsFor("""
      class Test {
        def foo(x: Int) = ^x + 1
      }
      """.stripMargin, """
      class Test {
        def foo(x: Int): Int = x + 1
      }
      """.stripMargin)
  }

  @Test
  def assistList(): Unit = {
    assistsFor("""
      class Test {
        def foo(x: Int) = ^List.fill(x)(0)
      }
      """.stripMargin, """
      class Test {
        def foo(x: Int): List[Int] = List.fill(x)(0)
      }
      """.stripMargin)
  }

  @Test
  def assistMultiLine(): Unit = {
    assistsFor("""
      class Test {
        def foo(x: Int) = ^{
          List.fill(x)(0)
        }
      }
      """.stripMargin, """
      class Test {
        def foo(x: Int): List[Int] = {
          List.fill(x)(0)
        }
      }
      """.stripMargin)
  }

  @Test
  def assistComplexSignature(): Unit = {
    assistsFor("""
      class Test {
        def foo[T](size: Int = 42, init: T)(implicit ord: Ordered[T]) = {
          ^List.fill(size)(init)
        }
      }
      """.stripMargin, """
      class Test {
        def foo[T](size: Int = 42, init: T)(implicit ord: Ordered[T]): List[T] = {
          List.fill(size)(init)
        }
      }
      """.stripMargin)
  }

  @Test
  def assistInnerScopeVal(): Unit = {
    assistsFor("""
      class Test {
        def foo(x: Int) = {
          val size = 10
          val bar = ^List.fill(size)(0)
        }
      }
      """.stripMargin, """
      class Test {
        def foo(x: Int) = {
          val size = 10
          val bar: List[Int] = List.fill(size)(0)
        }
      }
      """.stripMargin)
  }

  @Test
  def assistInnerScopeDef(): Unit = {
    assistsFor("""
      class Test {
        def foo(x: Int) = {
          val size = 10
          def bar[T](init: T) = ^List.fill(size)(init)
        }
      }
      """.stripMargin, """
      class Test {
        def foo(x: Int) = {
          val size = 10
          def bar[T](init: T): List[T] = List.fill(size)(init)
        }
      }
      """.stripMargin)
  }

  // NOTE(review): the fixture below contains `cout += 1` (likely a typo for
  // `count`); it is kept as-is since the assist's inferred type should not
  // depend on it — confirm intent before changing the fixture.
  @Test
  def assistTransitive(): Unit = {
    assistsFor("""
      class Test {
        val x = ^initialize()
        def initialize() = {
          cout += 1
          count
        }
        var count = 0
      }
      """.stripMargin, """
      class Test {
        val x: Int = initialize()
        def initialize() = {
          cout += 1
          count
        }
        var count = 0
      }
      """.stripMargin)
  }

  @Test
  def assistMultiAssign(): Unit = {
    assistsFor("""
      class Test {
        val x, y, z = ^initialize()
        def initialize() = 0
      }
      """.stripMargin, """
      class Test {
        val x, y, z: Int = initialize()
        def initialize() = 0
      }
      """.stripMargin)
  }

  // No assist is offered for pattern-match bindings.
  @Test
  def noAssistPatMat(): Unit = {
    noAssistsFor("""
      class Test {
        val Some(x) = ^Option(new Object)
      }
      """.stripMargin)
  }

  // No assist is offered for tuple destructuring.
  @Test
  def noAssistTuple(): Unit = {
    noAssistsFor("""
      class Test {
        val (x, y) = ^(1, 2)
      }
      """.stripMargin)
  }

  // Operator-named members need a space before the `:` in the result.
  @Test
  def assistOperatorVal(): Unit = {
    assistsFor("""
      class Test {
        val ~ = ^42
      }
      """.stripMargin, """
      class Test {
        val ~ : Int = 42
      }
      """.stripMargin)
  }

  @Test
  def assistOperatorDef(): Unit = {
    assistsFor("""
      class Test {
        def ++ = ^42
      }
      """.stripMargin, """
      class Test {
        def ++ : Int = 42
      }
      """.stripMargin)
  }
}
| Kwestor/scala-ide | org.scala-ide.sdt.core.tests/src/org/scalaide/core/quickassist/ExplicitTypeAssistTest.scala | Scala | bsd-3-clause | 5,109 |
package models
import scala.util.control.Exception._
/** Credentials for the Evernote API: target service environment plus access token. */
case class EvernoteAuth(environment: com.evernote.auth.EvernoteService, token: String)

object EvernoteAuth {
  /**
   * Safe factory: resolves `environment` to an `EvernoteService` constant.
   *
   * @return `Some(auth)` when the name matches a service constant,
   *         `None` when `valueOf` rejects it.
   */
  def apply(environment: String, token: String): Option[EvernoteAuth] =
    try Some(EvernoteAuth(com.evernote.auth.EvernoteService.valueOf(environment), token))
    catch { case _: IllegalArgumentException => None }
}
| int128/kanbannote | src/main/scala/models/EvernoteAuth.scala | Scala | apache-2.0 | 375 |
/*
* The MIT License (MIT)
* Copyright (c) 2011 Mojolly Ltd.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software
* and associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package mojolly.logback
import javax.servlet.http.{ HttpServletResponse, HttpServletRequest }
import org.scalatra.{ ScalatraKernel, Handler, MatchedRoute }
import ScalatraKernel.MultiParamsKey
import org.scalatra.util.MultiMap
import org.slf4j.MDC
import java.net.URLEncoder
import com.weiglewilczek.slf4s.Logging
import collection.JavaConversions._
import java.util.{ Map ⇒ JMap }
object ScalatraLogbackSupport {
  // Request-attribute key under which the extracted CGI parameters are stored.
  val CgiParamsKey = "com.mojolly.logback.ScalatraLogbackSupport"
}
/**
 * Mixin for a ScalatraKernel that populates the SLF4J MDC with request,
 * session and CGI parameters on every request, so log statements can include
 * them. MDC state is cleared again once the request has been handled.
 */
trait ScalatraLogbackSupport extends Handler with Logging { self: ScalatraKernel ⇒

  import ScalatraLogbackSupport.CgiParamsKey

  abstract override def handle(req: HttpServletRequest, res: HttpServletResponse) {
    // Normalize the servlet parameter map to Scala types before handling.
    val realMultiParams = req.getParameterMap.asInstanceOf[JMap[String, Array[String]]].toMap transform { (k, v) ⇒ v: Seq[String] }
    withRequest(req) {
      request(MultiParamsKey) = MultiMap(Map() ++ realMultiParams)
      request(CgiParamsKey) = readCgiParams(req)
      fillMdc()
      super.handle(req, res)
      // Drop request-scoped MDC values so they never leak to the next request.
      MDC.clear()
    }
  }

  override protected def withRouteMultiParams[S](matchedRoute: Option[MatchedRoute])(thunk: ⇒ S): S = {
    val originalParams = multiParams
    request(ScalatraKernel.MultiParamsKey) = originalParams ++ matchedRoute.map(_.multiParams).getOrElse(Map.empty)
    fillMdc()
    // Restore the original params even if the route body throws.
    try { thunk } finally { request(ScalatraKernel.MultiParamsKey) = originalParams }
  }

  protected def fillMdc() { // Do this twice so that we get all the route params if they are available and applicable
    MDC.clear()
    MDC.put(RequestSupport.REQUEST_PATH, requestPath)
    MDC.put(RequestSupport.REQUEST_APP, getClass.getSimpleName)
    MDC.put(RequestSupport.REQUEST_PARAMS, multiParams map { case (k, vl) ⇒ vl.map(v ⇒ "%s=%s".format(%-(k), %-(v))) } mkString "&")
    MDC.put(RequestSupport.SESSION_PARAMS, session map { case (k, v) ⇒ "%s=%s".format(%-(k), %-(v.toString)) } mkString "&")
    MDC.put(RequestSupport.CGI_PARAMS, cgiParams map { case (k, v) ⇒ "%s=%s".format(%-(k), %-(v)) } mkString "&")
  }

  // CGI params captured for this request, or empty when not yet populated.
  def cgiParams = request get CgiParamsKey map (_.asInstanceOf[Map[String, String]]) getOrElse Map.empty

  // Snapshot of CGI-style metadata for the request; values may be null per the
  // servlet API, which %- handles by mapping them to "".
  private def readCgiParams(req: HttpServletRequest) = Map(
    "AUTH_TYPE" -> req.getAuthType,
    "CONTENT_LENGTH" -> req.getContentLength.toString,
    "CONTENT_TYPE" -> req.getContentType,
    "DOCUMENT_ROOT" -> applicationContext.getRealPath(applicationContext.getContextPath),
    "PATH_INFO" -> req.getPathInfo,
    "PATH_TRANSLATED" -> req.getPathTranslated,
    "QUERY_STRING" -> req.getQueryString,
    "REMOTE_ADDR" -> req.getRemoteAddr,
    "REMOTE_HOST" -> req.getRemoteHost,
    "REMOTE_USER" -> req.getRemoteUser,
    "REQUEST_METHOD" -> req.getMethod,
    "SCRIPT_NAME" -> req.getServletPath,
    "SERVER_NAME" -> req.getServerName,
    "SERVER_PORT" -> req.getServerPort.toString,
    "SERVER_PROTOCOL" -> req.getProtocol,
    "SERVER_SOFTWARE" -> applicationContext.getServerInfo)

  // URL-encodes a value for MDC output; null/blank becomes the empty string.
  private def %-(s: String) = if (s == null || s.trim.isEmpty) "" else URLEncoder.encode(s, "UTF-8")
}
| backchatio/logback-akka | src/main/scala/ScalatraLogbackSupport.scala | Scala | mit | 4,191 |
package me.hawkweisman
import scala.concurrent.{ Future, Promise }
import scala.language.implicitConversions
import scala.util.Try
/**
* ==Concurrent==
* Utilities for concurrent programming.
*
* ==Implicit Conversions==
* This package contains implicit conversions for standard library classes
* in the `scala.concurrent` package.
*
* 1. [[TryToFuture]] implicitly converts an instance of [[scala.util.Try]]
* to an instance of [[scala.concurrent.Future]].
* {{{
* import scala.util.Try
* import scala.concurrent.Future
* import me.hawkweisman.util.concurrent.tryToFuture
*
* def tryToGetAString(): Try[String]
* = Success("hi!")
*
* val stringFuture: Future[String] = tryToGetAString()
* }}}
*
*/
object concurrent {

  /**
   * Implicitly converts a [[scala.util.Try Try]] into an already-completed
   * [[scala.concurrent.Future Future]]: a `Success` becomes a successful
   * future, a `Failure` a failed one.
   *
   * Useful when `flatMap`ping over several `Future`s and mixing in a
   * synchronous operation that yields a `Try`. For best performance, perform
   * the `Try`-producing call first (or store its result) so it does not block
   * subsequent asynchronous steps.
   *
   * @param t the [[scala.util.Try Try]] to lift
   * @return a [[scala.concurrent.Future Future]] completed with `t`
   * @example{{{
   * import scala.util.Try
   * import scala.concurrent.Future
   * import me.hawkweisman.util.concurrent.tryToFuture
   *
   * def tryToGetAString(): Try[String]
   *   = Success("hi!")
   *
   * val stringFuture: Future[String] = tryToGetAString()
   * }}}
   */
  implicit def tryToFuture[T](t: Try[T]): Future[T] = {
    val promise = Promise[T]()
    promise.complete(t)
    promise.future
  }
}
| hawkw/scala-common | src/main/scala/me/hawkweisman/concurrent.scala | Scala | mit | 1,907 |
///*
// * ******************************************************************************
// * * Copyright (C) 2013 Christopher Harris (Itszuvalex)
// * * Itszuvalex@gmail.com
// * *
// * * This program is free software; you can redistribute it and/or
// * * modify it under the terms of the GNU General Public License
// * * as published by the Free Software Foundation; either version 2
// * * of the License, or (at your option) any later version.
// * *
// * * This program is distributed in the hope that it will be useful,
// * * but WITHOUT ANY WARRANTY; without even the implied warranty of
// * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// * * GNU General Public License for more details.
// * *
// * * You should have received a copy of the GNU General Public License
// * * along with this program; if not, write to the Free Software
// * * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
// * *****************************************************************************
// */
//package com.itszuvalex.femtocraft.configuration
//
//import java.lang.annotation.{ElementType, Retention, RetentionPolicy, Target}
//
//import scala.annotation.Annotation
//
///**
// * Created by Christopher Harris (Itszuvalex) on 9/10/14.
// */
//@Retention(RetentionPolicy.RUNTIME)
//@Target(Array(ElementType.FIELD, ElementType.TYPE))
//abstract class Configurable extends Annotation {
// def comment: String
//}
| Itszuvalex/Femtocraft-alpha-1 | src/main/java/com/itszuvalex/femtocraft/configuration/Configurable.scala | Scala | gpl-2.0 | 1,479 |
package org.openmole.plugin.task.systemexec
import org.scalatest._
class ParserSpec extends FlatSpec with Matchers {

  "Backslashed strings" should "be parsed propely" in {
    val l1 = """R -e "install.packages(c(\"lib\"), dependencies = T)""""
    val groundTruthL1 = Vector("R", "-e", """install.packages(c("lib"), dependencies = T)""")
    val parsedL1 = parse(l1)
    parsedL1.size should equal(groundTruthL1.size)
    parsedL1 should equal(groundTruthL1)
  }

  // NOTE(review): the third element below reads `"\readr\"` where the
  // neighbouring elements use `\"ncf\"`; the expected output still contains
  // a plain `"readr"`, so this may be an intentional parser robustness case
  // or a fixture typo — confirm before changing.
  "Lists of multiple double-quoted strings" should "be parsed propely" in {
    val l1 = """R -e "install.packages(c(\"spdep\", \"ncf\", "\readr\"), dependencies = T)""""
    val groundTruthL1 = Vector("R", "-e", """install.packages(c("spdep", "ncf", "readr"), dependencies = T)""")
    val parsedL1 = parse(l1)
    parsedL1.size should equal(groundTruthL1.size)
    parsedL1 should equal(groundTruthL1)
  }

  "Arguments" should "be tokenized propely" in {
    // Dropped the pointless `s` interpolator (no substitutions in the string).
    val l2 = "/tmp/udocker create --name=eiav80eaiuE imageId"
    val groundTruthL2 = Vector("/tmp/udocker", "create", "--name=eiav80eaiuE", "imageId")
    val parsedL2 = parse(l2)
    // Bug fix: previously compared parsedL2.size to itself (a tautology);
    // the size must be checked against the expected token count.
    parsedL2.size should equal(groundTruthL2.size)
    parsedL2 should equal(groundTruthL2)
  }

  "A real world command line" should "be parsed propely" in {
    val l3 = """/home/reuillon/.openmole/simplet/.tmp/3d48df46-a2b5-46f3-bcf8-e0a99bb0f2de/execution9ee99d7e-52e4-4ff4-bd6a-6942b7104eec/udocker/udocker run --workdir="/" -v "/home/reuillon/.openmole/simplet/.tmp/3d48df46-a2b5-46f3-bcf8-e0a99bb0f2de/execution9ee99d7e-52e4-4ff4-bd6a-6942b7104eec/externalTask035caba5-a447-4267-8f5d-a67e5fdc80b6/inputs/data.csv":"/data.csv" -v "/home/reuillon/.openmole/simplet/.tmp/3d48df46-a2b5-46f3-bcf8-e0a99bb0f2de/execution9ee99d7e-52e4-4ff4-bd6a-6942b7104eec/externalTask035caba5-a447-4267-8f5d-a67e5fdc80b6/inputs/test.R":"/test.R" jfdoimamfhkdkbekapobfgofgdebbjni R -e "install.packages(c('spdep'), dependencies = T)""""
    val groundTruthL3 = Vector(
      "/home/reuillon/.openmole/simplet/.tmp/3d48df46-a2b5-46f3-bcf8-e0a99bb0f2de/execution9ee99d7e-52e4-4ff4-bd6a-6942b7104eec/udocker/udocker",
      "run", """--workdir=/""", "-v", """/home/reuillon/.openmole/simplet/.tmp/3d48df46-a2b5-46f3-bcf8-e0a99bb0f2de/execution9ee99d7e-52e4-4ff4-bd6a-6942b7104eec/externalTask035caba5-a447-4267-8f5d-a67e5fdc80b6/inputs/data.csv:/data.csv""", "-v", """/home/reuillon/.openmole/simplet/.tmp/3d48df46-a2b5-46f3-bcf8-e0a99bb0f2de/execution9ee99d7e-52e4-4ff4-bd6a-6942b7104eec/externalTask035caba5-a447-4267-8f5d-a67e5fdc80b6/inputs/test.R:/test.R""", "jfdoimamfhkdkbekapobfgofgdebbjni", "R", "-e", "install.packages(c('spdep'), dependencies = T)"
    )
    val parsedL3 = parse(l3)
    parsedL3.size should equal(groundTruthL3.size)
    parsedL3 should equal(groundTruthL3)
  }
}
| openmole/openmole | openmole/plugins/org.openmole.plugin.task.systemexec/src/test/scala/org/openmole/plugin/task/systemexec/ParserSpec.scala | Scala | agpl-3.0 | 2,787 |
/******************************************************************************
* Copyright © 2016 Maxim Karpov *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
******************************************************************************/
package ru.makkarpov.scalingua
import ru.makkarpov.scalingua.extract.MessageExtractor
import scala.reflect.macros.whitebox
/**
 * Version-specific compatibility shims for Scala 2.12: macro-context helpers
 * plus aliases for standard-library APIs whose names changed in 2.13.
 */
object Compat {
  /** Macro context alias (whitebox, allowing refined result types). */
  type Context = whitebox.Context

  /** Renders a macro tree back into compilable source text. */
  def prettyPrint(c: Context)(e: c.Tree): String = {
    import c.universe.showCode
    showCode(e)
  }

  /** Creates a fresh, collision-free term name seeded with `s`. */
  def termName(c: Context)(s: String): c.TermName = {
    val fresh = c.freshName(s)
    c.universe.TermName(fresh)
  }

  /** Type-checks a tree in the enclosing macro context. */
  def typecheck(c: Context)(e: c.Tree): c.Tree = c.typecheck(e)

  /** Expands standard escape sequences (`\n`, `\t`, ...) in `s`. */
  def processEscapes(s: String): String = scala.StringContext.processEscapes(s)

  /** Back-port of 2.13's `filterInPlace` on mutable sets (2.12 spells it `retain`). */
  implicit class MutSetOps[A](s: scala.collection.mutable.Set[A]) {
    def filterInPlace(p: A => Boolean) = s.retain(p)
  }

  /** The 2.12 spelling of the Java/Scala collection converters object. */
  val CollectionConverters = scala.collection.JavaConverters
}
| makkarpov/scalingua | scalingua/shared/src/main/scala-2.12/ru/makkarpov/scalingua/Compat.scala | Scala | apache-2.0 | 1,868 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet
import scala.collection.JavaConverters._
import org.apache.parquet.hadoop.ParquetOutputFormat
import org.apache.spark.sql.test.SharedSparkSession
// TODO: this needs a lot more testing but it's currently not easy to test with the parquet
// writer abstractions. Revisit.
class ParquetEncodingSuite extends ParquetCompatibilityTest with SharedSparkSession {
  import testImplicits._

  /** One row holding a non-null value for each tested primitive column type. */
  val ROW = ((1).toByte, 2, 3L, "abc")

  /** Same column layout as `ROW` but with every value null. */
  val NULL_ROW = (
    null.asInstanceOf[java.lang.Byte],
    null.asInstanceOf[Integer],
    null.asInstanceOf[java.lang.Long],
    null.asInstanceOf[String])

  test("All Types Dictionary") {
    // n = 1 exercises the plain path; n = 1000 repeats the row enough for the
    // writer to dictionary-encode every column.
    (1 :: 1000 :: Nil).foreach { n => {
      withTempPath { dir =>
        List.fill(n)(ROW).toDF.repartition(1).write.parquet(dir.getCanonicalPath)
        // listDirectory returns a java.util.List[String]; convert via asScala
        // instead of toArray + asInstanceOf, consistent with the row-group
        // test below.
        val file = SpecificParquetRecordReaderBase.listDirectory(dir).asScala.head
        val conf = sqlContext.conf
        val reader = new VectorizedParquetRecordReader(
          conf.offHeapColumnVectorEnabled, conf.parquetVectorizedReaderBatchSize)
        reader.initialize(file, null)
        val batch = reader.resultBatch()
        assert(reader.nextBatch())
        assert(batch.numRows() == n)
        var i = 0
        while (i < n) {
          assert(batch.column(0).getByte(i) == 1)
          assert(batch.column(1).getInt(i) == 2)
          assert(batch.column(2).getLong(i) == 3)
          assert(batch.column(3).getUTF8String(i).toString == "abc")
          i += 1
        }
        reader.close()
      }
    }}
  }

  test("All Types Null") {
    // Small and large batches of all-null rows.
    (1 :: 100 :: Nil).foreach { n => {
      withTempPath { dir =>
        val data = List.fill(n)(NULL_ROW).toDF
        data.repartition(1).write.parquet(dir.getCanonicalPath)
        val file = SpecificParquetRecordReaderBase.listDirectory(dir).asScala.head
        val conf = sqlContext.conf
        val reader = new VectorizedParquetRecordReader(
          conf.offHeapColumnVectorEnabled, conf.parquetVectorizedReaderBatchSize)
        reader.initialize(file, null)
        val batch = reader.resultBatch()
        assert(reader.nextBatch())
        assert(batch.numRows() == n)
        var i = 0
        while (i < n) {
          assert(batch.column(0).isNullAt(i))
          assert(batch.column(1).isNullAt(i))
          assert(batch.column(2).isNullAt(i))
          assert(batch.column(3).isNullAt(i))
          i += 1
        }
        reader.close()
      }
    }}
  }

  test("Read row group containing both dictionary and plain encoded pages") {
    withSQLConf(ParquetOutputFormat.DICTIONARY_PAGE_SIZE -> "2048",
      ParquetOutputFormat.PAGE_SIZE -> "4096") {
      withTempPath { dir =>
        // In order to explicitly test for SPARK-14217, we set the parquet dictionary and page size
        // such that the following data spans across 3 pages (within a single row group) where the
        // first page is dictionary encoded and the remaining two are plain encoded.
        val data = (0 until 512).flatMap(i => Seq.fill(3)(i.toString))
        data.toDF("f").coalesce(1).write.parquet(dir.getCanonicalPath)
        val file = SpecificParquetRecordReaderBase.listDirectory(dir).asScala.head
        val conf = sqlContext.conf
        val reader = new VectorizedParquetRecordReader(
          conf.offHeapColumnVectorEnabled, conf.parquetVectorizedReaderBatchSize)
        reader.initialize(file, null /* set columns to null to project all columns */)
        val column = reader.resultBatch().column(0)
        assert(reader.nextBatch())
        (0 until 512).foreach { i =>
          assert(column.getUTF8String(3 * i).toString == i.toString)
          assert(column.getUTF8String(3 * i + 1).toString == i.toString)
          assert(column.getUTF8String(3 * i + 2).toString == i.toString)
        }
        reader.close()
      }
    }
  }
}
| wangmiao1981/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetEncodingSuite.scala | Scala | apache-2.0 | 4,705 |
package android
import Keys._
import Keys.Internal._
import Tasks._
import android.BuildOutput.Converter
import com.android.sdklib.SdkVersionInfo
import sbt._
import sbt.Keys._
import scala.util.Try
/**
* @author pfnguyen
*/
trait AndroidAppSettings extends AutoPlugin {
  // All app-level settings are scoped to the Android configuration, then a few
  // project-level settings (proguard dependency, internal classpath) are
  // appended unscoped.
  override def projectSettings = inConfig(Android)(List(
    // Instrumentation tests are included in debug builds only when Scala test
    // sources actually exist.
    debugIncludesTests := (projectLayout.value.testSources ** "*.scala").get.nonEmpty,
    installTimeout := 0,
    // install/uninstall/clean all require a connected device first.
    install := (installTaskDef dependsOn hasDevice).value,
    uninstall := (uninstallTaskDef dependsOn hasDevice).value,
    clean := (cleanTaskDef dependsOn hasDevice).value,
    // debug/run install the APK before launching (true = wait for debugger).
    debug := (runTaskDef(true) dependsOn install).evaluated,
    run := (runTaskDef(false) dependsOn install).evaluated,
    hasDevice := {
      if (Commands.targetDevice(sdkPath.value, streams.value.log).isEmpty)
        PluginFail("no device connected")
    },
    allDevices := false,
    // --- dex pipeline ---
    dexInputs := dexInputsTaskDef.value,
    dexAggregate := dexAggregateTaskDef.value,
    proguardAggregate := proguardAggregateTaskDef.value,
    apkbuildAggregate := apkbuildAggregateTaskDef.value,
    predex := predexTaskDef.value,
    predexRetrolambda := false,
    // Local subproject jars are excluded from pre-dexing.
    predexSkip := localProjects.value.map(_.getJarFile),
    dex := dexTaskDef.value,
    dexShards := false,
    // Legacy dex mode applies when the effective minSdkVersion resolves to an
    // API level below 21; non-numeric codenames are resolved via the SDK lib.
    dexLegacyMode := {
      val minSdk = minSdkVersion.value
      val minLevel = Try(minSdk.toInt).toOption getOrElse
        SdkVersionInfo.getApiByBuildCode(minSdk, true)
      minLevel < 21
    },
    dexMaxHeap := "1024m",
    dexInProcess := false, // turn off, does not work properly?
    dexMaxProcessCount := java.lang.Runtime.getRuntime.availableProcessors,
    dexMulti := false,
    // Manifest component tags whose classes must land in the main dex file.
    dexMainRoots := List(
      "activity",
      "application",
      "service",
      "receiver",
      "provider",
      "instrumentation"),
    // Proguard rules used only to compute the main-dex class list.
    dexMainClassesRules := List(
      "-dontobfuscate",
      "-dontoptimize",
      "-dontpreverify",
      "-dontwarn **",
      "-dontnote **",
      "-forceprocessing",
      "-keep public class * extends android.app.backup.BackupAgent { <init>(); }",
      "-keep public class * extends java.lang.annotation.Annotation { *; }",
      "-keep class android.support.multidex.** { *; }"
    ),
    dexMainClasses := Nil,
    dexMinimizeMain := false,
    dexAdditionalParams := Nil,
    dexMainClassesConfig := (dexMainClassesConfigTaskDef dependsOn (packageT in Compile)).value,
    // --- proguard ---
    proguardVersion := "5.0",
    proguardCache := "scala" :: Nil,
    proguardLibraries := Nil,
    // Assigned twice on purpose: the second assignment re-wires the same task
    // with a dependency on packageResources.
    proguardConfig := proguardConfigTaskDef.value,
    proguardConfig := (proguardConfig dependsOn packageResources).value,
    proguard := proguardTaskDef.value,
    proguardInputs := proguardInputsTaskDef.value,
    proguardInputs := (proguardInputs dependsOn (packageT in Compile)).value,
    // Proguard defaults on only for Scala projects (to strip the runtime).
    proguardScala := autoScalaLibrary.value,
    useProguard := proguardScala.value,
    useProguardInDebug := proguardScala.value,
    // --- resources / packaging ---
    shrinkResources := false,
    resourceShrinker := resourceShrinkerTaskDef.value,
    packageResources := (packageResourcesTaskDef dependsOn rGenerator).value,
    apkFile := {
      implicit val output: Converter = outputLayout.value
      projectLayout.value.integrationApkFile(name.value)
    },
    packagingOptions := PackagingOptions(Nil, Nil, Nil),
    apkbuild := apkbuildTaskDef.value,
    apkbuild := (apkbuild dependsOn (managedResources in Compile)).value,
    apkDebugSigningConfig := DebugSigningConfig(),
    // Release signing config is read from properties: key.alias, key.store and
    // key.store.password are required; key.store.type and key.alias.password
    // are optional refinements.
    apkSigningConfig := properties {
      p => {
        def makeSigningConfig(alias: String, store: String, passwd: String) = {
          val c = PlainSigningConfig(file(store), passwd, alias)
          val c2 = Option(p.getProperty("key.store.type")).fold(c) { t =>
            c.copy(storeType = t)
          }
          Option(p.getProperty("key.alias.password")).fold(c2) { p =>
            c2.copy(keyPass = Some(p))
          }
        }
        for {
          a <- Option(p.getProperty("key.alias"))
          b <- Option(p.getProperty("key.store"))
          c <- Option(p.getProperty("key.store.password"))
        } yield makeSigningConfig(a, b, c)
      }
    }.value,
    signRelease := signReleaseTaskDef.value,
    zipalign := zipalignTaskDef.value,
    // The final package artifact is the zipaligned APK.
    packageT := zipalign.value,
    // I hope packageXXX dependsOn(setXXX) sets createDebug before package
    packageDebug := packageT.value,
    packageDebug := (packageDebug dependsOn setDebug).value,
    packageRelease := packageT.value,
    packageRelease := (packageRelease dependsOn setRelease).value,
    // Locate zipalign, preferring the build-tools copy over the SDK-wide one.
    zipalignPath := {
      val p = sdkPath.value
      val m = sdkManager.value
      val bt = buildToolInfo.value
      val s = sLog.value
      val pathInBt = SdkLayout.zipalign(bt)
      s.debug("checking zipalign at: " + pathInBt)
      if (pathInBt.exists)
        pathInBt.getAbsolutePath
      else {
        val zipalign = SdkLayout.zipalign(p)
        if (!zipalign.exists)
          fail("zipalign not found at either %s or %s" format (
            pathInBt, zipalign))
        zipalign.getAbsolutePath
      }
    }
  )) ++ List(
    streams in update := ((streams in update) dependsOn stableProguardConfig).value,
    libraryDependencies += Def.setting("net.sf.proguard" % "proguard-base" % proguardVersion.value % AndroidInternal.name).value,
    managedClasspath in AndroidInternal := Classpaths.managedJars(AndroidInternal, classpathTypes.value, update.value)
  )
  // Detects changes to the proguard cache rules / options by hashing them and
  // comparing against marker files from the previous run; forces a clean build
  // when they changed (and proguard is used in debug), otherwise refreshes the
  // marker files.
  private[this] val stableProguardConfig = Def.taskDyn {
    val checkdir = streams.value.cacheDirectory / "proguardRuleCheck"
    val rulecheck = (checkdir * "*").get.toList.map(_.getName).sorted
    val ruleHash = Hash.toHex(Hash(proguardCache.value.mkString(";")))
    val optionHash = Hash.toHex(Hash(proguardOptions.value.mkString(";")))
    val checkfiles = List(ruleHash, optionHash).sorted
    if (rulecheck.nonEmpty && checkfiles != rulecheck && useProguardInDebug.value) Def.task {
      streams.value.log.warn("proguard rules have changed, forcing clean build")
      val _ = (clean in Compile).value
    } else Def.task {
      checkdir.mkdirs()
      IO.touch(checkdir / ruleHash)
      IO.touch(checkdir / optionHash)
    }
  }
}
| pfn/android-sdk-plugin | src/AndroidAppSettings.scala | Scala | bsd-3-clause | 6,838 |
package pl.gosub.akka.online.follow.the.leader
import akka.Done
import akka.stream.stage.{GraphStageLogic, GraphStageWithMaterializedValue, InHandler, OutHandler}
import akka.stream.{Attributes, FanInShape2, Inlet, Outlet}
import scala.concurrent.{Future, Promise}
/**
 * Two-input, one-output Akka Streams stage that pairs elements from `dataIn`
 * with elements from `resultsIn` and emits `ftl.predict(data, result)` on
 * `predictionsOut`.
 * NOTE(review): the materialized Promise `p` is never completed anywhere in
 * this stage, so the returned Future[Done] can never finish — confirm intent.
 */
class FollowTheLeaderStage(private val ftl: FollowTheLeaderLogic) extends GraphStageWithMaterializedValue[FanInShape2[Double, Double, Double], Future[Done]]{
  // Stage syntax
  val dataIn: Inlet[Double] = Inlet("FollowTheLeaderStage.dataIn")
  val resultsIn: Inlet[Double] = Inlet("FollowTheLeaderStage.resultsIn")
  val predictionsOut: Outlet[Double] = Outlet("FollowTheLeaderStage.predictionsOut")
  override val shape: FanInShape2[Double, Double, Double] = new FanInShape2(dataIn, resultsIn, predictionsOut)
  @scala.throws[Exception](classOf[Exception])
  override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[Done]) = {
    // Completion notification
    val p: Promise[Done] = Promise()
    val logic = new GraphStageLogic(shape) {
      // A result arrived first: fetch the matching data element, then predict.
      setHandler(resultsIn, new InHandler {
        @scala.throws[Exception](classOf[Exception])
        override def onPush(): Unit = {
          val nextResult = grab(resultsIn)
          // read() delivers the next dataIn element to the callback; the
          // second argument is the upstream-closed callback (a no-op here).
          read(dataIn)({ x =>
            // Prediction is silently dropped when downstream has no demand.
            if (isAvailable(predictionsOut)) push(predictionsOut, ftl.predict(x, nextResult))
          }, () => {})
        }
      })
      // A data element arrived first: pair it with the previous result.
      setHandler(dataIn, new InHandler {
        override def onPush(): Unit = {
          val x = grab(dataIn)
          read(resultsIn)({previousResult =>
            if (isAvailable(predictionsOut)) push(predictionsOut, ftl.predict(x, previousResult))
          }, () => {})
        }
        override def onUpstreamFinish(): Unit = {
          completeStage()
        }
      })
      // Downstream demand drives the stage by pulling new data.
      setHandler(predictionsOut, new OutHandler {
        override def onPull(): Unit = {
          pull(dataIn)
        }
      })
    }
    (logic, p.future)
  }
} | gosubpl/akka-online | src/main/scala/pl/gosub/akka/online/follow/the/leader/FollowTheLeaderStage.scala | Scala | apache-2.0 | 1,961 |
package geotrellis.transit
import akka.actor._
import scala.concurrent.duration._
import scala.collection.mutable
// Periodic self-message telling the cache actor to evict expired entries.
case object CleanRequest
// Sent to a BuilderActor asking it to compute the value for `key`.
case class BuildRequest[TK](key:TK)
// Reply from a BuilderActor carrying the freshly built value.
case class BuildResponse[TK,TV](key:TK,value:TV)
// Client request: look up `key`, building the value if it is not cached.
case class CacheLookup[TK](key:TK)
// Bookkeeping for an in-flight build: the worker plus everyone awaiting it.
case class BuildWorkInProgress(builder:ActorRef,requesters:mutable.ListBuffer[ActorRef])
/**
 * Actor that caches values built by `buildFunc`, keyed by TK.  Entries expire
 * `expireTime` ms after last access; a scheduled CleanRequest sweeps them out
 * every `cleanInterval` ms.  Concurrent lookups for the same key share one
 * build (see `pending`).  All state is touched only on the actor's thread.
 */
class CacheActor[TK,TV](expireTime:Long,
                        cleanInterval:Long = 1000L,
                        buildFunc:TK=>TV) extends Actor {
  // key -> cached value
  val cache = mutable.Map[TK,TV]()
  // key -> last access timestamp (millis), used for expiration
  val lastAccess = mutable.Map[TK,Long]()
  // key -> build currently running for that key, with waiting requesters
  val pending = mutable.Map[TK,BuildWorkInProgress]()
  var cleanTick:Cancellable = null
  import context.dispatcher
  override
  def preStart() = {
    // Schedule periodic eviction sweeps for the life of the actor.
    cleanTick = context.system.scheduler.schedule(
      0 milliseconds,
      cleanInterval milliseconds,
      self,
      CleanRequest)
  }
  override
  def postStop() = if(cleanTick != null) { cleanTick.cancel }
  // NOTE(review): messages are matched untyped and narrowed with asInstanceOf;
  // type erasure means a wrongly-typed key would only fail later, on use.
  def receive = {
    case CleanRequest => cleanCache()
    case CacheLookup(key) => cacheLookup(key.asInstanceOf[TK],sender)
    case BuildResponse(key,value) => buildDone(key.asInstanceOf[TK],value.asInstanceOf[TV])
  }
  // Evict every entry not accessed within `expireTime` ms.
  // Iterates over a snapshot (toList) because the loop mutates the maps.
  private def cleanCache() =
    for(key <- lastAccess.keys.toList) {
      if(System.currentTimeMillis - lastAccess(key) > expireTime) {
        cache.remove(key)
        lastAccess.remove(key)
      }
    }
  // Serve from cache, join an in-flight build, or start a new builder.
  private def cacheLookup(key:TK,sender:ActorRef) =
    if(cache.contains(key)) {
      lastAccess(key) = System.currentTimeMillis
      sender ! cache(key)
    } else {
      if(pending.contains(key)) {
        // A build for this key is already running; just queue the requester.
        pending(key).requesters += sender
      } else {
        // Spawn a one-shot worker so buildFunc never blocks this actor.
        val builder =
          context.actorOf(Props(classOf[BuilderActor[TK,TV]],self,buildFunc))
        pending(key) = BuildWorkInProgress(builder,mutable.ListBuffer(sender))
        builder ! BuildRequest(key)
      }
    }
  // Record a finished build, stop the worker, and answer all waiters.
  private def buildDone(key:TK,value:TV) = {
    if(!pending.contains(key)) { sys.error("Build done on a key not in pending.") }
    cache(key) = value
    lastAccess(key) = System.currentTimeMillis
    val BuildWorkInProgress(builder,requesters) = pending(key)
    builder ! PoisonPill
    for(r <- requesters) { r ! value }
    pending.remove(key)
  }
}
/**
 * One-shot worker that computes a single cache value off the cache actor's
 * thread and reports it back via BuildResponse.  The cache actor stops this
 * worker (PoisonPill) once the response has been processed.
 */
case class BuilderActor[TK, TV](cacheActor: ActorRef, buildFunc: TK => TV) extends Actor {
  def receive = {
    case BuildRequest(rawKey) =>
      // The message is untyped; narrow the key before building.
      val key = rawKey.asInstanceOf[TK]
      val value = buildFunc(key)
      cacheActor ! BuildResponse(key, value)
  }
}
| flibbertigibbet/open-transit-indicators | scala/geotrellis-transit/src/main/scala/geotrellis/transit/cache.scala | Scala | gpl-3.0 | 2,608 |
package com.wavesplatform.network
import com.google.common.cache.CacheBuilder
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.lang.ValidationError
import com.wavesplatform.network.InvalidBlockStorageImpl._
import scala.concurrent.duration.FiniteDuration
/** Keeps track of block ids that failed validation, with the reason. */
trait InvalidBlockStorage {
  /** Records that `blockId` failed validation with `validationError`. */
  def add(blockId: ByteStr, validationError: ValidationError): Unit
  /** Returns the stored error for `blockId`, or None if it is not marked invalid. */
  def find(blockId: ByteStr): Option[ValidationError]
}
object InvalidBlockStorage {
  /** Storage that remembers nothing: add is a no-op and find always misses. */
  object NoOp extends InvalidBlockStorage {
    override def add(blockId: ByteStr, validationError: ValidationError): Unit = ()

    override def find(blockId: ByteStr): Option[ValidationError] = None
  }
}
/**
 * Guava-cache backed implementation: entries expire `settings.timeout` after
 * being written.
 * NOTE(review): settings.maxSize is not applied to the cache here — confirm
 * whether a maximumSize bound was intended.
 */
class InvalidBlockStorageImpl(settings: InvalidBlockStorageSettings) extends InvalidBlockStorage {
  private val underlying = {
    val timeout = settings.timeout
    CacheBuilder
      .newBuilder()
      .expireAfterWrite(timeout.length, timeout.unit)
      .build[ByteStr, ValidationError]()
  }

  override def add(blockId: ByteStr, validationError: ValidationError): Unit =
    underlying.put(blockId, validationError)

  override def find(blockId: ByteStr): Option[ValidationError] =
    Option(underlying.getIfPresent(blockId))
}
object InvalidBlockStorageImpl {
  // timeout: how long an entry stays in the cache after being written.
  // maxSize: NOTE(review) declared but not read by InvalidBlockStorageImpl —
  // confirm whether it should bound the cache size.
  case class InvalidBlockStorageSettings(maxSize: Int, timeout: FiniteDuration)
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/network/InvalidBlockStorage.scala | Scala | mit | 1,283 |
package controllers
import play.api.mvc._
import actors._
import akka.actor._
import akka.actor.ActorRef
import akka.actor.Props
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import scala.concurrent.duration._
import akka.pattern.ask
import play.api.libs.json.Json
import models.Category
import play.api.libs.json._
import akka.util.Timeout
/** HTTP entry points: a landing page plus actor-backed JSON list endpoints. */
object Application extends Controller {
  // Actor system hosting the workers that produce the JSON payloads.
  val system = ActorSystem("application")
  val recipesActor: ActorRef = system.actorOf(Props[RecipesActor])
  val categoriesActor: ActorRef = system.actorOf(Props[CategoriesActor])

  /** Landing page. */
  def index = Action {
    Ok(views.html.index("Your new application is ready."))
  }

  // Shared ask-pattern plumbing: query `actor`, expect JSON, answer 200.
  private def jsonFrom(actor: ActorRef, message: Any) = Action.async {
    implicit val timeout = Timeout(3 seconds)
    (actor ? message).mapTo[JsValue].map(Ok(_))
  }

  /** All recipes as JSON. */
  def listRecipes = jsonFrom(recipesActor, ListRecipes())

  /** All categories as JSON. */
  def listCategories = jsonFrom(categoriesActor, ListCategories())
}
package com.geeksville.andropilot.service
import com.geeksville.akka.InstrumentedActor
import android.content.Context
import com.geeksville.flight.MsgSysStatusChanged
import com.geeksville.flight.Location
import com.geeksville.util.Throttled
import com.geeksville.flight.MsgModeChanged
import com.geeksville.flight.VehicleListener
import com.ridemission.scandroid.AndroidLogger
import com.geeksville.mavlink.MsgHeartbeatLost
/**
* Crudely use the pebble watch 'music' app to show flight data
*/
class PebbleVehicleListener(context: AndropilotService) extends VehicleListener(context.vehicle.get) with AndroidLogger {
  // Only update pebble every 10 secs (to save battery)
  private val throttle = new Throttled(10 * 1000)
  // Periodic telemetry is rate-limited; mode changes and heartbeat loss
  // refresh the watch immediately.
  override def onReceive = {
    case l: Location =>
      perhapsUpdate()
    case MsgSysStatusChanged =>
      perhapsUpdate()
    case MsgHeartbeatLost(_) =>
      updatePebble()
    case MsgModeChanged(_) =>
      updatePebble() // Show this change promptly
  }
  // Rate-limited refresh.
  // NOTE(review): the throttled block's value is the eta-expanded function
  // `updatePebble _`, not a call to it.  If Throttled merely evaluates the
  // block (rather than invoking the resulting function), the display is never
  // refreshed here and this should read `throttle { updatePebble() }` —
  // verify against Throttled's signature.
  private def perhapsUpdate() {
    throttle { updatePebble _ }
  }
  // Push battery / altitude / mode onto the watch's three music-app lines.
  private def updatePebble() {
    context.vehicle.foreach { v =>
      // Battery line stays blank until a voltage reading is available.
      val bat = v.batteryVoltage.map { s => "Bat: %1.2f V".format(s) }.getOrElse("")
      val loc = "Alt: %1.1f m".format(v.bestAltitude)
      val mode = v.currentModeOrStatus
      warn(s"Setting pebble $bat/$loc/$mode")
      PebbleClient.sendMusicToPebble(context, bat, loc, mode)
    }
  }
  // Clear the watch display when this listener shuts down.
  override def postStop() {
    PebbleClient.sendMusicToPebble(context, "", "", "Exited")
    super.postStop()
  }
} | geeksville/arduleader | andropilot/src/main/scala/com/geeksville/andropilot/service/PebbleVehicleListener.scala | Scala | gpl-3.0 | 1,544 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.cogmath.geometry
/** A Shape describes the dimensions of a discrete, hyper-rectangular object.
*
* For historical reasons, the elements of a shape also have the following names:
* 3D: (layers, rows, columns)
* 2D: (rows, columns)
* 1D: (columns)
*
* @param sizes The integral size of each dimension.
*
* @author Greg Snider
*/
@SerialVersionUID(-618254321595421426L)
final class Shape(private val sizes: Array[Int]) extends Serializable {

  /** Create a dimensional shape with zero or more dimensions. */
  @deprecated("drop 'new', use Shape(Int*) instead.", "4.0")
  def this(size: Int*) = this(size.toArray)

  /** Get the size of the indexed "dimension". */
  def apply(dimension: Int) = sizes(dimension)

  /** Get the size of the last dimension. If this is 0-dimensions, returns 0.*/
  val lastDimension: Int = {
    if (sizes.length == 0)
      0
    else
      sizes(sizes.length - 1)
  }

  /** Number of dimensions for this shape. */
  val dimensions = sizes.length

  /** Number of points in this discrete shape. Must be representable as an Int. */
  lazy val points = {
    if (longPoints > Int.MaxValue)
      throw new RuntimeException(s"Number of Shape points $longPoints exceeds maximum of ${Int.MaxValue}.")
    else
      longPoints.toInt
  }

  /** Number of points in this discrete shape as a Long. 0D Shapes have one point. */
  val longPoints: Long = sizes.map(_.toLong).foldLeft(1L)(_ * _)

  /** Concatenate two shapes to form a higher dimensional shape. */
  def concat(that: Shape) = new Shape(Array.concat(sizes, that.sizes))

  /** Reduces the Shape by a factor in each dimension, rounding up when
    * the original size is not an integer multiple of the downsample factor.
    * The is the standard we use throughout Cog.
    */
  def downsample(factor: Int) = new Shape(sizes.map(size =>
    math.ceil(size.toFloat / factor.toFloat).toInt))

  /** Doubles every dimension of this shape. */
  def supersample = new Shape(sizes.map(size => 2 * size))

  /** Map this shape to another shape using "f". */
  def map(f: Int => Int) = new Shape(sizes map f)

  /** Drop the first "d" dimensions from "this", returning smaller shape. */
  def drop(d: Int): Shape = {
    require(d >= 0 && d <= dimensions, s"cannot drop $d of $dimensions dimensions")
    new Shape(sizes.drop(d))
  }

  /** Drop the last "d" dimensions from "this", returning smaller shape. */
  def dropLast(d: Int): Shape = {
    require(d >= 0 && d <= dimensions, s"cannot drop $d of $dimensions dimensions")
    new Shape(sizes.dropRight(d))
  }

  /** Concatenate "this" and "that" to create a higher-dimensional shape. */
  def concatenate(that: Shape): Shape = concat(that)

  /** Join "this" and "that" to create a same-dimensional shape representing
    * the two shapes abutted along dimension 0. Join of 0D fields is illegal
    * (the drop(1) precondition rejects it).
    */
  def join(that: Shape): Shape = {
    // All dimensions except the first must agree for the shapes to abut.
    require(this.drop(1) == that.drop(1))
    val newSizes = sizes.clone()
    newSizes(0) += that(0)
    new Shape(newSizes)
  }

  /** Compare two shapes, returning true if every dimension of `this` is
    * smaller than or equal to the corresponding dimension in `other`.
    *
    * @param other Shape to compare with `this`.
    * @return True if `this` and `other` have the same number of dimensions and
    *        `this` is no larger in any corresponding dimension; false when the
    *        dimension counts differ (the comparison is then meaningless).
    */
  def <=(other: Shape): Boolean =
    dimensions == other.dimensions &&
      sizes.indices.forall(i => sizes(i) <= other.sizes(i))

  /** Return an iterator over all possible discrete locations in the shape.
    * For example, the Shape (2, 3) has the following locations:
    * {{{
    *   (0, 0)
    *   (0, 1)
    *   (0, 2)
    *   (1, 0)
    *   (1, 1)
    *   (1, 2)
    * }}}
    * A 0D shape yields a single empty tuple.
    */
  def indices = new Iterator[Array[Int]] {
    // Current position; advanced odometer-style after each next().
    private val index = new Array[Int](sizes.length)
    private var notDone = true

    def hasNext = notDone

    def next(): Array[Int] = {
      // Snapshot the current position before advancing.
      val tuple = index.clone()
      if (tuple.length == 0)
        notDone = false
      else
        incrementIndex(tuple.length - 1)
      tuple
    }

    // Increment dimension i, carrying into lower dimensions on wraparound;
    // a carry out of dimension 0 exhausts the iterator.
    private def incrementIndex(i: Int) {
      index(i) += 1
      if (index(i) == sizes(i)) {
        index(i) = 0
        if (i == 0)
          notDone = false
        else
          incrementIndex(i - 1)
      }
    }
  }

  /** Convert a Shape to a String for debugging. */
  override def toString = toString("Shape")

  /** Like ordinary toString but allows for a prefix besides "Shape" */
  def toString(prefix: String) =
    prefix + sizes.map(size => size + " ").mkString("( ", "", ")")

  /** Test "this" and "other" Shape for equality. Allows "==" to work. */
  override def equals(other: Any): Boolean =
    other match {
      // sameElements also checks length, covering the dimension comparison.
      case that: Shape => sizes.sameElements(that.sizes)
      case _ => false
    }

  /** Required because of overriding equals. */
  override val hashCode: Int = {
    // Product-sum hash (position-sensitive, so permuted shapes differ).
    val Base = 31
    var factor = Base
    var sum = 0
    for (size <- sizes) {
      sum += size * factor
      factor *= Base
    }
    sum
  }

  /** Convert the shape to a (defensive copy of its) size array. */
  def toArray: Array[Int] = sizes.clone()
}
/** Companion object for Shape.
*/
object Shape {
  /** A zero-dimensional shape. */
  val scalar: Shape = Shape()

  /** An unknown shape. */
  val unknown: Shape = Shape()

  /** Create a shape with given "size" dimensions. */
  def apply(size: Int*): Shape = new Shape(size.toArray)

  /** Create a shape directly from an array of dimension sizes. */
  def apply(size: Array[Int]): Shape = new Shape(size)
}
| hpe-cct/cct-core | src/main/scala/cogx/cogmath/geometry/Shape.scala | Scala | apache-2.0 | 6,779 |
package com.datastax.spark.connector.rdd
import java.io.IOException
import java.util.Date
import org.joda.time.DateTime
import com.datastax.spark.connector._
import com.datastax.spark.connector.cql.CassandraConnector
import com.datastax.spark.connector.embedded._
import com.datastax.spark.connector.embedded.SparkTemplate._
import com.datastax.spark.connector.mapper.DefaultColumnMapper
import com.datastax.spark.connector.types.TypeConverter
import scala.reflect.runtime.universe.typeTag
import scala.collection.JavaConversions._
// Immutable case-class mapping targets used by the RDD read tests below.
case class KeyValue(key: Int, group: Long, value: String)
// Same columns as KeyValue but with types requiring conversion on read.
case class KeyValueWithConversion(key: String, group: Int, value: Long)
case class CustomerId(id: String)
case class Key(key: Int)
case class KeyGroup(key: Int, group: Int)
case class Value(value: String)
// Targets that capture writetime / TTL metadata alongside the value.
case class WriteTimeClass(id: Int, value: String, writeTimeOfValue: Long)
case class TTLClass(id: Int, value: String, ttlOfValue: Int)
// Property names intentionally do not match the column names.
case class ClassWithWeirdProps(devil: Int, cat: Long, value: String)
// Mutable JavaBean-style targets (fields populated by setters after construction).
class MutableKeyValue(var key: Int, var group: Long) extends Serializable {
  var value: String = null
}
class MutableKeyValueWithConversion(var key: String, var group: Int) extends Serializable {
  var value: Long = 0L
}
// Inheritance pair: verifies fields mapped across a superclass/subclass split.
class SuperKeyValue extends Serializable {
  var key: Int = 0
  var value: String = ""
}
class SubKeyValue extends SuperKeyValue {
  var group: Long = 0L
}
// Targets for UDT, tuple, and smallint column mappings.
case class Address(street: String, city: String, zip: Int)
case class ClassWithUDT(key: Int, name: String, addr: Address)
case class ClassWithTuple(key: Int, value: (Int, String))
case class ClassWithSmallInt(key: Int, value: Short)
/** Integration spec exercising `sc.cassandraTable` reads against an embedded
  * Cassandra instance: row / tuple / case-class / bean mapping, column
  * selection and aliasing, `where` predicates with type conversion,
  * collections, blobs, UDTs, tuples, writetime/ttl selection, keyBy and
  * spanByKey, ordering, limits, counting, empty RDDs, and error reporting
  * for unknown keyspaces/tables.
  *
  * Review fixes applied:
  *  - the two TTL-mapping tests previously ended with bare boolean
  *    expressions (`results.head.ttlOfValue > (ttl - 10)`) whose results
  *    were silently discarded, so they asserted nothing; they now use
  *    ScalaTest ordering matchers (`should be >` / `should be <=`);
  *  - the "Array of Set of values" test selected the list column "l"
  *    instead of the set column "s" it is meant to exercise.
  */
class CassandraRDDSpec extends SparkCassandraITFlatSpecBase {

  useCassandraConfig(Seq("cassandra-default.yaml.template"))
  useSparkConf(defaultSparkConf)

  val conn = CassandraConnector(defaultConf)
  // Large enough to force result-set paging in the count tests.
  val bigTableRowCount = 100000

  private val ks = "CassandraRDDSpec"

  // Schema and fixture data shared by every test in this spec.
  conn.withSessionDo { session =>
    session.execute(s"""CREATE KEYSPACE IF NOT EXISTS "$ks" WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".short_value (key INT, value SMALLINT, PRIMARY KEY (key))""")
    session.execute(s"""INSERT INTO "$ks".short_value (key, value) VALUES (1,100)""")
    session.execute(s"""INSERT INTO "$ks".short_value (key, value) VALUES (2,200)""")
    session.execute(s"""INSERT INTO "$ks".short_value (key, value) VALUES (3,300)""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".key_value (key INT, group BIGINT, value TEXT, PRIMARY KEY (key, group))""")
    session.execute(s"""INSERT INTO "$ks".key_value (key, group, value) VALUES (1, 100, '0001')""")
    session.execute(s"""INSERT INTO "$ks".key_value (key, group, value) VALUES (2, 100, '0002')""")
    session.execute(s"""INSERT INTO "$ks".key_value (key, group, value) VALUES (3, 300, '0003')""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".simple_kv (key INT, value TEXT, PRIMARY KEY (key))""")
    session.execute(s"""INSERT INTO "$ks".simple_kv (key, value) VALUES (1, '0001')""")
    session.execute(s"""INSERT INTO "$ks".simple_kv (key, value) VALUES (2, '0002')""")
    session.execute(s"""INSERT INTO "$ks".simple_kv (key, value) VALUES (3, '0003')""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".collections (key INT PRIMARY KEY, l list<text>, s set<text>, m map<text, text>)""")
    session.execute(s"""INSERT INTO "$ks".collections (key, l, s, m) VALUES (1, ['item1', 'item2'], {'item1', 'item2'}, {'key1': 'value1', 'key2': 'value2'})""")
    session.execute(s"""INSERT INTO "$ks".collections (key, l, s, m) VALUES (2, null, null, null)""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".blobs (key INT PRIMARY KEY, b blob)""")
    session.execute(s"""INSERT INTO "$ks".blobs (key, b) VALUES (1, 0x0102030405060708090a0b0c)""")
    session.execute(s"""INSERT INTO "$ks".blobs (key, b) VALUES (2, null)""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".composite_key (key_c1 INT, key_c2 INT, group INT, value TEXT, PRIMARY KEY ((key_c1, key_c2), group))""")
    session.execute(s"""INSERT INTO "$ks".composite_key (key_c1, key_c2, group, value) VALUES (1, 1, 1, 'value1')""")
    session.execute(s"""INSERT INTO "$ks".composite_key (key_c1, key_c2, group, value) VALUES (1, 1, 2, 'value2')""")
    session.execute(s"""INSERT INTO "$ks".composite_key (key_c1, key_c2, group, value) VALUES (1, 2, 3, 'value3')""")
    session.execute(s"""INSERT INTO "$ks".composite_key (key_c1, key_c2, group, value) VALUES (2, 2, 4, 'value4')""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".clustering_time (key INT, time TIMESTAMP, value TEXT, PRIMARY KEY (key, time))""")
    session.execute(s"""INSERT INTO "$ks".clustering_time (key, time, value) VALUES (1, '2014-07-12 20:00:01', 'value1')""")
    session.execute(s"""INSERT INTO "$ks".clustering_time (key, time, value) VALUES (1, '2014-07-12 20:00:02', 'value2')""")
    session.execute(s"""INSERT INTO "$ks".clustering_time (key, time, value) VALUES (1, '2014-07-12 20:00:03', 'value3')""")

    session.execute(s"""CREATE TYPE IF NOT EXISTS "$ks".address (street text, city text, zip int)""")
    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".udts(key INT PRIMARY KEY, name text, addr frozen<address>)""")
    session.execute(s"""INSERT INTO "$ks".udts(key, name, addr) VALUES (1, 'name', {street: 'Some Street', city: 'Paris', zip: 11120})""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".tuples(key INT PRIMARY KEY, value FROZEN<TUPLE<INT, VARCHAR>>)""")
    session.execute(s"""INSERT INTO "$ks".tuples(key, value) VALUES (1, (1, 'first'))""")

    // Mixed-case keyspace/tables used by the fuzzy-match error-message tests.
    session.execute("""CREATE KEYSPACE IF NOT EXISTS "MixedSpace" WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }""")
    session.execute("""CREATE TABLE IF NOT EXISTS "MixedSpace"."MixedCase"(key INT PRIMARY KEY, value INT)""")
    session.execute("""CREATE TABLE IF NOT EXISTS "MixedSpace"."MiXEDCase"(key INT PRIMARY KEY, value INT)""")
    session.execute("""CREATE TABLE IF NOT EXISTS "MixedSpace"."MixedCASE"(key INT PRIMARY KEY, value INT)""")
    session.execute("""CREATE TABLE IF NOT EXISTS "MixedSpace"."MoxedCAs" (key INT PRIMARY KEY, value INT)""")

    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".big_table (key INT PRIMARY KEY, value INT)""")
    val insert = session.prepare(s"""INSERT INTO "$ks".big_table(key, value) VALUES (?, ?)""")
    for (i <- 1 to bigTableRowCount) {
      session.execute(insert.bind(i.asInstanceOf[AnyRef], i.asInstanceOf[AnyRef]))
    }

    // Truncated and repopulated by each writetime/ttl test individually.
    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".write_time_ttl_test (id INT PRIMARY KEY, value TEXT, value2 TEXT)""")
  }

  "A CassandraRDD" should "allow to read a Cassandra table as Array of CassandraRow" in {
    val result = sc.cassandraTable(ks, "key_value").collect()
    result should have length 3
    result.head.getInt("key") should (be >= 1 and be <= 3)
    result.head.getLong("group") should (be >= 100L and be <= 300L)
    result.head.getString("value") should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of pairs of primitives" in {
    val result = sc.cassandraTable[(Int, Long)](ks, "key_value").select("key", "group").collect()
    result should have length 3
    result.head._1 should (be >= 1 and be <= 3)
    result.head._2 should (be >= 100L and be <= 300L)
  }

  it should "allow to read a Cassandra table as Array of tuples" in {
    val result = sc.cassandraTable[(Int, Long, String)](ks, "key_value").collect()
    result should have length 3
    result.head._1 should (be >= 1 and be <= 3)
    result.head._2 should (be >= 100L and be <= 300L)
    result.head._3 should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined case class objects" in {
    val result = sc.cassandraTable[KeyValue](ks, "key_value").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.group should (be >= 100L and be <= 300L)
    result.head.value should startWith("000")
  }

  "A CassandraRDD" should "allow to read a Cassandra table as Array of user-defined objects with inherited fields" in {
    val result = sc.cassandraTable[SubKeyValue](ks, "key_value").collect()
    result should have length 3
    result.map(kv => (kv.key, kv.group, kv.value)).toSet shouldBe Set(
      (1, 100, "0001"),
      (2, 100, "0002"),
      (3, 300, "0003")
    )
  }

  it should "allow to read a Cassandra table as Array of user-defined class objects" in {
    val result = sc.cassandraTable[SampleScalaClass](ks, "simple_kv").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined class (with multiple constructors) objects" in {
    val result = sc.cassandraTable[SampleScalaClassWithMultipleCtors](ks, "simple_kv").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined class (with no fields) objects" in {
    val result = sc.cassandraTable[SampleScalaClassWithNoFields](ks, "simple_kv").collect()
    result should have length 3
  }

  it should "allow to read a Cassandra table as Array of user-defined case class (nested) objects" in {
    val result = sc.cassandraTable[SampleWithNestedScalaCaseClass#InnerClass](ks, "simple_kv").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined case class (deeply nested) objects" in {
    val result = sc.cassandraTable[SampleWithDeeplyNestedScalaCaseClass#IntermediateClass#InnerClass](ks, "simple_kv").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined case class (nested in object) objects" in {
    val result = sc.cassandraTable[SampleObject.ClassInObject](ks, "simple_kv").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined mutable objects" in {
    val result = sc.cassandraTable[MutableKeyValue](ks, "key_value").collect()
    result should have length 3
    result.head.key should (be >= 1 and be <= 3)
    result.head.group should (be >= 100L and be <= 300L)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table as Array of user-defined case class objects with custom mapping specified by aliases" in {
    val result = sc.cassandraTable[ClassWithWeirdProps](ks, "key_value")
      .select("key" as "devil", "group" as "cat", "value").collect()
    result should have length 3
    result.head.devil should (be >= 1 and be <= 3)
    result.head.cat should (be >= 100L and be <= 300L)
    result.head.value should startWith("000")
  }

  it should "allow to read a Cassandra table into CassandraRow objects with custom mapping specified by aliases" in {
    val result = sc.cassandraTable(ks, "key_value")
      .select("key" as "devil", "group" as "cat", "value").collect()
    result should have length 3
    result.head.getInt("devil") should (be >= 1 and be <= 3)
    result.head.getLong("cat") should (be >= 100L and be <= 300L)
    result.head.getString("value") should startWith("000")
  }

  it should "apply proper data type conversions for tuples" in {
    val result = sc.cassandraTable[(String, Int, Long)](ks, "key_value").collect()
    result should have length 3
    Some(result.head._1) should contain oneOf("1", "2", "3")
    result.head._2 should (be >= 100 and be <= 300)
    result.head._3 should (be >= 1L and be <= 3L)
  }

  it should "apply proper data type conversions for user-defined case class objects" in {
    val result = sc.cassandraTable[KeyValueWithConversion](ks, "key_value").collect()
    result should have length 3
    Some(result.head.key) should contain oneOf("1", "2", "3")
    result.head.group should (be >= 100 and be <= 300)
    result.head.value should (be >= 1L and be <= 3L)
  }

  it should "apply proper data type conversions for user-defined mutable objects" in {
    val result = sc.cassandraTable[MutableKeyValueWithConversion](ks, "key_value").collect()
    result should have length 3
    Some(result.head.key) should contain oneOf("1", "2", "3")
    result.head.group should (be >= 100 and be <= 300)
    result.head.value should (be >= 1L and be <= 3L)
  }

  it should "map columns to objects using user-defined function" in {
    val result = sc.cassandraTable[MutableKeyValue](ks, "key_value")
      .as((key: Int, group: Long, value: String) => (key, group, value)).collect()
    result should have length 3
    result.head._1 should (be >= 1 and be <= 3)
    result.head._2 should (be >= 100L and be <= 300L)
    result.head._3 should startWith("000")
  }

  it should "map columns to objects using user-defined function with type conversion" in {
    val result = sc.cassandraTable[MutableKeyValue](ks, "key_value")
      .as((key: String, group: String, value: Option[String]) => (key, group, value)).collect()
    result should have length 3
    Some(result.head._1) should contain oneOf("1", "2", "3")
    Some(result.head._2) should contain oneOf("100", "300")
    Some(result.head._3) should contain oneOf(Some("0001"), Some("0002"), Some("0003"))
  }

  it should "allow for selecting a subset of columns" in {
    val result = sc.cassandraTable(ks, "key_value").select("value").collect()
    result should have length 3
    result.head.size shouldEqual 1
    result.head.getString("value") should startWith("000")
  }

  it should "allow for selecting a subset of rows" in {
    val result = sc.cassandraTable(ks, "key_value").where("group < ?", 200L).collect()
    result should have length 2
    result.head.size shouldEqual 3
    result.head.getInt("group") shouldEqual 100
    result.head.getString("value") should startWith("000")
  }

  it should "use a single partition per node for a tiny table" in {
    val rdd = sc.cassandraTable(ks, "key_value")
    rdd.partitions should have length conn.hosts.size
  }

  it should "allow for reading collections" in {
    val result = sc.cassandraTable(ks, "collections").collect()
    val rowById = result.groupBy(_.getInt("key")).mapValues(_.head)
    rowById(1).getList[String]("l") shouldEqual Vector("item1", "item2")
    rowById(1).getSet[String]("s") shouldEqual Set("item1", "item2")
    rowById(1).getMap[String, String]("m") shouldEqual Map("key1" -> "value1", "key2" -> "value2")
    // NULL collections are surfaced as empty collections, not null/None.
    rowById(2).getList[String]("l") shouldEqual Vector.empty
    rowById(2).getSet[String]("s") shouldEqual Set.empty
    rowById(2).getMap[String, String]("m") shouldEqual Map.empty
  }

  it should "allow for reading blobs" in {
    val result = sc.cassandraTable(ks, "blobs").collect()
    val rowById = result.groupBy(_.getInt("key")).mapValues(_.head)
    rowById(1).getBytes("b").limit() shouldEqual 12
    rowById(1).get[Array[Byte]]("b") shouldEqual Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    rowById(2).getBytesOption("b") shouldEqual None
  }

  it should "allow for converting fields to custom types by user-defined TypeConverter" in {
    TypeConverter.registerConverter(new TypeConverter[CustomerId] {
      def targetTypeTag = typeTag[CustomerId]
      def convertPF = { case x: String => CustomerId(x) }
    })
    val result = sc.cassandraTable[(Int, Long, CustomerId)](ks, "key_value").collect()
    result should have length 3
    result(0)._3 shouldNot be(null)
    result(1)._3 shouldNot be(null)
    result(2)._3 shouldNot be(null)
  }

  it should "allow for reading tables with composite partitioning key" in {
    val result = sc.cassandraTable[(Int, Int, Int, String)](ks, "composite_key")
      .where("group >= ?", 3).collect()
    result should have length 2
  }

  it should "convert values passed to where to correct types (String -> Timestamp)" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("time >= ?", "2014-07-12 20:00:02").collect()
    result should have length 2
  }

  it should "convert values passed to where to correct types (DateTime -> Timestamp)" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("time >= ?", new DateTime(2014, 7, 12, 20, 0, 2)).collect()
    result should have length 2
  }

  it should "convert values passed to where to correct types (Date -> Timestamp)" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("time >= ?", new DateTime(2014, 7, 12, 20, 0, 2).toDate).collect()
    result should have length 2
  }

  it should "convert values passed to where to correct types (String -> Timestamp) (double limit)" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("time > ? and time < ?", "2014-07-12 20:00:01", "2014-07-12 20:00:03").collect()
    result should have length 1
  }

  it should "convert values passed to where to correct types (DateTime -> Timestamp) (double limit)" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("time > ? and time < ?", new DateTime(2014, 7, 12, 20, 0, 1), new DateTime(2014, 7, 12, 20, 0, 3)).collect()
    result should have length 1
  }

  it should "convert values passed to where to correct types (Date -> Timestamp) (double limit)" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("time > ? and time < ?", new DateTime(2014, 7, 12, 20, 0, 1).toDate, new DateTime(2014, 7, 12, 20, 0, 3).toDate).collect()
    result should have length 1
  }

  it should "accept partitioning key in where" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("key = ?", 1).collect()
    result should have length 3
  }

  it should "accept partitioning key and clustering column predicate in where" in {
    val result = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("key = ? AND time >= ?", 1, new DateTime(2014, 7, 12, 20, 0, 2).toDate).collect()
    result should have length 2
  }

  it should "accept composite partitioning key in where" in {
    val result = sc.cassandraTable[(Int, Int, Int, String)](ks, "composite_key")
      .where("key_c1 = ? AND key_c2 = ?", 1, 1).collect()
    result should have length 2
  }

  it should "allow to fetch columns from a table with user defined Cassandra type (UDT)" in {
    val result = sc.cassandraTable(ks, "udts").select("key", "name").collect()
    result should have length 1
    val row = result.head
    row.getInt(0) should be(1)
    row.getString(1) should be("name")
  }

  it should "allow to fetch UDT columns as UDTValue objects" in {
    val result = sc.cassandraTable(ks, "udts").select("key", "name", "addr").collect()
    result should have length 1
    val row = result.head
    row.getInt(0) should be(1)
    row.getString(1) should be("name")
    val udtValue = row.getUDTValue(2)
    udtValue.size should be(3)
    udtValue.getString("street") should be("Some Street")
    udtValue.getString("city") should be("Paris")
    udtValue.getInt("zip") should be(11120)
  }

  it should "allow to fetch UDT columns as objects of case classes" in {
    val result = sc.cassandraTable[ClassWithUDT](ks, "udts").select("key", "name", "addr").collect()
    result should have length 1
    val row = result.head
    row.key should be(1)
    row.name should be("name")
    val udtValue = row.addr
    udtValue.street should be("Some Street")
    udtValue.city should be("Paris")
    udtValue.zip should be(11120)
  }

  it should "allow to fetch tuple columns as TupleValue objects" in {
    val result = sc.cassandraTable(ks, "tuples").select("key", "value").collect()
    result should have length 1
    val row = result.head
    row.getInt(0) should be(1)
    val tuple = row.getTupleValue(1)
    tuple.size should be(2)
    tuple.getInt(0) should be(1)
    tuple.getString(1) should be("first")
  }

  it should "allow to fetch tuple columns as Scala tuples" in {
    val result = sc.cassandraTable[ClassWithTuple](ks, "tuples").select("key", "value").collect()
    result should have length 1
    val row = result.head
    row.key should be(1)
    row.value._1 should be(1)
    row.value._2 should be("first")
  }

  it should "throw appropriate IOException when the table was not found at the computation time" in {
    intercept[IOException] { sc.cassandraTable(ks, "unknown_table").collect() }
  }

  it should "be lazy and must not throw IOException if the table was not found at the RDD initialization time" in {
    sc.cassandraTable(ks, "unknown_table")
  }

  it should "not leak threads" in {

    def threadCount() = {
      // Before returning active thread count, wait while thread count is decreasing,
      // to give spark some time to terminate temporary threads and not count them
      val counts = Iterator.continually { Thread.sleep(500); Thread.activeCount() }
      counts.sliding(2)
        .dropWhile { case Seq(prev, current) => current < prev }
        .next().head
    }

    // compute a few RDDs so the thread pools get initialized
    // using parallel range, to initialize parallel collections fork-join-pools
    val iterationCount = 256
    for (i <- (1 to iterationCount).par)
      sc.cassandraTable(ks, "key_value").collect()

    // subsequent computations of RDD should reuse already created thread pools,
    // not instantiate new ones
    val startThreadCount = threadCount()
    val oldThreads = Thread.getAllStackTraces.keySet().toSet

    for (i <- (1 to iterationCount).par)
      sc.cassandraTable(ks, "key_value").collect()

    val endThreadCount = threadCount()
    val newThreads = Thread.getAllStackTraces.keySet().toSet
    val createdThreads = newThreads -- oldThreads
    println("Start thread count: " + startThreadCount)
    println("End thread count: " + endThreadCount)
    println("Threads created: ")
    createdThreads.map(_.getName).toSeq.sortBy(identity).foreach(println)

    // This is not very precise, but if there was a thread leak and we leaked even only
    // 1-thread per rdd, this test would not pass. Typically we observed the endThreadCount = startThreadCount +/- 3
    endThreadCount should be < startThreadCount + iterationCount * 3 / 4
  }

  it should "allow to read Cassandra table as Array of KV tuples of two pairs" in {
    val results = sc
      .cassandraTable[(Int, String)](ks, "composite_key")
      .select("group", "value", "key_c1", "key_c2")
      .keyBy[(Int, Int)]("key_c1", "key_c2")
      .collect()
    results should have length 4
    results should contain (((1, 1), (1, "value1")))
    results should contain (((1, 1), (2, "value2")))
    results should contain (((1, 2), (3, "value3")))
    results should contain (((2, 2), (4, "value4")))
  }

  it should "allow to read Cassandra table as Array of KV tuples of a pair and a case class" in {
    val results = sc
      .cassandraTable[Value](ks, "key_value")
      .select("key", "group", "value")
      .keyBy[(Int, Int)]("key", "group")
      .collect()
    results should have length 3
    val map = results.toMap
    map((1, 100)) should be (Value("0001"))
    map((2, 100)) should be (Value("0002"))
    map((3, 300)) should be (Value("0003"))
  }

  it should "allow to read Cassandra table as Array of KV tuples of a case class and a tuple" in {
    val results = sc
      .cassandraTable[(Int, Int, String)](ks, "key_value")
      .select("key", "group", "value")
      .keyBy[KeyGroup]
      .collect()
    results should have length 3
    results should contain ((KeyGroup(1, 100), (1, 100, "0001")))
    results should contain ((KeyGroup(2, 100), (2, 100, "0002")))
    results should contain ((KeyGroup(3, 300), (3, 300, "0003")))
  }

  it should "allow to read Cassandra table as Array of KV tuples of a case class and a tuple grouped by partition key" in {
    conn.withSessionDo { session =>
      session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".wide_rows(key INT, group INT, value VARCHAR, PRIMARY KEY (key, group))""")
      session.execute(s"""INSERT INTO "$ks".wide_rows(key, group, value) VALUES (10, 10, '1010')""")
      session.execute(s"""INSERT INTO "$ks".wide_rows(key, group, value) VALUES (10, 11, '1011')""")
      session.execute(s"""INSERT INTO "$ks".wide_rows(key, group, value) VALUES (10, 12, '1012')""")
      session.execute(s"""INSERT INTO "$ks".wide_rows(key, group, value) VALUES (20, 20, '2020')""")
      session.execute(s"""INSERT INTO "$ks".wide_rows(key, group, value) VALUES (20, 21, '2021')""")
      session.execute(s"""INSERT INTO "$ks".wide_rows(key, group, value) VALUES (20, 22, '2022')""")
    }

    val results = sc
      .cassandraTable[(Int, Int, String)](ks, "wide_rows")
      .select("key", "group", "value")
      .keyBy[Key]
      .spanByKey
      .collect()
      .toMap

    results should have size 2
    results should contain key Key(10)
    results should contain key Key(20)
    // spanByKey preserves clustering order within a partition.
    results(Key(10)) should contain inOrder(
      (10, 10, "1010"),
      (10, 11, "1011"),
      (10, 12, "1012"))
    results(Key(20)) should contain inOrder(
      (20, 20, "2020"),
      (20, 21, "2021"),
      (20, 22, "2022"))
  }

  it should "allow to read Cassandra table as Array of tuples of two case classes" in {
    val results = sc.cassandraTable[Value](ks, "key_value")
      .select("key", "group", "value")
      .keyBy[KeyGroup]
      .collect()
    results should have length 3
    results should contain((KeyGroup(1, 100), Value("0001")))
    results should contain((KeyGroup(2, 100), Value("0002")))
    results should contain((KeyGroup(3, 300), Value("0003")))
  }

  it should "allow to read Cassandra table as Array of String values" in {
    val results = sc.cassandraTable[String](ks, "key_value").select("value").collect()
    results should have length 3
    results should contain("0001")
    results should contain("0002")
    results should contain("0003")
  }

  it should "allow to read Cassandra table as Array of Int values" in {
    val results = sc.cassandraTable[Int](ks, "key_value").select("key").collect()
    results should have length 3
    results should contain(1)
    results should contain(2)
    results should contain(3)
  }

  it should "allow to read Cassandra table as Array of java.lang.Integer values" in {
    val results = sc.cassandraTable[Integer](ks, "key_value").select("key").collect()
    results should have length 3
    results should contain(1)
    results should contain(2)
    results should contain(3)
  }

  it should "allow to read Cassandra table as Array of List of values" in {
    val results = sc.cassandraTable[List[String]](ks, "collections").select("l").collect()
    results should have length 2
    results should contain(List("item1", "item2"))
  }

  it should "allow to read Cassandra table as Array of Set of values" in {
    // Fixed: this test previously selected the list column "l" (copy-paste
    // from the test above); the set column "s" is what it is meant to read.
    val results = sc.cassandraTable[Set[String]](ks, "collections").select("s").collect()
    results should have length 2
    results should contain(Set("item1", "item2"))
  }

  // This is to trigger result set paging, unused in most other tests:
  it should "allow to count a high number of rows" in {
    val count = sc.cassandraTable(ks, "big_table").cassandraCount()
    count should be (bigTableRowCount)
  }

  it should "allow to fetch write time of a specified column as a tuple element" in {
    val writeTime = System.currentTimeMillis() * 1000L
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TIMESTAMP $writeTime""")
    }

    val results = sc.cassandraTable[(Int, String, Long)](ks, "write_time_ttl_test")
      .select("id", "value", "value".writeTime).collect().headOption
    results.isDefined should be(true)
    results.get should be((1, "test", writeTime))
  }

  it should "allow to fetch ttl of a specified column as a tuple element" in {
    val ttl = 1000
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TTL $ttl""")
    }

    val results = sc.cassandraTable[(Int, String, Int)](ks, "write_time_ttl_test")
      .select("id", "value", "value".ttl).collect().headOption
    results.isDefined should be(true)
    results.get._1 should be (1)
    results.get._2 should be ("test")
    // TTL counts down from insertion, so allow a small window.
    results.get._3 should be > (ttl - 10)
    results.get._3 should be <= ttl
  }

  it should "allow to fetch both write time and ttl of a specified column as tuple elements" in {
    val writeTime = System.currentTimeMillis() * 1000L
    val ttl = 1000
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TIMESTAMP $writeTime AND TTL $ttl""")
    }

    val results = sc.cassandraTable[(Int, String, Long, Int)](ks, "write_time_ttl_test")
      .select("id", "value", "value".writeTime, "value".ttl).collect().headOption
    results.isDefined should be(true)
    results.get._1 should be (1)
    results.get._2 should be ("test")
    results.get._3 should be (writeTime)
    results.get._4 should be > (ttl - 10)
    results.get._4 should be <= ttl
  }

  it should "allow to fetch write time of two different columns as tuple elements" in {
    val writeTime = System.currentTimeMillis() * 1000L
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TIMESTAMP $writeTime""")
    }

    val results = sc.cassandraTable[(Int, Long, Long)](ks, "write_time_ttl_test")
      .select("id", "value".writeTime, "value2".writeTime).collect().headOption
    results.isDefined should be(true)
    results.get should be((1, writeTime, writeTime))
  }

  it should "allow to fetch ttl of two different columns as tuple elements" in {
    val ttl = 1000
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TTL $ttl""")
    }

    val results = sc.cassandraTable[(Int, Int, Int)](ks, "write_time_ttl_test")
      .select("id", "value".ttl, "value2".ttl).collect().headOption
    results.isDefined should be(true)
    results.get._1 should be (1)
    results.get._2 should be > (ttl - 10)
    results.get._2 should be <= ttl
    results.get._3 should be > (ttl - 10)
    results.get._3 should be <= ttl
  }

  it should "allow to fetch writetime of a specified column and map it to a class field with custom mapping" in {
    val writeTime = System.currentTimeMillis() * 1000L
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TIMESTAMP $writeTime""")
    }

    implicit val mapper = new DefaultColumnMapper[WriteTimeClass](Map("writeTimeOfValue" -> "value".writeTime.selectedAs))
    val results = sc.cassandraTable[WriteTimeClass](ks, "write_time_ttl_test")
      .select("id", "value", "value".writeTime).collect().headOption
    results.isDefined should be (true)
    results.head should be (WriteTimeClass(1, "test", writeTime))
  }

  it should "allow to fetch ttl of a specified column and map it to a class field with custom mapping" in {
    val ttl = 1000
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TTL $ttl""")
    }

    implicit val mapper = new DefaultColumnMapper[TTLClass](Map("ttlOfValue" -> "value".ttl.selectedAs))
    val results = sc.cassandraTable[TTLClass](ks, "write_time_ttl_test")
      .select("id", "value", "value".ttl).collect().headOption
    results.isDefined should be (true)
    results.head.id should be (1)
    results.head.value should be ("test")
    // Fixed: these two lines were bare boolean expressions whose results were
    // discarded, so the TTL bounds were never actually asserted.
    results.head.ttlOfValue should be > (ttl - 10)
    results.head.ttlOfValue should be <= ttl
  }

  it should "allow to fetch writetime of a specified column and map it to a class field with aliases" in {
    val writeTime = System.currentTimeMillis() * 1000L
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TIMESTAMP $writeTime""")
    }

    val results = sc.cassandraTable[WriteTimeClass](ks, "write_time_ttl_test")
      .select("id", "value", "value".writeTime as "writeTimeOfValue").collect().headOption
    results.isDefined should be (true)
    results.head should be (WriteTimeClass(1, "test", writeTime))
  }

  it should "allow to fetch ttl of a specified column and map it to a class field with aliases" in {
    val ttl = 1000
    conn.withSessionDo { session =>
      session.execute(s"""TRUNCATE "$ks".write_time_ttl_test""")
      session.execute(s"""INSERT INTO "$ks".write_time_ttl_test (id, value, value2) VALUES (1, 'test', 'test2') USING TTL $ttl""")
    }

    val results = sc.cassandraTable[TTLClass](ks, "write_time_ttl_test")
      .select("id", "value", "value".ttl as "ttlOfValue").collect().headOption
    results.isDefined should be (true)
    results.head.id should be (1)
    results.head.value should be ("test")
    // Fixed: same discarded-boolean bug as in the custom-mapping TTL test.
    results.head.ttlOfValue should be > (ttl - 10)
    results.head.ttlOfValue should be <= ttl
  }

  it should "allow to specify ascending ordering" in {
    val results = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("key=1").withAscOrder.collect()
    results.map(_._3).toList shouldBe List("value1", "value2", "value3")
  }

  it should "allow to specify descending ordering" in {
    val results = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time")
      .where("key=1").withDescOrder.collect()
    results.map(_._3).toList shouldBe List("value3", "value2", "value1")
  }

  it should "allow to specify rows number limit" in {
    val results = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time").where("key=1").limit(2).collect()
    results should have length 2
    results(0)._3 shouldBe "value1"
    results(1)._3 shouldBe "value2"
  }

  it should "allow to specify rows number with take" in {
    val results = sc.cassandraTable[(Int, Date, String)](ks, "clustering_time").where("key=1").take(2)
    results should have length 2
    results(0)._3 shouldBe "value1"
    results(1)._3 shouldBe "value2"
  }

  it should "count the CassandraRDD items" in {
    val result = sc.cassandraTable(ks, "big_table").cassandraCount()
    result shouldBe bigTableRowCount
  }

  it should "count the CassandraRDD items with where predicate" in {
    val result = sc.cassandraTable(ks, "big_table").where("key=1").cassandraCount()
    result shouldBe 1
  }

  it should "allow to use empty RDD on undefined table" in {
    val result = sc.cassandraTable("unknown_ks", "unknown_table").toEmptyCassandraRDD.collect()
    result should have length 0
  }

  it should "allow to use empty RDD on defined table" in {
    val result = sc.cassandraTable(ks, "simple_kv").toEmptyCassandraRDD.collect()
    result should have length 0
  }

  it should "suggest similar tables if table doesn't exist but keyspace does" in {
    val ioe = the [IOException] thrownBy sc.cassandraTable("MixedSpace","mixedcase").collect()
    val message = ioe.getMessage
    message should include ("MixedSpace.MixedCase")
    message should include ("MixedSpace.MiXEDCase")
    message should include ("MixedSpace.MixedCASE")
  }

  it should "suggest possible keyspace and table matches if the keyspace and table do not exist" in {
    val ioe = the [IOException] thrownBy sc.cassandraTable("MoxedSpace","mixdcase").collect()
    val message = ioe.getMessage
    message should include ("MixedSpace.MixedCase")
    message should include ("MixedSpace.MiXEDCase")
    message should include ("MixedSpace.MixedCASE")
  }

  it should "suggest possible keyspaces if the table exists but in a different keyspace" in {
    val ioe = the [IOException] thrownBy sc.cassandraTable("MoxedSpace","MoxedCAS").collect()
    val message = ioe.getMessage
    message should include ("MixedSpace.MoxedCAs")
  }

  it should "suggest possible keyspaces and tables if the table has a fuzzy match but they keyspace does not" in {
    val ioe = the [IOException] thrownBy sc.cassandraTable("rock","MixedCase").collect()
    val message = ioe.getMessage
    message should include ("MixedSpace.MixedCase")
  }

  it should "handle upper case charactors in UDT fields" in {
    conn.withSessionDo { session =>
      session.execute("use \"CassandraRDDSpec\"")
      session.execute(
        """CREATE TYPE "Attachment" (
          |  "Id" text,
          |  "MimeType" text,
          |  "FileName" text
          |)
        """.stripMargin)
      session.execute(
        """CREATE TABLE "Interaction" (
          |  "Id" text PRIMARY KEY,
          |  "Attachments" map<text,frozen<"Attachment">>,
          |  "ContactId" text
          |)
        """.stripMargin)
      session.execute(
        """INSERT INTO "Interaction"(
          |  "Id",
          |  "Attachments",
          |  "ContactId"
          |)
          |VALUES (
          |  '000000a5ixIEvmPD',
          |  null,
          |  'xcb9HMoQ'
          |)
        """.stripMargin)
      session.execute(
        """UPDATE "Interaction"
          |SET
          |  "Attachments" = "Attachments" + {'rVpgK':
          |    {"Id":'rVpgK',
          |     "MimeType":'text/plain',
          |     "FileName":'notes.txt'}}
          |WHERE "Id" = '000000a5ixIEvmPD'
        """.stripMargin)
    }
    val tableRdd = sc.cassandraTable("CassandraRDDSpec", "Interaction")
    val dataColumns = tableRdd.map(row => row.getString("ContactId"))
    dataColumns.count shouldBe 1
  }

  it should "be able to read SMALLINT columns from" in {
    val result = sc.cassandraTable[(Int, Short)](ks, "short_value").collect
    result should contain ((1, 100))
    result should contain ((2, 200))
    result should contain ((3, 300))
  }
}
| debasish83/cassandra-driver-spark | spark-cassandra-connector/src/it/scala/com/datastax/spark/connector/rdd/CassandraRDDSpec.scala | Scala | apache-2.0 | 38,965 |
// NOTE(review): this file appears to be a *negative* compilation test
// (divergent implicit expansion) -- confirm against the test harness's
// expected-error conventions; the code below must NOT be "fixed" to compile.
object Test1 {
  // Generic conversion whose own implicit argument is another A => B
  // conversion, so implicit search for it can recurse without progress.
  implicit def cast[A, B](x: A)(implicit c: A => B): B = c(x)

  val x1: String = 1 // expected error: implicit search for Int => String diverges
  val x2: String = cast[Int, String](1) // expected error: no base Int => String instance
}
object Test2 {
  class Foo
  class Bar
  class Baz
  // Mutually recursive implicit conversions: each one's implicit parameter
  // can only be satisfied via the other, so resolving either conversion
  // requires the other first -- implicit search cannot terminate usefully.
  implicit def foo2bar(x: Foo)(implicit baz2bar: Baz => Bar): Bar = baz2bar(new Baz)
  implicit def baz2bar(x: Baz)(implicit foo2bar: Foo => Bar): Bar = foo2bar(new Foo)

  val x: Bar = new Foo // expected error: divergent implicit expansion
  val y: Bar = new Baz // expected error: divergent implicit expansion
}
| som-snytt/dotty | tests/untried/neg/divergent-implicit.scala | Scala | apache-2.0 | 411 |
/*******************************************************************************
* Copyright (c) 2014 Łukasz Szpakowski.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
******************************************************************************/
package pl.luckboy.purfuncor.frontend
import scala.collection.immutable.BitSet
import scala.collection.immutable.IntMap
import scala.util.parsing.input.Position
import scala.annotation.tailrec
import scalaz._
import scalaz.Scalaz._
/**
 * A union-find style forest of parameters.
 *
 * Each parameter is an `Int` id. `nodes` maps a parameter to its parent link
 * (`ParamNode(Some(prev))`) or marks it as a root (`ParamNode(None)`); `terms`
 * maps root parameters to their bound term; `next` is the next id to allocate.
 * The structure is persistent: every mutation returns a new forest.
 */
case class ParamForest[+T](nodes: IntMap[ParamNode], terms: IntMap[T], next: Int)
{
  def containsTerm(param: Int) = terms.contains(param)
  def getTerm(param: Int) = terms.get(param)
  // Follow parent links to the set representative; None if `param` was never allocated.
  @tailrec
  final def findRootParam(param: Int): Option[Int] =
    nodes.get(param) match {
      case Some(ParamNode(Some(prev))) => findRootParam(prev)
      case Some(ParamNode(None)) => some(param)
      case None => none
    }
  // Bind `term` to the root of `param`'s set; None if `param` is unknown.
  def replaceParam[T2 >: T](param: Int, term: T2) = findRootParam(param).map { rp => copy(terms = terms.updated(rp, term)) }
  // Merge the sets of the two parameters. The Boolean in the result is true
  // when a merge actually happened (the roots differed). Note: any term bound
  // to rootParam1 is not migrated here — callers handle term reconciliation.
  def unionParams(param1: Int, param2: Int) =
    for {
      rootParam1 <- findRootParam(param1)
      rootParam2 <- findRootParam(param2)
    } yield {
      if(rootParam1 =/= rootParam2) (copy(nodes.updated(rootParam1, ParamNode(some(rootParam2)))), true) else (this, false)
    }
  // Allocate a fresh root parameter; None once the id space is exhausted.
  def allocateParam = {
    if(next < Integer.MAX_VALUE) {
      val param = next
      some((copy(nodes = nodes + (param -> ParamNode(None)), next = next + 1), param))
    } else
      none
  }
  def allocatedParams = nodes.keySet
  // Invert `paramMap` and then extend the inverse so that every parameter in a
  // union-find set maps to the same target as any already-mapped member of
  // that set. Fails with None if a chain references an unallocated parameter.
  def reverseParamMap(paramMap: Map[Int, Int]) = {
    nodes.keySet.foldLeft(some((paramMap.map { _.swap }.toMap, BitSet()))) {
      case (Some((reversedParamMap, markedParams)), param) =>
        findParams(param)(BitSet(), markedParams).map {
          params =>
            val tmpReversedParamMap = reversedParamMap.find { p => params.contains(p._1) }.toList.flatMap { p => params.map { (_, p._2) } }.toMap
            (reversedParamMap ++ tmpReversedParamMap, markedParams | params)
        }
      case (None, _) =>
        none
    }.map { _._1 }
  }
  // Collect the parameters on the chain from `param` to its root, stopping
  // early at any already-marked parameter (whose chain was processed before).
  @tailrec
  private def findParams(param: Int)(params: BitSet, markedParams: BitSet): Option[BitSet] =
    nodes.get(param) match {
      case Some(ParamNode(Some(prev))) =>
        if(!markedParams.contains(param))
          findParams(prev)(params + param, markedParams)
        else
          some(params + param)
      case Some(ParamNode(None)) =>
        some(params + param)
      case None =>
        none
    }
}
object ParamForest
{
  /** The empty forest: no nodes, no term bindings, and ids starting at 0. */
  val empty = ParamForest(IntMap.empty, IntMap.empty, 0)
}
case class ParamNode(prev: Option[Int])
| luckboy/Purfuncor | src/main/scala/pl/luckboy/purfuncor/frontend/ParamForest.scala | Scala | mpl-2.0 | 2,926 |
/*
* Copyright 2013 Gregor Uhlenheuer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kongo2002.android.prost
import android.app.Activity
import android.content.Intent
import android.content.SharedPreferences
import android.os.Bundle
import android.preference.PreferenceActivity
import scala.collection.mutable.HashSet
object SettingsActivity {
  // Activity result code signalling that at least one tile binding changed.
  final val RESULT_TILES_CHANGED = 2
  // Intent-extra key under which the ids of the changed tiles are returned.
  final val RESULT_DATA_KEY = "com.kongo2002.android.prost.ChangedTiles"
}
/**
 * Preference screen that tracks which tile bindings the user changed and
 * reports them back to the caller via the activity result.
 */
class SettingsActivity extends PreferenceActivity
  with SharedPreferences.OnSharedPreferenceChangeListener {
  // Tiles whose preference changed while this screen was open.
  val changedTiles = new HashSet[Tiles.Tiles]
  override def onCreate(state: Bundle) {
    super.onCreate(state)
    addPreferencesFromResource(R.xml.preferences)
  }
  override def onResume() {
    super.onResume
    /* hook into preference changes */
    val prefs = getPreferenceScreen.getSharedPreferences
    // Refresh all summaries up front so they reflect the stored values.
    Tiles.values.foreach(t => updateSummary(prefs, Tiles.configKey(t)))
    prefs.registerOnSharedPreferenceChangeListener(this)
  }
  override def onPause() {
    super.onPause
    /* remove preference changes callback */
    val prefs = getPreferenceScreen.getSharedPreferences
    prefs.unregisterOnSharedPreferenceChangeListener(this)
  }
  /**
   * Preference changes event handler
   * @param prefs Shared preferences
   * @param key Configuration key that was changed
   */
  override def onSharedPreferenceChanged(prefs: SharedPreferences, key: String) {
    // Only keys that belong to a tile are of interest; others are ignored.
    val foundTile = Tiles.values.find(t => Tiles.configKey(t).equals(key))
    foundTile match {
      case Some(tile) => {
        /* update list preference summary text */
        updateSummary(prefs, key)
        changedTiles += tile
      }
      case None =>
    }
  }
  override def finish {
    // If any tiles changed, hand their ids back to the calling activity;
    // otherwise finish with a plain OK result.
    if (changedTiles.size > 0) {
      val tilesArray = changedTiles.toArray.map(x => x.id)
      val intent = new Intent
      intent.putExtra(SettingsActivity.RESULT_DATA_KEY, tilesArray)
      changedTiles.clear
      setResult(SettingsActivity.RESULT_TILES_CHANGED, intent)
    } else {
      setResult(Activity.RESULT_OK)
    }
    super.finish
  }
  // Show the selected command's display name as the preference summary,
  // or blank it when the stored value maps to no known command.
  private def updateSummary(prefs: SharedPreferences, key: String) {
    val value = prefs.getString(key, "")
    val pref = findPreference(key)
    Commands.get(value) match {
      case Some(cmd) => pref.setSummary(cmd.name)
      case _ => pref.setSummary("")
    }
  }
}
| kongo2002/prost | src/com/kongo2002/android/prost/SettingsActivity.scala | Scala | apache-2.0 | 2,917 |
package com.github.ldaniels528.trifecta.ui.actors
import akka.actor.ActorRef
/**
 * Represents a Server-Side Events session
 * @param sessionId the given [[String session ID]]
 * @param actor the given [[ActorRef actor]]
 * @author lawrence.daniels@gmail.com
 */
// NOTE(review): presumably `actor` is the per-session actor that pushes SSE
// messages to the connected client — confirm against the owning controller.
case class SSESession(sessionId: String, actor: ActorRef)
| ldaniels528/trifecta | app-play/app/com/github/ldaniels528/trifecta/ui/actors/SSESession.scala | Scala | apache-2.0 | 328 |
package org.scalajs.testsuite.javalib.util.logging
import java.util.logging._
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.Platform
import org.scalajs.testsuite.utils.AssertThrows._
/**
 * Tests for the base behaviour of [[java.util.logging.Handler]]:
 * default state, setter null-handling, error reporting, and level/filter
 * based loggability decisions.
 */
class HandlerTest {
  /** Minimal concrete Handler; the abstract methods are no-ops so only base-class state is exercised. */
  class TestHandler extends Handler {
    override def flush(): Unit = {}
    override def publish(record: LogRecord): Unit = {}
    override def close(): Unit = {}
    // Widens visibility of the protected reportError so tests can invoke it.
    override def reportError(msg: String, ex: Exception, code: Int): Unit =
      super.reportError(msg, ex, code)
  }
  /** Filter with a fixed verdict, used to check Handler.isLoggable delegation. */
  class TestFilter(allow: Boolean) extends Filter {
    override def isLoggable(record: LogRecord): Boolean = allow
  }
  /** ErrorManager that records its call parameters instead of printing them. */
  class TestErrorManager extends ErrorManager {
    var callParams: Option[(String, Exception, Int)] = None
    override def error(msg: String, ex: Exception, code: Int): Unit =
      callParams = Some((msg, ex, code))
  }
  @Test def test_constructor(): Unit = {
    val h = new TestHandler()
    // Freshly constructed handlers have no formatter/encoding/filter, a
    // non-null default error manager, and level ALL.
    assertNull(h.getFormatter)
    assertNull(h.getEncoding)
    assertNull(h.getFilter)
    assertNotNull(h.getErrorManager)
    assertEquals(Level.ALL, h.getLevel)
    h.setFormatter(new SimpleFormatter())
    assertNotNull(h.getFormatter)
    // The javadocs indicate this is allowed but at runtime on
    // the JVM an NPE is thrown
    if (!Platform.executingInJVM) {
      h.setFormatter(null)
      assertNull(h.getFormatter)
    }
    h.setFilter(new TestFilter(true))
    assertNotNull(h.getFilter)
    h.setFilter(null)
    assertNull(h.getFilter)
    h.setEncoding("UTF-16")
    assertNotNull(h.getEncoding)
    h.setEncoding(null)
    assertNull(h.getEncoding)
    expectThrows(classOf[NullPointerException], h.setErrorManager(null))
    h.setLevel(Level.FINE)
    assertEquals(Level.FINE, h.getLevel)
    expectThrows(classOf[NullPointerException], h.setLevel(null))
  }
  @Test def test_report_error(): Unit = {
    // Error manager is quite opaque, to test we'll use a mock version
    // to record the params
    val h = new TestHandler()
    val e = new TestErrorManager()
    h.setErrorManager(e)
    val ex = new RuntimeException()
    h.reportError("msg", ex, 24)
    assertEquals(Some(("msg", ex, 24)), e.callParams)
  }
  @Test def test_handler_is_loggable(): Unit = {
    val h1 = new TestHandler()
    // Side-effecting nullary helper: declared and called with () per convention.
    def checkLogAllLevels(): Unit = {
      assertTrue(h1.isLoggable(new LogRecord(Level.SEVERE, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.WARNING, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.INFO, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.CONFIG, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.FINE, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.FINER, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.FINEST, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.OFF, "message")))
      assertTrue(h1.isLoggable(new LogRecord(Level.ALL, "message")))
    }
    // Level is all at start
    checkLogAllLevels()
    // Check filter
    h1.setFilter(new TestFilter(true))
    checkLogAllLevels()
    // A rejecting filter vetoes every record regardless of level.
    h1.setFilter(new TestFilter(false))
    assertFalse(h1.isLoggable(new LogRecord(Level.SEVERE, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.WARNING, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.INFO, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.CONFIG, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.FINE, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.FINER, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.FINEST, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.OFF, "message")))
    assertFalse(h1.isLoggable(new LogRecord(Level.ALL, "message")))
    // With level INFO, only records at INFO or above (plus OFF) pass.
    val h2 = new TestHandler()
    h2.setLevel(Level.INFO)
    assertTrue(h2.isLoggable(new LogRecord(Level.SEVERE, "message")))
    assertTrue(h2.isLoggable(new LogRecord(Level.WARNING, "message")))
    assertTrue(h2.isLoggable(new LogRecord(Level.INFO, "message")))
    assertFalse(h2.isLoggable(new LogRecord(Level.CONFIG, "message")))
    assertFalse(h2.isLoggable(new LogRecord(Level.FINE, "message")))
    assertFalse(h2.isLoggable(new LogRecord(Level.FINER, "message")))
    assertFalse(h2.isLoggable(new LogRecord(Level.FINEST, "message")))
    assertTrue(h2.isLoggable(new LogRecord(Level.OFF, "message")))
    assertFalse(h2.isLoggable(new LogRecord(Level.ALL, "message")))
  }
}
| scala-js/scala-js-java-logging | testSuite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/logging/HandlerTest.scala | Scala | bsd-3-clause | 4,496 |
package scala.collection.immutable
import org.scalacheck._
import Prop._
/**
 * Property checks that ListMap.from produces the same result regardless of
 * whether the source is a map, its list of entries, or a view — i.e. that
 * construction is insensitive to the concrete source collection.
 */
object ListMapProperties extends Properties("immutable.ListMap") {
  type K = Int
  type V = Int
  type T = (K, V)
  // LinkedHashMap preserves insertion order, so this also checks that the
  // ordered path agrees with building from the equivalent entry list.
  property("from(linkedHashMap) == from(linkedHashMap.toList)") = forAll { m: Map[K, V] =>
    val lhm = m.to(collection.mutable.LinkedHashMap)
    ListMap.from(lhm) ?= ListMap.from(lhm.toList)
  }
  property("from(map) == from(map.toList)") = forAll { m: Map[K, V] =>
    ListMap.from(m) ?= ListMap.from(m.toList)
  }
  property("from(map.view) == from(map)") = forAll { m: Map[K, V] =>
    ListMap.from(m.view) ?= ListMap.from(m)
  }
}
| lrytz/scala | test/scalacheck/scala/collection/immutable/ListMapProperties.scala | Scala | apache-2.0 | 631 |
package by.verkpavel.grafolnet.model
// Request to process an image: `image` is presumably an encoded payload or a
// reference and `params` the requested analysis parameters — TODO confirm at call sites.
case class ImageRequest(id: Int, image: String, params: Array[String])
// Response keyed by the same id; values are heterogeneous (Map[String, Any]).
case class ImageResponse(id: Int, params: Map[String, Any])
| VerkhovtsovPavel/BSUIR_Labs | Diploma/diploma-latex/src/fulllisting/package.scala | Scala | mit | 169 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tools.data
import cmwell.tools.data.downloader.consumer.Downloader.Token
import cmwell.tools.data.utils.akka.stats.DownloaderStats.DownloadStats
package object sparql {
  // Sensor/path name -> its consumer token and optional download statistics.
  type TokenAndStatisticsMap = Map[String, TokenAndStatistics]
  // A consumer token paired with the stats gathered so far (None before any download).
  type TokenAndStatistics = (Token, Option[DownloadStats])
}
| hochgi/CM-Well | server/cmwell-data-tools/src/main/scala/cmwell/tools/data/sparql/package.scala | Scala | apache-2.0 | 926 |
package se.nimsa.sbx.user
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}
import se.nimsa.sbx.user.UserProtocol._
import se.nimsa.sbx.util.FutureUtil.await
import se.nimsa.sbx.util.TestUtil
import scala.concurrent.duration.DurationInt
/**
 * Integration-style tests for UserServiceActor: session lifecycle (create,
 * refresh, expire, delete) and login behaviour against a real test database.
 * State is reset between tests by clearing the DAO in afterEach.
 */
class UserServiceActorTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach {
  def this() = this(ActorSystem("UserServiceActorTestSystem"))
  implicit val ec = system.dispatcher
  implicit val timeout = Timeout(30.seconds)
  val dbConfig = TestUtil.createTestDb("userserviceactortest")
  val dao = new UserDAO(dbConfig)
  await(dao.create())
  // TestActorRef gives synchronous access to the actor's internals (userActor).
  val userService = TestActorRef(new UserServiceActor(dao, "admin", "admin", 1000000))
  val userActor = userService.underlyingActor
  override def afterEach() = await(dao.clear())
  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }
  "A UserServiceActor" should {
    "cleanup expired sessions regularly" in {
      val user = await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      // One fresh session and one with timestamp 0 (long expired).
      await(dao.insertSession(ApiSession(-1, user.id, "token1", "ip1", "user agent1", System.currentTimeMillis)))
      await(dao.insertSession(ApiSession(-1, user.id, "token2", "ip2", "user agent2", 0)))
      await(dao.listUsers(0, 10)) should have length 2
      await(dao.listSessions) should have length 2
      await(userActor.removeExpiredSessions())
      await(dao.listSessions) should have length 1
    }
    "refresh a non-expired session defined by a token, ip and user agent" in {
      // Sessions are keyed on the MD5 hash of the user agent, not the raw string.
      val userAgent = "user agent"
      val userAgentHash = userActor.md5Hash(userAgent)
      val user = await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      val sessionTime = System.currentTimeMillis - 1000
      await(dao.insertSession(ApiSession(-1, user.id, "token", "ip", userAgentHash, sessionTime)))
      await(userActor.getAndRefreshUser(AuthKey(Some("token"), Some("ip"), Some(userAgent))))
      val optionalSession = await(dao.userSessionByTokenIpAndUserAgent("token", "ip", userAgentHash))
      optionalSession.isDefined shouldBe true
      // The lookup must have bumped the session's updated timestamp.
      optionalSession.get._2.updated shouldBe >(sessionTime)
    }
    "not refresh an expired session defined by a token, ip and user agent" in {
      val user = await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      val sessionTime = 1000
      await(dao.insertSession(ApiSession(-1, user.id, "token", "ip", "user agent", sessionTime)))
      await(userActor.getAndRefreshUser(AuthKey(Some("token"), Some("ip"), Some("user agent"))))
      val optionalSession = await(dao.userSessionByTokenIpAndUserAgent("token", "ip", "user agent"))
      optionalSession.isDefined shouldBe true
      // Expired sessions keep their original timestamp.
      optionalSession.get._2.updated shouldBe sessionTime
    }
    "create a session if none exists and update it if one exists" in {
      val user = await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      await(dao.listSessions) should have length 0
      val session1 = await(userActor.createOrUpdateSession(user, "ip", "userAgent"))
      await(dao.listSessions) should have length 1
      // Sleep guarantees a strictly larger 'updated' timestamp on the second call.
      Thread.sleep(100)
      val session2 = await(userActor.createOrUpdateSession(user, "ip", "userAgent"))
      await(dao.listSessions) should have length 1
      session2.updated shouldBe >(session1.updated)
    }
    "remove a session based on user id, IP and user agent when logging out" in {
      val userAgent = "user agent"
      val user = await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      val session1 = await(userActor.createOrUpdateSession(user, "ip", userAgent))
      await(dao.listSessions) should have length 1
      // A mismatched IP must not delete the session.
      await(userActor.deleteSession(user, AuthKey(Some(session1.token), Some("Other IP"), Some(userAgent))))
      await(dao.listSessions) should have length 1
      await(userActor.deleteSession(user, AuthKey(Some(session1.token), Some(session1.ip), Some(userAgent))))
      await(dao.listSessions) should have length 0
    }
    "not create more than one session when logging in twice" in {
      await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      userService ! Login(UserPass("user", "pass"), AuthKey(None, Some("ip"), Some("userAgent")))
      expectMsgType[LoggedIn]
      await(dao.listUsers(0, 10)) should have length 1
      await(dao.listSessions) should have length 1
      // Second login from the same ip/agent reuses the existing session row.
      userService ! Login(UserPass("user", "pass"), AuthKey(None, Some("ip"), Some("userAgent")))
      expectMsgType[LoggedIn]
      await(dao.listUsers(0, 10)) should have length 1
      await(dao.listSessions) should have length 1
    }
    "not allow logging in if credentials are invalid" in {
      await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      userService ! Login(UserPass("user", "incorrect password"), AuthKey(None, Some("ip"), Some("userAgent")))
      expectMsg(LoginFailed)
    }
    "not allow logging in if information on IP address and/or user agent is missing" in {
      await(dao.insert(ApiUser(-1, "user", UserRole.USER).withPassword("pass")))
      userService ! Login(UserPass("user", "pass"), AuthKey(None, None, Some("userAgent")))
      expectMsg(LoginFailed)
      userService ! Login(UserPass("user", "pass"), AuthKey(None, Some("ip"), None))
      expectMsg(LoginFailed)
      userService ! Login(UserPass("user", "pass"), AuthKey(None, None, None))
      expectMsg(LoginFailed)
    }
  }
} | slicebox/slicebox | src/test/scala/se/nimsa/sbx/user/UserServiceActorTest.scala | Scala | apache-2.0 | 5,706 |
package org.scaladebugger.docs.layouts.partials.common
import org.scaladebugger.docs.styles.PageStyle
import scalatags.Text.all._
/**
* Generates a lined content structure.
*/
object LinedContent {
  /**
   * Builds a lined-content block: a marker on the left, the given content
   * on the right, both wrapped in the lined-content container.
   */
  def apply(markerText: String, content: Modifier): Modifier = {
    val leftMarker = span(PageStyle.marker, PageStyle.linedContentLeft)(markerText)
    val rightBody = span(PageStyle.linedContentRight)(content)
    Raw(leftMarker, rightBody)
  }

  /** Takes raw content to place inside a lined content block. */
  def Raw(content: Modifier*): Modifier =
    div(PageStyle.linedContent)(content: _*)
}
| chipsenkbeil/scala-debugger | scala-debugger-docs/src/main/scala/org/scaladebugger/docs/layouts/partials/common/LinedContent.scala | Scala | apache-2.0 | 546 |
package io.ddf.flink.utils
import it.unimi.dsi.fastutil.BigListIterator
import it.unimi.dsi.fastutil.objects.ObjectBigListIterators
import org.apache.flink.api.common.io.GenericInputFormat
import org.apache.flink.api.table.Row
import org.apache.flink.core.io.GenericInputSplit
/**
 * Flink input format that serves rows from the in-memory RowCache.
 * Always produces `maxpartitions` splits; each parallel instance iterates
 * over the cached rows registered under its own partition number.
 */
class RowCacheInputFormat(filePath: String, maxpartitions: Int) extends GenericInputFormat[Row] {

  // Iterator over this instance's split. Initialised in open(); Flink
  // guarantees open() runs before nextRecord()/reachedEnd().
  private var currentSplit: BigListIterator[Row] = null

  // The numSplits hint is deliberately ignored: the cache is already
  // partitioned into exactly `maxpartitions` pieces.
  // (Replaces the previous deprecated-style postfix `toArray` expression.)
  override def createInputSplits(numSplits: Int): Array[GenericInputSplit] =
    (0 until maxpartitions).map(i => new GenericInputSplit(i, maxpartitions)).toArray

  override def open(split: GenericInputSplit): Unit = {
    super.open(split)
    // Missing cache entry => empty iterator, so the split simply yields no rows.
    currentSplit = RowCache.getSplit(filePath, this.partitionNumber.toString) match {
      case Some(l) => l.listIterator()
      case None => ObjectBigListIterators.EMPTY_BIG_LIST_ITERATOR.asInstanceOf[BigListIterator[Row]]
    }
  }

  // The `ot` reuse parameter is ignored: cached Row instances are returned directly.
  override def nextRecord(ot: Row): Row = currentSplit.next()

  override def reachedEnd(): Boolean = !currentSplit.hasNext
}
| ddf-project/ddf-flink | flink/src/main/scala/io/ddf/flink/utils/RowCacheInputFormat.scala | Scala | apache-2.0 | 1,080 |
package com.burness.algorithm.demo
import com.burness.utils.{SortMap, ReadXML}
import scala.collection.mutable.Map
/**
* Created by burness on 16/5/19.
*/
object ExampleLRConfig {
  /** Demo entry point: loads the LR job configuration and prints it keyed by numeric pipeline id. */
  def main(args: Array[String]) {
    val configPath = getClass.getResource("/config/lrJobConf.xml").getPath
    val reader = new ReadXML(configPath)
    // Re-key the parsed entries by their pipeline id as an Int.
    val byPipelineId = reader.read().map { case (pipeLineId, list) => pipeLineId.toInt -> list }.toMap
    println(byPipelineId)
  }
}
| spark-mler/algorithmEngine | src/main/scala/com.burness/algorithm/demo/ExampleLRConfig.scala | Scala | apache-2.0 | 446 |
package com.seanshubin.learn.datomic.prototype
import datomic.Peer
/**
 * Dev scratch entry point: creates a Datomic database against a locally
 * running transactor and prints how long the call took. Requires the
 * transactor to be started first (see the command comments below).
 */
object DevDatomic extends App {
  //bin/transactor config/samples/dev-transactor-template.properties
  val (duration, _) = Timer.time {
    val datomicUri = "datomic:free://localhost:4334/world"
    Peer.createDatabase(datomicUri)
  }
  println(DurationFormat.MillisecondsFormat.format(duration))
  //bin/console --port 5334 mem datomic:dev://localhost:4334/hello
  //http://localhost:5334/browse
}
| SeanShubin/learn-datomic | prototype/src/test/scala/com/seanshubin/learn/datomic/prototype/DevDatomic.scala | Scala | unlicense | 468 |
package lila.push
import org.joda.time.DateTime
import reactivemongo.api.bson._
import lila.db.dsl._
import lila.user.User
/**
 * Persistence layer for push-notification devices: lookup by device id or
 * user, and register/unregister of (platform, deviceId, userId) bindings.
 */
final private class DeviceApi(coll: Coll)(implicit ec: scala.concurrent.ExecutionContext) {
  implicit private val DeviceBSONHandler = Macros.handler[Device]
  private[push] def findByDeviceId(deviceId: String): Fu[Option[Device]] =
    coll.find($id(deviceId)).one[Device]
  // Up to `max` devices for the user on the given platform, most recently seen first.
  private[push] def findLastManyByUserId(platform: String, max: Int)(userId: String): Fu[List[Device]] =
    coll
      .find(
        $doc(
          "platform" -> platform,
          "userId" -> userId
        )
      )
      .sort($doc("seenAt" -> -1))
      .cursor[Device]()
      .list(max)
  private[push] def findLastOneByUserId(platform: String)(userId: String): Fu[Option[Device]] =
    findLastManyByUserId(platform, 1)(userId) dmap (_.headOption)
  // Upsert keyed on deviceId: re-registering an existing device rebinds it
  // to the given user and refreshes its seenAt timestamp.
  def register(user: User, platform: String, deviceId: String) = {
    lila.mon.push.register.in(platform).increment()
    coll.update
      .one(
        $id(deviceId),
        Device(
          _id = deviceId,
          platform = platform,
          userId = user.id,
          seenAt = DateTime.now
        ),
        upsert = true
      )
      .void
  }
  // Removes every device registered to the user, across all platforms.
  def unregister(user: User) = {
    lila.mon.push.register.out.increment()
    coll.delete.one($doc("userId" -> user.id)).void
  }
  def delete(device: Device) =
    coll.delete.one($id(device._id)).void
}
| luanlv/lila | modules/push/src/main/DeviceApi.scala | Scala | mit | 1,435 |
package com.wavesplatform.transaction.serialization.impl
import java.nio.ByteBuffer
import com.google.common.primitives.{Bytes, Longs}
import com.wavesplatform.account.AddressScheme
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.crypto
import com.wavesplatform.serialization.ByteBufferOps
import com.wavesplatform.transaction.lease.LeaseCancelTransaction
import com.wavesplatform.transaction.{Proofs, TxVersion}
import play.api.libs.json.{JsObject, Json}
import scala.util.Try
/**
 * Binary/JSON (de)serialization for LeaseCancelTransaction.
 *
 * Wire layouts (why the branches differ):
 *  - V1: [typeId | sender | fee | timestamp | leaseId] ++ signature
 *  - V2: 0-byte prefix, then [typeId | version | chainId | body] ++ proofs
 *  - later versions delegate to the protobuf serializer.
 */
object LeaseCancelTxSerializer {
  def toJson(tx: LeaseCancelTransaction): JsObject =
    BaseTxJson.toJson(tx) ++ Json.obj("leaseId" -> tx.leaseId.toString) ++
      (if (tx.version == TxVersion.V2) Json.obj("chainId" -> tx.chainId) else Json.obj())
  // Bytes that get signed: header depends on version, payload layout is shared.
  def bodyBytes(tx: LeaseCancelTransaction): Array[Byte] = {
    import tx._
    val baseBytes = Bytes.concat(sender.arr, Longs.toByteArray(fee), Longs.toByteArray(timestamp), leaseId.arr)
    version match {
      case TxVersion.V1 => Bytes.concat(Array(typeId), baseBytes)
      case TxVersion.V2 => Bytes.concat(Array(typeId, version, chainId), baseBytes)
      case _            => PBTransactionSerializer.bodyBytes(tx)
    }
  }
  // Full transaction bytes: body plus signature (V1) or proofs with a leading 0 marker (V2).
  def toBytes(tx: LeaseCancelTransaction): Array[Byte] = {
    tx.version match {
      case TxVersion.V1 => Bytes.concat(this.bodyBytes(tx), tx.proofs.toSignature.arr)
      case TxVersion.V2 => Bytes.concat(Array(0: Byte), this.bodyBytes(tx), tx.proofs.bytes())
      case _            => PBTransactionSerializer.bytes(tx)
    }
  }
  def parseBytes(bytes: Array[Byte]): Try[LeaseCancelTransaction] = Try {
    // Shared payload parser; proofs/signature are attached by the caller
    // because their position and format differ between versions.
    def parseCommonPart(version: TxVersion, buf: ByteBuffer): LeaseCancelTransaction = {
      val sender    = buf.getPublicKey
      val fee       = buf.getLong
      val timestamp = buf.getLong
      val leaseId   = buf.getByteArray(crypto.DigestLength)
      LeaseCancelTransaction(version, sender, ByteStr(leaseId), fee, timestamp, Nil, AddressScheme.current.chainId)
    }

    require(bytes.length > 2, "buffer underflow while parsing transaction")
    // Leading 0 byte distinguishes the V2 frame from a V1 frame (whose first byte is the type id).
    if (bytes(0) == 0) {
      require(bytes(1) == LeaseCancelTransaction.typeId, "transaction type mismatch")
      require(bytes(2) == TxVersion.V2, "transaction version mismatch")
      val buf = ByteBuffer.wrap(bytes, 4, bytes.length - 4)
      parseCommonPart(TxVersion.V2, buf).copy(proofs = buf.getProofs)
    } else {
      require(bytes(0) == LeaseCancelTransaction.typeId, "transaction type mismatch")
      val buf = ByteBuffer.wrap(bytes, 1, bytes.length - 1)
      parseCommonPart(TxVersion.V1, buf).copy(proofs = Proofs(buf.getSignature))
    }
  }
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/transaction/serialization/impl/LeaseCancelTxSerializer.scala | Scala | mit | 2,624 |
package com.socrata.pg.store.events
import com.socrata.soql.environment.ColumnName
import scala.language.reflectiveCalls
import com.socrata.datacoordinator.id.{ColumnId, UserColumnId}
import com.socrata.datacoordinator.secondary.{ColumnCreated, ColumnInfo, SystemRowIdentifierChanged}
import com.socrata.pg.store.{PGSecondaryTestBase, PGSecondaryUniverseTestBase, PGStoreTestBase}
import com.socrata.soql.types.SoQLID
/**
 * Verifies that SystemRowIdentifierChanged events mark exactly one column
 * as the system primary key, and that re-designating it is rejected.
 */
class SystemRowIdentifierChangedHandlerTest extends PGSecondaryTestBase with PGSecondaryUniverseTestBase with PGStoreTestBase {
  test("handle SystemRowIdentifierChanged") {
    withPgu() { pgu =>
      val f = workingCopyCreatedFixture
      // Create the :id column, then declare it the system row identifier.
      val events = f.events ++ Seq(
        ColumnCreated(ColumnInfo(new ColumnId(9124), new UserColumnId(":id"), Some(ColumnName(":id")), SoQLID, false, false, false, None)),
        SystemRowIdentifierChanged(ColumnInfo(new ColumnId(9124), new UserColumnId(":id"), Some(ColumnName(":id")), SoQLID, false, false, false, None))
      )
      f.pgs.doVersion(pgu, f.datasetInfo, f.dataVersion + 1, f.dataVersion + 1, None, events.iterator)
      val truthCopyInfo = getTruthCopyInfo(pgu, f.datasetInfo)
      val schema = pgu.datasetMapReader.schema(truthCopyInfo)
      // Exactly one column may carry the system-primary-key flag.
      schema.values.filter(_.isSystemPrimaryKey) should have size (1)
    }
  }
  test("SystemRowIdentifierChanged should refuse to run a second time") {
    withPgu() { pgu =>
      val f = workingCopyCreatedFixture
      // The same designation event twice in one batch must be rejected.
      val events = f.events ++ Seq(
        ColumnCreated(ColumnInfo(new ColumnId(9124), new UserColumnId(":id"), Some(ColumnName(":id")), SoQLID, false, false, false, None)),
        SystemRowIdentifierChanged(ColumnInfo(new ColumnId(9124), new UserColumnId(":id"), Some(ColumnName(":id")), SoQLID, false, false, false, None)),
        SystemRowIdentifierChanged(ColumnInfo(new ColumnId(9124), new UserColumnId(":id"), Some(ColumnName(":id")), SoQLID, false, false, false, None))
      )
      intercept[UnsupportedOperationException] {
        f.pgs.doVersion(pgu, f.datasetInfo, f.dataVersion + 1, f.dataVersion + 1, None, events.iterator)
      }
    }
  }
}
| socrata-platform/soql-postgres-adapter | store-pg/src/test/scala/com/socrata/pg/store/events/SystemRowIdentifierChangedHandlerTest.scala | Scala | apache-2.0 | 2,108 |
package knot.net.config
import org.scalatest.FunSpec
import org.scalatest.Matchers._
/** Sanity-checks the default TCP configuration values loaded by NetConfigs. */
class NetConfigsSpec extends FunSpec {
  describe("tcp") {
    it("buffer") {
      NetConfigs.tcp.defaultBufferSize should be(1500)
      NetConfigs.tcp.bufferPoolMaxSize should be(100)
    }
    it("stream"){
      NetConfigs.tcp.stream.writeBufferSize should be(16 * 1024)
    }
  }
}
| defvar/knot | knot-net/src/test/scala/knot/net/config/NetConfigsSpec.scala | Scala | mit | 376 |
package scala.tools.nsc.interpreter.shell
//import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
// Test fixtures supplying the layout knobs (screen width, across vs down
// ordering, inter-column margin) for the two tabulator variants under test.
case class Tabby(width: Int = 80, isAcross: Boolean = false, marginSize: Int = 3) extends Tabulator
case class VTabby(width: Int = 80, isAcross: Boolean = false, marginSize: Int = 3) extends VariColumnTabulator
@RunWith(classOf[JUnit4])
class TabulatorTest {
@Test def oneliner() = {
val sut = Tabby()
val items = List("a", "b", "c")
val res = sut tabulate items
assert(res.size == 1)
assert(res(0).size == 1)
assert(res(0)(0) startsWith "a")
assert(res(0)(0) endsWith "c")
}
@Test def twoliner() = {
val sut = Tabby(width = 40)
val items = List("a" * 15, "b" * 15, "c" * 15)
val res = sut tabulate items
assert(res.size == 2)
assert(res(0).size == 2)
assert(res(1).size == 2) // trailing empty strings
assert(res(1)(0) startsWith "b")
}
@Test def twolinerx() = {
val sut = Tabby(width = 40, isAcross = true)
val items = List("a" * 15, "b" * 15, "c" * 15)
val res = sut tabulate items
assert(res.size == 2)
assert(res(0).size == 2)
assert(res(1).size == 1) // no trailing empty strings
assert(res(1)(0) startsWith "c")
}
// before, two 9-width cols don't fit in 20
// but now, 5-col and 9-col do fit.
@Test def twolinerVariable() = {
val sut = VTabby(width = 20)
val items = (1 to 9) map (i => i.toString * i)
val rows = sut tabulate items
assert(rows.size == 5)
assert(rows(0).size == 2)
assert(rows(0)(0).size == 8) // width is 55555 plus margin of 3
}
@Test def sys() = {
val sut = VTabby(width = 40)
val items = List("BooleanProp", "PropImpl", "addShutdownHook", "error",
"process", "CreatorImpl", "ShutdownHookThread", "allThreads",
"exit", "props", "Prop", "SystemProperties",
"env", "package", "runtime")
val rows = sut tabulate items
assert(rows.size == 8)
assert(rows(0).size == 2)
assert(rows(0)(0).size == "ShutdownHookThread".length + sut.marginSize) // 21
}
@Test def syswide() = {
val sut = VTabby(width = 120)
val items = List("BooleanProp", "PropImpl", "addShutdownHook", "error",
"process", "CreatorImpl", "ShutdownHookThread", "allThreads",
"exit", "props", "Prop", "SystemProperties",
"env", "package", "runtime")
val rows = sut tabulate items
assert(rows.size == 2)
assert(rows(0).size == 8)
assert(rows(0)(0).size == "BooleanProp".length + sut.marginSize) // 14
}
@Test def resultFits() = {
val sut = VTabby(width = 10)
// each of two lines would fit, but layout is two cols of width six > 10
// therefore, should choose ncols = 1
val items = List("a", "bcd",
"efg", "h")
val rows = sut tabulate items
assert(rows.size == 4)
assert(rows(0).size == 1)
assert(rows(0)(0).size == "efg".length + sut.marginSize) // 6
}
@Test def badFit() = {
val sut = VTabby(isAcross = true)
val items = ('a' until 'z').map(_.toString).toList
val rows = sut tabulate items
assert(rows.size == 2)
assert(rows(0).size == 20) // 20 * 4 = 80
assert(rows(1)(0).dropRight(sut.marginSize) == "u")
}
@Test def badFitter() = {
val sut = VTabby(isAcross = true)
val items = List (
"%", "&", "*", "+", "-", "/", ">", ">=", ">>", ">>>", "^",
"asInstanceOf", "isInstanceOf", "toByte", "toChar", "toDouble", "toFloat",
"toInt", "toLong", "toShort", "toString", "unary_+", "unary_-", "unary_~", "|"
)
val rows = sut tabulate items
assert(rows.size == 4)
assert(rows(3).size == 4) // 7 cols
assert(rows(3)(0).dropRight(sut.marginSize) == "unary_+")
}
}
| martijnhoekstra/scala | test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala | Scala | apache-2.0 | 3,886 |
package com.acework.js
package object logger {
  /** Single logger instance shared by the whole `logger` package. */
  private val sharedLogger = LoggerFactory.getLogger("Log")

  /** Default logger accessor used across the package. */
  def log = sharedLogger
}
| lvitaly/scalajs-react-bootstrap | core/src/main/scala/com/acework/js/logger/package.scala | Scala | mit | 138 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.{UnsafeArrayWriter, UnsafeRowWriter, UnsafeWriter}
import org.apache.spark.sql.catalyst.util.ArrayData
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{UserDefinedType, _}
import org.apache.spark.unsafe.Platform
/**
* An interpreted unsafe projection. This class reuses the [[UnsafeRow]] it produces, a consumer
* should copy the row if it is being buffered. This class is not thread safe.
*
* @param expressions that produces the resulting fields. These expressions must be bound
* to a schema.
*/
class InterpretedUnsafeProjection(expressions: Array[Expression]) extends UnsafeProjection {
  import InterpretedUnsafeProjection._
  // Whether common subexpressions should be evaluated only once per input row.
  private[this] val subExprEliminationEnabled = SQLConf.get.subexpressionEliminationEnabled
  // Lazy: the evaluation runtime is only instantiated when elimination is enabled.
  private[this] lazy val runtime =
    new SubExprEvaluationRuntime(SQLConf.get.subexpressionEliminationCacheMaxEntries)
  // The expressions actually evaluated: proxied (for subexpression reuse) or the originals.
  private[this] val exprs = if (subExprEliminationEnabled) {
    runtime.proxyExpressions(expressions)
  } else {
    expressions.toSeq
  }
  /** Number of (top level) fields in the resulting row. */
  private[this] val numFields = expressions.length
  /** Array that holds the expression results. */
  private[this] val values = new Array[Any](numFields)
  /** The row representing the expression results. */
  private[this] val intermediate = new GenericInternalRow(values)
  /* The row writer for UnsafeRow result */
  private[this] val rowWriter = new UnsafeRowWriter(numFields, numFields * 32)
  /** The writer that writes the intermediate result to the result row. */
  private[this] val writer: InternalRow => Unit = {
    val baseWriter = generateStructWriter(
      rowWriter,
      expressions.map(e => StructField("", e.dataType, e.nullable)))
    if (!expressions.exists(_.nullable)) {
      // No nullable fields. The top-level null bit mask will always be zeroed out.
      baseWriter
    } else {
      // Zero out the null bit mask before we write the row.
      row => {
        rowWriter.zeroOutNullBytes()
        baseWriter(row)
      }
    }
  }
  /** Propagates the partition index to any non-deterministic expressions. */
  override def initialize(partitionIndex: Int): Unit = {
    exprs.foreach(_.foreach {
      case n: Nondeterministic => n.initialize(partitionIndex)
      case _ =>
    })
  }
  /** Evaluates all expressions against `row` and serializes the results into the reused UnsafeRow. */
  override def apply(row: InternalRow): UnsafeRow = {
    if (subExprEliminationEnabled) {
      runtime.setInput(row)
    }
    // Put the expression results in the intermediate row.
    var i = 0
    while (i < numFields) {
      values(i) = exprs(i).eval(row)
      i += 1
    }
    // Write the intermediate row to an unsafe row.
    rowWriter.reset()
    writer(intermediate)
    rowWriter.getRow()
  }
}
/**
* Helper functions for creating an [[InterpretedUnsafeProjection]].
*/
object InterpretedUnsafeProjection {
  /**
   * Returns an [[UnsafeProjection]] for given sequence of bound Expressions.
   */
  def createProjection(exprs: Seq[Expression]): UnsafeProjection = {
    // We need to make sure that we do not reuse stateful expressions.
    val cleanedExpressions = exprs.map(_.transform {
      case s: Stateful => s.freshCopy()
    })
    new InterpretedUnsafeProjection(cleanedExpressions.toArray)
  }
  /**
   * Generate a struct writer function. The generated function writes an [[InternalRow]] to the
   * given buffer using the given [[UnsafeRowWriter]].
   */
  private def generateStructWriter(
      rowWriter: UnsafeRowWriter,
      fields: Array[StructField]): InternalRow => Unit = {
    val numFields = fields.length
    // Create field writers.
    val fieldWriters = fields.map { field =>
      generateFieldWriter(rowWriter, field.dataType, field.nullable)
    }
    // Create basic writer.
    row => {
      var i = 0
      while (i < numFields) {
        fieldWriters(i).apply(row, i)
        i += 1
      }
    }
  }
  /**
   * Generate a writer function for a struct field, array element, map key or map value. The
   * generated function writes the element at an index in a [[SpecializedGetters]] object (row
   * or array) to the given buffer using the given [[UnsafeWriter]].
   */
  private def generateFieldWriter(
      writer: UnsafeWriter,
      dt: DataType,
      nullable: Boolean): (SpecializedGetters, Int) => Unit = {
    // Create the basic writer.
    val unsafeWriter: (SpecializedGetters, Int) => Unit = dt match {
      case BooleanType =>
        (v, i) => writer.write(i, v.getBoolean(i))
      case ByteType =>
        (v, i) => writer.write(i, v.getByte(i))
      case ShortType =>
        (v, i) => writer.write(i, v.getShort(i))
      case IntegerType | DateType | _: YearMonthIntervalType =>
        (v, i) => writer.write(i, v.getInt(i))
      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
        (v, i) => writer.write(i, v.getLong(i))
      case FloatType =>
        (v, i) => writer.write(i, v.getFloat(i))
      case DoubleType =>
        (v, i) => writer.write(i, v.getDouble(i))
      case DecimalType.Fixed(precision, scale) =>
        (v, i) => writer.write(i, v.getDecimal(i, precision, scale), precision, scale)
      case CalendarIntervalType =>
        (v, i) => writer.write(i, v.getInterval(i))
      case BinaryType =>
        (v, i) => writer.write(i, v.getBinary(i))
      case StringType =>
        (v, i) => writer.write(i, v.getUTF8String(i))
      case StructType(fields) =>
        val numFields = fields.length
        val rowWriter = new UnsafeRowWriter(writer, numFields)
        val structWriter = generateStructWriter(rowWriter, fields)
        (v, i) => {
          v.getStruct(i, fields.length) match {
            case row: UnsafeRow =>
              // Already in serialized form: copy the bytes wholesale.
              writer.write(i, row)
            case row =>
              val previousCursor = writer.cursor()
              // Nested struct. We don't know where this will start because a row can be
              // variable length, so we need to update the offsets and zero out the bit mask.
              rowWriter.resetRowWriter()
              structWriter.apply(row)
              writer.setOffsetAndSizeFromPreviousCursor(i, previousCursor)
          }
        }
      case ArrayType(elementType, containsNull) =>
        val arrayWriter = new UnsafeArrayWriter(writer, getElementSize(elementType))
        val elementWriter = generateFieldWriter(
          arrayWriter,
          elementType,
          containsNull)
        (v, i) => {
          val previousCursor = writer.cursor()
          writeArray(arrayWriter, elementWriter, v.getArray(i))
          writer.setOffsetAndSizeFromPreviousCursor(i, previousCursor)
        }
      case MapType(keyType, valueType, valueContainsNull) =>
        // Map keys are never nullable, hence `nullable = false` for the key writer.
        val keyArrayWriter = new UnsafeArrayWriter(writer, getElementSize(keyType))
        val keyWriter = generateFieldWriter(
          keyArrayWriter,
          keyType,
          nullable = false)
        val valueArrayWriter = new UnsafeArrayWriter(writer, getElementSize(valueType))
        val valueWriter = generateFieldWriter(
          valueArrayWriter,
          valueType,
          valueContainsNull)
        (v, i) => {
          v.getMap(i) match {
            case map: UnsafeMapData =>
              writer.write(i, map)
            case map =>
              val previousCursor = writer.cursor()
              // preserve 8 bytes to write the key array numBytes later.
              valueArrayWriter.grow(8)
              valueArrayWriter.increaseCursor(8)
              // Write the keys and write the numBytes of key array into the first 8 bytes.
              writeArray(keyArrayWriter, keyWriter, map.keyArray())
              Platform.putLong(
                valueArrayWriter.getBuffer,
                previousCursor,
                valueArrayWriter.cursor - previousCursor - 8
              )
              // Write the values.
              writeArray(valueArrayWriter, valueWriter, map.valueArray())
              writer.setOffsetAndSizeFromPreviousCursor(i, previousCursor)
          }
        }
      case udt: UserDefinedType[_] =>
        // Serialize a UDT value using the writer of its underlying SQL type.
        generateFieldWriter(writer, udt.sqlType, nullable)
      case NullType =>
        (_, _) => {}
      case _ =>
        throw new IllegalStateException(s"The data type '${dt.typeName}' is not supported in " +
          "generating a writer function for a struct field, array element, map key or map value.")
    }
    // Always wrap the writer with a null safe version.
    // The setNullXBytes variant must match the field's physical width in the row
    // (1 for Boolean/Byte, 2 for Short, 4 for Int/Date/Float, 8 otherwise).
    dt match {
      case _: UserDefinedType[_] =>
        // The null wrapper depends on the sql type and not on the UDT.
        unsafeWriter
      case DecimalType.Fixed(precision, _) if precision > Decimal.MAX_LONG_DIGITS =>
        // We can't call setNullAt() for DecimalType with precision larger than 18, we call write
        // directly. We can use the unwrapped writer directly.
        unsafeWriter
      case BooleanType | ByteType =>
        (v, i) => {
          if (!v.isNullAt(i)) {
            unsafeWriter(v, i)
          } else {
            writer.setNull1Bytes(i)
          }
        }
      case ShortType =>
        (v, i) => {
          if (!v.isNullAt(i)) {
            unsafeWriter(v, i)
          } else {
            writer.setNull2Bytes(i)
          }
        }
      case IntegerType | DateType | FloatType =>
        (v, i) => {
          if (!v.isNullAt(i)) {
            unsafeWriter(v, i)
          } else {
            writer.setNull4Bytes(i)
          }
        }
      case _ =>
        (v, i) => {
          if (!v.isNullAt(i)) {
            unsafeWriter(v, i)
          } else {
            writer.setNull8Bytes(i)
          }
        }
    }
  }
  /**
   * Get the number of bytes elements of a data type will occupy in the fixed part of an
   * [[UnsafeArrayData]] object. Reference types are stored as an 8 byte combination of an
   * offset (upper 4 bytes) and a length (lower 4 bytes), these point to the variable length
   * portion of the array object. Primitives take up to 8 bytes, depending on the size of the
   * underlying data type.
   */
  private def getElementSize(dataType: DataType): Int = dataType match {
    case NullType | StringType | BinaryType | CalendarIntervalType |
         _: DecimalType | _: StructType | _: ArrayType | _: MapType => 8
    case _ => dataType.defaultSize
  }
  /**
   * Write an array to the buffer. If the array is already in serialized form (an instance of
   * [[UnsafeArrayData]]) then we copy the bytes directly, otherwise we do an element-by-element
   * copy.
   */
  private def writeArray(
      arrayWriter: UnsafeArrayWriter,
      elementWriter: (SpecializedGetters, Int) => Unit,
      array: ArrayData): Unit = array match {
    case unsafe: UnsafeArrayData =>
      arrayWriter.write(unsafe)
    case _ =>
      val numElements = array.numElements()
      arrayWriter.initialize(numElements)
      var i = 0
      while (i < numElements) {
        elementWriter.apply(array, i)
        i += 1
      }
  }
}
| shaneknapp/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InterpretedUnsafeProjection.scala | Scala | apache-2.0 | 11,843 |
package net.mtgto.confluence4s
/**
 * The entity model of the summary of a Confluence space.
 *
 * @see https://developer.atlassian.com/display/CONFDEV/Remote+Confluence+Data+Objects#RemoteConfluenceDataObjects-SpaceSummary
 * @param key the space key
 * @param name the name of the space
 * @param url the url to view this space online
 */
case class SpaceSummary(
  key: String,
  name: String,
  url: String
)
| mtgto/confluence4s | src/main/scala/net/mtgto/confluence4s/SpaceSummary.scala | Scala | bsd-3-clause | 401 |
// See LICENSE for license details.
package util
import scala.collection.immutable.ListMap
import chisel3._
/**
 * A Chisel Record backed by an ordered sequence of (possibly heterogeneous) wires,
 * exposing Scala IndexedSeq access by position.
 */
class HVec[T<:Data](wires: Seq[T]) extends Record with collection.IndexedSeq[T] {
  // Positional access into the underlying wires.
  def apply(x: Int) = wires(x)
  // Record requires a name -> element map; use each wire's index as its name.
  val elements = ListMap(wires.zipWithIndex.map { case (n,i) => (i.toString, n) }:_*)
  def length = wires.length
  // Chisel needs cloneType to duplicate the hardware type; clone every element.
  override def cloneType: this.type = new HVec(wires.map(_.cloneType)).asInstanceOf[this.type]
}
object HVec {
  // Factory mirroring Vec's companion-object constructors.
  def apply[T<:Data](wires: Seq[T]) = new HVec(wires)
  // Build an HVec of `size` elements, generating each one from its index.
  def tabulate[T<:Data](size: Int)(gen: Int => T) = HVec(Seq.tabulate(size) { i => gen(i)})
}
| stanford-ppl/spatial-lang | spatial/core/resources/chiselgen/template-level/fringeHW/util/HVec.scala | Scala | mit | 599 |
package ch.ninecode.cim
import java.util.regex.Pattern
import scala.collection.mutable.ArrayBuffer
/**
* Context for parsing.
* Contains the raw XML, indexes at which to start and stop parsing,
* the line number index of newlines within the XML,
* text coverage set (in debug) and error messages raised while parsing.
*
* @param xml The current xml string being parsed.
* @param start The starting character position of the xml string - non-zero if not the first Split.
* @param end The ending character position at which to stop parsing.
* @param first_byte The byte offset of the first character to be parsed.
*/
class CIMContext (var xml: String, val start: Long, var end: Long, var first_byte: Long)
{
    import CIMContext._
    /**
     * The array of character positions of newlines in the xml string.
     */
    val newlines: ArrayBuffer[Long] = index_string(xml, start)
    /**
     * An array of string start and end offsets that have been parsed.
     */
    val coverage: ArrayBuffer[(Int, Int)] = new ArrayBuffer[(Int, Int)]
    /**
     * An array of up to MAXERRORS error messages.
     */
    val errors: ArrayBuffer[String] = new ArrayBuffer[String]
    /**
     * The byte offset of the last successfully parsed full element.
     */
    var last_byte: Long = first_byte
    /**
     * The internal XML for an element being parsed.
     */
    var subxml: String = _
    /**
     * Create an index of newline characters in a string.
     * The index of newlines for the string
     * "Now is the time\\nfor all good men\\nto come to the aid of the party\\n"
     * is [15, 32, 64]
     *
     * @param string the string to index
     * @param offset optional offset to add to the index values
     * @param n the buffer that newline positions are appended to (in place)
     * @return the buffer n containing the character positions of the newlines
     */
    def index_string (string: String, offset: Long = 0L, n: ArrayBuffer[Long] = ArrayBuffer[Long]()): ArrayBuffer[Long] =
    {
        val matcher = lines.matcher(string)
        while (matcher.find())
        {
            val _ = n += (matcher.start() + offset)
        }
        n
    }
    /**
     * Get the line number for the given offset value.
     * Uses a binary search through the newline array to determine where the
     * given offset lies in the source stream.
     *
     * @param offset the character position in the stream
     * @return the line number (1 + how many newlines precede the offset)
     */
    @SuppressWarnings(Array("org.wartremover.warts.Return"))
    def line_number (offset: Long = end): Int =
    {
        var min = 0
        var max = newlines.length - 1
        var index = min
        while (min <= max)
        {
            // `| 0` is a no-op for Int (bitwise or with zero); midpoint of the search window.
            index = (min + max) / 2 | 0
            val item = newlines(index)
            if (item < offset)
                min = index + 1
            else
                if (item > offset)
                    max = index - 1
                else
                    // exact hit on a newline: the offset belongs to that line
                    return index + 1
        }
        if (newlines(index) <= offset)
            index += 1
        index + 1
    }
    /**
     * Check that all characters were consumed by parsing.
     * Used to find attributes and references that are not understood by the model.
     *
     * @return <code>true</code> if all non-whitespace characters were parsed.
     */
    def covered (): Boolean =
    {
        var ret: Boolean = true
        var index: Int = 0
        // scan the gaps between successive parsed spans for non-whitespace
        for (pair <- coverage.sorted)
        {
            while (ret && index < pair._1)
            {
                ret &&= subxml.charAt(index).isWhitespace
                if (!ret && errors.size < MAXERRORS)
                {
                    val _ = errors += """Unknown content "%s" at line %d""".format(subxml.substring(index, pair._1).trim(), line_number())
                }
                index += 1
            }
            index = pair._2
        }
        // check the tail after the last parsed span
        while (ret && index < subxml.length())
        {
            ret &&= subxml.charAt(index).isWhitespace
            if (!ret && errors.size < MAXERRORS)
            {
                val _ = errors += """Unknown content "%s" at line %d""".format(subxml.substring(index, subxml.length()).trim(), line_number())
            }
            index += 1
        }
        ret
    }
    /**
     * Output a debugging string of this context.
     */
    override def toString: String =
    {
        s""""${subxml.substring(0, 50)}..." @ $end character $last_byte byte"""
    }
}
object CIMContext
{
    /**
     * Flag: perform extra sanity checks (e.g. coverage) while parsing.
     */
    var DEBUG = true
    /**
     * Flag: return no elements after an error has been raised.
     */
    var STOP_ON_ERROR = false
    /**
     * Limit error accumulation to this many messages.
     */
    var MAXERRORS = 10
    /**
     * Regular expression for line counting (matches newline characters).
     */
    val lines: Pattern = Pattern.compile("""\\n""")
}
| derrickoswald/CIMScala | CIMReader/src/main/scala/ch/ninecode/cim/CIMContext.scala | Scala | mit | 4,824 |
// Copyright 2019 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid.lang
/** A set of types and methods for manipulating class, object, field, and method definitions using Squid.
* This is notably used as the output of the `@lift` macro. */
/** A set of types and methods for manipulating class, object, field, and method
  * definitions using Squid; mixed into a [[Base]] implementation. */
trait Definitions extends Base {
  type FieldGetter <: MtdSymbol
  type FieldSetter <: MtdSymbol
  // TODO impl monomorphization of classes and methods
  // For monomorphized classes, we need to transform all the type references everywhere... so the process will take
  // a bunch of other external code that uses the class and adapt it, too.
  // The root scope: no enclosing definitions, no members.
  object TopLevel extends Scope {
    type Scp = Any
    val members = Nil
  }
  // A lexical scope holding member definitions; nested scopes refine `Scp`.
  trait Scope { outerScope =>
    type Scp
    sealed abstract class Member
    val members: List[Member]
    // Something carrying type parameters (a class or a method).
    trait Parameterized {
      val tparams: List[TypParam]
      // NOTE(review): eager val referencing abstract `tparams` — relies on
      // implementors initializing `tparams` first; confirm initialization order.
      val typeParams: List[CodeType[_]] =
        tparams.map(tp => CodeType(staticTypeApp(tp, Nil)))
    }
    trait ClassWithObject[C] extends Clasz[C] {
      val companion: Some[outerScope.Object[_]]
    }
    trait ClassWithoutObject[C] extends Clasz[C] {
      val companion: None.type = None
    }
    trait ObjectWithClass[C] extends Object[C] {
      val companion: Some[outerScope.Clasz[_]]
    }
    trait ObjectWithoutClass[C] extends Object[C] {
      val companion: None.type = None
    }
    abstract class Object[C: CodeType](val name: String) extends ClassOrObject[C] {
      val companion: Option[outerScope.Clasz[_]]
    }
    /* Note: in Scala 2.11, naming this Class results in strange failures, as in:
     * java.lang.NoClassDefFoundError: squid/lang/Definitions$Scope$Class (wrong name: squid/lang/Definitions$Scope$class) */
    abstract class Clasz[C: CodeType](val name: String, val tparams: List[TypParam]) extends ClassOrObject[C] with Parameterized {
      val companion: Option[outerScope.Object[_]]
      val self: Variable[C]
    }
    // Common representation for classes and objects: a named member that is itself a scope.
    abstract class ClassOrObject[C](implicit val C: CodeType[C]) extends Member with Scope {
      type Scp <: outerScope.Scp
      // TODO should have special ctor method(s)...
      val name: String
      val parents: List[CodeType[_]]
      val fields: List[Field[_]]
      val methods: List[Method[_,_]]
      lazy val members: List[Member] = fields ::: methods
      abstract class FieldOrMethod[A](implicit val A: CodeType[A]) extends Member {
        val symbol: MtdSymbol
      }
      type AnyField = Field[_]
      // A field: getter symbol, optional setter (for vars), and an initializer.
      class Field[A0: CodeType](
        val name: String,
        val get: FieldGetter,
        val set: Option[FieldSetter],
        val init: Code[A0,Scp]
      ) extends FieldOrMethod[A0] {
        type A = A0
        val symbol: MtdSymbol = get
        //println(s"FIELD $this")
        override def toString = s"va(l/r) ${symbol}: ${A.rep} = ${showRep(init.rep)}"
      }
      type AnyMethod[S <: Scp] = Method[_, S]
      // A method: symbol, type parameters, (possibly multiple) parameter lists and a body.
      class Method[A0: CodeType, S <: Scp](
        val symbol: MtdSymbol,
        val tparams: List[TypParam],
        val vparams: List[List[Variable[_]]],
        val body: Code[A0,S]
      ) extends FieldOrMethod[A0] with Parameterized {
        type Scp = S
        type A = A0
        //println(s"METHOD $this")
        override def toString = s"def ${symbol}[${tparams.mkString(",")}]${vparams.map(vps => vps.map(vp =>
          //s"${vp.`internal bound`}: ${vp.Typ.rep}"
          s"$vp"
        ).mkString("(",",",")")).mkString}: ${A.rep} = ${showRep(body.rep)}"
      }
      // A helper for creating Field objects; used by the `@lift` macro
      def mkField(name: String, get: MtdSymbol, set: Option[MtdSymbol], init: Rep)(typ: TypeRep): Field[_] =
        new Field[Any](name, get.asInstanceOf[FieldGetter],
          set.map(_.asInstanceOf[FieldSetter]),Code(init))(CodeType[Any](typ))
      // TODO an API for modifying these constructs in a safe way...
      /*
      sealed abstract class MethodTransformation[-A]
      case object Remove extends MethodTransformation[Any]
      case class Rewrite[A](newBody: Code[A,Scp]) extends MethodTransformation[A]
      //def transform(trans: Map[Method[_],MethodTransformation])
      def transform(trans: List[MethodTransformation[_]]) = ???
      */
    }
  }
}
| epfldata/squid | core/src/main/scala/squid/lang/Definitions.scala | Scala | apache-2.0 | 4,854 |
package org.tribbloid.spookystuff.expression
import org.tribbloid.spookystuff.SpookyEnvSuite
import org.tribbloid.spookystuff.actions.Wget
import org.tribbloid.spookystuff.entity.PageRow
import org.tribbloid.spookystuff.expressions.NamedFunction1
/**
* Created by peng on 12/3/14.
*/
class TestExprView extends SpookyEnvSuite {
import org.tribbloid.spookystuff.dsl._
  // Fetch the Wikipedia landing page once, lazily, through the shared spooky context.
  lazy val page = (
    Wget("http://www.wikipedia.org/") :: Nil
    ).resolve(spooky)
  // A single PageRow with the page title extracted under the alias 'abc.
  lazy val row = PageRow(pageLikes = page)
    .select($"title".head.text.~('abc))
    .head
  // A bare Symbol used as an expression resolves the previously selected alias.
  test("symbol as Expr"){
    assert('abc.apply(row) === Some("Wikipedia"))
  }
test("andThen"){
val fun = 'abc.andThen(_.map(_.toString))
assert(fun.name === "abc.<function1>")
assert(fun(row) === Some("Wikipedia"))
val fun2 = 'abc.andThen(NamedFunction1(_.map(_.toString),"after"))
assert(fun2.name === "abc.after")
assert(fun(row) === Some("Wikipedia"))
}
test("andMap"){
val fun = 'abc.andMap(_.toString)
assert(fun.name === "abc.<function1>")
assert(fun(row) === Some("Wikipedia"))
val fun2 = 'abc.andMap(_.toString, "after")
assert(fun2.name === "abc.after")
assert(fun(row) === Some("Wikipedia"))
}
test("andFlatMap"){
val fun = 'abc.andFlatMap(_.toString.headOption)
assert(fun.name === "abc.<function1>")
assert(fun(row) === Some('W'))
val fun2 = 'abc.andFlatMap(_.toString.headOption, "after")
assert(fun2.name === "abc.after")
assert(fun(row) === Some('W'))
}
} | chenUT/spookystuff | core/src/test/scala/org/tribbloid/spookystuff/expression/TestExprView.scala | Scala | apache-2.0 | 1,516 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic
import org.scalatest._
import scala.collection.GenSeq
import scala.collection.GenMap
import scala.collection.GenSet
import scala.collection.GenIterable
import scala.collection.GenTraversable
import scala.collection.GenTraversableOnce
import scala.collection.{mutable,immutable}
class ConversionCheckedMapEqualityConstraintsSpec extends Spec with NonImplicitAssertions with ConversionCheckedTripleEquals with MapEqualityConstraints {
  // Fixture hierarchy: Sub <: Super, used to exercise subtype-based constraints.
  case class Super(size: Int)
  class Sub(sz: Int) extends Super(sz)
  val super1: Super = new Super(1)
  val sub1: Sub = new Sub(1)
  val super2: Super = new Super(2)
  val sub2: Sub = new Sub(2)
  val nullSuper: Super = null
  // Unrelated siblings under a common parent: Apple and Orange share only Fruit.
  case class Fruit(name: String)
  class Apple extends Fruit("apple")
  class Orange extends Fruit("orange")
  object `the MapEqualityConstraints trait` {
    def `should allow any Map to be compared with any other Map, so long as the element types of the two Maps adhere to the equality constraint in force for those types` {
      // Same key/value types across mutable/immutable Map implementations compare fine.
      assert(mutable.HashMap('a' -> 1, 'b' -> 2, 'c' -> 3) === immutable.HashMap('a' -> 1, 'b' -> 2, 'c' -> 3))
      assert(mutable.HashMap('a' -> 1, 'b' -> 2, 'c' -> 3) === immutable.HashMap('a' -> 1L, 'b' -> 2L, 'c' -> 3L)) // does not compile last time I checked
      assert(mutable.HashMap('a' -> 1L, 'b' -> 2L, 'c' -> 3L) === immutable.HashMap('a' -> 1, 'b' -> 2, 'c' -> 3)) // does not compile last time I checked
      assert(immutable.HashMap('a' -> 1, 'b' -> 2, 'c' -> 3) === mutable.HashMap('a' -> 1L, 'b' -> 2L, 'c' -> 3L)) // does not compile last time I checked
      assert(immutable.HashMap('a' -> 1L, 'b' -> 2L, 'c' -> 3L) === mutable.HashMap('a' -> 1, 'b' -> 2, 'c' -> 3)) // does not compile last time I checked
      // Value types in a subtype relation (Apple <: Fruit) are comparable...
      assert(mutable.HashMap('a' -> new Apple, 'b' -> new Apple) === immutable.HashMap('a' -> new Fruit("apple"), 'b' -> new Fruit("apple")))
      assert(immutable.HashMap('a' -> new Fruit("apple"), 'b' -> new Fruit("apple")) === mutable.HashMap('a' -> new Apple, 'b' -> new Apple))
      // ...but unrelated siblings (Apple vs Orange) must not even compile.
      assertTypeError("mutable.HashMap('a' -> new Apple, 'b' -> new Apple) === immutable.HashMap('a' -> new Orange, 'b' -> new Orange)")
      assertTypeError("immutable.HashMap('a' -> new Apple, 'b' -> new Apple) === mutable.HashMap('a' -> new Orange, 'b' -> new Orange)")
      assertTypeError("immutable.HashMap('a' -> new Orange, 'b' -> new Orange) === mutable.HashMap('a' -> new Apple, 'b' -> new Apple)")
      assertTypeError("mutable.HashMap('a' -> new Orange, 'b' -> new Orange) === immutable.HashMap('a' -> new Apple, 'b' -> new Apple)")
    }
  }
}
| travisbrown/scalatest | src/test/scala/org/scalactic/ConversionCheckedMapEqualityConstraintsSpec.scala | Scala | apache-2.0 | 3,194 |
package org.jetbrains.jps.incremental.scala
package remote
import java.net.{ConnectException, InetAddress, UnknownHostException}
import org.jetbrains.jps.incremental.ModuleLevelBuilder.ExitCode
import org.jetbrains.jps.incremental.scala.data.{CompilationData, CompilerData, SbtData}
/**
* @author Pavel Fatin
*/
class RemoteServer(val address: InetAddress, val port: Int) extends Server with RemoteResourceOwner {
  /**
   * Runs a compilation on the remote compile server, reporting diagnostics
   * through the given client.
   *
   * If the server cannot be reached, the build transparently falls back to the
   * local (in-process) server; an unresolvable host name aborts the build.
   */
  def compile(sbtData: SbtData, compilerData: CompilerData, compilationData: CompilationData, client: Client): ExitCode = {
    val arguments = Arguments(sbtData, compilerData, compilationData, Seq.empty).asStrings
    try {
      send(serverAlias, arguments, client)
      ExitCode.OK
    } catch {
      case e: ConnectException =>
        val firstLine = s"Cannot connect to compile server at ${address.toString}:$port"
        val secondLine = "Trying to compile without it"
        val message = s"$firstLine\\n$secondLine"
        client.warning(message)
        client.debug(s"$firstLine\\n${e.toString}\\n${e.getStackTrace.mkString("\\n")}")
        // Fall back to compiling with the in-process server.
        ScalaBuilder.localServer.compile(sbtData, compilerData, compilationData, client)
      case e: UnknownHostException =>
        val message = "Unknown IP address of compile server host: " + address.toString
        client.error(message)
        client.debug(s"$message\\n${e.toString}\\n${e.getStackTrace.mkString("\\n")}")
        ExitCode.ABORT
    }
  }
} | triplequote/intellij-scala | scala/compiler-jps/src/org/jetbrains/jps/incremental/scala/remote/RemoteServer.scala | Scala | apache-2.0 | 1,428 |
package org.jetbrains.plugins.scala
package debugger.evaluation
import com.intellij.debugger.codeinsight.RuntimeTypeEvaluator
import com.intellij.debugger.engine.ContextUtil
import com.intellij.debugger.engine.evaluation.expression.ExpressionEvaluator
import com.intellij.debugger.engine.evaluation.{CodeFragmentKind, EvaluationContextImpl, TextWithImportsImpl}
import com.intellij.debugger.impl.DebuggerContextImpl
import com.intellij.debugger.{DebuggerBundle, DebuggerInvocationUtil, EvaluatingComputable}
import com.intellij.openapi.application.{AccessToken, ReadAction}
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.progress.ProgressIndicator
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.Key
import com.intellij.psi._
import com.intellij.psi.impl.source.PsiImmediateClassType
import com.intellij.psi.search.GlobalSearchScope
import com.sun.jdi.{ClassType, Type, Value}
import org.jetbrains.annotations.Nullable
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaRuntimeTypeEvaluator._
import org.jetbrains.plugins.scala.debugger.evaluation.util.DebuggerUtil
import org.jetbrains.plugins.scala.extensions.inReadAction
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScModifierListOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.ScType.ExtractClass
/**
* Nikolay.Tropin
* 8/8/13
*/
/**
 * Evaluates the runtime (dynamic) type of an expression in the debugged process,
 * so the debugger can offer a more precise cast than the static type.
 */
abstract class ScalaRuntimeTypeEvaluator(@Nullable editor: Editor, expression: PsiElement, context: DebuggerContextImpl, indicator: ProgressIndicator)
  extends RuntimeTypeEvaluator(editor, expression, context, indicator) {
  override def evaluate(evaluationContext: EvaluationContextImpl): PsiType = {
    val project: Project = evaluationContext.getProject
    // Building the evaluator touches PSI, so it must run inside a read action.
    val evaluator: ExpressionEvaluator = DebuggerInvocationUtil.commitAndRunReadAction(project, new EvaluatingComputable[ExpressionEvaluator] {
      def compute: ExpressionEvaluator = {
        val textWithImports = new TextWithImportsImpl(CodeFragmentKind.CODE_BLOCK, expression.getText)
        val codeFragment = new ScalaCodeFragmentFactory().createCodeFragment(textWithImports, expression, project)
        ScalaEvaluatorBuilder.build(codeFragment, ContextUtil.getSourcePosition(evaluationContext))
      }
    })
    // Run the expression in the debuggee and map the JDI value back to a PSI type.
    val value: Value = evaluator.evaluate(evaluationContext)
    if (value != null) {
      inReadAction {
        Option(getCastableRuntimeType(project, value)).map(new PsiImmediateClassType(_, PsiSubstitutor.EMPTY)).orNull
      }
    } else throw EvaluationException(DebuggerBundle.message("evaluation.error.surrounded.expression.null"))
  }
}
object ScalaRuntimeTypeEvaluator {
  // User-data key under which an expression-to-type evaluation function is stashed.
  val KEY: Key[ScExpression => ScType] = Key.create("SCALA_RUNTIME_TYPE_EVALUATOR")
  /**
   * Maps a JDI runtime value to a PsiClass suitable for a cast: the value's own
   * class if resolvable, otherwise the nearest resolvable superclass or interface.
   */
  def getCastableRuntimeType(project: Project, value: Value): PsiClass = {
    val unwrapped = DebuggerUtil.unwrapScalaRuntimeObjectRef(value)
    val jdiType: Type = unwrapped.asInstanceOf[Value].`type`
    var psiClass: PsiClass = findPsiClass(project, jdiType)
    if (psiClass != null) {
      return psiClass
    }
    jdiType match {
      case classType: ClassType =>
        val superclass: ClassType = classType.superclass
        // Skip top types that would make any cast useless.
        val stdTypeNames = Seq("java.lang.Object", "scala.Any", "scala.AnyRef", "scala.AnyVal")
        if (superclass != null && !stdTypeNames.contains(superclass.name)) {
          psiClass = findPsiClass(project, superclass)
          if (psiClass != null) {
            return psiClass
          }
        }
        // Fall back to the first resolvable implemented interface, if any.
        import scala.collection.JavaConversions._
        classType.interfaces.map(findPsiClass(project, _)).find(_ != null).orNull
      case _ => null
    }
  }
  // Resolves a JDI type name to a PsiClass inside an explicit read action.
  private def findPsiClass(project: Project, jdiType: Type): PsiClass = {
    val token: AccessToken = ReadAction.start
    try {
      new ScalaPsiManager(project).getCachedClass(GlobalSearchScope.allScope(project), jdiType.name())
    }
    finally {
      token.finish()
    }
  }
  /**
   * Whether a value of `scType` can meaningfully have a more specific runtime subtype
   * (i.e. is not final, an object, or a primitive).
   */
  def isSubtypeable(scType: ScType): Boolean = {
    scType match {
      case ExtractClass(psiClass) =>
        psiClass match {
          case _: ScObject => false
          case owner: ScModifierListOwner => !owner.hasFinalModifier
          // NOTE(review): guard tests scType, not psiClass — looks intentional for
          // primitive wrappers, but worth confirming.
          case _ if scType.isInstanceOf[PsiPrimitiveType] => false
          case _ => !psiClass.hasModifierProperty(PsiModifier.FINAL)
        }
      case _ => false
    }
  }
}
| SergeevPavel/intellij-scala | src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaRuntimeTypeEvaluator.scala | Scala | apache-2.0 | 4,597 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbis.pig.op
/**
* Display represents the DISPLAY operator used to produce data for Zeppelin.
*
* @param in the input pipe
*/
case class Display(private val in: Pipe) extends PigOperator(List(), List(in)) {
  /**
   * Builds the lineage string of this operator by prefixing the lineage of the
   * sub-plan that produces its input.
   *
   * @return a string representation of the sub-plan.
   */
  override def lineageString: String = "DISPLAY%" + super.lineageString
}
| ksattler/piglet | src/main/scala/dbis/pig/op/Display.scala | Scala | apache-2.0 | 1,276 |
package build
import java.nio.charset.StandardCharsets
import java.nio.file._
import com.google.common.jimfs.Jimfs
import org.scalajs.jsenv._
import org.scalajs.jsenv.nodejs._
import org.scalajs.linker.interface.ESVersion
final class NodeJSEnvForcePolyfills(esVersion: ESVersion, config: NodeJSEnv.Config) extends JSEnv {
  // Convenience constructor using the default NodeJSEnv configuration.
  def this(esVersion: ESVersion) = this(esVersion, NodeJSEnv.Config())
  // Human-readable environment name reported by the JS runner.
  val name: String = s"Node.js forcing polyfills for $esVersion"
  // Underlying Node.js environment that actually runs the scripts.
  // Deactivate source maps if esVersion < ES2015 because source-map-support requires `Map`
  private val nodeJSEnv = {
    val config1 =
      if (esVersion >= ESVersion.ES2015) config
      else config.withSourceMap(false)
    new NodeJSEnv(config1)
  }
  // Prepend the polyfill-deleting script so the run exercises the polyfills.
  def start(input: Seq[Input], runConfig: RunConfig): JSRun =
    nodeJSEnv.start(forcePolyfills +: input, runConfig)
  // Like `start`, but with a communication channel back to the JVM (`onMessage`).
  def startWithCom(input: Seq[Input], runConfig: RunConfig,
      onMessage: String => Unit): JSComRun = {
    nodeJSEnv.startWithCom(forcePolyfills +: input, runConfig, onMessage)
  }
/** File to force all our ES 2015 polyfills to be used, by deleting the
* native functions.
*/
private def forcePolyfills(): Input = {
import ESVersion._
def cond(version: ESVersion, stat: String): String =
if (esVersion < version) stat
else ""
var script = ""
if (esVersion < ES2015) {
script += """
|delete Object.is;
|
|delete Reflect.ownKeys;
|
|delete Math.fround;
|delete Math.imul;
|delete Math.clz32;
|delete Math.log10;
|delete Math.log1p;
|delete Math.cbrt;
|delete Math.hypot;
|delete Math.expm1;
|delete Math.sinh;
|delete Math.cosh;
|delete Math.tanh;
|
|delete global.Map;
|delete global.Promise;
|delete global.Set;
|delete global.Symbol;
|
|delete global.Int8Array;
|delete global.Int16Array;
|delete global.Int32Array;
|delete global.Uint8Array;
|delete global.Uint16Array;
|delete global.Uint32Array;
|delete global.Float32Array;
|delete global.Float64Array;
|
|delete String.prototype.repeat;
""".stripMargin
}
if (esVersion < ES2017) {
script += """
|delete Object.getOwnPropertyDescriptors;
""".stripMargin
}
if (true) { // esVersion < ES2022 ('d' flag)
script += s"""
|global.RegExp = (function(OrigRegExp) {
| return function RegExp(pattern, flags) {
| if (typeof flags === 'string') {
|${cond(ES2015, """
| if (flags.indexOf('u') >= 0)
| throw new SyntaxError("unsupported flag 'u'");
| if (flags.indexOf('y') >= 0)
| throw new SyntaxError("unsupported flag 'y'");
|""".stripMargin)}
|${cond(ES2018, """
| if (flags.indexOf('s') >= 0)
| throw new SyntaxError("unsupported flag 's'");
|""".stripMargin)}
| if (flags.indexOf('d') >= 0)
| throw new SyntaxError("unsupported flag 'd'");
| }
|
|${cond(ES2018, """
| if (typeof pattern === 'string') {
| if (pattern.indexOf('(?<=') >= 0 || pattern.indexOf('(?<!') >= 0)
| throw new SyntaxError("unsupported look-behinds");
| if (pattern.indexOf('(?<') >= 0)
| throw new SyntaxError("unsupported named capture groups");
| if (pattern.indexOf('\\\\\\\\p{') >= 0 || pattern.indexOf('\\\\\\\\P{') >= 0)
| throw new SyntaxError("unsupported Unicode character classes");
| }
|""".stripMargin)}
|
| return new OrigRegExp(pattern, flags);
| }
|})(global.RegExp);
""".stripMargin
}
val p = Files.write(
Jimfs.newFileSystem().getPath("scalaJSEnvInfo.js"),
script.getBytes(StandardCharsets.UTF_8))
Input.Script(p)
}
}
| scala-js/scala-js | project/NodeJSEnvForcePolyfills.scala | Scala | apache-2.0 | 4,026 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.dispatch
import akka.actor.ActorCell
import scala.concurrent.duration.Duration
import scala.concurrent.duration.FiniteDuration
/**
 * Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue.
 *
 * The preferred way of creating dispatchers is to define configuration of it and use
 * the `lookup` method in [[akka.dispatch.Dispatchers]].
 */
class PinnedDispatcher(
  _configurator: MessageDispatcherConfigurator,
  _actor: ActorCell,
  _id: String,
  _shutdownTimeout: FiniteDuration,
  _threadPoolConfig: ThreadPoolConfig)
  // Pool pinned to exactly one thread (corePoolSize = maxPoolSize = 1);
  // throughput Int.MaxValue with a zero deadline keeps the actor on it.
  extends Dispatcher(_configurator,
    _id,
    Int.MaxValue,
    Duration.Zero,
    _threadPoolConfig.copy(corePoolSize = 1, maxPoolSize = 1),
    _shutdownTimeout) {
  // The single actor owning this dispatcher; null once detached.
  @volatile
  private var owner: ActorCell = _actor
  //Relies on an external lock provided by MessageDispatcher.attach
  protected[akka] override def register(actorCell: ActorCell) = {
    val actor = owner
    if ((actor ne null) && actorCell != actor) throw new IllegalArgumentException("Cannot register to anyone but " + actor)
    owner = actorCell
    super.register(actorCell)
  }
  //Relies on an external lock provided by MessageDispatcher.detach
  protected[akka] override def unregister(actor: ActorCell) = {
    super.unregister(actor)
    owner = null
  }
}
| Fincore/org.spark-project.akka | actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala | Scala | mit | 1,403 |
/*
Copyright 2013 NICTA
This file is part of t3as (Text Analysis As A Service).
t3as is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
t3as is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with t3as. If not, see <http://www.gnu.org/licenses/>.
*/
package org.t3as.patClas.common
import scala.collection.JavaConversions._
import org.scalatest.{Matchers, FlatSpec}
import org.t3as.patClas.common.CPCUtil.ClassificationItem
/** Unit tests for the common Util / IPCUtil / CPCUtil helpers. */
class TestUtil extends FlatSpec with Matchers {
  "properties" should "load from classpath" in {
    val p = Util.properties("/util-test.properties")
    p("name") should be ("value")
  }
  "toText" should "concatenate all text node descendents in document order, trimming whitespace" in {
    val xml = """<a> text1 <elem>text 2</elem> <b>text 3<elem>text 4</elem> </b> <elem>text 5</elem>text 6</a>"""
    val s = Util.toText(xml)
    s should be ("text1\\ntext 2\\ntext 3\\ntext 4\\ntext 5\\ntext 6")
  }
  // Strips leading occurrences of the given character.
  "ltrim" should "left trim" in {
    for ((in, out) <- Seq(("", ""), ("0", ""), ("00", ""), ("01", "1"), ("001", "1"), ("010", "10"), ("00100", "100"))) {
      Util.ltrim(in, '0') should be (out)
    }
  }
  // Strips trailing occurrences of the given character.
  "rtrim" should "right trim" in {
    for ((in, out) <- Seq(("", ""), ("0", ""), ("00", ""), ("10", "1"), ("100", "1"), ("010", "01"), ("00100", "001"))) {
      Util.rtrim(in, '0') should be (out)
    }
  }
  "Classification" should "update parentId" in {
    ClassificationItem(None, -1, true, true, true, "2013-01-01", 2, "symbol", "classTitle", "notesAndWarnings").copy(parentId = 3).parentId should be (3)
  }
  // IPC 14-char symbols convert to the shorter CPC style (trailing zeros trimmed).
  "toCpcFormat" should "convert to CPC style format" in {
    for ((in, out) <- Seq(("A01B0012987000", "A01B12/987"), ("A01B0012986000", "A01B12/986"), ("A01B0012000000", "A01B12"))) {
      IPCUtil.toCpcFormat(in) should be (out)
    }
  }
  "IPCUtil.ipcToText" should "concatenate all text node descendents and sref/@ref in document order, trimming whitespace" in {
    val xml = """<a> text1 <elem>text 2</elem> <b>text 3 ref to <sref ref="A01B0012986000"/><elem>text 4</elem> </b> <elem>text 5</elem>text 6</a>"""
    val s = IPCUtil.ipcToText(xml)
    s should be ("text1\\ntext 2\\ntext 3 ref to\\nA01B12/986\\ntext 4\\ntext 5\\ntext 6")
  }
}
| NICTA/t3as-pat-clas | pat-clas-common/src/test/scala/org/t3as/patClas/common/TestUtil.scala | Scala | gpl-3.0 | 2,714 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.submit.submitsteps.hadoopsteps
import io.fabric8.kubernetes.api.model.{Container, Pod, Secret}
/**
* Represents a given configuration of the hadoop configuration logic, informing the
* HadoopConfigBootstrapStep of how the driver should be configured. This includes:
* <p>
* - What Spark properties should be set on the driver's SparkConf for the executors
* - The spec of the main container so that it can be modified to share volumes
* - The spec of the driver pod EXCEPT for the addition of the given hadoop configs (e.g. volumes
* the hadoop logic needs)
* - The properties that will be stored into the config map which have (key, value)
* pairs of (path, data)
* - The secret containing a DT, either previously specified or built on the fly
* - The name of the secret where the DT will be stored
* - The data item-key on the secret which correlates with where the current DT data is stored
*/
private[spark] case class HadoopConfigSpec(
    // Spark properties to set on the driver's SparkConf for the executors.
    additionalDriverSparkConf: Map[String, String],
    // Driver pod spec, EXCEPT for the addition of the given hadoop configs.
    driverPod: Pod,
    // Main container spec, so it can be modified to share volumes.
    driverContainer: Container,
    // (path, data) pairs that will be stored into the config map.
    configMapProperties: Map[String, String],
    // Secret containing a delegation token, previously specified or built on the fly.
    dtSecret: Option[Secret],
    // Name of the secret where the delegation token will be stored.
    dtSecretName: String,
    // Data item-key on the secret holding the current delegation token data.
    dtSecretItemKey: String)
| apache-spark-on-k8s/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/hadoopsteps/HadoopConfigSpec.scala | Scala | apache-2.0 | 2,032 |
package lampetia.cg.samples
import lampetia.cg.CodeGenerator
import lampetia.metamodel.Dsl._
import lampetia.metamodel.Module
/**
* @author Hossam Karim
*/
/** Sample metamodel: generates a "Bank" service module via the lampetia DSL. */
object BankModule extends App {
  // Base package and module descriptor for the generated code.
  val base = "com.bank"
  val module = Module("Bank", base, "bank")
  // Enum persisted through a string discriminator column.
  val CustomerTypeModel =
    enum("CustomerType")("Individual".ecase, "Organization".ecase).withDiscriminator("customerType".string) <+ commonFeatures
  val CountryModel =
    entity("Country")("name".name) <+ commonFeatures
  val CityModel =
    entity("City")("name".name, "country" ref CountryModel.id).withResourceType(s"$base.city:1.0") <+ commonFeatures
  // Value object and composites reused inside the Bank entity below.
  val AddressModel = value("Address")("value".string) <+ commonFeatures
  val BranchModel = composite("Branch")("branchName".name, "branchAddress" of AddressModel) <+ commonFeatures
  // Stored as a JSONB column on the SQL side.
  val BuildingModel =
    composite("Building")("buildingName".name, "buildingManager" of SubjectId) << (jsonbComposite in Sql)
  val EmployeeModel =
    entity("Employee")("name".name) <+ commonFeatures
  // Main aggregate: mixes references, a flattened composite and a JSONB composite.
  val BankModel =
    entity("Bank")(
      "address" of AddressModel,
      "city" ref CityModel.id,
      "manager" ref EmployeeModel.id,
      "customerType" of CustomerTypeModel,
      ("branch" of BranchModel) << (flatten() in Sql),
      ("building" of BuildingModel) << (jsonbComposite in Sql),
      "comments".jsond).withResourceType(s"$base.bank:1.0") <+ commonFeatures
  // Models passed to the generator; id types are listed before their entities.
  val models = Seq(
    CountryModel.id.tpe,
    CountryModel,
    CityModel.id.tpe,
    CityModel,
    EmployeeModel.id.tpe,
    EmployeeModel,
    CustomerTypeModel,
    BankModel.id.tpe,
    AddressModel,
    BranchModel,
    BuildingModel,
    BankModel)
  CodeGenerator.serviceGenerator(module, models).generate()
}
| rosama86/lampetia | lampetia-code-gen/src/main/scala/lampetia/cg/samples/BankModule.scala | Scala | mit | 1,724 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.query
import com.twitter.finagle.httpx.Request
import com.twitter.finatra.annotations.Flag
import com.twitter.util.Time
import com.twitter.zipkin.storage.QueryRequest
import javax.inject.Inject
import scala.collection.mutable
import scala.util.Try
// TODO: rewrite me into a normal finatra case class
class QueryExtractor @Inject()(@Flag("zipkin.queryService.limit") defaultQueryLimit: Int) {

  /**
   * Builds the appropriate `QueryRequest` from the GET parameters present
   * on `req`. The result is a `Failure` when a numeric parameter fails to
   * parse.
   */
  def apply(req: Request): Try[QueryRequest] = Try {
    val serviceName = req.params.get("serviceName").getOrElse("")

    // "all" (or an empty value) means "no span-name filter".
    val spanName = req.params.get("spanName").filterNot(n => n == "all" || n == "")

    val endTs = req.params.getLong("endTs").getOrElse(Time.now.inMicroseconds)
    val limit = req.params.get("limit").fold(defaultQueryLimit)(_.toInt)

    // annotationQuery is a list of terms joined by " and ": a bare term is a
    // plain annotation, "key=value" is a binary annotation. Empty keys and
    // terms with more than one '=' are dropped, as before.
    val terms = req.params.get("annotationQuery")
      .toSeq
      .flatMap(_.split(" and "))
      .map(_.split("=").toList)

    val annotations = terms.collect { case key :: Nil if key.nonEmpty => key }.toSet
    val binaryAnnotations = terms.collect { case key :: value :: Nil => (key, value) }.toSet

    QueryRequest(serviceName, spanName, annotations, binaryAnnotations, endTs, limit)
  }
}
| jfeltesse-mdsol/zipkin | zipkin-query/src/main/scala/com/twitter/zipkin/query/QueryExtractor.scala | Scala | apache-2.0 | 2,169 |
/**
* Intel Intrinsics for Lightweight Modular Staging Framework
* https://github.com/ivtoskov/lms-intrinsics
* Department of Computer Science, ETH Zurich, Switzerland
* __ _ __ _ _
* / /____ ___ _____ (_)____ / /_ _____ (_)____ _____ (_)_____ _____
* / // __ `__ \\ / ___/______ / // __ \\ / __// ___// // __ \\ / ___// // ___// ___/
* / // / / / / /(__ )/_____// // / / // /_ / / / // / / /(__ )/ // /__ (__ )
* /_//_/ /_/ /_//____/ /_//_/ /_/ \\__//_/ /_//_/ /_//____//_/ \\___//____/
*
* Copyright (C) 2017 Ivaylo Toskov (itoskov@ethz.ch)
* Alen Stojanov (astojanov@inf.ethz.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.ethz.acl.intrinsics
/** Intel microarchitecture generations supported by the generated intrinsics. */
object MicroArchType extends Enumeration {
  type MicroArchType = Value

  // Declaration order fixes the ids (Haswell = 0 .. Westmere = 4),
  // exactly as the original one-val-per-line form did.
  val Haswell, IvyBridge, Nehalem, SandyBridge, Westmere = Value
}
| ivtoskov/lms-intrinsics | src/main/scala/ch/ethz/acl/intrinsics/MicroArchType.scala | Scala | apache-2.0 | 1,523 |
package models.manager.commands
import models.manager.commands.traits.SimpleCommand
/**
 * Created by rbrowning on 5/12/17.
 *
 * Sent from the endpoints to the manager; instructs the player to start
 * playing the next item in the playlist. Carries no payload.
 */
case class PlayNextItem() extends SimpleCommand
| rebrowning/entertainment-cluster-member | src/main/scala/models/manager/commands/PlayNextItem.scala | Scala | apache-2.0 | 301 |
package japgolly.scalajs.react.extra
import japgolly.scalajs.react._
import japgolly.scalajs.react.test.ReactTestUtils
import japgolly.scalajs.react.vdom.prefix_<^._
import org.scalajs.dom.Event
import utest._
import TestUtil._
/** Verifies that EventListener.install reacts only to the configured DOM event. */
object EventListenerTest extends TestSuite {
  // Component whose state counts how many "hello" events reached it.
  val C = ReactComponentB[Unit]("")
    .initialState(0)
    .backend(_ => new OnUnmount.Backend)
    .render((_, state, _) => <.div(s"Hit $state times"))
    .configure(EventListener.install("hello", $ => () => $.modState(_ + 1)))
    .buildU
  override def tests = TestSuite {
    val c = ReactTestUtils.renderIntoDocument(C())
    // Fires a bubbling, cancelable DOM event of the given name on the root node.
    def dispatch(name: String) = {
      val e = new Event
      e.initEvent(name, true, true)
      c.getDOMNode() dispatchEvent e
    }
    c.state mustEqual 0
    // An unrelated event name must not trigger the listener.
    dispatch("xx")
    c.state mustEqual 0
    dispatch("hello")
    c.state mustEqual 1
  }
}
| beni55/scalajs-react | test/src/test/scala/japgolly/scalajs/react/extra/EventListenerTest.scala | Scala | apache-2.0 | 866 |
package controllers
import db.{GithubUsersDao, TokensDao, UsersDao}
import io.flow.common.v0.models.json._
import io.flow.dependency.api.lib.Github
import io.flow.dependency.v0.models.GithubAuthenticationForm
import io.flow.dependency.v0.models.json._
import io.flow.error.v0.models.json._
import io.flow.play.util.Validation
import play.api.libs.json._
import play.api.mvc._
import scala.concurrent.Future
class GithubUsers @javax.inject.Inject() (
  github: Github,
  usersDao: UsersDao,
  githubUsersDao: GithubUsersDao,
  tokensDao: TokensDao,
  val controllerComponents: ControllerComponents
) extends BaseController {

  import scala.concurrent.ExecutionContext.Implicits.global

  /**
   * Exchanges a GitHub OAuth authentication code for a user. Responds with
   * 422 and validation errors when the JSON body is malformed or the code
   * cannot be resolved to a user.
   */
  def postGithub() = Action.async(parse.json) { request =>
    request.body.validate[GithubAuthenticationForm] match {
      case error: JsError =>
        Future.successful(
          UnprocessableEntity(Json.toJson(Validation.invalidJson(error)))
        )

      case JsSuccess(form, _) =>
        github.getUserFromCode(usersDao, githubUsersDao, tokensDao, form.code).map {
          case Left(errors) => UnprocessableEntity(Json.toJson(Validation.errors(errors)))
          case Right(user)  => Ok(Json.toJson(user))
        }
    }
  }
}
| flowcommerce/dependency | api/app/controllers/GithubUsers.scala | Scala | mit | 1,258 |
package com.automatak.render.dnp3.objects.generators
/** C++ include lines shared by the generated DNP3 group/variation sources. */
object GroupVariationIncludes {

  /** Headers needed by generated read/write declarations. */
  def headerReadWrite: Iterator[String] = List(
    "<openpal/container/RSlice.h>",
    "<openpal/container/WSlice.h>",
    """"opendnp3/app/DNPTime.h""""
  ).iterator

  /** Implementation-only includes for read/write code. */
  def implReadWrite: Iterator[String] = List(
    "<openpal/serialization/Format.h>",
    "<openpal/serialization/Parse.h>"
  ).iterator

  /** Header include for variable-length object types. */
  def headerVariableLength: Iterator[String] = List(
    """"opendnp3/app/IVariableLength.h""""
  ).iterator

  /** Implementation-only includes for variable-length object types. */
  def implVariableLength: Iterator[String] = List(
    "<openpal/serialization/Serialization.h>",
    """"opendnp3/app/parsing/PrefixFields.h""""
  ).iterator
}
| thiagoralves/OpenPLC_v2 | dnp3/generation/dnp3/src/main/scala/com/automatak/render/dnp3/objects/generators/GroupVariationIncludes.scala | Scala | gpl-3.0 | 654 |
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts
import java.io.File
import java.nio.file.Files
import breeze.linalg._
import com.cloudera.sparkts.DateTimeIndex._
import com.github.nscala_time.time.Imports._
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{FunSuite, ShouldMatchers}
/** Tests for TimeSeriesRDD: slicing, filtering, instant conversion and CSV round-trip. */
class TimeSeriesRDDSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
  test("slice") {
    val conf = new SparkConf().setMaster("local").setAppName(getClass.getName)
    TimeSeriesKryoRegistrator.registerKryoClasses(conf)
    sc = new SparkContext(conf)

    // Three series of 10 consecutive values, keyed by their first element.
    val vecs = Array(0 until 10, 10 until 20, 20 until 30)
      .map(_.map(x => x.toDouble).toArray)
      .map(new DenseVector(_))
      .map(x => (x(0).toString, x))
    val start = new DateTime("2015-4-9")
    val index = uniform(start, 10, 1.days)
    val rdd = new TimeSeriesRDD(index, sc.parallelize(vecs))
    // Slice bounds are inclusive on both ends: 6 instants remain.
    val slice = rdd.slice(start + 1.days, start + 6.days)
    slice.index should be (uniform(start + 1.days, 6, 1.days))
    val contents = slice.collectAsMap()
    contents.size should be (3)
    contents("0.0") should be (new DenseVector((1 until 7).map(_.toDouble).toArray))
    contents("10.0") should be (new DenseVector((11 until 17).map(_.toDouble).toArray))
    contents("20.0") should be (new DenseVector((21 until 27).map(_.toDouble).toArray))
  }
  test("filterEndingAfter") {
    val conf = new SparkConf().setMaster("local").setAppName(getClass.getName)
    TimeSeriesKryoRegistrator.registerKryoClasses(conf)
    sc = new SparkContext(conf)

    val vecs = Array(0 until 10, 10 until 20, 20 until 30)
      .map(_.map(x => x.toDouble).toArray)
      .map(new DenseVector(_))
      .map(x => (x(0).toString, x))
    val start = new DateTime("2015-4-9")
    val index = uniform(start, 10, 1.days)
    val rdd = new TimeSeriesRDD(index, sc.parallelize(vecs))
    // All three series end after the start instant, so none are filtered out.
    rdd.filterEndingAfter(start).count() should be (3)
  }
  test("toInstants") {
    val conf = new SparkConf().setMaster("local").setAppName(getClass.getName)
    TimeSeriesKryoRegistrator.registerKryoClasses(conf)
    sc = new SparkContext(conf)

    // Five labeled series of 4 values each; toInstants transposes to one
    // cross-sectional vector per timestamp.
    val seriesVecs = (0 until 20 by 4).map(
      x => new DenseVector((x until x + 4).map(_.toDouble).toArray))
    val labels = Array("a", "b", "c", "d", "e")
    val start = new DateTime("2015-4-9")
    val index = uniform(start, 4, 1.days)
    val rdd = sc.parallelize(labels.zip(seriesVecs.map(_.asInstanceOf[Vector[Double]])), 3)
    val tsRdd = new TimeSeriesRDD(index, rdd)
    val samples = tsRdd.toInstants().collect()
    samples should be (Array(
      (start, new DenseVector((0.0 until 20.0 by 4.0).toArray)),
      (start + 1.days, new DenseVector((1.0 until 20.0 by 4.0).toArray)),
      (start + 2.days, new DenseVector((2.0 until 20.0 by 4.0).toArray)),
      (start + 3.days, new DenseVector((3.0 until 20.0 by 4.0).toArray)))
    )
  }
  test("save / load") {
    val conf = new SparkConf().setMaster("local").setAppName(getClass.getName)
    TimeSeriesKryoRegistrator.registerKryoClasses(conf)
    sc = new SparkContext(conf)

    val vecs = Array(0 until 10, 10 until 20, 20 until 30)
      .map(_.map(x => x.toDouble).toArray)
      .map(new DenseVector(_))
      .map(x => (x(0).toString, x))
    val start = new DateTime("2015-4-9")
    val index = uniform(start, 10, 1.days)
    val rdd = new TimeSeriesRDD(index, sc.parallelize(vecs))

    val tempDir = Files.createTempDirectory("saveload")
    val path = tempDir.toFile.getAbsolutePath
    // saveAsCsv needs a non-existent target directory, so remove the one
    // createTempDirectory just made (its name is still unique).
    new File(path).delete()
    try {
      rdd.saveAsCsv(path)
      val loaded = TimeSeriesRDD.timeSeriesRDDFromCsv(path, sc)
      loaded.index should be (rdd.index)
    } finally {
      // listFiles() returns null when `path` does not exist (e.g. when
      // saveAsCsv failed before creating it); the unguarded call would
      // throw an NPE here and mask the original test failure.
      Option(new File(path).listFiles()).foreach(_.foreach(_.delete()))
      new File(path).delete()
    }
  }
}
| narahari92/spark-timeseries | src/test/scala/com/cloudera/sparkts/TimeSeriesRDDSuite.scala | Scala | apache-2.0 | 4,315 |
/*******************************************************************************
* (C) Copyright 2015 Haifeng Li
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package smile
/** Originally used for data compression, Vector quantization (VQ)
  * allows the modeling of probability density functions by
  * the distribution of prototype vectors. It works by dividing a large set of points
  * (vectors) into groups having approximately the same number of
  * points closest to them. Each group is represented by its centroid
  * point, as in K-Means and some other clustering algorithms.
  *
  * Vector quantization is based on the competitive learning paradigm,
  * and also closely related to sparse coding models
  * used in deep learning algorithms such as autoencoder.
  *
  * Algorithms in this package also support the <code>partition</code>
  * method for clustering purpose.
  *
  * @author Haifeng Li
  */
package object vq extends Operators {
}
| arehart13/smile | scala/src/main/scala/smile/vq/package.scala | Scala | apache-2.0 | 1,554 |
package tuner.gui
import scala.swing.BoxPanel
import scala.swing.Orientation
import scala.swing.ScrollPane
import scala.collection.immutable.SortedMap
//import tuner.gui.event.ReadyToDraw
import tuner.project.Viewable
/**
 * Scrollable panel showing one collapsible histogram per response field of
 * the project. Panels start collapsed; expanding/collapsing toggles the
 * visibility of the contained histogram.
 */
class ResponseStatsPanel(project:Viewable)
    extends ScrollPane {
  //extends BoxPanel(Orientation.Vertical) {
  // One histogram panel per response field, sorted by field name.
  val histogramPanels:SortedMap[String,ResponseHistogramPanel] = {
    val gpm = project.gpModels
    SortedMap[String,ResponseHistogramPanel]() ++
      project.responseFields.map {fld =>
        // NOTE(review): `model` is unused here — ResponseHistogramPanel
        // presumably looks up the GP model itself; confirm before removing.
        val model = gpm(fld)
        val panel = new ResponseHistogramPanel(project, fld)
        listenTo(panel)
        (fld -> panel)
      }
  }
  contents = new BoxPanel(Orientation.Vertical) {
    contents += new CollapsiblePanel(CollapsiblePanel.Scroll) {
      histogramPanels.foreach {case (fld,panel) =>
        //panels += new CollapsiblePanel.CPanel(fld, panel)
        // Wrap each histogram in a japura collapsible panel; hide the
        // histogram while collapsed so it does not repaint needlessly.
        val p = new org.japura.gui.CollapsiblePanel(fld)
        p.addCollapsiblePanelListener(new org.japura.gui.event.CollapsiblePanelAdapter {
          override def panelCollapsed(event:org.japura.gui.event.CollapsiblePanelEvent) = {
            panel.visible = false
          }
          override def panelExpanded(event:org.japura.gui.event.CollapsiblePanelEvent) = {
            panel.visible = true
          }
        })
        p.collapseImmediately
        p.setAnimationEnabled(false)
        p.add(panel.peer)
        peer.add(p)
      }
    }
  }
  // Releases resources held by the individual histogram panels.
  def destroy = histogramPanels.values.foreach {panel => panel.destroy}
  /*
  reactions += {
    case ReadyToDraw(rhp:ResponseHistogramPanel) =>
      updateHistograms(rhp)
  }
  protected def updateHistograms(updatedPanel:ResponseHistogramPanel) = {
    curMin = math.min(curMin, updatedPanel.yAxisTicks.min)
    curMax = math.max(curMax, updatedPanel.yAxisTicks.max)
    val newTicks = List(curMin, (curMin+curMax)/2, curMax)
    histogramPanels.values.foreach {panel =>
      panel.yAxisTicks = newTicks
    }
  }
  */
}
| gabysbrain/tuner | src/main/scala/tuner/gui/ResponseStatsPanel.scala | Scala | mit | 1,998 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions
import org.apache.calcite.rex.RexNode
import org.apache.calcite.sql.SqlAggFunction
import org.apache.calcite.sql.fun._
import org.apache.calcite.tools.RelBuilder
import org.apache.calcite.tools.RelBuilder.AggCall
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.functions.AggregateFunction
import org.apache.flink.table.functions.utils.AggSqlFunction
import org.apache.flink.table.typeutils.TypeCheckUtils
import org.apache.flink.api.common.typeinfo.BasicTypeInfo
import org.apache.flink.api.java.typeutils.MultisetTypeInfo
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
import org.apache.flink.table.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
/**
 * Base class for planner aggregation expressions. Aggregations are never
 * converted to a plain RexNode; they become Calcite AggCalls instead.
 */
abstract sealed class Aggregation extends PlannerExpression {
  override def toString = s"Aggregate"
  override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode =
    throw new UnsupportedOperationException("Aggregate cannot be transformed to RexNode")
  /**
   * Convert Aggregate to its counterpart in Calcite, i.e. AggCall
   */
  private[flink] def toAggCall(
    name: String,
    isDistinct: Boolean = false
  )(implicit relBuilder: RelBuilder): AggCall
  /**
   * Returns the SqlAggFunction for this Aggregation.
   */
  private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder): SqlAggFunction
}
/** Wraps another aggregation and forces its DISTINCT variant. */
case class DistinctAgg(child: PlannerExpression) extends Aggregation {
  def distinct: PlannerExpression = DistinctAgg(child)
  override private[flink] def resultType: TypeInformation[_] = child.resultType
  override private[flink] def validateInput(): ValidationResult = {
    // NOTE(review): the result of super.validateInput() is discarded, so any
    // failure it reports is silently ignored — confirm this is intended.
    super.validateInput()
    child match {
      case agg: Aggregation =>
        child.validateInput()
      case _ =>
        ValidationFailure(s"Distinct modifier cannot be applied to $child! " +
          s"It can only be applied to an aggregation expression, for example, " +
          s"'a.count.distinct which is equivalent with COUNT(DISTINCT a).")
    }
  }
  // Delegates to the wrapped aggregation with the distinct flag forced on.
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = true)(implicit relBuilder: RelBuilder) = {
    child.asInstanceOf[Aggregation].toAggCall(name, isDistinct = true)
  }
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    child.asInstanceOf[Aggregation].getSqlAggFunction()
  }
  override private[flink] def children = Seq(child)
}
/** SUM aggregate over `child`; requires a numeric operand. */
case class Sum(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"sum($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.SUM,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "sum")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    // SUM needs a nullable return type derived from the child's type info.
    val returnType = relBuilder
      .getTypeFactory.asInstanceOf[FlinkTypeFactory]
      .createTypeFromTypeInfo(resultType, isNullable = true)
    new SqlSumAggFunction(returnType)
  }
}
/** SUM0 aggregate over `child` (Calcite's sum-or-zero); requires a numeric operand. */
case class Sum0(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"sum0($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.SUM0,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "sum0")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) =
    SqlStdOperatorTable.SUM0
}
/** MIN aggregate over `child`; requires an orderable operand. */
case class Min(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"min($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.MIN,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertOrderableExpr(child.resultType, "min")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    SqlStdOperatorTable.MIN
  }
}
/** MAX aggregate over `child`; requires an orderable operand. */
case class Max(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"max($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.MAX,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertOrderableExpr(child.resultType, "max")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    SqlStdOperatorTable.MAX
  }
}
/** COUNT aggregate over `child`; always yields a LONG. */
case class Count(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"count($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.COUNT,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = BasicTypeInfo.LONG_TYPE_INFO
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    SqlStdOperatorTable.COUNT
  }
}
/** AVG aggregate over `child`; requires a numeric operand. */
case class Avg(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"avg($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.AVG,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "avg")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    SqlStdOperatorTable.AVG
  }
}
/**
 * Returns a multiset aggregates.
 */
case class Collect(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  // COLLECT wraps the element type in a multiset.
  override private[flink] def resultType: TypeInformation[_] =
    MultisetTypeInfo.getInfoFor(child.resultType)
  override def toString: String = s"collect($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.COLLECT,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    SqlStdOperatorTable.COLLECT
  }
}
/** Population standard deviation of `child`; requires a numeric operand. */
case class StddevPop(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"stddev_pop($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.STDDEV_POP,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "stddev_pop")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) =
    SqlStdOperatorTable.STDDEV_POP
}
/** Sample standard deviation of `child`; requires a numeric operand. */
case class StddevSamp(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"stddev_samp($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.STDDEV_SAMP,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "stddev_samp")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) =
    SqlStdOperatorTable.STDDEV_SAMP
}
/** Population variance of `child`; requires a numeric operand. */
case class VarPop(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"var_pop($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.VAR_POP,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "var_pop")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) =
    SqlStdOperatorTable.VAR_POP
}
/** Sample variance of `child`; requires a numeric operand. */
case class VarSamp(child: PlannerExpression) extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = Seq(child)
  override def toString = s"var_samp($child)"
  override private[flink] def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      SqlStdOperatorTable.VAR_SAMP,
      isDistinct,
      false,
      null,
      name,
      child.toRexNode)
  }
  override private[flink] def resultType = child.resultType
  override private[flink] def validateInput() =
    TypeCheckUtils.assertNumericExpr(child.resultType, "var_samp")
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) =
    SqlStdOperatorTable.VAR_SAMP
}
/**
 * Expression for calling a user-defined aggregate function.
 */
case class AggFunctionCall(
    aggregateFunction: AggregateFunction[_, _],
    resultTypeInfo: TypeInformation[_],
    accTypeInfo: TypeInformation[_],
    args: Seq[PlannerExpression])
  extends Aggregation {
  override private[flink] def children: Seq[PlannerExpression] = args
  override def resultType: TypeInformation[_] = resultTypeInfo
  override def validateInput(): ValidationResult = {
    val signature = children.map(_.resultType)
    // look for a signature that matches the input types
    val foundSignature = getAccumulateMethodSignature(aggregateFunction, signature)
    if (foundSignature.isEmpty) {
      // Report actual vs expected accumulate(...) signatures for diagnostics.
      ValidationFailure(s"Given parameters do not match any signature. \\n" +
        s"Actual: ${signatureToString(signature)} \\n" +
        s"Expected: ${
          getMethodSignatures(aggregateFunction, "accumulate")
            .map(_.drop(1))
            .map(signatureToString)
            .mkString(", ")}")
    } else {
      ValidationSuccess
    }
  }
  override def toString: String = s"${aggregateFunction.getClass.getSimpleName}($args)"
  override def toAggCall(
      name: String, isDistinct: Boolean = false)(implicit relBuilder: RelBuilder): AggCall = {
    relBuilder.aggregateCall(
      this.getSqlAggFunction(),
      isDistinct,
      false,
      null,
      name,
      args.map(_.toRexNode): _*)
  }
  // Wraps the user-defined function in a Calcite-compatible AggSqlFunction.
  override private[flink] def getSqlAggFunction()(implicit relBuilder: RelBuilder) = {
    val typeFactory = relBuilder.getTypeFactory.asInstanceOf[FlinkTypeFactory]
    AggSqlFunction(
      aggregateFunction.functionIdentifier,
      aggregateFunction.toString,
      aggregateFunction,
      resultType,
      accTypeInfo,
      typeFactory,
      aggregateFunction.requiresOver)
  }
  override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode = {
    relBuilder.call(this.getSqlAggFunction(), args.map(_.toRexNode): _*)
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/expressions/aggregations.scala | Scala | apache-2.0 | 13,904 |
package com.kubukoz.scala99
/**
* P21 (*) Insert an element at a given position into a list.
* Example:
* scala> insertAt('new, 1, List('a, 'b, 'c, 'd))
* res0: List[Symbol] = List('a, 'new, 'b, 'c, 'd)
**/
object P21 {
  import scala.annotation.tailrec

  /**
   * Inserts `item` into `list` so that it occupies position `index` (0-based).
   * An `index <= 0` prepends the item; an index beyond the end of the list
   * appends it.
   *
   * The helper accumulates the already-visited prefix in reverse order
   * (`memLeft`), so the prefix is reversed whenever it is stitched back onto
   * the result. `@tailrec` makes the compiler verify the loop is stack-safe.
   */
  def insertAt[T](item: T, index: Int, list: List[T]): List[T] = {
    @tailrec
    def go(i: Int, l: List[T], memLeft: List[T]): List[T] = (i, l) match {
      // Reached the insertion point: restore the prefix, then item, then the rest.
      case _ if i <= 0 => memLeft.reverse ::: (item :: l)
      // List exhausted before reaching the index: append the item at the end.
      case (_, Nil) => (item :: memLeft).reverse
      // Keep walking, remembering the visited element in reverse order.
      case (_, h :: t) => go(i - 1, t, h :: memLeft)
    }
    go(index, list, Nil)
  }
}
| kubukoz/scala-99 | src/main/scala/com/kubukoz/scala99/P21.scala | Scala | apache-2.0 | 776 |
package services.time
import java.time.Instant
/** [[ClockService]] implementation backed by the JVM system clock. */
class SystemClockService extends ClockService {
  // Delegates to Instant.now(), i.e. the best available system clock in UTC.
  override def getCurrentTime: Instant = Instant.now()
}
| vetafi/vetafi-web | app/services/time/SystemClockService.scala | Scala | apache-2.0 | 155 |
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.protocol
/** Message for events. */
abstract class EventMessage() extends Serializable {
override def equals(o: Any): Boolean = o match {
case x: EventMessage => true
case _ => false
}
override def hashCode: Int = {
37 * (17 + "EventMessage".##)
}
override def toString: String = {
"EventMessage()"
}
}
object EventMessage {
}
| Duhemm/sbt | protocol/src/main/contraband-scala/sbt/protocol/EventMessage.scala | Scala | bsd-3-clause | 476 |
package mgoeminne.scalaggplot.geom
import mgoeminne.scalaggplot.position.Position
import mgoeminne.scalaggplot.stat.Statistic
import mgoeminne.scalaggplot.{position, aes, stat}
import org.saddle.Frame
/**
* Frequency polygon.
*
* == Aesthetics ==
*
* This function understands the following aesthetics:
*
* - alpha
* - colour
* - linetype
* - size
*
* == Examples ==
*
* TODO
*
* @param mapping The aesthetic mapping, usually constructed with [[aes.aes]] or [[aes.string]].
* Only needs to be set at the layer level if you are overriding the plot defaults.
* @param data A layer specific dataset - only needed if you want to override the plot defaults.
* @param stat The statistical transformation to use on the data for this layer.
* @param position The position adjustment to use for overlappling points on this layer.
* @tparam T
*/
case class freqpoly[T]( mapping: Option[(Seq[Numeric[T]], Seq[Numeric[T]])] = None,
                        data: Option[Frame[Any,Any,T]] = None,
                        stat: Statistic = freqpolyUtil.defaultStat,
                        position: Position = freqpolyUtil.defaultPos) extends Geom
// Defaults for freqpoly: bin the data (stat.bin()) and place geometry at
// identity positions — mirrors ggplot2's geom_freqpoly defaults.
private object freqpolyUtil
{
  val defaultStat = stat.bin()
  val defaultPos = position.identity
}
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box.ValidatableBox._
import uk.gov.hmrc.ct.box._
// AC7210: free-text "additional information" accompanying the dividends note.
// Only allowed to hold a value when AC32 (the dividends figure) is present.
case class AC7210(value: Option[String]) extends CtBoxIdentifier(name = "Dividends note - Additional information")
  with CtOptionalString
  with Input
  with SelfValidatableBox[Frs102AccountsBoxRetriever, Option[String]] {
  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
    collectErrors(
      // Text cannot exist unless AC32 has a value.
      cannotExistIf(hasValue && !boxRetriever.ac32().hasValue),
      // Standard Companies House free-text length limit and character whitelist.
      validateOptionalStringByLength(min = 0, max = StandardCohoTextFieldLimit),
      validateCoHoStringReturnIllegalChars()
    )
  }
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC7210.scala | Scala | apache-2.0 | 1,442 |
/*
* (c) Copyright 2014 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.rookboom.web
import org.springframework.stereotype.Controller
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation.{RequestParam, RequestMapping}
import collection.JavaConversions._
import com.linkedin.rookboom.user.UserManager
import com.linkedin.rookboom.util.Logging
@Controller
@RequestMapping(Array("users"))
class UsersController extends ExceptionResolver {
@Autowired val userManager: UserManager = null
@RequestMapping
def handle() = userManager.getUsers
@RequestMapping(Array("/search"))
def search(@RequestParam(value = "query") query: String,
@RequestParam(value = "search", defaultValue = "all") search: String) = {
case class SearchItem(displayName: String, address: String)
val usersAndGroups = userManager.getUsers.map(u => SearchItem(u.displayName, u.address)).toList :::
userManager.getGroups.map(g => SearchItem(g.displayName, g.address)).toList
val result = usersAndGroups.filter(searchItem => {
val queryLc = query.toLowerCase
search match {
case "email" => searchItem.address.toLowerCase.contains(queryLc)
case "name" => searchItem.displayName.toLowerCase.contains(queryLc)
case _ => searchItem.displayName.toLowerCase.contains(queryLc) ||
searchItem.address.toLowerCase.contains(queryLc)
}
})
mapAsJavaMap(Map("users" -> result.asInstanceOf[AnyRef]))
}
}
| linkedin/RookBoom | web/src/main/scala/com/linkedin/rookboom/web/UsersController.scala | Scala | apache-2.0 | 2,090 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.expressions._
trait QueryPlanConstraints extends ConstraintHelper { self: LogicalPlan =>
  /**
   * An [[ExpressionSet]] that contains invariants about the rows output by this operator. For
   * example, if this set contains the expression `a = 2` then that expression is guaranteed to
   * evaluate to `true` for all rows produced.
   */
  lazy val constraints: ExpressionSet = {
    if (conf.constraintPropagationEnabled) {
      ExpressionSet(
        validConstraints
          .union(inferAdditionalConstraints(validConstraints))
          .union(constructIsNotNullConstraints(validConstraints, output))
          // Keep only constraints that reference attributes of this operator's own
          // output and are deterministic; anything else cannot be relied on upstream.
          .filter { c =>
            c.references.nonEmpty && c.references.subsetOf(outputSet) && c.deterministic
          }
      )
    } else {
      // Constraint propagation disabled via configuration: propagate nothing.
      ExpressionSet(Set.empty)
    }
  }
  /**
   * This method can be overridden by any child class of QueryPlan to specify a set of constraints
   * based on the given operator's constraint propagation logic. These constraints are then
   * canonicalized and filtered automatically to contain only those attributes that appear in the
   * [[outputSet]].
   *
   * See [[Canonicalize]] for more details.
   */
  protected lazy val validConstraints: Set[Expression] = Set.empty
}
trait ConstraintHelper {
  /**
   * Infers an additional set of constraints from a given set of equality constraints.
   * For e.g., if an operator has constraints of the form (`a = 5`, `a = b`), this returns an
   * additional constraint of the form `b = 5`.
   */
  def inferAdditionalConstraints(constraints: Set[Expression]): Set[Expression] = {
    var inferredConstraints = Set.empty[Expression]
    // IsNotNull should be constructed by `constructIsNotNullConstraints`.
    val predicates = constraints.filterNot(_.isInstanceOf[IsNotNull])
    predicates.foreach {
      // Attribute equality: substitute in both directions across the remaining predicates.
      case eq @ EqualTo(l: Attribute, r: Attribute) =>
        val candidateConstraints = predicates - eq
        inferredConstraints ++= replaceConstraints(candidateConstraints, l, r)
        inferredConstraints ++= replaceConstraints(candidateConstraints, r, l)
      // Cast on one side: only substitute the bare attribute with the cast expression,
      // never the other way around.
      case eq @ EqualTo(l @ Cast(_: Attribute, _, _), r: Attribute) =>
        inferredConstraints ++= replaceConstraints(predicates - eq, r, l)
      case eq @ EqualTo(l: Attribute, r @ Cast(_: Attribute, _, _)) =>
        inferredConstraints ++= replaceConstraints(predicates - eq, l, r)
      case _ => // No inference
    }
    // Return only constraints that are genuinely new.
    inferredConstraints -- constraints
  }
  // Rewrites each constraint, replacing every subexpression semantically equal
  // to `source` with `destination`.
  private def replaceConstraints(
      constraints: Set[Expression],
      source: Expression,
      destination: Expression): Set[Expression] = constraints.map(_ transform {
    case e: Expression if e.semanticEquals(source) => destination
  })
  /**
   * Infers a set of `isNotNull` constraints from null intolerant expressions as well as
   * non-nullable attributes. For e.g., if an expression is of the form (`a > 5`), this
   * returns a constraint of the form `isNotNull(a)`
   */
  def constructIsNotNullConstraints(
      constraints: Set[Expression],
      output: Seq[Attribute]): Set[Expression] = {
    // First, we propagate constraints from the null intolerant expressions.
    var isNotNullConstraints: Set[Expression] = constraints.flatMap(inferIsNotNullConstraints)
    // Second, we infer additional constraints from non-nullable attributes that are part of the
    // operator's output
    val nonNullableAttributes = output.filterNot(_.nullable)
    isNotNullConstraints ++= nonNullableAttributes.map(IsNotNull).toSet
    isNotNullConstraints -- constraints
  }
  /**
   * Infer the Attribute-specific IsNotNull constraints from the null intolerant child expressions
   * of constraints.
   */
  private def inferIsNotNullConstraints(constraint: Expression): Seq[Expression] =
    constraint match {
      // When the root is IsNotNull, we can push IsNotNull through the child null intolerant
      // expressions
      case IsNotNull(expr) => scanNullIntolerantAttribute(expr).map(IsNotNull(_))
      // Constraints always return true for all the inputs. That means, null will never be returned.
      // Thus, we can infer `IsNotNull(constraint)`, and also push IsNotNull through the child
      // null intolerant expressions.
      case _ => scanNullIntolerantAttribute(constraint).map(IsNotNull(_))
    }
  /**
   * Recursively explores the expressions which are null intolerant and returns all attributes
   * in these expressions.
   */
  private def scanNullIntolerantAttribute(expr: Expression): Seq[Attribute] = expr match {
    case a: Attribute => Seq(a)
    // Recursion stops at the first non-null-intolerant node: below it, a null
    // child no longer forces a null result, so IsNotNull cannot be inferred.
    case _: NullIntolerant => expr.children.flatMap(scanNullIntolerantAttribute)
    case _ => Seq.empty[Attribute]
  }
}
| goldmedal/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/QueryPlanConstraints.scala | Scala | apache-2.0 | 5,558 |
package at.logic.gapt.examples
import at.logic.gapt.expr._
import at.logic.gapt.expr.fol.{ naive, thresholds }
import at.logic.gapt.proofs.{ FOLClause, HOLSequent }
/**
* Creates the n-th formula of a sequence where distributivity-based
* algorithm produces only exponential CNFs.
*/
object PQPairs {
  /**
   * Builds the left-nested disjunction
   * (...((p_1 ∧ q_1) ∨ (p_2 ∧ q_2)) ∨ ... ∨ (p_n ∧ q_n)) for n >= 1.
   */
  def apply( n: Int ): FOLFormula = {
    assert( n >= 1 )
    // One conjunct per index, folded left so the nesting matches the
    // recursive definition Or(apply(n-1), And(p(n), q(n))).
    val conjuncts = ( 1 to n ).map( i => And( p( i ), q( i ) ) )
    conjuncts.tail.foldLeft[FOLFormula]( conjuncts.head )( Or( _, _ ) )
  }
  def p( i: Int ) = FOLAtom( "p_" + i, Nil )
  def q( i: Int ) = FOLAtom( "q_" + i, Nil )
}
/**
* Given n >= 2 creates an unsatisfiable first-order clause set based on a
* statement about the permutations in S_n.
*/
object Permutations {
  def apply( n: Int ): List[FOLClause] = {
    assert( n >= 2 )
    // R applied to x_1..x_n in order, after a transposition of the first two
    // variables, and after a cyclic rotation, respectively.
    val Rord = FOLAtom( "R", List.range( 1, n + 1 ).map( x( _ ) ) )
    val Rtransp = FOLAtom( "R", x( 2 ) :: x( 1 ) :: List.range( 3, n + 1 ).map( x( _ ) ) )
    val Rrot = FOLAtom( "R", x( n ) :: List.range( 1, n ).map( x( _ ) ) )
    val Rord_c = FOLAtom( "R", List.range( 1, n + 1 ).map( c( _ ) ) )
    // Constants at even / odd positions; sliding(1, 2) picks every second element.
    val even = List.range( 2, n + 1 ).sliding( 1, 2 ).flatten.toList
    val odd = List.range( 1, n + 1 ).sliding( 1, 2 ).flatten.toList
    val Revenodd_c = FOLAtom( "R", ( odd ++ even ).map( c( _ ) ) )
    // Closure under transposition and rotation, plus the two (contradictory) goal units.
    val Ctransp = FOLClause( Rord :: Nil, Rtransp :: Nil )
    val Crot = FOLClause( Rord :: Nil, Rrot :: Nil )
    val Goalpos = FOLClause( Nil, Rord_c :: Nil )
    val Goalneg = FOLClause( Revenodd_c :: Nil, Nil )
    Ctransp :: Crot :: Goalpos :: Goalneg :: Nil
  }
  /**
   * return the set of constants which occur in the n-th clause set
   */
  def constants( n: Int ): Set[FOLTerm] = List.range( 1, n + 1 ).map( c( _ ) ).toSet
  private def x( i: Int ) = FOLVar( "x_" + i )
  private def c( i: Int ) = FOLConst( "c_" + i )
}
/**
* Creates the n-th tautology of a sequence that has only exponential-size cut-free proofs
*
* This sequence is taken from: S. Buss. "Weak Formal Systems and Connections to
* Computational Complexity". Lecture Notes for a Topics Course, UC Berkeley, 1988,
* available from: http://www.math.ucsd.edu/~sbuss/ResearchWeb/index.html
*/
object BussTautology {
  // The n-th sequent: Or(A_i, B_i) for i = n..1 on the left, c_n and d_n on the right.
  def apply( n: Int ): HOLSequent = HOLSequent( Ant( n ), c( n ) :: d( n ) :: Nil )
  def c( i: Int ) = FOLAtom( "c_" + i, Nil )
  def d( i: Int ) = FOLAtom( "d_" + i, Nil )
  // F(i) = (c_1 ∨ d_1) ∧ (c_2 ∨ d_2) ∧ ... ∧ (c_i ∨ d_i), nested to the left.
  def F( i: Int ): FOLFormula = if ( i == 1 ) Or( c( 1 ), d( 1 ) ) else And( F( i - 1 ), Or( c( i ), d( i ) ) )
  def A( i: Int ) = if ( i == 1 ) c( 1 ) else Imp( F( i - 1 ), c( i ) )
  def B( i: Int ) = if ( i == 1 ) d( 1 ) else Imp( F( i - 1 ), d( i ) )
  // the antecedens of the final sequent
  def Ant( i: Int ): List[FOLFormula] = if ( i == 0 ) Nil else Or( A( i ), B( i ) ) :: Ant( i - 1 )
}
/**
* Constructs a formula representing the pigeon hole principle. More precisely:
* PigeonHolePrinciple( p, h ) states that if p pigeons are put into h holes
* then there is a hole which contains two pigeons. PigeonHolePrinciple( p, h )
* is a tautology iff p > h.
*
* Since we want to avoid empty disjunctions, we assume > 1 pigeons.
*/
object PigeonHolePrinciple {
  // The binary relation symbol.
  val rel = "R"
  /**
   * @param ps the number of pigeons
   * @param hs the number of holes
   */
  def apply( ps: Int, hs: Int ) = {
    assert( ps > 1 )
    // "Every pigeon is in some hole" implies "some hole holds two distinct pigeons".
    Imp(
      And( ( 1 to ps ).map( p =>
        Or( ( 1 to hs ).map( h => atom( p, h ) ).toList ) ).toList ),
      // Inner disjunction ranges over unordered pairs (pp < p) to avoid duplicates.
      Or( ( 1 to hs ).map( h =>
        Or( ( 2 to ps ).map( p =>
          Or( ( ( 1 to p - 1 ) ).map( pp =>
            And( atom( p, h ), atom( pp, h ) ) ).toList ) ).toList ) ).toList )
    )
  }
  // R(p_i, h_j): pigeon i sits in hole j.
  def atom( p: Int, h: Int ) = FOLAtom( rel, pigeon( p ) :: hole( h ) :: Nil )
  def pigeon( i: Int ) = FOLConst( "p_" + i )
  def hole( i: Int ) = FOLConst( "h_" + i )
}
/**
* Sequence of valid first-order formulas about equivalent counting methods.
*
* Consider the formula ∀z ∃^=1^i ∀x ∃y a_i(x,y,z), where ∃^=1^i is a quantifier
* that says that there exists exactly one i (in 0..n) such that ∀x ∃y a_i(x,y,z) is true.
*
* This function returns the equivalence between two implementations of the formula:
* first, using a naive quadratic implementation; and second, using an O(n*log(n))
* implementation with threshold formulas.
*/
object CountingEquivalence {
  def apply( n: Int ): FOLFormula = {
    // a_0 .. a_n, each wrapped as ∀x ∃y a_i(x, y, z); the hof"" interpolator
    // parses these higher-order formula strings at runtime.
    val as = 0 to n map { i => hof"!x?y ${s"a$i"} x y z" }
    // Equivalence of the O(n log n) threshold encoding and the naive quadratic
    // encoding of "exactly one of as holds".
    hof"!z ${thresholds.exactly oneOf as} <-> !z ${naive.exactly oneOf as}".asInstanceOf[FOLFormula]
  }
}
| gebner/gapt | examples/FormulaSequences.scala | Scala | gpl-3.0 | 4,539 |
package edu.rice.habanero.benchmarks.piprecision
import java.math.BigDecimal
import java.util.concurrent.atomic.AtomicInteger
import akka.actor.{ActorRef, Props}
import edu.rice.habanero.actors.{AkkaActor, AkkaActorState}
import edu.rice.habanero.benchmarks.piprecision.PiPrecisionConfig.{StartMessage, StopMessage}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
*/
// Akka-actor implementation of the pi-precision benchmark: a master farms out
// series terms to workers until a term falls below the requested tolerance.
object PiPrecisionAkkaActorBenchmark {
  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new PiPrecisionAkkaActorBenchmark)
  }
  private final class PiPrecisionAkkaActorBenchmark extends Benchmark {
    def initialize(args: Array[String]) {
      PiPrecisionConfig.parseArgs(args)
    }
    def printArgInfo() {
      PiPrecisionConfig.printArgs()
    }
    def runIteration() {
      val numWorkers: Int = PiPrecisionConfig.NUM_WORKERS
      val precision: Int = PiPrecisionConfig.PRECISION
      val system = AkkaActorState.newActorSystem("PiPrecision")
      val master = system.actorOf(Props(new Master(numWorkers, precision)))
      AkkaActorState.startActor(master)
      master ! StartMessage.ONLY
      // Blocks until the master (and thus all workers) have exited.
      AkkaActorState.awaitTermination(system)
    }
    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) {
    }
  }
  // Master actor: distributes term indices to workers, accumulates partial
  // results, and stops once a term is within tolerance. All vars below are
  // actor state, mutated only from within process().
  private class Master(numWorkers: Int, scale: Int) extends AkkaActor[AnyRef] {
    private final val workers = Array.tabulate[ActorRef](numWorkers)(i => context.system.actorOf(Props(new Worker(self, i))))
    private var result: BigDecimal = BigDecimal.ZERO
    // A term <= 10^-scale signals that the desired precision has been reached.
    private final val tolerance = BigDecimal.ONE.movePointLeft(scale)
    private final val numWorkersTerminated: AtomicInteger = new AtomicInteger(0)
    private var numTermsRequested: Int = 0
    private var numTermsReceived: Int = 0
    private var stopRequests: Boolean = false
    override def onPostStart() {
      workers.foreach(loopWorker => {
        AkkaActorState.startActor(loopWorker)
      })
    }
    /**
     * Generates work for the given worker
     *
     * @param workerId the id of the worker to send work to
     */
    private def generateWork(workerId: Int) {
      val wm: PiPrecisionConfig.WorkMessage = new PiPrecisionConfig.WorkMessage(scale, numTermsRequested)
      workers(workerId) ! wm
      numTermsRequested += 1
    }
    def requestWorkersToExit() {
      workers.foreach(loopWorker => {
        loopWorker ! StopMessage.ONLY
      })
    }
    override def process(msg: AnyRef) {
      msg match {
        case rm: PiPrecisionConfig.ResultMessage =>
          numTermsReceived += 1
          result = result.add(rm.result)
          // Once any term is within tolerance, stop issuing new work; in-flight
          // work still completes and is counted below.
          if (rm.result.compareTo(tolerance) <= 0) {
            stopRequests = true
          }
          if (!stopRequests) {
            generateWork(rm.workerId)
          }
          if (numTermsReceived == numTermsRequested) {
            requestWorkersToExit()
          }
        case _: PiPrecisionConfig.StopMessage =>
          // Workers echo StopMessage back; exit after all have confirmed.
          val numTerminated: Int = numWorkersTerminated.incrementAndGet
          if (numTerminated == numWorkers) {
            exit()
          }
        case _: PiPrecisionConfig.StartMessage =>
          // Prime the pipeline: up to 10 work items per worker, round-robin.
          var t: Int = 0
          while (t < Math.min(scale, 10 * numWorkers)) {
            generateWork(t % numWorkers)
            t += 1
          }
        case message =>
          val ex = new IllegalArgumentException("Unsupported message: " + message)
          ex.printStackTrace(System.err)
      }
    }
    def getResult: String = {
      result.toPlainString
    }
  }
  // Worker actor: computes one series term per WorkMessage (via
  // PiPrecisionConfig.calculateBbpTerm — presumably a BBP-type formula term)
  // and reports it back to the master with this worker's id.
  private class Worker(master: ActorRef, id: Int) extends AkkaActor[AnyRef] {
    override def process(msg: AnyRef) {
      msg match {
        case _: PiPrecisionConfig.StopMessage =>
          master ! new PiPrecisionConfig.StopMessage
          exit()
        case wm: PiPrecisionConfig.WorkMessage =>
          val result: BigDecimal = PiPrecisionConfig.calculateBbpTerm(wm.scale, wm.term)
          master ! new PiPrecisionConfig.ResultMessage(result, id)
        case message =>
          val ex = new IllegalArgumentException("Unsupported message: " + message)
          ex.printStackTrace(System.err)
      }
    }
  }
}
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/benchmarks/piprecision/PiPrecisionAkkaActorBenchmark.scala | Scala | gpl-2.0 | 4,217 |
package net.scalytica.symbiotic.postgres.docmanagement
import net.scalytica.symbiotic.test.specs.{FolderRepositorySpec, PostgresSpec}
/** Runs the shared FolderRepository test suite against the Postgres implementation. */
class PostgresFolderRepositorySpec
    extends FolderRepositorySpec
    with PostgresSpec {
  override val folderRepo = new PostgresFolderRepository(config)
}
| kpmeen/symbiotic | symbiotic-postgres/src/test/scala/net/scalytica/symbiotic/postgres/docmanagement/PostgresFolderRepositorySpec.scala | Scala | apache-2.0 | 296 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.builders
import monix.execution.cancelables.CompositeCancelable
import monix.execution.{Ack, Cancelable}
import monix.execution.Ack.{Continue, Stop}
import scala.util.control.NonFatal
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.concurrent.{Future, Promise}
import scala.util.Success
/**
 * Observable zipping five sources: emits `f(a1, a2, a3, a4, a5)` each time one
 * element has been collected from every source, applying back-pressure to all
 * sources via a shared promise until the downstream acknowledges each emission.
 * All mutable state is guarded by `lock`.
 */
private[reactive] final class Zip5Observable[A1, A2, A3, A4, A5, +R](
  obsA1: Observable[A1],
  obsA2: Observable[A2],
  obsA3: Observable[A3],
  obsA4: Observable[A4],
  obsA5: Observable[A5])(f: (A1, A2, A3, A4, A5) => R)
  extends Observable[R] {
  def unsafeSubscribeFn(out: Subscriber[R]): Cancelable = {
    import out.scheduler
    val lock = new AnyRef
    // MUST BE synchronized by `lock`
    var isDone = false
    // MUST BE synchronized by `lock`
    var lastAck = Continue: Future[Ack]
    // MUST BE synchronized by `lock`
    var elemA1: A1 = null.asInstanceOf[A1]
    // MUST BE synchronized by `lock`
    var hasElemA1 = false
    // MUST BE synchronized by `lock`
    var elemA2: A2 = null.asInstanceOf[A2]
    // MUST BE synchronized by `lock`
    var hasElemA2 = false
    // MUST BE synchronized by `lock`
    var elemA3: A3 = null.asInstanceOf[A3]
    // MUST BE synchronized by `lock`
    var hasElemA3 = false
    // MUST BE synchronized by `lock`
    var elemA4: A4 = null.asInstanceOf[A4]
    // MUST BE synchronized by `lock`
    var hasElemA4 = false
    // MUST BE synchronized by `lock`
    var elemA5: A5 = null.asInstanceOf[A5]
    // MUST BE synchronized by `lock`
    var hasElemA5 = false
    // MUST BE synchronized by `lock`; completed with each emission's ack so the
    // four sources that did NOT trigger the emission are released together.
    var continueP = Promise[Ack]()
    // MUST BE synchronized by `lock`
    var sourcesCompleted: Int = 0
    def completeWithNext: Boolean = sourcesCompleted >= 1
    // MUST BE synchronized by `lock`
    def rawOnNext(a1: A1, a2: A2, a3: A3, a4: A4, a5: A5): Future[Ack] =
      if (isDone) Stop
      else {
        // streamError distinguishes failures of `f` (reported downstream) from
        // failures thrown by `out.onNext` itself (propagated to the caller).
        var streamError = true
        try {
          val c = f(a1, a2, a3, a4, a5)
          streamError = false
          val ack = out.onNext(c)
          if (completeWithNext) {
            ack.onComplete(_ => signalOnComplete(false))
          }
          ack
        } catch {
          case NonFatal(ex) if streamError =>
            isDone = true
            out.onError(ex)
            Stop
        } finally {
          // Consume the buffered tuple regardless of outcome.
          hasElemA1 = false
          hasElemA2 = false
          hasElemA3 = false
          hasElemA4 = false
          hasElemA5 = false
        }
      }
    // MUST BE synchronized by `lock`
    def signalOnNext(a1: A1, a2: A2, a3: A3, a4: A4, a5: A5): Future[Ack] = {
      lastAck = lastAck match {
        case Continue => rawOnNext(a1, a2, a3, a4, a5)
        case Stop => Stop
        case async =>
          async.flatMap {
            // async execution, we have to re-sync
            case Continue => lock.synchronized(rawOnNext(a1, a2, a3, a4, a5))
            case Stop => Stop
          }
      }
      // Swap in a fresh promise for the next round before releasing the
      // currently-waiting sources with this round's ack.
      val oldP = continueP
      continueP = Promise[Ack]()
      oldP.completeWith(lastAck)
      lastAck
    }
    def signalOnError(ex: Throwable): Unit = lock.synchronized {
      if (!isDone) {
        isDone = true
        out.onError(ex)
        lastAck = Stop
      }
    }
    def rawOnComplete(): Unit =
      if (!isDone) {
        isDone = true
        out.onComplete()
      }
    def signalOnComplete(hasElem: Boolean): Unit = lock.synchronized {
      // If all other sources have completed then
      // we won't receive the next batch of elements
      // (sourcesCompleted == 4 means the other four of the five are done).
      if (!hasElem || sourcesCompleted == 4) {
        lastAck match {
          case Continue => rawOnComplete()
          case Stop => () // do nothing
          case async =>
            async.onComplete {
              case Success(Continue) =>
                lock.synchronized(rawOnComplete())
              case _ =>
                () // do nothing
            }
        }
        continueP.trySuccess(Stop)
        lastAck = Stop
      } else {
        sourcesCompleted += 1
      }
    }
    val composite = CompositeCancelable()
    composite += obsA1.unsafeSubscribeFn(new Subscriber[A1] {
      implicit val scheduler = out.scheduler
      def onNext(elem: A1): Future[Ack] = lock.synchronized {
        if (isDone) Stop
        else {
          elemA1 = elem
          if (!hasElemA1) hasElemA1 = true
          // Emit when this element completes the 5-tuple; otherwise wait on the
          // shared promise until the tuple is emitted downstream.
          if (hasElemA2 && hasElemA3 && hasElemA4 && hasElemA5)
            signalOnNext(elemA1, elemA2, elemA3, elemA4, elemA5)
          else
            continueP.future
        }
      }
      def onError(ex: Throwable): Unit =
        signalOnError(ex)
      def onComplete(): Unit =
        signalOnComplete(hasElemA1)
    })
    composite += obsA2.unsafeSubscribeFn(new Subscriber[A2] {
      implicit val scheduler = out.scheduler
      def onNext(elem: A2): Future[Ack] = lock.synchronized {
        if (isDone) Stop
        else {
          elemA2 = elem
          if (!hasElemA2) hasElemA2 = true
          if (hasElemA1 && hasElemA3 && hasElemA4 && hasElemA5)
            signalOnNext(elemA1, elemA2, elemA3, elemA4, elemA5)
          else
            continueP.future
        }
      }
      def onError(ex: Throwable): Unit =
        signalOnError(ex)
      def onComplete(): Unit =
        signalOnComplete(hasElemA2)
    })
    composite += obsA3.unsafeSubscribeFn(new Subscriber[A3] {
      implicit val scheduler = out.scheduler
      def onNext(elem: A3): Future[Ack] = lock.synchronized {
        if (isDone) Stop
        else {
          elemA3 = elem
          if (!hasElemA3) hasElemA3 = true
          if (hasElemA1 && hasElemA2 && hasElemA4 && hasElemA5)
            signalOnNext(elemA1, elemA2, elemA3, elemA4, elemA5)
          else
            continueP.future
        }
      }
      def onError(ex: Throwable): Unit =
        signalOnError(ex)
      def onComplete(): Unit =
        signalOnComplete(hasElemA3)
    })
    composite += obsA4.unsafeSubscribeFn(new Subscriber[A4] {
      implicit val scheduler = out.scheduler
      def onNext(elem: A4): Future[Ack] = lock.synchronized {
        if (isDone) Stop
        else {
          elemA4 = elem
          if (!hasElemA4) hasElemA4 = true
          if (hasElemA1 && hasElemA2 && hasElemA3 && hasElemA5)
            signalOnNext(elemA1, elemA2, elemA3, elemA4, elemA5)
          else
            continueP.future
        }
      }
      def onError(ex: Throwable): Unit =
        signalOnError(ex)
      def onComplete(): Unit =
        signalOnComplete(hasElemA4)
    })
    composite += obsA5.unsafeSubscribeFn(new Subscriber[A5] {
      implicit val scheduler = out.scheduler
      def onNext(elem: A5): Future[Ack] = lock.synchronized {
        if (isDone) Stop
        else {
          elemA5 = elem
          if (!hasElemA5) hasElemA5 = true
          if (hasElemA1 && hasElemA2 && hasElemA3 && hasElemA4)
            signalOnNext(elemA1, elemA2, elemA3, elemA4, elemA5)
          else
            continueP.future
        }
      }
      def onError(ex: Throwable): Unit =
        signalOnError(ex)
      def onComplete(): Unit =
        signalOnComplete(hasElemA5)
    })
    composite
  }
}
| monixio/monix | monix-reactive/shared/src/main/scala/monix/reactive/internal/builders/Zip5Observable.scala | Scala | apache-2.0 | 7,850 |
package scodec
package codecs
import scodec.bits._
import org.scalacheck._
class EitherCodecTest extends CodecSuite {
  "the either codec" should {
    "roundtrip" in {
      // 1-byte boolean discriminator, then uint8 (Left) or utf8 (Right).
      val c = either(bool(8), uint8, utf8)
      roundtrip(c, Left(0))
      roundtrip(c, Left(255))
      roundtrip(c, Right("hello, world"))
      // locally override Arbitrary[Int] to fit in 8 bits unsigned
      implicit val arb = Arbitrary(Gen.choose(0,255))
      forAll { (e: Either[Int,String]) => roundtrip(c, e) }
    }
    "encode correctly" in {
      val c = either(bool(8), uint8, ascii)
      // Left: discriminator byte 0x00 followed by the uint8 value.
      c.encode(Left(255)) shouldBe Attempt.successful(bin"00000000 11111111")
      // Right: discriminator byte 0xff followed by ASCII bytes for "hi".
      c.encode(Right("hi")) shouldBe Attempt.successful(hex"ff 68 69".toBitVector)
    }
  }
}
| alissapajer/scodec | shared/src/test/scala/scodec/codecs/EitherCodecTest.scala | Scala | bsd-3-clause | 752 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\\
* @ @ *
* # # # # (c) 2016 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.parts.control.infrastructure
import java.util.concurrent.ExecutionException
import akka.actor._
import com.typesafe.config.Config
import mathact.AppConfig
import mathact.parts.model.data.sketch.SketchData
import mathact.parts.model.enums.SketchStatus
import mathact.parts.model.messages.{M, Msg}
import mathact.parts.ActorBase
import mathact.parts.gui.SelectSketchWindow
import mathact.parts.WorkbenchLike
import scala.concurrent.Future
import scala.util.{Failure, Success}
import scala.concurrent.duration._
/** Application controller actor
* Created by CAB on 20.06.2016.
*/
class MainController(doStop: Int⇒Unit, config: AppConfig) extends ActorBase{
//Parameters
val sketchStartTimeout = 5.seconds
//Messages
case object ShowUI
case class RunSketch(className: String)
case class SketchStarted(className: String)
case class SketchStartTimeout(className: String)
case object DoStop //Normal stop
case object DoErrorStop //Stop by error
case class DoTerminate(exitCode: Int) //Free resources and terminate
//Holders
case class CurrentSketch(sketch: SketchData, isWorking: Boolean, controller: Option[ActorRef]){
def started(): CurrentSketch = CurrentSketch(sketch, isWorking = true, controller)
def withController(controller: ActorRef): CurrentSketch = CurrentSketch(sketch, isWorking, Some(controller))}
//UI definitions
val uiSelectSketch = new SelectSketchWindow(log){
def sketchSelected(sketchClassName: String): Unit = {self ! RunSketch(sketchClassName)}
def windowClosed(): Unit = {self ! DoStop}}
//Variables
var sketches = List[SketchData]()
var currentSketch: Option[CurrentSketch] = None
//TODO Код ниже это конструирование субакторов для WorkbenchController
//new WorkbenchController(...){
// val randomName = "_" + sketchData.className + "_" + UUID.randomUUID
// val sketchUi = context.actorOf(Props(
// new SketchUI(config.sketchUI, self)),
// "SketchUI" + randomName)
// val userLogging = context.actorOf(Props(
// new UserLogging(config.userLogging, self)),
// "UserLogging" + randomName)
// val visualization = context.actorOf(Props(
// new Visualization(config.visualization, self)),
// "Visualization" + randomName)
// val pumping = context.actorOf(Props(
// new Pumping(config.pumping, self, sketchName, userLogging, visualization)),
// "Pumping" + randomName)
//}
//Functions
def setCurrentSketchState(newStat: SketchStatus): Unit = ???
// currentSketch.foreach{ cs ⇒
// sketches = sketches.map{
// case s if s.className == cs.sketch.className ⇒ s.copy(status = newStat)
// case s ⇒ s}}
def cleanCurrentSketch(): Unit = {
currentSketch.foreach(_.controller.foreach(_ ! M.ShutdownWorkbenchController))
currentSketch = None}
  //Messages handling
  /** Main message handler: drives the sketch lifecycle (start, run, done, error,
    * timeout, controller termination) and the application's stop/terminate sequence. */
  def reaction = {
    //Handling of starting
    case M.MainControllerStart(sketchList) ⇒
      sketches = sketchList
      //Check if there is autoruned
//      sketchList.find(_.status == SketchStatus.Autorun) match{
//        case Some(sketch) ⇒
//          self ! RunSketch(sketch.className)
//        case None ⇒
//          self ! ShowUI}
    //Display UI
    case ShowUI ⇒
//      tryToRun{uiSelectSketch.show(sketches)} match{
//        case Success(_) ⇒
//          log.debug("[MainController.MainControllerStart] UI is created.")
//        case Failure(_) ⇒
//          self ! DoErrorStop}
    //Run selected sketch
    case RunSketch(className) ⇒
      (currentSketch, sketches.find(_.className == className)) match{
        case (None, Some(sketch)) ⇒
          //No sketch is active and the requested one is known: start it
          currentSketch = Some(CurrentSketch(sketch, isWorking = false, None))
          //Starting creating timeout
          context.system.scheduler.scheduleOnce(sketchStartTimeout, self, SketchStartTimeout(className))
          //Hid UI
//          tryToRun{uiSelectSketch.hide()}
          //Create Workbench instance (reflective construction may fail, hence recover)
          Future{sketch.clazz.newInstance()}
            .map{ _ ⇒ self ! SketchStarted(className)}
            .recover{
              case t: ExecutionException ⇒ self ! M.SketchError(className, t.getCause)
              case t: Throwable ⇒ self ! M.SketchError(className, t)}
        case (Some(curSketch), _) if curSketch.sketch.className != className ⇒
          log.warning(s"[MainController.RunSketch] Current sketch $curSketch not ended.")
        case (_, None) ⇒
          log.error(s"[MainController.RunSketch] Not found sketch for className: $className")
        case _ ⇒}
    //Creating of new WorkbenchContext instance, return Either[Exception,WorkbenchContext]
    case M.NewWorkbenchContext(workbench: WorkbenchLike) ⇒
      //TODO Check that the request comes from the current sketch; if not, answer with an
      //TODO error, otherwise forward GetWorkbenchContext to the sketch controller
      //
      //
//      (currentSketch, Option(workbench.getClass.getCanonicalName)) match {
//        case (Some(s), Some(cn)) if s.sketch.className == cn ⇒
//          //Create WorkbenchContext
//          val controller: ActorRef = ???
//
//
////          context.actorOf(
////            Props(new WorkbenchController(s.sketch, self, config)),
////            "WorkbenchControllerActor_" + s.sketch.className)
//
//
//          context.watch(controller)
//          currentSketch = currentSketch.map(_.withController(controller))
//          //Init of Workbench Controller
//          controller ! M.WorkbenchControllerInit(sender)
//
//
//
//
//
//        case (_, cn) ⇒ sender ! Left(new Exception(
//          s"[MainController.NewWorkbenchContext] Workbench class $cn not match a current sketch: $currentSketch"))}
    //SketchData started
    case SketchStarted(className) ⇒
      currentSketch.filter(_.sketch.className == className).foreach{
        case s if s.controller.nonEmpty ⇒
          s.controller.foreach(_ ! M.StartWorkbenchController)
          currentSketch = currentSketch.map(_.started())
        case s ⇒
          self ! M.SketchError(className, new Exception(
            s"[MainController.SketchStarted] Workbench controller not created, current sketch: $currentSketch"))}
    //Normal end of sketch
    case M.SketchDone(className) ⇒
      currentSketch.filter(_.sketch.className == className).foreach{ _ ⇒
        log.info(s"[MainController.SketchDone] Current sketch: $currentSketch")
        setCurrentSketchState(SketchStatus.Ended)
        cleanCurrentSketch()
        self ! ShowUI}
    //Failure end of sketch
    case M.SketchError(className, error) ⇒
      currentSketch.filter(_.sketch.className == className).foreach{ _ ⇒
        log.error(
          s"[MainController.SketchError] Error: $error currentSketch: $currentSketch, " +
          s"StackTrace: \\n ${error.getStackTrace.mkString("\\n")}")
        setCurrentSketchState(SketchStatus.Failed)
        cleanCurrentSketch()
        self ! ShowUI}
    //SketchData start timeout (only fires if the sketch never reached the working state)
    case SketchStartTimeout(className) ⇒
      currentSketch.filter(cs ⇒ cs.sketch.className == className && (! cs.isWorking)).foreach{ s ⇒
        log.error(s"[MainController.SketchStartTimeout] Timeout: $sketchStartTimeout, currentSketch: $currentSketch")
        setCurrentSketchState(SketchStatus.Failed)
        cleanCurrentSketch()
        self ! ShowUI}
    //Terminated of current sketch (watched workbench controller died)
    case Terminated(actor) ⇒
      currentSketch.filter(_.controller.contains(actor)).foreach{ _ ⇒
        log.error(s"[MainController.Terminated] Actor: $actor, currentSketch: $currentSketch")
        setCurrentSketchState(SketchStatus.Failed)
        currentSketch = None
        self ! ShowUI}
    //Self normal stopping
    case DoStop ⇒
      cleanCurrentSketch()
      self ! DoTerminate(0)
    //Error normal stopping
    case DoErrorStop ⇒
      cleanCurrentSketch()
      self ! DoTerminate(-1)
    case DoTerminate(exitCode) ⇒
      //Hide UI
//      tryToRun{uiSelectSketch.hide()}
      //Call stop
      doStop(exitCode)
      self ! PoisonPill}}
| AlexCAB/ProbabilisticPlaying | mathact/src/main/scala/mathact/parts/control/infrastructure/MainController.scala | Scala | mit | 9,248 |
/* date: Jun 24, 2012
*/
package com.client
import javax.swing._
/** Swing label acting as a simple one-line status bar. */
class StatusLine extends JLabel {
  // Begin with no message shown.
  setText("")

  /** Clears the status line. */
  def clearStatusLine = display("")

  /** Shows `message` in the status line. */
  def addMessageToStatusLine(message: String) = display(message)

  // Single point through which all status text is routed.
  private def display(text: String): Unit = setText(text)
}
| hangle/Notecard | src/StatusLine.scala | Scala | apache-2.0 | 218 |
package info.mukel.telegrambot4s.methods
import info.mukel.telegrambot4s.models.{Message, ReplyMarkup}
/** Use this method to send phone contacts. On success, the sent Message is returned.
  *
  * @param chatId              Integer or String Unique identifier for the target chat or username of the target channel (in the format @channelusername)
  * @param phoneNumber         String Contact's phone number
  * @param firstName           String Contact's first name
  * @param lastName            String Optional Contact's last name
  * @param disableNotification Boolean Optional Sends the message silently. iOS users will not receive a notification, Android users will receive a notification with no sound.
  * @param replyToMessageId    Integer Optional If the message is a reply, ID of the original message
  * @param replyMarkup         InlineKeyboardMarkup or ReplyKeyboardMarkup or ReplyKeyboardHide or ForceReply Optional Additional interface options. A JSON-serialized object for an inline keyboard, custom reply keyboard, instructions to hide keyboard or to force a reply from the user.
  */
case class SendContact(
                        // Idiomatic Either[Long, String] instead of the obscure infix form `Long Either String`
                        chatId              : Either[Long, String],
                        phoneNumber         : String,
                        firstName           : String,
                        lastName            : Option[String] = None,
                        disableNotification : Option[Boolean] = None,
                        replyToMessageId    : Option[Long] = None,
                        replyMarkup         : Option[ReplyMarkup] = None
                      ) extends ApiRequestJson[Message]
| hugemane/telegrambot4s | src/main/scala/info/mukel/telegrambot4s/methods/SendContact.scala | Scala | apache-2.0 | 1,631 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import scala.collection.{mutable, Map}
import scala.util.Try
import scala.util.control.NonFatal
import org.json4s.JsonDSL._
import org.apache.spark.SparkException
import org.apache.spark.annotation.Stable
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.Resolver
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, InterpretedOrdering}
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, LegacyTypeStringParser}
import org.apache.spark.sql.catalyst.util.{quoteIdentifier, truncatedString, StringUtils}
import org.apache.spark.sql.catalyst.util.StringUtils.StringConcat
import org.apache.spark.sql.internal.SQLConf
/**
* A [[StructType]] object can be constructed by
* {{{
* StructType(fields: Seq[StructField])
* }}}
* For a [[StructType]] object, one or multiple [[StructField]]s can be extracted by names.
* If multiple [[StructField]]s are extracted, a [[StructType]] object will be returned.
* If a provided name does not have a matching field, it will be ignored. For the case
* of extracting a single [[StructField]], a `null` will be returned.
*
* Scala Example:
* {{{
* import org.apache.spark.sql._
* import org.apache.spark.sql.types._
*
* val struct =
* StructType(
* StructField("a", IntegerType, true) ::
* StructField("b", LongType, false) ::
* StructField("c", BooleanType, false) :: Nil)
*
* // Extract a single StructField.
* val singleField = struct("b")
* // singleField: StructField = StructField(b,LongType,false)
*
* // If this struct does not have a field called "d", it throws an exception.
* struct("d")
* // java.lang.IllegalArgumentException: d does not exist.
* // ...
*
* // Extract multiple StructFields. Field names are provided in a set.
* // A StructType object will be returned.
* val twoFields = struct(Set("b", "c"))
* // twoFields: StructType =
* // StructType(StructField(b,LongType,false), StructField(c,BooleanType,false))
*
* // Any names without matching fields will throw an exception.
* // For the case shown below, an exception is thrown due to "d".
* struct(Set("b", "c", "d"))
* // java.lang.IllegalArgumentException: d does not exist.
* // ...
* }}}
*
* A [[org.apache.spark.sql.Row]] object is used as a value of the [[StructType]].
*
* Scala Example:
* {{{
* import org.apache.spark.sql._
* import org.apache.spark.sql.types._
*
* val innerStruct =
* StructType(
* StructField("f1", IntegerType, true) ::
* StructField("f2", LongType, false) ::
* StructField("f3", BooleanType, false) :: Nil)
*
* val struct = StructType(
* StructField("a", innerStruct, true) :: Nil)
*
* // Create a Row with the schema defined by struct
* val row = Row(Row(1, 2, true))
* }}}
*
* @since 1.3.0
*/
@Stable
@Stable
case class StructType(fields: Array[StructField]) extends DataType with Seq[StructField] {
  // Note: `fields` is an Array for performance/Java interop; it is treated as read-only.

  /** No-arg constructor for kryo. */
  def this() = this(Array.empty[StructField])

  /** Returns all field names in an array. */
  def fieldNames: Array[String] = fields.map(_.name)

  /**
   * Returns all field names in an array. This is an alias of `fieldNames`.
   *
   * @since 2.4.0
   */
  def names: Array[String] = fieldNames

  // Lazily-built lookup caches; safe to cache because StructType is immutable.
  private lazy val fieldNamesSet: Set[String] = fieldNames.toSet
  private lazy val nameToField: Map[String, StructField] = fields.map(f => f.name -> f).toMap
  private lazy val nameToIndex: Map[String, Int] = fieldNames.zipWithIndex.toMap

  // Element-wise, order-sensitive comparison (Array's own equals is reference equality).
  override def equals(that: Any): Boolean = {
    that match {
      case StructType(otherFields) =>
        java.util.Arrays.equals(
          fields.asInstanceOf[Array[AnyRef]], otherFields.asInstanceOf[Array[AnyRef]])
      case _ => false
    }
  }

  // Hash computed once and cached; valid because `fields` is never mutated.
  private lazy val _hashCode: Int = java.util.Arrays.hashCode(fields.asInstanceOf[Array[AnyRef]])
  override def hashCode(): Int = _hashCode

  /**
   * Creates a new [[StructType]] by adding a new field.
   * {{{
   * val struct = (new StructType)
   *   .add(StructField("a", IntegerType, true))
   *   .add(StructField("b", LongType, false))
   *   .add(StructField("c", StringType, true))
   *}}}
   */
  def add(field: StructField): StructType = {
    StructType(fields :+ field)
  }

  /**
   * Creates a new [[StructType]] by adding a new nullable field with no metadata.
   *
   * val struct = (new StructType)
   *   .add("a", IntegerType)
   *   .add("b", LongType)
   *   .add("c", StringType)
   */
  def add(name: String, dataType: DataType): StructType = {
    StructType(fields :+ StructField(name, dataType, nullable = true, Metadata.empty))
  }

  /**
   * Creates a new [[StructType]] by adding a new field with no metadata.
   *
   * val struct = (new StructType)
   *   .add("a", IntegerType, true)
   *   .add("b", LongType, false)
   *   .add("c", StringType, true)
   */
  def add(name: String, dataType: DataType, nullable: Boolean): StructType = {
    StructType(fields :+ StructField(name, dataType, nullable, Metadata.empty))
  }

  /**
   * Creates a new [[StructType]] by adding a new field and specifying metadata.
   * {{{
   * val struct = (new StructType)
   *   .add("a", IntegerType, true, Metadata.empty)
   *   .add("b", LongType, false, Metadata.empty)
   *   .add("c", StringType, true, Metadata.empty)
   * }}}
   */
  def add(
      name: String,
      dataType: DataType,
      nullable: Boolean,
      metadata: Metadata): StructType = {
    StructType(fields :+ StructField(name, dataType, nullable, metadata))
  }

  /**
   * Creates a new [[StructType]] by adding a new field and specifying metadata.
   * {{{
   * val struct = (new StructType)
   *   .add("a", IntegerType, true, "comment1")
   *   .add("b", LongType, false, "comment2")
   *   .add("c", StringType, true, "comment3")
   * }}}
   */
  def add(
      name: String,
      dataType: DataType,
      nullable: Boolean,
      comment: String): StructType = {
    StructType(fields :+ StructField(name, dataType, nullable).withComment(comment))
  }

  /**
   * Creates a new [[StructType]] by adding a new nullable field with no metadata where the
   * dataType is specified as a String.
   *
   * {{{
   * val struct = (new StructType)
   *   .add("a", "int")
   *   .add("b", "long")
   *   .add("c", "string")
   * }}}
   */
  def add(name: String, dataType: String): StructType = {
    add(name, CatalystSqlParser.parseDataType(dataType), nullable = true, Metadata.empty)
  }

  /**
   * Creates a new [[StructType]] by adding a new field with no metadata where the
   * dataType is specified as a String.
   *
   * {{{
   * val struct = (new StructType)
   *   .add("a", "int", true)
   *   .add("b", "long", false)
   *   .add("c", "string", true)
   * }}}
   */
  def add(name: String, dataType: String, nullable: Boolean): StructType = {
    add(name, CatalystSqlParser.parseDataType(dataType), nullable, Metadata.empty)
  }

  /**
   * Creates a new [[StructType]] by adding a new field and specifying metadata where the
   * dataType is specified as a String.
   * {{{
   * val struct = (new StructType)
   *   .add("a", "int", true, Metadata.empty)
   *   .add("b", "long", false, Metadata.empty)
   *   .add("c", "string", true, Metadata.empty)
   * }}}
   */
  def add(
      name: String,
      dataType: String,
      nullable: Boolean,
      metadata: Metadata): StructType = {
    add(name, CatalystSqlParser.parseDataType(dataType), nullable, metadata)
  }

  /**
   * Creates a new [[StructType]] by adding a new field and specifying metadata where the
   * dataType is specified as a String.
   * {{{
   * val struct = (new StructType)
   *   .add("a", "int", true, "comment1")
   *   .add("b", "long", false, "comment2")
   *   .add("c", "string", true, "comment3")
   * }}}
   */
  def add(
      name: String,
      dataType: String,
      nullable: Boolean,
      comment: String): StructType = {
    add(name, CatalystSqlParser.parseDataType(dataType), nullable, comment)
  }

  /**
   * Extracts the [[StructField]] with the given name.
   *
   * @throws IllegalArgumentException if a field with the given name does not exist
   */
  def apply(name: String): StructField = {
    nameToField.getOrElse(name,
      throw new IllegalArgumentException(
        s"$name does not exist. Available: ${fieldNames.mkString(", ")}"))
  }

  /**
   * Returns a [[StructType]] containing [[StructField]]s of the given names, preserving the
   * original order of fields.
   *
   * @throws IllegalArgumentException if at least one given field name does not exist
   */
  def apply(names: Set[String]): StructType = {
    val nonExistFields = names -- fieldNamesSet
    if (nonExistFields.nonEmpty) {
      throw new IllegalArgumentException(
        s"${nonExistFields.mkString(", ")} do(es) not exist. " +
          s"Available: ${fieldNames.mkString(", ")}")
    }
    // Preserve the original order of fields.
    StructType(fields.filter(f => names.contains(f.name)))
  }

  /**
   * Returns the index of a given field.
   *
   * @throws IllegalArgumentException if a field with the given name does not exist
   */
  def fieldIndex(name: String): Int = {
    nameToIndex.getOrElse(name,
      throw new IllegalArgumentException(
        s"$name does not exist. Available: ${fieldNames.mkString(", ")}"))
  }

  // Non-throwing variant of `fieldIndex`.
  private[sql] def getFieldIndex(name: String): Option[Int] = {
    nameToIndex.get(name)
  }

  /**
   * Returns the normalized path to a field and the field in this struct and its child structs.
   *
   * If includeCollections is true, this will return fields that are nested in maps and arrays.
   */
  private[sql] def findNestedField(
      fieldNames: Seq[String],
      includeCollections: Boolean = false,
      resolver: Resolver = _ == _): Option[(Seq[String], StructField)] = {

    def prettyFieldName(nameParts: Seq[String]): String = {
      import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
      nameParts.quoted
    }

    // Recursively walks `searchPath` down the schema, accumulating the resolved
    // (normalized) field names along the way.
    def findField(
        struct: StructType,
        searchPath: Seq[String],
        normalizedPath: Seq[String]): Option[(Seq[String], StructField)] = {
      searchPath.headOption.flatMap { searchName =>
        val found = struct.fields.filter(f => resolver(searchName, f.name))
        if (found.length > 1) {
          val names = found.map(f => prettyFieldName(normalizedPath :+ f.name))
            .mkString("[", ", ", " ]")
          throw new AnalysisException(
            s"Ambiguous field name: ${prettyFieldName(normalizedPath :+ searchName)}. Found " +
              s"multiple columns that can match: $names")
        } else if (found.isEmpty) {
          None
        } else {
          val field = found.head
          (searchPath.tail, field.dataType, includeCollections) match {
            case (Seq(), _, _) =>
              Some(normalizedPath -> field)

            case (names, struct: StructType, _) =>
              findField(struct, names, normalizedPath :+ field.name)

            case (_, _, false) =>
              None // types nested in maps and arrays are not used

            case (Seq("key"), MapType(keyType, _, _), true) =>
              // return the key type as a struct field to include nullability
              Some((normalizedPath :+ field.name) -> StructField("key", keyType, nullable = false))

            case (Seq("key", names @ _*), MapType(struct: StructType, _, _), true) =>
              findField(struct, names, normalizedPath ++ Seq(field.name, "key"))

            case (Seq("value"), MapType(_, valueType, isNullable), true) =>
              // return the value type as a struct field to include nullability
              Some((normalizedPath :+ field.name) ->
                StructField("value", valueType, nullable = isNullable))

            case (Seq("value", names @ _*), MapType(_, struct: StructType, _), true) =>
              findField(struct, names, normalizedPath ++ Seq(field.name, "value"))

            case (Seq("element"), ArrayType(elementType, isNullable), true) =>
              // return the element type as a struct field to include nullability
              Some((normalizedPath :+ field.name) ->
                StructField("element", elementType, nullable = isNullable))

            case (Seq("element", names @ _*), ArrayType(struct: StructType, _), true) =>
              findField(struct, names, normalizedPath ++ Seq(field.name, "element"))

            case _ =>
              None
          }
        }
      }
    }
    findField(this, fieldNames, Nil)
  }

  // Converts each field to an AttributeReference with matching name/type/nullability/metadata.
  protected[sql] def toAttributes: Seq[AttributeReference] =
    map(f => AttributeReference(f.name, f.dataType, f.nullable, f.metadata)())

  // Human-readable tree rendering of the schema (full depth).
  def treeString: String = treeString(Int.MaxValue)

  // Tree rendering limited to `maxDepth` levels; non-positive maxDepth means unlimited.
  def treeString(maxDepth: Int): String = {
    val stringConcat = new StringUtils.StringConcat()
    stringConcat.append("root\\n")
    val prefix = " |"
    val depth = if (maxDepth > 0) maxDepth else Int.MaxValue
    fields.foreach(field => field.buildFormattedString(prefix, stringConcat, depth))
    stringConcat.toString()
  }

  // scalastyle:off println
  def printTreeString(): Unit = println(treeString)
  // scalastyle:on println

  private[sql] def buildFormattedString(
      prefix: String,
      stringConcat: StringConcat,
      maxDepth: Int): Unit = {
    fields.foreach(field => field.buildFormattedString(prefix, stringConcat, maxDepth))
  }

  // JSON representation used by DataType.fromJson round-tripping.
  override private[sql] def jsonValue =
    ("type" -> typeName) ~
      ("fields" -> map(_.jsonValue))

  override def apply(fieldIndex: Int): StructField = fields(fieldIndex)

  override def length: Int = fields.length

  override def iterator: Iterator[StructField] = fields.iterator

  /**
   * The default size of a value of the StructType is the total default sizes of all field types.
   */
  override def defaultSize: Int = fields.map(_.dataType.defaultSize).sum

  override def simpleString: String = {
    val fieldTypes = fields.view.map(field => s"${field.name}:${field.dataType.simpleString}")
    truncatedString(
      fieldTypes,
      "struct<", ",", ">",
      SQLConf.get.maxToStringFields)
  }

  override def catalogString: String = {
    // in catalogString, we should not truncate
    val stringConcat = new StringUtils.StringConcat()
    val len = fields.length
    stringConcat.append("struct<")
    var i = 0
    while (i < len) {
      stringConcat.append(s"${fields(i).name}:${fields(i).dataType.catalogString}")
      i += 1
      if (i < len) stringConcat.append(",")
    }
    stringConcat.append(">")
    stringConcat.toString
  }

  override def sql: String = {
    val fieldTypes = fields.map(f => s"${quoteIdentifier(f.name)}: ${f.dataType.sql}")
    s"STRUCT<${fieldTypes.mkString(", ")}>"
  }

  /**
   * Returns a string containing a schema in DDL format. For example, the following value:
   * `StructType(Seq(StructField("eventId", IntegerType), StructField("s", StringType)))`
   * will be converted to `eventId` INT, `s` STRING.
   * The returned DDL schema can be used in a table creation.
   *
   * @since 2.4.0
   */
  def toDDL: String = fields.map(_.toDDL).mkString(",")

  // NOTE(review): the `> 2` / `- 2` below are hard-coded and disagree with
  // `maxNumberFields` used for `take`; for maxNumberFields > 2 this can report
  // "more fields" even when nothing was truncated. Confirm whether this is intended.
  private[sql] override def simpleString(maxNumberFields: Int): String = {
    val builder = new StringBuilder
    val fieldTypes = fields.take(maxNumberFields).map {
      f => s"${f.name}: ${f.dataType.simpleString(maxNumberFields)}"
    }
    builder.append("struct<")
    builder.append(fieldTypes.mkString(", "))
    if (fields.length > 2) {
      if (fields.length - fieldTypes.length == 1) {
        builder.append(" ... 1 more field")
      } else {
        builder.append(" ... " + (fields.length - 2) + " more fields")
      }
    }
    builder.append(">").toString()
  }

  /**
   * Merges with another schema (`StructType`).  For a struct field A from `this` and a struct field
   * B from `that`,
   *
   * 1. If A and B have the same name and data type, they are merged to a field C with the same name
   * and data type.  C is nullable if and only if either A or B is nullable.
   * 2. If A doesn't exist in `that`, it's included in the result schema.
   * 3. If B doesn't exist in `this`, it's also included in the result schema.
   * 4. Otherwise, `this` and `that` are considered as conflicting schemas and an exception would be
   * thrown.
   */
  private[sql] def merge(that: StructType): StructType =
    StructType.merge(this, that).asInstanceOf[StructType]

  override private[spark] def asNullable: StructType = {
    val newFields = fields.map {
      case StructField(name, dataType, nullable, metadata) =>
        StructField(name, dataType.asNullable, nullable = true, metadata)
    }

    StructType(newFields)
  }

  override private[spark] def existsRecursively(f: (DataType) => Boolean): Boolean = {
    f(this) || fields.exists(field => field.dataType.existsRecursively(f))
  }

  // Ordering derived from the field data types; transient so it is rebuilt after serialization.
  @transient
  private[sql] lazy val interpretedOrdering =
    InterpretedOrdering.forSchema(this.fields.map(_.dataType))
}
/**
* @since 1.3.0
*/
@Stable
@Stable
object StructType extends AbstractDataType {

  override private[sql] def defaultConcreteType: DataType = new StructType

  override private[sql] def acceptsType(other: DataType): Boolean = {
    other.isInstanceOf[StructType]
  }

  override private[sql] def simpleString: String = "struct"

  /** Parses a schema string: JSON first, falling back to the legacy case-class-like format. */
  private[sql] def fromString(raw: String): StructType = {
    Try(DataType.fromJson(raw)).getOrElse(LegacyTypeStringParser.parseString(raw)) match {
      case t: StructType => t
      case _ => throw new RuntimeException(s"Failed parsing ${StructType.simpleString}: $raw")
    }
  }

  /**
   * Creates StructType for a given DDL-formatted string, which is a comma separated list of field
   * definitions, e.g., a INT, b STRING.
   *
   * @since 2.2.0
   */
  def fromDDL(ddl: String): StructType = CatalystSqlParser.parseTableSchema(ddl)

  def apply(fields: Seq[StructField]): StructType = StructType(fields.toArray)

  def apply(fields: java.util.List[StructField]): StructType = {
    import scala.collection.JavaConverters._
    StructType(fields.asScala)
  }

  /** Builds a schema mirroring the given attributes (name, type, nullability, metadata). */
  private[sql] def fromAttributes(attributes: Seq[Attribute]): StructType =
    StructType(attributes.map(a => StructField(a.name, a.dataType, a.nullable, a.metadata)))

  /** Recursively removes the metadata entry `key` from every struct field inside `dt`. */
  private[sql] def removeMetadata(key: String, dt: DataType): DataType =
    dt match {
      case StructType(fields) =>
        val newFields = fields.map { f =>
          val mb = new MetadataBuilder()
          f.copy(dataType = removeMetadata(key, f.dataType),
            metadata = mb.withMetadata(f.metadata).remove(key).build())
        }
        StructType(newFields)
      case _ => dt
    }

  /**
   * Structurally merges two data types. Arrays and maps are merged element-wise (nullability
   * widened), structs are merged field-by-field taking the union of field names, and decimals
   * must agree on precision and scale. Throws [[SparkException]] for incompatible types.
   */
  private[sql] def merge(left: DataType, right: DataType): DataType =
    (left, right) match {
      case (ArrayType(leftElementType, leftContainsNull),
      ArrayType(rightElementType, rightContainsNull)) =>
        ArrayType(
          merge(leftElementType, rightElementType),
          leftContainsNull || rightContainsNull)

      case (MapType(leftKeyType, leftValueType, leftContainsNull),
      MapType(rightKeyType, rightValueType, rightContainsNull)) =>
        MapType(
          merge(leftKeyType, rightKeyType),
          merge(leftValueType, rightValueType),
          leftContainsNull || rightContainsNull)

      case (StructType(leftFields), StructType(rightFields)) =>
        val newFields = mutable.ArrayBuffer.empty[StructField]

        val rightMapped = fieldsMap(rightFields)
        leftFields.foreach {
          case leftField @ StructField(leftName, leftType, leftNullable, _) =>
            rightMapped.get(leftName)
              .map { case rightField @ StructField(rightName, rightType, rightNullable, _) =>
                try {
                  leftField.copy(
                    dataType = merge(leftType, rightType),
                    nullable = leftNullable || rightNullable)
                } catch {
                  case NonFatal(e) =>
                    throw new SparkException(s"Failed to merge fields '$leftName' and " +
                      s"'$rightName'. " + e.getMessage)
                }
              }
              .orElse {
                // Keep left-only fields as-is.
                Some(leftField)
              }
              .foreach(newFields += _)
        }

        // Append fields that only exist on the right, preserving their order.
        val leftMapped = fieldsMap(leftFields)
        rightFields
          .filterNot(f => leftMapped.contains(f.name))
          .foreach { f =>
            newFields += f
          }

        StructType(newFields)

      case (DecimalType.Fixed(leftPrecision, leftScale),
      DecimalType.Fixed(rightPrecision, rightScale)) =>
        if ((leftPrecision == rightPrecision) && (leftScale == rightScale)) {
          DecimalType(leftPrecision, leftScale)
        } else if ((leftPrecision != rightPrecision) && (leftScale != rightScale)) {
          throw new SparkException("Failed to merge decimal types with incompatible " +
            s"precision $leftPrecision and $rightPrecision & scale $leftScale and $rightScale")
        } else if (leftPrecision != rightPrecision) {
          throw new SparkException("Failed to merge decimal types with incompatible " +
            s"precision $leftPrecision and $rightPrecision")
        } else {
          // Fixed typo in the error message: was "scala", should be "scale".
          throw new SparkException("Failed to merge decimal types with incompatible " +
            s"scale $leftScale and $rightScale")
        }

      case (leftUdt: UserDefinedType[_], rightUdt: UserDefinedType[_])
        if leftUdt.userClass == rightUdt.userClass => leftUdt

      case (leftType, rightType) if leftType == rightType =>
        leftType

      case _ =>
        throw new SparkException(s"Failed to merge incompatible data types ${left.catalogString}" +
          s" and ${right.catalogString}")
    }

  /** Indexes fields by name into a mutable map (insertion-efficient lookup for `merge`). */
  private[sql] def fieldsMap(fields: Array[StructField]): Map[String, StructField] = {
    // Mimics the optimization of breakOut, not present in Scala 2.13, while working in 2.12
    val map = mutable.Map[String, StructField]()
    map.sizeHint(fields.length)
    fields.foreach(s => map.put(s.name, s))
    map
  }
}
| goldmedal/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala | Scala | apache-2.0 | 22,797 |
package org.http4s
/** Package object mixing in [[ElemInstances]] so its members are available via `org.http4s.scalaxml`. */
package object scalaxml extends ElemInstances
| hvesalai/http4s | scala-xml/src/main/scala/scalaxml/package.scala | Scala | apache-2.0 | 66 |
package org.bitcoins.core.script
import org.bitcoins.core.script.constant.ScriptConstant
import org.bitcoins.core.util.BitcoinSUtil
import org.scalatest.{MustMatchers, FlatSpec}
/**
* Created by chris on 4/1/16.
*/
class ScriptConstantFactoryTest extends FlatSpec with MustMatchers {

  "ScriptConstantFactory" must "create a constant from bytes" in {
    // Round-trip: a constant built from bytes must expose exactly the same bytes.
    val bytes = BitcoinSUtil.decodeHex("abc123")
    ScriptConstant(bytes).bytes must be (bytes)
  }
}
| SuredBits/bitcoin-s-sidechains | src/test/scala/org/bitcoins/core/script/ScriptConstantFactoryTest.scala | Scala | mit | 460 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package verification
import trees._
import TypeCheckerUtils._
object TypeCheckerContext {
  // Name of the synthetic variable that witnesses a `let` binding's equality.
  val letWitness = "__letWitness"

  /** Immutable context threaded through type checking: visible symbols, bound
    * type/term variables, the current function/ADT, measure information and
    * VC-generation flags. All `with*`/`bind*` methods return an updated copy
    * that keeps this context's position. */
  case class TypingContext(
    depth: Int,
    visibleFunctions: Set[Identifier],
    visibleADTs: Set[Identifier],
    typeVariables: Set[TypeParameter],
    termVariables: Seq[Variable],
    currentFid: Option[Identifier],
    currentADT: Option[Identifier],
    currentMeasure: Option[Expr],
    measureType: Option[Type],
    vcKind: VCKind,
    checkSAT: Boolean,
    emitVCs: Boolean,
  ) extends inox.utils.Positioned {

    // Copy with the recursion depth increased by one.
    def inc() = {
      copy(depth = depth + 1).setPos(this)
    }

    // Internal error if `vd` is already bound as a term variable.
    def checkFreshTermVariable(vd: ValDef)(using opts: PrinterOptions, ctx: inox.Context) = {
      if (termVariables.contains(vd.toVariable))
        ctx.reporter.internalError(s"Typing context already contains variable ${vd.id.asString}")
    }

    def checkFreshTermVariables(vds: Seq[ValDef])(using opts: PrinterOptions, ctx: inox.Context) = {
      vds.foreach(checkFreshTermVariable)
    }

    // NOTE(review): this checks `termVariables`, not `typeVariables`, although the
    // error message talks about a type variable — looks like a copy-paste slip; confirm.
    def checkFreshTypeVariable(tp: TypeParameter)(using opts: PrinterOptions, ctx: inox.Context) = {
      if (termVariables.contains(tp))
        ctx.reporter.internalError(s"Typing context already contains type variable ${tp.asString}")
    }

    def checkFreshTypeVariables(tps: Iterable[TypeParameter])(using opts: PrinterOptions, ctx: inox.Context) = {
      tps.foreach(checkFreshTypeVariable)
    }

    // Binds `vd` together with a let-witness variable recording `vd == e`.
    def bindWithValue(vd: ValDef, e: Expr)(using opts: PrinterOptions, ctx: inox.Context): TypingContext = {
      checkFreshTermVariable(vd)
      copy(termVariables = termVariables :+ vd.toVariable :+ Variable.fresh(letWitness, LetEquality(vd.toVariable,e))).setPos(this)
    }

    def bindWithValues(vds: Seq[ValDef], es: Seq[Expr])(using opts: PrinterOptions, ctx: inox.Context) = {
      checkFreshTermVariables(vds)
      vds.zip(es).foldLeft(this){
        case(tcAcc, (vd,e)) ⇒ tcAcc.bindWithValue(vd, e)
      }
    }

    // Like `bindWithValue` but freshens `vd` first; returns the fresh identifier.
    def freshBindWithValue(vd: ValDef, e: Expr)(using opts: PrinterOptions, ctx: inox.Context): (TypingContext, Identifier) = {
      val freshVd = vd.freshen
      (
        copy(termVariables = termVariables :+ freshVd.toVariable :+ Variable.fresh(letWitness, LetEquality(freshVd.toVariable,e))).setPos(this),
        freshVd.id
      )
    }

    // Freshens and binds each (vd, e) pair, accumulating a substitution from old to fresh ids.
    def freshBindWithValues(vds: Seq[ValDef], es: Seq[Expr])(using opts: PrinterOptions, ctx: inox.Context): (TypingContext, Substituter) = {
      if (vds.size != es.size)
        ctx.reporter.internalError("Function `freshBindWithValues` expects sequences with the same size")
      vds.zip(es).foldLeft((this, new Substituter(Map()))) {
        case ((tcAcc, freshener), (vd, e)) =>
          val (newTc, newId) = tcAcc.freshBindWithValue(freshener.transform(vd), freshener.transform(e))
          if (freshener.contains(vd.id)) {
            ctx.reporter.internalError(s"Substitution should not contain ${vd.id.asString}")
          }
          (newTc, freshener.enrich(vd.id, newId))
      }
    }

    // Records `cond` as known-true via a truth-witness variable.
    def withTruth(cond: Expr) = {
      copy(termVariables = termVariables :+ Variable.fresh("__truthWitness", Truth(cond))).setPos(this)
    }

    def bind(vd: ValDef)(using opts: PrinterOptions, ctx: inox.Context): TypingContext = {
      checkFreshTermVariable(vd)
      copy(termVariables = termVariables :+ vd.toVariable).setPos(this)
    }

    def bind(vds: Seq[ValDef])(using opts: PrinterOptions, ctx: inox.Context): TypingContext = {
      checkFreshTermVariables(vds)
      copy(termVariables = termVariables ++ vds.map(_.toVariable)).setPos(this)
    }

    def freshBind(vd: ValDef)(using opts: PrinterOptions, ctx: inox.Context): (TypingContext, Identifier) = {
      val freshVd = vd.freshen
      (
        copy(termVariables = termVariables :+ freshVd.toVariable).setPos(this),
        freshVd.id
      )
    }

    def withTypeVariables(vars: Set[TypeParameter])(using opts: PrinterOptions, ctx: inox.Context): TypingContext = {
      checkFreshTypeVariables(vars)
      copy(typeVariables = typeVariables ++ vars).setPos(this)
    }

    // Splits `ids` into functions and sorts (by symbol lookup) and adds both to scope.
    def withIdentifiers(ids: Set[Identifier])(using s: Symbols) = {
      val (fids, sorts) = ids.partition(id => s.lookupFunction(id).isDefined)
      copy(
        visibleFunctions = visibleFunctions ++ fids,
        visibleADTs = visibleADTs ++ sorts
      ).setPos(this)
    }

    def inFunction(id: Identifier) = {
      copy(currentFid = Some(id)).setPos(this)
    }

    def inADT(id: Identifier) = {
      copy(currentADT = Some(id)).setPos(this)
    }

    def withMeasureType(t: Option[Type]) = {
      copy(measureType = t).setPos(this)
    }

    def withMeasure(m: Option[Expr]) = {
      copy(currentMeasure = m).setPos(this)
    }

    def withVCKind(kind: VCKind) = {
      copy(vcKind = kind).setPos(this)
    }

    def withCheckSAT(checkSAT: Boolean) = {
      copy(checkSAT = checkSAT).setPos(this)
    }

    def withEmitVCs(emitVCs: Boolean) = {
      copy(emitVCs = emitVCs).setPos(this)
    }

    // Two spaces of indentation per recursion depth, for debug output.
    def indent: String = "  " * depth

    // Multi-line debug rendering of this context.
    def asString(indent: String = "")(using PrinterOptions) = {
      (if (indent != "") s"${indent}Depth: $depth\\n" else "") +
      s"""|${indent}Kind: ${vcKind}
          |${indent}Check SAT: ${checkSAT}
          |${indent}Emit VCs: ${emitVCs}
          |${indent}Functions: ${visibleFunctions.map(_.asString).mkString(", ")}
          |${indent}ADTs: ${visibleADTs.map(_.asString).mkString(", ")}
          |${indent}Type Variables: ${typeVariables.map(_.asString).mkString(", ")}
          |${indent}Term Variables:\\n${indent}${termVariables.map(v => "  " + pp(v)).mkString("\\n" + indent)}
          |""".stripMargin
    }
  }

  object TypingContext {
    // Fresh, empty context: nothing visible, nothing bound, default VC settings.
    def empty = TypingContext(
      depth = 0,
      visibleFunctions = Set(),
      visibleADTs = Set(),
      typeVariables = Set(),
      termVariables = Seq(),
      currentFid = None,
      currentADT = None,
      currentMeasure = None,
      measureType = None,
      vcKind = VCKind.CheckType,
      checkSAT = false,
      emitVCs = true,
    )
  }
}
| epfl-lara/stainless | core/src/main/scala/stainless/verification/TypeCheckerContext.scala | Scala | apache-2.0 | 6,102 |
package examples.demo
import java.awt.Dimension
import examples.demo.ui.{Circle, Rectangle, ShapesPanel}
import rescala._
import scala.swing.{MainFrame, SimpleSwingApplication, UIElement}
/**
* This is a static display of two circles and a rectangle.
* It demonstrates, how to display Shapes using our custom
* ShapesPanel. The only REScala Feature used here are Vars,
* which we explain in the next step.
*/
object ASwingFrame extends SimpleSwingApplication {
  /** Builds the main window: a ShapesPanel displaying two circles and a rectangle. */
  override lazy val top = {
    // Every shape property is wrapped in a Var so it can later be updated reactively.
    val panel = new ShapesPanel(Var(List(
      new Circle(centerX = Var(75), centerY = Var(30), diameter = Var(25)),
      new Circle(Var(100), Var(100), Var(50)),
      new Rectangle(centerX = Var(-50), centerY = Var(-100), hitboxWidth = Var(10), hitboxHeight = Var(100))
    )))
    panel.preferredSize = new Dimension(400, 300)
    new MainFrame {
      title = "REScala Demo"
      contents = panel
      // NOTE(review): passes a UIElement whose peer is null — presumably to center
      // the frame on screen (like setLocationRelativeTo(null)); confirm.
      setLocationRelativeTo(new UIElement {override def peer = null})
    }
  }
  override def main(args: Array[String]): Unit = {
    super.main(args)
    // Busy-waits for the frame to appear, then spins as a placeholder main loop
    // until the window is closed (demo scaffolding; the loop body is intentionally empty).
    while(!top.visible) Thread.sleep(5)
    while(top.visible) {
      Thread.sleep(1)
      /* TODO main loop */
    }
  }
}
| volkc/REScala | Examples/examples/src/main/scala/examples/demo/ASwingFrame.scala | Scala | apache-2.0 | 1,187 |
package com.arcusys.valamis.lesson.scorm.service
import java.io._
import java.util.zip.ZipFile
import com.arcusys.valamis.file.storage.FileStorage
import com.arcusys.valamis.lesson.scorm.model.manifest.Activity
import com.arcusys.valamis.lesson.scorm.service.parser.ManifestParser
import com.arcusys.valamis.lesson.scorm.storage.{ ActivityStorage, ResourcesStorage, ScormPackagesStorage }
import com.arcusys.valamis.lesson.storage.PackageScopeRuleStorage
import com.arcusys.valamis.model.ScopeType
import com.arcusys.valamis.util.{ FileSystemUtil, StreamUtil, TreeNode, ZipUtil }
import com.escalatesoft.subcut.inject.{ BindingModule, Injectable }
import scala.xml.XML
object PackageProcessor {
  /** A SCORM package is considered valid when the zip bundles an `imsmanifest.xml` descriptor. */
  def isValidPackage(packageFile: File) = {
    ZipUtil.zipContains("imsmanifest.xml", packageFile)
  }
}
/**
 * Imports uploaded SCORM packages: parses `imsmanifest.xml`, persists the
 * manifest, organizations/activities and resources, then copies the package
 * content into the file storage.
 */
class PackageProcessor(implicit val bindingModule: BindingModule) extends Injectable {
  val scormRepository = inject[ScormPackagesStorage]
  val resourceRepository = inject[ResourcesStorage]
  val activityRepository = inject[ActivityStorage]
  val fileStorage = inject[FileStorage]
  val packageScopeRuleRepository = inject[PackageScopeRuleStorage]

  /**
   * Processes the given SCORM package and persists all of its metadata and content.
   *
   * @param packageTitle   title to store for the package
   * @param packageSummary summary/description to store
   * @param packageFile    the uploaded zip; deleted after a successful import
   * @param courseId       course the package belongs to, if any
   * @param logo           optional logo reference stored with the manifest
   * @return the id of the newly created package
   */
  def processPackageAndGetId(packageTitle: String, packageSummary: String, packageFile: File, courseId: Option[Int], logo: Option[String] = None): Long = {
    val tempDirectory = FileSystemUtil.getTempDirectory("scormupload")
    ZipUtil.unzipFile("imsmanifest.xml", tempDirectory, packageFile)
    val root = XML.loadFile(new File(tempDirectory, "imsmanifest.xml"))
    val doc = new ManifestParser(root, packageTitle, packageSummary).parse
    val packageId = scormRepository.createAndGetID(doc.manifest.copy(logo = logo), courseId)

    // Default visibility rules: instance-wide and on the owning course scope.
    packageScopeRuleRepository.create(packageId, ScopeType.Instance, None, true, false)
    packageScopeRuleRepository.create(packageId, ScopeType.Site, courseId.map(_.toString), true, false)

    // Persists an activity subtree in pre-order for the new package.
    def createActivities(activities: Seq[TreeNode[Activity]]) {
      for (node <- activities) {
        activityRepository.create(packageId, node.item)
        createActivities(node.children)
      }
    }

    for (organizationNode <- doc.organizations) {
      activityRepository.create(packageId, organizationNode.item)
      createActivities(organizationNode.children)
    }
    for (resource <- doc.resources) resourceRepository.createForPackageAndGetID(packageId, resource)

    val fileStorageDirectory = "data/" + packageId + "/"
    unzipToFileStorage(fileStorageDirectory, packageFile)

    // Clean up the uploaded archive and the manifest extraction directory.
    FileSystemUtil.deleteFile(packageFile)
    FileSystemUtil.deleteFile(tempDirectory)
    packageId
  }

  /**
   * Copies every entry of `zipFile` into the file storage under `directory`.
   * Fix: the zip handle and each entry stream are now closed in `finally`
   * blocks — previously both leaked when storing an entry threw.
   */
  private def unzipToFileStorage(directory: String, zipFile: File) {
    val zip = new ZipFile(zipFile)
    try {
      val entries = zip.entries
      while (entries.hasMoreElements) {
        val entry = entries.nextElement
        if (entry.isDirectory) {
          fileStorage.store(directory + entry.getName)
        } else {
          val stream = zip.getInputStream(entry)
          try {
            fileStorage.store(directory + entry.getName, StreamUtil.toByteArray(stream))
          } finally {
            stream.close()
          }
        }
      }
    } finally {
      zip.close()
    }
  }
}
| ViLPy/Valamis | valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/service/PackageProcessor.scala | Scala | lgpl-3.0 | 3,125 |
package coursier.cli.publish.params
import cats.data.{Validated, ValidatedNel}
import coursier.cli.publish.options.SignatureOptions
/**
 * Resolved artifact-signing configuration.
 *
 * @param gpg       whether artifacts should be GPG-signed
 * @param gpgKeyOpt key id to sign with, when explicitly provided
 */
final case class SignatureParams(
  gpg: Boolean,
  gpgKeyOpt: Option[String]
)
/** Derives validated [[SignatureParams]] from the raw CLI [[SignatureOptions]]. */
object SignatureParams {
  def apply(options: SignatureOptions): ValidatedNel[String, SignatureParams] =
    // check here that the passed gpg key exists?
    Validated.validNel(
      SignatureParams(
        // TODO Adjust default value if --sonatype is passed
        // Signing defaults to enabled whenever a key was explicitly provided.
        options.gpg.getOrElse(options.gpgKey.nonEmpty),
        options.gpgKey
      )
    )
}
| coursier/coursier | modules/cli/src/main/scala/coursier/cli/publish/params/SignatureParams.scala | Scala | apache-2.0 | 573 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.scala.stream.table
import org.apache.flink.api.scala._
import org.apache.flink.table.api.java.utils.UserDefinedAggFunctions.WeightedAvgWithRetract
import org.apache.flink.table.api.{Table, ValidationException}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.expressions.utils.Func1
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{StreamTableTestUtil, TableTestBase}
import org.junit.Test
class OverWindowTest extends TableTestBase {
private val streamUtil: StreamTableTestUtil = streamTestUtil()
val table: Table = streamUtil.addTable[(Int, String, Long)]("MyTable",
'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
  // Referencing an undefined window alias ('x instead of the declared 'w) must be rejected.
  @Test(expected = classOf[ValidationException])
  def testInvalidWindowAlias(): Unit = {
    val result = table
      .window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows as 'w)
      .select('c, 'b.count over 'x)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // Ordering by an unresolved field ('abc does not exist) must be rejected.
  @Test(expected = classOf[ValidationException])
  def testOrderBy(): Unit = {
    val result = table
      .window(Over partitionBy 'c orderBy 'abc preceding 2.rows as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // A non-literal / non-numeric bound (following "xx") must be rejected.
  @Test(expected = classOf[ValidationException])
  def testPrecedingAndFollowingUsingIsLiteral(): Unit = {
    val result = table
      .window(Over partitionBy 'c orderBy 'rowtime preceding 2 following "xx" as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // Mixing bound kinds (ROWS preceding with a RANGE following) must be rejected.
  @Test(expected = classOf[ValidationException])
  def testPrecedingAndFollowingUsingSameType(): Unit = {
    val result = table
      .window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows following CURRENT_RANGE as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // Partitioning by an arbitrary expression ('a + 'b) instead of a field must be rejected.
  @Test(expected = classOf[ValidationException])
  def testPartitionByWithUnresolved(): Unit = {
    val result = table
      .window(Over partitionBy 'a + 'b orderBy 'rowtime preceding 2.rows as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // Partitioning by a field whose type cannot serve as a key (Either) must be rejected.
  @Test(expected = classOf[ValidationException])
  def testPartitionByWithNotKeyType(): Unit = {
    val table2 = streamUtil.addTable[(Int, String, Either[Long, String])]("MyTable2", 'a, 'b, 'c)
    val result = table2
      .window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // A negative preceding bound must be rejected.
  @Test(expected = classOf[ValidationException])
  def testPrecedingValue(): Unit = {
    val result = table
      .window(Over orderBy 'rowtime preceding -1.rows as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // A negative following bound must be rejected.
  @Test(expected = classOf[ValidationException])
  def testFollowingValue(): Unit = {
    val result = table
      .window(Over orderBy 'rowtime preceding 1.rows following -2.rows as 'w)
      .select('c, 'b.count over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // Calling the UDAGG with argument types that do not match its signature must be rejected.
  @Test(expected = classOf[ValidationException])
  def testUdAggWithInvalidArgs(): Unit = {
    val weightedAvg = new WeightedAvgWithRetract
    val result = table
      .window(Over orderBy 'rowtime preceding 1.minutes as 'w)
      .select('c, weightedAvg('b, 'a) over 'w)
    streamUtil.tEnv.optimize(result.getRelNode, updatesAsRetraction = true)
  }
  // Over windows have no start/end properties; accessing 'w.start / 'w.end must fail.
  @Test
  def testAccessesWindowProperties(): Unit = {
    thrown.expect(classOf[ValidationException])
    thrown.expectMessage("Window start and end properties are not available for Over windows.")
    table
      .window(Over orderBy 'rowtime preceding 1.minutes as 'w)
      .select('c, 'a.count over 'w, 'w.start, 'w.end)
  }
@Test
def testScalarFunctionsOnOverWindow() = {
val weightedAvg = new WeightedAvgWithRetract
val plusOne = Func1
val result = table
.window(Over partitionBy 'b orderBy 'proctime preceding UNBOUNDED_ROW as 'w)
.select(
plusOne('a.sum over 'w as 'wsum) as 'd,
('a.count over 'w).exp(),
(weightedAvg('c, 'a) over 'w) + 1,
"AVG:".toExpr + (weightedAvg('c, 'a) over 'w),
array(weightedAvg('c, 'a) over 'w, 'a.count over 'w))
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "b", "c", "proctime")
),
term("partitionBy", "b"),
term("orderBy", "proctime"),
term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term("select", "a", "b", "c", "proctime",
"SUM(a) AS w0$o0",
"COUNT(a) AS w0$o1",
"WeightedAvgWithRetract(c, a) AS w0$o2")
),
term("select",
s"${plusOne.functionIdentifier}(w0$$o0) AS d",
"EXP(CAST(w0$o1)) AS _c1",
"+(w0$o2, 1) AS _c2",
"||('AVG:', CAST(w0$o2)) AS _c3",
"ARRAY(w0$o2, w0$o1) AS _c4")
)
streamUtil.verifyTable(result, expected)
}
  // Proctime ROWS window (2 preceding rows, partitioned by 'b) should translate to a
  // DataStreamOverAggregate with a ROWS frame, with unused fields pruned by the calc nodes.
  @Test
  def testProcTimeBoundedPartitionedRowsOver() = {
    val weightedAvg = new WeightedAvgWithRetract
    val result = table
      .window(Over partitionBy 'b orderBy 'proctime preceding 2.rows following CURRENT_ROW as 'w)
      .select('c, weightedAvg('c, 'a) over 'w)
    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "b", "c", "proctime")
          ),
          term("partitionBy", "b"),
          term("orderBy", "proctime"),
          term("rows", "BETWEEN 2 PRECEDING AND CURRENT ROW"),
          term("select", "a", "b", "c", "proctime", "WeightedAvgWithRetract(c, a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS _c1")
      )
    streamUtil.verifyTable(result, expected)
  }
@Test
def testProcTimeBoundedPartitionedRangeOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(
Over partitionBy 'a orderBy 'proctime preceding 2.hours following CURRENT_RANGE as 'w)
.select('a, weightedAvg('c, 'a) over 'w as 'myAvg)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("partitionBy", "a"),
term("orderBy", "proctime"),
term("range", "BETWEEN 7200000 PRECEDING AND CURRENT ROW"),
term(
"select",
"a",
"c",
"proctime",
"WeightedAvgWithRetract(c, a) AS w0$o0"
)
),
term("select", "a", "w0$o0 AS myAvg")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testProcTimeBoundedNonPartitionedRangeOver() = {
val result = table
.window(Over orderBy 'proctime preceding 10.second as 'w)
.select('a, 'c.count over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("orderBy", "proctime"),
term("range", "BETWEEN 10000 PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "proctime", "COUNT(c) AS w0$o0")
),
term("select", "a", "w0$o0 AS _c1")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testProcTimeBoundedNonPartitionedRowsOver() = {
val result = table
.window(Over orderBy 'proctime preceding 2.rows as 'w)
.select('c, 'a.count over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("orderBy", "proctime"),
term("rows", "BETWEEN 2 PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "proctime", "COUNT(a) AS w0$o0")
),
term("select", "c", "w0$o0 AS _c1")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testProcTimeUnboundedPartitionedRangeOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_RANGE following
CURRENT_RANGE as 'w)
.select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("partitionBy", "c"),
term("orderBy", "proctime"),
term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term(
"select",
"a",
"c",
"proctime",
"COUNT(a) AS w0$o0",
"WeightedAvgWithRetract(c, a) AS w0$o1"
)
),
term(
"select",
"a",
"c",
"w0$o0 AS _c2",
"w0$o1 AS _c3"
)
)
streamUtil.verifyTable(result, expected)
}
@Test
def testProcTimeUnboundedPartitionedRowsOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(
Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_ROW following CURRENT_ROW as 'w)
.select('c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("partitionBy", "c"),
term("orderBy", "proctime"),
term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "proctime",
"COUNT(a) AS w0$o0",
"WeightedAvgWithRetract(c, a) AS w0$o1")
),
term("select", "c", "w0$o0 AS _c1", "w0$o1 AS _c2")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testProcTimeUnboundedNonPartitionedRangeOver() = {
val result = table
.window(
Over orderBy 'proctime preceding UNBOUNDED_RANGE as 'w)
.select('a, 'c, 'a.count over 'w, 'a.sum over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("orderBy", "proctime"),
term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term(
"select",
"a",
"c",
"proctime",
"COUNT(a) AS w0$o0",
"SUM(a) AS w0$o1"
)
),
term(
"select",
"a",
"c",
"w0$o0 AS _c2",
"w0$o1 AS _c3"
)
)
streamUtil.verifyTable(result, expected)
}
@Test
def testProcTimeUnboundedNonPartitionedRowsOver() = {
val result = table
.window(Over orderBy 'proctime preceding UNBOUNDED_ROW as 'w)
.select('c, 'a.count over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "proctime")
),
term("orderBy", "proctime"),
term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "proctime", "COUNT(a) AS w0$o0")
),
term("select", "c", "w0$o0 AS _c1")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeBoundedPartitionedRowsOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(
Over partitionBy 'b orderBy 'rowtime preceding 2.rows following CURRENT_ROW as 'w)
.select('c, 'b.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "b", "c", "rowtime")
),
term("partitionBy", "b"),
term("orderBy", "rowtime"),
term("rows", "BETWEEN 2 PRECEDING AND CURRENT ROW"),
term("select", "a", "b", "c", "rowtime",
"COUNT(b) AS w0$o0",
"WeightedAvgWithRetract(c, a) AS w0$o1")
),
term("select", "c", "w0$o0 AS _c1", "w0$o1 AS wAvg")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeBoundedPartitionedRangeOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(
Over partitionBy 'a orderBy 'rowtime preceding 2.hours following CURRENT_RANGE as 'w)
.select('a, 'c.avg over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("partitionBy", "a"),
term("orderBy", "rowtime"),
term("range", "BETWEEN 7200000 PRECEDING AND CURRENT ROW"),
term(
"select",
"a",
"c",
"rowtime",
"AVG(c) AS w0$o0",
"WeightedAvgWithRetract(c, a) AS w0$o1"
)
),
term("select", "a", "w0$o0 AS _c1", "w0$o1 AS wAvg")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeBoundedNonPartitionedRangeOver() = {
val result = table
.window(Over orderBy 'rowtime preceding 10.second as 'w)
.select('a, 'c.count over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("orderBy", "rowtime"),
term("range", "BETWEEN 10000 PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "rowtime", "COUNT(c) AS w0$o0")
),
term("select", "a", "w0$o0 AS _c1")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeBoundedNonPartitionedRowsOver() = {
val result = table
.window(Over orderBy 'rowtime preceding 2.rows as 'w)
.select('c, 'a.count over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("orderBy", "rowtime"),
term("rows", "BETWEEN 2 PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "rowtime", "COUNT(a) AS w0$o0")
),
term("select", "c", "w0$o0 AS _c1")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeUnboundedPartitionedRangeOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(Over partitionBy 'c orderBy 'rowtime preceding UNBOUNDED_RANGE following
CURRENT_RANGE as 'w)
.select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("partitionBy", "c"),
term("orderBy", "rowtime"),
term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term(
"select",
"a",
"c",
"rowtime",
"COUNT(a) AS w0$o0",
"WeightedAvgWithRetract(c, a) AS w0$o1"
)
),
term(
"select",
"a",
"c",
"w0$o0 AS _c2",
"w0$o1 AS wAvg"
)
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeUnboundedPartitionedRowsOver() = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(Over partitionBy 'c orderBy 'rowtime preceding UNBOUNDED_ROW following
CURRENT_ROW as 'w)
.select('c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("partitionBy", "c"),
term("orderBy", "rowtime"),
term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "rowtime",
"COUNT(a) AS w0$o0",
"WeightedAvgWithRetract(c, a) AS w0$o1")
),
term("select", "c", "w0$o0 AS _c1", "w0$o1 AS wAvg")
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeUnboundedNonPartitionedRangeOver() = {
val result = table
.window(
Over orderBy 'rowtime preceding UNBOUNDED_RANGE as 'w)
.select('a, 'c, 'a.count over 'w, 'a.sum over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("orderBy", "rowtime"),
term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term(
"select",
"a",
"c",
"rowtime",
"COUNT(a) AS w0$o0",
"SUM(a) AS w0$o1"
)
),
term(
"select",
"a",
"c",
"w0$o0 AS _c2",
"w0$o1 AS _c3"
)
)
streamUtil.verifyTable(result, expected)
}
@Test
def testRowTimeUnboundedNonPartitionedRowsOver() = {
val result = table
.window(Over orderBy 'rowtime preceding UNBOUNDED_ROW as 'w)
.select('c, 'a.count over 'w)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "c", "rowtime")
),
term("orderBy", "rowtime"),
term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
term("select", "a", "c", "rowtime", "COUNT(a) AS w0$o0")
),
term("select", "c", "w0$o0 AS _c1")
)
streamUtil.verifyTable(result, expected)
}
}
/** Companion holding helper types for the tests above. */
object OverWindowTest{
  // NOTE(review): Pojo appears unused within this file — confirm external usage before removing.
  case class Pojo(id: Long, name: String)
}
| hongyuhong/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/scala/stream/table/OverWindowTest.scala | Scala | apache-2.0 | 20,760 |
package org.embulk.input.dynamodb.item
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.{Optional, List => JList, Map => JMap}
import com.amazonaws.services.dynamodbv2.model.AttributeValue
import org.embulk.config.{Config, ConfigDefault, Task => EmbulkTask}
import scala.jdk.CollectionConverters._
import scala.util.chaining._
/**
* TODO: I want to bind directly `org.embulk.config.Config`` to `com.amazonaws.services.dynamodbv2.model.AttributeValue`.
* Should I implement `com.amazonaws.transform.JsonUnmarshallerContext`?
**/
object DynamodbAttributeValue {
  /**
   * Embulk config mirror of the SDK AttributeValue: each getter corresponds to one
   * DynamoDB attribute type tag (S, N, B, SS, NS, BS, M, L, NULL, BOOL). All fields
   * default to absent; exactly one is expected to be set per task.
   */
  trait Task extends EmbulkTask {
    @Config("S")
    @ConfigDefault("null")
    def getS: Optional[String]
    @Config("N")
    @ConfigDefault("null")
    def getN: Optional[String]
    @Config("B")
    @ConfigDefault("null")
    def getB: Optional[String]
    @Config("SS")
    @ConfigDefault("null")
    def getSS: Optional[JList[String]]
    @Config("NS")
    @ConfigDefault("null")
    def getNS: Optional[JList[String]]
    @Config("BS")
    @ConfigDefault("null")
    def getBS: Optional[JList[String]]
    @Config("M")
    @ConfigDefault("null")
    def getM: Optional[JMap[String, DynamodbAttributeValue.Task]]
    @Config("L")
    @ConfigDefault("null")
    def getL: Optional[JList[DynamodbAttributeValue.Task]]
    @Config("NULL")
    @ConfigDefault("null")
    def getNULL: Optional[Boolean]
    @Config("BOOL")
    @ConfigDefault("null")
    def getBOOL: Optional[Boolean]
  }
  /**
   * Recursively converts a config task into a wrapped SDK AttributeValue.
   * Binary fields (B, BS) are taken as the UTF-8 bytes of the configured strings;
   * M and L recurse through this same factory.
   */
  def apply(task: Task): DynamodbAttributeValue = {
    val original = new AttributeValue()
      .tap(a => task.getS.ifPresent(v => a.setS(v)))
      .tap(a => task.getN.ifPresent(v => a.setN(v)))
      .tap { a =>
        task.getB.ifPresent { v =>
          a.setB(ByteBuffer.wrap(v.getBytes(StandardCharsets.UTF_8)))
        }
      }
      .tap(a => task.getSS.ifPresent(v => a.setSS(v)))
      .tap(a => task.getNS.ifPresent(v => a.setNS(v)))
      .tap { a =>
        task.getBS.ifPresent { v =>
          a.setBS(
            v.asScala
              .map(e => ByteBuffer.wrap(e.getBytes(StandardCharsets.UTF_8)))
              .asJava
          )
        }
      }
      .tap { a =>
        task.getM.ifPresent { v =>
          a.setM(v.asScala.map(x => (x._1, apply(x._2).getOriginal)).asJava)
        }
      }
      .tap(a =>
        task.getL.ifPresent(v =>
          a.setL(v.asScala.map(apply).map(_.getOriginal).asJava)
        )
      )
      .tap(a => task.getNULL.ifPresent(v => a.setNULL(v)))
      .tap(a => task.getBOOL.ifPresent(v => a.setBOOL(v)))
    new DynamodbAttributeValue(original)
  }
  /** Wraps an existing SDK AttributeValue (validation happens in the constructor). */
  def apply(original: AttributeValue): DynamodbAttributeValue = {
    new DynamodbAttributeValue(original)
  }
  /** Wraps a whole item (attribute map) as a single M-typed value. */
  def apply(item: Map[String, AttributeValue]): DynamodbAttributeValue = {
    val original = new AttributeValue().withM(item.asJava)
    new DynamodbAttributeValue(original)
  }
}
/**
 * A validated wrapper around the AWS SDK [[AttributeValue]].
 *
 * Construction fails unless exactly one of the DynamoDB type fields
 * (S, N, B, SS, NS, BS, M, L, NULL, BOOL) is populated.
 */
class DynamodbAttributeValue(original: AttributeValue) {
  require(
    message =
      s"Invalid AttributeValue: ${original} which must have 1 attribute value.",
    requirement = {
      Seq(hasS, hasN, hasB, hasSS, hasNS, hasBS, hasM, hasL, hasNULL, hasBOOL)
        .count(has => has) == 1
    }
  )

  /** The wrapped SDK value. */
  def getOriginal: AttributeValue = original

  /**
   * True when this value is the DynamoDB NULL marker.
   *
   * Fix: the previous implementation built `Option[Boolean](getOriginal.getNULL)`,
   * which auto-unboxes the `java.lang.Boolean` BEFORE wrapping it in Option and
   * therefore throws NullPointerException whenever the NULL field is absent
   * (i.e. for every non-NULL value). Checking presence first avoids unboxing null.
   */
  def isNull: Boolean = hasNULL && getOriginal.getNULL.booleanValue()

  // Presence checks — exactly one of these is true (enforced by the require above).
  def hasS: Boolean = Option(getOriginal.getS).isDefined
  def hasN: Boolean = Option(getOriginal.getN).isDefined
  def hasB: Boolean = Option(getOriginal.getB).isDefined
  def hasSS: Boolean = Option(getOriginal.getSS).isDefined
  def hasNS: Boolean = Option(getOriginal.getNS).isDefined
  def hasBS: Boolean = Option(getOriginal.getBS).isDefined
  def hasM: Boolean = Option(getOriginal.getM).isDefined
  def hasL: Boolean = Option(getOriginal.getL).isDefined
  def hasNULL: Boolean = Option(getOriginal.getNULL).isDefined
  def hasBOOL: Boolean = Option(getOriginal.getBOOL).isDefined

  // Raw accessors — only safe to call for the field reported by getType.
  def getS: String = getOriginal.getS
  def getN: String = getOriginal.getN
  def getB: ByteBuffer = getOriginal.getB
  def getSS: JList[String] = getOriginal.getSS
  def getNS: JList[String] = getOriginal.getNS
  def getBS: JList[ByteBuffer] = getOriginal.getBS
  def getM: JMap[String, AttributeValue] = getOriginal.getM
  def getL: JList[AttributeValue] = getOriginal.getL
  def getNULL: Boolean = getOriginal.getNULL
  def getBOOL: Boolean = getOriginal.getBOOL

  /** Reports which single type field is set; UNKNOWN is unreachable given the require. */
  def getType: DynamodbAttributeValueType = {
    if (hasS) return DynamodbAttributeValueType.S
    if (hasN) return DynamodbAttributeValueType.N
    if (hasB) return DynamodbAttributeValueType.B
    if (hasSS) return DynamodbAttributeValueType.SS
    if (hasNS) return DynamodbAttributeValueType.NS
    if (hasBS) return DynamodbAttributeValueType.BS
    if (hasM) return DynamodbAttributeValueType.M
    if (hasL) return DynamodbAttributeValueType.L
    if (hasNULL) return DynamodbAttributeValueType.NULL
    if (hasBOOL) return DynamodbAttributeValueType.BOOL
    DynamodbAttributeValueType.UNKNOWN
  }
}
| lulichn/embulk-input-dynamodb | src/main/scala/org/embulk/input/dynamodb/item/DynamodbAttributeValue.scala | Scala | mit | 5,013 |
package japgolly.scalajs.benchmark.gui
import japgolly.scalajs.benchmark.gui.Styles.{Menu => *}
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import scalacss.ScalaCssReact._
object TableOfContents {
  import Router.Page

  /** An entry of the table of contents. */
  sealed trait Item

  object Item {
    /** Items that can appear outside of batch mode. */
    sealed trait NonBatchMode extends Item

    /** Items that correspond to a routable page. */
    sealed trait WithPage extends Item {
      val urlPath: String
    }

    final case class Folder(name: String, children: Vector[NonBatchMode]) extends NonBatchMode {
      /** Total number of benchmarks contained in this folder, recursively. */
      val deepBmCount: Int =
        children.iterator.map {
          case f: Folder => f.deepBmCount
          case s: Suite  => s.suite.suite.bms.length
        }.sum
    }

    final case class Suite(urlPath: String, suite: GuiSuite[_]) extends WithPage with NonBatchMode

    final case class BatchMode(urlPath: String) extends WithPage
  }

  final case class Props(items      : Seq[Item],
                         router     : Router.Ctl,
                         headerStyle: TagMod = *.folder,
                         ulStyle    : TagMod = *.folderUL,
                         liStyle    : TagMod = *.folderLI,
                         ) {
    @inline def render: VdomElement = Component(this)
  }

  /** Renders the items as nested unordered lists with router links for page items. */
  private def render(p: Props) = {
    val listItem = <.li(p.liStyle)

    def renderOne(item: Item): VdomTag =
      item match {
        case f: Item.Folder    => <.div(<.h3(p.headerStyle, f.name), renderAll(f.children))
        case s: Item.Suite     => p.router.link(Page.Suite(s))(s.suite.name)
        case b: Item.BatchMode => p.router.link(Page.BatchMode(b))(BatchMode.name)
      }

    def renderAll(items: Seq[Item]): VdomTag =
      <.ul(
        p.ulStyle,
        items.iterator.zipWithIndex.toVdomArray { case (item, idx) =>
          listItem(^.key := idx, renderOne(item))
        })

    renderAll(p.items)
  }

  val Component =
    ScalaComponent.builder[Props]
      .render_P(render)
      .build
}
package kata.calc
import org.scalatest.{FlatSpec, Matchers}
/** Behavioral tests for the hand-rolled immutable AlgebraicList. */
class AlgebraicListTest extends FlatSpec with Matchers {
  it should " create a new empty list " in {
    val list: AlgebraicList[Int] = AlgebraicList()
    list.size shouldBe 0
    list.isEmpty shouldBe true
  }
  it should " add element to front of list " in {
    // :: prepends, so the value pushed last ends up at index 0
    var list: AlgebraicList[Int] = AlgebraicList()
    list = 3 :: 2 :: 1 :: list
    list(0) shouldBe Some(3)
    list(1) shouldBe Some(2)
    list(2) shouldBe Some(1)
    list.size shouldBe 3
  }
  it should " return first element of list " in {
    val list = 3 :: 2 :: 1 :: AlgebraicList()
    // queried twice on purpose: reading head must not consume the element
    list.head shouldBe Some(3)
    list.head shouldBe Some(3)
  }
  it should " return all elements without first " in {
    val list = 3 :: 2 :: 1 :: AlgebraicList()
    // queried twice on purpose: tail must be a non-destructive read as well
    list.tail shouldBe Some(2 :: 1 :: AlgebraicList())
    list.tail shouldBe Some(2 :: 1 :: AlgebraicList())
  }
}
| Alex-Diez/Scala-TDD-Katas | old-katas/list-kata/day-6/src/test/scala/kata/calc/AlgebraicListTest.scala | Scala | mit | 987 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.util
import java.util.UUID
import scala.util.{Failure, Success, Try}
import scala.util.control.NonFatal
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.internal.Logging
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.param.{Param, Params}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Dataset
import org.apache.spark.util.Utils
/**
* A small wrapper that defines a training session for an estimator, and some methods to log
* useful information during this session.
*/
private[spark] class Instrumentation private () extends Logging {

  // Unique id for this training session; its first 8 chars prefix every log line
  // so interleaved sessions can be told apart.
  private val id = UUID.randomUUID()
  private val shortId = id.toString.take(8)
  private[util] val prefix = s"[$shortId] "

  /**
   * Log some info about the pipeline stage being fit.
   */
  def logPipelineStage(stage: PipelineStage): Unit = {
    // estimator.getClass.getSimpleName can cause Malformed class name error,
    // call safer `Utils.getSimpleName` instead
    val className = Utils.getSimpleName(stage.getClass)
    logInfo(s"Stage class: $className")
    logInfo(s"Stage uid: ${stage.uid}")
  }

  /**
   * Log some data about the dataset being fit.
   */
  def logDataset(dataset: Dataset[_]): Unit = logDataset(dataset.rdd)

  /**
   * Log some data about the dataset being fit.
   */
  def logDataset(dataset: RDD[_]): Unit = {
    logInfo(s"training: numPartitions=${dataset.partitions.length}" +
      s" storageLevel=${dataset.getStorageLevel}")
  }

  /**
   * Logs a debug message with a prefix that uniquely identifies the training session.
   */
  override def logDebug(msg: => String): Unit = {
    super.logDebug(prefix + msg)
  }

  /**
   * Logs a warning message with a prefix that uniquely identifies the training session.
   */
  override def logWarning(msg: => String): Unit = {
    super.logWarning(prefix + msg)
  }

  /**
   * Logs a error message with a prefix that uniquely identifies the training session.
   */
  override def logError(msg: => String): Unit = {
    super.logError(prefix + msg)
  }

  /**
   * Logs an info message with a prefix that uniquely identifies the training session.
   */
  override def logInfo(msg: => String): Unit = {
    super.logInfo(prefix + msg)
  }

  /**
   * Logs the value of the given parameters for the estimator being used in this session.
   */
  def logParams(hasParams: Params, params: Param[_]*): Unit = {
    // Only params that are actually set are logged; each value is rendered
    // through its own jsonEncode so it round-trips as JSON.
    val pairs: Seq[(String, JValue)] = for {
      p <- params
      value <- hasParams.get(p)
    } yield {
      val cast = p.asInstanceOf[Param[Any]]
      p.name -> parse(cast.jsonEncode(value))
    }
    logInfo(compact(render(map2jvalue(pairs.toMap))))
  }

  def logNumFeatures(num: Long): Unit = {
    logNamedValue(Instrumentation.loggerTags.numFeatures, num)
  }

  def logNumClasses(num: Long): Unit = {
    logNamedValue(Instrumentation.loggerTags.numClasses, num)
  }

  def logNumExamples(num: Long): Unit = {
    logNamedValue(Instrumentation.loggerTags.numExamples, num)
  }

  /**
   * Logs the value with customized name field.
   */
  def logNamedValue(name: String, value: String): Unit = {
    logInfo(compact(render(name -> value)))
  }

  def logNamedValue(name: String, value: Long): Unit = {
    logInfo(compact(render(name -> value)))
  }

  def logNamedValue(name: String, value: Double): Unit = {
    logInfo(compact(render(name -> value)))
  }

  def logNamedValue(name: String, value: Array[String]): Unit = {
    logInfo(compact(render(name -> compact(render(value.toSeq)))))
  }

  def logNamedValue(name: String, value: Array[Long]): Unit = {
    logInfo(compact(render(name -> compact(render(value.toSeq)))))
  }

  def logNamedValue(name: String, value: Array[Double]): Unit = {
    logInfo(compact(render(name -> compact(render(value.toSeq)))))
  }

  /**
   * Logs the successful completion of the training session.
   */
  def logSuccess(): Unit = {
    logInfo("training finished")
  }

  /**
   * Logs an exception raised during a training session.
   *
   * Fix: the exception itself (class and message) is now logged ahead of the
   * stack-trace lines; previously only `getStackTrace` was logged, so the
   * actual error message was lost.
   * Note: deliberately calls `super.logError` (no session prefix), matching
   * the original behavior.
   */
  def logFailure(e: Throwable): Unit = {
    val msg = (e.toString +: e.getStackTrace.map(_.toString)).mkString("\n")
    super.logError(msg)
  }
}
/**
* Some common methods for logging information about a training session.
*/
private[spark] object Instrumentation {
  /** Well-known names used with `logNamedValue`. */
  object loggerTags {
    val numFeatures = "numFeatures"
    val numClasses = "numClasses"
    val numExamples = "numExamples"
    val meanOfLabels = "meanOfLabels"
    val varianceOfLabels = "varianceOfLabels"
  }
  /**
   * Runs `body` inside a fresh training session: logs success on normal
   * completion; on a non-fatal exception, logs the failure's stack trace and
   * rethrows. Fatal throwables propagate untouched.
   */
  def instrumented[T](body: (Instrumentation => T)): T = {
    val instr = new Instrumentation()
    try {
      val result = body(instr)
      instr.logSuccess()
      result
    } catch {
      case NonFatal(e) =>
        instr.logFailure(e)
        throw e
    }
  }
}
/**
* A small wrapper that contains an optional `Instrumentation` object.
* Provide some log methods, if the containing `Instrumentation` object is defined,
* will log via it, otherwise will log via common logger.
*/
private[spark] class OptionalInstrumentation private(
    val instrumentation: Option[Instrumentation],
    val className: String) extends Logging {
  // When falling back to the common logger, use the stored class name as the
  // logger name instead of this wrapper's own class.
  override protected def logName: String = className
  /** Logs via the wrapped `Instrumentation` when present, else via the common logger. */
  override def logInfo(msg: => String): Unit = {
    if (instrumentation.isDefined) instrumentation.get.logInfo(msg)
    else super.logInfo(msg)
  }
  /** Logs via the wrapped `Instrumentation` when present, else via the common logger. */
  override def logWarning(msg: => String): Unit = {
    if (instrumentation.isDefined) instrumentation.get.logWarning(msg)
    else super.logWarning(msg)
  }
  /** Logs via the wrapped `Instrumentation` when present, else via the common logger. */
  override def logError(msg: => String): Unit = {
    if (instrumentation.isDefined) instrumentation.get.logError(msg)
    else super.logError(msg)
  }
}
private[spark] object OptionalInstrumentation {
  /**
   * Creates an `OptionalInstrumentation` object from an existing `Instrumentation` object.
   * All log calls are forwarded to `instr`; its session prefix is reused as the
   * logger name.
   */
  def create(instr: Instrumentation): OptionalInstrumentation = {
    new OptionalInstrumentation(Some(instr), instr.prefix)
  }
  /**
   * Creates an `OptionalInstrumentation` object from a `Class` object.
   * The created `OptionalInstrumentation` object will log messages via common logger and use the
   * specified class name as logger name.
   * The trailing "$" is stripped so companion objects log under the class name.
   */
  def create(clazz: Class[_]): OptionalInstrumentation = {
    new OptionalInstrumentation(None, clazz.getName.stripSuffix("$"))
  }
}
| michalsenkyr/spark | mllib/src/main/scala/org/apache/spark/ml/util/Instrumentation.scala | Scala | apache-2.0 | 7,178 |
package org.jetbrains.plugins.scala
package editor.enterHandler
import com.intellij.codeInsight.editorActions.enter.EnterHandlerDelegate.Result
import com.intellij.codeInsight.editorActions.enter.EnterHandlerDelegateAdapter
import com.intellij.lang.ASTNode
import com.intellij.lexer.StringLiteralLexer
import com.intellij.openapi.actionSystem.DataContext
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.editor.actionSystem.EditorActionHandler
import com.intellij.openapi.util.Ref
import com.intellij.psi.{PsiElement, PsiFile, StringEscapesTokenTypes}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
/**
* User: Dmitry Naydanov
* Date: 3/31/12
*/
class InterpolatedStringEnterHandler extends EnterHandlerDelegateAdapter {
  /**
   * Pre-processes Enter pressed inside an interpolated string literal:
   * nudges the caret so escape sequences are never split in two, then splits
   * the literal by inserting `" + <prefix>"` at the caret, so the string
   * continues on the next line with the same interpolator prefix.
   * Multi-line (triple-quoted) strings are left to the default handler.
   */
  override def preprocessEnter(file: PsiFile, editor: Editor, caretOffset: Ref[Integer], caretAdvance: Ref[Integer],
                               dataContext: DataContext, originalHandler: EditorActionHandler): Result = {
    var offset = editor.getCaretModel.getOffset
    val element = file.findElementAt(offset)
    import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes._
    // Advances both the local document offset and the handler's caret offset,
    // keeping the two in sync.
    def modifyOffset(moveOn: Int) {
      offset += moveOn
      caretOffset.set(caretOffset.get + moveOn)
    }
    // True when the PSI element is a multi-line (triple-quoted) string literal.
    def isMLString(element: PsiElement) = element match {
      case lit: ScLiteral => lit.isMultiLineString
      case _ => false
    }
    Option(element) foreach (a =>
      // Only act when the caret sits in the body, an escape, or the closing
      // quote of an interpolated string.
      if (Set(tINTERPOLATED_STRING, tINTERPOLATED_STRING_ESCAPE, tINTERPOLATED_STRING_END).contains(a.getNode.getElementType)) {
        a.getParent.getFirstChild.getNode match {
          // `b` is the interpolator prefix node (e.g. s/f/raw or a custom one).
          case b: ASTNode if b.getElementType == tINTERPOLATED_STRING_ID ||
            b.getElementType == ScalaElementTypes.INTERPOLATED_PREFIX_PATTERN_REFERENCE ||
            b.getElementType == ScalaElementTypes.INTERPOLATED_PREFIX_LITERAL_REFERENCE =>
            if (a.getNode.getElementType == tINTERPOLATED_STRING_ESCAPE) {
              // Caret is right after the escape character: step past the whole
              // escape so it is not cut in half by the split below.
              if (caretOffset.get - a.getTextOffset == 1) modifyOffset(1)
            } else {
              // Re-lex the literal's escape sequences; if the caret falls
              // strictly inside one, move it to the end of that escape token.
              val lexer = new StringLiteralLexer(StringLiteralLexer.NO_QUOTE_CHAR, a.getNode.getElementType)
              lexer.start(a.getText, 0, a.getTextLength)
              do {
                if (lexer.getTokenStart + a.getTextOffset < caretOffset.get && caretOffset.get() < lexer.getTokenEnd + a.getTextOffset) {
                  if (StringEscapesTokenTypes.STRING_LITERAL_ESCAPES.contains(lexer.getTokenType)) {
                    modifyOffset(lexer.getTokenEnd + a.getTextOffset - caretOffset.get())
                  }
                }
                // Advance until the token containing/following the caret is
                // reached or the lexer is exhausted (tuple trick evaluates
                // advance() for its side effect).
              } while (caretOffset.get() > lexer.getTokenEnd + a.getTextOffset && (lexer.advance(), lexer.getTokenType != null)._2)
            }
            extensions.inWriteAction {
              // Multi-line strings may contain raw line breaks — nothing to do.
              if (isMLString(a.getParent)) return Result.Continue
              // Close the string, add `+`, and reopen with the same prefix;
              // caret lands inside the reopened string on the next line.
              caretOffset.set(caretOffset.get + 3)
              caretAdvance.set(b.getTextLength + 1)
              editor.getDocument.insertString(offset, "\\" +" + b.getText + "\\"")
            }
            return Result.Continue
          case _ =>
        }
      })
    Result.Continue
  }
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/editor/enterHandler/InterpolatedStringEnterHandler.scala | Scala | apache-2.0 | 3,282 |
package katas.scala.classic_katas.wordchain
import org.junit.Test
import org.scalatest.Matchers
import scala.collection.immutable.{Seq, TreeSet}
import scala.io.Source
/**
* User: dima
* Date: 22/04/2012
*/
class WordChains0 extends Matchers {
  // TODO finish: findMinWordChain's fold discards the accumulator when a
  // shorter chain is found, and findWordChain keeps only words of toWord's
  // length even though canBeNext also accepts one-letter insertions/deletions.

  /** Loads the system dictionary (lower-cased) and prints a chain from "cat" to "dog". */
  @Test def aaa() {
    val words = Source.fromFile("/usr/share/dict/words").getLines().foldLeft(TreeSet[String]()) {
      (acc, word) =>
        acc + word.toLowerCase
    }
    println(
      findWordChain("cat", "dog", words)
    )
  }

  /**
   * Entry point: returns a chain of words leading from `fromWord` to `toWord`,
   * or an empty Seq when either word is missing from the dictionary.
   */
  def findWordChain(fromWord: String, toWord: String, words: TreeSet[String]): Seq[String] = {
    if (!words.contains(fromWord) || !words.contains(toWord)) return Seq()
    // NOTE(review): restricting candidates to toWord's length contradicts
    // canBeNext, which also permits insertions/deletions — part of the TODO.
    findMinWordChain(fromWord, toWord, (words - fromWord).filter {
      _.length() == toWord.length()
    })
  }

  /**
   * Depth-first search for the shortest chain; `minLength` prunes branches
   * already at least as long as the best chain found so far.
   */
  def findMinWordChain(fromWord: String, toWord: String, words: TreeSet[String],
                       length: Int = 0, minLength: Int = Int.MaxValue): Seq[String] = {
    if (length >= minLength) return Seq()
    if (fromWord == toWord) return {
      println("aa")
      Seq("")
    }
    // All dictionary words reachable from fromWord in a single step.
    val nextWords = words.foldLeft(TreeSet[String]()) {
      (acc, word) => if (canBeNext(fromWord, word)) acc + word else acc
    }
    println(nextWords.mkString(","))
    var newMinLength = minLength
    val updatedWords = words -- nextWords + toWord
    nextWords.foldLeft(Seq[String]()) {
      (acc, nextWord: String) =>
        val wordChain = findMinWordChain(nextWord, toWord, updatedWords, length + 1, newMinLength)
        if (!wordChain.isEmpty && wordChain.length < newMinLength) {
          println(wordChain)
          newMinLength = wordChain.length
          wordChain :+ nextWord
        } else {
          acc
        }
    }
  }

  @Test def shouldDetermineIfCanBeNextWord() {
    canBeNext("cat", "cad") should equal(true)
    canBeNext("cat", "cata") should equal(true)
    canBeNext("cat", "ct") should equal(true)
    canBeNext("cat", "cat") should equal(false)
    canBeNext("cat", "kata") should equal(false)
    canBeNext("cat", "ccc") should equal(false)
  }

  /**
   * True when `word` is exactly one edit (substitution, insertion or deletion)
   * away from `fromWord`.
   */
  def canBeNext(fromWord: String, word: String) = {
    // Fix: was `> 0`, which rejected every pair whose lengths differ at all,
    // making the insertion/deletion handling below unreachable and failing the
    // expectations in shouldDetermineIfCanBeNextWord ("cata" and "ct" => true).
    if (math.abs(fromWord.length() - word.length()) > 1) false
    else if (fromWord == word) false
    else {
      var diffs = 0
      var word1 = fromWord
      var word2 = word
      // Walk both words in lockstep, counting mismatches; one-character
      // lookahead resolves whether a mismatch is a substitution, an insertion
      // in word2, or a deletion from word1.
      while ((!word1.isEmpty || !word2.isEmpty) && diffs <= 1) {
        if (word1.isEmpty) {
          word2 = word2.tail
          diffs += 1
        } else if (word2.isEmpty) {
          word1 = word1.tail
          diffs += 1
        } else if (word1.head == word2.head) {
          word1 = word1.tail
          word2 = word2.tail
        } else if (word1.size > 1 && word2.size > 1 && word1.tail.head == word2.tail.head) {
          // Substitution: both next characters line up again.
          word1 = word1.tail
          word2 = word2.tail
          diffs += 1
        } else if (word2.size > 1 && word1.head == word2.tail.head) {
          // Insertion in word2: skip its extra character.
          word2 = word2.tail
          diffs += 1
        } else if (word1.size > 1 && word1.tail.head == word2.head) {
          // Deletion from word1: skip its extra character.
          word1 = word1.tail
          diffs += 1
        } else {
          word1 = word1.tail
          word2 = word2.tail
          diffs += 1
        }
      }
      diffs <= 1
    }
  }
}
import scala.tools.partest.DirectTest
object Test extends DirectTest {
  // Compiler flags: print trees with positions after typer, keep range
  // positions, stop after typer, and compile to testOutput (also on the
  // classpath so class D below is resolvable).
  override def extraSettings: String =
    s"-usejavacp -Vprint-pos -Vprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}"
  // Snippet under test: a Dynamic field selection (`d.field`) that the
  // compiler rewrites to d.selectDynamic("field"); the printed typer trees
  // show the resulting range positions.
  override def code = """
    object X {
      val d = new D
      d.field
    }
  """.trim
  override def show(): Unit = {
    // Compiler output goes to stderr; redirect to stdout so partest captures
    // it for comparison against the check file.
    Console.withErr(System.out) {
      compile()
    }
  }
}
import language.dynamics
// Dynamic receiver: any unknown member access on a D compiles to
// selectDynamic(name). The body is irrelevant here — only the typer rewrite
// (not execution) is under test.
class D extends Dynamic {
  def selectDynamic(name: String) = ???
}
| martijnhoekstra/scala | test/files/run/dynamic-selectDynamic.scala | Scala | apache-2.0 | 513 |
package com.scalaAsm.x86
package Instructions
package x87
// Description: Reverse Subtract
// Category: general/arith
/** x87 FISUBR (reverse subtract) instruction definition. */
trait FISUBR extends InstructionDefinition {
  val mnemonic = "FISUBR"
}
/** FISUBR takes a single (memory) operand. */
object FISUBR extends OneOperand[FISUBR] with FISUBRImpl
trait FISUBRImpl extends FISUBR {
  // Encoding DA /5: 32-bit integer memory operand.
  implicit object _0 extends OneOp[m32] {
    val opcode: OneOpcode = 0xDA /+ 5
    val format = RmFormat
    // The x87 stack top ST(0) participates implicitly.
    override def hasImplicitOperand = true
  }
  // Encoding DE /5: 16-bit integer memory operand.
  implicit object _1 extends OneOp[m16] {
    val opcode: OneOpcode = 0xDE /+ 5
    val format = RmFormat
    override def hasImplicitOperand = true
  }
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/x87/FISUBR.scala | Scala | apache-2.0 | 595 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.attribute.Attribute
import io.truthencode.ddo.support.requisite._
/**
* Icon Feat Greater Two Handed Fighting.png Greater Two Handed Fighting Passive Increases the
* damage of glancing blow attacks when wielding a two-handed weapon by an additional 10% for a
* total of 50%. Also increases the chance for weapon effects to trigger on glancing blows by an
* additional 3% (9%) and an additional +2 to Melee Power (total of +6). * Improved Two Handed
* Fighting Strength 17 Base Attack Bonus +11
*/
trait GreaterTwoHandedFighting
    extends FeatRequisiteImpl with Passive with RequiresAllOfFeat with AttributeRequisiteImpl
    with RequiresAllOfAttribute with RequiresBaB with FighterBonusFeat {
  self: GeneralFeat =>

  /** Requires a Base Attack Bonus of at least +11. */
  override def requiresBaB: Int = 11

  /** Requires Strength 17. */
  override def allOfAttributes: Seq[(Attribute, Int)] = Seq(Attribute.Strength -> 17)

  /** Requires the Improved Two Handed Fighting feat. */
  override def allOfFeats: Seq[GeneralFeat] = Seq(GeneralFeat.ImprovedTwoHandedFighting)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/GreaterTwoHandedFighting.scala | Scala | apache-2.0 | 1,694 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets that meet specific criteria, giving a quick overview of the dataset's contents without deeper analysis.