code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.tools
import java.io.OutputStream
import org.geotools.data.simple.SimpleFeatureCollection
import org.geotools.geojson.feature.FeatureJSON
/**
 * Exports a GeoTools SimpleFeatureCollection to an output stream as GeoJSON,
 * delegating serialization to GeoTools' FeatureJSON encoder.
 */
class GeoJsonExport {
// Reused encoder instance performing the actual GeoJSON serialization.
val featureJson = new FeatureJSON()
/**
 * Writes the given feature collection to the stream as a GeoJSON
 * FeatureCollection document. The stream is not closed here; the caller
 * retains ownership and must close it.
 *
 * @param features the features to serialize
 * @param output   destination stream for the GeoJSON document
 */
def write(features: SimpleFeatureCollection, output: OutputStream): Unit = {
featureJson.writeFeatureCollection(features, output)
}
}
| jwkessi/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/GeoJsonExport.scala | Scala | apache-2.0 | 986 |
// Copyright 2011-2012 James Michael Callahan
// See LICENSE-2.0 file for licensing information.
package org.scalagfx.math
import scala.math.{max,ulp}
//--------------------------------------------------------------------------------------------------
// S C A L A R
//--------------------------------------------------------------------------------------------------
/** Convenience routines for primitive scalar values: approximate equality,
  * clamping and interpolation, overloaded per numeric type. */
object Scalar
{
  /** Whether the two values are within a given epsilon of each other. */
  def equiv(a: Short, b: Short, epsilon: Short): Boolean = {
    val (lo, hi) = if (a < b) (a, b) else (b, a)
    lo + epsilon >= hi
  }

  /** Whether the two values are within a given epsilon of each other. */
  def equiv(a: Int, b: Int, epsilon: Int): Boolean = {
    val (lo, hi) = if (a < b) (a, b) else (b, a)
    lo + epsilon >= hi
  }

  /** Whether the two values are within a given epsilon of each other. */
  def equiv(a: Long, b: Long, epsilon: Long): Boolean = {
    val (lo, hi) = if (a < b) (a, b) else (b, a)
    lo + epsilon >= hi
  }

  /** Whether the two values are within a given epsilon of each other. */
  def equiv(a: Float, b: Float, epsilon: Float): Boolean = {
    val (lo, hi) = if (a < b) (a, b) else (b, a)
    lo + epsilon >= hi
  }

  /** Whether the two values are within a given epsilon of each other. */
  def equiv(a: Double, b: Double, epsilon: Double): Boolean = {
    val (lo, hi) = if (a < b) (a, b) else (b, a)
    lo + epsilon >= hi
  }

  /** Whether the two values are equal; integral types need no epsilon. */
  def equiv(a: Short, b: Short): Boolean = a == b

  /** Whether the two values are equal; integral types need no epsilon. */
  def equiv(a: Int, b: Int): Boolean = a == b

  /** Whether the two values are equal; integral types need no epsilon. */
  def equiv(a: Long, b: Long): Boolean = a == b

  /** Whether the two values agree within a minimal epsilon scaled from the
    * ULPs of both operands. */
  def equiv(a: Float, b: Float): Boolean = {
    val eps = max(ulp(a), ulp(b)) * 1.0E3f
    equiv(a, b, eps)
  }

  /** Whether the two values agree within a minimal epsilon scaled from the
    * ULPs of both operands. */
  def equiv(a: Double, b: Double): Boolean = {
    val eps = max(ulp(a), ulp(b)) * 1.0E4
    equiv(a, b, eps)
  }

  /** Clamp a value to be between the given upper and lower bounds. */
  def clamp(v: Short, lower: Short, upper: Short): Short =
    if (v > upper) upper else if (v < lower) lower else v

  /** Clamp a value to be between the given upper and lower bounds. */
  def clamp(v: Int, lower: Int, upper: Int): Int =
    if (v > upper) upper else if (v < lower) lower else v

  /** Clamp a value to be between the given upper and lower bounds. */
  def clamp(v: Long, lower: Long, upper: Long): Long =
    if (v > upper) upper else if (v < lower) lower else v

  /** Clamp a value to be between the given upper and lower bounds. */
  def clamp(v: Float, lower: Float, upper: Float): Float =
    if (v > upper) upper else if (v < lower) lower else v

  /** Clamp a value to be between the given upper and lower bounds. */
  def clamp(v: Double, lower: Double, upper: Double): Double =
    if (v > upper) upper else if (v < lower) lower else v

  /** Linearly interpolate from a (t = 0) to b (t = 1). */
  def lerp(a: Float, b: Float, t: Float): Float = {
    val span = b - a
    a + span * t
  }

  /** Linearly interpolate from a (t = 0) to b (t = 1). */
  def lerp(a: Double, b: Double, t: Double): Double = {
    val span = b - a
    a + span * t
  }

  /** Interpolate between two values with smooth-step easing applied to t. */
  def smoothlerp(a: Float, b: Float, t: Float): Float = {
    val eased = smoothstep(t)
    lerp(a, b, eased)
  }

  /** Interpolate between two values with smooth-step easing applied to t. */
  def smoothlerp(a: Double, b: Double, t: Double): Double = {
    val eased = smoothstep(t)
    lerp(a, b, eased)
  }

  /** The cubic smooth-step easing function: 3t^2 - 2t^3. */
  def smoothstep(t: Float): Float =
    3.0f*t*t - 2.0f*t*t*t

  /** The cubic smooth-step easing function: 3t^2 - 2t^3. */
  def smoothstep(t: Double): Double =
    3.0*t*t - 2.0*t*t*t
}
| JimCallahan/Graphics | src/org/scalagfx/math/Scalar.scala | Scala | apache-2.0 | 3,855 |
package opencl.generator.stencil.acoustic
import ir.ArrayTypeWSWC
import ir.ast._
import lift.arithmetic.SizeVar
import opencl.executor.{Compile, DeviceCapabilityException, Execute, _}
import opencl.ir._
import opencl.ir.pattern._
import org.junit.Assert._
import org.junit.Assume.assumeFalse
import org.junit._
import rewriting.SimplifyAndFuse
import scala.language.implicitConversions
// Companion providing the shared OpenCL Executor lifecycle (init/shutdown) for the tests below.
object TestAcousticOpt extends TestWithExecutor
class TestAcousticOpt {
// Jiang-style two-grid acoustic step (no boundary mask) on an 8x4x12 volume:
// combines the zipped first-grid value and two weighted neighbourhood reductions
// of the padded second grid, then scales by constantOriginal(3).
@Test
def testTwoGridsThreeCalculationsAsym3DGeneralNoMaskWithOnlyOneWeights(): Unit = {
assumeFalse("Disabled on Apple OpenCL CPU.", Utils.isAppleCPU)
val compareData = AcousticComparisonArrays.testTwoGridsThreeCalculationsAsym3DGeneralNoMaskComparisonData8x4x12
val localDimX = 8
val localDimY = 4
val localDimZ = 12
val data = StencilUtilities.createDataFloat3D(localDimX, localDimY, localDimZ)
val stencilarr3D = data.map(x => x.map(y => y.map(z => Array(z))))
val stencilarrpadded3D = StencilUtilities.createDataFloat3DWithPadding(localDimX, localDimY, localDimZ)
val stencilarrOther3D = stencilarrpadded3D.map(x => x.map(y => y.map(z => z * 2.0f)))
val n = SizeVar("N")
val m = SizeVar("M")
val o = SizeVar("O")
// Coefficients applied to: [0] outer-weight reduction, [1] middle-weight
// reduction, [2] (passed in as c1) the first-grid value, [3] final scaling.
val constantOriginal = Array(1.0f, 2.0f, 1.5f, 0.25f)
val const1 = constantOriginal(2)
// Inputs: inner grid (unit innermost dim), padded second grid, two weight
// stencils, and one scalar coefficient supplied at run time.
val lambdaZip3D = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float,1), m), n), o),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m + 2), n + 2), o + 2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, StencilUtilities.weights3D(0)(0).length), StencilUtilities.weights3D(0).length), StencilUtilities.weights3D.length),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, StencilUtilities.weightsMiddle3D(0)(0).length), StencilUtilities.weightsMiddle3D(0).length), StencilUtilities.weightsMiddle3D.length),
Float,
(mat1, mat2, weights, weightsMiddle,c1) => {
MapGlb(0)(MapGlb(1)(MapGlb(2)((fun((m) =>
MapSeq(toGlobal(fun(x => mult(x,constantOriginal(3))))) o
MapSeq(addTuple) $
Zip(MapSeq(addTuple) $
Zip(toPrivate(MapSeq(fun(x => mult(x,c1)))) $ Get(m,0),
(toPrivate(MapSeq(fun(x => mult(x, constantOriginal(0))))) o ReduceSeq(add, 0.0f) o Join() o MapSeq(ReduceSeq(add, id $ 0.0f) o MapSeq(multTuple)) o Map(\(tuple => Zip(tuple._0, tuple._1))) $
Zip(( Join() $ Get(m,1)), Join() $ weights))),
(toPrivate(MapSeq(fun(x => mult(x,constantOriginal(1))))) o ReduceSeq(add, 0.0f) o Join() o MapSeq(ReduceSeq(add, id $ 0.0f) o MapSeq(multTuple)) o Map(\(tuple => Zip(tuple._0, tuple._1))) $
Zip(Join() $ Get(m, 1), Join() $ weightsMiddle)))
))))) $ Zip3D(mat1, (Slide3D(StencilUtilities.slidesize, StencilUtilities.slidestep) $ mat2))
})
// Compile the simplified lambda and compare device output against the
// precomputed reference; skip the test if the device cannot run the kernel.
try
{
val newLambda = SimplifyAndFuse(lambdaZip3D)
// OutputKernelJSON(newLambda,"/home/reese/workspace/sandbox/")
// OutputKernelJSON.printJSON(newLambda)
val source = Compile(newLambda)
val (output, runtime) = Execute(2,2,2,2,2,2, (true, true))[Array[Float]](source, newLambda,stencilarr3D,stencilarrOther3D, StencilUtilities.weights3D, StencilUtilities.weightsMiddle3D,const1) // stencilarr3D, stencilarr3DCopy, StencilUtilities.weights3D, StencilUtilities.weightsMiddle3D)
if(StencilUtilities.printOutput) StencilUtilities.printOriginalAndOutput3D(stencilarrpadded3D, output)
assertArrayEquals(compareData, output, StencilUtilities.stencilDelta)
}
catch
{
case e: DeviceCapabilityException =>
Assume.assumeNoException("Device not supported.", e)
}
}
// Same two-grid calculation as above, but on a 4x6x10 volume with a boundary
// mask: each coefficient is selected per-point between constantBorder and
// constantOriginal via BoundaryUtilities.maskValue.
@Test
def testTwoGridsThreeCalculationsWithMaskAsym3DGeneralOneWeights(): Unit = {
assumeFalse("Disabled on Apple OpenCL CPU.", Utils.isAppleCPU)
val compareData = AcousticComparisonArrays.testTwoGridsThreeCalculationsWithMaskAsym3DGeneralComparisonData4x6x10
val localDimX = 4
val localDimY = 6
val localDimZ = 10
val data = StencilUtilities.createDataFloat3D(localDimX, localDimY, localDimZ)
val stencilarr3D = data.map(x => x.map(y => y.map(z => Array(z))))
val stencilarrpadded3D = StencilUtilities.createDataFloat3DWithPadding(localDimX, localDimY, localDimZ)
val stencilarrOther3D = stencilarrpadded3D.map(x => x.map(y => y.map(z => z * 2.0f)))
val mask3D = BoundaryUtilities.createMaskDataAsym3D(localDimX, localDimY, localDimZ)
/* u[cp] = ( boundary ? constantBorder0 : constantOriginal0 ) * ( S*( boundary ? constantBorder1 : constantOriginal1 ) + u1[cp]*( boundary ? constantBorder2 : constantOriginal2 ) + u[cp]*( boundary ? constantBorder3 : constantOriginal3 ) */
val constantOriginal = Array(1.0f, 2.0f, 1.5f, 0.25f)
val constantBorder = Array(2.0f, 3.0f, 2.5f, 0.5f)
val m = SizeVar("M")
val n = SizeVar("N")
val o = SizeVar("O")
// Inputs: inner grid, padded second grid, per-point mask, and the two weight stencils.
val lambdaNeigh = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float,1),m-2), n-2), o-2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m ), n ), o ),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Int, 1), m-2), n-2), o-2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, StencilUtilities.weights3D(0)(0).length), StencilUtilities.weights3D(0).length), StencilUtilities.weights3D.length),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, StencilUtilities.weightsMiddle3D(0)(0).length), StencilUtilities.weightsMiddle3D(0).length), StencilUtilities.weightsMiddle3D.length),
(mat1, mat2, mask1, weights, weightsMiddle) => {
MapGlb(0)(MapGlb(1)(MapGlb(2)((fun((m) =>
toGlobal(MapSeq(multTuple)) $ Zip(MapSeq(addTuple) $ Zip(MapSeq(addTuple) $ Zip((MapSeq(multTuple)) $ Zip(
ReduceSeq(add, 0.0f) $ Get(m, 0),
MapSeq(id) $ BoundaryUtilities.maskValue(Get(m,2), constantBorder(2), constantOriginal(2))
),
MapSeq(multTuple) $ Zip(
ReduceSeq(add, 0.0f) o Join() o MapSeq(ReduceSeq(add, id $ 0.0f) o MapSeq(multTuple)) o Map(\(tuple => Zip(tuple._0, tuple._1))) $ Zip(Join() $
Get(m, 1), Join() $ weights),
MapSeq(id) $ BoundaryUtilities.maskValue(Get(m,2), constantBorder(0), constantOriginal(0))
))
,
(MapSeq(multTuple)) $ Zip(
ReduceSeq(add, 0.0f) o Join() o MapSeq(ReduceSeq(add, id $ 0.0f) o MapSeq(multTuple)) o Map(\(tuple => Zip(tuple._0, tuple._1))) $ Zip(Join() $
Get(m, 1), Join() $ weightsMiddle),
MapSeq(id) $ BoundaryUtilities.maskValue(Get(m,2), constantBorder(1), constantOriginal(1)))
),
BoundaryUtilities.maskValue(Get(m,2), constantBorder(3), constantOriginal(3)))
))
))) $ Zip3D(mat1, (Slide3D(StencilUtilities.slidesize, StencilUtilities.slidestep) $ mat2), mask1)
})
// Compile and run; skip the test when the device cannot execute the kernel.
try
{
val newLambda = SimplifyAndFuse(lambdaNeigh)
val source = Compile(newLambda)
val (output, runtime) = Execute(8, 8, 8, 8, 8, 8, (true, true))[Array[Float]](source, newLambda, stencilarr3D, stencilarrOther3D, mask3D, StencilUtilities.weights3D, StencilUtilities.weightsMiddle3D)
if (StencilUtilities.printOutput)
{
StencilUtilities.printOriginalAndOutput3D(data, output)
}
assertArrayEquals(compareData, output, StencilUtilities.stencilDelta)
}
catch
{
case e: DeviceCapabilityException =>
Assume.assumeNoException("Device not supported.", e)
}
}
// 3D tiled convolution: overlapping tiles (Slide3D(8,6,8,6,10,8)) are staged in
// local memory, each work-item reduces one 3x3x3 neighbourhood against the
// flattened weights, and Untile3D reassembles the volume.
// NOTE(review): compareData is the unpadded input, which presumes
// weightsMiddle3D is a centre-only (identity) stencil -- confirm in StencilUtilities.
@Test
def test3DConvolutionTile(): Unit = {
val localDimx = 12
val localDimy = 12
val localDimz = 16
val compareData = StencilUtilities.createDataFloat3D(localDimx,localDimy,localDimz)
val input3D = StencilUtilities.createDataFloat3DWithPadding(localDimx, localDimy, localDimz)
val M = SizeVar("M")
val N = SizeVar("N")
val O = SizeVar("O")
val stencil = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, M), N),O),
ArrayTypeWSWC(Float, StencilUtilities.weightsMiddle3D(0)(0).length*StencilUtilities.weightsMiddle3D(0).length*StencilUtilities.weightsMiddle3D.length),
(matrix, weights) => {
Untile3D() o MapWrg(2)(MapWrg(1)(MapWrg(0)(fun(tile =>
MapLcl(2)(MapLcl(1)(MapLcl(0)(
fun(elem => {
toGlobal(MapSeqUnroll(id)) o
ReduceSeq(fun((acc, pair) => {
val pixel = Get(pair, 0)
val weight = Get(pair, 1)
multAndSumUp.apply(acc, pixel, weight)
}), 0.0f) $ Zip(Join() o Join() $ elem, weights)
})
))) o Slide3D(3,1) o
toLocal(MapLcl(2)(MapLcl(1)(MapLcl(0)(id)))) $ tile
)))) o
Slide3D(8,6,8,6,10,8) $ matrix
}
)
val (output, runtime) = Execute(2,2,2,2,2,2, (true, true))[Array[Float]](stencil, input3D, StencilUtilities.weightsMiddle3D.flatten.flatten)
if(StencilUtilities.printOutput) StencilUtilities.printOriginalAndOutput3D(input3D, output)
assertArrayEquals(compareData.flatten.flatten, output, StencilUtilities.stencilDelta)
}
// Minimal .at() accessor test: each work-item copies only the centre element of
// its 3x3x3 neighbourhood, so the output equals the unpadded 4x4x4 input.
@Test
def testSimple3DStencilWithAt(): Unit = {
assumeFalse("Disabled on Apple OpenCL CPU.", Utils.isAppleCPU)
// Expected output: the 4x4x4 input values (i+j+k+1), flattened row-major.
val compareData = Array(
1.0f, 2.0f, 3.0f, 4.0f,
2.0f, 3.0f, 4.0f, 5.0f,
3.0f, 4.0f, 5.0f, 6.0f,
4.0f, 5.0f, 6.0f, 7.0f,
2.0f, 3.0f, 4.0f, 5.0f,
3.0f, 4.0f, 5.0f, 6.0f,
4.0f, 5.0f, 6.0f, 7.0f,
5.0f, 6.0f, 7.0f, 8.0f,
3.0f, 4.0f, 5.0f, 6.0f,
4.0f, 5.0f, 6.0f, 7.0f,
5.0f, 6.0f, 7.0f, 8.0f,
6.0f, 7.0f, 8.0f, 9.0f,
4.0f, 5.0f, 6.0f, 7.0f,
5.0f, 6.0f, 7.0f, 8.0f,
6.0f, 7.0f, 8.0f, 9.0f,
7.0f, 8.0f, 9.0f, 10.0f
)
val localDim = 4
val dim = localDim + 2
val input = Array.tabulate(localDim,localDim,localDim){ (i,j,k) => (i+j+k+1).toFloat }
val input3D = StencilUtilities.createFakePaddingFloat3D(input, 0.0f, localDim, localDim)
// Weights argument is declared but unused by the kernel body: only the centre
// neighbourhood element is read.
val lambdaNeigh = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, dim), dim), dim),
ArrayTypeWSWC(Float, StencilUtilities.slidesize*StencilUtilities.slidesize*StencilUtilities.slidesize),
(mat, weights) => {
MapGlb(2)(MapGlb(1)(MapGlb(0)(fun(neighbours => {
val `tile[1][1][1]` = neighbours.at(1).at(1).at(1)
toGlobal(id) $ `tile[1][1][1]`
})))
) o Slide3D(StencilUtilities.slidesize, StencilUtilities.slidestep) $ mat
})
val source = Compile(lambdaNeigh)
val (output, runtime) = Execute(2,2,2,2,2,2, (true,true))[Array[Float]](source,lambdaNeigh, input3D, StencilUtilities.weightsMiddle3D.flatten.flatten)
if(StencilUtilities.printOutput) StencilUtilities.printOriginalAndOutput3D(input3D, output)
assertArrayEquals(compareData, output, StencilUtilities.stencilDelta)
}
// Hand-written 7-point acoustic step using .at() accessors (no mask): sums the
// six face neighbours, then combines centre value, first-grid value and the
// neighbour sum using the constantOriginal coefficients.
// NOTE(review): stencilarr3D and mask3D are computed but unused; `data` is
// passed to Execute directly -- confirm this is intentional.
@Test
def test3DAsymNoMaskStencilWithAt(): Unit = {
assumeFalse("Disabled on Apple OpenCL CPU.", Utils.isAppleCPU)
val compareData = AcousticComparisonArrays.testTwoGridsThreeCalculationsAsym3DGeneralNoMaskComparisonData8x4x12
val localDimX = 8
val localDimY = 4
val localDimZ = 12
val data = StencilUtilities.createDataFloat3D(localDimX, localDimY, localDimZ)
val stencilarr3D = data.map(x => x.map(y => y.map(z => Array(z))))
val stencilarrpadded3D = StencilUtilities.createDataFloat3DWithPadding(localDimX, localDimY, localDimZ)
val stencilarrOther3D = stencilarrpadded3D.map(x => x.map(y => y.map(z => z * 2.0f)))
val mask3D = BoundaryUtilities.createMaskDataAsym3D(localDimX, localDimY, localDimZ)
val constantOriginal = Array(1.0f, 2.0f, 1.5f, 0.25f)
val m = SizeVar("M")
val n = SizeVar("N")
val o = SizeVar("O")
val lambdaNeighAt = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m-2), n-2), o-2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m), n), o),
ArrayTypeWSWC(Float, StencilUtilities.slidesize*StencilUtilities.slidesize*StencilUtilities.slidesize),
(mat1, mat2, weights) => {
MapGlb(2)(MapGlb(1)(MapGlb(0)(fun(m => {
// Named accessors for the centre and its six face neighbours.
val `tile[1][1][1]` = Get(m,1).at(1).at(1).at(1)
val `tile[0][1][1]` = Get(m,1).at(0).at(1).at(1)
val `tile[1][0][1]` = Get(m,1).at(1).at(0).at(1)
val `tile[1][1][0]` = Get(m,1).at(1).at(1).at(0)
val `tile[1][1][2]` = Get(m,1).at(1).at(1).at(2)
val `tile[1][2][1]` = Get(m,1).at(1).at(2).at(1)
val `tile[2][1][1]` = Get(m,1).at(2).at(1).at(1)
// Sum of the six face neighbours, built by chained additions.
val stencil = fun(x => add(x,`tile[0][1][1]`)) o
fun(x => add(x,`tile[1][0][1]`)) o
fun(x => add(x,`tile[1][1][0]`)) o
fun(x => add(x,`tile[1][1][2]`)) o
fun(x => add(x,`tile[1][2][1]`)) $ `tile[2][1][1]`
val valueMat1 = Get(m,0)
toGlobal(id) o toPrivate(fun( x => mult(x,constantOriginal(3)))) o addTuple $
Tuple(addTuple $ Tuple(fun(x => mult(x,constantOriginal(1))) $ `tile[1][1][1]`, fun(x => mult(x,constantOriginal(2))) $ valueMat1),
fun(x => mult(x, constantOriginal(0))) $ stencil )
})))
) $ Zip3D(mat1, Slide3D(StencilUtilities.slidesize, StencilUtilities.slidestep) $ mat2)
})
val source = Compile(lambdaNeighAt)
val (output, runtime) = Execute(2,2,2,2,2,2, (true,true))[Array[Float]](source,lambdaNeighAt, data, stencilarrOther3D, StencilUtilities.weightsMiddle3D.flatten.flatten)
if(StencilUtilities.printOutput) StencilUtilities.printOriginalAndOutput3D(stencilarrpadded3D, output)
assertArrayEquals(compareData, output, StencilUtilities.stencilDelta)
}
// Masked variant of the .at()-style acoustic step: per-point coefficients are
// selected between constantBorder and constantOriginal via maskValueNoArray.
// NOTE(review): lambdaNeigh below is defined but never used (only lambdaNeighAt
// is compiled and executed); stencilarr3D and `val x` are also unused -- candidates
// for removal.
@Test
def test3DAsymMaskStencilWithAt(): Unit = {
assumeFalse("Disabled on Apple OpenCL CPU.", Utils.isAppleCPU)
val compareData = AcousticComparisonArrays.testTwoGridsThreeCalculationsWithMaskAsym3DGeneralComparisonData4x6x10
val localDimX = 4
val localDimY = 6
val localDimZ = 10
val data = StencilUtilities.createDataFloat3D(localDimX, localDimY, localDimZ)
val stencilarr3D = data.map(x => x.map(y => y.map(z => Array(z))))
val stencilarrpadded3D = StencilUtilities.createDataFloat3DWithPadding(localDimX, localDimY, localDimZ)
val stencilarrOther3D = stencilarrpadded3D.map(x => x.map(y => y.map(z => z * 2.0f)))
val mask3D = BoundaryUtilities.createMaskDataAsym3DNoArray(localDimX, localDimY, localDimZ)
val constantOriginal = Array(1.0f, 2.0f, 1.5f, 0.25f)
val constantBorder = Array(2.0f, 3.0f, 2.5f, 0.5f)
val m = SizeVar("M")
val n = SizeVar("N")
val o = SizeVar("O")
// Zip-based reference formulation (same computation as the masked test above).
val lambdaNeigh = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float,1),m-2), n-2), o-2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m ), n ), o ),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Int, 1), m-2), n-2), o-2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, StencilUtilities.weights3D(0)(0).length), StencilUtilities.weights3D(0).length), StencilUtilities.weights3D.length),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, StencilUtilities.weightsMiddle3D(0)(0).length), StencilUtilities.weightsMiddle3D(0).length), StencilUtilities.weightsMiddle3D.length),
(mat1, mat2, mask1, weights, weightsMiddle) => {
MapGlb(0)(MapGlb(1)(MapGlb(2)((fun((m) =>
toGlobal(MapSeq(multTuple)) $ Zip(MapSeq(addTuple) $ Zip(MapSeq(addTuple) $ Zip((MapSeq(multTuple)) $ Zip(
ReduceSeq(add, 0.0f) $ Get(m, 0),
MapSeq(id) $ BoundaryUtilities.maskValue(Get(m,2), constantBorder(2), constantOriginal(2))
),
MapSeq(multTuple) $ Zip(
ReduceSeq(add, 0.0f) o Join() o MapSeq(ReduceSeq(add, id $ 0.0f) o MapSeq(multTuple)) o Map(\(tuple => Zip(tuple._0, tuple._1))) $ Zip(Join() $
Get(m, 1), Join() $ weights),
MapSeq(id) $ BoundaryUtilities.maskValue(Get(m,2), constantBorder(0), constantOriginal(0))
))
,
(MapSeq(multTuple)) $ Zip(
ReduceSeq(add, 0.0f) o Join() o MapSeq(ReduceSeq(add, id $ 0.0f) o MapSeq(multTuple)) o Map(\(tuple => Zip(tuple._0, tuple._1))) $ Zip(Join() $
Get(m, 1), Join() $ weightsMiddle),
MapSeq(id) $ BoundaryUtilities.maskValue(Get(m,2), constantBorder(1), constantOriginal(1)))
),
BoundaryUtilities.maskValue(Get(m,2), constantBorder(3), constantOriginal(3)))
))
))) $ Zip3D(mat1, (Slide3D(StencilUtilities.slidesize, StencilUtilities.slidestep) $ mat2), mask1)
})
def addP = toPrivate(add)
// The formulation actually executed: explicit .at() neighbour accesses with
// mask-selected coefficients held in private memory.
val lambdaNeighAt = fun(
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m-2), n-2), o-2),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Float, m), n), o),
ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Int, m-2), n-2), o-2),
(mat1, mat2,mask) => {
MapGlb(2)(MapGlb(1)(MapGlb(0)(fun(m => {
val maskedValMult = BoundaryUtilities.maskValueNoArray(Get(m,2), constantBorder(3), constantOriginal(3))
val maskedValConstOrg = BoundaryUtilities.maskValueNoArray(Get(m,2), constantBorder(2), constantOriginal(2))
val maskedValConstSec = BoundaryUtilities.maskValueNoArray(Get(m,2), constantBorder(1), constantOriginal(1))
val maskedValStencil = BoundaryUtilities.maskValueNoArray(Get(m,2), constantBorder(0), constantOriginal(0))
val `tile[1][1][1]` = Get(m,1).at(1).at(1).at(1)
val `tile[0][1][1]` = Get(m,1).at(0).at(1).at(1)
val `tile[1][0][1]` = Get(m,1).at(1).at(0).at(1)
val `tile[1][1][0]` = Get(m,1).at(1).at(1).at(0)
val `tile[1][1][2]` = Get(m,1).at(1).at(1).at(2)
val `tile[1][2][1]` = Get(m,1).at(1).at(2).at(1)
val `tile[2][1][1]` = Get(m,1).at(2).at(1).at(1)
val x = addP(`tile[0][1][1]`,toPrivate(add)(`tile[0][1][1]`,`tile[1][0][1]`))
// Sum of the six face neighbours, accumulated in private memory.
val stencil = toPrivate(fun(x => add(x,`tile[0][1][1]`))) o
toPrivate(fun(x => add(x,`tile[1][0][1]`))) o
toPrivate(fun(x => add(x,`tile[1][1][0]`))) o
toPrivate(fun(x => add(x,`tile[1][1][2]`))) o
toPrivate(fun(x => add(x,`tile[1][2][1]`))) $ `tile[2][1][1]`
val valueMat1 = Get(m,0)
toGlobal(id) o toPrivate(fun( x => mult(x,maskedValMult))) o toPrivate(addTuple) $
Tuple(toPrivate(addTuple) $ Tuple(toPrivate(fun(x => mult(x,maskedValConstSec))) $ `tile[1][1][1]`, toPrivate(fun(x => mult(x,maskedValConstOrg))) $ valueMat1),
toPrivate(fun(x => mult(x, maskedValStencil))) $ stencil )
})))
) $ Zip3D(mat1, Slide3D(StencilUtilities.slidesize, StencilUtilities.slidestep) $ mat2, mask)
})
val newLambda = SimplifyAndFuse(lambdaNeighAt)
val source = Compile(newLambda)
val (output, runtime) = Execute(2,2,2,2,2,2, (true,true))[Array[Float]](source,newLambda, data, stencilarrOther3D, mask3D)
if(StencilUtilities.printOutput) StencilUtilities.printOriginalAndOutput3D(stencilarrpadded3D, output)
assertArrayEquals(compareData, output, StencilUtilities.stencilDelta)
}
// Ignored scratch test: exercises Map(Transpose()) o Slide o Map(Slide) 2D tiling
// and prints the result instead of asserting; kept for manual debugging only.
@Ignore
@Test
def simpleMapTransposeTest(): Unit =
{
val dim = 4
val dimX = dim
val dimY = dim
val data = Array.tabulate(dimX,dimY){(i,j) => (i+j+1).toFloat}
val stencil = fun(
ArrayTypeWSWC(ArrayTypeWSWC(Float, dim), dim),
(matrix) => {
MapWrg(2)(MapWrg(1)(MapWrg(0)(
toGlobal(MapSeq(id))
))) o Map(Transpose()) o Slide(3,1) o Map(Slide(3,1)) $ matrix
}
)
val (output, runtime) = Execute(4,4,4,4,4,4, (true, true))[Array[Float]](stencil, data)
StencilUtilities.print2DArray(data)
StencilUtilities.print1DArray(output)
StencilUtilities.print1DArrayAs3DArray(output,3,3,4)
}
// Generates, on-device, the number of face neighbours of every cell of a padded
// volume via Array3DFromUserFunGenerator, and checks it against the host-side
// reference mask.
@Test
def testNumNeighboursUserFun(): Unit = {
val localDimX = 6
val localDimY = 8
val localDimZ = 4
val input3D = Array.fill(localDimZ,localDimY,localDimX)(1)
val mask3DBP = BoundaryUtilities.createMaskDataWithNumBoundaryPts(localDimX+2, localDimY+2, localDimZ+2)
// OpenCL user function: starts at 6 neighbours and decrements once per axis on
// which the point sits at a boundary (index 0 or size-1).
val idxF = UserFun("idxF", Array("i", "j", "k", "m", "n", "o"), "{ " +
"int count = 6; if(i == (m-1) || i == 0){ count--; } if(j == (n-1) || j == 0){ count--; } if(k == (o-1) || k == 0){ count--; }return count; }", Seq(Int,Int,Int,Int,Int,Int), Int)
val inp3d = ArrayTypeWSWC(ArrayTypeWSWC(ArrayTypeWSWC(Int, SizeVar("O")), SizeVar("N")), SizeVar("M"))
val numberOfNeighbours = fun(inp3d,
input => toGlobal(MapGlb(MapSeq(MapSeq(idI)))) $ Array3DFromUserFunGenerator(idxF, inp3d)
)
val (output, _) = Execute(2,2,2,2,2,2,(true,true))[Array[Int]](numberOfNeighbours, input3D)
if(StencilUtilities.printOutput)
{
StencilUtilities.print3DArray(mask3DBP)
StencilUtilities.print3DArray(input3D)
StencilUtilities.print1DArrayAs3DArray(output, localDimX, localDimY, localDimZ)
}
assertArrayEquals(mask3DBP.flatten.flatten, output)
}
}
| lift-project/lift | src/test/opencl/generator/stencil/acoustic/TestAcousticOpt.scala | Scala | mit | 21,108 |
package issue14
// Generic interface used as the type of the macro-annotated member below, so the
// expansion must cope with a parameterised type.
trait P2[T] { def foo: T }
object test2 {
class PWrapper[T] {
import java.util // make sure that macro expansion logic skips import contexts
import java.lang.reflect
// Dummy members reference the imports above so they are genuinely in use.
val dummy1: util.List[_] = ???
val dummy2: reflect.Method = ???
// The macro annotation under test; expansion must see the enclosing type parameter T.
@pkg.happytee val self: P2[T] = ???
}
}
| scala/scala | test/macro-annot/pos/issue14/Test2_2.scala | Scala | apache-2.0 | 311 |
package ee.cone.c4actor
import ee.cone.c4actor.CollectiveTransformProtocol.D_CollectiveTransformMeta
import ee.cone.c4assemble.Types.Values
import ee.cone.c4di.c4multi
import ee.cone.c4proto.{Id, protocol}
import scala.collection.immutable.Seq
/** A named transformation that produces a batch of LEvents from the current context. */
trait LEventTransform extends Product {
// Events this transform contributes, computed against the given read context.
def lEvents(local: Context): Seq[LEvent[Product]]
// Human-readable identifier recorded in transaction metadata; defaults to the class name.
def leventsDescription: String = this.getClass.getName
}
// Marker trait for the @c4multi-generated "CollectiveTransformApp" component mix-in.
trait CollectiveTransformAppBase
/** Applies all collected LEventTransforms in a single transaction, tagging the
  * transaction metadata with each transform's description.
  * NOTE(review): @c4multi presumably generates a corresponding factory -- confirm. */
@c4multi("CollectiveTransformApp") final case class CollectiveTransform(srcId: String, events: Values[LEventTransform])(
txAdd: LTxAdd,
) extends TxTransform {
// Flattens every transform's events into one add; metadata lists each description.
def transform(local: Context): Context =
txAdd.add(events.flatMap(_.lEvents(local)))(InsertOrigMeta(D_CollectiveTransformMeta(events.map(_.leventsDescription).toList) :: Nil)(local))
}
object InsertOrigMeta {
// Wraps the given origs as MetaAttr values and stores them under TxTransformOrigMetaKey.
def apply(origs: List[Product]): Context => Context =
TxTransformOrigMetaKey.set(origs.map(MetaAttr))
}
// Marker trait for the @protocol-generated "CollectiveTransformProtocolApp" mix-in.
trait CollectiveTransformProtocolAppBase
/** Serialization protocol holding the per-transaction metadata orig. */
@protocol("CollectiveTransformProtocolApp") object CollectiveTransformProtocol {
// Descriptions of the transforms applied within one collective transaction.
@Id(0x0ab0) case class D_CollectiveTransformMeta(
@Id(0x0ab1) transforms: List[String]
)
}
| conecenter/c4proto | extra_lib/src/main/scala/ee/cone/c4actor/LEventTransform.scala | Scala | apache-2.0 | 1,147 |
package sci2s.sparkfingerprint
/**
* @author daniel
*/
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.hadoop.io.Text
import org.apache.spark.HashPartitioner
import org.apache.spark.rdd.RDD
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.util.SizeEstimator
import scala.collection.JavaConverters._
import scala.collection.Iterable
import sci2s.mrfingerprint.LSJiangArray
import sci2s.mrfingerprint.LocalStructureJiang
import sci2s.mrfingerprint.LocalStructure
import sci2s.mrfingerprint.PartialScoreJiang
import sci2s.mrfingerprint.PartialScore
object SparkMatcherJiang {
// Help text printed by main when no arguments are supplied.
val usage = """
Usage: SparkMatcherJiang
[--template-file path]
[--map-file path]
[--output-dir path]
[--num-partitions num]
[--debug]
"""
// Parsed command-line options, keyed by Symbol (see nextOption).
type OptionMap = Map[Symbol, Any]
// Global debug switch; enabled by the --debug flag during option parsing.
var DEBUG = false
/**
 * Recursively consumes the raw argument list, accumulating recognized options
 * into the map. "--debug" flips the global DEBUG flag as a side effect; an
 * unrecognized option prints a message and terminates the JVM.
 *
 * @param map  options accumulated so far
 * @param list remaining raw arguments
 * @return the completed option map
 */
@scala.annotation.tailrec
def nextOption(map : OptionMap, list: List[String]) : OptionMap = {
  list match {
    case Nil => map
    case "--template-file" :: value :: tail =>
      nextOption(map + ('templatefile -> value), tail)
    case "--map-file" :: value :: tail =>
      nextOption(map + ('mapfile -> value), tail)
    case "--output-dir" :: value :: tail =>
      nextOption(map + ('outputdir -> value), tail)
    case "--num-partitions" :: value :: tail =>
      nextOption(map + ('numpartitions -> value.toInt), tail)
    case "--debug" :: tail =>
      DEBUG = true
      nextOption(map, tail)
    case option :: _ =>
      println(s"Unknown option $option")
      System.exit(1)
      map
  }
}
/** Prints the given label followed by Spark's estimate of `elem`'s in-memory size, in MB. */
def printSize(st : String, elem : AnyRef) : Unit = {
  val megabytes = SizeEstimator.estimate(elem) / Math.pow(1024, 2)
  println(s"$st$megabytes MB")
}
/**
 * Entry point: parses CLI options, configures a local Spark context with Kryo
 * serialization, loads the template database and the broadcast input
 * fingerprint(s), scores every input against each template and writes the
 * scores to a timestamped output directory.
 * NOTE(review): 'templatefile and 'mapfile are mandatory -- if missing, the
 * .get calls below throw NoSuchElementException instead of printing usage.
 */
def main(args: Array[String]): Unit = {
if (args.length == 0) {
println(usage)
System.exit(-1)
}
val options = nextOption(Map(), args.toList)
println(options)
// Parameters
val templateFile = options.get('templatefile).get.toString
val outputDir = options.getOrElse('outputdir, "output_spark_jiang_").toString + System.currentTimeMillis
val mapFileName = options.get('mapfile).get.toString
// NOTE(review): numPartitions is parsed but never used below -- confirm intent.
val numPartitions = options.get('numpartitions).getOrElse(10).asInstanceOf[Int]
// TODO the number of processes may be passed as a parameter
val conf = new SparkConf().setAppName("SparkMatcherJiang " + templateFile.substring(templateFile.lastIndexOf('/')))
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.kryoserializer.buffer.max", "512m")
.set("spark.kryo.registrationRequired", "false")
.set("spark.hadoop.cloneConf", "true").setMaster("local[*]")
// Register classes for serialization
conf.registerKryoClasses(Array(
classOf[sci2s.mrfingerprint.LSJiangArray],
classOf[org.apache.hadoop.io.Text],
classOf[sci2s.mrfingerprint.LocalStructureJiang],
classOf[org.apache.hadoop.mapred.JobConf]))
// Set SparkContext
val sc = new SparkContext(conf)
val initialtime = System.currentTimeMillis
// Read template database
val templateLS = sc.sequenceFile[String, LocalStructureJiang](templateFile) //.partitionBy(new HashPartitioner(numPartitions))
.mapValues(new LocalStructureJiang(_))
// FOR DEBUGGING
if(DEBUG) {
println("Number of template LS: %s".format(templateLS.count()))
printSize("Template size: ", templateLS.collect())
println("Time: %g".format((System.currentTimeMillis - initialtime)/1000.0))
}
// Read input fingerprint(s)
val inputLSRDD = sc.sequenceFile[String, LSJiangArray](mapFileName)
.mapValues(new LSJiangArray(_).get().map(_.asInstanceOf[LocalStructureJiang]))
// Broadcast the input fingerprint(s)
val inputLS = sc.broadcast(inputLSRDD.collect())
// FOR DEBUGGING
if(DEBUG) {
println("Number of input LS: %s".format(inputLSRDD.count))
printSize("LS size: ", inputLSRDD.collect())
println("Time: %g".format((System.currentTimeMillis - initialtime)/1000.0))
}
// Compute the partial scores for each template ls
val partialscores = computeScores(templateLS, inputLS)
if(DEBUG) {
println("Number of scores: %s".format(partialscores.count()))
println("Partial scores computed. Time: %g".format((System.currentTimeMillis - initialtime)/1000.0))
printSize("Partial score size: ", partialscores.collect())
println("\\tPartial score sample: " + partialscores.first)
println("Time: %g".format((System.currentTimeMillis - initialtime)/1000.0))
// partialscores.sortBy({case (k,v) => v}).foreach(println(_))
}
// Sort by score and write output
// partialscores.sortBy({case (k,v) => v}).saveAsTextFile(outputDir)
partialscores.saveAsTextFile(outputDir)
// Print time
println("Total time: %g".format((System.currentTimeMillis - initialtime)/1000.0))
}
/**
 * Computes, for every template fingerprint id, the matching score against each
 * broadcast input fingerprint: the template's local structures are grouped by
 * id, each is scored against the input, partial scores are aggregated, and a
 * final score per (template, input) pair is produced.
 * NOTE(review): the `reduce` after `filter(! _.isEmpty)` throws on an empty
 * collection when every partial score is empty -- confirm inputs preclude this.
 *
 * @param templateLS template local structures keyed by fingerprint id
 * @param inputLS    broadcast array of (input id, its local structures)
 * @return RDD of (template id, (input id, score))
 */
def computeScores(
templateLS : RDD[(String, LocalStructureJiang)],
inputLS : Broadcast[Array[(String, Array[LocalStructureJiang])]]) : RDD[(String, (String, Float))] = {
// First, compute the partial scores of each template LS with each input fingerprint.
val scores = templateLS.groupByKey().flatMapValues({ tlsarray =>
// For each input fingerprint, compute the partial score with tid
inputLS.value.map { ils =>
// For each template LS, compute the partial score with the input fingerprint ilsarray
val score = tlsarray.map ({ ls =>
new PartialScoreJiang(ls, ils._2.asInstanceOf[Array[LocalStructure]])
}).filter(! _.isEmpty).reduce(_.aggregateSinglePS(_).asInstanceOf[PartialScoreJiang]).computeScore(ils._2)
(ils._1, score)
}
})
if(DEBUG) {
val tmp = scores.collect()
printSize("Partitioned partial scores size: ", tmp)
println("\\tPartitioned partial score number: " + tmp.size)
println("\\tPartitioned partial score sample: " + tmp(0))
}
scores
}
} | dperaltac/bigdata-fingerprint | src/main/java/sci2s/sparkfingerprint/SparkMatcherJiang.scala | Scala | apache-2.0 | 6,475 |
/*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.crossdata.server.mocks
import akka.actor.{Actor, Props}
import com.stratio.crossdata.common.result.{Result, QueryStatus}
import com.stratio.crossdata.communication.ACK
import com.stratio.crossdata.core.query.{IValidatedQuery, MetadataValidatedQuery, SelectValidatedQuery}
import org.apache.log4j.Logger
import com.stratio.crossdata.server.actors.TimeTracker
/** Factory for Props of the mock planner actor. */
object MockPlannerActor {
def props(): Props = Props(new MockPlannerActor())
}
/**
 * Test double for the planner: acknowledges every validated query as PLANNED
 * without doing any actual planning work.
 */
class MockPlannerActor() extends Actor with TimeTracker {

  // Timer label used by the TimeTracker mixin.
  override lazy val timerName = this.getClass.getName

  val log = Logger.getLogger(classOf[MockPlannerActor])

  def receive: Receive = {
    // Each validated-query variant gets the same treatment: reply with a
    // PLANNED acknowledgement carrying the query's id.
    case metadataQuery: MetadataValidatedQuery =>
      sender ! ACK(metadataQuery.getQueryId, QueryStatus.PLANNED)
    case selectQuery: SelectValidatedQuery =>
      sender ! ACK(selectQuery.getQueryId, QueryStatus.PLANNED)
    case validatedQuery: IValidatedQuery =>
      sender ! ACK(validatedQuery.getQueryId, QueryStatus.PLANNED)
    // Anything else is reported back as an unsupported operation.
    case _ =>
      sender ! Result.createUnsupportedOperationErrorResult("Not recognized object")
  }
}
| ccaballe/crossdata | crossdata-server/src/test/scala/com/stratio/crossdata/server/mocks/MockPlannerActor.scala | Scala | apache-2.0 | 1,936 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.{InputStream, OutputStream, OutputStreamWriter}
import java.nio.charset.{Charset, StandardCharsets}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.compress._
import org.apache.hadoop.mapreduce.JobContext
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.util.ReflectionUtils
object CodecStreams {

  /** Looks up a decompression codec for `file` based on its extension, if one is registered. */
  private def getDecompressionCodec(config: Configuration, file: Path): Option[CompressionCodec] = {
    Option(new CompressionCodecFactory(config).getCodec(file))
  }

  /**
   * Opens `file` for reading, transparently decompressing it when its extension
   * maps to a known compression codec.
   */
  def createInputStream(config: Configuration, file: Path): InputStream = {
    val rawStream: InputStream = file.getFileSystem(config).open(file)
    getDecompressionCodec(config, file) match {
      case Some(codec) => codec.createInputStream(rawStream)
      case None => rawStream
    }
  }

  /**
   * Resolves the compression codec to use for output. When job-level output
   * compression is enabled, the job's configured compressor (defaulting to gzip)
   * wins; otherwise the codec is inferred from the file extension, if any.
   */
  private def getCompressionCodec(
      context: JobContext,
      file: Option[Path] = None): Option[CompressionCodec] = {
    if (!FileOutputFormat.getCompressOutput(context)) {
      // Compression not enabled on the job: fall back to the file extension.
      file.flatMap { path =>
        Option(new CompressionCodecFactory(context.getConfiguration).getCodec(path))
      }
    } else {
      val compressorClass = FileOutputFormat.getOutputCompressorClass(
        context,
        classOf[GzipCodec])
      Some(ReflectionUtils.newInstance(compressorClass, context.getConfiguration))
    }
  }

  /**
   * Create a new file and open it for writing.
   * If compression is enabled in the [[JobContext]] the stream will write compressed data to disk.
   * An exception will be thrown if the file already exists.
   */
  def createOutputStream(context: JobContext, file: Path): OutputStream = {
    // `create(file, false)` refuses to overwrite an existing file.
    val rawStream: OutputStream = file.getFileSystem(context.getConfiguration).create(file, false)
    getCompressionCodec(context, Some(file)) match {
      case Some(codec) => codec.createOutputStream(rawStream)
      case None => rawStream
    }
  }

  /** Like [[createOutputStream]], but wraps the stream in a writer using `charset` (UTF-8 by default). */
  def createOutputStreamWriter(
      context: JobContext,
      file: Path,
      charset: Charset = StandardCharsets.UTF_8): OutputStreamWriter = {
    new OutputStreamWriter(createOutputStream(context, file), charset)
  }

  /** Returns the compression codec's file-name extension (e.g. ".gz" for gzip), or "" if none. */
  def getCompressionExtension(context: JobContext): String = {
    getCompressionCodec(context).fold("")(_.getDefaultExtension)
  }
}
| SnappyDataInc/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/CodecStreams.scala | Scala | apache-2.0 | 3,385 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.concurrent._
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
private[spark] object ThreadUtils {

  // Single shared context backing `sameThread`; every task runs inline on the caller.
  private val inlineExecutionContext =
    ExecutionContext.fromExecutorService(MoreExecutors.sameThreadExecutor())

  /**
   * An `ExecutionContextExecutor` that runs each task in the thread that invokes `execute/submit`.
   * The caller should make sure the tasks running in this `ExecutionContextExecutor` are short and
   * never block.
   */
  def sameThread: ExecutionContextExecutor = inlineExecutionContext

  /**
   * Builds a thread factory producing daemon threads named `prefix`-N, where N is a
   * sequentially assigned integer.
   */
  def namedThreadFactory(prefix: String): ThreadFactory =
    new ThreadFactoryBuilder().setNameFormat(prefix + "-%d").setDaemon(true).build()

  /**
   * An unbounded cached pool of daemon threads named `prefix`-ID (ID is a unique,
   * sequentially assigned integer).
   */
  def newDaemonCachedThreadPool(prefix: String): ThreadPoolExecutor =
    Executors
      .newCachedThreadPool(namedThreadFactory(prefix))
      .asInstanceOf[ThreadPoolExecutor]

  /**
   * A cached daemon thread pool capped at `maxThreadNumber` threads, named `prefix`-ID.
   * Idle core threads are reclaimed after `keepAliveSeconds`.
   */
  def newDaemonCachedThreadPool(
      prefix: String, maxThreadNumber: Int, keepAliveSeconds: Int = 60): ThreadPoolExecutor = {
    val pool = new ThreadPoolExecutor(
      // Core and maximum sizes are both `maxThreadNumber`: with an unbounded
      // LinkedBlockingQueue the pool never grows beyond its core size anyway.
      maxThreadNumber,
      maxThreadNumber,
      keepAliveSeconds,
      TimeUnit.SECONDS,
      new LinkedBlockingQueue[Runnable],
      namedThreadFactory(prefix))
    pool.allowCoreThreadTimeOut(true)
    pool
  }

  /**
   * A fixed-size pool of `nThreads` daemon threads named `prefix`-ID (ID is a unique,
   * sequentially assigned integer).
   */
  def newDaemonFixedThreadPool(nThreads: Int, prefix: String): ThreadPoolExecutor =
    Executors
      .newFixedThreadPool(nThreads, namedThreadFactory(prefix))
      .asInstanceOf[ThreadPoolExecutor]

  /**
   * A single-threaded executor whose daemon thread is named `threadName`.
   */
  def newDaemonSingleThreadExecutor(threadName: String): ExecutorService = {
    val factory = new ThreadFactoryBuilder().setNameFormat(threadName).setDaemon(true).build()
    Executors.newSingleThreadExecutor(factory)
  }

  /**
   * A single-threaded scheduled executor whose daemon thread is named `threadName`.
   */
  def newDaemonSingleThreadScheduledExecutor(threadName: String): ScheduledExecutorService = {
    val factory = new ThreadFactoryBuilder().setNameFormat(threadName).setDaemon(true).build()
    Executors.newSingleThreadScheduledExecutor(factory)
  }
}
| andrewor14/iolap | core/src/main/scala/org/apache/spark/util/ThreadUtils.scala | Scala | apache-2.0 | 3,892 |
package com.twitter.finagle.httpx.codec
import com.google.common.io.BaseEncoding
import com.twitter.finagle._
import com.twitter.finagle.httpx.Message
import com.twitter.util.{Throw, Try, Return}
import java.nio.charset.Charset
import scala.collection.mutable.ArrayBuffer
import scala.collection.JavaConverters._
/**
* Dtab serialization for Http. Dtabs are encoded into Http
* headers with keys
* x-dtab-$idx-(a|b)
* where $idx is a two-digit integer. These headers are encoded in
* pairs: 'a' and 'b' headers must exist for each index. Thus when
* header names are lexically sorted, Dtab entries are decoded
* pairwise. 'a' denoting prefix, 'b' destination.
*
* Header values are base64-encoded ("standard" alphabet)
* Utf8 strings.
*/
object HttpDtab {
  // New-style header: carries whole Dtab literals (possibly comma-separated).
  private val Header = "dtab-local"
  // Old-style header-name prefix; compared case-insensitively (keys are lowercased on read).
  private val Prefix = "x-dtab-"
  // Indices are two decimal digits, so at most 100 entries are encodable.
  private val Maxsize = 100
  private val Utf8 = Charset.forName("UTF-8")
  private val Base64 = BaseEncoding.base64()

  // Precomputed zero-padded index strings "00".."99" used to build header names.
  private val indexstr: Int => String =
    ((0 until Maxsize) map (i => i -> "%02d".format(i))).toMap

  private def b64Encode(v: String): String =
    Base64.encode(v.getBytes(Utf8))

  // Yields Throw(IllegalArgumentException) when `v` is not valid base64.
  private def b64Decode(v: String): Try[String] =
    Try { Base64.decode(v) } map(new String(_, Utf8))

  private val unmatchedFailure =
    Failure("Unmatched X-Dtab headers")

  private def decodingFailure(value: String) =
    Failure("Value not b64-encoded: "+value)

  private def pathFailure(path: String, cause: IllegalArgumentException) =
    Failure("Invalid path: "+path, cause)

  private def nameFailure(name: String, cause: IllegalArgumentException) =
    Failure("Invalid name: "+name, cause)

  // Base64-decodes and parses a Dentry prefix, mapping both decode and parse
  // errors to descriptive Failures.
  private def decodePath(b64path: String): Try[Path] =
    b64Decode(b64path) match {
      case Throw(e: IllegalArgumentException) => Throw(decodingFailure(b64path))
      case Throw(e) => Throw(e)
      case Return(pathStr) =>
        Try(Path.read(pathStr)) rescue {
          case iae: IllegalArgumentException => Throw(pathFailure(pathStr, iae))
        }
    }

  // Base64-decodes and parses a Dentry destination (a NameTree of Paths).
  private def decodeName(b64name: String): Try[NameTree[Path]] =
    b64Decode(b64name) match {
      case Throw(e: IllegalArgumentException) => Throw(decodingFailure(b64name))
      case Throw(e) => Throw(e)
      case Return(nameStr) =>
        Try(NameTree.read(nameStr)) rescue {
          case iae: IllegalArgumentException => Throw(nameFailure(nameStr, iae))
        }
    }

  // True iff the two (lowercased) keys form an "...a"/"...b" pair for the same index.
  private def validHeaderPair(aKey: String, bKey: String): Boolean =
    aKey.length == bKey.length &&
    aKey(aKey.length - 1) == 'a' &&
    bKey(bKey.length - 1) == 'b' &&
    aKey.substring(0, aKey.length - 1) == bKey.substring(0, bKey.length - 1)

  // Shared success value for the common no-Dtab case (avoids reallocating).
  private val EmptyReturn = Return(Dtab.empty)

  /** Strips all Dtab-carrying headers (both Dtab-Local and X-Dtab-*) from `msg`. */
  def clear(msg: Message) {
    val names = msg.headers.names.iterator()
    msg.headers.remove(Header)
    while (names.hasNext) {
      val n = names.next()
      // NOTE(review): removes entries while iterating over `names`; assumes the
      // underlying header implementation tolerates removal during iteration -- confirm.
      if (n.toLowerCase startsWith Prefix)
        msg.headers.remove(n)
    }
  }

  /**
   * Encodes `dtab` onto `msg` as old-style X-Dtab-NN-A/B header pairs:
   * A carries the base64'd prefix, B the base64'd destination.
   * No-op for an empty Dtab; throws for Dtabs too large to index.
   */
  def write(dtab: Dtab, msg: Message) {
    if (dtab.isEmpty)
      return

    // NOTE(review): a Dtab of exactly 100 entries is also rejected (indexstr only
    // covers 0..99), although the message says "greater than 100".
    if (dtab.size >= Maxsize) {
      throw new IllegalArgumentException(
        "Dtabs with length greater than 100 are not serializable with HTTP")
    }

    for ((Dentry(prefix, dst), i) <- dtab.zipWithIndex) {
      // TODO: now that we have a proper Dtab grammar,
      // should just embed this directly instead.
      // Header names are written with uppercase A/B; reads lowercase keys first.
      msg.headers.set(Prefix+indexstr(i)+"-A", b64Encode(prefix.show))
      msg.headers.set(Prefix+indexstr(i)+"-B", b64Encode(dst.show))
    }
  }

  /**
   * Parse old-style X-Dtab pairs and then new-style Dtab-Local headers,
   * Dtab-Local taking precedence.
   */
  def read(msg: Message): Try[Dtab] =
    for {
      dtab0 <- readXDtabPairs(msg)
      dtab1 <- readDtabLocal(msg)
    } yield dtab0 ++ dtab1

  /**
   * Parse Dtab-Local headers into a Dtab.
   *
   * If multiple Dtab-Local headers are present, they are concatenated.
   * A Dtab-Local header may contain a comma-separated list of Dtabs.
   *
   * N.B. Comma is not a showable character in Paths nor is it meaningful in Dtabs.
   */
  private def readDtabLocal(msg: Message): Try[Dtab] =
    if (!msg.headers.contains(Header)) EmptyReturn else Try {
      val headers = msg.headers().getAll(Header).asScala
      // Splitting on ',' is safe per the note above; each fragment parses to dentries.
      val dentries = headers.view flatMap(_ split ',') flatMap(Dtab.read(_))
      Dtab(dentries.toIndexedSeq)
    }

  /**
   * Parse header pairs into a Dtab:
   *   X-Dtab-00-A: base64(/prefix)
   *   X-Dtab-00-B: base64(/dstA & /dstB)
   */
  private def readXDtabPairs(msg: Message): Try[Dtab] = {
    // Common case: no actual overrides.
    // Collect (lowercased) x-dtab-* keys lazily; `keys` stays null when none exist.
    var keys: ArrayBuffer[String] = null
    val headers = msg.headers.iterator()
    while (headers.hasNext()) {
      val key = headers.next().getKey().toLowerCase
      if (key startsWith Prefix) {
        if (keys == null) keys = ArrayBuffer[String]()
        keys += key
      }
    }

    if (keys == null)
      return EmptyReturn

    // Keys must come in a/b pairs; an odd count cannot pair up.
    if (keys.size % 2 != 0)
      return Throw(unmatchedFailure)

    // Sorting puts each index's "...a" key immediately before its "...b" key.
    keys = keys.sorted

    val n = keys.size/2
    val dentries = new Array[Dentry](n)
    var i = 0
    while (i < n) {
      val j = i*2
      val prefix = keys(j)
      val dest = keys(j+1)

      if (!validHeaderPair(prefix, dest))
        return Throw(unmatchedFailure)

      // Decode the pair's values into a Dentry; any failure aborts the whole read.
      val tryDentry =
        for {
          path <- decodePath(msg.headers.get(prefix))
          name <- decodeName(msg.headers.get(dest))
        } yield Dentry(path, name)

      dentries(i) =
        tryDentry match {
          case Return(dentry) => dentry
          case Throw(e) =>
            return Throw(e)
        }

      i += 1
    }

    Return(Dtab(dentries))
  }
}
| folone/finagle | finagle-httpx/src/main/scala/com/twitter/finagle/httpx/codec/HttpDtab.scala | Scala | apache-2.0 | 5,633 |
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
import language._
object Test extends ScaladocModelTest {

  // test a file instead of a piece of code
  override def resourceFile = "implicits-known-type-classes-res.scala"

  // start implicits
  def scaladocSettings = "-implicits"

  def testModel(root: Package) = {
    // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
    import access._

    /** Tests the "known type classes" feature of scaladoc implicits
     * if the test fails, please update the correct qualified name of
     * the type class in src/compiler/scala/tools/nsc/doc/Settings.scala
     * in the knownTypeClasses map. Thank you! */

    val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("typeclasses")
    val A = base._class("A")

    // Every non-hidden conversion on A must carry exactly one constraint, and it
    // must be recognized as a known type class.
    // (Removed a dead, null-initialized `var conv: ImplicitConversion` that was never read.)
    for (conversion <- A.conversions if !conversion.isHiddenConversion) {
      assert(conversion.constraints.length == 1, conversion.constraints.length + " == 1 (in " + conversion + ")")
      assert(conversion.constraints.head.isInstanceOf[KnownTypeClassConstraint],
             conversion.constraints.head + " is not a known type class constraint!")
    }
  }
}
| felixmulder/scala | test/scaladoc/run/implicits-known-type-classes.scala | Scala | bsd-3-clause | 1,318 |
package gv
package isi
package functional
package monoid
/** Marker trait for the monoid formed by addition; instances are derived in the companion object. */
trait Addition[T] extends Any
  with Monoid[T]
object Addition {
  /**
   * Derives the additive monoid for any `T` that has a `typeclass.Addable`
   * combining operation and a `typeclass.Zero` identity element.
   */
  final implicit def apply[T: typeclass.Addable: typeclass.Zero]: Addition[T] =
    new Addition[T] {
      override def zero: T = implicitly[typeclass.Zero[T]].zero
      // The original source contained a mojibake'd function arrow here ("β");
      // restored to the standard `=>` so the declaration compiles.
      override def op: (T, T) => T = implicitly[typeclass.Addable[T]].op
    }
}
| mouchtaris/jleon | src/main/scala-2.12/gv/isi/functional/monoid/Addition.scala | Scala | mit | 372 |
/*
* Copyright (c) 2014 Oculus Info Inc.
* http://www.oculusinfo.com/
*
* Released under the MIT License.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oculusinfo.tilegen.tiling.analytics
import java.lang.{Integer => JavaInt}
import java.lang.{Long => JavaLong}
import java.lang.{Float => JavaFloat}
import java.lang.{Double => JavaDouble}
import java.util.{List => JavaList}
import scala.collection.JavaConverters._
import org.scalatest.FunSuite
import com.oculusinfo.binning.TileIndex
import com.oculusinfo.binning.impl.DenseTileData
import com.oculusinfo.tilegen.tiling.IPv4ZCurveIndexScheme.ipArrayToString
import com.oculusinfo.tilegen.tiling.IPv4ZCurveIndexScheme.longToIPArray
import com.oculusinfo.tilegen.tiling.IPv4ZCurveIndexScheme
import com.oculusinfo.binning.util.JSONUtilitiesTests
import org.json.JSONObject
/**
 * Unit tests for the numeric tile/bin analytics (sum, min, max, mean).
 *
 * The summation analytic is exercised fully for all four numeric types; the
 * other analytics are fully tested for Int and then only type-checked for
 * Long/Float/Double.
 */
class NumericAnalyticsTestSuite extends FunSuite {
  import TileAnalytic.Locations._

  // Converts a Scala Double list to a java.util.List[JavaDouble] for interop assertions.
  def toJava (l: List[Double]) = l.map(new JavaDouble(_)).asJava

  // Element-wise equality helper for Scala sequences.
  def assertSeqsEqual[T] (a: Seq[T], b: Seq[T]): Unit = {
    assert(a.size === b.size)
    for (n <- 0 until a.size) assert(a(n) === b(n))
  }

  // Element-wise equality helper for Java lists.
  def assertListsEqual[T] (a: JavaList[T], b: JavaList[T]): Unit = {
    assert(a.size === b.size)
    for (n <- 0 until a.size) assert(a.get(n) === b.get(n))
  }

  // Full tests on each type for the summation analytic
  test("Standard Integer Analytic") {
    // check base
    val analytic = new NumericSumTileAnalytic[Int]()
    assert(0 === analytic.defaultProcessedValue)
    assert(0 === analytic.defaultUnprocessedValue)
    assert(3 === analytic.aggregate(1, 2))
    assert(analytic.aggregate(1, 2).isInstanceOf[Int])

    // Check tile analytic output
    JSONUtilitiesTests.assertJsonEqual(new JSONObject("""{"sum": 4}"""),
                                       analytic.storableValue(4, Tile).get)
  }

  test("Standard Long Analytic") {
    // check base
    val analytic = new NumericSumTileAnalytic[Long]()
    assert(0l === analytic.defaultProcessedValue)
    assert(0l === analytic.defaultUnprocessedValue)
    assert(3l === analytic.aggregate(1l, 2l))
    assert(analytic.aggregate(1L, 2L).isInstanceOf[Long])

    // Check tile analytic output
    JSONUtilitiesTests.assertJsonEqual(new JSONObject("""{"sum": 4444444444}"""),
                                       analytic.storableValue(4444444444L, Tile).get)
  }

  test("Standard Float Analytic") {
    // check base
    val analytic = new NumericSumTileAnalytic[Float]()
    assert(0.0f === analytic.defaultProcessedValue)
    assert(0.0f === analytic.defaultUnprocessedValue)
    assert(3.0f === analytic.aggregate(1.0f, 2.0f))
    assert(analytic.aggregate(1.0f, 2.0f).isInstanceOf[Float])

    // Check tile analytic output
    // The expected JSON is built programmatically so the float value is boxed exactly.
    val expected = new JSONObject()
    expected.put("sum", Float.box(4.2f))
    JSONUtilitiesTests.assertJsonEqual(expected, analytic.storableValue(4.2f, Tile).get)
  }

  test("Standard Double Analytic") {
    // check base
    val analytic = new NumericSumTileAnalytic[Double]()
    assert(0.0 === analytic.defaultProcessedValue)
    assert(0.0 === analytic.defaultUnprocessedValue)
    assert(3.0 === analytic.aggregate(1.0, 2.0))
    assert(analytic.aggregate(1.0, 2.0).isInstanceOf[Double])
    assert(analytic.aggregate(1.0f, 2.0f).isInstanceOf[Double])
    assert(analytic.aggregate(1L, 2L).isInstanceOf[Double])
    assert(analytic.aggregate(1, 2).isInstanceOf[Double])

    // Check tile analytic output
    JSONUtilitiesTests.assertJsonEqual(new JSONObject("""{"sum": 4.3}"""),
                                       analytic.storableValue(4.3, Tile).get)
  }

  // Having testing the summation analytic fully for each type, we just do
  // type checking on non-Int types for other analytics
  test("Minimum Int Analytic") {
    val analytic = new NumericMinAnalytic[Int]()
    assert(1 === analytic.aggregate(1, 2))
    assert(1 === analytic.aggregate(2, 1))
    assert(analytic.aggregate(1, 2).isInstanceOf[Int])
  }

  test("Minimum Double Analytic ignores NaN") {
    // Tile defaults to NaN everywhere; only the diagonal is populated.
    val sampleTile = new DenseTileData[JavaDouble](new TileIndex(0, 0, 0, 4, 4), JavaDouble.NaN)
    sampleTile.setBin(0, 0, 1.0)
    sampleTile.setBin(1, 1, 2.0)
    sampleTile.setBin(2, 2, 3.0)
    sampleTile.setBin(3, 3, 4.0)

    val minConvert = AnalysisDescriptionTileWrapper.acrossTile((d: JavaDouble) => d.doubleValue,
                                                               new NumericMinTileAnalytic[Double]())
    assert(1.0 === minConvert(sampleTile))
  }

  //	test("Minimum Double with Payload analytic") {
  //		val analytic = new NumericMinWithPayloadAnalytic[Double, JavaDouble, String]()
  //		assert((1.0, "a") === analytic.aggregate((1.0, "a"), (2.0, "b")))
  //		assert((1.0, "a") === analytic.aggregate((2.0, "b"), (1.0, "a")))
  //		assert((1.0, "a") === analytic.aggregate((1.0, "a"), (JavaDouble.NaN, "b")))
  //		assert((1.0, "a") === analytic.aggregate((JavaDouble.NaN, "b"), (1.0, "a")))
  //	}

  test("Minimum Long Analytic") {
    assert(new NumericMinAnalytic[Long]().aggregate(1L, 2L).isInstanceOf[Long])
  }

  test("Minimum Float Analytic") {
    assert(new NumericMinAnalytic[Float]().aggregate(1.1f, 2.2f).isInstanceOf[Float])
  }

  test("Minimum Double Analytic") {
    assert(new NumericMinAnalytic[Double]().aggregate(1.2, 2.4).isInstanceOf[Double])
  }

  test("Maximum Int Analytic") {
    val analytic = new NumericMaxAnalytic[Int]()
    assert(2 === analytic.aggregate(1, 2))
    assert(2 === analytic.aggregate(2, 1))
    assert(analytic.aggregate(1, 2).isInstanceOf[Int])
  }

  test("Maximum Double Analytic ignores NaN") {
    // Tile defaults to NaN everywhere; only the diagonal is populated.
    val sampleTile = new DenseTileData[JavaDouble](new TileIndex(0, 0, 0, 4, 4), JavaDouble.NaN)
    sampleTile.setBin(0, 0, 1.0)
    sampleTile.setBin(1, 1, 2.0)
    sampleTile.setBin(2, 2, 3.0)
    sampleTile.setBin(3, 3, 4.0)

    val maxConvert = AnalysisDescriptionTileWrapper.acrossTile((d: JavaDouble) => d.doubleValue,
                                                               new NumericMaxTileAnalytic[Double]())
    assert(4.0 === maxConvert(sampleTile))
  }

  //	test("Maximum Double with Payload analytic") {
  //		val analytic = new NumericMaxWithPayloadAnalyticddd[Double, JavaDouble, String]()
  //		assert((1.0, "a") === analytic.aggregate((1.0, "a"), (0.0, "b")))
  //		assert((1.0, "a") === analytic.aggregate((0.0, "b"), (1.0, "a")))
  //		assert((1.0, "a") === analytic.aggregate((1.0, "a"), (JavaDouble.NaN, "b")))
  //		assert((1.0, "a") === analytic.aggregate((JavaDouble.NaN, "b"), (1.0, "a")))
  //	}

  test("Maximum Long Analytic") {
    assert(new NumericMaxAnalytic[Long]().aggregate(1L, 2L).isInstanceOf[Long])
  }

  test("Maximum Float Analytic") {
    assert(new NumericMaxAnalytic[Float]().aggregate(1.1f, 2.2f).isInstanceOf[Float])
  }

  test("Maximum Double Analytic") {
    assert(new NumericMaxAnalytic[Double]().aggregate(1.2, 2.4).isInstanceOf[Double])
  }

  test("Standard Mean Int Binning Analytic") {
    // Test normal values
    // Mean state is a (sum, count) pair; finish divides sum by count.
    val analytic = new NumericMeanBinningAnalytic[Int]()
    assert((0, 0) === analytic.defaultProcessedValue)
    assert((0, 0) === analytic.defaultUnprocessedValue)
    assert((3, 5) === analytic.aggregate((2, 2), (1, 3)))
    assert(analytic.aggregate((2, 1), (1, 2))._1.isInstanceOf[Int])
    assert(0.6 === analytic.finish(3, 5))
    assert(JavaDouble.isNaN(analytic.finish((0, 0))))

    // Test default values
    val analyticDef1 = new NumericMeanBinningAnalytic[Int](emptyValue=1)
    assert(1 == analyticDef1.finish((3, 0)))
    val analyticDef2 = new NumericMeanBinningAnalytic[Int](emptyValue=3)
    assert(3 == analyticDef2.finish((1, 0)))

    // Test minimum count
    val analyticCount1 = new NumericMeanBinningAnalytic[Int](minCount=4)
    assert(analyticCount1.finish((3, 3)).isNaN)
    assert(1.25 === analyticCount1.finish((5, 4)))
  }

  test("Standard Mean Long Binning Analytic") {
    // Test normal values
    val analytic = new NumericMeanAnalytic[Long]()
    assert(analytic.aggregate((2L, 1), (1L, 2))._1.isInstanceOf[Long])
  }

  test("Standard Mean Float Binning Analytic") {
    // Test normal values
    val analytic = new NumericMeanAnalytic[Float]()
    assert(analytic.aggregate((2.0f, 1), (1.0f, 2))._1.isInstanceOf[Float])
  }

  test("Standard Mean Double Binning Analytic") {
    // Test normal values
    val analytic = new NumericMeanAnalytic[Double]()
    assert(analytic.aggregate((2.0, 1), (1.0, 2))._1.isInstanceOf[Double])
  }
}
| unchartedsoftware/aperture-tiles | tile-generation/src/test/scala/com/oculusinfo/tilegen/tiling/analytics/NumericAnalyticsTestSuite.scala | Scala | mit | 9,229 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.shellbase.commands
import com.sumologic.shellbase.ShellCommand
import com.sumologic.shellbase.cmdline.CommandLineArgument
import com.sumologic.shellbase.cmdline.RichCommandLine._
import com.sumologic.shellbase.timeutil.TimeFormats
import org.apache.commons.cli.{CommandLine, Options}
/**
 * Shell command that times the execution of another (backtick-quoted) command,
 * logging and printing the elapsed wall-clock duration.
 */
class TimeCommand(runCommand: String => Boolean) extends ShellCommand("time", "Measure the execution time of a command") {

  // Single positional argument: the command to run and time.
  private val CommandArgument = new CommandLineArgument("command", 0, true)

  private def now = System.currentTimeMillis()

  override def maxNumberOfArguments = 1

  override def addOptions(opts: Options): Unit = {
    opts += CommandArgument
  }

  import com.sumologic.shellbase.ShellBase.SubCommandExtractor

  def execute(cmdLine: CommandLine) = {
    cmdLine.get(CommandArgument) match {
      case Some(SubCommandExtractor(cmd)) =>
        // Run the nested command, measuring wall-clock duration around it.
        val startedAt = now
        val succeeded = runCommand(cmd)
        val dt = now - startedAt
        val dtMessage = s"Execution took $dt ms (${TimeFormats.formatAsTersePeriod(dt)})"
        _logger.info(s"$dtMessage for `$cmd`")
        println(s"\\n$dtMessage\\n")
        // Propagate the nested command's exit status.
        succeeded
      case badCmd =>
        println(s"Usage: time `<command>`, but found $badCmd.")
        false
    }
  }
}
| SumoLogic/shellbase | shellbase-core/src/main/scala/com/sumologic/shellbase/commands/TimeCommand.scala | Scala | apache-2.0 | 2,075 |
/*
*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.locationtech.geomesa.core.stats
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.{ScheduledThreadPoolExecutor, TimeUnit}
import com.google.common.collect.Queues
import com.google.common.util.concurrent.MoreExecutors
import com.typesafe.scalalogging.slf4j.Logging
import org.apache.accumulo.core.client.admin.TimeType
import org.apache.accumulo.core.client.mock.MockConnector
import org.apache.accumulo.core.client.{BatchWriterConfig, Connector, TableExistsException}
import org.locationtech.geomesa.core.stats.StatWriter.TableInstance
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
 * Mixin that lets a class asynchronously record usage stats to Accumulo via the
 * shared background writer in the [[StatWriter]] companion object.
 */
trait StatWriter {

  // Connection used when writing stats; supplied by the implementing class.
  def connector: Connector

  // start the background thread
  // (skipped for mock connectors, e.g. in tests, where no background writing is needed)
  if(!connector.isInstanceOf[MockConnector]) {
    StatWriter.startIfNeeded()
  }

  /**
   * Writes a stat to accumulo. This implementation adds the stat to a bounded queue, which should
   * be fast, then asynchronously writes the data in the background.
   *
   * @param stat the stat to record
   * @param statTable name of the Accumulo table the stat will be written to
   */
  def writeStat(stat: Stat, statTable: String): Unit =
    StatWriter.queueStat(stat, TableInstance(connector, statTable))
}
/**
* Singleton object to manage writing of stats in a background thread.
*/
/**
 * Singleton object to manage writing of stats in a background thread.
 */
object StatWriter extends Runnable with Logging {

  // Capacity of the stat queue; offers beyond this are dropped (see queueStat).
  private val batchSize = 100

  // Delay between background write passes, letting stats accumulate between flushes.
  private val writeDelayMillis = 1000

  private val batchWriterConfig = new BatchWriterConfig().setMaxMemory(10000L).setMaxWriteThreads(5)

  // use the guava exiting executor so that this thread doesn't hold up the jvm shutdown
  private val executor = MoreExecutors.getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1))

  // Ensures the background task is scheduled at most once.
  private val running = new AtomicBoolean(false)

  private val queue = Queues.newLinkedBlockingQueue[StatToWrite](batchSize)

  // Caches which stat tables have been verified/created, so the check runs once per table.
  // NOTE(review): mutable.SynchronizedMap is deprecated in later Scala versions;
  // a java.util.concurrent concurrent map would be the modern replacement.
  private val tableCache = new mutable.HashMap[TableInstance, Boolean]
          with mutable.SynchronizedMap[TableInstance, Boolean]

  sys.addShutdownHook {
    executor.shutdownNow()
  }

  /**
   * Starts the background thread for writing stats, if it hasn't already been started
   */
  private def startIfNeeded() {
    if (running.compareAndSet(false, true)) {
      // we want to wait between invocations to give more stats a chance to queue up
      executor.scheduleWithFixedDelay(this, writeDelayMillis, writeDelayMillis, TimeUnit.MILLISECONDS)
    }
  }

  /**
   * Queues a stat for writing. We don't want to affect memory and accumulo performance too much...
   * if we exceed the queue size, we drop any further stats
   *
   * @param stat the stat to enqueue (dropped, with a debug log, when the queue is full)
   */
  private def queueStat(stat: Stat, table: TableInstance): Unit =
    if (!queue.offer(StatToWrite(stat, table))) {
      logger.debug("Stat queue is full - stat being dropped")
    }

  /**
   * Writes the stats.
   *
   * Stats are grouped by (table, stat class) so each group shares one transform
   * and one batch writer.
   *
   * @param statsToWrite the batch of queued stats to persist
   */
  def write(statsToWrite: Iterable[StatToWrite]): Unit =
    statsToWrite.groupBy(s => StatGroup(s.table, s.stat.getClass)).foreach { case (group, stats) =>
      // get the appropriate transform for this type of stat
      val transform = group.clas match {
        case c if c == classOf[QueryStat] => QueryStatTransform.asInstanceOf[StatTransform[Stat]]
        case _ => throw new RuntimeException("Not implemented")
      }
      // create the table if necessary
      checkTable(group.table)
      // write to the table
      val writer = group.table.connector.createBatchWriter(group.table.name, batchWriterConfig)
      try {
        writer.addMutations(stats.map(stw => transform.statToMutation(stw.stat)).asJava)
        writer.flush()
      } finally {
        writer.close()
      }
    }

  /**
   * Create the stats table if it doesn't exist
   * @param table the table to verify; the result is memoized in tableCache
   * @return true (cached) once the table is known to exist
   */
  private def checkTable(table: TableInstance) =
    tableCache.getOrElseUpdate(table, {
      val tableOps = table.connector.tableOperations()
      if (!tableOps.exists(table.name)) {
        try {
          tableOps.create(table.name, true, TimeType.LOGICAL)
        } catch {
          case e: TableExistsException => // unlikely, but this can happen with multiple jvms
        }
      }
      true
    })

  // One write pass, invoked periodically by the scheduled executor.
  override def run() = {
    try {
      // wait for a stat to be queued
      val head = queue.take()
      // drain out any other stats that have been queued while sleeping
      val stats = collection.mutable.ListBuffer(head)
      queue.drainTo(stats.asJava)
      write(stats)
    } catch {
      case e: InterruptedException =>
        // normal thread termination, just propagate the interrupt
        Thread.currentThread().interrupt()
      case e: Exception =>
        logger.error("Error in stat writing - stopping stat writer thread:", e)
        executor.shutdown()
    }
  }

  // A queued stat paired with the table it should be written to.
  private[stats] case class StatToWrite(stat: Stat, table: TableInstance)

  // Identifies a target table on a particular Accumulo connection.
  private[stats] case class TableInstance(connector: Connector, name: String)

  // Grouping key for batching: same table, same concrete stat type.
  private[stats] case class StatGroup(table: TableInstance, clas: Class[_ <: Stat])
}
| drmathochist/geomesa | geomesa-core/src/main/scala/org/locationtech/geomesa/core/stats/StatWriter.scala | Scala | apache-2.0 | 5,568 |
package com.github.takemikami.selica
import org.apache.spark.sql.SparkSession
import org.apache.log4j.{Level, Logger}
object SparkSessionForUnitTest {

  // Silence noisy Spark/Akka logging during tests.
  val level = Level.WARN
  Logger.getLogger("org").setLevel(level)
  Logger.getLogger("akka").setLevel(level)

  private val master = "local[2]"
  private val appName = "SparkCF"

  /**
   * Returns the SparkSession used by unit tests, creating it on first use.
   *
   * `SparkSession.builder().getOrCreate()` already caches and reuses the active
   * session, so the redundant write-only `var` field the original kept has been
   * removed.
   */
  def getSession(): SparkSession = {
    SparkSession
      .builder()
      .master(master)
      .appName(appName)
      .getOrCreate()
  }
}
| takemikami/selica | src/test/scala/com/github/takemikami/selica/SparkSessionForUnitTest.scala | Scala | apache-2.0 | 556 |
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv
package cats
/** Single importable object exposing the `Eq` instances defined by
  * `kantan.codecs.cats.laws.discipline.EqInstances` for use in the cats tests. */
object equality extends kantan.codecs.cats.laws.discipline.EqInstances
| nrinaudo/scala-csv | cats/shared/src/test/scala/kantan/csv/cats/equality.scala | Scala | mit | 704 |
package chapter06
// Define an Origin object that extends java.awt.Point.
// Why is this not actually a good idea?
// (Have a close look at the methods of the Point class.)
object Exercise3 extends App {
  // Answer: java.awt.Point is a *mutable* class — its x/y fields are public
  // and can be changed via move/setLocation/translate. Making a singleton
  // Origin out of it therefore creates globally shared mutable state: any
  // caller could silently relocate "the origin".
  object Origin extends java.awt.Point {}
  // Shared state
}
| vsuharnikov/books-exercises | scala/scala-for-the-impatient/src/main/scala/chapter06/Exercise3.scala | Scala | mit | 268 |
package interpretation
/**
* Created by chanjinpark on 2016. 9. 7..
*/
// Empty application stub: `extends App` provides a main method, but no
// topic-word checking logic has been implemented yet.
object TopicWordCheck extends App {
}
| chanjin/NRFAnalysis | src/main/scala/interpretation/TopicWordCheck.scala | Scala | apache-2.0 | 116 |
package com.mesosphere.cosmos
import com.mesosphere.cosmos.error.CosmosError
import com.mesosphere.cosmos.rpc.v1.model.ErrorResponse
import com.mesosphere.cosmos.thirdparty.marathon.model.AppId
import com.mesosphere.universe
import com.twitter.bijection.Conversion
import io.circe.Json
import io.circe.jawn.parse
import java.util.UUID
/** Implicit conversions and string syntax shared by the integration tests. */
object ItOps {

  /** Converts any [[CosmosError]] into its wire-level [[ErrorResponse]]. */
  implicit def cosmosErrorToErrorResponse[E <: CosmosError]: Conversion[E, ErrorResponse] =
    Conversion.fromFunction((error: E) => error.exception.errorResponse)

  /** Renders a [[UUID]] as a Marathon application id. */
  implicit val uuidToAppId: Conversion[UUID, AppId] =
    Conversion.fromFunction(uuid => AppId(uuid.toString))

  /** String syntax for building universe/package model values in tests. */
  implicit final class ItStringOps(val string: String) extends AnyVal {

    def version: universe.v3.model.Version = universe.v3.model.Version(string)

    def detailsVersion: universe.v2.model.PackageDetailsVersion =
      universe.v2.model.PackageDetailsVersion(string)

    /** Parses the string as JSON. The partial match is deliberate: a malformed
      * literal should abort the test with a MatchError. */
    def json: Json = {
      val Right(result) = parse(string)
      result
    }
  }
}
| takirala/cosmos | cosmos-integration-tests/src/main/scala/com/mesosphere/cosmos/ItOps.scala | Scala | apache-2.0 | 1,003 |
package com.varunvats.practice.graph
import com.varunvats.practice.graph.BuildOrder.NoValidBuildOrderException
import com.varunvats.practice.sorting.UnitSpec
/**
 * Spec for `BuildOrder`: given projects and (project -> dependency) pairs it
 * must produce an order in which every dependency appears before the project
 * that needs it, and must reject cycles among *required* projects with
 * `NoValidBuildOrderException`.
 */
class BuildOrderSpec extends UnitSpec {

  "The build order creator" must {
    // Happy paths: each expected sequence lists dependencies strictly before
    // the projects that depend on them.
    "create the correct build order" when {
      "none of the projects have any dependencies and no dependencies are specified" in {
        val projects = List('A', 'Z', 'P')
        BuildOrder(projects, Nil) shouldBe Seq('A', 'Z', 'P')
      }

      "none of the projects have any dependencies but the dependency list contains unrelated projects" in {
        val projects = List('A')
        val dependencies = List('Z' -> 'P', 'X' -> 'Y')
        BuildOrder(projects, dependencies) shouldBe Seq('A')
      }

      "there is only one project and it has only one dependency" in {
        val projects = List('A')
        val dependencies = List('A' -> 'P')
        BuildOrder(projects, dependencies) shouldBe Seq('P', 'A')
      }

      "the are two projects and both have one dependency each" in {
        val projects = List('A', 'B')
        val dependencies = List('A' -> 'P', 'B' -> 'X')
        BuildOrder(projects, dependencies) shouldBe Seq('P', 'A', 'X', 'B')
      }

      "there are two projects and they have the same dependency" in {
        val projects = List('A', 'B')
        val dependencies = List('A' -> 'P', 'B' -> 'P')
        BuildOrder(projects, dependencies) shouldBe Seq('P', 'A', 'B')
      }

      "there are two projects and one project's dependency has another dependency" in {
        val projects = List('A', 'B')
        val dependencies = List('A' -> 'P', 'P' -> 'C', 'B' -> 'X')
        BuildOrder(projects, dependencies) shouldBe Seq('C', 'P', 'A', 'X', 'B')
      }

      "the are two projects and each has multiple dependencies only one level deep" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'A' -> 'Q',
          'A' -> 'R',
          'B' -> 'F',
          'B' -> 'G',
          'B' -> 'H'
        )
        val expectedBuildOrder = Seq('R', 'Q', 'P', 'A', 'H', 'G', 'F', 'B')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }

      "there are two projects and each has multiple dependencies, two levels deep" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'A' -> 'Q',
          'A' -> 'R',
          'P' -> 'T',
          'P' -> 'U',
          'R' -> 'S',
          'B' -> 'F',
          'B' -> 'G',
          'B' -> 'H',
          'H' -> 'I'
        )
        val expectedBuildOrder = Seq('S', 'R', 'Q', 'U', 'T', 'P', 'A', 'I', 'H', 'G', 'F', 'B')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }

      "there are two projects and one of first project's many dependencies is the other project" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'A' -> 'B',
          'P' -> 'Q',
          'B' -> 'R',
          'B' -> 'U',
          'R' -> 'L'
        )
        val expectedBuildOrder = Seq('U', 'L', 'R', 'B', 'Q', 'P', 'A')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }

      "two projects share the same dependency and that dependency has multiple multi-level dependencies" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'B' -> 'P',
          'P' -> 'Q',
          'P' -> 'R',
          'P' -> 'S',
          'Q' -> 'T',
          'Q' -> 'U',
          'U' -> 'M',
          'R' -> 'N'
        )
        val expectedBuildOrder = Seq('S', 'N', 'R', 'M', 'U', 'T', 'Q', 'P', 'A', 'B')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }

      "there is a diamond dependency" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'A' -> 'Q',
          'P' -> 'R',
          'Q' -> 'R',
          'R' -> 'S',
          'P' -> 'O',
          'B' -> 'O'
        )
        val expectedBuildOrder = Seq('S', 'R', 'Q', 'O', 'P', 'A', 'B')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }

      "there are two adjacent diamond dependencies that share two dependencies" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'A' -> 'Q',
          'P' -> 'R',
          'Q' -> 'R',
          'B' -> 'P',
          'B' -> 'S',
          'R' -> 'T',
          'S' -> 'T'
        )
        val expectedBuildOrder = Seq('T', 'R', 'Q', 'P', 'A', 'S', 'B')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }

      "there is a cyclic dependency but none of the projects in the cyclic dependency need to be built" in {
        val projects = List('A')
        val dependencies = List(
          'A' -> 'D',
          'P' -> 'Q',
          'Q' -> 'R',
          'R' -> 'P'
        )
        val expectedBuildOrder = Seq('D', 'A')
        BuildOrder(projects, dependencies) shouldBe expectedBuildOrder
      }
    }

    // Failure paths: cycles reachable from a requested project are fatal.
    "throw an exception" when {
      "there is only one project and it depends on itself" in {
        val projects = List('A')
        val dependencies = List('A' -> 'A')
        intercept[NoValidBuildOrderException] {
          BuildOrder(projects, dependencies)
        }
      }

      "there is a project with a cyclic dependency that involves more than one project" in {
        val projects = List('A', 'B')
        val dependencies = List(
          'A' -> 'P',
          'P' -> 'B',
          'B' -> 'Q',
          'Q' -> 'P',
          'B' -> 'L'
        )
        intercept[NoValidBuildOrderException] {
          BuildOrder(projects, dependencies)
        }
      }

      "there are two projects and both depend on each other" in {
        val projects = List('A', 'B')
        val dependencies = List('A' -> 'B', 'B' -> 'A')
        intercept[NoValidBuildOrderException] {
          BuildOrder(projects, dependencies)
        }
      }
    }
  }
}
| varunvats/practice | jvm/src/test/scala/com/varunvats/practice/graph/BuildOrderSpec.scala | Scala | mit | 6,083 |
// Copyright 2017 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid
package ir
import org.scalatest.FunSuite
import MacroTesters._
/**
 * Checks that evaluating a deep (IR) representation of a program with the
 * `BaseInterpreter` produces the same value as running the shallow (normal
 * Scala) version of the same code. `shallowAndDeep` is a macro producing
 * both forms as a (deep, shallow-result) pair.
 */
class InterpreterTests extends FunSuite {

  object b extends ir.SimpleAST
  val Inter = new ir.BaseInterpreter

  def same[A](xy: (A, A)) = assert(xy._1 == xy._2)
  // Reinterprets the deep representation and compares with the shallow result.
  def runSame[A](xy: (b.Rep, A)) = {
    same(b.reinterpret(xy._1, Inter)(), xy._2)
  }

  test("Constants") ({
    runSame( shallowAndDeep(b){ 42 } )
    runSame( shallowAndDeep(b){ "ok" } )
    runSame( shallowAndDeep(b){ 'c' } )
    runSame( shallowAndDeep(b){ 'Cool } )
  })

  test("Basic") {/*Inter.debugFor*/{
    runSame( shallowAndDeep(b){ "ok".reverse } )
    runSame( shallowAndDeep(b){ "ok".take(1)+"ko" } )
    runSame( shallowAndDeep(b){ {0 -> 1} swap } )
  }}

  test("Bindings") {
    //runSame( shallowAndDeep(b){ (arg: {val y:Int}) => arg.y } ) // Unsupported feature: Refinement type 'AnyRef{val y: Int}'
    runSame( shallowAndDeep(b){ val x = 0; x + 1 } )
    runSame( shallowAndDeep(b){ ((x: Int) => x + 1)(42) } )
    runSame( shallowAndDeep(b){ {x: Int => x + 1}.apply(42) } )
  }

  test("Variables") {
    runSame( shallowAndDeep(b){ lib.MutVar(0) } )
    runSame( shallowAndDeep(b){ var x = ("ok" + "ko".reverse).length; x-=1; (x+=1, x, 'lol) } )
  }

  test("By-name") {{
    runSame( shallowAndDeep(b){ Dummies.byNameMethod(42) } )
    runSame( shallowAndDeep(b){ Dummies.byNameMethod(666) } )
  }}

  test("Varargs") {
    runSame( shallowAndDeep(b){ lib.Imperative()(42) } )
    runSame( shallowAndDeep(b){ var x = 0; lib.Imperative(x += 1)(x) } )
    runSame( shallowAndDeep(b){ var x = 0; lib.Imperative(x += 1, x += 1)(x) } )
    runSame( shallowAndDeep(b){ var x = 0; val modifs = Seq(x += 1, x += 1); lib.Imperative(modifs: _*)(x) } )
  }

  test("Virtualized Constructs") {

    // Ascription
    runSame( shallowAndDeep(b){ (List(1,2,3) : Seq[Any]).size: Int } )
    runSame( shallowAndDeep(b){ "ok".length: Unit } )

    // If then else
    runSame( shallowAndDeep(b){ if (Math.PI > 0) "ok" else "ko" } )
    runSame( shallowAndDeep(b){ var x = 0; if (true) x += 1 else x += 1; x } )

    // While
    runSame( shallowAndDeep(b){ var x = 0; while (x < 3) { x += 1; println(x) }; x } )
  }

  test("Java") {{
    // overloading
    runSame(shallowAndDeep(b){ "ok".indexOf('k'.toInt) })
    runSame(shallowAndDeep(b){ "ok".indexOf('k') })
    runSame(shallowAndDeep(b){ "okok".indexOf("ok") })
    runSame(shallowAndDeep(b){ "okok".lastIndexOf("ok") })

    runSame( shallowAndDeep(b){ String.valueOf(true) } )
    // workaround for 2-way cache compiler bug (associates java.lang.String to 'object String' instead of 'class String')
    runSame( shallowAndDeep(b){ "ok"+String.valueOf("ko") } )

    runSame( shallowAndDeep(b){ ("ko"*2) })
    runSame( shallowAndDeep(b){ ("ok" + "ko"*2).length })
  }}

  test("Method asInstanceOf") {
    import b.Predef._
    runSame(code"(42:Any).asInstanceOf[Int]+1".rep -> 43)
    runSame(code"('ok:AnyRef).asInstanceOf[Symbol].name".rep -> "ok")
    runSame(code"(1::Nil:Seq[Int]).asInstanceOf[List[Int]].isEmpty".rep -> false)
    // Prints a warning -- cannot interpret `isInstanceOf` directly
    //runSame(ir"(1::Nil:Seq[Int]).isInstanceOf[List[Int]]".rep -> true)
  }
}
| epfldata/squid | src/test/scala/squid/ir/InterpreterTests.scala | Scala | apache-2.0 | 3,987 |
package io.mth.route
case class Failure(message: String)
| markhibberd/route | src/scala/io/mth/route/Failure.scala | Scala | bsd-3-clause | 58 |
package coursier.cache
abstract class PlatformCache[F[_]]
| alexarchambault/coursier | modules/cache/js/src/main/scala/coursier/cache/PlatformCache.scala | Scala | apache-2.0 | 59 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2020
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.util.monitor
import java.lang.management.{BufferPoolMXBean, ManagementFactory}
import akka.actor.{Actor, Timers}
import io.techcode.streamy.config.StreamyConfig
import io.techcode.streamy.event.{ActorListener, MonitorEvent}
import io.techcode.streamy.util.lang.SystemAccess
import io.techcode.streamy.util.monitor.JvmMonitor.Tick
import scala.jdk.CollectionConverters._
/**
 * JVM monitor.
 *
 * Actor that periodically samples JVM metrics (uptime, heap/non-heap memory,
 * thread counts, class loading, buffer pools and garbage collectors) and
 * publishes them as a single [[MonitorEvent.Jvm]] on the event stream.
 */
class JvmMonitor(conf: StreamyConfig.JvmMonitor) extends Actor with ActorListener with Timers {

  // Schedule the periodic sampling tick at the configured refresh interval.
  override def preStart(): Unit = {
    timers.startTimerWithFixedDelay(JvmMonitor.TickKey, JvmMonitor.Tick, conf.refreshInterval)
  }

  override def receive: Receive = {
    // On each tick, take one snapshot of every metric and publish it as one event.
    case Tick => eventStream.publish(MonitorEvent.Jvm(
      timestamp = System.currentTimeMillis(),
      uptime = SystemAccess.RuntimeBean.getUptime,
      memHeapCommitted = JvmMonitor.getHeapCommitted,
      memHeapUsed = JvmMonitor.getHeapUsed,
      memHeapMax = JvmMonitor.getHeapMax,
      memNonHeapCommitted = JvmMonitor.getNonHeapCommitted,
      memNonHeapUsed = JvmMonitor.getNonHeapUsed,
      thread = SystemAccess.ThreadBean.getThreadCount,
      threadPeak = SystemAccess.ThreadBean.getPeakThreadCount,
      classLoaded = SystemAccess.ClassLoadingBean.getLoadedClassCount,
      classUnloaded = SystemAccess.ClassLoadingBean.getUnloadedClassCount,
      classLoadedTotal = SystemAccess.ClassLoadingBean.getTotalLoadedClassCount,
      bufferPools = JvmMonitor.getBufferPools,
      garbageCollectors = JvmMonitor.getGarbageCollectors
    ))
  }
}
/**
 * Jvm monitor companion: timer messages plus the metric-sampling helpers used
 * by the [[JvmMonitor]] actor. Memory readings are clamped to zero because
 * the memory MX beans may report -1 for undefined values (e.g. no max set).
 */
object JvmMonitor {

  // Timer key and tick message driving the actor's periodic schedule.
  private case object TickKey

  private case object Tick

  /** Heap memory currently used, in bytes (never negative). */
  private def getHeapUsed: Long =
    SystemAccess.MemoryBean.getHeapMemoryUsage.getUsed.max(0L)

  /** Heap memory committed by the JVM, in bytes (never negative). */
  private def getHeapCommitted: Long =
    SystemAccess.MemoryBean.getHeapMemoryUsage.getCommitted.max(0L)

  /** Maximum heap size, in bytes (never negative). */
  private def getHeapMax: Long =
    SystemAccess.MemoryBean.getHeapMemoryUsage.getMax.max(0L)

  /** Non-heap memory currently used, in bytes (never negative). */
  private def getNonHeapUsed: Long =
    SystemAccess.MemoryBean.getNonHeapMemoryUsage.getUsed.max(0L)

  /** Non-heap memory committed by the JVM, in bytes (never negative). */
  private def getNonHeapCommitted: Long =
    SystemAccess.MemoryBean.getNonHeapMemoryUsage.getCommitted.max(0L)

  /** Snapshot of every registered NIO buffer pool. */
  private def getBufferPools: Seq[MonitorEvent.Jvm.BufferPool] = {
    val beans = ManagementFactory.getPlatformMXBeans(classOf[BufferPoolMXBean]).asScala
    val pools = for (bean <- beans) yield MonitorEvent.Jvm.BufferPool(
      name = bean.getName,
      count = bean.getCount,
      memUsed = bean.getMemoryUsed,
      totalCapacity = bean.getTotalCapacity
    )
    pools.toSeq
  }

  /** Snapshot of every registered garbage collector. */
  private def getGarbageCollectors: Seq[MonitorEvent.Jvm.GarbageCollector] = {
    val collectors = for (bean <- SystemAccess.GarbageCollectorsBean.asScala) yield
      MonitorEvent.Jvm.GarbageCollector(
        name = bean.getName,
        collectionCount = bean.getCollectionCount,
        collectionTime = bean.getCollectionTime
      )
    collectors.toSeq
  }
}
| amannocci/streamy | core/src/main/scala/io/techcode/streamy/util/monitor/JvmMonitor.scala | Scala | mit | 4,485 |
package net.resonious.sburb.commands
import net.resonious.sburb.Sburb
import net.resonious.sburb.Structure
import net.resonious.sburb.abstracts.ActiveCommand
import net.resonious.sburb.abstracts.SburbException
import net.resonious.sburb.commands.SburbCommand.PlayerWithChat
import net.resonious.sburb.game.SburbGame
import net.resonious.sburb.game.SburbProperties
import net.minecraft.util.ChunkCoordinates
import scala.collection.JavaConverters.seqAsJavaListConverter
import net.minecraft.command.ICommandSender
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.block.Block
import net.resonious.sburb.game.After
import net.minecraft.server.MinecraftServer
import net.minecraft.item.ItemStack
import net.resonious.sburb.items.SburbDisc
import net.minecraft.server.management.ServerConfigurationManager
import net.minecraft.util.RegistryNamespaced
import net.minecraft.util.RegistrySimple
import net.minecraft.util.ObjectIntIdentityMap
import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap
import scala.math
/**
 * `/homestuck [house name]` command: enrolls the invoking player in a Sburb
 * game, generates their house asynchronously, and teleports them (or defers
 * the teleport to next login if they disconnect mid-generation).
 */
object HomestuckCommand extends ActiveCommand {
  override def getCommandName() = "homestuck"

  override def getCommandUsage(sender: ICommandSender) = "/homestuck [house name]"

  override def getCommandAliases() = List("homestuck", "hivebent", "housetrapped").asJava

  // Only actual players may run the command (the sender is cast below).
  override def canCommandSenderUseCommand(sender: ICommandSender) = {
    sender match {
      case player: EntityPlayer => true
      case _ => false
    }
  }

  override def addTabCompletionOptions(sender: ICommandSender, args: Array[String]) = {
    null
  }

  override def isUsernameIndex(args: Array[String], i: Int) = false

  /**
   * Entry point. Safe to cast: canCommandSenderUseCommand guarantees the
   * sender is an EntityPlayer.
   */
  override def processCommand(sender: ICommandSender, args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    val player = sender.asInstanceOf[EntityPlayer]
    val props = SburbProperties of player
    if (props.hasGame) {
      if (props.gameEntry.houseCurrentlyBeingGenerated)
        player chat "Still working on generating your house, don't worry."
      else
        player chat "You're already in!"
      return
    }

    // Reuse the single existing game if there is one; otherwise start fresh.
    val game = Sburb.games.size match {
      case 0 => Sburb.newGame
      case 1 => Sburb.games.values.iterator.next
      case _ => throw new SburbException("Don't know what to do with more than 1 game yet!")
    }

    val houseName = if (args.length > 0) args(0) else game.randomHouseName
    if (game.newPlayer(player, houseName, true)) {
      // Keep track of this in case the player dies or something during the process...
      val playerName = player.getCommandSenderName
      // Keep track of game entry separately from properties for same reason.
      val gameEntry = props.gameEntry
      val house = gameEntry.house
      gameEntry.houseCurrentlyBeingGenerated = true

      Sburb log "Generating "+house.name+" for "+player.getCommandSenderName
      player chat "Looking for a good spot to place your new house..."

      house onceLoaded { _ =>
        gameEntry.houseCurrentlyBeingGenerated = false

        // Re-resolve by name: the original player object may be stale/gone.
        Sburb playerOfName playerName match {
          case null => {
            // Player logged off during generation: defer disc + spawn setup.
            gameEntry.needsSburbDisc = true
            gameEntry.spawnPointDirty = true
            Sburb log "Finished house for "+playerName+". They will be there once they log back on."
          }
          case player => {
            // `player` here shadows the outer one on purpose (fresh lookup).
            player.inventory.addItemStackToInventory(new ItemStack(SburbDisc, 1))

            val housePos = props.gameEntry.house.spawn
            player.setPositionAndUpdate(
              housePos.x,
              housePos.y,
              housePos.z)
            val coords = new ChunkCoordinates(housePos.x, housePos.y, housePos.z)
            player.setSpawnChunk(coords, true, 0)

            After(1, 'second) execute { player chat "Welcome home." }
          }
        }
      }

      house whenTakingAwhile { numberOfAttempts =>
        // Best-effort progress chat: the player may have logged off, so chat
        // failures are ignored — but only non-fatal ones. Swallowing Throwable
        // (as before) would also hide OutOfMemoryError/InterruptedException.
        try if (numberOfAttempts == 1 || numberOfAttempts % 3 == 0)
          player chat "Don't worry, still working on it..."
        else if (numberOfAttempts == 5)
          player chat "Sometimes this just takes an absurd amount of time. I'm sorry."
        catch {
          case NonFatal(_) =>
        }
      }
    }
    else
      throw new SburbException("Something went wrong adding "+player.getDisplayName+" to a Sburb game!")
  }
}
| Resonious/mcsburb | src/main/scala/net/resonious/sburb/commands/HomestuckCommand.scala | Scala | mit | 4,286 |
package com.kleggett.examples.modeling.persistence
import java.sql.{Connection, PreparedStatement, ResultSet}
import com.kleggett.db.util.ScalaSqlUtils._
import com.kleggett.examples.modeling.model.Car
/**
 * Example implementation of the JdbcCrudDAO.
 *
 * All statements use positional ("?") placeholders, so the binding order in
 * each prep* callback must match the statement text exactly (JDBC parameter
 * indices are 1-based).
 *
 * @author K. Leggett
 * @since 1.0 (6/14/15 5:33 PM)
 */
class JdbcCarDAO(override val connection: Connection) extends JdbcVehicleDAO[Car]
{
  override protected val insertSQL =
    """insert into cars(vin, make, model, doors)
      |values (?, ?, ?, ?)
    """.stripMargin

  override protected val updateSQL =
    """update cars set
      |make = ?,
      |model = ?,
      |doors = ?
      |where vin = ?
    """.stripMargin

  override protected val deleteByIdSQL = "delete from cars where vin = ?"

  override protected val getByIdSQL = "select * from cars where vin = ?"

  /** Maps the current row of `rs` onto a [[Car]]. */
  override protected def populate(rs: ResultSet): Car = {
    Car(rs.getString("vin"), rs.getString("make"), rs.getString("model"), rs.getInt("doors"))
  }

  /** Binds insert parameters in the order declared by [[insertSQL]]. */
  override protected def prepInsert(t: Car): (PreparedStatement) => Unit = {
    (ps: PreparedStatement) => {
      ps.setString(1, t.vin)
      ps.setString(2, t.make)
      ps.setString(3, t.model)
      ps.setInt(4, t.nDoors)
    }
  }

  /**
   * Binds update parameters in the order declared by [[updateSQL]]:
   * make (1), model (2), doors (3), then vin (4) for the where clause.
   *
   * Fix: the previous version bound doors at index 4 and vin at index 5,
   * which left placeholder 3 unbound and referenced a non-existent fifth
   * parameter — updateSQL only declares four "?" markers — so every update
   * failed at execution time with a SQLException.
   */
  override protected def prepUpdate(t: Car): (PreparedStatement) => Unit = {
    (ps: PreparedStatement) => {
      ps.setString(1, t.make)
      ps.setString(2, t.model)
      ps.setInt(3, t.nDoors)
      ps.setString(4, t.vin)
    }
  }

  /** Binds the vin for the delete-by-id and get-by-id statements. */
  override protected def prepId(id: String): (PreparedStatement) => Unit = prepSingleString(id)
}
| kleggett/casemodeling | src/main/scala/com/kleggett/examples/modeling/persistence/JdbcCarDAO.scala | Scala | cc0-1.0 | 1,584 |
package com.pwootage.fasterflux.items
import com.pwootage.fasterflux.blocks.FasterFluxBlocks
import com.pwootage.fasterflux.blocks.data.FFMultiBlockType
import com.pwootage.fasterflux.config.FasterFluxConfig
import Reika.DragonAPI.ModInteract.ThermalRecipeHelper
import cpw.mods.fml.common.event.FMLInitializationEvent
import cpw.mods.fml.common.registry.GameRegistry
import net.minecraft.item.Item
import net.minecraft.item.ItemBucket
import net.minecraft.item.ItemStack
import net.minecraftforge.common.MinecraftForge
import net.minecraftforge.fluids.FluidContainerRegistry
import net.minecraftforge.fluids.FluidRegistry
import net.minecraftforge.fluids.FluidStack
import net.minecraftforge.oredict.ShapedOreRecipe
import com.pwootage.fasterflux.FasterFlux
/**
 * Creates and registers all FasterFlux items, fluid containers, and crafting
 * / Thermal Expansion recipes. Runs entirely from the constructor during the
 * FML init phase.
 */
class FasterFluxItems(conf: FasterFluxConfig, event: FMLInitializationEvent, blocks: FasterFluxBlocks) {
  // Basic craftable items; item ids come from the mod configuration.
  val superconductingWire = new ItemBasicCraftable(conf.scWire, "fasterflux:scwire", "ff.scwire")
  val superconductingWireUnfilled = new ItemBasicCraftable(conf.scWireEmpty, "fasterflux:scwireempty", "ff.scwire.empty")
  val basePlate = new ItemBasicCraftable(conf.basePlate, "fasterflux:baseplate", "ff.baseplate")
  val computerCore = new ItemBasicCraftable(conf.computerCore, "fasterflux:computercore", "ff.computercore")
  val scNetherAssembly = new ItemBasicCraftable(conf.scNetherAssembly, "fasterflux:scnetherassembly", "ff.scnetherassembly")
  val scNetherUnstable = new ItemBasicCraftable(conf.scNetherUnstable, "fasterflux:scnetherunstable", "ff.scnetherunstable")
  val scNetherStable = new ItemBasicCraftable(conf.scNetherStable, "fasterflux:scnetherstable", "ff.scnetherstable")

  // Bucket item for the mod's liquid-nether fluid block.
  val liquidNetherBucket = new ItemBucket(conf.liquidNetherBucket, blocks.liquidNetherBlock.blockID) {
    setUnlocalizedName("ff.liquid.star.bucket")
    setTextureName("fasterflux:liquidnetherbucket")
    setCreativeTab(FasterFlux.tab)
  }

  val liquidHandler = new LiquidHandler();
  MinecraftForge.EVENT_BUS.register(liquidHandler)

  //Need to allow it with the whole bucket handler thing I think
  GameRegistry.registerItem(superconductingWire, superconductingWire.getUnlocalizedName())
  GameRegistry.registerItem(superconductingWireUnfilled, superconductingWireUnfilled.getUnlocalizedName())
  GameRegistry.registerItem(liquidNetherBucket, liquidNetherBucket.getUnlocalizedName())

  FluidContainerRegistry.registerFluidContainer(
    blocks.liquidNether,
    new ItemStack(liquidNetherBucket),
    new ItemStack(Item.bucketEmpty))

  // Fluids looked up from other mods' registrations.
  // NOTE(review): `redstone` and `ender` are never referenced below — dead
  // lookups, or intended for recipes that were never added? Confirm.
  val redstone = FluidRegistry.getFluid("redstone")
  val cryotheum = FluidRegistry.getFluid("cryotheum")
  val ender = FluidRegistry.getFluid("ender")

  // NOTE(review): the pattern uses only 'e' and 'd', yet 'c' (dustCryotheum)
  // is also mapped. Either the mapping is dead or the middle row was meant to
  // include 'c' (e.g. "dcd") — confirm intended recipe.
  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(superconductingWireUnfilled, 6),
    "eee",
    "ddd",
    "eee",
    Character.valueOf('e'), "ingotEnderium",
    Character.valueOf('c'), "dustCryotheum",
    Character.valueOf('d'), Item.diamond))

  // Nether star melts into 500 mB of liquid nether in the magma crucible.
  ThermalRecipeHelper.addCrucibleRecipe(
    new ItemStack(Item.netherStar, 1),
    new FluidStack(blocks.liquidNether, 500),
    240000)

  // Fluid transposer fills an empty wire with 100 mB of liquid nether.
  ThermalRecipeHelper.addFluidTransposerFill(
    new ItemStack(superconductingWireUnfilled, 1),
    new ItemStack(superconductingWire, 1),
    1000,
    new FluidStack(blocks.liquidNether, 100),
    false)

  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(scNetherAssembly, 1),
    "ese",
    "sts",
    "ese",
    Character.valueOf('e'), "ingotEnderium",
    Character.valueOf('s'), new ItemStack(superconductingWire, 1),
    Character.valueOf('t'), new ItemStack(Item.netherStar, 1)))

  // Smelting the assembly yields the unstable variant.
  GameRegistry.addSmelting(scNetherAssembly.itemID, new ItemStack(scNetherUnstable, 1), 20)

  // Stabilize with 2000 mB of liquid nether in the fluid transposer.
  ThermalRecipeHelper.addFluidTransposerFill(
    new ItemStack(scNetherUnstable, 1),
    new ItemStack(scNetherStable, 1),
    5000,
    new FluidStack(blocks.liquidNether, 2000),
    false)

  // NOTE(review): the pattern references 'd' (center slot) but no mapping for
  // 'd' is supplied — ShapedOreRecipe is expected to reject patterns with
  // unmapped symbols at registration time. Likely a missing ingredient
  // binding (diamond?). Confirm and fix the intended center item.
  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(basePlate, 4),
    "ses",
    "ede",
    "ses",
    Character.valueOf('e'), "ingotEnderium",
    Character.valueOf('s'), new ItemStack(superconductingWire, 1)))

  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(blocks.mb1, 2, FFMultiBlockType.BATTERY_CASE.meta),
    " p ",
    "pep",
    " p ",
    Character.valueOf('p'), new ItemStack(basePlate, 1),
    Character.valueOf('e'), "ingotEnderium"))

  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_OUTPUT.meta),
    " p ",
    "psp",
    " p ",
    Character.valueOf('p'), new ItemStack(basePlate, 1),
    Character.valueOf('s'), new ItemStack(superconductingWire, 1)))

  // Input and output battery ports convert freely into each other.
  GameRegistry.addShapelessRecipe(
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_OUTPUT.meta),
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_INPUT.meta))
  GameRegistry.addShapelessRecipe(
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_INPUT.meta),
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_OUTPUT.meta))

  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_ANODE.meta),
    "sps",
    "ptp",
    "sps",
    Character.valueOf('p'), new ItemStack(basePlate, 1),
    Character.valueOf('t'), new ItemStack(scNetherStable, 1),
    Character.valueOf('s'), new ItemStack(superconductingWire, 1)))

  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(computerCore, 1),
    "sss",
    "sts",
    "sps",
    Character.valueOf('s'), new ItemStack(superconductingWire, 1),
    Character.valueOf('t'), new ItemStack(scNetherStable, 1),
    Character.valueOf('p'), new ItemStack(basePlate, 1)))

  GameRegistry.addRecipe(new ShapedOreRecipe(
    new ItemStack(blocks.mb1, 1, FFMultiBlockType.BATTERY_CONTROLLER.meta),
    " p ",
    "pcp",
    " p ",
    Character.valueOf('p'), new ItemStack(basePlate, 1),
    Character.valueOf('c'), new ItemStack(computerCore, 1)))
}
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package impl
import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.util.Date
import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator
import com.lightbend.lagom.scaladsl.server._
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import play.api.libs.ws.ahc.AhcWSComponents
import api.FooService
import com.softwaremill.macwire._
/** Wires the Foo service into Lagom for both production and dev-mode startup. */
class FooLoader extends LagomApplicationLoader {

  // Production wiring: no service locator is configured in this sample.
  override def load(context: LagomApplicationContext): LagomApplication =
    new FooApplication(context) {
      override def serviceLocator = NoServiceLocator
    }

  // Dev-mode wiring: mixes in the Lagom dev-mode service locator components.
  override def loadDevMode(context: LagomApplicationContext): LagomApplication =
    new FooApplication(context) with LagomDevModeComponents
}
/** Shared application cake: binds FooService and logs every (re)load. */
abstract class FooApplication(context: LagomApplicationContext) extends LagomApplication(context) with AhcWSComponents {

  // Bind the FooService API descriptor to its macwire-built implementation.
  override lazy val lagomServer = serverFor[FooService](wire[FooServiceImpl])

  // Constructor side effect: append a timestamped line on every application
  // start so external tooling can observe that the service was (re)loaded.
  Files.write(
    environment.getFile("target/reload.log").toPath,
    s"${new Date()} - reloaded\\n".getBytes("utf-8"),
    StandardOpenOption.CREATE,
    StandardOpenOption.APPEND
  )
}
| rcavalcanti/lagom | dev/sbt-plugin/src/sbt-test/sbt-plugin/run-all-scaladsl/a/impl/src/main/scala/impl/FooLoader.scala | Scala | apache-2.0 | 1,218 |
package com.jamontes79.scala.movielist
/*
import com.fortysevendeg.android.scaladays.model.Speaker
import com.fortysevendeg.android.scaladays.modules.ComponentRegistryImpl
import com.fortysevendeg.android.scaladays.ui.commons.AnalyticStrings._
import com.fortysevendeg.android.scaladays.ui.commons.{ListLayout, UiServices, LineItemDecorator}
*/
import android.app.Activity
import android.content.Intent
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v4.view.ViewCompat
import android.view._
import com.fortysevendeg.macroid.extras.ImageViewTweaks._
import com.jamontes79.scala.movielist.entities.Movie
import com.jamontes79.scala.movielist.layouts.ImageFragmentLayout
import com.jamontes79.scala.movielist.utils.AsyncImageTweaks._
import com.jamontes79.scala.movielist.utils.MyUtils
import macroid.FullDsl._
import macroid.{ContextWrapper, Contexts}
/**
* Created by alberto on 2/9/15.
*/
/**
 * Fragment that shows a movie's cover image full-screen, using a shared-
 * element transition keyed by MyUtils.EXTRA_IMAGE.
 */
class ImageFragment extends Fragment
  with Contexts[Fragment]
  with ImageFragmentLayout
  with MyUtils
{
  lazy val contextProvider: ContextWrapper = fragmentContextWrapper

  // Movie being displayed; replaced from the activity intent in onCreateView.
  var currentMovie: Movie = new Movie

  override def onCreateView(inflater: LayoutInflater, container: ViewGroup, savedInstanceState: Bundle): View = {
    setHasOptionsMenu(true)
    // Pull the optional image url and movie object out of the intent extras.
    // NOTE(review): `maybeimage` is computed but never used afterwards.
    val (maybeimage, maybepelicula) = Option(getActivity.getIntent.getExtras) map {
      extras =>
        val image: Option[String] = if (extras.containsKey(MyUtils.EXTRA_IMAGE))
          Some(extras.getString(MyUtils.EXTRA_IMAGE))
        else None
        val movie: Option[Movie] = if (extras.containsKey(MyUtils.EXTRA_OBJECT))
          Some(extras.getSerializable(MyUtils.EXTRA_OBJECT).asInstanceOf[Movie])
        else None
        (image, movie)
    } getOrElse ((None, None))
    // NOTE(review): `.get` throws if the launching intent lacks EXTRA_OBJECT —
    // apparently this fragment is only ever started with a movie extra; confirm.
    currentMovie = maybepelicula.get
    content
  }

  override def onViewCreated(view: View, savedInstanceState: Bundle): Unit = {
    super.onViewCreated(view, savedInstanceState)
    getActivity.setTitle(currentMovie.title)
    // Load the cover asynchronously, falling back to a "not available" image.
    runUi(
      imageOnView <~
        (currentMovie.cover map {
          srcImageFile(_, R.drawable.placeholder_square, Some(R.drawable.no_disponible))
        } getOrElse ivSrc(R.drawable.no_disponible))
    )
    // Name the view so the shared-element transition can find it.
    ViewCompat.setTransitionName(imageOnView.get, MyUtils.EXTRA_IMAGE)
  }

  override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent): Unit = {
    super.onActivityResult(requestCode, resultCode, data)
  }

  // Back arrow closes the activity with RESULT_CANCELED.
  override def onOptionsItemSelected(item: MenuItem): Boolean = {
    if (item.getItemId == android.R.id.home){
      getActivity.setResult(Activity.RESULT_CANCELED)
      getActivity.finish()
      true
    }
    else {
      super.onOptionsItemSelected(item)
    }
  }
}
package app
import _root_.util.Directory._
import _root_.util.Implicits._
import _root_.util.ControlUtil._
import _root_.util.{FileUtil, Validations, Keys}
import org.scalatra._
import org.scalatra.json._
import org.json4s._
import jp.sf.amateras.scalatra.forms._
import org.apache.commons.io.FileUtils
import model.Account
import scala.Some
import service.AccountService
import javax.servlet.http.{HttpServletResponse, HttpSession, HttpServletRequest}
import java.text.SimpleDateFormat
import javax.servlet.{FilterChain, ServletResponse, ServletRequest}
/**
* Provides generic features for controller implementations.
*/
abstract class ControllerBase extends ScalatraFilter
  with ClientSideValidationFormSupport with JacksonJsonSupport with Validations {

  implicit val jsonFormats = DefaultFormats

  // Don't set content type via Accept header.
  override def format(implicit request: HttpServletRequest) = ""

  // Routes each request into one of three pipelines: the H2 console
  // (admins only), raw git repository access, or normal Scalatra actions.
  override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain) {
    val httpRequest = request.asInstanceOf[HttpServletRequest]
    val httpResponse = response.asInstanceOf[HttpServletResponse]
    val context = request.getServletContext.getContextPath
    val path = httpRequest.getRequestURI.substring(context.length)

    if(path.startsWith("/console/")){
      val account = httpRequest.getSession.getAttribute(Keys.Session.LoginAccount).asInstanceOf[Account]
      if(account == null){
        // Redirect to login form
        httpResponse.sendRedirect(context + "/signin?" + path)
      } else if(account.isAdmin){
        // H2 Console (administrators only)
        chain.doFilter(request, response)
      } else {
        // Redirect to dashboard
        httpResponse.sendRedirect(context + "/")
      }
    } else if(path.startsWith("/git/")){
      // Git repository
      chain.doFilter(request, response)
    } else {
      // Scalatra actions
      super.doFilter(request, response, chain)
    }
  }

  /**
   * Returns the context object for the request.
   */
  implicit def context: Context = Context(servletContext.getContextPath, LoginAccount, currentURL, request)

  // Request URI plus the query string, when one is present.
  private def currentURL: String = defining(request.getQueryString){ queryString =>
    request.getRequestURI + (if(queryString != null) "?" + queryString else "")
  }

  // Currently signed-in account, if any, read from the HTTP session.
  private def LoginAccount: Option[Account] = session.getAs[Account](Keys.Session.LoginAccount)

  // The ajax* variants mark the request as AJAX before running the action so
  // that NotFound/Unauthorized below can skip HTML error pages and redirects.
  def ajaxGet(path : String)(action : => Any) : Route =
    super.get(path){
      request.setAttribute(Keys.Request.Ajax, "true")
      action
    }

  override def ajaxGet[T](path : String, form : MappingValueType[T])(action : T => Any) : Route =
    super.ajaxGet(path, form){ form =>
      request.setAttribute(Keys.Request.Ajax, "true")
      action(form)
    }

  def ajaxPost(path : String)(action : => Any) : Route =
    super.post(path){
      request.setAttribute(Keys.Request.Ajax, "true")
      action
    }

  override def ajaxPost[T](path : String, form : MappingValueType[T])(action : T => Any) : Route =
    super.ajaxPost(path, form){ form =>
      request.setAttribute(Keys.Request.Ajax, "true")
      action(form)
    }

  // Plain 404 for AJAX calls; HTML error page otherwise.
  protected def NotFound() =
    if(request.hasAttribute(Keys.Request.Ajax)){
      org.scalatra.NotFound()
    } else {
      org.scalatra.NotFound(html.error("Not Found"))
    }

  // 401 for AJAX; otherwise redirect signed-in users home and anonymous
  // users to the sign-in form (GETs keep the original URL to return to).
  protected def Unauthorized()(implicit context: app.Context) =
    if(request.hasAttribute(Keys.Request.Ajax)){
      org.scalatra.Unauthorized()
    } else {
      if(context.loginAccount.isDefined){
        org.scalatra.Unauthorized(redirect("/"))
      } else {
        if(request.getMethod.toUpperCase == "POST"){
          org.scalatra.Unauthorized(redirect("/signin"))
        } else {
          org.scalatra.Unauthorized(redirect("/signin?redirect=" + currentURL))
        }
      }
    }

  // Scheme://host[:port] + context path, derived by trimming the servlet path.
  protected def baseUrl = defining(request.getRequestURL.toString){ url =>
    url.substring(0, url.length - (request.getRequestURI.length - request.getContextPath.length))
  }
}
/**
 * Context object for the current request.
 *
 * @param path the servlet context path (application root)
 * @param loginAccount the signed-in account, if any
 * @param currentUrl the requested URL including its query string
 * @param request the underlying HTTP request
 */
case class Context(path: String, loginAccount: Option[Account], currentUrl: String, request: HttpServletRequest){

  // URL to return to after signin: an explicit "redirect" parameter wins,
  // otherwise fall back to the URL that was originally requested.
  def redirectUrl = if(request.getParameter("redirect") != null){
    request.getParameter("redirect")
  } else {
    currentUrl
  }

  /**
   * Get object from cache.
   *
   * If object has not been cached with the specified key then retrieves by given action.
   * Cached object are available during a request.
   */
  def cache[A](key: String)(action: => A): A =
    defining(Keys.Request.Cache(key)){ cacheKey =>
      // Request attributes serve as the per-request cache store.
      Option(request.getAttribute(cacheKey).asInstanceOf[A]).getOrElse {
        val newObject = action
        request.setAttribute(cacheKey, newObject)
        newObject
      }
    }
}
/**
 * Base trait for controllers which manages account information.
 */
trait AccountManagementControllerBase extends ControllerBase with FileUploadControllerBase {
  self: AccountService =>

  /**
   * Updates (or clears) the avatar image of the given user.
   *
   * When clearImage is true the current image file is deleted and the account
   * record cleared; otherwise, if a temporary upload id is given, the uploaded
   * file is moved into the user's upload directory and registered.
   */
  protected def updateImage(userName: String, fileId: Option[String], clearImage: Boolean): Unit =
    if(clearImage){
      getAccountByUserName(userName).flatMap(_.image).map { image =>
        // NOTE(review): File.delete() result is ignored - a stale file may remain on disk.
        new java.io.File(getUserUploadDir(userName), image).delete()
        updateAvatarImage(userName, None)
      }
    } else {
      fileId.map { fileId =>
        // Normalize the stored name to "avatar.<original extension>".
        val filename = "avatar." + FileUtil.getExtension(getUploadedFilename(fileId).get)
        FileUtils.moveFile(
          getTemporaryFile(fileId),
          new java.io.File(getUserUploadDir(userName), filename)
        )
        updateAvatarImage(userName, Some(filename))
      }
    }

  // Form constraint: rejects a user name that is already taken.
  protected def uniqueUserName: Constraint = new Constraint(){
    override def validate(name: String, value: String): Option[String] =
      getAccountByUserName(value).map { _ => "User already exists." }
  }

  // Form constraint: rejects a mail address registered to another account.
  // When paramName is non-empty it names the request parameter holding the
  // user whose own address is exempt from the check (edit-profile case).
  protected def uniqueMailAddress(paramName: String = ""): Constraint = new Constraint(){
    override def validate(name: String, value: String, params: Map[String, String]): Option[String] =
      getAccountByMailAddress(value)
        .filter { x => if(paramName.isEmpty) true else Some(x.userName) != params.get(paramName) }
        .map { _ => "Mail address is already registered." }
  }
}
/**
 * Base trait for controllers which needs file uploading feature.
 */
trait FileUploadControllerBase {

  /**
   * Generates a timestamp-based id for a temporary upload.
   *
   * Fix: the previous pattern "yyyyMMddHHmmSSsss" transposed SimpleDateFormat
   * fields - 'S' is the millisecond field and 's' the second field - so it
   * rendered minute, millisecond, then second. "yyyyMMddHHmmssSSS" yields the
   * intended chronologically ordered timestamp down to the millisecond.
   * NOTE(review): two uploads in the same millisecond still collide.
   */
  def generateFileId: String =
    new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new java.util.Date(System.currentTimeMillis))

  /** Per-session working directory for temporary uploads. */
  def TemporaryDir(implicit session: HttpSession): java.io.File =
    new java.io.File(GitBucketHome, s"tmp/_upload/${session.getId}")

  /** Temporary file backing the given upload id. */
  def getTemporaryFile(fileId: String)(implicit session: HttpSession): java.io.File =
    new java.io.File(TemporaryDir, fileId)

//  def removeTemporaryFile(fileId: String)(implicit session: HttpSession): Unit =
//    getTemporaryFile(fileId).delete()

  /** Removes the whole per-session upload directory. */
  def removeTemporaryFiles()(implicit session: HttpSession): Unit =
    FileUtils.deleteDirectory(TemporaryDir)

  /** Original client-side file name of the upload; consumes the session entry. */
  def getUploadedFilename(fileId: String)(implicit session: HttpSession): Option[String] =
    session.getAndRemove[String](Keys.Session.Upload(fileId))
} | libin/gitbucket | src/main/scala/app/ControllerBase.scala | Scala | apache-2.0 | 7,142 |
package infra.piece.sound
import play.api.Plugin
import infra.piece.core.{Pieces, Piece, PieceKind}
import play.api.libs.json.{Json, Format}
import play.api.templates.Html
import scala.concurrent.{Future, ExecutionContext}
import infra.piece.core.Pieces.File
/**
 * Content-piece plugin handling MP3 sound attachments.
 *
 * @author alari (name.alari@gmail.com)
 * @since 08.05.14 14:34
 */
class SoundKind(implicit app: play.api.Application) extends PieceKind("sound") with Plugin{
  override type P = SoundPiece

  // Already-parsed sound pieces pass through unchanged.
  override def handlePiece(implicit ec: ExecutionContext): PartialFunction[Piece, Future[P]] = {
    case p: P => Future.successful(p)
  }

  override def html(piece: P): Html = infra.piece.sound.html.sound(piece)

  override val format: Format[P] = Json.format[P]

  // Accepts a file only when BOTH the MIME type is an MP3 variant AND the name
  // ends in ".mp3"; the stored copy's content type is normalized to "audio/mpeg".
  override def handleFile(userId: String)(implicit ec: ExecutionContext): PartialFunction[File, Future[P]] = {
    case file if file.contentType.exists(t => t == "audio/mpeg" || t == "audio/mp3") && file.filename.endsWith(".mp3") =>
      Pieces.fileStorage
        .store(file.copy(contentType = Some("audio/mpeg")), userId)
        .map(fid => SoundPiece(None, fid, file.filename, Seq(fid)))
  }
}
| alari/play-content | module-code/app/infra/piece/sound/SoundKind.scala | Scala | mit | 1,124 |
package io.iteratee
import cats.Monad
import cats.arrow.{ Category, Profunctor }
/**
 * Cats type-class instances for Enumeratee.
 *
 * `Enumeratee[F, ?, ?]` is kind-projector syntax partially applying the
 * three-parameter type constructor.
 */
private[iteratee] trait EnumerateeInstances {
  /** A single instance providing both Category and Profunctor for enumeratees over a monad F. */
  implicit final def enumerateeInstance[F[_]](implicit F: Monad[F]):
    Category[Enumeratee[F, ?, ?]] with
    Profunctor[Enumeratee[F, ?, ?]] =
      new Category[Enumeratee[F, ?, ?]] with Profunctor[Enumeratee[F, ?, ?]] {
        final def id[A]: Enumeratee[F, A, A] = Enumeratee.identity[F, A]
        // Category composition is enumeratee chaining: run g, then f.
        final def compose[A, B, C](f: Enumeratee[F, B, C], g: Enumeratee[F, A, B]): Enumeratee[F, A, C] = g.andThen(f)
        final def dimap[A, B, C, D](fab: Enumeratee[F, A, B])(f: C => A)(g: B => D): Enumeratee[F, C, D] =
          fab.map(g).contramap(f)
      }
}
| flyingwalrusllc/iteratee | core/src/main/scala/io/iteratee/EnumerateeInstances.scala | Scala | apache-2.0 | 685 |
package scala.lms
package epfl
package test8
import common._
import test1._
import test7.{Print,PrintExp,ScalaGenPrint}
import test7.{ArrayLoops,ArrayLoopsExp,ScalaGenArrayLoops}
import util.OverloadHack
import java.io.{PrintWriter,StringWriter,FileOutputStream}
/*
if there's a crash here during compilation, it's likely due to #4363 (need latest scala-virtualized for fix)
*/
/** Staged array mutation operations, layered on top of ArrayLoops. */
trait ArrayMutation extends ArrayLoops with PrimitiveOps {
  // In-place element update (a write effect on `a`).
  def infix_update[T:Typ](a: Rep[Array[T]], i: Rep[Int], x: Rep[T]): Rep[Unit]
  // Mutable copy of `a`; the result may subsequently be written to.
  def infix_mutable[T:Typ](a: Rep[Array[T]]): Rep[Array[T]]
  // Immutable (shallow) copy of `a`.
  def infix_clone[T:Typ](a: Rep[Array[T]]): Rep[Array[T]]
}
/**
 * Expression-level implementation of ArrayMutation.
 *
 * The four *Syms overrides feed LMS's effect/aliasing analysis. Presumed
 * semantics (NOTE(review): confirm against the LMS framework docs):
 * aliasSyms - arguments the result may alias; containSyms - symbols that
 * become reachable through the result; extractSyms - symbols the result
 * reads out of; copySyms - symbols whose contents are copied into the result.
 */
trait ArrayMutationExp extends ArrayMutation with PrimitiveOpsExp with ArrayLoopsExp {

  case class ArrayUpdate[T](a: Rep[Array[T]], i: Rep[Int], x: Rep[T]) extends Def[Unit]
  case class ArrayMutable[T](a: Rep[Array[T]]) extends Def[Array[T]]
  case class ArrayClone[T](a: Rep[Array[T]]) extends Def[Array[T]]

  // update is registered as a write effect on `a`; mutable allocation is
  // tracked via reflectMutable; clone is a pure Def.
  def infix_update[T:Typ](a: Rep[Array[T]], i: Rep[Int], x: Rep[T]) = reflectWrite(a)(ArrayUpdate(a,i,x))
  def infix_mutable[T:Typ](a: Rep[Array[T]]) = reflectMutable(ArrayMutable(a))
  def infix_clone[T:Typ](a: Rep[Array[T]]) = ArrayClone(a)

  override def aliasSyms(e: Any): List[Sym[Any]] = e match {
    case SimpleLoop(s,i, ArrayElem(y)) => Nil
    case SimpleLoop(s,i, ReduceElem(y)) => syms(y) // could also return zero value
    case SimpleLoop(s,i, ArrayIfElem(c,y)) => Nil
    case SimpleLoop(s,i, ReduceIfElem(c,y)) => syms(y) // could also return zero value
    case ArrayIndex(a,i) => Nil
    case ArrayLen(a) => Nil
    case ArrayUpdate(a,i,x) => Nil // syms(a) <-- any use to return a?
    case ArrayMutable(a) => Nil
    case ArrayClone(a) => Nil
    case _ => super.aliasSyms(e)
  }

  override def containSyms(e: Any): List[Sym[Any]] = e match {
    case SimpleLoop(s,i, ArrayElem(y)) => syms(y)
    case SimpleLoop(s,i, ReduceElem(y)) => Nil
    case SimpleLoop(s,i, ArrayIfElem(c,y)) => syms(y)
    case SimpleLoop(s,i, ReduceIfElem(c,y)) => Nil
    case ArrayIndex(a,i) => Nil
    case ArrayLen(a) => Nil
    case ArrayUpdate(a,i,x) => syms(x)
    case ArrayMutable(a) => Nil
    case ArrayClone(a) => Nil
    case _ => super.containSyms(e)
  }

  override def extractSyms(e: Any): List[Sym[Any]] = e match {
    case SimpleLoop(s,i, ArrayElem(y)) => Nil
    case SimpleLoop(s,i, ReduceElem(y)) => Nil
    case SimpleLoop(s,i, ArrayIfElem(c,y)) => Nil
    case SimpleLoop(s,i, ReduceIfElem(c,y)) => Nil
    case ArrayIndex(a,i) => syms(a)
    case ArrayLen(a) => Nil
    case ArrayUpdate(a,i,x) => Nil
    case ArrayMutable(a) => Nil
    case ArrayClone(a) => Nil
    case _ => super.extractSyms(e)
  }

  override def copySyms(e: Any): List[Sym[Any]] = e match {
    case SimpleLoop(s,i, ArrayElem(y)) => Nil
    case SimpleLoop(s,i, ReduceElem(y)) => Nil
    case SimpleLoop(s,i, ArrayIfElem(c,y)) => Nil
    case SimpleLoop(s,i, ReduceIfElem(c,y)) => Nil
    case ArrayIndex(a,i) => Nil
    case ArrayLen(a) => Nil
    case ArrayUpdate(a,i,x) => syms(a)
    case ArrayMutable(a) => syms(a)
    case ArrayClone(a) => syms(a)
    case _ => super.copySyms(e)
  }
}
/** Scala code generation for the array-mutation IR nodes. */
trait ScalaGenArrayMutation extends ScalaGenArrayLoops {
  val IR: ArrayMutationExp
  import IR._

  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case ArrayUpdate(a,i,x) =>
      emitValDef(sym, quote(a) + ".update(" + quote(i) + ", " + quote(x) + ")")
    case ArrayMutable(a) =>
      // A mutable view is realized as a defensive copy of the source array.
      emitValDef(sym, quote(a) + ".clone // mutable")
    case ArrayClone(a) =>
      emitValDef(sym, quote(a) + ".clone")
    case _ => super.emitNode(sym, rhs)
  }
}
/**
 * Golden-file tests for the effect/aliasing analysis of the staged
 * array-mutation DSL: each test emits generated code / IR diagnostics to a
 * file and diffs it against a checked-in expected output. Several programs
 * are deliberately ill-formed ("error:" comments) - the expected output then
 * records the analysis rejecting them.
 */
class TestMutation extends FileDiffSuite {

  val prefix = home + "test-out/epfl/test8-"

  // Surface language used by the test programs.
  trait DSL extends ArrayMutation with PrimitiveOps with LiftPrimitives with OrderingOps with Variables with IfThenElse with While with RangeOps with Print {
    def zeros(l: Rep[Int]) = array(l) { i => 0 }
    def mzeros(l: Rep[Int]) = zeros(l).mutable
    def infix_toDouble(x: Rep[Int]): Rep[Double] = x.asInstanceOf[Rep[Double]]
    def test(x: Rep[Int]): Rep[Unit]
  }

  // Mixing in Impl stages `test` and prints the generated source immediately.
  trait Impl extends DSL with ArrayMutationExp with PrimitiveOpsExp with OrderingOpsExp with VariablesExp
      with BooleanOpsExp with StringOpsExp
      with IfThenElseExp with WhileExp with RangeOpsExp with PrintExp { self =>
    override val verbosity = 2
    val codegen = new ScalaGenArrayMutation with ScalaGenPrimitiveOps with ScalaGenOrderingOps
      with ScalaGenVariables with ScalaGenIfThenElse with ScalaGenWhile with ScalaGenRangeOps
      with ScalaGenPrint { val IR: self.type = self }
    codegen.emitSource(test, "Test", new PrintWriter(System.out))
  }

  def testMutation1 = {
    withOutFile(prefix+"mutation1") {
      // a write operation must unambigously identify the object being mutated
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val vector1 = mzeros(100)
          val vector2 = mzeros(100)
          val a = if (x > 7) vector1 else vector2
          a.update(40,40) // error: not clear which object is mutated (vector1 or vector2)
          print(a.at(50))
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation1")
  }

  def testMutation1b = {
    withOutFile(prefix+"mutation1b") {
      // a write operation must unambigously identify the object being mutated
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val vector1 = mzeros(100)
          val vector2 = mzeros(100)
          val a = if (x > 7) vector1 else vector2
          val a2 = a.mutable
          a2.update(40,40) // ok: we have made a copy
          print(a2.at(50))
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation1b")
  }

  def testMutation2 = {
    withOutFile(prefix+"mutation2") {
      // an operation that might read from mutable data v will be serialized with all writes to v
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val vector1 = mzeros(100)
          val vector2 = mzeros(100)
          val a = if (x > 7) vector1 else vector2
          val x0 = a.at(10)
          vector1.update(10,10) // must come after x0
          vector2.update(10,20) // must come after x0
          val x1 = a.at(10) // must come after both writes, no cse with x0
          print(x1-x0) // minus should not have effect dep
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation2")
  }

  def testMutation3 = {
    withOutFile(prefix+"mutation3") {
      // vars may not reference mutable objects
      trait Prog extends DSL with LiftVariables {
        def test(x: Rep[Int]) = {
          var a = zeros(100)
          val b = mzeros(100)
          for (i <- 0 until b.length) { // this is also a curious case: range creation must not be reflected
            val x1 = a.at(i)
            b.update(i,8)
            val x2 = a.at(i) // must be cse'd
            a = b // error: here we learn that reads on a would need to be serialized with b but it's too late...
          }
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation3")
  }

  def testMutation3b = {
    withOutFile(prefix+"mutation3b") {
      // vars may not reference mutable objects
      trait Prog extends DSL with LiftVariables {
        def test(x: Rep[Int]) = {
          var a = zeros(100)
          val b = mzeros(100)
          for (i <- 0 until b.length) {
            val x1 = a.at(i)
            b.update(i,8)
            val x2 = a.at(i) // must be cse'd
            a = b.clone // ok: making a copy
          }
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation3b")
  }

  def testMutation4 = {
    withOutFile(prefix+"mutation4") {
      // mutable objects cannot be nested
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val a = mzeros(100)
          val b = array(10) { i => a } // nested array
          val b1 = b.mutable // error: internal arrays are mutable on their own
          val x1 = b1.at(5).at(50)
          print(x1)
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation4")
  }

  def testMutation4b = {
    withOutFile(prefix+"mutation4b") {
      // mutable objects cannot be nested
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val a = mzeros(100)
          val b = array(10) { i => a } // nested array
          val b1 = b.clone
          val b2 = b1.mutable // error: internal arrays are *still* mutable, despite shallow clone
          val x1 = b2.at(5).at(50)
          print(x1)
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation4b")
  }

  def testMutation4c = {
    withOutFile(prefix+"mutation4c") {
      // mutable objects cannot be nested
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val a = mzeros(100)
          val b = array(10) { i => a.clone } // nested array
          val b1 = b.mutable // ok: internal arrays are immutable
          val x1 = b1.at(5).at(50)
          print(x1)
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation4c")
  }

  def testMutation5 = {
    withOutFile(prefix+"mutation5") {
      // mutable objects cannot be nested
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val a = zeros(100)
          val b = array(10) { i => a } // nested array
          val b1 = b.mutable
          val c = mzeros(20)
          b1.update(4,a) // ok: insert immutable array
          b1.update(5,c) // error: cannot insert mutable array
          c.update(50,50)
          val x1 = b1.at(5).at(50)
          print(x1)
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation5")
  }

  def testMutation6 = {
    withOutFile(prefix+"mutation6") {
      // mutate nested object (within an immutable one)
      trait Prog extends DSL {
        def test(x: Rep[Int]) = {
          val a = mzeros(100)
          val b = array(10) { i => a } // nested array
          val u = array(10) { i => zeros(100) }
          val c = if (x > 7) b else u
          val x1 = c.at(5).at(50)
          a.update(50,50)
          val x2 = c.at(5).at(50) // no cse, must serialize with update to a
          print(x2-x1)
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation6")
  }

  def testMutation7 = {
    withOutFile(prefix+"mutation7") {
      // local variables of primitive type
      trait Prog extends DSL with LiftVariables {
        def test(x0: Rep[Int]) = {
          val x = x0.toDouble // avoid codegen for implicit convert
          var c = 0.0
          while (c < x) {
            c = c + 1
          }
          if (c < x)
            c = 8.0
          print(c)
        }
      }
      new Prog with Impl
    }
    assertFileEqualsCheck(prefix+"mutation7")
  }
}
| astojanov/virtualization-lms-core | test-src/epfl/test8-effects/TestMutation.scala | Scala | bsd-3-clause | 11,133 |
package com.codacy.client.stash.client.auth
import scalaj.http.{HttpRequest, Token}
/**
 * Authenticator that signs outgoing requests with OAuth 1.0a using a fixed
 * consumer key/secret and access token/secret.
 */
class OAuth1Authenticator(key: String, secretKey: String, token: String, secretToken: String) extends Authenticator {

  // Credentials are immutable, so build the token objects once per instance.
  private[this] val consumer = Token(key, secretKey)
  private[this] val accessToken = Some(Token(token, secretToken))

  /** Returns `request` signed with the configured OAuth 1.0a credentials. */
  override def withAuthentication(request: HttpRequest): HttpRequest =
    OAuth1.sign(request, consumer, accessToken, None)
}
| codacy/stash-scala-client | src/main/scala/com/codacy/client/stash/client/auth/OAuth1Authenticator.scala | Scala | apache-2.0 | 373 |
package com.twitter.finagle.pool
import com.twitter.conversions.time._
import com.twitter.finagle.{Service, ServiceFactory, WriteException, MockTimer}
import com.twitter.util.{Time, Future, Promise}
import org.mockito.Matchers
import org.specs.SpecificationWithJUnit
import org.specs.mock.Mockito
/**
 * Specs for CachingPool: verifies that released services are cached for the
 * configured TTL (5 seconds here, driven by a MockTimer under a frozen
 * clock), revived on re-acquire, reaped on expiry, and that unhealthy
 * services and pool close() are handled correctly.
 */
class CachingPoolSpec extends SpecificationWithJUnit with Mockito {
  "CachingPool" should {
    val timer = new MockTimer
    val obj = mock[Object]
    // Shared happy-path mocks: an available underlying service returning `obj`.
    val underlying = mock[ServiceFactory[Any, Any]]
    underlying.close(any) returns Future.Done
    val underlyingService = mock[Service[Any, Any]]
    underlyingService.close(any) returns Future.Done
    underlyingService.isAvailable returns true
    underlyingService(Matchers.any) returns Future.value(obj)
    underlying() returns Future.value(underlyingService)

    "reflect the underlying factory availability" in {
      val pool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
      underlying.isAvailable returns false
      pool.isAvailable must beFalse
      there was one(underlying).isAvailable
      underlying.isAvailable returns true
      pool.isAvailable must beTrue
      there were two(underlying).isAvailable
    }

    "cache objects for the specified amount of time" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        val f = cachingPool()()
        f(123)() must be_==(obj)
        there was one(underlying)()
        timer.tasks must beEmpty
        f.close()
        there was one(underlyingService).isAvailable
        there was no(underlyingService).close(any)
        timer.tasks must haveSize(1)
        timer.tasks.head.when must be_==(Time.now + 5.seconds)
        // Reap!
        timeControl.advance(5.seconds)
        timer.tick()
        there was one(underlyingService).close(any)
        timer.tasks must beEmpty
      }
    }

    "reuse cached objects & revive from death row" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        cachingPool()().close()
        timer.tasks must haveSize(1)
        there was one(underlying)()
        there was no(underlyingService).close(any)
        timer.tasks must haveSize(1)
        timeControl.advance(4.seconds)
        cachingPool()().close()
        there was one(underlying)()
        there was no(underlyingService).close(any)
        timer.tasks must haveSize(1)
        // Originally scheduled time.
        timeControl.advance(1.second)
        timer.tick()
        timer.tasks must haveSize(1) // reschedule
        there was no(underlyingService).close(any)
        timer.tasks.head.when must be_==(Time.now + 4.seconds)
        timeControl.advance(5.seconds)
        timer.tick()
        timer.tasks must beEmpty
        there was one(underlyingService).close(any)
      }
    }

    "handle multiple objects, expiring them only after they are due to" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val o0 = mock[Object]
        val o1 = mock[Object]
        val o2 = mock[Object]
        val s0 = mock[Service[Any, Any]]; s0(any) returns Future.value(o0); s0.close(any) returns Future.Done
        val s1 = mock[Service[Any, Any]]; s1(any) returns Future.value(o1); s1.close(any) returns Future.Done
        val s2 = mock[Service[Any, Any]]; s2(any) returns Future.value(o2); s2.close(any) returns Future.Done
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        underlying() returns Future.value(s0)
        val f0 = cachingPool()()
        f0(123)() must be_==(o0)
        underlying() returns Future.value(s1)
        val f1 = cachingPool()()
        f1(123)() must be_==(o1)
        underlying() returns Future.value(s2)
        val f2 = cachingPool()()
        f2(123)() must be_==(o2)
        val ss = Seq(s0, s1, s2)
        val fs = Seq(f0, f1, f2)
        there were three(underlying)()
        ss foreach { _.isAvailable returns true }
        // Stagger the releases 5 seconds apart so expiry times differ.
        fs foreach { f =>
          timeControl.advance(5.second)
          f.close()
        }
        timer.tasks must haveSize(1)
        ss foreach { s => there was no(s).close(any) }
        timer.tick()
        there was one(s0).close(any)
        there was one(s1).close(any)
        there was no(s2).close(any)
        timer.tasks must haveSize(1)
        timer.tick()
        timer.tasks.head.when must be_==(Time.now + 5.seconds)
        // Take it!
        cachingPool()()(123)() must be_==(o2)
        timeControl.advance(5.seconds)
        timer.tick()
        // Nothing left.
        timer.tasks must beEmpty
      }
    }

    "restart timers when a dispose occurs" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val underlyingService = mock[Service[Any, Any]]
        underlyingService.close(any) returns Future.Done
        underlyingService.isAvailable returns true
        underlyingService(Matchers.any) returns Future.value(obj)
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        underlying() returns Future.value(underlyingService)
        timer.tasks must beEmpty
        val service = cachingPool()()
        service(123)() must be_==(obj)
        timer.tasks must beEmpty
        service.close()
        timer.tasks must haveSize(1)
        there was no(underlyingService).close(any)
        timer.tasks.head.when must be_==(Time.now + 5.seconds)
        timeControl.advance(1.second)
        cachingPool()()(123)() must be_==(obj)
        timeControl.advance(4.seconds)
        timer.tasks must beEmpty
        timer.tick()
        there was no(underlyingService).close(any)
        service.close()
        there was no(underlyingService).close(any)
        timer.tasks must haveSize(1)
      }
    }

    "don't cache unhealthy objects" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        val underlyingService = mock[Service[Any, Any]]
        underlyingService.close(any) returns Future.Done
        underlyingService(Matchers.any) returns Future.value(obj)
        underlying() returns Future.value(underlyingService)
        underlyingService.isAvailable returns false
        val service = cachingPool()()
        service(123)() must be_==(obj)
        service.close()
        there was one(underlyingService).isAvailable
        there was one(underlyingService).close(any)
        // No need to clean up an already disposed object.
        timer.tasks must beEmpty
      }
    }

    "cache objects when client sends interrupt" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        val underlyingService = mock[Service[Any, Any]]
        underlyingService.close(any) returns Future.Done
        val slowService = new Promise[Service[Any, Any]]
        underlying() returns slowService
        val service1 = cachingPool()
        val exception = new Exception("give up")
        service1.raise(exception)
        service1() must throwA[WriteException]
        service1 onFailure {
          case WriteException(e) => e mustEqual exception
          case _ => assert(false, "exception was not write exception")
        }
        slowService.setValue(underlyingService)
        val service2 = cachingPool()
        service2.isDefined must beTrue
        // not sure how else to verify the underlying is the same since CachingPool wraps
        underlyingService(1) returns Future.value(2)
        service2()(1)() mustEqual 2
      }
    }

    "flush the queue on close()" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        val underlyingService = mock[Service[Any, Any]]
        underlyingService.close(any) returns Future.Done
        underlyingService(Matchers.any) returns Future.value(obj)
        underlying() returns Future.value(underlyingService)
        underlyingService.isAvailable returns true
        val service = cachingPool()()
        service.close()
        there was no(underlyingService).close(any)
        cachingPool.close()
        there was one(underlyingService).close(any)
      }
    }

    "release services as they are released after close()" in {
      Time.withCurrentTimeFrozen { timeControl =>
        val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
        val underlyingService = mock[Service[Any, Any]]
        underlyingService.close(any) returns Future.Done
        underlyingService(Matchers.any) returns Future.value(obj)
        underlying() returns Future.value(underlyingService)
        underlyingService.isAvailable returns true
        val service = cachingPool()()
        cachingPool.close()
        there was no(underlyingService).close(any)
        service.close()
        there was one(underlyingService).close(any)
      }
    }

    "close the underlying factory" in {
      val cachingPool = new CachingPool[Any, Any](underlying, Int.MaxValue, 5.seconds, timer)
      cachingPool.close()
      there was one(underlying).close(any)
    }
  }
}
| foursquare/finagle | finagle-core/src/test/scala/com/twitter/finagle/pool/CachingPoolSpec.scala | Scala | apache-2.0 | 9,405 |
// Compiler (pos) regression test: this file only needs to type-check, not run.
// `ipl` requires an implicit `A => Or[A]` to be found by implicit search;
// the `null` body and the bare `ipl` call are intentional.
trait Or[A]
trait C
object ImplicitChainTest {
  def ipl[A](implicit from: A => Or[A]): C = null
  ipl
}
| dotty-staging/dotty | tests/pos/i5413-a.scala | Scala | apache-2.0 | 106 |
package dit4c.scheduler.ssh
object RsaFactorizer {
  // Component names follow http://tools.ietf.org/html/rfc2313#section-7.2
  type Modulus = BigInt // modulus n
  type PublicExponent = BigInt // public exponent e
  type PrivateExponent = BigInt // private exponent d
  type Prime1 = BigInt // prime factor p of n
  type Prime2 = BigInt // prime factor q of n
  type Exponent1 = BigInt // d mod (p-1)
  type Exponent2 = BigInt // d mod (q-1)
  // Chinese Remainder Theorem coefficient. RFC 2313's "q-1 mod p" denotes the
  // modular inverse q^(-1) mod p. NOTE(review): verify coefficient() below
  // computes the modular inverse rather than the literal (q - 1) % p.
  type Coefficient = BigInt
  // Full PKCS#1 RSAPrivateKey parameter tuple, in RFC 2313 field order.
  type Pkcs1Params = (
    Modulus,
    PublicExponent,
    PrivateExponent,
    Prime1,
    Prime2,
    Exponent1,
    Exponent2,
    Coefficient)
  /**
   * Recovers the prime factors p, q (and the CRT parameters) of an RSA key
   * from (n, e, d) alone.
   *
   * Based on "Twenty Years of Attacks on the RSA Cryptosystem" by Dan Boneh
   * (specifically Fact 1):
   * https://crypto.stanford.edu/~dabo/papers/RSA-survey.pdf
   * and "Handbook of Applied Cryptography" Section 8.2.2(i):
   * http://cacr.uwaterloo.ca/hac/about/chap8.pdf
   * as explained by Robert Mason's excellent answer here:
   * http://crypto.stackexchange.com/a/14713/34512
   */
  def apply(
    n: Modulus,
    e: PublicExponent,
    d: PrivateExponent): Pkcs1Params = {
    // For k = e*d - 1, we know:
    // * k is even
    // * k = (2^t)*r provided r is odd and t >= 1
    val k = e * d - 1
    val (r, t) = factorizeK(k)
    // Candidate bases x with 2 <= x < n, tried in order ("random" enough here).
    val xs = Stream.from(2).map(BigInt(_)).takeWhile(_ < n)
    // First x yielding a non-trivial square root of unity mod n.
    // NOTE(review): .head throws NoSuchElementException if (n, e, d) is not a
    // valid RSA triple and no candidate is ever found.
    val si = xs.flatMap(x => siCandidate(x, k, r, t, n)).head
    // A non-trivial sqrt of 1 reveals a factor: gcd(si - 1, n) is p.
    val p = (si - 1).gcd(n)
    val q = n / p
    (n, e, d, p, q, exponent(d,p), exponent(d,q), coefficient(p,q))
  }
/**
* Produce (r, t) such that:
* - k=(2^t)*r
* - r is odd
* - t β₯ 1, but preferably small
*/
def factorizeK(k: BigInt): (BigInt, Int) = {
assert(k % 2 == BigInt(0) && k != BigInt(0))
Stream.from(1) // start from r = k / 2 and t = 1
.filter { t => k.testBit(t) } // bit is 1, so k >> t would be odd
.map { t => (k >> t, t) }
.head
}
/**
* Determine first s(i) such that:
* - abs(s(i)) is not 1
* - abs(s(i-1)) is 1
*/
def siCandidate(x: BigInt, k: BigInt, r: BigInt, t: Int, n: BigInt): Option[BigInt] =
sSeries(x, k, r, t, n)
.sliding(2, 1)
.collectFirst {
case Seq(oldSi: BigInt, si: BigInt) if oldSi == BigInt(1) && !(si == BigInt(1) || n - BigInt(1) == si) =>
si
}
def sSeries(x: BigInt, k: BigInt, r: BigInt, t: Int, n: BigInt): Seq[BigInt] =
sSeries(x, k, r, t, n, 1).map(_ % n)
def sSeries(x: BigInt, k: BigInt, r: BigInt, t: Int, n: BigInt, i: Int): List[BigInt] =
// s(t) = x^r
if (t == i) x.modPow(r, n) :: Nil
// s(i) = s(i+1)^2
else {
val rest = sSeries(x, k, r, t, n, i + 1)
rest.head.modPow(2,n) :: rest
}
def exponent(d: PrivateExponent, prime: BigInt) = d % (prime - 1)
// Chinese Remainder Theorem coefficient q-1 mod p
def coefficient(p: Prime1, q: Prime2): Coefficient = (q - 1) % p
} | dit4c/dit4c | dit4c-scheduler/src/main/scala/dit4c/scheduler/ssh/RsaFactorizer.scala | Scala | mit | 3,144 |
package com.twitter.scalding.hraven.reducer_estimation
import java.io.IOException
import cascading.flow.FlowStep
import com.twitter.hraven.{ Flow => HRavenFlow, JobDetails, HadoopVersion }
import com.twitter.hraven.rest.client.HRavenRestClient
import com.twitter.scalding.reducer_estimation._
import org.apache.hadoop.mapred.JobConf
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
import com.twitter.hraven.JobDescFactory.{ JOBTRACKER_KEY, RESOURCE_MANAGER_KEY }
import scala.util.{ Failure, Success, Try }
/** Builds an HRavenRestClient from hRaven-related JobConf settings. */
object HRavenClient {
  import HRavenHistoryService.jobConfToRichConfig

  val apiHostnameKey = "hraven.api.hostname"
  val clientConnectTimeoutKey = "hraven.client.connect.timeout"
  val clientReadTimeoutKey = "hraven.client.read.timeout"

  // Timeout defaults, in milliseconds.
  private final val clientConnectTimeoutDefault = 30000
  private final val clientReadTimeoutDefault = 30000

  // Fails (as a Try) when the API hostname key is absent from the configuration.
  def apply(conf: JobConf): Try[HRavenRestClient] =
    conf.getFirstKey(apiHostnameKey)
      .map(new HRavenRestClient(_,
        conf.getInt(clientConnectTimeoutKey, clientConnectTimeoutDefault),
        conf.getInt(clientReadTimeoutKey, clientReadTimeoutDefault)))
}
/**
 * Mixin for ReducerEstimators to give them the ability to query hRaven for
 * info about past runs.
 */
object HRavenHistoryService extends HistoryService {
  private val LOG = LoggerFactory.getLogger(this.getClass)

  // Config keys every fetched flow must carry (used to match job steps).
  val RequiredJobConfigs = Seq("cascading.flow.step.num")

  case class MissingFieldsException(fields: Seq[String]) extends Exception

  /**
   * Add some helper methods to JobConf
   */
  case class RichConfig(conf: JobConf) {

    // Upper bound on how many historical flows to retain from hRaven.
    val MaxFetch = "hraven.reducer.estimator.max.flow.history"
    val MaxFetchDefault = 8

    def maxFetch: Int = conf.getInt(MaxFetch, MaxFetchDefault)

    /**
     * Try fields in order until one returns a value.
     * Logs a warning if nothing was found.
     */
    def getFirstKey(fields: String*): Try[String] =
      fields.collectFirst {
        case f if conf.get(f) != null => Success(conf.get(f))
      }.getOrElse {
        LOG.warn("Missing required config param: " + fields.mkString(" or "))
        Failure(MissingFieldsException(fields))
      }
  }
  implicit def jobConfToRichConfig(conf: JobConf): RichConfig = RichConfig(conf)
  /**
   * Fetch flows until it finds one that was successful
   * (using "HdfsBytesRead > 0" as a marker for successful jobs since it seems
   * that this is only set on completion of jobs)
   *
   * TODO: query hRaven for successful jobs (first need to add ability to filter
   * results in hRaven REST API)
   */
  private def fetchSuccessfulFlows(client: HRavenRestClient, cluster: String, user: String, batch: String, signature: String, max: Int, nFetch: Int): Try[Seq[HRavenFlow]] = {
    Try(client.fetchFlowsWithConfig(cluster, user, batch, signature, nFetch, RequiredJobConfigs: _*)).map { flows =>
      // Keep at most `max` of the `nFetch` most recently fetched flows that completed.
      val successfulFlows = flows.asScala.filter(_.getHdfsBytesRead > 0).take(max)
      if (successfulFlows.isEmpty) {
        LOG.warn("Unable to find any successful flows in the last " + nFetch + " jobs.")
      }
      successfulFlows
    } recoverWith {
      case e: IOException =>
        // Degrade gracefully: history-based estimation is skipped on API errors.
        LOG.error("Error making API request to hRaven. HRavenHistoryService will be disabled.")
        Failure(e)
    }
  }
  /**
   * Fetch info from hRaven for the last time the given JobStep ran.
   * Finds the last successful complete flow and selects the corresponding
   * step from it.
   *
   * @param step FlowStep to get info for
   * @return Details about the previous successful run.
   */
  def fetchPastJobDetails(step: FlowStep[JobConf], max: Int): Try[Seq[JobDetails]] = {
    val conf = step.getConfig
    val stepNum = step.getStepNum

    // hRaven "jobs" correspond to cascading flow steps; match on the step number
    // recorded in each past job's configuration.
    def findMatchingJobStep(pastFlow: HRavenFlow) =
      pastFlow.getJobs.asScala.find { step =>
        try {
          step.getConfiguration.get("cascading.flow.step.num").toInt == stepNum
        } catch {
          case _: NumberFormatException => false
        }
      } orElse {
        LOG.warn("No matching job step in the retrieved hRaven flow.")
        None
      }

    // NOTE(review): the partial function below only handles host:port values;
    // a config value without a port would fail with a MatchError - confirm
    // whether that shape can occur.
    def lookupClusterName(client: HRavenRestClient): Try[String] = {
      // regex for case matching URL to get hostname out
      val hostRegex = """(.*):\\d+""".r
      // first try resource manager (for Hadoop v2), then fallback to job tracker
      conf.getFirstKey(RESOURCE_MANAGER_KEY, JOBTRACKER_KEY).flatMap {
        // extract hostname from hostname:port
        case hostRegex(host) =>
          // convert hostname -> cluster name (e.g. dw2@smf1)
          Try(client.getCluster(host))
      }
    }

    val flowsTry = for {
      // connect to hRaven REST API
      client <- HRavenClient(conf)
      // lookup cluster name used by hRaven
      cluster <- lookupClusterName(client)
      // get identifying info for this job
      user <- conf.getFirstKey("hraven.history.user.name", "user.name")
      batch <- conf.getFirstKey("batch.desc")
      signature <- conf.getFirstKey("scalding.flow.class.signature")
      // query hRaven for matching flows
      flows <- fetchSuccessfulFlows(client, cluster, user, batch, signature, max, conf.maxFetch)
    } yield flows

    // Find the FlowStep in the hRaven flow that corresponds to the current step
    // *Note*: when hRaven says "Job" it means "FlowStep"
    flowsTry.map(flows => flows.flatMap(findMatchingJobStep))
  }
/**
 * Builds `FlowStepHistory` records (the estimators' input) from the past runs
 * of the given flow step as retrieved from hRaven.
 */
override def fetchHistory(info: FlowStrategyInfo, maxHistory: Int): Try[Seq[FlowStepHistory]] =
  fetchPastJobDetails(info.step, maxHistory).map { history =>
    for {
      step <- history
      // NOTE(review): non-exhaustive match — assumes HadoopVersion only has
      // ONE and TWO; a new enum value would raise a MatchError here. Confirm.
      hadoopVersion = step.getHadoopVersion match {
        case HadoopVersion.ONE => 1
        case HadoopVersion.TWO => 2
      }
      // Last field of FlowStepKeys (queue/pool name) is not available from
      // hRaven here, hence the empty string.
      keys = FlowStepKeys(step.getJobName, step.getUser, step.getPriority, step.getStatus, step.getVersion, hadoopVersion, "")
      tasks = step.getTasks.asScala.map{ t => Task(t.getTaskId, t.getType, t.getStatus, t.getSplits.toSeq, t.getStartTime, t.getFinishTime, t.getTaskAttemptId, t.getTrackerName, t.getHttpPort, t.getHostname, t.getState, t.getError, t.getShuffleFinished, t.getSortFinished) }
    } yield FlowStepHistory(keys, step.getSubmitTime, step.getLaunchTime, step.getFinishTime, step.getTotalMaps, step.getTotalReduces, step.getFinishedMaps, step.getFinishedReduces, step.getFailedMaps, step.getFailedReduces, step.getMapFileBytesRead, step.getMapFileBytesWritten, step.getReduceFileBytesRead, step.getHdfsBytesRead, step.getHdfsBytesWritten, step.getMapSlotMillis, step.getReduceSlotMillis, step.getReduceShuffleBytes, 0, tasks)
  }
}
/** Ratio-based reducer estimator wired to hRaven-backed job history. */
class HRavenRatioBasedEstimator extends RatioBasedEstimator {
  override val historyService = HRavenHistoryService
}
/** Runtime-based reducer estimator using the median of past runtimes, backed by hRaven. */
class HRavenBasicMedianRuntimeBasedEstimator extends BasicRuntimeReducerEstimator {
  override val historyService = HRavenHistoryService
  override val runtimeEstimationScheme = MedianEstimationScheme
}
/** Runtime-based reducer estimator using the mean of past runtimes, backed by hRaven. */
class HRavenBasicMeanRuntimeBasedEstimator extends BasicRuntimeReducerEstimator {
  override val historyService = HRavenHistoryService
  override val runtimeEstimationScheme = MeanEstimationScheme
}
/** Input-size-scaled runtime estimator using the median scheme, backed by hRaven. */
class HRavenInputScaledMedianRuntimeBasedEstimator extends InputScaledRuntimeReducerEstimator {
  override val historyService = HRavenHistoryService
  override val runtimeEstimationScheme = MedianEstimationScheme
}
/** Input-size-scaled runtime estimator using the mean scheme, backed by hRaven. */
class HRavenInputScaledMeanRuntimeBasedEstimator extends InputScaledRuntimeReducerEstimator {
  override val historyService = HRavenHistoryService
  override val runtimeEstimationScheme = MeanEstimationScheme
}
| sid-kap/scalding | scalding-hraven/src/main/scala/com/twitter/scalding/hraven/reducer_estimation/HRavenHistoryService.scala | Scala | apache-2.0 | 7,548 |
package sttp.client3.asynchttpclient
import java.nio.ByteBuffer
import io.netty.handler.codec.http.HttpHeaders
import org.asynchttpclient.AsyncHandler.State
import org.asynchttpclient.handler.StreamedAsyncHandler
import org.asynchttpclient.proxy.ProxyServer
import org.asynchttpclient.ws.{WebSocketListener, WebSocketUpgradeHandler, WebSocket => AHCWebSocket}
import org.asynchttpclient.{
AsyncHandler,
AsyncHttpClient,
BoundRequestBuilder,
DefaultAsyncHttpClient,
DefaultAsyncHttpClientConfig,
HttpResponseBodyPart,
HttpResponseStatus,
Realm,
RequestBuilder,
Request => AsyncRequest,
Response => AsyncResponse
}
import org.reactivestreams.{Publisher, Subscriber, Subscription}
import sttp.capabilities.{Effect, Streams}
import sttp.client3
import sttp.client3.SttpBackendOptions.ProxyType.{Http, Socks}
import sttp.client3.internal.ws.{SimpleQueue, WebSocketEvent}
import sttp.monad.syntax._
import sttp.monad.{Canceler, MonadAsyncError, MonadError}
import sttp.client3.{Response, SttpBackend, SttpBackendOptions, _}
import sttp.model._
import scala.collection.JavaConverters._
import scala.collection.immutable.Seq
import scala.util.Try
/**
 * sttp backend template implemented on top of async-http-client (AHC).
 *
 * Concrete subclasses supply the streaming bridge (`bodyFromAHC` / `bodyToAHC`)
 * and the queue implementation used to buffer websocket events.
 *
 * @param asyncHttpClient  the underlying AHC client used to execute requests
 * @param monad            async-error monad for the effect type `F`
 * @param closeClient      whether `close()` should also close `asyncHttpClient`
 * @param customizeRequest hook applied to each prepared request before execution
 */
abstract class AsyncHttpClientBackend[F[_], S <: Streams[S], P](
    asyncHttpClient: AsyncHttpClient,
    private implicit val monad: MonadAsyncError[F],
    closeClient: Boolean,
    customizeRequest: BoundRequestBuilder => BoundRequestBuilder
) extends SttpBackend[F, P] {

  val streams: Streams[S]

  // Capabilities of this backend: `P` plus the effect `F`.
  type PE = P with Effect[F]

  // Dispatches to the websocket or regular path; exceptions are normalized to
  // SttpClientException via adjustExceptions.
  override def send[T, R >: PE](r: Request[T, R]): F[Response[T]] =
    adjustExceptions(r) {
      preparedRequest(r).flatMap { ahcRequest =>
        if (r.isWebSocket) sendWebSocket(r, ahcRequest) else sendRegular(r, ahcRequest)
      }
    }

  // Executes a regular (non-websocket) request, bridging AHC's callback API
  // into `F`; cancellation aborts the in-flight AHC future.
  private def sendRegular[T, R >: PE](r: Request[T, R], ahcRequest: BoundRequestBuilder): F[Response[T]] = {
    monad.flatten(monad.async[F[Response[T]]] { cb =>
      def success(r: F[Response[T]]): Unit = cb(Right(r))
      def error(t: Throwable): Unit = cb(Left(t))
      val lf = ahcRequest.execute(streamingAsyncHandler(r, success, error))
      Canceler(() => lf.abort(new InterruptedException))
    })
  }

  // Executes a websocket upgrade request; events are pushed into a fresh
  // SimpleQueue which the resulting WebSocket implementation consumes.
  private def sendWebSocket[T, R >: PE](
      r: Request[T, R],
      ahcRequest: BoundRequestBuilder
  ): F[Response[T]] =
    createSimpleQueue[WebSocketEvent].flatMap { queue =>
      monad.flatten(monad.async[F[Response[T]]] { cb =>
        val initListener =
          new WebSocketInitListener(r, queue, (r: F[Response[T]]) => cb(Right(r)), t => cb(Left(t)))
        val h = new WebSocketUpgradeHandler.Builder()
          .addWebSocketListener(initListener)
          .build()
        val lf = ahcRequest.execute(h)
        Canceler(() => lf.abort(new InterruptedException))
      })
    }

  override def responseMonad: MonadError[F] = monad

  // Subclass-provided bridges between AHC bodies and the streaming type `S`.
  protected def bodyFromAHC: BodyFromAHC[F, S]
  protected def bodyToAHC: BodyToAHC[F, S]

  // Subclass-provided queue used to buffer websocket events.
  protected def createSimpleQueue[T]: F[SimpleQueue[F, T]]

  // Builds the AHC handler that accumulates status/headers eagerly and exposes
  // the body as a reactive-streams Publisher[ByteBuffer].
  private def streamingAsyncHandler[T, R >: PE](
      request: Request[T, R],
      success: F[Response[T]] => Unit,
      error: Throwable => Unit
  ): AsyncHandler[Unit] = {
    new StreamedAsyncHandler[Unit] {
      private val builder = new AsyncResponse.ResponseBuilder()
      private var publisher: Option[Publisher[ByteBuffer]] = None
      // guards doComplete so success is only signalled once (see note below)
      private var completed = false
      // when using asStream(...), trying to detect ignored streams, where a subscription never happened
      @volatile private var subscribed = false

      override def onStream(p: Publisher[HttpResponseBodyPart]): AsyncHandler.State = {
        // Sadly we don't have .map on Publisher, so adapt body parts to
        // ByteBuffers with a hand-written wrapper.
        publisher = Some(new Publisher[ByteBuffer] {
          override def subscribe(s: Subscriber[_ >: ByteBuffer]): Unit = {
            subscribed = true
            p.subscribe(new Subscriber[HttpResponseBodyPart] {
              override def onError(t: Throwable): Unit = s.onError(t)
              override def onComplete(): Unit = s.onComplete()
              override def onNext(t: HttpResponseBodyPart): Unit =
                s.onNext(t.getBodyByteBuffer)
              override def onSubscribe(v: Subscription): Unit =
                s.onSubscribe(v)
            })
          }
        })
        // #2: sometimes onCompleted() isn't called, only onStream(); this
        // seems to be true esp for https sites. For these cases, completing
        // the request here.
        doComplete()
        State.CONTINUE
      }

      override def onBodyPartReceived(bodyPart: HttpResponseBodyPart): AsyncHandler.State =
        throw new IllegalStateException("Requested a streaming backend, unexpected eager body parts.")

      override def onHeadersReceived(headers: HttpHeaders): AsyncHandler.State = {
        builder.accumulate(headers)
        State.CONTINUE
      }

      override def onStatusReceived(responseStatus: HttpResponseStatus): AsyncHandler.State = {
        builder.accumulate(responseStatus)
        State.CONTINUE
      }

      override def onCompleted(): Unit = {
        // if the request had no body, onStream() will never be called
        doComplete()
      }

      // Builds the base (body-less) response and hands the body publisher to
      // bodyFromAHC; idempotent thanks to the `completed` flag.
      private def doComplete(): Unit = {
        if (!completed) {
          completed = true
          val baseResponse = readResponseNoBody(request, builder.build())
          val p = publisher.getOrElse(EmptyPublisher)
          val b = bodyFromAHC(Left(p), request.response, baseResponse, () => subscribed)
          success(b.map(t => baseResponse.copy(body = t)))
        }
      }

      override def onThrowable(t: Throwable): Unit = {
        error(t)
      }
    }
  }

  /**
   * One-shot listener for websocket upgrades: on open it detaches itself,
   * wraps the AHC websocket, and completes the response with the coupled
   * sttp WebSocket.
   */
  private class WebSocketInitListener[T](
      request: Request[T, _],
      queue: SimpleQueue[F, WebSocketEvent],
      success: F[Response[T]] => Unit,
      error: Throwable => Unit
  ) extends WebSocketListener {
    override def onOpen(ahcWebSocket: AHCWebSocket): Unit = {
      // detach so further frames are handled by the coupled websocket, not us
      ahcWebSocket.removeWebSocketListener(this)
      val webSocket = WebSocketImpl.newCoupledToAHCWebSocket(ahcWebSocket, queue)
      queue.offer(WebSocketEvent.Open())
      val baseResponse =
        Response(
          (),
          StatusCode.SwitchingProtocols,
          "",
          readHeaders(ahcWebSocket.getUpgradeHeaders),
          Nil,
          request.onlyMetadata
        )
      val bf = bodyFromAHC(Right(webSocket), request.response, baseResponse, () => false)
      success(bf.map(b => baseResponse.copy(body = b)))
    }

    override def onClose(webSocket: AHCWebSocket, code: Int, reason: String): Unit = {
      throw new IllegalStateException("Should never be called, as the listener should be removed after onOpen")
    }

    override def onError(t: Throwable): Unit = error(t)
  }

  // Wraps prepareRequest (which may throw, e.g. on malformed URIs) in Try and
  // applies the user-supplied customization hook.
  private def preparedRequest[R >: PE](r: Request[_, R]): F[BoundRequestBuilder] = {
    monad.fromTry(Try(asyncHttpClient.prepareRequest(requestToAsync(r)))).map(customizeRequest)
  }

  // Translates an sttp request into an AHC request (method, URL, timeouts,
  // headers, body). A non-finite read timeout maps to AHC's -1 (no timeout).
  private def requestToAsync[R >: PE](r: Request[_, R]): AsyncRequest = {
    val readTimeout = r.options.readTimeout
    val rb = new RequestBuilder(r.method.method)
      .setUrl(r.uri.toString)
      .setReadTimeout(if (readTimeout.isFinite) readTimeout.toMillis.toInt else -1)
      .setRequestTimeout(if (readTimeout.isFinite) readTimeout.toMillis.toInt else -1)
    r.headers.foreach { header => rb.setHeader(header.name, header.value) }
    bodyToAHC(r, r.body, rb)
    rb.build()
  }

  // Builds a Response[Unit] carrying status, status text and headers only.
  private def readResponseNoBody(request: Request[_, _], response: AsyncResponse): Response[Unit] = {
    client3.Response(
      (),
      StatusCode.unsafeApply(response.getStatusCode),
      response.getStatusText,
      readHeaders(response.getHeaders),
      Nil,
      request.onlyMetadata
    )
  }

  private def readHeaders(h: HttpHeaders): Seq[Header] =
    h.iteratorAsString()
      .asScala
      .map(e => Header(e.getKey, e.getValue))
      .toList

  // Closes the underlying AHC client only if this backend owns it.
  override def close(): F[Unit] = {
    if (closeClient) monad.eval(asyncHttpClient.close()) else monad.unit(())
  }

  private def adjustExceptions[T](request: Request[_, _])(t: => F[T]): F[T] =
    SttpClientException.adjustExceptions(responseMonad)(t)(
      SttpClientException.defaultExceptionToSttpClientException(request, _)
    )
}
/** Companion: factory helpers for building the underlying AHC client. */
object AsyncHttpClientBackend {
  // Default bound for the websocket event buffer used by concrete backends.
  val DefaultWebSocketBufferCapacity: Option[Int] = Some(1024)

  // Translates SttpBackendOptions (connect timeout, proxy settings) into an
  // AHC config builder. The cookie store is disabled so sttp manages cookies.
  private[asynchttpclient] def defaultConfigBuilder(
      options: SttpBackendOptions
  ): DefaultAsyncHttpClientConfig.Builder = {
    val configBuilder = new DefaultAsyncHttpClientConfig.Builder()
      .setConnectTimeout(options.connectionTimeout.toMillis.toInt)
      .setCookieStore(null)
    options.proxy match {
      case None => configBuilder
      case Some(p) =>
        val proxyType: org.asynchttpclient.proxy.ProxyType =
          p.proxyType match {
            case Socks => org.asynchttpclient.proxy.ProxyType.SOCKS_V5
            case Http  => org.asynchttpclient.proxy.ProxyType.HTTP
          }
        configBuilder.setProxyServer {
          val builder = new ProxyServer.Builder(p.host, p.port)
            .setProxyType(proxyType) // Fix issue #145
            .setNonProxyHosts(p.nonProxyHosts.asJava)
          // optional basic-auth credentials for the proxy
          p.auth.foreach { proxyAuth =>
            builder.setRealm(
              new Realm.Builder(proxyAuth.username, proxyAuth.password).setScheme(Realm.AuthScheme.BASIC)
            )
          }
          builder.build()
        }
    }
  }

  private[asynchttpclient] def defaultClient(options: SttpBackendOptions): AsyncHttpClient = {
    new DefaultAsyncHttpClient(defaultConfigBuilder(options).build())
  }

  // Builds a client from the default config after applying a caller-supplied
  // config transformation.
  private[asynchttpclient] def clientWithModifiedOptions(
      options: SttpBackendOptions,
      updateConfig: DefaultAsyncHttpClientConfig.Builder => DefaultAsyncHttpClientConfig.Builder
  ): AsyncHttpClient = {
    new DefaultAsyncHttpClient(updateConfig(defaultConfigBuilder(options)).build())
  }
}
| softwaremill/sttp | async-http-client-backend/src/main/scala/sttp/client3/asynchttpclient/AsyncHttpClientBackend.scala | Scala | apache-2.0 | 9,850 |
package com.chatwork.quiz.misc
import org.scalatest.{ Matchers, FunSpec }
import scala.math.Ordering.IntOrdering
/** Specs for the single-valued Leaf node of the binary tree. */
class LeafSpec extends FunSpec with Matchers {

  describe("Leaf#size") {
    it("should return 1") {
      // a leaf always counts as exactly one node
      Leaf(1).size shouldBe 1
      Leaf(1.0).size shouldBe 1
    }
  }

  describe("Leaf#max") {
    it("should return the value of Leaf") {
      Leaf(1).max shouldBe 1
      Leaf(3.21).max shouldBe 3.21
    }
  }

  describe("Leaf#min") {
    it("should return the value of Leaf") {
      Leaf(1).min shouldBe 1
      Leaf(3.21).min shouldBe 3.21
    }
  }

  describe("Leaf#sum") {
    it("should return the value of Leaf") {
      Leaf(1).sum shouldBe 1
      Leaf(3.21).sum shouldBe 3.21
    }
  }

  describe("Leaf#avg") {
    it("should return the double value") {
      // avg of a single value is the value itself, widened to Double
      Leaf(1).avg shouldBe 1.toDouble
      Leaf(BigDecimal(3.21)).avg shouldBe 3.21
    }
  }

  describe("Leaf#find") {
    it("should return a node has the value in the Leaf") {
      Leaf(1).find(1) shouldBe Some(Leaf(1))
      Leaf(4.24f).find(4.24f) shouldBe Some(Leaf(4.24f))
    }
  }

}
| kazzna/scala-quiz | src/test/scala/com/chatwork/quiz/misc/LeafSpec.scala | Scala | mit | 1,086 |
/**
* Copyright 2013 Alex Jones
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with work for additional information
* regarding copyright ownership. The ASF licenses file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package update
import java.net.URI
import dao.GameDao
import dates.{September, TimeOfDay, ZonedDateTimeFactory, ZonedDateTimeFactoryImpl}
import html.{AttendenceUpdateCommand, DatePlayedLocator, DatePlayedUpdateCommand, GameKeyLocator, GameUpdateCommand, SeasonTicketsUpdateCommand}
import logging.{RemoteStream, SimpleRemoteStream}
import models.{Competition, Location}
import Competition._
import Location._
import model.{Game, GameKey}
import java.time.{Clock, ZoneId, ZonedDateTime}
import monads.FE.FutureEitherNel
import monads.{FE, FO}
import org.specs2.concurrent.ExecutionEnv
import org.specs2.matcher.DisjunctionMatchers
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import update.fixtures.FixturesGameScanner
import update.tickets.TicketsGameScanner
import scala.concurrent.{ExecutionContext, Future}
import cats.instances.future._
/**
* @author alex
*
*/
/**
 * Specs for MainUpdateServiceImpl: scanning fixtures and tickets sites,
 * merging the resulting update commands into stored games, and the
 * attend/unattend operations. All collaborators are mocked.
 */
class MainUpdateServiceImplSpec extends Specification with DisjunctionMatchers with Mockito {

  // mocks carry per-example state, so examples must not run concurrently
  sequential

  val TICKETS_URL = new URI("http://tickets")
  val FIXTURES_URL = new URI("http://fixtures")
  val SOUTHAMPTON = GameKey(FACP, HOME, "Southampton", 2014)
  val LIVERPOOL = GameKey(FACP, HOME, "Liverpool", 2014)
  val LATEST_SEASON: Option[Int] = Some(2015)

  // Wraps a fixed set of update commands in the FutureEitherNel shape the
  // scanners return.
  def updates(gameUpdateCommands: GameUpdateCommand*)(implicit ec: ExecutionContext): FutureEitherNel[String, Seq[GameUpdateCommand]] = {
    FE(Future.successful(gameUpdateCommands))
  }

  "Adding a completely new game" should {
    "create and update a new game" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      s.gameDao.getAll returns Future.successful(List.empty[Game])
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        DatePlayedUpdateCommand(SOUTHAMPTON, September(5, 2013) at(15, 0)))
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates()
      val expectedStoredGame = Game.gameKey(SOUTHAMPTON).copy(at = Some(September(5, 2013) at (15, 0)))
      s.gameDao.store(expectedStoredGame) returns Future.successful(expectedStoredGame)
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(1).await
      there was one(s.gameDao).store(expectedStoredGame)
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Updating an existing game with new information" should {
    "update the game" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val existingStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0))
      )
      s.gameDao.getAll returns Future.successful(List(existingStoredGame))
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(AttendenceUpdateCommand(SOUTHAMPTON, 34966))
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates()
      val expectedStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0)),
        attendance = Some(34966)
      )
      s.gameDao.store(expectedStoredGame) returns Future.successful(expectedStoredGame)
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(1).await
      there was one(s.gameDao).store(expectedStoredGame)
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Updating an existing game with no extra information" should {
    "not update the game" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val existingStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0)),
        attendance = Some(34966)
      )
      s.gameDao.getAll returns Future.successful(List(existingStoredGame))
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        AttendenceUpdateCommand(SOUTHAMPTON, 34966))
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates()
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(1).await
      // nothing changed, so no store call is expected
      there were no(s.gameDao).store(any[Game]())
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Updating an existing game with new ticket information" should {
    "update the game" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val existingStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0))
      )
      s.gameDao.getAll returns Future.successful(List(existingStoredGame))
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates()
      // ticket updates locate games by their played date, not their key
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        SeasonTicketsUpdateCommand(DatePlayedLocator(September(5, 2013) at(15, 0)), September(3, 2013) at(9, 0)))
      val expectedStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0)),
        seasonTicketsAvailable = Some(September(3, 2013) at (9, 0))
      )
      s.gameDao.store(expectedStoredGame) returns Future.successful(expectedStoredGame)
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(1).await
      there was one(s.gameDao).store(expectedStoredGame)
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Updating an existing game with no new ticket information" should {
    "not update the game" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val existingStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0)),
        seasonTicketsAvailable = Some(September(3, 2013) at (9, 0))
      )
      s.gameDao.getAll returns Future.successful(List(existingStoredGame))
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates()
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        SeasonTicketsUpdateCommand(DatePlayedLocator(September(5, 2013) at(15, 0)), September(3, 2013) at(9, 0)))
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(1).await
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Creating a new game and also updating its ticket information" should {
    "create and update the game" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      s.gameDao.getAll returns Future.successful(List.empty)
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        DatePlayedUpdateCommand(SOUTHAMPTON, September(5, 2013) at(15, 0)))
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        SeasonTicketsUpdateCommand(DatePlayedLocator(September(5, 2013) at(15, 0)), September(3, 2013) at(9, 0)))
      // the game is stored twice: once from fixtures, once from tickets
      val firstStoredGame = Game.gameKey(SOUTHAMPTON).copy(at = Some(September(5, 2013) at (15, 0)))
      val secondStoredGame = Game.gameKey(SOUTHAMPTON).copy(
        at = Some(September(5, 2013) at (15, 0)),
        seasonTicketsAvailable = Some(September(3, 2013) at (9, 0))
      )
      List(firstStoredGame, secondStoredGame) foreach { game =>
        s.gameDao.store(game) returns Future.successful(game)
      }
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(1).await
      there was one(s.gameDao).store(firstStoredGame)
      there was one(s.gameDao).store(secondStoredGame)
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Tickets for a non-existing game" should {
    "be ignored" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.gameDao.getAll returns Future.successful(List.empty)
      s.fixturesGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates()
      s.ticketsGameScanner.scan(LATEST_SEASON)(s.remoteStream) returns updates(
        SeasonTicketsUpdateCommand(DatePlayedLocator(September(5, 2013) at(15, 0)), September(3, 2013) at(9, 0)))
      s.mainUpdateService.processDatabaseUpdates(s.remoteStream).value must beRight(0).await
      there were no(s.gameDao).store(any[Game]())
      there was one(s.lastUpdated).at(s.now)
    }
  }

  "Attending a game" should {
    "persist its attended flag to true" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val unattendedGame = Game.gameKey(SOUTHAMPTON)
      val attendedGame = Game.gameKey(SOUTHAMPTON).copy(attended = true)
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.gameDao.findById(1l) returns FO(Some(unattendedGame))
      s.gameDao.store(attendedGame) returns Future.successful(attendedGame)
      val changedGame = s.mainUpdateService.attendGame(1l).value
      changedGame.map(_.map(_.attended)) must beSome(true).await
      there was one(s.gameDao).store(attendedGame)
    }
  }

  "Unattending a game" should {
    "persist its attended flag to false" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val unattendedGame = Game.gameKey(SOUTHAMPTON).copy(attended = false)
      val attendedGame = Game.gameKey(SOUTHAMPTON).copy(attended = true)
      s.gameDao.getLatestSeason returns FO(LATEST_SEASON)
      s.gameDao.findById(1l) returns FO(Some(attendedGame))
      s.gameDao.store(unattendedGame) returns Future.successful(unattendedGame)
      val changedGame = s.mainUpdateService.unattendGame(1l).value
      changedGame.map(_.map(_.attended)) must beSome(false).await
    }
  }

  "Attending all home games in a season" should {
    "persist all home games attended flag to true" in { implicit ee: ExecutionEnv =>
      val s = new Services()
      implicit val zonedDateTimeFactory: ZonedDateTimeFactory = s.zonedDateTimeFactory
      val homeGames2013 = List(SOUTHAMPTON, LIVERPOOL)
      val unattendedGames = homeGames2013.map(Game.gameKey)
      val attendedGames = homeGames2013.map(Game.gameKey(_).copy(attended = true))
      s.gameDao.getAllForSeasonAndLocation(2013, HOME) returns Future.successful(unattendedGames)
      attendedGames foreach (attendedGame => s.gameDao.store(attendedGame) returns Future.successful(attendedGame))
      s.mainUpdateService.attendAllHomeGamesForSeason(2013).map(_.map(_.attended)) must be_==(List(true, true)).await
    }
  }

  // Game locator implicits
  implicit val zonedDateTimeToDatePlayedLocator: TimeOfDay => DatePlayedLocator = DatePlayedLocator(_)
  implicit val gameKeyToGameKeyLocator: GameKey => GameKeyLocator = GameKeyLocator

  /**
   * A matcher that checks that two games are equal.
   */
  implicit val gameMatcher: Game => (Game => Boolean) = { g1 =>
    g2 =>
      g1.gameKey == g2.gameKey &&
      g1.attended == g2.attended &&
      g1.attendance == g2.attendance &&
      g1.academyMembersAvailable == g2.academyMembersAvailable &&
      g1.bondholdersAvailable == g2.bondholdersAvailable &&
      g1.generalSaleAvailable == g2.generalSaleAvailable &&
      g1.at == g2.at &&
      g1.priorityPointAvailable == g2.priorityPointAvailable &&
      g1.seasonTicketsAvailable == g2.seasonTicketsAvailable &&
      g1.id == g2.id &&
      g1.location == g2.location &&
      g1.matchReport == g2.matchReport &&
      g1.result == g2.result &&
      g1.televisionChannel == g2.televisionChannel
  }

  /**
   * A class that holds all the mocked services
   */
  class Services(implicit ee: ExecutionEnv) {
    private val zoneId = ZoneId.of("Europe/London")
    implicit val zonedDateTimeFactory: ZonedDateTimeFactory = new ZonedDateTimeFactoryImpl()
    val now: ZonedDateTime = zonedDateTimeFactory.now
    val gameDao: GameDao = mock[GameDao]
    val lastUpdated: LastUpdated = mock[LastUpdated]
    val ticketsGameScanner: TicketsGameScanner = mock[TicketsGameScanner]
    val fixturesGameScanner: FixturesGameScanner = mock[FixturesGameScanner]
    // the system under test, wired with the mocks above
    val mainUpdateService = new MainUpdateServiceImpl(gameDao, fixturesGameScanner, ticketsGameScanner, lastUpdated)
    val remoteStream: RemoteStream = new SimpleRemoteStream()
  }
}
| unclealex72/west-ham-calendar | test/update/MainUpdateServiceImplSpec.scala | Scala | apache-2.0 | 13,810 |
package javaee6.web.jaxrs
import java.net.URL
import javax.ws.rs.ApplicationPath
import javax.ws.rs.core.{MediaType, Response}
import org.jboss.arquillian.container.test.api.{Deployment, RunAsClient}
import org.jboss.arquillian.junit.Arquillian
import org.jboss.arquillian.test.api.ArquillianResource
import org.jboss.resteasy.client.{ClientRequest, ClientResponse}
import org.jboss.resteasy.plugins.providers.RegisterBuiltin
import org.jboss.resteasy.spi.ResteasyProviderFactory
import org.jboss.shrinkwrap.api.ShrinkWrap
import org.jboss.shrinkwrap.api.spec.WebArchive
import org.jboss.shrinkwrap.resolver.api.maven.Maven
import org.junit.{BeforeClass, Test}
import org.junit.runner.RunWith
import org.junit.Assert._
import org.hamcrest.CoreMatchers._
/** Client-side Arquillian test hitting the deployed JAX-RS "simple" resource. */
@RunWith(classOf[Arquillian])
@RunAsClient
class SimpleResourceTest {

  // Application path of the JAX-RS app (@ApplicationPath value minus the leading '/').
  private val resourcePrefix =
    classOf[SimpleApplication]
      .getAnnotation(classOf[ApplicationPath])
      .value
      .substring(1)

  // Base URL of the deployed archive, injected by Arquillian.
  @ArquillianResource
  private var deploymentUrl: URL = _

  @Test
  def helloTest: Unit = {
    val request = new ClientRequest(deploymentUrl + resourcePrefix + "/simple")
    request.header("Accept", MediaType.TEXT_PLAIN)
    val response = request.get(classOf[String])
    assertThat(response.getStatus, is(Response.Status.OK.getStatusCode))
    assertThat(response.getEntity, is("Hello World"))
  }
}
/** Companion: builds the web archive that Arquillian deploys for the test. */
object SimpleResourceTest {
  @Deployment
  def createDeployment: WebArchive =
    ShrinkWrap
      .create(classOf[WebArchive], "arquillian-test.war")
      .addPackages(true, "javaee6.web.jaxrs")
      .addAsLibraries {
        // bundle the Scala runtime so the deployed classes can load
        Maven
          .resolver
          .resolve("org.scala-lang:scala-library:2.10.3")
          .withTransitivity
          .asFile: _*
      }
}
| kazuhira-r/javaee6-scala-examples | arquillian-jax-rs/src/test/scala/javaee6/web/jaxrs/SimpleResourceTest.scala | Scala | mit | 1,754 |
package net.liftweb.mapper
/*
* Copyright 2006-2009 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
import _root_.scala.collection.mutable._
import _root_.java.lang.reflect.Method
import _root_.java.sql.{ResultSet, Types}
import _root_.scala.xml.{Text, Node, NodeSeq, Group,
Elem, Null, PrefixedAttribute, MetaData}
import _root_.java.util.Date
import _root_.net.liftweb.http.{S, SHtml, FieldError, FieldIdentifier}
import _root_.net.liftweb.http.S._
import _root_.net.liftweb.http.js._
import _root_.net.liftweb.util._
import Helpers._
/**
 * The base (not Typed) trait that defines a field that is mapped to a column or more than 1 column
 * (e.g., MappedPassword) in the database
 */
@serializable
trait BaseMappedField extends SelectableField with Bindable {
  /**
   * Get a JDBC friendly representation of the named field (this is used for MappedFields that correspond to more than
   * 1 column in the database.)
   * @param field -- the name of the field being mapped to
   */
  def jdbcFriendly(field : String): AnyRef

  /**
   * Get a JDBC friendly object for the part of this field that maps to the first
   * column in the database
   */
  def jdbcFriendly: AnyRef

  /**
   * Get the JDBC SQL Type for this field
   */
  def targetSQLType(field: String): Int

  /**
   * Get the JDBC SQL Type for this field
   */
  def targetSQLType: Int

  /**
   * Validate this field and return a list of Validation Issues
   */
  def validate: List[FieldError]

  /**
   * Given the driver type, return the string required to create the column in the database
   */
  def fieldCreatorString(dbType: DriverType, colName: String): String

  /**
   * Given the driver type, return a list of statements to create the columns in the database
   */
  def fieldCreatorString(dbType: DriverType): List[String]

  /**
   * The human name of this field
   */
  def name: String

  /**
   * Convert the field to its name/value pair (e.g., name=David)
   */
  def asString: String

  /**
   * The number of database columns that this field represents
   */
  def dbColumnCount: Int

  /** The database column names derived from the given field name. */
  def dbColumnNames(in: String): List[String]

  /** The name of the (first) database column this field maps to. */
  def dbColumnName: String

  /**
   * Should the field be indexed?
   */
  def dbIndexed_? : Boolean

  /**
   * Is the field the table's primary key
   */
  def dbPrimaryKey_? : Boolean

  /**
   * Is the field a foreign key reference
   */
  def dbForeignKey_? : Boolean

  /**
   * Called when a column has been added to the database via Schemifier
   */
  def dbAddedColumn: Box[() => Unit]

  /**
   * Called when a column has indexed via Schemifier
   */
  def dbAddedIndex: Box[() => Unit]

  /**
   * Create an input field for the item
   */
  def toForm: Box[NodeSeq]

  /**
   * A unique 'id' for the field for form generation
   */
  def fieldId: Option[NodeSeq] = None

  /** The display name as HTML, if one is defined. */
  def displayNameHtml: Box[NodeSeq] = Empty

  /** The display name rendered as HTML, falling back to plain text. */
  def displayHtml: NodeSeq = displayNameHtml openOr Text(displayName)

  /**
   * This is where the instance creates its "toForm" stuff.
   * The actual toForm method wraps the information based on
   * mode.
   */
  def _toForm: Box[NodeSeq]

  /** Render the field's value as HTML. */
  def asHtml: NodeSeq

  /**
   * Called after the field is saved to the database
   */
  protected[mapper] def doneWithSave()

  /**
   * The display name of this field (e.g., "First Name")
   */
  def displayName: String

  /** The field's value as a JavaScript expression. */
  def asJsExp: JsExp

  /** The field as a (name, value) pair for JavaScript serialization. */
  def asJs: List[(String, JsExp)] = List((name, asJsExp))

  /** Whether this field should be included when rendering to JavaScript. */
  def renderJs_? = true
}
/**
 * Mix this trait into a BaseMappedField and it will be indexed
 */
trait DBIndexed extends BaseMappedField {
  override def dbIndexed_? = true
}
/**
 * The Trait that defines a field that is mapped to a foreign key.
 *
 * `KeyType` is the type of the referenced primary key, `MyOwner` the Mapper
 * owning this field and `Other` the referenced KeyedMapper.
 */
trait MappedForeignKey[KeyType, MyOwner <: Mapper[MyOwner], Other <: KeyedMapper[KeyType, Other]] extends MappedField[KeyType, MyOwner] {
  type FieldType <: KeyType
  type ForeignType <: KeyedMapper[KeyType, Other]

  // NOTE(review): the type argument in this pattern is erased at runtime, so
  // any KeyedMapper will take this branch; equality then compares primary keys.
  override def equals(other: Any) = other match {
    case km: KeyedMapper[KeyType, Other] => this.is == km.primaryKeyField.is
    case _ => super.equals(other)
  }

  /** The meta-mapper of the referenced table, used to resolve the key. */
  def dbKeyToTable: KeyedMetaMapper[KeyType, Other]

  /** The (key, label) pairs offered in the select box; Empty means immutable. */
  def validSelectValues: Box[List[(KeyType, String)]] = Empty

  /** Message displayed when the field cannot be changed via a form. */
  def immutableMsg: NodeSeq = Text(?("Can't change"))

  // Renders a select box over validSelectValues, or immutableMsg when there
  // are no valid values. (Removed two unused locals that were never read.)
  override def _toForm: Box[NodeSeq] = Full(validSelectValues.flatMap{
    case Nil => Empty
    case xs =>
      Full(SHtml.selectObj(xs, Full(this.is), this.set))
  }.openOr(immutableMsg))

  /**
   * Is the key defined
   */
  def defined_? : Boolean

  /**
   * Is the obj field cached
   */
  def cached_? : Boolean = synchronized{ _calcedObj }

  /**
   * Load and cache the record that this field references
   */
  def obj: Box[Other] = synchronized {
    if (!_calcedObj) {
      _calcedObj = true
      this._obj = if (defined_?) dbKeyToTable.find(i_is_!) else Empty
    }
    _obj
  }

  /**
   * Prime the reference of this FK reference
   */
  def primeObj(obj: Box[Other]) = synchronized {
    // Bug fix: the supplied value was previously discarded (`_obj` was merely
    // referenced, a no-op, instead of assigned), so every primed reference
    // behaved as if the lookup had already resolved to Empty.
    _obj = obj
    _calcedObj = true
  }

  // Cached referenced record and whether the cache has been populated.
  private var _obj: Box[Other] = Empty
  private var _calcedObj = false
}
/** A BaseMappedField that knows the Mapper type that owns it. */
trait BaseOwnedMappedField[OwnerType <: Mapper[OwnerType]] extends BaseMappedField
/** Minimal typed view of a field: its default value and runtime class. */
trait TypedField[FieldType] {
  /**
   * The default value for the field
   */
  def defaultValue: FieldType

  /**
   * What is the real class that corresponds to FieldType
   */
  def dbFieldClass: Class[FieldType]
}
/**
 * The strongly typed field that's mapped to a column (or many columns) in the database.
 * FieldType is the type of the field and OwnerType is the Owner of the field.
 *
 * Access control: reads go through is/was and are obscured via i_obscure_!
 * unless safe_? or readPermission_? holds; writes go through set and throw
 * unless safe_? or writePermission_? holds.
 */
trait MappedField[FieldType <: Any,OwnerType <: Mapper[OwnerType]] extends TypedField[FieldType] with BaseOwnedMappedField[OwnerType] with FieldIdentifier {
  /**
   * Should the field be ignored by the OR Mapper?
   */
  def ignoreField_? = false

  /**
   * Get the field that this prototypical field represents
   *
   * @param actual the object to find the field on
   */
  def actualField(actual: OwnerType): MappedField[FieldType, OwnerType] = actual.getSingleton.getActualField(actual, this)

  /**
   * Given the driver type, return the string required to create the column in the database
   */
  def fieldCreatorString(dbType: DriverType, colName: String): String

  /**
   * Given the driver type, return a list of SQL creation strings for the columns represented by this field
   */
  def fieldCreatorString(dbType: DriverType): List[String] = dbColumnNames(name).map{c => fieldCreatorString(dbType, c)}

  /**
   * Is the field dirty
   */
  private var _dirty_? = false

  /**
   * Is the field dirty (has it been changed since the record was loaded from the database).
   * Primary keys are never reported dirty.
   */
  def dirty_? = !dbPrimaryKey_? && _dirty_?

  /**
   * Make the field dirty
   */
  protected def dirty_?(b: Boolean) = _dirty_? = b

  /**
   * Called when a column has been added to the database via Schemifier
   */
  def dbAddedColumn: Box[() => Unit] = Empty

  /**
   * Called when a column has indexed via Schemifier
   */
  def dbAddedIndex: Box[() => Unit] = Empty

  /**
   * override this method in indexed fields to indicate that the field has been saved
   */
  def dbIndexFieldIndicatesSaved_? = false;

  /**
   * Return the owner of this field
   */
  def fieldOwner: OwnerType

  /**
   * Are we in "safe" mode (i.e., the value of the field can be read or written without any security checks.)
   */
  final def safe_? : Boolean = fieldOwner.safe_?

  /**
   * Given the current execution state, can the field be written?
   */
  def writePermission_? = false

  /**
   * Given the current execution state, can the field be read?
   */
  def readPermission_? = false

  /**
   * Assignment from the underlying type. It's ugly, but:<br />
   * field() = new_value <br />
   * field := new_value <br />
   * field set new_value <br />
   * field.set(new_value) <br />
   * are all the same
   */
  def update[Q <% FieldType](v: Q) {
    this.set(v)
  }

  /** Set the value and return the owner, allowing fluent chaining. */
  def apply[Q <% FieldType](v: Q): OwnerType = {
    this.set(v)
    fieldOwner
  }

  // Lazily assigned by the owner's meta-mapper (see name / setName_!).
  private var _name : String = null

  /**
   * The internal name of this field. Use name
   */
  private[mapper] final def i_name_! = _name

  /**
   * The name of this field.
   * Triggers the owner's name-resolution pass on first access.
   */
  final def name = synchronized {
    if (_name eq null) {
      fieldOwner.checkNames
    }
    _name
  }

  /**
   * Set the name of this field.
   * Only honored in safe mode; the stored name is always lower-cased.
   */
  private[mapper] final def setName_!(newName : String) : String = {
    if(safe_?) _name = newName.toLowerCase
    _name
  }

  /**
   * The display name of this field (e.g., "First Name")
   */
  override def displayName: String = name

  /** Clear the dirty flag (only allowed in safe mode). */
  def resetDirty {
    if (safe_?) dirty_?(false)
  }

  /** Should this field be shown in generated displays/forms? */
  def dbDisplay_? = true

  /**
   * pascal-style assignment for syntactic sugar
   */
  /*
  def ::=(v : Any) : T
  */

  /**
   * Attempt to figure out what the incoming value is and set the field to that value. Return true if
   * the value could be assigned
   */
  def setFromAny(value: Any): FieldType

  /** In test mode, tag the form element with the computed field name for scraping. */
  def toFormAppendedAttributes: MetaData =
    if (Props.mode == Props.RunModes.Test)
      new PrefixedAttribute("lift", "field_name", Text(calcFieldName), Null)
    else Null

  /** Fully qualified "table:field" identifier. */
  def calcFieldName: String = fieldOwner.getSingleton._dbTableName+":"+name

  /** Build the form markup for this field, appending test-mode attributes. */
  final def toForm: Box[NodeSeq] = {
    def mf(in: Node): NodeSeq = in match {
      case g: Group => g.nodes.flatMap(mf)
      case e: Elem => e % toFormAppendedAttributes
      case other => other
    }

    _toForm.map(_.flatMap(mf) )
  }

  /**
   * Create an input field for the item
   */
  override def _toForm: Box[NodeSeq] =
    S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName =>
      Full(<input type='text' id={fieldId}
           name={funcName} lift:gc={funcName}
           value={is match {case null => "" case s => s.toString}}/>)
    }

  /**
   * Set the field to the value.
   * @throws Exception if neither safe mode nor write permission applies.
   */
  def set(value: FieldType): FieldType = {
    if (safe_? || writePermission_?) i_set_!(value)
    else throw new Exception("Do not have permissions to set this field")
  }

  /**
   * Set the field to the Box value if the Box is Full
   */
  def set_?(value: Box[FieldType]): Box[FieldType] = {
    value.foreach(v => this.set(v))
    value
  }

  /**
   * A list of functions that transform the value before it is set. The transformations
   * are also applied before the value is used in a query. Typical applications
   * of this are trimming and/or toLowerCase-ing strings
   */
  protected def setFilter: List[FieldType => FieldType] = Nil

  // Applies the setFilter chain before delegating to the concrete setter.
  protected final def i_set_!(value: FieldType): FieldType = {
    real_i_set_!(runFilters(value, setFilter))
  }

  /** Apply each filter in order (left to right) to the incoming value. */
  def runFilters(in: FieldType, filter: List[FieldType => FieldType]): FieldType =
    filter match {
      case Nil => in
      case x :: xs => runFilters(x(in), xs)
    }

  /**
   * Must be implemented to store the value of the field
   */
  protected def real_i_set_!(value: FieldType): FieldType

  // Builders used by the meta-mapper to hydrate this field from a JDBC ResultSet.
  def buildSetActualValue(accessor: Method, inst : AnyRef, columnName : String) : (OwnerType, AnyRef) => Unit
  def buildSetLongValue(accessor: Method, columnName: String): (OwnerType, Long, Boolean) => Unit
  def buildSetStringValue(accessor: Method, columnName: String): (OwnerType, String) => Unit
  def buildSetDateValue(accessor: Method, columnName: String): (OwnerType, Date) => Unit
  def buildSetBooleanValue(accessor: Method, columnName: String) : (OwnerType, Boolean, Boolean) => Unit

  // Reflectively fetch this field's concrete instance from a Mapper instance.
  protected def getField(inst: OwnerType, meth: Method) = meth.invoke(inst).asInstanceOf[MappedField[FieldType,OwnerType]];

  // Apply func to the instance's field if func is defined for it.
  protected def doField(inst: OwnerType, meth: Method, func: PartialFunction[MappedField[FieldType, OwnerType], Unit]) {
    val f = getField(inst, meth)
    if (func.isDefinedAt(f)) func(f)
  }

  /**
   * Convert the field to its "context free" type (e.g., String, Int, Long, etc.)
   * If there are no read permissions, the value will be obscured
   */
  def is: FieldType = {
    if (safe_? || readPermission_?) i_is_!
    else i_obscure_!(i_is_!)
  }

  /**
   * What value was the field's value when it was pulled from the DB?
   */
  def was: FieldType = {
    if (safe_? || readPermission_?) i_was_!
    else i_obscure_!(i_was_!)
  }

  /**
   * The actual value of the field
   */
  protected def i_is_! : FieldType

  /**
   * The value of the field when it was pulled from the DB
   */
  protected def i_was_! : FieldType

  /**
   * Obscure the incoming value to a "safe" value (e.g., if there are
   * not enough rights to view the entire social security number 123-45-5678, this
   * method might return ***-**-*678
   */
  protected def i_obscure_!(in : FieldType): FieldType

  /**
   * Return the field name and field value, delimited by an '='
   */
  def asString = displayName + "=" + toString

  /** How many database columns back this field (1 for simple fields). */
  def dbColumnCount = 1

  def dbColumnNames(in : String) = if (dbColumnCount == 1) List(dbColumnName) else List(in.toLowerCase)

  /** Column name, suffixed with "_c" when it collides with a SQL reserved word. */
  def dbColumnName = name.toLowerCase match {
    case name if DB.reservedWords.contains(name) => name+"_c"
    case name => name
  }

  // "table.column" form used when building SELECT statements.
  lazy val dbSelectString = fieldOwner.getSingleton.
  dbTableName + "." + dbColumnName


  def dbIndexed_? : Boolean = false

  def dbPrimaryKey_? : Boolean = false

  /**
   * Is the field a foreign key reference
   */
  def dbForeignKey_? : Boolean = false

  def jdbcFriendly(field : String) : Object

  def jdbcFriendly: Object = jdbcFriendly(dbColumnName)

  /**
   * Get the JDBC SQL Type for this field
   */
  def targetSQLType(field : String): Int = targetSQLType

  /**
   * Get the JDBC SQL Type for this field
   */
  def targetSQLType: Int

  override def toString : String =
    is match {
      case null => ""
      case v => v.toString
    }

  /** Validation functions applied to the current value by validate. */
  def validations: List[FieldType => List[FieldError]] = Nil

  /**
   * Run all validations against the current value, collecting errors.
   * Partial functions are only invoked when defined at the value.
   */
  def validate : List[FieldError] = {
    val cv = is
    validations.flatMap{
      case pf: PartialFunction[FieldType, List[FieldError]] =>
        if (pf.isDefinedAt(cv)) pf(cv)
        else Nil
      case f => f(cv)
    }
  }

  // setFilter is applied before conversion so queries see the same canonical value.
  final def convertToJDBCFriendly(value: FieldType): Object = real_convertToJDBCFriendly(runFilters(value, setFilter))

  protected def real_convertToJDBCFriendly(value: FieldType): Object

  /**
   * Does the "right thing" comparing mapped fields.
   * NOTE: the first case relies on type erasure and matches any MappedField.
   */
  override def equals(other: Any): Boolean = {
    other match {
      case mapped: MappedField[Any, Nothing] => this.is == mapped.is
      case ov: AnyRef if (ov ne null) && dbFieldClass.isAssignableFrom(ov.getClass) => this.is == runFilters(ov.asInstanceOf[FieldType], setFilter)
      case ov => this.is == ov
    }
  }

  override def asHtml : Node = Text(toString)
}
object MappedField {
  // Implicit widening from a field to its underlying value (reads via is,
  // so permission checks/obscuring still apply).
  implicit def mapToType[T, A<:Mapper[A]](in : MappedField[T, A]): T = in.is
}
/**
 * A field used as an index (typically the primary key). Provides conversions
 * from the various external key representations to the key type O.
 */
trait IndexedField[O] extends BaseIndexedField {
  def convertKey(in: String): Box[O]
  def convertKey(in: Int): Box[O]
  def convertKey(in: Long): Box[O]
  def convertKey(in: AnyRef): Box[O]
  def makeKeyJDBCFriendly(in: O): AnyRef
  // Index fields are not shown in generated displays by default.
  def dbDisplay_? = false
}
/** Marker trait for fields that participate in a database index. */
trait BaseIndexedField extends BaseMappedField {
}
/**
 * A trait that defines foreign key references
 */
trait BaseForeignKey extends BaseMappedField {
  // The referenced primary-key type and the mappers on each side of the relation.
  type KeyType
  type KeyedForeignType <: KeyedMapper[KeyType, KeyedForeignType]
  type OwnerType <: Mapper[OwnerType]

  /**
   * Is the key defined?
   */
  def defined_? : Boolean

  /**
   * get the object referred to by this foreign key
   */
  def dbKeyToTable: BaseMetaMapper
  def dbKeyToColumn: BaseMappedField

  /** Find all owners whose foreign key matches the given key (or mapper). */
  def findFor(key: KeyType): List[OwnerType]
  def findFor(key: KeyedForeignType): List[OwnerType]

  /**
   * Called when Schemifier adds a foreign key. Return a function that will be called when Schemifier
   * is done with the schemification.
   */
  def dbAddedForeignKey: Box[() => Unit]
}
/**
 * Hooks into a Mapper's persistence lifecycle. Every callback is a no-op by
 * default; mix this trait in and override only the hooks you need. Callbacks
 * fire around validation, save (create/update), and delete operations.
 */
trait LifecycleCallbacks {
  /** Fired immediately before any validation runs. */
  def beforeValidation: Unit = ()
  /** Fired before validation when the record is about to be created. */
  def beforeValidationOnCreate: Unit = ()
  /** Fired before validation when the record is about to be updated. */
  def beforeValidationOnUpdate: Unit = ()
  /** Fired after validation completes. */
  def afterValidation: Unit = ()
  /** Fired after validation of a record being created. */
  def afterValidationOnCreate: Unit = ()
  /** Fired after validation of a record being updated. */
  def afterValidationOnUpdate: Unit = ()

  /** Fired before any save (both create and update). */
  def beforeSave: Unit = ()
  /** Fired before an insert. */
  def beforeCreate: Unit = ()
  /** Fired before an update. */
  def beforeUpdate: Unit = ()

  /** Fired after any save (both create and update). */
  def afterSave: Unit = ()
  /** Fired after an insert. */
  def afterCreate: Unit = ()
  /** Fired after an update. */
  def afterUpdate: Unit = ()

  /** Fired before a delete. */
  def beforeDelete: Unit = ()
  /** Fired after a delete. */
  def afterDelete: Unit = ()
}
| andreum/liftweb | lift-mapper/src/main/scala/net/liftweb/mapper/MappedField.scala | Scala | apache-2.0 | 16,668 |
package com.regblanc.sgl
package menu.android
import android.app.Activity
import android.os.Bundle
import sgl.{InputHelpersComponent, GameLoopStatisticsComponent, ViewportComponent}
import sgl.android._
import sgl.util._
import sgl.scene._
import test.core._
/**
 * Android entry-point Activity wiring together the SGL cake components
 * (input helpers, logging, game-loop stats, scene graph, viewport) with the
 * game logic provided by AbstractApp.
 */
class MainActivity extends Activity with AbstractApp with AndroidApp
  with InputHelpersComponent with NoLoggingProvider with GameLoopStatisticsComponent
  with SceneComponent with ViewportComponent {

  // Cap the game loop at 40 frames per second (None would leave it uncapped).
  override val TargetFps = Some(40)
}
| regb/scala-game-library | examples/menu/android/src/main/scala/MainActivity.scala | Scala | mit | 504 |
package com.getjenny.command
/**
* Created by angelo on 29/03/17.
*/
import java.io.File
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model.{HttpRequest, _}
import com.getjenny.starchat.serializers.JsonSupport
import com.getjenny.starchat.services.FileToDocuments
import scopt.OptionParser
import scala.collection.immutable
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContextExecutor, Future}
/**
 * Command-line tool that reads decision-table entries from a CSV file and
 * POSTs each one to a StarChat decision-table endpoint.
 */
object IndexDecisionTable extends JsonSupport {

  /**
   * CLI parameters.
   * NOTE(review): skiplines is parsed from the command line but never applied
   * in execute — confirm whether FileToDocuments should honor it.
   */
  private[this] case class Params(
    host: String = "http://localhost:8888",
    indexName: String = "index_getjenny_english_0",
    path: String = "/decisiontable",
    inputfile: String = "decision_table.csv",
    separator: Char = ',',
    skiplines: Int = 1,
    timeout: Int = 60,
    headerKv: Seq[String] = Seq.empty[String]
  )

  /** Index every state found in the CSV, logging failures per state. */
  private[this] def execute(params: Params) {
    implicit val system: ActorSystem = ActorSystem()
    implicit val executionContext: ExecutionContextExecutor = system.dispatcher

    val baseUrl = params.host + "/" + params.indexName + params.path
    val file = new File(params.inputfile)

    val httpHeader: immutable.Seq[HttpHeader] = if(params.headerKv.nonEmpty) {
      val headers: Seq[RawHeader] = params.headerKv.map(x => {
        // Split on the FIRST ':' only, so header values that themselves
        // contain colons (URLs, "Bearer a:b" tokens, ...) stay intact.
        val headerOpt = x.split(":", 2)
        require(headerOpt.length == 2, s"Malformed header, expected key:value but got: $x")
        val key = headerOpt(0)
        val value = headerOpt(1)
        RawHeader(key, value)
      }) ++ Seq(RawHeader("application", "json"))
      headers.to[immutable.Seq]
    } else {
      immutable.Seq(RawHeader("application", "json"))
    }

    val timeout = Duration(params.timeout, "s")

    FileToDocuments.getDTDocumentsFromCSV(log = system.log, file = file, separator = params.separator)
      .foreach(state => {
        val entity_future = Marshal(state).to[MessageEntity]
        val entity = Await.result(entity_future, 10.second)
        val responseFuture: Future[HttpResponse] =
          Http().singleRequest(HttpRequest(
            method = HttpMethods.POST,
            uri = baseUrl,
            headers = httpHeader,
            entity = entity))
        // One blocking POST per state; acceptable for a one-shot CLI tool.
        val result = Await.result(responseFuture, timeout)
        result.status match {
          case StatusCodes.Created | StatusCodes.OK => println("indexed: " + state.state)
          case _ =>
            system.log.error("failed indexing state(" + state.state + ") Message(" + result.toString() + ")")
        }
      }
      )
    Await.ready(system.terminate(), Duration.Inf)
  }

  /** Parse the command line and run the indexer; exits non-zero on bad args. */
  def main(args: Array[String]) {
    val defaultParams = Params()
    val parser = new OptionParser[Params]("IndexDecisionTable") {
      head("Index data into DecisionTable")
      help("help").text("prints this usage text")
      opt[String]("inputfile")
        .text(s"the path of the file with the decision table entries" +
          s"  default: ${defaultParams.inputfile}")
        .action((x, c) => c.copy(inputfile = x))
      opt[String]("host")
        .text(s"*Chat base url" +
          s"  default: ${defaultParams.host}")
        .action((x, c) => c.copy(host = x))
      opt[String]("index_name")
        .text(s"the index_name, e.g. index_<language>_XXX" +
          s"  default: ${defaultParams.indexName}")
        .action((x, c) => c.copy(indexName = x))
      opt[String]("path")
        .text(s"the service path" +
          s"  default: ${defaultParams.path}")
        .action((x, c) => c.copy(path = x))
      opt[Int]("timeout")
        .text(s"the timeout in seconds of each insert operation" +
          s"  default: ${defaultParams.timeout}")
        .action((x, c) => c.copy(timeout = x))
      opt[Int]("skiplines")
        .text(s"skip the first N lines from vector file" +
          s"  default: ${defaultParams.skiplines}")
        .action((x, c) => c.copy(skiplines = x))
      opt[Seq[String]]("header_kv")
        .text(s"header key-value pair, as key1:value1,key2:value2" +
          s"  default: ${defaultParams.headerKv}")
        .action((x, c) => c.copy(headerKv = x))
    }

    parser.parse(args, defaultParams) match {
      case Some(params) =>
        execute(params)
        sys.exit(0)
      case _ =>
        sys.exit(1)
    }
  }
}
| GetJenny/starchat | src/main/scala/com/getjenny/command/IndexDecisionTable.scala | Scala | gpl-2.0 | 4,505 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package cogdebugger.ui.fieldvisualizations.vector
//
//
//import libcog._
//import cogx.platform.cpumemory.VectorFieldMemory
//import scala.swing._
//
///** Stand-alone testing of a field viewer.
// *
// * @author Greg Snider
// */
//object TestVectorFieldComponentsPanel extends SimpleSwingApplication {
// // Create a simple vector field
// val Rows = 100
// val Columns = 100
// val vectorField = VectorFieldMemory(Rows, Columns,
// (r, c) => new Vector(r, c))
//
// lazy val top = new MainFrame {
// title = "Test VectorFieldComponentsPanel"
// contents = new BoxPanel(Orientation.Horizontal) {
// contents += new VectorFieldComponentsPanel(vectorField.fieldType)
// {
// update(vectorField, vectorField, 0L)
// }
// //contents += new Geometric2DVectorView(field0D)
// }
// minimumSize = new Dimension(250, 100)
// }
//}
| hpe-cct/cct-core | src/test/scala/cogdebugger/ui/fieldvisualizations/vector/TestVectorFieldComponentsPanel.scala | Scala | apache-2.0 | 1,504 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import org.apache.kafka.common.requests.ListOffsetsResponse
/** A single (key, value) pair stored in a log index. */
sealed trait IndexEntry {
  // We always use Long for both key and value to avoid boxing.
  def indexKey: Long
  def indexValue: Long
}
/**
 * The mapping between a logical log offset and the physical position
 * in some log file of the beginning of the message set entry with the
 * given offset.
 */
case class OffsetPosition(offset: Long, position: Int) extends IndexEntry {
  override def indexKey = offset
  // Position is stored as an Int but the index contract is Long-typed.
  override def indexValue = position.toLong
}
/**
 * The mapping between a timestamp to a message offset. The entry means that any message whose timestamp is greater
 * than that timestamp must be at or after that offset.
 * @param timestamp The max timestamp before the given offset.
 * @param offset The message offset.
 */
case class TimestampOffset(timestamp: Long, offset: Long) extends IndexEntry {
  override def indexKey = timestamp
  override def indexValue = offset
}
object TimestampOffset {
  // Sentinel entry used when no timestamp/offset mapping is known.
  val Unknown = TimestampOffset(ListOffsetsResponse.UNKNOWN_TIMESTAMP, ListOffsetsResponse.UNKNOWN_OFFSET)
}
| guozhangwang/kafka | core/src/main/scala/kafka/log/IndexEntry.scala | Scala | apache-2.0 | 1,910 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.algebra
import com.twitter.algebird.Monoid
import com.twitter.bijection.Injection
import com.twitter.storehaus.{ FutureCollector, Store }
import com.twitter.util.Future
/**
 * Algebraic Enrichments on Store.
 */
object StoreAlgebra {
  // Pimp-my-library entry point: adds algebraic combinators to any Store.
  implicit def enrich[K, V](store: Store[K, V]): AlgebraicStore[K, V] =
    new AlgebraicStore(store)

  @deprecated("Use com.twitter.storehaus.Store#convert", "0.3.1")
  def convert[K1, K2, V1, V2](store: Store[K1, V1])(kfn: K2 => K1)
                             (implicit inj: Injection[V2, V1]): Store[K2, V2] =
    new com.twitter.storehaus.ConvertedStore(store)(kfn)
}
/** Enrichment wrapper giving a Store algebraic operations (see StoreAlgebra.enrich). */
class AlgebraicStore[K, V](store: Store[K, V]) {
  /** View this store as a MergeableStore using the value type's Monoid. */
  def toMergeable(implicit mon: Monoid[V], fc: FutureCollector[(K, Option[V])]): MergeableStore[K, V] =
    MergeableStore.fromStore(store)

  @deprecated("Use com.twitter.storehaus.EnrichedStore#composeKeyMapping", "0.3.1")
  def composeKeyMapping[K1](fn: K1 => K): Store[K1, V] = StoreAlgebra.convert(store)(fn)

  @deprecated("Use com.twitter.storehaus.EnrichedStore#mapValues", "0.3.1")
  def mapValues[V1](implicit inj: Injection[V1, V]): Store[K, V1] = StoreAlgebra.convert(store)(identity[K])

  @deprecated("Use com.twitter.storehaus.EnrichedStore#convert", "0.3.1")
  def convert[K1, V1](fn: K1 => K)(implicit inj: Injection[V1, V]): Store[K1, V1] =
    StoreAlgebra.convert(store)(fn)
}
| rubanm/storehaus | storehaus-algebra/src/main/scala/com/twitter/storehaus/algebra/StoreAlgebra.scala | Scala | apache-2.0 | 1,962 |
package com.twitter.zipkin.web
import com.google.common.io.ByteStreams
import com.twitter.finagle.httpx.{ParamMap, Request, Response}
import com.twitter.finagle.stats.{Stat, StatsReceiver}
import com.twitter.finagle.tracing.SpanId
import com.twitter.finagle.{Filter, Service}
import com.twitter.finatra.httpclient.HttpClient
import com.twitter.io.Buf
import com.twitter.util.{Future, TwitterDateFormat}
import com.twitter.zipkin.common.Trace
import com.twitter.zipkin.json._
import com.twitter.zipkin.web.mustache.ZipkinMustache
import com.twitter.zipkin.{Constants => ZConstants}
import org.jboss.netty.handler.codec.http.QueryStringEncoder
import java.io.{File, FileInputStream, InputStream}
import scala.annotation.tailrec
/**
 * HTTP handlers for the Zipkin web UI: static assets, the trace index page,
 * single-trace rendering, and proxying to the query API. Each handler produces
 * a Renderer that mutates the outgoing Response in place.
 */
class Handlers(mustacheGenerator: ZipkinMustache, queryExtractor: QueryExtractor) {
  // Timestamp format used when rendering trace start times in the UI.
  private[this] val fmt = TwitterDateFormat("MM-dd-yyyy'T'HH:mm:ss.SSSZ")

  import Util._

  type Renderer = (Response => Unit)

  /** Copies body, status and headers from an upstream response. */
  case class CopyRenderer(input: Response) extends Renderer {
    def apply(response: Response) {
      response.contentString = input.contentString
      response.statusCode = input.statusCode
      input.headerMap foreach (e => response.headerMap.put(e._1,e._2))
    }
  }

  /** Renders a plain-text error with the given status code. */
  case class ErrorRenderer(code: Int, msg: String) extends Renderer {
    def apply(response: Response) {
      response.contentString = msg
      response.statusCode = code
    }
  }

  /** Renders an HTML page from a mustache template plus its data map. */
  case class MustacheRenderer(template: String, data: Map[String, Object]) extends Renderer {
    def apply(response: Response) {
      response.contentType = "text/html"
      response.contentString = generate
    }

    def generate = mustacheGenerator.render(template, data)
  }

  /** Serves a fixed blob (static asset) with the given content type. */
  case class StaticRenderer(input: InputStream, typ: String) extends Renderer {
    // Read the asset fully once at construction so the stream can be closed.
    private[this] val content = {
      val bytes = ByteStreams.toByteArray(input)
      input.close()
      bytes
    }

    def apply(response: Response) {
      response.setContentType(typ)
      response.content = Buf.ByteArray.Owned(content)
    }
  }

  private[this] val EmptyTraces = Future.value(Seq.empty[TraceSummary])
  private[this] val EmptyStrings = Future.value(Seq.empty[String])
  private[this] val NotFound = Future.value(ErrorRenderer(404, "Not Found"))

  /** Records request latency and per-status-code response counts. */
  def collectStats(stats: StatsReceiver): Filter[Request, Response, Request, Response] =
    Filter.mk[Request, Response, Request, Response] { (req, svc) =>
      Stat.timeFuture(stats.stat("request"))(svc(req)) onSuccess { rep =>
        stats.scope("response").counter(rep.statusCode.toString).incr()
      }
    }

  /** Converts any handler exception into a 500 page showing the stack trace. */
  val catchExceptions =
    Filter.mk[Request, Renderer, Request, Renderer] { (req, svc) =>
      svc(req) rescue { case thrown: Throwable =>
        val errorMsg = Option(thrown.getMessage).getOrElse("Unknown error")
        val stacktrace = Option(thrown.getStackTraceString).getOrElse("")
        Future.value(ErrorRenderer(500, errorMsg + "\n\n\n" + stacktrace))
      }
    }

  /** Applies the Renderer to the request's response and sets Content-Length. */
  val renderPage =
    Filter.mk[Request, Response, Request, Renderer] { (req, svc) =>
      svc(req) map { renderer =>
        val res = req.response
        renderer(res)
        res.contentLength = res.content.length
        res
      }
    }

  /** 404s requests whose path has fewer segments than the route requires. */
  def checkPath(path: List[String]): Filter[Request, Renderer, Request, Renderer] = {
    val requiredSize = path.takeWhile { !_.startsWith(":") }.size
    if (path.size == requiredSize) Filter.identity[Request, Renderer] else
      Filter.mk[Request, Renderer, Request, Renderer] { (req, svc) =>
        if (req.path.split("/").size >= path.size) svc(req) else NotFound
      }
  }

  /** Wraps the rendered body in the shared v2/layout.mustache page chrome. */
  def addLayout(pageTitle: String, environment: String): Filter[Request, Renderer, Request, Renderer] =
    Filter.mk[Request, Renderer, Request, Renderer] { (req, svc) =>
      svc(req) map { renderer =>
        response: Response => {
          renderer(response)
          val data = Map[String, Object](
            ("pageTitle" -> pageTitle),
            ("environment" -> environment),
            ("body" -> response.contentString))
          val r = MustacheRenderer("v2/layout.mustache", data)
          r(response)
        }
      }
    }

  /**
   * Serves static assets from the classpath or an optional docRoot directory.
   * Renderers are cached per path unless a docRoot is set (dev mode).
   */
  def handlePublic(
    resourceDirs: Set[String],
    typesMap: Map[String, String],
    docRoot: Option[String] = None
  ) =
    new Service[Request, Renderer] {
      private[this] var rendererCache = Map.empty[String, Future[Renderer]]

      private[this] def getStream(path: String): Option[InputStream] =
        docRoot map { root =>
          new FileInputStream(new File(root, path))
        } orElse {
          Option(getClass.getResourceAsStream(path)) filter { _.available > 0 }
        }

      private[this] def getRenderer(path: String): Option[Future[Renderer]] = {
        rendererCache.get(path) orElse {
          synchronized {
            rendererCache.get(path) orElse {
              resourceDirs find(path.startsWith) flatMap { _ =>
                val typ = typesMap find { case (n, _) => path.endsWith(n) } map { _._2 } getOrElse("text/plain")
                getStream(path) map { input =>
                  val renderer = Future.value(StaticRenderer(input, typ))
                  if (docRoot.isEmpty) rendererCache += (path -> renderer)
                  renderer
                }
              }
            }
          }
        }
      }

      def apply(req: Request): Future[Renderer] =
        // Reject path traversal attempts outright.
        if (req.path contains "..") NotFound else {
          getRenderer(req.path) getOrElse NotFound
        }
    }

  // View-model classes handed to the mustache templates.
  case class MustacheServiceDuration(name: String, count: Int, max: Long)
  case class MustacheTraceSummary(
    traceId: String,
    startTs: String,
    timestamp: Long,
    duration: Long,
    durationStr: String,
    servicePercentage: Int,
    spanCount: Int,
    serviceDurations: Seq[MustacheServiceDuration],
    width: Int)
  case class MustacheTraceId(id: String)

  // Sums service time across possibly-overlapping span intervals, counting
  // overlapping regions only once.
  @tailrec
  private[this] def totalServiceTime(stamps: Seq[SpanTimestamp], acc: Long = 0): Long =
    if (stamps.isEmpty) acc else {
      val ts = stamps.minBy(_.startTs)
      val (current, next) = stamps.partition { t => t.startTs >= ts.startTs && t.endTs <= ts.endTs }
      val endTs = current.map(_.endTs).max
      totalServiceTime(next, acc + (endTs - ts.startTs))
    }

  /** Converts trace summaries into the data map expected by v2/index.mustache. */
  private[this] def traceSummaryToMustache(
    serviceName: Option[String],
    ts: Seq[TraceSummary]
  ): Map[String, Any] = {
    val maxDuration = ts.foldLeft(Long.MinValue) { case ((maxD), t) =>
      math.max(t.durationMicro / 1000, maxD)
    }

    val traces = ts.map { t =>
      val duration = t.durationMicro / 1000
      val groupedSpanTimestamps = t.spanTimestamps.groupBy(_.name)

      val serviceDurations = groupedSpanTimestamps.map { case (n, sts) =>
        MustacheServiceDuration(n, sts.length, sts.map(_.duration).max / 1000)
      }.toSeq

      val serviceTime = for {
        name <- serviceName
        timestamps <- groupedSpanTimestamps.get(name)
      } yield totalServiceTime(timestamps)

      MustacheTraceSummary(
        t.traceId,
        fmt.format(new java.util.Date(t.startTs / 1000)),
        t.startTs,
        duration,
        durationStr(t.durationMicro.toLong * 1000),
        serviceTime.map { st => ((st.toFloat / t.durationMicro.toFloat) * 100).toInt }.getOrElse(0),
        groupedSpanTimestamps.foldLeft(0) { case (acc, (_, sts)) => acc + sts.length },
        serviceDurations,
        ((duration.toFloat / maxDuration) * 100).toInt
      )
    }.sortBy(_.duration).reverse

    Map(
      ("traces" -> traces),
      ("count" -> traces.size))
  }

  /** Renders the query/index page, fetching span names and matching traces. */
  def handleIndex(client: HttpClient): Service[Request, Renderer] =
    Service.mk[Request, Renderer] { req =>
      val serviceName = req.params.get("serviceName").filterNot(_ == "")
      val spanName = req.params.get("spanName").filterNot(_ == "")
      val spansCall = serviceName match {
        case Some(service) => client.executeJson[Seq[String]](Request(s"/api/v1/spans?serviceName=${service}"))
        case None => EmptyStrings
      }
      // only call get traces if the user entered a query
      val tracesCall = serviceName match {
        case Some(service) => route[Seq[Seq[JsonSpan]]](client, "/api/v1/traces", req.params)
          .map(traces => traces.map(_.map(JsonSpan.invert))
          .map(Trace.apply(_)).flatMap(TraceSummary(_).toSeq))
        case None => EmptyTraces
      }
      for (spans <- spansCall; traces <- tracesCall) yield {
        val spanList = spans.toList map {
          span => Map("name" -> span, "selected" -> (if (Some(span) == spanName) "selected" else ""))
        }
        var data = Map[String, Object](
          ("serviceName" -> serviceName),
          ("endTs" -> queryExtractor.getTimestampStr(req)),
          ("annotationQuery" -> req.params.get("annotationQuery").getOrElse("")),
          ("spans" -> spanList),
          ("limit" -> queryExtractor.getLimitStr(req)))

        queryExtractor.getAnnotations(req).foreach( annos =>
          data ++= Map(
            ("queryResults" -> traceSummaryToMustache(serviceName, traces)),
            ("annotations" -> annos._1),
            ("binaryAnnotations" -> annos._2)))

        MustacheRenderer("v2/index.mustache", data)
      }
    }

  /** Proxies the request (with its query params) to baseUri on the API client. */
  def handleRoute(client: HttpClient, baseUri: String): Service[Request, Renderer] =
    Service.mk[Request, Renderer] { req =>
      val encoder = new QueryStringEncoder(baseUri)
      req.params.foreach { case (key, value) =>
        encoder.addParam(key, value)
      }
      client.execute(Request(encoder.toString)).map(CopyRenderer)
    }

  // Fetches and JSON-decodes baseUri with the given params appended.
  private[this] def route[T: Manifest](client: HttpClient, baseUri: String, params: ParamMap) = {
    val encoder = new QueryStringEncoder(baseUri)
    params.foreach { case (key, value) =>
      encoder.addParam(key, value)
    }
    client.executeJson[T](Request(encoder.toString))
  }

  /** Renders the (static) dependency graph page. */
  def handleDependency(): Service[Request, MustacheRenderer] =
    Service.mk[Request, MustacheRenderer] { req =>
      Future(MustacheRenderer("v2/dependency.mustache", Map[String, Object]()))
    }

  // Parses the trailing path segment as a SpanId, if present and valid.
  private[this] def pathTraceId(id: Option[String]): Option[SpanId] =
    id.flatMap(SpanId.fromString(_))

  trait NotFoundService extends Service[Request, Renderer] {
    def process(req: Request): Option[Future[Renderer]]

    def apply(req: Request): Future[Renderer] =
      process(req) getOrElse NotFound
  }

  /** Builds the full v2/trace.mustache data model for a single trace. */
  private[this] def renderTrace(trace: Trace): Renderer = {
    val traceStartTs = trace.getStartAndEndTimestamp.map(_.start).getOrElse(0L)
    val spanDepths = trace.toSpanDepths
    val childMap = trace.getIdToChildrenMap
    val spanMap = trace.getIdToSpanMap

    val spans = for {
      rootSpan <- trace.getRootSpans()
      span <- trace.getSpanTree(rootSpan, childMap).toList
    } yield {
      val start = span.firstAnnotation.map(_.timestamp).getOrElse(traceStartTs)

      // Reuse the depths computed once above instead of recomputing
      // trace.toSpanDepths for every span in the trace.
      val depth = spanDepths.get.getOrElse(span.id, 1)
      val width = span.duration.map { d => (d.toDouble / trace.duration.toDouble) * 100 }.getOrElse(0.0)

      val binaryAnnotations = span.binaryAnnotations.map {
        case ann if ZConstants.CoreAddress.contains(ann.key) =>
          val key = ZConstants.CoreAnnotationNames.get(ann.key).get
          val value = ann.host.map { e => s"${e.getHostAddress}:${e.getUnsignedPort}" }.get
          JsonBinaryAnnotation(key, value, None, ann.host.map(JsonEndpoint))
        case ann => JsonBinaryAnnotation(ann)
      }

      Map(
        "spanId" -> SpanId(span.id).toString,
        "parentId" -> span.parentId.filter(spanMap.get(_).isDefined).map(SpanId(_).toString),
        "spanName" -> span.name,
        "serviceNames" -> span.serviceNames.mkString(","),
        "serviceName" -> span.serviceName,
        "duration" -> span.duration,
        "durationStr" -> span.duration.map { d => durationStr(d * 1000) },
        "left" -> ((start - traceStartTs).toFloat / trace.duration.toFloat) * 100,
        "width" -> (if (width < 0.1) 0.1 else width),
        "depth" -> (depth + 1) * 5,
        "depthClass" -> (depth - 1) % 6,
        "children" -> childMap.get(span.id).map(_.map(s => SpanId(s.id).toString).mkString(",")),
        "annotations" -> span.annotations.map { a =>
          Map(
            "isCore" -> ZConstants.CoreAnnotations.contains(a.value),
            "left" -> span.duration.map { d => ((a.timestamp - start).toFloat / d.toFloat) * 100 },
            "endpoint" -> a.host.map { e => s"${e.getHostAddress}:${e.getUnsignedPort}" },
            "value" -> annoToString(a.value),
            "timestamp" -> a.timestamp,
            "relativeTime" -> durationStr((a.timestamp - traceStartTs) * 1000),
            "serviceName" -> a.host.map(_.serviceName),
            "width" -> 8
          )
        },
        "binaryAnnotations" -> binaryAnnotations
      )
    }

    val traceDuration = trace.duration * 1000
    val serviceDurations = TraceSummary(trace) map { summary =>
      summary.spanTimestamps.groupBy(_.name).map { case (n, sts) =>
        MustacheServiceDuration(n, sts.length, sts.map(_.duration).max / 1000)
      }.toSeq
    }

    val timeMarkers = Seq[Double](0.0, 0.2, 0.4, 0.6, 0.8, 1.0).zipWithIndex map { case (p, i) =>
      Map("index" -> i, "time" -> durationStr((traceDuration * p).toLong))
    }

    // Mutable copies handed to the template so client-side JS can reset state.
    val timeMarkersBackup = timeMarkers.map { m => collection.mutable.Map() ++ m }
    val spansBackup = spans.map { m => collection.mutable.Map() ++ m }

    val data = Map[String, Object](
      "duration" -> durationStr(traceDuration),
      "services" -> serviceDurations.map(_.size),
      "depth" -> spanDepths.map(_.values.max),
      "totalSpans" -> spans.size.asInstanceOf[Object],
      "serviceCounts" -> serviceDurations.map(_.sortBy(_.name)),
      "timeMarkers" -> timeMarkers,
      "timeMarkersBackup" -> timeMarkersBackup,
      "spans" -> spans,
      "spansBackup" -> spansBackup)

    MustacheRenderer("v2/trace.mustache", data)
  }

  /** Fetches a trace by the id in the path and renders it, or 404s. */
  def handleTraces(client: HttpClient): Service[Request, Renderer] =
    Service.mk[Request, Renderer] { req =>
      pathTraceId(req.path.split("/").lastOption) map { id =>
        client.executeJson[Seq[JsonSpan]](Request(s"/api/v1/trace/$id"))
          .map(_.map(JsonSpan.invert))
          .map(Trace.apply(_))
          .map(renderTrace(_))
      } getOrElse NotFound
    }
}
| jfeltesse-mdsol/zipkin | zipkin-web/src/main/scala/com/twitter/zipkin/web/Handlers.scala | Scala | apache-2.0 | 14,271 |
package io.scalajs.nodejs
package url
import io.scalajs.util.JSONHelper._
import io.scalajs.util.JsUnderOrHelper._
import org.scalatest._
/**
* URL Tests
* @author lawrence.daniels@gmail.com
*/
class URLTest extends FunSpec {
  describe("URL") {
    // One representative URL exercising protocol, host, path, query string and fragment.
    val originalUrl = "https://www.google.com/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8#q=node"
    // Parsed once and shared by all examples below.
    val parsedUrl = URL.parse(originalUrl)
    it("should break down URLs into components") {
      // The JSON rendering of the parsed URL must expose every component field.
      assert(
        parsedUrl.toJson == """{"protocol":"https:","slashes":true,"auth":null,"host":"www.google.com","port":null,"hostname":"www.google.com","hash":"#q=node","search":"?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8","query":"sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8","pathname":"/webhp","path":"/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8","href":"https://www.google.com/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8#q=node"}""")
    }
    it("should be properly extracted") {
      // `query` carries the raw query string without the leading '?'.
      assert(parsedUrl.query ?== "sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8")
    }
    it("should properly extract the search query") {
      // `search` retains the leading '?' (Node.js url.parse semantics).
      assert(parsedUrl.search ?== "?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8")
    }
    it("should reconstituted the URL to match the original") {
      // Round-trip: format(parse(url)) must reproduce the input exactly.
      assert(URL.format(parsedUrl) == originalUrl)
    }
  }
}
| scalajs-io/nodejs | app/lts/src/test/scala/io/scalajs/nodejs/url/URLTest.scala | Scala | apache-2.0 | 1,332 |
/*
*
* * Licensed to the Apache Software Foundation (ASF) under one or more
* * contributor license agreements. See the NOTICE file distributed with
* * this work for additional information regarding copyright ownership.
* * The ASF licenses this file to You under the Apache License, Version 2.0
* * (the "License"); you may not use this file except in compliance with
* * the License. You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.apache.eagle.datastream
import com.typesafe.config.Config
import org.jgrapht.experimental.dag.DirectedAcyclicGraph
import org.slf4j.LoggerFactory
/**
* to set name for each StreamProducer
* 1. if name is given programatically, then use this name
* 2. otherwise use name generated by scala internally
*/
class StreamNameExpansion(config: Config) extends StreamDAGExpansion(config){
  val LOG = LoggerFactory.getLogger(classOf[StreamNameExpansion])

  override def expand(dag: DirectedAcyclicGraph[StreamProducer, StreamConnector]) = {
    // Visit every producer in the DAG and assign its resolved display name:
    // an explicitly given name wins, otherwise the scala-generated one is used
    // (see NodeNameSelector).
    val producers = dag.iterator()
    while (producers.hasNext) {
      val producer = producers.next()
      producer.name = NodeNameSelector(producer).getName
    }
  }
}
| eBay/Eagle | eagle-core/eagle-data-process/eagle-stream-process-api/src/main/scala/org/apache/eagle/datastream/StreamNameExpansion.scala | Scala | apache-2.0 | 1,541 |
/**
* Copyright (c) 2013, The National Archives <digitalpreservation@nationalarchives.gov.uk>
* https://www.nationalarchives.gov.uk
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package uk.gov.nationalarchives.csv.validator.schema.v1_0
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scalax.file.{Path, PathSet}
import scalaz._
@RunWith(classOf[JUnitRunner])
class FileWildcardSearchSpec extends Specification {

  /** Minimal concrete [[FileWildcardSearch]] that exposes the protected `findBase` for testing. */
  class FindBaseTestableFileWildcardSearch extends FileWildcardSearch[Int] {
    import scalaz.Scalaz._

    val pathSubstitutions = List.empty

    def matchSimplePath(fullPath: String): ValidationNel[String, Int] = 0.successNel[String]

    def matchWildcardPaths(matchList: PathSet[Path], fullPath: String): ValidationNel[String, Int] = 0.successNel[String]

    def findBaseWrapper(path: String): (String, String) = {
      val (basePath, fileName) = super.findBase(path)
      (basePath.toString, fileName)
    }
  }

  /** Runs findBase on `base + sep + file` and checks it splits back into (base, file). */
  private def expectSplit(base: String, file: String, sep: String) = {
    val search = new FindBaseTestableFileWildcardSearch
    search.findBaseWrapper(base + sep + file) mustEqual ((base, file))
  }

  "File Wildcard Search" should {
    "find the correct base path for a Windows file URI starting file:///" in {
      expectSplit("file:///C:/x/y/z", "file1.jp2", "/")
    }
    "find the correct base path for a Windows file URI starting file:/// with path in file" in {
      expectSplit("file:///C:/x/y/z/A/B", "file1.jp2", "/")
    }
    "find the correct base path for a Windows file URI starting file:/" in {
      expectSplit("file:/C:/x/y/z", "file1.jp2", "/")
    }
    "find the correct base path for a Windows file URI starting file:/ with path in file" in {
      expectSplit("file:/C:/x/y/z/A/B", "file1.jp2", "/")
    }
    "find the correct base path for a Windows file" in {
      expectSplit("""C:\\x\\y\\z""", "file1.jp2", """\\""")
    }
    "find the correct base path for a Windows file with path in file" in {
      expectSplit("""C:\\x\\y\\z\\A\\B""", "file1.jp2", """\\""")
    }
    "find the correct base path for a Linux file URI" in {
      expectSplit("file:///x/y/z", "file1.jp2", "/")
    }
    "find the correct base path for a Linux file URI with path in file" in {
      expectSplit("file:///x/y/z/A/B", "file1.jp2", "/")
    }
    "find the correct base path for a Linux file" in {
      expectSplit("/x/y/z", "file1.jp2", "/")
    }
    "find the correct base path for a Linux file with path in file" in {
      expectSplit("/x/y/z/A/B", "file1.jp2", "/")
    }
  }
}
| adamretter/csv-validator | csv-validator-core/src/test/scala/uk/gov/nationalarchives/csv/validator/schema/v1_0/FileWildcardSearchSpec.scala | Scala | mpl-2.0 | 4,511 |
package lila.opening
import play.api.libs.json._
import reactivemongo.bson._
import reactivemongo.bson.Macros
import lila.db.Types.Coll
import lila.rating.Glicko
import lila.user.User
/** Opening-training data shown for a user: recent attempts plus rating chart points. */
case class UserInfos(user: User, history: List[Attempt], chart: JsArray)

object UserInfos {

  // Number of most recent attempts kept in the history list.
  private def historySize = 20
  // Number of data points plotted on the rating chart.
  private def chartSize = 12

  import Attempt.attemptBSONHandler

  /** Builds a loader bound to the given attempts collection. */
  def apply(attemptColl: Coll) = new {

    /** Loads the user's data; falls back to empty history/chart if the query fails. */
    def apply(user: User): Fu[UserInfos] = fetchAttempts(user.id) map { attempts =>
      new UserInfos(user, makeHistory(attempts), makeChart(attempts))
    } recover {
      case e: Exception =>
        play.api.Logger("Opening UserInfos").error(e.getMessage)
        new UserInfos(user, Nil, JsArray())
    }

    /** Option-lifting variant: None user yields a successful None result. */
    def apply(user: Option[User]): Fu[Option[UserInfos]] =
      user ?? { apply(_) map (_.some) }

    // Fetches the user's attempts sorted newest first, capped at the larger
    // of the two display sizes so one query feeds both history and chart.
    private def fetchAttempts(userId: String): Fu[List[Attempt]] =
      attemptColl.find(BSONDocument(
        Attempt.BSONFields.userId -> userId
      )).sort(BSONDocument(
        Attempt.BSONFields.date -> -1
      )).cursor[Attempt].collect[List](math.max(historySize, chartSize))
  }

  private def makeHistory(attempts: List[Attempt]) = attempts.take(historySize)

  // Oldest-to-newest post-attempt ratings, left-padded with the default Glicko
  // rating so the chart always has exactly chartSize points.
  private def makeChart(attempts: List[Attempt]) = JsArray {
    val ratings = attempts.take(chartSize).reverse map (_.userPostRating)
    val filled = List.fill(chartSize - ratings.size)(Glicko.default.intRating) ::: ratings
    filled map { JsNumber(_) }
  }
}
| danilovsergey/i-bur | modules/opening/src/main/UserInfos.scala | Scala | mit | 1,468 |
package scala.tasty.internal.dotc.util
class DotClass {

  /** Fails fast for members that are intentionally left unimplemented. */
  def unsupported(methodName: String): Nothing = {
    val description = s"$getClass.$methodName"
    throw new UnsupportedOperationException(description)
  }
}
| VladimirNik/tasty | plugin/src/main/scala/scala/tasty/internal/dotc/util/DotClass.scala | Scala | bsd-3-clause | 178 |
package be.objectify.deadbolt.scala.test.modules
import be.objectify.deadbolt.scala.TemplateFailureListener
import be.objectify.deadbolt.scala.cache.HandlerCache
import be.objectify.deadbolt.scala.test.dao.SubjectDao
import be.objectify.deadbolt.scala.test.security.{MyCustomTemplateFailureListener, MyHandlerCache}
import play.api.inject.{Binding, Module}
import play.api.{Configuration, Environment}
/**
* @author Steve Chaloner (steve@objectify.be)
*/
class CustomDeadboltHook extends Module {

  /** Wires the application-specific Deadbolt components into Play's injector. */
  override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = {
    val handlerCacheBinding = bind[HandlerCache].to[MyHandlerCache]
    val failureListenerBinding = bind[TemplateFailureListener].to[MyCustomTemplateFailureListener]
    Seq(handlerCacheBinding, failureListenerBinding)
  }
}
| schaloner/deadbolt-2-scala | test-app/app/be/objectify/deadbolt/scala/test/modules/CustomDeadboltHook.scala | Scala | apache-2.0 | 743 |
package render
import java.awt.{Color}
import core.main._, core.pieces._, core.shapes.dim0._, core.shapes.dim1._, core.shapes.dim2._, core.shapes.forces._
object View2D {
def apply(w: Int, h: Int, r: Rect, gen: Point=>Color) = {
def getPos(x: Int,y: Int) = Point(r.p1.x+x*r.w/w,r.p1.y+y*r.h/h)
new ViewImpl(w,h,Array.tabulate(w*h)(i=>gen(getPos(i%w,i/w)).getRGB()),getPos _)
}
} | radiotech/FlatLand | src/core/render/View2D.scala | Scala | mit | 392 |
/**
* Copyright (c) 2012, www.quartzsource.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.quartzsource.meutrino
import java.io.File
import org.junit.Test
import org.junit.Assert._
import org.quartzsource.meutrino.client.CommandServerFactory
import org.quartzsource.meutrino.client.AbstractClientTest
import org.quartzsource.meutrino.client.CommandServerConfig
@Test
class StressTest extends AbstractClientTest {

  /** Five writers, each committing into its own repository, must all succeed concurrently. */
  @Test
  def testDifferentRepositories {
    val workers = (1 to 5).map(i => {
      val rootFolder: File = getTestFolder(i)
      val repo = factory.create(rootFolder)
      new WriterWorker(i, repo)
    })
    // Start all workers, wait for every thread to finish, then check each succeeded.
    workers.map(worker => {
      val thread = new Thread(worker)
      thread.start()
      thread
    }).foreach(_.join)
    assertTrue(workers.forall(_.success))
  }

  // Workers report whether their run completed without throwing.
  private trait Result {
    def success: Boolean
  }

  // Commits five changesets, verifies the log grew by five, then re-reads the log.
  // `success` stays false if any step throws.
  private class WriterWorker(number: Int, repo: QRepository) extends Runnable with Result {
    var success = false
    def run() {
      val start = repo.log().size
      for (i <- 1 to 5) {
        append("a", "a\\n", folder = repo.root())
        repo.commit("first " + number, Some("foo"), addRemove = true)
      }
      val end = repo.log().size
      assertEquals(start + 5, end)
      for (i <- 1 to 10) repo.log()
      success = true //no exception
    }
  }

  // Reads the log 100 times; `success` stays false if any read throws.
  private class ReaderWorker(number: Int, repo: QRepository) extends Runnable with Result {
    var success = false
    def run() {
      for (i <- 1 to 100) repo.log()
      success = true //no exception
    }
  }

  /**
   * Many threads may not share the same repository instance without locking:
   * the communication over the pipe becomes broken otherwise.
   */
  @Test
  def testSameRepository {
    //assertFalse(sameRepo(false)) //no lock
    assertTrue(sameRepo(true)) //sync with lock
  }

  // Runs one writer and four readers against a single shared repository instance,
  // optionally synchronized with a lock; returns true iff every worker succeeded.
  def sameRepo(syncWithLock: Boolean): Boolean = {
    val rootFolder: File = getTestFolder()
    val conf = Map(("ui" -> Map("username" -> "py4fun")))
    val factory = new CommandServerFactory("hg", new CommandServerConfig(config = conf, sync = syncWithLock))
    val repo = factory.create(rootFolder)
    val writer = new WriterWorker(1, repo)
    val workers = writer :: (2 to 5).map(i => {
      new ReaderWorker(i, repo)
    }).toList
    workers.map(worker => {
      val thread = new Thread(worker)
      thread.start()
      thread
    }).foreach(_.join)
    workers.forall(_.success)
  }
}
| cyberspinach/meutrino | src/test/scala/org/quartzsource/meutrino/client/StressTest.scala | Scala | apache-2.0 | 2,928 |
/*
* ____ ____ _____ ____ ___ ____
* | _ \ | _ \ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \_\ |_____| \____| /__/ \____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.niflheim
import com.precog.util.IOUtils
import java.util.concurrent.ScheduledThreadPoolExecutor
import org.specs2.mutable.{After, Specification}
class CookStateLogSpecs extends Specification {
  // Shared scheduler backing the transaction logs created in these examples.
  val txLogScheduler = new ScheduledThreadPoolExecutor(5)

  // Per-example fixture: a fresh temp directory, recursively deleted afterwards.
  trait LogState extends After {
    val workDir = IOUtils.createTmpDir("cookstatespecs").unsafePerformIO

    def after = {
      IOUtils.recursiveDelete(workDir).unsafePerformIO
    }
  }

  "CookStateLog" should {
    "Properly initialize" in new LogState {
      val txLog = new CookStateLog(workDir, txLogScheduler)

      txLog.currentBlockId mustEqual 0l
      txLog.pendingCookIds must beEmpty
    }

    "Lock its directory during operation" in new LogState {
      val txLog = new CookStateLog(workDir, txLogScheduler)
      // A second log over the same directory must fail to acquire the lock.
      (new CookStateLog(workDir, txLogScheduler)) must throwAn[Exception]
    }
  }
}
| precog/platform | niflheim/src/main/scala/com/precog/niflheim/CookStateLogSpecs.scala | Scala | agpl-3.0 | 1,965 |
object CustomConstructs extends scala.App {
  import java.util.Calendar

  /**
   * A while-loop built from by-name parameters: `cond` and `body` are
   * re-evaluated on every iteration, exactly like the built-in `while`.
   * `@tailrec` makes the compiler verify the recursion is eliminated, so the
   * million-iteration benchmarks below run in constant stack space.
   */
  @scala.annotation.tailrec
  def myWhile(cond: => Boolean)(body: => Unit): Unit = {
    if (cond) {
      body
      myWhile(cond)(body)
    } else ()
  }

  var i = 0
  myWhile(i < 4) {
    i += 1
    println(i)
  }
  // Prints:
  // 1
  // 2
  // 3
  // 4

  /** Runs `body` once and returns the elapsed wall-clock time in milliseconds. */
  def benchmark(body: => Unit): Long = {
    val start = Calendar.getInstance().getTimeInMillis
    body
    val end = Calendar.getInstance().getTimeInMillis
    end - start
  }

  val myTime = benchmark {
    var i = 0
    myWhile(i < 1000000) {
      i += 1
    }
  }
  println("myWhile took: " + myTime)

  val time = benchmark {
    var i = 0
    while (i < 1000000) {
      i += 1
    }
  }
  println("while took: " + time)

  /** A short-circuiting or: `right` is evaluated only when `left` is false. */
  def myOr(left: Boolean, right: => Boolean): Boolean = {
    if (left) true else right
  }
  // The thrown expression is never evaluated because the left operand is true.
  println(myOr(true, throw new Exception("Boom!")))
  // Prints:
  // true
}
package io.plasmap.query.engine
import _root_.io.plasmap.model.geometry.Feature
import _root_.io.plasmap.query.engine.TypeAliases.SourceGraph
import _root_.io.plasmap.queryengine.macros.Macros.gottaMatchEmAll
import _root_.io.plasmap.serializer.{GeoJsonSerialiser, OsmDenormalizedSerializer}
import akka.NotUsed
import akka.http.scaladsl.model.ws.TextMessage.Strict
import akka.http.scaladsl.model.ws.{Message, TextMessage}
import akka.stream.FanOutShape.Init
import com.typesafe.scalalogging.Logger
import io.plasmap.query.engine.QueryTranslator.{NotYetImplemented, TranslationError}
import io.plasmap.querymodel.PMSerialiser._
import akka.stream._
import akka.stream.scaladsl._
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext
import scala.language.experimental.macros
import scalaz.{-\/, \/, \/-}
/**
* Contains methods to trenslate Websocket requests into Akka Streams Flows
*
* @author Jan Schulte <jan@plasmap.io>
*/
object Flows {

  val log = Logger(LoggerFactory.getLogger(QueryTranslator.getClass.getName))

  /* case class WSTerminatorShape[A](_init: Init[A] = akka.stream.FanOutShape.Name[A]("WSTerminator"))
    extends FanOutShape[A](_init) {
    val out = newOutlet[A]("nodes")
    protected override def construct(i: Init[A]) = WSTerminatorShape[A](i)
  }

  class WSTerminator[A] extends FlexiRoute[A, WSTerminatorShape[A]](
    WSTerminatorShape[A](), Attributes.name("WSTerminator")) {
    import FlexiRoute._

    override def createRouteLogic(p: PortT) = new RouteLogic[A] {

      override def initialCompletionHandling = CompletionHandling(
        onUpstreamFinish = (ctx) => {
          //          case object EverythingIsAbsolutelyFineException extends Exception
          //          ctx.fail(EverythingIsAbsolutelyFineException)
          ctx.finish()
        },
        onUpstreamFailure = (ctx, thr) => {
          ctx.finish()
        },
        onDownstreamFinish = (ctx, output) => {
          SameState
        }
      )

      override def initialState =
        State[Any](DemandFromAll(p.out)) {
          (ctx, _, elem) =>
            ctx.emit(p.out)(elem)
            SameState
        }
    }
  }*/

  /*
  object WSTerminator {
    def apply[A] = new WSTerminator[A]
  }*/

  // Ways an incoming websocket message can fail to become a runnable query.
  sealed trait QueryError
  final case class UnpickleError() extends QueryError
  final case class QueryTranslationError(e: TranslationError) extends QueryError

  /**
   * Deserialises `msg`, translates it into a query graph and materialises the
   * result as a Source of strings: GeoJSON for geometry results, plain error
   * messages when deserialisation or translation fails.
   */
  def translate(msg: String)(implicit mat: Materializer, ec: ExecutionContext): Source[String, NotUsed] = {

    val translatedQuery: QueryError \/ Query[_ <: Shape, _] = for {
      query <- deserialiseQuery(msg).leftMap(_ => UnpickleError())
      translatedQuery <- QueryTranslator.translate(query)(mat, ec).leftMap(QueryTranslationError)
    } yield translatedQuery

    // Wrapper that brings the implicits required by the poiMap macro expansion into scope.
    def poiMapImport(pq: POIQuery[_]): Source[String, NotUsed] = {
      import POIQueries._
      //Important for compilation order.
      import POIs._
      //Important for compilation order as well.
      poiMap(pq)
    }

    // Expanded at compile time by the gottaMatchEmAll macro into a match over
    // every POIElement query type.
    def poiMap(pq: POIQuery[_]): Source[String, NotUsed] = macro gottaMatchEmAll[POIElement]

    def areaToGeoJson(el: AreaElement) = OsmDenormalizedSerializer.toGeoJsonString(el.osmObject)

    // Runs the query graph and serialises each emitted element to a string.
    def toSource[A](shape: SourceGraph[A], serialise: A => String): Source[String, NotUsed] =
      Source.fromGraph(shape).via(serialiser(serialise))

    def sourceArea[A <: AreaElement](shape: SourceGraph[A]) = toSource(shape, areaToGeoJson)

    translatedQuery match {
      // FIXME: This should be simplified
      case \/-(CountryQuery(shape)) => sourceArea(shape)
      case \/-(StateQuery(shape)) => sourceArea(shape)
      case \/-(RegionQuery(shape)) => sourceArea(shape)
      case \/-(CityQuery(shape)) => sourceArea(shape)
      case \/-(TownshipQuery(shape)) => sourceArea(shape)
      case \/-(DistrictQuery(shape)) => sourceArea(shape)
      case \/-(VillageQuery(shape)) => sourceArea(shape)
      case \/-(CommunityQuery(shape)) => sourceArea(shape)
      case \/-(CoordinatesQuery(shape)) =>
        val serialise = (l: Location) => GeoJsonSerialiser.jsonFromFeature(Feature(l.point, Map.empty))
        toSource(shape, serialise)
      case \/-(p: POIQuery[_]) => poiMapImport(p)
      case -\/(ue: UnpickleError) =>
        Source.single("Your request was invalid")
      case -\/(qte@QueryTranslationError(NotYetImplemented(q))) =>
        Source.single(s"Your query type $q is not yet implemented.")
      case -\/(qte@QueryTranslationError(_)) =>
        Source.single(s"Could not process the query.")
    }
  }

  /** Curried adapter used by `query`: maps one websocket payload to its result Source. */
  def toQuery(mat: Materializer, ec: ExecutionContext): (String) => Source[String, NotUsed] = (msg: String) => {
    /*
    val terminatorFlow: Flow[String, String, NotUsed] = GraphDSL.create() { implicit b =>
      import GraphDSL.Implicits._
      val wsTerminator: WSTerminatorShape[String] = b add WSTerminator[String]
      val packS = b add Flow[String].map(identity)
      val unpackS = b add Flow[String].map(identity)
      packS ~> wsTerminator.in
      wsTerminator.out ~> unpackS.in
      (packS.in, unpackS.outlet)
    }}*/
    translate(msg)(mat, ec) //.via(terminatorFlow)
  }

  /** Flow rendering each element to its string representation. */
  def serialiser[E](toString: (E) => String): Flow[E, String, NotUsed] = Flow[E]
    .map(toString)

  /**
   * Full websocket pipeline: strict text messages in, query result messages out.
   * Non-strict or non-text messages are dropped by the `collect`.
   */
  def query(toQuery: (String) => Source[String, NotUsed])(implicit mat: Materializer, ec: ExecutionContext): Flow[Message, Message, NotUsed] = {
    val unpackS = Flow[Message]
      .collect[String] { case TextMessage.Strict(txt: String) => txt }

    val packS = {
      Flow[String].map(TextMessage(_))
    }

    // Each incoming payload yields a Source of results, concatenated in order.
    val queryFlow: Flow[String, String, NotUsed] = Flow[String]
      .map(toQuery)
      .flatMapConcat(identity)

    unpackS
      .via(queryFlow)
      .via(packS)
  }

  /** Convenience entry point wiring `query` with the default translator. */
  def apply(implicit fm: ActorMaterializer, ec: ExecutionContext): Flow[Message, Message, NotUsed] =
    Flows.query(Flows.toQuery(fm, ec))(fm, ec)
}
| plasmap/plasmap | query-engine/src/main/scala/io/plasmap/query/engine/Flows.scala | Scala | apache-2.0 | 5,933 |
/*
* This file is part of the \\BlueLaTeX project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.blue
package core
package impl
package paper
import couch._
import common._
import http._
import permission._
import java.util.UUID
import tiscaf._
import com.typesafe.config.Config
import org.osgi.framework.BundleContext
import resource._
import scala.sys.process._
import scala.util.{
Try,
Success
}
import gnieh.sohva.control.CouchClient
/** Delete an existing paper.
*
* @author Lucas Satabin
*/
class DeletePaperLet(
  paperId: String,
  context: BundleContext,
  val couch: CouchClient,
  config: Config,
  recaptcha: ReCaptcha,
  logger: Logger)
    extends SyncPermissionLet(paperId, config, logger) {

  /**
   * Deletes the paper's files and database entity.
   * Requires the Delete permission; otherwise answers 403 Forbidden.
   */
  def permissionAct(user: Option[UserInfo], role: Role, permissions: Set[Permission])(implicit talk: HTalk): Try[Unit] = permissions match {
    case Delete() =>
      // only authors may delete a paper
      // first delete the paper files
      import FileUtils._
      // delete the paper directory if it exists
      val paperDir = configuration.paperDir(paperId)
      val continue =
        if(paperDir.exists)
          paperDir.deleteRecursive()
        else
          true
      if(continue) {
        import OsgiUtils._
        // then remove the paper entity from the database
        val manager = entityManager("blue_papers")
        manager.deleteEntity(paperId) map {
          case true =>
            // notify deletion hooks registered in the OSGi context
            for(hook <- context.getAll[PaperDeleted])
              Try(hook.afterDelete(paperId, entityManager("blue_papers")))
            talk.writeJson(true)
          case false =>
            // files are gone but the database entity could not be removed
            talk
              .setStatus(HStatus.InternalServerError)
              .writeJson(ErrorResponse("cannot_delete_paper", "Unable to delete the paper database"))
        }
      } else {
        // file deletion failed; do not touch the database entity
        Success(talk
          .setStatus(HStatus.InternalServerError)
          .writeJson(ErrorResponse("cannot_delete_paper", "Unable to delete the paper files")))
      }
    case _ =>
      Success(
        talk
          .setStatus(HStatus.Forbidden)
          .writeJson(ErrorResponse("no_sufficient_rights", "You have no permission to delete this paper")))
  }
}
| tdurieux/bluelatex | blue-core/src/main/scala/gnieh/blue/core/impl/paper/DeletePaper.scala | Scala | apache-2.0 | 2,703 |
package me.ilinskiy.ChessAI
import me.ilinskiy.ChessAI.AIUtil.Time
import me.ilinskiy.chess.chessboard._
import me.ilinskiy.chess.game.moves.Move
import me.ilinskiy.chess.game.{GameRunner, GameUtil, Player}
/**
* Author: Svyatoslav Ilinskiy
* Date: 8/8/15.
*/
class AI(myColor: PieceColor) extends Player {

  override def getPlayerColor: PieceColor = myColor

  /**
   * Picks the next move for this AI's colour.
   * Precondition: at least one legal move must be available.
   */
  override def getMove(board: Board): Move = {
    assert(!GameUtil.getAvailableMoves(myColor, board).isEmpty)
    AI.movesMade += 1
    // Per-move time budget in milliseconds; non-positive means no deadline.
    val timeout: Long = GameRunner.TIMEOUT_IN_SECONDS.secondsToMillis
    val moveNeededBy =
      if (timeout > 0) Some(System.currentTimeMillis() + timeout)
      else None
    // Search on a copy of the board so the live game state is never mutated.
    val boardWrapper: BoardWrapper = BoardWrapper.getCopy(board)
    MoveMaker.getMove(boardWrapper, myColor, moveNeededBy)
  }

  // Always promote pawns to a queen.
  override def getPieceTypeForPromotedPawn: PieceType = PieceType.Queen
}
object AI {
  // Total moves requested from any AI instance. Shared mutable state; not
  // thread-safe — assumes a single sequential game (TODO confirm).
  var movesMade: Int = 0
  // Rough expected game length in moves; not referenced in this file —
  // presumably used by time-budgeting code elsewhere (verify against callers).
  val averageGameLength = 40
}
| ilinum/ChessAI | src/main/scala/me/ilinskiy/ChessAI/AI.scala | Scala | mit | 970 |
// src/main/scala/progscala2/traits/ui2/VetoableClicks.scala
package progscala2.traits.ui2
import progscala2.traits.observer._
trait VetoableClicks extends Clickable {

  /** How many clicks are allowed through before further ones are vetoed. */
  val maxAllowed = 1

  // Clicks forwarded so far; once it reaches maxAllowed, clicks are dropped.
  private var clicksSeen = 0

  /** Stackable modification: forwards the click only while under budget. */
  abstract override def click() = {
    val withinBudget = clicksSeen < maxAllowed
    if (withinBudget) {
      clicksSeen += 1
      super.click()
    }
  }
}
| sunilrebel/programming-scala | examples/src/main/scala/progscala2/traits/ui2/VetoableClicks.scala | Scala | mpl-2.0 | 505 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle
import java.nio.ByteBuffer
import org.apache.spark.network.buffer.ManagedBuffer
import org.apache.spark.storage.ShuffleBlockId
private[spark]
trait ShuffleBlockManager {
  /** Numeric identifier of a shuffle. */
  type ShuffleId = Int

  /**
   * Get shuffle block data managed by the local ShuffleBlockManager.
   * @return Some(ByteBuffer) if block found, otherwise None.
   */
  def getBytes(blockId: ShuffleBlockId): Option[ByteBuffer]

  /** Get the data of the given shuffle block as a network-transferable buffer. */
  def getBlockData(blockId: ShuffleBlockId): ManagedBuffer

  /** Release any resources held by this manager. */
  def stop(): Unit
}
| Dax1n/spark-core | core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockManager.scala | Scala | apache-2.0 | 1,317 |
package core
import org.scalatest.{FunSpec, Matchers}
class ServiceHeadersSpec extends FunSpec with Matchers
  with helpers.ServiceHelpers
{

  it("headers can reference imported enums") {
    // A standalone service declaring only the content_type enum.
    val enumsService = makeService(
      enums = Seq(
        makeEnum(
          name = "content_type",
          values = Seq(makeEnumValue("application/json"))
        )
      )
    )
    // Importing service whose Content-Type header references the imported enum
    // by its fully qualified name.
    val service = s"""
    {
      "name": "API Builder",
      "apidoc": { "version": "0.9.6" },
      "imports": [
        { "uri": "${makeImportUri(enumsService)}" }
      ],
      "headers": [
        { "name": "Content-Type", "type": "${enumsService.namespace}.enums.content_type" }
      ]
    }
    """
    // Resolve the import from the mock fetcher rather than the network.
    val fetcher = MockServiceFetcher()
    fetcher.add(makeImportUri(enumsService), enumsService)
    val validator = TestHelper.serviceValidatorFromApiJson(service, fetcher = fetcher)
    validator.errors().mkString("") should be("")
    // The validated service must retain the fully qualified enum type.
    val ct = validator.service().headers.find(_.name == "Content-Type").get
    ct.`type` should be(s"${enumsService.namespace}.enums.content_type")
  }
}
| apicollective/apibuilder | core/src/test/scala/core/ServiceHeadersSpec.scala | Scala | mit | 1,084 |
package com.github.K0zka.scala
import com.github.K0zka.HelloService
class ScalaHello extends HelloService {

  /**
   * Builds a greeting for the given name.
   *
   * @param name the name to greet
   * @return the greeting, e.g. "Hello World!"
   */
  def greet(name: String): String = s"Hello $name!"
}
| DummyWarhead/hello-babel | stringperf-scala/src/main/scala/com/github/K0zka/scala/ScalaHello.scala | Scala | apache-2.0 | 189 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.energy
import squants._
import squants.time.{ TimeIntegral, TimeDerivative, Hours }
import squants.radio._
import squants.space.{ SquaredRadians, SquareMeters, SolidAngle }
import squants.electro.{ Volts, ElectricCurrent, Amperes, ElectricPotential }
import squants.radio.Irradiance
import squants.radio.SpectralPower
import squants.radio.RadiantIntensity
/**
* Represents a quantity of power / load, the rate at which energy produced or used
*
* The first time derivative of [[squants.energy.Energy]]
*
* @author garyKeorkunian
* @since 0.1
*
* @param value value in [[squants.energy.Watts]]
*/
final class Power private (val value: Double, val unit: PowerUnit)
    extends Quantity[Power]
    with TimeDerivative[Energy]
    with TimeIntegral[PowerRamp] {

  def dimension = Power

  // Power integrated over time yields Energy; differentiated, a PowerRamp.
  protected[squants] def timeIntegrated = WattHours(toWatts)
  protected def timeDerived = WattsPerHour(toWatts)
  protected[squants] def time = Hours(1)

  // Radiometry: power per unit length / area / solid angle, and the inverses.
  def /(that: Length): SpectralPower = WattsPerMeter(toWatts / that.toMeters)
  def /(that: SpectralPower): Length = Meters(toWatts / that.toWattsPerMeter)
  def /(that: Area): Irradiance = WattsPerSquareMeter(toWatts / that.toSquareMeters)
  def /(that: Irradiance): Area = SquareMeters(this.toWatts / that.toWattsPerSquareMeter)
  def /(that: RadiantIntensity): SolidAngle = SquaredRadians(toWatts / that.toWattsPerSteradian)
  def /(that: SolidAngle): RadiantIntensity = WattsPerSteradian(toWatts / that.toSteradians)
  // Electricity: P = V * I, hence P / V = I and P / I = V.
  def /(that: ElectricPotential): ElectricCurrent = Amperes(toWatts / that.toVolts)
  def /(that: ElectricCurrent): ElectricPotential = Volts(toWatts / that.toAmperes)
  // Power density is not implemented; calling this throws NotImplementedError.
  def /(that: Volume) = ??? // Power Density

  // Conversions to each supported unit of measure.
  def toMilliwatts = to(Milliwatts)
  def toWatts = to(Watts)
  def toKilowatts = to(Kilowatts)
  def toMegawatts = to(Megawatts)
  def toGigawatts = to(Gigawatts)
  def toBtusPerHour = to(BtusPerHour)
}
/**
 * Companion object for [[squants.energy.Power]]
 */
object Power extends Dimension[Power] {
  /** Creates a Power from any numeric value in the given unit. */
  private[energy] def apply[A](n: A, unit: PowerUnit)(implicit num: Numeric[A]) = new Power(num.toDouble(n), unit)
  /** Power from energy over time: P = E / t, expressed in Watts. */
  def apply(energy: Energy, time: Time): Power = apply(energy.toWattHours / time.toHours, Watts)
  /** String parser, e.g. "10.22 MW" (eta-expanded from the inherited parse). */
  def apply = parse _

  def name = "Power"
  def primaryUnit = Watts
  def siUnit = Watts
  def units = Set(Watts, Milliwatts, Kilowatts, Megawatts, Gigawatts, BtusPerHour)
}
/** Common behaviour of all power units: conversion factor plus a Power factory. */
trait PowerUnit extends UnitOfMeasure[Power] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = Power(n, this)
}

object Milliwatts extends PowerUnit {
  val conversionFactor = MetricSystem.Milli
  val symbol = "mW"
}

// Primary/SI unit: all conversions are expressed relative to Watts.
object Watts extends PowerUnit with PrimaryUnit with SiUnit {
  val symbol = "W"
}

object Kilowatts extends PowerUnit {
  val conversionFactor = MetricSystem.Kilo
  val symbol = "kW"
}

object Megawatts extends PowerUnit {
  val conversionFactor = MetricSystem.Mega
  val symbol = "MW"
}

object Gigawatts extends PowerUnit {
  val conversionFactor = MetricSystem.Giga
  val symbol = "GW"
}

// British thermal units per hour; factor comes from EnergyConversions.
object BtusPerHour extends PowerUnit {
  val conversionFactor = EnergyConversions.btuMultiplier
  val symbol = "Btu/hr"
}
/** Implicit helpers: unit constants, numeric/string converters and Numeric support. */
object PowerConversions {
  // One-unit constants, with short aliases matching the unit symbols.
  lazy val milliwatt = Milliwatts(1)
  lazy val mW = milliwatt
  lazy val watt = Watts(1)
  lazy val W = watt
  lazy val kilowatt = Kilowatts(1)
  lazy val kW = kilowatt
  lazy val megawatt = Megawatts(1)
  lazy val MW = megawatt
  lazy val gigawatt = Gigawatts(1)
  lazy val GW = gigawatt

  // Enables expressions like `5.kW` or `3.watts` on any numeric type.
  implicit class PowerConversions[A](n: A)(implicit num: Numeric[A]) {
    def mW = Milliwatts(n)
    def W = Watts(n)
    def kW = Kilowatts(n)
    def MW = Megawatts(n)
    def GW = Gigawatts(n)
    def milliwatts = Milliwatts(n)
    def watts = Watts(n)
    def kilowatts = Kilowatts(n)
    def megawatts = Megawatts(n)
    def gigawatts = Gigawatts(n)
    def BTUph = BtusPerHour(n)
  }

  // Enables parsing via `"10.22 MW".toPower`.
  implicit class PowerStringConversions(s: String) {
    def toPower = Power(s)
  }

  // Numeric instance so collections of Power support sum, product, etc.
  implicit object PowerNumeric extends AbstractQuantityNumeric[Power](Power.primaryUnit)
}
| rmihael/squants | shared/src/main/scala/squants/energy/Power.scala | Scala | apache-2.0 | 4,561 |
package org.broadinstitute.clio.util.json
import java.net.URI
import java.time.OffsetDateTime
import enumeratum._
import io.circe.parser._
import io.circe.syntax._
import org.broadinstitute.clio.util.model.UpsertId
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{EitherValues, FlatSpec, Matchers}
import scala.collection.immutable.IndexedSeq
/**
 * Verifies the circe encoders/decoders supplied by [[ModelAutoDerivation]]:
 * snake_case field mapping, OffsetDateTime, enumeratum enums, URIs and
 * UpsertIds — including the error messages produced for malformed input.
 */
class ModelAutoDerivationSpec
    extends FlatSpec
    with Matchers
    with TableDrivenPropertyChecks
    with EitherValues
    with ModelAutoDerivation {
  behavior of "ModelAutoDerivation"
  it should "encode with snake case" in {
    case class TestClass(fieldA: Option[String], fieldB: Int)
    val jsonValues =
      Table(
        ("input", "expected"),
        (
          TestClass(Option("hello"), 123),
          """{"field_a":"hello","field_b":123}"""
        ),
        // None fields are dropped entirely (not serialized as null).
        (TestClass(None, 456), """{"field_b":456}""")
      )
    forAll(jsonValues) { (input, expected) =>
      input.asJson.printWith(defaultPrinter) should be(expected)
    }
  }
  it should "decode snake case" in {
    case class TestClass(fieldA: Option[String], fieldB: Int)
    val jsonValues =
      Table(
        ("input", "expected"),
        (
          """{"field_a": "hello", "field_b": 123}""",
          TestClass(Option("hello"), 123)
        ),
        ("""{"field_b": 456}""", TestClass(None, 456))
      )
    forAll(jsonValues) { (input, expected) =>
      decode[TestClass](input).right.value should be(expected)
    }
  }
  // camelCase keys must be rejected: the decoder is strict about leftover keys.
  it should "fail to decode camel case" in {
    case class TestClass(fieldA: Option[String], fieldB: Int)
    val jsonValues =
      Table(
        ("input", "expected"),
        (
          """{"fieldA": "hello", "field_b": 123}""",
          """DecodingFailure at .field_b{!_/}: Leftover keys: fieldA"""
        ),
        (
          """{"fieldB": 456}""",
          """DecodingFailure at .field_b: Attempt to decode value on failed cursor"""
        )
      )
    import cats.syntax.show._
    forAll(jsonValues) { (input, expected) =>
      decode[TestClass](input).left.value.show should be(expected)
    }
  }
  it should "encode a date" in {
    case class TestClass(date: OffsetDateTime)
    val test = TestClass(OffsetDateTime.parse("1970-01-01T12:34:56.789+05:00"))
    test.asJson.printWith(defaultPrinter) should be(
      """{"date":"1970-01-01T12:34:56.789+05:00"}"""
    )
  }
  it should "decode a date" in {
    case class TestClass(date: OffsetDateTime)
    val json = """{"date":"1970-01-01T12:34:56.789+05:00"}"""
    decode[TestClass](json).right.value should be(
      TestClass(OffsetDateTime.parse("1970-01-01T12:34:56.789+05:00"))
    )
  }
  it should "show the string which caused the error when failing to decode a date" in {
    case class TestClass(date: OffsetDateTime)
    val malformed = "not-a-date"
    val json = s"""{"date":"$malformed"}"""
    import cats.syntax.show._
    decode[TestClass](json).left.value.show should include(malformed)
  }
  it should "encode an enum" in {
    import ModelAutoDerivationSpec._
    case class TestClass(enum: TestEnum)
    val test = TestClass(TestEnum.TestValue1)
    test.asJson.printWith(defaultPrinter) should be("""{"enum":"TestValue1"}""")
  }
  it should "decode an enum" in {
    import ModelAutoDerivationSpec._
    case class TestClass(enum: TestEnum)
    val json = """{"enum":"TestValue2"}"""
    decode[TestClass](json).right.value should be(
      TestClass(TestEnum.TestValue2)
    )
  }
  it should "show the string which caused the error when failing to decode an enum" in {
    import ModelAutoDerivationSpec._
    case class TestClass(enum: TestEnum)
    import cats.syntax.show._
    // TestValue3 is not a member of TestEnum, so decoding must fail.
    val malformed = "TestValue3"
    val json = s"""{"enum": "$malformed"}"""
    decode[TestClass](json).left.value.show should include(malformed)
  }
  it should "encode a URI" in {
    case class TestClass(uri: URI)
    val uriValues = Table(
      "uri",
      "/seq/picard/some/file/path.stuff",
      "gs://broad-gotc-dev-storage/some/file/path.stuff"
    )
    forAll(uriValues) { uri =>
      TestClass(URI.create(uri)).asJson.printWith(defaultPrinter) should be(
        s"""{"uri":"$uri"}"""
      )
    }
  }
  it should "decode a URI" in {
    case class TestClass(uri: URI)
    val uriValues = Table(
      "uri",
      "/seq/picard/some/file/path.stuff",
      "gs://some-bucket/some/file/path.stuff"
    )
    forAll(uriValues) { uri =>
      decode[TestClass](s"""{"uri":"$uri"}""").right.value should be(
        TestClass(URI.create(uri))
      )
    }
  }
  it should "show the string which caused the error when failing to decode a URI" in {
    case class TestClass(uri: URI)
    import cats.syntax.show._
    val malformed = "*&^)"
    val json = s"""{"uri":"$malformed"}"""
    decode[TestClass](json).left.value.show should include(malformed)
  }
  it should "encode an UpsertID" in {
    case class TestClass(id: UpsertId)
    val id = UpsertId.nextId()
    TestClass(id).asJson.printWith(defaultPrinter) should be(s"""{"id":"${id.id}"}""")
  }
  it should "decode an UpsertID" in {
    case class TestClass(id: UpsertId)
    val id = UpsertId.nextId()
    decode[TestClass](s"""{"id":"${id.id}"}""").right.value should be(TestClass(id))
  }
  it should "show the string which caused the error when failing to decode an UpsertId" in {
    case class TestClass(id: UpsertId)
    import cats.syntax.show._
    val malformed = "123badId"
    val json = s"""{"id":"$malformed"}"""
    decode[TestClass](json).left.value.show should include(malformed)
  }
}
/** Test fixture: a minimal enumeratum enum exercised by the enum (de)serialization tests. */
object ModelAutoDerivationSpec {
  sealed trait TestEnum extends EnumEntry
  object TestEnum extends Enum[TestEnum] {
    override def values: IndexedSeq[TestEnum] = findValues
    case object TestValue1 extends TestEnum
    case object TestValue2 extends TestEnum
  }
}
| broadinstitute/clio | clio-util/src/test/scala/org/broadinstitute/clio/util/json/ModelAutoDerivationSpec.scala | Scala | bsd-3-clause | 5,872 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.cluster.sdv.generated
import org.apache.spark.sql.common.util._
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/**
 * Test Class for the "create table as select" (CTAS) command.
 *
 * beforeAll seeds three identical two-row source tables (carbon, parquet, orc)
 * that the individual tests select from; afterAll drops everything created.
 */
class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
  //Check create table as select with select from same table name when table exists
  test("CreateTableAsSelect_001_01", Include) {
    sql("drop table if exists ctas_same_table_name").collect
    sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED by 'carbondata'").collect
    intercept[Exception] {
      sql("create table ctas_same_table_name stored by 'carbondata' as select * from ctas_same_table_name")
    }
  }
  //Check create table as select with select from same table name when table does not exists
  test("CreateTableAsSelect_001_02", Include) {
    sql("drop table if exists ctas_same_table_name").collect
    intercept[Exception] {
      sql("create table ctas_same_table_name stored by 'carbondata' as select * from ctas_same_table_name")
    }
  }
  //Check create table as select with select from same table name with if not exists clause
  //(must be a silent no-op because the table already exists)
  test("CreateTableAsSelect_001_03", Include) {
    sql("drop table if exists ctas_same_table_name").collect
    sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED by 'carbondata'").collect
    sql("create table if not exists ctas_same_table_name stored by 'carbondata' as select * from ctas_same_table_name").collect
    assert(true)
  }
  //Check create table as select with select from another carbon table
  test("CreateTableAsSelect_001_04", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_carbon").collect
    sql("create table ctas_select_carbon stored by 'carbondata' as select * from carbon_ctas_test").collect
    checkAnswer(sql("select * from ctas_select_carbon"), sql("select * from carbon_ctas_test"))
  }
  //Check create table as select with select from another parquet table
  test("CreateTableAsSelect_001_05", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_parquet").collect
    sql("create table ctas_select_parquet stored by 'carbondata' as select * from parquet_ctas_test").collect
    checkAnswer(sql("select * from ctas_select_parquet"), sql("select * from parquet_ctas_test"))
  }
  //Check test create table as select with select from another hive/orc table
  test("CreateTableAsSelect_001_06", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_orc").collect
    sql("create table ctas_select_orc stored by 'carbondata' as select * from orc_ctas_test").collect
    checkAnswer(sql("select * from ctas_select_orc"), sql("select * from orc_ctas_test"))
  }
  //Check create table as select with where clause in select from carbon table that returns data
  test("CreateTableAsSelect_001_07", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
    sql("create table ctas_select_where_carbon stored by 'carbondata' as select * from carbon_ctas_test where key=100").collect
    checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test where key=100"))
  }
  //Check create table as select with where clause in select from carbon table that does not return data
  test("CreateTableAsSelect_001_08", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
    sql("create table ctas_select_where_carbon stored by 'carbondata' as select * from carbon_ctas_test where key=300").collect
    checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test where key=300"))
  }
  //Check create table as select with where clause in select from carbon table and load again
  //(after the extra insert the CTAS table matches the full source table)
  test("CreateTableAsSelect_001_09", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
    sql("create table ctas_select_where_carbon stored by 'carbondata' as select * from carbon_ctas_test where key=100").collect
    sql("insert into ctas_select_where_carbon select 200,'hive'").collect
    checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test"))
  }
  //Check create table as select with where clause in select from parquet table
  test("CreateTableAsSelect_001_10", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_parquet").collect
    sql("create table ctas_select_where_parquet stored by 'carbondata' as select * from parquet_ctas_test where key=100").collect
    checkAnswer(sql("select * from ctas_select_where_parquet"), sql("select * from parquet_ctas_test where key=100"))
  }
  //Check create table as select with where clause in select from hive/orc table
  test("CreateTableAsSelect_001_11", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_orc").collect
    sql("create table ctas_select_where_orc stored by 'carbondata' as select * from orc_ctas_test where key=100").collect
    checkAnswer(sql("select * from ctas_select_where_orc"), sql("select * from orc_ctas_test where key=100"))
  }
  //Check create table as select with select directly having the data
  test("CreateTableAsSelect_001_12", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_direct_data").collect
    sql("create table ctas_select_direct_data stored by 'carbondata' as select 300,'carbondata'").collect
    checkAnswer(sql("select * from ctas_select_direct_data"), Seq(Row(300,"carbondata")))
  }
  //Check create table as select with select from another carbon table with more data
  test("CreateTableAsSelect_001_13", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_hugedata1").collect
    sql("DROP TABLE IF EXISTS ctas_select_hugedata2").collect
    sql(s"""CREATE TABLE ctas_select_hugedata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table ctas_select_hugedata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
    sql("create table ctas_select_hugedata2 stored by 'carbondata' as select * from ctas_select_hugedata1").collect
    checkAnswer(sql("select * from ctas_select_hugedata1"), sql("select * from ctas_select_hugedata2"))
    sql("DROP TABLE IF EXISTS ctas_select_hugedata1").collect
    sql("DROP TABLE IF EXISTS ctas_select_hugedata2").collect
  }
  //Check create table as select with where clause in select from parquet table that does not return data
  test("CreateTableAsSelect_001_14", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_parquet").collect
    sql(
      """
        | CREATE TABLE ctas_select_where_parquet
        | STORED BY 'carbondata'
        | AS SELECT * FROM parquet_ctas_test
        | WHERE key=300
      """.stripMargin).collect
    checkAnswer(sql("SELECT * FROM ctas_select_where_parquet"),
      sql("SELECT * FROM parquet_ctas_test where key=300"))
  }
  //Check create table as select with where clause in select from hive/orc table that does not return data
  test("CreateTableAsSelect_001_15", Include) {
    sql("DROP TABLE IF EXISTS ctas_select_where_orc").collect
    sql(
      """
        | CREATE TABLE ctas_select_where_orc
        | STORED BY 'carbondata'
        | AS SELECT * FROM orc_ctas_test
        | WHERE key=100
      """.stripMargin).collect
    checkAnswer(sql("SELECT * FROM ctas_select_where_orc"), sql("SELECT * FROM orc_ctas_test WHERE key=100"))
  }
  override protected def beforeAll() {
    // Dropping existing tables
    sql("DROP TABLE IF EXISTS carbon_ctas_test")
    sql("DROP TABLE IF EXISTS parquet_ctas_test")
    sql("DROP TABLE IF EXISTS orc_ctas_test")
    // create carbon table and insert data
    sql("CREATE TABLE carbon_ctas_test(key INT, value STRING) STORED by 'carbondata'")
    sql("insert into carbon_ctas_test select 100,'spark'")
    sql("insert into carbon_ctas_test select 200,'hive'")
    // create parquet table and insert data
    sql("CREATE TABLE parquet_ctas_test(key INT, value STRING) STORED as parquet")
    sql("insert into parquet_ctas_test select 100,'spark'")
    sql("insert into parquet_ctas_test select 200,'hive'")
    // create hive table and insert data
    sql("CREATE TABLE orc_ctas_test(key INT, value STRING) STORED as ORC")
    sql("insert into orc_ctas_test select 100,'spark'")
    sql("insert into orc_ctas_test select 200,'hive'")
  }
  override protected def afterAll(): Unit = {
    // Clean up every table any of the tests may have left behind.
    sql("DROP TABLE IF EXISTS carbon_ctas_test")
    sql("DROP TABLE IF EXISTS parquet_ctas_test")
    sql("DROP TABLE IF EXISTS orc_ctas_test")
    sql("DROP TABLE IF EXISTS ctas_same_table_name")
    sql("DROP TABLE IF EXISTS ctas_select_carbon")
    sql("DROP TABLE IF EXISTS ctas_select_direct_data")
    sql("DROP TABLE IF EXISTS ctas_select_parquet")
    sql("DROP TABLE IF EXISTS ctas_select_orc")
    sql("DROP TABLE IF EXISTS ctas_select_where_carbon")
    sql("DROP TABLE IF EXISTS ctas_select_where_parquet")
    sql("DROP TABLE IF EXISTS ctas_select_where_orc")
    sql("DROP TABLE IF EXISTS ctas_select_direct_data")
    sql("DROP TABLE IF EXISTS ctas_select_hugedata1")
    sql("DROP TABLE IF EXISTS ctas_select_hugedata2")
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.api
import java.nio.file.Files
import java.time.Duration
import java.util.Collections
import java.util.concurrent.{ExecutionException, TimeUnit}
import scala.collection.JavaConverters._
import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig}
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.{KafkaException, TopicPartition}
import org.apache.kafka.common.errors.SaslAuthenticationException
import org.junit.{After, Before, Test}
import org.junit.Assert._
import kafka.admin.ConsumerGroupCommand.{ConsumerGroupCommandOptions, ConsumerGroupService}
import kafka.server.KafkaConfig
import kafka.utils.{JaasTestUtils, TestUtils}
import kafka.zk.ConfigEntityChangeNotificationZNode
import org.apache.kafka.common.security.auth.SecurityProtocol
/**
 * Integration tests for SASL/SCRAM clients whose credentials do not initially
 * exist on the broker: each client type (producer, transactional producer,
 * consumer, admin client, consumer-group service) must fail with an
 * authentication error, and — where retriable — succeed once the SCRAM
 * credential has been created.
 */
class SaslClientsWithInvalidCredentialsTest extends IntegrationTestHarness with SaslSetup {
  private val kafkaClientSaslMechanism = "SCRAM-SHA-256"
  private val kafkaServerSaslMechanisms = List(kafkaClientSaslMechanism)
  override protected val securityProtocol = SecurityProtocol.SASL_PLAINTEXT
  override protected val serverSaslProperties = Some(kafkaServerSaslProperties(kafkaServerSaslMechanisms, kafkaClientSaslMechanism))
  override protected val clientSaslProperties = Some(kafkaClientSaslProperties(kafkaClientSaslMechanism))
  val consumerCount = 1
  val producerCount = 1
  val brokerCount = 1
  // Single-broker cluster: internal topics must use replication factor / min ISR of 1.
  this.serverConfig.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "1")
  this.serverConfig.setProperty(KafkaConfig.TransactionsTopicReplicationFactorProp, "1")
  this.serverConfig.setProperty(KafkaConfig.TransactionsTopicMinISRProp, "1")
  this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
  val topic = "topic"
  val numPartitions = 1
  val tp = new TopicPartition(topic, 0)
  override def configureSecurityBeforeServersStart() {
    super.configureSecurityBeforeServersStart()
    zkClient.makeSurePersistentPathExists(ConfigEntityChangeNotificationZNode.path)
    // Create broker credentials before starting brokers
    createScramCredentials(zkConnect, JaasTestUtils.KafkaScramAdmin, JaasTestUtils.KafkaScramAdminPassword)
  }
  @Before
  override def setUp(): Unit = {
    startSasl(jaasSections(kafkaServerSaslMechanisms, Some(kafkaClientSaslMechanism), Both,
      JaasTestUtils.KafkaServerContextName))
    super.setUp()
    createTopic(topic, numPartitions, brokerCount)
  }
  @After
  override def tearDown(): Unit = {
    super.tearDown()
    closeSasl()
  }
  @Test
  def testProducerWithAuthenticationFailure() {
    val producer = createProducer()
    verifyAuthenticationException(sendOneRecord(producer, maxWaitMs = 10000))
    verifyAuthenticationException(producer.partitionsFor(topic))
    // Once the credential exists, the same producer instance can send.
    createClientCredential()
    verifyWithRetry(sendOneRecord(producer))
  }
  @Test
  def testTransactionalProducerWithAuthenticationFailure() {
    val txProducer = createTransactionalProducer()
    verifyAuthenticationException(txProducer.initTransactions())
    createClientCredential()
    // initTransactions is not retriable after a failure: the producer is left
    // in a fatal error state, so a second attempt must throw.
    try {
      txProducer.initTransactions()
      fail("Transaction initialization should fail after authentication failure")
    } catch {
      case _: KafkaException => // expected exception
    }
  }
  @Test
  def testConsumerWithAuthenticationFailure() {
    val consumer = createConsumer()
    consumer.subscribe(List(topic).asJava)
    verifyConsumerWithAuthenticationFailure(consumer)
  }
  @Test
  def testManualAssignmentConsumerWithAuthenticationFailure() {
    val consumer = createConsumer()
    consumer.assign(List(tp).asJava)
    verifyConsumerWithAuthenticationFailure(consumer)
  }
  @Test
  def testManualAssignmentConsumerWithAutoCommitDisabledWithAuthenticationFailure() {
    this.consumerConfig.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false.toString)
    val consumer = createConsumer()
    consumer.assign(List(tp).asJava)
    consumer.seek(tp, 0)
    verifyConsumerWithAuthenticationFailure(consumer)
  }
  // Shared consumer scenario: poll/partitionsFor fail before the credential
  // exists; afterwards a freshly produced record must become consumable.
  private def verifyConsumerWithAuthenticationFailure(consumer: KafkaConsumer[Array[Byte], Array[Byte]]) {
    verifyAuthenticationException(consumer.poll(Duration.ofMillis(1000)))
    verifyAuthenticationException(consumer.partitionsFor(topic))
    createClientCredential()
    val producer = createProducer()
    verifyWithRetry(sendOneRecord(producer))
    verifyWithRetry(assertEquals(1, consumer.poll(Duration.ofMillis(1000)).count))
  }
  @Test
  def testKafkaAdminClientWithAuthenticationFailure() {
    val props = TestUtils.adminClientSecurityConfigs(securityProtocol, trustStoreFile, clientSaslProperties)
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    val adminClient = AdminClient.create(props)
    // Unwraps the ExecutionException so verifyAuthenticationException sees the cause.
    def describeTopic(): Unit = {
      try {
        val response = adminClient.describeTopics(Collections.singleton(topic)).all.get
        assertEquals(1, response.size)
        response.asScala.foreach { case (topic, description) =>
          assertEquals(numPartitions, description.partitions.size)
        }
      } catch {
        case e: ExecutionException => throw e.getCause
      }
    }
    try {
      verifyAuthenticationException(describeTopic())
      createClientCredential()
      verifyWithRetry(describeTopic())
    } finally {
      adminClient.close()
    }
  }
  @Test
  def testConsumerGroupServiceWithAuthenticationFailure() {
    val consumerGroupService: ConsumerGroupService = prepareConsumerGroupService
    val consumer = createConsumer()
    consumer.subscribe(List(topic).asJava)
    verifyAuthenticationException(consumerGroupService.listGroups)
    consumerGroupService.close()
  }
  @Test
  def testConsumerGroupServiceWithAuthenticationSuccess() {
    createClientCredential()
    val consumerGroupService: ConsumerGroupService = prepareConsumerGroupService
    val consumer = createConsumer()
    consumer.subscribe(List(topic).asJava)
    verifyWithRetry(consumer.poll(Duration.ofMillis(1000)))
    assertEquals(1, consumerGroupService.listGroups.size)
    consumerGroupService.close()
  }
  // Builds a ConsumerGroupService whose command-config file carries the SASL
  // client settings.
  private def prepareConsumerGroupService = {
    val propsFile = TestUtils.tempFile()
    val propsStream = Files.newOutputStream(propsFile.toPath)
    propsStream.write("security.protocol=SASL_PLAINTEXT\\n".getBytes())
    propsStream.write(s"sasl.mechanism=$kafkaClientSaslMechanism".getBytes())
    propsStream.close()
    val cgcArgs = Array("--bootstrap-server", brokerList,
                        "--describe",
                        "--group", "test.group",
                        "--command-config", propsFile.getAbsolutePath)
    val opts = new ConsumerGroupCommandOptions(cgcArgs)
    val consumerGroupService = new ConsumerGroupService(opts)
    consumerGroupService
  }
  private def createClientCredential(): Unit = {
    createScramCredentials(zkConnect, JaasTestUtils.KafkaScramUser2, JaasTestUtils.KafkaScramPassword2)
  }
  // Produces a single record and asserts it was acknowledged with a valid offset.
  private def sendOneRecord(producer: KafkaProducer[Array[Byte], Array[Byte]], maxWaitMs: Long = 15000): Unit = {
    val record = new ProducerRecord(tp.topic(), tp.partition(), 0L, "key".getBytes, "value".getBytes)
    val future = producer.send(record)
    producer.flush()
    try {
      val recordMetadata = future.get(maxWaitMs, TimeUnit.MILLISECONDS)
      assertTrue(s"Invalid offset $recordMetadata", recordMetadata.offset >= 0)
    } catch {
      case e: ExecutionException => throw e.getCause
    }
  }
  // Asserts that `action` throws, and that it fails fast (within 5 seconds),
  // i.e. authentication errors are not silently retried until timeout.
  private def verifyAuthenticationException(action: => Unit): Unit = {
    val startMs = System.currentTimeMillis
    try {
      action
      fail("Expected an authentication exception")
    } catch {
      case e : Exception =>
        // expected exception
        val elapsedMs = System.currentTimeMillis - startMs
        assertTrue(s"Poll took too long, elapsed=$elapsedMs", elapsedMs <= 5000)
    }
  }
  // Retries `action` while it throws SaslAuthenticationException — credential
  // creation propagates to the broker asynchronously.
  private def verifyWithRetry(action: => Unit): Unit = {
    var attempts = 0
    TestUtils.waitUntilTrue(() => {
      try {
        attempts += 1
        action
        true
      } catch {
        case _: SaslAuthenticationException => false
      }
    }, s"Operation did not succeed within timeout after $attempts")
  }
  private def createTransactionalProducer(): KafkaProducer[Array[Byte], Array[Byte]] = {
    producerConfig.setProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "txclient-1")
    producerConfig.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")
    createProducer()
  }
}
| KevinLiLu/kafka | core/src/test/scala/integration/kafka/api/SaslClientsWithInvalidCredentialsTest.scala | Scala | apache-2.0 | 9,369 |
import scala.io._
import scala.actors._
import Actor._
// Fetches the page at `url` and returns its length in characters,
// decoding the body as ISO-8859-1. Performs blocking network I/O.
object PageLoader {
  def getPageSize(url: String) = Source.fromURL(url)("ISO-8859-1").mkString.length
}
// Pages to measure: a mix of http and https endpoints.
val urls = List("http://www.amazon.com/",
                "http://www.google.com/",
                "https://www.linkedin.com/",
                "http://www.cnn.com/",
                "https://www.twitter.com/")
// Runs the given thunk and prints its wall-clock duration in seconds.
def timeMethod(method: () => Unit) = {
  val begin = System.nanoTime
  method()
  val elapsed = (System.nanoTime - begin) / 1000000000.0
  println("Method took " + elapsed + " seconds.")
}
// Fetches every URL one after another on the current thread,
// printing each page's size as it completes.
def getPageSizeSequentially() = {
  urls.foreach { url =>
    println("Size for " + url + ": " + PageLoader.getPageSize(url))
  }
}
// Fires one actor per URL; each actor fetches its page and sends a
// (url, size) message back to this thread, which then receives exactly
// urls.size replies. Completion order is nondeterministic.
// NOTE(review): scala.actors is deprecated and removed from modern Scala;
// scala.concurrent.Future would be the current equivalent.
def getPageSizeConcurrently() = {
  val caller = self
  for (url <- urls) {
    actor { caller ! (url, PageLoader.getPageSize(url)) }
  }
  for (i <- 1 to urls.size) {
    receive {
      case (url, size) =>
        println("Size for " + url + ": " + size)
    }
  }
}
// Benchmark: compare wall-clock time of sequential vs actor-based
// concurrent fetching (network latency dominates, so the concurrent
// run is typically faster).
println("Sequential run:")
timeMethod { getPageSizeSequentially }
println("Concurrent run:")
timeMethod { getPageSizeConcurrently }
| spolnik/7-languages-in-7-weeks | scala/sizer.scala | Scala | apache-2.0 | 1,091 |
package root
/**
 * Records the purchase of a bitcoin against an account.
 *
 * NOTE(review): all work happens as a constructor side effect — merely
 * instantiating BuyCoin mutates the account. A method on an account or a
 * service object would make this explicit.
 *
 * @param accountNumber account the coin is bought into
 * @param buyRate       purchase price (also added to the account's used budget)
 * @param buyDate       purchase timestamp (epoch millis, presumably — confirm)
 * @param seller        identifier of the seller
 */
class BuyCoin(accountNumber:Long, buyRate:Double, buyDate:Long, seller:String) {
  // false and the two NaNs presumably mark the coin as not yet sold
  // (sell rate/date unknown) — confirm against Bitcoin's parameter order.
  val newBitcoin = new Bitcoin(buyRate, buyDate, seller, false, Double.NaN, Double.NaN)
  val account = new MyAccount(accountNumber)
  account.addBitcoin(newBitcoin)
  // Track the spent amount against the account's budget.
  account.setUsedBudget(account.getUsedBudget + buyRate)
}
| anoopdixith/SmartBCT | src/main/scala/BuyCoin.scala | Scala | mit | 319 |
package com.regblanc.sgl.platformer
package core
import sgl._
import geometry._
import scene._
import util._
import sgl.util.metrics.InstrumentationProvider
import tiled._
/**
 * Cake-pattern component providing the platformer's main screen: renders a
 * Tiled (TMX/JSON) map, moves a player rectangle with the arrow keys, and
 * resolves collisions against tile layers marked collision_type=solid.
 */
trait MainScreenComponent extends ViewportComponent with TiledMapRendererComponent {
  this: GraphicsProvider with InputProvider with SystemProvider with WindowProvider with AudioProvider
    with GameStateComponent with InputHelpersComponent with InstrumentationProvider
    with LoggingProvider with TmxJsonParserComponent =>
  import Graphics.{Bitmap, BitmapRegion, Canvas, Color, Animation}
  private implicit val LogTag = Logger.Tag("main-screen")
  // Fixed simulation timestep (presumably milliseconds — confirm against
  // FixedTimestepGameScreen's contract).
  private val dt: Long = 5l
  class MainScreen extends FixedTimestepGameScreen(dt) {
    override def name: String = "platformer-screen"
    private val playerPaint = Graphics.defaultPaint.withColor(Color.Blue)
    // Asynchronously load and parse the level, then build its renderer.
    val levelLoader: Loader[TiledMap] = System.loadText(ResourcesRoot / "levels" / "level.json").map(lvl => TmxJsonParser.parse(lvl.iterator))
    val tiledMapRendererLoader: Loader[TiledMapRenderer] = levelLoader.flatMap(lvl => TiledMapRenderer.load(lvl))
    addPreloading(levelLoader)
    addPreloading(tiledMapRendererLoader)
    val playerLoader: Loader[Bitmap] = Graphics.loadImage(MultiDPIResourcesRoot / "player.png")
    addPreloading(playerLoader)
    // The following fields are populated in onLoaded once the loaders above
    // complete; they are unusable before that.
    private var map: TiledMap = _
    private var tiledMapRenderer: TiledMapRenderer = _
    private var viewport = new Viewport(Window.width, Window.height)
    private var playerRect = Rect(0, 0, 0, 0)
    // Last known non-colliding position, used to undo a blocked move.
    private var oldPlayerRect = Rect(0, 0, 0, 0)
    private var playerAnimation: Animation[BitmapRegion] = _
    private var goalEllipse = Ellipse(0, 0, 0, 0)
    // Tile layers the player cannot pass through (collision_type == "solid").
    private var solidCollisionLayers: Vector[TileLayer] = _
    override def onLoaded(): Unit = {
      map = levelLoader.value.get.get
      tiledMapRenderer = tiledMapRendererLoader.value.get.get
      viewport.setCamera(0, 0, map.totalWidth, map.totalHeight)
      viewport.scalingStrategy = Viewport.Fit
      // Player spawn and goal positions come from the "GameObjects" object layer.
      val objectLayer = map.objectLayer("GameObjects")
      playerRect = objectLayer("player").asInstanceOf[TiledMapRect].rect
      oldPlayerRect = playerRect.clone
      playerAnimation = new Animation(200, BitmapRegion.split(playerLoader.value.get.get, 0, 0, 30, 60, 3, 1), Animation.Loop)
      goalEllipse = objectLayer("goal").asInstanceOf[TiledMapEllipse].ellipse
      solidCollisionLayers = map.tileLayers.filter(_.properties.find(_.name == "collision_type").flatMap(_.stringValue).exists(_ == "solid"))
    }
    private var totalTime: Long = 0
    override def fixedUpdate(): Unit = {
      totalTime += dt
      InputHelpers.processEvents(e => e match {
        case _ => ()
      })
      // Axis-separated movement: apply X displacement, revert it if a solid
      // tile is hit, then do the same independently for Y. 0.15f is the
      // movement speed per unit of dt.
      if(Inputs.Keyboard.left) {
        playerRect.left = playerRect.left - 0.15f*dt
      }
      if(Inputs.Keyboard.right) {
        playerRect.left = playerRect.left + 0.15f*dt
      }
      val collidingX = solidCollisionLayers.exists(tl => {
        tl.intersectingTiles(playerRect).exists(_.nonEmpty)
      })
      if(collidingX) {
        playerRect.left = oldPlayerRect.left
      } else {
        oldPlayerRect.left = playerRect.left
      }
      if(Inputs.Keyboard.up) {
        playerRect.top = playerRect.top - 0.15f*dt
      }
      if(Inputs.Keyboard.down) {
        playerRect.top = playerRect.top + 0.15f*dt
      }
      val collidingY = solidCollisionLayers.exists(tl => {
        tl.intersectingTiles(playerRect).exists(_.nonEmpty)
      })
      if(collidingY) {
        playerRect.top = oldPlayerRect.top
      } else {
        oldPlayerRect.top = playerRect.top
      }
    }
    private val BackgroundColor = Color.rgb(0, 255, 50)
    private val metricsPaint = Graphics.defaultPaint.withColor(Color.White).withFont(Graphics.Font.Monospace.withSize(20))
    override def render(canvas: Canvas): Unit = {
      canvas.drawColor(BackgroundColor)
      // Draw the world through the viewport (camera position + Fit scaling).
      viewport.withViewport(canvas){
        tiledMapRenderer.render(canvas, totalTime)
        canvas.drawBitmap(playerAnimation.currentFrame(totalTime), playerRect.left, playerRect.top)
        canvas.drawOval(goalEllipse.x, goalEllipse.y, goalEllipse.width, goalEllipse.height, playerPaint)
      }
      // Instrumentation overlay, anchored at the bottom of the window.
      canvas.translate(0, Window.height)
      Metrics.renderMetrics(canvas, metricsPaint)
      canvas.translate(0, -Window.height)
    }
  }
}
| regb/scala-game-library | examples/platformer/core/src/main/scala/MainScreen.scala | Scala | mit | 4,297 |
package model
import skinny.DBSettings
import skinny.test._
import org.scalatest.fixture.FlatSpec
import org.scalatest._
import scalikejdbc._
import scalikejdbc.scalatest._
import org.joda.time._
// Placeholder spec for the AccessLog model: wires up DB settings and
// per-test transaction rollback, but defines no test cases yet.
class AccessLogSpec extends FlatSpec with Matchers with DBSettings with AutoRollback {
}
| yoshitakes/skinny-task-example | src/test/scala/model/AccessLogSpec.scala | Scala | mit | 287 |
/*
* The MIT License
*
* Copyright (c) 2015-2016 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.fulcrumgenomics.commons.util
import java.nio.file.{Path, Paths}
import java.text.{DecimalFormat, NumberFormat}
object StringUtil {
  /** Returns `str` wrapped in `prefix` and (optionally) `suffix`. */
  def enclose(prefix: String, str: String, suffix: String = ""): String = s"$prefix$str$suffix"

  /**
    * Re-flows `s` by breaking at spaces so that no produced line exceeds `maxLength`
    * characters. A single word longer than `maxLength` ends up alone on an over-long
    * line. Tabs receive no special treatment.
    */
  def wordWrap(s: String, maxLength: Int): String = {
    val lines = s.split(" ").foldLeft(Array("")) { (acc, word) =>
      val appended = acc.last + " " + word
      if (appended.trim.length > maxLength) acc :+ word
      else acc.updated(acc.length - 1, appended)
    }
    lines.mkString("\n").trim
  }

  /** Computes the (Damerau-)Levenshtein distance between two strings.
    *
    * @param string1 the first string
    * @param string2 the second string
    * @param swap the swap (adjacent transposition) penalty
    * @param substitution the substitution penalty
    * @param insertion the insertion penalty
    * @param deletion the deletion penalty
    * @return the weighted edit distance
    */
  def levenshteinDistance(string1: String, string2: String, swap: Int, substitution: Int, insertion: Int, deletion: Int): Int = {
    val a = string1.getBytes()
    val b = string2.getBytes()
    val width = string2.length + 1
    // Three rolling rows of the DP matrix: two rows back, one row back, current.
    var twoBack: Array[Int] = new Array[Int](width)
    var oneBack: Array[Int] = Array.tabulate(width)(_ * insertion)
    var current: Array[Int] = new Array[Int](width)
    var i = 0
    while (i < a.length) {
      current(0) = (i + 1) * deletion
      var j = 0
      while (j < b.length) {
        // match / substitution
        var cost = oneBack(j) + (if (a(i) != b(j)) substitution else 0)
        // adjacent transposition (Damerau extension)
        if (i > 0 && j > 0 && a(i - 1) == b(j) && a(i) == b(j - 1) && cost > twoBack(j - 1) + swap) {
          cost = twoBack(j - 1) + swap
        }
        // deletion
        if (cost > oneBack(j + 1) + deletion) cost = oneBack(j + 1) + deletion
        // insertion
        if (cost > current(j) + insertion) cost = current(j) + insertion
        current(j + 1) = cost
        j += 1
      }
      // roll the rows forward, recycling the oldest buffer
      val recycled = twoBack
      twoBack = oneBack
      oneBack = current
      current = recycled
      i += 1
    }
    oneBack(b.length)
  }

  /**
    * Converts a camel-case identifier to GNU option style: each upper-case letter
    * becomes its lower-case form, preceded by a hyphen unless it starts the string.
    */
  def camelToGnu(in: String): String = {
    val out = new StringBuilder
    out.sizeHint(in.length + 4)
    in.toCharArray.zipWithIndex.foreach { case (ch, idx) =>
      if (ch.isUpper) {
        if (idx > 0) out.append("-")
        out.append(ch.toLower)
      }
      else out.append(ch)
    }
    out.toString()
  }

  /** Inserts a space before every upper-case letter that is not the first character. */
  def addSpacesToCamelCase(str: String): String = {
    val out = new StringBuilder
    out.sizeHint(str.length + 5)
    str.toCharArray.zipWithIndex.foreach { case (ch, idx) =>
      if (idx > 0 && ch.isUpper) out.append(" ")
      out.append(ch)
    }
    out.toString()
  }

  /** A simple version of Unix's `column` utility. This assumes the table is NxM. */
  def columnIt(rows: List[List[String]], delimiter: String = " "): String = {
    try {
      // column widths are driven by the first row's arity
      val columnCount = rows.head.size
      val widths = (0 until columnCount).map(c => rows.map(_(c).length).max)
      rows
        .map { row =>
          (0 until columnCount)
            .map(c => String.format("%" + widths(c) + "s", row(c)))
            .mkString(delimiter)
        }
        .mkString("\n")
    }
    catch {
      // a ragged (non-NxM) table indexes out of bounds while padding
      case _: java.lang.IndexOutOfBoundsException =>
        throw new IllegalArgumentException("columnIt failed. Did you forget to input an NxM table?")
    }
  }

  /** Splits line on delimiter, placing up to `arr.length` values into `arr` and returning the number of values.
    *
    * Consecutive delimiters delimit empty fields (e.g. "a,b,,,c" -> ["a","b","","","c"]).
    * If `concatenateRemaining` is true, any leftover text (when the array fills up) is
    * appended, delimiter included, to the last slot instead of being discarded.
    */
  def split(line: String, delimiter: Char = '\t', arr: Array[String], concatenateRemaining: Boolean = false): Int = {
    val chars = line.toCharArray
    val n = chars.length
    var fieldStart = 0
    var filled = 0
    var pos = 0
    while (pos <= n && filled < arr.length) {
      // a field ends at each delimiter and at end-of-line
      if (pos == n || chars(pos) == delimiter) {
        arr(filled) = new String(chars, fieldStart, pos - fieldStart)
        filled += 1
        fieldStart = pos + 1
      }
      pos += 1
    }
    if (concatenateRemaining && pos < n) {
      arr(arr.length - 1) += delimiter
      arr(arr.length - 1) += new String(chars, fieldStart, n - fieldStart)
    }
    filled
  }
}
| fulcrumgenomics/commons | src/main/scala/com/fulcrumgenomics/commons/util/StringUtil.scala | Scala | mit | 6,517 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model
import org.openapitools.client.core.ApiModel
/** Monitor data reported by Jenkins for the master computer.
  *
  * Every field is optional because the remote API may omit any monitor.
  * NOTE: generated from an OpenAPI spec — do not hand-edit field names;
  * they mirror the fully-qualified Jenkins monitor class names.
  */
case class HudsonMasterComputermonitorData (
  hudsonNodeMonitorsSwapSpaceMonitor: Option[SwapSpaceMonitorMemoryUsage2] = None,
  hudsonNodeMonitorsTemporarySpaceMonitor: Option[DiskSpaceMonitorDescriptorDiskSpace] = None,
  hudsonNodeMonitorsDiskSpaceMonitor: Option[DiskSpaceMonitorDescriptorDiskSpace] = None,
  hudsonNodeMonitorsArchitectureMonitor: Option[String] = None,
  hudsonNodeMonitorsResponseTimeMonitor: Option[ResponseTimeMonitorData] = None,
  hudsonNodeMonitorsClockMonitor: Option[ClockDifference] = None,
  `class`: Option[String] = None
) extends ApiModel
| cliffano/swaggy-jenkins | clients/scala-akka/generated/src/main/scala/org/openapitools/client/model/HudsonMasterComputermonitorData.scala | Scala | mit | 1,009 |
package scutil.log
import minitest._
/** Mixin that replaces the real log handler with a capturing [[TestLogHandler]]. */
trait TestLogging extends Logging {
	override val logHandler:TestLogHandler = new TestLogHandler
}
/** Log handler that records printed lines in memory instead of writing them out. */
class TestLogHandler extends DefaultLogHandler {
	/** Lines printed so far, oldest first. */
	var strings = Vector.empty[String]
	/** Forgets everything captured so far. */
	def reset():Unit = {
		strings = Vector.empty[String]
	}
	/** Captures the line instead of printing it. */
	override def print(s:String):Unit = {
		strings = strings :+ s
	}
}
//------------------------------------------------------------------------------
/** Smoke test: an INFO call must reach the captured handler in the expected format. */
object LoggingTest extends SimpleTestSuite with TestLogging {
	test("Logging should just work") {
		logHandler.reset()
		INFO("logging works")
		// expected shape: LEVEL <tab> [context] <tab> file:line <tab> message
		assert(
			logHandler.strings(0) matches """^INFO\t\[.*\]\tLoggingTest.scala:\d+\tlogging works$"""
		)
	}
}
| ritschwumm/scutil | modules/jdk/src/test/scala/scutil/log/LoggingTest.scala | Scala | bsd-2-clause | 675 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.utils
import org.apache.flink.annotation.Experimental
import org.apache.flink.configuration.ConfigOption
import org.apache.flink.configuration.ConfigOptions.key
import org.apache.flink.table.api.{TableConfig, TableException}
import org.apache.flink.table.planner.plan.logical.{LogicalWindow, SessionGroupWindow}
import org.apache.flink.table.planner.plan.utils.AggregateUtil.isRowtimeAttribute
import org.apache.flink.table.planner.utils.TableConfigUtils.getMillisecondFromConfigDuration
import org.apache.flink.table.runtime.operators.window.TimeWindow
import org.apache.flink.table.runtime.operators.window.triggers._
import java.lang.{Boolean, Long}
import java.time.Duration
/**
 * Strategy describing when a group window emits its results: only when the
 * window ends, and/or periodically before (early fire) / after (late fire)
 * the watermark passes the end of the window.
 *
 * The configuration is validated eagerly at construction time.
 */
class WindowEmitStrategy(
    isEventTime: Boolean,
    isSessionWindow: Boolean,
    earlyFireDelay: Long,
    earlyFireDelayEnabled: Boolean,
    lateFireDelay: Long,
    lateFireDelayEnabled: Boolean,
    allowLateness: Long) {

  checkValidation()

  /** Milliseconds for which late records are still merged into a closed window. */
  def getAllowLateness: Long = allowLateness

  /** Fails fast on unsupported or inconsistent emit configurations. */
  private def checkValidation(): Unit = {
    if (isSessionWindow && (earlyFireDelayEnabled || lateFireDelayEnabled)) {
      throw new TableException("Session window doesn't support EMIT strategy currently.")
    }
    if (isEventTime && lateFireDelayEnabled && allowLateness <= 0L) {
      throw new TableException("The 'AFTER WATERMARK' emit strategy requires set " +
        "'minIdleStateRetentionTime' in table config.")
    }
    if (earlyFireDelayEnabled && (earlyFireDelay == null || earlyFireDelay < 0)) {
      // fixed: the two literals previously concatenated to "...value whenenable..."
      throw new TableException("Early-fire delay should not be null or negative value when " +
        "enable early-fire emit strategy.")
    }
    if (lateFireDelayEnabled && (lateFireDelay == null || lateFireDelay < 0)) {
      // fixed: missing space between the concatenated literals (see early-fire above)
      throw new TableException("Late-fire delay should not be null or negative value when " +
        "enable late-fire emit strategy.")
    }
  }

  /** Whether this strategy can emit multiple (updating) results per window. */
  def produceUpdates: Boolean = {
    if (isEventTime) {
      earlyFireDelayEnabled || lateFireDelayEnabled
    } else {
      earlyFireDelayEnabled
    }
  }

  /** Builds the runtime window trigger implementing this emit strategy. */
  def getTrigger: Trigger[TimeWindow] = {
    val earlyTrigger = createTriggerFromInterval(earlyFireDelayEnabled, earlyFireDelay)
    val lateTrigger = createTriggerFromInterval(lateFireDelayEnabled, lateFireDelay)
    if (isEventTime) {
      val trigger = EventTimeTriggers.afterEndOfWindow[TimeWindow]()
      (earlyTrigger, lateTrigger) match {
        case (Some(early), Some(late)) => trigger.withEarlyFirings(early).withLateFirings(late)
        case (Some(early), None) => trigger.withEarlyFirings(early)
        case (None, Some(late)) => trigger.withLateFirings(late)
        case (None, None) => trigger
      }
    } else {
      val trigger = ProcessingTimeTriggers.afterEndOfWindow[TimeWindow]()
      // late trigger is ignored, as no late element in processing time
      earlyTrigger match {
        case Some(early) => trigger.withEarlyFirings(early)
        case None => trigger
      }
    }
  }

  /** Human-readable summary, e.g. "early delay 100 millisecond, late no delay". */
  override def toString: String = {
    val builder = new StringBuilder
    val earlyString = intervalToString(earlyFireDelayEnabled, earlyFireDelay)
    val lateString = intervalToString(lateFireDelayEnabled, lateFireDelay)
    if (earlyString != null) {
      builder.append("early ").append(earlyString)
    }
    if (lateString != null) {
      if (earlyString != null) {
        builder.append(", ")
      }
      builder.append("late ").append(lateString)
    }
    builder.toString
  }

  /** None when disabled; fire-per-element for interval 0; periodic otherwise. */
  private def createTriggerFromInterval(
      enableDelayEmit: Boolean,
      interval: Long): Option[Trigger[TimeWindow]] = {
    if (!enableDelayEmit) {
      None
    } else {
      if (interval > 0) {
        Some(ProcessingTimeTriggers.every(Duration.ofMillis(interval)))
      } else {
        Some(ElementTriggers.every())
      }
    }
  }

  /** Null when disabled — toString treats null as "omit this part". */
  private def intervalToString(enableDelayEmit: Boolean, interval: Long): String = {
    if (!enableDelayEmit) {
      null
    } else {
      if (interval > 0) {
        s"delay $interval millisecond"
      } else {
        "no delay"
      }
    }
  }
}
object WindowEmitStrategy {
  /** Derives the emit strategy for `window` from the table configuration. */
  def apply(tableConfig: TableConfig, window: LogicalWindow): WindowEmitStrategy = {
    val isEventTime = isRowtimeAttribute(window.timeAttribute)
    val isSessionWindow = window.isInstanceOf[SessionGroupWindow]
    val allowLateness = if (isSessionWindow) {
      // ignore allow lateness in session window because retraction is not supported
      0L
    } else if (tableConfig.getMinIdleStateRetentionTime < 0) {
      // min idle state retention time is not set, use 0L as default which means not allow lateness
      0L
    } else {
      // use min idle state retention time as allow lateness
      tableConfig.getMinIdleStateRetentionTime
    }
    val enableEarlyFireDelay = tableConfig.getConfiguration.getBoolean(
      TABLE_EXEC_EMIT_EARLY_FIRE_ENABLED)
    val earlyFireDelay = getMillisecondFromConfigDuration(
      tableConfig, TABLE_EXEC_EMIT_EARLY_FIRE_DELAY)
    val enableLateFireDelay = tableConfig.getConfiguration.getBoolean(
      TABLE_EXEC_EMIT_LATE_FIRE_ENABLED)
    val lateFireDelay = getMillisecondFromConfigDuration(
      tableConfig, TABLE_EXEC_EMIT_LATE_FIRE_DELAY)
    new WindowEmitStrategy(
      isEventTime,
      isSessionWindow,
      earlyFireDelay,
      enableEarlyFireDelay,
      lateFireDelay,
      enableLateFireDelay,
      allowLateness)
  }

  // Experimental config; it may be removed later.
  @Experimental
  val TABLE_EXEC_EMIT_EARLY_FIRE_ENABLED: ConfigOption[Boolean] =
    key("table.exec.emit.early-fire.enabled")
      .defaultValue(Boolean.valueOf(false))
      .withDescription("Specifies whether to enable early-fire emit." +
        "Early-fire is an emit strategy before watermark advanced to end of window.")

  // Experimental config; it may be removed later.
  @Experimental
  val TABLE_EXEC_EMIT_EARLY_FIRE_DELAY: ConfigOption[String] =
    key("table.exec.emit.early-fire.delay")
      .noDefaultValue
      .withDescription("The early firing delay in milli second, early fire is " +
        "the emit strategy before watermark advanced to end of window. " +
        "< 0 is illegal configuration. " +
        "0 means no delay (fire on every element). " +
        "> 0 means the fire interval. ")

  // Experimental config; it may be removed later.
  @Experimental
  val TABLE_EXEC_EMIT_LATE_FIRE_ENABLED: ConfigOption[Boolean] =
    key("table.exec.emit.late-fire.enabled")
      .defaultValue(Boolean.valueOf(false))
      .withDescription("Specifies whether to enable late-fire emit. " +
        "Late-fire is an emit strategy after watermark advanced to end of window.")

  // Experimental config; it may be removed later.
  @Experimental
  val TABLE_EXEC_EMIT_LATE_FIRE_DELAY: ConfigOption[String] =
    key("table.exec.emit.late-fire.delay")
      .noDefaultValue
      .withDescription("The late firing delay in milli second, late fire is " +
        "the emit strategy after watermark advanced to end of window. " +
        "< 0 is illegal configuration. " +
        "0 means no delay (fire on every element). " +
        "> 0 means the fire interval.")
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/WindowEmitStrategy.scala | Scala | apache-2.0 | 8,019 |
package com.cerner.beadledom.jaxrs.provider
import com.cerner.beadledom.correlation.CorrelationContext
import org.scalatest._
import org.scalatest.mockito.MockitoSugar
import org.slf4j.MDC
/** Exercises CorrelationIdFilter with both default and custom header/MDC key names,
  * delegating the shared examples to [[CorrelationIdFilterBehaviors]].
  */
class CorrelationIdFilterSpec extends FunSpec with BeforeAndAfter with Matchers
    with MockitoSugar with CorrelationIdFilterBehaviors {
  val defaultHeaderName = "Correlation-Id"
  val defaultMdcName = "Correlation-Id"
  // clear the MDC between examples so correlation ids cannot leak across tests
  after {
    MDC.clear()
  }
  val correlationIdContext = CorrelationContext.create()
  val defaultFilter = new CorrelationIdFilter()
  val customFilter = new CorrelationIdFilter("customHeader", "customMdc")
  describe("CorrelationIdFilter") {
    describe("with default id names") {
      it should behave like correlationIdFilter(defaultFilter, defaultHeaderName, defaultMdcName, correlationIdContext)
    }
    describe("with custom id names") {
      it should behave like correlationIdFilter(customFilter, "customHeader", "customMdc", correlationIdContext)
    }
  }
}
| cerner/beadledom | jaxrs/src/test/scala/com/cerner/beadledom/jaxrs/provider/CorrelationIdFilterSpec.scala | Scala | apache-2.0 | 985 |
/* Copyright 2009-2021 EPFL, Lausanne */
/** Verification benchmark: destructuring a nested tuple/constructor pattern in a
  * `val` definition must bind `x` to the value stored inside `B`.
  */
object MyTuple4 {
  sealed abstract class A
  case class B(i: Int) extends A
  case class C(a: A) extends A
  /** Builds a nested tuple and extracts the Int inside C(B(_)); proven to equal 4. */
  def foo(): Int = {
    val t = (1, (C(B(4)), 2), 3)
    // extractor-pattern val: matches the tuple shape and the C(B(_)) constructors
    val (a1, (C(B(x)), a2), a3) = t
    x
  } ensuring( _ == 4)
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/MyTuple4.scala | Scala | apache-2.0 | 274 |
import org.scalatestplus.play._
import scala.collection.mutable
import services.Counter
/** Example of testing a Guice-injected component: the Counter implementation is
  * resolved from the running application's injector and must count up from zero.
  */
class CounterSpec extends PlaySpec with OneAppPerSuite {
  "Counter component" should {
    "resolve implementation and produce increasing values" in {
      val counter: Counter = app.injector.instanceOf[Counter]
      counter.nextCount() mustBe 0
      counter.nextCount() mustBe 1
      counter.nextCount() mustBe 2
    }
  }
}
| daniel-kun/omni | concepts/prototype/web/backend/test/CounterSpec.scala | Scala | mit | 476 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
/** Support for interacting with different versions of the HiveMetastoreClient */
package object client {
  /** A supported Hive metastore client version.
    * @param fullVersion the exact Hive release to resolve
    * @param extraDeps extra maven coordinates required by this version
    * @param exclusions maven coordinates to exclude when resolving this version
    */
  private[hive] sealed abstract class HiveVersion(
    val fullVersion: String,
    val extraDeps: Seq[String] = Nil,
    val exclusions: Seq[String] = Nil)

  // scalastyle:off
  private[hive] object hive {
    case object v12 extends HiveVersion("0.12.0")
    case object v13 extends HiveVersion("0.13.1")
    // Hive 0.14 depends on calcite 0.9.2-incubating-SNAPSHOT which does not exist in
    // maven central anymore, so override those with a version that exists.
    //
    // The other excluded dependencies are also nowhere to be found, so exclude them explicitly. If
    // they're needed by the metastore client, users will have to dig them out of somewhere and use
    // configuration to point Spark at the correct jars.
    case object v14 extends HiveVersion("0.14.0",
      extraDeps = Seq("org.apache.calcite:calcite-core:1.3.0-incubating",
        "org.apache.calcite:calcite-avatica:1.3.0-incubating"),
      exclusions = Seq("org.pentaho:pentaho-aggdesigner-algorithm"))
    case object v1_0 extends HiveVersion("1.0.0",
      exclusions = Seq("eigenbase:eigenbase-properties",
        "org.pentaho:pentaho-aggdesigner-algorithm",
        "net.hydromatic:linq4j",
        "net.hydromatic:quidem"))
    // The curator dependency was added to the exclusions here because it seems to confuse the ivy
    // library. org.apache.curator:curator is a pom dependency but ivy tries to find the jar for it,
    // and fails.
    case object v1_1 extends HiveVersion("1.1.0",
      exclusions = Seq("eigenbase:eigenbase-properties",
        "org.apache.curator:*",
        "org.pentaho:pentaho-aggdesigner-algorithm",
        "net.hydromatic:linq4j",
        "net.hydromatic:quidem"))
    case object v1_2 extends HiveVersion("1.2.2",
      exclusions = Seq("eigenbase:eigenbase-properties",
        "org.apache.curator:*",
        "org.pentaho:pentaho-aggdesigner-algorithm",
        "net.hydromatic:linq4j",
        "net.hydromatic:quidem"))
    case object v2_0 extends HiveVersion("2.0.1",
      exclusions = Seq("org.apache.curator:*",
        "org.pentaho:pentaho-aggdesigner-algorithm"))
    case object v2_1 extends HiveVersion("2.1.1",
      exclusions = Seq("org.apache.curator:*",
        "org.pentaho:pentaho-aggdesigner-algorithm"))
    case object v2_2 extends HiveVersion("2.2.0",
      exclusions = Seq("org.apache.curator:*",
        "org.pentaho:pentaho-aggdesigner-algorithm"))
    case object v2_3 extends HiveVersion("2.3.3",
      exclusions = Seq("org.apache.curator:*",
        "org.pentaho:pentaho-aggdesigner-algorithm"))
    // every version the metastore client shim layer knows how to talk to
    val allSupportedHiveVersions = Set(v12, v13, v14, v1_0, v1_1, v1_2, v2_0, v2_1, v2_2, v2_3)
  }
  // scalastyle:on
}
| bravo-zhang/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala | Scala | apache-2.0 | 3,638 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.spark
import java.util
/**
 * Accumulates every cell that will be bulk loaded into a single HBase row,
 * grouped by column family and kept sorted by family, then qualifier.
 */
class FamiliesQualifiersValues extends Serializable {
  // TreeMaps are used because the bulk-load read path requires the
  // families and qualifiers to come back in sorted order.
  val familyMap = new util.TreeMap[ByteArrayWrapper,
    util.TreeMap[ByteArrayWrapper, Array[Byte]]]()

  // Scratch wrapper reused for family look-ups: rows normally contain far
  // more columns than families, so the common hit path allocates nothing.
  val reusableWrapper = new ByteArrayWrapper(null)

  /**
   * Adds a new cell to an existing row
   * @param family HBase column family
   * @param qualifier HBase column qualifier
   * @param value HBase cell value
   */
  def += (family: Array[Byte], qualifier: Array[Byte], value: Array[Byte]): Unit = {
    reusableWrapper.value = family
    val qualifiers = familyMap.get(reusableWrapper) match {
      case null =>
        // first cell for this family: register a fresh qualifier map under
        // a dedicated (non-reused) wrapper
        val created = new util.TreeMap[ByteArrayWrapper, Array[Byte]]()
        familyMap.put(new ByteArrayWrapper(family), created)
        created
      case existing => existing
    }
    qualifiers.put(new ByteArrayWrapper(qualifier), value)
  }
}
| tmalaska/SparkOnHBase | src/main/scala/org/apache/hadoop/hbase/spark/FamiliesQualifiersValues.scala | Scala | apache-2.0 | 2,003 |
package com.ing.baker.petrinet.api
/** Enrichment operations for multisets (element -> multiplicity maps). */
trait MultiSetOps {

  implicit class MultiSetFunctions[T](mset: MultiSet[T]) {

    /** Removes `other`'s elements, with multiplicity, from this multiset. */
    def multisetDifference(other: MultiSet[T]): MultiSet[T] =
      other.foldLeft(mset) { (acc, entry) =>
        val (element, count) = entry
        acc.get(element) match {
          case Some(n) if n > count => acc + (element -> (n - count))
          case Some(_)              => acc - element
          case None                 => acc
        }
      }

    /** Adds `other`'s elements, with multiplicity, to this multiset. */
    def multisetSum(other: MultiSet[T]): MultiSet[T] =
      other.foldLeft(mset) { (acc, entry) =>
        val (element, count) = entry
        acc + (element -> (acc.getOrElse(element, 0) + count))
      }

    /**
     * This checks that the given (other) multiset is a sub set of this one.
     *
     * @param other
     * @return
     */
    def isSubSet(other: MultiSet[T]): Boolean =
      other.forall { case (element, count) => mset.getOrElse(element, 0) >= count }

    /** Total number of elements, counting multiplicity. */
    def multisetSize: Int = mset.valuesIterator.sum

    /** Returns `map` with the multiplicity of `element` set to `m`. */
    def setMultiplicity(map: Map[T, Int])(element: T, m: Int) = map.updated(element, m)

    /** Every element repeated according to its multiplicity. */
    def allElements: Iterable[T] = mset.foldLeft(List.empty[T]) { (acc, entry) =>
      val (element, count) = entry
      List.fill[T](count)(element) ::: acc
    }

    /** Removes up to `count` occurrences of `element`. */
    def multisetDecrement(element: T, count: Int): MultiSet[T] =
      mset.get(element) match {
        case Some(n) if n > count => mset + (element -> (n - count))
        case Some(_)              => mset - element
        case None                 => mset
      }

    /** Adds `count` occurrences of `element`. */
    def multisetIncrement(element: T, count: Int): MultiSet[T] =
      mset + (element -> (mset.getOrElse(element, 0) + count))

    /** True when both multisets contain some common element with positive count. */
    def multisetIntersects(other: MultiSet[T]): Boolean =
      mset.keys.exists(element => other.getOrElse(element, 0) > 0)
  }
}
| ing-bank/baker | core/intermediate-language/src/main/scala/com/ing/baker/petrinet/api/MultiSetOps.scala | Scala | mit | 1,915 |
package boardGame
/** A participant in the board game, identified by a display name.
  * NOTE(review): `name` is currently unused by this skeleton — confirm intent.
  */
abstract class Player(name:String) {
}
package nexus.diff
import nexus._
import shapeless._
import scala.collection._
/**
* @author Tongfei Chen
*/
/** A fixed-size, indexable collection of samples processed together. */
trait Batch[+X] extends IndexedSeq[X] {
  /** Number of samples in this batch. */
  def batchSize: Int
  /** The i-th sample (0-based). */
  def apply(i: Int): X
  // the IndexedSeq length is defined to be the batch size
  final def length = batchSize
}
object Batch {
  /** Builds a batch from the given samples. */
  def apply[X](xs: X*) = fromSeq(xs)
  /** Wraps a sequence as an in-memory batch. */
  def fromSeq[X](xs: Seq[X]): Batch[X] = new BatchObject[X](xs.toIndexedSeq)
  /** Stacks tensors into a batched tensor.
    * NOTE(review): unimplemented — both steps are `???` and will throw
    * NotImplementedError at runtime.
    */
  def fromTensors[T[_], E, U](xs: Seq[T[U]])(implicit T: IsTensorK[T, E]): Batch[T[U]] = {
    val v = ??? // T.stack(xs, batch)
    ???
  }
}
/** Plain in-memory [[Batch]] backed by an `IndexedSeq`. */
class BatchObject[X](xs: IndexedSeq[X]) extends Batch[X] with IndexedSeq[X] {
  /** Batch size equals the size of the backing sequence. */
  def batchSize: Int = xs.size
  /** Element access delegates to the backing sequence. */
  def apply(i: Int): X = xs(i)
  /** This object is already a sequence view of itself. */
  def asSeq: BatchObject[X] = this
}
/** A batch stored as one stacked tensor whose leading axis is the batch dimension. */
trait BatchTensor[T[_], E, U] extends Batch[T[U]] {
  // axis labels of the underlying tensor, minus the leading batch axis
  type Uh <: HList
  /** The stacked tensor holding all samples, batch axis first. */
  def underlying: T[BatchDim :: Uh]
  implicit def T: IsTensorK[T, E]
  // batch size is the extent of axis 0 of the stacked tensor
  def batchSize = T.sizeOfDim(underlying, 0)
  override def apply(i: Int) = {
    val v: T[Uh] = T.sliceAlong(underlying, BatchDim, i)
    v.asInstanceOf[T[U]] // this cast is safe: U is Uh without the batch axis
  }
}
| ctongfei/nexus | diff/src/main/scala/nexus/diff/Batch.scala | Scala | mit | 994 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.model
/** A username/password pair used to authenticate requests. */
case class Credentials(username: String, password: String)
| MykolaB/gatling | gatling-commons/src/main/scala/io/gatling/commons/model/Credentials.scala | Scala | apache-2.0 | 711 |
/*
* Copyright (c) 2013 Scott Abernethy.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package controllers
import play.api.mvc._
/** Serves the Angular partial templates; each action renders one static view. */
object Partials extends Controller {
  def home() = Action { request =>
    Ok(views.html.home())
  }
  def tome() = Action { request =>
    Ok(views.html.tome())
  }
  def recruit() = Action { request =>
    Ok(views.html.recruit())
  }
  def addGateway() = Action { request =>
    Ok(views.html.addGateway("Add Gateway"))
  }
  // reuses the addGateway template with a different title — appears intentional
  def editGateway() = Action { request =>
    Ok(views.html.addGateway("Edit Gateway"))
  }
}
| scott-abernethy/opener-of-the-way | app/controllers/Partials.scala | Scala | gpl-3.0 | 1,168 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.linkagerule.input
/**
 * Simple transformer which applies a single-value transformation to each value.
 *
 * All input value sets are merged (set union) before every value is transformed
 * individually by [[evaluate]].
 */
abstract class SimpleTransformer extends Transformer {
  override final def apply(values: Seq[Set[String]]): Set[String] = {
    // foldLeft (rather than reduce) keeps this total: an empty input Seq now
    // yields an empty result instead of throwing UnsupportedOperationException
    values.foldLeft(Set.empty[String])(_ ++ _).map(evaluate)
  }

  /** Transforms a single value. */
  def evaluate(value: String): String
}
| fusepoolP3/p3-silk | silk-core/src/main/scala/de/fuberlin/wiwiss/silk/linkagerule/input/SimpleTransformer.scala | Scala | apache-2.0 | 904 |
object Solution {
type Triplet = (Int, Int, Int)
def findTriplet(n: Int): Option[Triplet] = {
var c = n / 3 + 1 // suppose we have a < b < c
while (c < n / 2) { // of course a + b > c
// a + b = N - c; a^2 + b^2 = c^2 => 2ab = (N-c)^2 - c^2
// a^2 - 2ab + b^2 = c^2 - (N-c)^2 + c^2 = c^2 - N^2 + 2Nc
// (a-b)^2 = c^2 - N^2 + 2Nc should be a square of natural number
val squareOfABDiff = c * c - n * n + 2 * n * c
val squareRoot = math.sqrt(squareOfABDiff).toInt
if (squareRoot * squareRoot == squareOfABDiff) {
val b = (n - c + squareRoot) / 2
val a = n - c - b
return Some((a, b, c)) // if we can find a result. Then this result will be the biggest as abc = -N(c - N/4)^2 + N*(N/4)^2
}
c += 1
}
return None
}
def main(args: Array[String]) {
val t = readLine.toInt
for (i <- 1 to t) {
val n = readLine.toInt
val tripleLet = findTriplet(n)
if (tripleLet.isDefined) {
val (a, b, c) = tripleLet.get
println(a * b * c)
} else {
println("-1")
}
}
}
}
| advancedxy/hackerrank | project-euler/problem-9/SpecialPythagoreanTriplet.scala | Scala | mit | 1,113 |
package uk.co.turingatemyhamster
package owl2
package examples
import ast._
import scala.language.implicitConversions
object UsingAst {
implicit class W3CHelper(val sc: StringContext) extends AnyVal {
// def pn(args: Any*): PrefixName = ???
// def iri(args: Any*): IRI = ???
def pl(args: Any*): AbbreviatedIRI = {
val s = sc.s(args :_*)
val i = s.indexOf(":")
val (pn, ln) = s.splitAt(i+1)
AbbreviatedIRI(PrefixName(pn), ln)
}
def lit(args: Any*): StringLiteralNoLangauge =
StringLiteralNoLangauge(sc.s(args :_*))
}
implicit def stringPairToIRI(ss: (String, String)): IRI = AbbreviatedIRI(prefixName = PrefixName(ss._1), abbreviatedString = ss._2)
implicit def toClass[I](i: I)(implicit iIRI: I => IRI): Class = Class(iIRI(i))
implicit def toDatatype[I](i: I)(implicit iIRI: I => IRI): Datatype = Datatype(i)
implicit def toAnnotationProperty[I](i: I)(implicit iIRI: I => IRI): AnnotationProperty = AnnotationProperty(i)
implicit def toNamedIndividual[I](i: I)(implicit iIRI: I => IRI): NamedIndividual = NamedIndividual(i)
implicit def toObjectProperty[I](i: I)(implicit iIRI: I => IRI): ObjectProperty = ObjectProperty(i)
implicit def toDataProperty[I](i: I)(implicit iIRI: I => IRI): DataProperty = DataProperty(i)
implicit class Syntax[I](val _i: I) extends AnyVal {
def β [J](j: J) = examples.β(_i, j)
def is_a [J](j: J)(implicit iIndividual: I => Individual, jIRI: J => ClassExpression): ClassAssertion =
ClassAssertion(Nil, _i, j)
def --- [J](j: J) = new {
def --> [K](k: K) = Triple(_i, j, k)
}
def inverse = Inverse(_i)
def complement = Complement(_i)
def β€ (c: Int): β€[I] = examples.β€(_i, BigInt(c))
def === (c: Int): ===[I] = examples.===(_i, BigInt(c))
def β₯ (c: Int): β₯[I] = examples.β₯(_i, BigInt(c))
def ^^[J](j: J)(implicit iString: I => String, jDatatype: J => Datatype): TypedLiteral = TypedLiteral(_i, j)
def | [J](j: J)(implicit iDT: I => Datatype, jFR: J => FacetRestriction): DatatypeRestriction =
DatatypeRestriction(datatype = _i, restrictions = j::Nil)
def --> [J](j: J)(implicit iP: I => AnnotationProperty, jV: J => AnnotationValue) : Annotation =
Annotation(Nil, _i, j)
def ## (anns: Annotation*): examples.##[I] = examples.##(_i, anns.to[List])
def =β¨[J](j1: J, j2: J, js: J*): β¨[J, I] = β¨(_i, j1, j2, js.to[List])
def β‘[J](j: J): β‘β‘[I,J] = β‘β‘(_i, j)
def β[J](j: J): ββ[I, J] = ββ(_i, j)
def Β¬ = _i.complement
}
}
/** Pairs a value (typically an axiom builder) with annotations to attach to it. */
case class ##[A](ax: A, anns: List[Annotation])
object ## {
  /** Converts an annotated value into its axiom with the annotations appended. */
  implicit def toAxiom[A, AX <: Axiom](annotated: ##[A])(implicit aAx: A => AX): AX = {
    val ax = annotated.ax : AX
    // NOTE(review): relies on every Axiom subtype exposing copy(axiomAnnotations = ...)
    ax.copy(axiomAnnotations = ax.axiomAnnotations ++ annotated.anns)
  }
}
case class β [Sub, Sup] (sub: Sub, sup: Sup)
object β {
implicit def toSubClass[Sub, Sup](sc: β[Sub, Sup])(implicit
ceSub: Sub => ClassExpression,
ceSup: Sup => ClassExpression): SubClassOf =
SubClassOf(Nil, sc.sub, sc.sup)
implicit def toSubObjectPropertyOf[Sub, Sup](sc: β[Sub, Sup])(implicit
opeSub: Sub => ObjectPropertyExpression,
opeSup: Sup => ObjectPropertyExpression): SubObjectPropertyOf =
SubObjectPropertyOf(Nil, sc.sub, (sc.sup : ObjectPropertyExpression) :: Nil)
}
/** A generic subject/predicate/object triple, implicitly convertible to a concrete assertion. */
case class Triple[Sub, Pred, Obj](sub: Sub, pred: Pred, obj: Obj)
object Triple {
  /** Triple viewed as an annotation assertion. */
  implicit def toAnnotation[Sub, Pred, Obj](t: Triple[Sub, Pred, Obj]
                                             )(implicit
                                               subSub: Sub => AnnotationSubject,
                                               predProp: Pred => AnnotationProperty,
                                               objVal: Obj => AnnotationValue): AnnotationAssertion =
    AnnotationAssertion(Nil, t.sub, t.pred, t.obj)
  /** Triple viewed as an object-property assertion between two individuals. */
  implicit def toObjectPropertyAssertion[Sub, Pred, Obj](t: Triple[Sub, Pred, Obj]
                                                          )(implicit
                                                            subIndividual: Sub => Individual,
                                                            predPE: Pred => ObjectPropertyExpression,
                                                            objIndividual: Obj => Individual): ObjectPropertyAssertion =
    ObjectPropertyAssertion(Nil, t.sub, t.pred, t.obj)
  /** Triple viewed as a data-property assertion from an individual to a literal. */
  implicit def toDataPropertyAssertion[Sub, Pred, Obj](t: Triple[Sub, Pred, Obj]
                                                        )(implicit
                                                          subIndividual: Sub => Individual,
                                                          predPE: Pred => DataPropertyExpression,
                                                          objLit: Obj => Literal): DataPropertyAssertion =
    DataPropertyAssertion(Nil, t.sub, t.pred, t.obj)
}
/** Marks a property (expression) as inverted; `inverse` unwraps it again. */
case class Inverse[P](p: P) {
  def inverse: P = p
}
object Inverse {
  /** Inverting a plain object property yields an InverseObjectProperty. */
  implicit def inverseOfP[P](i: Inverse[P])(implicit pP : P => ObjectProperty): InverseObjectProperty =
    InverseObjectProperty(i.p)
  /** Inverting an already-inverse property unwraps back to the original property. */
  implicit def inverseOfI(i: Inverse[InverseObjectProperty]): ObjectProperty =
    i.p.objectProperty
}
/** Marks a class expression or data range as complemented; `complement` unwraps it. */
case class Complement[T](t: T) {
  def complement: T = t
}
object Complement {
  /** Complement of a data range. */
  implicit def dataComplement[T](c: Complement[T])(implicit tD: T => DataRange): DataComplementOf =
    DataComplementOf(dataRange = c.t)
  /** Complement of a class expression. */
  implicit def objectComplement[T](c: Complement[T])(implicit cC: T => ClassExpression): ObjectComplementOf =
    ObjectComplementOf(classExpression = c.t)
}
// Variadic intersection; requires at least two members.
case class β[I](i1: I, i2: I, is: List[I])
object β {
  // Variadic convenience constructor.
  def apply[I](i1: I, i2: I, is: I*): β[I] = new β[I](i1, i2, is.to[List])
  // Intersection of class expressions.
  implicit def objectIntersection[I](oi: β[I])(implicit iCE: I => ClassExpression): ObjectIntersectionOf =
    ObjectIntersectionOf((oi.i1 :: oi.i2 :: oi.is).map(iCE))
  // Intersection of data ranges.
  implicit def dataIntersection[I](oi: β[I])(implicit iDR: I => DataRange): DataIntersectionOf =
    DataIntersectionOf(dataRanges = (oi.i1 :: oi.i2 :: oi.is).map(iDR))
}
// Two-member intersection whose operands may have different static types.
case class ββ[I, J](i: I, j: J)
object ββ {
  implicit def objectIntersection[I, J](oi: ββ[I, J])(implicit
      iCE: I => ClassExpression,
      jCE: J => ClassExpression): ObjectIntersectionOf =
    ObjectIntersectionOf(List(iCE(oi.i), jCE(oi.j)))
}
// Variadic union; requires at least two members.
case class β[I](i1: I, i2: I, is: List[I])
object β {
  // Variadic convenience constructor.
  def apply[I](i1: I, i2: I, is: I*): β[I] = new β[I](i1, i2, is.to[List])
  // Union of class expressions.
  implicit def objectUnion[I](oi: β[I])(implicit iCE: I => ClassExpression): ObjectUnionOf =
    ObjectUnionOf((oi.i1 :: oi.i2 :: oi.is).map(iCE))
  // Union of data ranges.
  implicit def dataUnion[I](oi: β[I])(implicit iDR: I => DataRange): DataUnionOf =
    DataUnionOf(dataRanges = (oi.i1 :: oi.i2 :: oi.is).map(iDR))
}
// Variadic "pairwise disjoint" operator; requires at least two members.
// NOTE(review): the identifier appears to contain a character that the
// extraction rendered across a line break; the original token layout is
// preserved verbatim below — verify against the repository source.
case class β=β
[I](i1: I, i2: I, is: List[I])
case object β=β
{
  // Variadic convenience constructor.
  def apply[I](i1: I, i2: I, is: I*): β=β
[I] = β=β
(i1, i2, is.to[List])
  // All members are mutually disjoint classes.
  implicit def toDisjointClasses[I](disj: β=β
[I])(implicit iCE: I => ClassExpression): DisjointClasses =
    DisjointClasses(Nil, (disj.i1 :: disj.i2 :: disj.is) map iCE)
  // All members are mutually disjoint object properties.
  implicit def toDisjointObjectProperties[I](disj: β=β
[I])(implicit iP: I => ObjectPropertyExpression): DisjointObjectProperties =
    DisjointObjectProperties(Nil, (disj.i1 :: disj.i2 :: disj.is) map iP)
  // All members are mutually disjoint data properties.
  implicit def toDisjointDataProperties[I](disj: β=β
[I])(implicit iP: I => DataPropertyExpression): DisjointDataProperties =
    DisjointDataProperties(Nil, (disj.i1 :: disj.i2 :: disj.is) map iP)
}
// Disjoint union: class c is the union of at least two pairwise-disjoint
// member class expressions.
case class β¨[I, C](c: C, i1: I, i2: I, is: List[I])
object β¨ {
  // Variadic convenience constructor.
  def apply[I, C](c: C, i1: I, i2: I, is: I*): β¨[I, C] = β¨(c: C, i1, i2, is.to[List])
  // NOTE(review): unlike every sibling converter in this DSL, this one is not
  // marked `implicit`, so a β¨(...) value never converts automatically —
  // confirm whether that omission is intentional.
  def toDisjointUnion[I, C](disj: β¨[I, C])(implicit iCE: I => ClassExpression, cC: C => Class): DisjointUnion =
    DisjointUnion(Nil, (disj.i1 :: disj.i2 :: disj.is) map iCE, disj.c)
}
// Variadic equivalence; requires at least two members.
case class β‘[I](i1: I, i2: I, is: List[I])
object β‘ {
  // Variadic convenience constructor.
  def apply[I](i1: I, i2: I, is: I*): β‘[I] = new β‘[I](i1, i2, is.to[List])
  // Members are mutually equivalent classes.
  implicit def toEquivalentClasses[I](equiv: β‘[I])(implicit iClassExpression: I => ClassExpression): EquivalentClasses =
    EquivalentClasses(Nil, (equiv.i1 :: equiv.i2 :: equiv.is).map(iClassExpression))
  // Members are mutually equivalent object properties.
  implicit def toEquivalentObjectProperties[I](equiv: β‘[I])(implicit iP: I => ObjectPropertyExpression): EquivalentObjectProperties =
    EquivalentObjectProperties(Nil, (equiv.i1 :: equiv.i2 :: equiv.is).map(iP))
  // Members are mutually equivalent data properties.
  implicit def toEquivalentDataProperties[I](equiv: β‘[I])(implicit iP: I => DataPropertyExpression): EquivalentDataProperties =
    EquivalentDataProperties(Nil, (equiv.i1 :: equiv.i2 :: equiv.is).map(iP))
}
// Two-member equivalence whose operands may have different static types.
case class β‘β‘[I, J](i: I, j: J)
object β‘β‘ {
  implicit def toEquivalentClasses[I, J](equiv: β‘β‘[I, J])(implicit
      iCE: I => ClassExpression,
      jCE: J => ClassExpression): EquivalentClasses =
    EquivalentClasses(Nil, List(iCE(equiv.i), jCE(equiv.j)))
}
// Variadic "all different"; requires at least two members.
case class β’[I](i1: I, i2: I, is: List[I])
object β’ {
  // Variadic convenience constructor.
  def apply[I](i1: I, i2: I, is: I*): β’[I] = new β’[I](i1, i2, is.to[List])
  // All members are pairwise distinct individuals.
  implicit def toDifferentIndividuals[I](nequiv: β’[I])(implicit iIndividual: I => Individual): DifferentIndividuals =
    DifferentIndividuals(Nil, (nequiv.i1 :: nequiv.i2 :: nequiv.is).map(iIndividual))
}
// Enumerated set of members; requires at least one member.
case class β[I](i1: I, is: List[I])
object β {
  // Variadic convenience constructor.
  def apply[I](i1: I, is: I*): β[I] = new β[I](i1, is.to[List])
  // Enumeration of individuals.
  implicit def toObjectOneOf[I](members: β[I])(implicit iIndividual: I => Individual): ObjectOneOf =
    ObjectOneOf(iIndividual(members.i1) :: members.is.map(iIndividual))
}
// Existential restriction: property p has some value in v.
case class β[P, V](p: P, v: V)
case object β {
  // Existential over a class expression.
  implicit def someObjectSomeValuesFrom[P, V](some: β[P, V])(implicit
      pPE: P => ObjectPropertyExpression,
      vCE: V => ClassExpression): ObjectSomeValuesFrom =
    ObjectSomeValuesFrom(pPE(some.p), vCE(some.v))
  // Existential over a data range; the property becomes a one-element list.
  implicit def someDataSomeValuesFrom[P, V](some: β[P, V])(implicit
      pPE: P => DataPropertyExpression,
      vDR: V => DataRange): DataSomeValuesFrom =
    DataSomeValuesFrom(List(pPE(some.p)), vDR(some.v))
}
// Universal restriction: every value of property p lies in v.
case class β[P, V](p: P, v: V)
case object β {
  // Universal over a class expression.
  implicit def allObjectValuesFrom[P, V](all: β[P, V])(implicit
      pPE: P => ObjectPropertyExpression,
      vCE: V => ClassExpression): ObjectAllValuesFrom =
    ObjectAllValuesFrom(pPE(all.p), vCE(all.v))
  // Universal over a data range; the property becomes a one-element list.
  implicit def allDataValuesFrom[P, V](all: β[P, V])(implicit
      pPE: P => DataPropertyExpression,
      vDR: V => DataRange): DataAllValuesFrom =
    DataAllValuesFrom(List(pPE(all.p)), vDR(all.v))
}
// "Has value" pair: property p takes the specific value v.
case class β[P, V](p: P, v: V)
object β {
  // Object property with a specific individual value.
  implicit def toObjectHasValue[P, V](pv: β[P, V])(implicit
      pPE: P => ObjectPropertyExpression,
      vI: V => Individual): ObjectHasValue =
    ObjectHasValue(pPE(pv.p), vI(pv.v))
  // Data property with a specific literal value.
  implicit def toDataHasValue[P, V](pv: β[P, V])(implicit
      pPE: P => DataPropertyExpression,
      vI: V => Literal): DataHasValue =
    DataHasValue(pPE(pv.p), vI(pv.v))
  // Facet (constraining IRI) with a restricting literal.
  implicit def toFacetRestriction[P, V](pv: β[P, V])(implicit
      pI: P => IRI,
      vI: V => Literal): FacetRestriction =
    FacetRestriction(pI(pv.p), vI(pv.v))
}
// "property β€ c": at most c values.
case class β€[P](p: P, c: BigInt)
object β€ {
  implicit def toObjectMaxCardinality[P](lteq: β€[P])(implicit pPE: P => ObjectPropertyExpression): ObjectMaxCardinality =
    ObjectMaxCardinality(pPE(lteq.p), None, lteq.c)
  // NOTE(review): the name says "Min" but this correctly builds a *maximum*
  // cardinality restriction for β€ — only the method name is misleading.
  // Renaming would break any code that calls it explicitly, so it is kept.
  implicit def toDataMinCardinality[P](lteq: β€[P])(implicit pPE: P => DataPropertyExpression): DataMaxCardinality =
    DataMaxCardinality(pPE(lteq.p), None, lteq.c)
}
// "property === c": exactly c values.
case class ===[P](p: P, c: BigInt)
object === {
  implicit def toObjectExactCardinality[P](eq: ===[P])(implicit pPE: P => ObjectPropertyExpression): ObjectExactCardinality =
    ObjectExactCardinality(pPE(eq.p), None, eq.c)
  implicit def toDataExactCardinality[P](eq: ===[P])(implicit pPE: P => DataPropertyExpression): DataExactCardinality =
    DataExactCardinality(pPE(eq.p), None, eq.c)
}
// "property β₯ c": at least c values.
case class β₯[P](p: P, c: BigInt)
object β₯ {
  // NOTE(review): both converter names say "Max" but each correctly builds a
  // *minimum* cardinality restriction for β₯ (apparent copy/paste from β€).
  // Only the names are misleading; renaming would break explicit callers.
  implicit def toObjectMaxCardinality[P](gteq: β₯[P])(implicit pPE: P => ObjectPropertyExpression): ObjectMinCardinality =
    ObjectMinCardinality(pPE(gteq.p), None, gteq.c)
  implicit def toDataMaxCardinality[P](gteq: β₯[P])(implicit pPE: P => DataPropertyExpression): DataMinCardinality =
    DataMinCardinality(pPE(gteq.p), None, gteq.c)
}
/** A worked tour of the DSL: most statements below build the same OWL
  * construct twice, once with the plain AST constructors and once with the
  * symbolic operators, so the two styles can be compared side by side.
  * Every expression is evaluated for its (implicit) conversions only; the
  * results are discarded.
  *
  * @author Matthew Pocock
  */
class UsingAst {
  import UsingAst._

  // --- Ontologies -----------------------------------------------------------
  Ontology(
    directlyImportsDocuments = "http://www.example.com/ontolgy1#"::Nil,
    ontologyAnnotations = Annotation(annotationProperty = AnnotationProperty("rdfs" -> "label"),
      annotationValue = StringLiteralNoLangauge("An example")) :: Nil,
    axioms = SubClassOf(subClassExpression = "a" -> "Child", superClassExpression = "a" -> "Person") :: Nil)
  Ontology(
    directlyImportsDocuments = FullIRI("http://www.example.com/2.0")::Nil,
    ontologyIRI = Some(FullIRI("http://www.example.com/importing-ontology")))

  // --- Entities: four equivalent ways to name a class -----------------------
  // NOTE(review): aChild2–aChild4 use lowercase "child" where aChild1 uses
  // "Child" — probably unintended in the examples; confirm against the docs.
  val aChild1 = Class(entityIRI = AbbreviatedIRI(prefixName = PrefixName("a"), abbreviatedString = "Child"))
  val aChild2 = Class("a" -> "child")
  val aChild3 = "a" -> "child" : Class
  val aChild4 = pl"a:child" : Class
  val aPerson1 = pl"a:person" : Class

  // --- Subclass axioms, constructor vs operator forms -----------------------
  SubClassOf(
    subClassExpression = Class(AbbreviatedIRI(PrefixName("a"), "Child")),
    superClassExpression = Class(AbbreviatedIRI(PrefixName("a"), "Person")))
  SubClassOf(
    subClassExpression = "a" -> "Child",
    superClassExpression = "a" -> "Person")
  SubClassOf(Nil, "a" -> "Child", "a" -> "Person")
  β(aChild1, aPerson1)
  β(aChild1, aPerson1) : SubClassOf
  aChild1 β aPerson1
  aChild1 β aPerson1 : SubClassOf
  pl"a:Child" β pl"a:Person" : SubClassOf
  pl"a:Child" β pl"a:Person" : ClassAxiom

  // --- Datatypes ------------------------------------------------------------
  val xsdInteger1 = pl"xsd:integer"
  val xsdInteger2 = pl"xsd:integer" : Datatype
  // NOTE(review): "xsdInteter3" looks like a typo for "xsdInteger3".
  val xsdInteter3 = pl"xsd:integer" : DataRange
  DataPropertyRange(Nil, pl"a:hasAge", pl"xsd:integer")

  // --- Declarations of the built-in entities --------------------------------
  Declaration(Nil, Class("owl" -> "Thing"))
  Declaration(Nil, Class("owl" -> "Nothing"))
  Declaration(Nil, ObjectProperty("owl" -> "topObjectProperty"))
  Declaration(Nil, ObjectProperty("owl" -> "bottomObjectProperty"))
  Declaration(Nil, DataProperty("owl" -> "topDataProperty"))
  Declaration(Nil, DataProperty("owl" -> "bottomDataProperty"))
  Declaration(Nil, Datatype("rdfs" -> "Literal"))

  // --- Assertions -----------------------------------------------------------
  ClassAssertion(Nil, "a" -> "Dog", "a" -> "Brian")
  ClassAssertion(Nil, "a" -> "Species", "a" -> "Dog")
  ClassAssertion(Nil, "a" -> "PetAnimal", "a" -> "Dog")
  pl"a:Brian" is_a pl"a:Dog"
  // NOTE(review): lowercase "species" here vs "Species" above.
  pl"a:Dog" is_a pl"a:species"
  pl"a:Dog" is_a pl"a:PetAnimal"
  AnnotationAssertion(Nil, "a" -> "Dog", "a" -> "addedBy", lit"Seth MacFarlane")
  pl"a:Dog" --- pl"a:addedBy" --> lit"Seth MacFarlane" : AnnotationAssertion
  ObjectPropertyAssertion(Nil, "a" -> "Peter", "a" -> "fatherOf", "a" -> "Stewie")
  pl"a:Peter" --- pl"a:fatherOf" --> pl"a:Stewie" : ObjectPropertyAssertion

  // --- Inverse properties; double inversion is the identity -----------------
  InverseObjectProperty("a" -> "fatherOf")
  pl"a:fatherOf" : ObjectProperty
  pl"a:fatherOf".inverse : InverseObjectProperty
  (pl"a:fatherOf".inverse : InverseObjectProperty).inverse : ObjectProperty
  pl"a:fatherOf".inverse.inverse : ObjectProperty

  // --- Data ranges: intersection, union, complement --------------------------
  DataIntersectionOf(dataRanges =
    ("xsd" -> "nonNegativeInteger" : DataRange) ::
    ("xsd" -> "nonPositiveInteger" : DataRange) ::
    Nil)
  β(pl"xsd:nonNegativeInteger", pl"xsd:nonPositiveInteger") : DataIntersectionOf
  DataUnionOf(dataRanges =
    ("xsd" -> "string" : DataRange) ::
    ("xsd" -> "integer" : DataRange) ::
    Nil)
  β(pl"xsd:string", pl"xsd:integer") : DataUnionOf
  DataComplementOf(dataRange = "xsd" -> "positiveInteger")
  pl"xsd:positiveInteger".complement : DataComplementOf
  pl"xsd:positiveInteger".complement.complement : Datatype

  // --- Class expressions: intersection, union, complement --------------------
  ObjectIntersectionOf(("a" -> "Dog" : ClassExpression) :: ("a" -> "CanTalk" : ClassExpression) :: Nil)
  β(pl"a:Dog", pl"a:CanTalk") : ObjectIntersectionOf
  ObjectUnionOf(("a" -> "Man" : ClassExpression) :: ("a" -> "Woman" : ClassExpression) :: Nil)
  β("a" -> "Man", "a" -> "Woman") : ObjectUnionOf
  ObjectComplementOf("a" -> "man")
  pl"a:man".complement : ObjectComplementOf
  pl"a:man".complement.complement : ClassExpression

  // --- Enumerations and distinct individuals ---------------------------------
  EquivalentClasses(Nil, List("a" -> "GriffinFamilyMember" : ClassExpression, ObjectOneOf(
    List("a" -> "Peter" : Individual,
      "a" -> "Lois",
      "a" -> "Stewie",
      "a" -> "Meg",
      "a" -> "Chris",
      "a" -> "Brian")
  )))
  DifferentIndividuals(individuals = List("a" -> "Quagmire" : Individual,
    "a" -> "Peter",
    "a" -> "Lois",
    "a" -> "Stewie",
    "a" -> "Meg",
    "a" -> "Chris",
    "a" -> "Brian")
  )
  pl"a:GriffinFamilyMember".complement : ClassExpression
  β‘(pl"a:GriffinFamilyMember" : ClassExpression,
    β(
      pl"a:Peter",
      pl"a:Lois",
      pl"a:Stewie",
      pl"a:Meg",
      pl"a:Chris",
      pl"a:Brian") : ClassExpression) : ClassAxiom
  β’(pl"a:Quagmire",
    pl"a:Peter",
    pl"a:Lois",
    pl"a:Stewie",
    pl"a:Meg",
    pl"a:Chris",
    pl"a:Brian") : DifferentIndividuals

  // --- Object property restrictions ------------------------------------------
  ObjectPropertyAssertion(Nil, "a" -> "Peter", "a" -> "fatherOf", "a" -> "Stewie")
  ClassAssertion(Nil, "a" -> "Stewie", "a" -> "Man")
  pl"a:Peter" --- pl"a:fatherOf" --> pl"a:Stewie"
  pl"a:Stewie" is_a pl"a:Man"
  ObjectSomeValuesFrom("a" -> "fatherOf", "a" -> "Man")
  β(pl"a:fatherOf", pl"a:Man") : ObjectSomeValuesFrom
  ObjectAllValuesFrom("a" -> "hasPet", "a" -> "Dog")
  β(pl"a:hasPet", pl"a:Dog") : ObjectAllValuesFrom
  ObjectHasValue("a" -> "hasPet", "a" -> "Brian")
  β(pl"a:hasPet", pl"a:Brian") : ObjectHasValue
  ObjectHasSelf("a" -> "likes")
  ObjectMaxCardinality(objectPropertyExpression = "a" -> "hasPet", cardinality = BigInt(1))
  // NOTE(review): the ascription below is DataMaxCardinality although
  // a:hasPet is used as an object property one line up — confirm intended.
  pl"a:Peter" is_a (pl"a:hasPet" β€ 1 : DataMaxCardinality)
  ObjectExactCardinality(objectPropertyExpression = "a" -> "hasPet", cardinality = BigInt(1))
  pl"a:hasPet" === 1 : ObjectExactCardinality
  ObjectMinCardinality(objectPropertyExpression = "a" -> "hasPet", cardinality = BigInt(1))
  pl"a:hasPet" β₯ 1 : ObjectMinCardinality

  // --- Data property restrictions --------------------------------------------
  DataPropertyAssertion(Nil, "a" -> "Meg", "a" -> "hasAge", "17"^^("xsd" -> "integer"))
  pl"a:Meg" --- pl"a:hasAge" --> ("17"^^pl"xsd:integer") : DataPropertyAssertion
  DataSomeValuesFrom(
    ("a" -> "hasAge")::Nil,
    DatatypeRestriction(
      BigInt(1),
      "xsd" -> "integer",
      FacetRestriction("xsd" -> "maxExclusive", "20"^^("xsd" -> "integer"))::Nil),
    BigInt(1))
  β(pl"a:hasAge", pl"xsd:integer" | β(pl"xsd:maxExclusive", "20"^^pl"xsd:integer")) : DataSomeValuesFrom
  β(pl"a:hasZIP", pl"xsd:integer") : DataAllValuesFrom
  β(pl"a:hasAge", "17"^^pl"xsd:integer") : DataHasValue
  DataMinCardinality(pl"a:hasName", None, BigInt(2))
  pl"a:hasName" β₯ 2 : DataMinCardinality
  pl"a:hasName" === 2 : DataExactCardinality
  pl"a:hasName" β€ 2 : DataMaxCardinality

  // --- Annotated axioms via the ## operator ----------------------------------
  SubClassOf(Annotation(Nil, pl"rdfs:comment", lit"Male people are people.")::Nil, pl"a:Man", pl"a:Person")
  // NOTE(review): pl"Person" below lacks the "a:" prefix used everywhere else.
  (pl"a:Man" β pl"Person" : SubClassOf) ## (pl"rdfs:comment" --> lit"Male people are people")
  (pl"a:Man" β pl"Person" : ClassAxiom) ## (pl"rdfs:comment" --> lit"Male people are people")
  examples.##.toAxiom((pl"a:Man" β pl"Person") ## (pl"rdfs:comment" --> lit"Male people are people")) : ClassAxiom
  (pl"a:Man" β pl"Person" ) ## (pl"rdfs:comment" --> lit"Male people are people") : ClassAxiom

  // --- Sub-properties and multi-valued triples --------------------------------
  SubObjectPropertyOf(Nil, "a" -> "hasDog", ("a" -> "hasPet" : ObjectPropertyExpression) :: Nil)
  ObjectPropertyAssertion(Nil, "a" -> "Peter", "a" -> "hasDog", "a" -> "Brian")
  ObjectPropertyAssertion(Nil, "a" -> "Peter", "a" -> "hasPet", "a" -> "Brian")
  pl"a:hasDog" β pl"a:hasPet" : SubObjectPropertyOf
  pl"a:Peter" --- pl"a:hasDog" --> pl"a:Brian"
  pl"a:Peter" --- pl"a:hasPet" --> pl"a:Brian"
  pl"a:Peter" --- (pl"a:hasDog" --> pl"a:Brian",
    pl"a:hasPet" --> pl"a:Brian")
  pl"a:Meg" --- pl"a:hasName" --> (lit"Meg",
    lit"Meggan")
  pl"a:ChildlessPerson" β‘ (pl"a:Person" β pl"a:Parent"Β¬)
}
| drdozer/owl2 | core/src/main/scala/uk/co/turingatemyhamster/owl2/examples/UsingAst.scala | Scala | apache-2.0 | 20,929 |
import scala.quoted.*
// Inline macro entry point: expands at compile time to a string literal
// containing the rendered source of the quoted List("One") expression.
inline def foo = ${fooImpl}
// Macro implementation: quote the single element, lift it into a list
// expression, then return the rendered source of that expression as a
// string expression.
def fooImpl(using Quotes) = {
  val listExpr = Expr.ofList('{"One"} :: Nil)
  Expr(listExpr.show)
}
| dotty-staging/dotty | tests/run-macros/i6765-b/Macro_1.scala | Scala | apache-2.0 | 141 |
package com.sksamuel.scapegoat.inspections.matching
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ FreeSpec, Matchers, OneInstancePerTest }
import akka.actor.PossiblyHarmful
/** @author Stephen Samuel */
// Verifies SuspiciousMatchOnClassObject: a `match` arm that names a
// case-class companion object is reported, while legitimate patterns
// (typed wildcards, unapply extractors, case objects, plain objects)
// produce no warning. Each snippet is compiled and the inspection's
// warning count is checked.
class SuspiciousMatchOnClassObjectTest
  extends FreeSpec
  with Matchers
  with PluginRunner
  with OneInstancePerTest {

  // The single inspection exercised by this suite.
  override val inspections = Seq(new SuspiciousMatchOnClassObject)

  "SuspiciousMatchOnClassObject" - {
    "should report warning" - {
      "for matching on object for case class with params" in {
        val code = """
          trait Machine
          case class Terminator(name:String) extends Machine
          class Test {
           def b : Any = 4
           b match {
             case Terminator =>
             case _ =>
           }
          } """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 1
      }
      "for matching on object for case class with no params" in {
        val code = """
          trait Machine
          case class Terminator() extends Machine
          class Test {
           def b : Any = 4
           b match {
             case Terminator =>
             case _ =>
           }
          } """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 1
      }
    }
    "should not report warning" - {
      "for wildcard with types" in {
        val code = """
          trait Machine
          case class Terminator(name:String) extends Machine
          case class Android(name:String) extends Machine
          case class Man(name:String, gender:String) extends Machine
          object Test {
            val b : Any = Terminator("arnie")
            b match {
              case _ : Man =>
              case _ =>
            }
          } """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
      "for unapply" in {
        val code = """
          trait Machine
          case class Terminator(name:String) extends Machine
          case class Android(name:String) extends Machine
          object Test {
            val b : Any = Terminator("arnie")
            b match {
              case Android("data") => println("yay data")
              case _ =>
            }
          } """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
      "for top level case objects" in {
        val code = """package com.sammy
          trait Android
          case object Lal extends Android
          case object Data extends Android
          case class Robot(name:String) extends Android
          object Test {
            val b : Any = Data
            b match {
              case Data => println("Yes captain")
              case Lal => println("Yes dad")
              case Robot(name) => println(name)
              case _ =>
            }
          } """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
      "for top level objects" in {
        val code = """
                     |package com.sammy
                     |
                     |trait TestTrait
                     |object TestObject extends TestTrait
                     |
                     |object Go {
                     |  def test(t: TestTrait): Unit = t match {
                     |    case TestObject β println("ok")
                     |  }
                     |} """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
      "for nested case objects" in {
        val code = """
          package com.sammy
          |object Go {
          |  trait TestTrait
          |  object TestObject extends TestTrait
          |
          |  def test(t: TestTrait): Unit = t match {
          |    case TestObject β println("ok")
          |  }
          |} """.stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
      "for case objects with abstract companion class" in {
        val code = """ abstract class Kill
                     | case object Kill extends Kill
                     | object A {
                     |  val a: AnyRef = "sam"
                     |  a match {
                     |    case Kill =>
                     |    case _ =>
                     |  }
                     | }""".stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
      "for instances of case class" in {
        val code = """ case class Robot(name:String)
                     | object A {
                     |  val Robbie = Robot("Robbie")
                     |  val a: AnyRef = Robot("t800")
                     |  a match {
                     |    case Robbie =>
                     |    case Robot(name) =>
                     |    case _ =>
                     |  }
                     | }""".stripMargin
        compileCodeSnippet(code)
        compiler.scapegoat.feedback.warnings.size shouldBe 0
      }
    }
  }
} | pwwpche/scalac-scapegoat-plugin | src/test/scala/com/sksamuel/scapegoat/inspections/matching/SuspiciousMatchOnClassObjectTest.scala | Scala | apache-2.0 | 6,109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.cloudml.zen.ml.neuralNetwork
import com.github.cloudml.zen.ml.util.MnistDatasetSuite
import org.scalatest.{FunSuite, Matchers}
// Smoke test for RBM training on a subset of MNIST.
class RBMSuite extends FunSuite with MnistDatasetSuite with Matchers {
  // Disabled via `ignore` (training is expensive); flip to `test` to run.
  ignore("RBM") {
    // 2500 MNIST samples; only the first component (._1) of each sample is
    // fed to the trainer.
    val (data, numVisible) = mnistTrainDataset(2500)
    // Train a 256-hidden-unit RBM. The returned model was previously bound
    // to an unused local (`rbm`); the binding is dropped here.
    RBM.train(data.map(_._1), 1000, numVisible, 256, 0.1, 0.05, 0.0)
  }
}
| lenovor/zen | ml/src/test/scala/com/github/cloudml/zen/ml/neuralNetwork/RBMSuite.scala | Scala | apache-2.0 | 1,179 |
/*
* Copyright (c) 2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.hadoop.scalding
/**
 * Object to either discard or mutate a bad row containing a TSV raw event.
 * Implementations receive each bad row together with the validation errors
 * that caused it to be rejected.
 */
trait TsvProcessor {

  /**
   * Decide whether to try to fix up a given bad row, then act accordingly.
   *
   * @param inputTsv The tab-separated raw event in the Cloudfront Access Log format
   * @param errors An array of errors describing why the inputTsv is invalid
   * @return Some(mutatedInputTsv), or None if this bad row should be ignored
   */
  def process(inputTsv: String, errors: Seq[String]): Option[String]
}
| bigdecisions/snowplow | 3-enrich/hadoop-event-recovery/src/main/scala/com/snowplowanalytics/hadoop/scalding/TsvProcessor.scala | Scala | apache-2.0 | 1,257 |
package prv.tr.webmonitor.sample
import prv.tr.webmonitor._
import prv.tr.webmonitor.pushbullet.PushBulletNotifier
// Minimal example wiring: a single SampleMonitor, with results delivered
// through one PushBullet notifier.
object WebMonitorSampleMain extends App {
  // Replace the placeholder string with a real PushBullet API key.
  val notifiers = List(new PushBulletNotifier("3kI... (Your API key) ...9YZ"))
  val settings = new MonitorSettings(new SampleMonitor, notifiers)
  WebMonitorExecutor.execute(settings)
} | rixiaterurun/WebMonitor | sample/prv/tr/webmonitor/sample/WebMonitorSampleMain.scala | Scala | mit | 316 |
import org.scalatest.{Matchers, FunSuite}
// Exercises Binary(...).toDecimal: malformed binary strings decode to 0,
// valid binary strings decode to their base-2 value.
class BinaryTest extends FunSuite with Matchers {

  // Degenerate input: empty string is treated as 0.
  test("empty string") {
    Binary("").toDecimal should be (0)
  }

  // Invalid inputs — any non-binary character anywhere yields 0.
  test("invalid string - all chars invalid") {
    Binary("carrot").toDecimal should be (0)
  }

  test("invalid string - leading char invalid") {
    Binary("a1111").toDecimal should be (0)
  }

  test("invalid string - trailing char invalid") {
    Binary("1111a").toDecimal should be (0)
  }

  test("invalid string - middle char invalid") {
    Binary("0101F0").toDecimal should be (0)
  }

  test("invalid string - invalid digits") {
    Binary("22").toDecimal should be (0)
  }

  // Valid inputs of increasing length.
  test("1") {
    Binary("1").toDecimal should be (1)
  }

  test("2") {
    Binary("10").toDecimal should be (2)
  }

  test("3") {
    Binary("11").toDecimal should be (3)
  }

  test("4") {
    Binary("100").toDecimal should be (4)
  }

  test("9") {
    Binary("1001").toDecimal should be (9)
  }

  test("26") {
    Binary("11010").toDecimal should be (26)
  }

  test("Ultimate answer to everything") {
    Binary("101010").toDecimal should be (42)
  }

  test("1128") {
    Binary("10001101000").toDecimal should be (1128)
  }
}
| daewon/til | exercism/scala/binary/src/test/scala/binary_test.scala | Scala | mpl-2.0 | 1,183 |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.
package org.apache.spark.ml.param
import org.apache.spark.ml.Transformer
/** Param for Transformer. Needed as spark has explicit params for many different
 * types but not Transformer.
 *
 * @param parent the `Params` instance this param belongs to
 * @param name the param's name
 * @param doc user-facing documentation string for the param
 * @param isValid validation predicate applied to candidate values
 */
class TransformerParam(parent: Params, name: String, doc: String, isValid: Transformer => Boolean)
  extends ComplexParam[Transformer](parent, name, doc, isValid) {

  // Convenience constructor with no validation (accepts every Transformer).
  def this(parent: Params, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

}
| rastala/mmlspark | src/core/serialize/src/main/scala/params/TransformerParam.scala | Scala | mit | 617 |
// The sbt-pack plugin will print a warning if there is no main class in the
// project. The sole purpose of this file is to mask this warning.
// The object is intentionally empty: extending App supplies a no-op
// main(Array[String]) entry point for sbt-pack to detect.
object DummyMain extends App
| ananda13/swarm | workers/gsim/src/main/scala/DummyMain.scala | Scala | mit | 174 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset and retrieves a sample of code snippets that meet specific criteria, giving a quick overview of the dataset's contents without deeper analysis.