code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package eu.shiftforward.icfpc2015.solver
import eu.shiftforward.icfpc2015.model._
import spray.json._
import scala.io.Source
import scala.util.Random
/** Outcome of an optimization run: the score achieved and the hyperparameter
  * vector that produced it.
  * NOTE(review): `Array` uses reference equality, so two results with equal
  * parameter values are not `==`; compare `parameters.toList` if needed.
  */
case class OptimizationResult(score: Long, parameters: Array[Double])
trait Optimizer {
  /** Number of hyperparameters expected by `SmartSolver` (6 * 8 = 48; the
    * knowledge-pool arrays below all have this length).
    * NOTE(review): the name contains a typo ("Lenght") but is kept for source
    * compatibility with existing callers.
    */
  val hyperparametersLenght = 6 * 8

  /** Plays every seed of the problem in `filename` with a `SmartSolver`
    * configured by `hp` and returns the average score across seeds
    * (integer division).
    *
    * Fix: the file handle returned by `Source.fromFile` is now closed after
    * reading; the original leaked it on every call.
    *
    * @param filename path to a problem JSON file
    * @param hp       hyperparameter vector of length `hyperparametersLenght`
    */
  protected def score(filename: String, hp: Array[Double]) = {
    val source = Source.fromFile(filename)
    val input =
      try source.mkString.parseJson.convertTo[Input]
      finally source.close()
    val solver = new SmartSolver(hp, debugOnGameOver = false)
    val score = input.sourceSeeds.map { seed =>
      val units = input.orderedUnitsBySeed(seed)
      val grid = Grid(input.width, input.height).filled(input.filled: _*)
      val powerPhrases = PowerPhrase.knownPhrases
      val gameState = GameState(grid, units, powerPhrases)
      val solution = solver.play(gameState).toList
      gameState.nextState(solution).score.get.currentScore
    }.sum / input.sourceSeeds.size // integer average over all seeds
    OptimizationResult(score, hp)
  }

  /** Runs the optimization over the given problem files for at most `maxIter` iterations. */
  def optimize(filename: Array[String], maxIter: Int): OptimizationResult
}
/** Baseline optimizer: pure random search.
  *
  * Starts from a constant specimen (all genes 0.5), then draws `maxIter`
  * uniformly random hyperparameter vectors in (-1, 1) and keeps the best
  * scoring one. Only the first problem file is used.
  */
object RandomOptimizer extends Optimizer {
  def optimize(filename: Array[String], maxIter: Int) = {
    val problem = filename(0)

    @annotation.tailrec
    def search(remaining: Int, incumbent: OptimizationResult): OptimizationResult =
      if (remaining == 0) incumbent
      else {
        // Fresh random candidate, each gene uniform in (-1, 1).
        val candidate = score(problem, Array.fill(hyperparametersLenght)((Random.nextDouble() - 0.5) * 2))
        search(remaining - 1, if (candidate.score > incumbent.score) candidate else incumbent)
      }

    search(maxIter, score(problem, Array.fill(hyperparametersLenght)(0.5)))
  }
}
object GeneticOptimizer extends Optimizer {
type Specimen = Array[Double]
type Gene = Double
// Preserve previous runs...
val knowledgePool = List[Specimen](
/* problem 0 (6733) */ Array(0.15, 0.9, -0.204, -0.205, 0.966, -0.69, 0.923, 0.911, 0.97, -0.629, 0.828, 0.666, 0.62, 0.16, 0.06, -0.66, -0.8, 0.275, 0.394, 0.2, -0.854, -0.46, -0.919, 0.7, 0.896, 0.047, -0.32, -0.052, 0.358, 0.776, 0.685, 0.97, -0.77, -0.54, 0.385, 0.397, -0.49, -0.24, 0.4, -0.99, 0.88, -0.23, -0.46, 0.453, 0.94, -0.355, 0.631, 0.486),
/* problem 1 (3662) */ Array(0.773, 0.302, -0.164, -0.662, 0.63, -0.543, 0.509, 0.073, 0.219, -0.981, -0.985, 0.64, 0.721, 0.061, -0.892, -0.773, -0.113, 0.844, -0.234, 0.806, 0.406, 0.273, -0.158, 0.727, 0.363, -0.893, 0.214, -0.176, 0.683, -0.54, 0.041, 0.63, 0.022, -0.915, 0.881, -0.135, 0.018, 0.423, 0.638, -0.294, 0.884, -0.067, -0.216, -0.819, -0.819, 0.392, 0.0, -0.209),
/* problem 2 (6469) */ Array(-0.23, -0.5, 0.09, -0.34, -0.03, -0.42, 0.18, -0.19, 0.08, -0.07, -0.17, 0.19, 0.7, 0.18, 0.81, 0.17, -0.39, -0.9, -0.11, -0.49, 0.45, -0.06, -0.02, 0.18, 0.07, -0.64, -0.73, -0.14, -0.28, 0.62, -0.69, -0.72, -0.02, 0.08, -0.06, -0.75, 0.64, 0.34, 0.47, 0.97, -0.25, 0.28, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* problem 3 (4189) */ Array(0.85, -0.36, 0.78, -0.21, 0.46, 0.84, -0.98, 0.2, 0.06, 0.09, -0.72, 0.94, -0.34, 0.56, -0.23, 0.8, -0.2, -0.1, 0.75, -0.09, 0.65, -0.72, -0.05, 0.64, 0.83, 0.08, 0.2, 0.16, 0.49, -0.65, -0.05, 0.24, -0.72, -0.51, 0.25, 0.5, 0.17, 0.99, 0.61, -0.21, 0.31, 0.88, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* problem 4 (4416) */ Array(-0.853, 0.923, 0.316, 0.978, 0.27, 0.869, 0.95, 0.034, 0.886, 0.159, -0.969, -0.004, 0.819, 0.16, 0.048, 0.427, -0.395, 0.87, 0.032, -0.554, -0.017, -0.861, -0.78, -0.41, 0.625, 0.08, 0.268, 0.787, 0.853, 0.517, 0.36, -0.607, -0.637, -0.54, 0.733, 0.021, 0.292, 0.57, 0.913, 0.74, 0.712, 0.05, 0.1, 0.21, 0.604, -0.476, -0.549, -0.593),
/* problem 5 (3575) */ Array(-0.026, 0.46, -0.477, 0.452, -0.19, 0.009, 0.73, -0.292, -0.266, -0.965, 0.418, -0.41, 0.39, 0.492, -0.582, -0.424, -0.02, 0.428, 0.673, 0.583, 0.299, -0.16, -0.546, -0.653, -0.862, -0.39, 0.318, 0.41, 0.738, -0.761, 0.062, -0.169, 0.019, -0.821, -0.06, 0.099, 0.63, 0.17, 0.57, 0.87, -0.873, 0.41, 0.989, 0.755, 0.476, -0.765, 0.55, 0.745),
/* problem 6 (6151) */ Array(
0.23, -0.88, -0.87, 0.55, 0.15, -0.6,
0.28, 0.47, -0.05, -0.89, -0.17, 0.93,
-0.18, 0.04, -0.43, -0.42, -0.18, -0.38,
-0.27, 0.13, 0.76, -0.83, 0.74, -0.54,
0.26, 0.69, -0.4, 0.51, -0.85, 0.05,
-0.5, 0.54, 0.02, -0.14, -0.06, -0.61,
-0.82, 0.34, 0.91, -0.49, -0.97, 0.69,
0, 0, 0, 0, 0, 0),
/* problem 7 (3782) */ Array(-0.786, 0.8, -0.439, -0.884, -0.877, -0.371, 0.509, -0.932, 0.217, -0.07, 0.163, 0.87, -0.77, 0.5, 0.06, 0.82, -0.25, 0.999, 0.007, 0.21, 0.95, -0.9, 0.159, 0.33, 0.67, -0.64, -0.24, -0.915, 0.32, 0.319, 0.781, -0.652, -0.676, 0.09, 0.2, 0.271, -0.11, 0.96, 0.341, 0.136, 0.897, -0.162, 0.797, 0.735, 0.937, -0.625, -0.766, 0.923),
/* problem 8 (12792) */ Array(0.6, -0.2, -0.08, -0.54, 0.92, -0.49, 0.72, -0.939, 0.85, 0.59, -0.459, 0.207, -0.356, 0.92, -0.13, -0.678, -0.51, 0.39, -0.998, -0.151, 0.942, 0.294, 0.254, -0.7, -0.98, 0.695, -0.48, 0.84, -0.39, 0.44, 0.88, 0.57, 0.65, -0.384, 0.908, 0.132, -0.96, -0.412, -0.121, 0.201, 0.41, 0.91, 0.78, 0.0, 0.0, -0.472, -0.3, 0.0),
/* problem 9 (3545) */ Array(0.587, 0.228, 0.828, -0.54, 0.793, -0.097, 0.833, 0.293, 0.481, 0.422, -0.539, 0.634, 0.977, 0.183, -0.32, -0.897, 0.252, 0.607, -0.491, -0.449, -0.581, -0.774, -0.439, -0.379, 0.218, 0.681, -0.995, -0.047, -0.914, -0.34, -0.078, -0.646, 0.476, -0.729, -0.669, 0.006, 0.111, -0.534, 0.43, 0.891, 0.096, -0.328, -0.963, 0.27, 0.338, -0.977, -0.742, 0.583),
/* problem 10 (4192) */ Array(-0.54, -0.01, -0.6, -0.81, 0.39, -0.75, -0.17, -0.19, -0.21, 0.17, 0.13, -0.74, 0.31, 0.16, 0.14, -0.11, -0.51, 0.7, 0.79, 0.14, 0.1, -0.82, 0.78, 0.9, -0.66, -0.5, -0.8, 0.74, -0.91, -0.82, 0.44, -0.66, 0.54, -0.73, 0.43, -0.71, -0.91, 0.57, 0.82, 0.07, 0.37, -0.71, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* problem 11 (3180) */ Array(
-0.68, -0.96, 0.59, 0.9, -0.66, -0.98,
0.62, -0.88, 0.31, -0.74, -0.78, -0.19,
0.93, 0.03, -0.43, -0.72, -0.07, 0.54,
-0.22, 0.16, 0.73, -0.27, 0.35, 0.33,
-0.3, -0.92, 0.92, -0.85, -0.85, -0.48,
-0.88, -0.07, -0.88, -0.54, -0.63, 0.36,
0.52, 0.73, -0.27, -0.75, -0.9, 0.41,
0, 0, 0, 0, 0, 0),
/* problem 12 (6481) */ Array(
-0.46, -0.88, -0.08, -0.68, 0.78, -0.49,
-0.02, -0.11, 0.16, 0.59, -0.33, 0.56,
0.62, 0.92, -0.13, -0.66, -0.51, 0.1,
0.04, 0.36, 0.45, -0.09, 0.92, 0.27,
-0.51, -0.38, -0.98, -0.46, 0.82, -0.34,
0.04, -0.95, 0.65, -0.67, 0.56, -0.4,
0.22, -0.24, 0.71, -0.91, 0.37, -0.65,
0, 0, 0, 0, 0, 0),
/* problem 13 (2573) */ Array(0.55, -0.15, 0.25, -0.93, -0.173, -0.232, 0.95, -0.03, -0.96, -0.89, 0.236, 0.762, -0.77, 0.49, 0.27, -0.43, -0.51, -0.7, 0.678, -0.88, 0.2, 0.294, 0.74, 0.686, 0.45, -0.76, -0.8, 0.84, -0.3, 0.05, 0.07, -0.1, -0.65, -0.384, 0.2, 0.271, -0.718, 0.639, -0.001, -0.866, 0.897, 0.93, 0.638, 0.0, 0.0, 0.0, 0.142, 0.0),
/* problem 14 (6594) */ Array(
-0.21, 0.02, -0.83, -0.98, -0.68, 0.94,
-0.61, 0.19, 0.34, -0.22, -0.5, -0.03,
-0.46, 0.79, -0.46, 0.06, 0.12, 0.89,
0.41, 0.25, 0.05, -0.75, -0.98, 0.93,
0.4, 0.78, -0.06, -0.99, 0.32, 0.03,
0.98, 0.97, 0.14, 0.37, -0.99, -0.58,
-0.97, -0.24, 0.03, -0.71, 0.47, -0.53,
0, 0, 0, 0, 0, 0),
/* problem 15 (4116) */ Array(
0.38, -0.88, -0.77, 0.66, -0.93, -0.79,
0.08, -0.35, -0.53, -0.59, -0.38, -0.37,
0.64, 0.84, 0.44, -0.1, -0.02, -0.33,
0.91, -0.97, 0.46, -0.62, -0.85, 0.95,
-0.51, -0.45, -0.95, 0.41, 0.36, -0.06,
-0.18, -0.75, -0.93, 0.61, -0.06, -0.33,
-0.48, 0.17, -0.23, 0.87, -0.97, 0.41,
0, 0, 0, 0, 0, 0),
/* problem 16 (7568) */ Array(-0.23, 0.46, 0.34, 0.38, 0.92, -0.25, 0.35, 0.9, 0.66, 0.54, 0.62, 0.61, -0.51, 0.53, 0.51, -0.12, 0.16, -0.7, 0.56, -0.54, 0.76, -0.42, 0.57, 0.7, 0.45, -0.39, -0.6, 0.0, -0.3, 0.27, 0.11, 0.5, -0.56, -0.28, -0.25, -0.08, 0.64, 0.59, 0.57, 0.51, -0.77, -0.71, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* problem 17 (5300) */ Array(0.44, -0.71, 0.31, 0.79, 0.92, -0.95, 0.72, 0.12, 0.14, -0.02, 0.13, 0.84, -0.28, -0.36, 0.14, 0.87, 0.28, 0.39, -0.93, 0.59, 0.89, -0.54, 0.58, 0.33, 0.98, -0.76, -0.9, 0.84, -0.32, 0.44, 0.36, 0.52, 0.88, -0.55, -0.99, 0.31, -0.11, -0.44, -0.32, -0.47, 0.41, 0.91, 0.59, 0.49, 0.28, -0.51, 0.2, 0.89),
/* problem 18 (8596) */ Array(-0.881, -0.473, 0.957, -0.393, 0.92, 0.48, -0.1, -0.799, 0.38, -0.035, -0.44, -0.077, -0.518, 0.752, -0.882, -0.396, -0.387, 0.474, 0.41, -0.973, 0.266, 0.813, 0.698, 0.471, 0.307, 0.71, -0.111, 0.006, 0.332, 0.18, -0.26, -0.136, -0.525, 0.34, -0.422, 0.371, 0.64, -0.219, -0.217, -0.411, -0.005, -0.337, 0.0, 0.453, 0.171, -0.65, 0.287, 0.861),
/* problem 19 (5128) */ Array(0.613, 0.679, 0.493, 0.965, 0.831, 0.714, -0.739, 0.893, 0.215, -0.79, -0.694, -0.216, 0.966, 0.763, 0.132, 0.065, -0.387, 0.819, 0.759, -0.051, 0.196, 0.335, 0.369, 0.939, 0.403, -0.133, -0.213, -0.048, -0.823, 0.531, 0.174, 0.085, -0.698, -0.381, -0.924, 0.406, -0.888, 0.774, 0.089, -0.065, -0.8, -0.662, 0.248, 0.735, 0.251, 0.049, 0.528, -0.104),
/* problem 20 (6018) */ Array(0.5, -0.15, -0.87, -0.06, 0.89, -0.35, 0.95, 0.47, 0.76, 0.05, -0.89, 0.71, -0.13, 0.01, 0.05, 0.38, 0.06, 0.22, 0.04, -0.18, 0.45, 0.32, 0.92, 0.18, -0.02, -0.74, 0.91, 0.28, 0.67, 0.3, 0.72, -0.85, -0.65, -0.96, 0.75, 0.89, -0.4, 0.34, 0.37, 0.25, -0.31, 0.97, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* problem 21 (1434) */ Array(-0.46, 0.49, -0.87, -0.84, -0.24, 0.01, 0.4, 0.64, -0.81, -0.46, -0.21, -0.45, -0.04, 0.1, 0.42, -0.36, -0.37, 0.41, 0.56, 0.13, 0.34, -0.95, -0.52, 0.69, 0.62, 0.2, -0.46, 0.34, -0.07, -0.71, -0.81, -0.07, -0.52, 0.62, -0.72, -0.83, 0.6, -0.97, -0.46, 0.25, -0.37, -0.5, -0.4, 0.24, -0.79, -0.2, -0.87, 0.87),
/* problem 22 (2300) */ Array(-0.208, 0.256, -0.646, 0.727, 0.421, -0.724, -0.367, 0.893, 0.529, -0.945, 0.127, 0.38, 0.927, -0.89, -0.94, 0.391, -0.413, 0.3, -0.391, -0.015, -0.224, -0.602, -0.606, 0.899, -0.073, -0.847, -0.723, 0.553, 0.29, 0.884, 0.28, 0.25, -0.686, -0.001, -0.924, 0.295, 0.538, -0.259, 0.787, 0.276, 0.275, -0.271, 0.794, 0.755, -0.25, 0.481, 0.433, 0.648),
/* problem 23 (1116) */ Array(0.217, -0.24, -0.259, 0.85, -0.17, -0.645, 0.393, 0.041, 0.506, -0.073, 0.432, 0.104, -0.523, -0.466, -0.838, 0.786, -0.916, -0.342, -0.916, -0.872, 0.703, -0.324, 0.719, 0.948, 0.857, 0.451, -0.655, 0.393, 0.93, -0.734, 0.252, -0.637, 0.107, 0.851, -0.25, -0.279, 0.601, 0.486, 0.174, 0.326, -0.28, -0.081, 0.255, 0.634, -0.096, -0.974, 0.645, 0.978),
/* problem 24 (77000/ */ Array(-0.23, -0.01, -0.6, -0.93, 0.27, 0.97, -0.69, -0.83, 0.85, -0.42, 0.35, -0.24, 0.38, 0.16, -0.46, -0.85, 0.58, 0.87, 0.32, 0.98, 0.26, -0.31, -0.95, -0.7, -0.98, -0.44, -0.48, -0.16, -0.3, -0.91, 0.07, -0.66, 0.88, -0.96, -0.07, -0.06, -0.96, 0.57, 1.0, 0.74, -0.71, 0.93, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* meta 0 - 6 (26416 out of 29805) */ Array(
0.02, 0.13, -0.87, -0.95, 0.44, 0.15,
-0.85, 0.67, 0.38, -0.18, -0.09, 0.54,
0.2, 0.33, -0.43, 0.95, 0.16, 0.87,
0.61, 0.22, 0.73, -0.68, -0.7, 0.27,
-0.28, 0.4, -0.73, 0.39, -0.88, -0.48,
0.81, -0.02, 0.47, -0.99, -0.09, 0.03,
-0.93, -0.8, 0.48, -0.21, -0.15, 0.41,
0, 0, 0, 0, 0, 0),
/* meta 7 -13 (23732 out of 26853) */ Array(
0.6, -0.2, -0.1, -0.54, -0.29, -0.79,
-0.21, -0.11, 0.46, -0.79, -0.61, 0.54,
0.62, 0.04, 0.66, -0.21, 0.82, 0.7,
0.8, -0.26, 0.58, -0.84, -0.02, -0.98,
-0.51, -0.38, -0.55, -0.21, -0.39, -0.06,
0.88, 0.57, -0.93, -0.95, -0.5, 0.99,
0.66, 0.84, 0.8, 0.74, 0.62, 0.44,
0, 0, 0, 0, 0, 0),
/* meta 15-21 (22221 out of 22341) */ Array(
-0.34, -0.88, 0.09, 0.15, 0.6, -0.21,
0.73, 0.47, -0.96, 0.42, 0.35, -0.28,
0.45, -0.07, 0.65, -0.43, 0.83, 0.94,
-0.31, 0.98, 0.62, -0.16, -0.53, 0.93,
-0.36, -0.5, 0.92, -0.09, -0.0, 0.06,
-0.73, -0.1, -0.16, 0.42, -0.09, -0.49,
-0.82, -0.36, 0.67, -0.49, 0.08, 0.64,
0, 0, 0, 0, 0, 0),
/* fast problems (39729) */ Array(-0.23, -0.01, -0.6, -0.93, 0.27, 0.97, -0.69, -0.83, 0.85, -0.42, 0.35, -0.24, 0.38, 0.16, -0.46, -0.85, 0.58, 0.87, 0.32, 0.98, 0.26, -0.31, -0.95, -0.7, -0.98, -0.44, -0.48, -0.16, -0.3, -0.91, 0.07, -0.66, 0.88, -0.96, -0.07, -0.06, -0.96, 0.57, 1.0, 0.74, -0.71, 0.93, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
/* seed A */ Array(0.39, 0.46, 0.81, 0.99, 0.19, -0.38, -0.35, -0.45, 0.75, 0.51, -0.77, 0.87, 0.05, -0.09, -0.01, 0.46, -0.9, 0.07, -0.01, -0.12, 0.71, -0.93, 0.77, -0.7, -0.32, 0.71, 0.9, -0.13, -0.37, -0.34, -0.77, 0.09, 0.0, -0.1, -0.22, -0.17, 0.98, 0.39, 0.3, -0.06, -0.45, 0.33, 0.75, 0.9, 0.94, -0.62, -0.4, 0.93),
/* seed B */ Array(-0.3, 0.44, -0.23, -0.89, 0.78, 0.49, -0.78, 0.75, 0.32, -0.03, 0.06, 0.64, -0.28, -0.21, -0.19, 0.46, -0.76, -0.13, 0.64, 0.44, 0.29, -0.73, 0.39, 0.34, -0.3, 0.81, 0.09, -0.88, -0.71, -0.09, 0.1, 0.1, -0.76, -0.84, 0.1, 0.07, -0.6, 0.88, -0.18, 0.22, -0.01, 0.49, 0.94, 0.53, 0.66, -0.51, 0.55, 0.93),
/* seed C */ Array(-0.72, 0.79, 0.3, 0.57, -0.35, -0.09, -0.45, 0.07, 0.73, 0.06, -0.44, 0.71, 0.8, -0.33, 0.71, 0.55, -0.66, 0.57, -0.52, -0.84, 0.75, -0.66, 0.1, -0.36, 0.15, 0.54, 0.23, 0.87, 0.84, -0.32, -0.42, -0.2, 0.08, 0.34, 0.27, 0.6, 0.93, -0.26, -0.17, -0.7, -0.37, 0.33, -0.14, 0.77, 0.68, -0.85, -0.88, -0.44),
/* seed D */ Array(-0.52, -0.31, 0.48, -0.74, -0.58, 0.54, -0.36, -0.29, 0.46, 0.94, 0.3, 0.44, -0.06, 0.5, -0.71, 0.89, -0.53, 0.78, 0.86, 0.67, 0.71, -0.14, -0.52, 0.97, -0.54, 0.26, -0.12, 0.05, -0.86, -0.74, -0.82, -0.07, 0.38, 0.59, -0.33, 0.49, 0.63, -0.85, 0.08, 0.82, 0.54, 0.74, 0.59, 0.59, 0.49, -0.61, -1.0, -0.17)
)
/** Generic genetic-algorithm driver.
  *
  * Each generation evaluates the pool's fitness in parallel, keeps the best
  * specimen unchanged (elitism) and fills the rest of the population with
  * mutated crossovers of roulette-wheel-selected parents.
  */
class GeneticExploration(mutationRate: Double, // probability that a single gene mutates
                         crossOverRate: Double, // probability of taking the second parent's gene
                         population: Int, // pool size kept between generations
                         geneGenerator: () => Gene, // produces a fresh random gene
                         geneMutator: Gene => Gene, // rewrites one gene when mutation fires
                         specimenBuilder: Iterable[Gene] => Specimen, // assembles genes into a specimen
                         specimenFixer: Specimen => Specimen, // NOTE(review): accepted but never called in this class
                         fitnessF: Specimen => Long, // scoring function (higher is better)
                         stopCondition: (Int, List[Specimen]) => Boolean)(implicit ev1: Specimen => Iterable[Gene]) {

  type Pool = List[Specimen]
  type MatePool = List[(Specimen, Long)] // specimens paired with their fitness

  /** Builds a specimen of `len` freshly generated genes. */
  def newSpecimen(len: Int): Specimen = specimenBuilder(Stream.continually(geneGenerator()).take(len))

  /** Creates `population` random specimens shaped like `archetype`. */
  def randomPool(archetype: Specimen, population: Int = population): Pool = {
    (1 to population).map(_ => newSpecimen(archetype.length)).toList
  }

  // Lets a plain Pool be used wherever a MatePool is expected; the conversion
  // triggers a (parallel) fitness evaluation of the whole pool.
  implicit def toMatePool(p: Pool): MatePool = matePool(p)

  /** Breeds generations recursively until `stopCondition` fires; returns the
    * final mate pool together with the epoch at which it stopped.
    */
  def evolution(pool: MatePool, epoch: Int = 0): (MatePool, Int) = {
    val best = pool.maxBy(_._2)
    // println("---------------------------------------------------------------------------------------")
    println(f"[$epoch] ${best._2}: ${best._1.toList.mkString(", ")}")
    // println("---------------------------------------------------------------------------------------")
    // pool.foreach { case (a, b) => println(s"$b: ${a.toList.mkString(" ")}") }
    val newGeneration = popReproduction(pool)
    if (stopCondition(epoch, newGeneration)) (newGeneration, epoch)
    else evolution(newGeneration, epoch + 1)
  }

  // Evaluates fitness for every specimen, in parallel.
  private[this] def matePool(pool: Pool): MatePool = {
    val fitnesses = pool.par.map(fitnessF).toArray
    pool.zip(fitnesses)
  }

  // Scales raw fitness values into selection weights that sum to 1.
  // NOTE(review): assumes the total fitness is positive; a zero sum yields NaN weights.
  @inline private[this] def renormalize(vector: Array[Long]) = {
    val sum = vector.sum
    vector.map(_.toDouble / sum)
  }

  // Produces the next generation: the current best specimen survives verbatim,
  // the remaining (population - 1) slots are crossovers of selected parents.
  private[this] def popReproduction(matePool: MatePool): Pool = {
    val normalizedPool = matePool.map(_._1).zip(renormalize(matePool.map(_._2).toArray))
    // Always preserve the better specimen (elitist)
    (matePool.maxBy(_._2)._1 +:
      (1 until population).par.map(_ => crossover(monteCarlo(normalizedPool), monteCarlo(normalizedPool)))
    ).toList
  }

  // Roulette-wheel selection: picks an element with probability proportional
  // to its weight; the last element absorbs any rounding remainder.
  private[this] def monteCarlo[A](weightedList: List[(A, Double)]): A = {
    def go(xs: List[(A, Double)], r: Double): A = {
      xs match {
        case Nil => throw new IllegalArgumentException("Calling monteCarlo on an empty list!")
        case (h, _) :: Nil => h
        case (h, v) :: t =>
          if (r < v) h
          else go(xs.tail, r - v)
      }
    }
    go(weightedList, Random.nextFloat())
  }

  // Mixes two parents gene by gene, then applies mutation to the child.
  private[this] def crossover(a: Specimen, b: Specimen): Specimen =
    mutate(specimenBuilder(a.zip(b).map(gene =>
      if (Random.nextFloat >= crossOverRate) gene._1 else gene._2)))

  // Independently replaces each gene with probability `mutationRate`.
  private[this] def mutate(s: Specimen): Specimen =
    specimenBuilder(s.map(gene =>
      if (mutationRate > Random.nextFloat) geneMutator(gene) else gene))
}
/** Runs the genetic search over all given problem files and returns the
  * best specimen found after `maxIter` generations.
  */
def optimize(filenames: Array[String], maxIter: Int) = {
  // Fitness of a specimen = sum of its (per-file average) scores over every problem file.
  def totalScore(candidate: Specimen): Long =
    filenames.foldLeft(0L)((acc, file) => acc + score(file, candidate).score)

  val population = 32
  val lab = new GeneticExploration(
    0.1, 0.5, population, // rate of mutation, crossover ratio, max population
    () => (Random.nextDouble() - 0.5) * 2, // random gene in (-1, 1)
    _ => (Random.nextDouble() - 0.5) * 2, // mutation: replace the gene with a fresh random one
    cs => cs.map(v => math.rint(v * 1000) / 1000).toArray, // build a specimen, rounding genes to 3 decimals
    cs => { val total = cs.sum; cs.map(_ / total) }, // normalizer (never invoked by GeneticExploration)
    totalScore, // the fitness function
    (iter, _) => iter > maxIter // the stop condition
  )

  // Seed the initial pool with previously discovered specimens plus random filler.
  val seeds = knowledgePool ++
    lab.randomPool(Array.fill(hyperparametersLenght)(0.5), population - knowledgePool.length)
  val (finalPool, _) = lab.evolution(lab.toMatePool(seeds))
  val best = finalPool.maxBy(_._2)
  println(f"DONE\tBest Fit ${best._2}\tSpecimen ${best._1.toList}")
  OptimizationResult(best._2, best._1)
}
}
/** Entry point: runs the genetic optimizer for 50 generations over the
  * problem files passed as command-line arguments, then prints the result.
  */
object OptimizerMain extends App {
  private val result = GeneticOptimizer.optimize(args, 50)
  println(result)
}
| ShiftForward/icfpc2015 | src/main/scala/eu/shiftforward/icfpc2015/solver/Optimizer.scala | Scala | mit | 17,836 |
package io.youi.component.support
import io.youi.component.Component
import io.youi.component.feature.{FeatureParent, ThemeFeature}
/** Mix-in that equips a [[Component]] with a [[ThemeFeature]]. */
trait ThemeSupport {
  this: Component => // may only be mixed into a Component

  // Created lazily on first access, bound to this component.
  lazy val theme: ThemeFeature = new ThemeFeature(this)
}
package analyzer
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.transform.MegaPhase._
import dotty.tools.dotc.transform.{ReifyQuotes, FirstTransform}
import dotty.tools.dotc.plugins._
/** Unset the `defTree` property of symbols. See the doc for `SetDefTree` */
class SetDefTreeOff extends PluginPhase {
  import tpd._

  override val phaseName: String = SetDefTreeOff.name

  // Ordering: runs after SetDefTree (whose effect this phase reverts) and
  // before FirstTransform.
  override def runsAfter: Set[String] = Set(SetDefTree.name)
  override def runsBefore: Set[String] = Set(FirstTransform.name)

  // Each transform hook below clears the tree cached on the definition's
  // symbol and returns the tree unchanged.
  override def transformValDef(tree: ValDef)(implicit ctx: Context): Tree = {
    tree.symbol.defTree = EmptyTree
    tree
  }

  override def transformDefDef(tree: DefDef)(implicit ctx: Context): Tree = {
    tree.symbol.defTree = EmptyTree
    tree
  }

  override def transformTypeDef(tree: TypeDef)(implicit ctx: Context): Tree = {
    tree.symbol.defTree = EmptyTree
    tree
  }
}

object SetDefTreeOff {
  /** Phase name referenced by the ordering constraints above. */
  val name: String = "SetDefTreeOff"
}
| som-snytt/dotty | sbt-dotty/sbt-test/sbt-dotty/analyzer-plugin/plugin/SetDefTreeOff.scala | Scala | apache-2.0 | 1,008 |
package controllers
import com.google.inject.Inject
import models.AcquireCacheKeyPrefix.CookiePrefix
import models.{AllCacheKeys, VehicleLookupCacheKeys}
import play.api.mvc.{Action, Controller}
import uk.gov.dvla.vehicles.presentation.common
import common.clientsidesession.ClientSideSessionFactory
import common.clientsidesession.CookieImplicits.{RichCookies, RichResult}
import common.model.VehicleAndKeeperDetailsModel
import common.LogFormats.DVLALogger
import utils.helpers.Config
/** Play controller for the "suppressed V5C" page of the acquire-vehicle flow. */
class SuppressedV5C @Inject()()(implicit clientSideSessionFactory: ClientSideSessionFactory,
                                config: Config) extends Controller with DVLALogger {

  /** Shows the suppressed-V5C page when vehicle/keeper details are present in
    * the session cookie, otherwise redirects back to the start of the
    * trade-details flow. */
  def present = Action { implicit request =>
    request.cookies.getModel[VehicleAndKeeperDetailsModel] match {
      case Some(vehicleAndKeeperDetails) =>
        logMessage(request.cookies.trackingId(), Info, "Presenting suppressed V5C page")
        Ok(views.html.acquire.suppressedV5C())
      case _ =>
        val msg = "When presenting suppressed V5C, did not find VehicleDetailsModel cookie. " +
          s"Now redirecting to ${routes.SetUpTradeDetails.present()}"
        logMessage(request.cookies.trackingId(), Warn, msg)
        Redirect(routes.SetUpTradeDetails.present())
    }
  }

  /** Restarts the vehicle-lookup flow, discarding the lookup-related cookies. */
  def buyAnotherVehicle = Action { implicit request =>
    logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.VehicleLookup.present()}")
    Redirect(routes.VehicleLookup.present()).
      discardingCookies(VehicleLookupCacheKeys)
  }

  /** Ends the journey: redirects to the start page and discards all cached cookies. */
  def finish = Action { implicit request =>
    logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.BeforeYouStart.present()}")
    Redirect(routes.BeforeYouStart.present()).
      discardingCookies(AllCacheKeys)
  }
}
} | dvla/vehicles-acquire-online | app/controllers/SuppressedV5C.scala | Scala | mit | 1,774 |
import sbt._
import Keys._
import play.Project._
/** sbt build definition for the Play application. */
object ApplicationBuild extends Build {

  val appName = "computer-database"
  val appVersion = "1.0"

  // Play's Java stack: core, JDBC access and the Ebean ORM.
  val appDependencies = Seq(
    javaCore,
    javaJdbc,
    javaEbean
  )

  val main = play.Project(appName, appVersion, appDependencies).settings(
    // Add your own project settings here
  )
}
| cescoffier/maven-play2-plugin | src/it/computer-database/project/Build.scala | Scala | apache-2.0 | 404 |
package filodb.memory.data
import scala.concurrent.Await
import com.typesafe.scalalogging.StrictLogging
import kamon.Kamon
/** Last-resort process termination for unrecoverable / possibly corrupt states. */
object Shutdown extends StrictLogging {

  // Metric counting how often the process had to be forcibly terminated.
  val forcedShutdowns = Kamon.counter("forced-shutdowns").withoutTags()

  /** Records the event and halts the JVM with exit code 189.
    *
    * @param e        the exception that triggered the shutdown
    * @param unitTest when true, rethrow `e` instead of halting so the test
    *                 suite keeps running
    */
  def haltAndCatchFire(e: Exception, unitTest: Boolean = false): Unit = {
    forcedShutdowns.increment()
    if (unitTest) throw e
    logger.error(s"Shutting down process since it may be in an unstable/corrupt state", e)
    import scala.concurrent.duration._
    // Give Kamon up to 5 minutes to stop its modules, then halt (halt, unlike
    // exit, does not run JVM shutdown hooks).
    Await.result(Kamon.stopModules(), 5.minutes)
    Runtime.getRuntime.halt(189)
  }
}
| filodb/FiloDB | memory/src/main/scala/filodb.memory/data/Shutdown.scala | Scala | apache-2.0 | 590 |
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: Spiros Tzavellas
*/
package com.tzavellas.coeus.core.error
import com.tzavellas.coeus.mvc.{ WebRequest, WebResponse }
import com.tzavellas.coeus.mvc.view.View
/**
* A <code>View</code> instance to be returned from <code>ExceptionHandler</code>
* implementations.
*
* <p>This <code>View</code> is used to convey to the framework to propagate any
* uncaught exceptions to the Servlet container.</p>
*
* @see ExceptionHandler
*/
case object ErrorPageView extends View {

  /** Always null: no content is produced by this view. */
  def contentType = null

  /** Does nothing; the uncaught exception is propagated to the Servlet container. */
  def render(request: WebRequest, response: WebResponse) { }
}
} | sptz45/coeus | src/main/scala/com/tzavellas/coeus/core/error/ErrorPageView.scala | Scala | apache-2.0 | 751 |
package com.typesafe.slick.testkit.tests
import org.junit.Assert._
import com.typesafe.slick.testkit.util.{RelationalTestDB, AsyncTest}
class JoinTest extends AsyncTest[RelationalTestDB] {
import tdb.profile.api._
@deprecated("Using deprecated join operators", "3.0")
def testJoin = {
class Categories(tag: Tag) extends Table[(Int, String)](tag, "cat_j") {
def id = column[Int]("id")
def name = column[String]("name")
def * = (id, name)
}
val categories = TableQuery[Categories]
class Posts(tag: Tag) extends Table[(Int, String, Int)](tag, "posts_j") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def title = column[String]("title")
def category = column[Int]("category")
def * = (id, title, category)
}
val posts = TableQuery[Posts]
for {
_ <- (categories.schema ++ posts.schema).create
_ <- categories ++= Seq(
(1, "Scala"),
(2, "ScalaQuery"),
(3, "Windows"),
(4, "Software")
)
_ <- posts.map(p => (p.title, p.category)) ++= Seq(
("Test Post", -1),
("Formal Language Processing in Scala, Part 5", 1),
("Efficient Parameterized Queries in ScalaQuery", 2),
("Removing Libraries and HomeGroup icons from the Windows 7 desktop", 3),
("A ScalaQuery Update", 2)
)
// Implicit join
q1 = (for {
c <- categories
p <- posts if c.id === p.category
} yield (p.id, c.id, c.name, p.title)).sortBy(_._1)
_ <- q1.map(p => (p._1, p._2)).result.map(_ shouldBe List((2,1), (3,2), (4,3), (5,2)))
// Explicit inner join
q2 = (for {
(c,p) <- categories innerJoin posts on (_.id === _.category)
} yield (p.id, c.id, c.name, p.title)).sortBy(_._1)
_ <- q2.map(p => (p._1, p._2)).result.map(_ shouldBe List((2,1), (3,2), (4,3), (5,2)))
// Left outer join (nulls first)
q3 = (for {
(c,p) <- categories leftJoin posts on (_.id === _.category)
} yield (p.id, (p.id.?.getOrElse(0), c.id, c.name, p.title.?.getOrElse("")))).sortBy(_._1.nullsFirst).map(_._2)
_ <- q3.map(p => (p._1, p._2)).result.map(_ shouldBe List((0,4), (2,1), (3,2), (4,3), (5,2)))
// Read NULL from non-nullable column
q3a = (for {
(c,p) <- categories leftJoin posts on (_.id === _.category)
} yield (p.id, c.id, c.name, p.title)).sortBy(_._1.nullsFirst)
_ <- q3a.result.failed.map(_.shouldBeA[SlickException])
// Left outer join (nulls last)
q3b = (for {
(c,p) <- categories leftJoin posts on (_.id === _.category)
} yield (p.id, (p.id.?.getOrElse(0), c.id, c.name, p.title.?.getOrElse("")))).sortBy(_._1.nullsLast).map(_._2)
_ <- q3b.map(p => (p._1, p._2)).result.map(_ shouldBe List((2,1), (3,2), (4,3), (5,2), (0,4)))
// Right outer join
q4 = (for {
(c,p) <- categories rightJoin posts on (_.id === _.category)
} yield (p.id, c.id.?.getOrElse(0), c.name.?.getOrElse(""), p.title)).sortBy(_._1)
_ <- q4.map(p => (p._1, p._2)).result.map(_ shouldBe List((1,0), (2,1), (3,2), (4,3), (5,2)))
// Full outer join
q5 = (for {
(c,p) <- categories outerJoin posts on (_.id === _.category)
} yield (p.id.?.getOrElse(0), c.id.?.getOrElse(0), c.name.?.getOrElse(""), p.title.?.getOrElse(""))).sortBy(_._1)
_ <- q5.map(p => (p._1, p._2)).result.map(_ shouldBe Vector((0,4), (1,0), (2,1), (3,2), (4,3), (5,2)))
} yield ()
}
def testOptionExtendedJoin = {
class Data(name: String)(tag: Tag) extends Table[(Int, String)](tag, name) {
def a = column[Int]("a")
def b = column[String]("b")
def * = (a, b)
}
val xs = TableQuery(new Data("xs_jo")(_))
val ys = TableQuery(new Data("ys_jo")(_))
for {
_ <- (xs.schema ++ ys.schema).create
_ <- xs ++= Seq((1, "a"), (2, "b"), (3, "b"), (4, "c"), (5, "c"))
_ <- ys ++= Seq((1, "a"), (2, "b"), (3, "b"), (4, "d"), (5, "d"))
// Left outer, lift primitive value
q1 = (xs.map(_.b) joinLeft ys.map(_.b) on (_ === _)).to[Set]
r1 <- q1.result
r1t: Set[(String, Option[String])] = r1
_ = r1 shouldBe Set(("a",Some("a")), ("b",Some("b")), ("c",None))
// Nested left outer, lift primitive value
q2 = ((xs.map(_.b) joinLeft ys.map(_.b) on (_ === _)) joinLeft ys.map(_.b) on (_._1 === _)).to[Set]
r2 <- q2.result
r2t: Set[((String, Option[String]), Option[String])] = r2
_ = r2 shouldBe Set((("a",Some("a")),Some("a")), (("b",Some("b")),Some("b")), (("c",None),None))
// Left outer, lift non-primitive value
q3 = (xs joinLeft ys on (_.b === _.b)).to[Set]
r3 <- q3.result
r3t: Set[((Int, String), Option[(Int, String)])] = r3
_ = r3 shouldBe Set(((3,"b"),Some((3,"b"))), ((3,"b"),Some((2,"b"))), ((5,"c"),None), ((1,"a"),Some((1,"a"))), ((4,"c"),None), ((2,"b"),Some((3,"b"))), ((2,"b"),Some((2,"b"))))
// Left outer, lift non-primitive value, then map to primitive
q4 = (xs joinLeft ys on (_.b === _.b)).map { case (x, yo) => (x.a, yo.map(_.a)) }.to[Set]
r4 <- q4.result
r4t: Set[(Int, Option[Int])] = r4
_ = r4 shouldBe Set((4,None), (3,Some(2)), (2,Some(3)), (2,Some(2)), (3,Some(3)), (1,Some(1)), (5,None))
// Nested left outer, lift non-primitive value
q5 = ((xs joinLeft ys on (_.b === _.b)) joinLeft ys on (_._1.b === _.b)).to[Set]
r5 <- q5.result
r5t: Set[(((Int, String), Option[(Int, String)]), Option[(Int, String)])] = r5
_ = r5 shouldBe Set(
(((1,"a"),Some((1,"a"))),Some((1,"a"))),
(((2,"b"),Some((2,"b"))),Some((2,"b"))),
(((2,"b"),Some((2,"b"))),Some((3,"b"))),
(((2,"b"),Some((3,"b"))),Some((2,"b"))),
(((2,"b"),Some((3,"b"))),Some((3,"b"))),
(((3,"b"),Some((2,"b"))),Some((2,"b"))),
(((3,"b"),Some((2,"b"))),Some((3,"b"))),
(((3,"b"),Some((3,"b"))),Some((2,"b"))),
(((3,"b"),Some((3,"b"))),Some((3,"b"))),
(((4,"c"),None),None),
(((5,"c"),None),None)
)
// Right outer, lift primitive value
q6 = (ys.map(_.b) joinRight xs.map(_.b) on (_ === _)).to[Set]
r6 <- q6.result
r6t: Set[(Option[String], String)] = r6
_ = r6 shouldBe Set((Some("a"),"a"), (Some("b"),"b"), (None,"c"))
// Nested right outer, lift primitive value
// (left-associative; not symmetrical to the nested left outer case)
q7 = ((ys.map(_.b) joinRight xs.map(_.b) on (_ === _)) joinRight xs.map(_.b) on (_._2 === _)).to[Set]
r7 <- q7.result
rt: Set[(Option[(Option[String], String)], String)] = r7
_ = r7 shouldBe Set((Some((Some("a"),"a")),"a"), (Some((Some("b"),"b")),"b"), (Some((None,"c")),"c"))
// Right outer, lift non-primitive value
q8 = (ys joinRight xs on (_.b === _.b)).to[Set]
r8 <- q8.result
r8t: Set[(Option[(Int, String)], (Int, String))] = r8
_ = r8 shouldBe Set(
(Some((1,"a")), (1,"a")),
(Some((2,"b")), (2,"b")),
(Some((3,"b")), (2,"b")),
(Some((2,"b")), (3,"b")),
(Some((3,"b")), (3,"b")),
(None, (4,"c")),
(None, (5,"c"))
)
// Right outer, lift non-primitive value, then map to primitive
q9 = (ys joinRight xs on (_.b === _.b)).map { case (yo, x) => (yo.map(_.a), x.a) }.to[Set]
r9 <- q9.result
r9t: Set[(Option[Int], Int)] = r9
_ = r9 shouldBe Set((None,4), (Some(2),3), (Some(3),2), (Some(2),2), (Some(3),3), (Some(1),1), (None,5))
// Nested right outer, lift non-primitive value
// (left-associative; not symmetrical to the nested left outer case)
q10 = ((ys joinRight xs on (_.b === _.b)) joinRight xs on (_._1.map(_.b) === _.b)).to[Set]
r10 <- q10.result
r10t: Set[(Option[(Option[(Int, String)], (Int, String))], (Int, String))] = r10
_ = r10 shouldBe Set(
(Some((Some((1,"a")),(1,"a"))),(1,"a")),
(Some((Some((2,"b")),(2,"b"))),(2,"b")),
(Some((Some((2,"b")),(2,"b"))),(3,"b")),
(Some((Some((2,"b")),(3,"b"))),(2,"b")),
(Some((Some((2,"b")),(3,"b"))),(3,"b")),
(Some((Some((3,"b")),(2,"b"))),(2,"b")),
(Some((Some((3,"b")),(2,"b"))),(3,"b")),
(Some((Some((3,"b")),(3,"b"))),(2,"b")),
(Some((Some((3,"b")),(3,"b"))),(3,"b")),
(None,(4,"c")),
(None,(5,"c"))
)
// Full outer, lift primitive values
q11 = (xs.map(_.b) joinFull ys.map(_.b) on (_ === _)).to[Set]
r11 <- q11.result
r11t: Set[(Option[String], Option[String])] = r11
_ = r11 shouldBe Set((Some("a"),Some("a")), (Some("b"),Some("b")), (Some("c"),None), (None,Some("d")))
// Full outer, lift non-primitive values
q12 = (xs joinFull ys on (_.b === _.b)).to[Set]
r12 <- q12.result
r12t: Set[(Option[(Int, String)], Option[(Int, String)])] = r12
_ = r12 shouldBe Set(
(Some((1,"a")),Some((1,"a"))),
(Some((2,"b")),Some((2,"b"))),
(Some((2,"b")),Some((3,"b"))),
(Some((3,"b")),Some((2,"b"))),
(Some((3,"b")),Some((3,"b"))),
(Some((4,"c")),None),
(Some((5,"c")),None),
(None,Some((4,"d"))),
(None,Some((5,"d")))
)
} yield ()
}
/** Verifies that a computed column in the default projection (`b * 10`)
  * survives a left self-join. */
def testComputedStarProjection = {
  class X(tag: Tag) extends Table[(Int, Int)](tag, "x_star") {
    def a = column[Int]("a")
    def b = column[Int]("b", O.Default(2))
    def * = (a, b * 10) // the star projection applies a computation to `b`
  }
  val xs = TableQuery[X]
  for {
    _ <- xs.schema.create
    _ <- xs.map(_.a) ++= Seq(1) // only `a` inserted; `b` takes its default of 2
    q1 = xs joinLeft xs
    _ <- q1.result.map(_ shouldBe Vector(((1, 20), Some((1, 20))))) // 20 = default 2 * 10
  } yield ()
}
/** Exercises zip-based joins (`zip`, `zipWith`, `zipWithIndex`), guarded by
  * the `rcap.zip` capability. */
def testZip = ifCap(rcap.zip) {
  class Categories(tag: Tag) extends Table[(Int, String)](tag, "cat_z") {
    def id = column[Int]("id")
    def name = column[String]("name")
    def * = (id, name)
  }
  val categories = TableQuery[Categories]
  class Posts(tag: Tag) extends Table[(Int, String, Int)](tag, "posts_z") {
    def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
    def title = column[String]("title")
    def category = column[Int]("category")
    def * = (id, title, category)
  }
  val posts = TableQuery[Posts]
  for {
    _ <- (categories.schema ++ posts.schema).create
    // Categories are inserted out of id order (1, 3, 2, 4) on purpose, so the
    // unsorted zips below pair rows in insertion order.
    _ <- categories ++= Seq(
      (1, "Scala"),
      (3, "Windows"),
      (2, "ScalaQuery"),
      (4, "Software")
    )
    _ <- posts.map(p => (p.title, p.category)) ++= Seq(
      ("Test Post", -1),
      ("Formal Language Processing in Scala, Part 5", 1),
      ("Efficient Parameterized Queries in ScalaQuery", 2),
      ("Removing Libraries and HomeGroup icons from the Windows 7 desktop", 3),
      ("A ScalaQuery Update", 2)
    )
    // zipWithIndex on a sorted query: indexes follow the sort order.
    q1 = for {
      (c, i) <- categories.sortBy(_.id).zipWithIndex
    } yield (c.id, i)
    _ <- mark("q1", q1.result).map(_ shouldBe List((1,0), (2,1), (3,2), (4,3)))
    // Positional zip of two sorted queries.
    q2 = for {
      (c, p) <- categories.sortBy(_.id) zip posts.sortBy(_.category)
    } yield (c.id, p.category)
    _ <- mark("q2", q2.result).map(_ shouldBe List((1,-1), (2,1), (3,2), (4,2)))
    // Unsorted zip: pairs rows in insertion order.
    q3 = for {
      (c, p) <- categories zip posts
    } yield (c.id, p.category)
    _ <- mark("q3", q3.result).map(_ shouldBe List((1, -1), (3, 1), (2, 2), (4, 3)))
    // zipWith: the same pairing expressed with a combining function.
    q4 = for {
      res <- categories.zipWith(posts, (c: Categories, p: Posts) => (c.id, p.category))
    } yield res
    _ <- mark("q4", q4.result).map(_ shouldBe List((1, -1), (3, 1), (2, 2), (4, 3)))
    q5 = for {
      (c, i) <- categories.zipWithIndex
    } yield (c.id, i)
    _ <- mark("q5", q5.result).map(_ shouldBe List((1,0), (3,1), (2,2), (4,3)))
    // zipWithIndex over an already-zipped query.
    q6 = for {
      ((c, p), i) <- (categories zip posts).zipWithIndex
    } yield (c.id, p.category, i)
    _ <- mark("q6", q6.result).map(_ shouldBe List((1, -1, 0), (3, 1, 1), (2, 2, 2), (4, 3, 3)))
  } yield ()
}
/** Verifies that joinLeft, joinRight and an inner join can all be executed
  * without an explicit join condition (i.e. as plain cross joins). */
def testNoJoinCondition = {
  // Single-column table; only the shape of the generated SQL matters here.
  class T(tag: Tag) extends Table[Int](tag, "t_nojoincondition") {
    def id = column[Int]("id")
    def * = id
  }
  lazy val ts = TableQuery[T]
  for {
    _ <- ts.schema.create
    _ <- (ts joinLeft ts).result  // left outer join, no ON clause
    _ <- (ts joinRight ts).result // right outer join, no ON clause
    _ <- (ts join ts).result      // inner (cross) join, no ON clause
  } yield ()
}
/** Tests mixing an applicative left outer join with monadic (flatMap-style)
  * joins: q2 composes over q1, and q3 inlines the same query in one shot. */
def testMixedJoin = {
  class A(tag: Tag) extends Table[Int](tag, "a_mixedjoin") {
    def id = column[Int]("id")
    def * = id
  }
  lazy val as = TableQuery[A]
  class B(tag: Tag) extends Table[Int](tag, "b_mixedjoin") {
    def foreignId = column[Int]("foreignId")
    def * = foreignId
  }
  lazy val bs = TableQuery[B]
  class C(tag: Tag) extends Table[Int](tag, "c_mixedjoin") {
    def foreignId = column[Int]("foreignId")
    def * = foreignId
  }
  lazy val cs = TableQuery[C]
  // Applicative left outer join: every A row, with its matching B id if any.
  val q1 = for {
    (a, b) <- as joinLeft bs on (_.id === _.foreignId)
  } yield (a, b)
  // Monadic join layered on top of the applicative join above.
  val q2 = for {
    (a, b) <- q1
    c <- cs if c.foreignId === a.id
  } yield (a, c)
  // Same query as q2, but with the left join written inline.
  val q3 = for {
    (a, b) <- as joinLeft bs on (_.id === _.foreignId)
    c <- cs if c.foreignId === a.id
  } yield (a, c)
  DBIO.seq(
    (as.schema ++ bs.schema ++ cs.schema).create,
    as ++= Seq(1,2,3),
    bs ++= Seq(1,2,4,5),
    cs ++= Seq(1,2,4,6),
    // A id 3 has no B match -> None; ids 1 and 2 match themselves.
    q1.result.map(_.toSet shouldBe Set((1, Some(1)), (2, Some(2)), (3, None))),
    q2.result.map(_.toSet shouldBe Set((1,1), (2,2))),
    q3.result.map(_.toSet shouldBe Set((1,1), (2,2)))
  )
}
}
| lukasz-golebiewski/slick | slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/JoinTest.scala | Scala | bsd-2-clause | 13,505 |
/**
* Created by ahan on 10/05/2017.
*/
import scala.util.matching.Regex
/** Demonstrates basic [[scala.util.matching.Regex]] usage: findFirstIn,
  * findAllIn and replaceFirstIn. Results are printed to stdout. */
object RegTest {
  def main(args: Array[String]): Unit = {
    // `.r` compiles a string literal into a Regex.
    val literalPattern = "Scala".r
    val sentence = "Scala is Scalable and cool"
    println(literalPattern findFirstIn sentence)

    // The first letter may be an upper-case 'S' or a lower-case 's'.
    val casePattern = new Regex("(S|s)cala")
    val mixedCase = "Scala is scalable and cool"
    println((casePattern findAllIn mixedCase).mkString(",")) // join all matches with a comma

    // Replace only the first occurrence.
    println(casePattern.replaceFirstIn(mixedCase, "Java"))

    // Character class plus quantifier: "abl", then 'a' or 'e', then one or more digits.
    val digitPattern = new Regex("abl[ae]\\d+")
    println((digitPattern findAllIn "ablaw is able1 and cool").mkString(","))
  }
}
| AmeryHan/amery_base_java | scala/src/main/scala/advanced/RegTest.scala | Scala | apache-2.0 | 716 |
package com.buysomegames.repository
import java.util.concurrent.TimeUnit
import com.buysomegames.kernel.MongoConnectionModule
import com.buysomegames.model.Game
import com.buysomegames.test.FreshDatabase
import com.twitter.inject.app.TestInjector
import com.twitter.inject.{Injector, IntegrationTest}
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
/** Integration test for [[GameRepository]] against a freshly seeded database. */
class GameRepositoryTest extends IntegrationTest with FreshDatabase {

  override protected def injector: Injector = TestInjector(modules = Seq(MongoConnectionModule))

  "GameRepository.findAllGames" should {
    "return all available games" in {
      // Resolve the repository and block on the lookup (at most 10 seconds),
      // since this test body is synchronous.
      val repository = injector.instance[GameRepository]
      val allGames: Iterable[Game] = Await.result(repository.findAllGames, Duration(10, TimeUnit.SECONDS))
      allGames should have size 2
      allGames.map(_.name) should contain only ("Uncharted: Drake’s Fortune", "Gravity Rush")
    }
  }
}
| kaliy/buysomegames | src/test/scala/com/buysomegames/repository/GameRepositoryTest.scala | Scala | agpl-3.0 | 957 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperations.spark.wrappers.estimators
import io.deepsense.commons.utils.Version
import io.deepsense.deeplang.DOperation.Id
import io.deepsense.deeplang.documentation.SparkOperationDocumentation
import io.deepsense.deeplang.doperables.spark.wrappers.estimators.ALS
import io.deepsense.deeplang.doperations.EstimatorAsFactory
/** Operation factory exposing the Spark ALS (alternating least squares)
  * recommendation estimator as a Deeplang operation. */
class CreateALS extends EstimatorAsFactory[ALS]
    with SparkOperationDocumentation {

  // Presumably a stable identifier referenced by serialized workflows —
  // confirm before ever changing it.
  override val id: Id = "5a9e4883-b653-418e-bc51-a42fde476a63"
  override val name: String = "ALS"
  override val description: String = "Creates an ALS recommendation model"

  // Anchor into the Spark MLlib guide for this operation's documentation link.
  override protected[this] val docsGuideLocation =
    Some("mllib-collaborative-filtering.html#collaborative-filtering")
  // Product version in which this operation first appeared.
  override val since: Version = Version(1, 0, 0)
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/main/scala/io/deepsense/deeplang/doperations/spark/wrappers/estimators/CreateALS.scala | Scala | apache-2.0 | 1,391 |
package com.github.mdr.graphospasm.core.graph.mutable
import com.github.mdr.graphospasm.core.graph._
import com.google.common.collect.{ ListMultimap, ArrayListMultimap }
import scala.collection.JavaConversions._
object MutableGraphImpl {

  /** Creates a deep, mutable copy of `graph`: every vertex (together with its
    * attributes) and every edge is duplicated into a fresh [[MutableGraphImpl]]. */
  def copy(graph: Graph): MutableGraph = {
    val clone = new MutableGraphImpl
    // Map each original vertex to its freshly created counterpart so that the
    // edges can be re-linked afterwards. `map` visits vertices in order, so the
    // clone preserves the original insertion order.
    val oldToNewVertexMap: Map[Vertex, MutableVertex] =
      graph.vertices.map { oldVertex =>
        val newVertex = clone.addVertex(oldVertex.name)
        for ((key, value) <- oldVertex.attributes)
          newVertex.setAttribute(key, value)
        oldVertex -> newVertex
      }.toMap
    for (oldEdge <- graph.edges)
      clone.addEdge(oldToNewVertexMap(oldEdge.source), oldToNewVertexMap(oldEdge.target), oldEdge.nameOpt)
    clone
  }

  /** A new, empty mutable graph. */
  def emptyGraph: MutableGraph = new MutableGraphImpl
}
/** List/multimap-backed implementation of [[MutableGraph]].
  *
  * Vertices and edges are kept in insertion order; per-vertex incoming and
  * outgoing edges are indexed in Guava multimaps. Every mutation re-runs
  * `checkInvariants()`, which walks all vertices and edges — acceptable for
  * small graphs, but worth noting for large ones. Not thread-safe.
  */
class MutableGraphImpl extends MutableGraph {

  private var vertices_ : List[MutableVertexImpl] = Nil
  private var edges_ : List[MutableEdge] = Nil

  // Secondary indices: edges arriving at / leaving a given vertex.
  private val incomingEdgeMap: ListMultimap[MutableVertex, MutableEdgeImpl] = ArrayListMultimap.create()
  private val outgoingEdgeMap: ListMultimap[MutableVertex, MutableEdgeImpl] = ArrayListMultimap.create()

  // Verifies that the edge list and both index maps stay mutually consistent
  // and only reference vertices known to this graph.
  private def checkInvariants() {
    require(edges flatMap (_.vertices) forall vertices_.contains)
    require(incomingEdgeMap.keySet forall vertices_.contains)
    require(incomingEdgeMap.values.toSet == edges_.toSet)
    require(outgoingEdgeMap.keySet forall vertices_.contains)
    require(outgoingEdgeMap.values.toSet == edges_.toSet)
    require(edges_ forall { edge ⇒ incomingEdgeMap.get(edge.target).contains(edge) && outgoingEdgeMap.get(edge.source).contains(edge) })
  }

  def vertices: List[MutableVertex] = vertices_

  def edges: List[MutableEdge] = edges_

  // Vertices not created by this graph cannot have edges here, hence Nil.
  def incomingEdges(v: Vertex): List[MutableEdge] = v match {
    case mv: MutableVertexImpl ⇒ incomingEdgeMap.get(mv).toSeq.toList
    case _ ⇒ Nil
  }

  def outgoingEdges(v: Vertex): List[MutableEdge] = v match {
    case mv: MutableVertexImpl ⇒ outgoingEdgeMap.get(mv).toSeq.toList
    case _ ⇒ Nil
  }

  def copy = MutableGraphImpl.copy(this)

  def addVertex(name: Name): MutableVertex = {
    val v = new MutableVertexImpl(name)
    vertices_ = vertices_ :+ v
    checkInvariants()
    v
  }

  // Removes the vertex together with every edge incident to it.
  def removeVertex(v: MutableVertex) {
    vertices_ = vertices_ filterNot (_ == v)
    edges_ = edges_ filterNot (_ isIncidentTo v)
    incomingEdgeMap.removeAll(v)
    outgoingEdgeMap.removeAll(v)
    checkInvariants()
  }

  def addEdge(source: MutableVertex, target: MutableVertex, nameOpt: Option[Name] = None): MutableEdge = {
    val edge = new MutableEdgeImpl(source, target, nameOpt)
    edges_ = edges_ :+ edge
    incomingEdgeMap.put(target, edge)
    outgoingEdgeMap.put(source, edge)
    checkInvariants()
    edge
  }

  def removeEdge(edge: MutableEdge) {
    edges_ = edges_ filterNot (_ == edge)
    // Guava's get() returns a live view, so remove() updates the multimap.
    outgoingEdgeMap.get(edge.source).remove(edge)
    incomingEdgeMap.get(edge.target).remove(edge)
    checkInvariants()
  }

  private class MutableVertexImpl(initialName: Name) extends MutableVertex {

    private var name_ : Name = initialName
    def name = name_
    def setName(name: Name) { name_ = name }

    private var attributes_ : Map[Name, AnyRef] = Map()
    def attributes = attributes_

    def setAttribute(name: Name, value: AnyRef) {
      attributes_ += (name -> value)
    }

    def removeAttribute(name: Name) {
      attributes_ = attributes_ - name
    }

    override def toString = "[" + name + ": " + attributes + "]"
  }

  private class MutableEdgeImpl(initialSource: MutableVertex, initialTarget: MutableVertex, initialName: Option[Name] = None) extends MutableEdge {

    private var nameOpt_ : Option[Name] = initialName
    def nameOpt = nameOpt_
    def setName(name: Name) { nameOpt_ = Some(name) }

    private var source_ : MutableVertex = initialSource
    def source = source_

    private var target_ : MutableVertex = initialTarget
    def target = target_

    override def toString = "e(" + nameOpt + ", " + source + " -> " + target + ")"
  }

  // NOTE(review): "\\n" appends a literal backslash-n, not a newline — confirm
  // whether real line breaks were intended in this debug output.
  override def toString = {
    val sb = new StringBuilder // was `var`; the reference itself is never reassigned
    sb.append("MutableGraphImpl(\\n")
    for (vertex ← vertices_)
      sb.append("  " + vertex + "\\n")
    for (edge ← edges)
      sb.append("  " + edge + "\\n")
    sb.append(")")
    sb.toString
  }
}
| mdr/graphospasm | com.github.mdr.graphospasm.core/src/main/scala/com/github/mdr/graphospasm/core/graph/mutable/MutableGraphImpl.scala | Scala | mit | 4,431 |
package com.nrinaudo.fetch
import org.scalatest.{Matchers, FunSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalacheck.{Arbitrary, Gen}
/** ScalaCheck generators shared by [[StatusSpec]]. */
object StatusSpec {
  // One generator per standard HTTP status class.
  def success = Gen.choose(200, 299).map(Status(_))
  def redirection = Gen.choose(300, 399).map(Status(_))
  def clientError = Gen.choose(400, 499).map(Status(_))
  def serverError = Gen.choose(500, 599).map(Status(_))

  // Any legal status, drawn from the four classes above.
  def status = Gen.oneOf(success, redirection, clientError, serverError)

  // Integers that cannot be legal HTTP status codes.
  def invalidStatus: Gen[Int] = Arbitrary.arbitrary[Int].suchThat(i => i < 0 || i > 600)

  // Dummy response whose body is just the status code.
  private def response(status: Status) = new Response(status, new Headers(), status.code)
}
/** Property-based specification for Status: construction, extractors,
  * predicate methods and response unapply. Generators come from the
  * [[StatusSpec]] object. */
class StatusSpec extends FunSpec with Matchers with GeneratorDrivenPropertyChecks {
  import StatusSpec._

  describe("The Status companion object") {
    it("should apply on legal statuses") {
      forAll(status) { status => Status(status.code) should be(status) }
    }

    it("should fail to apply on illegal statuses") {
      // Trailing () discards the intercepted exception so the block types to Unit.
      forAll(invalidStatus) { status => intercept[IllegalArgumentException](Status(status)); () }
    }

    // Expected extractor result: Some(t) when the extractor should match, else None.
    def expected[T](t: T, pass: Boolean) =
      if(pass) Some(t)
      else None

    // Checks an extractor against both the Status itself and a Response carrying it.
    def validate(status: Status, extractor: Status.Extractor, pass: Boolean) = {
      extractor.unapply(status) should be(expected(status, pass))
      extractor.unapply(response(status)) should be(expected(status, pass))
    }

    it("should extract success statuses") {
      forAll(success) { status =>
        validate(status, Status.Success, true)
        validate(status, Status.Redirection, false)
        validate(status, Status.ClientError, false)
        validate(status, Status.ServerError, false)
        validate(status, Status.Error, false)
      }
    }

    it("should extract redirection statuses") {
      forAll(redirection) { status =>
        validate(status, Status.Success, false)
        validate(status, Status.Redirection, true)
        validate(status, Status.ClientError, false)
        validate(status, Status.ServerError, false)
        validate(status, Status.Error, false)
      }
    }

    // Note: client errors match both ClientError and the umbrella Error extractor.
    it("should extract client error statuses") {
      forAll(clientError) { status =>
        validate(status, Status.Success, false)
        validate(status, Status.Redirection, false)
        validate(status, Status.ClientError, true)
        validate(status, Status.ServerError, false)
        validate(status, Status.Error, true)
      }
    }

    it("should extract server error statuses") {
      forAll(serverError) { status =>
        validate(status, Status.Success, false)
        validate(status, Status.Redirection, false)
        validate(status, Status.ClientError, false)
        validate(status, Status.ServerError, true)
        validate(status, Status.Error, true)
      }
    }
  }

  describe("An instance of Status") {
    it("should detect success statuses correctly") {
      forAll(success) { status =>
        status.isSuccess should be(true)
        status.isRedirection should be(false)
        status.isClientError should be(false)
        status.isServerError should be(false)
      }
    }

    it("should detect redirection statuses correctly") {
      forAll(redirection) { status =>
        status.isSuccess should be(false)
        status.isRedirection should be(true)
        status.isClientError should be(false)
        status.isServerError should be(false)
      }
    }

    it("should detect client errors correctly") {
      forAll(clientError) { status =>
        status.isSuccess should be(false)
        status.isRedirection should be(false)
        status.isClientError should be(true)
        status.isServerError should be(false)
      }
    }

    it("should detect server errors correctly") {
      forAll(serverError) { status =>
        status.isSuccess should be(false)
        status.isRedirection should be(false)
        status.isClientError should be(false)
        status.isServerError should be(true)
      }
    }

    it("should unapply responses with the same status") {
      forAll(status) { status => status.unapply(response(status)) should be (Some(status)) }
    }

    // Pairs of distinct statuses, used to test the negative unapply case.
    def diffStatuses = for {
      s1 <- status
      s2 <- status if s1 != s2
    } yield (s1, s2)

    it("should not unapply responses with a different status") {
      forAll(diffStatuses) { case (s1, s2) =>
        s1.unapply(response(s2)) should be(None)
      }
    }

    // Round-trip: toString yields the numeric code, which re-parses to the same status.
    it("should serialize to itself") {
      forAll(status) { status =>
        Status(status.toString.toInt) should be(status)
      }
    }
  }
}
| nrinaudo/fetch | core/src/test/scala/com/nrinaudo/fetch/StatusSpec.scala | Scala | mit | 4,646 |
package scorex.consensus.qora
import com.google.common.primitives.{Bytes, Longs}
import play.api.libs.json.{JsObject, Json}
import scorex.block.BlockField
import scorex.crypto.Base58
/** Block field carrying the Qora-style consensus data of a block. */
case class QoraConsensusBlockField(override val value: QoraLikeConsensusBlockData)
  extends BlockField[QoraLikeConsensusBlockData] {

  override val name: String = "qora-consensus"

  // Binary layout: generating balance as an 8-byte (zero-padded) big-endian
  // long, followed by the raw generator signature bytes.
  override def bytes: Array[Byte] =
    Bytes.ensureCapacity(Longs.toByteArray(value.generatingBalance), 8, 0) ++
      value.generatorSignature

  // NOTE(review): the JSON key "base-target" maps to generatingBalance —
  // confirm the naming mismatch is intentional (wire-format compatibility?).
  override def json: JsObject = Json.obj(name -> Json.obj (
    "base-target" -> value.generatingBalance,
    "generation-signature" -> Base58.encode(value.generatorSignature)
  ))
}
| beni55/Scorex-Lagonaki | scorex-consensus/src/main/scala/scorex/consensus/qora/QoraConsensusBlockField.scala | Scala | cc0-1.0 | 699 |
package org.deepdive.extraction.datastore
import org.deepdive.datastore.DataStoreUtils
import play.api.libs.json._
object ExtractionDataStore {
  // Existential alias: a data store whose element type is some subtype of JsValue.
  type JsonExtractionDataStore = ExtractionDataStore[_ <: JsValue]
}
/* Cake-pattern component exposing a JSON-producing extraction data store. */
trait ExtractionDataStoreComponent {
  def dataStore : ExtractionDataStore[_ <: JsValue]
}
/* Stores extraction results and queries the database for extracted data. */
trait ExtractionDataStore[A <: JsValue] {

  /* Initialize the data store. Must be called before anything else. */
  def init() : Unit

  /*
   * Returns the result of the query as a stream of untyped Maps.
   * How the query string is interpreted depends on the implementing data store.
   * For example, Postgres interprets the query as a SQL statement.
   * `batchSize` presumably controls how many rows are fetched per round trip —
   * confirm against the concrete implementations.
   */
  def queryAsMap[B](query: String, batchSize: Option[Int] = None)
    (block: Iterator[Map[String, Any]] => B) : B

  /* Returns the result of the query as a stream of JSON objects. */
  def queryAsJson[B](query: String, batchSize: Option[Int] = None)(block: Iterator[A] => B) : B

  /* Executes the given update/DML query against the store. */
  def queryUpdate(query: String)

  /**
   * Writes a list of tuples back to the datastore.
   * IMPORTANT: This method must assign a globally unique variable id to each record
   */
  def addBatch(result: Iterator[JsObject], outputRelation: String) : Unit
} | gaapt/deepdive | src/main/scala/org/deepdive/extraction/datastore/ExtractionDataStore.scala | Scala | apache-2.0 | 1,394 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.routing.admin
import java.net.URLEncoder
import akka.actor.ActorSystem
import akka.http.scaladsl.client.RequestBuilding.Get
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{PathMatcher, Route}
import akka.stream.ActorMaterializer
import io.swagger.annotations.Api
import javax.ws.rs.Path
import org.knora.webapi.messages.admin.responder.permissionsmessages.{AdministrativePermissionForProjectGroupGetRequestADM, PermissionType}
import org.knora.webapi.routing.{Authenticator, KnoraRoute, KnoraRouteData, RouteUtilADM}
import org.knora.webapi.util.IriConversions._
import org.knora.webapi.util.clientapi.EndpointFunctionDSL._
import org.knora.webapi.util.clientapi._
import org.knora.webapi.{OntologyConstants, SharedTestDataADM}
import scala.concurrent.{ExecutionContext, Future}
object PermissionsRouteADM {
  // Path matcher and its string form for the /admin/permissions route tree.
  val PermissionsBasePath = PathMatcher("admin" / "permissions")
  val PermissionsBasePathString: String = "/admin/permissions"
}
@Api(value = "permissions", produces = "application/json")
@Path("/admin/permissions")
class PermissionsRouteADM(routeData: KnoraRouteData) extends KnoraRoute(routeData) with Authenticator with ClientEndpoint {

  import PermissionsRouteADM._

  /**
   * The name of this [[ClientEndpoint]].
   */
  override val name: String = "PermissionsEndpoint"

  /**
   * The directory name to be used for this endpoint's code.
   */
  override val directoryName: String = "permissions"

  /**
   * The URL path of of this [[ClientEndpoint]].
   */
  override val urlPath: String = "/permissions"

  /**
   * A description of this [[ClientEndpoint]].
   */
  override val description: String = "An endpoint for working with Knora permissions."

  // Classes used in client function definitions.
  private val AdministrativePermissionResponse = classRef(OntologyConstants.KnoraAdminV2.AdministrativePermissionResponse.toSmartIri)

  // Fixed project/group IRIs used to generate test data further below.
  private val projectIri: String = URLEncoder.encode(SharedTestDataADM.imagesProject.id, "utf-8")
  private val groupIri: String = URLEncoder.encode(OntologyConstants.KnoraAdmin.ProjectMember, "utf-8")

  /**
   * Returns the route.
   */
  override def knoraApiPath: Route = getAdministrativePermission

  // GET /admin/permissions/<projectIri>/<groupIri>
  private def getAdministrativePermission: Route = path(PermissionsBasePath / Segment / Segment) { (projectIri, groupIri) =>
    get {
      requestContext =>
        val params = requestContext.request.uri.query().toMap
        // NOTE(review): permissionType is read but the match below has only a
        // wildcard case, so it is effectively ignored — presumably a placeholder
        // for additional permission types; confirm intent.
        val permissionType = params.getOrElse("permissionType", PermissionType.AP)
        val requestMessage = for {
          requestingUser <- getUserADM(requestContext)
        } yield permissionType match {
          case _ => AdministrativePermissionForProjectGroupGetRequestADM(projectIri, groupIri, requestingUser)
        }

        RouteUtilADM.runJsonRoute(
          requestMessage,
          requestContext,
          settings,
          responderManager,
          log
        )
    }
  }

  // Client-API DSL description of the GET function above.
  private val getAdministrativePermissionFunction: ClientFunction =
    "getAdministrativePermission" description "Gets the administrative permission for a project and group." params(
      "projectIri" description "The project IRI." paramType UriDatatype,
      "groupIri" description "The group IRI." paramType UriDatatype,
    ) doThis {
      httpGet(
        path = arg("projectIri") / arg("groupIri")
      )
    } returns AdministrativePermissionResponse

  /*
  // Commented out because the 'projectType' parameter is ignored.
  private val getAdministrativePermissionByTypeFunction: ClientFunction =
    "getAdministrativePermissionByType" description "Gets the administrative permission for a project and group, specifying a permission type." params(
      "projectIri" description "The project IRI." paramType UriDatatype,
      "groupIri" description "The group IRI." paramType UriDatatype,
      "permissionType" description "The permission type." paramType StringDatatype
    ) doThis {
      httpGet(
        path = arg("projectIri") / arg("groupIri"),
        params = Seq("permissionType" -> arg("permissionType"))
      )
    } returns AdministrativePermissionResponse
  */

  // Captures a live response as a JSON test fixture for the client API.
  private def getAdministrativePermissionTestResponse: Future[SourceCodeFileContent] = {
    for {
      responseStr <- doTestDataRequest(Get(s"$baseApiUrl$PermissionsBasePathString/$projectIri/$groupIri"))
    } yield SourceCodeFileContent(
      filePath = SourceCodeFilePath.makeJsonPath("get-administrative-permission-response"),
      text = responseStr
    )
  }

  /**
   * The functions defined by this [[ClientEndpoint]].
   */
  override val functions: Seq[ClientFunction] = Seq(
    getAdministrativePermissionFunction
  )

  /**
   * Returns test data for this endpoint.
   *
   * @return a set of test data files to be used for testing this endpoint.
   */
  override def getTestData(implicit executionContext: ExecutionContext, actorSystem: ActorSystem, materializer: ActorMaterializer): Future[Set[SourceCodeFileContent]] = {
    Future.sequence {
      Set(
        getAdministrativePermissionTestResponse
      )
    }
  }
}
| musicEnfanthen/Knora | webapi/src/main/scala/org/knora/webapi/routing/admin/PermissionsRouteADM.scala | Scala | agpl-3.0 | 6,245 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.graph.internal.ops
import org.trustedanalytics.sparktk.frame.Frame
import org.apache.spark.sql.functions._
import org.graphframes.GraphFrame
import org.graphframes.GraphFrame.ID
import org.apache.spark.sql.DataFrame
import org.graphframes.lib.AggregateMessages
import org.apache.spark.sql.Column
import org.apache.spark.sql.functions.lit
import org.trustedanalytics.sparktk.graph.internal.{ GraphState, GraphSummarization, BaseGraph }
trait LabelPropagationSummarization extends BaseGraph {

  /**
   * Label propagation attempts to determine communities based off of neighbor associations.
   * A community is a label that any vertex can have assigned to it; vertices are assumed
   * to be more likely members of communities they are near.
   *
   * This algorithm can fail to converge, oscillate or return the trivial solution (all
   * members of one community), so choose `maxIterations` accordingly.
   *
   * @param maxIterations the number of iterations to run label propagation for (must be positive)
   * @return frame with the vertices associated with their respective communities
   */
  def labelPropagation(maxIterations: Int): Frame = {
    execute[Frame](LabelPropagation(maxIterations))
  }
}
/** Graph summarization running label propagation for a fixed number of iterations.
  * Each vertex starts with a unique label and repeatedly adopts the most frequent
  * label among its neighbours. */
case class LabelPropagation(maxIterations: Int) extends GraphSummarization[Frame] {
  require(maxIterations > 0, "maxIterations must be a positive value")

  // Column names used during iteration.
  val index = "label"       // current community label of a vertex
  val indexNew = "labelNew" // label selected for the next iteration
  // This is the column name spark gives to count
  val countName = "count"
  val maxName = "max"
  val maxValue = "max.MSG"
  val messageName = "MSG"

  override def work(state: GraphState): Frame = {
    // Seed every vertex with a unique label.
    var graph = GraphFrame(state.graphFrame.vertices.withColumn(index, monotonicallyIncreasingId()), state.graphFrame.edges)
    for (i <- 1 to maxIterations) {
      // Each vertex collects neighbour labels (sent along both edge directions
      // via GraphHelpers.agg — confirm its exact semantics there), counts them,
      // and keeps the most frequent; ties break toward the larger (count, label)
      // struct under Spark's struct ordering.
      val updatedComponent =
        GraphHelpers.agg(graph, Some(AggregateMessages.dst(index)), Some(AggregateMessages.src(index)))
          .groupBy(ID, messageName)
          .count()
          .groupBy(ID)
          .agg(max(struct(col(countName),
            col(messageName))).alias(maxName))
          .select(col(ID), col(maxValue))
          .withColumnRenamed(messageName, indexNew)
      // Re-attach the remaining vertex attributes, then swap in the new label.
      val joinedComponent = updatedComponent
        .join(graph.vertices, graph.vertices(ID) === updatedComponent(ID))
        .drop(graph.vertices(ID))
      val newVertices = joinedComponent
        .drop(index)
        .withColumnRenamed(indexNew, index)
      // Truncate the lineage between rounds (see AggregateMessages docs) so the
      // query plan does not grow without bound.
      val unCachedVertices = AggregateMessages.getCachedDataFrame(newVertices)
      graph = GraphFrame(unCachedVertices, state.graphFrame.edges)
    }
    new Frame(graph.vertices)
  }
}
| trustedanalytics/spark-tk | sparktk-core/src/main/scala/org/trustedanalytics/sparktk/graph/internal/ops/LabelPropagation.scala | Scala | apache-2.0 | 3,317 |
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary._
import com.eevolution.context.dictionary.domain.model.SequenceNo
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: eduardo.moreno@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by eduardo.moreno@e-evolution.com , www.e-evolution.com
*/
/**
 * Sequence No Service: service-layer contract for [[SequenceNo]] entities,
 * keyed by their Int identifier. All operations come from api.Service.
 */
trait SequenceNoService extends api.Service[SequenceNo, Int] {
  // Marker trait — no operations beyond those inherited from api.Service.
} | adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/SequenceNoService.scala | Scala | gpl-3.0 | 1,200 |
package db.generated
import anorm._
import anorm.JodaParameterMetaData._
import io.flow.common.v0.models.UserReference
import io.flow.postgresql.{OrderBy, Query}
import io.flow.postgresql.play.db.DbHelpers
import io.flow.util.IdGenerator
import java.sql.Connection
import javax.inject.{Inject, Singleton}
import org.joda.time.DateTime
import play.api.db.Database
import play.api.libs.json.{JsObject, JsValue, Json}
/** Row of the `build_last_states` table: the last observed state (`versions`)
  * of a build at a given timestamp. */
case class BuildLastStates(
  id: String,
  buildId: String,
  timestamp: DateTime,
  versions: Seq[JsObject]
) {

  // Form representation of this row; the DAO compares form hash codes to skip
  // writes when nothing has changed.
  lazy val form: BuildLastStatesForm = BuildLastStatesForm(
    buildId = buildId,
    timestamp = timestamp,
    versions = versions
  )
}
/** Insert/update payload for `build_last_states`. `versions` is typed as
  * Seq[JsValue] but every element must actually be a JsObject. */
case class BuildLastStatesForm(
  buildId: String,
  timestamp: DateTime,
  versions: Seq[JsValue]
) {

  // Fail fast at construction time if any element is not a JSON object.
  assert(
    versions.forall(_.isInstanceOf[JsObject]),
    s"Field[versions] must contain JsObjects and not a ${versions.filterNot(_.isInstanceOf[JsObject]).map(_.getClass.getName).distinct}"
  )
}
@Singleton
class BuildLastStatesDao @Inject() (
db: Database
) {
private[this] val idGenerator = IdGenerator("bls")
def randomId(): String = idGenerator.randomId()
private[this] val dbHelpers = DbHelpers(db, "build_last_states")
private[this] val BaseQuery = Query("""
| select build_last_states.id,
| build_last_states.build_id,
| build_last_states.timestamp,
| build_last_states.versions::text as versions_text,
| build_last_states.created_at,
| build_last_states.updated_at,
| build_last_states.updated_by_user_id,
| build_last_states.hash_code
| from build_last_states
""".stripMargin)
private[this] val UpsertQuery = Query("""
| insert into build_last_states
| (id, build_id, timestamp, versions, updated_by_user_id, hash_code)
| values
| ({id}, {build_id}, {timestamp}::timestamptz, {versions}::json, {updated_by_user_id}, {hash_code}::bigint)
| on conflict (build_id)
| do update
| set timestamp = {timestamp}::timestamptz,
| versions = {versions}::json,
| updated_by_user_id = {updated_by_user_id},
| hash_code = {hash_code}::bigint
| where build_last_states.hash_code != {hash_code}::bigint
| returning id
""".stripMargin)
private[this] val UpdateQuery = Query("""
| update build_last_states
| set build_id = {build_id},
| timestamp = {timestamp}::timestamptz,
| versions = {versions}::json,
| updated_by_user_id = {updated_by_user_id},
| hash_code = {hash_code}::bigint
| where id = {id}
| and build_last_states.hash_code != {hash_code}::bigint
""".stripMargin)
private[this] def bindQuery(query: Query, form: BuildLastStatesForm): Query = {
query.
bind("build_id", form.buildId).
bind("timestamp", form.timestamp).
bind("versions", Json.toJson(form.versions)).
bind("hash_code", form.hashCode())
}
private[this] def toNamedParameter(updatedBy: UserReference, form: BuildLastStatesForm): Seq[NamedParameter] = {
Seq(
Symbol("id") ->randomId(),
Symbol("build_id") ->form.buildId,
Symbol("timestamp") ->form.timestamp,
Symbol("versions") ->Json.toJson(form.versions).toString,
Symbol("updated_by_user_id") ->updatedBy.id,
Symbol("hash_code") ->form.hashCode()
)
}
def upsertIfChangedByBuildId(updatedBy: UserReference, form: BuildLastStatesForm): Unit = {
if (!findByBuildId(form.buildId).map(_.form).contains(form)) {
upsertByBuildId(updatedBy, form)
}
}
def upsertByBuildId(updatedBy: UserReference, form: BuildLastStatesForm): Unit = {
db.withConnection { implicit c =>
upsertByBuildId(c, updatedBy, form)
}
}
def upsertByBuildId(implicit c: Connection, updatedBy: UserReference, form: BuildLastStatesForm): Unit = {
bindQuery(UpsertQuery, form).
bind("id", randomId()).
bind("updated_by_user_id", updatedBy.id).
anormSql.execute()
}
def upsertBatchByBuildId(updatedBy: UserReference, forms: Seq[BuildLastStatesForm]): Unit = {
db.withConnection { implicit c =>
upsertBatchByBuildId(c, updatedBy, forms)
}
}
def upsertBatchByBuildId(implicit c: Connection, updatedBy: UserReference, forms: Seq[BuildLastStatesForm]): Unit = {
if (forms.nonEmpty) {
val params = forms.map(toNamedParameter(updatedBy, _))
BatchSql(UpsertQuery.sql(), params.head, params.tail: _*).execute()
}
}
def updateIfChangedById(updatedBy: UserReference, id: String, form: BuildLastStatesForm): Unit = {
if (!findById(id).map(_.form).contains(form)) {
updateById(updatedBy, id, form)
}
}
def updateById(updatedBy: UserReference, id: String, form: BuildLastStatesForm): Unit = {
db.withConnection { implicit c =>
updateById(c, updatedBy, id, form)
}
}
def updateById(implicit c: Connection, updatedBy: UserReference, id: String, form: BuildLastStatesForm): Unit = {
bindQuery(UpdateQuery, form).
bind("id", id).
bind("updated_by_user_id", updatedBy.id).
anormSql.execute()
}
def update(updatedBy: UserReference, existing: BuildLastStates, form: BuildLastStatesForm): Unit = {
db.withConnection { implicit c =>
update(c, updatedBy, existing, form)
}
}
def update(implicit c: Connection, updatedBy: UserReference, existing: BuildLastStates, form: BuildLastStatesForm): Unit = {
updateById(c, updatedBy, existing.id, form)
}
def delete(deletedBy: UserReference, buildLastStates: BuildLastStates): Unit = {
dbHelpers.delete(deletedBy, buildLastStates.id)
}
def deleteById(deletedBy: UserReference, id: String): Unit = {
db.withConnection { implicit c =>
deleteById(c, deletedBy, id)
}
}
def deleteById(c: java.sql.Connection, deletedBy: UserReference, id: String): Unit = {
dbHelpers.delete(c, deletedBy, id)
}
def deleteByBuildId(deletedBy: UserReference, buildId: String): Unit = {
findByBuildId(buildId).foreach { r =>
delete(deletedBy, r)
}
}
def findById(id: String): Option[BuildLastStates] = {
db.withConnection { implicit c =>
findByIdWithConnection(c, id)
}
}
def findByIdWithConnection(c: java.sql.Connection, id: String): Option[BuildLastStates] = {
findAllWithConnection(c, ids = Some(Seq(id)), limit = Some(1)).headOption
}
def findByBuildId(buildId: String): Option[BuildLastStates] = {
db.withConnection { implicit c =>
findByBuildIdWithConnection(c, buildId)
}
}
def findByBuildIdWithConnection(c: java.sql.Connection, buildId: String): Option[BuildLastStates] = {
findAllWithConnection(c, buildId = Some(buildId), limit = Some(1)).headOption
}
def findAll(
ids: Option[Seq[String]] = None,
buildId: Option[String] = None,
limit: Option[Long],
offset: Long = 0,
orderBy: OrderBy = OrderBy("build_last_states.id")
) (
implicit customQueryModifier: Query => Query = { q => q }
): Seq[BuildLastStates] = {
db.withConnection { implicit c =>
findAllWithConnection(
c,
ids = ids,
buildId = buildId,
limit = limit,
offset = offset,
orderBy = orderBy
)(customQueryModifier)
}
}
/** Connection-scoped query: applies every optional filter, then limit/offset
  * and ordering, and parses each row with `BuildLastStatesDao.parser`.
  *
  * Note: `equals` here is the query-builder's two-argument optional equality
  * filter on a column, not `Object.equals`.
  */
def findAllWithConnection(
c: java.sql.Connection,
ids: Option[Seq[String]] = None,
buildId: Option[String] = None,
limit: Option[Long],
offset: Long = 0,
orderBy: OrderBy = OrderBy("build_last_states.id")
) (
implicit customQueryModifier: Query => Query = { q => q }
): Seq[BuildLastStates] = {
customQueryModifier(BaseQuery).
optionalIn("build_last_states.id", ids).
equals("build_last_states.build_id", buildId).
optionalLimit(limit).
offset(offset).
orderBy(orderBy.sql).
as(BuildLastStatesDao.parser.*)(c)
}
}
object BuildLastStatesDao {
/** Anorm row parser mapping a result-set row to a [[BuildLastStates]].
  *
  * The `versions_text` column holds JSON text which is decoded into
  * `Seq[JsObject]` via `Json.parse` (invalid JSON will fail the parse).
  */
val parser: RowParser[BuildLastStates] = {
SqlParser.str("id") ~
SqlParser.str("build_id") ~
SqlParser.get[DateTime]("timestamp") ~
SqlParser.str("versions_text") map {
case id ~ buildId ~ timestamp ~ versions => BuildLastStates(
id = id,
buildId = buildId,
timestamp = timestamp,
versions = Json.parse(versions).as[Seq[JsObject]]
)
}
}
} | flowcommerce/delta | api/app/db/generated/GeneratorDeltaBuildLastStatesDao.scala | Scala | mit | 8,338 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.utils
import java.sql._
import java.util.logging.Logger
/** A `java.sql.Driver` that forwards every call to the wrapped `proxied` driver. */
class ProxyDriver(proxied: Driver) extends Driver {
def acceptsURL(url: String) = proxied.acceptsURL(url)
// NOTE(review): per the JDBC Driver interface the first argument is the JDBC
// URL, not a user name — the parameter name `user` is misleading here.
def connect(user: String, properties: java.util.Properties) = proxied.connect(user, properties)
def getMajorVersion() = proxied.getMajorVersion
def getMinorVersion() = proxied.getMinorVersion
def getPropertyInfo(user: String, properties: java.util.Properties) = proxied.getPropertyInfo(user, properties)
def jdbcCompliant() = proxied.jdbcCompliant
// NOTE(review): JDBC expects drivers without java.util.logging support to throw
// SQLFeatureNotSupportedException from getParentLogger; returning null (rather
// than delegating) appears deliberate — confirm callers tolerate a null logger.
def getParentLogger(): Logger = null
}
| Shenker93/playframework | framework/src/play/src/main/scala/play/utils/ProxyDriver.scala | Scala | apache-2.0 | 658 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.service.sockorest
import akka.util.Timeout
import com.webtrends.harness.command.{Command, CommandBean, CommandException, CommandResponse}
import com.webtrends.harness.component.ComponentHelper
import com.webtrends.harness.component.socko.route.SockoGet
import com.webtrends.service.Person
import scala.concurrent.duration._
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success}
/** Socko GET command that reads a [[Person]] from the memcache component and
  * returns it serialized as JSON.
  *
  * The `$name` segment of [[path]] is a path placeholder — presumably bound
  * into the CommandBean under the key "name" by the routing layer (TODO confirm).
  */
class SReadJSON extends Command
with SockoGet
with ComponentHelper {
implicit val executionContext = context.dispatcher
// Timeout for the component lookup / cache read futures.
implicit val timeout = Timeout(2 seconds)
override def path: String = "/person/json/$name"
/**
* Name of the command that will be used for the actor name.
*
* @return the registered command name, shared with the companion object
*/
override def commandName: String = SReadJSON.CommandName
/**
* The primary entry point for the command; the actor for this command
* will ignore all other messaging and only execute through this.
*
* Resolves the "wookiee-cache-memcache" component, reads the requested
* person from the cache, and completes the returned future with either a
* JSON CommandResponse or a failure.
*
* @return future completed with the cached person, or failed when the bean,
*         component, or person is unavailable
*/
def execute[T](bean: Option[CommandBean]): Future[CommandResponse[T]] = {
val p = Promise[CommandResponse[T]]
bean match {
case Some(b) =>
// Resolve the memcache component actor, then perform the cache read.
getComponent("wookiee-cache-memcache") onComplete {
case Success(actor) =>
val personName = b("name").asInstanceOf[String]
Person(personName).readFromCache(actor) onComplete {
case Success(person) =>
person match {
case Some(per) => p success CommandResponse[T](Some(per.asInstanceOf[T]), "json")
case None => p failure CommandException("SReadJSON", s"Person not found with name [$personName]")
}
case Failure(f) => p failure f
}
// Component lookup failed: propagate the cause.
case Failure(f) => p failure f
}
case None => p failure new CommandException("SReadJSON", "Cache not initialized")
}
p.future
}
}
object SReadJSON {
/** Name under which the command actor is registered. */
def CommandName = "SReadJSON"
} | mjwallin1/wookiee-spray | example-rest/src/main/scala/com/webtrends/service/sockorest/SReadJSON.scala | Scala | apache-2.0 | 2,669 |
package fabricator
import java.awt.font.{FontRenderContext, TextLayout}
import java.awt.image.BufferedImage
import java.awt.{Color, Font, Graphics2D, Rectangle}
import fabricator.entities.CsvFileBuilder
import fabricator.enums.{FileType, MimeType}
import scala.util.Random
/** Factory for [[FileGenerator]] instances. */
object FileGenerator {
/** Creates a generator backed by default-locale data providers. */
def apply(): FileGenerator = {
new FileGenerator( Alphanumeric(), new Random(),
Contact(), Words(), Calendar(), Finance(),
Internet(), Location(), Mobile(), UserAgent(), UtilityService())
}
/** Creates a generator whose locale-sensitive providers use the given locale. */
def apply(locale: String): FileGenerator = {
new FileGenerator(Alphanumeric(), new Random(),
Contact(locale), Words(locale), Calendar(locale), Finance(locale),
Internet(locale), Location(locale), Mobile(), UserAgent(), UtilityService())
}
}
/** Generates fake file-related data: placeholder images, file names,
  * file extensions and mime types.
  *
  * Random values are drawn from the injected data providers (via
  * `utility.getValueFromArray` lookups and `words.word`).
  */
class FileGenerator(private val alpha: Alphanumeric,
                    private val random: Random,
                    private val contact: Contact,
                    private val words: Words,
                    private val calendar: Calendar,
                    private val finance: Finance,
                    private val internet: Internet,
                    private val location: Location,
                    private val mobile: Mobile,
                    private val userAgent: UserAgent,
                    private val utility: UtilityService) {

  /** Renders a black PNG of the requested size, labelled "<width>x<height>"
    * in white, and writes it to `path`.
    *
    * @param width  image width in pixels (max 2560)
    * @param height image height in pixels (max 2560)
    * @param path   destination file path
    * @return the result of `ImageIO.write` (false if no suitable writer was found)
    * @throws IllegalArgumentException if either dimension exceeds 2560
    */
  def image(width: Int, height: Int, path: String): Boolean = {
    // Fixed typo in the original message ("more then").
    if (width > 2560 || height > 2560) throw new IllegalArgumentException("Image cannot be more than 2560x2560")
    val label: String = s"${width}x$height"
    val font: Font = new Font("Arial", Font.PLAIN, 32)
    val frc: FontRenderContext = new FontRenderContext(null, true, true)
    val layout: TextLayout = new TextLayout(label, font, frc)
    val rectangle: Rectangle = layout.getPixelBounds(null, 0, 0)
    val bufferedImage: BufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB)
    val graphics2D: Graphics2D = bufferedImage.getGraphics.asInstanceOf[Graphics2D]
    // Fill the background with black.
    graphics2D.setColor(Color.black)
    graphics2D.fillRect(0, 0, width, height)
    // Draw the dimension label, horizontally centred, in white.
    graphics2D.setColor(Color.white)
    layout.draw(graphics2D, width / 2 - rectangle.getWidth.toInt / 2, height / 2)
    // Done with drawing; release the graphics context.
    graphics2D.dispose()
    // Persist as PNG; ImageIO.write returns false when no writer accepts the format.
    javax.imageio.ImageIO.write(bufferedImage, "png", new java.io.File(path))
  }

  /** Builder for generating CSV files from the injected providers. */
  def csvBuilder: CsvFileBuilder = new CsvFileBuilder(alpha, calendar, contact, finance, internet,
    location, mobile, userAgent, words)

  /** Random file name with an extension for a random file type. */
  def fileName: String = fileName(FileType.getRandom)

  /** Random file name of the form "<word>.<ext>" for the given file type. */
  def fileName(fileType: FileType): String = {
    // Renamed locals: the original shadowed the method names `fileName`/`fileExtension`.
    val extension = fileExtension(fileType)
    val baseName = words.word
    s"$baseName.$extension"
  }

  /** Random file extension for a random file type. */
  def fileExtension: String = fileExtension(FileType.getRandom)

  /** Random file extension appropriate for the given file type. */
  def fileExtension(fileType: FileType): String = {
    fileType match {
      case FileType.AUDIO => utility.getValueFromArray("audio_file_extensions")
      case FileType.IMAGE => utility.getValueFromArray("image_file_extensions")
      case FileType.TEXT => utility.getValueFromArray("text_file_extensions")
      case FileType.DOCUMENT => utility.getValueFromArray("document_file_extensions")
      case FileType.VIDEO => utility.getValueFromArray("video_file_extensions")
    }
  }

  /** Random mime type for a random mime-type category.
    * (Name kept snake_case for source compatibility with existing callers.)
    */
  def mime_type: String = {
    mime_type(MimeType.getRandom)
  }

  /** Random mime type string for the given mime-type category. */
  def mime_type(mimeType: MimeType): String = {
    mimeType match {
      case MimeType.APPLICATION => utility.getValueFromArray("application_mime_types")
      case MimeType.AUDIO => utility.getValueFromArray("audio_mime_types")
      case MimeType.IMAGE => utility.getValueFromArray("image_mime_types")
      case MimeType.MESSAGE => utility.getValueFromArray("message_mime_types")
      case MimeType.MODEL => utility.getValueFromArray("model_mime_types")
      case MimeType.MULTIPART => utility.getValueFromArray("multipart_mime_types")
      case MimeType.TEXT => utility.getValueFromArray("text_mime_types")
      case MimeType.VIDEO => utility.getValueFromArray("video_mime_types")
    }
  }
}
| azakordonets/fabricator | src/main/scala/fabricator/FileGenerator.scala | Scala | apache-2.0 | 4,200 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.dmlc.mxnet
import ml.dmlc.mxnet.Base._
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
/**
* JNI functions
*/
private[mxnet] class LibInfo {
// Library bootstrap: loads/initialises the native MXNet library; returns a
// native status code (0 = success, as with every Int-returning call below).
@native def nativeLibInit(): Int
// Returns the message of the last error raised on the native side.
@native def mxGetLastError(): String
// Operators: enumerate operator names and resolve their native handles.
@native def mxListAllOpNames(names: ListBuffer[String]): Int
@native def nnGetOpHandle(opName: String, opHandle: RefLong): Int
// NDArray: creation, lifecycle, shape/dtype queries and host<->device copies.
// Output parameters (Ref*/Buffer arguments) are filled in by the native call.
@native def mxImperativeInvoke(creator: FunctionHandle,
                               inputs: Array[NDArrayHandle],
                               outputsGiven: Array[NDArrayHandle],
                               outputs: ArrayBuffer[NDArrayHandle],
                               numParams: Int,
                               paramKeys: Array[String],
                               paramVals: Array[String]): Int
@native def mxNDArrayFree(handle: NDArrayHandle): Int
@native def mxNDArrayCreateNone(out: NDArrayHandleRef): Int
@native def mxNDArrayCreateEx(shape: Array[Int],
                              ndim: Int,
                              devType: Int,
                              devId: Int,
                              delayAlloc: Int,
                              dtype: Int,
                              out: NDArrayHandleRef): Int
@native def mxNDArrayWaitAll(): Int
@native def mxNDArrayWaitToRead(handle: NDArrayHandle): Int
@native def mxListFunctions(functions: ListBuffer[FunctionHandle]): Int
@native def mxFuncDescribe(handle: FunctionHandle,
                           nUsedVars: MXUintRef,
                           nScalars: MXUintRef,
                           nMutateVars: MXUintRef,
                           typeMask: RefInt): Int
@native def mxFuncGetInfo(handle: FunctionHandle,
                          name: RefString,
                          desc: RefString,
                          numArgs: MXUintRef,
                          argNames: ListBuffer[String],
                          argTypes: ListBuffer[String],
                          argDescs: ListBuffer[String]): Int
@native def mxFuncInvoke(function: FunctionHandle,
                         useVars: Array[NDArrayHandle],
                         scalarArgs: Array[MXFloat],
                         mutateVars: Array[NDArrayHandle]): Int
@native def mxFuncInvokeEx(function: FunctionHandle,
                           useVars: Array[NDArrayHandle],
                           scalarArgs: Array[MXFloat],
                           mutateVars: Array[NDArrayHandle],
                           numParams: Int,
                           paramKeys: Array[Array[Byte]],
                           paramVals: Array[Array[Byte]]): Int
@native def mxNDArrayGetShape(handle: NDArrayHandle,
                              ndim: MXUintRef,
                              data: ArrayBuffer[Int]): Int
@native def mxNDArraySyncCopyToCPU(handle: NDArrayHandle,
                                   data: Array[Byte],
                                   size: Int): Int
@native def mxNDArraySlice(handle: NDArrayHandle,
                           start: MXUint,
                           end: MXUint,
                           sliceHandle: NDArrayHandleRef): Int
@native def mxNDArrayAt(handle: NDArrayHandle,
                        idx: MXUint,
                        out: NDArrayHandleRef): Int
@native def mxNDArrayReshape(handle: NDArrayHandle,
                             nDim: Int,
                             dims: Array[Int],
                             reshapeHandle: NDArrayHandleRef): Int
@native def mxNDArraySyncCopyFromCPU(handle: NDArrayHandle,
                                     source: Array[MXFloat],
                                     size: Int): Int
@native def mxNDArrayLoad(fname: String,
                          outSize: MXUintRef,
                          handles: ArrayBuffer[NDArrayHandle],
                          outNameSize: MXUintRef,
                          names: ArrayBuffer[String]): Int
@native def mxNDArraySave(fname: String,
                          handles: Array[NDArrayHandle],
                          keys: Array[String]): Int
@native def mxNDArrayGetContext(handle: NDArrayHandle, devTypeId: RefInt, devId: RefInt): Int
@native def mxNDArraySaveRawBytes(handle: NDArrayHandle, buf: ArrayBuffer[Byte]): Int
@native def mxNDArrayLoadFromRawBytes(bytes: Array[Byte], handle: NDArrayHandleRef): Int
@native def mxNDArrayGetDType(handle: NDArrayHandle, dtype: RefInt): Int
// KVStore Server: parameter-server side entry points.
@native def mxInitPSEnv(keys: Array[String], values: Array[String]): Int
@native def mxKVStoreRunServer(handle: KVStoreHandle, controller: KVServerControllerCallback): Int
@native def mxKVStoreGetNumDeadNode(handle: KVStoreHandle, nodeId: Int, number: RefInt): Int
// KVStore: client-side init/push/pull; the *Ex variants take string keys
// instead of integer keys.
@native def mxKVStoreCreate(name: String, handle: KVStoreHandleRef): Int
@native def mxKVStoreInit(handle: KVStoreHandle,
                          len: MXUint,
                          keys: Array[Int],
                          values: Array[NDArrayHandle]): Int
@native def mxKVStoreInitEx(handle: KVStoreHandle,
                            len: MXUint,
                            keys: Array[String],
                            values: Array[NDArrayHandle]): Int
@native def mxKVStorePush(handle: KVStoreHandle,
                          len: MXUint,
                          keys: Array[Int],
                          values: Array[NDArrayHandle],
                          priority: Int): Int
@native def mxKVStorePushEx(handle: KVStoreHandle,
                            len: MXUint,
                            keys: Array[String],
                            values: Array[NDArrayHandle],
                            priority: Int): Int
@native def mxKVStorePull(handle: KVStoreHandle,
                          len: MXUint,
                          keys: Array[Int],
                          outs: Array[NDArrayHandle],
                          priority: Int): Int
@native def mxKVStorePullEx(handle: KVStoreHandle,
                            len: MXUint,
                            keys: Array[String],
                            outs: Array[NDArrayHandle],
                            priority: Int): Int
@native def mxKVStoreSetUpdater(handle: KVStoreHandle, updaterFunc: MXKVStoreUpdater): Int
@native def mxKVStoreIsWorkerNode(isWorker: RefInt): Int
@native def mxKVStoreGetType(handle: KVStoreHandle, kvType: RefString): Int
@native def mxKVStoreSendCommmandToServers(handle: KVStoreHandle,
                                           head: Int, body: String): Int
@native def mxKVStoreBarrier(handle: KVStoreHandle): Int
@native def mxKVStoreGetGroupSize(handle: KVStoreHandle, size: RefInt): Int
@native def mxKVStoreGetRank(handle: KVStoreHandle, size: RefInt): Int
@native def mxKVStoreSetBarrierBeforeExit(handle: KVStoreHandle, doBarrier: Int): Int
@native def mxKVStoreFree(handle: KVStoreHandle): Int
// DataIter Funcs: data-iterator creation, traversal and batch accessors.
@native def mxListDataIters(handles: ListBuffer[DataIterCreator]): Int
@native def mxDataIterCreateIter(handle: DataIterCreator,
                                 keys: Array[String],
                                 vals: Array[String],
                                 out: DataIterHandleRef): Int
@native def mxDataIterGetIterInfo(creator: DataIterCreator,
                                  name: RefString,
                                  description: RefString,
                                  argNames: ListBuffer[String],
                                  argTypeInfos: ListBuffer[String],
                                  argDescriptions: ListBuffer[String]): Int
@native def mxDataIterFree(handle: DataIterHandle): Int
@native def mxDataIterBeforeFirst(handle: DataIterHandle): Int
@native def mxDataIterNext(handle: DataIterHandle, out: RefInt): Int
@native def mxDataIterGetLabel(handle: DataIterHandle,
                               out: NDArrayHandleRef): Int
@native def mxDataIterGetData(handle: DataIterHandle,
                              out: NDArrayHandleRef): Int
@native def mxDataIterGetIndex(handle: DataIterHandle,
                               outIndex: ListBuffer[Long],
                               outSize: RefLong): Int
@native def mxDataIterGetPadNum(handle: DataIterHandle,
                                out: MXUintRef): Int
// Executors: forward/backward execution of bound symbols.
@native def mxExecutorOutputs(handle: ExecutorHandle, outputs: ArrayBuffer[NDArrayHandle]): Int
@native def mxExecutorFree(handle: ExecutorHandle): Int
@native def mxExecutorForward(handle: ExecutorHandle, isTrain: Int): Int
@native def mxExecutorBackward(handle: ExecutorHandle,
                               grads: Array[NDArrayHandle]): Int
@native def mxExecutorPrint(handle: ExecutorHandle, debugStr: RefString): Int
@native def mxExecutorSetMonitorCallback(handle: ExecutorHandle, callback: MXMonitorCallback): Int
// Symbols: symbolic-graph construction, inspection, shape/type inference,
// composition and (de)serialization.
@native def mxSymbolListAtomicSymbolCreators(symbolList: ListBuffer[SymbolHandle]): Int
@native def mxSymbolGetAtomicSymbolInfo(handle: SymbolHandle,
                                        name: RefString,
                                        desc: RefString,
                                        numArgs: MXUintRef,
                                        argNames: ListBuffer[String],
                                        argTypes: ListBuffer[String],
                                        argDescs: ListBuffer[String],
                                        keyVarNumArgs: RefString): Int
@native def mxSymbolCreateAtomicSymbol(handle: SymbolHandle,
                                       paramKeys: Array[String],
                                       paramVals: Array[String],
                                       symHandleRef: SymbolHandleRef): Int
@native def mxSymbolSetAttr(handle: SymbolHandle, key: String, value: String): Int
@native def mxSymbolListAttrShallow(handle: SymbolHandle,
                                    outSize: MXUintRef,
                                    out: ArrayBuffer[String]): Int
@native def mxSymbolListAttr(handle: SymbolHandle,
                             outSize: MXUintRef,
                             out: ArrayBuffer[String]): Int
@native def mxSymbolCompose(handle: SymbolHandle,
                            name: String,
                            keys: Array[String],
                            args: Array[SymbolHandle]): Int
@native def mxSymbolCreateVariable(name: String, out: SymbolHandleRef): Int
@native def mxSymbolGetAttr(handle: SymbolHandle,
                            key: String,
                            ret: RefString,
                            success: RefInt): Int
@native def mxSymbolListArguments(handle: SymbolHandle,
                                  arguments: ArrayBuffer[String]): Int
@native def mxSymbolCopy(handle: SymbolHandle, clonedHandle: SymbolHandleRef): Int
@native def mxSymbolListAuxiliaryStates(handle: SymbolHandle,
                                        arguments: ArrayBuffer[String]): Int
@native def mxSymbolListOutputs(handle: SymbolHandle,
                                outputs: ArrayBuffer[String]): Int
@native def mxSymbolCreateGroup(handles: Array[SymbolHandle], out: SymbolHandleRef): Int
@native def mxSymbolPrint(handle: SymbolHandle, str: RefString): Int
@native def mxSymbolGetInternals(handle: SymbolHandle, out: SymbolHandleRef): Int
@native def mxSymbolInferType(handle: SymbolHandle,
                              keys: Array[String],
                              sdata: Array[Int],
                              argTypeData: ListBuffer[Int],
                              outTypeData: ListBuffer[Int],
                              auxTypeData: ListBuffer[Int],
                              complete: RefInt): Int
@native def mxSymbolInferShape(handle: SymbolHandle,
                               numArgs: MXUint,
                               keys: Array[String],
                               argIndPtr: Array[MXUint],
                               argShapeData: Array[MXUint],
                               inShapeData: ListBuffer[Array[Int]],
                               outShapeData: ListBuffer[Array[Int]],
                               auxShapeData: ListBuffer[Array[Int]],
                               complete: RefInt): Int
@native def mxSymbolGetOutput(handle: SymbolHandle, index: Int, out: SymbolHandleRef): Int
@native def mxSymbolSaveToJSON(handle: SymbolHandle, out: RefString): Int
@native def mxSymbolCreateFromJSON(json: String, handle: SymbolHandleRef): Int
// scalastyle:off parameterNum
@native def mxExecutorBindX(handle: SymbolHandle,
                            deviceTypeId: Int,
                            deviceID: Int,
                            numCtx: Int,
                            ctxMapKeys: Array[String],
                            ctxMapDevTypes: Array[Int],
                            ctxMapDevIDs: Array[Int],
                            numArgs: Int,
                            argsHandle: Array[NDArrayHandle],
                            argsGradHandle: Array[NDArrayHandle],
                            reqsArray: Array[Int],
                            auxArgsHandle: Array[NDArrayHandle],
                            out: ExecutorHandleRef): Int
@native def mxExecutorBindEX(handle: SymbolHandle,
                             deviceTypeId: Int,
                             deviceID: Int,
                             numCtx: Int,
                             ctxMapKeys: Array[String],
                             ctxMapDevTypes: Array[Int],
                             ctxMapDevIDs: Array[Int],
                             numArgs: Int,
                             argsHandle: Array[NDArrayHandle],
                             argsGradHandle: Array[NDArrayHandle],
                             reqsArray: Array[Int],
                             auxArgsHandle: Array[NDArrayHandle],
                             sharedExec: ExecutorHandle,
                             out: ExecutorHandleRef): Int
// scalastyle:on parameterNum
@native def mxSymbolSaveToFile(handle: SymbolHandle, fname: String): Int
@native def mxSymbolCreateFromFile(fname: String, handle: SymbolHandleRef): Int
@native def mxSymbolFree(handle: SymbolHandle): Int
// Random: global RNG seeding and engine shutdown notification.
@native def mxRandomSeed(seed: Int): Int
@native def mxNotifyShutdown(): Int
// RecordIO: sequential record file reading/writing.
@native def mxRecordIOWriterCreate(uri: String, out: RecordIOHandleRef): Int
@native def mxRecordIOReaderCreate(uri: String, out: RecordIOHandleRef): Int
@native def mxRecordIOWriterFree(handle: RecordIOHandle): Int
@native def mxRecordIOReaderFree(handle: RecordIOHandle): Int
@native def mxRecordIOWriterWriteRecord(handle: RecordIOHandle, buf: String, size: Int): Int
@native def mxRecordIOReaderReadRecord(handle: RecordIOHandle, buf: RefString): Int
@native def mxRecordIOWriterTell(handle: RecordIOHandle, pos: RefInt): Int
@native def mxRecordIOReaderSeek(handle: RecordIOHandle, pos: Int): Int
// Rtc: runtime-compiled kernels.
@native def mxRtcCreate(name: String,
                        inputNames: Array[String],
                        outputNames: Array[String],
                        inputs: Array[NDArrayHandle],
                        outputs: Array[NDArrayHandle],
                        kernel: String,
                        out: RtcHandleRef): Int
@native def mxRtcPush(handle: RtcHandle,
                      inputs: Array[NDArrayHandle],
                      outputs: Array[NDArrayHandle],
                      gridDimX: Int,
                      gridDimY: Int,
                      gridDimZ: Int,
                      blockDimX: Int,
                      blockDimY: Int,
                      blockDimZ: Int): Int
@native def mxRtcFree(handle: RtcHandle): Int
// CustomOp: registration of Scala-defined operators.
@native def mxCustomOpRegister(regName: String, opProp: CustomOpProp): Int
// Profiler: configuration and dumping of the native profiler.
@native def mxSetProfilerConfig(mode: Int, fileName: String): Int
@native def mxSetProfilerState(state: Int): Int
@native def mxDumpProfile(): Int
}
| Mega-DatA-Lab/mxnet | scala-package/core/src/main/scala/ml/dmlc/mxnet/LibInfo.scala | Scala | apache-2.0 | 17,193 |
package pkg03
import org.junit.Test;
import org.junit.Assert.assertEquals
/** Verifies that module 3's greeting service returns its expected message. */
class HelloServiceTest {

  @Test
  def test3(): Unit = {
    assertEquals("Hello from module 3", HelloService3.hello)
  }
}
| scoverage/scoverage-maven-samples | aggregation/module03/src/test/scala/pkg03/HelloServiceTest.scala | Scala | apache-2.0 | 207 |
package boilerless
/** Sample enum hierarchies exercising the boilerless macro annotations. */
object Models {
// NOTE(review): the 'Unsealed / 'NotInterested flags presumably tune the macro
// expansion (sealing / warning behaviour) — confirm against the @enum macro docs.
@enum('Unsealed, 'NotInterested) class Enum0 {
class Case0
@options('NotFinal) class Case1
@open class Case2
// @ignore: member excluded from case generation.
@ignore class MemberClass
}
// Enum with default options and no explicit cases.
@enum class Enum1
}
| LPTK/Boilerless | core/src/test/scala/boilerless/Models.scala | Scala | mit | 230 |
import java.util.Arrays
import java.util.List
import java.util.Properties
import org.apache.kafka.clients.producer._
object ProducerApp {
/** Sends ten test messages to a local Kafka broker on topic "test-topic".
  *
  * Producer settings are split into required connection/serialization
  * properties and optional tuning knobs. The producer is closed both from a
  * JVM shutdown hook (in case of early termination) and explicitly at the end.
  */
def main(args: Array[String]): Unit = {
  val props = new Properties()
  // Must-have properties
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
  // Optional properties
  props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "none")
  props.put(ProducerConfig.SEND_BUFFER_CONFIG, (1024 * 100).toString)
  props.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, 100.toString)
  props.put(ProducerConfig.METADATA_MAX_AGE_CONFIG, (5 * 60 * 1000L).toString)
  //props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, (60*1000l).toString)
  props.put(ProducerConfig.ACKS_CONFIG, 0.toString)
  //props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, (1500).toString)
  props.put(ProducerConfig.RETRIES_CONFIG, 3.toString)
  props.put(ProducerConfig.LINGER_MS_CONFIG, 1000.toString)
  props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, (32 * 1024 * 1024L).toString)
  props.put(ProducerConfig.BATCH_SIZE_CONFIG, 200.toString)
  props.put(ProducerConfig.CLIENT_ID_CONFIG, "kafka-app-producer")

  val producer = new KafkaProducer[String, String](props)

  // Close the producer if the JVM is terminated mid-run.
  Runtime.getRuntime.addShutdownHook(new Thread() {
    override def run() {
      producer.close()
    }
  })

  // Send 10 messages (delivery is asynchronous).
  // Removed an unused `var i = 0` that shadowed the loop binding.
  for (i <- 1 to 10) {
    val data = new ProducerRecord[String, String]("test-topic", "test-key", s"test-message $i")
    producer.send(data)
  }

  // Crude wait to give in-flight sends a chance to complete before closing.
  Thread.sleep(1000)
  producer.close()
}
} | snyang/TechNotes | Spark_Scala/SparkProjects/KafkaSampleApp/src/main/scala/ProducerApp.scala | Scala | mit | 1,890 |
package com.twitter.finagle.memcached.unit
import com.twitter.io.Buf
import com.twitter.finagle.memcached._
import com.twitter.finagle.memcached.protocol._
import com.twitter.finagle.Service
import com.twitter.util.{ Await, Future }
import org.junit.runner.RunWith
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import org.scalatest.FunSuite
@RunWith(classOf[JUnitRunner])
/** Unit tests for the memcached client's check-and-set behaviour.
  *
  * The underlying Service is mocked; each `when(...)` re-stubs the reply for
  * the assertion that immediately follows it, so stubbing order matters.
  */
class ConnectedClientTest extends FunSuite with MockitoSugar {
val service = mock[Service[Command, Response]]
val client = Client.apply(service)
// Fixed cas token and key/value used by every test case.
val casUnique = Buf.Utf8("unique key")
val key = "key"
val value = Buf.Utf8("value")
test("cas correctly responds to return states of the service") {
// Stored -> replaced; Exists / NotFound -> not replaced.
when(service.apply(any[Command])).thenReturn(Future.value(Stored()))
assert(Await.result(client.checkAndSet(key, value, casUnique).map(_.replaced)))
when(service.apply(any[Command])).thenReturn(Future.value(Exists()))
assert(!Await.result(client.checkAndSet(key, value, casUnique).map(_.replaced)))
when(service.apply(any[Command])).thenReturn(Future.value(NotFound()))
assert(!Await.result(client.checkAndSet(key, value, casUnique).map(_.replaced)))
}
test("checkAndSet correctly responds to return states of the service") {
// Each protocol reply maps to the matching CasResult constant.
when(service.apply(any[Command])).thenReturn(Future.value(Stored()))
assert(Await.result(client.checkAndSet(key, value, casUnique)) == CasResult.Stored)
when(service.apply(any[Command])).thenReturn(Future.value(Exists()))
assert(Await.result(client.checkAndSet(key, value, casUnique)) == CasResult.Exists)
when(service.apply(any[Command])).thenReturn(Future.value(NotFound()))
assert(Await.result(client.checkAndSet(key, value, casUnique)) == CasResult.NotFound)
}
test("checkAndSet correctly responds to the error states of the service") {
// An Error reply rethrows its cause; an unexpected reply type (Deleted)
// surfaces as IllegalStateException.
when(service.apply(any[Command])).thenReturn(Future.value(Error(new IllegalAccessException("exception"))))
intercept[IllegalAccessException] { Await.result(client.checkAndSet(key, value, casUnique)) }
when(service.apply(any[Command])).thenReturn(Future.value(Deleted()))
intercept[IllegalStateException] { Await.result(client.checkAndSet(key, value, casUnique)) }
}
}
| spockz/finagle | finagle-memcached/src/test/scala/com/twitter/finagle/memcached/unit/ConnectedClientTest.scala | Scala | apache-2.0 | 2,288 |
package com.karasiq.bootstrap.table
import rx._
import com.karasiq.bootstrap.context.RenderingContext
import com.karasiq.bootstrap.form.Forms
import com.karasiq.bootstrap.grid.Grids
import com.karasiq.bootstrap.icons.Icons
import com.karasiq.bootstrap.utils.Utils
/** Reactive (Rx-backed) sortable-table implementation: a paged table whose
  * rows can be filtered by a text box and sorted by clicking column headers.
  */
trait UniversalSortableTables extends SortableTables { self: RenderingContext with Tables with PagedTables with Icons with Utils with Forms with Grids ⇒
import scalaTags.all._
type SortableTable[T] = UniversalSortableTable[T]
object SortableTable extends AbstractSortableTableFactory {
/** Builds a sortable table over `items` with the given columns, per-row
  * modifiers and substring-based default filter.
  */
def apply[T](items: Rx[Seq[T]], columns: Rx[Seq[TableCol[T, _]]],
rowModifiers: T ⇒ Modifier = (_: T) ⇒ Bootstrap.noModifier,
filterItem: (T, String) ⇒ Boolean = (i: T, f: String) ⇒ i.toString.contains(f)): SortableTable[T] = {
Builder(columns, rowModifiers, filterItem).createTable(items)
}
/** Immutable builder collecting table configuration before creation. */
case class Builder[T](columns: Rx[Seq[TableCol[T, _]]] = Var(Nil),
rowModifiers: T ⇒ Modifier = (_: T) ⇒ Bootstrap.noModifier,
filterItem: (T, String) ⇒ Boolean = (i: T, f: String) ⇒ i.toString.contains(f)) {
def withColumns(columns: Rx[Seq[TableCol[T, _]]]) = copy(columns = columns)
def withColumns(columns: TableCol[T, _]*) = copy(columns = Var(columns.asInstanceOf[GenTableCols[T]]))
def withRowModifiers(rowModifiers: T ⇒ Modifier) = copy(rowModifiers = rowModifiers)
def withFilter(filterItem: (T, String) ⇒ Boolean) = copy(filterItem = filterItem)
/** Materializes the table; initial sort column is the first configured
  * column (note: `columns.now.head` will fail on an empty column list).
  */
def createTable(items: Rx[Seq[T]]): SortableTable[T] = {
val _items = items
new UniversalSortableTable[T] {
val items = _items
val columns = Builder.this.columns.asInstanceOf[Rx[GenTableCols[T]]]
val sortByColumn = Var(columns.now.head)
val reverseOrdering = Var(false)
val filter = Var("")
def filterItem(item: T, filter: String): Boolean = Builder.this.filterItem(item, filter)
def rowModifiers(item: T): Modifier = Builder.this.rowModifiers(item)
}
}
/** Convenience overload for a fixed set of items. */
def createTable(items: T*): SortableTable[T] = {
createTable(Var(items))
}
}
}
/** The rendered table component: header cells toggle sorting, body rows are
  * filtered, sorted and paged reactively.
  */
trait UniversalSortableTable[T] extends AbstractSortableTable[T] with BootstrapHtmlComponent {
// The filter box is hidden when there is at most one item to filter.
protected lazy val hideFilterRx = Rx(items().lengthCompare(1) <= 0)
lazy val pagedTable = {
val heading = Rx {
val columns = this.columns()
columns.map { column ⇒
// Sort indicator: triangle shown only on the active sort column.
val icon = Rx[Frag] {
if (sortByColumn() == column) span(Icon(if (reverseOrdering()) "triangle-bottom" else "triangle-top")) // "▼" else "▲"
else Bootstrap.noContent
}
span(icon, column.name, cursor.pointer, onclick := Callback.onClick(_ ⇒ setOrdering(column)))
}
}
val content = Rx {
val columns = this.columns()
val items = this.items()
val filter = this.filter()
// Filtering is skipped entirely when the filter box is hidden or empty.
val filteredItems = if (hideFilterRx() || filter.isEmpty) items else items.filter(item ⇒ filterItem(item, filter))
val selectedCol = this.sortByColumn()
val ordering = if (reverseOrdering()) selectedCol.ord.reverse else selectedCol.ord
val sortedItems = filteredItems.sortBy(item ⇒ selectedCol.extract(item))(ordering)
sortedItems.map(item ⇒ TableRow(columns.map(col ⇒ col.render(item)), rowModifiers(item)))
}
PagedTable(heading, content)
}
/** Renders the filter input row followed by the paged table itself. */
def renderTag(md: ModifierT*): TagT = {
div(
GridSystem.mkRow(Form(FormInput.text("", filter.reactiveInput)), hideFilterRx.reactiveHide),
GridSystem.mkRow(pagedTable.renderTag(md:_*))
)
}
}
}
| Karasiq/scalajs-bootstrap | library/shared/src/main/scala/com/karasiq/bootstrap/table/UniversalSortableTables.scala | Scala | mit | 3,748 |
package org.machine.communication.harness
import akka.actor.{ ActorSystem, Props, Terminated}
import akka.camel.{CamelMessage, Consumer, Producer}
import scala.concurrent.Future
import akka.stream.scaladsl.Source
import akka.stream.scaladsl.Sink
import akka.stream.actor.{ActorPublisher, ActorSubscriber, ActorSubscriberMessage, OneByOneRequestStrategy}
import akka.stream.{ActorMaterializer}
import com.typesafe.scalalogging.{LazyLogging, StrictLogging}
/** Responsible for communicating with the application harness over STDIN and STDOUT.
==Design==
The design combines Akka Streams with Akka Camel.
Client -> STDIN Consumer Source -> Subscriber Sink
|
v
Client <------------------------- STDOUT Producer
This should be used for boot strapping the engine, but not for
application level communication. That should be handled by the
Web Socket endpoint.
*/
object STDEngineHarness extends LazyLogging{
/** Initializes the STDIN/STDOUT stream.
Consumes from STDIN (uppercasing each message) and returns messages on STDOUT.
Terminating the actor system (triggered by a "SIGHUP" message) exits the JVM:
status 0 on clean termination, 1 on failure.
*/
def startStdinHarness = {
import scala.concurrent.ExecutionContext.Implicits.global
implicit val system = ActorSystem("std-actor-system")
implicit val materializer = ActorMaterializer()
// Tie JVM lifetime to the actor system's termination future.
val terminatedFuture:Future[Terminated] = system.whenTerminated
terminatedFuture onSuccess {
case ok =>{
logger.info("std-actor-system was succesfully terminated. Terminating the JVM.")
System.exit(0)
}
}
terminatedFuture onFailure {
case err =>{
logger.error("std-actor-system generated an error while attempting to terminate.", err)
logger.error("Shutting down the JVM in error state.")
System.exit(1)
}
}
// Wire: stdin consumer (publisher) -> uppercase -> subscriber (which replies on stdout).
val source = Source.actorPublisher[String](Props[StdinConsumer])
val sink = Sink.actorSubscriber[String](Props[StdinSubscriber])
source.map(_.toUpperCase).
to(sink).
run()
}
/**
Communication with Node.js should be over stdin, stdout and stderr.
BUG: I can't figure out why things are getting capitalied in their response.
(NOTE(review): the `.map(_.toUpperCase)` stage in startStdinHarness is the
likely cause of the capitalization.)
BUG: Not logging to file.
==Message Types==
SIGHUP: Shuts down the system.
*/
class StdinConsumer extends Consumer with ActorPublisher[String] with StrictLogging{
def endpointUri = "stream:in"
import akka.stream.actor.ActorPublisherMessage._
def receive = {
case msg: CamelMessage =>
msg.bodyAs[String] match {
case "SIGHUP" => {
logger.info("CamelConsumer: Received SIGHUP")
// Completing the stream cascades into system termination downstream.
onComplete()
}
// Messages are dropped when downstream demand is zero.
case txt if (totalDemand > 0) =>
logger.info(s"CamelConsumer: Received ${txt}")
onNext(txt)
}
case Request(_) => //ignored
case Cancel =>
context.stop(self)
}
}
/** Directs all incoming messages to STDOUT.
*/
class StdoutProducer extends Producer {
def endpointUri = "stream:out"
}
/** Process inbound messages. Directs responses to StdoutProducer.
Announces readiness with "ENGINE_READY" on start, and terminates the actor
system when the upstream completes.
*/
class StdinSubscriber extends ActorSubscriber with StrictLogging{
import ActorSubscriberMessage._
override val requestStrategy = OneByOneRequestStrategy
val endPoint = context.actorOf(Props[StdoutProducer])
override def preStart() = {
logger.debug(s"Engine CamelSubscriber preStart complete.")
endPoint ! "ENGINE_READY"
}
override def postStop() = {
logger.info(s"Engine CamelSubscriber postStop complete.")
}
def receive = {
case OnComplete => {
logger.debug("Engine CamelSubscriber: Received OnComplete. Initiating Actor System Shutdown")
context.system.terminate()
}
case OnNext(msg: String) => {
logger.debug(s"CamelSubscriber: Recieved OnNext(${msg})")
endPoint ! msg
}
case _ => {
logger.error("Stdin passed a message that could not be handled.")
}
}
}
}
| sholloway/graph-engine | src/main/scala/org/machine/engine/communication/harness/STDEngineHarness.scala | Scala | mit | 3,947 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import anorm._
import anorm.SqlParser._
import java.sql.{Blob, Connection, DriverManager, SQLException}
/**
* Provides SQL database access via Anorm from the Play framework.
*
* See http://www.playframework.com/documentation/2.1.1/ScalaAnorm for
* documentation on using Anorm.
*/
case class DB(dbconfig: DBConfig = new DBConfig()) {
// Load the JDBC driver class (side effect: registers it with DriverManager).
Class.forName(dbconfig.driver)
// Install the schema if requested by the configuration.
if (dbconfig.install) this.install().close()
/**
* Gets a java.sql.Connection to the SQL database.
*
* The caller owns the connection and must close it:
*
* implicit val conn: Connection = (new DB()).getConnection()
* // Do database updates
* conn.close()
*/
def getConnection() = {
DriverManager.getConnection(dbconfig.location)
}
/**
* Set up the database tables (idempotent: CREATE TABLE IF NOT EXISTS).
*
* Returns an open database connection, so remember to close it, for example
* with `(new DB()).install().close()`
*/
def install(): Connection = {
implicit val con = this.getConnection()
SQL(
"""CREATE TABLE IF NOT EXISTS zipkin_spans (
|  span_id BIGINT NOT NULL,
|  parent_id BIGINT,
|  trace_id BIGINT NOT NULL,
|  span_name VARCHAR(255) NOT NULL,
|  debug SMALLINT NOT NULL,
|  duration BIGINT,
|  created_ts BIGINT
|)
""".stripMargin).execute()
//SQL("CREATE INDEX trace_id ON zipkin_spans (trace_id)").execute()
SQL(
"""CREATE TABLE IF NOT EXISTS zipkin_annotations (
|  span_id BIGINT NOT NULL,
|  trace_id BIGINT NOT NULL,
|  span_name VARCHAR(255) NOT NULL,
|  service_name VARCHAR(255) NOT NULL,
|  value TEXT,
|  ipv4 INT,
|  port INT,
|  a_timestamp BIGINT NOT NULL,
|  duration BIGINT
|)
""".stripMargin).execute()
//SQL("CREATE INDEX trace_id ON zipkin_annotations (trace_id)").execute()
SQL(
"""CREATE TABLE IF NOT EXISTS zipkin_binary_annotations (
|  span_id BIGINT NOT NULL,
|  trace_id BIGINT NOT NULL,
|  span_name VARCHAR(255) NOT NULL,
|  service_name VARCHAR(255) NOT NULL,
|  annotation_key VARCHAR(255) NOT NULL,
|  annotation_value %s,
|  annotation_type_value INT NOT NULL,
|  ipv4 INT,
|  port INT
|)
""".stripMargin.format(this.getBlobType)).execute()
//SQL("CREATE INDEX trace_id ON zipkin_binary_annotations (trace_id)").execute()
con
}
// Get the column type the current database uses for BLOBs (the %s above).
private def getBlobType = dbconfig.description match {
case "PostgreSQL" => "BYTEA" /* As usual PostgreSQL has to be different */
case "MySQL" => "MEDIUMBLOB" /* MySQL has length limits, in this case 16MB */
case _ => "BLOB"
}
// (Below) Provide Anorm with the ability to handle BLOBs.
// The documentation says it can do it in 2.1.1, but it's wrong.
/**
* Attempt to convert a SQL value into a byte array.
* Returns None for non-binary values or when reading the Blob fails.
*/
private def valueToByteArrayOption(value: Any): Option[Array[Byte]] = {
value match {
case bytes: Array[Byte] => Some(bytes)
// Blob.getBytes is 1-indexed per the JDBC API.
case blob: Blob => try {
Some(blob.getBytes(1, blob.length.asInstanceOf[Int]))
}
catch {
case e: SQLException => None
}
case _ => None
}
}
/**
* Implicitly convert an Anorm row column to a byte array, failing the parse
* with TypeDoesNotMatch when the column is not binary.
*/
def rowToByteArray: Column[Array[Byte]] = {
Column.nonNull[Array[Byte]] { (value, meta) =>
val MetaDataItem(qualified, nullable, clazz) = meta
valueToByteArrayOption(value) match {
case Some(bytes) => Right(bytes)
case _ => Left(TypeDoesNotMatch("Cannot convert " + value + ":" + value.asInstanceOf[AnyRef].getClass + " to Byte Array for column " + qualified))
}
}
}
/**
* Build a RowParser for a byte array column with the given name.
*/
def bytes(columnName: String): RowParser[Array[Byte]] = {
get[Array[Byte]](columnName)(rowToByteArray)
}
}
| rajatdutta/zipkin | zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/DB.scala | Scala | apache-2.0 | 4,632 |
package app
import util.Directory._
import util.Implicits._
import util.ControlUtil._
import _root_.util.{ReferrerAuthenticator, JGitUtil, FileUtil, StringUtil}
import service._
import org.scalatra._
import java.io.File
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.lib._
import org.apache.commons.io.FileUtils
import org.eclipse.jgit.treewalk._
import org.eclipse.jgit.api.errors.RefNotFoundException
// Concrete controller wiring the viewer trait to its service dependencies
// (cake pattern: the trait declares these as self-type requirements).
class RepositoryViewerController extends RepositoryViewerControllerBase
with RepositoryService with AccountService with ReferrerAuthenticator
/**
* The repository viewer.
*/
trait RepositoryViewerControllerBase extends ControllerBase {
self: RepositoryService with AccountService with ReferrerAuthenticator =>
/**
* Returns converted HTML from Markdown for preview.
*/
post("/:owner/:repository/_preview")(referrersOnly { repository =>
// Renders the posted Markdown to an HTML fragment for client-side preview.
contentType = "text/html"
view.helpers.markdown(params("content"), repository,
params("enableWikiLink").toBoolean,
params("enableRefsLink").toBoolean)
})
/**
* Displays the file list of the repository root and the default branch.
*/
get("/:owner/:repository")(referrersOnly {
// No revision/path given: show the root of the default branch.
fileList(_)
})
/**
* Displays the file list of the specified path and branch.
*/
get("/:owner/:repository/tree/*")(referrersOnly { repository =>
// splat is "<branch-or-tag-or-commit>[/<path>]".
val (id, path) = splitPath(repository, multiParams("splat").head)
if(path.isEmpty){
fileList(repository, id)
} else {
fileList(repository, id, path)
}
})
/**
* Displays the commit list of the specified resource.
*/
get("/:owner/:repository/commits/*")(referrersOnly { repository =>
// splat is "<branch>[/<path>]"; page defaults to 1, 30 commits per page.
val (branchName, path) = splitPath(repository, multiParams("splat").head)
val page = params.getOrElse("page", "1").toInt
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
JGitUtil.getCommitLog(git, branchName, page, 30, path) match {
case Right((logs, hasNext)) =>
repo.html.commits(if(path.isEmpty) Nil else path.split("/").toList, branchName, repository,
// group consecutive commits that fall on the same (formatted) date
logs.splitWith{ (commit1, commit2) =>
view.helpers.date(commit1.time) == view.helpers.date(commit2.time)
}, page, hasNext)
case Left(_) => NotFound
}
}
})
/**
* Displays the file content of the specified branch or commit.
*/
/**
 * Displays (or, with ?raw=true, downloads) the file content of the specified
 * branch or commit. Responds NotFound when the path does not exist in the
 * requested revision.
 */
get("/:owner/:repository/blob/*")(referrersOnly { repository =>
  val (id, path) = splitPath(repository, multiParams("splat").head)
  val raw = params.get("raw").getOrElse("false").toBoolean
  using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
    val revCommit = JGitUtil.getRevCommitFromId(git, git.getRepository.resolve(id))

    // Walk the commit tree looking for the requested path. Returns None when
    // the path is absent; the original match had no `case false` branch and
    // crashed with a MatchError for any unknown path.
    @scala.annotation.tailrec
    def getPathObjectId(path: String, walk: TreeWalk): Option[ObjectId] =
      if (!walk.next) None
      else if (walk.getPathString == path) Some(walk.getObjectId(0))
      else getPathObjectId(path, walk)

    val objectIdOpt = using(new TreeWalk(git.getRepository)){ treeWalk =>
      treeWalk.addTree(revCommit.getTree)
      treeWalk.setRecursive(true)
      getPathObjectId(path, treeWalk)
    }

    objectIdOpt match {
      case None => NotFound // path not present in this revision
      case Some(objectId) if raw =>
        // Download: stream the raw bytes with a guessed content type.
        defining(JGitUtil.getContent(git, objectId, false).get){ bytes =>
          contentType = FileUtil.getContentType(path, bytes)
          bytes
        }
      case Some(objectId) =>
        // Viewer: choose between image, large-file notice, or inline text/binary.
        val large = FileUtil.isLarge(git.getRepository.getObjectDatabase.open(objectId).getSize)
        val viewer = if(FileUtil.isImage(path)) "image" else if(large) "large" else "other"
        val bytes = if(viewer == "other") JGitUtil.getContent(git, objectId, false) else None
        val content = if(viewer == "other"){
          if(bytes.isDefined && FileUtil.isText(bytes.get)){
            // text file: decode and show inline
            JGitUtil.ContentInfo("text", bytes.map(StringUtil.convertFromByteArray))
          } else {
            // binary file: show a placeholder
            JGitUtil.ContentInfo("binary", None)
          }
        } else {
          // image or large file
          JGitUtil.ContentInfo(viewer, None)
        }
        repo.html.blob(id, repository, path.split("/").toList, content, new JGitUtil.CommitInfo(revCommit))
    }
  }
})
/**
* Displays details of the specified commit.
*/
get("/:owner/:repository/commit/:id")(referrersOnly { repository =>
// Shows a single commit: metadata, containing branches/tags and its diff.
val id = params("id")
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
defining(JGitUtil.getRevCommitFromId(git, git.getRepository.resolve(id))){ revCommit =>
JGitUtil.getDiffs(git, id) match { case (diffs, oldCommitId) =>
repo.html.commit(id, new JGitUtil.CommitInfo(revCommit),
JGitUtil.getBranchesOfCommit(git, revCommit.getName),
JGitUtil.getTagsOfCommit(git, revCommit.getName),
repository, diffs, oldCommitId)
}
}
}
})
/**
* Displays branches.
*/
get("/:owner/:repository/branches")(referrersOnly { repository =>
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
// retrieve the latest commit date of each branch (tip commit's committer date)
val branchInfo = repository.branchList.map { branchName =>
val revCommit = git.log.add(git.getRepository.resolve(branchName)).setMaxCount(1).call.iterator.next
(branchName, revCommit.getCommitterIdent.getWhen)
}
repo.html.branches(branchInfo, repository)
}
})
/**
* Displays tags.
*/
get("/:owner/:repository/tags")(referrersOnly {
// Tag list comes straight from the repository info.
repo.html.tags(_)
})
/**
* Download repository contents as an archive.
*/
/**
 * Download repository contents as a ZIP archive. The URL path ends with
 * "<revision>.zip"; any other extension yields BadRequest.
 */
get("/:owner/:repository/archive/:name")(referrersOnly { repository =>
  val name = params("name")
  if(name.endsWith(".zip")){
    // BUG FIX: the regex was "\\\\.zip$" (a literal backslash followed by
    // ".zip"), which can never match here, so the extension was kept and the
    // checkout below used a bogus revision like "master.zip".
    val revision = name.replaceFirst("\\.zip$", "")
    // scratch directory, unique per HTTP session
    val workDir = getDownloadWorkDir(repository.owner, repository.name, session.getId)
    if(workDir.exists){
      FileUtils.deleteDirectory(workDir)
    }
    workDir.mkdirs
    // clone the repository and check out the requested revision
    val cloneDir = new File(workDir, revision)
    using(Git.cloneRepository
      .setURI(getRepositoryDir(repository.owner, repository.name).toURI.toString)
      .setDirectory(cloneDir)
      .setBranch(revision)
      .call){ git =>
      git.checkout.setName(revision).call
    }
    // remove .git metadata so it is not included in the archive
    FileUtils.deleteDirectory(new File(cloneDir, ".git"))
    // create the zip file (full 40-char SHA-1 revisions are abbreviated to 10)
    val zipFile = new File(workDir, (if(revision.length == 40) revision.substring(0, 10) else revision) + ".zip")
    FileUtil.createZipFile(zipFile, cloneDir)
    contentType = "application/octet-stream"
    zipFile
  } else {
    BadRequest
  }
})
get("/:owner/:repository/network/members")(referrersOnly { repository =>
// Fork network: shows the origin repository (this one if it is the origin)
// together with all repositories forked from that origin.
repo.html.forked(
getRepository(
repository.repository.originUserName.getOrElse(repository.owner),
repository.repository.originRepositoryName.getOrElse(repository.name),
baseUrl),
getForkedRepositories(
repository.repository.originUserName.getOrElse(repository.owner),
repository.repository.originRepositoryName.getOrElse(repository.name)),
repository)
})
/**
 * Splits a splat path of the form "<revision>[/<path>]" into
 * (revision, remaining path). Prefers a matching branch name, then a tag
 * name, and finally falls back to the first path segment (e.g. a commit id).
 */
private def splitPath(repository: service.RepositoryService.RepositoryInfo, path: String): (String, String) = {
  val id = repository.branchList.collectFirst {
    case branch if path == branch || path.startsWith(branch + "/") => branch
  } orElse repository.tags.collectFirst {
    case tag if path == tag.name || path.startsWith(tag.name + "/") => tag.name
  } getOrElse path.split("/")(0) // replaces the old `orElse Some(...) get` postfix chain
  (id, path.substring(id.length).replaceFirst("^/", ""))
}
/**
* Provides HTML of the file list.
*
* @param repository the repository information
* @param revstr the branch name or commit id(optional)
* @param path the directory path (optional)
* @return HTML of the file list
*/
/**
 * Provides HTML of the file list.
 *
 * @param repository the repository information
 * @param revstr the branch name or commit id (optional; default branch when empty)
 * @param path the directory path (optional; "." means the repository root)
 * @return HTML of the file list, the first-commit guide for an empty
 *         repository, or NotFound for an unknown revision
 */
private def fileList(repository: RepositoryService.RepositoryInfo, revstr: String = "", path: String = ".") = {
  if(repository.commitCount == 0){
    // repository has no commits yet: show the "getting started" guide
    repo.html.guide(repository)
  } else {
    using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
      // resolve the requested revision, falling back to the default branch
      // (the original also built an unused `revisions` Seq here — removed)
      JGitUtil.getDefaultBranch(git, repository, revstr).map { case (objectId, revision) =>
        defining(JGitUtil.getRevCommitFromId(git, objectId)){ revCommit =>
          val files = JGitUtil.getFileList(git, revision, path)
          // Render README.md if present. Reuse the already-open `git`
          // instance: the original opened a second, never-closed repository
          // handle here, leaking a file descriptor per request.
          val readme = files.find(_.name == "README.md").map { file =>
            StringUtil.convertFromByteArray(JGitUtil.getContent(git, file.id, true).get)
          }
          repo.html.files(revision, repository,
            if(path == ".") Nil else path.split("/").toList, // current path
            new JGitUtil.CommitInfo(revCommit),              // latest commit
            files, readme)
        }
      } getOrElse NotFound
    }
  }
}
} | libin/gitbucket | src/main/scala/app/RepositoryViewerController.scala | Scala | apache-2.0 | 9,279 |
object Test extends Application { // NOTE(review): resolve-test fixture — the /* line: N */ marker below is line-sensitive, so no lines may be added or removed
class X
class Y extends X
class C {
def apply(x: X, y: Y) = 1 // overload chosen for arguments (X, Y)
def apply(x: Y, y: X) = 2 // overload chosen for arguments (Y, X)
}
class A {
def foo: C = new C // parameterless foo: a.foo(args) goes through C.apply
def foo(x: X, y: X) = 3
}
val a = new A
val z = a./* line: 9 */foo(new X, new Y) // marker: expects resolution to the def on line 9 (`foo: C`)
print(z)
} | ilinum/intellij-scala | testdata/resolve2/overloading/hardOverloadings/ParameterlessFunction.scala | Scala | apache-2.0 | 279 |
package blended.mgmt.agent.internal
import akka.actor.Props
import blended.akka.{OSGIActor, OSGIActorConfig}
import blended.container.context.api.ContainerContext
import blended.util.logging.Logger
import scala.util.{Failure, Try}
/**
* Actor, that collects various container information and send's it to a remote management container.
*
* Sources of information:
*
* * [[ServiceInfo]] from the Akka event stream
* * `([[Long]], List[[[Profile]]])` from the Akka event stream
*
* Send to remote container:
*
* * [[ContainerInfo]] send via HTTP POST request
*
* Configuration:
*
* This actor reads a configuration class [[MgmtReporterConfig]] from the [[OSGIActorConfig]].
* Only if all necessary configuration are set (currently `initialUpdateDelayMsec` and `updateIntervalMsec`),
* the reporter sends information to the management container.
* The target URL of the management container is configured with the `registryUrl` config entry.
*
*/
class OsgiMgmtReporter(cfg : OSGIActorConfig) extends OSGIActor(cfg) with MgmtReporter {
import MgmtReporter._
private[this] val log = Logger[OsgiMgmtReporter]
// Parse the reporter config from the actor config. A Failure is kept (not
// thrown) so the reporter simply stays disconnected from the management
// server when the configuration is incomplete.
val config : Try[MgmtReporterConfig] = MgmtReporterConfig.fromConfig(cfg.config) match {
case f @ Failure(e) =>
log.warn(e)("Incomplete management reporter config. Disabled connection to management server.")
f
case x =>
log.info(s"Management reporter config: $x")
x
}
override protected val ctContext : ContainerContext = cfg.ctContext
}
override protected val ctContext : ContainerContext = cfg.ctContext
}
object OsgiMgmtReporter {
// Standard Akka Props factory for this actor.
def props(cfg : OSGIActorConfig) : Props = Props(new OsgiMgmtReporter(cfg))
}
| woq-blended/blended | blended.mgmt.agent/src/main/scala/blended/mgmt/agent/internal/OsgiMgmtReporter.scala | Scala | apache-2.0 | 1,617 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.examples.extra
import com.spotify.scio.avro._
import com.spotify.scio.testing._
import scala.jdk.CollectionConverters._
class AvroInOutTest extends PipelineSpec {
// Input fixture: two TestRecord rows fed to the pipeline under test.
val input: Seq[TestRecord] = Seq(
new TestRecord(1, 0L, 0f, 1000.0, false, "Alice", List[CharSequence]("a").asJava),
new TestRecord(2, 0L, 0f, 1500.0, false, "Bob", List[CharSequence]("b").asJava)
)
// Expected output: one Account per input record.
val expected: Seq[Account] =
Seq(
new Account(1, "checking", "Alice", 1000.0, AccountStatus.Active),
new Account(2, "checking", "Bob", 1500.0, AccountStatus.Active)
)
"AvroInOut" should "work" in {
// Stub the Avro input/output and assert the transformed records.
JobTest[com.spotify.scio.examples.extra.AvroInOut.type]
.args("--input=in.avro", "--output=out.avro")
.input(AvroIO[TestRecord]("in.avro"), input)
.output(AvroIO[Account]("out.avro"))(coll => coll should containInAnyOrder(expected))
.run()
}
}
| spotify/scio | scio-examples/src/test/scala/com/spotify/scio/examples/extra/AvroInOutTest.scala | Scala | apache-2.0 | 1,500 |
package ee.cone.c4actor.rdb
import ee.cone.c4actor.UniversalProp
trait CustomFieldAdapter {
// The value class this adapter handles (used as the dispatch key).
def supportedCl: Class[_]
// Serialize a value of `supportedCl` to its string form.
def encode(value: Object): String
// Rebuild a UniversalProp from the encoded string; `tag` is presumably the
// protobuf field tag — TODO confirm against UniversalProp's contract.
def toUniversalProp(tag: Int, value: String): UniversalProp
} | conecenter/c4proto | extra_lib/src/main/scala/ee/cone/c4actor/rdb/UniversalPropHandler.scala | Scala | apache-2.0 | 221 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.contrib.activity.notification
import com.amazonaws.regions.Regions
import com.amazonaws.services.sns.AmazonSNSClientBuilder
import com.amazonaws.services.sns.model.{MessageAttributeValue, PublishRequest}
import scopt.OptionParser

import scala.jdk.CollectionConverters._
import scala.util.control.NonFatal
object SendSnsMessage {

  /**
   * Command-line options for this activity.
   *
   * @param region     AWS region name (default credential/region chain when absent)
   * @param topicArn   ARN of the SNS topic to publish to
   * @param message    message body (raw text, or structured JSON when `json` is set)
   * @param subject    optional message subject
   * @param json       interpret `message` as a structured JSON message
   * @param attributes message attributes; a key may carry a type suffix ("name:Type")
   */
  case class Options(
    region: Option[String] = None,
    topicArn: Option[String] = None,
    message: Option[String] = None,
    subject: Option[String] = None,
    json: Boolean = false,
    attributes: Map[String, String] = Map.empty
  )

  /**
   * Publishes the configured message to SNS and prints the resulting
   * message id on stdout.
   *
   * @return true on success, false when publishing failed
   */
  def apply(options: Options): Boolean = try {
    // Set up the SNS client, honouring an explicit region when given.
    val snsClientBuilder: AmazonSNSClientBuilder = AmazonSNSClientBuilder.standard()
    val sns = options.region
      .map(regionName => snsClientBuilder.withRegion(Regions.fromName(regionName)))
      .getOrElse(snsClientBuilder)
      .build()

    // Create the request from the options specified.
    val request = new PublishRequest()
    options.topicArn.foreach(request.setTopicArn)
    options.message.foreach(request.setMessage)
    options.subject.foreach(request.setSubject)
    if (options.json) request.setMessageStructure("json")

    // Add the message attributes, if any. "name:Type" keys select the
    // attribute data type; a bare key defaults to "String"; keys with more
    // than one ':' are silently dropped.
    if (options.attributes.nonEmpty) {
      request.setMessageAttributes(options.attributes.flatMap { case (k, v) =>
        k.split(":").toList match {
          case key :: dataType :: Nil => Option(key -> new MessageAttributeValue().withStringValue(v).withDataType(dataType))
          case key :: Nil => Option(key -> new MessageAttributeValue().withStringValue(v).withDataType("String"))
          case _ => None
        }
      }.asJava)
    }

    // Publish the message and print the message-id for downstream tooling.
    val response = sns.publish(request)
    println(response.getMessageId)
    true
  } catch {
    // Only catch non-fatal errors: the original caught Throwable, which
    // would also swallow fatal JVM errors such as OutOfMemoryError.
    case NonFatal(e) =>
      System.err.println(s"${e.getMessage}\n")
      false
  }

  /** CLI entry point: parses arguments, publishes, exits 3 on any failure. */
  def main(args: Array[String]): Unit = {
    val parser = new OptionParser[Options](s"hyperion-notification-sns-activity") {
      override def showUsageOnError = Option(true)

      note(
        """Sends a notification message to a SNS Topic.
""".stripMargin
      )

      help("help").text("prints this usage text")

      opt[String]("region").valueName("REGION").optional().action((x, c) => c.copy(region = Option(x)))
        .text("Sets the region to REGION")

      opt[String]("topic-arn").valueName("ARN").required().action((x, c) => c.copy(topicArn = Option(x)))
        .text("Sends the message to the given topic ARN")

      opt[Unit]("json").optional().action((_, c) => c.copy(json = true))
        .text("Interprets the message TEXT as a structured JSON message")

      opt[String]("message").valueName("TEXT").required().action((x, c) => c.copy(message = Option(x)))
        .text("Sends the given TEXT as the message")

      opt[String]("subject").valueName("TEXT").optional().action((x, c) => c.copy(subject = Option(x)))
        .text("Sends the given TEXT as the subject")

      opt[Map[String, String]]("attributes").valueName("k1=v1,k2:type=v2...").action((x, c) => c.copy(attributes = x))
        .text("Sets the messages attributes")
    }

    if (!parser.parse(args, Options()).exists(apply)) {
      System.exit(3)
    }
  }
}
| realstraw/hyperion | contrib/activity/notification/src/main/scala/com/krux/hyperion/contrib/activity/notification/SendSnsMessage.scala | Scala | bsd-3-clause | 3,530 |
package breeze.stats
import org.scalatest.funsuite.AnyFunSuite
import breeze.linalg._
/**Tests for breeze.linalg.max.scala
* Test for clip is currently located in "DenseVectorTest.scala"
* @author ktakagaki
* @date 3/13/14.
*/
class histogramTest extends AnyFunSuite {
// Shared fixtures: sample data spanning [0, 5] and matching per-point weights.
val testDV = DenseVector(0.0, 0.1, 2.8, 2.9, 5)
val testWeights = DenseVector(0.5, 0.5, 1.0, 3.0, 7.0)
test("histogram returns correct values") {
// 3 equal-width bins over the data range [0, 5].
val result = hist(testDV, 3)
assert(result.hist == DenseVector(2, 2, 1))
assert(result.binEdges == DenseVector(0.0, 5.0 / 3.0, 2 * 5.0 / 3.0, 5.0))
}
test("histogram respects range argument") {
// Explicit range (0, 3): the value 5 falls outside and is not counted.
val result = hist(testDV, 3, (0.0, 3.0))
assert(result.hist == DenseVector(2, 0, 2))
assert(result.binEdges == DenseVector(0.0, 1.0, 2.0, 3.0))
}
test("histogram handles weights") {
// Weighted counts: each point contributes its weight instead of 1.
val result = hist(testDV, 3, testWeights)(hist.defaultHistBinsWeights)
assert(result.hist == DenseVector(1.0, 4.0, 7.0))
assert(result.binEdges == DenseVector(0.0, 5.0 / 3.0, 2 * 5.0 / 3.0, 5.0))
}
test("fails for empty array") {
intercept[IllegalArgumentException] {
hist(DenseVector[Int]())(hist.defaultHist)
}
}
test("negative values") {
// A histogram of shifted data must have the same shape as the original.
val v_neg = DenseVector(-4, -3, -4, 1, 1, 1, 4, 3, 4)
val h_neg = hist(v_neg, 3)
assert(h_neg.hist == DenseVector(3, 3, 3))
val v_ok = v_neg + 4
val h_ok = hist(v_ok, 3)
assert(h_ok.hist == DenseVector(3, 3, 3))
}
}
| scalanlp/breeze | math/src/test/scala/breeze/stats/histogramTest.scala | Scala | apache-2.0 | 1,463 |
package koncept.http.web.renderer.freemarker
import java.io.InputStreamReader
import java.net.URL
import freemarker.cache.TemplateLoader
class ClassPathTemplateLoader extends TemplateLoader {
// Looks the template up on the classpath under /templates/.
// getResource returns null when the resource is missing; FreeMarker's
// TemplateLoader contract treats a null source as "not found".
def findTemplateSource(name: String) : Object = {
// FileSystemLocator.resourcesLocation
// println("Looking for template /templates/" + name)
val url = getClass().getResource("/templates/" + name)
// println("url is " + url)
url
}
// NOTE(review): FreeMarker documents -1 as "last-modified unknown"; 0 means
// "modified at epoch" (templates are effectively never reloaded) — confirm intended.
def getLastModified(o: Object) : Long = {
0
}
def closeTemplateSource(o: Object) {
// nothing to release: the source is just a URL
// println("closeTemplateSource " + o)
}
// Only ever called with a source returned by findTemplateSource, hence the
// single URL case. NOTE(review): the `encoding` parameter is ignored — the
// InputStreamReader uses the platform default charset; confirm intended.
def getReader(o: Object, encoding: String) : java.io.Reader = {
return o match {
case url: URL => new InputStreamReader(url.openStream)
}
}
} | nkrul/http-router | src/main/scala/koncept/http/web/renderer/freemarker/ClassPathTemplateLoader.scala | Scala | mit | 790 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.example.stream
//# durable-event-writer
import akka.stream.scaladsl.Source
import com.rbmhtechnology.eventuate.DurableEvent
import com.rbmhtechnology.eventuate.adapter.stream.DurableEventWriter
//#
object DurableEventWriterExample extends App with DurableEventLogs {
//# durable-event-writer
// Emits three payloads, wraps each in a DurableEvent, persists them to logA
// via the writer stage, and prints (payload, localSequenceNr) pairs.
val writerId = "writer-1"
Source(List("a", "b", "c"))
.map(DurableEvent(_))
.via(DurableEventWriter(writerId, logA))
.map(event => (event.payload, event.localSequenceNr))
.runForeach(println)
// prints (on first run):
// (a,1)
// (b,2)
// (c,3)
//#
}
| RBMHTechnology/eventuate | eventuate-example-stream/src/main/scala/com/rbmhtechnology/example/stream/DurableEventWriterExample.scala | Scala | apache-2.0 | 1,281 |
import scala.util.Random
import Board._
import BoardEntry._
import BoardIndex._
import FullBorderExtras._
import BoardGenerators._
object BoardGenerators {

  /**
   * Generate a testing board with the following boundary entries (and 0:s in
   * the interior):
   *
   *      1, 2, 3, 4, 5, 6, 7, 8, 9
   *     32                       10
   *     31                       11
   *     30                       12
   *     29                       13
   *     28                       14
   *     27                       15
   *     26                       16
   *     25,24,23,22,21,20,19,18,17
   *
   * Note that the encode/decode functions in BoardEntry will not
   * decode this board into a string. However, it is useful for testing.
   */
  def genericBoard: Board = {
    val top = (2 to 8).toArray
    val left = (26 to 32).toArray.reverse
    val right = (10 to 16).toArray
    val bottom = (18 to 24).toArray.reverse
    new Board(1, top, 9, left, right, 25, bottom, 17)
  }

  /**
   * Return a list of `n` boards with random entries (from 1, .., 9) on the
   * boundary. Each side is guaranteed to be a permutation of {1, .., 9}.
   * However, the boards need not be valid (isValid need not be true), or have
   * a unique solution.
   *
   * Adjust the value of `n` for a faster/slower test suite.
   */
  def randomBoards(n: Int = 500): Iterable[Board] = {
    val perms: List[FullBorder] = (1 to 9).toArray.permutations.toList

    def randomEntry(aList: List[FullBorder]): FullBorder = {
      // `nonEmpty` replaces `length > 0`; `Random` is the imported
      // scala.util.Random (the original mixed `util.Random` and `Random`).
      assert(aList.nonEmpty)
      aList(Random.nextInt(aList.length))
    }

    // Pick the four sides so that adjacent sides agree on their shared corners.
    def randomBoard(): Board = {
      val top = randomEntry(perms)
      val right = randomEntry(perms.filter(pRight => pRight.head == top.last))
      val bottom = randomEntry(perms.filter(pBottom => (pBottom.last == right.last) &&
        (pBottom.head != top.head)))
      val left = randomEntry(perms.filter(pLeft => (pLeft.head == top.head) &&
        (pLeft.last == bottom.head)))
      Board.emptyBoard
        .withNewTopRow(top)
        .withNewRightSide(right)
        .withNewBottomRow(bottom)
        .withNewLeftSide(left)
    }

    (1 to n).map(i => randomBoard())
  }

  /** Return one random board. */
  def randomBoard(): Board = randomBoards(1).head

  /**
   * Return a list of 7x7 = 49 strings. Each string represents a random board
   * with an entry set in the interior. Together the strings cover all 49
   * interior points.
   */
  def randomBoardsWithInteriors(): Iterable[String] = {
    val b = randomBoard()

    def takenAt(i: BoardRow, j: BoardColumn): String = {
      val intDigit = 1 + Random.nextInt(9) // random digit 1, ..., 9
      def getEntry(r: BoardRow, c: BoardColumn): BoardEntry = {
        if ((i, j) == (r, c)) {
          b.entryAt(r, c) + intDigit
        } else {
          b.entryAt(r, c)
        }
      }
      def row(r: BoardRow): String = BoardIndex.columns.map(c => getEntry(r, c)).mkString("")
      BoardIndex.rows.map(r => row(r)).mkString("")
    }

    for (intRow <- 1 to 7; intCol <- 1 to 7) yield takenAt(intRow, intCol)
  }

  /**
   * The square has 8 symmetries. Transform a given board `b` into these.
   */
  def transformations(b: Board): List[Board] = {
    List(b, // identity
      b.mirrorAntiDiagonal,
      b.mirrorVertical,
      b.mirrorAntiDiagonal.mirrorVertical, // rotate clockwise 90 deg
      b.mirrorVertical.mirrorAntiDiagonal, // rotate anti-clockwise 90 deg
      b.mirrorVertical.mirrorAntiDiagonal.mirrorVertical, // mirror over diagonal
      b.mirrorAntiDiagonal.mirrorVertical.mirrorAntiDiagonal, // rotate clockwise 180 deg
      b.mirrorAntiDiagonal.mirrorVertical.mirrorAntiDiagonal.mirrorVertical // rotate anti-clockwise 180 deg
    )
  }
}
| matiasdahl/Boundary-Sudoku | src/test/scala/BoardGenerators.scala | Scala | mit | 3,776 |
package com.machinomy.bergae
import java.util.UUID
import cats.data.Xor
import com.machinomy.bergae.crypto._
import com.machinomy.bergae.storage.Storage
import io.circe._
import io.circe.generic.JsonCodec
import io.circe.syntax._
import scala.util.Try
private[bergae] object Messaging {
// Wire format for node-to-node messages: a Payload plus the sender's
// public key and signature, serialized as JSON via circe.
@JsonCodec
sealed trait Payload
// Heartbeat carrying the sender's chain height and approved tx hashes.
case class Nop(height: Long, approve: Set[Sha256Hash] = Set.empty[Sha256Hash]) extends Payload
// State update for `uuid` with an opaque operation string.
case class Update(height: Long, uuid: UUID, operation: String, approve: Set[Sha256Hash] = Set.empty[Sha256Hash]) extends Payload
object Payload
// Signed envelope; txid is the SHA-256 of the canonical JSON encoding.
case class Signed(payload: Payload, pub: ECPub, signature: ECSignature) {
lazy val txid: Sha256Hash = Digest[Sha256Hash](jsonString)
lazy val jsonString: String = toJsonString(this)
}
// Sign the payload's compact JSON bytes with `key` and wrap in an envelope.
def signed(crypto: Crypto, payload: Payload, key: ECKey): Signed = {
val payloadBytes = payload.asJson.noSpaces.getBytes
val signature = crypto.sign(payloadBytes, key)
Signed(payload, key.pub, signature)
}
def toJsonString(signed: Signed): String = {
signed.asJson.noSpaces
}
// Returns None on any parse/decode failure.
def fromJsonString(string: String): Option[Signed] = {
parser.decode[Signed](string).toOption
}
// --- circe codecs -------------------------------------------------------
implicit val encodeUUID: Encoder[UUID] = Encoder.encodeString.contramap { uuid =>
uuid.toString
}
implicit val decodeUUID: Decoder[UUID] = Decoder.decodeString.map { string =>
UUID.fromString(string)
}
// Hashes travel as hex strings.
implicit val encodedSha256Hash: Encoder[Sha256Hash] = Encoder.encodeString.contramap { hash =>
Hex.encode(hash.bytes)
}
implicit val decodeSha256Hash: Decoder[Sha256Hash] = Decoder.decodeString.map { string =>
Sha256Hash(Hex.decode(string).toArray)
}
// Signed is encoded as {"payload": ..., "pub": ..., "sig": ...}.
implicit val encodeSigned = new Encoder[Signed] {
override def apply(a: Signed): Json = {
val fields: Map[String, Json] = Map(
"payload" -> a.payload.asJson,
"pub" -> implicitly[Encoder[ECPub]].apply(a.pub),
"sig" -> implicitly[Encoder[ECSignature]].apply(a.signature)
)
Json.fromFields(fields)
}
}
implicit val decodeSigned: Decoder[Signed] = Decoder.decodeJsonObject.emapTry { jsonObject =>
val fields = jsonObject.toMap
val payloadTry: Try[Payload] = fields("payload").as[Payload].toTry
val pubTry: Try[ECPub] = fields("pub").as[String].toTry.map(s => ECPub(Base58Check.decode(s)._2))
val sigTry: Try[ECSignature] = fields("sig").as[String].toTry.map(s => ECSignature.decode(Base58Check.decode(s)._2))
for {
payload <- payloadTry
pub <- pubTry
sig <- sigTry
} yield Signed(payload, pub, sig)
}
// Keys and signatures travel as Base58Check strings with type prefixes.
implicit val encodeECPub: Encoder[ECPub] = Encoder.encodeString.contramap { pub =>
Base58Check.encode(Base58Check.Prefix.PublicKey, pub.toByteArray)
}
implicit val decodeECPub: Decoder[ECPub] = Decoder.decodeString.emap { string =>
Xor.catchNonFatal(ECPub(Base58Check.decode(string)._2)).leftMap(_ => "ECPub")
}
implicit val encodeECSignature: Encoder[ECSignature] = Encoder.encodeString.contramap { signature =>
val bytes = ECSignature.encode(signature)
Base58Check.encode(Base58Check.Prefix.Signature, bytes)
}
implicit val decodeECSignature: Decoder[ECSignature] = Decoder.decodeString.emap { string =>
Xor.catchNonFatal(ECSignature.decode(Base58Check.decode(string)._2)).leftMap(_ => "ECSignature")
}
}
| machinomy/bergae | src/main/scala/com/machinomy/bergae/Messaging.scala | Scala | apache-2.0 | 3,312 |
package io.udash.web.commons.styles.utils
import io.udash.css.CssBase
import scala.language.postfixOps
import scalacss.internal.DslBase.ToStyle
/**
* Created by malchik on 2016-03-30.
*/
object MediaQueries extends CssBase {
import dsl._
// Each helper wraps the given style properties in a width-bounded
// @media screen query; bounds come from StyleConstants.MediaQueriesBounds.
// Widths above the tablet-landscape maximum.
def desktop(properties: ToStyle*) = style(
media.screen.minWidth(StyleConstants.MediaQueriesBounds.TabletLandscapeMax + 1 px) (
properties:_*
)
)
// Widths from 1px up to the tablet-landscape maximum.
def tabletLandscape(properties: ToStyle*) = style(
media.screen.minWidth(1 px).maxWidth(StyleConstants.MediaQueriesBounds.TabletLandscapeMax px) (
properties:_*
)
)
// Widths from 1px up to the tablet-portrait maximum.
def tabletPortrait(properties: ToStyle*) = style(
media.screen.minWidth(1 px).maxWidth(StyleConstants.MediaQueriesBounds.TabletMax px) (
properties:_*
)
)
// Widths from 1px up to the phone maximum.
def phone(properties: ToStyle*) = style(
media.screen.minWidth(1 px).maxWidth(StyleConstants.MediaQueriesBounds.PhoneMax px) (
properties:_*
)
)
}
| UdashFramework/udash-guide | shared/src/main/scala/io/udash/web/commons/styles/utils/MediaQueries.scala | Scala | gpl-3.0 | 936 |
import _root_.sbtassembly.Plugin.AssemblyKeys._
import java.util.NoSuchElementException
import sbt._
import Keys._
import sbtassembly.Plugin.{MergeStrategy, PathList}
import xerial.sbt.Sonatype
object PillarBuild extends Build {
  /** Skip running tests as part of `assembly`; tests are run separately. */
  val assemblyTestSetting = test in assembly := {}
  /** Resolve duplicate classpath entries when building the fat jar. */
  val assemblyMergeStrategySetting = mergeStrategy in assembly <<= (mergeStrategy in assembly) {
    (old) => {
      case PathList("javax", "servlet", xs@_*) => MergeStrategy.first
      case "META-INF/io.netty.versions.properties" => MergeStrategy.last
      case x => old(x)
    }
  }
  /** Compile and test dependencies of the project. */
  val dependencies = Seq(
    "com.datastax.cassandra" % "cassandra-driver-core" % "3.0.0",
    "com.typesafe" % "config" % "1.0.1",
    "org.clapper" %% "argot" % "1.0.3",
    "org.mockito" % "mockito-core" % "1.9.5" % "test",
    "org.scalatest" %% "scalatest" % "2.2.0" % "test",
    "org.cassandraunit" % "cassandra-unit" % "3.0.0.1" % "test",
    "com.google.guava" % "guava" % "18.0" % "test",
    "ch.qos.logback" % "logback-classic" % "1.1.7" % "test"
  )
  /** Task key for building an RPM package of the application via fpm. */
  val rhPackage = TaskKey[File]("rh-package", "Packages the application for Red Hat Package Manager")
  // Stages the fat jar, launch scripts and resources into target/staged-package,
  // then shells out to `fpm` to build the RPM. Fails loudly if the RPM is missing.
  val rhPackageTask = rhPackage <<= (sourceDirectory, target, assembly, version) map {
    (sourceDirectory: File, targetDirectory: File, archive: File, versionId: String) =>
      val rootPath = new File(targetDirectory, "staged-package")
      val subdirectories = Map(
        "bin" -> new File(rootPath, "bin"),
        "conf" -> new File(rootPath, "conf"),
        "lib" -> new File(rootPath, "lib")
      )
      subdirectories.foreach {
        case (_, subdirectory) => IO.createDirectory(subdirectory)
      }
      IO.copyFile(archive, new File(subdirectories("lib"), "pillar.jar"))
      // Launch scripts go to bin/ and must be executable for all users.
      val bashDirectory = new File(sourceDirectory, "main/bash")
      bashDirectory.list.foreach {
        script =>
          val destination = new File(subdirectories("bin"), script)
          IO.copyFile(new File(bashDirectory, script), destination)
          destination.setExecutable(true, false)
      }
      val resourcesDirectory = new File(sourceDirectory, "main/resources")
      resourcesDirectory.list.foreach {
        resource =>
          IO.copyFile(new File(resourcesDirectory, resource), new File(subdirectories("conf"), resource))
      }
      // Use the CI pipeline counter as the package iteration when available, falling
      // back to "DEV" for local builds (getOrElse instead of exception-driven lookup).
      val iterationId = sys.env.getOrElse("GO_PIPELINE_COUNTER", "DEV")
      "fpm -f -s dir -t rpm --package %s -n pillar --version %s --iteration %s -a all --prefix /opt/pillar -C %s/staged-package/ .".format(targetDirectory.getPath, versionId, iterationId, targetDirectory.getPath).!
      val pkg = file("%s/pillar-%s-%s.noarch.rpm".format(targetDirectory.getPath, versionId, iterationId))
      if(!pkg.exists()) throw new RuntimeException("Packaging failed. Check logs for fpm output.")
      pkg
  }
  /** Root project definition, including assembly, packaging and Sonatype publishing. */
  lazy val root = Project(
    id = "pillar",
    base = file("."),
    settings = Defaults.coreDefaultSettings ++ sbtassembly.Plugin.assemblySettings ++ net.virtualvoid.sbt.graph.Plugin.graphSettings ++ Sonatype.sonatypeSettings
  ).settings(
    assemblyMergeStrategySetting,
    assemblyTestSetting,
    libraryDependencies := dependencies,
    name := "pillar",
    organization := "de.kaufhof",
    version := "3.2.0",
    homepage := Some(url("https://github.com/Galeria-Kaufhof/pillar")),
    licenses := Seq("MIT license" -> url("http://www.opensource.org/licenses/mit-license.php")),
    scalaVersion := "2.11.8",
    crossScalaVersions := Seq("2.10.6", "2.11.8"),
    rhPackageTask
  ).settings(
    // Publish snapshots and releases to the matching Sonatype OSS repository.
    publishTo := {
      val nexus = "https://oss.sonatype.org/"
      if (isSnapshot.value)
        Some("snapshots" at nexus + "content/repositories/snapshots")
      else
        Some("releases" at nexus + "service/local/staging/deploy/maven2")
    },
    // Cassandra-backed tests share external state, so run them sequentially.
    parallelExecution in Test := false,
    publishMavenStyle := true,
    publishArtifact in Test := false,
    pomIncludeRepository := { _ => false },
    pomExtra := (
      <scm>
        <url>git@github.com:Galeria-Kaufhof/pillar.git</url>
        <connection>scm:git:git@github.com:Galeria-Kaufhof/pillar.git</connection>
      </scm>
      <developers>
        <developer>
          <id>marcopriebe</id>
          <name>MarcoPriebe</name>
          <url>https://github.com/MarcoPriebe</url>
        </developer>
        <developer>
          <id>lichtsprung</id>
          <name>Robert Giacinto</name>
          <url>https://github.com/lichtsprung</url>
        </developer>
        <developer>
          <id>adelafogoros</id>
          <name>Adela Fogoros</name>
          <url>https://github.com/adelafogoros</url>
        </developer>
        <developer>
          <id>muellenborn</id>
          <name>Markus Müllenborn</name>
          <url>https://github.com/muellenborn</url>
        </developer>
      </developers>
    )
  )
}
| j-potts/pillar | project/PillarBuild.scala | Scala | mit | 4,843 |
/**
* Copyright (C) 2009-2017 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.io
import java.net.DatagramSocket
import java.net.InetSocketAddress
import com.typesafe.config.Config
import scala.collection.immutable
import akka.io.Inet.{ SoJavaFactories, SocketOption }
import akka.util.Helpers.Requiring
import akka.util.ByteString
import akka.actor._
/**
* UDP Extension for Akka’s IO layer.
*
* This extension implements the connectionless UDP protocol without
* calling `connect` on the underlying sockets, i.e. without restricting
* from whom data can be received. For “connected” UDP mode see [[UdpConnected]].
*
* For a full description of the design and philosophy behind this IO
* implementation please refer to <a href="http://doc.akka.io/">the Akka online documentation</a>.
*
* The Java API for generating UDP commands is available at [[UdpMessage]].
*/
object Udp extends ExtensionId[UdpExt] with ExtensionIdProvider {
  // ExtensionId plumbing: `Udp(system)` resolves to the single UdpExt instance.
  override def lookup = Udp
  override def createExtension(system: ExtendedActorSystem): UdpExt = new UdpExt(system)
  /**
   * Java API: retrieve the Udp extension for the given system.
   */
  override def get(system: ActorSystem): UdpExt = super.get(system)
  /**
   * The common interface for [[Command]] and [[Event]].
   */
  sealed trait Message
  /**
   * The common type of all commands supported by the UDP implementation.
   */
  trait Command extends SelectionHandler.HasFailureMessage with Message {
    def failureMessage = CommandFailed(this)
  }
  /**
   * Each [[Send]] can optionally request a positive acknowledgment to be sent
   * to the commanding actor. If such notification is not desired the [[Send#ack]]
   * must be set to an instance of this class. The token contained within can be used
   * to recognize which write failed when receiving a [[CommandFailed]] message.
   */
  case class NoAck(token: Any) extends Event
  /**
   * Default [[NoAck]] instance which is used when no acknowledgment information is
   * explicitly provided. Its “token” is `null`.
   */
  // Deliberately an object extending the case class above, so NoAck must stay non-final.
  object NoAck extends NoAck(null)
  /**
   * This message is understood by the “simple sender” which can be obtained by
   * sending the [[SimpleSender]] query to the [[UdpExt#manager]] as well as by
   * the listener actors which are created in response to [[Bind]]. It will send
   * the given payload data as one UDP datagram to the given target address. The
   * UDP actor will respond with [[CommandFailed]] if the send could not be
   * enqueued to the O/S kernel because the send buffer was full. If the given
   * `ack` is not of type [[NoAck]] the UDP actor will reply with the given
   * object as soon as the datagram has been successfully enqueued to the O/S
   * kernel.
   *
   * The sending UDP socket’s address belongs to the “simple sender” which does
   * not handle inbound datagrams and sends from an ephemeral port; therefore
   * sending using this mechanism is not suitable if replies are expected, use
   * [[Bind]] in that case.
   */
  final case class Send(payload: ByteString, target: InetSocketAddress, ack: Event) extends Command {
    require(ack != null, "ack must be non-null. Use NoAck if you don't want acks.")
    def wantsAck: Boolean = !ack.isInstanceOf[NoAck]
  }
  object Send {
    // Convenience constructor for fire-and-forget sends (no acknowledgment).
    def apply(data: ByteString, target: InetSocketAddress): Send = Send(data, target, NoAck)
  }
  /**
   * Send this message to the [[UdpExt#manager]] in order to bind to the given
   * local port (or an automatically assigned one if the port number is zero).
   * The listener actor for the newly bound port will reply with a [[Bound]]
   * message, or the manager will reply with a [[CommandFailed]] message.
   */
  final case class Bind(
    handler: ActorRef,
    localAddress: InetSocketAddress,
    options: immutable.Traversable[SocketOption] = Nil) extends Command
  /**
   * Send this message to the listener actor that previously sent a [[Bound]]
   * message in order to close the listening socket. The recipient will reply
   * with an [[Unbound]] message.
   */
  case object Unbind extends Command
  /**
   * Retrieve a reference to a “simple sender” actor of the UDP extension.
   * The newly created “simple sender” will reply with the [[SimpleSenderReady]] notification.
   *
   * The “simple sender” is a convenient service for being able to send datagrams
   * when the originating address is meaningless, i.e. when no reply is expected.
   *
   * The “simple sender” will not stop itself, you will have to send it a [[akka.actor.PoisonPill]]
   * when you want to close the socket.
   */
  // Non-final because the companion below extends it as the no-options default.
  case class SimpleSender(options: immutable.Traversable[SocketOption] = Nil) extends Command
  object SimpleSender extends SimpleSender(Nil)
  /**
   * Send this message to a listener actor (which sent a [[Bound]] message) to
   * have it stop reading datagrams from the network. If the O/S kernel’s receive
   * buffer runs full then subsequent datagrams will be silently discarded.
   * Re-enable reading from the socket using the `ResumeReading` command.
   */
  case object SuspendReading extends Command
  /**
   * This message must be sent to the listener actor to re-enable reading from
   * the socket after a `SuspendReading` command.
   */
  case object ResumeReading extends Command
  /**
   * The common type of all events emitted by the UDP implementation.
   */
  trait Event extends Message
  /**
   * When a listener actor receives a datagram from its socket it will send
   * it to the handler designated in the [[Bind]] message using this message type.
   */
  final case class Received(data: ByteString, sender: InetSocketAddress) extends Event
  /**
   * When a command fails it will be replied to with this message type,
   * wrapping the failing command object.
   */
  final case class CommandFailed(cmd: Command) extends Event
  /**
   * This message is sent by the listener actor in response to a [[Bind]] command.
   * If the address to bind to specified a port number of zero, then this message
   * can be inspected to find out which port was automatically assigned.
   */
  final case class Bound(localAddress: InetSocketAddress) extends Event
  /**
   * The “simple sender” sends this message type in response to a [[SimpleSender]] query.
   */
  sealed trait SimpleSenderReady extends Event
  case object SimpleSenderReady extends SimpleSenderReady
  /**
   * This message is sent by the listener actor in response to an `Unbind` command
   * after the socket has been closed.
   */
  sealed trait Unbound
  case object Unbound extends Unbound
  /**
   * Scala API: This object provides access to all socket options applicable to UDP sockets.
   *
   * For the Java API see [[UdpSO]].
   */
  object SO extends Inet.SoForwarders {
    /**
     * [[akka.io.Inet.SocketOption]] to set the SO_BROADCAST option
     *
     * For more information see [[java.net.DatagramSocket#setBroadcast]]
     */
    final case class Broadcast(on: Boolean) extends SocketOption {
      override def beforeDatagramBind(s: DatagramSocket): Unit = s.setBroadcast(on)
    }
  }
  /** Settings read from the `akka.io.udp` configuration section (see UdpExt). */
  private[io] class UdpSettings(_config: Config) extends SelectionHandlerSettings(_config) {
    import _config._
    // Number of selector actors; must be positive since channels are divided among them.
    val NrOfSelectors: Int = getInt("nr-of-selectors") requiring (_ > 0, "nr-of-selectors must be > 0")
    val DirectBufferSize: Int = getIntBytes("direct-buffer-size")
    val MaxDirectBufferPoolSize: Int = getInt("direct-buffer-pool-limit")
    // Maximum number of datagrams processed per pass ("receive-throughput").
    val BatchReceiveLimit: Int = getInt("receive-throughput")
    val ManagementDispatcher: String = getString("management-dispatcher")
    // -1 means unlimited; otherwise distribute the channel budget over the selectors.
    override val MaxChannelsPerSelector: Int = if (MaxChannels == -1) -1 else math.max(MaxChannels / NrOfSelectors, 1)
    // Reads a config byte-size value and ensures it fits into an Int (< 2 GiB).
    private[this] def getIntBytes(path: String): Int = {
      val size = getBytes(path)
      require(size < Int.MaxValue, s"$path must be < 2 GiB")
      size.toInt
    }
  }
}
class UdpExt(system: ExtendedActorSystem) extends IO.Extension {
  import Udp.UdpSettings
  // Settings are read from the `akka.io.udp` section of the actor system configuration.
  val settings: UdpSettings = new UdpSettings(system.settings.config.getConfig("akka.io.udp"))
  // System-level manager actor ("IO-UDP-FF") handling Bind and SimpleSender commands;
  // explicitly pinned to local deployment via Deploy.local.
  val manager: ActorRef = {
    system.systemActorOf(
      props = Props(classOf[UdpManager], this).withDeploy(Deploy.local),
      name = "IO-UDP-FF")
  }
  /**
   * Java API: retrieve the UDP manager actor’s reference.
   */
  def getManager: ActorRef = manager
  /**
   * INTERNAL API
   */
  // Shared pool of direct byte buffers, sized by the direct-buffer-size and
  // direct-buffer-pool-limit settings above.
  private[io] val bufferPool: BufferPool = new DirectByteBufferPool(settings.DirectBufferSize, settings.MaxDirectBufferPoolSize)
}
/**
* Java API: factory methods for the message types used when communicating with the Udp service.
*/
object UdpMessage {
  import Udp._
  import java.lang.{ Iterable ⇒ JIterable }
  import scala.collection.JavaConverters._
  /**
   * Each [[Udp.Send]] can optionally request a positive acknowledgment to be sent
   * to the commanding actor. If such notification is not desired the [[Udp.Send#ack]]
   * must be set to an instance of this class. The token contained within can be used
   * to recognize which write failed when receiving a [[Udp.CommandFailed]] message.
   */
  def noAck(token: AnyRef): NoAck = NoAck(token)
  /**
   * Default [[Udp.NoAck]] instance which is used when no acknowledgment information is
   * explicitly provided. Its “token” is `null`.
   */
  def noAck: NoAck = NoAck
  /**
   * This message is understood by the “simple sender” which can be obtained by
   * sending the [[Udp.SimpleSender]] query to the [[UdpExt#manager]] as well as by
   * the listener actors which are created in response to [[Udp.Bind]]. It will send
   * the given payload data as one UDP datagram to the given target address. The
   * UDP actor will respond with [[Udp.CommandFailed]] if the send could not be
   * enqueued to the O/S kernel because the send buffer was full. If the given
   * `ack` is not of type [[Udp.NoAck]] the UDP actor will reply with the given
   * object as soon as the datagram has been successfully enqueued to the O/S
   * kernel.
   *
   * The sending UDP socket’s address belongs to the “simple sender” which does
   * not handle inbound datagrams and sends from an ephemeral port; therefore
   * sending using this mechanism is not suitable if replies are expected, use
   * [[Udp.Bind]] in that case.
   */
  def send(payload: ByteString, target: InetSocketAddress, ack: Event): Command = Send(payload, target, ack)
  /**
   * The same as `send(payload, target, noAck())`.
   */
  def send(payload: ByteString, target: InetSocketAddress): Command = Send(payload, target)
  /**
   * Send this message to the [[UdpExt#manager]] in order to bind to the given
   * local port (or an automatically assigned one if the port number is zero).
   * The listener actor for the newly bound port will reply with a [[Udp.Bound]]
   * message, or the manager will reply with a [[Udp.CommandFailed]] message.
   */
  // The Java iterable is converted to the immutable Scala collection expected by Bind.
  def bind(handler: ActorRef, endpoint: InetSocketAddress, options: JIterable[SocketOption]): Command =
    Bind(handler, endpoint, options.asScala.to)
  /**
   * Bind without specifying options.
   */
  def bind(handler: ActorRef, endpoint: InetSocketAddress): Command = Bind(handler, endpoint, Nil)
  /**
   * Send this message to the listener actor that previously sent a [[Udp.Bound]]
   * message in order to close the listening socket. The recipient will reply
   * with an [[Udp.Unbound]] message.
   */
  def unbind: Command = Unbind
  /**
   * Retrieve a reference to a “simple sender” actor of the UDP extension.
   * The newly created “simple sender” will reply with the [[Udp.SimpleSenderReady]] notification.
   *
   * The “simple sender” is a convenient service for being able to send datagrams
   * when the originating address is meaningless, i.e. when no reply is expected.
   *
   * The “simple sender” will not stop itself, you will have to send it a [[akka.actor.PoisonPill]]
   * when you want to close the socket.
   */
  def simpleSender(options: JIterable[SocketOption]): Command = SimpleSender(options.asScala.to)
  /**
   * Retrieve a simple sender without specifying options.
   */
  def simpleSender: Command = SimpleSender
  /**
   * Send this message to a listener actor (which sent a [[Udp.Bound]] message) to
   * have it stop reading datagrams from the network. If the O/S kernel’s receive
   * buffer runs full then subsequent datagrams will be silently discarded.
   * Re-enable reading from the socket using the `Udp.ResumeReading` command.
   */
  def suspendReading: Command = SuspendReading
  /**
   * This message must be sent to the listener actor to re-enable reading from
   * the socket after a `Udp.SuspendReading` command.
   */
  def resumeReading: Command = ResumeReading
}
/** Java API: factory methods for UDP-specific socket options. */
object UdpSO extends SoJavaFactories {
  import Udp.SO._
  /**
   * [[akka.io.Inet.SocketOption]] to set the SO_BROADCAST option
   *
   * For more information see [[java.net.DatagramSocket#setBroadcast]]
   */
  // Explicit return type added: public Java-facing API should not rely on inference.
  def broadcast(on: Boolean): Broadcast = Broadcast(on)
}
| rorygraves/perf_tester | corpus/akka/akka-actor/src/main/scala/akka/io/Udp.scala | Scala | apache-2.0 | 13,015 |
/*
* Copyright 2006 - 2013
* Stefan Balev <stefan.balev@graphstream-project.org>
* Julien Baudry <julien.baudry@graphstream-project.org>
* Antoine Dutot <antoine.dutot@graphstream-project.org>
* Yoann Pigné <yoann.pigne@graphstream-project.org>
* Guilhelm Savin <guilhelm.savin@graphstream-project.org>
*
* This file is part of GraphStream <http://graphstream-project.org>.
*
* GraphStream is a library whose purpose is to handle static or dynamic
* graph, create them from scratch, file or any source and display them.
*
* This program is free software distributed under the terms of two licenses, the
* CeCILL-C license that fits European law, and the GNU Lesser General Public
* License. You can use, modify and/ or redistribute the software under the terms
* of the CeCILL-C license as circulated by CEA, CNRS and INRIA at the following
* URL <http://www.cecill.info> or under the terms of the GNU LGPL as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-C and LGPL licenses and that you accept their terms.
*/
package org.graphstream.ui.j2dviewer.renderer
import org.graphstream.ui.geom.Point3
import java.awt.{Graphics, Graphics2D, Font, Color, RenderingHints}
import java.awt.event.{ActionListener, ActionEvent}
import java.awt.geom.{Point2D}
import javax.swing.{JComponent, JPanel, BorderFactory, JTextField, JButton, SwingConstants, ImageIcon}
import javax.swing.border.Border
import org.graphstream.ui.graphicGraph.{GraphicElement, GraphicNode, GraphicSprite, StyleGroup}
import org.graphstream.ui.graphicGraph.stylesheet.{Values, StyleConstants}
import org.graphstream.ui.j2dviewer.{J2DGraphRenderer, Camera, Backend}
import org.graphstream.ui.util.swing.{FontCache, ImageCache}
import org.graphstream.ui.j2dviewer.renderer.shape.swing._
/**
* Renderer for nodes and sprites represented as Swing components.
*/
class JComponentRenderer(styleGroup:StyleGroup, val mainRenderer:J2DGraphRenderer) extends StyleRenderer(styleGroup) {
// Attribute
    /** The size of components. */
    protected var size:Values = null
    /** The width in PX of components. */
    protected var width:Int = 0
    /** The height in PX of components. */
    protected var height:Int = 0
    /** Association between Swing components and graph elements. */
    protected val compToElement = new scala.collection.mutable.HashMap[JComponent,ComponentElement]
    /** The potential shadow. */
    protected var shadow:SquareShape = null
    /** Anti-aliasing hint saved in setupRenderingPass and restored in endRenderingPass. */
    protected var antialiasSetting:AnyRef = null
// Command
    // Computes the component size in pixels for this pass, prepares the optional
    // shadow shape, and temporarily disables anti-aliasing.
    protected def setupRenderingPass(bck:Backend, camera:Camera, forShadow:Boolean) {
        val metrics = camera.metrics
        val g = bck.graphics2D
        size = group.getSize
        width = metrics.lengthToPx(size, 0).toInt
        // Height falls back to width when only one size value is given.
        height = if(size.size > 1) metrics.lengthToPx(size, 1).toInt else width
        if(group.getShadowMode != StyleConstants.ShadowMode.NONE)
             shadow = new SquareShape
        else shadow = null
        antialiasSetting = g.getRenderingHint( RenderingHints.KEY_ANTIALIASING )
        g.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF )
    }
    override protected def endRenderingPass(bck:Backend, camera:Camera, forShadow:Boolean) {
        // Restore the anti-aliasing hint saved in setupRenderingPass.
        bck.graphics2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, antialiasSetting)
    }
    protected def pushStyle(bck:Backend, camera:Camera, forShadow:Boolean) {
        if(shadow ne null) {
            shadow.configureForGroup(bck, group, camera)
//          shadow.configure(bck, group, camera, null)
//          shadow.size(group, camera)
        }
    }
    protected def pushDynStyle(bck:Backend, camera:Camera, element:GraphicElement) {
    }
    // Ensures the element has a Swing component, keeps the component's position
    // and label in sync, and re-applies style only when needed.
    protected def renderElement(bck:Backend, camera:Camera, element:GraphicElement) {
        val ce = getOrEquipWithJComponent(element)
        ce.setVisible(true)
        ce.updatePosition(camera)
        ce.updateLabel
        if(ce.init == false)
             checkStyle(camera, ce, true)
        else if(group.hasEventElements)
             checkStyle(camera, ce, ! hadEvents)    // hadEvents allows to know if we just
        else checkStyle(camera, ce, hadEvents)      // changed the style due to an event
    }                                               // and therefore must change the style.
    protected def renderShadow(bck:Backend, camera:Camera, element:GraphicElement) {
        if(shadow ne null) {
//          val pos = new Point2D.Double( element.getX, element.getY )
//
//          if( element.isInstanceOf[GraphicSprite] ) {
//              camera.getSpritePosition( element.asInstanceOf[GraphicSprite], pos, StyleConstants.Units.GU )
//          }
//
////            shadow.setupContents( g, camera, element, null )
//          shadow.positionAndFit( g, camera, null, element, pos.x, pos.y )
            shadow.configureForElement(bck, element, null, camera)
            shadow.renderShadow(bck, camera, element, null)
        }
    }
    protected def elementInvisible(bck:Backend, camera:Camera, element:GraphicElement) {
        getOrEquipWithJComponent(element).setVisible(false)
    }
// Utility
    // Breaks the association between the element's Swing component and its
    // ComponentElement wrapper, if any.
    def unequipElement(element:GraphicElement) {
        compToElement.get(element.getComponent.asInstanceOf[JComponent]) match {
            case e:ComponentElement => { e.detach }
            case _ => {}
        }
    }
    /**
     * Get the pair (swing component, graph element) corresponding to the given element. If the
     * element is not yet associated with a Swing component, the binding is done.
     */
    protected def getOrEquipWithJComponent(element:GraphicElement):ComponentElement = {
        import StyleConstants.JComponents._
        val component = element.getComponent.asInstanceOf[JComponent]
        var ce:ComponentElement = null
        if(component eq null) {
            // First sight of this element: create the Swing component dictated by the style.
            group.getJComponent match {
                case BUTTON     => ce = new ButtonComponentElement(element, new JButton(""))
                case TEXT_FIELD => ce = new TextFieldComponentElement(element, new JTextField(""))
                case PANEL      => throw new RuntimeException("panel not yet available")
                case _          => throw new RuntimeException("WTF ?!?")
            }
            if( ce != null )
                compToElement.put(ce.jComponent, ce)
        } else {
            ce = compToElement.get(component).get
        }
        ce
    }
    // Pushes the group style onto the component, but only when `force` is true.
    protected def checkStyle(camera:Camera, ce:ComponentElement, force:Boolean) {
        if(force) {
            ce.checkIcon(camera)
            ce.checkBorder(camera, force)
            ce.setFill
            ce.setTextAlignment
            ce.setTextFont
        }
    }
// Nested classes
    /**
     * Represents the link between a JComponent and a GraphicElement.
     *
     * Each of these component elements receive the action events of their button/text-field (for panel
     * the user is free to do whatever he wants). They are in charge of adding and removing the
     * component in the rendering surface, etc.
     *
     * These elements also allow to push and remove the style to Swing components. We try to do this
     * only when the style potentially changed, not at each redraw.
     */
    abstract class ComponentElement(val element:GraphicElement) extends JPanel {
    // Attribute
        /** Set to true if the element is not yet initialised with its style. */
        var init = false
    // Construction
        setLayout(null)    // No layout in this panel, we set the component bounds ourselves.
        mainRenderer.renderingSurface.add(this)
    // Access
        /** The Swing Component. */
        def jComponent:JComponent
        /** Set of reset the fill mode and colour for the Swing component. */
        def setFill() {
//          setBackground( group.getFillColor( 0 ) )
//          setOpaque( true )
//          if( group.getFillMode == StyleConstants.FillMode.PLAIN )
//              jComponent.setBackground( group.getFillColor( 0 ) )
        }
        /** Set or reset the text alignment for the Swing component. */
        def setTextAlignment()
        /** Set or reset the text font size, style and colour for the Swing component. */
        def setTextFont()
        /** Set or reset the label of the component. */
        def updateLabel()
        // Positions this panel and fits the inner component inside the border, if any.
        def setBounds(x:Int, y:Int, width:Int, height:Int, camera:Camera) {
            setBounds(x, y, width, height)
            var borderWidth:Int = 0
            if(group.getStrokeMode != StyleConstants.StrokeMode.NONE && group.getStrokeWidth.value > 0)
                borderWidth = camera.metrics.lengthToPx(group.getStrokeWidth).toInt
            jComponent.setBounds(borderWidth, borderWidth, width-(borderWidth*2), height-(borderWidth*2))
        }
        /**
         * Detach the Swing component from the graph element, remove the Swing component from its
         * Swing container and remove any listeners on the Swing component. The ComponentElement
         * is not usable after this.
         */
        def detach { mainRenderer.renderingSurface.remove(this) }
        /**
         * Check the swing component follows the graph element position.
         * @param camera The transformation from GU to PX.
         */
        def updatePosition(camera:Camera) {
            element match {
                case e:GraphicNode   => positionNodeComponent( element.asInstanceOf[GraphicNode], camera)
                case e:GraphicSprite => positionSpriteComponent(element.asInstanceOf[GraphicSprite], camera)
                case _ => throw new RuntimeException("WTF ?")
            }
        }
    // Custom painting
        override def paint(g:Graphics) {
            paintComponent(g)    // XXX Remove this ??? XXX
            paintBorder(g)
            paintChildren(g)
        }
    // Command -- Utility, positioning
        protected def positionNodeComponent(node:GraphicNode, camera:Camera) {
            val pos = camera.transformGuToPx(node.getX, node.getY, 0)
            setBounds((pos.x-(width/2)).toInt, (pos.y-(height/2)).toInt, width, height, camera)
        }
        protected def positionSpriteComponent( sprite:GraphicSprite, camera:Camera ) {
            val pos = camera.getSpritePosition( sprite, new Point3, StyleConstants.Units.PX)
            setBounds((pos.x-(width/2)).toInt, (pos.y-(height/2)).toInt, width, height, camera)
        }
    // Command -- Utility, applying CSS style to Swing components
        def checkBorder(camera:Camera, force:Boolean) {
            if(force) {
                if(group.getStrokeMode != StyleConstants.StrokeMode.NONE && group.getStrokeWidth().value > 0)
                     setBorder(createBorder(camera))
                else setBorder(null)
            } else {
                updateBorder(camera)
            }
        }
        protected def createBorder( camera:Camera ):Border = {
            import StyleConstants.StrokeMode._
            val width:Int = camera.metrics.lengthToPx( group.getStrokeWidth ).toInt
            group.getStrokeMode match {
                case PLAIN  => BorderFactory.createLineBorder( group.getStrokeColor( 0 ), width )
                case DOTS   => throw new RuntimeException( "TODO create dots and dashes borders for component to respect stroke-mode." );
                case DASHES => throw new RuntimeException( "TODO create dots and dashes borders for component to respect stroke-mode." );
                case _      => null
            }
        }
        protected def updateBorder( camera:Camera ) {}
        def checkIcon( camera:Camera )
    }
    // Wraps a JTextField; pressing enter copies the field text to the element label.
    class TextFieldComponentElement( element:GraphicElement, val comp:JTextField ) extends ComponentElement( element ) with ActionListener {
    // Construction
        element.setComponent( comp )
        comp.addActionListener( this )
        add( comp )
    // Command
        override def detach() {
            super.detach
            comp.removeActionListener( this )
            remove( comp )
            element.setComponent( null )
            //component = null
            //element = null
        }
        def actionPerformed( e:ActionEvent ) {
            // Propagate the edited text to the element and mark it clicked.
            element.label = comp.asInstanceOf[JTextField].getText
            element.setAttribute( "ui.label", element.label )
            element.setAttribute( "ui.clicked" )
        }
        override def jComponent:JComponent = comp
        override def setTextAlignment() {
            import StyleConstants.TextAlignment._
            group.getTextAlignment match {
                case ABOVE    => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case UNDER    => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case ALONG    => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case JUSTIFY  => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case CENTER   => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case AT_RIGHT => comp.setHorizontalAlignment( SwingConstants.RIGHT )
                case RIGHT    => comp.setHorizontalAlignment( SwingConstants.RIGHT )
                case AT_LEFT  => comp.setHorizontalAlignment( SwingConstants.LEFT )
                case LEFT     => comp.setHorizontalAlignment( SwingConstants.LEFT )
                case _        => {}
            }
        }
        override def setTextFont() {
            var font = if( ! group.getTextFont.equals( "default" ) )
                 FontCache.getFont( group.getTextFont, group.getTextStyle, group.getTextSize.value.toInt )
            else FontCache.getDefaultFont( group.getTextStyle, group.getTextSize.value.toInt )
            comp.setFont( font )
            comp.setForeground( group.getTextColor( 0 ) )
        }
        override def updateLabel() {
            // Do not overwrite the text while the user is editing the field.
            if( ! comp.hasFocus() )
                comp.setText( element.getLabel )
        }
        override def checkIcon( camera:Camera ) { /* NOP */ }
    }
    // Wraps a JButton; clicking copies the button text to the element label and
    // flags both the element and the graph as clicked.
    class ButtonComponentElement( element:GraphicElement, val comp:JButton ) extends ComponentElement( element ) with ActionListener {
    // Construction
        element.setComponent( comp )
        comp.addActionListener( this )
        add( comp)
    // Commands
        override def detach() {
            super.detach
            comp.removeActionListener( this )
            remove( comp)
            element.setComponent( null )
    //      component = null;
    //      element   = null;
        }
        def actionPerformed( e:ActionEvent ) {
            element.label = comp.getText
            element.setAttribute( "ui.label", element.label )
            element.setAttribute( "ui.clicked" )
            element.myGraph.setAttribute( "ui.clicked", element.getId )
        }
        override def jComponent:JComponent = comp
        override def setTextAlignment() {
            import StyleConstants.TextAlignment._
            group.getTextAlignment match {
                case ALONG    => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case JUSTIFY  => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case CENTER   => comp.setHorizontalAlignment( SwingConstants.CENTER )
                case AT_RIGHT => comp.setHorizontalAlignment( SwingConstants.RIGHT )
                case RIGHT    => comp.setHorizontalAlignment( SwingConstants.RIGHT )
                case AT_LEFT  => comp.setHorizontalAlignment( SwingConstants.LEFT )
                case LEFT     => comp.setHorizontalAlignment( SwingConstants.LEFT )
                case ABOVE    => comp.setVerticalAlignment( SwingConstants.TOP )
                case UNDER    => comp.setVerticalAlignment( SwingConstants.BOTTOM )
                case _        => {}
            }
        }
        override def setTextFont() {
            val font = if( ! group.getTextFont().equals( "default" ) )
                 FontCache.getFont( group.getTextFont, group.getTextStyle, group.getTextSize.value.toInt )
            else FontCache.getDefaultFont( group.getTextStyle, group.getTextSize().value.toInt )
            comp.setFont( font )
            comp.setForeground( group.getTextColor( 0 ) )
        }
        override def updateLabel() {
            val label = element.getLabel
            if( label != null )
                comp.setText( label )
        }
        // Loads the style icon, installs it on the button, and places the text
        // relative to the icon according to the icon mode.
        override def checkIcon( camera:Camera ) {
            import StyleConstants.IconMode._
            if( group.getIconMode != StyleConstants.IconMode.NONE ) {
                val url   = group.getIcon
                val image = ImageCache.loadImage( url ).get
                if( image != null ) {
                    comp.setIcon( new ImageIcon( image ) )
                    group.getIconMode match {
                        case AT_LEFT  => { comp.setHorizontalTextPosition( SwingConstants.RIGHT );  comp.setVerticalTextPosition( SwingConstants.CENTER ) }
                        case AT_RIGHT => { comp.setHorizontalTextPosition( SwingConstants.LEFT );   comp.setVerticalTextPosition( SwingConstants.CENTER ) }
                        case ABOVE    => { comp.setHorizontalTextPosition( SwingConstants.CENTER ); comp.setVerticalTextPosition( SwingConstants.BOTTOM ) }
                        case UNDER    => { comp.setHorizontalTextPosition( SwingConstants.CENTER ); comp.setVerticalTextPosition( SwingConstants.TOP ) }
                        case _        => { throw new RuntimeException( "unknown image mode" ) }
                    }
                }
            }
        }
    }
} | prismsoul/gedgraph | sources/prismsoul.genealogy.gedgraph/gs-ui/org/graphstream/ui/j2dviewer/renderer/JComponentRenderer.scala | Scala | gpl-2.0 | 16,151 |
package io.cloudslang.content.google.actions.compute.compute_engine.disks
import java.util
import com.google.api.services.compute.model.Disk
import com.hp.oo.sdk.content.annotations.{Action, Output, Param, Response}
import com.hp.oo.sdk.content.plugin.ActionMetadata.{MatchType, ResponseType}
import io.cloudslang.content.constants.OutputNames.{EXCEPTION, RETURN_CODE, RETURN_RESULT}
import io.cloudslang.content.constants.{ResponseNames, ReturnCodes}
import io.cloudslang.content.google.services.compute.compute_engine.disks.DiskService
import io.cloudslang.content.google.utils.Constants._
import io.cloudslang.content.google.utils.action.DefaultValues.{DEFAULT_PRETTY_PRINT, DEFAULT_PROXY_PORT}
import io.cloudslang.content.google.utils.action.InputNames._
import io.cloudslang.content.google.utils.action.InputUtils.verifyEmpty
import io.cloudslang.content.google.utils.action.InputValidator.{validateBoolean, validateProxyPort}
import io.cloudslang.content.google.utils.service.{GoogleAuth, HttpTransportUtils, JsonFactoryUtils}
import io.cloudslang.content.utils.BooleanUtilities.toBoolean
import io.cloudslang.content.utils.NumberUtilities.toInteger
import io.cloudslang.content.utils.OutputUtilities.{getFailureResultsMap, getSuccessResultsMap}
import org.apache.commons.lang3.StringUtils.{EMPTY, defaultIfEmpty}
/**
* Created by victor on 3/3/17.
*/
class DisksList {

  /**
   * Retrieves the list of Disk resources in a zone, rendered as a JSON array.
   *
   * @param projectId        Google Cloud project id, e.g. "example-project-a".
   * @param zone             Name of the zone holding the disks,
   *                         e.g. "us-central1-a", "us-central1-b", "us-central1-c".
   * @param accessToken      Token from the GetAccessToken operation, with at least the
   *                         "https://www.googleapis.com/auth/compute.readonly" scope.
   * @param filter           Optional - filter expression "field_name comparison_string literal_string",
   *                         where comparison_string is eq or ne; string literals are RE2 regular
   *                         expressions that must match the entire field. Nested fields and multiple
   *                         parenthesised expressions (combined with AND) are supported.
   * @param orderBy          Optional - sort order. Defaults to alphanumerical order by resource name;
   *                         "creationTimestamp desc" returns newest results first. Only name and
   *                         creationTimestamp desc are supported.
   * @param proxyHost        Optional - proxy host used to reach the Google Cloud API; empty means no proxy.
   * @param proxyPortInp     Optional - proxy port. Default: "8080".
   * @param proxyUsername    Optional - proxy user name.
   * @param proxyPasswordInp Optional - proxy password associated with <proxyUsername>.
   * @param prettyPrintInp   Optional - whether to pretty print the resulting json
   *                         ("true"/"false"). Default: "true".
   * @return a map whose returnResult entry holds the JSON array of Disk resources
   */
  @Action(name = "List Disks",
    outputs = Array(
      new Output(RETURN_CODE),
      new Output(RETURN_RESULT),
      new Output(EXCEPTION)
    ),
    responses = Array(
      new Response(text = ResponseNames.SUCCESS, field = RETURN_CODE, value = ReturnCodes.SUCCESS, matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.RESOLVED),
      new Response(text = ResponseNames.FAILURE, field = RETURN_CODE, value = ReturnCodes.FAILURE, matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.ERROR, isOnFail = true)
    )
  )
  def execute(@Param(value = PROJECT_ID, required = true) projectId: String,
              @Param(value = ZONE, required = true) zone: String,
              @Param(value = ACCESS_TOKEN, required = true, encrypted = true) accessToken: String,
              @Param(value = FILTER) filter: String,
              @Param(value = ORDER_BY) orderBy: String,
              @Param(value = PROXY_HOST) proxyHost: String,
              @Param(value = PROXY_PORT) proxyPortInp: String,
              @Param(value = PROXY_USERNAME) proxyUsername: String,
              @Param(value = PROXY_PASSWORD, encrypted = true) proxyPasswordInp: String,
              @Param(value = PRETTY_PRINT) prettyPrintInp: String): util.Map[String, String] = {

    // Normalise optional inputs before validating them.
    val maybeProxyHost = verifyEmpty(proxyHost)
    val maybeProxyUsername = verifyEmpty(proxyUsername)
    val maybeFilter = verifyEmpty(filter)
    val maybeOrderBy = verifyEmpty(orderBy)
    val proxyPortValue = defaultIfEmpty(proxyPortInp, DEFAULT_PROXY_PORT)
    val proxyPasswordValue = defaultIfEmpty(proxyPasswordInp, EMPTY)
    val prettyPrintValue = defaultIfEmpty(prettyPrintInp, DEFAULT_PRETTY_PRINT)

    // Collect every validation failure so they can all be reported at once.
    val validationErrors = validateProxyPort(proxyPortValue) ++
      validateBoolean(prettyPrintValue, PRETTY_PRINT)

    if (validationErrors.nonEmpty) getFailureResultsMap(validationErrors.mkString(NEW_LINE))
    else {
      val proxyPort = toInteger(proxyPortValue)
      val prettyPrint = toBoolean(prettyPrintValue)
      try {
        val httpTransport = HttpTransportUtils.getNetHttpTransport(maybeProxyHost, proxyPort, maybeProxyUsername, proxyPasswordValue)
        val jsonFactory = JsonFactoryUtils.getDefaultJacksonFactory
        val credential = GoogleAuth.fromAccessToken(accessToken)

        // Join the per-disk JSON fragments into one bracketed array; pretty
        // printing also puts each entry on its own line.
        val delimiter = if (prettyPrint) COMMA_NEW_LINE else COMMA
        val disksJson = DiskService.list(httpTransport, jsonFactory, credential, projectId, zone, maybeFilter, maybeOrderBy)
          .map((disk: Disk) => if (prettyPrint) disk.toPrettyString else disk.toString)
          .mkString(SQR_LEFT_BRACKET, delimiter, SQR_RIGHT_BRACKET)
        getSuccessResultsMap(disksJson)
      } catch {
        // Preserved behaviour: every failure (fatal or not) becomes a failure result map.
        case e: Throwable => getFailureResultsMap(e)
      }
    }
  }
}
| victorursan/cs-actions | cs-google/src/main/scala/io/cloudslang/content/google/actions/compute/compute_engine/disks/DisksList.scala | Scala | apache-2.0 | 8,060 |
package edu.gemini.qv.plugin.ui
import java.awt.Color
import javax.swing.{UIManager, BorderFactory, Icon, ImageIcon}
import scala.Some
import scala.swing.Dialog._
import scala.swing._
/**
* A collection of silly little UI related tools, definitions and thingies that did not fit anywhere else.
* Parts of the stuff in here should probably live in places like edu.gemini.shared.gui and could be
* migrated there at some point.
*/
object QvGui {

  // =====
  // ICONS
  // =====

  // Loads an icon bundled with this plugin. The resource path must exist on the
  // classpath; getResource returns null for a missing path, which would make the
  // ImageIcon constructor fail — keep these paths in sync with the resources folder.
  private def load(name: String): ImageIcon =
    new ImageIcon(this.getClass.getResource(name))

  val ErrorIcon = load("/resources/images/error_tsk.gif")
  val InfoIcon = load("/resources/images/info_tsk.gif")
  val DownIcon = load("/resources/images/DownArrow16.gif")
  val UpIcon = load("/resources/images/UpArrow16.gif")
  val AddIcon = load("/resources/images/add.gif")
  val DelIcon = load("/resources/images/remove.gif")
  val EditIcon = load("/resources/images/edit.gif")
  val CheckIcon = load("/resources/images/check.png")
  val Spinner16Icon = load("/resources/images/spinner16.gif")
  val CalendarIcon = load("/resources/images/dates.gif")
  val QvIcon = load("/resources/images/qvicon.png")

  // =====
  // COLORS
  // =====

  // Some QV specific colors; these colors are used in different places in order to keep
  // the number of different colors used in the UI low and somewhat streamlined.
  // Text reuses the look-and-feel's label foreground so QV matches the surrounding OT UI.
  val Text = UIManager.getDefaults.get("Label.foreground").asInstanceOf[java.awt.Color] // default label foreground of the active look-and-feel
  val Green = new Color(51, 160, 44)
  val Blue = new Color(31,120,180)
  val Red = new Color(227,26,28)
  val MoonColor = new Color(225,225,225)
  // Note: The colors below are copies of colors defined in OtColor in jsky.app.ot.util.
  // Repeated here to avoid a dependency on the jsky.app.ot.util bundle just for colors;
  // they should eventually live in edu.gemini.shared.gui to be reusable.
  val LightOrange = new Color(255, 225, 172)
  val VeryLightGray = new Color(247, 243, 239)
  val DarkGreen = new Color(51, 102, 51)

  // =====
  // ACTION BUTTONS
  // =====

  // Shortcuts to create non-focusable buttons with label, icon, tooltip and a simple action.
  // Depending on the executable function that is passed along, the button itself will be
  // passed into the executable function; this is useful if we have to show a dialog and
  // need the button as a position reference.
  object ActionButton {

    // action buttons with callback without parameters
    def apply(label: String, tip: String, executable: () => Unit) =
      new ActionButton(label, tip, Some(executable), None, None)
    def apply(label: String, tip: String, executable: () => Unit, pic: Icon) =
      new ActionButton(label, tip, Some(executable), None, Some(pic))

    // action buttons with callback that expects the button as parameter
    def apply(label: String, tip: String, executable: (Button) => Unit) =
      new ActionButton(label, tip, None, Some(executable), None)
    def apply(label: String, tip: String, executable: (Button) => Unit, pic: Icon) =
      new ActionButton(label, tip, None, Some(executable), Some(pic))
  }

  sealed class ActionButton(label: String, tip: String, executable: Option[() => Unit], executableWithBtn: Option[(Button) => Unit], pic: Option[Icon] = None)
    extends AbstractActionButton(label, tip, executable, executableWithBtn, pic)

  // Base button: non-focusable, and on click invokes whichever of the two callback
  // styles was supplied (parameterless, or taking the button itself).
  abstract class AbstractActionButton(label: String, tip: String, executable: Option[() => Unit], executableWithBtn: Option[(Button) => Unit], pic: Option[Icon])
    extends Button() {
    focusable = false
    action = new Action(label) {
      toolTip = tip
      pic.foreach(icon = _)
      def apply(): Unit = {
        executable.foreach(_()) // execute function without param (if available)
        executableWithBtn.foreach(_(AbstractActionButton.this)) // execute function with param (if available)
      }
    }
  }

  // =====
  // DIALOGS
  // =====

  def showInfo(title: String, message: String) = showMessage(Message.Info, title, message)
  def showWarning(title: String, message: String) = showMessage(Message.Warning, title, message)
  def showError(title: String, message: String) = showMessage(Message.Error, title, message)
  def showError(title: String, message: String, t: Throwable) = showMessage(Message.Error, title, s"$message\n${t.getMessage}")

  // Dialogs are sometimes brought up to indicate errors caused by background worker tasks,
  // therefore we make sure here that they are always executed on the Swing event thread.
  def showMessage(messageType: Message.Value, title: String, message: String) =
    Swing.onEDT {
      Dialog.showMessage(
        messageType=messageType,
        title=title,
        message=message)
    }

  /**
   * Shows a simple busy dialog to give some feedback to the users in case they start an operation which does not
   * immediately provide a visual feedback. This helps for example to avoid users double clicking observations wildly
   * if opening an observation in the OT does not happen immediately.
   * Creation of the dialog is done on the EDT just to be sure (in case this is ever used in a future).
   * Use the helper method done() to close this dialog on the EDT.
   * @param title   window title of the busy dialog
   * @param message message shown next to the spinner icon
   * @return        the open dialog; call done() on it when the operation finishes
   */
  def showBusy(title: String, message: String): BusyDialog = {
    val dialog = new BusyDialog(title, message)
    Swing.onEDT {
      dialog.centerOnScreen()
      dialog.open()
    }
    dialog
  }

  /**
   * Simple non-modal dialog that shows a message and a spinner icon to provide a visual
   * feedback that something is happening in the background.
   */
  class BusyDialog(label: String, message: String) extends Dialog {
    modal = false
    title = label
    contents = new Label(message) {
      border = BorderFactory.createEmptyBorder(10, 15, 10, 15)
      icon = Spinner16Icon
    }

    /**
     * Done will dispose of the dialog on the EDT.
     * If you use close() or dispose() on the dialog directly, make sure it is done on the EDT.
     */
    def done(): Unit = Swing.onEDT { dispose() }
  }

  // Non-editable, transparent text areas used for explanatory text in forms.
  class Instructions extends TextArea {
    opaque = false
    editable = false
    foreground = Color.DARK_GRAY
  }
}
| spakzad/ocs | bundle/edu.gemini.qv.plugin/src/main/scala/edu/gemini/qv/plugin/ui/QvGui.scala | Scala | bsd-3-clause | 6,372 |
package swe.origin.opc
import java.io._
import java.net._
import java.security.cert._
import java.util._
import java.util.concurrent._
import scala.actors._
import scala.util.Random
import org.apache.commons.logging._
import org.apache.log4j._
import org.opcfoundation.ua.builtintypes._
import org.opcfoundation.ua.common._
import org.opcfoundation.ua.core._
import org.opcfoundation.ua.transport.security._
import org.opcfoundation.ua.utils._
import com.prosysopc.ua._
import com.prosysopc.ua.PkiFileBasedCertificateValidator
import com.prosysopc.ua.PkiFileBasedCertificateValidator._
import com.prosysopc.ua.UaApplication._
import com.prosysopc.ua.server._
import com.prosysopc.ua.server.nodes._
import com.prosysopc.ua.server.nodes.opcua._
/**
 * Experimental OPC UA server: configures security policies, builds a small sample
 * address space (a boolean switch and an integer) and randomises their values
 * every few seconds via a background actor.
 *
 * NOTE: certificate and user validation accept everything — suitable for
 * experiments only, never for production.
 */
object OPCServer extends App {

  private val log: Log = LogFactory.getLog(this.getClass)

  private val server = new UaServer
  private val validator = new PkiFileBasedCertificateValidator
  private val appDescription = createAppDescription
  private val identity = createAppIdentity(appDescription)
  private val port = 52520
  private val serverName = "OPCServer"
  private val url = "github.com/flosse/semanticExperiments"
  private val addressSpaceName = "SampleAddressSpace"

  log.debug("setup server ...")
  setupServer
  log.debug("init server ...")
  initServer
  log.debug("setup address space server ...")
  setupAddressSpace
  log.debug("start server ...")
  startServer
  log.debug("... server started")

  // Builds the OPC UA application description advertised to clients.
  private def createAppDescription = {
    val appDescription = new ApplicationDescription
    appDescription.setApplicationName(new LocalizedText(serverName, Locale.ENGLISH))
    appDescription.setApplicationUri("urn:localhost:UA:" + serverName)
    appDescription.setProductUri("urn:" + url + ":UA:" + serverName)
    appDescription.setApplicationType(ApplicationType.Server)
    appDescription
  }

  // Loads (or creates on first run) the server certificate under the validator's base dir.
  private def createAppIdentity(appDescription: ApplicationDescription) =
    ApplicationIdentity
      .loadOrCreateCertificate(
        appDescription,
        "Semantic Experiments",
        null,
        new File(validator.getBaseDir, "private"),
        true
      )

  private def setupServer {
    server.setApplicationIdentity(identity)
    server.setPort(port)
    server.setUseLocalhost(true)
    server.setServerName(serverName)
    server.setUseAllIpAddresses(true)
    server.setSecurityModes(SecurityMode.ALL)
    server.setCertificateValidator(validator)
    addUserTokenPolicies
    addValidationListener
    addUserValidator
  }

  def addUserTokenPolicies {
    server.addUserTokenPolicy(UserTokenPolicy.ANONYMOUS)
    server.addUserTokenPolicy(UserTokenPolicy.SECURE_USERNAME_PASSWORD)
    server.addUserTokenPolicy(UserTokenPolicy.SECURE_CERTIFICATE)
  }

  def addValidationListener {
    // WARNING: accepts every client certificate permanently, regardless of checks.
    validator.setValidationListener(new CertificateValidationListener() {
      def onValidate(
        certificate: Cert,
        applicationDescription: ApplicationDescription,
        passedChecks: EnumSet[CertificateCheck]
      ) = ValidationResult.AcceptPermanently
    })
  }

  def addUserValidator {
    // WARNING: accepts any user identity without checking credentials.
    server.setUserValidator(new UserValidator() {
      def onValidate(session: Session, userIdentity: UserIdentity) = true
    })
  }

  private def initServer = server.init

  private def setupAddressSpace {
    val myNodeManager = new NodeManagerUaNode(
      server, "http://" + url + "/" + addressSpaceName)
    val ns = myNodeManager.getNamespaceIndex

    // `val` instead of the original `var`s: none of these references are ever reassigned.
    val mySwitch = new PlainVariable[Boolean](myNodeManager, new NodeId(ns, "MySwitch"), "MySwitch", Locale.ENGLISH)
    mySwitch.setCurrentValue(false)
    myNodeManager.addNode(mySwitch)

    val number = new PlainVariable[Int](myNodeManager, new NodeId(ns, "MyNumber"), "MyNumber", Locale.ENGLISH)
    number.setCurrentValue(13)
    myNodeManager.addNode(number)

    // Background simulator: randomises both variables every 6 seconds.
    val simulator = new Actor {
      def act { loop {
        mySwitch.setCurrentValue(Random.nextBoolean)
        number.setCurrentValue(Random.nextInt)
        Thread.sleep(6000)
      } }
    }
    simulator.start
  }

  // Placeholder — an object folder for the sample nodes was never implemented.
  private def addObjectFolder {
  }

  private def startServer = server.start
}
| flosse/semanticExperiments | origins/opc/src/main/scala/OPCServer.scala | Scala | gpl-3.0 | 4,197 |
package org.atnos.eff
package syntax.addon.scalaz
import scalaz._
// Importable bundle: the base `safe` syntax plus the scalaz disjunction variants below.
object safe extends org.atnos.eff.syntax.safe with safe

// Enriches any Eff value with scalaz-flavoured Safe-effect operations.
trait safe {

  implicit def toSafeEffectScalazOps[R, A](e: Eff[R, A]): SafeEffectScalazOps[R, A] = new SafeEffectScalazOps[R, A](e)
}
// Value-class wrapper: each method delegates to the matching function in
// `addon.scalaz.safe`, exposing results as scalaz disjunctions (`\/`) instead
// of the cats-style variants of the base syntax.
final class SafeEffectScalazOps[R, A](val e: Eff[R, A]) extends AnyVal {

  // Runs the Safe effect out of the stack; the List[Throwable] carries additional
  // failures collected alongside the main result (see addon.scalaz.safe).
  def runSafeDisjunction[U](implicit m: Member.Aux[Safe, R, U]): Eff[U, (Throwable \/ A, List[Throwable])] =
    addon.scalaz.safe.runSafeDisjunction(e)

  // Like runSafeDisjunction but keeps only the main disjunction result.
  def execSafeDisjunction[U](implicit m: Member.Aux[Safe, R, U]): Eff[U, Throwable \/ A] =
    addon.scalaz.safe.execSafeDisjunction(e)

  // In-stack variant: the Safe effect stays in R.
  def attemptSafeDisjunction(implicit m: Safe /= R): Eff[R, (Throwable \/ A, List[Throwable])] =
    addon.scalaz.safe.attemptSafeDisjunction(e)

  // In-stack variant keeping only the disjunction result.
  def attemptDisjunction(implicit member: MemberInOut[Safe, R]): Eff[R, Throwable \/ A] =
    addon.scalaz.safe.attemptDisjunction(e)
}
| etorreborre/eff | scalaz/src/main/scala/org/atnos/eff/syntax/addon/scalaz/safe.scala | Scala | mit | 910 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.dsl
import algolia.AlgoliaDsl._
import algolia.AlgoliaTest
import algolia.http.{HttpPayload, POST, PUT}
// Verifies that the `index into ... object(s)` DSL builds the expected HTTP payloads:
// POST /1/indexes/{index} when no objectID is given, PUT /1/indexes/{index}/{id} when
// one is, and POST /1/indexes/{index}/batch (addObject vs updateObject) for sequences.
class SaveObjectTest extends AlgoliaTest {

  // Fixture without an explicit objectID (server assigns one → addObject).
  case class BasicObject(name: String, age: Int)

  // Fixture carrying its objectID (client-chosen id → updateObject).
  case class BasicObjectWithObjectID(name: String, age: Int, objectID: String)

  describe("index") {

    describe("without objectId") {

      it("should index case class") {
        index into "toto" `object` BasicObject("algolia", 2)
      }

      it("should call API") {
        (index into "toto" `object` BasicObject("algolia", 2))
          .build() should be(
          HttpPayload(
            POST,
            List("1", "indexes", "toto"),
            body = Some("""{"name":"algolia","age":2}"""),
            isSearch = false,
            requestOptions = None
          )
        )
      }

      describe("batch") {

        it("should index case classes") {
          index into "toto" objects Seq(BasicObject("algolia", 2))
        }

        it("without objectID should call API ") {
          (index into "toto" objects Seq(BasicObject("algolia", 2)))
            .build() should be(
            HttpPayload(
              POST,
              List("1", "indexes", "toto", "batch"),
              body = Some(
                """{"requests":[{"body":{"name":"algolia","age":2},"action":"addObject"}]}"""
              ),
              isSearch = false,
              requestOptions = None
            )
          )
        }

        it("with objectID should call API ") {
          (index into "toto" objects Seq(
            BasicObjectWithObjectID("algolia", 2, "id")
          )).build() should be(
            HttpPayload(
              POST,
              List("1", "indexes", "toto", "batch"),
              body = Some(
                """{"requests":[{"body":{"name":"algolia","age":2,"objectID":"id"},"action":"updateObject"}]}"""
              ),
              isSearch = false,
              requestOptions = None
            )
          )
        }
      }
    }

    describe("with objectId") {

      it("should index case class") {
        index into "toto" objectId "1" `object` BasicObject("algolia", 2)
      }

      it("should index case class with id") {
        index into "toto" `object` ("1", BasicObject("algolia", 2))
      }

      it("should call API") {
        (index into "toto" objectId "1" `object` BasicObject("algolia", 2))
          .build() should be(
          HttpPayload(
            PUT,
            List("1", "indexes", "toto", "1"),
            body = Some("""{"name":"algolia","age":2}"""),
            isSearch = false,
            requestOptions = None
          )
        )
      }

      describe("batch") {

        it("should index case classes") {
          index into "toto" objects Map("1" -> BasicObject("algolia", 2))
        }

        it("should call API") {
          (index into "toto" objects Map("1" -> BasicObject("algolia", 2)))
            .build() should be(
            HttpPayload(
              POST,
              List("1", "indexes", "toto", "batch"),
              body = Some(
                """{"requests":[{"body":{"objectID":"1","name":"algolia","age":2},"action":"updateObject"}]}"""
              ),
              isSearch = false,
              requestOptions = None
            )
          )
        }
      }
    }
  }
}
| algolia/algoliasearch-client-scala | src/test/scala/algolia/dsl/SaveObjectTest.scala | Scala | mit | 4,506 |
package example
/**
* @author Kai Han
*/
/** Immutable student record used by the serialization examples. */
case class ScalaStudent(id: Long, name: String, age: Int, sex: Short, rank: Int)
| hank-whu/common4s | src/test/scala/example/ScalaStudent.scala | Scala | apache-2.0 | 130 |
package com.twitter.finagle.http.param
import com.twitter.conversions.StorageUnitOps._
import com.twitter.finagle.Stack
import com.twitter.util.StorageUnit
/**
 * Automatically send 100-CONTINUE responses to requests which set
 * the 'Expect: 100-Continue' header. See longer note on
 * `com.twitter.finagle.Http.Server#withNoAutomaticContinue`
 */
case class AutomaticContinue(enabled: Boolean)

object AutomaticContinue {
  // Enabled by default: continues are sent automatically unless turned off.
  implicit val automaticContinue: Stack.Param[AutomaticContinue] =
    Stack.Param(AutomaticContinue(true))
}
/**
 * The maximum size of all headers, combined.
 */
case class MaxHeaderSize(size: StorageUnit)

object MaxHeaderSize {
  // Default cap: 8 kilobytes of aggregate header data.
  implicit val maxHeaderSizeParam: Stack.Param[MaxHeaderSize] =
    Stack.Param(MaxHeaderSize(8.kilobytes))
}
/**
 * The maximum size of the initial line.
 */
case class MaxInitialLineSize(size: StorageUnit)

object MaxInitialLineSize {
  // Default cap: 4 kilobytes.
  implicit val maxInitialLineSizeParam: Stack.Param[MaxInitialLineSize] =
    Stack.Param(MaxInitialLineSize(4.kilobytes))
}
/**
 * The maximum size of an inbound HTTP request that this
 * Finagle server can receive from a client.
 *
 * @note This param only applies to Finagle HTTP servers,
 *       and not to Finagle HTTP clients.
 */
case class MaxRequestSize(size: StorageUnit) {
  // Rejects configurations of 2 GB or larger up front, at construction time.
  require(size < 2.gigabytes, s"MaxRequestSize should be less than 2 Gb, but was $size")
}

object MaxRequestSize {
  // Default limit: 5 megabytes per request.
  implicit val maxRequestSizeParam: Stack.Param[MaxRequestSize] =
    Stack.Param(MaxRequestSize(5.megabytes))
}
/**
 * The maximum size of an inbound HTTP response that this
 * Finagle client can receive from a server.
 *
 * @note This param only applies to Finagle HTTP clients,
 *       and not to Finagle HTTP servers.
 */
case class MaxResponseSize(size: StorageUnit) {
  // Rejects configurations of 2 GB or larger up front, at construction time.
  require(size < 2.gigabytes, s"MaxResponseSize should be less than 2 Gb, but was $size")
}

object MaxResponseSize {
  // Default limit: 5 megabytes per response.
  implicit val maxResponseSizeParam: Stack.Param[MaxResponseSize] =
    Stack.Param(MaxResponseSize(5.megabytes))
}
// Whether HTTP message bodies are delivered as streams (Enabled) or fully
// buffered (Disabled). Construct via the companion's apply methods only —
// the constructor is private.
sealed abstract class Streaming private {
  def enabled: Boolean
  final def disabled: Boolean = !enabled
}

object Streaming {
  private[finagle] case object Disabled extends Streaming {
    def enabled: Boolean = false
  }

  // NOTE(review): the field name suggests that messages with a known fixed
  // length up to this size are buffered rather than streamed — confirm against
  // the Finagle HTTP streaming docs before relying on this.
  private[finagle] final case class Enabled(fixedLengthStreamedAfter: StorageUnit)
      extends Streaming {
    def enabled: Boolean = true
  }

  // Streaming is off unless explicitly configured.
  implicit val streamingParam: Stack.Param[Streaming] =
    Stack.Param(Disabled)

  // Boolean form: enabling picks a 5 MB threshold.
  def apply(enabled: Boolean): Streaming =
    if (enabled) Enabled(5.megabytes)
    else Disabled

  def apply(fixedLengthStreamedAfter: StorageUnit): Streaming =
    Enabled(fixedLengthStreamedAfter)
}
// Standalone counterpart of Streaming.Enabled's threshold; defaults to the same 5 MB.
case class FixedLengthStreamedAfter(size: StorageUnit)

object FixedLengthStreamedAfter {
  implicit val fixedLengthStreamedAfter: Stack.Param[FixedLengthStreamedAfter] =
    Stack.Param(FixedLengthStreamedAfter(5.megabytes))
}
/** Whether transparent decompression is applied; enabled by default. */
case class Decompression(enabled: Boolean)

// FIX: dropped the stray `extends` that preceded the bare template body. It was
// legal but vestigial early-definition syntax (removed in Scala 3) and
// inconsistent with every other companion object in this file.
object Decompression {
  implicit val decompressionParam: Stack.Param[Decompression] =
    Stack.Param(Decompression(enabled = true))
}
case class CompressionLevel(level: Int)

object CompressionLevel {
  // NOTE(review): default is -1, which conventionally lets the underlying codec
  // pick its own compression level — confirm against the transport implementation.
  implicit val compressionLevelParam: Stack.Param[CompressionLevel] =
    Stack.Param(CompressionLevel(-1))
}
| luciferous/finagle | finagle-base-http/src/main/scala/com/twitter/finagle/http/param/params.scala | Scala | apache-2.0 | 3,235 |
package com.twitter.finagle.tracing
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
// Covers SpanId's hex round-tripping: parsing signed 64-bit values from hex,
// rejecting non-hex input, zero-padded formatting, and value-based equality.
@RunWith(classOf[JUnitRunner])
class SpanIdTest extends FunSuite {

  test("parse positive long") {
    assert(SpanId.fromString("7fffffffffffffff").get.toLong === Long.MaxValue)
  }

  test("parse negative long") {
    // 0x8000000000000000 wraps to Long.MinValue when interpreted as signed.
    assert(SpanId.fromString("8000000000000000").get.toLong === Long.MinValue)
  }

  test("create a span with the ID 123 from hex '7b'") {
    assert(SpanId.fromString("7b").get.toLong === 123L)
  }

  test("return None if string is not valid hex") {
    assert(SpanId.fromString("rofl") === None)
  }

  test("represent a span with the ID 123 as the hex '000000000000007b'") {
    assert(SpanId(123L).toString === "000000000000007b") // padded for lexical ordering
  }

  test("be equal if the underlying value is equal") {
    val a = SpanId(1234L)
    val b = SpanId(1234L)
    assert(a === b)
  }
}
| travisbrown/finagle | finagle-core/src/test/scala/com/twitter/finagle/tracing/SpanIdTest.scala | Scala | apache-2.0 | 959 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.scenarios
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.apache.usergrid.datagenerators.FeederGenerator
import org.apache.usergrid.helpers.Headers
import org.apache.usergrid.settings.Settings
/**
 * Gatling scenarios exercising Usergrid entity connections: user "likes",
 * user-to-device associations and generic entity-to-entity connections.
 */
object ConnectionScenarios {

  // Creates a "likes" connection between two users from the session.
  val postUserConnection = exec(
    http("POST connection")
      .post("/users/${user1}/likes/users/${user2}")
      .headers(Headers.authToken)
      .check(status.is(200))
  )

  // Associates the session's user with one of its devices.
  val postUserToDeviceConnection = exec(
    http("Connect user with device")
      .post("/users/${username}/devices/${deviceId}")
      .headers(Headers.authToken)
      .check(status.is(200))
  )

  // Generic entity-to-entity connection.
  // FIX: the request label was copy-pasted as "Connect user with device", making
  // this request indistinguishable from the one above in Gatling reports.
  // NOTE(review): both path segments use ${entityId}, connecting an entity to
  // itself — presumably the target should be a different entity id; confirm
  // against the feeder populating the session.
  val postConnection = exec(
    http("POST entity connection")
      .post("/${connectionName}/${entityId}/${connectionType}/${entityId}")
      .headers(Headers.authToken)
      .check(status.is(200))
  )

  val entityNameFeeder = FeederGenerator.generateEntityNameFeeder("device", Settings.numEntities)

  // Per user: authenticate, look the user up, then twice create a device and
  // connect the user to it.
  val createScenario = scenario("Create Connections")
    .feed(Settings.getUserFeeder)
    .exec(TokenScenarios.getUserToken)
    .exec(UserScenarios.getUserByUsername)
    .repeat(2) {
      feed(entityNameFeeder)
        .exec(DeviceScenarios.postDeviceWithNotifier)
        .exec(ConnectionScenarios.postUserToDeviceConnection)
    }
    .exec(session => {
      // print the Session for debugging, don't do that on real Simulations
      println(session)
      session
    })
    .exec( )
}
| mdunker/usergrid | tests/performance/src/main/scala/org/apache/usergrid/scenarios/ConnectionScenarios.scala | Scala | apache-2.0 | 2,280 |
package lara.epfl.scalasca.rules
import lara.epfl.scalasca.core._
import scala.tools.nsc._
/**
 * Result of intra-procedural CFG generation: one control-flow graph per method symbol.
 * An empty map means generation failed, which is what `warning` reports.
 */
case class IntraProceduralControlFlowGraphMap(methodCFGMap: Map[Global#Symbol, ControlFlowGraph]) extends RuleResult {

  override def warning = Warning("GEN_CFG_GENERATOR_INTRA",
    "Generating Control Flow Graph",
    Console.YELLOW + "Unable to generate control flow graph" + Console.RESET,
    FatalCategory())

  // One "Method <name>:" section per entry; the warning's default message when empty.
  // Idiom fix: map/mkString instead of a foldLeft string build, nonEmpty over size > 0.
  override def toString(): String =
    if (methodCFGMap.nonEmpty)
      methodCFGMap.iterator
        .map { case (method, cfg) => "Method " + method.name + ":\n" + cfg.toString() }
        .mkString
    else
      warning.formattedDefaultMessage

  override def isSuccess: Boolean = methodCFGMap.nonEmpty
}
class IntraProceduralControlFlowGraphGenerator[T <: Global](val global: T, inputResults: List[RuleResult] = List()) extends StandardRule {
import global._
type RR = IntraProceduralControlFlowGraphMap
override val ruleName = "GEN_CFG_GENERATOR_INTRA"
private var funGraphMap = Map[Global#Symbol, ControlFlowGraph]()
private var labelMap = Map[Symbol, Label]()
private var unseenLabelsPreviousNodes = Map[Symbol, List[ControlFlowGraphNode]]()
  // Threaded through the AST traversal while building per-method CFGs.
  case class TraversalState(
    currentMethod: Option[Symbol],              // symbol of the method whose CFG is being built; None outside any method body
    currentCatch: Option[ControlFlowGraphNode], // enclosing catch-handler node; when set, every new node also gets an edge to it
    previousNodes: List[ControlFlowGraphNode]   // CFG nodes that flow directly into whatever node is created next
  )
def apply(tree: Tree): RR = {
def getUpdatedGraph(newNode: ControlFlowGraphNode, newNodePreviousNodes: List[ControlFlowGraphNode], ts: TraversalState): ControlFlowGraph =
if (ts.currentCatch.isEmpty)
funGraphMap(ts.currentMethod.get).withNode(newNode).withDirectedEdges(newNodePreviousNodes, newNode)
else
funGraphMap(ts.currentMethod.get).withNode(newNode).withDirectedEdges(newNodePreviousNodes, newNode).withDirectedEdge(newNode, ts.currentCatch.get)
def traverse(tree: Tree, ts: TraversalState): (Option[ControlFlowGraphNode], List[ControlFlowGraphNode]) = tree match {
case q"$mods object $tname extends { ..$early } with ..$parents { $self => ..$body }" if ts.currentMethod.isEmpty && !body.isEmpty =>
// println("ObjectDef")
val firstStatTraversal = traverse(body.head, ts)
val blockTraversal = body.tail.foldLeft(firstStatTraversal)((prevTraversal, stat) => traverse(stat, ts.copy(previousNodes = prevTraversal._2)))
(firstStatTraversal._1, blockTraversal._2)
// Quasiquote throws weird match error in some cases?
// case q"$mods class $tpname[..$targs] $ctorMods(...$paramss) extends { ..$early } with ..$parents { $self => ..$stats }" if ts.currentMethod.isEmpty && !stats.isEmpty =>
// println("ClassDef")
case ClassDef(mods, name, tparams, Template(parents, self, stats)) if ts.currentMethod.isEmpty && !stats.isEmpty =>
val firstStatTraversal = traverse(stats.head, ts)
val blockTraversal = stats.tail.foldLeft(firstStatTraversal)((prevTraversal, stat) => traverse(stat, ts.copy(previousNodes = prevTraversal._2)))
(firstStatTraversal._1, blockTraversal._2)
case q"$mods trait $tpname[..$tparams] extends { ..$earlydefns } with ..$parents { $self => ..$stats }" if ts.currentMethod.isEmpty && !stats.isEmpty =>
// println("TraitDef")
val firstStatTraversal = traverse(stats.head, ts)
val blockTraversal = stats.tail.foldLeft(firstStatTraversal)((prevTraversal, stat) => traverse(stat, ts.copy(previousNodes = prevTraversal._2)))
(firstStatTraversal._1, blockTraversal._2)
case m @ q"$mods def $tname[..$tparams](...$paramss): $tpt = $expr" =>
// println("MethodDef " + tree)
val currentNode = MethodDef(m)
funGraphMap += (m.symbol -> new ControlFlowGraph().withNode(currentNode))
(Some(currentNode), traverse(expr, ts.copy(currentMethod = Some(m.symbol), previousNodes = List(currentNode)))._2)
case l @ LabelDef(name, params, rhs) if !ts.currentMethod.isEmpty =>
// println("LabelDef " + tree)
val graphToModify = funGraphMap(ts.currentMethod.get)
val newNode = Label(l)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, ts.previousNodes, ts)
labelMap += (l.symbol -> newNode)
(Some(newNode), traverse(rhs, ts.copy(previousNodes = List(newNode)))._2)
case v @ q"$mods val $tname: $tpt = $expr" if !ts.currentMethod.isEmpty =>
// println("ValDef " + tree)
val exprTraversal = traverse(expr, ts)
val newNode = ValueDef(v)
val (exprEntryNode, exprExitNodes) = (exprTraversal._1.getOrElse(EmptyNode()), exprTraversal._2)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, exprExitNodes, ts)
(Some(exprEntryNode), List(newNode))
case v @ q"$mods var $tname: $tpt = $expr" if !ts.currentMethod.isEmpty =>
// println("VarDef " + tree)
val exprTraversal = traverse(expr, ts)
val newNode = VariableDef(v)
val (exprEntryNode, exprExitNodes) = (exprTraversal._1.getOrElse(EmptyNode()), exprTraversal._2)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, exprExitNodes, ts)
(Some(exprEntryNode), List(newNode))
case i @ q"if ($condE) $thenE else $elseE" if !ts.currentMethod.isEmpty =>
// println("IfElse " + tree)
val condTraversal = traverse(condE, ts)
val (condEntryNode, condExitNodes) = (condTraversal._1.getOrElse(EmptyNode()), condTraversal._2)
val thenTraversal = traverse(thenE, ts.copy(previousNodes = condExitNodes))
val (thenEntryNode, thenExitNodes) = (thenTraversal._1.getOrElse(EmptyNode()), thenTraversal._2)
val elseExitNodes = traverse(elseE, ts.copy(previousNodes = condExitNodes))._2
(Some(condEntryNode), thenExitNodes ::: elseExitNodes)
case n @ q"new { ..$earlydefns } with ..$parents { $self => ..$stats }" if !ts.currentMethod.isEmpty =>
// println("New")
val newNode = NewNode(n)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, ts.previousNodes, ts)
(Some(newNode), List(newNode))
case m @ q"$target.$method(...$exprss)" if !ts.currentMethod.isEmpty && exprss.size > 0 =>
// println("exprss " + tree)
val flatExprss = exprss.flatten
val newNode = MethodCall(m, Set(target.symbol), method)
val exprssTraversal =
if (!flatExprss.isEmpty) {
val firstExprssTraversal = traverse(flatExprss.head, ts)
if (flatExprss.size > 1)
flatExprss.foldLeft(firstExprssTraversal)((prevTraversal, stat) => traverse(stat, ts.copy(previousNodes = prevTraversal._2)))
else
firstExprssTraversal
}
else {
(Some(newNode), List(newNode))
}
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, exprssTraversal._2, ts)
(exprssTraversal._1, List(newNode))
case Apply(obj, List()) if !(ts.currentMethod.isEmpty || ts.previousNodes.isEmpty) =>
// println("Application " + tree)
if (labelMap.contains(obj.symbol)) {
funGraphMap += ts.currentMethod.get ->
funGraphMap(ts.currentMethod.get).withDirectedEdges(ts.previousNodes, labelMap(obj.symbol))
(None, List(labelMap(obj.symbol)))
}
else {
if (unseenLabelsPreviousNodes.contains(obj.symbol)) {
unseenLabelsPreviousNodes += obj.symbol -> (unseenLabelsPreviousNodes(obj.symbol) ::: ts.previousNodes)
(None, List())
}
else {
unseenLabelsPreviousNodes += obj.symbol -> ts.previousNodes
(None, List())
}
}
//Quasiquote not working
//case q"try { $tryE } catch { case ..$catchCases } finally { $finallyE }" if !ts.currentMethod.isEmpty =>
case Try(tryE, catchCases, finallyE) if !ts.currentMethod.isEmpty =>
// println("trycatch")
val (tryTraversal, catchExitNodes) =
if (!catchCases.isEmpty) {
val catchNode = CatchNode()
funGraphMap += ts.currentMethod.get -> funGraphMap(ts.currentMethod.get).withNode(catchNode)
(traverse(tryE, ts.copy(currentCatch = Some(catchNode))),
catchCases.foldLeft(List[ControlFlowGraphNode]())((acc, catchCase) => acc ::: traverse(catchCase.body, ts.copy(previousNodes = List(catchNode)))._2))
}
else
(traverse(tryE, ts), List())
val (tryEntryNode, tryExitNodes) = (tryTraversal._1.getOrElse(EmptyNode()), tryTraversal._2)
val finallyExitNodes =
traverse(finallyE, ts.copy(previousNodes = tryExitNodes ::: catchExitNodes))._2
finallyExitNodes match {
case f :: fs =>
(Some(tryEntryNode), finallyExitNodes)
case _ if !catchExitNodes.isEmpty => (Some(tryEntryNode), tryExitNodes ::: catchExitNodes)
case _ => (Some(tryEntryNode), tryExitNodes)
}
case q"return $expr" if !ts.currentMethod.isEmpty =>
// println("returnExpr")
val exprTraversal = traverse(expr, ts)
val (exprEntryNode, exprExitNodes) = (exprTraversal._1.getOrElse(EmptyNode()), exprTraversal._2)
(Some(exprEntryNode), List())
case t @ q"throw $expr" if !ts.currentMethod.isEmpty =>
// println("others in method " + showRaw(tree))
val throwTraversal = traverse(expr, ts)
val newNode = ThrowNode(t)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, throwTraversal._2, ts)
(throwTraversal._1, List())
case q"{ ..$stats }" if stats.size > 1 =>
// println("Stats " + showRaw(tree))
if (stats.forall(s => (s.isInstanceOf[ValDef] && s.asInstanceOf[ValDef].symbol.isCase && s.asInstanceOf[ValDef].symbol.isSynthetic) || s.isInstanceOf[LabelDef])) {
// println("Case Stats " + showRaw(tree))
val firstStatTraversal = traverse(stats.head, ts)
val exitNodes = stats.tail.foldLeft(firstStatTraversal._2)((acc, stat) =>
if (unseenLabelsPreviousNodes.contains(stat.symbol)) {
val s = unseenLabelsPreviousNodes(stat.symbol)
unseenLabelsPreviousNodes -= stat.symbol
acc ::: traverse(stat, ts.copy(previousNodes = s))._2
}
else
traverse(stat, ts.copy(previousNodes = acc))._2)
(firstStatTraversal._1, exitNodes)
}
else {
val firstStatTraversal = traverse(stats.head, ts)
val blockTraversal = stats.tail.foldLeft(firstStatTraversal)((prevTraversal, stat) => traverse(stat, ts.copy(previousNodes = prevTraversal._2)))
(firstStatTraversal._1, blockTraversal._2)
}
case a @ q"$expr1 = $expr2" if !ts.currentMethod.isEmpty =>
// println("expr1 = expr2")
val expr2Traversal = traverse(expr2, ts)
val expr1Traversal = traverse(expr1, ts.copy(previousNodes = expr2Traversal._2))
val newNode = AssignNode(a)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, expr1Traversal._2, ts)
(expr2Traversal._1, List(newNode))
case e if !ts.currentMethod.isEmpty =>
// println("others in method " + showRaw(tree))
val newNode = ExprNode(e)
funGraphMap += ts.currentMethod.get -> getUpdatedGraph(newNode, ts.previousNodes, ts)
(Some(newNode), List(newNode))
case e =>
// println("others not in method")
val traversalResults = e.children.map(c => traverse(c, ts))
if (traversalResults.size > 0)
(traversalResults.head._1, traversalResults.last._2)
else
(None, List())
}
traverse(tree, TraversalState(None, None, List()))
IntraProceduralControlFlowGraphMap(funGraphMap)
}
} | jean-andre-gauthier/scalasca | src/main/scala/lara/epfl/scalasca/rules/IntraProceduralControlFlowGraphGenerator.scala | Scala | bsd-3-clause | 10,990 |
package org.automanlang.adapters.mturk.mock
import java.util.UUID
import com.amazonaws.services.mturk.model.QualificationType
//import com.amazonaws.mturk.requester.{HIT, QualificationType}
import com.amazonaws.services.mturk.model.{HIT}
import org.automanlang.core.mock.MockResponse
import org.automanlang.core.question.Question
import org.automanlang.core.util.Utilities
/** Immutable snapshot of the mocked MTurk backend's state.
  *
  * Every "mutator" below is pure: it returns a new `MockServiceState` built
  * with the case-class `copy` method, leaving the receiver untouched, so the
  * mock service can thread its state functionally.
  *
  * @param budget                             remaining account budget
  * @param question_by_question_id            questions keyed by their id
  * @param hit_type_by_hit_type_id            registered HIT types keyed by id (string form)
  * @param hits_by_question_id                HITs posted for each question
  * @param answers_by_assignment_id           mock worker responses keyed by assignment id
  * @param assignment_status_by_assignment_id status and optional HIT id per assignment
  * @param assignment_ids_by_question_id      assignment ids grouped by question
  * @param qualification_types                qualification types created so far
  */
case class MockServiceState(budget: java.math.BigDecimal,
                            question_by_question_id: Map[UUID, Question],
                            hit_type_by_hit_type_id: Map[String, MockHITType],
                            hits_by_question_id: Map[UUID, List[HIT]],
                            answers_by_assignment_id: Map[UUID, MockResponse],
                            assignment_status_by_assignment_id: Map[UUID, (AssignmentStatus.Value, Option[String])],
                            assignment_ids_by_question_id: Map[UUID, List[UUID]],
                            qualification_types: List[QualificationType]) {

  /** Prepends `hit` to the HIT list for `question_id`, creating the list if absent. */
  def addHIT(question_id: UUID, hit: HIT): MockServiceState = {
    val hitlist = hit :: hits_by_question_id.getOrElse(question_id, List.empty)
    copy(hits_by_question_id = hits_by_question_id + (question_id -> hitlist))
  }

  /** Registers a new HIT type, keyed by the string form of its id. */
  def addHITType(hit_type: MockHITType): MockServiceState =
    copy(hit_type_by_hit_type_id = hit_type_by_hit_type_id + (hit_type.id.toString -> hit_type))

  /** Adds `delta` to the budget.
    *
    * NOTE(review): the assertion rejects `delta > budget`; for debits `delta`
    * is presumably negative, in which case the check is vacuous — confirm the
    * intended invariant (it looks like it should guard against overdrawing).
    */
  def budgetDelta(delta: java.math.BigDecimal): MockServiceState = {
    assert(delta.compareTo(budget) != 1)
    copy(budget = budget.add(delta))
  }

  /** Copies the mutable AWS `HIT` bean field by field so that later setter
    * calls on the clone cannot alias the original.
    */
  private def cloneHIT(hit: HIT): HIT = {
    val cloned_hit = new HIT()
    cloned_hit.setAssignmentDurationInSeconds(hit.getAssignmentDurationInSeconds)
    cloned_hit.setAutoApprovalDelayInSeconds(hit.getAutoApprovalDelayInSeconds)
    cloned_hit.setCreationTime(hit.getCreationTime)
    cloned_hit.setDescription(hit.getDescription)
    cloned_hit.setExpiration(hit.getExpiration)
    cloned_hit.setHITGroupId(hit.getHITGroupId)
    cloned_hit.setHITId(hit.getHITId)
    cloned_hit.setHITLayoutId(hit.getHITLayoutId)
    cloned_hit.setHITReviewStatus(hit.getHITReviewStatus)
    cloned_hit.setHITStatus(hit.getHITStatus)
    cloned_hit.setHITTypeId(hit.getHITTypeId)
    cloned_hit.setKeywords(hit.getKeywords)
    cloned_hit.setMaxAssignments(hit.getMaxAssignments)
    cloned_hit.setNumberOfAssignmentsAvailable(hit.getNumberOfAssignmentsAvailable)
    cloned_hit.setNumberOfAssignmentsCompleted(hit.getNumberOfAssignmentsCompleted)
    cloned_hit.setNumberOfAssignmentsPending(hit.getNumberOfAssignmentsPending)
    cloned_hit.setQualificationRequirements(hit.getQualificationRequirements)
    cloned_hit.setQuestion(hit.getQuestion)
    //cloned_hit.setRequest(hit.getRequest) //TODO: this doesn't seem to exist anymore
    cloned_hit.setRequesterAnnotation(hit.getRequesterAnnotation)
    cloned_hit.setReward(hit.getReward)
    cloned_hit.setTitle(hit.getTitle)
    cloned_hit
  }

  /** Extends a HIT's deadline by `deltaSec` seconds and its capacity by
    * `deltaAssignments`, replacing the stored HIT with an updated clone.
    */
  def extendHIT(hitId: String, deltaSec: Int, deltaAssignments: Int): MockServiceState = {
    assert(hits_by_question_id.size >= 1)
    val hit = getHITforHITId(hitId)
    // The question id is stashed in the HIT's requester annotation.
    val question_id = UUID.fromString(hit.getRequesterAnnotation)
    val cloned_hit = cloneHIT(hit)
    cloned_hit.setExpiration(Utilities.calInSeconds(cloned_hit.getExpiration, deltaSec).getTime())
    cloned_hit.setMaxAssignments(cloned_hit.getMaxAssignments + deltaAssignments)
    // Replace the old HIT entry with the extended clone.
    val hitlist = cloned_hit :: hits_by_question_id(question_id).filter(_.getHITId != hitId)
    copy(hits_by_question_id = hits_by_question_id + (question_id -> hitlist))
  }

  /** Looks up a HIT by id across all questions; throws if it is not present. */
  def getHITforHITId(hitId: String): HIT = {
    assert(hits_by_question_id.nonEmpty)
    hits_by_question_id.flatMap(_._2).filter(_.getHITId == hitId).head
  }

  /** Registers a question under its own id. */
  def addQuestion(question: Question): MockServiceState =
    copy(question_by_question_id = question_by_question_id + (question.id -> question))

  /** Records a batch of mock responses for `question_id`.
    *
    * Every new assignment starts in the `UNANSWERED` state with no HIT id.
    */
  def addAssignments(question_id: UUID, assignments: Map[UUID, MockResponse]): MockServiceState = {
    val statuses: Map[UUID, (AssignmentStatus.Value, Option[String])] =
      assignments.map { case (id, _) => id -> (AssignmentStatus.UNANSWERED, None) }
    copy(
      answers_by_assignment_id = answers_by_assignment_id ++ assignments,
      assignment_status_by_assignment_id = assignment_status_by_assignment_id ++ statuses,
      assignment_ids_by_question_id = assignment_ids_by_question_id + (question_id -> assignments.keys.toList))
  }

  /** Replaces the whole assignment-status map. */
  def updateAssignmentStatusMap(am: Map[UUID, (AssignmentStatus.Value, Option[String])]): MockServiceState =
    copy(assignment_status_by_assignment_id = am)

  /** Transitions `assignmentId` to `new_status`, keeping its recorded HIT id. */
  def updateAssignmentStatus(assignmentId: UUID, new_status: AssignmentStatus.Value): MockServiceState = {
    updateAssignmentStatus(assignmentId, None, new_status)
  }

  /** Transitions `assignmentId` to `new_status`, optionally re-binding its HIT id. */
  def updateAssignmentStatus(assignmentId: UUID, hit_id_opt: Option[String], new_status: AssignmentStatus.Value): MockServiceState =
    copy(assignment_status_by_assignment_id =
      changeAssignmentStatus(assignmentId, hit_id_opt, new_status, assignment_status_by_assignment_id))

  /** Adds a qualification type to the front of the list. */
  def addQualificationType(qualType: QualificationType): MockServiceState =
    copy(qualification_types = qualType :: qualification_types)

  /** Removes the qualification type with the given id, returning the new state
    * together with the removed type. Throws if no such id exists.
    */
  def deleteQualificationById(qualID: String): (MockServiceState, QualificationType) = {
    val (matching, remaining) = qualification_types.partition(_.getQualificationTypeId == qualID)
    (copy(qualification_types = remaining), matching.head)
  }

  /** Validates and applies a status transition for one assignment.
    *
    * Allowed transitions: ANSWERED -> APPROVED/REJECTED,
    * UNANSWERED -> ANSWERED, and (for cancellation)
    * ANSWERED/UNANSWERED -> UNANSWERED; anything else fails an assertion.
    * When `hit_id_opt` is `None` the previously recorded HIT id is kept.
    */
  private def changeAssignmentStatus(assignmentId: UUID, hit_id_opt: Option[String], new_status: AssignmentStatus.Value, assn_map: Map[UUID, (AssignmentStatus.Value, Option[String])])
    : Map[UUID, (AssignmentStatus.Value, Option[String])] = {
    val current_status = assn_map(assignmentId)._1
    // Ensure that only valid state transitions are allowed
    new_status match {
      case AssignmentStatus.APPROVED => assert(current_status == AssignmentStatus.ANSWERED)
      case AssignmentStatus.REJECTED => assert(current_status == AssignmentStatus.ANSWERED)
      case AssignmentStatus.ANSWERED => assert(current_status == AssignmentStatus.UNANSWERED)
      case AssignmentStatus.UNANSWERED => // for cancellation
        assert(
          current_status == AssignmentStatus.ANSWERED ||
            current_status == AssignmentStatus.UNANSWERED
        )
      case _ => assert(false)
    }
    // Keep the previously recorded HIT id unless a new one is supplied.
    val hio = hit_id_opt orElse assn_map(assignmentId)._2
    assn_map + (assignmentId -> (new_status, hio))
  }
}
| dbarowy/AutoMan | libautoman/src/main/scala/org/automanlang/adapters/mturk/mock/MockServiceState.scala | Scala | gpl-2.0 | 8,836 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.infinity.server.plugin
import org.apache.commons.logging.LogFactory
import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.hdfs.server.namenode.NameNode
import org.apache.hadoop.util.ServicePlugin
import es.tid.cosmos.infinity.server.authentication.cosmosapi.CosmosApiAuthenticationService
import es.tid.cosmos.infinity.server.authentication.AuthenticationService
import es.tid.cosmos.infinity.server.config.MetadataServerConfig
import es.tid.cosmos.infinity.server.metadata.MetadataServer
import es.tid.cosmos.infinity.server.hadoop.{DfsClientFactory, HdfsNameNode}
import es.tid.cosmos.infinity.server.urls.InfinityUrlMapper
/** Namenode plugin to serve Infinity metadata. */
class MetadataPlugin extends ServicePlugin with Configurable {

  private val log = LogFactory.getLog(classOf[MetadataPlugin])

  // Hadoop configuration injected by the plugin framework via setConf();
  // None until injection happens.
  private var hadoopConfOpt: Option[Configuration] = None
  // The running metadata server; Some only between start() and stop().
  private var serverOpt: Option[MetadataServer] = None

  /** Called by the Hadoop plugin framework to inject the namenode configuration. */
  override def setConf(conf: Configuration): Unit = {
    hadoopConfOpt = Some(conf)
  }

  /** Returns the injected configuration, failing fast if setConf was never called. */
  override def getConf: Configuration =
    hadoopConfOpt.getOrElse(throw new IllegalStateException("Not yet injected with Hadoop config"))

  /** Starts the Infinity metadata server once the namenode leaves safe mode.
    *
    * `service` must be a [[NameNode]]; this plugin logs an error (and starts
    * nothing) if it was configured on a datanode by mistake.
    * Busy-waits in 1s steps while the namenode is in safe mode.
    */
  override def start(service: Any): Unit = service match {
    case nameNode: NameNode =>
      while (nameNode.isInSafeMode) {
        log.info("Waiting for NN to exit safe mode before starting metadata server")
        Thread.sleep(1000)
      }
      log.info("Starting Infinity metadata server as a namenode plugin")
      val config = new MetadataServerConfig(pluginConfig)
      val dfsClientFactory = new DfsClientFactory(
        getConf, NameNode.getUri(nameNode.getServiceRpcAddress))
      val urlMapper = new InfinityUrlMapper(config)
      val apis = HdfsNameNode.NameNodeApis(
        protocols = nameNode.getRpcServer,
        nameSystem = nameNode.getNamesystem,
        dfsClientFactory
      )
      val server = new MetadataServer(
        nameNode = new HdfsNameNode(config, apis, urlMapper),
        config = config,
        authService = authentication)
      server.start()
      serverOpt = Some(server)
    case other =>
      log.error(
        s"""Metadata plugin initialization failed: a NameNode was expected but ${service.getClass}
            | was found. Make sure you have configured it as namenode plugin instead of datanode one.
          """.stripMargin
      )
  }

  /** Stops the metadata server if it is running; idempotent. */
  override def stop(): Unit = {
    log.info("Shutting down Infinity metadata plugin")
    serverOpt.foreach(_.stop())
    serverOpt = None
  }

  override def close(): Unit = stop()

  // Lazy so that getConf is only consulted after setConf injection.
  private lazy val pluginConfig = PluginConfig.load(getConf)

  private lazy val authentication: AuthenticationService =
    CosmosApiAuthenticationService.fromConfig(pluginConfig)
}
| telefonicaid/fiware-cosmos-platform | infinity/server/src/main/scala/es/tid/cosmos/infinity/server/plugin/MetadataPlugin.scala | Scala | apache-2.0 | 3,411 |
package xitrum.annotation
import scala.annotation.StaticAnnotation
/** Base type of Xitrum's cache annotations; sealed so the set of cache
  * directives is closed. Each subclass carries a duration in a single unit.
  */
sealed trait Cache extends StaticAnnotation

// Action-level cache directives, one per time unit.
// NOTE(review): the exact action-vs-page caching semantics are defined by the
// framework, not visible here — see the Xitrum caching docs.
case class CacheActionDay   (days: Int)    extends Cache
case class CacheActionHour  (hours: Int)   extends Cache
case class CacheActionMinute(minutes: Int) extends Cache
case class CacheActionSecond(seconds: Int) extends Cache

// Page-level cache directives, one per time unit.
case class CachePageDay   (days: Int)    extends Cache
case class CachePageHour  (hours: Int)   extends Cache
case class CachePageMinute(minutes: Int) extends Cache
case class CachePageSecond(seconds: Int) extends Cache
| caiiiycuk/xitrum | src/main/scala/xitrum/annotation/Caches.scala | Scala | mit | 563 |
package patterns
/** Curry–Howard puzzles: each combinator is a lambda term whose type reads as
  * a logical tautology (H plays the role of "false"/the answer type, so
  * `X => H` is a continuation and `(X => H) => H` a double negation).
  */
object Puzzles {

  /** Function application: `eval(f)(a) == f(a)`. */
  def eval[X, Y]: (X => Y) => X => Y = f => a => f(a)

  /** Quadruple- to double-negation collapse (the "mu" of the continuation monad). */
  def mu[X, H]: ((((X => H) => H) => H) => H) => ((X => H) => H) =
    outer => k => outer(inner => inner(k))

  /** Functorial map for double negation: lifts `X => Y` to `¬¬X => ¬¬Y`. */
  def strength[X, Y, H]: (X => Y) => ((X => H) => H) => (Y => H) => H =
    f => cx => ky => cx(a => ky(f(a)))

  /** From a "stable" X (double-negation elimination into X) and a doubly
    * negated `A => X`, recover a plain `A => X`.
    */
  def em[X, H, A]: (((X => H) => H) => X) => (((A => X) => H) => H) => (A => X) =
    stable => caX => a => stable(k => caX(g => k(g(a))))

  /** Applicative-style combination under double negation:
    * `¬¬(A => B)` applied to `¬¬A` yields `¬¬B`.
    */
  def as[H, A, B]: (((A => B) => H) => H) => ((A => H) => H) => (B => H) => H =
    cab => ca => kb => cab(f => ca(a => kb(f(a))))

  /** Contraction: duplicates the argument, `rm(f)(s) == f(s)(s)`. */
  def rm[X, S]: (S => S => X) => S => X = f => s => f(s)(s)

  /** Small demo: prints 5 (length of "Hello") and 6 (3 + 3 for "Bye"). */
  def main(args: Array[String]): Unit = {
    def len(s: String): Int = s.length
    println(eval[String, Int](len)("Hello"))

    def fun(x: String)(y: String): Int = x.length + y.length
    println(rm(fun)("Bye"))
  }
}
| dongarerahul/FunctionalProgrammingInScala | src/patterns/Puzzles.scala | Scala | apache-2.0 | 2,059 |
package de.tudresden.inf.lat.tabulas.datatype
import java.math.BigDecimal
import java.util.Objects
/** This models a decimal value.
*
*/
/** A primitive-type value wrapping a [[java.math.BigDecimal]]. */
case class DecimalValue(number: BigDecimal) extends PrimitiveTypeValue {

  /** The wrapped decimal. */
  val getValue: BigDecimal = number

  override val getType: PrimitiveType = DecimalType()

  /** A decimal value is never considered empty. */
  override val isEmpty: Boolean = false

  override val render: String = number.toString

  override val renderAsList: Seq[String] = Seq(render)

  /** Numeric comparison against other decimals; falls back to comparing
    * rendered strings for any other primitive-type value.
    */
  override def compareTo(other: PrimitiveTypeValue): Int = other match {
    case DecimalValue(otherNumber) => number.compareTo(otherNumber)
    case _                         => render.compareTo(other.render)
  }

  override val toString: String = number.toString
}
object DecimalValue {

  /** A zero-valued decimal. */
  def apply(): DecimalValue = DecimalValue(BigDecimal.ZERO)

  /** Parses a decimal value from its string representation.
    *
    * @param str string to parse; must not be null
    * @throws ParseException if `str` is not a valid decimal representation
    */
  def apply(str: String): DecimalValue = {
    Objects.requireNonNull(str)
    val parsed =
      try {
        new BigDecimal(str)
      } catch {
        case e: NumberFormatException => throw new ParseException(e.getMessage, e)
      }
    DecimalValue(parsed)
  }

}
| julianmendez/tabulas | tabulas-core/src/main/scala/de/tudresden/inf/lat/tabulas/datatype/DecimalValue.scala | Scala | apache-2.0 | 1,280 |
/*
* This file is part of the sohva project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.sohva
package test
import org.scalatest._
/** @author satabin
*
*/
/** Integration tests for CouchDB security documents: who may read/write a
  * database depending on its admins/members lists. Each scenario runs against
  * a freshly (re)created `sohva_test_security` database.
  */
class TestSecurity extends SohvaTestSpec with Matchers with BeforeAndAfterEach {

  // Anonymous (unauthenticated) handle on the test database.
  var secDb: Database = couch.database("sohva_test_security")
  // Admin-session handle; (re)assigned in beforeEach.
  var adminSecDb: Database = _

  // Security documents used by the scenarios:
  // admins by name / admins by role / members by role / members by name.
  // NOTE(review): secDoc4 is defined but not exercised by any scenario below.
  val secDoc1 = SecurityDoc(admins = SecurityList(names = List("secUser2")))
  val secDoc2 = SecurityDoc(admins = SecurityList(roles = List("role1")))
  val secDoc3 = SecurityDoc(members = SecurityList(roles = List("role2")))
  val secDoc4 = SecurityDoc(members = SecurityList(names = List("secUser1")))

  override def beforeEach() {
    // create the database for tests
    adminSecDb = session.database("sohva_test_security")
    synced(adminSecDb.create)
    // add the test users
    synced(session.users.add("secUser1", "secUser1", List("role1", "role2")))
    synced(session.users.add("secUser2", "secUser2", List("role2")))
  }

  override def afterEach() {
    // delete the database
    synced(adminSecDb.delete)
    // delete the test user
    synced(session.users.delete("secUser1"))
    synced(session.users.delete("secUser2"))
  }

  "a database with no security document" should "be readable by everybody" in {
    synced(secDb.info) should be('defined)
  }

  it should "be writtable to anybody" in {
    val saved = synced(secDb.saveDoc(TestDoc("some_doc", 17)))
    saved should have(
      '_id("some_doc"),
      'toto(17))
  }

  "server admin" should "be able to add a security document" in {
    synced(adminSecDb.saveSecurityDoc(secDoc1)) should be(true)
  }

  "database admin" should "be able to update the security document" in {
    // secUser2 becomes a database admin via secDoc1, then updates the doc itself.
    synced(adminSecDb.saveSecurityDoc(secDoc1)) should be(true)
    val session2 = couch.startBasicSession("secUser2", "secUser2")
    synced(session2.database("sohva_test_security").saveSecurityDoc(secDoc2)) should be(true)
  }

  "anonymous user" should "not be able to read a database with a members list" in {
    synced(secDb.saveDoc(TestDoc("some_doc", 13)))
    synced(adminSecDb.saveSecurityDoc(secDoc3)) should be(true)
    // The anonymous read must be rejected with HTTP 401 somewhere in the cause chain.
    val thrown = the[SohvaException] thrownBy {
      synced(secDb.getDocById[TestDoc]("some_doc"))
    }
    val ce = CauseMatchers.findExpectedExceptionRecursively[CouchException](thrown)
    withClue("CouchException should be present in the stack trace: ") { ce should not be ('empty) }
    ce.get.status should be(401)
  }

  it should "not be able to write into a database with a member list" in {
    synced(adminSecDb.saveSecurityDoc(secDoc3)) should be(true)
    val thrown = the[SohvaException] thrownBy {
      synced(secDb.saveDoc(TestDoc("some_doc", 13)))
    }
    val ce = CauseMatchers.findExpectedExceptionRecursively[CouchException](thrown)
    withClue("CouchException should be present in the stack trace: ") { ce should not be ('empty) }
    ce.get.status should be(401)
  }

}
| gnieh/sohva | src/test/scala/gnieh/sohva/test/TestSecurity.scala | Scala | apache-2.0 | 3,452 |
package main.scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.countDistinct
import org.apache.spark.sql.functions.max
import org.apache.spark.sql.functions.count
import org.apache.spark.sql.functions.udf
/**
* TPC-H Query 21
* Savvas Savvides <savvas@purdue.edu>
*
*/
/** TPC-H Query 21: "Suppliers Who Kept Orders Waiting".
  *
  * Finds Saudi Arabian suppliers that were the only late supplier on a
  * multi-supplier, finalized order, counting such orders per supplier.
  */
class Q21 extends TpchQuery {

  override def execute(sc: SparkContext, schemaProvider: TpchSchemaProvider): DataFrame = {

    // this is used to implicitly convert an RDD to a DataFrame.
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    import sqlContext.implicits._
    import schemaProvider._

    // Projections of the base tables down to the columns the query needs.
    val fsupplier = supplier.select($"s_suppkey", $"s_nationkey", $"s_name")
    val plineitem = lineitem.select($"l_suppkey", $"l_orderkey", $"l_receiptdate", $"l_commitdate")

    // Line items that were delivered late (received after the committed date).
    val flineitem = plineitem.filter($"l_receiptdate" > $"l_commitdate")

    // Per order: number of distinct suppliers and the max supplier key,
    // over all line items (line1) and over late line items only (line2).
    val line1 = plineitem.groupBy($"l_orderkey")
      .agg(countDistinct($"l_suppkey").as("suppkey_count"), max($"l_suppkey").as("suppkey_max"))
      .select($"l_orderkey".as("key"), $"suppkey_count", $"suppkey_max")

    val line2 = flineitem.groupBy($"l_orderkey")
      .agg(countDistinct($"l_suppkey").as("suppkey_count"), max($"l_suppkey").as("suppkey_max"))
      .select($"l_orderkey".as("key"), $"suppkey_count", $"suppkey_max")

    // Only finalized orders qualify.
    val forder = order.select($"o_orderkey", $"o_orderstatus")
      .filter($"o_orderstatus" === "F")

    nation.filter($"n_name" === "SAUDI ARABIA")
      .join(fsupplier, $"n_nationkey" === fsupplier("s_nationkey"))
      .join(flineitem, $"s_suppkey" === flineitem("l_suppkey"))
      .join(forder, $"l_orderkey" === forder("o_orderkey"))
      .join(line1, $"l_orderkey" === line1("key"))
      // Keep orders with more than one supplier, or single-supplier orders
      // where this supplier is that (max-keyed) supplier.
      // BUGFIX: the original used Scala's `==` on Columns (which compares the
      // Column objects themselves and always yields false, silently dropping
      // the whole right-hand side) and referenced the non-existent column
      // "max_suppkey"; Spark's `===` and the actual "suppkey_max" column are
      // required here.
      .filter($"suppkey_count" > 1 || ($"suppkey_count" === 1 && $"l_suppkey" === $"suppkey_max"))
      .select($"s_name", $"l_orderkey", $"l_suppkey")
      // Left outer join on the late-only aggregates: this supplier must be the
      // sole late supplier on the order.
      .join(line2, $"l_orderkey" === line2("key"), "left_outer")
      .select($"s_name", $"l_orderkey", $"l_suppkey", $"suppkey_count", $"suppkey_max")
      .filter($"suppkey_count" === 1 && $"l_suppkey" === $"suppkey_max")
      .groupBy($"s_name")
      .agg(count($"l_suppkey").as("numwait"))
      .sort($"numwait".desc, $"s_name")
      .limit(100)
  }

}
| ssavvides/tpch-spark | src/main/scala/Q21.scala | Scala | mit | 2,284 |
/*
* ProcessElement.scala
* Class for an element whose value is a process
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Nov 27, 2014
*
* Copyright 2014 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.library.collection
import com.cra.figaro.language._
/**
* Represents an element whose value is a process.
*
* Elements that are created by operations are put in the same universe as this element.
*/
class ProcessElement[Index, Value](val element: Element[Process[Index, Value]]) {

  /**
   * Creates an element whose value is the value at the corresponding index of the value of the process element.
   *
   * The resulting element is created with an empty name in this element's universe.
   */
  def apply(i: Index): Element[Value] = {
    Chain(element, (p: Process[Index, Value]) => p(i))("", element.universe)
  }

  /**
   * Safely creates an element whose value is the optional value at the corresponding index of the value
   * of the process element. If the value of the process element does not have the corresponding index, the value
   * of this element is None.
   */
  def get(i: Index): Element[Option[Value]] = {
    Chain(element, (p: Process[Index, Value]) => p.get(i))("", element.universe)
  }

  /**
   * Map the given function pointwise through the value of the process element.
   *
   * Returns a new ProcessElement wrapping an Apply over this element.
   */
  def map[Value2](f: Value => Value2): ProcessElement[Index, Value2] = {
    new ProcessElement(Apply(element, (p: Process[Index, Value]) => p.map(f))("", element.universe))
  }

  /**
   * Chain the given function pointwise through the value of the process element.
   *
   * Returns a new ProcessElement wrapping an Apply over this element.
   */
  def chain[Value2](f: Value => Element[Value2]): ProcessElement[Index, Value2] = {
    new ProcessElement(Apply(element, (p: Process[Index, Value]) => p.chain(f))("", element.universe))
  }
}
| agarbuno/figaro | Figaro/src/main/scala/com/cra/figaro/library/collection/ProcessElement.scala | Scala | bsd-3-clause | 1,900 |
package models.admin
import scalaz._
import Scalaz._
import scalaz.effect.IO
import scalaz.Validation
import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._
import cache._
import db._
import io.megam.auth.funnel.FunnelErrors._
import io.megam.common.uid.UID
import io.megam.util.Time
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.nio.charset.Charset
import java.util.UUID
import com.datastax.driver.core.{ ResultSet, Row }
import com.websudos.phantom.dsl._
import scala.concurrent.{ Future => ScalaFuture }
import com.websudos.phantom.connectors.{ ContactPoint, KeySpaceDef }
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.annotation.tailrec
import models.tosca.{ KeyValueField, KeyValueList}
import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.{DateTimeFormat,ISODateTimeFormat}
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.nio.charset.Charset
import controllers.stack.ImplicitJsonFormats
import models.Constants._;
import utils.{DateHelper, StringStuff}
/** Client-supplied payload for creating/updating a flavor (no id/timestamps;
  * those are filled in server-side).
  */
case class FlavorInput( name: String, cpu: String, ram: String, disk: String, category: List[String],
      regions: List[String], price: KeyValueList, properties: KeyValueList, status: String)
/** Persisted representation of a flavor row as stored in the `flavors` table,
  * including the server-assigned id, JSON class tag, and timestamps.
  */
case class FlavorResult(
  id: String,
  name: String,
  cpu: String,
  ram: String,
  disk: String,
  category: List[String],
  regions: List[String],
  price: KeyValueList,
  properties: KeyValueList,
  status: String,
  json_claz: String,
  updated_at: DateTime,
  created_at: DateTime)
/** Phantom table mapping for flavor rows.
  *
  * Primary key is (id, name): `id` is the partition key and `name` a
  * clustering (primary-key) column. `price` and `properties` are stored as
  * lists of JSON-encoded KeyValueField entries via lift-json.
  */
sealed class FlavorSacks extends CassandraTable[FlavorSacks, FlavorResult] with ImplicitJsonFormats {

  object id extends StringColumn(this) with PartitionKey[String]
  object name extends StringColumn(this) with PrimaryKey[String]
  object cpu extends StringColumn(this)
  object ram extends StringColumn(this)
  object disk extends StringColumn(this)
  object category extends ListColumn[FlavorSacks, FlavorResult, String](this)
  object regions extends ListColumn[FlavorSacks, FlavorResult, String](this)

  // Each list element is (de)serialized to/from a compact JSON string.
  object price extends JsonListColumn[FlavorSacks, FlavorResult, KeyValueField](this) {
    override def fromJson(obj: String): KeyValueField = {
      JsonParser.parse(obj).extract[KeyValueField]
    }

    override def toJson(obj: KeyValueField): String = {
      compactRender(Extraction.decompose(obj))
    }
  }

  object properties extends JsonListColumn[FlavorSacks, FlavorResult, KeyValueField](this) {
    override def fromJson(obj: String): KeyValueField = {
      JsonParser.parse(obj).extract[KeyValueField]
    }

    override def toJson(obj: KeyValueField): String = {
      compactRender(Extraction.decompose(obj))
    }
  }

  object status extends StringColumn(this)
  object json_claz extends StringColumn(this)
  object updated_at extends DateTimeColumn(this)
  object created_at extends DateTimeColumn(this)

  /** Materializes a Cassandra row into a FlavorResult, column by column. */
  override def fromRow(r: Row): FlavorResult = {
    FlavorResult(
      id(r),
      name(r),
      cpu(r),
      ram(r),
      disk(r),
      category(r),
      regions(r),
      price(r),
      properties(r),
      status(r),
      json_claz(r),
      updated_at(r),
      created_at(r))
  }
}
/** CRUD operations on the `flavors` table, bound to the shared Scylla
  * connection. Every call blocks (Await, 5s timeout) and wraps the outcome in
  * a successNel; failures surface as exceptions from Await, not as failureNel.
  */
abstract class ConcreteFlavors extends FlavorSacks with RootConnector {

  override lazy val tableName = "flavors"
  override implicit def space: KeySpace = scyllaConnection.space
  override implicit def session: Session = scyllaConnection.session

  /** Fetches at most one row by id.
    * NOTE(review): allowFiltering is used even though `id` is the partition
    * key — confirm whether it is actually needed here.
    */
  def getRecordById(id: String): ValidationNel[Throwable, Option[FlavorResult]] = {
    val res = select.allowFiltering().where(_.id eqs id).one()
    Await.result(res, 5.seconds).successNel
  }

  /** Fetches at most one row by name. */
  def getRecordByName(name: String): ValidationNel[Throwable, Option[FlavorResult]] = {
    val res = select.where(_.name eqs name).one()
    Await.result(res, 5.seconds).successNel
  }

  /** Fetches every row in the table (consistency level ONE). */
  def listAllRecords(): ValidationNel[Throwable, Seq[FlavorResult]] = {
    val res = select.consistencyLevel_=(ConsistencyLevel.ONE).fetch
    Await.result(res, 5.seconds).successNel
  }

  /** Inserts a full flavor row, one column per field of `mpr`. */
  def insertNewRecord(mpr: FlavorResult): ValidationNel[Throwable, ResultSet] = {
    val res = insert.value(_.id, mpr.id)
      .value(_.name, mpr.name)
      .value(_.cpu, mpr.cpu)
      .value(_.ram, mpr.ram)
      .value(_.disk,mpr.disk)
      .value(_.category,mpr.category)
      .value(_.regions, mpr.regions)
      .value(_.price, mpr.price)
      .value(_.properties, mpr.properties)
      .value(_.status, mpr.status)
      .value(_.json_claz, mpr.json_claz)
      .value(_.updated_at, mpr.updated_at)
      .value(_.created_at, mpr.created_at)
      .future()
    Await.result(res, 5.seconds).successNel
  }

  /** Updates the row keyed by (name, id) from `rip`, keeping old scalar values
    * where the new ones are empty (via StringStuff.NilOrNot) and refreshing
    * updated_at.
    *
    * NOTE(review): the `email` parameter is unused — confirm whether it should
    * scope the update. NilOrNot presumably returns the new value unless it is
    * nil/empty, falling back to the old — verify against StringStuff.
    */
  def updateRecord(email: String, rip: FlavorResult, aor: Option[FlavorResult]): ValidationNel[Throwable, ResultSet] = {
    val oldstatus  = aor.get.status
    val newstatus  = rip.status

    val oldcpu = aor.get.cpu
    val newcpu = rip.cpu

    val oldram = aor.get.ram
    val newram = rip.ram

    val olddisk = aor.get.disk
    val newdisk = rip.disk

    val res = update.where(_.name eqs rip.name).and(_.id eqs rip.id)
      .modify(_.status setTo StringStuff.NilOrNot(newstatus, oldstatus))
      .and(_.cpu setTo StringStuff.NilOrNot(newcpu, oldcpu))
      .and(_.ram setTo StringStuff.NilOrNot(newram, oldram))
      .and(_.disk setTo StringStuff.NilOrNot(newdisk, olddisk))
      .and(_.category setTo rip.category)
      .and(_.regions setTo rip.regions)
      .and(_.price setTo rip.price)
      .and(_.properties setTo rip.properties)
      .and(_.updated_at setTo DateHelper.now())
      .future()
    scala.concurrent.Await.result(res, 5.seconds).successNel
  }

  /** Deletes the row with the given name.
    * NOTE(review): `account_id` is ignored — deletion is keyed by name alone;
    * confirm that is intended.
    */
  def deleteRecord(account_id: String, name: String): ValidationNel[Throwable, ResultSet] = {
    val res = delete.where(_.name eqs name).future()
    Await.result(res,5.seconds).successNel
  }

}
// Public facade over the flavors table: parse/validate input, mint ids, and
// expose CRUD operations as ValidationNel pipelines.
object Flavors extends ConcreteFlavors {

  // Parse the request body into a FlavorInput, mint a "FLV"-prefixed id, and
  // assemble a fresh FlavorResult stamped with the current time.
  private def mkFlavorsSack(email: String, input: String): ValidationNel[Throwable, FlavorResult] = {
    val mktsInput: ValidationNel[Throwable, FlavorInput] = (Validation.fromTryCatchThrowable[FlavorInput, Throwable] {
      parse(input).extract[FlavorInput]
    } leftMap { t: Throwable => new MalformedBodyError(input, t.getMessage) }).toValidationNel
    for {
      mkt <- mktsInput
      uir <- (UID("FLV").get leftMap { ut: NonEmptyList[Throwable] => ut })
    } yield {
      (new FlavorResult(uir.get._1 + uir.get._2, mkt.name, mkt.cpu, mkt.ram, mkt.disk, mkt.category,
        mkt.regions, mkt.price, mkt.properties, mkt.status, models.Constants.FLAVORCLAZ, DateHelper.now(), DateHelper.now()))
    }
  }

  // List every flavor; fails with ResourceItemNotFound when the table is empty.
  // The chained stable sorts yield an ordering by disk, then ram, then cpu.
  // NOTE(review): `.toFloat` on these String columns throws on non-numeric
  // data — confirm inputs are always numeric.
  def listAll: ValidationNel[Throwable, Seq[FlavorResult]] = {
    (listAllRecords() leftMap { t: NonEmptyList[Throwable] =>
      new ResourceItemNotFound("", "Flavor items = nothing found.")
    }).toValidationNel.flatMap { nm: Seq[FlavorResult] =>
      if (!nm.isEmpty) {
        Validation.success[Throwable, Seq[FlavorResult]](nm.sortWith(_.cpu.toFloat < _.cpu.toFloat).sortWith(_.ram.toFloat < _.ram.toFloat).sortWith(_.disk.toFloat < _.disk.toFloat)).toValidationNel
      } else {
        Validation.failure[Throwable, Seq[FlavorResult]](new ResourceItemNotFound("", "Flavor = nothing found.")).toValidationNel
      }
    }
  }

  // Create a flavor from the raw JSON body and persist it.
  def create(email: String, input: String): ValidationNel[Throwable, Option[FlavorResult]] = {
    for {
      wa <- (mkFlavorsSack(email, input) leftMap { err: NonEmptyList[Throwable] => err })
      set <- (insertNewRecord(wa) leftMap { t: NonEmptyList[Throwable] => t })
    } yield {
      play.api.Logger.warn(("%s%s%-20s%s%s").format(Console.GREEN, Console.BOLD, "Flavors","|+| ✔", Console.RESET))
      wa.some
    }
  }

  // Delete by name, returning the record that was removed.
  def delete(email: String, name: String): ValidationNel[Throwable, FlavorResult] = {
    for {
      wa <- (findByName(name, email) leftMap { t: NonEmptyList[Throwable] => t })
      set <- (deleteRecord(email, name) leftMap { t: NonEmptyList[Throwable] => t })
    } yield {
      play.api.Logger.warn(("%s%s%-20s%s%s").format(Console.RED, Console.BOLD, "Flavors","|-| ✔", Console.RESET))
      wa
    }
  }

  // Update an existing flavor from the raw JSON body.
  // NOTE(review): the *pre-update* record `wa` is returned, not the new state.
  def update(email: String, input: String): ValidationNel[Throwable, FlavorResult] = {
    val ripNel: ValidationNel[Throwable, FlavorResult] = (Validation.fromTryCatchThrowable[FlavorResult,Throwable] {
      parse(input).extract[FlavorResult]
    } leftMap { t: Throwable => new MalformedBodyError(input, t.getMessage) }).toValidationNel
    for {
      rip <- ripNel
      wa <- (findByName(rip.name, email) leftMap { t: NonEmptyList[Throwable] => t })
      set <- updateRecord(email, rip, wa.some)
    } yield {
      wa
    }
  }

  // Lookup by id, wrapped in a single-element Seq for API symmetry.
  def findById(id: String): ValidationNel[Throwable, Seq[FlavorResult]] = {
    (getRecordById(id) leftMap { t: NonEmptyList[Throwable] ⇒
      new ServiceUnavailableError(id, (t.list.map(m ⇒ m.getMessage)).mkString("\\n"))
    }).toValidationNel.flatMap { xso: Option[FlavorResult] ⇒
      xso match {
        case Some(xs) ⇒ {
          Validation.success[Throwable, Seq[FlavorResult]](List(xs)).toValidationNel
        }
        case None ⇒ Validation.failure[Throwable, Seq[FlavorResult]](new ResourceItemNotFound(id, "")).toValidationNel
      }
    }
  }

  // Lookup by name; `email` is currently unused.
  def findByName(name: String, email: String): ValidationNel[Throwable, FlavorResult] = {
    (getRecordByName(name) leftMap { t: NonEmptyList[Throwable] ⇒
      new ServiceUnavailableError(name, (t.list.map(m ⇒ m.getMessage)).mkString("\\n"))
    }).toValidationNel.flatMap { xso: Option[FlavorResult] ⇒
      xso match {
        case Some(xs) ⇒ {
          Validation.success[Throwable, FlavorResult](xs).toValidationNel
        }
        case None ⇒ Validation.failure[Throwable, FlavorResult](new ResourceItemNotFound(name, "")).toValidationNel
      }
    }
  }
}
| megamsys/verticegateway | app/models/admin/Flavors.scala | Scala | mit | 9,810 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.index
import com.vividsolutions.jts.geom.Polygon
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeZone}
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geohash.GeoHash
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
// specs2 unit tests for the key planners: date planners produce key ranges
// from date/spatial filters; CompositePlanner joins sub-planner keys with a
// separator.
@RunWith(classOf[JUnitRunner])
class QueryPlannersTest extends Specification {
  "QueryPlanner" should {
    // Planners keyed on month, second, and month-day patterns respectively.
    val mm = DatePlanner(DateTimeFormat.forPattern("MM"))
    val ss = DatePlanner(DateTimeFormat.forPattern("ss"))
    val mmdd = DatePlanner(DateTimeFormat.forPattern("MM-dd"))

    // A purely spatial filter constrains no dates, so each planner must cover
    // its entire calendar domain.
    "return full ranges for unspecified dates " in {
      val ghPoly = GeoHash("c23j").bbox.geom match {
        case p: Polygon => p
        case _ => throw new Exception("geohash c23j should have a polygon bounding box")
      }
      mm.getKeyPlan(SpatialFilter(ghPoly), true, ExplainNull) must be equalTo KeyRange("01", "12")
      ss.getKeyPlan(SpatialFilter(ghPoly), true, ExplainNull) must be equalTo KeyRange("00", "59")
      mmdd.getKeyPlan(SpatialFilter(ghPoly), true, ExplainNull) must be equalTo KeyRange("01-01", "12-31")
    }

    // Date ranges should tighten to the covered months/seconds; a range that
    // spans year boundaries falls back to the full domain.
    "return appropriate ranges for date ranges" in {
      val dt1 = new DateTime(2005, 3, 3, 5, 7, DateTimeZone.forID("UTC"))
      val dt2 = new DateTime(2005, 10, 10, 10, 10, DateTimeZone.forID("UTC"))
      val dt3 = new DateTime(2001, 3, 3, 5, 7, DateTimeZone.forID("UTC"))
      val dt4 = new DateTime(2005, 3, 9, 5, 7, DateTimeZone.forID("UTC"))
      val dt5 = new DateTime(2005, 9, 9, 5, 7, DateTimeZone.forID("UTC"))
      mm.getKeyPlan(DateRangeFilter(dt1, dt2), true, ExplainNull) must be equalTo KeyRangeTiered("03", "10")
      ss.getKeyPlan(DateRangeFilter(dt1, dt2), true, ExplainNull) must be equalTo KeyRangeTiered("00", "59")
      mm.getKeyPlan(DateRangeFilter(dt3, dt1), true, ExplainNull) must be equalTo KeyRangeTiered("01", "12")
      mm.getKeyPlan(DateRangeFilter(dt1, dt4), true, ExplainNull) must be equalTo KeyRangeTiered("03", "03")
      mm.getKeyPlan(DateRangeFilter(dt4, dt5), true, ExplainNull) must be equalTo KeyRangeTiered("03", "09")
    }

    // Composite plans combine each sub-planner's keys with "~".
    "return appropriate regexes for regex" in {
      val planners = List(ConstStringPlanner("foo"), RandomPartitionPlanner(3), GeoHashKeyPlanner(0,1))
      val cp = CompositePlanner(planners, "~")
      val poly = WKTUtils.read("POLYGON((-109 31, -115 31, -115 37,-109 37,-109 31))").asInstanceOf[Polygon]
      val kp = cp.getKeyPlan(SpatialFilter(poly), true, ExplainNull)
      val expectedKP = KeyRanges(List(
        KeyRange("foo~0~.","foo~0~."),
        KeyRange("foo~0~9","foo~0~9"),
        KeyRange("foo~1~.","foo~1~."),
        KeyRange("foo~1~9","foo~1~9"),
        KeyRange("foo~2~.","foo~2~."),
        KeyRange("foo~2~9","foo~2~9")))
      kp must be equalTo expectedKP
    }
  }
}
| drackaer/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/QueryPlannersTest.scala | Scala | apache-2.0 | 3,371 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.demo
import java.util.UUID
import akka.actor._
import com.datastax.spark.connector.SomeColumns
import com.datastax.spark.connector.cql.CassandraConnector
import com.datastax.spark.connector.streaming._
import org.apache.spark.streaming.{Milliseconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import scala.concurrent.duration._
/**
* Demo application that streams messages from RabbitMQ and allows them to be consumed
* from Spark
*/
// Streams messages from RabbitMQ via an actor receiver and audits each one
// into Cassandra (msgs.msg_audit) under a random UUID.
// NOTE(review): the `App` trait defers initialization to the delayed-init
// body, which can be surprising; a plain `main` would be more conventional.
object DemoApp extends App {
  implicit val system = ActorSystem("data-feeder")
  // Postfix `10 seconds` duration; presumably consumed implicitly by ask
  // patterns elsewhere — not referenced directly in this object.
  implicit val timeout = 10 seconds
  val settings = new DemoSettings()
  import settings._
  val log = system.log

  // Spark context is built lazily; credentials here are the Cassandra
  // defaults — fine for a demo, do not ship to production.
  lazy val conf = new SparkConf().setAppName("rmq-receiver-demo")
    .setMaster(SparkMaster)
    .set("spark.executor.memory", SparkExecutorMemory)
    .set("spark.default.parallelism", SparkParallelism)
    .set("spark.cassandra.connection.host", CassandraHosts)
    .set("spark.cassandra.auth.username", "cassandra")
    .set("spark.cassandra.auth.password", "cassandra")
  log.info("Lazily creating spark context")
  lazy val sc = new SparkContext(conf)
  //Create a keyspace and table for placing the messages in
  createSchema(conf)
  lazy val ssc = new StreamingContext(sc, Milliseconds(SparkStreamingBatchWindow))
  //Create an RMQReciver actor stream that is used to publish items of type String
  val msgs = ssc.actorStream[String](Props(classOf[RMQReceiver], RMQHost,
    Some(RMQUsername),
    Some(RMQPassword),
    RMQQueuename,
    RMQExchange), "rmq-receiver")
  //Dump Messages to a log table with a unique id....
  msgs.map { msg =>
    (UUID.randomUUID(), msg)
  } saveToCassandra("msgs", "msg_audit", SomeColumns("uid", "msgbody"))
  //Print to stdout for visibility
  msgs.print()
  //Start all streams...
  ssc.start()
  //Shutdown Hook
  system.registerOnTermination {
    ssc.stop(stopSparkContext = true, stopGracefully = true)
  }
  log.info("Awaiting termination...")
  // Blocks until the streaming context stops; the actor-system wait below
  // only runs after that.
  ssc.awaitTermination()
  system.awaitTermination()

  // Idempotently create the demo keyspace/table; returns whether the last
  // statement was applied.
  def createSchema(conf: SparkConf): Boolean = {
    CassandraConnector(conf).withSessionDo { sess =>
      sess.execute("CREATE KEYSPACE IF NOT EXISTS msgs WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1 }")
      sess.execute("CREATE TABLE IF NOT EXISTS msgs.msg_audit (uid uuid primary key, msgbody text)")
    } wasApplied
  }
}
| yookore/spark-streaming-rabbitmq | src/main/scala/com/datastax/demo/DemoApp.scala | Scala | apache-2.0 | 3,203 |
package com.sksamuel.elastic4s.source
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
/** @author Stephen Samuel */
/** DocumentSource that serialises an arbitrary object to JSON with Jackson.
  * Serialisation is deferred until `json` is called.
  */
class ObjectSource(any: AnyRef) extends DocumentSource {
  def json: String = ObjectSource.mapper.writeValueAsString(any)
}
// Companion holding a single shared ObjectMapper configured for Scala types.
// Jackson mappers are thread-safe once configured, so sharing is fine here.
object ObjectSource {
  val mapper = new ObjectMapper
  mapper.registerModule(DefaultScalaModule)
  def apply(any: AnyRef) = new ObjectSource(any)
}
| maxcom/elastic4s | src/main/scala/com/sksamuel/elastic4s/source/ObjectSource.scala | Scala | apache-2.0 | 455 |
package proxy.remoteproxy
import java.rmi.Naming
import java.rmi.RemoteException
import java.rmi.server.UnicastRemoteObject
import java.util.Date
// RMI server entry point: exports a ReportGenerator under the name
// "PizzaCoRemoteGenerator" (remote-proxy pattern demo).
// NOTE(review): failures are only printed; the process keeps running with no
// bound service.
object ReportGeneratorImpl extends App {
  try {
    val reportGenerator: ReportGenerator = new ReportGeneratorImpl()
    Naming.rebind("PizzaCoRemoteGenerator", reportGenerator)
  } catch {
    case e: Exception => e.printStackTrace()
  }
}
// Remote implementation; extending UnicastRemoteObject exports the stub on
// construction. The constructor is protected so instances are created only
// by the companion bootstrap above.
class ReportGeneratorImpl protected ()
  extends UnicastRemoteObject
  with ReportGenerator {

  // Builds a fixed-format daily report; `$$` renders a literal dollar sign
  // inside the interpolated string.
  @throws(classOf[RemoteException])
  override def generateDailyReport: String =
    s"""
       |********************Location X Daily Report********************
       | Location ID: 012
       | Today’s Date: ${new Date().toString}
       | Total Pizza Sell: 112
       | Total Sale: $$2534
       | Net Profit: $$1985
       |***************************************************************
     """.stripMargin
}
| BBK-PiJ-2015-67/sdp-portfolio | exercises/week11/src/main/scala/proxy/remoteproxy/ReportGeneratorImpl.scala | Scala | unlicense | 896 |
package com.twitter.finagle.client
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.{param, ServiceFactory, Stack, Stackable, StackBuilder}
import com.twitter.finagle.pool.{WatermarkPool, CachingPool, BufferingPool}
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.util.DefaultTimer
import com.twitter.util.{Timer, Duration}
// Stack module wiring Finagle's default connection-pool chain:
// BufferingPool (outermost) -> WatermarkPool -> CachingPool (closest to the
// underlying connections).
object DefaultPool {
  val Role = StackClient.Role.pool

  // Distinct roles for each layer so individual layers can be replaced or
  // inspected in the client stack.
  object Roles {
    val bufferingPool = Stack.Role("BufferingPool")
    val cachingPool = Stack.Role("CachingPool")
    val watermarkPool = Stack.Role("WatermarkPool")
  }
  /**
   * A class eligible for configuring a [[com.twitter.finagle.Stackable]]
   * default pool module.
   *
   * @param low The low watermark used in the Watermark pool. If there
   * is sufficient request concurrency, no fewer connections will be
   * maintained by the pool.
   *
   * @param high The high watermark. The pool will not maintain more
   * connections than this.
   *
   * @param bufferSize Specifies the size of the lock-free buffer in front of
   * the pool configuration. Skipped if 0.
   *
   * @param idleTime The amount of idle time for which a connection is
   * cached. This is applied to connections that number greater than
   * the low watermark but fewer than the high.
   *
   * @param maxWaiters The maximum number of connection requests that
   * are queued when the connection concurrency exceeds the high
   * watermark.
   */
  case class Param(low: Int, high: Int, bufferSize: Int, idleTime: Duration, maxWaiters: Int) {
    def mk(): (Param, Stack.Param[Param]) =
      (this, Param.param)
  }
  object Param {
    // Defaults: unbounded pool, no buffering, connections never expire.
    implicit val param = Stack.Param(Param(0, Int.MaxValue, 0, Duration.Top, Int.MaxValue))
  }
  /**
   * A [[com.twitter.finagle.Stackable]] client connection pool.
   *
   * @see [[com.twitter.finagle.pool.BufferingPool]].
   * @see [[com.twitter.finagle.pool.WatermarkPool]].
   * @see [[com.twitter.finagle.pool.CachingPool]].
   */
  def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Module[ServiceFactory[Req, Rep]] {
      val role = DefaultPool.Role
      val description = "Control client connection pool"
      val parameters = Seq(
        implicitly[Stack.Param[Param]],
        implicitly[Stack.Param[param.Stats]],
        implicitly[Stack.Param[param.Timer]]
      )
      def make(prms: Stack.Params, next: Stack[ServiceFactory[Req, Rep]]) = {
        val Param(low, high, bufferSize, idleTime, maxWaiters) = prms[Param]
        val param.Stats(statsReceiver) = prms[param.Stats]
        val param.Timer(timer) = prms[param.Timer]
        val stack = new StackBuilder[ServiceFactory[Req, Rep]](next)
        // CachingPool caches the `high - low` connections above the watermark
        // floor; skipped when idle expiry or the watermark gap is degenerate.
        if (idleTime > 0.seconds && high > low) {
          stack.push(
            Roles.cachingPool,
            (sf: ServiceFactory[Req, Rep]) =>
              new CachingPool(sf, high - low, idleTime, timer, statsReceiver)
          )
        }
        stack.push(
          Roles.watermarkPool,
          (sf: ServiceFactory[Req, Rep]) =>
            new WatermarkPool(sf, low, high, statsReceiver, maxWaiters)
        )
        // Optional lock-free buffer in front of the watermark pool.
        if (bufferSize > 0) {
          stack.push(
            Roles.bufferingPool,
            (sf: ServiceFactory[Req, Rep]) => new BufferingPool(sf, bufferSize)
          )
        }
        stack.result
      }
    }
}
/**
 * Create a watermark pool backed by a caching pool. This is the
 * default pooling setup of Finagle.
 *
 * @param low The low watermark used in the Watermark pool. If there
 * is sufficient request concurrency, no fewer connections will be
 * maintained by the pool.
 *
 * @param high The high watermark. The pool will not maintain more
 * connections than this.
 *
 * @param bufferSize Specifies the size of the lock-free buffer in front of
 * the pool configuration. Skipped if 0.
 *
 * @param idleTime The amount of idle time for which a connection is
 * cached. This is applied to connections that number greater than
 * the low watermark but fewer than the high.
 *
 * @param maxWaiters The maximum number of connection requests that
 * are queued when the connection concurrency exceeds the high
 * watermark.
 */
case class DefaultPool[Req, Rep](
  low: Int = 0,
  high: Int = Int.MaxValue,
  bufferSize: Int = 0,
  idleTime: Duration = Duration.Top,
  maxWaiters: Int = Int.MaxValue,
  timer: Timer = DefaultTimer)
  extends (StatsReceiver => Transformer[Req, Rep]) {
  // Function form of the module above: given a stats receiver, produce a
  // ServiceFactory transformer that layers the pools.
  def apply(statsReceiver: StatsReceiver) = inputFactory => {
    // Degenerate idle time / watermark gap disables the caching layer.
    val factory =
      if (idleTime <= 0.seconds || high <= low) inputFactory
      else
        new CachingPool(inputFactory, high - low, idleTime, timer, statsReceiver)
    // NB: WatermarkPool conceals the first "low" closes from CachingPool, so that
    // CachingPool only caches the last "high - low", and WatermarkPool caches the first
    // "low".
    val pool = new WatermarkPool(factory, low, high, statsReceiver, maxWaiters)
    if (bufferSize <= 0) pool else new BufferingPool(pool, bufferSize)
  }
}
| twitter/finagle | finagle-core/src/main/scala/com/twitter/finagle/client/DefaultPool.scala | Scala | apache-2.0 | 5,015 |
package org.dberg.hubot.models
/** Discriminates how a message was addressed to the bot. */
sealed trait MessageType

object MessageType {

  /**
   * A one-on-one message. Doubles as an extractor so handlers can write
   * `case Direct() => ...` against a Message.
   */
  case object Direct extends MessageType {
    // True exactly when the message's type is this case object.
    def unapply(message: Message): Boolean = message.messageType == Direct
  }

  /**
   * A group/channel message; extractor counterpart of [[Direct]].
   */
  case object Group extends MessageType {
    def unapply(message: Message): Boolean = message.messageType == Group
  }
}
// Shared shape for message values. NOTE(review): the constructor args are not
// vals, so only `room` is exposed here; subclasses re-declare the fields.
abstract class MessageBase(user: User, body: String, messageType: MessageType, params: Map[String, String] = Map()) {
  // Convenience alias for the room the sending user occupies.
  val room = user.room
}
// A single chat message: who sent it, its text, how it was addressed, any
// adapter-specific params, and whether processing is finished (`done`).
final case class Message(
  user: User,
  body: String,
  messageType: MessageType,
  params: Map[String, String] = Map(),
  done: Boolean = false
) extends MessageBase(user, body, messageType)
/** Extractor for a message's text: `case Body(text) => ...`. Always matches. */
object Body {
  def unapply(message: Message): Option[String] =
    Some(message.body)
}
/**
 * Conjunction extractor: lets a single value be matched by two patterns at
 * once, e.g. `case Body(text) & Room(room) => ...`. It always matches,
 * handing the same input to both sub-patterns.
 */
object & {
  // Explicit tuple — the previous `Some(value, value)` relied on argument
  // auto-tupling, which is deprecated and warns/errors on newer compilers.
  def unapply[A](value: A): Option[(A, A)] = Some((value, value))
}
/** Extractor for the sending user: `case SentBy(user) => ...`. Always matches. */
object SentBy {
  def unapply(message: Message): Option[User] =
    Some(message.user)
}
/** Extractor for the room the sender is in: `case Room(r) => ...`. Always matches. */
object Room {
  def unapply(message: Message): Option[String] =
    Some(message.user.room)
}
| denen99/hubot-scala | src/main/scala/org/dberg/hubot/models/Message.scala | Scala | apache-2.0 | 1,103 |
package io.buoyant.linkerd.protocol
import com.twitter.finagle.http.{Response, Request, TlsFilter}
import com.twitter.finagle.ssl.Ssl
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.tracing.NullTracer
import com.twitter.finagle.transport.Transport
import com.twitter.util.{Future, Var}
import java.io.{FileInputStream, File}
import java.net.{SocketAddress, InetSocketAddress}
import java.security.KeyStore
import java.security.cert.CertificateFactory
import javax.net.ssl.{SSLContext, TrustManagerFactory}
import scala.sys.process._
import com.twitter.finagle.{Http => FinagleHttp, _}
// Test helpers for TLS integration tests: generate a throwaway CA and signed
// per-service certificates by shelling out to `openssl`, and build Finagle
// HTTP clients/servers that use them. Requires `openssl` and `mktemp` on the
// PATH; everything is written to a temp dir that is removed afterwards.
object TlsUtils {
  /*
   * helpers
   */
  // Run a process, discarding its output; returns the exit code.
  def run(p: ProcessBuilder): Int = p ! DevNull

  // A service's signed certificate and its PKCS#8 private key.
  case class ServiceCert(cert: File, key: File)
  // The CA certificate plus one ServiceCert per requested service name.
  case class Certs(caCert: File, serviceCerts: Map[String, ServiceCert])

  // Create a temp CA, sign one cert per name, run `f` with the results, then
  // delete the temp dir (even on failure).
  def withCerts(names: String*)(f: Certs => Unit): Unit = {
    // First, we create a CA and get a cert/key for linker
    val tmpdir = new File("mktemp -d -t linkerd-tls.XXXXXX".!!.stripLineEnd)
    try {
      val configFile = mkCaDirs (tmpdir)
      val caCert = new File(tmpdir, "ca+cert.pem")
      val caKey = new File(tmpdir, "private/ca_key.pem")
      assertOk(newKeyAndCert("/C=US/CN=Test CA", configFile, caKey, caCert))
      // For each service: make a CSR + key, sign with the CA, convert the key
      // to PKCS#8 (the format the servers consume).
      val svcCerts = names.map { name =>
        val routerReq = new File(tmpdir, s"${name}_req.pem")
        val routerCert = new File(tmpdir, s"${name}_cert.pem")
        val routerKey = new File(tmpdir, s"private/${name}_key.tmp.pem")
        val routerPk8 = new File(tmpdir, s"private/${name}_pk8.pem")
        assertOk(newReq(s"/C=US/CN=$name", configFile, routerReq, routerKey))
        assertOk(signReq(configFile, caKey, caCert, routerReq, routerCert))
        assertOk(toPk8(routerKey, routerPk8))
        // routerCert has the server's cert, signed by caCert
        name -> ServiceCert(routerCert, routerPk8)
      }.toMap
      f(Certs (caCert, svcCerts) )
    } finally{
      val _ = Seq("rm", "-rf", tmpdir.getPath).!
    }
  }

  // Fail the test if the external command exits non-zero.
  def assertOk(cmd: ProcessBuilder): Unit =
    assert(run(cmd) == 0, s"`$cmd` failed")

  // HTTP client that trusts only `caCert` and verifies the server as
  // `tlsName`, without rewriting Host headers to that name.
  def upstreamTls(server: ListeningServer, tlsName: String, caCert: File) = {
    val address = Address(server.boundAddress.asInstanceOf[InetSocketAddress])
    // Establish an SSL context that uses our generated certificate.
    // Cribbed from http://stackoverflow.com/questions/18513792
    val cf = CertificateFactory.getInstance("X.509");
    val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
    val ks = KeyStore.getInstance(KeyStore.getDefaultType())
    ks.load(null)
    ks.setCertificateEntry("caCert", cf.generateCertificate(new FileInputStream(caCert)))
    tmf.init(ks)
    val ctx = SSLContext.getInstance("TLS")
    ctx.init(null, tmf.getTrustManagers(), null)
    val name = Name.Bound(Var.value(Addr.Bound(address)), address)
    FinagleHttp.client
      .configured(param.Stats(NullStatsReceiver))
      .configured(param.Tracer(NullTracer))
      .withTransport.tls(ctx, tlsName)
      .transformed(_.remove(TlsFilter.role)) // do NOT rewrite Host headers using tlsName
      .newClient(name, "upstream").toService
  }

  // Plaintext HTTP client pointed at `server`.
  def upstream(server: ListeningServer) = {
    val address = Address(server.boundAddress.asInstanceOf[InetSocketAddress])
    val name = Name.Bound(Var.value(Addr.Bound(address)), address)
    FinagleHttp.client
      .configured(param.Stats(NullStatsReceiver))
      .configured(param.Tracer(NullTracer))
      .transformed(_.remove(TlsFilter.role)) // do NOT rewrite Host headers using tlsName
      .newClient(name, "upstream").toService
  }

  // A named backend server plus the dtab entry that routes /svc/<name> to it.
  case class Downstream(name: String, server: ListeningServer) {
    val address = server.boundAddress.asInstanceOf[InetSocketAddress]
    val port = address.getPort
    val dentry = Dentry(
      Path.read(s"/svc/$name"),
      NameTree.read(s"/$$/inet/127.1/$port")
    )
  }

  object Downstream {
    // Plaintext server on an ephemeral port answering with `f`.
    def mk(name: String)(f: Request => Response): Downstream = {
      val service = Service.mk { req: Request => Future(f(req)) }
      val server = FinagleHttp.server
        .configured(param.Label(name))
        .configured(param.Tracer(NullTracer))
        .serve(":*", service)
      Downstream(name, server)
    }
    // TLS server using the generated cert/key (key must be PKCS#8).
    def mkTls(name: String, cert: File, key: File)
      (f: Request => Response): Downstream = {
      val service = Service.mk { req: Request => Future(f(req)) }
      val server = FinagleHttp.server
        .configured(param.Label(name))
        .configured(param.Tracer(NullTracer))
        .configured(
          Transport.TLSServerEngine(
            Some(() => Ssl.server(cert.getPath, key.getPath, null, null, null))
          )
        )
        .serve(":*", service)
      Downstream(name, server)
    }
    // Convenience: a server that always returns the given body.
    def const(name: String, value: String): Downstream =
      mk(name) { _ =>
        val rsp = Response()
        rsp.contentString = value
        rsp
      }
    // TLS variant of `const`.
    def constTls(
      name: String,
      value: String,
      cert: File,
      key: File
    ): Downstream =
      mkTls(name, cert, key) { _ =>
        val rsp = Response()
        rsp.contentString = value
        rsp
      }
  }

  // Swallows all process output.
  val DevNull = ProcessLogger(_ => ())

  // Lay out the directory structure openssl's `ca` command expects
  // (newcerts/, private/, index.txt, serial) and write the config file.
  def mkCaDirs(dir: File): File = {
    new File(dir, "newcerts").mkdir()
    new File(dir, "private").mkdir()
    new File(dir, "index.txt").createNewFile()
    val serial = new java.io.PrintWriter(new File(dir, "serial"))
    serial.println("01")
    serial.close()
    val configFile = new File(dir, "openssl.cfg")
    val cw = new java.io.PrintWriter(configFile)
    cw.print(opensslCfg(dir.getPath))
    cw.close()
    configFile
  }

  // copied from http://www.eclectica.ca/howto/ssl-cert-howto.php
  // NOTE(review): "encyrpt_key" below is misspelled ("encrypt_key"); left as-is
  // since this string is consumed by openssl at runtime — confirm intent.
  def opensslCfg(dir: String) = s"""
    |dir = $dir
    |
    |[ ca ]
    |default_ca = CA_default
    |
    |[ CA_default ]
    |serial = $$dir/serial
    |database = $$dir/index.txt
    |new_certs_dir = $$dir/newcerts
    |certificate = $$dir/cacert.pem
    |private_key = $$dir/private/cakey.pem
    |default_days = 1
    |default_md = sha256
    |preserve = no
    |email_in_dn = no
    |nameopt = default_ca
    |certopt = default_ca
    |policy = policy_match
    |
    |[ policy_match ]
    |commonName = supplied
    |countryName = optional
    |stateOrProvinceName = optional
    |organizationName = optional
    |organizationalUnitName = optional
    |emailAddress = optional
    |
    |[ req ]
    |default_bits = 2048
    |default_keyfile = priv.pem
    |default_md = sha256
    |distinguished_name = req_distinguished_name
    |req_extensions = v3_req
    |encyrpt_key = no
    |
    |[ req_distinguished_name ]
    |
    |[ v3_ca ]
    |basicConstraints = CA:TRUE
    |subjectKeyIdentifier = hash
    |authorityKeyIdentifier = keyid:always,issuer:always
    |
    |[ v3_req ]
    |basicConstraints = CA:FALSE
    |subjectKeyIdentifier = hash
    |""".stripMargin

  // Self-signed key+cert pair (used for the CA itself).
  def newKeyAndCert(subj: String, cfg: File, key: File, cert: File): ProcessBuilder =
    Seq("openssl", "req", "-x509", "-nodes", "-newkey", "rsa:2048",
      "-config", cfg.getPath,
      "-subj", subj,
      "-keyout", key.getPath,
      "-out", cert.getPath
    )

  // New key + certificate signing request.
  def newReq(subj: String, cfg: File, req: File, key: File): ProcessBuilder =
    Seq("openssl", "req", "-new", "-nodes",
      "-config", cfg.getPath,
      "-subj", subj,
      "-keyout", key.getPath,
      "-out", req.getPath
    )

  // Sign a CSR with the CA's key/cert.
  def signReq(cfg: File, key: File, cert: File, req: File, newCert: File): ProcessBuilder =
    Seq("openssl", "ca", "-batch",
      "-config", cfg.getPath,
      "-keyfile", key.getPath,
      "-cert", cert.getPath,
      "-out", newCert.getPath,
      "-infiles", req.getPath
    )

  // Convert a PEM key to unencrypted PKCS#8.
  def toPk8(in: File, out: File): ProcessBuilder =
    Seq("openssl", "pkcs8", "-topk8", "-nocrypt", "-in", in.getPath, "-out", out.getPath)
}
| hhtpcd/linkerd | linkerd/protocol/http/src/integration/scala/io/buoyant/linkerd/protocol/TlsUtils.scala | Scala | apache-2.0 | 7,847 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.tests.guice
import java.io.File
import java.net.URLClassLoader
import play.api.{ Configuration, Environment, Mode }
import play.api.test._
// #builder-imports
import play.api.inject.guice.GuiceApplicationBuilder
// #builder-imports
// #bind-imports
import play.api.inject.bind
// #bind-imports
// #injector-imports
import play.api.inject.guice.GuiceInjectorBuilder
// #injector-imports
// Documentation-backed spec for GuiceApplicationBuilder/GuiceInjectorBuilder.
// The `// #marker` pairs delimit snippets included in the Play docs and the
// `###skip` comments hide setup lines from those snippets — keep both intact.
class ScalaGuiceApplicationBuilderSpec extends PlaySpecification {

  "Scala GuiceApplicationBuilder" should {

    // Environment can be supplied wholesale ...
    "set environment" in {
      val classLoader = new URLClassLoader(Array.empty)
      // #set-environment
      val application = new GuiceApplicationBuilder()
        .load(new play.api.inject.BuiltinModule, new play.api.i18n.I18nModule, new play.api.mvc.CookiesModule) // ###skip
        .loadConfig(Configuration.reference) // ###skip
        .configure("play.http.filters" -> "play.api.http.NoHttpFilters") // ###skip
        .in(Environment(new File("path/to/app"), classLoader, Mode.Test))
        .build()
      // #set-environment
      application.path must_== new File("path/to/app")
      application.mode must_== Mode.Test
      application.classloader must be(classLoader)
    }

    // ... or piecewise via the overloaded `in` methods.
    "set environment values" in {
      val classLoader = new URLClassLoader(Array.empty)
      // #set-environment-values
      val application = new GuiceApplicationBuilder()
        .load(new play.api.inject.BuiltinModule, new play.api.i18n.I18nModule, new play.api.mvc.CookiesModule) // ###skip
        .loadConfig(Configuration.reference) // ###skip
        .configure("play.http.filters" -> "play.api.http.NoHttpFilters") // ###skip
        .in(new File("path/to/app"))
        .in(Mode.Test)
        .in(classLoader)
        .build()
      // #set-environment-values
      application.path must_== new File("path/to/app")
      application.mode must_== Mode.Test
      application.classloader must be(classLoader)
    }

    // `configure` accepts Configuration, Map, and varargs pairs.
    "add configuration" in {
      // #add-configuration
      val application = new GuiceApplicationBuilder()
        .configure(Configuration("a" -> 1))
        .configure(Map("b" -> 2, "c" -> "three"))
        .configure("d" -> 4, "e" -> "five")
        .build()
      // #add-configuration
      application.configuration.get[Int]("a") must beEqualTo(1)
      application.configuration.get[Int]("b") must beEqualTo(2)
      application.configuration.get[String]("c") must beEqualTo("three")
      application.configuration.get[Int]("d") must beEqualTo(4)
      application.configuration.get[String]("e") must beEqualTo("five")
    }

    "override configuration" in {
      // #override-configuration
      val application = new GuiceApplicationBuilder()
        .loadConfig(env => Configuration.load(env))
        .build()
      // #override-configuration
      application.configuration.keys must not be empty
    }

    "add bindings" in {
      // #add-bindings
      val injector = new GuiceApplicationBuilder()
        .configure("play.http.filters" -> "play.api.http.NoHttpFilters") // ###skip
        .bindings(new ComponentModule)
        .bindings(bind[Component].to[DefaultComponent])
        .injector()
      // #add-bindings
      injector.instanceOf[Component] must beAnInstanceOf[DefaultComponent]
    }

    // `overrides` replaces an earlier binding for the same type.
    "override bindings" in {
      // #override-bindings
      val application = new GuiceApplicationBuilder()
        .configure("play.http.filters" -> "play.api.http.NoHttpFilters") // ###skip
        .configure("play.http.router" -> classOf[Routes].getName) // ###skip
        .bindings(new ComponentModule) // ###skip
        .overrides(bind[Component].to[MockComponent])
        .build()
      // #override-bindings
      running(application) {
        val Some(result) = route(application, FakeRequest(GET, "/"))
        contentAsString(result) must_== "mock"
      }
    }

    "load modules" in {
      // #load-modules
      val injector = new GuiceApplicationBuilder()
        .configure("play.http.filters" -> "play.api.http.NoHttpFilters") // ###skip
        .load(
          new play.api.inject.BuiltinModule,
          new play.api.i18n.I18nModule,
          new play.api.mvc.CookiesModule,
          bind[Component].to[DefaultComponent]
        ).injector()
      // #load-modules
      injector.instanceOf[Component] must beAnInstanceOf[DefaultComponent]
    }

    // `disable` removes a module class; resolving its binding then fails.
    "disable modules" in {
      // #disable-modules
      val injector = new GuiceApplicationBuilder()
        .configure("play.http.filters" -> "play.api.http.NoHttpFilters") // ###skip
        .bindings(new ComponentModule) // ###skip
        .disable[ComponentModule]
        .injector()
      // #disable-modules
      injector.instanceOf[Component] must throwA[com.google.inject.ConfigurationException]
    }

    "injector builder" in {
      // #injector-builder
      val injector = new GuiceInjectorBuilder()
        .configure("key" -> "value")
        .bindings(new ComponentModule)
        .overrides(bind[Component].to[MockComponent])
        .injector()
      val component = injector.instanceOf[Component]
      // #injector-builder
      component must beAnInstanceOf[MockComponent]
    }
  }
}
| Shenker93/playframework | documentation/manual/working/scalaGuide/main/tests/code/tests/guice/ScalaGuiceApplicationBuilderSpec.scala | Scala | apache-2.0 | 5,251 |
package com.jakway.util.error
import java.io.File
import java.nio.file.Files
import scala.util.{Failure, Success, Try}
// Mixin for components that need a scratch directory and temp files, with all
// failures surfaced as the host's error type E (built via
// `usesTempDirErrorTypeCTOR`, optionally carrying a cause).
trait UsesTempDir[E <: WithCause[E]] {
  /**
   * a user-specified temporary dir that will be checked first
   * set to None if user input is not desirable
   */
  val tempDirParam: Option[File]
  // Prefix for generated temp dirs when tempDirParam is None.
  val defaultTempDirPrefix: String
  val defaultSuffix = ".xml"
  // Factory for the concrete error type from a message.
  def usesTempDirErrorTypeCTOR: String => E

  // Resolve the temp dir: validate the user-supplied one, or create (and
  // mark deleteOnExit) a fresh one. IO is wrapped in Try so
  // SecurityExceptions surface as Left rather than escaping.
  def getTempDir(): Either[E, File] = {
    // Verify `d` exists (creating it if needed), is a directory, and is
    // writable; returns Left with `errHeader` context otherwise.
    def checkDir(errHeader: String, d: File): Either[E, File] = {
      def err = (x: String) => Left(usesTempDirErrorTypeCTOR(errHeader + ": " + x))
      //wrap IO actions in a Try to catch SecurityExceptions
      if(!d.exists() && !Try(d.mkdirs()).getOrElse(false)) {
        err(s"expected $d to exist")
      } else if(!d.isDirectory) {
        err(s"expected $d to be a directory")
      } else if(!d.canWrite && !Try(d.setWritable(true)).getOrElse(false)) {
        err(s"expected $d to be writeable")
      } else {
        Right(d)
      }
    }
    tempDirParam match {
      //fail if the passed dir isn't valid
      case Some(dir) => checkDir(s"Could not use passed temp dir $tempDirParam", dir)
      //try and generated one otherwise
      case None => {
        Try(Files.createTempDirectory(defaultTempDirPrefix)) match {
          case Success(dir) => checkDir(s"Could not use generated temp dir $dir", dir.toFile)
            .map { x => x.deleteOnExit(); x }
          case Failure(t) => Left(usesTempDirErrorTypeCTOR("Could not create temporary dir").withCause(t))
        }
      }
    }
  }

  // Create an empty temp file inside `dir` with the given suffix.
  def getTempFile(dir: File, suffix: String = defaultSuffix): Either[E, File] = {
    Try(Files.createTempFile(dir.toPath, null, suffix)) match {
      case Success(f) => Right(f.toFile)
      case Failure(t) => Left(
        usesTempDirErrorTypeCTOR(s"Could not create temp file in $dir").withCause(t))
    }
  }

  // Write `str` (plus a trailing newline) to a fresh temp file in `dir`.
  // NOTE(review): if println/flush throws, the PrintWriter is not closed —
  // acceptable for temp files, but worth confirming.
  def stringToTempFile(dir: File, suffix: String = defaultSuffix)(str: String): Either[E, File] = {
    import java.io.PrintWriter
    //close over the XML and write it out to the passed file
    def write(dest: File): Either[E, Unit] = {
      val res = Try(new PrintWriter(dest))
        .map { p =>
          p.println(str)
          p.flush()
          p.close()
        }
      res match {
        case Success(_) => Right(())
        case Failure(t) => Left(
          usesTempDirErrorTypeCTOR(s"Failed to write `$str` to file $dest").withCause(t))
      }
    }
    for {
      f <- getTempFile(dir, suffix)
      _ <- write(f)
    } yield f
  }
}
| tjakway/Gnucash-Regex-Importer | src/main/scala/com/jakway/util/error/UsesTempDir.scala | Scala | gpl-2.0 | 2,561 |
/*******************************************************************************
Copyright (c) 2013, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError}
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.models.builtin.BuiltinArray
import kr.ac.kaist.jsaf.analysis.typing.domain.UIntSingle
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
/**
 * Abstract model of the Tizen `tizen.package` Web API for the static analyzer.
 *
 * Registers the API object, its prototype and pre-allocated result objects in
 * the abstract heap (`getInitList`), and provides one abstract transfer
 * function per API method (`getSemanticMap`). Each transfer function mirrors
 * the concrete API: it validates argument types, allocates abstract arrays for
 * callback arguments, registers success/error callbacks via
 * `TizenHelper.addCallbackHandler`, and joins the possible exceptions raised.
 */
object TIZENpackage extends Tizen {
  private val name = "package"

  /* predefined locations */
  val loc_obj = TIZENtizen.loc_package
  val loc_proto = newSystemRecentLoc(name + "Proto")
  // Abstract summary objects shared by every PackageInformation result.
  val loc_pkginfo: Loc = newSystemLoc("PackageInformation", Old)
  val loc_pkginfoarr: Loc = newSystemLoc("PackageInformationArr", Old)
  val loc_appidarr: Loc = newSystemLoc("ApplicationIdArr", Old)

  override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
    (loc_obj, prop_obj), (loc_proto, prop_proto), (loc_pkginfo, prop_pkginfo_ins), (loc_pkginfoarr, prop_pkginfoarr_ins),
    (loc_appidarr, prop_appidarr_ins)
  )

  /* constructor or object*/
  private val prop_obj: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T)))
  )

  /* prototype */
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("install", AbsBuiltinFunc("tizen.package.install",3)),
    ("uninstall", AbsBuiltinFunc("tizen.package.uninstall",3)),
    ("getPackagesInfo", AbsBuiltinFunc("tizen.package.getPackagesInfo",2)),
    ("getPackageInfo", AbsBuiltinFunc("tizen.package.getPackageInfo",1)),
    ("setPackageInfoEventListener", AbsBuiltinFunc("tizen.package.setPackageInfoEventListener",1)),
    ("unsetPackageInfoEventListener", AbsBuiltinFunc("tizen.package.unsetPackageInfoEventListener",0))
  )

  // Abstract PackageInformation instance: every field is top of its type.
  private val prop_pkginfo_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENPackageInformation.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("id", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
    ("name", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
    ("iconPath", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
    ("version", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
    ("totalSize", AbsConstValue(PropValue(ObjectValue(Value(NumTop), T, T, T)))),
    ("dataSize", AbsConstValue(PropValue(ObjectValue(Value(NumTop), T, T, T)))),
    ("lastModified", AbsConstValue(PropValue(ObjectValue(Value(TIZENtizen.loc_date), T, T, T)))),
    ("author", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
    ("description", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T)))),
    ("appIds", AbsConstValue(PropValue(ObjectValue(Value(loc_appidarr), T, T, T))))
  )

  // Abstract array of PackageInformation (unknown length, summary element).
  private val prop_pkginfoarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    ("@default_number", AbsConstValue(PropValue(ObjectValue(Value(loc_pkginfo), T, T, T))))
  )

  // Abstract array of application id strings.
  private val prop_appidarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    ("@default_number", AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T))))
  )

  override def getSemanticMap(): Map[String, SemanticFun] = {
    Map(
      // install(packageFileURI, PackageProgressCallback, errorCallback?)
      // Checks the URI is a string, registers onprogress/oncomplete handlers
      // (and an optional error handler) with abstract argument arrays.
      ("tizen.package.install" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val addr2 = cfg.getAPIAddress(addr_env, 1)
          val addr3 = cfg.getAPIAddress(addr_env, 2)
          val l_r1 = addrToLoc(addr1, Recent)
          val l_r2 = addrToLoc(addr2, Recent)
          val l_r3 = addrToLoc(addr3, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
          val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
          val v_1 = getArgValue(h_3, ctx_3, args, "0")
          val v_2 = getArgValue(h_3, ctx_3, args, "1")
          val n_arglen = Operator.ToUInt32(getArgValue(h_3, ctx_3, args, "length"))
          // First argument must be a string (package file URI).
          val es_1 =
            if (v_1._1._5 </ StrTop)
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          // Second argument: PackageProgressCallback with callable
          // onprogress/oncomplete members.
          val (h_4, es_2) = v_2._2.foldLeft((h_3, TizenHelper.TizenExceptionBot))((_he, l) => {
            val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onprogress"))
            val v2 = Helper.Proto(_he._1, l, AbsString.alpha("oncomplete"))
            val es1 =
              if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val es2 =
              if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val o_arr = Helper.NewArrayObject(AbsNumber.alpha(2)).
              update("0", PropValue(ObjectValue(Value(StrTop), T, T, T))).
              update("1", PropValue(ObjectValue(Value(NumTop), T, T, T)))
            val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(StrTop), T, T, T)))
            val h_4 = _he._1.update(l_r1, o_arr).update(l_r2, o_arr1)
            val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("PkgProgressCB.onprogress"), Value(v1._2), Value(l_r1))
            val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("PkgProgressCB.oncomplete"), Value(v2._2), Value(l_r2))
            (h_6, _he._2 ++ es1 ++ es2)
          })
          // Optional third argument: error callback receiving
          // NotFoundError/UnknownError.
          val (h_5, es_3) = n_arglen match {
            case UIntSingle(n) if n == 2 =>
              (h_4, TizenHelper.TizenExceptionBot)
            case UIntSingle(n) if n >= 3 =>
              val v_3 = getArgValue(h_4, ctx_3, args, "2")
              val es1 =
                if (v_3._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
                update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
              val h_5 = h_4.update(l_r3, o_arr2)
              val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_3._2), Value(l_r3))
              (h_6, es1)
            case _ =>
              (HeapBot, TizenHelper.TizenExceptionBot)
          }
          val est = Set[WebAPIException](SecurityError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ est)
          ((h_5, ctx_3), (he + h_e, ctxe + ctx_e))
        }
      )),
      // uninstall(id, PackageProgressCallback, errorCallback?)
      // Same structure as install; callback arrays carry the package id value.
      ("tizen.package.uninstall" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val addr2 = cfg.getAPIAddress(addr_env, 1)
          val addr3 = cfg.getAPIAddress(addr_env, 2)
          val l_r1 = addrToLoc(addr1, Recent)
          val l_r2 = addrToLoc(addr2, Recent)
          val l_r3 = addrToLoc(addr3, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
          val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
          val v_1 = getArgValue(h_3, ctx_3, args, "0")
          val v_2 = getArgValue(h_3, ctx_3, args, "1")
          val n_arglen = Operator.ToUInt32(getArgValue(h_3, ctx_3, args, "length"))
          val es_1 =
            if (v_1._1._5 </ StrTop)
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          val (h_4, es_2) = v_2._2.foldLeft((h_3, TizenHelper.TizenExceptionBot))((_he, l) => {
            val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onprogress"))
            val v2 = Helper.Proto(_he._1, l, AbsString.alpha("oncomplete"))
            val es1 =
              if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val es2 =
              if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val o_arr = Helper.NewArrayObject(AbsNumber.alpha(2)).
              update("0", PropValue(ObjectValue(Value(v_1._1._5), T, T, T))).
              update("1", PropValue(ObjectValue(Value(NumTop), T, T, T)))
            val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(v_1._1._5), T, T, T)))
            val h_4 = _he._1.update(l_r1, o_arr).update(l_r2, o_arr1)
            val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("PkgProgressCB.onprogress"), Value(v1._2), Value(l_r1))
            val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("PkgProgressCB.oncomplete"), Value(v2._2), Value(l_r2))
            (h_6, _he._2 ++ es1 ++ es2)
          })
          val (h_5, es_3) = n_arglen match {
            case UIntSingle(n) if n == 2 =>
              (h_4, TizenHelper.TizenExceptionBot)
            case UIntSingle(n) if n >= 3 =>
              val v_3 = getArgValue(h_4, ctx_3, args, "2")
              val es1 =
                if (v_3._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
                update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
              val h_5 = h_4.update(l_r3, o_arr2)
              val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_3._2), Value(l_r3))
              (h_6, es1)
            case _ =>
              (HeapBot, TizenHelper.TizenExceptionBot)
          }
          val est = Set[WebAPIException](SecurityError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ est)
          ((h_5, ctx_3), (he + h_e, ctxe + ctx_e))
        }
      )),
      // getPackagesInfo(successCallback, errorCallback?)
      // Success callback receives the summary PackageInformation array.
      ("tizen.package.getPackagesInfo" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val addr2 = cfg.getAPIAddress(addr_env, 1)
          val l_r1 = addrToLoc(addr1, Recent)
          val l_r2 = addrToLoc(addr2, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
          val v_1 = getArgValue(h_2, ctx_2, args, "0")
          val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
          val es_1 =
            if (v_1._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
            update("0", PropValue(ObjectValue(Value(TIZENpackage.loc_pkginfoarr), T, T, T)))
          val h_3 = h_2.update(l_r1, o_arr)
          val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("PkgInfoArrSuccessCB"), Value(v_1._2), Value(l_r1))
          val (h_5, es_2) = n_arglen match {
            case UIntSingle(n) if n == 1 =>
              (h_4, TizenHelper.TizenExceptionBot)
            case UIntSingle(n) if n >= 2 =>
              val v_2 = getArgValue(h_4, ctx_2, args, "1")
              val es_2 =
                if (v_2._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
                  Set[WebAPIException](TypeMismatchError)
                else TizenHelper.TizenExceptionBot
              val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
                update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
              val h_5 = h_4.update(l_r2, o_arr2)
              val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_2._2), Value(l_r2))
              (h_6, es_2)
            case _ => (HeapBot, TizenHelper.TizenExceptionBot)
          }
          val est = Set[WebAPIException](SecurityError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ est)
          ((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
        }
      )),
      // getPackageInfo(id?) — synchronous; returns the summary
      // PackageInformation object.
      ("tizen.package.getPackageInfo" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val v = getArgValue(h, ctx, args, "0")
          // Argument must be null or a string.
          val es =
            if (v._1._2 </ NullTop && v._1._5 </StrTop)
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          val est = Set[WebAPIException](SecurityError, NotFoundError, UnknownError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ est)
          ((Helper.ReturnStore(h, Value(TIZENpackage.loc_pkginfo)), ctx), (he + h_e, ctxe + ctx_e))
        }
      )),
      // setPackageInfoEventListener(PackageInformationEventCallback)
      // Registers oninstalled/onupdated/onuninstalled handlers.
      ("tizen.package.setPackageInfoEventListener" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val addr2 = cfg.getAPIAddress(addr_env, 1)
          val addr3 = cfg.getAPIAddress(addr_env, 2)
          val l_r1 = addrToLoc(addr1, Recent)
          val l_r2 = addrToLoc(addr2, Recent)
          val l_r3 = addrToLoc(addr3, Recent)
          val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
          val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
          val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
          val v_1 = getArgValue(h_3, ctx_3, args, "0")
          val es_1 =
            if (v_1._1._5 </ StrTop)
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          val (h_4, es_2) = v_1._2.foldLeft((h_3, TizenHelper.TizenExceptionBot))((_he, l) => {
            val v1 = Helper.Proto(_he._1, l, AbsString.alpha("oninstalled"))
            val v2 = Helper.Proto(_he._1, l, AbsString.alpha("onupdated"))
            val v3 = Helper.Proto(_he._1, l, AbsString.alpha("onuninstalled"))
            val es1 =
              if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val es2 =
              if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val es3 =
              if (v3._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
                Set[WebAPIException](TypeMismatchError)
              else TizenHelper.TizenExceptionBot
            val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(TIZENpackage.loc_pkginfo), T, T, T)))
            val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(TIZENpackage.loc_pkginfo), T, T, T)))
            val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
              update("0", PropValue(ObjectValue(Value(StrTop), T, T, T)))
            val h_4 = _he._1.update(l_r1, o_arr).update(l_r2, o_arr1).update(l_r3, o_arr2)
            val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("PkgInfoEventCB.oninstalled"), Value(v1._2), Value(l_r1))
            val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("PkgInfoEventCB.onupdated"), Value(v2._2), Value(l_r2))
            val h_7 = TizenHelper.addCallbackHandler(h_6, AbsString.alpha("PkgInfoEventCB.onuninstalled"), Value(v3._2), Value(l_r3))
            (h_7, _he._2 ++ es1 ++ es2 ++ es3)
          })
          val est = Set[WebAPIException](SecurityError, UnknownError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ est)
          ((h_4, ctx_3), (he + h_e, ctxe + ctx_e))
        }
      )),
      // unsetPackageInfoEventListener() — no heap effect; may raise
      // SecurityError/UnknownError.
      ("tizen.package.unsetPackageInfoEventListener" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          val est = Set[WebAPIException](SecurityError, UnknownError)
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
          ((h, ctx), (he + h_e, ctxe + ctx_e))
        }
      ))
    )
  }

  override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
  override def getDefMap(): Map[String, AccessFun] = {Map()}
  override def getUseMap(): Map[String, AccessFun] = {Map()}
}
/**
 * Abstract model of the Tizen `PackageInformation` interface: only registers
 * the (member-less) prototype object that `TIZENpackage`'s pkginfo instances
 * point to; the interface itself defines no callable semantics.
 */
object TIZENPackageInformation extends Tizen {
  private val name = "PackageInformation"

  /* predefined locations */
  val loc_proto = newSystemRecentLoc(name + "Proto")

  override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
    (loc_proto, prop_proto)
  )

  /* prototype */
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T)))
  )

  override def getSemanticMap(): Map[String, SemanticFun] = {
    Map()
  }
  override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
  override def getDefMap(): Map[String, AccessFun] = {Map()}
  override def getUseMap(): Map[String, AccessFun] = {Map()}
} | daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENpackage.scala | Scala | bsd-3-clause | 20,760 |
package xlx
import collection.mutable.HashMap
import scala.util.Try
import org.http4s._
import org.http4s.server._
import org.http4s.dsl._
import org.http4s.json4s.jackson._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.JsonDSL._
// Immutable record for a user served by the API; valid users have id > 0 and age >= 0.
case class User(id: Int, firstName : String, lastName: String, age:Int)

// Simple wrapper for a textual operation result ("Success"/"Failure") returned as JSON.
case class ReturnStatus(response: String)
/**
 * In-memory user store keyed by id.
 *
 * On construction the supplied dictionary is sanitised in place: entries whose
 * key does not match the user's id, or whose id is non-positive, or whose age
 * is negative, are dropped.
 */
case class UserRepository(userDictionary: HashMap[Int,User]){
  // `retain` mutates the underlying mutable map and returns it.
  var allUsers = userDictionary retain {(key,value) => (key == value.id && value.id>0 && value.age>=0)}

  /** All currently stored users, in unspecified order. */
  def returnAll(): List[User] = {
    allUsers.values.toList
  }

  /**
   * Looks up a user by id.
   *
   * Single map lookup via `get` instead of the original `keySet.exists`
   * followed by `apply` (two lookups) and the non-idiomatic `Option(null)`.
   *
   * @return Some(user) when present, None otherwise
   */
  def findUser(id: Int): Option[User] = allUsers.get(id)

  /**
   * Inserts or overwrites `user` when it is valid (positive id,
   * non-negative age).
   *
   * @return ReturnStatus("Success") when stored, ReturnStatus("Failure") otherwise
   */
  def postUser(user: User): ReturnStatus = {
    if (user.id > 0 && user.age >= 0) {
      allUsers += (user.id -> user)
      ReturnStatus("Success")
    } else {
      ReturnStatus("Failure")
    }
  }
}
/**
 * http4s service exposing the user repository as a tiny JSON REST API
 * (GET /Users, GET /Users/:id, POST /Users), with json4s-based
 * (de)serialisation.
 */
object Users {
  // json4s default formats, required by `extract[User]`.
  implicit val formats = DefaultFormats

  // Lenient reader: a malformed body decodes to the sentinel User(0,"","",0)
  // (which postUser then rejects) instead of failing the request.
  implicit val userReader = new Reader[User] {
    def read(value: JValue): User = Try(value.extract[User]).getOrElse(User(0,"","",0))
  }
  implicit val userDec = jsonOf[User]

  // Renders a user as a compact JSON object; the sentinel/invalid user
  // (id <= 0) renders as the empty string.
  def userToJson(user:User):String = {
    val userResultJson = (
      ("id" -> user.id) ~
        ("firstName" -> user.firstName) ~
        ("lastName" -> user.lastName) ~
        ("age" -> user.age)
      )
    if (user.id<=0) {""} else {compact(render(userResultJson))}
  }

  // Renders an operation result as {"response": "..."}.
  def returnStatusToJson(returnStatus:ReturnStatus):String = {
    val returnStatusJson = (
      ("response" -> returnStatus.response)
      )
    compact(render(returnStatusJson))
  }

  // Repository seeded with one well-known user.
  var userRepository = UserRepository(HashMap(1->User(1,"Mickey","Mouse",83)))

  val service = HttpService {
    // GET /Users -> JSON array of all users
    case GET -> Root / "Users" =>
      Ok("["+userRepository.returnAll().map(x => userToJson(x)).mkString(",")+"]\\n")
    // GET /Users/:id -> single user; unknown or unparsable id yields 200 with
    // an empty body (NOTE(review): a 404 might be more conventional here).
    case GET -> Root / "Users" / id => {
      userRepository.findUser(Try(id.toInt).getOrElse(-1)) match {
        case Some(user) => Ok(userToJson(user)+"\\n")
        case None => Ok("")
      }
    }
    // POST /Users -> upsert; responds {"response":"Success"} or "Failure"
    case req @ POST -> Root / "Users" =>
      req.as[User] flatMap ( user => Ok(returnStatusToJson(userRepository.postUser(user))+"\\n"))
  }
} | xialingxiao/scala_example | skeletonapi/src/main/scala/xlx/skeletonapi/Users.scala | Scala | apache-2.0 | 2,409 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.examples.test
import org.scalatest._
import tdb.examples._
/**
 * End-to-end checks that each example algorithm runs to completion with
 * result verification enabled.
 */
class AlgorithmTests extends FlatSpec with Matchers {
  Experiment.verbosity = 0
  Experiment.check = true
  Experiment.port = 2553

  /** Command-line flags appended to every experiment configuration. */
  val defaults = Array("--verbosity", "0", "--envHomePath", "asdf")

  /**
   * Workload size, overridable via the TDB_INTENSITY environment variable.
   * Both branches are Strings so the val is not widened to `Any`
   * (the original mixed a String env value with an Int default).
   */
  val intensity: String = sys.env.getOrElse("TDB_INTENSITY", "10")

  "MapTest" should "run map successfully." in {
    val conf = new ExperimentConf(
      Array("--algorithms", "map",
        "--chunkSizes", "1",
        "--counts", intensity,
        "--files", "input.txt",
        "--partitions", "1", "4") ++ defaults)
    Experiment.run(conf)
  }

  "PageRankTest" should "run page rank successfully." in {
    val conf = new ExperimentConf(Array(
      "--algorithms", "pgrank",
      "--chunkSizes", "1",
      "--files", "graph100.txt",
      "--partitions", "1") ++ defaults)
    Experiment.run(conf)
  }

  "WordcountTest" should "run wordcount successfully." in {
    val conf = new ExperimentConf(Array(
      "--algorithms", "wc",
      "--chunkSizes", "1", "4",
      "--counts", intensity,
      "--partitions", "1", "4") ++ defaults)
    Experiment.run(conf)
  }
}
| twmarshall/tdb | core/src/test/scala/tdb/AlgorithmTests.scala | Scala | apache-2.0 | 1,806 |
package models
import play.api.libs.json._
import play.api.libs.json.JsString

import scala.util.{Failure, Success, Try}
// Closed hierarchy of mobile network operators, each tied to a Country.
// Being `sealed abstract`, the only values are the case objects below.
sealed abstract case class NetworkProvider(country: Country.Country) {
  // wire name used in JSON and in NetworkProvider.apply
  val name: String
}

object personal extends NetworkProvider(Country.argentina) {
  val name = "personal"
}

object movistar extends NetworkProvider(Country.argentina) {
  val name = "movistar"
}
object NetworkProvider {
  /**
   * Resolves a provider by its wire name.
   *
   * @throws MatchError if `provider` is not a known provider name
   */
  def apply(provider: String): NetworkProvider = provider match {
    case "personal" => personal
    case "movistar" => movistar
  }

  implicit object NetworkProviderFormat extends Format[NetworkProvider] {
    // Honour the Format contract by returning JsError instead of letting a
    // MatchError escape when the provider name is unknown (the original threw).
    override def reads(json: JsValue): JsResult[NetworkProvider] = json match {
      case JsString(provider) =>
        Try(NetworkProvider(provider)) match {
          case Success(np) => JsSuccess(np)
          case Failure(_) => JsError(s"Unknown NetworkProvider name: $provider")
        }
      case _ => JsError("Invalid JsValue type for NetworkProvider conversion: expected JsString")
    }

    override def writes(np: NetworkProvider) = JsString(np.name)
  }
}
| lukiano/networkblame | app/models/NetworkProvider.scala | Scala | mit | 958 |
object Ex0207 extends App {
  /**
   * Product of the Unicode code points of the characters in `str`.
   *
   * Accumulates in a Long: for "Hello" the product is 9415087488, which
   * exceeds Int.MaxValue, so the original Int accumulator silently wrapped
   * around and printed a wrong value.
   */
  def productOfUnicodes(str: String): Long =
    str.foldLeft(1L)((acc, ch) => acc * ch.toInt)

  /** Prints the product of the Unicode values of the characters in `str`. */
  def printProductOfUnicodes(str: String): Unit =
    println(productOfUnicodes(str))

  printProductOfUnicodes("Hello")
}
| mklinga/projects | scala/impatient/2/Ex0207.scala | Scala | gpl-3.0 | 177 |
package com.twitter.finagle.netty3
import com.twitter.conversions.time._
import com.twitter.finagle.netty3.channel.{
ChannelRequestStatsHandler,
ChannelStatsHandler,
WriteCompletionTimeoutHandler
}
import com.twitter.finagle.netty3.ssl.server.SslServerConnectHandler
import com.twitter.finagle.param.Label
import com.twitter.finagle.ssl.server.SslServerConfiguration
import com.twitter.finagle.stats.{InMemoryStatsReceiver, NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.Stack
import com.twitter.util.Duration
import javax.net.ssl.SSLEngine
import org.jboss.netty.channel.{Channels, ChannelHandler, ChannelPipeline, SimpleChannelHandler}
import org.jboss.netty.handler.ssl.SslHandler
import org.jboss.netty.handler.timeout.ReadTimeoutHandler
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
@RunWith(classOf[JUnitRunner])
class Netty3ListenerTest extends FunSuite with MockitoSugar {

  /** Returns the first handler of type T found in `pipeline`, if any. */
  private[this] def findHandlerInPipeline[T <: ChannelHandler: ClassTag](
    pipeline: ChannelPipeline
  ): Option[T] = {
    val clazz = implicitly[ClassTag[T]].runtimeClass
    pipeline.toMap.asScala.values
      .find {
        case h if clazz.isInstance(h) => true
        case _ => false
      }
      .map(_.asInstanceOf[T])
  }

  // Netty3Listener.apply return a Listener[In, Out] but tests require a concrete Netty3Listener.
  private[this] def makeNetty3Listener(params: Stack.Params): Netty3Listener[Int, Int] = {
    val listener = Netty3Listener[Int, Int](Channels.pipelineFactory(Channels.pipeline()), params)
    listener.asInstanceOf[Netty3Listener[Int, Int]]
  }

  /** Builds a server pipeline from `params` so tests can inspect which handlers were installed. */
  private[this] def makeListenerPipeline(
    params: Stack.Params,
    statsReceiver: StatsReceiver = NullStatsReceiver
  ): ChannelPipeline = {
    val listener = makeNetty3Listener(params)
    val pipelineFactory =
      listener.newServerPipelineFactory(statsReceiver, () => new SimpleChannelHandler)
    val pipeline = pipelineFactory.getPipeline()
    assert(pipeline != null)
    pipeline
  }

  test("creates a Netty3Listener instance based on Stack params") {
    val params = Stack.Params.empty
    val pipelineFactory = Channels.pipelineFactory(Channels.pipeline())
    val listener = Netty3Listener[Int, Int](pipelineFactory, params)
    assert(listener != null)
  }

  test("ChannelSnooper is not added by default") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params)
    val channelSnooper = findHandlerInPipeline[ChannelSnooper](pipeline)
    assert(channelSnooper.isEmpty)
  }

  test("ChannelSnooper is added when Verbose param is true") {
    val params = Stack.Params.empty + Label("name") + Transport.Verbose(true)
    val pipeline = makeListenerPipeline(params)
    val channelSnooper = findHandlerInPipeline[ChannelSnooper](pipeline)
    assert(channelSnooper.nonEmpty)
  }

  test("ChannelStatsHandler is not added when the statsReceiver is a NullStatsReceiver") {
    // The statsReceiver used is the one passed in to newServerPipelineFactory.
    // It is not determined by the Stats params.
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params, NullStatsReceiver)
    val statsHandler = findHandlerInPipeline[ChannelStatsHandler](pipeline)
    assert(statsHandler.isEmpty)
  }

  test("ChannelStatsHandler is added when the statsReceiver is not a NullStatsReceiver") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params, new InMemoryStatsReceiver)
    val statsHandler = findHandlerInPipeline[ChannelStatsHandler](pipeline)
    assert(statsHandler.nonEmpty)
  }

  test("ChannelRequestStatsHandler is not added when the statsReceiver is a NullStatsReceiver") {
    // Like ChannelStatsHandler, the statsReceiver used is the one passed
    // in to newServerPipelineFactory. It is not determined by the Stats params.
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params, NullStatsReceiver)
    val statsHandler = findHandlerInPipeline[ChannelRequestStatsHandler](pipeline)
    assert(statsHandler.isEmpty)
  }

  test("ChannelRequestStatsHandler is added when the statsReceiver is not a NullStatsReceiver") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params, new InMemoryStatsReceiver)
    val statsHandler = findHandlerInPipeline[ChannelRequestStatsHandler](pipeline)
    assert(statsHandler.nonEmpty)
  }

  test("ReadTimeoutHandler is not added by default") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params)
    val readHandler = findHandlerInPipeline[ReadTimeoutHandler](pipeline)
    assert(readHandler.isEmpty)
  }

  test("ReadTimeoutHandler is not added if the read timeout is Duration.Top") {
    val params = Stack.Params.empty + Label("name") +
      Transport.Liveness(Duration.Top, 1.second, None)
    val pipeline = makeListenerPipeline(params)
    val readHandler = findHandlerInPipeline[ReadTimeoutHandler](pipeline)
    assert(readHandler.isEmpty)
  }

  test("ReadTimeoutHandler is added if the read timeout is less than Duration.Top") {
    val params = Stack.Params.empty + Label("name") +
      Transport.Liveness(1.second, Duration.Top, None)
    val pipeline = makeListenerPipeline(params)
    val readHandler = findHandlerInPipeline[ReadTimeoutHandler](pipeline)
    assert(readHandler.nonEmpty)
  }

  test("WriteCompletionTimeoutHandler is not added by default") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params)
    val writeHandler = findHandlerInPipeline[WriteCompletionTimeoutHandler](pipeline)
    assert(writeHandler.isEmpty)
  }

  test("WriteCompletionTimeoutHandler is not added if the write timeout is Duration.Top") {
    val params = Stack.Params.empty + Label("name") +
      Transport.Liveness(1.second, Duration.Top, None)
    val pipeline = makeListenerPipeline(params)
    val writeHandler = findHandlerInPipeline[WriteCompletionTimeoutHandler](pipeline)
    assert(writeHandler.isEmpty)
  }

  test("WriteCompletionTimeoutHandler is added if the write timeout is less than Duration.Top") {
    val params = Stack.Params.empty + Label("name") +
      Transport.Liveness(Duration.Top, 1.second, None)
    val pipeline = makeListenerPipeline(params)
    val writeHandler = findHandlerInPipeline[WriteCompletionTimeoutHandler](pipeline)
    assert(writeHandler.nonEmpty)
  }

  test("SslHandler is not added by default") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params)
    val sslHandler = findHandlerInPipeline[SslHandler](pipeline)
    assert(sslHandler.isEmpty)
  }

  test("SslHandler is not added if the SSL/TLS server configuration param is None") {
    val params = Stack.Params.empty + Label("name") + Transport.ServerSsl(None)
    val pipeline = makeListenerPipeline(params)
    val sslHandler = findHandlerInPipeline[SslHandler](pipeline)
    assert(sslHandler.isEmpty)
  }

  test("SslHandler is added if the SSL/TLS server configuration param is configured") {
    // The configuration alone drives handler installation; the unused
    // mock[SSLEngine] from the original version has been removed.
    val params = Stack.Params.empty + Label("name") +
      Transport.ServerSsl(Some(SslServerConfiguration()))
    val pipeline = makeListenerPipeline(params)
    val sslHandler = findHandlerInPipeline[SslHandler](pipeline)
    assert(sslHandler.nonEmpty)
  }

  test("SslServerConnectHandler is not added by default") {
    val params = Stack.Params.empty + Label("name")
    val pipeline = makeListenerPipeline(params)
    val sslConnectHandler = findHandlerInPipeline[SslServerConnectHandler](pipeline)
    assert(sslConnectHandler.isEmpty)
  }

  test("SslServerConnectHandler is not added if the SSL/TLS server config param is None") {
    val params = Stack.Params.empty + Label("name") + Transport.ServerSsl(None)
    val pipeline = makeListenerPipeline(params)
    val sslConnectHandler = findHandlerInPipeline[SslServerConnectHandler](pipeline)
    assert(sslConnectHandler.isEmpty)
  }

  test("SslServerConnectHandler is added if the SSL/TLS server config param is configured") {
    // As above, no SSLEngine mock is needed.
    val params = Stack.Params.empty + Label("name") +
      Transport.ServerSsl(Some(SslServerConfiguration()))
    val pipeline = makeListenerPipeline(params)
    val sslConnectHandler = findHandlerInPipeline[SslServerConnectHandler](pipeline)
    assert(sslConnectHandler.nonEmpty)
  }

  test("FinagleBridge is added by default") {
    class TestBridgeHandler extends SimpleChannelHandler {}
    val testBridgeHandler = new TestBridgeHandler()
    val params = Stack.Params.empty + Label("name")
    val listener = makeNetty3Listener(params)
    val pipelineFactory =
      listener.newServerPipelineFactory(NullStatsReceiver, () => testBridgeHandler)
    val pipeline = pipelineFactory.getPipeline()
    val bridgeHandler = findHandlerInPipeline[TestBridgeHandler](pipeline)
    assert(bridgeHandler.nonEmpty)
    assert(bridgeHandler.exists(_ == testBridgeHandler))
  }
}
| mkhq/finagle | finagle-netty3/src/test/scala/com/twitter/finagle/netty3/Netty3ListenerTest.scala | Scala | apache-2.0 | 9,274 |
// Copyright (C) 2014-2017 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package il.co.codeguru.extreme.engine
import il.co.codeguru.extreme.engine.MachineInstructionOpcode._
import il.co.codeguru.extreme.engine.Register._
import il.co.codeguru.extreme.engine.datatypes.{M86Byte, M86Word}
import org.scalatest.FunSuite
/**
*
* @author Romi Kuntsman <romik.code@gmail.com>
* @since 2016-12-26
*/
class MachineTest extends FunSuite {

  test("mov [bx],al") {
    // Raw machine code; the trailing comment preserves the assembler listing
    // (offset / encoded bytes / mnemonic).
    val program = Vector(
      0x88, 0x07 // 00000005 8807 mov [bx],al
    )
    val (machine: Machine, cpu: Cpu) = getMachineWithProgram(program)
    // IP starts at 0; fetching decodes the opcode and advances IP by its width.
    assert(cpu.state.ip == M86Word(0x00000000))
    val opcode = machine.fetchNextOpcode()
    assert(opcode == MOV(Mem8Operand(MemoryBaseAddressing(BX)), Reg8Operand(AL)))
    assert(cpu.state.ip == M86Word(0x00000002))
  }

  test("jmp short 0x5") {
    val program = Vector(
      0xEB, 0xFB // 00000008 EBFB jmp short 0x5
    )
    val (machine: Machine, cpu: Cpu) = getMachineWithProgram(program)
    assert(cpu.state.ip == M86Word(0x00000000))
    val opcode = machine.fetchNextOpcode()
    // The decoded short-jump operand is not asserted yet; see the FixMe below.
    //val value = M86Byte(0x5)
    //FixMe: assert(opcode == JMP(ShortLabelOperand(value)))
    assert(cpu.state.ip == M86Word(0x00000002))
  }

  test("Bomber: simplest survivor") {
    // A minimal "survivor" program: writes 0xCC at [bx], increments bx, loops.
    val program = Vector(
      0xB0, 0xCC, // 00000000 B0CC mov al,0xcc
      0xBB, 0x00, 0x00, // 00000002 BB0000 mov bx,0x0
      0x88, 0x07, // 00000005 8807 mov [bx],al
      0x43, // 00000007 43 inc bx
      0xEB, 0xFB // 00000008 EBFB jmp short 0x5
    )
    val (machine: Machine, cpu: Cpu) = getMachineWithProgram(program)
    assert(cpu.state.ip == M86Word(0x00000000))
    // Fetch each instruction in turn and check the decoded opcode and the
    // advancing instruction pointer.
    val opcode1 = machine.fetchNextOpcode()
    val value1: Short = 0xCC
    assert(opcode1 == MOV(Reg8Operand(AL), Immed8Operand(M86Byte(value1))))
    assert(cpu.state.ip == M86Word(0x00000002))
    val opcode2 = machine.fetchNextOpcode()
    val value2: Short = 0x00
    assert(opcode2 == MOV(Reg16Operand(BX), Immed16Operand(M86Word(value2))))
    assert(cpu.state.ip == M86Word(0x00000005))
    val opcode3 = machine.fetchNextOpcode()
    assert(opcode3 == MOV(Mem8Operand(MemoryBaseAddressing(BX)), Reg8Operand(AL)))
    assert(cpu.state.ip == M86Word(0x00000007))
    val opcode4 = machine.fetchNextOpcode()
    assert(opcode4 == INC(Reg16Operand(BX)))
    assert(cpu.state.ip == M86Word(0x00000008))
    val opcode5 = machine.fetchNextOpcode()
    // Same FixMe as above: JMP operand decoding is unverified.
    //val value5 = M86Byte(0x5)
    //FixMe: assert(opcode5 == JMP(ShortLabelOperand(value5)))
  }

  test("Execute and validate state: mov ax,42; mov bx,ax") {
    val program = Vector(0xB8, 0x2A, 0x00, 0x89, 0xC3)
    val (machine: Machine, cpu: Cpu) = getMachineWithProgram(program)
    // initial state
    val state0 = cpu.state
    assert(state0.ip == M86Word(0))
    assert(state0.ax == M86Word(0))
    assert(state0.bx == M86Word(0))
    // mov ax,42
    val opcode1 = machine.fetchNextOpcode()
    assert(opcode1 == MOV(Reg16Operand(AX), Immed16Operand(M86Word(42))))
    // runOpcode returns the instruction's timing (presumably clock cycles —
    // TODO confirm the unit); 8 for each of these MOV forms.
    val time1 = machine.runOpcode(opcode1)
    assert(time1 == 8)
    val state1 = cpu.state
    assert(state1.ip == M86Word(3))
    assert(state1.ax == M86Word(42))
    assert(state1.bx == M86Word(0))
    // mov bx,ax
    val opcode2 = machine.fetchNextOpcode()
    assert(opcode2 == MOV(Reg16Operand(BX), Reg16Operand(AX)))
    val time2 = machine.runOpcode(opcode2)
    assert(time2 == 8)
    val state2 = cpu.state
    assert(state2.ip == M86Word(5))
    assert(state2.ax == M86Word(42))
    assert(state2.bx == M86Word(42))
  }

  // Boots a fresh Machine whose memory is initialized with `program` (one
  // M86Byte per Int) and attaches a Cpu in its initial state.
  // Returns the (machine, cpu) pair used by every test above.
  private def getMachineWithProgram(program: Vector[Int]) = {
    val listener = NullMemoryAccessListener
    val machine = new Machine()
    val initialCpuState = new CpuState()
    val cpu = new Cpu(initialCpuState, machine)
    val memoryInit: Vector[M86Byte] = program.map(M86Byte(_))
    machine.boot(memoryInit, listener)
    machine.setActiveCpu(cpu)
    (machine, cpu)
  }
}
| codeguru-il/codeguru-extreme | src/test/scala/il/co/codeguru/extreme/engine/MachineTest.scala | Scala | apache-2.0 | 4,736 |
package chrome.contextMenus
import chrome.ChromeAPI
import chrome.contextMenus.bindings.{CreateProperties, MenuInfo, UpdateProperties}
import chrome.events.EventSource
import chrome.events.EventSourceImplicits._
import chrome.permissions.APIPermission
import chrome.tabs.bindings.Tab
import scala.concurrent.{Future, Promise}
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js
import scala.scalajs.js.|
object ContextMenus extends ChromeAPI {

  val ContextMenus = APIPermission("contextMenus", "permission to add items to content menus")

  /** Fired when a context-menu item is clicked; yields the menu info and the active tab. */
  def onClicked: EventSource[(MenuInfo, Tab)] = bindings.ContextMenus.onClicked

  val requiredPermissions: Set[APIPermission] = Set(ContextMenus)

  /** Creates a new context-menu item; returns the id assigned by the browser. */
  def create(createProperties: CreateProperties): String | Int = bindings.ContextMenus.create(createProperties)

  /** Convenience overload building the CreateProperties from plain values. */
  def create(id: String, title: String, contexts: List[String]): String | Int =
    create(bindings.CreateProperties(id, title, contexts = js.Array(contexts: _*)))

  /** Updates an existing menu item in place. */
  def update(id: String | Int, properties: UpdateProperties): Unit = bindings.ContextMenus.update(id, properties)

  /**
   * Removes the menu item with the given id.
   *
   * Bug fix: the supplied `callback` was previously ignored; it is now invoked
   * when the browser confirms the removal, just before the returned future
   * completes with the result of the underlying `remove` call.
   */
  def remove(menuItemId: String | Int, callback: () => Unit): Future[String | Int] = {
    val promise = Promise[Unit]
    val result = bindings.ContextMenus.remove(menuItemId, js.Any.fromFunction0[Unit] { () =>
      callback()
      // Complete with the unit value `()`, not the `Unit` companion object.
      promise.success(())
    })
    promise.future.map(_ => result)
  }

  /** Removes all context-menu items added by this extension. */
  def removeAll(): Future[Unit] = {
    val promise = Promise[Unit]
    bindings.ContextMenus.removeAll(js.Any.fromFunction0[Unit] { () =>
      promise.success(())
    })
    promise.future
  }
}
| antonkulaga/bio-nlp | chrome-bio/src/main/scala/chrome/contextMenus/ContextMenus.scala | Scala | mpl-2.0 | 1,607 |
import sbt._
import sbt.Keys._
import com.typesafe.sbt.SbtAspectj._
import com.typesafe.sbt.SbtAspectj.AspectjKeys._
object AspectJ {
  // sbt-aspectj settings (pre-1.0 plugin, sbt 0.13 `<++=` syntax):
  // compile aspects without weaving at compile time, and pass the weaver
  // agent's JVM options to forked `test` and `run` so weaving happens at
  // load time.
  lazy val settings = aspectjSettings ++ Seq(
    aspectjVersion := Dependencies.Versions.aspectj,
    compileOnly in Aspectj := true,
    // Forking is required for the weaver javaOptions below to take effect.
    fork in Test := true,
    javaOptions in Test <++= weaverOptions in Aspectj,
    javaOptions in run <++= weaverOptions in Aspectj,
    // Silence AspectJ xlint warnings about unresolvable absolute type names.
    lintProperties in Aspectj += "invalidAbsoluteTypeName = ignore"
  )
}
| Coiney/akka-patterns | project/AspectJ.scala | Scala | bsd-3-clause | 520 |
package org.bitcoins.core

/**
 * This package contains the different HD (hierarchical deterministic) wallet
 * key derivation paths.
 *
 * @see [[https://bitcoin.org/en/developer-guide#hierarchical-deterministic-key-creation Hierarchical Deterministic Key Creation]]
 *      on the Bitcoin.org Developer Guide
 * @see [[https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki BIP32]],
 *      Hierarchical Deterministic Wallets
 */
package object hd
| bitcoin-s/bitcoin-s-core | core/src/main/scala/org/bitcoins/core/hd/package.scala | Scala | mit | 406 |
package dk.tennis.compare.rating.multiskill.infer.matchprob.givenskills
import dk.bayes.math.gaussian.Gaussian
import dk.tennis.compare.rating.multiskill.infer.perfdiffgivenskills.inferPerfDiffGivenSkills
import scala.math._
import dk.tennisprob.TennisProbFormulaCalc
import dk.tennisprob.TennisProbCalc.MatchTypeEnum._
import dk.tennis.compare.rating.multiskill.infer.perfdiffgivenskills.inferPerfDiffGivenSkills
import dk.tennis.compare.rating.multiskill.infer.outcome.InferOutcomeGivenPerfDiff
/**
 * Computes the probability that player 1 wins a tennis match, given either
 * the players' skill distributions or the per-game performance differences.
 */
object inferMatchProbGivenSkills {

  /**
   * Infers the per-game performance difference for each player on serve
   * (server skill vs opponent's return skill) and delegates to the
   * perf-diff based overload.
   */
  def apply(p1SkillOnServe: Gaussian, p1SkillOnReturn: Gaussian, p2SkillOnServe: Gaussian, p2SkillOnReturn: Gaussian,
            logPerfStdDev: Double, numOfSets: Int): Double = {
    val p1ServeDiff = inferPerfDiffGivenSkills(p1SkillOnServe, p2SkillOnReturn, logPerfStdDev).perfDiff
    val p2ServeDiff = inferPerfDiffGivenSkills(p2SkillOnServe, p1SkillOnReturn, logPerfStdDev).perfDiff
    apply(p1ServeDiff, p2ServeDiff, numOfSets)
  }

  /**
   * Match win probability for player 1 from each player's per-game
   * performance difference while serving.
   *
   * @param numOfSets 2 selects THREE_SET_MATCH, 3 selects FIVE_SET_MATCH;
   *                  anything else is rejected.
   */
  def apply(p1OnServeGamePerfDiff: Gaussian, p2OnServeGamePerfDiff: Gaussian, numOfSets: Int): Double = {
    // Probability that each player wins a game on his own serve.
    val p1WinOnServe = exp(InferOutcomeGivenPerfDiff.loglik(p1OnServeGamePerfDiff, true))
    val p2WinOnServe = exp(InferOutcomeGivenPerfDiff.loglik(p2OnServeGamePerfDiff, true))

    val matchType = numOfSets match {
      case 2 => THREE_SET_MATCH
      case 3 => FIVE_SET_MATCH
      case _ => throw new IllegalArgumentException("Incorrect number of sets")
    }

    // Second argument is p1's win probability when *returning*: 1 - p2WinOnServe.
    TennisProbFormulaCalc.matchProb(p1WinOnServe, 1 - p2WinOnServe, matchType)
  }
}
package com.twitter.finagle.mysql.protocol
import com.twitter.finagle.mysql.ClientError
import java.nio.charset.{Charset => JCharset}
import java.nio.ByteOrder
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
/**
* The BufferReader and BufferWriter interfaces provide methods for
* reading/writing primitive data types exchanged between the client/server.
* This includes all primitive numeric types and strings (null-terminated and length coded).
* All Buffer methods are side-effecting. That is, each call to a read* or write*
* method will increase the current offset.
*
* Both BufferReader and BufferWriter assume bytes are written
* in little endian. This conforms with the MySQL protocol.
*/
/**
 * Constants and helpers shared by BufferReader/BufferWriter for the MySQL
 * client/server wire protocol. All multi-byte values are little endian.
 */
object Buffer {
  /** Sentinel returned when a length coded binary denotes a SQL NULL value. */
  val NULL_LENGTH = -1

  val EMPTY_STRING = new String
  val EMPTY_BYTE_ARRAY = new Array[Byte](0)

  /** Raised when the stream cannot be decoded (client/server out of sync). */
  case object CorruptBufferException
    extends Exception("Corrupt data or client/server are out of sync.")

  /**
   * Calculates the size required to store a length according to the MySQL
   * protocol for length coded binary: 1, 3, 4 or 9 bytes.
   */
  def sizeOfLen(l: Long) = l match {
    case _ if l < 251      => 1
    case _ if l < 65536    => 3
    case _ if l < 16777216 => 4
    case _                 => 9
  }

  /**
   * Wraps the arrays into a ChannelBuffer with the protocol's little-endian
   * byte order. A wrappedBuffer avoids copying the underlying arrays.
   */
  def toChannelBuffer(bytes: Array[Byte]*) =
    ChannelBuffers.wrappedBuffer(ByteOrder.LITTLE_ENDIAN, bytes: _*)
}
trait BufferReader {
  /**
   * Buffer capacity.
   */
  def capacity: Int = array.size

  /**
   * Current offset in the buffer.
   */
  def offset: Int

  /**
   * Access the underlying array. Note, this is not always a safe operation
   * because the buffer could contain a composition of arrays, in which case
   * this will throw an exception.
   */
  def array: Array[Byte]

  /**
   * Denotes if the buffer is readable upto the given width
   * based on the current offset.
   */
  def readable(width: Int): Boolean

  // Primitive little-endian reads; each advances the offset by its width.
  def readByte(): Byte
  def readUnsignedByte(): Short
  def readShort(): Short
  def readUnsignedShort(): Int
  def readInt24(): Int
  def readUnsignedInt24(): Int
  def readInt(): Int
  def readUnsignedInt(): Long
  def readLong(): Long
  def readFloat(): Float
  def readDouble(): Double

  /**
   * Increases offset by n.
   */
  def skip(n: Int): Unit

  /**
   * Consumes the rest of the buffer and returns
   * it in a new Array[Byte].
   * @return Array[Byte] containing the rest of the buffer.
   */
  def takeRest(): Array[Byte] = take(capacity - offset)

  /**
   * Consumes n bytes in the buffer and
   * returns them in a new Array.
   * @return An Array[Byte] containing bytes from offset to offset+n
   */
  def take(n: Int): Array[Byte]

  /**
   * Reads a MySQL data field. A variable-length numeric value.
   * Depending on the first byte, reads a different width from
   * the buffer. For more info, refer to MySQL Client/Server protocol
   * documentation.
   * @return a numeric value representing the number of
   * bytes expected to follow, or Buffer.NULL_LENGTH for SQL NULL.
   */
  def readLengthCodedBinary(): Int = {
    val firstByte = readUnsignedByte()
    if (firstByte < 251)
      firstByte
    else
      firstByte match {
        case 251 => Buffer.NULL_LENGTH
        case 252 => readUnsignedShort()
        case 253 => readUnsignedInt24()
        // 254 Indicates a set of bytes with length >= 2^24.
        // The current implementation does not support
        // this.
        case 254 =>
          throw new ClientError("BufferReader: LONG_BLOB is not supported!")
        // readLong()
        case _ =>
          // 255 is not a valid length prefix.
          throw Buffer.CorruptBufferException
      }
  }

  /**
   * Reads a null-terminated string where
   * null is denoted by '\0'. Uses Charset.defaultCharset
   * to decode strings.
   * @return a null-terminated String starting at offset.
   */
  def readNullTerminatedString(): String = {
    val start = offset
    var length = 0
    // Scan until the terminator; readByte() advances the offset past it.
    while (readByte() != 0x00)
      length += 1
    this.toString(start, length, Charset.defaultCharset)
  }

  /**
   * Reads a length encoded string according to the MySQL
   * Client/Server protocol. Uses Charset.defaultCharset to
   * decode strings. For more details refer to MySQL
   * documentation.
   * @return a MySQL length coded String starting at
   * offset, or null for a SQL NULL value.
   */
  def readLengthCodedString(): String = {
    val length = readLengthCodedBinary()
    if (length == Buffer.NULL_LENGTH)
      null
    else if (length == 0)
      Buffer.EMPTY_STRING
    else {
      val start = offset
      skip(length)
      this.toString(start, length, Charset.defaultCharset)
    }
  }

  /**
   * Reads a length encoded set of bytes according to the MySQL
   * Client/Server protocol. This is indentical to a length coded
   * string except the bytes are returned raw.
   * @return an Array[Byte] containing the length coded set of
   * bytes starting at offset, or null for a SQL NULL value.
   */
  def readLengthCodedBytes(): Array[Byte] = {
    val len = readLengthCodedBinary()
    if (len == Buffer.NULL_LENGTH)
      null
    else if (len == 0)
      Buffer.EMPTY_BYTE_ARRAY
    else
      take(len)
  }

  /**
   * Returns the bytes from start to start+length
   * into a string using the given java.nio.charset.Charset.
   */
  def toString(start: Int, length: Int, charset: JCharset) =
    new String(array, start, length, charset)

  /**
   * Returns a Netty ChannelBuffer representing
   * the underlying array. The ChannelBuffer
   * is guaranteed ByteOrder.LITTLE_ENDIAN.
   */
  def toChannelBuffer: ChannelBuffer
}
object BufferReader {

  /**
   * Creates a BufferReader from an Array[Byte].
   * @param bytes Byte array to read from.
   * @param startOffset initial offset.
   */
  def apply(bytes: Array[Byte], startOffset: Int = 0): BufferReader = {
    require(bytes != null)
    require(startOffset >= 0)
    // wrappedBuffer avoids copying `bytes`; reads reflect the original array.
    val underlying = Buffer.toChannelBuffer(bytes)
    underlying.readerIndex(startOffset)
    new BufferReaderImpl(underlying)
  }

  /**
   * Creates a BufferReader from a Netty ChannelBuffer.
   * The ChannelBuffer must have ByteOrder.LITTLE_ENDIAN.
   */
  def apply(underlying: ChannelBuffer): BufferReader = {
    require(underlying.order == ByteOrder.LITTLE_ENDIAN)
    new BufferReaderImpl(underlying)
  }

  /**
   * BufferReader implementation backed by a Netty ChannelBuffer.
   * Every read delegates to the ChannelBuffer, which tracks the offset
   * via its readerIndex.
   */
  private[this] class BufferReaderImpl(underlying: ChannelBuffer) extends BufferReader {
    override def capacity = underlying.capacity
    def offset = underlying.readerIndex
    def array = underlying.array
    def readable(width: Int) = underlying.readableBytes >= width

    def readByte(): Byte = underlying.readByte()
    def readUnsignedByte(): Short = underlying.readUnsignedByte()
    def readShort(): Short = underlying.readShort()
    def readUnsignedShort(): Int = underlying.readUnsignedShort()
    def readInt24(): Int = underlying.readMedium()
    def readUnsignedInt24(): Int = underlying.readUnsignedMedium()
    def readInt(): Int = underlying.readInt()
    def readUnsignedInt(): Long = underlying.readUnsignedInt()
    def readLong(): Long = underlying.readLong()
    def readFloat() = underlying.readFloat()
    def readDouble() = underlying.readDouble()
    def skip(n: Int) = underlying.skipBytes(n)

    def take(n: Int) = {
      val res = new Array[Byte](n)
      underlying.readBytes(res)
      res
    }

    /**
     * Forward to ChannelBuffer in case underlying is a composition of
     * arrays.
     */
    override def toString(start: Int, length: Int, charset: JCharset) =
      underlying.toString(start, length, charset)

    def toChannelBuffer = underlying
  }
}
trait BufferWriter {
  /**
   * Buffer capacity.
   */
  def capacity: Int = array.size

  /**
   * Current writer offset.
   */
  def offset: Int

  /**
   * Access the underlying array. Note, this is not always a safe operation
   * because the buffer could contain a composition of arrays, in which case
   * this will throw an exception.
   */
  def array: Array[Byte]

  /**
   * Denotes if the buffer is writable upto the given width
   * based on the current offset.
   */
  def writable(width: Int): Boolean

  // Primitive little-endian writes; each advances the offset and returns
  // `this` for chaining.
  def writeBoolean(b: Boolean): BufferWriter
  def writeByte(n: Int): BufferWriter
  def writeShort(n: Int): BufferWriter
  def writeInt24(n: Int): BufferWriter
  def writeInt(n: Int): BufferWriter
  def writeLong(n: Long): BufferWriter
  def writeFloat(f: Float): BufferWriter
  def writeDouble(d: Double): BufferWriter
  def skip(n: Int): BufferWriter

  /**
   * Fills the rest of the buffer with the given byte.
   * @param b Byte used to fill.
   */
  def fillRest(b: Byte) = fill(capacity - offset, b)

  /**
   * Fills the buffer from current offset to offset+n with b.
   * @param n width to fill
   * @param b Byte used to fill.
   */
  def fill(n: Int, b: Byte) = {
    (offset until offset + n) foreach { j => writeByte(b) }
    this
  }

  /**
   * Writes bytes onto the buffer.
   * @param bytes Array[Byte] to copy onto the buffer.
   */
  def writeBytes(bytes: Array[Byte]): BufferWriter

  /**
   * Writes a length coded binary according the the MySQL
   * Client/Server protocol. Refer to MySQL documentation for
   * more information.
   */
  def writeLengthCodedBinary(length: Long): BufferWriter = {
    if (length < 251) {
      writeByte(length.toInt)
    } else if (length < 65536) {
      writeByte(252)
      writeShort(length.toInt)
    } else if (length < 16777216) {
      writeByte(253)
      writeInt24(length.toInt)
    } else {
      writeByte(254)
      writeLong(length)
    }
  }

  /**
   * Writes a null terminated string onto the buffer where
   * '\0' denotes null. Uses Charset.defaultCharset to encode the given
   * String.
   * @param s String to write.
   */
  def writeNullTerminatedString(s: String): BufferWriter = {
    writeBytes(s.getBytes(Charset.defaultCharset))
    writeByte('\0')
    this
  }

  /**
   * Writes a length coded string using the MySQL Client/Server
   * protocol. Uses Charset.defaultCharset to encode the given
   * String.
   *
   * Bug fix: the length prefix is now the encoded *byte* length rather than
   * the character count (`s.length`), so strings containing multi-byte
   * characters are framed correctly.
   *
   * @param s String to write to buffer.
   */
  def writeLengthCodedString(s: String): BufferWriter = {
    val bytes = s.getBytes(Charset.defaultCharset)
    writeLengthCodedBinary(bytes.length)
    writeBytes(bytes)
    this
  }

  /**
   * Writes a length coded set of bytes according to the MySQL
   * client/server protocol.
   */
  def writeLengthCodedBytes(bytes: Array[Byte]): BufferWriter = {
    writeLengthCodedBinary(bytes.length)
    writeBytes(bytes)
    this
  }

  /**
   * Returns a Netty ChannelBuffer representing
   * the underlying buffer. The ChannelBuffer
   * is guaranteed ByteOrder.LITTLE_ENDIAN.
   */
  def toChannelBuffer: ChannelBuffer
}
object BufferWriter {

  /**
   * Creates a BufferWriter from an Array[Byte].
   * @param bytes Byte array to read from.
   * @param startOffset initial offset.
   */
  def apply(bytes: Array[Byte], startOffset: Int = 0): BufferWriter = {
    require(bytes != null)
    require(startOffset >= 0)
    // Note, a wrappedBuffer avoids copying the the array.
    val underlying = Buffer.toChannelBuffer(bytes)
    underlying.writerIndex(startOffset)
    new BufferWriterImpl(underlying)
  }

  /**
   * Creates a BufferWriter from a Netty ChannelBuffer.
   * The ChannelBuffer must have ByteOrder.LITTLE_ENDIAN.
   */
  def apply(underlying: ChannelBuffer): BufferWriter = {
    require(underlying.order == ByteOrder.LITTLE_ENDIAN)
    new BufferWriterImpl(underlying)
  }

  /**
   * BufferWriter implementation backed by a Netty ChannelBuffer.
   * Every write delegates to the ChannelBuffer, which tracks the offset
   * via its writerIndex; each method returns `this` for chaining.
   */
  private[this] class BufferWriterImpl(underlying: ChannelBuffer) extends BufferWriter {
    override def capacity = underlying.capacity
    def offset = underlying.writerIndex
    def array = underlying.array

    def writable(width: Int = 1): Boolean = underlying.writableBytes >= width

    def writeBoolean(b: Boolean): BufferWriter = if(b) writeByte(1) else writeByte(0)

    def writeByte(n: Int): BufferWriter = {
      underlying.writeByte(n)
      this
    }

    def writeShort(n: Int): BufferWriter = {
      underlying.writeShort(n)
      this
    }

    def writeInt24(n: Int): BufferWriter = {
      underlying.writeMedium(n)
      this
    }

    def writeInt(n: Int): BufferWriter = {
      underlying.writeInt(n)
      this
    }

    def writeLong(n: Long): BufferWriter = {
      underlying.writeLong(n)
      this
    }

    def writeFloat(f: Float): BufferWriter = {
      underlying.writeFloat(f)
      this
    }

    def writeDouble(d: Double): BufferWriter = {
      underlying.writeDouble(d)
      this
    }

    def skip(n: Int) = {
      underlying.writerIndex(offset + n)
      this
    }

    def writeBytes(bytes: Array[Byte]) = {
      underlying.writeBytes(bytes)
      this
    }

    def toChannelBuffer = underlying
  }
}
| foursquare/finagle | finagle-mysql/src/main/scala/com/twitter/finagle/mysql/protocol/Buffer.scala | Scala | apache-2.0 | 12,898 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.spray.openid.model
import org.scalatest.WordSpec
import org.eknet.spray.openid.model
class RealmVerifyTest extends WordSpec {
  import spray.http.Uri

  // Infix helper so tests read naturally: uri matchesRealm "http://..."
  implicit class UriAdds(uri: Uri) {
    def matchesRealm(realm: String) = uriMatchesRealm(uri, Uri(realm))
  }

  // Typo fix in the spec name: "verifiyer" -> "verifier".
  "A realm verifier" must {
    "properly find mismatches" in {
      // Fragments, scheme differences, port differences and diverging path
      // prefixes must all be rejected.
      assert(Uri("http://example.com/path").matchesRealm("http://example.com/path#frag") === false)
      assert(Uri("https://example.com/").matchesRealm("http://example.com/") === false)
      assert(Uri("http://example.com/").matchesRealm("https://example.com/") === false)
      assert(Uri("http://example.com:82").matchesRealm("https://example.com/") === false)
      assert(Uri("http://example.com").matchesRealm("https://example.com:88") === false)
      assert(Uri("https://example.com/path/other").matchesRealm("https://example.com/Path") === false)
    }
    "match correct urls" in {
      // Exact matches and sub-paths of the realm's path must be accepted.
      assert(Uri("http://example.com/path").matchesRealm("http://example.com/path") === true)
      assert(Uri("http://example.com:122/path").matchesRealm("http://example.com:122/path") === true)
      assert(Uri("https://example.com:9443/path/other").matchesRealm("https://example.com:9443/path") === true)
    }
  }
}
| eikek/spray-openid | src/test/scala/org/eknet/spray/openid/model/RealmVerifyTest.scala | Scala | apache-2.0 | 1,890 |
package com.github.ldaniels528.trifecta.modules.etl.io.trigger.impl
import com.github.ldaniels528.trifecta.modules.etl.StoryConfig
import com.github.ldaniels528.trifecta.modules.etl.io.flow.Flow
import com.github.ldaniels528.trifecta.modules.etl.io.trigger.Trigger
import scala.concurrent.ExecutionContext
/**
* Startup Trigger
* @author lawrence.daniels@gmail.com
*/
case class StartupTrigger(id: String, flows: Seq[Flow]) extends Trigger {

  // Nothing to tear down: this trigger holds no timers or external resources.
  override def destroy() = {}

  override def execute(story: StoryConfig)(implicit ec: ExecutionContext) = {
    // Hand the work off to the shared trigger task pool so execute() returns
    // immediately.
    Trigger.taskPool ! new Runnable {
      override def run() = {
        // Pair every flow with a freshly created scope, then process them all.
        process(story, flows zip (flows map (createScope(story, _))))
        () // discard process's result; run() must return Unit
      }
    }
  }
}
| ldaniels528/trifecta | app-modules/etl/src/main/scala/com/github/ldaniels528/trifecta/modules/etl/io/trigger/impl/StartupTrigger.scala | Scala | apache-2.0 | 730 |
// Type-inference test fixture (appears to be IntelliJ Scala plugin test data
// for SCL-2779 — TODO confirm). The /*start*/../*end*/ markers delimit the
// expression whose inferred type is checked against the trailing expected-type
// comment; keep the markers, code and expectation exactly in sync.
object test {
  trait A { def apply(x: Int) = x}
  trait B { def apply(x: Int) = x}
  trait C { }
  // Implicit views exercising how conversions interact with `apply` resolution.
  implicit def A2B(a: A): B = null
  implicit def C2B(a: C): B = null
  def foo[A]: A = null
  /*start*/(foo[A](1), foo[B](1), foo[C](1), null.asInstanceOf[A](1))/*end*/
}
//(Int, Int, Int, Int) | ilinum/intellij-scala | testdata/typeInference/bugs4/SCL2779.scala | Scala | apache-2.0 | 295 |
package com.socrata.bq.soql
import com.socrata.datacoordinator.id.UserColumnId
import com.socrata.soql.SoQLAnalysis
import com.socrata.soql.typed._
import com.socrata.soql.types._
import com.socrata.bq.soql.SqlizerContext.SqlizerContext
case class BQSql(sql: String, setParams: Seq[String]) {
  // Substitutes each standalone "?" token in `sql` with the next element of
  // `setParams`, left to right. Tokenization is whitespace-based, so only
  // bare "?" tokens are replaced and the result always ends with a space.
  // (Acknowledged as crude: we work with a String built from other Strings,
  // not ParametricSql.)
  def injectParams: String = {
    var nextParam = 0
    val out = new StringBuilder
    for (token <- sql.split(" ")) {
      if (token == "?") {
        out.append(setParams(nextParam)).append(' ')
        nextParam += 1
      } else {
        out.append(token).append(' ')
      }
    }
    out.toString()
  }
}
trait Sqlizer[T] {
  import Sqlizer._
  import SqlizerContext._

  // Renders the underlying AST node to SQL plus its bind parameters.
  def sql(physicalColumnMapping: Map[UserColumnId, String], setParams: Seq[String], ctx: Context, escape: Escape): BQSql

  // The AST node being sqlized.
  val underlying: T

  // Whether the rendered expression should be wrapped for case-insensitive
  // comparison: only when the context is case-insensitive, and only in
  // clauses where it affects semantics.
  protected def useUpper(ctx: Context): Boolean = {
    if (caseInsensitive(ctx))
      ctx(SoqlPart) match {
        case SoqlWhere | SoqlGroup | SoqlHaving => true
        case SoqlSelect => usedInGroupBy(ctx)
        case SoqlSearch => false
        case _ => false
      }
    else false
  }

  // Whether this expression (or the selected root expression it belongs to)
  // also appears in the query's GROUP BY list.
  protected def usedInGroupBy(ctx: Context): Boolean = {
    val rootExpr = ctx.get(RootExpr)
    ctx(SoqlPart) match {
      case SoqlSelect | SoqlOrder =>
        ctx.get(Analysis) match {
          case Some(analysis: SoQLAnalysis[_, _]) =>
            analysis.groupBy match {
              case Some(groupBy) =>
                // Use upper in select if this expression or the selected expression it belongs to is found in group by
                groupBy.exists(expr => (underlying == expr) || rootExpr.exists(_ == expr))
              case None => false
            }
          case _ => false
        }
      case SoqlSearch => false
      case _ => false
    }
  }

  // Placeholder emitted for bind parameters in generated SQL.
  protected val ParamPlaceHolder: String = "?"

  // Case-insensitive only when the context explicitly says so.
  private def caseInsensitive(ctx: Context): Boolean =
    ctx.contains(CaseSensitivity) && ctx(CaseSensitivity) == CaseInsensitive
}
object Sqlizer {
  // Context map threaded through sqlization; keyed by SqlizerContext values.
  type Context = Map[SqlizerContext, Any]

  // Implicit conversions wrapping each SoQL AST node in its sqlizer.
  implicit def stringLiteralSqlizer(lit: StringLiteral[SoQLType]): Sqlizer[StringLiteral[SoQLType]] = {
    new StringLiteralSqlizer(lit)
  }

  implicit def functionCallSqlizer(lit: FunctionCall[UserColumnId, SoQLType]): Sqlizer[FunctionCall[UserColumnId, SoQLType]] = {
    new FunctionCallSqlizer(lit)
  }

  // Dispatches on the concrete expression type; covers every CoreExpr case.
  implicit def coreExprSqlizer(expr: CoreExpr[UserColumnId, SoQLType]): Sqlizer[_] = {
    expr match {
      case fc: FunctionCall[UserColumnId, SoQLType] => new FunctionCallSqlizer(fc)
      case cr: ColumnRef[UserColumnId, SoQLType] => new ColumnRefSqlizer(cr)
      case lit: StringLiteral[SoQLType] => new StringLiteralSqlizer(lit)
      case lit: NumberLiteral[SoQLType] => new NumberLiteralSqlizer(lit)
      case lit: BooleanLiteral[SoQLType] => new BooleanLiteralSqlizer(lit)
      case lit: NullLiteral[SoQLType] => new NullLiteralSqlizer(lit)
    }
  }

  implicit def orderBySqlizer(ob: OrderBy[UserColumnId, SoQLType]): Sqlizer[OrderBy[UserColumnId, SoQLType]] = {
    new OrderBySqlizer(ob)
  }

  // Pairs an analysis with its (table-name) String.
  implicit def analysisSqlizer(analysisTable: Tuple2[SoQLAnalysis[UserColumnId, SoQLType], String]) = {
    new SoQLAnalysisSqlizer(analysisTable._1, analysisTable._2)
  }
}
// Keys for the context map threaded through sqlization.
// NOTE: Enumeration ids are assigned in declaration order — append new values
// at the end rather than inserting in the middle.
object SqlizerContext extends Enumeration {
  type SqlizerContext = Value
  val Analysis = Value("analysis")
  // Which SoQL clause is currently being rendered.
  val SoqlPart = Value("soql-part")
  val SoqlSelect = Value("select")
  val SoqlWhere = Value("where")
  val SoqlGroup = Value("group")
  val SoqlHaving = Value("having")
  val SoqlOrder = Value("order")
  val SoqlSearch = Value("search")
  val Extras = Value("extras")
  // Need to append % after the string
  val BeginsWith = Value("begins-with")
  val IdRep = Value("id-rep")
  val VerRep = Value("ver-rep")
  // The selected root expression the current sub-expression belongs to.
  val RootExpr = Value("root-expr")
  val CaseSensitivity = Value("case-sensitivity")
}
/** Marker for whether generated SQL string comparisons are case sensitive. */
sealed trait CaseSensitivity
// Upgraded to case objects: readable toString, pattern-matching support and
// stable equality, while remaining backward compatible with `==` checks.
case object CaseInsensitive extends CaseSensitivity
case object CaseSensitive extends CaseSensitivity
| socrata-platform/soql-bigquery-adapter | common-bq/src/main/scala/com/socrata/bq/soql/Sqlizer.scala | Scala | apache-2.0 | 4,028 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package dotty.tools.lispify
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.ast.untpd._
import dotty.tools.dotc.core.Contexts.{ContextBase, Context}
import dotty.tools.dotc.parsing.Parsers.Parser
import dotty.tools.dotc.util.{NoSource, SourceFile}
import scala.reflect.io.PlainFile
object Translator {
  import Lispyfy._

  protected def initCtx = (new ContextBase).initialCtx

  /**
   * Parses a Scala source file with the dotty parser, dumps the untyped tree,
   * then feeds each top-level module definition through Lispyfy.
   *
   * Generalized: the file to translate may now be passed as the first
   * command-line argument; the historical hard-coded sample path remains as a
   * fallback for compatibility.
   */
  def main(args: Array[String]): Unit = {
    implicit val ctx: Context = initCtx.fresh

    // Resolves a file name to a SourceFile, reporting an error (and returning
    // NoSource) when the file does not exist.
    def getSource(fileName: String): SourceFile = {
      val f = new PlainFile(fileName)
      if (f.exists) new SourceFile(f)
      else {
        ctx.error(s"not found: $fileName")
        NoSource
      }
    }

    val fileName = args.headOption.getOrElse("/Users/oleg/HelloWorld.scala")

    val sourceCode = getSource(fileName)
    val unit = new CompilationUnit(sourceCode)
    val tree = new Parser(unit.source).parse()

    println("stage 1:")
    println(tree.show)
    println("STAGE 2:")
    println("------------------------------------------------------------------------")

    tree match {
      case tree: PackageDef =>
        println("PackageDef.pid: " + tree.pid)
        println()
        // Every top-level statement is expected to be an object definition.
        for (ss <- tree.stats) {
          procTopModuleDef(ss.asInstanceOf[ModuleDef])
        }
      case other =>
        // Previously a bare MatchError; fail with a clearer message instead.
        sys.error(s"Unexpected top-level tree: ${other.getClass.getName}")
    }
    println("------------------------------------------------------------------------")
  }
}
| spetz911/dotty | src/dotty/tools/lispify/Translator.scala | Scala | bsd-3-clause | 1,564 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
/**
* Used to send state on-the-wire about Executors from Worker to Master.
* This state is sufficient for the Master to reconstruct its internal data structures during
* failover.
*/
private[deploy] class ExecutorDescription(
    val appId: String,
    val execId: Int,
    val cores: Int,
    val state: ExecutorState.Value)
  extends Serializable {

  // Same rendering as before, via string interpolation instead of format():
  // "ExecutorState(appId=..., execId=..., cores=..., state=...)"
  override def toString: String =
    s"ExecutorState(appId=$appId, execId=$execId, cores=$cores, state=$state)"
}
| sh-cho/cshSpark | deploy/ExecutorDescription.scala | Scala | apache-2.0 | 1,325 |
import cats.instances.future._
import cats.syntax.functor._
import com.bot4s.telegram.api.declarative.{ Commands, RegexCommands }
import com.bot4s.telegram.future.Polling
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.Try
/**
* Showcases different ways to declare commands (Commands + RegexCommands).
*
* Note that non-ASCII commands are not clickable.
*
* @param token Bot's token.
*/
class CommandsBot(token: String)
  extends ExampleBot(token)
    with Polling
    with Commands[Future]
    with RegexCommands[Future] {

  // Extractor: safely parses a String as an Int. This defines a *term* named
  // Int used in patterns below; the type scala.Int is unaffected.
  object Int {
    def unapply(s: String): Option[Int] = Try(s.toInt).toOption
  }

  // String commands.
  onCommand("/hello") { implicit msg =>
    reply("Hello America!").void
  }

  // '/' prefix is optional
  onCommand("hola") { implicit msg =>
    reply("Hola Mundo!").void
  }

  // Several commands can share the same handler.
  // Shows the 'using' extension to extract information from messages.
  onCommand("/hallo" | "/bonjour" | "/ciao" | "/hola") { implicit msg =>
    using(_.from) { // sender
      user =>
        reply(s"Hello ${user.firstName} from Europe?").void
    }
  }

  // Non-ASCII command; the "/" prefix is added by default.
  onCommand("привет") { implicit msg =>
    reply("\uD83C\uDDF7\uD83C\uDDFA").void
  }

  // Note that non-ascii commands are not clickable.
  onCommand("こんにちは" | "你好" | "안녕하세요") { implicit msg =>
    reply("Hello from Asia?").void
  }

  // Different spellings + emoji commands.
  onCommand("/metro" | "/métro" | "/🚇") { implicit msg =>
    reply("Metro schedule bla bla...").void
  }

  onCommand("beer" | "beers" | "🍺" | "🍻") { implicit msg =>
    reply("Beer menu bla bla...").void
  }

  // withArgs extracts command arguments.
  onCommand("echo") { implicit msg =>
    withArgs { args =>
      reply(args.mkString(" ")).void
    }
  }

  // withArgs with pattern matching (uses the Int extractor defined above).
  onCommand("/inc") { implicit msg =>
    withArgs {
      case Seq(Int(i)) =>
        reply("" + (i + 1)).void
      // Conveniently avoid MatchError, providing hints on usage.
      case _ =>
        reply("Invalid argument. Usage: /inc 123").void
    }
  }

  // Regex commands also available. Captured groups are passed as a Seq[String].
  onRegex("""/timer\s+([0-5]?[0-9]):([0-5]?[0-9])""".r) { implicit msg =>
    { case Seq(Int(mm), Int(ss)) =>
      reply(s"Timer set: $mm minute(s) and $ss second(s)").void
      // Fire-and-forget: schedule the follow-up message after the delay.
      Utils.after(mm.minutes + ss.seconds) {
        reply("Time's up!!!")
      }
    }
  }
}
| mukel/telegrambot4s | examples/src/CommandsBot.scala | Scala | apache-2.0 | 2,528 |
package de.htwg.zeta.server.controller.restApi
import java.util.UUID
import javax.inject.Inject
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import com.mohiva.play.silhouette.api.actions.SecuredRequest
import de.htwg.zeta.common.format.entity.FilterFormat
import de.htwg.zeta.common.models.entity.File
import de.htwg.zeta.common.models.entity.Filter
import de.htwg.zeta.persistence.general.FileRepository
import de.htwg.zeta.persistence.general.FilterRepository
import de.htwg.zeta.server.silhouette.ZetaEnv
import grizzled.slf4j.Logging
import play.api.libs.json.JsArray
import play.api.libs.json.JsError
import play.api.libs.json.JsPath
import play.api.libs.json.JsResult
import play.api.libs.json.JsSuccess
import play.api.libs.json.JsValue
import play.api.libs.json.JsonValidationError
import play.api.mvc.AnyContent
import play.api.mvc.InjectedController
import play.api.mvc.Result
import play.api.mvc.Results
/**
* REST-ful API for filter definitions
*/
class FilterRestApi @Inject()(
    filterRepo: FilterRepository,
    fileRepo: FileRepository,
    filterFormat: FilterFormat,
    implicit val ec: ExecutionContext
) extends InjectedController with Logging {

  /** Lists all filter.
   *
   * @param request The request
   * @return 200 OK with a JSON array of all non-deleted filters,
   *         or 400 Bad Request with the error message on failure
   */
  def showForUser()(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
    getEntities.map(getJsonArray).recover {
      case e: Exception =>
        error("Exception while trying to read all `Filter` from DB", e)
        BadRequest(e.getMessage)
    }
  }

  /** Reads every `Filter` (including flagged-as-deleted ones) from the repository. */
  private def getEntities: Future[List[Filter]] = {
    filterRepo.readAllIds().flatMap { ids =>
      Future.sequence(ids.toList.map(filterRepo.read))
    }
  }

  /** Renders all non-deleted filters as a JSON array wrapped in a 200 OK. */
  private def getJsonArray(list: List[Filter]) = {
    val entities = list.filter(e => !e.deleted)
    val entries = entities.map(filterFormat.writes)
    Ok(JsArray(entries))
  }

  /**
   * Get a single Filter instance.
   * (The scaladoc previously said "Generator" — copy/paste leftover.)
   *
   * @param id Identifier of the Filter
   * @param request The request
   * @return The result
   */
  def get(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
    // `map` is sufficient here; the previous `flatMap(e => Future(Ok(...)))`
    // allocated a redundant Future.
    filterRepo.read(id).map(entity => Ok(filterFormat.writes(entity))).recover {
      case e: Exception =>
        error("Exception while trying to read a single `Filter` from DB", e)
        // Unqualified BadRequest for consistency with the other handlers
        // (InjectedController already mixes in the Results helpers).
        BadRequest(e.getMessage)
    }
  }

  /**
   * Flag Filter as deleted.
   *
   * @param id Identifier of Filter
   * @param request The request
   * @return The result
   */
  def delete(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
    flagAsDeleted(id).map(_ => Ok("")).recover {
      case e: Exception =>
        error("Exception while trying to flag `Filter` as deleted at DB", e)
        BadRequest(e.getMessage)
    }
  }

  /** Soft delete: the entity is never removed, only marked `deleted`. */
  private def flagAsDeleted(id: UUID): Future[Filter] = {
    filterRepo.update(id, e => e.copy(deleted = true))
  }

  /**
   * Add new Filter into DB.
   *
   * @param request The request carrying the filter JSON in its body
   * @return 200 OK on success, 400 Bad Request with validation errors or
   *         the persistence error message otherwise
   */
  def insert(request: SecuredRequest[ZetaEnv, JsValue]): Future[Result] = {
    parseJson(request.body).flatMap { result =>
      result.fold(
        errors => jsErrorToResult(errors),
        filter => insertDb(filter).map(_ => Ok("")).recover {
          case e: Exception =>
            error("Exception while trying to insert a `Filter` into DB", e)
            BadRequest(e.getMessage)
        }
      )
    }
  }

  /** Validates the request body against the Filter JSON format.
   *  Both the success and the error case are already-computed values, so a
   *  single `Future.successful` replaces the previous two-branch match. */
  private def parseJson(json: JsValue): Future[JsResult[Filter]] =
    Future.successful(json.validate(filterFormat))

  /** Converts JSON validation errors into a 400 response. */
  private def jsErrorToResult(errors: Seq[(JsPath, Seq[JsonValidationError])]): Future[Result] = {
    val json = JsError.toJson(errors)
    Future.successful(BadRequest(json))
  }

  /** Creates the backing source file first, then the filter referencing it. */
  private def insertDb(entity: Filter): Future[Filter] = {
    for {
      file <- createFile("filter.scala")
      filter <- createFilter(entity, file)
    } yield filter
  }

  /** Persists a new source file pre-filled with the filter skeleton. */
  private def createFile(name: String): Future[File] = {
    val file = File(
      id = UUID.randomUUID(),
      name,
      content = fileTemplate()
    )
    fileRepo.create(file)
  }

  /** Default source code for a freshly created filter.
   *  Plain (non-`s`) literal: the template contains no interpolations. */
  private def fileTemplate(): String = {
    """
|class Filter() extends BaseFilter {
| def filter(entity: GraphicalDslInstance): Boolean = {
| true
| }
|}
""".stripMargin.trim
  }

  /** Associates the created file with the filter and persists the filter. */
  private def createFilter(filter: Filter, file: File): Future[Filter] = {
    val files = Map(file.id -> file.name)
    filterRepo.create(filter.copy(files = files))
  }
}
| Zeta-Project/zeta | api/server/app/de/htwg/zeta/server/controller/restApi/FilterRestApi.scala | Scala | bsd-2-clause | 4,728 |
trait Unapply {
def unapply(x: Int): Option[Int] = Some(x)
}
object X {
val A: Any with Unapply = null
}
22 match {
case X.A(<caret>) =>
}
//Int | loskutov/intellij-scala | testdata/parameterInfo/patternParameterInfo/unapply/CompoundTypeField.scala | Scala | apache-2.0 | 151 |
package models.daos
import javax.inject.Inject
import models.daos.drivers.{Neo4j, NeoParsers}
import models.{Feedback, Score}
import play.api.libs.json.{JsValue, Json}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
class ScoreDAO @Inject()(neo: Neo4j,
                         parser: NeoParsers) {

  /**
   * Saves a score into the data store.
   * CREATE UNIQUE guarantees at most one SCORED relationship per (user, repo).
   *
   * @param username username of the user who scored the repo
   * @param repoName repository which was scored ("owner/repo")
   * @param score score given by the user to the repository
   * @return the saved score, or None when the response cannot be parsed
   */
  def save(username: String, repoName: String, score: Score): Future[Option[Score]] = {
    neo.cypher(
      """
MATCH (u:User),(r:Repository)
WHERE u.username={username} AND r.name={repoName}
CREATE UNIQUE (u)-[c:SCORED {props}]->(r)
RETURN c
""",
      Json.obj(
        "username" -> username,
        "repoName" -> repoName,
        "props" -> Json.toJson(score)
      )
    ).map(parser.parseNeoScore)
  }

  /**
   * Finds the score a user gave to a repo.
   *
   * @param username username of the user who scored the repo
   * @param repoName repository which was scored
   * @return the score if present, None otherwise
   */
  def find(username: String, repoName: String): Future[Option[Score]] = {
    neo.cypher(
      """
MATCH (u:User)-[c:SCORED]->(r:Repository)
WHERE u.username={username} AND r.name={repoName}
RETURN c
""",
      Json.obj(
        "username" -> username,
        "repoName" -> repoName
      )
    ).map(parser.parseNeoScore)
  }

  /**
   * Deletes a score from a user to a repo.
   *
   * @param username username of the user who scored the repo
   * @param repoName repository which was scored
   * @return the raw Neo4j JSON response (no rows are returned by DELETE)
   */
  def delete(username: String, repoName: String): Future[JsValue] = {
    neo.cypher(
      """
MATCH (u:User)-[c:SCORED]->(r:Repository)
WHERE u.username={username} AND r.name={repoName}
DELETE c
""",
      Json.obj(
        "username" -> username,
        "repoName" -> repoName
      )
    )
  }

  /**
   * Updates a score from a user to a repo.
   * SET c={props} replaces all properties of the relationship.
   *
   * @param username username of the user who scored the repo
   * @param repoName repository which was scored
   * @param score the new score values
   * @return the updated score, or None when the response cannot be parsed
   */
  def update(username: String, repoName: String, score: Score): Future[Option[Score]] = {
    neo.cypher(
      """
MATCH (u:User)-[c:SCORED]->(r:Repository)
WHERE u.username={username} AND r.name={repoName}
SET c={props}
RETURN c
""",
      Json.obj(
        "username" -> username,
        "repoName" -> repoName,
        "props" -> Json.toJson(score)
      )
    ).map(parser.parseNeoScore)
  }

  /**
   * Get all the scoring made for a repository corresponding to the page and
   * items per page specified arguments, newest first.
   *
   * @param repoName name of the repository to get the scores from ("owner/repo")
   * @param page page number to get from the database. Default value to 1
   * @param itemsPerPage number of items to display in a database page
   * @throws IllegalArgumentException if page is less than 1
   * @return Seq of Feedback.
   */
  def findRepositoryFeedback(repoName: String, page: Int = 1, itemsPerPage: Int = 10): Future[Seq[Feedback]] = {
    requireValidPage(page)
    neo.cypher(
      """
MATCH (u:User)-[c:SCORED]->(r:Repository)
WHERE r.name={repoName}
RETURN c, u ORDER BY c.timestamp DESC SKIP {feedbackSkip} LIMIT {pageSize}
""", Json.obj(
        "repoName" -> repoName,
        "feedbackSkip" -> (page - 1) * itemsPerPage,
        "pageSize" -> itemsPerPage
      )
    ).map(parser.parseNeoFeedbackList)
  }

  /**
   * Get all the scoring made for a repository corresponding to the page and
   * items per page specified arguments, newest first.
   *
   * @param repoName name of the repository to get the scores from ("owner/repo")
   * @param page page number to get from the database. Default value to 1
   * @param itemsPerPage number of items to display in a database page
   * @throws IllegalArgumentException if page is less than 1
   * @return Seq of Scores.
   */
  def findRepositoryScores(repoName: String, page: Int = 1, itemsPerPage: Int = 10): Future[Seq[Score]] = {
    requireValidPage(page)
    neo.cypher(
      """
MATCH (u:User)-[c:SCORED]->(r:Repository)
WHERE r.name={repoName}
RETURN c ORDER BY c.timestamp DESC SKIP {scoreSkip} LIMIT {pageSize}
""", Json.obj(
        "repoName" -> repoName,
        "scoreSkip" -> (page - 1) * itemsPerPage,
        "pageSize" -> itemsPerPage
      )
    ).map(parser.parseNeoScoreList)
  }

  /** Rejects non-positive page numbers before any query is issued.
   *  IllegalArgumentException (previously a bare Exception) better reflects a
   *  caller error; existing callers catching Exception still work. */
  private def requireValidPage(page: Int): Unit = {
    if (page < 1) {
      throw new IllegalArgumentException("Page must be a positive non null integer")
    }
  }

  /**
   * Gets the number of feedback entries there are for a given repository.
   *
   * @param repoName name of the repository to get the feedback count from
   * @return the number of SCORED relationships pointing at the repository
   */
  def countRepositoryFeedback(repoName: String): Future[Int] = {
    neo.cypher(
      """
MATCH (u:User)-[c:SCORED]->(r:Repository)
WHERE r.name={repoName}
RETURN count(c) AS feedbackCount
""",
      Json.obj("repoName" -> repoName)
      // Unwraps Neo4j's REST response shape: results[0].data[0].row[0].
    ).map(json => (((json \\ "results")(0) \\ "data")(0) \\ "row")(0).as[Int])
  }
}
| gitlinks/gitrank-web | app/models/daos/ScoreDAO.scala | Scala | apache-2.0 | 5,197 |
package xyz.seto.obscene
/**
* An instance represents a sample if the label is available or a query if the
* label is null.
*/
object Instance {

  // Number of sample points used when a gesture is sampled as a spatial patch.
  val PATCH_SAMPLE_SIZE = 16

  // Number of points used when sampling a stroke as a temporal sequence.
  val SEQUENCE_SAMPLE_SIZE = 16

  // Candidate baseline orientations: multiples of 45 degrees in both
  // directions. 0 appears twice (once per range), which is harmless for the
  // closest-candidate search below.
  val ORIENTATIONS = (
    (0.toFloat to math.Pi.toFloat by (math.Pi / 4).toFloat) ++
    (0.toFloat to -math.Pi.toFloat by -(math.Pi / 4).toFloat)
  ).toList

  /**
   * Builds an Instance from a gesture: a temporally sampled,
   * rotation-normalized, unit-length vector for sequence-sensitive stores, or
   * a spatially sampled patch otherwise.
   */
  def createInstance(sequenceType: Int, orientationType: Int, gesture: Gesture, label: String): Instance = {
    if (sequenceType == GestureStore.SEQUENCE_SENSITIVE)
      new Instance(
        gesture.id,
        temporalSampler(orientationType, gesture),
        label
      ).normalize
    else
      new Instance(gesture.id, spatialSampler(gesture), label)
  }

  /**
   * Samples the first stroke of the gesture over time, translates it to its
   * centroid, and rotates it so that its start orientation snaps to the
   * closest entry of [[ORIENTATIONS]] (or exactly to zero when the store is
   * orientation-invariant).
   */
  def temporalSampler(orientationType: Int, gesture: Gesture): List[Float] = {
    // Angle of the first sampled point relative to the centroid.
    def orientation(pts: List[Float], center: List[Float]): Float =
      math.atan2(pts(1) - center(1), pts(0) - center(0)).toFloat

    // Rotation that moves `orientation` onto the closest candidate
    // orientation; the fallback candidate is -orientation (snap to zero).
    def makeAdjustment(orientation: Float): Float = {
      def loop(adj: Float, orients: List[Float]): Float = orients match {
        case o :: os =>
          // BUG FIX: when candidate `o` is closer, the new adjustment is the
          // delta `o - orientation` itself. The previous code recursed with
          // `math.abs(o - adj)`, which is neither the delta nor
          // sign-preserving (cf. the reference android.gesture.Instance
          // algorithm).
          if (math.abs(o - orientation) < math.abs(adj))
            loop(o - orientation, os)
          else
            loop(adj, os)
        case _ => adj
      }
      if (orientationType != GestureStore.ORIENTATION_INVARIANT)
        loop(-orientation, ORIENTATIONS)
      else
        -orientation
    }

    val pts = GestureUtils.temporalSampling(gesture.strokes(0), SEQUENCE_SAMPLE_SIZE)
    val center = GestureUtils.computeCentroid(pts)
    GestureUtils.rotate(
      GestureUtils.translate(pts, -center(0), -center(1)),
      makeAdjustment(orientation(pts, center))
    ).toList
  }

  /** Samples the whole gesture as a PATCH_SAMPLE_SIZE spatial patch. */
  def spatialSampler(gesture: Gesture): List[Float] =
    GestureUtils.spatialSampling(gesture, PATCH_SAMPLE_SIZE, false).toList
}
/**
 * An instance represents a sample if the label is available or a query if the
 * label is null.
 *
 * @param id     id of the originating gesture
 * @param vector the feature vector
 * @param label  the class label, or null for a query
 */
class Instance(val id: Long, val vector: List[Float], val label: String) {

  /** Returns a copy of this instance whose feature vector is scaled to unit length. */
  def normalize: Instance = {
    val m = magnitude
    new Instance(id, vector.map(_ / m), label)
  }

  /** Euclidean (L2) norm of the feature vector. */
  def magnitude: Float =
    math.sqrt(vector.foldLeft(0.0f)((acc, v) => acc + v * v)).toFloat
}
| chrisseto/obscene | core/src/main/scala/Instance.scala | Scala | apache-2.0 | 2,292 |
package org.jetbrains.plugins.scala.lang.resolve2
/**
* Pavel.Fatin, 02.02.2010
*/
// Reference-resolution tests for scope priority rules; fixtures live under
// the scope/priority/ test-data directory.
class ScopePriorityTest extends ResolveTestBase {
// Test-data directory for this suite, relative to the base test-data path.
override def folderPath: String = {
super.folderPath + "scope/priority/"
}
// Each method delegates to the inherited doTest, which presumably loads the
// fixture named after the test method from folderPath — confirm against
// ResolveTestBase.
def testBlock11 = doTest
def testBlock12 = doTest
def testBlock21 = doTest
def testBlock22 = doTest
def testBlockAndCount = doTest
def testBlockAndType = doTest
def testBlockNested = doTest
//TODO packageobject
// def testPackageObject = doTest
} | LPTK/intellij-scala | test/org/jetbrains/plugins/scala/lang/resolve2/ScopePriorityTest.scala | Scala | apache-2.0 | 485 |
package scan
import java.nio.file._
import scala.compat.java8.StreamConverters._
import scala.collection.SortedSet
import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import cats.effect._
import org.atnos.eff.addon.cats.effect.IOEffect._
import org.atnos.eff.syntax.addon.cats.effect._
object Scanner {
// Effect stack used by main: a single cats-effect IO effect.
type R = Fx1[IO]
// Edge of the program: the only place the IO value is actually executed.
// NOTE(review): args(0) below throws when no argument is supplied — confirm
// whether a usage message is wanted instead.
def main(args: Array[String]): Unit = run[R](args).unsafeRunSync
// Scans the directory given as first argument and prints the top-10 report.
def run[R: _io](args: Array[String]): Eff[R, Unit] = for {
r <- scanReport(Paths.get(args(0)), 10)
} yield println(r)
// Scans `base` and renders the result as a human-readable report.
def scanReport[R: _io](base: Path, topN: Int): Eff[R, String] = for {
scan <- pathScan(base, topN)
} yield ReportFormat.largeFilesReport(scan, base.toString)
// Recursively scans `path`:
//  - a regular file contributes a one-file scan,
//  - a directory is listed (the Java stream is closed in `finally`), its
//    children scanned and merged keeping only the `topN` largest files,
//  - anything else contributes an empty scan.
def pathScan[R: _io](path: Path, topN: Int): Eff[R, PathScan] = for {
fp <- FilePath(path)
scan <- fp match {
case File(_) => for {
fs <- FileSize.ofFile(path)
} yield PathScan(SortedSet(fs), fs.size, 1)
case Directory(_) => for {
files <- ioDelay {
val jstream = Files.list(path)
try jstream.toScala[List]
finally jstream.close()
}
subScans <- files.traverse(pathScan(_, topN))
} yield subScans.combineAll(PathScan.topNMonoid(topN))
case Other(_) =>
PathScan.empty.pureEff[R]
}
} yield scan
}
// Classification of a filesystem entry: File, Directory or Other.
sealed trait FilePath {
// String rendering of the underlying path.
def path: String
}
object FilePath {

  /** Classifies `path` as a File, Directory or Other, deferring the
   *  filesystem checks into the IO effect. */
  def apply[R: _io](path: Path): Eff[R, FilePath] = ioDelay {
    val name = path.toString
    if (Files.isRegularFile(path)) File(name)
    else if (Files.isDirectory(path)) Directory(name)
    else Other(name)
  }
}
// A regular file.
case class File(path: String) extends FilePath
// A directory.
case class Directory(path: String) extends FilePath
// Anything that is neither a regular file nor a directory.
case class Other(path: String) extends FilePath
// Aggregated result of scanning a tree: the largest files seen so far
// (bounded by the caller via topNMonoid) plus running size/count totals.
case class PathScan(largestFiles: SortedSet[FileSize], totalSize: Long, totalCount: Long)
object PathScan {
def empty = PathScan(SortedSet.empty, 0, 0)
// Monoid that merges two scans while keeping only the `n` largest files.
// `take(n)` keeps the n biggest because FileSize's implicit Ordering is
// by size, reversed (largest first).
def topNMonoid(n: Int): Monoid[PathScan] = new Monoid[PathScan] {
def empty: PathScan = PathScan.empty
def combine(p1: PathScan, p2: PathScan): PathScan = PathScan(
p1.largestFiles.union(p2.largestFiles).take(n),
p1.totalSize + p2.totalSize,
p1.totalCount + p2.totalCount
)
}
}
// A single file together with its size in bytes.
case class FileSize(path: Path, size: Long)

object FileSize {

  /** Reads the size of `file` inside the IO effect. */
  def ofFile[R: _io](file: Path) = ioDelay(FileSize(file, Files.size(file)))

  /**
   * Largest-first ordering with the path as tie-breaker.
   *
   * FIX: the previous `Ordering.by(_.size).reverse` made distinct files of
   * equal size compare as equal, so a SortedSet[FileSize] silently dropped
   * all but one of them when scans were merged. The path tie-breaker keeps
   * distinct files distinct while preserving the largest-first order.
   */
  implicit val ordering: Ordering[FileSize] = new Ordering[FileSize] {
    def compare(a: FileSize, b: FileSize): Int = {
      val bySize = java.lang.Long.compare(b.size, a.size) // descending by size
      if (bySize != 0) bySize else a.path.toString.compareTo(b.path.toString)
    }
  }
}
object ReportFormat {
// Renders a PathScan as a human-readable report for `rootDir`. Each listed
// file shows its share of the total size (integer division, rounded down),
// its formatted size, and its path.
def largeFilesReport(scan: PathScan, rootDir: String): String = {
if (scan.largestFiles.nonEmpty) {
s"Largest ${scan.largestFiles.size} file(s) found under path: $rootDir\\n" +
scan.largestFiles.map(fs => s"${(fs.size * 100)/scan.totalSize}% ${formatByteString(fs.size)} ${fs.path}").mkString("", "\\n", "\\n") +
s"${scan.totalCount} total files found, having total size ${formatByteString(scan.totalSize)} bytes.\\n"
}
else
s"No files found under path: $rootDir"
}
// Formats a byte count using decimal (SI, base-1000) units: "512 B",
// "1.5 KB", ... up to exabytes.
// NOTE(review): negative input is not handled (Math.log of a negative value
// is NaN) — callers are expected to pass Files.size results.
def formatByteString(bytes: Long): String = {
if (bytes < 1000)
s"${bytes} B"
else {
val exp = (Math.log(bytes) / Math.log(1000)).toInt
val pre = "KMGTPE".charAt(exp - 1)
s"%.1f ${pre}B".format(bytes / Math.pow(1000, exp))
}
}
}
| benhutchison/GettingWorkDoneWithExtensibleEffects | solutions/exercise2io/src/main/scala/scan/Scanner.scala | Scala | apache-2.0 | 3,354 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
import org.scalatest._
import org.scalatest.prop.Checkers
import org.scalacheck._
import Arbitrary._
import Prop._
import Integer.{MAX_VALUE, MIN_VALUE}
import org.scalatest.exceptions.TestFailedException
class ShouldOrderedSpec extends Spec with ShouldMatchers with Checkers with ReturnsNormallyThrowsAssertion {
// Checking for a specific size
object `The 'be >/</>=/<= (x)' syntax` {
object `on Int` {
def `should do nothing if the comparison holds true` {
check((left: Int, right: Int) => left < right ==> returnsNormally(left should be < (right)))
check((left: Int, right: Int) => left <= right ==> returnsNormally(left should be <= (right)))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should be > (right)))
check((left: Int, right: Int) => left >= right ==> returnsNormally(left should be >= (right)))
}
def `should do nothing if the comparison fails and used with not` {
check((left: Int, right: Int) => left >= right ==> returnsNormally(left should not be < (right)))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should not be <= (right)))
check((left: Int, right: Int) => left <= right ==> returnsNormally(left should not be > (right)))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should not be >= (right)))
check((left: Int, right: Int) => left >= right ==> returnsNormally(left should not (be < (right))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should not (be <= (right))))
check((left: Int, right: Int) => left <= right ==> returnsNormally(left should not (be > (right))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should not (be >= (right))))
}
def `should do nothing when comparison succeeds and used in a logical-and expression` {
check((left: Int, right: Int) => ((left < right) && (right < MAX_VALUE)) ==> returnsNormally(left should ((be < (right)) and (be < (right + 1)))))
check((left: Int, right: Int) => ((left < right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be < (right) and (be < (right + 1)))))
check((left: Int, right: Int) => ((left < right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be < (right) and be < (right + 1))))
check((left: Int, right: Int) => ((left <= right) && (right < MAX_VALUE)) ==> returnsNormally(left should ((be <= (right)) and (be <= (right + 1)))))
check((left: Int, right: Int) => ((left <= right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be <= (right) and (be <= (right + 1)))))
check((left: Int, right: Int) => ((left <= right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be <= (right) and be <= (right + 1))))
check((left: Int, right: Int) => ((left > right) && (right > MIN_VALUE)) ==> returnsNormally(left should ((be > (right)) and (be > (right - 1)))))
check((left: Int, right: Int) => ((left > right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be > (right) and (be > (right - 1)))))
check((left: Int, right: Int) => ((left > right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be > (right) and be > (right - 1))))
check((left: Int, right: Int) => ((left >= right) && (right > MIN_VALUE)) ==> returnsNormally(left should ((be >= (right)) and (be >= (right - 1)))))
check((left: Int, right: Int) => ((left >= right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be >= (right) and (be >= (right - 1)))))
check((left: Int, right: Int) => ((left >= right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be >= (right) and be >= (right - 1))))
}
def `should do nothing when array size matches and used in a logical-or expression` {
check((left: Int, right: Int) => ((left < right) && (right < MAX_VALUE)) ==> returnsNormally(left should ((be < (right - 1)) or (be < (right + 1)))))
check((left: Int, right: Int) => ((left < right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be < (right - 1) or (be < (right + 1)))))
check((left: Int, right: Int) => ((left < right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be < (right - 1) or be < (right + 1))))
check((left: Int, right: Int) => ((left <= right) && (right < MAX_VALUE)) ==> returnsNormally(left should ((be <= (right - 1)) or (be <= (right + 1)))))
check((left: Int, right: Int) => ((left <= right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be <= (right - 1) or (be <= (right + 1)))))
check((left: Int, right: Int) => ((left <= right) && (right < MAX_VALUE)) ==> returnsNormally(left should (be <= (right - 1) or be <= (right + 1))))
check((left: Int, right: Int) => ((left > right) && (right > MIN_VALUE)) ==> returnsNormally(left should ((be > (right + 1)) or (be > (right - 1)))))
check((left: Int, right: Int) => ((left > right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be > (right + 1) or (be > (right - 1)))))
check((left: Int, right: Int) => ((left > right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be > (right + 1) or be > (right - 1))))
check((left: Int, right: Int) => ((left >= right) && (right > MIN_VALUE)) ==> returnsNormally(left should ((be >= (right + 1)) or (be >= (right - 1)))))
check((left: Int, right: Int) => ((left >= right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be >= (right + 1) or (be >= (right - 1)))))
check((left: Int, right: Int) => ((left >= right) && (right > MIN_VALUE)) ==> returnsNormally(left should (be >= (right + 1) or be >= (right - 1))))
check((left: Int, right: Int) => returnsNormally(left should (be >= (right) or be < (right))))
check((left: Int, right: Int) => returnsNormally(left should (be > (right) or be <= (right))))
}
def `should do nothing when comparison fails and used in a logical-and expression with not` {
check((left: Int, right: Int) => left > right ==> returnsNormally(left should (not (be < (right)) and not (be < (right + 1)))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should ((not be < (right)) and (not be < (right + 1)))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should (not be < (right) and not be < (right + 1))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should (not (be <= (right)) and not (be <= (right)))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should ((not be <= (right)) and (not be <= (right)))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should (not be <= (right) and not be <= (right))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should (not (be > (right)) and not (be > (right - 1)))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should ((not be > (right)) and (not be > (right - 1)))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should (not be > (right) and not be > (right - 1))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should (not (be >= (right)) and not (be >= (right)))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should ((not be >= (right)) and (not be >= (right)))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should (not be >= (right) and not be >= (right))))
}
def `should do nothing when comparison fails and used in a logical-or expression with not` {
check((left: Int, right: Int) => left > right ==> returnsNormally(left should (not (be >= (right)) or not (be < (right)))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should ((not be >= (right)) or (not be < (right)))))
check((left: Int, right: Int) => left > right ==> returnsNormally(left should (not be >= (right) or not be < (right))))
check((left: Int, right: Int) => left >= right ==> returnsNormally(left should (not (be > (right)) or not (be <= (right)))))
check((left: Int, right: Int) => left >= right ==> returnsNormally(left should ((not be > (right)) or (not be <= (right)))))
check((left: Int, right: Int) => left >= right ==> returnsNormally(left should (not be > (right) or not be <= (right))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should (not (be <= (right)) or not (be > (right)))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should ((not be <= (right)) or (not be > (right)))))
check((left: Int, right: Int) => left < right ==> returnsNormally(left should (not be <= (right) or not be > (right))))
check((left: Int, right: Int) => left <= right ==> returnsNormally(left should (not (be < (right)) or not (be >= (right)))))
check((left: Int, right: Int) => left <= right ==> returnsNormally(left should ((not be < (right)) or (not be >= (right)))))
check((left: Int, right: Int) => left <= right ==> returnsNormally(left should (not be < (right) or not be >= (right))))
}
def `should throw TestFailedException if comparison does not succeed` {
val caught1 = intercept[TestFailedException] {
1 should be < (1)
}
assert(caught1.getMessage === "1 was not less than 1")
check((left: Int, right: Int) => left >= right ==> throwsTestFailedException(left should be < (right)))
val caught2 = intercept[TestFailedException] {
2 should be <= (1)
}
assert(caught2.getMessage === "2 was not less than or equal to 1")
check((left: Int, right: Int) => left > right ==> throwsTestFailedException(left should be <= (right)))
val caught3 = intercept[TestFailedException] {
1 should be > (1)
}
assert(caught3.getMessage === "1 was not greater than 1")
check((left: Int, right: Int) => left <= right ==> throwsTestFailedException(left should be > (right)))
val caught4 = intercept[TestFailedException] {
1 should be >= (2)
}
assert(caught4.getMessage === "1 was not greater than or equal to 2")
check((left: Int, right: Int) => left < right ==> throwsTestFailedException(left should be >= (right)))
}
def `should throw TestFailedException if comparison succeeds but used with not` {
val caught1 = intercept[TestFailedException] {
1 should not be < (2)
}
assert(caught1.getMessage === "1 was less than 2")
check((left: Int, right: Int) => left < right ==> throwsTestFailedException(left should not be < (right)))
val caught2 = intercept[TestFailedException] {
1 should not be <= (1)
}
assert(caught2.getMessage === "1 was less than or equal to 1")
check((left: Int, right: Int) => left <= right ==> throwsTestFailedException(left should not be <= (right)))
val caught3 = intercept[TestFailedException] {
2 should not be > (1)
}
assert(caught3.getMessage === "2 was greater than 1")
check((left: Int, right: Int) => left > right ==> throwsTestFailedException(left should not be > (right)))
val caught4 = intercept[TestFailedException] {
1 should not be >= (1)
}
assert(caught4.getMessage === "1 was greater than or equal to 1")
check((left: Int, right: Int) => left >= right ==> throwsTestFailedException(left should not be >= (right)))
}
// Comparison with and
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
2 should { be < (5) and (be < (2)) }
}
assert(caught1.getMessage === "2 was less than 5, but 2 was not less than 2")
val caught2 = intercept[TestFailedException] {
2 should ((be < (5)) and (be < (2)))
}
assert(caught2.getMessage === "2 was less than 5, but 2 was not less than 2")
val caught3 = intercept[TestFailedException] {
2 should (be < (5) and be < (2))
}
assert(caught3.getMessage === "2 was less than 5, but 2 was not less than 2")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
7 should { be > (5) and (be > (12)) }
}
assert(caught1.getMessage === "7 was greater than 5, but 7 was not greater than 12")
val caught2 = intercept[TestFailedException] {
7 should ((be > (5)) and (be > (12)))
}
assert(caught2.getMessage === "7 was greater than 5, but 7 was not greater than 12")
val caught3 = intercept[TestFailedException] {
7 should (be > (5) and be > (12))
}
assert(caught3.getMessage === "7 was greater than 5, but 7 was not greater than 12")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
2 should { be <= (2) and (be <= (1)) }
}
assert(caught1.getMessage === "2 was less than or equal to 2, but 2 was not less than or equal to 1")
val caught2 = intercept[TestFailedException] {
2 should ((be <= (2)) and (be <= (1)))
}
assert(caught2.getMessage === "2 was less than or equal to 2, but 2 was not less than or equal to 1")
val caught3 = intercept[TestFailedException] {
2 should (be <= (2) and be <= (1))
}
assert(caught3.getMessage === "2 was less than or equal to 2, but 2 was not less than or equal to 1")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
7 should { be >= (7) and (be >= (8)) }
}
assert(caught1.getMessage === "7 was greater than or equal to 7, but 7 was not greater than or equal to 8")
val caught2 = intercept[TestFailedException] {
7 should ((be >= (7)) and (be >= (8)))
}
assert(caught2.getMessage === "7 was greater than or equal to 7, but 7 was not greater than or equal to 8")
val caught3 = intercept[TestFailedException] {
7 should (be >= (7) and be >= (8))
}
assert(caught3.getMessage === "7 was greater than or equal to 7, but 7 was not greater than or equal to 8")
}
// Comparison with or
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
2 should { be < (2) or (be < (1)) }
}
assert(caught1.getMessage === "2 was not less than 2, and 2 was not less than 1")
val caught2 = intercept[TestFailedException] {
2 should ((be < (2)) or (be < (1)))
}
assert(caught2.getMessage === "2 was not less than 2, and 2 was not less than 1")
val caught3 = intercept[TestFailedException] {
2 should (be < (2) or be < (1))
}
assert(caught3.getMessage === "2 was not less than 2, and 2 was not less than 1")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
1 should { be > (5) or (be > (12)) }
}
assert(caught1.getMessage === "1 was not greater than 5, and 1 was not greater than 12")
val caught2 = intercept[TestFailedException] {
1 should ((be > (5)) or (be > (12)))
}
assert(caught2.getMessage === "1 was not greater than 5, and 1 was not greater than 12")
val caught3 = intercept[TestFailedException] {
1 should (be > (5) or be > (12))
}
assert(caught3.getMessage === "1 was not greater than 5, and 1 was not greater than 12")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
3 should { be <= (2) or (be <= (1)) }
}
assert(caught1.getMessage === "3 was not less than or equal to 2, and 3 was not less than or equal to 1")
val caught2 = intercept[TestFailedException] {
3 should ((be <= (2)) or (be <= (1)))
}
assert(caught2.getMessage === "3 was not less than or equal to 2, and 3 was not less than or equal to 1")
val caught3 = intercept[TestFailedException] {
3 should (be <= (2) or be <= (1))
}
assert(caught3.getMessage === "3 was not less than or equal to 2, and 3 was not less than or equal to 1")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
6 should { be >= (7) or (be >= (8)) }
}
assert(caught1.getMessage === "6 was not greater than or equal to 7, and 6 was not greater than or equal to 8")
val caught2 = intercept[TestFailedException] {
6 should ((be >= (7)) or (be >= (8)))
}
assert(caught2.getMessage === "6 was not greater than or equal to 7, and 6 was not greater than or equal to 8")
val caught3 = intercept[TestFailedException] {
6 should (be >= (7) or be >= (8))
}
assert(caught3.getMessage === "6 was not greater than or equal to 7, and 6 was not greater than or equal to 8")
}
// Comparison with and not
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
5 should { not { be < (2) } and not { be < (6) }}
}
assert(caught1.getMessage === "5 was not less than 2, but 5 was less than 6")
val caught2 = intercept[TestFailedException] {
5 should ((not be < (2)) and (not be < (6)))
}
assert(caught2.getMessage === "5 was not less than 2, but 5 was less than 6")
val caught3 = intercept[TestFailedException] {
5 should (not be < (2) and not be < (6))
}
assert(caught3.getMessage === "5 was not less than 2, but 5 was less than 6")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
7 should { not { be > (8) } and not (be > (6)) }
}
assert(caught1.getMessage === "7 was not greater than 8, but 7 was greater than 6")
val caught2 = intercept[TestFailedException] {
7 should ((not be > (8)) and (not be > (6)))
}
assert(caught2.getMessage === "7 was not greater than 8, but 7 was greater than 6")
val caught3 = intercept[TestFailedException] {
7 should (not be > (8) and not be > (6))
}
assert(caught3.getMessage === "7 was not greater than 8, but 7 was greater than 6")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
2 should { not { be <= (1) } and (not be <= (2)) }
}
assert(caught1.getMessage === "2 was not less than or equal to 1, but 2 was less than or equal to 2")
val caught2 = intercept[TestFailedException] {
2 should ((not be <= (1)) and (not be <= (2)))
}
assert(caught2.getMessage === "2 was not less than or equal to 1, but 2 was less than or equal to 2")
val caught3 = intercept[TestFailedException] {
2 should (not be <= (1) and not be <= (2))
}
assert(caught3.getMessage === "2 was not less than or equal to 1, but 2 was less than or equal to 2")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
7 should { not { be >= (8) } and not (be >= (6)) }
}
assert(caught1.getMessage === "7 was not greater than or equal to 8, but 7 was greater than or equal to 6")
val caught2 = intercept[TestFailedException] {
7 should ((not be >= (8)) and (not be >= (6)))
}
assert(caught2.getMessage === "7 was not greater than or equal to 8, but 7 was greater than or equal to 6")
val caught3 = intercept[TestFailedException] {
7 should (not be >= (8) and not be >= (6))
}
assert(caught3.getMessage === "7 was not greater than or equal to 8, but 7 was greater than or equal to 6")
}
// Comparison with or not
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
5 should { not { be < (7) } or not { be < (8) }}
}
assert(caught1.getMessage === "5 was less than 7, and 5 was less than 8")
val caught2 = intercept[TestFailedException] {
5 should ((not be < (7)) or (not be < (8)))
}
assert(caught2.getMessage === "5 was less than 7, and 5 was less than 8")
val caught3 = intercept[TestFailedException] {
5 should (not be < (7) or not be < (8))
}
assert(caught3.getMessage === "5 was less than 7, and 5 was less than 8")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
7 should { not { be > (5) } or not (be > (6)) }
}
assert(caught1.getMessage === "7 was greater than 5, and 7 was greater than 6")
val caught2 = intercept[TestFailedException] {
7 should ((not be > (5)) or (not be > (6)))
}
assert(caught2.getMessage === "7 was greater than 5, and 7 was greater than 6")
val caught3 = intercept[TestFailedException] {
7 should (not be > (5) or not be > (6))
}
assert(caught3.getMessage === "7 was greater than 5, and 7 was greater than 6")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
2 should { not { be <= (3) } or (not be <= (2)) }
}
assert(caught1.getMessage === "2 was less than or equal to 3, and 2 was less than or equal to 2")
val caught2 = intercept[TestFailedException] {
2 should ((not be <= (3)) or (not be <= (2)))
}
assert(caught2.getMessage === "2 was less than or equal to 3, and 2 was less than or equal to 2")
val caught3 = intercept[TestFailedException] {
2 should (not be <= (3) or not be <= (2))
}
assert(caught3.getMessage === "2 was less than or equal to 3, and 2 was less than or equal to 2")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
8 should { not { be >= (7) } or not (be >= (6)) }
}
assert(caught1.getMessage === "8 was greater than or equal to 7, and 8 was greater than or equal to 6")
val caught2 = intercept[TestFailedException] {
8 should ((not be >= (7)) or (not be >= (6)))
}
assert(caught2.getMessage === "8 was greater than or equal to 7, and 8 was greater than or equal to 6")
val caught3 = intercept[TestFailedException] {
8 should (not be >= (7) or not be >= (6))
}
assert(caught3.getMessage === "8 was greater than or equal to 7, and 8 was greater than or equal to 6")
}
}
object `on String` {
def `should do nothing if the comparison holds true` {
check((left: String, right: String) => left < right ==> returnsNormally(left should be < (right)))
check((left: String, right: String) => left <= right ==> returnsNormally(left should be <= (right)))
check((left: String, right: String) => left > right ==> returnsNormally(left should be > (right)))
check((left: String, right: String) => left >= right ==> returnsNormally(left should be >= (right)))
}
def `should do nothing if the comparison fails and used with not` {
check((left: String, right: String) => left >= right ==> returnsNormally(left should not be < (right)))
check((left: String, right: String) => left > right ==> returnsNormally(left should not be <= (right)))
check((left: String, right: String) => left <= right ==> returnsNormally(left should not be > (right)))
check((left: String, right: String) => left < right ==> returnsNormally(left should not be >= (right)))
check((left: String, right: String) => left >= right ==> returnsNormally(left should not (be < (right))))
check((left: String, right: String) => left > right ==> returnsNormally(left should not (be <= (right))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should not (be > (right))))
check((left: String, right: String) => left < right ==> returnsNormally(left should not (be >= (right))))
}
def `should do nothing when comparison succeeds and used in a logical-and expression` {
check((left: String, right: String) => left < right ==> returnsNormally(left should ((be < (right)) and (be < (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (be < (right) and (be < (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (be < (right) and be < (right))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should ((be <= (right)) and (be <= (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (be <= (right) and (be <= (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (be <= (right) and be <= (right))))
check((left: String, right: String) => left > right ==> returnsNormally(left should ((be > (right)) and (be > (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (be > (right) and (be > (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (be > (right) and be > (right))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should ((be >= (right)) and (be >= (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (be >= (right) and (be >= (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (be >= (right) and be >= (right))))
}
def `should do nothing when array size matches and used in a logical-or expression` {
check((left: String, right: String) => left < right ==> returnsNormally(left should ((be < (right)) or (be < (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (be < (right) or (be < (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (be < (right) or be < (right))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should ((be <= (right)) or (be <= (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (be <= (right) or (be <= (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (be <= (right) or be <= (right))))
check((left: String, right: String) => left > right ==> returnsNormally(left should ((be > (right)) or (be > (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (be > (right) or (be > (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (be > (right) or be > (right))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should ((be >= (right)) or (be >= (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (be >= (right) or (be >= (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (be >= (right) or be >= (right))))
check((left: String, right: String) => returnsNormally(left should (be >= (right) or be < (right))))
check((left: String, right: String) => returnsNormally(left should (be > (right) or be <= (right))))
}
def `should do nothing when comparison fails and used in a logical-and expression with not` {
check((left: String, right: String) => left >= right ==> returnsNormally(left should (not (be < (right)) and not (be < (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should ((not be < (right)) and (not be < (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (not be < (right) and not be < (right))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (not (be <= (right)) and not (be <= (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should ((not be <= (right)) and (not be <= (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (not be <= (right) and not be <= (right))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (not (be > (right)) and not (be > (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should ((not be > (right)) and (not be > (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (not be > (right) and not be > (right))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (not (be >= (right)) and not (be >= (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should ((not be >= (right)) and (not be >= (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (not be >= (right) and not be >= (right))))
}
def `should do nothing when comparison fails and used in a logical-or expression with not` {
check((left: String, right: String) => left > right ==> returnsNormally(left should (not (be >= (right)) or not (be < (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should ((not be >= (right)) or (not be < (right)))))
check((left: String, right: String) => left > right ==> returnsNormally(left should (not be >= (right) or not be < (right))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (not (be > (right)) or not (be <= (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should ((not be > (right)) or (not be <= (right)))))
check((left: String, right: String) => left >= right ==> returnsNormally(left should (not be > (right) or not be <= (right))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (not (be <= (right)) or not (be > (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should ((not be <= (right)) or (not be > (right)))))
check((left: String, right: String) => left < right ==> returnsNormally(left should (not be <= (right) or not be > (right))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (not (be < (right)) or not (be >= (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should ((not be < (right)) or (not be >= (right)))))
check((left: String, right: String) => left <= right ==> returnsNormally(left should (not be < (right) or not be >= (right))))
}
def `should throw TestFailedException if comparison does not succeed` {
val caught1 = intercept[TestFailedException] {
"aaa" should be < ("aaa")
}
assert(caught1.getMessage === "\\"aaa\\" was not less than \\"aaa\\"")
check((left: String, right: String) => left >= right ==> throwsTestFailedException(left should be < (right)))
val caught2 = intercept[TestFailedException] {
"bbb" should be <= ("aaa")
}
assert(caught2.getMessage === "\\"bbb\\" was not less than or equal to \\"aaa\\"")
check((left: String, right: String) => left > right ==> throwsTestFailedException(left should be <= (right)))
val caught3 = intercept[TestFailedException] {
"aaa" should be > ("aaa")
}
assert(caught3.getMessage === "\\"aaa\\" was not greater than \\"aaa\\"")
check((left: String, right: String) => left <= right ==> throwsTestFailedException(left should be > (right)))
val caught4 = intercept[TestFailedException] {
"aaa" should be >= ("bbb")
}
assert(caught4.getMessage === "\\"aaa\\" was not greater than or equal to \\"bbb\\"")
check((left: String, right: String) => left < right ==> throwsTestFailedException(left should be >= (right)))
}
def `should throw TestFailedException if comparison succeeds but used with not` {
val caught1 = intercept[TestFailedException] {
"aaa" should not be < ("bbb")
}
assert(caught1.getMessage === "\\"aaa\\" was less than \\"bbb\\"")
check((left: String, right: String) => left < right ==> throwsTestFailedException(left should not be < (right)))
val caught2 = intercept[TestFailedException] {
"aaa" should not be <= ("aaa")
}
assert(caught2.getMessage === "\\"aaa\\" was less than or equal to \\"aaa\\"")
check((left: String, right: String) => left <= right ==> throwsTestFailedException(left should not be <= (right)))
val caught3 = intercept[TestFailedException] {
"bbb" should not be > ("aaa")
}
assert(caught3.getMessage === "\\"bbb\\" was greater than \\"aaa\\"")
check((left: String, right: String) => left > right ==> throwsTestFailedException(left should not be > (right)))
val caught4 = intercept[TestFailedException] {
"aaa" should not be >= ("aaa")
}
assert(caught4.getMessage === "\\"aaa\\" was greater than or equal to \\"aaa\\"")
check((left: String, right: String) => left >= right ==> throwsTestFailedException(left should not be >= (right)))
}
// Comparison with and
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"2" should { be < ("5") and (be < ("2")) }
}
assert(caught1.getMessage === "\\"2\\" was less than \\"5\\", but \\"2\\" was not less than \\"2\\"")
val caught2 = intercept[TestFailedException] {
"2" should ((be < ("5")) and (be < ("2")))
}
assert(caught2.getMessage === "\\"2\\" was less than \\"5\\", but \\"2\\" was not less than \\"2\\"")
val caught3 = intercept[TestFailedException] {
"2" should (be < ("5") and be < ("2"))
}
assert(caught3.getMessage === "\\"2\\" was less than \\"5\\", but \\"2\\" was not less than \\"2\\"")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"7" should { be > ("5") and (be > ("9")) }
}
assert(caught1.getMessage === "\\"7\\" was greater than \\"5\\", but \\"7\\" was not greater than \\"9\\"")
val caught2 = intercept[TestFailedException] {
"7" should ((be > ("5")) and (be > ("9")))
}
assert(caught2.getMessage === "\\"7\\" was greater than \\"5\\", but \\"7\\" was not greater than \\"9\\"")
val caught3 = intercept[TestFailedException] {
"7" should (be > ("5") and be > ("9"))
}
assert(caught3.getMessage === "\\"7\\" was greater than \\"5\\", but \\"7\\" was not greater than \\"9\\"")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"2" should { be <= ("2") and (be <= ("1")) }
}
assert(caught1.getMessage === "\\"2\\" was less than or equal to \\"2\\", but \\"2\\" was not less than or equal to \\"1\\"")
val caught2 = intercept[TestFailedException] {
"2" should ((be <= ("2")) and (be <= ("1")))
}
assert(caught2.getMessage === "\\"2\\" was less than or equal to \\"2\\", but \\"2\\" was not less than or equal to \\"1\\"")
val caught3 = intercept[TestFailedException] {
"2" should (be <= ("2") and be <= ("1"))
}
assert(caught3.getMessage === "\\"2\\" was less than or equal to \\"2\\", but \\"2\\" was not less than or equal to \\"1\\"")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"7" should { be >= ("7") and (be >= ("8")) }
}
assert(caught1.getMessage === "\\"7\\" was greater than or equal to \\"7\\", but \\"7\\" was not greater than or equal to \\"8\\"")
val caught2 = intercept[TestFailedException] {
"7" should ((be >= ("7")) and (be >= ("8")))
}
assert(caught2.getMessage === "\\"7\\" was greater than or equal to \\"7\\", but \\"7\\" was not greater than or equal to \\"8\\"")
val caught3 = intercept[TestFailedException] {
"7" should (be >= ("7") and be >= ("8"))
}
assert(caught3.getMessage === "\\"7\\" was greater than or equal to \\"7\\", but \\"7\\" was not greater than or equal to \\"8\\"")
}
// Comparison with or
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"2" should { be < ("2") or (be < ("1")) }
}
assert(caught1.getMessage === "\\"2\\" was not less than \\"2\\", and \\"2\\" was not less than \\"1\\"")
val caught2 = intercept[TestFailedException] {
"2" should ((be < ("2")) or (be < ("1")))
}
assert(caught2.getMessage === "\\"2\\" was not less than \\"2\\", and \\"2\\" was not less than \\"1\\"")
val caught3 = intercept[TestFailedException] {
"2" should (be < ("2") or be < ("1"))
}
assert(caught3.getMessage === "\\"2\\" was not less than \\"2\\", and \\"2\\" was not less than \\"1\\"")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"1" should { be > ("5") or (be > ("9")) }
}
assert(caught1.getMessage === "\\"1\\" was not greater than \\"5\\", and \\"1\\" was not greater than \\"9\\"")
val caught2 = intercept[TestFailedException] {
"1" should ((be > ("5")) or (be > ("9")))
}
assert(caught2.getMessage === "\\"1\\" was not greater than \\"5\\", and \\"1\\" was not greater than \\"9\\"")
val caught3 = intercept[TestFailedException] {
"1" should (be > ("5") or be > ("9"))
}
assert(caught3.getMessage === "\\"1\\" was not greater than \\"5\\", and \\"1\\" was not greater than \\"9\\"")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"3" should { be <= ("2") or (be <= ("1")) }
}
assert(caught1.getMessage === "\\"3\\" was not less than or equal to \\"2\\", and \\"3\\" was not less than or equal to \\"1\\"")
val caught2 = intercept[TestFailedException] {
"3" should ((be <= ("2")) or (be <= ("1")))
}
assert(caught2.getMessage === "\\"3\\" was not less than or equal to \\"2\\", and \\"3\\" was not less than or equal to \\"1\\"")
val caught3 = intercept[TestFailedException] {
"3" should (be <= ("2") or be <= ("1"))
}
assert(caught3.getMessage === "\\"3\\" was not less than or equal to \\"2\\", and \\"3\\" was not less than or equal to \\"1\\"")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"6" should { be >= ("7") or (be >= ("8")) }
}
assert(caught1.getMessage === "\\"6\\" was not greater than or equal to \\"7\\", and \\"6\\" was not greater than or equal to \\"8\\"")
val caught2 = intercept[TestFailedException] {
"6" should ((be >= ("7")) or (be >= ("8")))
}
assert(caught2.getMessage === "\\"6\\" was not greater than or equal to \\"7\\", and \\"6\\" was not greater than or equal to \\"8\\"")
val caught3 = intercept[TestFailedException] {
"6" should (be >= ("7") or be >= ("8"))
}
assert(caught3.getMessage === "\\"6\\" was not greater than or equal to \\"7\\", and \\"6\\" was not greater than or equal to \\"8\\"")
}
// Comparison with and not
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"5" should { not { be < ("2") } and not { be < ("6") }}
}
assert(caught1.getMessage === "\\"5\\" was not less than \\"2\\", but \\"5\\" was less than \\"6\\"")
val caught2 = intercept[TestFailedException] {
"5" should ((not be < ("2")) and (not be < ("6")))
}
assert(caught2.getMessage === "\\"5\\" was not less than \\"2\\", but \\"5\\" was less than \\"6\\"")
val caught3 = intercept[TestFailedException] {
"5" should (not be < ("2") and not be < ("6"))
}
assert(caught3.getMessage === "\\"5\\" was not less than \\"2\\", but \\"5\\" was less than \\"6\\"")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"7" should { not { be > ("8") } and not (be > ("6")) }
}
assert(caught1.getMessage === "\\"7\\" was not greater than \\"8\\", but \\"7\\" was greater than \\"6\\"")
val caught2 = intercept[TestFailedException] {
"7" should ((not be > ("8")) and (not be > ("6")))
}
assert(caught2.getMessage === "\\"7\\" was not greater than \\"8\\", but \\"7\\" was greater than \\"6\\"")
val caught3 = intercept[TestFailedException] {
"7" should (not be > ("8") and not be > ("6"))
}
assert(caught3.getMessage === "\\"7\\" was not greater than \\"8\\", but \\"7\\" was greater than \\"6\\"")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"2" should { not { be <= ("1") } and (not be <= ("2")) }
}
assert(caught1.getMessage === "\\"2\\" was not less than or equal to \\"1\\", but \\"2\\" was less than or equal to \\"2\\"")
val caught2 = intercept[TestFailedException] {
"2" should ((not be <= ("1")) and (not be <= ("2")))
}
assert(caught2.getMessage === "\\"2\\" was not less than or equal to \\"1\\", but \\"2\\" was less than or equal to \\"2\\"")
val caught3 = intercept[TestFailedException] {
"2" should (not be <= ("1") and not be <= ("2"))
}
assert(caught3.getMessage === "\\"2\\" was not less than or equal to \\"1\\", but \\"2\\" was less than or equal to \\"2\\"")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"7" should { not { be >= ("8") } and not (be >= ("6")) }
}
assert(caught1.getMessage === "\\"7\\" was not greater than or equal to \\"8\\", but \\"7\\" was greater than or equal to \\"6\\"")
val caught2 = intercept[TestFailedException] {
"7" should ((not be >= ("8")) and (not be >= ("6")))
}
assert(caught2.getMessage === "\\"7\\" was not greater than or equal to \\"8\\", but \\"7\\" was greater than or equal to \\"6\\"")
val caught3 = intercept[TestFailedException] {
"7" should (not be >= ("8") and not be >= ("6"))
}
assert(caught3.getMessage === "\\"7\\" was not greater than or equal to \\"8\\", but \\"7\\" was greater than or equal to \\"6\\"")
}
// Comparison with or not
def `should throw an assertion error when less than comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"5" should { not { be < ("7") } or not { be < ("8") }}
}
assert(caught1.getMessage === "\\"5\\" was less than \\"7\\", and \\"5\\" was less than \\"8\\"")
val caught2 = intercept[TestFailedException] {
"5" should ((not be < ("7")) or (not be < ("8")))
}
assert(caught2.getMessage === "\\"5\\" was less than \\"7\\", and \\"5\\" was less than \\"8\\"")
val caught3 = intercept[TestFailedException] {
"5" should (not be < ("7") or not be < ("8"))
}
assert(caught3.getMessage === "\\"5\\" was less than \\"7\\", and \\"5\\" was less than \\"8\\"")
}
def `should throw an assertion error when greater than comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"7" should { not { be > ("5") } or not (be > ("6")) }
}
assert(caught1.getMessage === "\\"7\\" was greater than \\"5\\", and \\"7\\" was greater than \\"6\\"")
val caught2 = intercept[TestFailedException] {
"7" should ((not be > ("5")) or (not be > ("6")))
}
assert(caught2.getMessage === "\\"7\\" was greater than \\"5\\", and \\"7\\" was greater than \\"6\\"")
val caught3 = intercept[TestFailedException] {
"7" should (not be > ("5") or not be > ("6"))
}
assert(caught3.getMessage === "\\"7\\" was greater than \\"5\\", and \\"7\\" was greater than \\"6\\"")
}
def `should throw an assertion error when less than or equal to comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"2" should { not { be <= ("3") } or (not be <= ("2")) }
}
assert(caught1.getMessage === "\\"2\\" was less than or equal to \\"3\\", and \\"2\\" was less than or equal to \\"2\\"")
val caught2 = intercept[TestFailedException] {
"2" should ((not be <= ("3")) or (not be <= ("2")))
}
assert(caught2.getMessage === "\\"2\\" was less than or equal to \\"3\\", and \\"2\\" was less than or equal to \\"2\\"")
val caught3 = intercept[TestFailedException] {
"2" should (not be <= ("3") or not be <= ("2"))
}
assert(caught3.getMessage === "\\"2\\" was less than or equal to \\"3\\", and \\"2\\" was less than or equal to \\"2\\"")
}
def `should throw an assertion error when greater than or equal to comparison doesn't succeed and used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"8" should { not { be >= ("7") } or not (be >= ("6")) }
}
assert(caught1.getMessage === "\\"8\\" was greater than or equal to \\"7\\", and \\"8\\" was greater than or equal to \\"6\\"")
val caught2 = intercept[TestFailedException] {
"8" should ((not be >= ("7")) or (not be >= ("6")))
}
assert(caught2.getMessage === "\\"8\\" was greater than or equal to \\"7\\", and \\"8\\" was greater than or equal to \\"6\\"")
val caught3 = intercept[TestFailedException] {
"8" should (not be >= ("7") or not be >= ("6"))
}
assert(caught3.getMessage === "\\"8\\" was greater than or equal to \\"7\\", and \\"8\\" was greater than or equal to \\"6\\"")
}
}
}
}
| travisbrown/scalatest | src/test/scala/org/scalatest/matchers/ShouldOrderedSpec.scala | Scala | apache-2.0 | 50,629 |
// See LICENSE.txt for license details.
package examples
import chisel3._
/** Companion holding the reference lookup pattern used by the VecSearch circuit. */
object VecSearchTest {
  /** ROM contents in index order; each entry fits in 4 bits (0..15). */
  val pattern: Array[Int] = Array(0, 4, 15, 14, 2, 5, 13)
}
// Streams the hard-coded pattern out one element per clock cycle,
// driven by a free-running index counter.
class VecSearch extends Module {
  val io = IO(new Bundle {
    // Current pattern element, 4 bits wide.
    val out = Output(UInt(4.W))
  })
  // 3-bit counter register, reset to 0; wraps from 7 back to 0.
  val index = RegInit(0.U(3.W))
  // ROM built from the shared pattern, each entry as a 4-bit UInt.
  val elts = VecInit(VecSearchTest.pattern.map(_.asUInt(4.W)))
  index := index + 1.U
  // NOTE(review): index spans 0..7 but the pattern has only 7 entries, so
  // index === 7.U indexes past the last Vec element — confirm this is intended.
  io.out := elts(index)
}
| timtian090/Playground | chiselTutorial/src/main/scala/examples/VecSearch.scala | Scala | mit | 390 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of code snippets that match specific criteria, giving a quick overview of the dataset's contents without deeper analysis.