| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (1 class) | license (15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package x7c1.wheat.macros.database
import android.content.ContentValues
import android.database.Cursor
import scala.language.dynamics
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
object TypedCursor {
def apply[A <: TypedFields]
(cursor: Cursor): A with TypedCursor[A] = macro TypedColumnImpl.create[A]
}
trait TypedCursor[X] {
self: X =>
def moveTo(n: Int): Boolean
def moveToFind[A](n: Int)(f: => A): Option[A] = {
if (moveTo(n)) Some(f)
else None
}
def moveToHead[A](f: X => A): Option[A] = {
if (moveTo(0)) Some(f(this))
else None
}
def freezeAt(n: Int): Option[X]
}
trait TypedFields {
type -->[A, B] = FieldTransform[A, B]
}
object TypedFields {
def expose[A <: TypedFields]: A = macro TypedContentValues.extract[A]
def toContentValues[A](pairs: A*): ContentValues = macro TypedContentValues.unwrap[A]
def toArgs[A](pairs: A*): Seq[(String, String)] = macro TypedFieldsParser.toSelectionArgs[A]
}
trait FieldTransform[A, B]{
def raw: A
def typed: B
}
trait FieldConvertible[A, B]{
def wrap(value: A): B
def unwrap(value: B): A
}
object FieldConvertible {
implicit object intToBoolean extends FieldConvertible[Int, Boolean]{
override def wrap(value: Int): Boolean = value == 1
override def unwrap(value: Boolean): Int = if (value) 1 else 0
}
}
private object TypedColumnImpl {
def create[A: c.WeakTypeTag](c: blackbox.Context)(cursor: c.Tree): c.Tree = {
import c.universe._
val definition = weakTypeOf[A]
val methods = definition.members filter { symbol =>
!symbol.fullName.startsWith("java.") &&
!symbol.fullName.startsWith("scala.") &&
!symbol.isConstructor && symbol.isMethod
} map (_.asMethod) filter (_.paramLists.isEmpty)
def getValue(tpe: Type, indexKey: TermName): Tree = tpe match {
case x if x =:= typeOf[String] => q"$cursor.getString($indexKey)"
case x if x =:= typeOf[Option[String]] => q"Option($cursor.getString($indexKey))"
case x if x =:= typeOf[Long] => q"$cursor.getLong($indexKey)"
case x if x =:= typeOf[Int] => q"$cursor.getInt($indexKey)"
case x if x =:= typeOf[Double] => q"$cursor.getDouble($indexKey)"
case x if x =:= typeOf[Option[Long]] =>
/*
cannot use cursor.getLong here
because it returns 0 when target value is null
*/
q"Option($cursor.getString($indexKey)).map(_.toLong)"
case x if x =:= typeOf[Option[Int]] =>
q"Option($cursor.getString($indexKey)).map(_.toInt)"
case x if x <:< typeOf[FieldTransform[_, _]] =>
val Seq(from, to) = tpe.typeArgs
val value = getValue(from, indexKey)
val convertible = appliedType(typeOf[FieldConvertible[_, _]].typeConstructor, from, to)
val transform = appliedType(typeOf[FieldTransform[_, _]].typeConstructor, from, to)
q"""
new $transform {
override val raw = $value
override val typed = implicitly[$convertible].wrap($value)
}"""
case x =>
throw new IllegalArgumentException(s"unsupported type: $x")
}
val kvs = methods map { method =>
val key = method.name.toString
val indexKey = TermName(c.freshName(key + "_index_"))
val value = getValue(method.returnType, indexKey)
(indexKey, key, value)
}
val overrides = kvs flatMap { case (indexKey, key, value) =>
Seq(
q"lazy val $indexKey = $cursor.getColumnIndexOrThrow($key)",
q"override def ${TermName(key)} = $value"
)
}
val freeze = kvs map {
case (_, key, value) =>
q"override val ${TermName(key)} = $value"
}
val typedCursor = appliedType(
typeOf[TypedCursor[_]].typeConstructor, definition
)
val tree = q"""
new $definition with $typedCursor {
..$overrides
override def moveTo(n: Int) = $cursor.moveToPosition(n)
override def freezeAt(n: Int) =
if (moveTo(n)) Some(new $definition { ..$freeze })
else None
}
"""
// println(showCode(tree))
tree
}
}
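// Hypothetical usage sketch (added for illustration, not part of the original file):
// a record trait whose column getters are generated by the TypedCursor macro.
// The trait name, field names and the underlying table are assumptions.
//
//   trait NoteRecord extends TypedFields {
//     def noteId: Long
//     def title: String
//     def updatedAt: Option[Long]
//     def visible: Int --> Boolean // raw Int column exposed as Boolean via FieldConvertible.intToBoolean
//   }
//
//   val record = TypedCursor[NoteRecord](cursor)
//   val firstTitle: Option[String] = record.moveToHead(_.title)
//   val third: Option[NoteRecord] = record.freezeAt(2)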
| x7c1/Linen | wheat-macros/src/main/scala/x7c1/wheat/macros/database/TypedCursor.scala | Scala | mit | 4,117 |
package im.actor.server.api.rpc.service.groups
import java.time.Instant
import akka.actor.ActorSystem
import akka.http.scaladsl.util.FastFuture
import cats.data.Xor
import com.github.ghik.silencer.silent
import im.actor.api.rpc.PeerHelpers._
import im.actor.api.rpc._
import im.actor.api.rpc.files.ApiFileLocation
import im.actor.api.rpc.groups._
import im.actor.api.rpc.misc.{ ResponseSeq, ResponseSeqDate, ResponseVoid }
import im.actor.api.rpc.peers.{ ApiGroupOutPeer, ApiUserOutPeer }
import im.actor.api.rpc.sequence.ApiUpdateOptimization
import im.actor.api.rpc.users.ApiUser
import im.actor.concurrent.FutureExt
import im.actor.server.acl.ACLUtils
import im.actor.server.db.DbExtension
import im.actor.server.dialog.DialogExtension
import im.actor.server.file.{ FileErrors, ImageUtils }
import im.actor.server.group._
import im.actor.server.model.GroupInviteToken
import im.actor.server.names.GlobalNamesStorageKeyValueStorage
import im.actor.server.persist.{ GroupInviteTokenRepo, GroupUserRepo }
import im.actor.server.presences.GroupPresenceExtension
import im.actor.server.sequence.{ SeqState, SeqStateDate, SeqUpdatesExtension }
import im.actor.server.user.UserExtension
import im.actor.util.ThreadLocalSecureRandom
import im.actor.util.misc.{ IdUtils, StringUtils }
import slick.dbio.DBIO
import slick.driver.PostgresDriver.api._
import scala.concurrent.{ ExecutionContext, Future }
final class GroupsServiceImpl(groupInviteConfig: GroupInviteConfig)(implicit actorSystem: ActorSystem) extends GroupsService {
import EntitiesHelpers._
import FileHelpers._
import FutureResultRpc._
import GroupCommands._
import IdUtils._
import ImageUtils._
case object NoSeqStateDate extends RuntimeException("No SeqStateDate in response from group found")
override implicit val ec: ExecutionContext = actorSystem.dispatcher
private val db: Database = DbExtension(actorSystem).db
private val groupExt = GroupExtension(actorSystem)
private val seqUpdExt = SeqUpdatesExtension(actorSystem)
private val userExt = UserExtension(actorSystem)
private val groupPresenceExt = GroupPresenceExtension(actorSystem)
private val globalNamesStorage = new GlobalNamesStorageKeyValueStorage
private val dialogExt = DialogExtension(actorSystem)
/**
* Loading Full Groups
*
* @param groups Groups to load
*/
override protected def doHandleLoadFullGroups(
groups: IndexedSeq[ApiGroupOutPeer],
clientData: ClientData
): Future[HandlerResult[ResponseLoadFullGroups]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeers(groups) {
for {
fullGroups ← FutureExt.ftraverse(groups)(group ⇒ groupExt.getApiFullStruct(group.groupId, client.userId))
} yield Ok(ResponseLoadFullGroups(fullGroups.toVector))
}
}
/**
* Make user admin
*
* @param groupPeer Group's peer
* @param userPeer User's peer
*/
override protected def doHandleMakeUserAdmin(groupPeer: ApiGroupOutPeer, userPeer: ApiUserOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeqDate]] = {
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
withUserOutPeer(userPeer) {
(for {
_ ← fromFutureBoolean(GroupRpcErrors.CantGrantToBot)(userExt.getUser(userPeer.userId) map (!_.isBot))
resp ← fromFuture(groupExt.makeUserAdmin(groupPeer.groupId, client.userId, client.authId, userPeer.userId))
(_, SeqStateDate(seq, state, date)) = resp
} yield ResponseSeqDate(seq, state.toByteArray, date)).value
}
}
}
}
override def doHandleDismissUserAdmin(groupPeer: ApiGroupOutPeer, userPeer: ApiUserOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
withUserOutPeer(userPeer) {
(for {
_ ← fromFutureBoolean(GroupRpcErrors.CantGrantToBot)(userExt.getUser(userPeer.userId) map (!_.isBot))
seqState ← fromFuture(groupExt.dismissUserAdmin(groupPeer.groupId, client.userId, client.authId, userPeer.userId))
} yield ResponseSeq(seqState.seq, seqState.state.toByteArray)).value
}
}
}
override def doHandleLoadAdminSettings(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseLoadAdminSettings]] =
authorized(clientData) { client ⇒
withGroupOutPeer(groupPeer) {
for {
settings ← groupExt.loadAdminSettings(groupPeer.groupId, client.userId)
} yield Ok(ResponseLoadAdminSettings(settings))
}
}
override def doHandleSaveAdminSettings(groupPeer: ApiGroupOutPeer, settings: ApiAdminSettings, clientData: ClientData): Future[HandlerResult[ResponseVoid]] =
authorized(clientData) { client ⇒
withGroupOutPeer(groupPeer) {
for {
_ ← groupExt.updateAdminSettings(groupPeer.groupId, client.userId, settings)
} yield Ok(ResponseVoid)
}
}
/**
* Loading group members
*
* @param groupPeer Group peer
* @param limit Limit members
* @param next Load more reference
*/
override protected def doHandleLoadMembers(
groupPeer: ApiGroupOutPeer,
limit: Int,
next: Option[Array[Byte]],
clientData: ClientData
): Future[HandlerResult[ResponseLoadMembers]] = {
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
for {
(members, nextOffset) ← groupExt.loadMembers(groupPeer.groupId, client.userId, limit, next)
membersAndPeers ← FutureExt.ftraverse(members) { member ⇒
userExt.getAccessHash(member.userId, client.authId) map (hash ⇒ member → ApiUserOutPeer(member.userId, hash))
}
(members, peers) = membersAndPeers.unzip
} yield Ok(ResponseLoadMembers(peers.toVector, nextOffset, members.toVector))
}
}
}
/**
* Transfer ownership of group
*
* @param groupPeer Group's peer
* @param newOwner New group's owner
*/
override protected def doHandleTransferOwnership(groupPeer: ApiGroupOutPeer, newOwner: ApiUserOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeqDate]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
withUserOutPeer(newOwner) {
(for {
_ ← fromFutureBoolean(GroupRpcErrors.CantGrantToBot)(userExt.getUser(newOwner.userId) map (!_.isBot))
seqState ← fromFuture(groupExt.transferOwnership(groupPeer.groupId, client.userId, client.authId, newOwner.userId))
} yield ResponseSeqDate(
seq = seqState.seq,
state = seqState.state.toByteArray,
date = Instant.now.toEpochMilli
)).value
}
}
}
override def doHandleEditGroupAvatar(
groupPeer: ApiGroupOutPeer,
randomId: Long,
fileLocation: ApiFileLocation,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseEditGroupAvatar]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
addOptimizations(optimizations)
val action = withFileLocation(fileLocation, AvatarSizeLimit) {
scaleAvatar(fileLocation.fileId) flatMap {
case Right(avatar) ⇒
for {
UpdateAvatarAck(avatar, seqStateDate) ← DBIO.from(groupExt.updateAvatar(groupPeer.groupId, client.userId, client.authId, Some(avatar), randomId))
SeqStateDate(seq, state, date) = seqStateDate.getOrElse(throw NoSeqStateDate)
} yield Ok(ResponseEditGroupAvatar(
avatar.get,
seq,
state.toByteArray,
date
))
case Left(e) ⇒
throw FileErrors.LocationInvalid
}
}
db.run(action)
}
}
override def doHandleRemoveGroupAvatar(
groupPeer: ApiGroupOutPeer,
randomId: Long,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
for {
UpdateAvatarAck(avatar, seqStateDate) ← groupExt.updateAvatar(
groupPeer.groupId,
client.userId,
client.authId,
avatarOpt = None,
randomId
)
SeqStateDate(seq, state, date) = seqStateDate.getOrElse(throw NoSeqStateDate)
} yield Ok(ResponseSeqDate(
seq,
state.toByteArray,
date
))
}
override def doHandleKickUser(
groupPeer: ApiGroupOutPeer,
randomId: Long,
userOutPeer: ApiUserOutPeer,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withGroupOutPeer(groupPeer) {
withUserOutPeer(userOutPeer) {
for {
SeqStateDate(seq, state, date) ← groupExt.kickUser(groupPeer.groupId, userOutPeer.userId, randomId)
} yield {
groupPresenceExt.notifyGroupUserRemoved(groupPeer.groupId, userOutPeer.userId)
Ok(ResponseSeqDate(seq, state.toByteArray, date))
}
}
}
}
override def doHandleLeaveGroup(
groupPeer: ApiGroupOutPeer,
randomId: Long,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withGroupOutPeer(groupPeer) {
for {
SeqStateDate(seq, state, date) ← groupExt.leaveGroup(groupPeer.groupId, randomId)
} yield {
groupPresenceExt.notifyGroupUserRemoved(groupPeer.groupId, client.userId)
Ok(ResponseSeqDate(seq, state.toByteArray, date))
}
}
}
override def doHandleLeaveAndDelete(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
for {
_ ← groupExt.leaveGroup(groupPeer.groupId, ACLUtils.randomLong())
SeqState(seq, state) ← dialogExt.delete(client.userId, client.authId, groupPeer.asModel)
} yield {
groupPresenceExt.notifyGroupUserRemoved(groupPeer.groupId, client.userId)
Ok(ResponseSeq(seq, state.toByteArray))
}
}
}
override def doHandleCreateGroup(
randomId: Long,
title: String,
users: IndexedSeq[ApiUserOutPeer],
groupType: Option[ApiGroupType.Value],
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseCreateGroup]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withUserOutPeers(users) {
val stripEntities = optimizations.contains(ApiUpdateOptimization.STRIP_ENTITIES)
val groupId = nextIntId()
val typ = groupType map {
case ApiGroupType.GROUP ⇒ GroupType.General
case ApiGroupType.CHANNEL ⇒ GroupType.Channel
} getOrElse GroupType.General
for {
CreateAck(_, seqStateDate) ← groupExt.create(
groupId,
client.userId,
client.authId,
title,
randomId,
userIds = users.map(_.userId).toSet,
typ
)
SeqStateDate(seq, state, date) = seqStateDate.getOrElse(throw NoSeqStateDate)
group ← groupExt.getApiStruct(groupId, client.userId)
memberIds = GroupUtils.getUserIds(group)
(users, userPeers) ← usersOrPeers(memberIds.toVector, stripEntities)
} yield Ok(ResponseCreateGroup(
seq = seq,
state = state.toByteArray,
group = group,
users = users,
userPeers = userPeers,
date = date
))
}
}
override def doHandleInviteUser(
groupPeer: ApiGroupOutPeer,
randomId: Long,
userOutPeer: ApiUserOutPeer,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withGroupOutPeer(groupPeer) {
withUserOutPeer(userOutPeer) {
for {
SeqStateDate(seq, state, date) ← groupExt.inviteToGroup(groupPeer.groupId, userOutPeer.userId, randomId)
} yield {
groupPresenceExt.notifyGroupUserAdded(groupPeer.groupId, userOutPeer.userId)
Ok(ResponseSeqDate(seq, state.toByteArray, date))
}
}
}
}
override def doHandleEditGroupTitle(
groupPeer: ApiGroupOutPeer,
randomId: Long,
title: String,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withGroupOutPeer(groupPeer) {
for {
SeqStateDate(seq, state, date) ← groupExt.updateTitle(groupPeer.groupId, client.userId, client.authId, title, randomId)
} yield Ok(ResponseSeqDate(seq, state.toByteArray, date))
}
}
override def doHandleGetGroupInviteUrl(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseInviteUrl]] =
authorized(clientData) { implicit client ⇒
groupExt.getApiFullStruct(groupPeer.groupId, client.userId) flatMap { group ⇒
val isMember = group.members.exists(_.userId == client.userId)
if (!isMember) {
FastFuture.successful(Error(GroupRpcErrors.NotAMember))
} else {
withGroupOutPeer(groupPeer) {
for {
inviteString ← group.shortName match {
case Some(name) ⇒ FastFuture.successful(name)
case None ⇒
db.run((GroupInviteTokenRepo.find(groupPeer.groupId, client.userId): @silent).headOption flatMap {
case Some(invToken) ⇒ DBIO.successful(invToken.token)
case None ⇒
val token = ACLUtils.accessToken()
val inviteToken = GroupInviteToken(groupPeer.groupId, client.userId, token)
for (_ ← GroupInviteTokenRepo.create(inviteToken): @silent) yield token
})
}
} yield Ok(ResponseInviteUrl(genInviteUrl(inviteString)))
}
}
}
}
override def doHandleJoinGroup(
joinStringOrUrl: String,
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseJoinGroup]] =
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
val stripEntities = optimizations.contains(ApiUpdateOptimization.STRIP_ENTITIES)
val action = for {
joinSting ← fromOption(GroupRpcErrors.InvalidInviteUrl)(extractJoinString(joinStringOrUrl))
joinInfo ← joinSting match {
case Xor.Left(token) ⇒
for {
info ← fromFutureOption(GroupRpcErrors.InvalidInviteToken)(db.run(GroupInviteTokenRepo.findByToken(token): @silent))
} yield info.groupId → Some(info.creatorId)
case Xor.Right(groupName) ⇒
for {
groupId ← fromFutureOption(GroupRpcErrors.InvalidInviteGroup)(globalNamesStorage.getGroupId(groupName))
} yield groupId → None
}
(groupId, optInviter) = joinInfo
joinResp ← fromFuture(groupExt.joinGroup(
groupId = groupId,
joiningUserId = client.userId,
joiningUserAuthId = client.authId,
invitingUserId = optInviter
))
((SeqStateDate(seq, state, date), userIds, randomId)) = joinResp
up ← fromFuture(usersOrPeers(userIds, stripEntities))
(users, userPeers) = up
groupStruct ← fromFuture(groupExt.getApiStruct(groupId, client.userId))
} yield ResponseJoinGroup(
groupStruct,
seq,
state.toByteArray,
date,
users,
randomId,
userPeers
)
action.value
}
override def doHandleJoinGroupByPeer(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
val action = for {
apiGroup ← fromFuture(groupExt.getApiStruct(groupPeer.groupId, client.userId))
_ ← fromBoolean(GroupRpcErrors.CantJoinGroup)(canJoin(apiGroup.permissions))
joinResp ← fromFuture(groupExt.joinGroup(
groupId = groupPeer.groupId,
joiningUserId = client.userId,
joiningUserAuthId = client.authId,
invitingUserId = None
))
SeqStateDate(seq, state, _) = joinResp._1
} yield ResponseSeq(seq, state.toByteArray)
action.value
}
}
private def canJoin(permissions: Option[Long]) =
permissions exists (p ⇒ (p & (1 << 4)) != 0) // TODO: make wrapper around permissions
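// Minimal sketch for the TODO above (an illustrative addition, not part of the original
// service): a tiny wrapper so call sites can read permission bits by name instead of raw
// bit twiddling. Only bit 4 is grounded in the check above; any other bit would be an assumption.
private final case class GroupPermissionBits(bits: Long) {
def canJoin: Boolean = (bits & (1L << 4)) != 0
}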
override def doHandleRevokeInviteUrl(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseInviteUrl]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
val token = ACLUtils.accessToken()
db.run(
for {
_ ← GroupInviteTokenRepo.revoke(groupPeer.groupId, client.userId): @silent
_ ← GroupInviteTokenRepo.create(
GroupInviteToken(groupPeer.groupId, client.userId, token)
): @silent
} yield Ok(ResponseInviteUrl(genInviteUrl(token)))
)
}
}
override def doHandleEditGroupTopic(
groupPeer: ApiGroupOutPeer,
randomId: Long,
topic: Option[String],
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] = {
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withGroupOutPeer(groupPeer) {
for {
SeqStateDate(seq, state, date) ← groupExt.updateTopic(groupPeer.groupId, client.userId, client.authId, topic, randomId) //isV2(optimizations)
} yield Ok(ResponseSeqDate(seq, state.toByteArray, date))
}
}
}
override def doHandleEditGroupAbout(
groupPeer: ApiGroupOutPeer,
randomId: Long,
about: Option[String],
optimizations: IndexedSeq[ApiUpdateOptimization.Value],
clientData: ClientData
): Future[HandlerResult[ResponseSeqDate]] = {
authorized(clientData) { implicit client ⇒
addOptimizations(optimizations)
withGroupOutPeer(groupPeer) {
for {
SeqStateDate(seq, state, date) ← groupExt.updateAbout(groupPeer.groupId, client.userId, client.authId, about, randomId)
} yield Ok(ResponseSeqDate(seq, state.toByteArray, date))
}
}
}
override def doHandleEditGroupShortName(groupPeer: ApiGroupOutPeer, shortName: Option[String], clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { client ⇒
withGroupOutPeer(groupPeer) {
for {
SeqState(seq, state) ← groupExt.updateShortName(groupPeer.groupId, client.userId, client.authId, shortName)
} yield Ok(ResponseSeq(seq, state.toByteArray))
}
}
protected def doHandleDeleteGroup(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { client ⇒
withGroupOutPeer(groupPeer) {
for {
SeqState(seq, state) ← groupExt.deleteGroup(groupPeer.groupId, client.userId, client.authId)
} yield Ok(ResponseSeq(seq, state.toByteArray))
}
}
protected def doHandleShareHistory(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
authorized(clientData) { client ⇒
withGroupOutPeer(groupPeer) {
for {
SeqState(seq, state) ← groupExt.makeHistoryShared(groupPeer.groupId, client.userId, client.authId)
} yield Ok(ResponseSeq(seq, state.toByteArray))
}
}
private val inviteUriBase = s"${groupInviteConfig.baseUrl}/join/"
private def genInviteUrl(token: String) = s"$inviteUriBase$token"
private def extractJoinString(urlOrTokenOrGroupName: String): Option[String Xor String] = {
val extracted = if (urlOrTokenOrGroupName.startsWith(groupInviteConfig.baseUrl))
urlOrTokenOrGroupName.drop(inviteUriBase.length).takeWhile(c ⇒ c != '?' && c != '#')
else
urlOrTokenOrGroupName
if (StringUtils.validGroupInviteToken(extracted)) {
Some(Xor.left(extracted))
} else if (StringUtils.validGlobalName(extracted)) {
Some(Xor.right(extracted))
} else {
None
}
}
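// Illustrative behaviour of extractJoinString (an added note; the base URL
// "https://example.org" and the sample values below are assumptions):
//
//   extractJoinString("https://example.org/join/abc123?utm=x") // Some(Xor.left("abc123")) when "abc123" is a valid invite token
//   extractJoinString("scala_devs")                            // Some(Xor.right("scala_devs")) when it is a valid global name
//   extractJoinString("not a token, not a name")               // None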
private def addOptimizations(opts: IndexedSeq[ApiUpdateOptimization.Value])(implicit client: AuthorizedClientData): Unit =
seqUpdExt.addOptimizations(client.userId, client.authId, opts map (_.id))
//TODO: move to separate trait
override def doHandleCreateGroupObsolete(randomId: Long, title: String, users: IndexedSeq[ApiUserOutPeer], clientData: ClientData): Future[HandlerResult[ResponseCreateGroupObsolete]] =
authorized(clientData) { implicit client ⇒
withUserOutPeers(users) {
val groupId = nextIntId(ThreadLocalSecureRandom.current())
val userIds = users.map(_.userId).toSet
for {
CreateAck(accessHash, seqStateDate) ← groupExt.create(
groupId,
client.userId,
client.authId,
title,
randomId,
userIds
)
SeqStateDate(seq, state, date) = seqStateDate.getOrElse(throw NoSeqStateDate)
} yield Ok(ResponseCreateGroupObsolete(
groupPeer = ApiGroupOutPeer(groupId, accessHash),
seq = seq,
state = state.toByteArray,
users = (userIds + client.userId).toVector,
date = date
))
}
}
//TODO: move to separate trait
override def doHandleEnterGroupObsolete(groupPeer: ApiGroupOutPeer, clientData: ClientData): Future[HandlerResult[ResponseEnterGroupObsolete]] =
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
for {
// TODO: what should it be? was
// isPublic โ groupExt.isPublic(groupPeer.groupId)
isHistoryShared ← groupExt.isHistoryShared(groupPeer.groupId)
result ← if (isHistoryShared) {
db.run(
for {
member ← GroupUserRepo.find(groupPeer.groupId, client.userId): @silent
response ← member match {
case Some(_) ⇒ DBIO.successful(Error(GroupRpcErrors.AlreadyInvited))
case None ⇒
for {
groupStruct ← DBIO.from(groupExt.getApiStruct(groupPeer.groupId, client.userId))
(seqstatedate, userIds, randomId) ← DBIO.from(groupExt.joinGroup(groupPeer.groupId, client.userId, client.authId, Some(groupStruct.creatorUserId)))
userStructs ← DBIO.from(Future.sequence(userIds.map(userExt.getApiStruct(_, client.userId, client.authId))))
} yield Ok(ResponseEnterGroupObsolete(groupStruct, userStructs, randomId, seqstatedate.seq, seqstatedate.state.toByteArray, seqstatedate.date))
}
} yield response
)
} else {
FastFuture.successful(Error(GroupRpcErrors.GroupNotPublic))
}
} yield result
}
}
/**
* Only an admin can grant admin rights to another group member.
* If the target user is already an admin, `GroupErrors.UserAlreadyAdmin` is returned.
* A group can have multiple admins.
*/
//TODO: move to separate trait
override def doHandleMakeUserAdminObsolete(
groupPeer: ApiGroupOutPeer,
userPeer: ApiUserOutPeer,
clientData: ClientData
): Future[HandlerResult[ResponseMakeUserAdminObsolete]] = {
authorized(clientData) { implicit client ⇒
withGroupOutPeer(groupPeer) {
withUserOutPeer(userPeer) {
for {
(members, SeqStateDate(seq, state, _)) ← groupExt.makeUserAdmin(groupPeer.groupId, client.userId, client.authId, userPeer.userId)
} yield Ok(ResponseMakeUserAdminObsolete(members, seq, state.toByteArray))
}
}
}
}
override def onFailure: PartialFunction[Throwable, RpcError] = recoverCommon orElse {
case GroupErrors.NotAMember ⇒ CommonRpcErrors.forbidden("Not a group member!")
case GroupErrors.NotAdmin ⇒ CommonRpcErrors.forbidden("Only group admin can perform this action.")
case GroupErrors.NotOwner ⇒ CommonRpcErrors.forbidden("Only group owner can perform this action.")
case GroupErrors.UserAlreadyAdmin ⇒ GroupRpcErrors.UserAlreadyAdmin
case GroupErrors.UserAlreadyNotAdmin ⇒ GroupRpcErrors.UserAlreadyNotAdmin
case GroupErrors.InvalidTitle ⇒ GroupRpcErrors.InvalidTitle
case GroupErrors.AboutTooLong ⇒ GroupRpcErrors.AboutTooLong
case GroupErrors.TopicTooLong ⇒ GroupRpcErrors.TopicTooLong
case GroupErrors.BlockedByUser ⇒ GroupRpcErrors.BlockedByUser
case FileErrors.LocationInvalid ⇒ FileRpcErrors.LocationInvalid
case GroupErrors.UserAlreadyInvited ⇒ GroupRpcErrors.AlreadyInvited
case GroupErrors.UserAlreadyJoined ⇒ GroupRpcErrors.AlreadyJoined
case GroupErrors.GroupIdAlreadyExists(_) ⇒ GroupRpcErrors.GroupIdAlreadyExists
case GroupErrors.InvalidShortName ⇒ GroupRpcErrors.InvalidShortName
case GroupErrors.ShortNameTaken ⇒ GroupRpcErrors.ShortNameTaken
case GroupErrors.NoPermission ⇒ GroupRpcErrors.NoPermission
case GroupErrors.CantLeaveGroup ⇒ GroupRpcErrors.CantLeaveGroup
case GroupErrors.UserIsBanned ⇒ GroupRpcErrors.UserIsBanned
}
}
| EaglesoftZJ/actor-platform | actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/groups/GroupsServiceImpl.scala | Scala | agpl-3.0 | 26,627 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.batchupdater
import play.api.libs.functional.syntax._
import play.api.libs.json._
import uk.gov.hmrc.batchupdater.SingleResult._
case class BatchUpdateResult[ID](
tried: Int,
succeeded: Int,
alreadyUpdated: Int,
invalidState: Int,
notFound: List[ID],
updateFailed: List[ID],
auditFailed: List[ID]) {
def add(result: SingleResult, id: ID): BatchUpdateResult[ID] = result match {
case _: Succeeded => copy(tried = tried + 1, succeeded = succeeded + 1)
case _: AlreadyUpdated => copy(tried = tried + 1, alreadyUpdated = alreadyUpdated + 1)
case _: InvalidState => copy(tried = tried + 1, invalidState = invalidState + 1)
case _: NotFound => copy(tried = tried + 1, notFound = notFound :+ id)
case _: UpdateFailed => copy(tried = tried + 1, updateFailed = updateFailed :+ id)
case AuditFailed => copy(auditFailed = auditFailed :+ id)
}
}
object BatchUpdateResult {
def empty[ID]: BatchUpdateResult[ID] = BatchUpdateResult[ID](
tried = 0,
succeeded = 0,
alreadyUpdated = 0,
invalidState = 0,
notFound = List.empty,
updateFailed = List.empty,
auditFailed = List.empty
)
implicit def batchUpdateResultWrites[ID](implicit idFormat: Writes[ID]): Writes[BatchUpdateResult[ID]] =
(
(__ \ "tried").write[Int] and
(__ \ "succeeded").write[Int] and
(__ \ "alreadyUpdated").write[Int] and
(__ \ "invalidState").write[Int] and
(__ \ "notFound").write[List[ID]] and
(__ \ "updateFailed").write[List[ID]] and
(__ \ "auditFailed").write[List[ID]]
)(unlift(BatchUpdateResult.unapply[ID]))
}
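// Hypothetical usage sketch (not part of the original file): folding per-record
// outcomes into a batch summary; `outcomes: Seq[(SingleResult, String)]` is an
// assumption for illustration.
//
//   val summary = outcomes.foldLeft(BatchUpdateResult.empty[String]) {
//     case (acc, (result, id)) => acc.add(result, id)
//   }
//   // Json.toJson(summary) then writes the four counters and three ID lists,
//   // e.g. {"tried":2,"succeeded":1,"alreadyUpdated":0,"invalidState":0,"notFound":["id-42"],...}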
| hmrc/batch-updater | src/main/scala/uk/gov/hmrc/batchupdater/BatchUpdateResult.scala | Scala | apache-2.0 | 2,260 |
package com.kodekutters
import scala.util.control.Breaks._
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json.Writes._
import play.api.libs.json.{JsValue, _}
import play.api.libs.json._
import scala.language.implicitConversions
import scala.language.postfixOps
// todo All this must be redone
/**
* the filters.
*
* due to type erasure when using generic Arrays this turned into this mad and ugly code.
*
*/
object FilterSupport {
sealed trait FilterType
object FilterType {
val theReads = new Reads[FilterType] {
def reads(json: JsValue): JsResult[FilterType] = {
StringArrayFilter.fmt.reads(json) |
IntArrayFilter.fmt.reads(json) |
BooleanArrayFilter.fmt.reads(json) |
DoubleArrayFilter.fmt.reads(json) |
MinMaxString.fmt.reads(json) |
MinMaxInt.fmt.reads(json) |
MinMaxDouble.fmt.reads(json) |
MinMaxArrayDouble.fmt.reads(json) |
MinMaxArrayInt.fmt.reads(json) |
MinMaxArrayString.fmt.reads(json) |
ArrayOfMinMaxInt.fmt.reads(json) |
ArrayOfMinMaxDouble.fmt.reads(json) |
ArrayOfMinMaxString.fmt.reads(json) |
ArrayOfMinMaxArrayString.fmt.reads(json) |
ArrayOfMinMaxArrayInt.fmt.reads(json) |
ArrayOfMinMaxArrayDouble.fmt.reads(json) |
ArrayFilterType.fmt.reads(json) |
NestedFilterType.fmt.reads(json)
}
}
val theWrites = new Writes[FilterType] {
def writes(msg: FilterType) = {
msg match {
case s: StringArrayFilter => StringArrayFilter.fmt.writes(s)
case s: IntArrayFilter => IntArrayFilter.fmt.writes(s)
case s: DoubleArrayFilter => DoubleArrayFilter.fmt.writes(s)
case s: BooleanArrayFilter => BooleanArrayFilter.fmt.writes(s)
case s: MinMaxString => MinMaxString.fmt.writes(s)
case s: MinMaxInt => MinMaxInt.fmt.writes(s)
case s: MinMaxDouble => MinMaxDouble.fmt.writes(s)
case s: MinMaxArrayDouble => MinMaxArrayDouble.fmt.writes(s)
case s: MinMaxArrayInt => MinMaxArrayInt.fmt.writes(s)
case s: MinMaxArrayString => MinMaxArrayString.fmt.writes(s)
case s: ArrayOfMinMaxInt => ArrayOfMinMaxInt.fmt.writes(s)
case s: ArrayOfMinMaxDouble => ArrayOfMinMaxDouble.fmt.writes(s)
case s: ArrayOfMinMaxString => ArrayOfMinMaxString.fmt.writes(s)
case s: ArrayOfMinMaxArrayString => ArrayOfMinMaxArrayString.fmt.writes(s)
case s: ArrayOfMinMaxArrayInt => ArrayOfMinMaxArrayInt.fmt.writes(s)
case s: ArrayOfMinMaxArrayDouble => ArrayOfMinMaxArrayDouble.fmt.writes(s)
case s: ArrayFilterType => ArrayFilterType.fmt.writes(s)
case s: NestedFilterType => NestedFilterType.fmt.writes(s)
case _ => JsNull
}
}
}
implicit val fmt: Format[FilterType] = Format(theReads, theWrites)
}
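// Hypothetical usage sketch (an added note, not part of the original file): the combined
// Format dispatches writes on the concrete subtype and, on read, tries each concrete
// reader in turn (assuming the alternative chain above resolves as intended).
//
//   Json.toJson(MinMaxDouble(0.5, 2.5): FilterType)   // {"min":0.5,"max":2.5}
//   Json.parse("""["a","b"]""").as[FilterType]        // a StringArrayFilter(Array("a", "b"))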
case class StringArrayFilter(value: Array[String]) extends FilterType
object StringArrayFilter {
implicit def fmt: Format[StringArrayFilter] = new Format[StringArrayFilter] {
def reads(json: JsValue): JsResult[StringArrayFilter] = {
json.asOpt[Array[String]] match {
case Some(x) => JsSuccess(new StringArrayFilter(x))
case None => JsError("could not read StringArrayFilter: " + json)
}
}
def writes(f: StringArrayFilter) = Json.toJson(f.value)
}
}
case class IntArrayFilter(value: Array[Int]) extends FilterType
object IntArrayFilter {
implicit def fmt: Format[IntArrayFilter] = new Format[IntArrayFilter] {
def reads(json: JsValue): JsResult[IntArrayFilter] =
json.asOpt[Array[Int]] match {
case Some(x) => JsSuccess(new IntArrayFilter(x))
case None => JsError("could not read IntArrayFilter: " + json)
}
def writes(f: IntArrayFilter) = Json.toJson(f.value)
}
}
case class DoubleArrayFilter(value: Array[Double]) extends FilterType
object DoubleArrayFilter {
implicit def fmt: Format[DoubleArrayFilter] = new Format[DoubleArrayFilter] {
def reads(json: JsValue): JsResult[DoubleArrayFilter] = {
json.asOpt[Array[Double]] match {
case Some(x) => JsSuccess(new DoubleArrayFilter(x))
case None => JsError("could not read DoubleArrayFilter: " + json)
}
}
def writes(f: DoubleArrayFilter) = Json.toJson(f.value)
}
}
case class BooleanArrayFilter(value: Array[Boolean]) extends FilterType
object BooleanArrayFilter {
implicit def fmt: Format[BooleanArrayFilter] = new Format[BooleanArrayFilter] {
def reads(json: JsValue): JsResult[BooleanArrayFilter] = {
json.asOpt[Array[Boolean]] match {
case Some(x) => JsSuccess(new BooleanArrayFilter(x))
case None => JsError("could not read BooleanArrayFilter: " + json)
}
}
def writes(f: BooleanArrayFilter) = Json.toJson(f.value)
}
}
/**
* to use in filters for testing values between the min and max values
*/
// case class MinMaxRange[T](min: T, max: T) extends FilterType
//
// object MinMaxRange {
// implicit def fmt[T](implicit fmt: Format[T]): Format[MinMaxRange[T]] = new Format[MinMaxRange[T]] {
// def reads(json: JsValue): JsResult[MinMaxRange[T]] = JsSuccess(new MinMaxRange[T]((json \ "min").as[T], (json \ "max").as[T]))
//
// def writes(r: MinMaxRange[T]) = JsObject(Seq("min" -> Json.toJson(r.min), "max" -> Json.toJson(r.max)))
// }
// }
/**
* to use in filters for testing values between the min and max values
*
* @param min the minimum lexicographical value
* @param max the maximum lexicographical value
*/
case class MinMaxString(min: String, max: String) extends FilterType {
def isInRange(test: String): Boolean =
test.compareTo(min) >= 0 && test.compareTo(max) <= 0
}
object MinMaxString {
implicit val fmt = Json.format[MinMaxString]
}
case class MinMaxArrayString(min: Array[String], max: Array[String]) extends FilterType
object MinMaxArrayString {
implicit val fmt = Json.format[MinMaxArrayString]
}
case class ArrayOfMinMaxArrayString(v: Array[MinMaxArrayString]) extends FilterType
object ArrayOfMinMaxArrayString {
val theReads = new Reads[ArrayOfMinMaxArrayString] {
def reads(js: JsValue): JsResult[ArrayOfMinMaxArrayString] = {
js.asOpt[Array[MinMaxArrayString]] match {
case Some(x) => JsSuccess(new ArrayOfMinMaxArrayString(x))
case None => JsError(s"Error reading ArrayOfMinMaxArrayString: $js")
}
}
}
val theWrites = new Writes[ArrayOfMinMaxArrayString] {
def writes(arr: ArrayOfMinMaxArrayString) = JsArray(for (i <- arr.v) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayOfMinMaxArrayString] = Format(theReads, theWrites)
}
case class ArrayOfMinMaxString(value: Array[MinMaxString]) extends FilterType
object ArrayOfMinMaxString {
val theReads = new Reads[ArrayOfMinMaxString] {
def reads(js: JsValue): JsResult[ArrayOfMinMaxString] = {
js.asOpt[Array[MinMaxString]] match {
case Some(x) => JsSuccess(new ArrayOfMinMaxString(x))
case None => JsError(s"Error reading ArrayOfMinMaxString: $js")
}
}
}
val theWrites = new Writes[ArrayOfMinMaxString] {
def writes(arr: ArrayOfMinMaxString) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayOfMinMaxString] = Format(theReads, theWrites)
}
/**
* to use in filters for testing values between the min and max values
*
* @param min the minimum value
* @param max the maximum value
*/
case class MinMaxInt(min: Int, max: Int) extends FilterType {
def isInRange(test: Int): Boolean = test >= min && test <= max
}
object MinMaxInt {
implicit val fmt = Json.format[MinMaxInt]
}
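// Hypothetical usage sketch (not part of the original file): range checks and the JSON
// shape produced by the macro-generated format.
//
//   val range = MinMaxInt(min = 1, max = 10)
//   range.isInRange(7)                                 // true
//   range.isInRange(11)                                // false
//   Json.toJson(range)                                 // {"min":1,"max":10}
//   Json.parse("""{"min":3,"max":9}""").as[MinMaxInt]  // MinMaxInt(3, 9)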
case class ArrayOfMinMaxInt(value: Array[MinMaxInt]) extends FilterType
object ArrayOfMinMaxInt {
val theReads = new Reads[ArrayOfMinMaxInt] {
def reads(js: JsValue): JsResult[ArrayOfMinMaxInt] = {
js.asOpt[Array[MinMaxInt]] match {
case Some(x) => JsSuccess(new ArrayOfMinMaxInt(x))
case None => JsError(s"Error reading ArrayOfMinMaxInt: $js")
}
}
}
val theWrites = new Writes[ArrayOfMinMaxInt] {
def writes(arr: ArrayOfMinMaxInt) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayOfMinMaxInt] = Format(theReads, theWrites)
}
case class MinMaxArrayInt(min: Array[Int], max: Array[Int]) extends FilterType {
def isInRange(test: Int): Boolean = {
var result = false
breakable {
// assume min.length == max.length
for (i <- min.indices) {
if (test >= min(i) && test <= max(i)) result = true else {
result = false
break
}
}
}
result
}
}
object MinMaxArrayInt {
implicit val fmt = Json.format[MinMaxArrayInt]
}
case class ArrayOfMinMaxArrayInt(value: Array[MinMaxArrayInt]) extends FilterType
object ArrayOfMinMaxArrayInt {
val theReads = new Reads[ArrayOfMinMaxArrayInt] {
def reads(js: JsValue): JsResult[ArrayOfMinMaxArrayInt] = {
js.asOpt[Array[MinMaxArrayInt]] match {
case Some(x) => JsSuccess(new ArrayOfMinMaxArrayInt(x))
case None => JsError(s"Error reading ArrayOfMinMaxArrayInt: $js")
}
}
}
val theWrites = new Writes[ArrayOfMinMaxArrayInt] {
def writes(arr: ArrayOfMinMaxArrayInt) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayOfMinMaxArrayInt] = Format(theReads, theWrites)
}
/**
* to use in filters for testing values between the min and max values
*
* @param min the minimum array values
* @param max the maximum array values
*/
case class MinMaxDouble(min: Double, max: Double) extends FilterType {
def isInRange(test: Double): Boolean = test >= min && test <= max
}
object MinMaxDouble {
implicit val fmt = Json.format[MinMaxDouble]
}
case class ArrayOfMinMaxDouble(value: Array[MinMaxDouble]) extends FilterType
object ArrayOfMinMaxDouble {
val theReads = new Reads[ArrayOfMinMaxDouble] {
def reads(js: JsValue): JsResult[ArrayOfMinMaxDouble] = {
js.asOpt[Array[MinMaxDouble]] match {
case Some(x) => JsSuccess(new ArrayOfMinMaxDouble(x))
case None => JsError(s"Error reading ArrayOfMinMaxDouble: $js")
}
}
}
val theWrites = new Writes[ArrayOfMinMaxDouble] {
def writes(arr: ArrayOfMinMaxDouble) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayOfMinMaxDouble] = Format(theReads, theWrites)
}
/**
* to use in filters for testing values between the min and max values
*
* @param min the minimum array values
* @param max the maximum array values
*/
case class MinMaxArrayDouble(min: Array[Double], max: Array[Double]) extends FilterType
object MinMaxArrayDouble {
implicit val fmt = Json.format[MinMaxArrayDouble]
}
case class ArrayOfMinMaxArrayDouble(v: Array[MinMaxArrayDouble]) extends FilterType
object ArrayOfMinMaxArrayDouble {
val theReads = new Reads[ArrayOfMinMaxArrayDouble] {
def reads(js: JsValue): JsResult[ArrayOfMinMaxArrayDouble] = {
js.asOpt[Array[MinMaxArrayDouble]] match {
case Some(x) => JsSuccess(new ArrayOfMinMaxArrayDouble(x))
case None => JsError(s"Error reading ArrayOfMinMaxArrayDouble: $js")
}
}
}
val theWrites = new Writes[ArrayOfMinMaxArrayDouble] {
def writes(arr: ArrayOfMinMaxArrayDouble) = JsArray(for (i <- arr.v) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayOfMinMaxArrayDouble] = Format(theReads, theWrites)
}
/**
* nested filters
*
* @param value
*/
case class NestedFilterType(value: Filter) extends FilterType
object NestedFilterType {
val theReads = new Reads[NestedFilterType] {
def reads(js: JsValue): JsResult[NestedFilterType] = {
js.asOpt[Filter] match {
case Some(x) => JsSuccess(new NestedFilterType(x))
case None => JsError(s"Error reading NestedFilterType: $js")
}
}
}
val theWrites = new Writes[NestedFilterType] {
def writes(x: NestedFilterType) = Json.toJson(x.value)
}
implicit val fmt: Format[NestedFilterType] = Format(theReads, theWrites)
}
/**
* nested array of filters
*
* @param value
*/
case class ArrayFilterType(value: Array[Filter]) extends FilterType
object ArrayFilterType {
val theReads = new Reads[ArrayFilterType] {
def reads(js: JsValue): JsResult[ArrayFilterType] = {
js.asOpt[Array[Filter]] match {
case Some(x) => JsSuccess(new ArrayFilterType(x))
case None => JsError(s"Error reading ArrayFilterType: $js")
}
}
}
val theWrites = new Writes[ArrayFilterType] {
def writes(arr: ArrayFilterType) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt: Format[ArrayFilterType] = Format(theReads, theWrites)
}
//-------------------------------------------------------------------------------------
//------------------filter expressions-------------------------------------------------
//-------------------------------------------------------------------------------------
sealed trait FilterExpType
/**
* "and" filter expression accepts an array of filters,
* and passes if and only if every filter passes.
*
* @param value an Array of Filters
*/
case class FilterAnd(value: Array[Filter]) extends FilterExpType {
val key = FilterAnd.key
}
object FilterAnd {
val key = "and"
val theReads = new Reads[FilterAnd] {
def reads(js: JsValue): JsResult[FilterAnd] = {
js.asOpt[Array[Filter]] match {
case Some(filterArr) => JsSuccess(new FilterAnd(filterArr))
case None => JsError("could not read FilterAnd: " + js)
}
}
}
val theWrites = new Writes[FilterAnd] {
def writes(arr: FilterAnd) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt = Format(theReads, theWrites)
}
/**
* "or" filter expression accepts an array of filters,
* and passes if and only if at least one filter passes.
*
* @param value an Array of Filters
*/
case class FilterOr(value: Array[Filter]) extends FilterExpType {
val key = FilterOr.key
}
object FilterOr {
val key = "or"
val theReads = new Reads[FilterOr] {
def reads(js: JsValue): JsResult[FilterOr] = {
js.asOpt[Array[Filter]] match {
case Some(filterArr) => JsSuccess(new FilterOr(filterArr))
case None => JsError("could not read FilterOr: " + js)
}
}
}
val theWrites = new Writes[FilterOr] {
def writes(arr: FilterOr) = JsArray(for (i <- arr.value) yield Json.toJson(i))
}
implicit val fmt = Format(theReads, theWrites)
}
/**
* "not" filter expression accepts a single filter,
* and passes if and only if the wrapped filter does not pass.
*
* @param value the Filter to negate
*/
case class FilterNot(value: Filter) extends FilterExpType {
val key = FilterNot.key
}
object FilterNot {
val key = "not"
val theReads = new Reads[FilterNot] {
def reads(js: JsValue): JsResult[FilterNot] = {
js.asOpt[Filter] match {
case Some(filter) => JsSuccess(new FilterNot(filter))
case None => JsError("could not read FilterNot: " + js)
}
}
}
val theWrites = new Writes[FilterNot] {
def writes(arr: FilterNot) = Json.toJson[Filter](arr.value)
}
implicit val fmt = Format(theReads, theWrites)
}
}
| workingDog/weblvcScala | src/main/scala/com/kodekutters/FilterSupport.scala | Scala | apache-2.0 | 16,238 |
package app.components.semanticui
import japgolly.scalajs.react
import japgolly.scalajs.react.vdom.VdomNode
import japgolly.scalajs.react.{Callback, Children}
import scala.scalajs.js
import japgolly.scalajs.react.vdom.html_<^._
object Confirm {
val component = react.JsComponent[js.Object, Children.Varargs, Null](SemanticUiComponents.Confirm)
def apply(content: Option[VdomNode] = None,
cancelButton: Option[VdomNode] = Some("Cancel"),
confirmButton: Option[VdomNode] = Some("OK"),
onCancel: Callback = Callback.empty,
onConfirm: Callback = Callback.empty,
open: js.UndefOr[Boolean] = js.undefined,
)(children: VdomNode*) = {
val props = js.Dynamic.literal(
content = content.map(_.rawNode).getOrElse(js.undefined).asInstanceOf[js.Any],
cancelButton = cancelButton.map(_.rawNode).getOrElse(js.undefined).asInstanceOf[js.Any],
confirmButton = confirmButton.map(_.rawNode).getOrElse(js.undefined).asInstanceOf[js.Any],
onCancel = onCancel.toJsCallback,
onConfirm = onConfirm.toJsCallback,
open = open,
)
component(props)(children:_*)
}
}
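// Hypothetical usage sketch (not part of the original file): a delete-confirmation
// dialog; `dialogOpen`, `closeDialog` and `deleteItem` are assumed to come from the
// enclosing component's state and backend.
//
//   Confirm(
//     content = Some("Delete this note?"),
//     confirmButton = Some("Delete"),
//     onCancel = closeDialog,
//     onConfirm = deleteItem >> closeDialog,
//     open = dialogOpen
//   )()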
| Igorocky/lesn | client/src/main/scala/app/components/semanticui/Confirm.scala | Scala | mit | 1,167 |
package com.github.j5ik2o.reactive.redis.command.strings
final case class StartAndEnd(start: Int, end: Int)
| j5ik2o/reactive-redis | core/src/main/scala/com/github/j5ik2o/reactive/redis/command/strings/StartAndEnd.scala | Scala | mit | 109 |
package org.bowlerframework.view.scalate
import org.bowlerframework.controller.Controller
/**
* Created by IntelliJ IDEA.
* User: wfaler
* Date: 20/04/2011
* Time: 23:46
* To change this template use File | Settings | File Templates.
*/
trait LayoutScope { this: Controller =>
//activeLayout(activeLayout)
}
| rkpandey/Bowler | core/src/main/scala/org/bowlerframework/view/scalate/LayoutScope.scala | Scala | bsd-3-clause | 324 |
package scala.meta
package internal
package prettyprinters
import scala.meta.prettyprinters._
import Show.{ sequence => s }
import scala.meta.tokens._
object TokenStructure {
def apply[T <: Token]: Structure[T] = {
Structure(x => {
implicit val dialect = x.dialect
val prefix = (x: Token) match {
case x: Token.Tab => "\\t"
case x: Token.CR => "\\r"
case x: Token.LF => "\\n"
case x: Token.FF => "\\f"
case x: Token.LFLF => "\\n\\n"
case x: Token.BOF => "BOF"
case x: Token.EOF => "EOF"
case x => x.syntax
}
s(prefix, " [", x.start.toString, "..", x.end.toString, ")")
})
}
}
| MasseGuillaume/scalameta | scalameta/tokens/shared/src/main/scala/scala/meta/internal/prettyprinters/TokenStructure.scala | Scala | bsd-3-clause | 678 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.communication.security
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec
import org.apache.toree.communication.security.HmacAlgorithm.HmacAlgorithm
object HmacAlgorithm extends Enumeration {
type HmacAlgorithm = Value
def apply(key: String) = Value(key)
val MD5 = Value("HmacMD5")
val SHA1 = Value("HmacSHA1")
val SHA256 = Value("HmacSHA256")
}
object Hmac {
def apply(key: String, algorithm: HmacAlgorithm = HmacAlgorithm.SHA256) =
new Hmac(key, algorithm)
def newMD5(key: String): Hmac = this(key, HmacAlgorithm.MD5)
def newSHA1(key: String): Hmac = this(key, HmacAlgorithm.SHA1)
def newSHA256(key: String): Hmac = this(key, HmacAlgorithm.SHA256)
}
class Hmac(
val key: String,
val algorithm: HmacAlgorithm = HmacAlgorithm.SHA256
) {
private var mac: Mac = _
private var secretKeySpec: SecretKeySpec = _
if (key.nonEmpty) {
mac = Mac.getInstance(algorithm.toString)
secretKeySpec = new SecretKeySpec(key.getBytes, algorithm.toString)
mac.init(secretKeySpec)
}
def apply(items: String*): String = digest(items)
def digest(items: Seq[String]): String = if (key.nonEmpty) {
mac synchronized {
items.map(_.getBytes("UTF-8")).foreach(mac.update)
mac.doFinal().map("%02x" format _).mkString
}
} else ""
}
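// Hypothetical usage sketch (not part of the original file): signing message frames
// with a shared key; the key and frame values are assumptions for illustration.
//
//   val hmac = Hmac("a1b2-secret-key", HmacAlgorithm.SHA256)
//   val signature = hmac(headerJson, parentHeaderJson, metadataJson, contentJson)
//   // An Hmac built with an empty key skips signing: digest(...) returns "".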
| chipsenkbeil/incubator-toree | communication/src/main/scala/org/apache/toree/communication/security/Hmac.scala | Scala | apache-2.0 | 2,158 |
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package unit.controllers
import com.kenshoo.play.metrics.Metrics
import config.WSHttp
import connectors.HodsApiConnector
import controllers._
import model.RcsDataSet
import org.joda.time.LocalDate
import org.scalatest.TestData
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.mockito.MockitoSugar
import org.scalatestplus.play.OneAppPerTest
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.FakeHeaders
import uk.gov.hmrc.http.{HeaderCarrier, HttpResponse}
import uk.gov.hmrc.play.HeaderCarrierConverter
import uk.gov.hmrc.play.config.inject.ServicesConfig
import uk.gov.hmrc.play.test.UnitSpec
import unit.{SftpConfiguration, WorldpayMerchantConfiguration}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
class EmisReportServiceSpec extends UnitSpec with ScalaFutures with SftpConfiguration with WorldpayMerchantConfiguration with IntegrationPatience with OneAppPerTest with MockitoSugar {
val allMandatoryConfig = MandatorySftpConfig ++ MandatoryMerchantConfig
override def newAppForTest(testData: TestData): Application = GuiceApplicationBuilder().configure(allMandatoryConfig).build()
def emisReportService = app.injector.instanceOf(classOf[EmisReportService])
"The EMISReportService" should {
implicit def hc = HeaderCarrierConverter.fromHeadersAndSession(FakeHeaders())
"allow a re-submission if the existing datasets are not released or unknown status" in {
val connector = hodsApiConnector(Seq(RcsDataSet("1", "Deleted"), RcsDataSet("2", "Open")))
emisReportService.canSubmitDataset(connector)(LocalDate.now()).futureValue shouldBe AllowSubmission
}
"prevent a re-submission if there is an existing dataset already released" in {
Seq("Released", "Sending", "Sent", "Failed Recon", "Tech Failure") foreach { preventStatus =>
val connector = hodsApiConnector(Seq(RcsDataSet("1", preventStatus), RcsDataSet("2", "Open")))
emisReportService.canSubmitDataset(connector)(LocalDate.now()).futureValue shouldBe PreventSubmission
}
}
"prevent a re-submission if any of the status is unknown" in {
val connector = hodsApiConnector(Seq(RcsDataSet("1", "Surprise"), RcsDataSet("2", "Open")))
emisReportService.canSubmitDataset(connector)(LocalDate.now()).futureValue shouldBe UnknownStatus(RcsDataSet("1", "Surprise"))
}
}
def hodsApiConnector(datasetResults:Seq[RcsDataSet]): HodsApiConnector = new HodsApiConnector(mock[WSHttp], mock[Metrics], mock[ServicesConfig]) {
override val rcsPaymentsUrl: String = ""
override def listDataSets(fromDate: LocalDate, toDate: LocalDate, hc: HeaderCarrier): Future[Seq[RcsDataSet]] = {
Future.successful(datasetResults)
}
override def deleteDataset(datasetId: String, hc: HeaderCarrier): Future[HttpResponse] = {
Future.successful(HttpResponse(200))
}
}
}
| hmrc/worldpay-downloader | test/unit/controllers/EmisReportServiceSpec.scala | Scala | apache-2.0 | 3,573 |
/*
* Copyright (C) 2017 Radicalbit
*
* This file is part of flink-JPMML
*
* flink-JPMML is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* flink-JPMML is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with flink-JPMML. If not, see <http://www.gnu.org/licenses/>.
*/
package io.radicalbit.flink.pmml.scala.api
package object exceptions {
/** Models conformity failure between PMML model and input [[org.apache.flink.streaming.api.scala.DataStream]]
*
*/
private[scala] class InputValidationException(msg: String) extends RuntimeException(msg)
/** Models [[org.jpmml.evaluator.EvaluatorUtil.prepare()]] method failure
*
*/
private[scala] class InputPreparationException(msg: String) extends RuntimeException(msg)
/** Models empty result from [[org.jpmml.evaluator.ModelEvaluator]] evaluation
*
*/
private[scala] class JPMMLExtractionException(msg: String) extends RuntimeException(msg)
/** Models failure on loading PMML model from distributed system
*
*/
private[scala] class ModelLoadingException(msg: String, throwable: Throwable)
extends RuntimeException(msg, throwable)
/** Prediction failure due to [[io.radicalbit.flink.pmml.scala.api.EmptyEvaluator]]
*
*/
private[scala] class EmptyEvaluatorException(msg: String) extends NoSuchElementException(msg)
/** Parsing of ModelId has failed
*
*/
private[scala] class WrongModelIdFormat(msg: String) extends ArrayIndexOutOfBoundsException(msg)
}
| maocorte/flink-jpmml | flink-jpmml-scala/src/main/scala/io/radicalbit/flink/pmml/scala/api/exceptions/package.scala | Scala | agpl-3.0 | 1,962 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.evaluation
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.ml.linalg.{Vector, VectorUDT}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable, SchemaUtils}
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
import org.apache.spark.sql.{Dataset, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.DoubleType
/**
* :: Experimental ::
* Evaluator for binary classification, which expects two input columns: rawPrediction and label.
* The rawPrediction column can be of type double (binary 0/1 prediction, or probability of label 1)
* or of type vector (length-2 vector of raw predictions, scores, or label probabilities).
*/
@Since("1.2.0")
@Experimental
class BinaryClassificationEvaluator @Since("1.4.0") (@Since("1.4.0") override val uid: String)
extends Evaluator with HasRawPredictionCol with HasLabelCol with DefaultParamsWritable {
@Since("1.2.0")
def this() = this(Identifiable.randomUID("binEval"))
/**
* param for metric name in evaluation (supports `"areaUnderROC"` (default), `"areaUnderPR"`)
* @group param
*/
@Since("1.2.0")
val metricName: Param[String] = {
val allowedParams = ParamValidators.inArray(Array("areaUnderROC", "areaUnderPR"))
new Param(
this, "metricName", "metric name in evaluation (areaUnderROC|areaUnderPR)", allowedParams)
}
/** @group getParam */
@Since("1.2.0")
def getMetricName: String = $(metricName)
/** @group setParam */
@Since("1.2.0")
def setMetricName(value: String): this.type = set(metricName, value)
/** @group setParam */
@Since("1.5.0")
def setRawPredictionCol(value: String): this.type = set(rawPredictionCol, value)
/** @group setParam */
@Since("1.2.0")
def setLabelCol(value: String): this.type = set(labelCol, value)
setDefault(metricName -> "areaUnderROC")
@Since("2.0.0")
override def evaluate(dataset: Dataset[_]): Double = {
val schema = dataset.schema
SchemaUtils.checkColumnTypes(schema, $(rawPredictionCol), Seq(DoubleType, new VectorUDT))
SchemaUtils.checkNumericType(schema, $(labelCol))
// TODO: When dataset metadata has been implemented, check rawPredictionCol vector length = 2.
val scoreAndLabels =
dataset.select(col($(rawPredictionCol)), col($(labelCol)).cast(DoubleType)).rdd.map {
case Row(rawPrediction: Vector, label: Double) => (rawPrediction(1), label)
case Row(rawPrediction: Double, label: Double) => (rawPrediction, label)
}
val metrics = new BinaryClassificationMetrics(scoreAndLabels)
val metric = $(metricName) match {
case "areaUnderROC" => metrics.areaUnderROC()
case "areaUnderPR" => metrics.areaUnderPR()
}
metrics.unpersist()
metric
}
@Since("1.5.0")
override def isLargerBetter: Boolean = $(metricName) match {
case "areaUnderROC" => true
case "areaUnderPR" => true
}
@Since("1.4.1")
override def copy(extra: ParamMap): BinaryClassificationEvaluator = defaultCopy(extra)
}
@Since("1.6.0")
object BinaryClassificationEvaluator extends DefaultParamsReadable[BinaryClassificationEvaluator] {
@Since("1.6.0")
override def load(path: String): BinaryClassificationEvaluator = super.load(path)
}
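// Hypothetical usage sketch (not part of this file): `predictions` is assumed to be a
// DataFrame with "rawPrediction" and "label" columns produced by a fitted binary model.
//
//   val evaluator = new BinaryClassificationEvaluator()
//     .setMetricName("areaUnderPR")
//     .setRawPredictionCol("rawPrediction")
//     .setLabelCol("label")
//   val areaUnderPR: Double = evaluator.evaluate(predictions)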
| mike0sv/spark | mllib/src/main/scala/org/apache/spark/ml/evaluation/BinaryClassificationEvaluator.scala | Scala | apache-2.0 | 4,191 |
//
// Hamming.scala -- Scala benchmark Hamming sequence
// Project OrcTests
//
// Copyright (c) 2018 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.test.item.scalabenchmarks
import scala.collection.mutable.Buffer
import scala.concurrent.Channel
object Hamming extends BenchmarkApplication[Unit, List[Int]] with ExpectedBenchmarkResult[List[Int]] {
import Util._
type I = Int
val I = Int
type Putter[A] = (A) => Unit
type Getter[A] = () => A
val threads = Buffer[ControllableThread[Any]]()
implicit class ChannelOps[A](val chan: Channel[A]) extends AnyVal {
def put: Putter[A] = (x) => {
//println(s"Write to channel: $x -> $chan")
chan.write(x)
}
def get: Getter[A] = () => {
//println(s"Reading from channel: $chan")
chan.read
}
}
def Fanout[A](c: Getter[A], cs: List[Putter[A]]) = thread {
while (true) {
val x = c()
cs.foreach(_(x))
}
}
def Fanin[A](cs: List[Getter[A]], c: Putter[A]) = {
cs.map(c1 => thread {
while (true) {
c(c1())
}
})
}
def Trans[A, B](f: (A) => B, in: Getter[A], out: Putter[B]) = thread {
while (true) {
out(f(in()))
}
}
def UniqueMerge(is: List[Getter[I]], o: Putter[I]) = {
val n = is.size
val tops = Array.fill[Option[I]](is.size)(None)
def fillTops() = {
(0 until n).map(i => {
if (tops(i) == None)
tops(i) = Some(is(i)())
})
}
def getMin(): I = {
fillTops()
val (mi, mv, _) = tops.foldLeft((-1, None: Option[I], 0)) { (acc, v) =>
val (mi, mv, i) = acc
if (mv.isDefined && v.get >= mv.get)
(mi, mv, i + 1)
else
(i, v, i + 1)
}
(0 until n).map(i => {
if (tops(i) == mv)
tops(i) = None
})
mv.get
}
thread {
while (true) {
o(getMin())
}
}
}
def getN[A](n: Int, chan: Channel[A]): List[A] = {
(for (_ <- 0 until n) yield chan.read).toList
}
def makeChannels[A](n: Int) = (0 until n).map(i => new Channel[A]()).toList
val N = BenchmarkConfig.problemSizeScaledInt(400)
def benchmark(ctx: Unit): List[I] = {
val chans @ List(out, out1, x2, x3, x5, x21, x31, x51) = makeChannels[I](8)
threads += Fanout(out.get, List(x2.put, x3.put, x5.put, out1.put))
threads += UniqueMerge(List(x21.get, x31.get, x51.get), out.put)
threads += Trans[I, I](_ * 2, x2.get, x21.put)
threads += Trans[I, I](_ * 3, x3.get, x31.put)
threads += Trans[I, I](_ * 5, x5.get, x51.put)
out.put(1)
val r = getN(N, out1)
threads.foreach(_.terminate())
r
}
val name: String = "Hamming"
def setup(): Unit = ()
val size: Int = N
val expectedMap: Map[Int, Int] = Map(
1 -> 0x978c0600,
10 -> 0xe2d103,
100 -> 0x34c9642,
)
}
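// Hedged reference sketch (not part of the original benchmark): the same Hamming sequence --
// numbers whose only prime factors are 2, 3 and 5, in increasing order -- computed without
// channels or threads, handy for sanity-checking the streaming pipeline above. The object
// name is hypothetical.
object HammingReferenceSketch {
  def first(n: Int): List[Int] = {
    val out = scala.collection.mutable.ArrayBuffer(1)
    var (i2, i3, i5) = (0, 0, 0)
    while (out.size < n) {
      // The next Hamming number is the smallest pending multiple of 2, 3 or 5.
      val next = List(out(i2) * 2, out(i3) * 3, out(i5) * 5).min
      out += next
      // Advance every index whose candidate was just emitted (handles duplicates like 6 = 2*3 = 3*2).
      if (next == out(i2) * 2) i2 += 1
      if (next == out(i3) * 3) i3 += 1
      if (next == out(i5) * 5) i5 += 1
    }
    out.toList
  }
}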
|
orc-lang/orc
|
OrcTests/src/orc/test/item/scalabenchmarks/Hamming.scala
|
Scala
|
bsd-3-clause
| 3,082
|
package org.jetbrains.plugins.scala.lang.refactoring.introduceVariable
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.text.StringUtil
import com.intellij.psi._
import com.intellij.psi.search.{GlobalSearchScope, GlobalSearchScopesCore, PsiSearchHelper}
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.util.Processor
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging.ScPackaging
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScExtendsBlock, ScTemplateBody}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.ScPackageImpl
import org.jetbrains.plugins.scala.lang.psi.types.api.designator.ScProjectionType
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
import org.jetbrains.plugins.scala.lang.refactoring.util._
import org.jetbrains.plugins.scala.worksheet.actions.RunWorksheetAction
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* Created by Kate Ustyuzhanina
* on 8/12/15
*/
object ScopeSuggester {
def suggestScopes(conflictsReporter: ConflictsReporter,
project: Project,
editor: Editor,
file: PsiFile,
currentElement: ScTypeElement): Array[ScopeItem] = {
def getParent(element: PsiElement, isScriptFile: Boolean): PsiElement = {
if (isScriptFile)
PsiTreeUtil.getParentOfType(element, classOf[ScTemplateBody], classOf[ScalaFile])
else
PsiTreeUtil.getParentOfType(element, classOf[ScTemplateBody])
}
def isSuitableParent(owners: Seq[ScTypeParametersOwner], parent: PsiElement): Boolean = {
var result = true
for (elementOwner <- owners) {
val pparent = PsiTreeUtil.getParentOfType(parent, classOf[ScTemplateDefinition])
if (pparent != null && (!elementOwner.isAncestorOf(pparent) || !elementOwner.isInstanceOf[ScTemplateDefinition])) {
result = false
}
}
result
}
val isScriptFile = currentElement.getContainingFile.asInstanceOf[ScalaFile].isScriptFile()
val owners = ScalaRefactoringUtil.getTypeParameterOwnerList(currentElement) ++ ScalaRefactoringUtil.getTypeAliasOwnersList(currentElement)
var parent = getParent(currentElement, isScriptFile)
// do not continue when some type-parameter owner is not a template definition
var noContinue = owners.exists(!_.isInstanceOf[ScTemplateDefinition])
val result: ArrayBuffer[ScopeItem] = new ArrayBuffer[ScopeItem]()
while (parent != null && !noContinue) {
var occInCompanionObj: Array[ScTypeElement] = Array[ScTypeElement]()
val name = parent match {
case fileType: ScalaFile => "file " + fileType.getName
case _ =>
PsiTreeUtil.getParentOfType(parent, classOf[ScTemplateDefinition]) match {
case classType: ScClass =>
"class " + classType.name
case objectType: ScObject =>
occInCompanionObj = getOccurrencesFromCompanionObject(currentElement, objectType)
"object " + objectType.name
case traitType: ScTrait =>
"trait " + traitType.name
}
}
// parent != null here
// check whether we can use an outer scope
noContinue = currentElement.calcType match {
case projectionType: ScProjectionType =>
// we can't use a type alias outside the scope where it was defined
parent.asInstanceOf[ScTemplateBody].isAncestorOf(projectionType.actualElement)
case _ => false
}
if (!isSuitableParent(owners, parent)) {
noContinue = true
}
val occurrences = ScalaRefactoringUtil.getTypeElementOccurrences(currentElement, parent)
val validator = ScalaTypeValidator(conflictsReporter, project, currentElement, parent, occurrences.isEmpty)
val possibleNames = NameSuggester.suggestNamesByType(currentElement.calcType)
.map(validator.validateName(_, increaseNumber = true))
result += SimpleScopeItem(name, parent, occurrences, occInCompanionObj, validator, possibleNames.toArray)
parent = getParent(parent, isScriptFile)
}
val scPackage = PsiTreeUtil.getParentOfType(currentElement, classOf[ScPackaging])
// a type that depends on type parameters cannot be used outside its class
if ((scPackage != null) && owners.isEmpty && !noContinue) {
val allPackages = getAllAvailablePackages(scPackage.fullPackageName, currentElement)
for ((resultPackage, resultDirectory) <- allPackages) {
result += PackageScopeItem(resultPackage.getQualifiedName, resultDirectory, needDirectoryCreating = false, Array(NameSuggester.suggestNamesByType(currentElement.calcType).apply(0).capitalize))
}
}
result.toArray
}
private def getOccurrencesFromCompanionObject(typeElement: ScTypeElement,
objectType: ScObject): Array[ScTypeElement] = {
val parent: PsiElement = objectType.getParent
val name = objectType.name
val companion = parent.getChildren.find({
case classType: ScClass if classType.name == name =>
true
case traitType: ScTrait if traitType.name == name =>
true
case _ => false
})
if (companion.isDefined)
ScalaRefactoringUtil.getTypeElementOccurrences(typeElement, companion.get)
else
Array[ScTypeElement]()
}
// returns an Array of (package, containing directory) pairs
protected def getAllAvailablePackages(packageName: String, typeElement: ScTypeElement): Array[(PsiPackage, PsiDirectory)] = {
def getDirectoriesContainigfile(file: PsiFile): Array[PsiDirectory] = {
val result: ArrayBuffer[PsiDirectory] = new ArrayBuffer[PsiDirectory]()
var parent = file.getContainingDirectory
while (parent != null) {
result += parent
parent = parent.getParentDirectory
}
result.toArray
}
@tailrec
def getDirectoriesContainigFileAndPackage(currentPackage: PsiPackage,
module: Module,
result: ArrayBuffer[(PsiPackage, PsiDirectory)],
dirContainingFile: Array[PsiDirectory]): ArrayBuffer[(PsiPackage, PsiDirectory)] = {
if (currentPackage != null && currentPackage.getName != null) {
val subPackages = currentPackage.getSubPackages(GlobalSearchScope.moduleScope(module))
val filesNoRecursive = currentPackage.getFiles(GlobalSearchScope.moduleScope(module))
// don't choose the package if there is only one subpackage
if ((subPackages.length != 1) || filesNoRecursive.nonEmpty) {
val packageDirectories = currentPackage.getDirectories(GlobalSearchScope.moduleScope(module))
val containingDirectory = packageDirectories.intersect(dirContainingFile)
val resultDirectory: PsiDirectory = if (containingDirectory.length > 0) {
containingDirectory.apply(0)
} else {
typeElement.getContainingFile.getContainingDirectory
}
result += ((currentPackage, resultDirectory))
}
getDirectoriesContainigFileAndPackage(currentPackage.getParentPackage, module, result, dirContainingFile)
} else {
result
}
}
val currentPackage = ScPackageImpl.findPackage(typeElement.getProject, packageName).asInstanceOf[PsiPackage]
val directoriesContainingFile = getDirectoriesContainigfile(typeElement.getContainingFile)
val module = RunWorksheetAction.getModuleFor(typeElement.getContainingFile)
val result: ArrayBuffer[(PsiPackage, PsiDirectory)] = new ArrayBuffer[(PsiPackage, PsiDirectory)]()
getDirectoriesContainigFileAndPackage(currentPackage, module, result, directoriesContainingFile)
result.toArray
}
def handleOnePackage(typeElement: ScTypeElement, inPackageName: String, containinDirectory: PsiDirectory,
conflictsReporter: ConflictsReporter, project: Project, editor: Editor, isReplaceAll: Boolean, inputName: String): PackageScopeItem = {
def getFilesToSearchIn(currentDirectory: PsiDirectory): Array[ScalaFile] = {
if (!isReplaceAll) {
Array(typeElement.getContainingFile.asInstanceOf[ScalaFile])
} else {
def oneRound(word: String, bufResult: ArrayBuffer[ArrayBuffer[ScalaFile]]) = {
val buffer = new ArrayBuffer[ScalaFile]()
val processor = new Processor[PsiFile] {
override def process(file: PsiFile): Boolean = {
file match {
case scalaFile: ScalaFile =>
buffer += scalaFile
}
true
}
}
val helper: PsiSearchHelper = PsiSearchHelper.SERVICE.getInstance(typeElement.getProject)
helper.processAllFilesWithWord(word, GlobalSearchScopesCore.directoryScope(currentDirectory, true), processor, true)
bufResult += buffer
}
val typeName = typeElement.calcType.presentableText
val words = StringUtil.getWordsIn(typeName).asScala.toArray
val resultBuffer = new ArrayBuffer[ArrayBuffer[ScalaFile]]()
words.foreach(oneRound(_, resultBuffer))
var intersectionResult = resultBuffer(0)
def intersect(inBuffer: ArrayBuffer[ScalaFile]) = {
intersectionResult = intersectionResult.intersect(inBuffer)
}
resultBuffer.foreach((element: ArrayBuffer[ScalaFile]) => intersect(element))
intersectionResult.toList.reverse.toArray
}
}
val inPackage = ScPackageImpl.findPackage(typeElement.getProject, inPackageName)
val projectSearchScope = GlobalSearchScope.projectScope(typeElement.getProject)
val packageObject = inPackage.findPackageObject(projectSearchScope)
val fileEncloser = if (packageObject.isDefined)
PsiTreeUtil.getChildOfType(PsiTreeUtil.getChildOfType(packageObject.get, classOf[ScExtendsBlock]), classOf[ScTemplateBody])
else
containinDirectory
val allOcurrences: mutable.MutableList[Array[ScTypeElement]] = mutable.MutableList()
val allValidators: mutable.MutableList[ScalaTypeValidator] = mutable.MutableList()
def handleOneFile(file: ScalaFile) {
if (packageObject.exists((x: ScTypeDefinition) => x.getContainingFile == file)) {
} else {
val occurrences = ScalaRefactoringUtil.getTypeElementOccurrences(typeElement, file)
allOcurrences += occurrences
val parent = file match {
case scalaFile: ScalaFile if scalaFile.isScriptFile() =>
file
case _ => PsiTreeUtil.findChildOfType(file, classOf[ScTemplateBody])
}
if (parent != null) {
allValidators += ScalaTypeValidator(conflictsReporter, project, typeElement, parent, occurrences.isEmpty)
}
}
}
val collectedFiles = getFilesToSearchIn(containinDirectory)
val needNewDir = inPackage.getDirectories.isEmpty
// if the package has no directories, we are dealing with a package declared inside a file
if (needNewDir) {
val classes = inPackage.getClasses
for (clazz <- classes) {
val occurrences = ScalaRefactoringUtil.getTypeElementOccurrences(typeElement, clazz)
allOcurrences += occurrences
val parent = PsiTreeUtil.findChildOfType(clazz, classOf[ScTemplateBody])
allValidators += ScalaTypeValidator(conflictsReporter, project, typeElement, parent, occurrences.isEmpty)
}
} else {
collectedFiles.foreach(handleOneFile)
}
val occurrences = allOcurrences.foldLeft(Array[ScTypeElement]())((a, b) => a ++ b)
val validator = ScalaCompositeTypeValidator(allValidators.toList, conflictsReporter, project, typeElement,
occurrences.isEmpty, containinDirectory, containinDirectory)
val suggested = inputName
val possibleNames = Array(validator.validateName(suggested, increaseNumber = true))
val result = PackageScopeItem(inPackage.getName, fileEncloser, needNewDir, possibleNames.toArray)
result.occurrences = occurrences
result.validator = validator
result
}
}
abstract class ScopeItem(name: String, availableNames: Array[String]) {
def getName = name
def getAvailableNames = availableNames
override def toString: String = name
}
case class SimpleScopeItem(name: String,
fileEncloser: PsiElement,
usualOccurrences: Array[ScTypeElement],
occurrencesInCompanion: Array[ScTypeElement],
typeValidator: ScalaTypeValidator,
availableNames: Array[String]) extends ScopeItem(name, availableNames) {
var occurrencesFromInheretors: Array[ScTypeElement] = Array[ScTypeElement]()
val usualOccurrencesRanges = usualOccurrences.map((x: ScTypeElement) => (x.getTextRange, x.getContainingFile))
val fileEncloserRange = (fileEncloser.getTextRange, fileEncloser.getContainingFile)
def setInheretedOccurrences(occurrences: Array[ScTypeElement]) = {
if (occurrences != null) {
occurrencesFromInheretors = occurrences
}
}
def revalidate(newName: String): ScopeItem = {
val revalidatedOccurrences = usualOccurrencesRanges.map {
case (range, containigFile) =>
PsiTreeUtil.findElementOfClassAtRange(containigFile, range.getStartOffset, range.getEndOffset, classOf[ScTypeElement])
}
val newNames = if ((newName == "") || availableNames.contains(newName)) {
availableNames
} else {
newName +: availableNames
}
val updatedFileEncloser = fileEncloserRange match {
case (range, containingFile) =>
PsiTreeUtil.findElementOfClassAtRange(containingFile, range.getStartOffset, range.getEndOffset, classOf[PsiElement])
}
val updatedValidator = new ScalaTypeValidator(typeValidator.conflictsReporter, typeValidator.myProject,
typeValidator.selectedElement, typeValidator.noOccurrences, updatedFileEncloser, updatedFileEncloser)
new SimpleScopeItem(name, updatedFileEncloser,
revalidatedOccurrences, occurrencesInCompanion, updatedValidator, newNames)
}
def isTrait: Boolean = {
name.startsWith("trait")
}
def isClass: Boolean = {
name.startsWith("class")
}
def isObject: Boolean = {
name.startsWith("object")
}
}
case class PackageScopeItem(name: String,
fileEncloser: PsiElement,
needDirectoryCreating: Boolean,
availableNames: Array[String]) extends ScopeItem(name, availableNames) {
var occurrences = Array[ScTypeElement]()
var validator: ScalaCompositeTypeValidator = null
override def toString = "package " + name
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScopeSuggester.scala
|
Scala
|
apache-2.0
| 15,140
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package config
import org.scalatestplus.play.PlaySpec
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
class ApplicationConfigSpec extends PlaySpec with GuiceOneServerPerSuite {
implicit val ac = app.injector.instanceOf[ApplicationConfig]
"Application Config" must {
"load errors properties file" in {
(ac.globalErrors.getString("56010.reason")) must be("gmp.error.reason.56010")
}
}
}
|
hmrc/gmp-frontend
|
test/config/ApplicationConfigSpec.scala
|
Scala
|
apache-2.0
| 1,026
|
package com.stackmob.customcode.dev
package test
package server
package sdk
package data
package dataservice
import org.specs2.Specification
import org.specs2.mock.Mockito
import scala.util.Try
import com.stackmob.customcode.dev.server.sdk.data.smValue
import com.stackmob.core.DatastoreException
private[dataservice] trait DeleteObject extends BaseTestGroup { this: Specification with CustomMatchers with Mockito =>
protected case class DeleteObject() extends BaseTestContext {
private val (_, _, datastore, svc) = defaults
def worksOnSchema = {
val objId = "a"
val ret: Boolean = svc.deleteObject(schemaName, smValue(objId))
val correctReturn = ret must beTrue
val correctSchema = datastore.deleteCalls.get(0).schema must beEqualTo(s"$schemaName/$objId")
correctReturn and correctSchema
}
def throwIfNoObjectIDSMString = {
Try(svc.deleteObject(schemaName, smValue(1))).toEither must beThrowableInstance[DatastoreException]
}
}
}
|
matthewfarwell/stackmob-customcode-dev
|
src/test/scala/com/stackmob/customcode/dev/test/server/sdk/data/dataservice/DeleteObject.scala
|
Scala
|
apache-2.0
| 996
|
package gsn.config
import org.scalatest._
class VsConfigTest extends FunSpec with Matchers {
describe("gps vs config"){
val vs=VsConf.load("src/test/resources/conf/vs/gps.xml")
it("should read params"){
vs.name shouldBe "GPSVS"
vs.description should startWith ("Virtual sensor producing random")
vs.address.size shouldBe 1
vs.address("type") shouldBe "test-sensor"
vs.poolSize shouldBe Some(10)
vs.priority shouldBe 100
vs.storageSize shouldBe Some("1m")
vs.storage shouldBe None
vs.processing.className shouldBe "gsn.vsensor.BridgeVirtualSensor"
vs.processing.uniqueTimestamp shouldBe true
vs.processing.initParams.size shouldBe 0
val coms=vs.processing.webInput.get.commands
coms.size shouldBe 2
coms(0).name shouldBe "ploppy"
coms(0).params(1).name shouldBe "plop2"
coms(0).params(1).dataType shouldBe "*checkbox:apple|orange|banana"
coms(0).params(1).description shouldBe "two"
vs.processing.output(1).name shouldBe "longitude"
val str = vs.streams(0)
vs.streams.size shouldBe 1
str.name shouldBe "sensor1"
str.query shouldBe "select * from source1"
str.sources(0).alias shouldBe "source1"
str.sources(0).wrappers(0).wrapper shouldBe "gps-test"
str.sources(0).wrappers(0).params("rate") shouldBe "1000"
}
}
}
|
EliasFarhan/gsn
|
gsn-tools/src/test/scala/gsn/config/VsConfigTest.scala
|
Scala
|
gpl-3.0
| 1,409
|
package com.singlestore.spark
import java.sql.SQLTransientConnectionException
import com.github.mrpowers.spark.daria.sql.SparkSessionExt._
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.types.{DecimalType, IntegerType, StringType}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import scala.util.Try
class MaxErrorsTest extends IntegrationSuiteBase with BeforeAndAfterEach with BeforeAndAfterAll {
def testMaxErrors(tableName: String, maxErrors: Int, duplicateItems: Int): Unit = {
val df = spark.createDF(
List.fill(duplicateItems + 1)((1, "Alice", 213: BigDecimal)),
List(("id", IntegerType, true),
("name", StringType, false),
("age", DecimalType(10, 0), true))
)
val result = Try {
df.write
.format(DefaultSource.SINGLESTORE_SOURCE_NAME_SHORT)
.option("tableKey.primary", "name")
.option("maxErrors", maxErrors)
.mode(SaveMode.Ignore)
.save(s"testdb.$tableName")
}
if (duplicateItems > maxErrors) {
assert(result.isFailure)
result.failed.get.getCause match {
case _: SQLTransientConnectionException =>
case _ => fail("SQLTransientConnectionException should be thrown")
}
} else {
assert(result.isSuccess)
}
}
describe("small dataset") {
it("hit maxErrors") {
testMaxErrors("hitMaxErrorsSmall", 1, 2)
}
it("not hit maxErrors") {
testMaxErrors("notHitMaxErrorsSmall", 1, 1)
}
}
describe("big dataset") {
it("hit maxErrors") {
testMaxErrors("hitMaxErrorsBig", 10000, 10001)
}
it("not hit maxErrors") {
testMaxErrors("notHitMaxErrorsBig", 10000, 10000)
}
}
it("wrong configuration") {
val df = spark.createDF(
List((1, "Alice", 213: BigDecimal)),
List(("id", IntegerType, true),
("name", StringType, false),
("age", DecimalType(10, 0), true))
)
val result = Try {
df.write
.format(DefaultSource.SINGLESTORE_SOURCE_NAME_SHORT)
.option("onDuplicateKeySQL", "id=id")
.option("maxErrors", 1)
.mode(SaveMode.Ignore)
.save(s"testdb.someTable")
}
assert(result.isFailure)
result.failed.get match {
case ex: IllegalArgumentException
if ex.getMessage.equals("can't use both `onDuplicateKeySQL` and `maxErrors` options") =>
succeed
case _ => fail()
}
}
}
|
memsql/memsql-spark-connector
|
src/test/scala/com/singlestore/spark/MaxErrorsTest.scala
|
Scala
|
apache-2.0
| 2,478
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: Spiros Tzavellas
*/
package com.tzavellas.coeus.validation.vspec.constraint
import com.tzavellas.coeus.bind.Error
import java.util.Calendar
trait CalendarConstraints {
def isBefore(cal: Calendar) = new Constraint[Calendar] {
def isValid(value: Calendar) = value == null || value.before(cal)
def getError(targetClass: Class[_], field: String, value: Calendar) =
Error.validationFailure("calendar.before", field, targetClass, value, cal)
}
def isAfter(cal: Calendar) = new Constraint[Calendar] {
def isValid(value: Calendar) = value == null || value.after(cal)
def getError(targetClass: Class[_], field: String, value: Calendar) =
Error.validationFailure("calendar.after", field, targetClass, value, cal)
}
def isCalInThePast = new Constraint[Calendar] {
def isValid(value: Calendar) =
value == null || value.before(Calendar.getInstance)
def getError(targetClass: Class[_], field: String, value: Calendar) =
Error.validationFailure("calendar.past", field, targetClass, value)
}
def isCalInTheFuture = new Constraint[Calendar] {
def isValid(value: Calendar) =
value == null || value.after(Calendar.getInstance)
def getError(targetClass: Class[_], field: String, value: Calendar) =
Error.validationFailure("calendar.future", field, targetClass, value)
}
}
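// Hedged usage sketch (not part of the original trait): shows how a constraint built above
// is evaluated. The object and method names are hypothetical; note that, as in the
// constraints above, a null Calendar is treated as valid.
private object CalendarConstraintsUsageSketch extends CalendarConstraints {
  def deadlineIsStillOpen(deadline: Calendar): Boolean =
    isCalInTheFuture.isValid(deadline)
}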
|
sptz45/coeus
|
src/main/scala/com/tzavellas/coeus/validation/vspec/constraint/CalendarConstraints.scala
|
Scala
|
apache-2.0
| 1,513
|
package com.twitter.finatra.http.integration.tweetexample.main.controllers
import com.twitter.concurrent.exp.AsyncStream
import com.twitter.finagle.http.{Request, Response}
import com.twitter.finatra.http.Controller
import com.twitter.finatra.http.integration.tweetexample.main.domain.Tweet
import com.twitter.finatra.http.integration.tweetexample.main.services.TweetsRepository
import com.twitter.finatra.http.response.StreamingResponse
import com.twitter.io.Buf
import com.twitter.util.Future
import javax.inject.Inject
class TweetsController @Inject()(
tweetsRepository: TweetsRepository)
extends Controller {
get("/tweets/hello") { request: Request =>
"hello world"
}
post("/tweets/") { tweet: Tweet =>
"tweet with id " + tweet.id + " is valid"
}
post("/tweets/streaming") { ids: AsyncStream[Long] =>
tweetsRepository.getByIds(ids)
}
get("/tweets/streaming_json") { request: Request =>
tweetsRepository.getByIds(
AsyncStream(0, 1, 2, 3, 4, 5))
}
get("/tweets/streaming_custom_tobuf") { request: Request =>
StreamingResponse(Buf.Utf8.apply) {
AsyncStream("A", "B", "C")
}
}
get("/tweets/streaming_manual_writes") { request: Request =>
val response = Response()
response.setChunked(true)
response.writer.write(Buf.Utf8("hello")) before {
response.writer.write(Buf.Utf8("world")) ensure {
response.close()
}
}
Future(response)
}
get("/tweets/") { request: Request =>
"tweets root"
}
get("/tweets/:id") { request: Request =>
val id = request.params("id").toLong
tweetsRepository.getById(id)
}
get("/tweets/test/:id/") { request: Request =>
request.params("id")
}
}
|
nkhuyu/finatra
|
http/src/test/scala/com/twitter/finatra/http/integration/tweetexample/main/controllers/TweetsController.scala
|
Scala
|
apache-2.0
| 1,712
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kinesis
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.control.NonFatal
import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorCheckpointer, IRecordProcessorFactory}
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{KinesisClientLibConfiguration, Worker}
import com.amazonaws.services.kinesis.model.Record
import org.apache.spark.internal.Logging
import org.apache.spark.storage.{StorageLevel, StreamBlockId}
import org.apache.spark.streaming.Duration
import org.apache.spark.streaming.kinesis.KinesisInitialPositions.AtTimestamp
import org.apache.spark.streaming.receiver.{BlockGenerator, BlockGeneratorListener, Receiver}
import org.apache.spark.util.Utils
/**
* Custom AWS Kinesis-specific implementation of Spark Streaming's Receiver.
* This implementation relies on the Kinesis Client Library (KCL) Worker as described here:
* https://github.com/awslabs/amazon-kinesis-client
*
* The way this Receiver works is as follows:
*
* - The receiver starts a KCL Worker, which essentially runs a thread pool of multiple
* KinesisRecordProcessor
* - Each KinesisRecordProcessor receives data from a Kinesis shard in batches. Each batch is
* inserted into a Block Generator, and the corresponding range of sequence numbers is recorded.
* - When the block generator defines a block, the sequence number ranges that were
* inserted into that block are recorded separately for later use.
* - When the block is ready to be pushed, the block is pushed and the ranges are reported as
* metadata of the block. In addition, the ranges are used to find out the latest sequence
* number for each shard that can be checkpointed through the DynamoDB.
* - Periodically, each KinesisRecordProcessor checkpoints the latest successfully stored sequence
* number for its own shard.
*
* @param streamName Kinesis stream name
* @param endpointUrl Url of Kinesis service (e.g., https://kinesis.us-east-1.amazonaws.com)
* @param regionName Region name used by the Kinesis Client Library for
* DynamoDB (lease coordination and checkpointing) and CloudWatch (metrics)
* @param initialPosition Instance of [[KinesisInitialPosition]]
* In the absence of Kinesis checkpoint info, this is the
* worker's initial starting position in the stream.
* The values are either the beginning of the stream
* per Kinesis' limit of 24 hours
* ([[KinesisInitialPositions.TrimHorizon]]) or
* the tip of the stream ([[KinesisInitialPositions.Latest]]).
* @param checkpointAppName Kinesis application name. Kinesis Apps are mapped to Kinesis Streams
* by the Kinesis Client Library. If you change the App name or Stream name,
* the KCL will throw errors. This usually requires deleting the backing
* DynamoDB table with the same name as this Kinesis application.
* @param checkpointInterval Checkpoint interval for Kinesis checkpointing.
* See the Kinesis Spark Streaming documentation for more
* details on the different types of checkpoints.
* @param storageLevel Storage level to use for storing the received objects
* @param kinesisCreds SparkAWSCredentials instance that will be used to generate the
* AWSCredentialsProvider passed to the KCL to authorize Kinesis API calls.
* @param cloudWatchCreds Optional SparkAWSCredentials instance that will be used to generate the
* AWSCredentialsProvider passed to the KCL to authorize CloudWatch API
* calls. Will use kinesisCreds if value is None.
* @param dynamoDBCreds Optional SparkAWSCredentials instance that will be used to generate the
* AWSCredentialsProvider passed to the KCL to authorize DynamoDB API calls.
* Will use kinesisCreds if value is None.
*/
private[kinesis] class KinesisReceiver[T](
val streamName: String,
endpointUrl: String,
regionName: String,
initialPosition: KinesisInitialPosition,
checkpointAppName: String,
checkpointInterval: Duration,
storageLevel: StorageLevel,
messageHandler: Record => T,
kinesisCreds: SparkAWSCredentials,
dynamoDBCreds: Option[SparkAWSCredentials],
cloudWatchCreds: Option[SparkAWSCredentials])
extends Receiver[T](storageLevel) with Logging { receiver =>
/*
* =================================================================================
* The following vars are initialized in the onStart() method, which executes in the
* Spark worker after this Receiver is serialized and shipped to the worker.
* =================================================================================
*/
/**
* The workerId used by the KCL; it should be based on the IP address of the actual Spark Worker
* where this code runs (not the driver's IP address).
*/
@volatile private var workerId: String = null
/**
* Worker is the core client abstraction from the Kinesis Client Library (KCL).
* A worker can process more than one shard from the given stream.
* Each shard is assigned its own IRecordProcessor and the worker runs multiple such
* processors.
*/
@volatile private var worker: Worker = null
@volatile private var workerThread: Thread = null
/** BlockGenerator used to generate blocks out of Kinesis data */
@volatile private var blockGenerator: BlockGenerator = null
/**
* Sequence number ranges added to the current block being generated.
* Accessing and updating of this map is synchronized by locks in BlockGenerator.
*/
private val seqNumRangesInCurrentBlock = new mutable.ArrayBuffer[SequenceNumberRange]
/** Sequence number ranges of data added to each generated block */
private val blockIdToSeqNumRanges = new ConcurrentHashMap[StreamBlockId, SequenceNumberRanges]
/**
* The centralized kinesisCheckpointer that checkpoints based on the given checkpointInterval.
*/
@volatile private var kinesisCheckpointer: KinesisCheckpointer = null
/**
* Latest sequence number ranges that have been stored successfully.
* This is used for checkpointing through KCL */
private val shardIdToLatestStoredSeqNum = new ConcurrentHashMap[String, String]
/**
* This is called when the KinesisReceiver starts and must be non-blocking.
* The KCL creates and manages the receiving/processing thread pool through Worker.run().
*/
override def onStart() {
blockGenerator = supervisor.createBlockGenerator(new GeneratedBlockHandler)
workerId = Utils.localHostName() + ":" + UUID.randomUUID()
kinesisCheckpointer = new KinesisCheckpointer(receiver, checkpointInterval, workerId)
val kinesisProvider = kinesisCreds.provider
val kinesisClientLibConfiguration = {
val baseClientLibConfiguration = new KinesisClientLibConfiguration(
checkpointAppName,
streamName,
kinesisProvider,
dynamoDBCreds.map(_.provider).getOrElse(kinesisProvider),
cloudWatchCreds.map(_.provider).getOrElse(kinesisProvider),
workerId)
.withKinesisEndpoint(endpointUrl)
.withTaskBackoffTimeMillis(500)
.withRegionName(regionName)
// Update the Kinesis client lib config with timestamp
// if InitialPositionInStream.AT_TIMESTAMP is passed
initialPosition match {
case ts: AtTimestamp =>
baseClientLibConfiguration.withTimestampAtInitialPositionInStream(ts.getTimestamp)
case _ =>
baseClientLibConfiguration.withInitialPositionInStream(initialPosition.getPosition)
}
}
/*
* RecordProcessorFactory creates impls of IRecordProcessor.
* IRecordProcessor adapts the KCL to our Spark KinesisReceiver via the
* IRecordProcessor.processRecords() method.
* We're using our custom KinesisRecordProcessor in this case.
*/
val recordProcessorFactory = new IRecordProcessorFactory {
override def createProcessor: IRecordProcessor =
new KinesisRecordProcessor(receiver, workerId)
}
worker = new Worker(recordProcessorFactory, kinesisClientLibConfiguration)
workerThread = new Thread() {
override def run(): Unit = {
try {
worker.run()
} catch {
case NonFatal(e) =>
restart("Error running the KCL worker in Receiver", e)
}
}
}
blockIdToSeqNumRanges.clear()
blockGenerator.start()
workerThread.setName(s"Kinesis Receiver ${streamId}")
workerThread.setDaemon(true)
workerThread.start()
logInfo(s"Started receiver with workerId $workerId")
}
/**
* This is called when the KinesisReceiver stops.
* The KCL worker.shutdown() method stops the receiving/processing threads.
* The KCL will do its best to drain and checkpoint any in-flight records upon shutdown.
*/
override def onStop() {
if (workerThread != null) {
if (worker != null) {
worker.shutdown()
worker = null
}
workerThread.join()
workerThread = null
logInfo(s"Stopped receiver for workerId $workerId")
}
workerId = null
if (kinesisCheckpointer != null) {
kinesisCheckpointer.shutdown()
kinesisCheckpointer = null
}
}
/** Add records of the given shard to the current block being generated */
private[kinesis] def addRecords(shardId: String, records: java.util.List[Record]): Unit = {
if (records.size > 0) {
val dataIterator = records.iterator().asScala.map(messageHandler)
val metadata = SequenceNumberRange(streamName, shardId,
records.get(0).getSequenceNumber(), records.get(records.size() - 1).getSequenceNumber(),
records.size())
blockGenerator.addMultipleDataWithCallback(dataIterator, metadata)
}
}
/** Return the current rate limit defined in [[BlockGenerator]]. */
private[kinesis] def getCurrentLimit: Int = {
assert(blockGenerator != null)
math.min(blockGenerator.getCurrentLimit, Int.MaxValue).toInt
}
/** Get the latest sequence number for the given shard that can be checkpointed through KCL */
private[kinesis] def getLatestSeqNumToCheckpoint(shardId: String): Option[String] = {
Option(shardIdToLatestStoredSeqNum.get(shardId))
}
/**
* Set the checkpointer that will be used to checkpoint sequence numbers to DynamoDB for the
* given shardId.
*/
def setCheckpointer(shardId: String, checkpointer: IRecordProcessorCheckpointer): Unit = {
assert(kinesisCheckpointer != null, "Kinesis Checkpointer not initialized!")
kinesisCheckpointer.setCheckpointer(shardId, checkpointer)
}
/**
* Remove the checkpointer for the given shardId. The provided checkpointer will be used to
* checkpoint one last time for the given shard. If `checkpointer` is `null`, then we will not
* checkpoint.
*/
def removeCheckpointer(shardId: String, checkpointer: IRecordProcessorCheckpointer): Unit = {
assert(kinesisCheckpointer != null, "Kinesis Checkpointer not initialized!")
kinesisCheckpointer.removeCheckpointer(shardId, checkpointer)
}
/**
* Remember the range of sequence numbers that was added to the currently active block.
* Internally, this is synchronized with `finalizeRangesForCurrentBlock()`.
*/
private def rememberAddedRange(range: SequenceNumberRange): Unit = {
seqNumRangesInCurrentBlock += range
}
/**
* Finalize the ranges added to the block that was active and prepare the ranges buffer
* for next block. Internally, this is synchronized with `rememberAddedRange()`.
*/
private def finalizeRangesForCurrentBlock(blockId: StreamBlockId): Unit = {
blockIdToSeqNumRanges.put(blockId, SequenceNumberRanges(seqNumRangesInCurrentBlock.toArray))
seqNumRangesInCurrentBlock.clear()
logDebug(s"Generated block $blockId has $blockIdToSeqNumRanges")
}
/** Store the block along with its associated ranges */
private def storeBlockWithRanges(
blockId: StreamBlockId, arrayBuffer: mutable.ArrayBuffer[T]): Unit = {
val rangesToReportOption = Option(blockIdToSeqNumRanges.remove(blockId))
if (rangesToReportOption.isEmpty) {
stop("Error while storing block into Spark, could not find sequence number ranges " +
s"for block $blockId")
return
}
val rangesToReport = rangesToReportOption.get
var attempt = 0
var stored = false
var throwable: Throwable = null
while (!stored && attempt <= 3) {
try {
store(arrayBuffer, rangesToReport)
stored = true
} catch {
case NonFatal(th) =>
attempt += 1
throwable = th
}
}
if (!stored) {
stop("Error while storing block into Spark", throwable)
}
// Update the latest sequence numbers that have been successfully stored for each shard.
// Note that we are doing this sequentially because the array of sequence number ranges
// is assumed to be in order, so the last range seen for a shard carries its latest sequence number.
rangesToReport.ranges.foreach { range =>
shardIdToLatestStoredSeqNum.put(range.shardId, range.toSeqNumber)
}
}
/**
* Class to handle blocks generated by this receiver's block generator. Specifically, in
* the context of the Kinesis Receiver, this handler does the following.
*
* - When an array of records is added to the current active block in the block generator,
* this handler keeps track of the corresponding sequence number range.
* - When the currently active block is ready to be sealed (no more records), this handler
* keeps track of the list of ranges added into this block in a separate map.
*/
private class GeneratedBlockHandler extends BlockGeneratorListener {
/**
* Callback method called after a data item is added into the BlockGenerator.
* The data addition, block generation, and calls to onAddData and onGenerateBlock
* are all synchronized through the same lock.
*/
def onAddData(data: Any, metadata: Any): Unit = {
rememberAddedRange(metadata.asInstanceOf[SequenceNumberRange])
}
/**
* Callback method called after a block has been generated.
* The data addition, block generation, and calls to onAddData and onGenerateBlock
* are all synchronized through the same lock.
*/
def onGenerateBlock(blockId: StreamBlockId): Unit = {
finalizeRangesForCurrentBlock(blockId)
}
/** Callback method called when a block is ready to be pushed / stored. */
def onPushBlock(blockId: StreamBlockId, arrayBuffer: mutable.ArrayBuffer[_]): Unit = {
storeBlockWithRanges(blockId,
arrayBuffer.asInstanceOf[mutable.ArrayBuffer[T]])
}
/** Callback called in case of any error in the internals of the BlockGenerator */
def onError(message: String, throwable: Throwable): Unit = {
reportError(message, throwable)
}
}
}
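/*
 * Hedged sketch (not part of the original file): a typical `messageHandler` of the shape
 * this receiver expects. It copies each Kinesis Record's payload into a byte array; the
 * object name is hypothetical and the real default handler lives elsewhere in this module.
 */
private[kinesis] object MessageHandlerSketch {
  def byteArrayHandler(record: Record): Array[Byte] = {
    val buffer = record.getData() // java.nio.ByteBuffer holding the record payload
    val bytes = new Array[Byte](buffer.remaining())
    buffer.get(bytes)
    bytes
  }
}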
|
WindCanDie/spark
|
external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala
|
Scala
|
apache-2.0
| 15,987
|
/*
* Skylark
* http://skylark.io
*
* Copyright 2012-2017 Quantarray, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.quantarray.skylark.measure
import com.quantarray.skylark.measure.implicits._
import com.quantarray.skylark.measure.measures._
import org.scalatest.{FlatSpec, Matchers}
class AnyMeasureParsersSpec extends FlatSpec with Matchers with AnyMeasureParsers
{
val measureAtoms: Map[String, AnyMeasure] = Seq(USD, bbl, m, s, kg).map(measure => measure.name -> measure).toMap
"USD" should "be parsable" in
{
parseMeasure("USD").get should equal(USD)
}
"USD / bbl" should "be parsable" in
{
parseMeasure("USD / bbl").get should equal(USD / bbl)
}
"(m / s) ^ 3" should "be parsable" in
{
parseMeasure("(m / s) ^ 3").get should equal((m / s) ^ 3)
}
"kg * ((m / s) ^ 2)" should "be parsable" in
{
parseMeasure("kg * ((m / s) ^ 2)").get should equal(kg * ((m / s) ^ 2))
}
"XYZ" should "not be parsable" in
{
intercept[MatchError]
{
parseMeasure("XYZ")
}
}
}
|
quantarray/skylark
|
skylark-measure/src/test/scala/com/quantarray/skylark/measure/AnyMeasureParsersSpec.scala
|
Scala
|
apache-2.0
| 1,604
|
package uk.gov.gds.ier.model
case class PostcodeAnywhereResponse(Items:List[Map[String,String]])
|
michaeldfallen/ier-frontend
|
app/uk/gov/gds/ier/model/PostcodeAnywhereResponse.scala
|
Scala
|
mit
| 98
|
package org.thinkmeta.smp.core
import language.experimental.macros
import reflect.macros.Context
/**
* @author Hossam Karim
*/
object HostedLisp {
trait Target
case object TermTreesTarget extends Target
case object TypeTreesTarget extends Target
case object CasesTarget extends Target
case object PatternTarget extends Target
def operand(c: Context)(e: Operand): c.Tree =
e match {
//casts are here to satisfy the IDE, the compiler doesn't need the casts
case x: Name ⇒ name(c)(x).asInstanceOf[c.Tree]
case x: IntegerLiteral ⇒ integerLiteral(c)(x).asInstanceOf[c.Tree]
case x: DoubleLiteral ⇒ doubleLiteral(c)(x).asInstanceOf[c.Tree]
case x: StringLiteral ⇒ stringLiteral(c)(x).asInstanceOf[c.Tree]
case x: ValueDef ⇒ valdef(c)(x).asInstanceOf[c.Tree]
case x: LambdaDef ⇒ lambdadef(c)(x).asInstanceOf[c.Tree]
case x: LambdaArg ⇒ lambdaarg(c)(x).asInstanceOf[c.Tree]
case x: CaseExpr ⇒ casedef(c)(x).asInstanceOf[c.Tree]
case x: FunApp ⇒ funapp(c)(x).asInstanceOf[c.Tree]
}
def name(c: Context)(e: Name): c.Tree = {
import c.universe._
if(e.value.contains("."))
e.value.split('.').toList match {
case x :: y :: Nil ⇒
Select(Ident(newTermName(x)), newTermName(y))
case x :: y :: ys ⇒
ys.foldLeft(Select(Ident(newTermName(x)), newTermName(y))) {
(acc,next) ⇒ Select(acc,newTermName(next))
}
}
else
q"${c.universe.newTermName(e.value)}"
}
def integerLiteral(c: Context)(e: IntegerLiteral): c.Tree = {
import c.universe._
q"${c.literal(e.value)}"
}
def doubleLiteral(c: Context)(e: DoubleLiteral): c.Tree = {
import c.universe._
q"${c.literal(e.value)}"
}
def stringLiteral(c: Context)(e: StringLiteral): c.Tree = {
import c.universe._
q"${c.literal(e.value)}"
}
def typeName(c: Context)(e: TypeName): c.Tree = {
import c.universe._
val t = e.value match {
case "int" โ newTypeName("Int")
case "double" โ newTypeName("Double")
case "string" โ newTypeName("String")
case tn โ newTypeName(tn)
}
q"$t"
}
def valdef(c: Context)(e: ValueDef): c.universe.ValDef = {
import c.universe._
val valName = newTermName(e.name.value)
val valValue = operand(c)(e.value)
e.typeName.map(t ⇒ typeName(c)(t)) match {
case Some(tn) ⇒ q"val $valName:$tn = $valValue"
case _ ⇒ q"val $valName = $valValue"
}
}
def arg(c: Context)(e: Arg): c.universe.ValDef = {
import c.universe._
val argName = newTermName(e.name.value)
val argType = typeName(c)(e.typeName)
q"val $argName:$argType"
}
def args(c: Context)(e: Args): List[c.universe.ValDef] =
e.args.map(a ⇒ arg(c)(a)).asInstanceOf[List[c.universe.ValDef]] // cast for the IDE only
def lambdaarg(c: Context)(e: LambdaArg): c.universe.ValDef = {
import c.universe._
val argName = newTermName(e.name.value)
e.typeName.map(typeName(c)(_)) match {
case Some(tn) ⇒ q"val $argName:$tn"
case _ ⇒ ValDef(NoMods, argName,TypeTree(), EmptyTree)
}
}
def lambdadef(c: Context)(e: LambdaDef): c.Tree = {
import c.universe._
val args = e.args.map(lambdaarg(c)(_))
val body = operand(c)(e.body)
q"(..$args) โ ($body)"
}
def casedef(c: Context)(e: CaseExpr): c.Tree = {
import c.universe._
def q2pq(q: c.Tree): c.Tree = q match {
case Apply( n,args ) => Apply( n, args map(q ⇒ q2pq(q)) )
case Ident( term ) => Bind(term, Ident(nme.WILDCARD))
case x => x
}
val lhs = q2pq(operand(c)(e.lhs))
val rhs = operand(c)(e.rhs)
val guard = e.guard.map(operand(c)(_))
guard match {
case Some(op) ⇒ cq"$lhs if $op ⇒ $rhs"
case None ⇒ cq"$lhs ⇒ $rhs"
}
}
def selectorForNew(c: Context)(e: Name): c.Tree = {
import c.universe._
if(e.value.contains(".")) /*a.b.c...*/{
val list = e.value.split('.').toList
val init = list.init
val last = list.last
val selected = init match {
case x :: y :: Nil ⇒
Select(Ident(newTermName(x)), newTermName(y))
case x :: y :: ys ⇒
ys.foldLeft(Select(Ident(newTermName(x)), newTermName(y))) {
(acc,next) ⇒ Select(acc,newTermName(next))
}
case _ ⇒ throw new IllegalArgumentException(s"Can't get a selector for $e")
}
Select(selected, newTypeName(last))
} else /* just a */{
Ident(newTypeName(e.value))
}
}
// A pure hack to support the dot syntax
def chain(c: Context)(e: FunApp): c.Tree = {
import c.universe._
// (chain (List 1 2 3)
// (map [x| (+ x 1)])
// (reduce [x y| (+ x y)]))
// ⇒
// {
// val $1 = List(1,2,3)
// val $2 = $1.map(_+1)
// val $3 = $2.reduce(_+_)
// $3
// }
val length = e.actualArgs.length
if(length < 2)
throw new IllegalArgumentException("chain takes at least 2 parameters")
def compose(arg: Operand, prevValueDef: Option[ValueDef]) = {
val prev = prevValueDef.map(_.name.value)
val currentName = c.fresh()
// transform the ast to prefix the name of current arg with 'prev.'
val currentValueAst = (arg,prev) match {
case (FunApp(Name(n),args), Some(p)) ⇒ FunApp(Name(p+"."+n),args)
case (Name(n), Some(p)) ⇒ Name(p+"."+n)
case x@(_, Some(p)) ⇒
throw new UnsupportedOperationException(s"Unsupported: $x") // probably literals
case (x, _) ⇒ x
}
ValueDef(Name(currentName), None, currentValueAst)
}
val valsReversed =
e.actualArgs.tail.foldLeft(List(compose(e.actualArgs.head, None))) {
(acc,next) ⇒
compose(next,Some(acc.head)) :: acc
}
val lastValue = newTermName(valsReversed.head.name.value)
val vals = valsReversed.reverse.map(valdef(c)(_))
q"""
{
..$vals
$lastValue
}
"""
}
def new$(c: Context)(e: FunApp): c.Tree = {
import c.universe._
e.actualArgs match {
case (n@Name(_)) :: Nil ⇒
val clsname = selectorForNew(c)(n)
q"new $clsname"
case (n@Name(_)) :: ops ⇒
val clsname = selectorForNew(c)(n)
val ctrargs = ops.map(op ⇒ operand(c)(op))
q"new $clsname(..$ctrargs)"
case _ ⇒
throw new IllegalArgumentException(s"Can't handle expression $e")
}
}
def matchcase(c: Context)(e: FunApp): c.Tree = {
import c.universe._
e.actualArgs match {
case head :: tail ⇒
val headOp = operand(c)(head)
val tailOp =
tail
.asInstanceOf[List[CaseExpr]]
.map(casedef(c)(_))
.collect { case x: CaseDef โ x }
Match(headOp, tailOp)
case _ ⇒
throw new IllegalArgumentException("Malformed match expression")
}
}
// The compiler once said: Quasiquotes can only be used with literal strings
/*
def termTree(c: Context)(pattern: String*)(trees: c.Tree*): c.Tree = {
import c.universe._
StringContext(pattern:_*).q(trees:_*)
}
*/
def funapp(c: Context)(e: FunApp): c.Tree = {
import c.universe._
e.name.value match {
case "__chain__" โ chain(c)(e)
case "__new__" โ new$(c)(e)
case "__match__" โ matchcase(c)(e)
case _ โ
val fname = name(c)(e.name)
val args = e.actualArgs.map(op โ operand(c)(op))
q"$fname(..$args)"
}
}
def fundef(c: Context)(e: FunDef): c.Tree = {
import c.universe._
val fname = newTermName(e.name.value)
val tpe = e.typeName.map(tn ⇒ typeName(c)(tn))
val fargs = e.formalArgs match {
case Some(largs) ⇒ args(c)(largs)
case None ⇒ Nil
}
val fbody = e.body.map(operand(c)(_))
tpe match {
case Some(t) ⇒
q"""
def $fname(..$fargs):$t = {
..$fbody
}
"""
case None ⇒
q"""
def $fname(..$fargs) = {
..$fbody
}
"""
}
}
def using(c: Context)(e: Using): c.Tree = {
import c.universe._
val path = e.path.map(_.value) //++ (if(e.wildcard) List("_") else Nil)
path match {
case x :: Nil if e.wildcard ⇒
val tx = newTermName(x)
Import(
Ident(tx),
List(ImportSelector(nme.WILDCARD, 0, null, -1)))
case x :: y :: Nil if e.wildcard ⇒
val tx = newTermName(x)
val ty = newTermName(y)
Import(
Select(Ident(tx), ty),
List(ImportSelector(nme.WILDCARD, 11, null, -1)))
case x :: y :: Nil ⇒
val tx = newTermName(x)
val ty = newTermName(y)
Import(
Ident(tx),
List(ImportSelector(ty, 0, ty, 0)))
case x :: y :: ys if e.wildcard ⇒
val tx = newTermName(x)
val ty = newTermName(y)
Import(
ys.foldLeft(Select(Ident(tx), ty))( (acc,next) ⇒
Select(acc,newTermName(next))),
List(ImportSelector(nme.WILDCARD, 0, null, -1)))
case l@(_::_) ⇒
val (x::y::ys) = l.init
val tx = newTermName(x)
val ty = newTermName(y)
val tz = newTermName(l.last)
Import(
ys.foldLeft(Select(Ident(tx), ty))( (acc,next) ⇒
Select(acc,newTermName(next))),
List(ImportSelector(tz, 0, tz, 0)))
}
}
def moduledef(c: Context)(e: Module): c.Tree = {
import c.universe._
def mkModule(module: Module, topLevel: Boolean = false): c.Tree = {
val functions = module.blocks.collect {
case f: FunDef ⇒ fundef(c)(f)
}
val values = module.blocks.collect {
case v: ValueDef ⇒ valdef(c)(v)
}
val mname = newTermName(module.name.value)
val innerModules: List[Module] =
module.blocks.collect {case m: Module ⇒ m}
val imports = module.blocks.collect {case u: Using ⇒ u}.map(using(c)(_))
val inner = innerModules.map(m ⇒ mkModule(m))
val objectTree =
q"""
object $mname {
import org.thinkmeta.smp.core.StdLib._
..$imports
..$values
..$functions
..$inner
}
"""
if(topLevel)
q"""
$objectTree
$mname
"""
else
objectTree
}
mkModule(e, topLevel = true)
}
// hack to get around the compiler error:
// reflective toolbox has failed: cannot operate on trees that are already typed
// Context.eval(expression) did not work!
def extractString(c: Context)(e: c.Expr[String]): String = {
import c.universe._
val definition: List[String] =
e.tree.collect { case Literal(Constant(s: String)) ⇒ s }
if(definition.headOption.isEmpty)
throw new IllegalArgumentException("Empty Definition")
definition.head
}
def functionm(c: Context)(e: c.Expr[String]): c.Expr[AnyRef] = {
import c.universe._
val code = extractString(c)(e)
val funast = UselessLisp.astFunDef(code)
val compiled = fundef(c)(funast)
val objectName = newTermName(c.fresh())
val expr =
q"""
import org.thinkmeta.smp.core.StdLib._
object $objectName{ $compiled }
$objectName
"""
c.Expr(expr)
}
def modulem(c: Context)(e: c.Expr[String]): c.Expr[AnyRef] = {
import c.universe._
val code = extractString(c)(e)
val module = UselessLisp.astModule(code)
val expr = moduledef(c)(module)
c.Expr(expr)
}
def module(e: String) = macro modulem
def function(e: String) = macro functionm
}
|
hkarim/macros-playground
|
core/src/main/scala/org/thinkmeta/smp/core/HostedLisp.scala
|
Scala
|
apache-2.0
| 11,812
|
/*
* Copyright 2012-2019 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.uberstore.data
import java.io.{EOFException, InputStream}
import org.mockito.Mockito._
import scala.util.Random
class BufferedFileReadHandleTest extends UberDataFileHandleTest {
val readBufferSize: Int = 8 * 1024
describe("eof") {
it("returns false when read returns a valid unsigned byte") {
val inputStream = mock[InputStream]
val expected = Random.nextInt(256)
doReturn(expected).when(inputStream).read()
val underTest = new BufferedFileReadHandle(inputStream, 0L)
val result = underTest.eof()
assert(!result)
verify(inputStream).mark(1)
verify(inputStream).reset()
}
it("returns true when read returns -1") {
val inputStream = mock[InputStream]
val expected = -1
doReturn(expected).when(inputStream).read()
val underTest = new BufferedFileReadHandle(inputStream, 0L)
val result = underTest.eof()
assert(result)
verify(inputStream).mark(1)
verify(inputStream).reset()
}
}
describe("readInt") {
it("returns 32-bit Integer and advances offset") {
val expected = Random.nextInt()
writeInts(expected)
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
val result = underTest.readInt()
assert(result == expected)
assert(underTest.offset() == 4)
}
it("reads 32-bit Integer at offset and advances offset") {
val expected = Random.nextInt()
val baseOffset = Random.nextInt(500)
writeRandomBytes(baseOffset)
writeInts(expected)
val underTest = BufferedFileReadHandle(tempPath.toString, baseOffset, readBufferSize)
val result = underTest.readInt()
assert(result == expected)
assert(underTest.offset() == baseOffset + 4)
}
it("throws EOFException when EOF") {
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
intercept[EOFException] {
val _ = underTest.readInt()
}
}
}
describe("readLong") {
it("returns 64-bit Integer and advances offset") {
val expected = Random.nextLong()
writeLongs(expected)
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
val result = underTest.readLong()
assert(result == expected)
assert(underTest.offset() == 8)
}
it("returns 64-bit Integer at offset and advances offset") {
val baseOffset = Random.nextInt(500)
val expected = Random.nextLong()
writeRandomBytes(baseOffset)
writeLongs(expected)
val underTest = BufferedFileReadHandle(tempPath.toString, baseOffset, readBufferSize)
val result = underTest.readLong()
assert(result == expected)
assert(underTest.offset() == baseOffset + 8)
}
it("throws EOFException when EOF") {
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
intercept[EOFException] {
val _ = underTest.readLong()
}
}
}
describe("readFully") {
it("fills array and advances offset") {
val expected = randomBytes(10)
writeBytes(expected: _*)
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
val array = new Array[Byte](10)
underTest.readFully(array)
assert(array.sameElements(expected))
assert(underTest.offset() == 10)
}
it("fills array at offset and advances offset") {
val baseOffset = Random.nextInt(500)
val expected = randomBytes(10)
writeRandomBytes(baseOffset)
writeBytes(expected: _*)
val underTest = BufferedFileReadHandle(tempPath.toString, baseOffset, readBufferSize)
val array = new Array[Byte](10)
underTest.readFully(array)
assert(array.sameElements(expected))
assert(underTest.offset() == baseOffset + 10)
}
it("throws EOFException when EOF") {
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
intercept[EOFException] {
val array = new Array[Byte](10)
underTest.readFully(array)
}
}
it("throws EOFException when less than required bytes remaining") {
writeRandomBytes(7)
val underTest = BufferedFileReadHandle(tempPath.toString, 0L, readBufferSize)
intercept[EOFException] {
val array = new Array[Byte](10)
underTest.readFully(array)
}
}
}
describe("close") {
it("closes the underlying InputStream") {
val mockInputStream = mock[InputStream]
val underTest = new BufferedFileReadHandle(mockInputStream, 0L)
underTest.close()
verify(mockInputStream).close()
}
}
}
|
Comcast/sirius
|
src/test/scala/com/comcast/xfinity/sirius/uberstore/data/BufferedFileReadHandleTest.scala
|
Scala
|
apache-2.0
| 5,312
|
package org.revenj.patterns
import scala.io.Source
import scala.reflect.ClassTag
import scala.xml.Elem
import scala.xml.parsing.ConstructingParser
import org.joda.time.DateTime
import org.joda.time.LocalDate
import org.joda.time.format.DateTimeFormat
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.core.Version
import com.fasterxml.jackson.databind.DeserializationContext
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.InjectableValues
import com.fasterxml.jackson.databind.JsonDeserializer
import com.fasterxml.jackson.databind.JsonSerializer
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.SerializerProvider
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
class JsonSerialization extends ISerialization[String] {
//TODO: fix this
private val dateFormat = DateTimeFormat.forPattern("y-MM-dd'T00:00:00")
private val dateSerializer = new JsonSerializer[LocalDate] {
override def serialize(value: LocalDate, generator: JsonGenerator, x: SerializerProvider) =
generator.writeString(dateFormat.print(value))
}
private val dateDeserializer = new JsonDeserializer[LocalDate] {
override def deserialize(parser: JsonParser, context: DeserializationContext) =
dateFormat.parseLocalDate(parser.getValueAsString())
}
// -----------------------------------------------------------------------------
private val timestampSerializer = new JsonSerializer[DateTime] {
override def serialize(value: DateTime, generator: JsonGenerator, x: SerializerProvider) =
generator.writeString(value.toString())
}
private val timestampDeserializer = new JsonDeserializer[DateTime] {
override def deserialize(parser: JsonParser, context: DeserializationContext) =
new DateTime(parser.getValueAsString())
}
// -----------------------------------------------------------------------------
private val bigDecimalSerializer = new JsonSerializer[BigDecimal] {
override def serialize(value: BigDecimal, generator: JsonGenerator, x: SerializerProvider) =
generator.writeString(value.toString)
}
private val bigDecimalDeserializer = new JsonDeserializer[BigDecimal] {
override def deserialize(parser: JsonParser, context: DeserializationContext) =
BigDecimal(parser.getValueAsString())
}
// -----------------------------------------------------------------------------
private val elemSerializer = new JsonSerializer[Elem] {
override def serialize(value: Elem, generator: JsonGenerator, x: SerializerProvider) =
generator.writeString(value.toString())
}
private val elemDeserializer = new JsonDeserializer[Elem] {
override def deserialize(parser: JsonParser, context: DeserializationContext) =
ConstructingParser
.fromSource(Source.fromString(parser.getValueAsString()), true)
.document.docElem.asInstanceOf[Elem]
}
// -----------------------------------------------------------------------------
private val version = new Version(0, 5, 0, "SNAPSHOT", "org.revenj.patterns", "revenj-scala-core")
private val serializationModule =
new SimpleModule("SerializationModule", version)
.addSerializer(classOf[LocalDate], dateSerializer)
.addSerializer(classOf[DateTime], timestampSerializer)
.addSerializer(classOf[BigDecimal], bigDecimalSerializer)
.addSerializer(classOf[Elem], elemSerializer)
private val serializationMapper =
new ObjectMapper()
.registerModule(DefaultScalaModule)
.registerModule(serializationModule)
override def serialize[T](t: T): String =
serializationMapper.writer.writeValueAsString(t)
// -----------------------------------------------------------------------------
private val deserializationModule =
new SimpleModule("DeserializationModule", version)
.addDeserializer(classOf[LocalDate], dateDeserializer)
.addDeserializer(classOf[DateTime], timestampDeserializer)
.addDeserializer(classOf[BigDecimal], bigDecimalDeserializer)
.addDeserializer(classOf[Elem], elemDeserializer)
override def deserialize[T](data: String, locator: IServiceLocator)(implicit ev: ClassTag[T]): T =
new ObjectMapper()
.registerModule(DefaultScalaModule)
.registerModule(deserializationModule)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.setInjectableValues(new InjectableValues.Std addValue("__locator", locator))
.readValue(data, ev.runtimeClass.asInstanceOf[Class[T]])
}
|
tferega/revenj
|
scala/Core/src/main/scala/org/revenj/patterns/JsonSerialization.scala
|
Scala
|
bsd-3-clause
| 4,679
|
/*
* This file is a part of the "sur la plaque" toolkit for cycling
* data analytics and visualization.
*
* Copyright (c) 2013--2014 William C. Benton and Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.freevariable.surlaplaque.app;
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.mllib.clustering._
import com.freevariable.surlaplaque.importer._
import com.freevariable.surlaplaque.data._
import com.freevariable.surlaplaque.app._
object PowerBestsApp extends Common with ActivitySliding with PointClustering {
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import com.freevariable.surlaplaque.geometry.LineString
import com.freevariable.surlaplaque.geometry.LineString._
import scala.collection.immutable.TreeSet
case class PBOptions(periodColors: Map[Int, Tuple3[Short,Short,Short]], val clusters: Int, val iterations: Int, val files: List[String], val defaultOpacity: Short, val outputFile: String, val httpEndpoint: Option[String]) {
def periodMap = {
if (periodColors.size == 0)
Map(getEnvValue("SLP_MMP_PERIOD", "60").toInt -> (255.toShort, 0.toShort, 0.toShort))
else
periodColors
}
def withPeriodColoring(period: Int, r: Short, g: Short, b: Short) = this.copy(periodColors = this.periodColors + ((period, (r,g,b))))
def withClusters(clusters: Int) = this.copy(clusters = clusters)
def withIterations(iterations: Int) = this.copy(iterations = iterations)
def withFile(file: String) = this.copy(files = file::this.files)
def withFiles(fs: List[String]) = this.copy(files = fs ++ this.files)
def withDefaultOpacity(op: Short) = this.copy(defaultOpacity = op)
def withOutputFile(f: String) = this.copy(outputFile = f)
def withEndpoint(url: String) = this.copy(httpEndpoint = Some(url))
}
object PBOptions {
val default = new PBOptions(Map(), getEnvValue("SLP_CLUSTERS", "256").toInt, getEnvValue("SLP_ITERATIONS", "10").toInt, List(), 128, getEnvValue("SLP_OUTPUT_FILE", "slp.json"), None)
}
def parseArgs(args: Array[String]) = {
val hexRGB = "^((?:[0-9a-fA-F]){2})((?:[0-9a-fA-F]){2})((?:[0-9a-fA-F]){2})$".r
val dirPattern = "^-d(.*)$".r
def phelper(params: List[String], options: PBOptions): PBOptions = {
params match {
case Nil => options
case dirPattern(dir) :: rest => phelper(rest, options.withFiles(SLP.listFilesInDir(dir)))
case "--activity-dir" :: dir :: rest => phelper(rest, options.withFiles(SLP.listFilesInDir(dir)))
case "--period-coloring" :: period :: hexRGB(r,g,b) :: rest => {
val rb = Integer.parseInt(r, 16) % 256
val gb = Integer.parseInt(g, 16) % 256
val bb = Integer.parseInt(b, 16) % 256
phelper(rest, options.withPeriodColoring(period.toInt, rb.toShort, gb.toShort, bb.toShort))
}
case "--clusters" :: c :: rest => phelper(rest, options.withClusters(c.toInt))
case "--iterations" :: it :: rest => phelper(rest, options.withIterations(it.toInt))
case "--opacity" :: op :: rest => phelper(rest, options.withDefaultOpacity(op.toShort))
case "--output-file" :: f :: rest => phelper(rest, options.withOutputFile(f))
case "--url" :: url :: rest => phelper(rest, options.withEndpoint(url))
case "--" :: rest => options.withFiles(rest)
case bogusOpt if bogusOpt.head.startsWith("-") => throw new RuntimeException(s"unrecognized option ${bogusOpt.head}")
case file :: rest => phelper(rest, options.withFile(file))
}
}
phelper(args.toList, PBOptions.default)
}
def appMain(args: Array[String]) {
val options = parseArgs(args)
val struct = run(options)
val out = outputFile(options.outputFile)
val renderedStruct = pretty(render(struct))
out.println(renderedStruct)
maybePut(options, renderedStruct)
out.close
}
def run(options: PBOptions) = {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
val app = new SLP(new SparkContext(conf))
addExitHook(app.stop)
val data = app.processFiles(options.files)
val bests = bestsByEndpointClusters(options, data, app)
val struct = ("type"->"FeatureCollection") ~ ("features"->bests)
struct
}
import Math.abs
type AO = Tuple2[String, Int]
type AOWatts = Tuple2[AO, Double]
case class BasicTrackpoint(latlong: Coordinates, watts: Double) {}
case class Effort(mmp: Double, activity: String, startTimestamp: Long, endTimestamp: Long) {}
def stripTrackpoints(tp: Trackpoint) = BasicTrackpoint(tp.latlong, tp.watts)
def bestsByEndpointClusters(options: PBOptions, data: RDD[Trackpoint], app: SLP) = {
val model = app.context.broadcast(clusterPoints(data, options.clusters, options.iterations))
def bestsForPeriod(data: RDD[Trackpoint], period: Int, app: SLP, model: Broadcast[KMeansModel]) = {
val clusteredMMPs = applyWindowedNoZip(data, period, {
case (activity:String, samples:Seq[Trackpoint]) =>
(
(closestCenter(samples.head.latlong, model.value), closestCenter(samples.last.latlong, model.value)),
Effort(samples.map(_.watts).reduce(_ + _) / samples.size, activity, samples.head.timestamp, samples.last.timestamp)
)
})
clusteredMMPs
.reduceByKey ((a, b) => if (a.mmp > b.mmp) a else b)
.takeOrdered(20)(Ordering.by[((Int, Int), Effort), Double] { case (_, e:Effort) => -e.mmp })
.map {
case (_, e: Effort) => (
e.mmp,
data.filter {
case tp: Trackpoint => tp.activity.getOrElse("UNKNOWN") == e.activity && tp.timestamp <= e.endTimestamp && tp.timestamp >= e.startTimestamp
}.collect
)
}
}
data.cache
options.periodMap.flatMap { case(period: Int, color: Tuple3[Short,Short,Short]) =>
val bests = bestsForPeriod(data, period, app, model)
val best = bests.head._1
bests.map {case (watts, samples) => LineString(samples.map(_.latlong), Map("stroke" -> rgba(color._1, color._2, color._3, (options.defaultOpacity * (watts / best)).toShort), "stroke-width" -> "7", "label" -> s"$watts watts"))}
}
}
def bestsWithoutTemporalOverlap(options: PBOptions, data: RDD[Trackpoint], app: SLP) = {
def bestsForPeriod(data: RDD[Trackpoint], period: Int, app: SLP) = {
val windowedSamples = windowsForActivities(data, period).cache
val mmps = windowedSamples.map {case ((activity, offset), samples) => (samples.map(_.watts).reduce(_ + _) / samples.size, (activity, offset))}
val sorted = mmps.sortByKey(false).map {case (watts, (activity, offset)) => ((activity, offset), watts)}.take(1000) // FIXME: KLUDGE
val trimmed = topWithoutOverlaps(period, 20, sorted.toList)
val top20 = app.context.parallelize(trimmed).cache
top20.join(windowedSamples).map {case ((activity, offset), (watts, samples)) => (watts, samples)}
}
def topWithoutOverlaps(period: Int, count: Int, candidates: List[AOWatts]) = {
def thelper(activityPeriods: TreeSet[AO],
kept: List[AOWatts],
cs: List[AOWatts]): List[AOWatts] = {
if (kept.length == count) {
kept
} else {
cs match {
case Nil => kept
case first @ ((activity, offset), watts) :: rest =>
if (activityPeriods.filter({case (a,o) => a == activity && abs(o - offset) < period}).size == 0) {
thelper(activityPeriods + ((activity, offset)), (((activity, offset), watts))::kept, rest)
} else {
thelper(activityPeriods, kept, rest)
}
}
}
}
thelper(TreeSet[AO](), List[AOWatts](), candidates).reverse
}
options.periodMap.flatMap { case(period: Int, color: Tuple3[Short,Short,Short]) =>
bestsForPeriod(data, period, app).collect.map {case (watts, samples) => LineString(samples.map(_.latlong), Map("color" -> rgba(color._1, color._2, color._3, 128), "label" -> s"$watts watts"))}
}
}
def rgba(r: Short, g: Short, b: Short, a: Short) = s"rgba($r, $g, $b, $a)"
def maybePut(options: PBOptions, document: String) {
import dispatch._
import scala.concurrent.ExecutionContext.Implicits.global
options.httpEndpoint match {
case Some(endpoint) => {
val request = url(endpoint).PUT
.setBody(document)
.addHeader("Content-type", "application/json")
for (result <- Http(request OK as.String)) yield result
}
case None => {}
}
}
}
|
willb/sur-la-plaque
|
analysis/src/main/scala/com/freevariable/surlaplaque/app/power_bests.scala
|
Scala
|
apache-2.0
| 9,499
|
package com.twitter.finagle.netty3
import com.twitter.finagle.benchmark.StdBenchAnnotations
import com.twitter.io.Buf
import java.nio
import org.jboss.netty.buffer.ChannelBuffers
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
@State(Scope.Benchmark)
class ChannelBufferBufBenchmark extends StdBenchAnnotations {
@Param(Array("1000"))
var size: Int = 1000
private[this] var channelBufferBuf: Buf = _
private[this] var all: Array[Buf] = _
@Setup(Level.Iteration)
def setup(): Unit = {
val cap = size * 2
val start = cap / 4
val end = start + size
val raw = 0.until(cap).map(_.toByte).toArray
val bb = java.nio.ByteBuffer.wrap(raw, start, size)
val cb = ChannelBuffers.wrappedBuffer(raw, start, size)
channelBufferBuf = ChannelBufferBuf.Owned(cb)
val byteArrayBuf = Buf.ByteArray.Owned(raw, start, end)
val byteBufferBuf = Buf.ByteBuffer.Owned(bb)
val concatBuf = byteArrayBuf.slice(0, size / 2).concat(byteArrayBuf.slice(size / 2, size))
all = Array(byteArrayBuf, byteBufferBuf, concatBuf, channelBufferBuf)
}
@Benchmark
def equality(hole: Blackhole): Unit = {
var i = 0
while (i < all.length) {
hole.consume(channelBufferBuf == all(i))
hole.consume(all(i) == channelBufferBuf)
i += 1
}
}
@Benchmark
def hash(): Int =
channelBufferBuf.hashCode
@Benchmark
def slice(): Buf =
channelBufferBuf.slice(size / 4, size / 4 + size / 2)
@Benchmark
def extractByteBuffer(): nio.ByteBuffer =
Buf.ByteBuffer.Owned.extract(channelBufferBuf)
@Benchmark
def extractByteArray(): Array[Byte] =
Buf.ByteArray.Owned.extract(channelBufferBuf)
@Benchmark
def length(): Int =
channelBufferBuf.length
}
|
sveinnfannar/finagle
|
finagle-benchmark/src/main/scala/com/twitter/finagle/netty3/ChannelBufferBufBenchmark.scala
|
Scala
|
apache-2.0
| 1,756
|
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.model
import com.netflix.atlas.core.stacklang.BaseWordSuite
import com.netflix.atlas.core.stacklang.Interpreter
import com.netflix.atlas.core.stacklang.StandardVocabulary
import com.netflix.atlas.core.stacklang.Word
class InWordSuite extends BaseWordSuite {
def interpreter: Interpreter =
Interpreter(QueryVocabulary.allWords ::: StandardVocabulary.allWords)
def word: Word = QueryVocabulary.In
def shouldMatch: List[(String, List[Any])] = List(
"a,(,)" -> List(Query.False),
"a,(,b,)" -> List(Query.Equal("a", "b")),
"a,(,b,c,d,)" -> List(Query.In("a", List("b", "c", "d")))
)
def shouldNotMatch: List[String] = List("", "a")
}
|
brharrington/atlas
|
atlas-core/src/test/scala/com/netflix/atlas/core/model/InWordSuite.scala
|
Scala
|
apache-2.0
| 1,304
|
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import monifu.reactive.Ack.{Cancel, Continue}
import monifu.reactive.{Ack, Observer, Observable}
import scala.concurrent.Future
import monifu.reactive.internals._
private[reactive] object misc {
/**
* Implements [[Observable.complete]].
*/
def complete[T](source: Observable[T]): Observable[Nothing] =
Observable.create { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] {
def onNext(elem: T) = Continue
def onError(ex: Throwable): Unit =
subscriber.onError(ex)
def onComplete(): Unit =
subscriber.onComplete()
})
}
/**
* Implements [[Observable.error]].
*/
def error[T](source: Observable[T]): Observable[Throwable] =
Observable.create { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] {
def onNext(elem: T) =
Continue
def onComplete(): Unit =
subscriber.onComplete()
def onError(ex: Throwable): Unit = {
subscriber.onNext(ex)
.onContinueSignalComplete(subscriber)
}
})
}
/**
* Implementation for [[monifu.reactive.Observable.defaultIfEmpty]].
*/
def defaultIfEmpty[T](source: Observable[T], default: T): Observable[T] =
Observable.create { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] {
private[this] var isEmpty = true
def onNext(elem: T): Future[Ack] = {
if (isEmpty) isEmpty = false
subscriber.onNext(elem)
}
def onError(ex: Throwable): Unit = {
subscriber.onError(ex)
}
def onComplete(): Unit = {
if (isEmpty)
subscriber.onNext(default)
.onContinueSignalComplete(subscriber)
else
subscriber.onComplete()
}
})
}
/**
* Implements [[Observable.endWithError]].
*/
def endWithError[T](source: Observable[T])(error: Throwable): Observable[T] =
Observable.create { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] {
def onNext(elem: T) = subscriber.onNext(elem)
def onError(ex: Throwable) = subscriber.onError(ex)
def onComplete() = subscriber.onError(error)
})
}
/**
* Implements [[Observable.isEmpty]].
*/
def isEmpty[T](source: Observable[T]): Observable[Boolean] =
Observable.create[Boolean] { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] {
def onNext(elem: T): Future[Ack] = {
subscriber.onNext(false).onContinueSignalComplete(subscriber)
Cancel
}
def onError(ex: Throwable): Unit =
subscriber.onError(ex)
def onComplete(): Unit = {
// if we get here, it means that `onNext` never happened
subscriber.onNext(true).onContinueSignalComplete(subscriber)
}
})
}
}
|
virtualirfan/monifu
|
monifu/shared/src/main/scala/monifu/reactive/internals/operators/misc.scala
|
Scala
|
apache-2.0
| 3,751
|
package cl.asa.init
/**
* Class that specifies the dictionaries used by ASA.
* For now the paths are hard-coded here; change this if a better approach is found.
*/
class YamlFile {
//val frame: String = "yaml/new_frames2.yaml"
//val dicframe: String = "yaml/new_frames2.dic"
val frame: String = "yaml/new_argframes.yaml"
val dicframe: String = "yaml/new_argframes.dic"
val cchart: String = "yaml/ccharts.yaml"
val diccchart: String = "yaml/ccharts.dic"
val verb: String = "yaml/verbs.yaml"
val category: String = "yaml/new_categorys.yaml"
val idiom: String = "yaml/idioms.yaml"
val filter: String = "yaml/filters.yaml"
val dicfilter: String = "yaml/filters.dic"
val compoundPredicate: String = "yaml/compoundPredicates.yaml"
val noun: String = "NounTest.yaml"
def getFrame(): String = {
return frame
}
def getCchart(): String = {
return cchart
}
def getVerb(): String = {
return verb
}
def getCategory(): String = {
return category
}
def getIdiom(): String = {
return idiom
}
def getFilter(): String = {
return filter
}
def getCompoundPredicate(): String = {
return compoundPredicate
}
def getNoun():String ={
return noun
}
}
|
Takeuchi-Lab-LM/scala_asa3
|
ASA/src/main/scala/cl/asa/init/YamlFile.scala
|
Scala
|
mit
| 1,175
|
package scala.pickling.binary
import scala.pickling._
import scala.pickling.internal._
import scala.language.implicitConversions
import scala.reflect.runtime.universe.Mirror
import java.io.InputStream
import java.nio.ByteBuffer
abstract class BinaryPickle extends Pickle {
type PickleFormatType = BinaryPickleFormat
type ValueType = Array[Byte]
val value: Array[Byte]
def createReader(format: BinaryPickleFormat): PReader
}
case class BinaryPickleArray(data: Array[Byte]) extends BinaryPickle {
val value: Array[Byte] = data
def createReader(format: BinaryPickleFormat): PReader =
new BinaryPickleReader(new ByteArrayInput(data), format)
override def toString = s"""BinaryPickle(${value.mkString("[", ",", "]")})"""
}
case class BinaryInputPickle(input: BinaryInput) extends BinaryPickle {
val value: Array[Byte] = Array.ofDim[Byte](0)
def createReader(format: BinaryPickleFormat): PReader =
new BinaryPickleReader(input, format)
/* Do not override def toString to avoid traversing the input stream. */
}
object BinaryPickle {
def apply(a: Array[Byte]): BinaryPickle = new BinaryPickleArray(a)
def apply(a: BinaryInput): BinaryPickle = new BinaryInputPickle(a)
def apply(a: InputStream): BinaryPickle = new BinaryInputPickle(new StreamInput(a))
def apply(a: ByteBuffer): BinaryPickle = new BinaryInputPickle(new ByteBufferInput(a))
}
class BinaryPickleBuilder(format: BinaryPickleFormat, out: BinaryOutput) extends BinaryPBuilder with PickleTools {
import format._
private var output: BinaryOutput = out
@inline private[this] def mkOutput(knownSize: Int): Unit = {
if (output == null)
output = if (knownSize != -1) new FixedByteArrayOutput(knownSize)
else new ByteArrayOutput
else
output.ensureCapacity(knownSize)
}
@inline def beginEntry(picklee: Any): PBuilder = withHints { hints =>
mkOutput(hints.knownSize)
if (picklee == null) {
output.putByte(NULL_TAG)
} else if (hints.oid != -1) {
output.putByte(REF_TAG)
output.putInt(hints.oid)
} else {
if (!hints.isElidedType) {
// quickly decide whether we should use picklee.getClass instead
val ts =
if (hints.tag.key.contains("anonfun$")) picklee.getClass.getName
else hints.tag.key
output.putString(ts)
}
// NOTE: it looks like we don't have to write object ids at all
// traversals employed by pickling and unpickling are exactly the same
// hence when unpickling it's enough to just increment the nextUnpicklee counter
// and everything will work out automatically!
hints.tag.key match { // PERF: should store typestring once in hints.
case KEY_UNIT =>
output.putByte(UNIT_TAG)
case KEY_NULL =>
output.putByte(NULL_TAG)
case KEY_BYTE =>
output.putByte(picklee.asInstanceOf[Byte])
case KEY_SHORT =>
output.putShort(picklee.asInstanceOf[Short])
case KEY_CHAR =>
output.putChar(picklee.asInstanceOf[Char])
case KEY_INT =>
output.putInt(picklee.asInstanceOf[Int])
case KEY_LONG =>
output.putLong(picklee.asInstanceOf[Long])
case KEY_BOOLEAN =>
output.putBoolean(picklee.asInstanceOf[Boolean])
case KEY_FLOAT =>
output.putFloat(picklee.asInstanceOf[Float])
case KEY_DOUBLE =>
output.putDouble(picklee.asInstanceOf[Double])
case KEY_STRING =>
output.putString(picklee.asInstanceOf[String])
case KEY_ARRAY_BYTE =>
output.putByteArray(picklee.asInstanceOf[Array[Byte]])
case KEY_ARRAY_CHAR =>
output.putCharArray(picklee.asInstanceOf[Array[Char]])
case KEY_ARRAY_SHORT =>
output.putShortArray(picklee.asInstanceOf[Array[Short]])
case KEY_ARRAY_INT =>
output.putIntArray(picklee.asInstanceOf[Array[Int]])
case KEY_ARRAY_LONG =>
output.putLongArray(picklee.asInstanceOf[Array[Long]])
case KEY_ARRAY_BOOLEAN =>
output.putBooleanArray(picklee.asInstanceOf[Array[Boolean]])
case KEY_ARRAY_FLOAT =>
output.putFloatArray(picklee.asInstanceOf[Array[Float]])
case KEY_ARRAY_DOUBLE =>
output.putDoubleArray(picklee.asInstanceOf[Array[Double]])
case _ =>
if (hints.isElidedType) output.putByte(ELIDED_TAG)
}
}
this
}
@inline def putField(name: String, pickler: PBuilder => Unit): PBuilder = {
// can skip writing name if we pickle/unpickle in the same order
pickler(this)
this
}
@inline def endEntry(): Unit = { /* do nothing */ }
@inline def beginCollection(length: Int): PBuilder = {
output.putInt(length)
this
}
@inline def putElement(pickler: PBuilder => Unit): PBuilder = {
pickler(this)
this
}
@inline def endCollection(): Unit = {
}
@inline def result() = {
BinaryPickle(output.result)
}
}
abstract class AbstractBinaryReader() {
protected var _lastTypeStringRead: String = null
// TODO - ok to hack this?
def lastTagRead: String = _lastTypeStringRead
}
class BinaryPickleReader(in: BinaryInput, format: BinaryPickleFormat) extends AbstractBinaryReader() with PReader with PickleTools {
import format._
def beginEntry: String = {
val res: Any = withHints { hints =>
if (hints.isElidedType && nullablePrimitives.contains(hints.tag.key)) {
val lookahead = in.getByte()
lookahead match {
case UNIT_TAG => FastTypeTag.Unit
case NULL_TAG => FastTypeTag.Null
case REF_TAG => FastTypeTag.Ref
case _ => in.setLookahead(lookahead); hints.tag
}
} else if (hints.isElidedType && primitives.contains(hints.tag.key)) {
hints.tag
} else {
val lookahead = in.getByte()
lookahead match {
case NULL_TAG =>
FastTypeTag.Null
case ELIDED_TAG =>
hints.tag
case REF_TAG =>
FastTypeTag.Ref
case _ =>
// do not consume lookahead byte
val res = try {
in.getStringWithLookahead(lookahead)
} catch {
case PicklingException(msg, cause) =>
val primInfo = if (hints.tag == null) ""
else s"\\nnullable prim: ${nullablePrimitives.contains(hints.tag.key)}\\nprim: ${primitives.contains(hints.tag.key)}"
throw PicklingException(s"error decoding type string. debug info: $hints$primInfo\\ncause:$msg")
}
res
}
}
}
if (res.isInstanceOf[String]) {
_lastTypeStringRead = res.asInstanceOf[String]
_lastTypeStringRead
} else {
_lastTypeStringRead = res.asInstanceOf[FastTypeTag[_]].key
_lastTypeStringRead
}
}
//def beginEntry(): FastTypeTag[_] = {
// beginEntryNoTag()
// lastTagRead
//}
def atPrimitive: Boolean = primitives.contains(lastTagRead)
def readPrimitive(): Any = {
val res = lastTagRead match {
case KEY_NULL => null
case KEY_REF => lookupUnpicklee(in.getInt)
case KEY_BYTE => in.getByte
case KEY_SHORT => in.getShort
case KEY_CHAR => in.getChar
case KEY_INT => in.getInt
case KEY_LONG => in.getLong
case KEY_BOOLEAN => in.getBoolean
case KEY_FLOAT => in.getFloat
case KEY_DOUBLE => in.getDouble
case KEY_STRING => in.getString
case KEY_ARRAY_BYTE => in.getByteArray
case KEY_ARRAY_SHORT => in.getShortArray
case KEY_ARRAY_CHAR => in.getCharArray
case KEY_ARRAY_INT => in.getIntArray
case KEY_ARRAY_LONG => in.getLongArray
case KEY_ARRAY_BOOLEAN => in.getBooleanArray
case KEY_ARRAY_FLOAT => in.getFloatArray
case KEY_ARRAY_DOUBLE => in.getDoubleArray
}
res
}
def atObject: Boolean = !atPrimitive
def readField(name: String): BinaryPickleReader =
this
def endEntry(): Unit = { /* do nothing */ }
def beginCollection(): PReader = this
def readLength(): Int = in.getInt
def readElement(): PReader = this
def endCollection(): Unit = { /* do nothing */ }
}
|
phaller/pickling
|
core/src/main/scala/scala/pickling/binary/BinaryPickle.scala
|
Scala
|
bsd-3-clause
| 8,250
|
package com.lambtors.poker_api.module.poker.application.table.find
import cats.implicits._
import com.lambtors.poker_api.module.poker.domain.PokerGameRepository
import com.lambtors.poker_api.module.poker.domain.error.PokerGameNotFound
import com.lambtors.poker_api.module.poker.domain.model.{GameId, TableCardsResponse}
import com.lambtors.poker_api.module.shared.domain.types.ThrowableTypeClasses.MonadErrorThrowable
final class TableCardsFinder[P[_]: MonadErrorThrowable](repository: PokerGameRepository[P]) {
def find(gameId: GameId): P[TableCardsResponse] =
repository
.search(gameId)
.fold[P[TableCardsResponse]](MonadErrorThrowable[P].raiseError(PokerGameNotFound(gameId)))(game =>
TableCardsResponse(game.tableCards).pure[P])
.flatten
}
|
lambtors/poker-api
|
src/main/scala/com/lambtors/poker_api/module/poker/application/table/find/TableCardsFinder.scala
|
Scala
|
mit
| 778
|
package im.actor.api.rpc
import im.actor.api.rpc.peers.{ ApiGroupOutPeer, ApiOutPeer, ApiPeer, ApiPeerType }
import im.actor.server.model.{ Peer, PeerType }
trait PeersImplicits {
implicit class ExtPeer(peer: ApiPeer) {
lazy val asModel: Peer =
Peer(PeerType.fromValue(peer.`type`.id), peer.id)
}
implicit class ExtOutPeer(outPeer: ApiOutPeer) {
lazy val asPeer: ApiPeer =
ApiPeer(outPeer.`type`, outPeer.id)
lazy val asModel: Peer =
Peer(PeerType.fromValue(outPeer.`type`.id), outPeer.id)
}
implicit class ExtGroupOutPeer(groupOutPeer: ApiGroupOutPeer) {
lazy val asOutPeer: ApiOutPeer =
ApiOutPeer(ApiPeerType.Group, groupOutPeer.groupId, groupOutPeer.accessHash)
lazy val asPeer: ApiPeer =
ApiPeer(ApiPeerType.Group, groupOutPeer.groupId)
lazy val asModel: Peer =
Peer(PeerType.Group, groupOutPeer.groupId)
}
implicit class ExtPeerModel(model: Peer) {
lazy val asStruct: ApiPeer =
ApiPeer(ApiPeerType(model.typ.value), model.id)
}
implicit class ExtPeerCompanion(companion: com.trueaccord.scalapb.GeneratedMessageCompanion[Peer]) {
def privat(userId: Int) = Peer(PeerType.Private, userId)
def group(groupId: Int) = Peer(PeerType.Group, groupId)
}
}
|
EaglesoftZJ/actor-platform
|
actor-server/actor-core/src/main/scala/im/actor/api/rpc/PeersImplicits.scala
|
Scala
|
agpl-3.0
| 1,261
|
// see http://www.ittc.ku.edu/~andygill/papers/reifyGraph.pdf
// and https://hackage.haskell.org/package/data-reify
package leibniz
import cats.Applicative
import cats.syntax.all._
|
alexknvl/leibniz
|
src/test/scala/MuRef.scala
|
Scala
|
mit
| 184
|
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.dev
import scala.collection.immutable
object PortAssigner {
private[lagom] case class ProjectName(name: String) {
def withTls = ProjectName(name + "-tls")
}
private[lagom] object ProjectName {
implicit object OrderingProjectName extends Ordering[ProjectName] {
def compare(x: ProjectName, y: ProjectName): Int = x.name.compare(y.name)
}
}
private[lagom] case class PortRange(min: Int, max: Int) {
require(min > 0, "Bottom port range must be greater than 0")
require(max < Integer.MAX_VALUE, "Upper port range must be smaller than " + Integer.MAX_VALUE)
require(min <= max, "Bottom port range must be smaller than the upper port range")
val delta: Int = max - min + 1
def includes(value: Int): Boolean = value >= min && value <= max
}
private[lagom] object Port {
final val Unassigned = Port(-1)
}
private[lagom] case class Port(value: Int) extends AnyVal {
def next: Port = Port(value + 1)
}
def computeProjectsPort(
range: PortRange,
projectNames: Seq[ProjectName],
enableSsl: Boolean
): Map[ProjectName, Port] = {
val lagomProjects = projectNames.to[immutable.SortedSet]
val projects =
// duplicate the project list by adding the tls variant
if (enableSsl) lagomProjects.flatMap { plainName =>
Seq(plainName, plainName.withTls)
} else lagomProjects
val doubleMessage =
if (enableSsl) "The number of ports available must be at least twice the number of projects."
else ""
require(
projects.size <= range.delta,
s"""A larger port range is needed, as you have ${lagomProjects.size} Lagom projects and only ${range.delta}
|ports available. $doubleMessage
|You should increase the range passed for the lagomPortRange build setting.
""".stripMargin
)
@annotation.tailrec
def findFirstAvailablePort(port: Port, unavailable: Set[Port]): Port = {
// wrap around to the bottom of the range once the port number goes past the range's upper limit
if (!range.includes(port.value)) findFirstAvailablePort(Port(range.min), unavailable)
else if (unavailable(port)) findFirstAvailablePort(port.next, unavailable)
else port
}
@annotation.tailrec
def assignProjectPort(
projectNames: Seq[ProjectName],
assignedPort: Set[Port],
unassigned: Vector[ProjectName],
result: Map[ProjectName, Port]
): Map[ProjectName, Port] =
projectNames match {
case Nil if unassigned.nonEmpty =>
// if we get here, there are projects with colliding hashes that still need their ports assigned. As expected, this step
// is carried out only after a port has been assigned to every non-colliding project.
val proj = unassigned.head
val projectedPort = projectedPortFor(proj)
val port = findFirstAvailablePort(projectedPort, assignedPort)
assignProjectPort(projectNames, assignedPort + port, unassigned.tail, result + (proj -> port))
case Nil => result
case proj +: rest =>
val projectedPort = projectedPortFor(proj)
if (assignedPort(projectedPort)) assignProjectPort(rest, assignedPort, unassigned :+ proj, result)
else assignProjectPort(rest, assignedPort + projectedPort, unassigned, result + (proj -> projectedPort))
}
def projectedPortFor(name: ProjectName): Port = {
val hash = Math.abs(name.hashCode())
val portDelta = hash % range.delta
Port(range.min + portDelta)
}
assignProjectPort(projects.toSeq, Set.empty[Port], Vector.empty[ProjectName], Map.empty[ProjectName, Port])
}
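// Illustrative sketch, not part of the original source: a hypothetical call to
// computeProjectsPort. Each project gets a port derived from the hash of its name,
// and collisions are resolved by probing for the next free port in the range.
private[lagom] def exampleAssignment: Map[ProjectName, Port] =
computeProjectsPort(PortRange(49000, 49199), Seq(ProjectName("users"), ProjectName("orders")), enableSsl = true)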
}
|
rcavalcanti/lagom
|
dev/build-tool-support/src/main/scala/com/lightbend/lagom/dev/PortAssigner.scala
|
Scala
|
apache-2.0
| 3,788
|
package org.scaladebugger.api.pipelines
import org.scaladebugger.test.helpers.ParallelMockFunSpec
class FilterOperationSpec extends ParallelMockFunSpec {
describe("FilterOperation") {
describe("#process") {
it("should filter data to include only predicates resolving to true") {
val expected = Seq(2, 4)
val data = Seq(1, 2, 3, 4, 5)
val operation = new FilterOperation[Int](_ % 2 == 0)
val actual = operation.process(data)
actual should be (expected)
}
}
}
}
|
ensime/scala-debugger
|
scala-debugger-api/src/test/scala/org/scaladebugger/api/pipelines/FilterOperationSpec.scala
|
Scala
|
apache-2.0
| 528
|
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.elastic6
import com.datamountaineer.streamreactor.connect.elastic6.config.{ElasticConfig, ElasticConfigConstants, ElasticSettings}
import com.sksamuel.elastic4s.http.ElasticNodeEndpoint
import scala.util.{Failure, Success, Try}
object ElasticWriter {
/**
* Construct an ElasticJsonWriter.
*
* @param config An ElasticConfig to extract settings from.
* @return An ElasticJsonWriter to write records from Kafka to ElasticSearch.
**/
def apply(config: ElasticConfig): ElasticJsonWriter = {
val hostNames = config.getString(ElasticConfigConstants.HOSTS).split(",")
val protocol = config.getString(ElasticConfigConstants.PROTOCOL)
val port = config.getInt(ElasticConfigConstants.ES_PORT)
val prefix = Try(config.getString(ElasticConfigConstants.ES_PREFIX)) match {
case Success("") => None
case Success(configString) => Some(configString)
case Failure(_) => None
}
val settings = ElasticSettings(config)
new ElasticJsonWriter(
KElasticClient.createHttpClient(settings, endpoints(hostNames, protocol, port, prefix)),
settings
)
}
private def endpoints(hostNames: Array[String], protocol: String, port: Integer, prefix: Option[String]) = {
hostNames
.map(hostname => ElasticNodeEndpoint(protocol, hostname, port, prefix))
}
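// Illustrative sketch, not part of the original source: hypothetical host names and port
// showing how endpoints() expands a comma-separated host list into one endpoint per host.
def exampleEndpoints: Seq[ElasticNodeEndpoint] =
endpoints(Array("es-node-1", "es-node-2"), "https", 9200, prefix = None).toSeq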
}
|
datamountaineer/stream-reactor
|
kafka-connect-elastic6/src/main/scala/com/datamountaineer/streamreactor/connect/elastic6/ElasticWriter.scala
|
Scala
|
apache-2.0
| 1,973
|
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.apache.org/licenses/LICENSE-2.0
package org.ensime.api
import java.io.File
trait EnsimeTestData {
// duplicating utils to minimise dependencies
private def canon(s: String): RawFile = {
val file = new File(s)
val canonised = try file.getCanonicalFile
catch {
case t: Throwable => file.getAbsoluteFile
}
RawFile(canonised.toPath)
}
val typeInfo = BasicTypeInfo("type1", DeclaredAs.Method, "FOO.type1", Nil, Nil, None, Nil)
val interfaceInfo = new InterfaceInfo(typeInfo, Some("DEF"))
val typeInspectInfo = new TypeInspectInfo(typeInfo, List(interfaceInfo))
val paramSectionInfo = new ParamSectionInfo(List(("ABC", typeInfo)), false)
val symFile = canon("/abc")
val symbolDesignations = SymbolDesignations(
symFile,
List(
SymbolDesignation(7, 9, ObjectSymbol),
SymbolDesignation(11, 22, TraitSymbol)
)
)
val symbolInfo = new SymbolInfo("name", "localName", None, typeInfo)
val implicitInfos = List(
ImplicitConversionInfo(5, 6, symbolInfo),
ImplicitParamInfo(7, 8, symbolInfo, List(symbolInfo, symbolInfo), true)
)
val batchSourceFile = "/abc"
val rangePos1 = new ERangePosition(batchSourceFile, 75, 70, 90)
val rangePos2 = new ERangePosition(batchSourceFile, 85, 80, 100)
val packageInfo = new PackageInfo("name", "fullName", Nil)
val refactorFailure = RefactorFailure(7, "message")
val file1 = canon("/abc/def")
val file2 = canon("/test/test/")
val file3 = canon("/foo/abc")
val file4 = canon("/foo/def")
val file5 = canon("/foo/hij")
val refactorDiffEffect = new RefactorDiffEffect(9, RefactorType.AddImport, file2.file.toFile)
val sourcePos1 = new LineSourcePosition(file1, 57)
val sourcePos2 = new LineSourcePosition(file1, 59)
val sourcePos3 = new EmptySourcePosition()
val sourcePos4 = new OffsetSourcePosition(file1, 456)
val breakPoint1 = new Breakpoint(RawFile(file1.file), sourcePos1.line)
val breakPoint2 = new Breakpoint(RawFile(file1.file), sourcePos2.line)
val breakpointList = BreakpointList(List(breakPoint1), List(breakPoint2))
val debugStackLocal1 = DebugStackLocal(3, "name1", "summary1", "type1")
val debugStackLocal2 = DebugStackLocal(4, "name2", "summary2", "type2")
val debugStackFrame = DebugStackFrame(7, List(debugStackLocal1, debugStackLocal2), 4, "class1", "method1", sourcePos1, DebugObjectId(7))
val debugBacktrace = DebugBacktrace(List(debugStackFrame), DebugThreadId(17), "thread1")
val analyzerFile = canon("Analyzer.scala")
val fooFile = canon("Foo.scala")
val abd = canon("/abd")
val methodSearchRes = MethodSearchResult("abc", "a", DeclaredAs.Method, Some(LineSourcePosition(abd, 10)), "ownerStr")
val typeSearchRes = TypeSearchResult("abc", "a", DeclaredAs.Trait, Some(LineSourcePosition(abd, 10)))
val importSuggestions = new ImportSuggestions(List(List(methodSearchRes, typeSearchRes)))
val symbolSearchResults = new SymbolSearchResults(List(methodSearchRes, typeSearchRes))
val completionInfoCList = CompletionInfoList("fooBar", List(completionInfo))
val fileRange = FileRange("/abc", 7, 9)
val debugLocObjectRef: DebugLocation = DebugObjectReference(57L)
val debugNullValue = DebugNullValue("typeNameStr")
val debugArrayInstValue = DebugArrayInstance(3, "typeName", "elementType", DebugObjectId(5L))
val debugPrimitiveValue = DebugPrimitiveValue("summaryStr", "typeNameStr")
val debugClassField = DebugClassField(19, "nameStr", "typeNameStr", "summaryStr")
val debugStringValue = DebugStringInstance("summaryStr", List(debugClassField), "typeNameStr", DebugObjectId(6L))
val note1 = new Note("file1", "note1", NoteError, 23, 33, 19, 8)
val note2 = new Note("file1", "note2", NoteWarn, 23, 33, 19, 8)
val noteList = NewScalaNotesEvent(isFull = true, List(note1, note2))
val entityInfo: TypeInfo = new ArrowTypeInfo("Arrow1", "example.Arrow1", typeInfo, List(paramSectionInfo), Nil)
val typeParamA = BasicTypeInfo("A", DeclaredAs.Nil, "example.Arrow1.A", Nil, Nil, None, Nil)
val typeParamB = BasicTypeInfo("B", DeclaredAs.Nil, "example.Arrow1.B", Nil, Nil, None, Nil)
val entityInfoTypeParams: TypeInfo = new ArrowTypeInfo("Arrow1", "example.Arrow1", typeInfo, List(paramSectionInfo), List(typeParamA, typeParamB))
val completionInfo = CompletionInfo(Some(typeInfo), "name", 90, Some("BAZ"))
val completionInfo2 = CompletionInfo(None, "nam", 91, None, true)
val completionInfoList = List(completionInfo, completionInfo2)
val sourceFileInfo = SourceFileInfo(file1, Some("{/* code here */}"), Some(file2.file.toFile))
val sourceFileInfo2 = SourceFileInfo(file1)
val dtid = DebugThreadId(13)
val debugLocationArray = DebugArrayElement(DebugObjectId(13), 14)
val structureView = StructureView(List(
StructureViewMember(
keyword = "class",
name = "StructureView",
position = sourcePos1,
members = Nil
),
StructureViewMember(
keyword = "object",
name = "StructureView",
position = sourcePos2,
members = List(
StructureViewMember(
keyword = "type",
name = "BasicType",
position = sourcePos4,
members = Nil
)
)
)
))
val astInfo = AstInfo(
"List(Apply(Select(Literal(Constant(1)), TermName(\\"$plus\\")), List(Literal(Constant(1)))))"
)
}
|
pascr/ensime-server
|
api/src/test/scala/org/ensime/api/EnsimeTestData.scala
|
Scala
|
gpl-3.0
| 5,432
|
package fpinscala.exercises
import fpinscala.exercises.Exercise4.uncurry
/**
* See [[fpinscala.gettingstarted.MyModule]]
*/
object Exercise4 {
// Exercise 4: Implement `uncurry`
def uncurry[A, B, C](f: A => B => C): (A, B) => C = {
(a, b) => f(a)(b)
}
/*
NB: There is a method on the `Function` object in the standard library,
`Function.uncurried` that you can use for uncurrying.
Note that we can go back and forth between the two forms. We can curry
and uncurry and the two forms are in some sense "the same". In FP jargon,
we say that they are _isomorphic_ ("iso" = same; "morphe" = shape, form),
a term we inherit from category theory.
*/
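// Illustrative sketch, not part of the original exercise: `curry` is the inverse of
// `uncurry`, which makes the isomorphism described above concrete. (A Function2 value
// also exposes this as `f.curried` in the standard library.)
def curry[A, B, C](f: (A, B) => C): A => B => C = a => b => f(a, b)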
}
object Exercise4Test extends App {
val add: String => String => String = a => b => a + b
assert(add("A")("B") == "AB")
val uncurried: (String, String) => String = uncurry(add)
assert(uncurried("A", "B") == "AB")
}
|
tobyweston/fpinscala
|
exercises/src/main/scala/fpinscala/exercises/Exercise4.scala
|
Scala
|
mit
| 907
|
package scala.macros.internal
package trees
import scala.macros.internal.prettyprinters.EOL
object Errors {
final val QuasiquotesRequireCompilerSupport =
"new-style quasiquotes require " +
"""addCompilerPlugin("org.scalamacros" %% "scalac-plugin" % "..." cross CrossVersion.full);""" +
" consult http://scalamacros.org for more information."
def QuasiquoteRankMismatch(found: Int, required: Int, hint: String = ""): String = {
val s_found = "." * (found + 1) + "$"
val s_required = 0.to(required + 1).filter(_ != 1).map(i => "." * i + "$").mkString(" or ")
var message = s"rank mismatch when unquoting;$EOL found : $s_found$EOL required: $s_required"
if (hint.nonEmpty) message = message + EOL + hint
message
}
def QuasiquoteAdjacentEllipsesInPattern(rank: Int): String = {
val hint = {
"Note that you can extract a list into an unquote when pattern matching," + EOL +
"it just cannot follow another list either directly or indirectly."
}
QuasiquoteRankMismatch(rank, rank - 1, hint)
}
def QuasiquoteTripleDotImplementationRestriction: String = {
"implementation restriction: can't mix ...$ with anything else in parameter lists." + EOL +
"See https://github.com/scalameta/scalameta/issues/406 for details."
}
}
|
xeno-by/scalamacros
|
core/src/main/scala/scala/macros/internal/trees/Errors.scala
|
Scala
|
bsd-3-clause
| 1,305
|
/*
* Copyright (c) 2012-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package common
package utils
import scala.util.control.NonFatal
// Scalaz
import scalaz._
import Scalaz._
// Scalaj
import scalaj.http._
object HttpClient {
/**
* Blocking method to get body of HTTP response
*
* @param request assembled request object
* @return validated body of HTTP request
*/
def getBody(request: HttpRequest): Validation[Throwable, String] = {
try {
val res = request.asString
if (res.isSuccess) res.body.success
else new Exception(s"Request failed with status ${res.code} and body ${res.body}").failure
} catch {
case NonFatal(e) => e.failure
}
}
/**
* Build HTTP request object
*
* @param uri full URI to request
* @param authUser optional username for basic auth
* @param authPassword optional password for basic auth
* @param method HTTP method
* @return HTTP request
*/
def buildRequest(
uri: String,
authUser: Option[String],
authPassword: Option[String],
method: String = "GET"
): HttpRequest = {
val req = Http(uri).method(method)
if (authUser.isDefined || authPassword.isDefined) {
req.auth(authUser.getOrElse(""), authPassword.getOrElse(""))
} else {
req
}
}
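/**
* Illustrative sketch, not part of the original source: a hypothetical helper that
* combines buildRequest and getBody into a single blocking GET.
*
* @param uri full URI to request
* @return validated body of the HTTP response
*/
def exampleFetch(uri: String, authUser: Option[String] = None, authPassword: Option[String] = None): Validation[Throwable, String] =
getBody(buildRequest(uri, authUser, authPassword))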
}
|
sspinc/snowplow
|
3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/HttpClient.scala
|
Scala
|
apache-2.0
| 1,981
|
package org.jetbrains.plugins.scala.codeInspection.monads
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.codeInspection.AbstractInspection
import org.jetbrains.plugins.scala.codeInspection.monads.NestedStatefulMonadsInspection._
import org.jetbrains.plugins.scala.codeInspection.monads.StatefulMonads._
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScMethodCall
import org.jetbrains.plugins.scala.lang.psi.types.api.ParameterizedType
/**
* @author Sergey Tolmachev (tolsi.ru@gmail.com)
* @since 29.09.15
*/
object NestedStatefulMonadsInspection {
private[monads] final val Annotation = "Nested stateful monads"
}
final class NestedStatefulMonadsInspection extends AbstractInspection(Annotation) {
override def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Unit] = {
case call: ScMethodCall =>
val project = call.getProject
call.getType().getOrAny match {
case outer@ParameterizedType(_, typeArgs)
if isStatefulMonadType(outer, project) && typeArgs.exists(isStatefulMonadType(_, project)) =>
holder.registerProblem(call, Annotation)
case _ =>
}
}
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/monads/NestedStatefulMonadsInspection.scala
|
Scala
|
apache-2.0
| 1,214
|
package kiambogo.scrava.models
/**
* Created by christopher on 14-10-23.
*/
case class Photo(
id: Int,
activity_id: Int,
resource_state: Int,
ref: String,
uid: String,
caption: String,
`type`: String,
uploaded_at: String,
created_at: String,
location: Option[List[Float]])
|
kiambogo/scrava
|
src/main/scala/kiambogo/scrava/models/Photo.scala
|
Scala
|
mit
| 275
|
package mesosphere.marathon.io.storage
import java.io._
import mesosphere.marathon.io.IO
/**
* The local file system implementation.
*
* @param file the underlying file
* @param path the relative path, this item is identified with.
*/
case class FileStorageItem(file: File, basePath: File, path: String, baseUrl: String) extends StorageItem {
def store(fn: OutputStream => Unit): FileStorageItem = {
IO.createDirectory(file.getParentFile)
IO.using(new FileOutputStream(file)) { fn }
this
}
def moveTo(path: String): FileStorageItem = {
val to = new File(basePath, path)
IO.moveFile(file, to)
cleanUpDir(file.getParentFile)
FileStorageItem(to, basePath, path, url)
}
def url: String = s"$baseUrl/$path"
def inputStream(): InputStream = new FileInputStream(file)
def lastModified: Long = file.lastModified()
def length: Long = file.length()
def exists: Boolean = file.exists()
def delete() {
file.delete()
cleanUpDir(file.getParentFile)
}
private def cleanUpDir(dir: File) {
if (!dir.isFile && dir != basePath && dir.list().isEmpty) {
dir.delete()
cleanUpDir(dir.getParentFile)
}
}
}
/**
* The local file system storage implementation.
*
* @param basePath the base path to the managed asset directory
*/
class FileStorageProvider(val url: String, val basePath: File) extends StorageProvider {
require(basePath.exists(), "Base path does not exist: %s. Configuration error?".format(basePath.getAbsolutePath))
def item(path: String): FileStorageItem = {
val file: File = new File(basePath, path)
// make sure no file outside the base path is created
if (!file.getCanonicalPath.startsWith(basePath.getCanonicalPath)) throw new IOException("Access Denied")
new FileStorageItem(file, basePath, path, url)
}
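// Illustrative sketch, not part of the original source: a hypothetical helper that stores
// raw bytes under a relative path by combining item() with store().
def storeBytes(path: String, bytes: Array[Byte]): FileStorageItem =
item(path).store(_.write(bytes))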
}
|
EasonYi/marathon
|
src/main/scala/mesosphere/marathon/io/storage/FileStorageProvider.scala
|
Scala
|
apache-2.0
| 1,834
|
package de.htwg.zeta.persistence.fixtures
import java.util.UUID
import de.htwg.zeta.common.models.entity.EventDrivenTask
object EventDrivenTaskFixtures {
val entity1 = EventDrivenTask(
id = UUID.randomUUID,
name = "eventDrivenTask1",
generatorId = UUID.randomUUID,
filterId = UUID.randomUUID,
event = "event1"
)
val entity2 = EventDrivenTask(
id = UUID.randomUUID,
name = "eventDrivenTask2",
generatorId = UUID.randomUUID,
filterId = UUID.randomUUID,
event = "event2"
)
val entity2Updated: EventDrivenTask = entity2.copy(filterId = UUID.randomUUID)
val entity3 = EventDrivenTask(
id = UUID.randomUUID,
name = "eventDrivenTask3",
generatorId = UUID.randomUUID,
filterId = UUID.randomUUID,
event = "event3"
)
}
|
Zeta-Project/zeta
|
api/persistence/src/test/scala/de/htwg/zeta/persistence/fixtures/EventDrivenTaskFixtures.scala
|
Scala
|
bsd-2-clause
| 792
|
package blended.testsupport
import java.util.concurrent.atomic.AtomicInteger
import akka.actor.ActorSystem
import akka.testkit.TestKit
import scala.concurrent.duration._
import scala.concurrent.Await
object TestActorSys {
val uniqueId = new AtomicInteger(0)
def apply(f : TestKit => Unit) = new TestActorSys("TestActorSys%05d".format(uniqueId.incrementAndGet()), f)
}
class TestActorSys(name : String, f : TestKit => Unit)
extends TestKit(ActorSystem(name)) {
try {
system.log.info("Start TestKit[{}]", system.name)
f(this)
}
finally {
system.log.info("Shutting down TestKit[{}]", system.name)
Await.result(system.terminate(), 10.seconds)
}
}
|
lefou/blended
|
blended.testsupport/src/main/scala/blended/testsupport/TestActorSys.scala
|
Scala
|
apache-2.0
| 680
|
/*
* BaseMacros.scala
*
* Copyright (c) 2013 Lonnie Pryor III
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fulcrum.code
import language.implicitConversions
import org.scalatest.matchers.{
BeMatcher,
MatchResult
}
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.matchers.Matcher
import java.io.{
ByteArrayInputStream,
ByteArrayOutputStream,
ObjectInputStream,
ObjectOutputStream
}
import sun.misc.{
BASE64Encoder,
BASE64Decoder
}
/**
* Base trait for testing macro bundles.
*/
trait BaseMacros extends Code with ShouldMatchers { self: Macro =>
import c.universe._
/** Support for extra operators on types. */
implicit def typeToTypeOptionOps(tpe: Type): TypeOptionOps = new TypeOptionOps(Option(tpe))
/** Support for extra operators on type options. */
implicit def typeOptionToTypeOptionOps(tpe: Option[Type]): TypeOptionOps = new TypeOptionOps(tpe)
/** Alias for using the "=:=" type matcher. */
val =:= = TypeMatching
/** Alias for using the "equalsStructure" tree matcher. */
val equalStructure = TreeMatching
/** Returns an expression that throws the exception thrown by `tests` or a literal unit if no exception is thrown. */
def test(tests: => Unit): Expr[Unit] = {
val result = c.literalUnit
try {
tests
result
} catch {
case e: Exception =>
val bytes = new ByteArrayOutputStream
val out = new ObjectOutputStream(bytes)
out.writeObject(e)
out.flush()
val encoded = c.literal(new BASE64Encoder().encode(bytes.toByteArray))
reify(throw new ObjectInputStream(new ByteArrayInputStream(new BASE64Decoder().decodeBuffer(encoded.splice)))
.readObject().asInstanceOf[Exception])
}
}
/** Adds matcher factory methods to type options. */
final class TypeOptionOps(val tpe: Option[Type]) {
/** Creates a matcher for the underlying type and specified name. */
def ::(string: String): BeMatcher[$id[Any]] = new BeMatcher[$id[Any]] {
override def apply(e: $id[Any]) = {
val stringsMatch = StringMatching.check(e.name.decoded, string)
val typesMatch = TypeMatching.check(e.tpe, tpe)
MatchResult(
stringsMatch && typesMatch,
if (stringsMatch)
TypeMatching.doesNotMatchMsg(e.tpe, tpe)
else if (typesMatch)
StringMatching.doesNotMatchMsg(e.name.decoded, string)
else
StringMatching.doesNotMatchMsg(e.name.decoded, string) + ", " + TypeMatching.doesNotMatchMsg(e.tpe, tpe),
if (!stringsMatch)
TypeMatching.matchesMsg(e.tpe, tpe)
else if (!typesMatch)
StringMatching.matchesMsg(e.name.decoded, string)
else
StringMatching.matchesMsg(e.name.decoded, string) + ", " + TypeMatching.matchesMsg(e.tpe, tpe)
)
}
}
/** Creates a matcher for the underlying type and specified name. */
def ::(name: Name): BeMatcher[$id[Any]] = new BeMatcher[$id[Any]] {
override def apply(e: $id[Any]) = {
val namesMatch = NameMatching.check(e.name, name)
val typesMatch = TypeMatching.check(e.tpe, tpe)
MatchResult(
namesMatch && typesMatch,
if (namesMatch)
TypeMatching.doesNotMatchMsg(e.tpe, tpe)
else if (typesMatch)
NameMatching.doesNotMatchMsg(e.name, name)
else
NameMatching.doesNotMatchMsg(e.name, name) + ", " + TypeMatching.doesNotMatchMsg(e.tpe, tpe),
if (!namesMatch)
TypeMatching.matchesMsg(e.tpe, tpe)
else if (!typesMatch)
NameMatching.matchesMsg(e.name, name)
else
NameMatching.matchesMsg(e.name, name) + ", " + TypeMatching.matchesMsg(e.tpe, tpe)
)
}
}
/** Creates a matcher for the underlying type and specified tree. */
def ::(tree: Tree): BeMatcher[$expr[Any]] = new BeMatcher[$expr[Any]] {
override def apply(e: $expr[Any]) = {
val treesMatch = TreeMatching.check(e.tree, tree)
val typesMatch = TypeMatching.check(e.tpe, tpe)
MatchResult(
treesMatch && typesMatch,
if (treesMatch)
TypeMatching.doesNotMatchMsg(e.tpe, tpe)
else if (typesMatch)
TreeMatching.doesNotMatchMsg(e.tree, tree)
else
TreeMatching.doesNotMatchMsg(e.tree, tree) + ", " + TypeMatching.doesNotMatchMsg(e.tpe, tpe),
if (!treesMatch)
TypeMatching.matchesMsg(e.tpe, tpe)
else if (!typesMatch)
TreeMatching.matchesMsg(e.tree, tree)
else
TreeMatching.matchesMsg(e.tree, tree) + ", " + TypeMatching.matchesMsg(e.tpe, tpe)
)
}
}
/** Creates a matcher for the underlying type and specified expression's tree. */
def ::(expr: Expr[Any]): BeMatcher[$expr[Any]] = expr.tree :: this
}
/**
* Base trait for matcher factories.
*/
trait Matching[T] {
/** The string to prefix messages with. */
def msgPrefix: String = ""
/** Returns a matcher that implements this instance's matching logic. */
def apply(expected: T): Matcher[T] = new Matcher[T] {
def apply(actual: T): MatchResult = MatchResult(
check(actual, expected),
doesNotMatchMsg(actual, expected),
matchesMsg(actual, expected)
)
}
/** Returns true if the specified items match. */
def check(left: T, right: T): Boolean
/** Returns the message that describes a match. */
def matchesMsg(left: T, right: T): String =
Matching.genericMatchesMsg(msgPrefix, left, right)
/** Returns the message that describes a failure to match. */
def doesNotMatchMsg(left: T, right: T): String =
Matching.genericDoesNotMatchMsg(msgPrefix, left, right)
}
object Matching {
/** Returns the message that describes a match. */
def genericMatchesMsg(msgPrefix: String, left: Any, right: Any): String =
"%s \\"%s\\" matches \\"%s\\"" format (msgPrefix, left, right)
/** Returns the message that describes a failure to match. */
def genericDoesNotMatchMsg(msgPrefix: String, left: Any, right: Any): String =
"%s \\"%s\\" does not match \\"%s\\"" format (msgPrefix, left, right)
}
/**
* Base trait for matcher factories.
*/
trait OptionMatching[T] extends Matching[T] {
/** Returns a matcher that implements this instance's matching logic with optional values. */
def apply(expected: Option[T]): Matcher[Option[T]] = new Matcher[Option[T]] {
def apply(actual: Option[T]): MatchResult = MatchResult(
check(actual, expected),
doesNotMatchMsg(actual, expected),
matchesMsg(actual, expected)
)
}
/** Returns true if the specified optional items match. */
def check(left: Option[T], right: Option[T]): Boolean =
left.isEmpty == right.isEmpty && left.forall(check(_, right.get))
/** Returns the message that describes a match of optional values. */
def matchesMsg(left: Option[T], right: Option[T]): String =
Matching.genericMatchesMsg(msgPrefix, left, right)
/** Returns the message that describes a failure to match optional values. */
def doesNotMatchMsg(left: Option[T], right: Option[T]): String =
Matching.genericDoesNotMatchMsg(msgPrefix, left, right)
}
/**
* Utility for matching equivalent base names.
*/
object StringMatching extends Matching[String] {
/** @inheritdoc */
override def msgPrefix = "Base name"
/** @inheritdoc */
override def check(left: String, right: String) = left != right && left.contains(right)
}
/**
* Utility for matching equivalent names.
*/
object NameMatching extends Matching[Name] {
/** @inheritdoc */
override def msgPrefix = "Name"
/** @inheritdoc */
override def check(left: Name, right: Name) = left == right
}
/**
* Utility for matching equivalent types and type options.
*/
object TypeMatching extends OptionMatching[Type] {
/** @inheritdoc */
override def msgPrefix = "Type"
/** @inheritdoc */
override def check(left: Type, right: Type) = left =:= right
}
/**
* Utility for matching structurally-equivalent trees.
*/
object TreeMatching extends Matching[Tree] {
/** @inheritdoc */
override def msgPrefix = "Tree"
/** @inheritdoc */
override def check(left: Tree, right: Tree) = left equalsStructure right
}
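/*
 * Illustrative usage sketch (hypothetical values, for exposition only): each factory above
 * yields a ScalaTest Matcher, so with Matchers mixed in one could write, e.g.
 *
 *   TypeName("Foo") should NameMatching(TypeName("Foo"))   // exact name equality
 *   "payload$macro$1" should StringMatching("payload")     // generated name contains its base name
 */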
}
|
lpryor/fulcrum-spike
|
code/src/test/scala/fulcrum/code/BaseMacros.scala
|
Scala
|
apache-2.0
| 8,997
|
/* ___ _ ___ _ _ *\\
** / __| |/ (_) | | The SKilL Generator **
** \\__ \\ ' <| | | |__ (c) 2013 University of Stuttgart **
** |___/_|\\_\\_|_|____| see LICENSE **
\\* */
package de.ust.skill.generator.scala.internal
import java.io.PrintWriter
import de.ust.skill.generator.scala.GeneralOutputMaker
trait FieldDeclarationMaker extends GeneralOutputMaker {
abstract override def make {
super.make
val out = open("internal/FieldDeclaration.scala")
//package
out.write(s"""package ${packagePrefix}internal
import scala.collection.mutable.ListBuffer
/**
* Chunks contain information on where some field data can be found.
*
* @param begin position of the first byte of the first instance's data
* @param end position of the last byte, i.e. the first byte that is not read
* @param bpsi the index of the first instance
* @param count the number of instances in this chunk
*
* @note the indices of the recipients of the field data are not necessarily contiguous; make use of staticInstances!
* @note begin and end are vars, because they will contain relative offsets while parsing a type block
*
* @author Timm Felden
*/
sealed abstract class ChunkInfo(var begin : Long, var end : Long, val count : Int);
final class SimpleChunkInfo(begin : Long, end : Long, val bpsi : Int, count : Int) extends ChunkInfo(begin, end, count);
final class BulkChunkInfo(begin : Long, end : Long, count : Int) extends ChunkInfo(begin, end, count);
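// Illustrative example (hypothetical values): a chunk holding the field data of 8 instances,
// starting at instance index 0 and occupying bytes [128, 192), would be represented as
// new SimpleChunkInfo(128, 192, 0, 8).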
/**
* Blocks contain information about the type of an index range.
*
* @param bpsi the index of the first instance
* @param count the number of instances in this chunk
* @author Timm Felden
*/
case class BlockInfo(val bpsi : Int, val count : Int);
/**
* A field declaration, as it occurs during parsing of a type block's header.
*
* @author Timm Felden
* @param t the actual type of the field; can be an intermediate type, while parsing a block
* @param name the name of the field
* @param index the index of this field, starting from 0; required for append operations
*/
class FieldDeclaration(var t : FieldType, val name : String, val index : Int) {
/**
* Data chunk information, as it is required for later parsing.
*/
val dataChunks = ListBuffer[ChunkInfo]();
override def toString = t.toString+" "+name
override def equals(obj : Any) = obj match {
case f : FieldDeclaration ⇒ name == f.name && t == f.t
case _ ⇒ false
}
override def hashCode = name.hashCode ^ t.hashCode
}
""")
//class prefix
out.close()
}
}
|
XyzNobody/skill
|
src/main/scala/de/ust/skill/generator/scala/internal/FieldDeclarationMaker.scala
|
Scala
|
bsd-3-clause
| 2,774
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.master
import java.net.ServerSocket
import org.apache.commons.lang3.RandomUtils
import org.apache.curator.test.TestingServer
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.rpc.{RpcEndpoint, RpcEnv}
import org.apache.spark.serializer.{Serializer, JavaSerializer}
import org.apache.spark.util.Utils
class PersistenceEngineSuite extends SparkFunSuite {
test("FileSystemPersistenceEngine") {
val dir = Utils.createTempDir()
try {
val conf = new SparkConf()
testPersistenceEngine(conf, serializer =>
new FileSystemPersistenceEngine(dir.getAbsolutePath, serializer)
)
} finally {
Utils.deleteRecursively(dir)
}
}
test("ZooKeeperPersistenceEngine") {
val conf = new SparkConf()
// TestingServer logs the port conflict exception rather than throwing an exception.
// So we have to find a free port by ourselves. This approach cannot guarantee always starting
// zkTestServer successfully because there is a time gap between finding a free port and
// starting zkTestServer. But the probability of failure should be very low.
val zkTestServer = new TestingServer(findFreePort(conf))
try {
testPersistenceEngine(conf, serializer => {
conf.set("spark.deploy.zookeeper.url", zkTestServer.getConnectString)
new ZooKeeperPersistenceEngine(conf, serializer)
})
} finally {
zkTestServer.stop()
}
}
private def testPersistenceEngine(
conf: SparkConf, persistenceEngineCreator: Serializer => PersistenceEngine): Unit = {
val serializer = new JavaSerializer(conf)
val persistenceEngine = persistenceEngineCreator(serializer)
persistenceEngine.persist("test_1", "test_1_value")
assert(Seq("test_1_value") === persistenceEngine.read[String]("test_"))
persistenceEngine.persist("test_2", "test_2_value")
assert(Set("test_1_value", "test_2_value") === persistenceEngine.read[String]("test_").toSet)
persistenceEngine.unpersist("test_1")
assert(Seq("test_2_value") === persistenceEngine.read[String]("test_"))
persistenceEngine.unpersist("test_2")
assert(persistenceEngine.read[String]("test_").isEmpty)
// Test deserializing objects that contain RpcEndpointRef
val testRpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
try {
// Create a real endpoint so that we can test RpcEndpointRef deserialization
val workerEndpoint = testRpcEnv.setupEndpoint("worker", new RpcEndpoint {
override val rpcEnv: RpcEnv = testRpcEnv
})
val workerToPersist = new WorkerInfo(
id = "test_worker",
host = "127.0.0.1",
port = 10000,
cores = 0,
memory = 0,
endpoint = workerEndpoint,
webUiPort = 0,
publicAddress = ""
)
persistenceEngine.addWorker(workerToPersist)
val (storedApps, storedDrivers, storedWorkers) =
persistenceEngine.readPersistedData(testRpcEnv)
assert(storedApps.isEmpty)
assert(storedDrivers.isEmpty)
// Check deserializing WorkerInfo
assert(storedWorkers.size == 1)
val recoveryWorkerInfo = storedWorkers.head
assert(workerToPersist.id === recoveryWorkerInfo.id)
assert(workerToPersist.host === recoveryWorkerInfo.host)
assert(workerToPersist.port === recoveryWorkerInfo.port)
assert(workerToPersist.cores === recoveryWorkerInfo.cores)
assert(workerToPersist.memory === recoveryWorkerInfo.memory)
assert(workerToPersist.endpoint === recoveryWorkerInfo.endpoint)
assert(workerToPersist.webUiPort === recoveryWorkerInfo.webUiPort)
assert(workerToPersist.publicAddress === recoveryWorkerInfo.publicAddress)
} finally {
testRpcEnv.shutdown()
testRpcEnv.awaitTermination()
}
}
private def findFreePort(conf: SparkConf): Int = {
val candidatePort = RandomUtils.nextInt(1024, 65536)
Utils.startServiceOnPort(candidatePort, (trialPort: Int) => {
val socket = new ServerSocket(trialPort)
socket.close()
(null, trialPort)
}, conf)._2
}
}
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
|
Scala
|
apache-2.0
| 4,969
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.network.TransportContext
import org.apache.spark.network.netty.SparkTransportConf
import org.apache.spark.network.server.TransportServer
import org.apache.spark.network.shuffle.{ExternalShuffleBlockHandler, ExternalShuffleClient}
/**
* This suite creates an external shuffle server and routes all shuffle fetches through it.
* Note that failures in this suite may arise due to changes in Spark that invalidate expectations
* set up in [[ExternalShuffleBlockHandler]], such as changing the format of shuffle files or how
* we hash files into folders.
*/
class ExternalShuffleServiceSuite extends ShuffleSuite with BeforeAndAfterAll {
var server: TransportServer = _
var rpcHandler: ExternalShuffleBlockHandler = _
override def beforeAll() {
val transportConf = SparkTransportConf.fromSparkConf(conf, numUsableCores = 2)
rpcHandler = new ExternalShuffleBlockHandler(transportConf)
val transportContext = new TransportContext(transportConf, rpcHandler)
server = transportContext.createServer()
conf.set("spark.shuffle.manager", "sort")
conf.set("spark.shuffle.service.enabled", "true")
conf.set("spark.shuffle.service.port", server.getPort.toString)
}
override def afterAll() {
server.close()
}
// This test ensures that the external shuffle service is actually in use for the other tests.
test("using external shuffle service") {
sc = new SparkContext("local-cluster[2,1,1024]", "test", conf)
sc.env.blockManager.externalShuffleServiceEnabled should equal(true)
sc.env.blockManager.shuffleClient.getClass should equal(classOf[ExternalShuffleClient])
// On a slow machine, one slave may register hundreds of milliseconds ahead of the other one.
// If we don't wait for all slaves, it's possible that only one executor runs all jobs. Then
// all shuffle blocks will be in this executor, ShuffleBlockFetcherIterator will directly fetch
// local blocks from the local BlockManager and won't send requests to ExternalShuffleService.
// In this case, we won't receive FetchFailed. And it will make this test fail.
// Therefore, we should wait until all slaves are up
sc.jobProgressListener.waitUntilExecutorsUp(2, 10000)
val rdd = sc.parallelize(0 until 1000, 10).map(i => (i, 1)).reduceByKey(_ + _)
rdd.count()
rdd.count()
// Invalidate the registered executors, disallowing access to their shuffle blocks (without
// deleting the actual shuffle files, so we could access them without the shuffle service).
rpcHandler.applicationRemoved(sc.conf.getAppId, false /* cleanupLocalDirs */)
// Now Spark will receive FetchFailed, and not retry the stage due to "spark.test.noStageRetry"
// being set.
val e = intercept[SparkException] {
rdd.count()
}
e.getMessage should include ("Fetch failure will not retry stage due to testing config")
}
}
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
|
Scala
|
apache-2.0
| 3,778
|
/*
* Copyright 2016 by Simba Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.spark.sql.simba.execution.join
import org.apache.spark.sql.simba.execution.SimbaPlan
import org.apache.spark.sql.simba.index.RTree
import org.apache.spark.sql.simba.partitioner.{MapDPartition, STRPartition}
import org.apache.spark.sql.simba.spatial.{MBR, Point}
import org.apache.spark.sql.simba.util.ShapeUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, JoinedRow, Literal}
import org.apache.spark.sql.catalyst.plans.physical.{Partitioning, UnknownPartitioning}
import org.apache.spark.sql.execution.SparkPlan
import scala.collection.mutable
/**
* Created by dong on 1/20/16.
* KNN Join based on Two-Level R-Tree Structure
*/
case class RKJSpark(left_key: Expression, right_key: Expression, l: Literal,
left: SparkPlan, right: SparkPlan) extends SimbaPlan {
override def output: Seq[Attribute] = left.output ++ right.output
final val num_partitions = simbaSessionState.simbaConf.joinPartitions
final val sample_rate = simbaSessionState.simbaConf.sampleRate
final val transfer_threshold = simbaSessionState.simbaConf.transferThreshold
final val theta_boost = simbaSessionState.simbaConf.thetaBoost
final val max_entries_per_node = simbaSessionState.simbaConf.maxEntriesPerNode
final val k = l.value.asInstanceOf[Number].intValue()
override def outputPartitioning: Partitioning = UnknownPartitioning(num_partitions)
override protected def doExecute(): RDD[InternalRow] = {
val left_rdd = left.execute().map(row =>
(ShapeUtils.getShape(left_key, left.output, row).asInstanceOf[Point], row)
)
val right_rdd = right.execute().map(row =>
(ShapeUtils.getShape(right_key, right.output, row).asInstanceOf[Point], row)
)
val right_sampled = right_rdd
.sample(withReplacement = false, sample_rate, System.currentTimeMillis())
.map(_._1).collect().zipWithIndex
val right_rt = RTree(right_sampled, max_entries_per_node)
val dimension = right_sampled.head._1.coord.length
val (left_partitioned, left_mbr_bound) =
STRPartition(left_rdd, dimension, num_partitions, sample_rate,
transfer_threshold, max_entries_per_node)
val dim = new Array[Int](dimension)
var remaining = theta_boost.toDouble
for (i <- 0 until dimension) {
dim(i) = Math.ceil(Math.pow(remaining, 1.0 / (dimension - i))).toInt
remaining /= dim(i)
}
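// Worked example (hypothetical values): with dimension = 2 and theta_boost = 16, the loop
// above yields dim = (4, 4), i.e. the points of each partition are split into a 4 x 4 grid
// of groups before the per-group MBRs are computed below.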
val refined_mbr_bound = left_partitioned.mapPartitionsWithIndex {(id, iter) =>
if (iter.hasNext) {
val data = iter.map(_._1).toArray
def recursiveGroupPoint(entries: Array[Point], cur_dim: Int, until_dim: Int)
: Array[(Point, Double)] = {
val len = entries.length.toDouble
val grouped = entries.sortWith(_.coord(cur_dim) < _.coord(cur_dim))
.grouped(Math.ceil(len / dim(cur_dim)).toInt).toArray
if (cur_dim < until_dim) grouped.flatMap(now => recursiveGroupPoint(now, cur_dim + 1, until_dim))
else grouped.map {list =>
val min = Array.fill(dimension)(Double.MaxValue)
val max = Array.fill(dimension)(Double.MinValue)
list.foreach { now =>
for (i <- min.indices) min(i) = Math.min(min(i), now.coord(i))
for (i <- max.indices) max(i) = Math.max(max(i), now.coord(i))
}
val mbr = MBR(new Point(min), new Point(max))
var cur_max = 0.0
list.foreach(now => {
val cur_dis = mbr.centroid.minDist(now)
if (cur_dis > cur_max) cur_max = cur_dis
})
(mbr.centroid, cur_max)
}
}
recursiveGroupPoint(data, 0, dimension - 1).map(x => (x._1, x._2, id)).iterator
} else Array().iterator
}.collect()
val theta = new Array[Double](refined_mbr_bound.length)
for (i <- refined_mbr_bound.indices) {
val query = refined_mbr_bound(i)._1
val knn_mbr_ans = right_rt.kNN(query, k, keepSame = false)
theta(i) = knn_mbr_ans.last._1.minDist(query) + (refined_mbr_bound(i)._2 * 2.0)
}
val bc_theta = sparkContext.broadcast(theta)
val right_dup = right_rdd.flatMap(x => {
var list = mutable.ListBuffer[(Int, (Point, InternalRow))]()
val set = new mutable.HashSet[Int]()
for (i <- refined_mbr_bound.indices) {
val pid = refined_mbr_bound(i)._3
if (!set.contains(pid) && refined_mbr_bound(i)._1.minDist(x._1) < bc_theta.value(i)) {
list += ((pid, x))
set += pid
}
}
list
})
val right_dup_partitioned = MapDPartition(right_dup, left_mbr_bound.length).map(_._2)
left_partitioned.zipPartitions(right_dup_partitioned) {
(leftIter, rightIter) =>
val ans = mutable.ListBuffer[InternalRow]()
val right_data = rightIter.toArray
if (right_data.length > 0) {
val right_index = RTree(right_data.map(_._1).zipWithIndex, max_entries_per_node)
leftIter.foreach(now =>
ans ++= right_index.kNN(now._1, k, keepSame = false)
.map(x => new JoinedRow(now._2, right_data(x._2)._2))
)
}
ans.iterator
}
}
override def children: Seq[SparkPlan] = Seq(left, right)
}
|
InitialDLab/Simba
|
src/main/scala/org/apache/spark/sql/simba/execution/join/RKJSpark.scala
|
Scala
|
apache-2.0
| 5,952
|
package types
/**
* Generic types example
*
*/
class LinkedList[A] {
private class Node[A](elem: A) {
var next: Node[A] = _
override def toString = elem.toString
}
private var head: Node[A] = _
def add(elem: A) {
val n = new Node(elem)
n.next = head
head = n
}
private def printNodes(n: Node[A]) {
if (n != null) {
println(n)
printNodes(n.next)
}
}
def printAll() {
printNodes(head)
}
}
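/*
 * A minimal sketch (not required by the example above, shown only for illustration) of how
 * the same traversal could back a size method:
 *
 *   def size: Int = {
 *     @scala.annotation.tailrec
 *     def count(n: Node[A], acc: Int): Int =
 *       if (n == null) acc else count(n.next, acc + 1)
 *     count(head, 0)
 *   }
 */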
object LinkedListExample extends App {
val ints = new LinkedList[Int]
ints.add(1)
ints.add(2)
ints.add(3)
ints.printAll()
val strings = new LinkedList[String]()
strings.add("Nacho")
strings.add("Libre")
strings.printAll()
}
|
rafaelkyrdan/cookbook
|
src/main/scala/types/LinkedList.scala
|
Scala
|
mit
| 708
|
/*
* Copyright 2017-2018 Iaroslav Zeigerman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akkeeper.storage.zookeeper.async
import java.util.concurrent.Executors
import akkeeper.storage._
import akkeeper.storage.zookeeper._
import AsyncZookeeperClient._
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.api.{BackgroundCallback, CuratorEvent}
import org.apache.zookeeper.CreateMode
import org.apache.zookeeper.KeeperException.Code
import org.apache.zookeeper.data.Stat
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.Try
private[zookeeper] class AsyncZookeeperClient(config: ZookeeperClientConfig,
createMode: CreateMode)
extends ZookeeperClient(config) {
private val executor = Executors
.newFixedThreadPool(config.clientThreads.getOrElse(DefaultClientThreads))
final def create(path: String, data: Array[Byte]): Future[String] = {
val (callback, future) = callbackWithFuture(_.getPath)
client.create()
.creatingParentsIfNeeded
.withMode(createMode)
.inBackground(callback, executor)
.forPath(normalizePath(path), data)
future
}
final def create(path: String): Future[String] = {
create(path, Array.empty)
}
final def update(path: String, data: Array[Byte]): Future[String] = {
val (callback, future) = callbackWithFuture(_.getPath)
client.setData().inBackground(callback, executor).forPath(normalizePath(path), data)
future
}
final def get(path: String): Future[Array[Byte]] = {
val (callback, future) = callbackWithFuture(_.getData)
client.getData.inBackground(callback, executor).forPath(normalizePath(path))
future
}
final def delete(path: String): Future[String] = {
val (callback, future) = callbackWithFuture(_.getPath)
client.delete().inBackground(callback, executor).forPath(normalizePath(path))
future
}
final def exists(path: String): Future[Stat] = {
val (callback, future) = callbackWithFuture(_.getStat)
client.checkExists().inBackground(callback, executor).forPath(normalizePath(path))
future
}
final def children(path: String): Future[Seq[String]] = {
val (callback, future) = callbackWithFuture(_.getChildren.asScala)
client.getChildren.inBackground(callback, executor).forPath(normalizePath(path))
future
}
final def getExecutionContext: ExecutionContext = {
ExecutionContext.fromExecutor(executor)
}
override def stop(): Unit = {
executor.shutdown()
super.stop()
}
}
object AsyncZookeeperClient {
val DefaultClientThreads = 5
private object ResultCodeErrorExtractor {
def unapply(code: Int): Option[Throwable] = {
Code.get(code) match {
case Code.OK => None
case Code.NONODE => Some(RecordNotFoundException("ZK node was not found"))
case Code.NODEEXISTS => Some(RecordAlreadyExistsException("ZK node already exists"))
case other =>
Some(ZookeeperException(s"ZK operation failed (${other.toString})", other.intValue()))
}
}
}
private def asyncCallback[T](promise: Promise[T])(f: CuratorEvent => T): BackgroundCallback = {
new BackgroundCallback {
override def processResult(client: CuratorFramework, event: CuratorEvent): Unit = {
event.getResultCode match {
case ResultCodeErrorExtractor(error) =>
promise failure error
case _ =>
promise complete Try(f(event))
}
}
}
}
private def callbackWithFuture[T](f: CuratorEvent => T): (BackgroundCallback, Future[T]) = {
val promise = Promise[T]()
val callback = asyncCallback(promise)(f)
(callback, promise.future)
}
private def normalizePath(path: String): String = {
if (path.startsWith("/")) path.trim else s"/$path".trim
}
}
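/*
 * Illustrative usage sketch (hypothetical path and config, for exposition only), assuming the
 * underlying Curator client has been started and a ZooKeeper ensemble is reachable:
 *
 *   val client = new AsyncZookeeperClient(config, CreateMode.PERSISTENT)
 *   implicit val ec: ExecutionContext = client.getExecutionContext
 *   val roundTrip: Future[Array[Byte]] =
 *     client.create("/example/node", "hello".getBytes).flatMap(_ => client.get("/example/node"))
 */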
|
akkeeper-project/akkeeper
|
akkeeper/src/main/scala/akkeeper/storage/zookeeper/async/AsyncZookeeperClient.scala
|
Scala
|
apache-2.0
| 4,419
|
package uk.co.odinconsultants.bitcoin.hbase
import org.apache.hadoop.hbase.client.{Put, Table}
import org.apache.hadoop.hbase.util.Bytes.toBytes
import uk.co.odinconsultants.bitcoin.core.Logging
import uk.co.odinconsultants.bitcoin.hbase.HBaseSetup.qualifier
import uk.co.odinconsultants.bitcoin.parsing.MetaStore.Batch
import uk.co.odinconsultants.bitcoin.parsing.{DomainOps, MetaStore}
import scala.collection.JavaConversions._
class HBaseMetaStore(table: Table, familyName: String) extends MetaStore with Logging {
val familyNameAsBytes: Array[Byte] = toBytes(familyName)
def apply(batch: Batch): Unit = {
val puts = batch.map { case (backReference, publicKey) =>
val (hash, index) = backReference
val key = DomainOps.appendAndSalt(hash, index)
val aPut = new Put(key)
aPut.addColumn(familyNameAsBytes, qualifier.getBytes, publicKey)
}
table.put(puts)
}
}
|
PhillHenry/Cryptorigin
|
bitcoin/src/main/scala/uk/co/odinconsultants/bitcoin/hbase/HBaseMetaStore.scala
|
Scala
|
gpl-2.0
| 969
|
package orc.android
import android.app.Activity
import android.os.Bundle
import android.widget.Button
import android.view.View
import android.widget.Toast
import android.util.Log
import android.widget.TextView
import android.os.AsyncTask
import android.os.Handler
import android.app.AlertDialog
import android.widget.EditText
import android.content.DialogInterface
import android.app.Dialog
import android.app.ProgressDialog
import java.util.LinkedList
import java.lang.StringBuffer
import java.io._
import java.util.Hashtable
import OrchardCompileLogger.CompileMessage
import orc.compile.StandardOrcCompiler
import orc.compile.parse.OrcStringInputContext
import orc.progress.NullProgressMonitor$
import orc.error.compiletime.CompileLogger
import orc.run.StandardOrcRuntime
import orc.OrcEventAction
import orc.OrcEvent
import orc.lib.str.PrintEvent
import orc.lib.util.PromptEvent
import orc.lib.util.PromptCallback
import org.apache.http.impl.client.DefaultHttpClient
import org.apache.http.client.HttpClient
import org.apache.http.HttpResponse
import org.apache.http.util.EntityUtils
import org.apache.http.HttpEntity
import org.apache.http.client.methods.HttpGet
import android.content.ClipData.Item
import orc.ast.oil.xml.OrcXML
/*
* http://stackoverflow.com/questions/9924015/eclipse-android-scala-made-easy-but-still-does-not-work/11084146#11084146
*
*/
/**
* @author Joao Barbosa, Ricardo Bernardino
*/
class RunActivity extends Activity {
lazy val my_button = findViewById(R.id.button).asInstanceOf[Button]
lazy val my_tv: TextView = findViewById(R.id.tvtext).asInstanceOf[TextView]
lazy val path: String = getIntent().getExtras().get("path").asInstanceOf[String]
lazy val TAG = "RunActivity"
/* Alternative to using runOnUiThread() method, by using handler.post(runnable) */
var handler: Handler = null
var input: EditText = null
/* hashtable containing the callback function of the given prompt. The key is the toString() of the AlertDialog */
lazy val callbackPrompts: Hashtable[String, PromptCallback] = new Hashtable[String, PromptCallback]()
/* hashtable containing the input of the given prompt. The key is the toString() of the AlertDialog */
lazy val inputPrompts: Hashtable[String, EditText] = new Hashtable[String, EditText]()
/* output string */
lazy val resultString: StringBuffer = new StringBuffer()
lazy val fileContent: StringBuffer = new StringBuffer()
implicit def func2OnClickListener(func: (View) => Unit) = {
new View.OnClickListener() {
override def onClick(v: View) = func(v)
}
}
implicit def alertdialogOnClickListener(func: (DialogInterface, Int) => Unit) = {
new DialogInterface.OnClickListener() {
override def onClick(dialog: DialogInterface, whichButton: Int) = func(dialog, whichButton)
}
}
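/*
 * Illustrative note: the two implicit conversions above let plain Scala functions stand in for
 * Android listener interfaces, e.g. (hypothetical call, for exposition only)
 *
 *   my_button.setOnClickListener((v: View) => startProgress(v))
 *
 * and they are what allow alert.setPositiveButton("Ok", positiveButtonOnClick _) further below.
 */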
override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState)
setContentView(R.layout.run_activity)
handler = new Handler()
/*
* If the path string starts with http:// we know that we were previously
* on the ExamplesSectionFragment class which explores the try_orc website.
* The path is sent via the activity intent.
*/
if (path.startsWith("http://")) {
new GetOrcFilesTask().execute()
}
}
/* "Run" button callback function, specified in layouts/run_activity.xml */
def startProgress(view: View) = {
// Do something long
val runnable: Runnable = new Runnable() {
override def run() = {
var str: String = ""
if (!path.startsWith("http://")) { // Read from file
/*
* Read simple.orc file -> res/raw/simple.orc
* val is: InputStream = getResources().openRawResource(R.raw.simple)
*/
fileContent.setLength(0) // Clean StringBuffer
/* Read file from sdcard */
val is: InputStream = new FileInputStream(path)
val reader: BufferedReader = new BufferedReader(new InputStreamReader(is))
if (is != null) {
while ({ str = reader.readLine(); str != null }) {
fileContent.append(str + "\\n")
}
}
is.close()
}
var result: orc.ast.oil.nameless.Expression = null
lazy val options = new OrcBindings()
if (path.endsWith(".orc")) { /* Compile .orc file */
val orc_string: OrcStringInputContext = new OrcStringInputContext(fileContent.toString())
val compiler: StandardOrcCompiler = new StandardOrcCompiler()
val compileMsgs: LinkedList[CompileMessage] = new LinkedList[CompileMessage]
val cl: CompileLogger = new OrchardCompileLogger(compileMsgs)
options.usePrelude = false
result = compiler.apply(orc_string, options, cl, NullProgressMonitor$.MODULE$)
} else { /* .oil file */
val oil_is: InputStream = new ByteArrayInputStream(fileContent.toString().getBytes());
result = OrcXML.readOilFromStream(oil_is)
}
/*
* If you want the output of a single program on the console
* just reset resultString: resultString.setLength(0)
*/
resultString.append("--------\\n")
val exec: StandardOrcRuntime = new StandardOrcRuntime("Orc")
/*
* In order to display the outputs of the program,
* we will need to override the callback functions:
* - PrintEvent: to display Print/Println calls
* - PromptEvent: create a Dialog Pop-Up in Android
* - published: to display the values to be published on the console
* - caught: handle the error messages from the execution
*/
val event: OrcEventAction = new OrcEventAction() {
override def other(event: OrcEvent) {
Log.i(TAG, "entered other")
event match {
case PrintEvent(text) => {
resultString.append(text + " ")
}
case PromptEvent(prompt, callback) => {
val alert: AlertDialog.Builder = new AlertDialog.Builder(RunActivity.this)
alert.setTitle(prompt)
Log.i(TAG, prompt)
alert.setPositiveButton("Ok", positiveButtonOnClick _)
alert.setNegativeButton("Cancel", negativeButtonOnClick _)
lazy val alert2: AlertDialog = alert.create()
/* In order to alter the UI, we have got to run it on the main thread */
runOnUiThread(new Runnable() {
override def run() = {
// Set an EditText view to get user input
input = new EditText(getApplication())
alert.setView(input)
println(alert2)
callbackPrompts.put(alert2.toString(), callback)
inputPrompts.put(alert2.toString(), input)
alert2.show()
}
})
}
case e => {
handler.post(new Runnable() {
override def run() = {
Toast.makeText(getApplicationContext(), "Unhandled event.", Toast.LENGTH_LONG).show();
}
});
}
}
}
override def published(value: AnyRef) {
resultString.append(value + "\\n")
/* In order to alter the UI, we have got to run it on the main thread */
handler.post(new Runnable() {
override def run() = {
my_tv.setText(resultString.toString())
}
});
}
override def caught(e: Throwable) {
Log.i(TAG, "Error " + e.printStackTrace())
handler.post(new Runnable() {
override def run() = {
Toast.makeText(getApplicationContext(), "An error occurred while running the program.", Toast.LENGTH_LONG).show();
}
});
}
}
/* Execute the compiled Orc program */
exec.runSynchronous(result, event.asFunction(_), options)
exec.stop()
}
}
new Thread(new ThreadGroup("orc"), runnable, "orc", 40000).start() // 40000 bytes stack size
/*
* Due to the high memory requirements of Orc and the Android system
* stack size restrictions on the Main Thread (8KB), we always have to create
* our own Thread so that we can specify how many bytes we would like the stack to have.
* NOTE: The Android documentation states that the stack size value may be
* ignored on some systems.
*/
}
/* Prompt positive button callback */
def positiveButtonOnClick(dialog: DialogInterface, whichButton: Int): Unit = {
val dialogNew = dialog.asInstanceOf[AlertDialog]
println(dialogNew);
val callback = callbackPrompts.get(dialogNew.toString())
val text = inputPrompts.get(dialogNew.toString()).getText().toString()
callback.respondToPrompt(text)
callbackPrompts.remove(dialogNew.toString())
}
/* Prompt negative button callback */
def negativeButtonOnClick(dialog: DialogInterface, whichButton: Int): Unit = {
val dialogNew = dialog.asInstanceOf[AlertDialog]
val callback = callbackPrompts.get(dialogNew.toString())
callback.cancelPrompt()
callbackPrompts.remove(dialogNew.toString())
}
/*
* On Android, we cannot access the network through the Main Thread,
* so either we use the AsyncTask class (as shown below), or we create
* a new Thread. We chose the former due to its simplicity and the ability
* to communicate with the Main Thread for the UI changes.
*/
class GetOrcFilesTask() extends AsyncTask[AnyRef, Unit, Unit] {
var response: String = ""
var client: HttpClient = null
var dialog: Dialog = null
/* can use UI thread here */
override def onPreExecute() {
dialog = ProgressDialog.show(RunActivity.this, "", "Getting Orc file...", true);
client = new DefaultHttpClient()
}
/* automatically done on worker thread (separate from UI thread) */
override def doInBackground(params: AnyRef*): Unit = {
val httpget: HttpGet = new HttpGet(path)
var response: HttpResponse = null
var response_file: String = null
try {
response = client.execute(httpget)
response_file = getBodyFromResponse(response)
} catch {
case e: Exception =>
Log.e("doInBackGround", e.toString())
null
}
fileContent.append(response_file)
}
/* can use UI thread here */
override def onPostExecute(params: Unit) = {
dialog.dismiss()
}
def getBodyFromResponse(response: HttpResponse): String = {
var result: String = ""
val entity: HttpEntity = response.getEntity()
try {
if (entity != null) {
result = EntityUtils.toString(entity);
}
} catch {
case e: Exception => Log.e("getBodyFromResponse", e.toString())
}
result
}
}
}
|
orc-lang/orc
|
OrcAndroid/src/orc/android/RunActivity.scala
|
Scala
|
bsd-3-clause
| 11,489
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Ian McIntosh
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package f3
package web
import diode.Action
import f3.core._
case class LoadLeagues(leagues: List[League]) extends Action
case class SelectLeague(league: Option[Int]) extends Action
case class SelectSeason(season: Int) extends Action
case class SelectWeek(week: Int) extends Action
case class SelectContent(contentType: ContentType) extends Action
|
cranst0n/f3
|
modules/web/src/main/scala/f3/web/action.scala
|
Scala
|
mit
| 1,493
|
/*
Copyright (c) 2016, Elliot Stirling
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package evolve.example
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.text.NumberFormat
import java.util.concurrent.Executors
import evolve.core.Evolver.EvolverStrategy
import evolve.core._
import evolve.util.EvolveUtil
import scala.annotation.tailrec
import scala.concurrent.duration.Duration.Inf
import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future}
object FourBitFullAdderNAND {
def main(args: Array[String]): Unit = {
import evolve.functions.BooleanFunctions.scoreFunc
object Nop extends Function[Boolean] {
override val instructionSize: Int = 3
override val argumentSize: Int = 9
override val arguments: Int = 1
override val cost: Int = 1
override def getLabel(inst: Instruction): String = "Nop"
override def apply(inst: Instruction, arguments: List[Boolean]): Boolean = {
arguments.head
}
}
object NAnd1 extends Function[Boolean] {
override val instructionSize: Int = 3
override val argumentSize: Int = 9
override val cost: Int = 5
override val arguments: Int = 1
override def getLabel(inst: Instruction): String = "!&"
override def apply(inst: Instruction, arguments: List[Boolean]): Boolean = {
!arguments.head
}
}
object NAnd2 extends Function[Boolean] {
override val instructionSize: Int = 3
override val argumentSize: Int = 9
override val cost: Int = 6
override val arguments: Int = 2
override def getLabel(inst: Instruction): String = "!&"
override def apply(inst: Instruction, arguments: List[Boolean]): Boolean = {
val a = arguments.head
val b = arguments(1)
!(a&b)
}
}
object NAnd3 extends Function[Boolean] {
override val instructionSize: Int = 3
override val argumentSize: Int = 9
override val cost: Int = 7
override val arguments: Int = 3
override def getLabel(inst: Instruction): String = "!&"
override def apply(inst: Instruction, arguments: List[Boolean]): Boolean = {
val a = arguments.head
val b = arguments(1)
val c = arguments(2)
!(a&b&c)
}
}
implicit val functions: Seq[Function[Boolean]] = Seq[Function[Boolean]](
Nop, NAnd1, NAnd2, NAnd3
)
implicit val evolveStrategy: EvolverStrategy = EvolverStrategy(48, 0.00025, optimiseForPipeline = true)
implicit val ec: ExecutionContextExecutor = ExecutionContext.fromExecutor( Executors.newFixedThreadPool( Runtime.getRuntime.availableProcessors() ) )
def bitsToBools(value: Int, bits: Int): List[Boolean] = {
require(value >= 0 && value < math.pow(2, bits))
(0 until bits)
.map( i => ((0x1 << i) & value) != 0x0 )
.reverse
.toList
}
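// Worked example (for exposition): bitsToBools(5, 4) yields List(false, true, false, true),
// i.e. the 4-bit binary representation 0101 with the most significant bit first.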
val testCases: TestCases[Boolean] = TestCases((for {
l <- 0 until 16
r <- 0 until 16
} yield TestCase[Boolean](bitsToBools(l, 4) ::: bitsToBools(r, 4), bitsToBools(l + r, 5))).toList)(Manifest.Boolean)
@tailrec def function(program: Program, generation: Long, optimise: Boolean = false): Program = {
/*
* Evolving against the whole list of test cases caused the evolver to be risk averse. As such evolution would
* stall at about a score of 64k and still be going after 1 million generations. To combat this the test cases
* are randomly shuffled into 4 groups of 64 cases and scored against the curr)ent program. The worst group of
* 64 is selected for a short run of evolution. This allows the evolver to evolve to selectively solve some
* cases at the expense of others. CGP lends itself towards this as 'unused-genes' persist between generations.
* Result: Once implemented this evolve function regularly solves in under 200k generations.
*/
implicit val manifest = Manifest.Boolean
val partial = EvolveUtil.worstSubGroup(program, 64, 100, testCases)
val result = EvolveUtil.fitness(partial, 0, 900, testCases, optimise)
val score = testCases.score(result)
if (score == 0) {
println( s"Solution found after $generation-${generation+1000} generations." )
result
} else {
val usage = program.used.count( _ == true ).toDouble / (program.length + program.inputCount).toDouble
// create mutant children and score them
val popF: Future[Seq[Double]] = Future.sequence( Seq.fill(evolveStrategy.children)( Future {
val child = Generator.repair( Mutator( program, evolveStrategy.factor ) )
testCases.score( child )( scoreFunc, functions )
} ) )
// score the population
val populationScore =
Await
.result(popF, Inf)
.sum / evolveStrategy.children
println( s"Processed ${generation+1000} generations. Current generation score: $populationScore. Current parent score: $score. Current size: ${program.length}. Used genes ${NumberFormat.getPercentInstance.format(usage)}" )
if( generation % 10000 == 0 ) {
function(result, generation + 1000, !optimise)
} else {
function(result, generation + 1000, optimise)
}
}
}
val solution = function(EvolveUtil.startup(Generator(Nop.instructionSize, 504, 8, 5), testCases), 0).denop.deduplicate.shrink
Files.write(Paths.get("solution.dot"), DotGraph(solution).getBytes(StandardCharsets.UTF_8) )
// three rounds of optimisation and shrinking
val optimised1 = EvolveUtil.counted(solution, 10000, optimise = true, testCases).denop.deduplicate.shrink
val optimised2 = EvolveUtil.counted(optimised1, 10000, optimise = true, testCases).denop.deduplicate.shrink
val optimised3 = EvolveUtil.counted(optimised2, 10000, optimise = true, testCases).denop.deduplicate.shrink
Files.write(Paths.get("optimised.dot"), DotGraph(optimised3).getBytes(StandardCharsets.UTF_8) )
val pipelined = optimised3.pipeline.deduplicate.pipeline.shrink
Files.write(Paths.get("pipelined.dot"), DotGraph(pipelined).getBytes(StandardCharsets.UTF_8) )
System.exit(0)
}
}
|
Trugath/Evolve
|
src/main/scala/evolve/example/FourBitFullAdderNAND.scala
|
Scala
|
bsd-3-clause
| 7,686
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.util
import scala.annotation.tailrec
import scala.reflect.ClassTag
import scala.util._
import org.apache.spark._
import org.apache.spark.graphx._
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
/** A collection of graph generating functions. */
object GraphGenerators extends Logging {
val RMATa = 0.45
val RMATb = 0.15
val RMATd = 0.25
/**
* Generate a graph whose vertex out degree distribution is log normal.
*
* The default values for mu and sigma are taken from the Pregel paper:
*
* Grzegorz Malewicz, Matthew H. Austern, Aart J.C Bik, James C. Dehnert,
* Ilan Horn, Naty Leiser, and Grzegorz Czajkowski. 2010.
* Pregel: a system for large-scale graph processing. SIGMOD '10.
*
* If the seed is -1 (default), a random seed is chosen. Otherwise, use
* the user-specified seed.
*
* @param sc Spark Context
* @param numVertices number of vertices in generated graph
* @param numEParts (optional) number of partitions
* @param mu (optional, default: 4.0) mean of out-degree distribution
* @param sigma (optional, default: 1.3) standard deviation of out-degree distribution
* @param seed (optional, default: -1) seed for RNGs, -1 causes a random seed to be chosen
* @return Graph object
*/
def logNormalGraph(
sc: SparkContext, numVertices: Int, numEParts: Int = 0, mu: Double = 4.0,
sigma: Double = 1.3, seed: Long = -1): Graph[Long, Int] = {
val evalNumEParts = if (numEParts == 0) sc.defaultParallelism else numEParts
// Enable deterministic seeding
val seedRand = if (seed == -1) new Random() else new Random(seed)
val seed1 = seedRand.nextInt()
val seed2 = seedRand.nextInt()
val vertices: RDD[(VertexId, Long)] = sc.parallelize(0 until numVertices, evalNumEParts).map {
src => (src, sampleLogNormal(mu, sigma, numVertices, seed = (seed1 ^ src)))
}
val edges = vertices.flatMap { case (src, degree) =>
generateRandomEdges(src.toInt, degree.toInt, numVertices, seed = (seed2 ^ src))
}
Graph(vertices, edges, 0)
}
// Right now it just generates a bunch of edges where
// the edge data is the weight (default 1)
val RMATc = 0.15
def generateRandomEdges(
src: Int, numEdges: Int, maxVertexId: Int, seed: Long = -1): Array[Edge[Int]] = {
val rand = if (seed == -1) new Random() else new Random(seed)
Array.fill(numEdges) { Edge[Int](src, rand.nextInt(maxVertexId), 1) }
}
/**
* Randomly samples from a log normal distribution whose corresponding normal distribution has
* the given mean and standard deviation. It uses the formula `X = exp(mu + sigma*Z)` where
* `mu`, `sigma` are the mean and standard deviation of the underlying normal distribution and
* `Z ~ N(0, 1)`. The resulting log normal distribution then has mean
* `m = e^(mu+sigma^2/2)` and standard deviation `s = sqrt[(e^(sigma^2) - 1)(e^(2*mu+sigma^2))]`.
*
* @param mu the mean of the normal distribution
* @param sigma the standard deviation of the normal distribution
* @param maxVal exclusive upper bound on the value of the sample
* @param seed optional seed
*/
private[spark] def sampleLogNormal(
mu: Double, sigma: Double, maxVal: Int, seed: Long = -1): Int = {
val rand = if (seed == -1) new Random() else new Random(seed)
val sigmaSq = sigma * sigma
val m = math.exp(mu + sigmaSq / 2.0)
// expm1 is exp(m)-1 with better accuracy for tiny m
val s = math.sqrt(math.expm1(sigmaSq) * math.exp(2*mu + sigmaSq))
// Z ~ N(0, 1)
var X: Double = maxVal
while (X >= maxVal) {
val Z = rand.nextGaussian()
X = math.exp(mu + sigma*Z)
}
math.floor(X).toInt
}
/**
* A random graph generator using the R-MAT model, proposed in
* "R-MAT: A Recursive Model for Graph Mining" by Chakrabarti et al.
*
* See http://www.cs.cmu.edu/~christos/PUBLICATIONS/siam04.pdf.
*/
def rmatGraph(sc: SparkContext, requestedNumVertices: Int, numEdges: Int): Graph[Int, Int] = {
// let N = requestedNumVertices
// the number of vertices is 2^n where n=ceil(log2[N])
// This ensures that the 4 quadrants are the same size at all recursion levels
val numVertices = math.round(
math.pow(2.0, math.ceil(math.log(requestedNumVertices) / math.log(2.0)))).toInt
val numEdgesUpperBound =
math.pow(2.0, 2 * ((math.log(numVertices) / math.log(2.0)) - 1)).toInt
if (numEdgesUpperBound < numEdges) {
throw new IllegalArgumentException(
s"numEdges must be <= $numEdgesUpperBound but was $numEdges")
}
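// Illustrative example (for exposition): requestedNumVertices = 20 rounds up to
// numVertices = 32 (2^5), giving numEdgesUpperBound = 2^(2 * (5 - 1)) = 256.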
var edges: Set[Edge[Int]] = Set()
while (edges.size < numEdges) {
if (edges.size % 100 == 0) {
logDebug(edges.size + " edges")
}
edges += addEdge(numVertices)
}
outDegreeFromEdges(sc.parallelize(edges.toList))
}
private def outDegreeFromEdges[ED: ClassTag](edges: RDD[Edge[ED]]): Graph[Int, ED] = {
val vertices = edges.flatMap { edge => List((edge.srcId, 1)) }
.reduceByKey(_ + _)
.map{ case (vid, degree) => (vid, degree) }
Graph(vertices, edges, 0)
}
/**
* @param numVertices Specifies the total number of vertices in the graph (used to get
* the dimensions of the adjacency matrix).
*/
private def addEdge(numVertices: Int): Edge[Int] = {
// val (src, dst) = chooseCell(numVertices/2.0, numVertices/2.0, numVertices/2.0)
val v = math.round(numVertices.toFloat/2.0).toInt
val (src, dst) = chooseCell(v, v, v)
Edge[Int](src, dst, 1)
}
/**
* This method recursively subdivides the adjacency matrix into quadrants
* until it picks a single cell. The naming conventions here match
* those of the R-MAT paper. The number of nodes in the graph is a power of 2.
* The adjacency matrix looks like:
* <pre>
*
* dst ->
* (x,y) *************** _
* | | | |
* | a | b | |
* src | | | |
* | *************** | T
* \\|/ | | | |
* | c | d | |
* | | | |
* *************** -
* </pre>
*
* where this represents the subquadrant of the adj matrix currently being
* subdivided. (x,y) represent the upper left hand corner of the subquadrant,
* and T represents the side length (guaranteed to be a power of 2).
*
* After choosing the next level subquadrant, we get the resulting sets
* of parameters:
* {{{
* quad = a, x'=x, y'=y, T'=T/2
* quad = b, x'=x+T/2, y'=y, T'=T/2
* quad = c, x'=x, y'=y+T/2, T'=T/2
* quad = d, x'=x+T/2, y'=y+T/2, T'=T/2
* }}}
*/
@tailrec
private def chooseCell(x: Int, y: Int, t: Int): (Int, Int) = {
if (t <= 1) {
(x, y)
} else {
val newT = math.round(t.toFloat/2.0).toInt
pickQuadrant(RMATa, RMATb, RMATc, RMATd) match {
case 0 => chooseCell(x, y, newT)
case 1 => chooseCell(x + newT, y, newT)
case 2 => chooseCell(x, y + newT, newT)
case 3 => chooseCell(x + newT, y + newT, newT)
}
}
}
private def pickQuadrant(a: Double, b: Double, c: Double, d: Double): Int = {
if (a + b + c + d != 1.0) {
throw new IllegalArgumentException("R-MAT probability parameters sum to " + (a + b + c + d)
+ ", should sum to 1.0")
}
val rand = new Random()
val result = rand.nextDouble()
result match {
case x if x < a => 0 // 0 corresponds to quadrant a
case x if (x >= a && x < a + b) => 1 // 1 corresponds to b
case x if (x >= a + b && x < a + b + c) => 2 // 2 corresponds to c
case _ => 3 // 3 corresponds to d
}
}
/**
* Create `rows` by `cols` grid graph with each vertex connected to its
* row+1 and col+1 neighbors. Vertex ids are assigned in row major
* order.
*
* @param sc the spark context in which to construct the graph
* @param rows the number of rows
* @param cols the number of columns
*
* @return A graph containing vertices with the row and column ids
* as their attributes and edge values as 1.0.
*/
def gridGraph(sc: SparkContext, rows: Int, cols: Int): Graph[(Int, Int), Double] = {
// Convert row column address into vertex ids (row major order)
def sub2ind(r: Int, c: Int): VertexId = r * cols + c
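// Worked example (for exposition): in a 2 x 3 grid (rows = 2, cols = 3), the vertex at
// row 1, column 2 receives id sub2ind(1, 2) = 1 * 3 + 2 = 5.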
val vertices: RDD[(VertexId, (Int, Int))] = sc.parallelize(0 until rows).flatMap { r =>
(0 until cols).map( c => (sub2ind(r, c), (r, c)) )
}
val edges: RDD[Edge[Double]] =
vertices.flatMap{ case (vid, (r, c)) =>
(if (r + 1 < rows) { Seq( (sub2ind(r, c), sub2ind(r + 1, c))) } else { Seq.empty }) ++
(if (c + 1 < cols) { Seq( (sub2ind(r, c), sub2ind(r, c + 1))) } else { Seq.empty })
}.map{ case (src, dst) => Edge(src, dst, 1.0) }
Graph(vertices, edges)
} // end of gridGraph
/**
* Create a star graph with vertex 0 being the center.
*
* @param sc the spark context in which to construct the graph
* @param nverts the number of vertices in the star
*
* @return A star graph containing `nverts` vertices with vertex 0
* being the center vertex.
*/
def starGraph(sc: SparkContext, nverts: Int): Graph[Int, Int] = {
val edges: RDD[(VertexId, VertexId)] = sc.parallelize(1 until nverts).map(vid => (vid, 0))
Graph.fromEdgeTuples(edges, 1)
} // end of starGraph
} // end of Graph Generators
|
aokolnychyi/spark
|
graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala
|
Scala
|
apache-2.0
| 10,153
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.network._
import kafka.utils._
import kafka.metrics.KafkaMetricsGroup
import java.util.concurrent.TimeUnit
/**
* A thread that answers kafka requests.
*/
class KafkaRequestHandler(id: Int, brokerId: Int, val requestChannel: RequestChannel, apis: KafkaApis) extends Runnable with Logging {
this.logIdent = "[Kafka Request Handler " + id + " on Broker " + brokerId + "], "
def run() {
while(true) {
try {
val req = requestChannel.receiveRequest()
if(req eq RequestChannel.AllDone) {
trace("receives shut down command, shut down".format(brokerId, id))
return
}
req.dequeueTimeMs = SystemTime.milliseconds
debug("handles request " + req)
apis.handle(req)
} catch {
case e: Throwable => error("exception when handling request", e)
}
}
}
def shutdown(): Unit = requestChannel.sendRequest(RequestChannel.AllDone)
}
class KafkaRequestHandlerPool(val brokerId: Int,
val requestChannel: RequestChannel,
val apis: KafkaApis,
numThreads: Int) extends Logging {
this.logIdent = "[Kafka Request Handler on Broker " + brokerId + "], "
val threads = new Array[Thread](numThreads)
val runnables = new Array[KafkaRequestHandler](numThreads)
for(i <- 0 until numThreads) {
runnables(i) = new KafkaRequestHandler(i, brokerId, requestChannel, apis)
threads(i) = Utils.daemonThread("kafka-request-handler-" + i, runnables(i))
threads(i).start()
}
def shutdown() {
info("shutting down")
for(handler <- runnables)
handler.shutdown
for(thread <- threads)
thread.join
info("shut down completely")
}
}
class BrokerTopicMetrics(name: String) extends KafkaMetricsGroup {
val messagesInRate = newMeter(name + "MessagesInPerSec", "messages", TimeUnit.SECONDS)
val bytesInRate = newMeter(name + "BytesInPerSec", "bytes", TimeUnit.SECONDS)
val bytesOutRate = newMeter(name + "BytesOutPerSec", "bytes", TimeUnit.SECONDS)
val failedProduceRequestRate = newMeter(name + "FailedProduceRequestsPerSec", "requests", TimeUnit.SECONDS)
val failedFetchRequestRate = newMeter(name + "FailedFetchRequestsPerSec", "requests", TimeUnit.SECONDS)
}
object BrokerTopicStats extends Logging {
private val valueFactory = (k: String) => new BrokerTopicMetrics(k)
private val stats = new Pool[String, BrokerTopicMetrics](Some(valueFactory))
private val allTopicsStats = new BrokerTopicMetrics("AllTopics")
def getBrokerAllTopicsStats(): BrokerTopicMetrics = allTopicsStats
def getBrokerTopicStats(topic: String): BrokerTopicMetrics = {
stats.getAndMaybePut(topic + "-")
}
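// Illustrative usage (hypothetical topic name, for exposition only): callers record per-topic
// and aggregate broker metrics, e.g.
//   BrokerTopicStats.getBrokerTopicStats("my-topic").messagesInRate.mark()
//   BrokerTopicStats.getBrokerAllTopicsStats().messagesInRate.mark()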
}
|
akosiaris/kafka
|
core/src/main/scala/kafka/server/KafkaRequestHandler.scala
|
Scala
|
apache-2.0
| 3,559
|
package sbt
package std
import reflect.macros._
import Def.Initialize
import sbt.internal.util.complete.Parser
import sbt.internal.util.appmacro.{ Convert, Converted }
object InputInitConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case InputWrapper.WrapInitName => Converted.Success[c.type](in)
case InputWrapper.WrapInitTaskName => Converted.Failure[c.type](in.pos, initTaskErrorMessage)
case _ => Converted.NotApplicable[c.type]
}
private def initTaskErrorMessage = "Internal sbt error: initialize+task wrapper not split"
}
/** Converts an input `Tree` of type `Parser[T]` or `State => Parser[T]` into a `Tree` of type `State => Parser[T]`.*/
object ParserConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case ParserInput.WrapName => Converted.Success[c.type](in)
case ParserInput.WrapInitName => Converted.Failure[c.type](in.pos, initParserErrorMessage)
case _ => Converted.NotApplicable[c.type]
}
private def initParserErrorMessage = "Internal sbt error: initialize+parser wrapper not split"
}
/** Convert instance for plain `Task`s not within the settings system. */
object TaskConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
if (nme == InputWrapper.WrapTaskName) Converted.Success[c.type](in)
else Converted.NotApplicable[c.type]
}
/** Converts an input `Tree` of type `Initialize[T]`, `Initialize[Task[T]]`, or `Task[T]` into a `Tree` of type `Initialize[Task[T]]`.*/
object FullConvert extends Convert {
import InputWrapper._
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case WrapInitTaskName => Converted.Success[c.type](in)
case WrapPreviousName => Converted.Success[c.type](in)
case WrapInitName => wrapInit[T](c)(in)
case WrapTaskName => wrapTask[T](c)(in)
case _ => Converted.NotApplicable[c.type]
}
private def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree): Converted[c.type] = {
val i = c.Expr[Initialize[T]](tree)
val t = c.universe.reify(Def.toITask(i.splice)).tree
Converted.Success[c.type](t)
}
private def wrapTask[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree): Converted[c.type] = {
val i = c.Expr[Task[T]](tree)
val t = c.universe.reify(Def.valueStrict[Task[T]](i.splice)).tree
Converted.Success[c.type](t)
}
}
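/*
 * Illustrative sketch (for exposition only): for a call site tree `t` of type Initialize[T],
 * wrapInit rewrites it to Def.toITask(t), yielding an Initialize[Task[T]] tree, while wrapTask
 * rewrites a Task[T] tree to Def.valueStrict[Task[T]](t), so every shape accepted by
 * FullConvert converges on Initialize[Task[T]].
 */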
/**
* Converts an input `Tree` of type `State => Parser[T]` or `Initialize[State => Parser[T]]`
* into a `Tree` of type `Initialize[State => Parser[T]]`.
*/
object InitParserConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case ParserInput.WrapName => wrap[T](c)(in)
case ParserInput.WrapInitName => Converted.Success[c.type](in)
case _ => Converted.NotApplicable[c.type]
}
private def wrap[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree): Converted[c.type] = {
val e = c.Expr[State => Parser[T]](tree)
val t = c.universe.reify { Def.valueStrict[State => Parser[T]](e.splice) }
Converted.Success[c.type](t.tree)
}
}
|
Duhemm/sbt
|
main-settings/src/main/scala/sbt/std/InputConvert.scala
|
Scala
|
bsd-3-clause
| 3,478
|
type Empty[X] = EmptyTuple
type Twice[X] = (X, X)
def test =
val a1: EmptyTuple = ??? : Tuple.Fold[EmptyTuple, Nothing, Tuple2]
val a2: (Int, (String, Nothing)) = ??? : Tuple.Fold[(Int, String), Nothing, Tuple2]
val a3: Int | String | Char = ??? : Tuple.Fold[(Int, String, Char), Nothing, [X, Y] =>> X | Y]
|
dotty-staging/dotty
|
tests/pos/tuple-fold.scala
|
Scala
|
apache-2.0
| 315
|
package org.singingwizard.auto2up
import org.rogach.scallop._
import org.apache.pdfbox.pdmodel._
import java.io.File
import resource._
import org.apache.pdfbox.pdmodel.common.PDRectangle
import org.apache.pdfbox.multipdf.LayerUtility
import org.apache.pdfbox.util.Matrix
import org.apache.pdfbox.pdmodel.graphics.image.JPEGFactory
import org.apache.pdfbox.pdmodel.graphics.image.LosslessFactory
import org.apache.pdfbox.rendering.PDFRenderer
import org.apache.pdfbox.rendering.ImageType
import scala.collection.JavaConversions._
import javax.imageio.ImageWriter
import javax.imageio.ImageIO
import org.rogach.scallop.exceptions.ScallopException
import java.io.IOException
import java.util.zip.GZIPInputStream
import java.io.FileInputStream
import java.util.zip.ZipException
import java.io.PipedOutputStream
import java.io.PipedInputStream
class Conf(args: Array[String]) extends ScallopConf(args) {
version("auto2up 0.1")
banner("""Usage: auto2up ... input
|auto2up 2-ups PDFs in a slightly smart way.
|The input can be PS or Gzip'd PS if GhostScript's ps2pdf is available in the PATH.
|Options:
|""".stripMargin)
footer("\n72 points is 1 inch. 1/8 inch is 9 points.\n")
mainOptions = Seq(outputFilename)
val inputFilename = trailArg[String]("input", descr = "Input filename (really required, BUG)", required = false)
val outputFilename = opt[String]("output", descr = "Output filename",
default = inputFilename.get.map(outputFilenameForFilename))
val drawBox = opt[Boolean]("draw-box", 'b', descr = "Draw boxes around placed pages", default = Some(false))
val inkThreshold = opt[Float]("ink-threshold", 't',
descr = "The gray level to consider 'ink' on the page. (1.0 is white, 0.0 is full black)",
default = Some(0.95f))
val dpi = opt[Float]("dpi", 'd',
descr = "Rendering resolution used for analysis in DPI.",
default = Some(60f))
val inkMargin = opt[Float]("ink-margin", 'n',
descr = "Margin to leave around ink when cropping in points.",
default = Some(3f))
val shortEdgeMargin = opt[Float]("short-edge-margin", 's',
descr = "Margin on short paper edge in points.",
default = Some(9f))
val longEdgeMargin = opt[Float]("long-edge-margin", 'l',
descr = "Margin on long paper edge in points.",
default = Some(9f * 4))
val interMargin = opt[Float]("inter-margin", 'i',
descr = "Gap between pages placed on one sheet in points.",
default = Some(9f / 2))
val recenter = toggle("recenter",
descrYes = "Recenter all pages by cropping each page based on it's own ink box.",
descrNo = "Crop all pages in the exact same way based on the combined ink box.",
default = Some(false))
val verbose = tally("verbose", descr = "Increase verbosity")
val quiet = tally("quiet", descr = "Decrease verbosity")
def verbosity = verbose() - quiet()
val help = opt[Boolean]("help", descr = "Show this help", default = Some(false))
verify()
/** Remove the extension from a filename and replace it with "-2up.pdf".
*/
def outputFilenameForFilename(fn: String): String = {
val nameStart = fn.lastIndexOf(File.separator) max 0
def negNone(i: Int): Option[Int] = if (i < 0) None else Some(i)
val extStart = negNone(fn.lastIndexOf(".pdf")).orElse(negNone(fn.lastIndexOf(".ps"))).orElse(negNone(fn.lastIndexOf(".")))
val sansExt = extStart.map(fn.substring(0, _)).getOrElse(fn)
sansExt + "-2up.pdf"
}
}
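/* Illustrative sketch (not part of the original source): expected results of
 * Conf#outputFilenameForFilename for a few made-up filenames, following the
 * extension-stripping rules above.
 */
object OutputFilenameExamples {
  def main(args: Array[String]): Unit = {
    val conf = new Conf(Array("paper.pdf"))
    println(conf.outputFilenameForFilename("paper.pdf")) // paper-2up.pdf
    println(conf.outputFilenameForFilename("notes.ps"))  // notes-2up.pdf
    println(conf.outputFilenameForFilename("plain"))     // plain-2up.pdf
  }
}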
object Main {
def main(args: Array[String]): Unit = {
try {
val conf = new Conf(args)
if (!conf.help() && conf.inputFilename.isDefined) {
main(conf)
} else {
conf.printHelp()
}
} catch {
case ScallopException(msg) ⇒
println(msg)
}
}
def main(conf: Conf): Unit = {
implicit val _conf = conf
// Extract margin config for later use
val shortEdgeMargin = conf.shortEdgeMargin()
val longEdgeMargin = conf.longEdgeMargin()
val interMargin = conf.interMargin()
// The output page size
val pageSize = new PDRectangle(PDRectangle.LETTER.getHeight, PDRectangle.LETTER.getWidth)
// The amount of the page we will actually use.
val usedSize = new PDRectangle(pageSize.getWidth - shortEdgeMargin * 2 - interMargin, pageSize.getHeight - longEdgeMargin * 2)
// Load/open PDFs
val inputFile = new File(conf.inputFilename())
val outputFile = new File(conf.outputFilename())
if (!(inputFile.isFile() && inputFile.canRead()))
throw new IOException(s"Input file not readable: $inputFile")
if (outputFile.exists() && (!outputFile.canWrite() || !outputFile.isFile()))
throw new IOException(s"Output file not writable (check permissions and what is there now): $outputFile")
if (outputFile.exists())
trace(-1, s"$outputFile already exists. Overwriting.")
trace(0, s"Input file: $inputFile")
for {
in ← managed(loadInput(inputFile))
out ← managed(new PDDocument())
} {
// Setup helper classes
val layerUtility = new LayerUtility(out)
implicit val renderer = new PDFRenderer(in)
// Get the ink boxes of each page
val inkBoxes = for (pageNo ← 0 until in.getNumberOfPages) yield findInkBox(pageNo)
// Compute the largest SIZE that can contain any of the boxes.
// If we are not recentering compute the union of all the boxes instead.
val inkBoxSize = inkBoxes.map(r ⇒ if (conf.recenter()) r.createRetranslatedRectangle() else r).reduce(unionRect)
// Compute the scale needed to fit the input pages perfectly in the width and height
val scaleForWidth = usedSize.getWidth / (inkBoxSize.getWidth * 2)
val scaleForHeight = usedSize.getHeight / inkBoxSize.getHeight
// Set scale to the minimum of the two so everything will fit
val scale = scaleForHeight min scaleForWidth
trace(0, s"Scale: ${(scale * 100).formatted("%.1f")}")
// Compute the margins needed to horizontally center everything properly for pages of size inkBoxSize
val shortEdgeCenteringMargin = (pageSize.getWidth - inkBoxSize.getWidth * 2 * scale) / 3
val interCenteringMargin = (pageSize.getWidth - inkBoxSize.getWidth * 2 * scale) / 3
// Iterate over pairs of pages
for (pageNo ← 0 until in.getNumberOfPages by 2) {
// Create and setup the output page
val page = new PDPage()
page.setMediaBox(pageSize)
out.addPage(page)
// Compute the actual ink box for this pair of pages based on the pair itself
val pairInkBox = if (pageNo + 1 < in.getNumberOfPages)
unionRect(inkBoxes(pageNo), inkBoxes(pageNo + 1))
else
inkBoxes(pageNo)
val inkBox = if (conf.recenter()) pairInkBox else inkBoxSize
// Vertically centering margin
val longEdgeCenteringMargin = (pageSize.getHeight - inkBox.getHeight * scale) / 2
// Setup the drawing context for this page
for (contentStream ← managed(new PDPageContentStream(out, page, PDPageContentStream.AppendMode.APPEND, false))) {
// Iterate over the two input pages
for (subPage ← 0 to 1 if pageNo + subPage < in.getNumberOfPages) {
val n = pageNo + subPage
// Import the page as a Form XObject
val embPage = layerUtility.importPageAsForm(in, n)
contentStream.saveGraphicsState()
// Compute the transform for this page
val pageTransform = new Matrix()
// Translate to the corner of the page space
pageTransform.translate(shortEdgeCenteringMargin, longEdgeCenteringMargin)
// Translate over by the inter-margin and the scaled page width based on which sub-page we are rendering.
pageTransform.translate(subPage * (interCenteringMargin + inkBoxSize.getWidth * scale), 0)
// Scale it
pageTransform.scale(scale, scale)
// Translate so the corner of the ink on the page will be at 0,0
pageTransform.translate(-inkBox.getLowerLeftX, -inkBox.getLowerLeftY)
contentStream.transform(pageTransform)
// Draw clipped page
contentStream.saveGraphicsState()
markRectangle(contentStream, inkBox)
contentStream.clip()
// Draw the input page to the output
contentStream.drawForm(embPage)
contentStream.restoreGraphicsState()
// Draw the box if requested. On top of page without clipping.
if (conf.drawBox()) {
markRectangle(contentStream, inkBox)
contentStream.closeAndStroke()
}
contentStream.restoreGraphicsState()
}
}
}
trace(0, s"Output file: $outputFile")
out.save(outputFile)
}
}
val degrees: Float = (Math.PI / 180).toFloat
/** Compute the union of two axis-aligned rectangles as a new AA rectangle.
*/
def unionRect(a: PDRectangle, b: PDRectangle): PDRectangle = {
val r = new PDRectangle
r.setLowerLeftX(a.getLowerLeftX min b.getLowerLeftX)
r.setLowerLeftY(a.getLowerLeftY min b.getLowerLeftY)
r.setUpperRightX(a.getUpperRightX max b.getUpperRightX)
r.setUpperRightY(a.getUpperRightY max b.getUpperRightY)
r
}
/** Open inputFile as a PDF converting from PS if needed.
*/
def loadInput(inputFile: File) = {
try {
PDDocument.load(inputFile)
} catch {
case _: IOException ⇒ {
// Assume the PDF parse failed
val psStream = try {
new GZIPInputStream(new FileInputStream(inputFile))
} catch {
case _: ZipException ⇒ {
new FileInputStream(inputFile)
}
}
import scala.sys.process._
val pipeOutput = new PipedOutputStream()
val pipeInput = new PipedInputStream(pipeOutput)
val proc = "ps2pdf - -" #< psStream #> pipeOutput run ()
PDDocument.load(pipeInput)
}
}
}
/** Compute the AABB of the non-white areas on the page.
*/
def findInkBox(pageNo: Int)(implicit conf: Conf, renderer: PDFRenderer) = {
val dpi = conf.dpi()
val marginOfError = (dpi / 72 * conf.inkMargin()) max 1
val threshold = 255 * conf.inkThreshold()
val img = renderer.renderImageWithDPI(pageNo, dpi, ImageType.GRAY)
//ImageIO.write(img, "PNG", new File("tmp.png"))
//val pdimg = LosslessFactory.createFromImage(out, img)
val rast = img.getData()
val pixelBuf = new Array[Int](rast.getNumBands)
var maxX = Float.MinValue
var maxY = Float.MinValue
var minX = Float.MaxValue
var minY = Float.MaxValue
for {
x ← 0 until rast.getWidth
y ← 0 until rast.getHeight
v = rast.getPixel(x, y, pixelBuf)(0)
if v < threshold
} {
//println(x,y, v)
maxX = maxX max x
maxY = maxY max y
minX = minX min x
minY = minY min y
}
maxX = (maxX + marginOfError) min rast.getWidth
maxY = (maxY + marginOfError) min rast.getHeight
minX = (minX - marginOfError) max 0
minY = (minY - marginOfError) max 0
val scale = 72 / dpi
val r = new PDRectangle
// Y is flipped between raster and PDF
r.setLowerLeftX(minX * scale)
r.setLowerLeftY((rast.getHeight - maxY) * scale)
r.setUpperRightX(maxX * scale)
r.setUpperRightY((rast.getHeight - minY) * scale)
r
}
/** Outline a rectangle to the content stream using moveTo/lineTo.
*/
def markRectangle(contentStream: PDPageContentStream, box: PDRectangle) = {
contentStream.moveTo(box.getLowerLeftX, box.getLowerLeftY)
contentStream.lineTo(box.getLowerLeftX, box.getUpperRightY)
contentStream.lineTo(box.getUpperRightX, box.getUpperRightY)
contentStream.lineTo(box.getUpperRightX, box.getLowerLeftY)
contentStream.lineTo(box.getLowerLeftX, box.getLowerLeftY)
}
def trace(level: Int, msg: ⇒ String)(implicit conf: Conf) {
if (level <= conf.verbosity)
println(msg)
}
}
|
arthurp/auto2up
|
src/main/scala/org/singingwizard/auto2up/Main.scala
|
Scala
|
gpl-3.0
| 12,026
|
package ru.biocad.ig.alicont.conts.affine
import ru.biocad.ig.alicont.algorithms.affine.GlobalAlignment
import ru.biocad.ig.alicont.conts.AffineAlicont
/**
* Created with IntelliJ IDEA.
* User: pavel
* Date: 27.11.13
* Time: 23:32
*/
class AlicontGlobal(maxheight : Int, query : String, gap_open : Double, gap_ext : Double, score_matrix : Array[Array[Double]])
extends AffineAlicont(maxheight : Int, query : String, gap_open : Double, gap_ext : Double , score_matrix : Array[Array[Double]]) {
def push(s : String) : Unit = {
_strings.push(s)
GlobalAlignment.extendMatrix(s, _query, _gap_open, _gap_ext, _score,
horizontal_matrix = _horizontalMatrix,
vertical_matrix = _verticalMatrix,
substitution_matrix = _substitutionMatrix,
matrix = _matrix)
}
def alignment() : (Double, (String, String)) =
GlobalAlignment.traceback(target, _query,
horizontal_matrix = _horizontalMatrix,
vertical_matrix = _verticalMatrix,
substitution_matrix = _substitutionMatrix,
matrix = _matrix)
}
|
zmactep/igcat
|
lib/ig-alicont/src/main/scala/ru/biocad/ig/alicont/conts/affine/AlicontGlobal.scala
|
Scala
|
bsd-2-clause
| 1,255
|
/*
* Copyright 2020 Spotify AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.spotify.scio.redis.instances
import com.spotify.scio.coders.Coder
import com.spotify.scio.redis.types._
trait CoderInstances {
implicit def appendCoder[T: Coder: RedisType]: Coder[Append[T]] = Coder.gen[Append[T]]
implicit def setCoder[T: Coder: RedisType]: Coder[Set[T]] = Coder.gen[Set[T]]
implicit def incrByCoder[T: Coder: RedisType]: Coder[IncrBy[T]] = Coder.gen[IncrBy[T]]
implicit def decrByCoder[T: Coder: RedisType]: Coder[DecrBy[T]] = Coder.gen[DecrBy[T]]
implicit def sAddCoder[T: Coder: RedisType]: Coder[SAdd[T]] = Coder.gen[SAdd[T]]
implicit def lPushCoder[T: Coder: RedisType]: Coder[LPush[T]] = Coder.gen[LPush[T]]
implicit def rPushCoder[T: Coder: RedisType]: Coder[RPush[T]] = Coder.gen[RPush[T]]
implicit def pfAddCoder[T: Coder: RedisType]: Coder[PFAdd[T]] = Coder.gen[PFAdd[T]]
implicit def zAddCoder[T: Coder: RedisType]: Coder[ZAdd[T]] = Coder.gen[ZAdd[T]]
private[this] def coders: Map[Int, Coder[_]] = Map(
1 -> appendCoder[String],
2 -> appendCoder[Array[Byte]],
3 -> setCoder[String],
4 -> setCoder[Array[Byte]],
5 -> incrByCoder[String],
6 -> incrByCoder[Array[Byte]],
7 -> decrByCoder[String],
8 -> decrByCoder[Array[Byte]],
9 -> sAddCoder[String],
10 -> sAddCoder[Array[Byte]],
11 -> lPushCoder[String],
12 -> lPushCoder[Array[Byte]],
13 -> rPushCoder[String],
14 -> rPushCoder[Array[Byte]],
15 -> pfAddCoder[String],
16 -> pfAddCoder[Array[Byte]],
17 -> zAddCoder[String],
18 -> zAddCoder[Array[Byte]]
)
implicit def redisMutationCoder[T <: RedisMutation]: Coder[T] =
Coder.disjunction[T, Int]("RedisMutation", coders.asInstanceOf[Map[Int, Coder[T]]]) {
case RedisMutation(_: Append[String @unchecked], RedisType.StringRedisType) => 1
case RedisMutation(_: Append[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 2
case RedisMutation(_: Set[String @unchecked], RedisType.StringRedisType) => 3
case RedisMutation(_: Set[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 4
case RedisMutation(_: IncrBy[String @unchecked], RedisType.StringRedisType) => 5
case RedisMutation(_: IncrBy[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 6
case RedisMutation(_: DecrBy[String @unchecked], RedisType.StringRedisType) => 7
case RedisMutation(_: DecrBy[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 8
case RedisMutation(_: SAdd[String @unchecked], RedisType.StringRedisType) => 9
case RedisMutation(_: SAdd[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 10
case RedisMutation(_: LPush[String @unchecked], RedisType.StringRedisType) => 11
case RedisMutation(_: LPush[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 12
case RedisMutation(_: RPush[String @unchecked], RedisType.StringRedisType) => 13
case RedisMutation(_: RPush[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 14
case RedisMutation(_: PFAdd[String @unchecked], RedisType.StringRedisType) => 15
case RedisMutation(_: PFAdd[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 16
case RedisMutation(_: ZAdd[String @unchecked], RedisType.StringRedisType) => 17
case RedisMutation(_: ZAdd[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) => 18
}
}
|
spotify/scio
|
scio-redis/src/main/scala/com/spotify/scio/redis/instances/CoderInstances.scala
|
Scala
|
apache-2.0
| 4,025
|
package org.finra.datagenerator.scaffolding.messaging.config
import org.finra.datagenerator.scaffolding.messaging.response.Response
import org.finra.datagenerator.scaffolding.messaging.{MessageContext, Replies}
/**
* Created by dkopel on 8/22/16.
*/
/**
* Used to define the configuration of each intention.
 * If `waitForReplies()` is false then the function is fire and forget.
 * If the predicate is found to be true then the function continues to block.
 * If there are replies then the `apply()` method is invoked with the collected
 * replies. The return value computed from the replies is passed back to the initial calling `Action`.
*
*/
trait ResponseConfiguration {
def test(context: MessageContext): Boolean
def apply(replies: Replies): Response[_]
def waitForReplies: Boolean
val body: Any
var response: Response[_]
}
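/* Illustrative sketch (not part of the original file): a fire-and-forget configuration.
 * Because `waitForReplies` is false the caller never blocks, so `apply` is not expected
 * to be called with collected replies. The class name is made up for the example.
 */
class FireAndForgetConfiguration(val body: Any) extends ResponseConfiguration {
  def test(context: MessageContext): Boolean = false
  def apply(replies: Replies): Response[_] = response
  def waitForReplies: Boolean = false
  var response: Response[_] = _
}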
|
FINRAOS/DataGenerator
|
rubber-scaffolding/rubber-commons/src/main/scala/org/finra/datagenerator/scaffolding/messaging/config/ResponseConfiguration.scala
|
Scala
|
apache-2.0
| 848
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp.
package scala
object Product2 {
def unapply[T1, T2](x: Product2[T1, T2]): Option[Product2[T1, T2]] =
Some(x)
}
/** Product2 is a Cartesian product of 2 components.
*/
trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Any with Product {
/** The arity of this product.
* @return 2
*/
override def productArity: Int = 2
/** Returns the n-th projection of this product if 0 <= n < productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
* @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException if `n` is out of range (n < 0 || n >= 2).
*/
@throws(classOf[IndexOutOfBoundsException])
override def productElement(n: Int): Any = n match {
case 0 => _1
case 1 => _2
case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 1)")
}
/** A projection of element 1 of this Product.
* @return A projection of element 1.
*/
def _1: T1
/** A projection of element 2 of this Product.
* @return A projection of element 2.
*/
def _2: T2
}
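/* Illustrative sketch (not part of the original source): Tuple2 is the canonical Product2,
 * so the projections and productElement behaviour documented above can be observed on a pair.
 */
object Product2Example {
  def main(args: Array[String]): Unit = {
    val p: Product2[Int, String] = (1, "one")
    println(p._1)                // 1
    println(p._2)                // one
    println(p.productElement(0)) // 1
    println(p.productArity)      // 2
  }
}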
|
scala/scala
|
src/library/scala/Product2.scala
|
Scala
|
apache-2.0
| 1,573
|
package scrabble
import scala.util.{ Try, Success, Failure }
import scala.util.Failure
import scalaz.NonEmptyList
import org.specs2.matcher.MatchResult
class MoveTest extends ScrabbleTest {
def withGameAndPositions(game: Option[Game], placed: Option[NonEmptyList[(Pos, Tile)]])(
behaviour: ValidInputPlaceLettersMove => Unit) = {
game must beSome
placed must beSome
placed foreach {
place =>
game foreach {
game =>
val valid = PlaceLettersMove(game, place).validate
valid.toOption must not beNone
PlaceLettersMove(game, place).validate foreach behaviour
}
}
}
def builtToStr(lists: List[List[(Pos, Square, Tile)]]): List[String] = lists.map { list =>
list.map { case (pos, sq, letter) => letter.letter }.mkString
}
val playedGame = game.flatMap { game =>
crossedWords.map {
words =>
game.copy(board = words, moves = 1)
}
}
val gibberishPlace = {
addPlaceLists(toPlace("T", false, pos(6, 4)), toPlace("ESTS", false, pos(6, 6)))
}
val ravinePlaced = {
blankGame flatMap {
game =>
toPlace("ravine", true, pos(8, 8)) flatMap {
place =>
PlaceLettersMove(game, place).validate flatMap (_.makeMove) toOption
}
}
}
val coversTwoBonuses: Option[ValidInputPlaceLettersMove] = {
val place = addPlaceLists(toPlace("ven", false, pos(11, 5)), toPlace(
"son", false, pos(11, 9)))
val placed = pos(11, 9) flatMap {
pos =>
safeUpdateTile(place, 3, BlankLetter('S'))
}
placed flatMap {
placed =>
ravinePlaced flatMap {
game1 =>
PlaceLettersMove(game1, placed).validate.toOption
}
}
}
coversTwoBonuses must not beNone
val modifiedPlayer = playedGame flatMap (_.currentPlayer.map(_.replaceLetters(toLetters("tory"))))
val modifiedGame = {
modifiedPlayer flatMap {
player => playedGame map (g => g.copy(players = g.players.updated(g.playersMove, player)))
}
}
def checkBuiltWords(game: Option[Game], place: Option[NonEmptyList[PosTile]], shouldContain: List[String]) = {
withGameAndPositions(game, place) {
move =>
move.formedWords.toOption must not beNone
move.formedWords foreach {
built =>
val words = builtToStr(built.getWords)
words must containAllOf(shouldContain)
words must have size shouldContain.size
}
}
}
"a move should" should {
"fail if the first move does not intersect with the star square" in {
val place = toPlace("test", true, pos(11, 2))
withGameAndPositions(game, place)(_.makeMove must beEqualTo(Failure(FirstMovePositionWrong(0))))
}
"not place letters on top of occupied squares" in {
val place = toPlace("hello", true, pos(3, 5))
place must beSome
playedGame must beSome
playedGame foreach {
game =>
place foreach {
place =>
PlaceLettersMove(game, place).validate must be equalTo (Failure(SquareOccupiedClientError()))
}
}
// withGameAndPositions(playedGame, place)(_.makeMove must beEqualTo(Failure(SquareOccupiedClientError(6))))
}
"not place letters that the player does not have" in {
val place = toPlace("tone", true, pos(8, 3))
withGameAndPositions(modifiedGame, place)(_.makeMove must beEqualTo(Failure(playerDoesNotHaveLettersClientError(7))))
}
"fail if the word is not attached to an existing word" in {
val place = toPlace("test", true, pos(1, 1))
withGameAndPositions(playedGame, place)(_.makeMove must beEqualTo(Failure(NotAttachedToWord(2))))
}
"build multiple words from letters placed adjacent to other squares from horizontally placed letters" in {
val place = addPlaceLists(toPlace("o", true, pos(6, 6)), toPlace("e", true, pos(8, 6)))
checkBuiltWords(playedGame, place, List("TO", "ORE", "RE"))
}
"build multiple words from letters placed adjacent to other squares from vertically placed letters " in {
checkBuiltWords(playedGame, gibberishPlace, List("TTESTS", "TC", "ER", "SE", "TS"))
}
"extend an existing word horizontally" in {
val place = toPlace("tares", true, pos(8, 8))
checkBuiltWords(playedGame, place, "STARES" :: Nil)
}
"extend an existing word vertically" in {
val place = toPlace("sdf", false, pos(7, 9))
checkBuiltWords(playedGame, place, "SCORESSDF" :: Nil)
}
"extend an existing word on the left and right" in {
val place = addPlaceLists(toPlace("SM", true, pos(1, 5)), toPlace("S", true, pos(10, 5)))
checkBuiltWords(playedGame, place, "SMHISTORYS" :: Nil)
}
"extend an existing word above and below" in {
val place = addPlaceLists(toPlace("SM", false, pos(7, 1)), toPlace("ST", false, pos(7, 9)))
checkBuiltWords(playedGame, place, "SMSCORESST" :: Nil)
}
def checkMisplaced(game: Option[Game], place: Option[NonEmptyList[PosTile]], errorAt: (Int, Int)) = {
withGameAndPositions(game, place) {
move =>
move.formedWords must beEqualTo(Failure(MisPlacedLetters(errorAt._1, errorAt._2)))
}
}
"warn about misplaced letters" in {
val place = toPlace("test", true, pos(1, 1))
val first = safeUpdateTile(place, 1, Pos.at(3, 2), 'C')
val place2 = toPlace("test", true, pos(1, 1))
val second = safeUpdateTile(place2, 3, pos(5, 1), 'C')
val toPlace3 = addPlaceLists(toPlace("T", true, pos(2, 5)), toPlace("fd", true, pos(11, 5)))
// Square placed outside the 'line' (i.e. above)
checkMisplaced(playedGame, first, (3, 1))
// linear, but missing a square to complete the 'line'
checkMisplaced(playedGame, second, (3, 1))
// Start to complete a word at one side, but misplace letter at the other
checkMisplaced(playedGame, toPlace3, (2, 5))
}
"reject invalid words" in {
withGameAndPositions(playedGame, gibberishPlace) {
move =>
val words = "TTESTS" :: "TC" :: "SE" :: "TS" :: Nil
move.makeMove.get must throwA[WordsNotInDictionary].like {
case e: WordsNotInDictionary =>
e.words must have size 4 // 'RE' should be the only valid word
e.words must containAllOf(words)
}
}
}
def checkScore(game: Option[Game], place: Option[NonEmptyList[PosTile]], totalScore: Int) = {
withGameAndPositions(game, place) {
move =>
val score = move.score
score.toOption must not beNone
score.foreach {
score => score.overAllScore must beEqualTo(totalScore)
}
}
}
"calculate scores correctly" in {
// normal
val placeNormal = addPlaceLists(toPlace("wa", false, pos(5, 3)), toPlace("p", false, pos(5, 6)))
checkScore(playedGame, placeNormal, 9)
// double letter
val placeDoubleLetter = toPlace("tyle", true, pos(8, 3))
checkScore(playedGame, placeDoubleLetter, 12)
// Double word
val doublePlace = toPlace("stair", true, pos(2, 3))
checkScore(playedGame, doublePlace, 12)
// triple letter
val tripleLetterPlace = toPlace("ale", true, pos(9, 6))
checkScore(playedGame, tripleLetterPlace, 10)
// triple word
val tripleWordPlace = toPlace("TAO", false, pos(8, 1))
checkScore(playedGame, tripleWordPlace, 11)
// Multiple words
//@TODO: refactor
val playedFurther = game flatMap {
game =>
val place = addPlaceLists(toPlace("wa", false, pos(5, 3)), toPlace("p", false, pos(5, 6)))
crossedWords flatMap {
words =>
placeSquares(words, place) map {
ohgodthemeanderingmapsareover =>
game.copy(board = ohgodthemeanderingmapsareover)
}
}
}
// val playedFurther = furtherGame(playedGame, addPlaceLists(toPlace("wa", false, pos(5, 3)), toPlace("p", false, pos(5, 6))))
val multipleWordPlace = toPlace("YA", false, pos(6, 2))
withGameAndPositions(playedFurther, multipleWordPlace) {
move =>
val score = move.score
score.toOption must not beNone
score foreach {
score =>
score.overAllScore must beEqualTo(19)
score.individualScores must contain("YA" -> 13)
score.individualScores must contain("WAS" -> 6)
}
}
// Covering multiple bonus squares
coversTwoBonuses foreach { move =>
val score = move.score.toOption
score.toOption must not beNone
score foreach (_.overAllScore must beEqualTo(36))
}
}
"place one letter" in {
val game1 = furtherGame(blankGame, toPlace("ravine", true, pos(8, 8)))
val place = toPlace("O", true, pos(8, 7))
checkBuiltWords(game1, place, "OR" :: Nil)
// @TODO: Horizontal single letter
val placeHor = toPlace("O", true, pos(7, 8))
checkBuiltWords(game1, placeHor, "ORAVINE" :: Nil)
// @TODO: Build multiple words
}
"handle blank letters" in {
// ENVO_NIL
coversTwoBonuses foreach {
move =>
val words = move.formedWords
words.toOption must beSome
words foreach {
wrds =>
builtToStr(wrds.getWords) must contain("VENISON")
}
}
}
val predictableGame = {
val place = toPlace("lurid", true, pos(8, 8))
furtherGame(predictableLetterbagGame, place)
}
predictableGame must beSome
"replace the letters that a player played" in {
predictableGame foreach {
predictableGame =>
val player = predictableGame.players.get(0)
player must beSome
player foreach {
player =>
player.letters must containAllOf(toLetters("SV"))
player.letters must containAllOf(toLetters("EIYUR"))
}
}
}
"transition the game state correctly" in {
predictableGame foreach {
predictableGame =>
predictableGame.moves must beEqualTo(1)
pos(7, 8) foreach {
pos =>
predictableGame.board.lettersRight(pos).map { case (pos, sq, let) => let.letter }.mkString must beEqualTo("LURID")
}
predictableGame.bag.lettersAsString must beEqualTo(
"ADYEICBLEDHMSIXNFERAIWOANETGAELGFIUT_TJHAI_BDONENOECTRIEEREKOAZPVETONSASURAPMNOTO")
predictableGame.currentPlayer must beSome
predictableGame.getPlayer(0) must beSome
predictableGame.getPlayer(0).foreach { _.score must be equalTo (16) }
predictableGame.currentPlayer foreach { _.letters.map(_.letter).mkString must beEqualTo("IGQAWLO") }
predictableGame.playersMove must beEqualTo(1)
}
}
"handle exchange moves correctly" in {
predictableGame foreach {
game =>
val move = ExchangeMove(game, toLetters("IGQ"))
val moveMade = move.makeMove
moveMade must not be equalTo(Failure(PlayerDoesNotHaveLettersToExchange()))
moveMade must not be equalTo(Failure(MustExchangeSameNumberofLetters()))
moveMade.toOption must not beNone
moveMade foreach {
newGame =>
val player = newGame.players get (game.playersMove)
player must not beNone
player foreach {
player =>
val test = player.letters map (c => c.letter) mkString
test must beEqualTo("ADYAWLO")
}
newGame.moves must beEqualTo(game.moves + 1)
newGame.bag.letters.intersect(toLetters("IGQ") ::: game.bag.letters.drop(3)).size must beEqualTo(newGame.bag.size)
}
}
}
"handle pass moves correctly" in {
val passed = game.foreach {
game =>
val pass = PassMove(game).makeMove
pass.toOption must beSome
pass.foreach {
passGame =>
passGame.playersMove must beEqualTo(1)
passGame.consecutivePasses must beEqualTo(1)
}
}
}
"not allow a move on a finished game" in {
pending
}
}
}
|
Happy0/scalascrabble
|
src/test/scala/MoveTest.scala
|
Scala
|
gpl-2.0
| 12,399
|
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.geohash
import com.vividsolutions.jts.geom.{Coordinate, GeometryFactory, Point, PrecisionModel}
import org.locationtech.geomesa.utils.geohash.GeoHashIterator._
import scala.collection.mutable
object GeoHashIterator {
val geometryFactory = new GeometryFactory(new PrecisionModel, 4326)
/**
* Given points, return the two GeoHashes that bound the rectangle that are suitable for
* iteration.
*
* @param points a collection of points for which a minimum-bounding-rectangle (MBR) is sought
* @param precision the precision, in bits, of the GeoHashes sought
* @param radiusInMeters the buffer distance in meters
* @return the lower-left and upper-right corners of the bounding box, as GeoHashes at the specified precision
*/
def getBoundingGeoHashes(points: Traversable[Point],
precision: Int,
radiusInMeters: Double): (GeoHash, GeoHash) = {
val (lonMin, lonMax, latMin, latMax) =
points.foldLeft((Long.MaxValue, Long.MinValue, Long.MaxValue, Long.MinValue))(
{case ((xMin, xMax, yMin, yMax), point) => {
val Array(y, x) = GeoHash.gridIndicesForLatLong(GeoHash.apply(point, precision))
(math.min(xMin, x), math.max(xMax, x), math.min(yMin, y), math.max(yMax, y))
}})
val ptLL = {
val gh = GeoHash.composeGeoHashFromBitIndicesAndPrec(latMin, lonMin, precision)
val point = geometryFactory.createPoint(new Coordinate(gh.x, gh.y))
val left = VincentyModel.moveWithBearingAndDistance(point, -90, radiusInMeters)
VincentyModel.moveWithBearingAndDistance(left, 180, radiusInMeters)
}
val ptUR = {
val gh = GeoHash.composeGeoHashFromBitIndicesAndPrec(latMax, lonMax, precision)
val point = geometryFactory.createPoint(new Coordinate(gh.x, gh.y))
val right = VincentyModel.moveWithBearingAndDistance(point, 0, radiusInMeters)
VincentyModel.moveWithBearingAndDistance(right, 90, radiusInMeters)
}
(GeoHash.apply(ptLL, precision), GeoHash.apply(ptUR, precision))
}
/**
* Longitude ranges over the entire circumference of the Earth, while latitude only ranges over half.
* Hence, the precision (in meters-per-bit) is twice as refined for latitude as it is for longitude.
* (The first bit latitude represents ~10,000 Km, while the first bit longitude represents ~20,000 Km.)
*
* In addition, latitude spans a slightly smaller range due to the asymmetry of the Earth.
*
* @param nearLatitude the latitude near which the longitude precision is evaluated
* @param isLatitude whether the dimension requested is latitude
* @param dimensionBits the number of bits used
* @return how many meters each bit of this dimension represents
*/
def getDimensionPrecisionInMeters(nearLatitude: Double,
isLatitude: Boolean,
dimensionBits: Int): Double = {
if (isLatitude) 20004000.0 / (1 << dimensionBits).asInstanceOf[Double]
else {
val radiusAtEquator = 40075160.0
val radiusNearLatitude = radiusAtEquator * Math.cos(nearLatitude * Math.PI / 180.0)
val circumferenceNearLatitude = radiusNearLatitude * 2.0 * Math.PI
circumferenceNearLatitude / (1 << dimensionBits).toDouble
}
}
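  /* Worked example (illustrative, not from the original source): with 20 bits of latitude,
   * the formula above gives 20004000.0 / (1 << 20), i.e. roughly 19.1 metres per cell.
   */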
// TODO: none of the utility methods below this point are used within acc-geo; can we delete them?
/**
* Given a radius in meters, what is the worst-case size in degrees that it might represent?
* Note: This is almost entirely useless as a measure.
*
* @param meters a distance in meters
* @return a distance in degrees
*/
def convertRadiusInMetersToDegrees(meters: Double): Double = {
val point = geometryFactory.createPoint(new Coordinate(67.5, 35.0))
getSegmentLengthInDegrees(point,
VincentyModel.moveWithBearingAndDistance(point, 45.0, meters))
}
/**
* Utility storage for converting degrees to meters.
*/
private final val mapDegreesToMeters = mutable.Map[Double, Double]()
private final val precisionDegreesToMeters = 1e5
/**
* Given a radius in degrees, what is a blended-estimate size in meters that it might represent?
* Note: This is almost entirely useless as a measure.
*
* @param degreeRadius a distance in degrees
* @return a distance in meters
*/
def convertRadiusInDegreesToMeters(degreeRadius: Double): Double = {
val degrees = Math.round(degreeRadius * precisionDegreesToMeters) / precisionDegreesToMeters
mapDegreesToMeters.getOrElseUpdate(degrees, convertRadiusInDegreesToMetersViaIntervalHalving(degrees, 45.0))
}
/**
* Given a radius in degrees, what is a blended-estimate size in meters that it might represent?
* Note: This is almost entirely useless as a measure.
*
* @param degrees a distance in degrees
* @return a distance in meters
*/
def convertRadiusInDegreesToMetersViaIntervalHalving(degrees: Double, azimuth: Double): Double = {
val a = geometryFactory.createPoint(new Coordinate(67.5, 35.0))
var minMeters = 0.01
var maxMeters = 10000000.0
var midMeters = 0.5 * (minMeters + maxMeters)
var midDegrees = getSegmentLengthInDegrees(a,
VincentyModel.moveWithBearingAndDistance(a, azimuth, midMeters))
while (Math.abs(midMeters - minMeters) > 0.01) {
if (midDegrees == degrees) return midMeters
if (midDegrees > degrees) {
maxMeters = midMeters
}
else if (midDegrees < degrees) {
minMeters = midMeters
}
midMeters = 0.5 * (minMeters + maxMeters)
midDegrees = getSegmentLengthInDegrees(a,
VincentyModel.moveWithBearingAndDistance(a, azimuth, midMeters))
}
midMeters
}
/**
* Utility function to express the distance between two points in degrees.
* Note: This is almost entirely useless as a measure.
*
* @param a one segment end-point
* @param b one segment end-point
* @return the distance in degrees between these two points; note that this can only be
* an estimate, as horizontal and vertical degrees do not represent equal distances
*/
def getSegmentLengthInDegrees(a: Point, b: Point): Double =
Math.hypot(a.getX - b.getX, a.getY - b.getY)
/**
* Utility function to express the distance between two points in meters.
*
* @param a one segment end-point
* @param b one segment end-point
* @return the distance in meters between these two points
*/
def getSegmentLengthInMeters(a: Point, b: Point): Double =
VincentyModel.getDistanceBetweenTwoPoints(a, b).getDistanceInMeters
}
abstract class GeoHashIterator(latitudeLL: Double,
longitudeLL: Double,
latitudeUR: Double,
longitudeUR: Double,
precision: Int)
extends Iterator[GeoHash] {
private val (llgh, urgh) = {
val ll = geometryFactory.createPoint(new Coordinate(longitudeLL, latitudeLL))
val ur = geometryFactory.createPoint(new Coordinate(longitudeUR, latitudeUR))
getBoundingGeoHashes(Seq(ll, ur), precision, 0.0)
}
val Array(latBitsLL, lonBitsLL) = GeoHash.gridIndicesForLatLong(llgh)
val Array(latBitsUR, lonBitsUR) = GeoHash.gridIndicesForLatLong(urgh)
protected val midLatitude = 0.5 * (llgh.y + urgh.y) // midpoint of the lower-left and upper-right latitudes
val latPrecision = (precision >> 1)
val lonPrecision = latPrecision + (precision % 2)
def latPrecisionInMeters = getDimensionPrecisionInMeters(midLatitude, true, latPrecision)
def lonPrecisionInMeters = getDimensionPrecisionInMeters(midLatitude, false, lonPrecision)
val incLatitudeDegrees = 180.0 / Math.pow(2.0, latPrecision)
val incLongitudeDegrees = 360.0 / Math.pow(2.0, lonPrecision)
def spanBitsLat: Long = latBitsUR - latBitsLL + 1
def spanBitsLon: Long = lonBitsUR - lonBitsLL + 1
// Internal iterator state that IS mutable and IS updated on advance()
private var ghCurrent: GeoHash = null
private var ghPrevious: GeoHash = null
protected var doesHaveNext: Boolean = false
protected var latPosition = latBitsLL
protected var lonPosition = lonBitsLL
def currentPoint = ghCurrent.getPoint
advance()
/**
* Internal method that figures out whether the iterator is finished, and if not, updates the
* current GeoHash and advances the counters.
*
* As a general scheme, we start in the lower-left corner, and iterate in a row-major way
* until we exceed the upper-right corner of the rectangle.
*
* @return whether the iteration is over
*/
protected def advance(): Boolean
/**
* Fetch the current result, and advance to the next (at least internally).
*
* @return the current GeoHash result
*/
@Override
override def next(): GeoHash = {
val gh = ghCurrent
doesHaveNext = advance
gh
}
/**
* Allows the user to query whether there is another GeoHash cell to return.
*
* @return whether there is another cell to return
*/
@Override
override def hasNext: Boolean = doesHaveNext
protected def setCurrentGeoHash(newCurrentGeoHash: GeoHash) {
ghPrevious = ghCurrent
ghCurrent = newCurrentGeoHash
}
protected def getSpanAspectRatioSkew: Double = {
val nx = spanBitsLon.asInstanceOf[Double]
val ny = spanBitsLat.asInstanceOf[Double]
Math.min(nx, ny) / (nx + ny)
}
}
|
tkunicki/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geohash/GeoHashIterator.scala
|
Scala
|
apache-2.0
| 9,856
|
package scommons.client.ui.select
import org.scalajs.dom.window
import scommons.react._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js
import scala.util.{Failure, Success}
case class SearchSelectProps(selected: Option[SelectData],
onLoad: String => Future[List[SelectData]] = _ => Future.successful(Nil),
onChange: Option[SelectData] => Unit = _ => (),
isClearable: Boolean = false,
readOnly: Boolean = false)
object SearchSelect extends ClassComponent[SearchSelectProps] {
private[select] var global: js.Dynamic = window.asInstanceOf[js.Dynamic]
private case class SearchSelectState(isLoading: Boolean = false,
value: String = "",
handleId: Option[js.Any] = None,
options: List[SelectData] = Nil)
protected def create(): ReactClass = createClass[SearchSelectState](
getInitialState = { _ =>
SearchSelectState()
},
render = { self =>
val props = self.props.wrapped
<(SingleSelect())(^.wrapped := SingleSelectProps(
selected = props.selected,
options = self.state.options,
onSelectChange = props.onChange,
isClearable = props.isClearable,
readOnly = props.readOnly,
isSearchable = true,
isLoading = self.state.isLoading,
onInputChange = Some({ value =>
self.state.handleId.foreach { handleId =>
// clear intermediate load schedule
global.clearTimeout(handleId)
}
var handleId: js.Any = 0
handleId = global.setTimeout({ () =>
global.clearTimeout(handleId)
self.setState(s => s.copy(isLoading = true, handleId = None))
val loadValue = self.state.value
props.onLoad(loadValue).onComplete {
case Success(list) if self.state.value == loadValue =>
self.setState(s => s.copy(isLoading = false, options = list))
case Failure(_) if self.state.value == loadValue =>
self.setState(s => s.copy(isLoading = false))
case _ =>
// ignore stale load results
}
}, 750.millis.toMillis.toDouble)
self.setState(s => s.copy(value = value, handleId = Some(handleId)))
})
))()
}
)
}
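/* Illustrative usage sketch (not part of the original file): building the props for a
 * SearchSelect. The loader below is a stand-in for a real backend query and simply
 * returns no options; the object name is made up for the example.
 */
object SearchSelectUsageExample {
  val exampleProps = SearchSelectProps(
    selected = None,
    onLoad = _ => Future.successful(Nil), // a real loader would query a backend with the typed text
    onChange = data => println(s"selected: $data"),
    isClearable = true
  )
  // Rendered like any other wrapped component, mirroring the SingleSelect usage above:
  // <(SearchSelect())(^.wrapped := exampleProps)()
}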
|
viktor-podzigun/scommons
|
ui/src/main/scala/scommons/client/ui/select/SearchSelect.scala
|
Scala
|
apache-2.0
| 2,601
|
package org.http4s
import cats.effect.IO
import fs2.Stream
import fs2.text.utf8Encode
import org.http4s.headers._
class ServerSentEventSpec extends Http4sSpec {
import ServerSentEvent._
def toStream(s: String): Stream[IO, Byte] =
Stream.emit(s).through(utf8Encode)
"decode" should {
"decode multi-line messages" in {
val stream = toStream("""
|data: YHOO
|data: +2
|data: 10
|""".stripMargin('|'))
stream.through(ServerSentEvent.decoder).compile.toVector.unsafeRunSync must_== Vector(
ServerSentEvent(data = "YHOO\\n+2\\n10")
)
}
"decode test stream" in {
val stream = toStream("""
|: test stream
|data: first event
|id: 1
|
|data:second event
|id
|
|data: third event
|""".stripMargin('|'))
//test stream\\n\\ndata: first event\\nid: 1\\n\\ndata:second event\\nid\\n\\ndata: third event\\n")
stream.through(ServerSentEvent.decoder).compile.toVector.unsafeRunSync must_== Vector(
ServerSentEvent(data = "first event", id = Some(EventId("1"))),
ServerSentEvent(data = "second event", id = Some(EventId.reset)),
ServerSentEvent(data = " third event", id = None)
)
}
"fire empty events" in {
val stream = toStream("""
|data
|
|data
|data
|
|data:
|""".stripMargin('|'))
//test stream\\n\\ndata: first event\\nid: 1\\n\\ndata:second event\\nid\\n\\ndata: third event\\n")
stream.through(ServerSentEvent.decoder).compile.toVector.unsafeRunSync must_== Vector(
ServerSentEvent(data = ""),
ServerSentEvent(data = "\\n"),
ServerSentEvent(data = "")
)
}
"ignore single space after colon" in {
val stream = toStream("""
|data:test
|
|data: test
|""".stripMargin('|'))
//test stream\\n\\ndata: first event\\nid: 1\\n\\ndata:second event\\nid\\n\\ndata: third event\\n")
stream.through(ServerSentEvent.decoder).compile.toVector.unsafeRunSync must_== Vector(
ServerSentEvent(data = "test"),
ServerSentEvent(data = "test")
)
}
}
"encode" should {
"be consistent with decode" in prop { sses: Vector[ServerSentEvent] =>
val roundTrip = Stream
.emits(sses)
.covary[IO]
.through(ServerSentEvent.encoder)
.through(ServerSentEvent.decoder)
.compile
.toVector
.unsafeRunSync
roundTrip must_== sses
}
"handle leading spaces" in {
// This is a pathological case uncovered by scalacheck
val sse = ServerSentEvent(" a", Some(" b"), Some(EventId(" c")), Some(1L))
Stream
.emit(sse)
.covary[IO]
.through(ServerSentEvent.encoder)
.through(ServerSentEvent.decoder)
.compile
.last
.unsafeRunSync must beSome(sse)
}
}
"EntityEncoder[ServerSentEvent]" should {
val eventStream: Stream[IO, ServerSentEvent] =
Stream.range(0, 5).map(i => ServerSentEvent(data = i.toString))
"set Content-Type to text/event-stream" in {
Response[IO]().withEntity(eventStream).contentType must beSome(
`Content-Type`(MediaType.`text/event-stream`))
}
"decode to original event stream" in {
val resp = Response[IO]().withEntity(eventStream)
resp.body
.through(ServerSentEvent.decoder)
.compile
.toVector
.unsafeRunSync must_== eventStream.compile.toVector.unsafeRunSync
}
}
}
|
ChristopherDavenport/http4s
|
tests/src/test/scala/org/http4s/ServerSentEventSpec.scala
|
Scala
|
apache-2.0
| 3,507
|
/*
* MIT License
*
* Copyright (c) 2016 Ramjet Anvil
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ramjetanvil.cqrs.experimental
import com.ramjetanvil.cqrs.Core.{CommandHandler, CommandHandlerResult, EventHandler}
import scala.util.Success
object PartialHandlers {
type PartialCommandHandler[TCommand, TState, TEvent] = PartialFunction[(TState, TCommand), CommandHandlerResult[TEvent]]
type PartialEventHandler[TState, TEvent] = PartialFunction[(TState, TEvent), TState]
// TODO Factories don't work because of erasure
// class CommandHandlerFactory[TCommand, TState, TEvent] {
// def handler[T <: TCommand](handler: CommandHandler[T, TState, TEvent]): PartialCommandHandler[TCommand, TState, TEvent] = {
// case (state, command: T) => handler(state, command)
// }
// }
def combineCommandHandlers[TCommand, TState, TEvent](handlers: PartialCommandHandler[TCommand, TState, TEvent]*): CommandHandler[TCommand, TState, TEvent] = {
val liftedHandler = handlers.reduce((h, singleHandler) => {
h.orElse(singleHandler)
}).lift
val emptyResult = Success(Seq.empty[TEvent])
// TODO Warn whenever an unhandled command is being processed
(state, command) => liftedHandler(state, command).getOrElse(emptyResult)
}
// TODO Factories don't work because of erasure
// class EventHandlerFactory[TState, TEvent] {
// def handler[T <: TEvent](handler: EventHandler[TState, T]): PartialEventHandler[TState, TEvent] = {
// case (state, event: T) => handler(state, event)
// }
// }
def combineEventHandlers[TState, TEvent](handlers: PartialEventHandler[TState, TEvent]*): EventHandler[TState, TEvent] = {
val liftedHandler = handlers.reduce((h, singleHandler) => {
h.orElse(singleHandler)
}).lift
// TODO Warn whenever an unhandled event is being processed
(state, event) => liftedHandler(state, event).getOrElse(state)
}
}
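/* Illustrative usage sketch (not part of the original file): combining two partial command
 * handlers into a single total handler. The Cmd/Int/String types are made up for the example,
 * and CommandHandlerResult is assumed to accept Success(Seq(...)), as suggested by the empty
 * result used in combineCommandHandlers above.
 */
object PartialHandlersExample {
  import PartialHandlers._
  sealed trait Cmd
  case object Increment extends Cmd
  case object Reset extends Cmd

  val increments: PartialCommandHandler[Cmd, Int, String] = {
    case (state, Increment) => Success(Seq(s"incremented from $state"))
  }
  val resets: PartialCommandHandler[Cmd, Int, String] = {
    case (_, Reset) => Success(Seq("counter reset"))
  }
  // Commands neither handler covers fall back to an empty event sequence.
  val handler = combineCommandHandlers(increments, resets)
}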
|
RamjetAnvil/padrone
|
server/src/main/scala/com/ramjetanvil/cqrs/experimental/PartialHandlers.scala
|
Scala
|
mit
| 2,953
|
/*
* Copyright (C) 2015 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.environment.batch.storage
import java.io.{ ByteArrayInputStream, File, InputStream }
import java.nio.file.Files
import gridscale._
import org.openmole.core.communication.storage._
import org.openmole.core.workspace._
import org.openmole.plugin.environment.batch.environment.{ BatchEnvironment, AccessControl }
import org.openmole.tool.file._
import org.openmole.tool.stream._
object StorageInterface {
def remote[S: StorageInterface: HierarchicalStorageInterface](s: S, communicationDirectory: String) =
new RemoteStorage {
override def upload(src: File, dest: Option[String], options: TransferOptions)(implicit newFile: TmpDirectory): String = StorageService.uploadInDirectory(s, src, communicationDirectory, options)
override def download(src: String, dest: File, options: TransferOptions)(implicit newFile: TmpDirectory): Unit = StorageService.download(s, src, dest, options)
}
def upload(compressed: Boolean, uploadStream: (() ⇒ InputStream, String) ⇒ Unit)(src: File, dest: String, options: TransferOptions = TransferOptions.default): Unit = {
def fileStream() = src.bufferedInputStream()
if (compressed) {
def compressedFileStream() = src.bufferedInputStream().toGZiped
if (!options.raw) uploadStream(compressedFileStream, dest) else uploadStream(fileStream, dest)
}
else uploadStream(fileStream, dest)
}
def download(compressed: Boolean, downloadStream: (String, InputStream ⇒ Unit) ⇒ Unit)(src: String, dest: File, options: TransferOptions = TransferOptions.default): Unit = {
def downloadFile(is: InputStream) = Files.copy(is, dest.toPath)
if (compressed) {
def uncompressed(is: InputStream) = downloadFile(is.toGZ)
if (!options.raw) downloadStream(src, uncompressed) else downloadStream(src, downloadFile)
}
else downloadStream(src, downloadFile)
}
def isDirectory(name: String) = name.endsWith("/")
}
trait StorageInterface[T] {
def exists(t: T, path: String): Boolean
def rmFile(t: T, path: String): Unit
def upload(t: T, src: File, dest: String, options: TransferOptions = TransferOptions.default): Unit
def download(t: T, src: String, dest: File, options: TransferOptions = TransferOptions.default): Unit
}
trait HierarchicalStorageInterface[T] {
def rmDir(t: T, path: String): Unit
def makeDir(t: T, path: String): Unit
def child(t: T, parent: String, child: String): String
def list(t: T, path: String): Seq[ListEntry]
def parent(t: T, path: String): Option[String]
def name(t: T, path: String): String
}
trait EnvironmentStorage[S] {
def id(s: S): String
def environment(s: S): BatchEnvironment
}
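/* Illustrative sketch (not part of the original file): a minimal StorageInterface instance
 * over a local base directory, using plain java.nio. Compression, TransferOptions and error
 * handling are ignored; this only shows the shape of the type class. The object name is made
 * up for the example.
 */
object LocalStorageExample {
  import java.nio.file.StandardCopyOption
  implicit val local: StorageInterface[File] = new StorageInterface[File] {
    override def exists(base: File, path: String): Boolean = new File(base, path).exists()
    override def rmFile(base: File, path: String): Unit = { new File(base, path).delete(); () }
    override def upload(base: File, src: File, dest: String, options: TransferOptions): Unit = {
      Files.copy(src.toPath, new File(base, dest).toPath, StandardCopyOption.REPLACE_EXISTING)
      ()
    }
    override def download(base: File, src: String, dest: File, options: TransferOptions): Unit = {
      Files.copy(new File(base, src).toPath, dest.toPath, StandardCopyOption.REPLACE_EXISTING)
      ()
    }
  }
}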
|
openmole/openmole
|
openmole/plugins/org.openmole.plugin.environment.batch/src/main/scala/org/openmole/plugin/environment/batch/storage/StorageInterface.scala
|
Scala
|
agpl-3.0
| 3,374
|
package walfie.gbf.raidfinder
import twitter4j._
import walfie.gbf.raidfinder.domain._
import scala.util.Try
object StatusParser {
/** Regexes to match raid request tweets */
val RaidRegexJapanese = "((?s).*)ๅๅ ่
ๅ้๏ผๅๆฆID๏ผ([0-9A-F]+)\\n(.+)\\n?(.*)".r
val RaidRegexEnglish = "((?s).*)I need backup!Battle ID: ([0-9A-F]+)\\n(.+)\\n?(.*)".r
/**
* Regex to get boss level from full name
* e.g., "Lv100 ใชใชใพใฉใใณ" or "Lvl 100 Ozorotter"
*/
val BossRegex = "Lv(?:l )?([0-9]+) (.*)".r
/** The source value for the official Granblue Twitter app */
val GranblueSource =
"""<a href="http://granbluefantasy.jp/" rel="nofollow">ใฐใฉใณใใซใผ ใใกใณใฟใธใผ</a>"""
def isValidName(name: BossName): Boolean = !name.contains("http")
private def isValidUrl(url: String): Boolean = url.isEmpty || url.matches("https?://[^ ]+")
def parse(status: Status): Option[RaidInfo] = status.getText match {
case _ if status.getSource != GranblueSource => None
case RaidRegexJapanese(extraText, raidId, boss, url) if isValidName(boss) && isValidUrl(url) =>
Some(TweetParts(status, extraText, raidId, boss).toRaidInfo(Language.Japanese))
case RaidRegexEnglish(extraText, raidId, boss, url) if isValidName(boss) && isValidUrl(url) =>
Some(TweetParts(status, extraText, raidId, boss).toRaidInfo(Language.English))
case _ => None
}
private case class TweetParts(status: Status, extraText: String, raidId: String, boss: String) {
def toRaidInfo(language: Language): RaidInfo = {
val bossName = boss.trim
val raidTweet = RaidTweet(
tweetId = status.getId,
screenName = status.getUser.getScreenName,
bossName = bossName,
raidId = raidId.trim,
profileImage = status.getUser.getProfileImageURLHttps,
text = extraText.trim,
createdAt = status.getCreatedAt,
language = language
)
val defaultLevel = 0
val bossLevel = bossName match {
case BossRegex(level, name) =>
Try(level.toInt).toOption.getOrElse(defaultLevel)
case _ => defaultLevel
}
val raidBoss = RaidBoss(
name = bossName,
level = bossLevel,
image = getImageFromStatus(status),
lastSeen = status.getCreatedAt,
language = language
)
RaidInfo(raidTweet, raidBoss)
}
}
private def getImageFromStatus(status: Status): Option[RaidImage] = {
status.getMediaEntities.headOption.map(_.getMediaURLHttps)
}
}
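/* Illustrative sketch (not part of the original file): how BossRegex splits a boss name into
 * its level and base name, using only definitions from this object. The object name is made up
 * for the example.
 */
object BossRegexExample {
  def main(args: Array[String]): Unit = {
    "Lvl 100 Ozorotter" match {
      case StatusParser.BossRegex(level, name) => println(s"level=$level, name=$name") // level=100, name=Ozorotter
      case other => println(s"no match: $other")
    }
  }
}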
|
gnawnoraa/GBF-Raider-Copy
|
stream/src/main/scala/walfie/gbf/raidfinder/StatusParser.scala
|
Scala
|
mit
| 2,534
|
package ch.ninecode.model
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.Serializer
import com.esotericsoftware.kryo.io.Input
import com.esotericsoftware.kryo.io.Output
import org.apache.spark.sql.Row
import ch.ninecode.cim.CIMClassInfo
import ch.ninecode.cim.CIMContext
import ch.ninecode.cim.CIMParseable
import ch.ninecode.cim.CIMRelationship
import ch.ninecode.cim.CIMSerializer
/**
* Used to apply user standard names to TopologicalNodes.
*
* Associated with one or more terminals that are normally connected with the bus name. The associated terminals are normally connected by non-retained switches. For a ring bus station configuration, all BusbarSection terminals in the ring are typically associated. For a breaker and a half scheme, both BusbarSections would normally be associated. For a ring bus, all BusbarSections would normally be associated. For a "straight" busbar configuration, normally only the main terminal at the BusbarSection would be associated.
*
* @param IdentifiedObject [[ch.ninecode.model.IdentifiedObject IdentifiedObject]] Reference to the superclass object.
* @param priority Priority of bus name marker for use as topology bus name.
* Use 0 for do not care. Use 1 for highest priority. Use 2 as priority is less than 1 and so on.
* @param ReportingGroup [[ch.ninecode.model.ReportingGroup ReportingGroup]] The reporting group to which this bus name marker belongs.
* @param Terminal [[ch.ninecode.model.ACDCTerminal ACDCTerminal]] The terminals associated with this bus name marker.
* @param TopologicalNode [[ch.ninecode.model.TopologicalNode TopologicalNode]] A user defined topological node that was originally defined in a planning model not yet having topology described by ConnectivityNodes.
* Once ConnectivityNodes have been created they may be linked to user-defined TopologicalNodes using BusNameMarkers.
* @group Topology
* @groupname Topology Package Topology
* @groupdesc Topology An extension to the Core Package that, in association with the Terminal class, models Connectivity, that is the physical definition of how equipment is connected together. In addition it models Topology, that is the logical definition of how equipment is connected via closed switches. The Topology definition is independent of the other electrical characteristics.
*/
final case class BusNameMarker
(
IdentifiedObject: IdentifiedObject = null,
priority: Int = 0,
ReportingGroup: String = null,
Terminal: List[String] = null,
TopologicalNode: String = null
)
extends
Element
{
/**
* Return the superclass object.
*
* @return The typed superclass nested object.
* @group Hierarchy
* @groupname Hierarchy Class Hierarchy Related
* @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes.
*/
override def sup: IdentifiedObject = IdentifiedObject
//
// Row overrides
//
/**
* Return a copy of this object as a Row.
*
* Creates a clone of this object for use in Row manipulations.
*
* @return The copy of the object.
* @group Row
* @groupname Row SQL Row Implementation
* @groupdesc Row Members related to implementing the SQL Row interface
*/
override def copy (): Row =
{
clone().asInstanceOf[Row]
}
override def export_fields: String =
{
implicit val s: StringBuilder = new StringBuilder(sup.export_fields)
implicit val clz: String = BusNameMarker.cls
def emitelem (position: Int, value: Any): Unit = if (mask(position)) emit_element(BusNameMarker.fields(position), value)
def emitattr (position: Int, value: Any): Unit = if (mask(position)) emit_attribute(BusNameMarker.fields(position), value)
def emitattrs (position: Int, value: List[String]): Unit = if (mask(position) && (null != value)) value.foreach(x => emit_attribute(BusNameMarker.fields(position), x))
emitelem(0, priority)
emitattr(1, ReportingGroup)
emitattrs(2, Terminal)
emitattr(3, TopologicalNode)
s.toString
}
override def export: String =
{
"\\t<cim:BusNameMarker rdf:%s=\\"%s\\">\\n%s\\t</cim:BusNameMarker>".format(if (about) "about" else "ID", id, export_fields)
}
}
object BusNameMarker
extends
CIMParseable[BusNameMarker]
{
override val fields: Array[String] = Array[String](
"priority",
"ReportingGroup",
"Terminal",
"TopologicalNode"
)
override val relations: List[CIMRelationship] = List(
CIMRelationship("ReportingGroup", "ReportingGroup", "0..1", "0..*"),
CIMRelationship("Terminal", "ACDCTerminal", "1..*", "0..1"),
CIMRelationship("TopologicalNode", "TopologicalNode", "0..1", "0..*")
)
val priority: Fielder = parse_element(element(cls, fields(0)))
val ReportingGroup: Fielder = parse_attribute(attribute(cls, fields(1)))
val Terminal: FielderMultiple = parse_attributes(attribute(cls, fields(2)))
val TopologicalNode: Fielder = parse_attribute(attribute(cls, fields(3)))
def parse (context: CIMContext): BusNameMarker =
{
implicit val ctx: CIMContext = context
implicit val bitfields: Array[Int] = Array(0)
val ret = BusNameMarker(
IdentifiedObject.parse(context),
toInteger(mask(priority(), 0)),
mask(ReportingGroup(), 1),
masks(Terminal(), 2),
mask(TopologicalNode(), 3)
)
ret.bitfields = bitfields
ret
}
def serializer: Serializer[BusNameMarker] = BusNameMarkerSerializer
}
object BusNameMarkerSerializer extends CIMSerializer[BusNameMarker]
{
def write (kryo: Kryo, output: Output, obj: BusNameMarker): Unit =
{
val toSerialize: Array[() => Unit] = Array(
() => output.writeInt(obj.priority),
() => output.writeString(obj.ReportingGroup),
() => writeList(obj.Terminal, output),
() => output.writeString(obj.TopologicalNode)
)
IdentifiedObjectSerializer.write(kryo, output, obj.sup)
implicit val bitfields: Array[Int] = obj.bitfields
writeBitfields(output)
writeFields(toSerialize)
}
def read (kryo: Kryo, input: Input, cls: Class[BusNameMarker]): BusNameMarker =
{
val parent = IdentifiedObjectSerializer.read(kryo, input, classOf[IdentifiedObject])
implicit val bitfields: Array[Int] = readBitfields(input)
val obj = BusNameMarker(
parent,
if (isSet(0)) input.readInt else 0,
if (isSet(1)) input.readString else null,
if (isSet(2)) readList(input) else null,
if (isSet(3)) input.readString else null
)
obj.bitfields = bitfields
obj
}
}
/**
* An electrically connected subset of the network.
*
* Topological islands can change as the current network state changes, e.g. due to:
* - disconnect switches or breakers changing state in a SCADA/EMS.
* - manual creation, change or deletion of topological nodes in a planning tool.
* Only energised TopologicalNode-s shall be part of the topological island.
*
* @param IdentifiedObject [[ch.ninecode.model.IdentifiedObject IdentifiedObject]] Reference to the superclass object.
* @param AngleRefTopologicalNode [[ch.ninecode.model.TopologicalNode TopologicalNode]] The angle reference for the island.
* Normally there is one TopologicalNode that is selected as the angle reference for each island. Other reference schemes exist, so the association is typically optional.
* @param TopologicalNodes [[ch.ninecode.model.TopologicalNode TopologicalNode]] A topological node belongs to a topological island.
* @group Topology
* @groupname Topology Package Topology
* @groupdesc Topology An extension to the Core Package that, in association with the Terminal class, models Connectivity, that is the physical definition of how equipment is connected together. In addition it models Topology, that is the logical definition of how equipment is connected via closed switches. The Topology definition is independent of the other electrical characteristics.
*/
final case class TopologicalIsland
(
IdentifiedObject: IdentifiedObject = null,
AngleRefTopologicalNode: String = null,
TopologicalNodes: List[String] = null
)
extends
Element
{
/**
* Return the superclass object.
*
* @return The typed superclass nested object.
* @group Hierarchy
* @groupname Hierarchy Class Hierarchy Related
* @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes.
*/
override def sup: IdentifiedObject = IdentifiedObject
//
// Row overrides
//
/**
* Return a copy of this object as a Row.
*
* Creates a clone of this object for use in Row manipulations.
*
* @return The copy of the object.
* @group Row
* @groupname Row SQL Row Implementation
* @groupdesc Row Members related to implementing the SQL Row interface
*/
override def copy (): Row =
{
clone().asInstanceOf[Row]
}
override def export_fields: String =
{
implicit val s: StringBuilder = new StringBuilder(sup.export_fields)
implicit val clz: String = TopologicalIsland.cls
def emitattr (position: Int, value: Any): Unit = if (mask(position)) emit_attribute(TopologicalIsland.fields(position), value)
def emitattrs (position: Int, value: List[String]): Unit = if (mask(position) && (null != value)) value.foreach(x => emit_attribute(TopologicalIsland.fields(position), x))
emitattr(0, AngleRefTopologicalNode)
emitattrs(1, TopologicalNodes)
s.toString
}
override def export: String =
{
"\\t<cim:TopologicalIsland rdf:%s=\\"%s\\">\\n%s\\t</cim:TopologicalIsland>".format(if (about) "about" else "ID", id, export_fields)
}
}
object TopologicalIsland
extends
CIMParseable[TopologicalIsland]
{
override val fields: Array[String] = Array[String](
"AngleRefTopologicalNode",
"TopologicalNodes"
)
override val relations: List[CIMRelationship] = List(
CIMRelationship("AngleRefTopologicalNode", "TopologicalNode", "0..1", "0..1"),
CIMRelationship("TopologicalNodes", "TopologicalNode", "1..*", "0..1")
)
val AngleRefTopologicalNode: Fielder = parse_attribute(attribute(cls, fields(0)))
val TopologicalNodes: FielderMultiple = parse_attributes(attribute(cls, fields(1)))
def parse (context: CIMContext): TopologicalIsland =
{
implicit val ctx: CIMContext = context
implicit val bitfields: Array[Int] = Array(0)
val ret = TopologicalIsland(
IdentifiedObject.parse(context),
mask(AngleRefTopologicalNode(), 0),
masks(TopologicalNodes(), 1)
)
ret.bitfields = bitfields
ret
}
def serializer: Serializer[TopologicalIsland] = TopologicalIslandSerializer
}
object TopologicalIslandSerializer extends CIMSerializer[TopologicalIsland]
{
def write (kryo: Kryo, output: Output, obj: TopologicalIsland): Unit =
{
val toSerialize: Array[() => Unit] = Array(
() => output.writeString(obj.AngleRefTopologicalNode),
() => writeList(obj.TopologicalNodes, output)
)
IdentifiedObjectSerializer.write(kryo, output, obj.sup)
implicit val bitfields: Array[Int] = obj.bitfields
writeBitfields(output)
writeFields(toSerialize)
}
def read (kryo: Kryo, input: Input, cls: Class[TopologicalIsland]): TopologicalIsland =
{
val parent = IdentifiedObjectSerializer.read(kryo, input, classOf[IdentifiedObject])
implicit val bitfields: Array[Int] = readBitfields(input)
val obj = TopologicalIsland(
parent,
if (isSet(0)) input.readString else null,
if (isSet(1)) readList(input) else null
)
obj.bitfields = bitfields
obj
}
}
/**
* For a detailed substation model a topological node is a set of connectivity nodes that, in the current network state, are connected together through any type of closed switches, including jumpers.
*
* Topological nodes change as the current network state changes (i.e., switches, breakers, etc. change state).
* For a planning model, switch statuses are not used to form topological nodes. Instead they are manually created or deleted in a model builder tool. Topological nodes maintained this way are also called "busses".
*
* @param IdentifiedObject [[ch.ninecode.model.IdentifiedObject IdentifiedObject]] Reference to the superclass object.
* @param pInjection The active power injected into the bus at this location in addition to injections from equipment.
* Positive sign means injection into the TopologicalNode (bus).
* Starting value for a steady state solution.
* @param qInjection The reactive power injected into the bus at this location in addition to injections from equipment.
* Positive sign means injection into the TopologicalNode (bus).
* Starting value for a steady state solution.
* @param AngleRefTopologicalIsland [[ch.ninecode.model.TopologicalIsland TopologicalIsland]] The island for which the node is an angle reference.
* Normally there is one angle reference node for each island.
* @param BaseVoltage [[ch.ninecode.model.BaseVoltage BaseVoltage]] The base voltage of the topological node.
 * @param BusNameMarker [[ch.ninecode.model.BusNameMarker BusNameMarker]] BusNameMarkers that may refer to a pre-defined TopologicalNode.
* @param ConnectivityNodeContainer [[ch.ninecode.model.ConnectivityNodeContainer ConnectivityNodeContainer]] The connectivity node container to which the topological node belongs.
* @param ConnectivityNodes [[ch.ninecode.model.ConnectivityNode ConnectivityNode]] The connectivity nodes combine together to form this topological node.
* May depend on the current state of switches in the network.
* @param ReportingGroup [[ch.ninecode.model.ReportingGroup ReportingGroup]] The reporting group to which the topological node belongs.
* @param SvInjection [[ch.ninecode.model.SvInjection SvInjection]] The injection flows state variables associated with the topological node.
* @param SvVoltage [[ch.ninecode.model.SvVoltage SvVoltage]] The state voltage associated with the topological node.
* @param Terminal [[ch.ninecode.model.Terminal Terminal]] The terminals associated with the topological node.
* This can be used as an alternative to the connectivity node path to terminal, thus making it unnecessary to model connectivity nodes in some cases. Note that if connectivity nodes are in the model, this association would probably not be used as an input specification.
* @param TopologicalIsland [[ch.ninecode.model.TopologicalIsland TopologicalIsland]] A topological node belongs to a topological island.
* @group Topology
* @groupname Topology Package Topology
* @groupdesc Topology An extension to the Core Package that, in association with the Terminal class, models Connectivity, that is the physical definition of how equipment is connected together. In addition it models Topology, that is the logical definition of how equipment is connected via closed switches. The Topology definition is independent of the other electrical characteristics.
*/
final case class TopologicalNode
(
IdentifiedObject: IdentifiedObject = null,
pInjection: Double = 0.0,
qInjection: Double = 0.0,
AngleRefTopologicalIsland: String = null,
BaseVoltage: String = null,
BusNameMarker: List[String] = null,
ConnectivityNodeContainer: String = null,
ConnectivityNodes: List[String] = null,
ReportingGroup: String = null,
SvInjection: List[String] = null,
SvVoltage: List[String] = null,
Terminal: List[String] = null,
TopologicalIsland: String = null
)
extends
Element
{
/**
* Return the superclass object.
*
* @return The typed superclass nested object.
* @group Hierarchy
* @groupname Hierarchy Class Hierarchy Related
* @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes.
*/
override def sup: IdentifiedObject = IdentifiedObject
//
// Row overrides
//
/**
* Return a copy of this object as a Row.
*
* Creates a clone of this object for use in Row manipulations.
*
* @return The copy of the object.
* @group Row
* @groupname Row SQL Row Implementation
* @groupdesc Row Members related to implementing the SQL Row interface
*/
override def copy (): Row =
{
clone().asInstanceOf[Row]
}
override def export_fields: String =
{
implicit val s: StringBuilder = new StringBuilder(sup.export_fields)
implicit val clz: String = TopologicalNode.cls
def emitelem (position: Int, value: Any): Unit = if (mask(position)) emit_element(TopologicalNode.fields(position), value)
def emitattr (position: Int, value: Any): Unit = if (mask(position)) emit_attribute(TopologicalNode.fields(position), value)
def emitattrs (position: Int, value: List[String]): Unit = if (mask(position) && (null != value)) value.foreach(x => emit_attribute(TopologicalNode.fields(position), x))
emitelem(0, pInjection)
emitelem(1, qInjection)
emitattr(2, AngleRefTopologicalIsland)
emitattr(3, BaseVoltage)
emitattrs(4, BusNameMarker)
emitattr(5, ConnectivityNodeContainer)
emitattrs(6, ConnectivityNodes)
emitattr(7, ReportingGroup)
emitattrs(8, SvInjection)
emitattrs(9, SvVoltage)
emitattrs(10, Terminal)
emitattr(11, TopologicalIsland)
s.toString
}
override def export: String =
{
"\\t<cim:TopologicalNode rdf:%s=\\"%s\\">\\n%s\\t</cim:TopologicalNode>".format(if (about) "about" else "ID", id, export_fields)
}
}
object TopologicalNode
extends
CIMParseable[TopologicalNode]
{
override val fields: Array[String] = Array[String](
"pInjection",
"qInjection",
"AngleRefTopologicalIsland",
"BaseVoltage",
"BusNameMarker",
"ConnectivityNodeContainer",
"ConnectivityNodes",
"ReportingGroup",
"SvInjection",
"SvVoltage",
"Terminal",
"TopologicalIsland"
)
override val relations: List[CIMRelationship] = List(
CIMRelationship("AngleRefTopologicalIsland", "TopologicalIsland", "0..1", "0..1"),
CIMRelationship("BaseVoltage", "BaseVoltage", "0..1", "0..*"),
CIMRelationship("BusNameMarker", "BusNameMarker", "0..*", "0..1"),
CIMRelationship("ConnectivityNodeContainer", "ConnectivityNodeContainer", "0..1", "0..*"),
CIMRelationship("ConnectivityNodes", "ConnectivityNode", "0..*", "0..1"),
CIMRelationship("ReportingGroup", "ReportingGroup", "0..1", "0..*"),
CIMRelationship("SvInjection", "SvInjection", "0..*", "1"),
CIMRelationship("SvVoltage", "SvVoltage", "0..*", "1"),
CIMRelationship("Terminal", "Terminal", "0..*", "0..1"),
CIMRelationship("TopologicalIsland", "TopologicalIsland", "0..1", "1..*")
)
val pInjection: Fielder = parse_element(element(cls, fields(0)))
val qInjection: Fielder = parse_element(element(cls, fields(1)))
val AngleRefTopologicalIsland: Fielder = parse_attribute(attribute(cls, fields(2)))
val BaseVoltage: Fielder = parse_attribute(attribute(cls, fields(3)))
val BusNameMarker: FielderMultiple = parse_attributes(attribute(cls, fields(4)))
val ConnectivityNodeContainer: Fielder = parse_attribute(attribute(cls, fields(5)))
val ConnectivityNodes: FielderMultiple = parse_attributes(attribute(cls, fields(6)))
val ReportingGroup: Fielder = parse_attribute(attribute(cls, fields(7)))
val SvInjection: FielderMultiple = parse_attributes(attribute(cls, fields(8)))
val SvVoltage: FielderMultiple = parse_attributes(attribute(cls, fields(9)))
val Terminal: FielderMultiple = parse_attributes(attribute(cls, fields(10)))
val TopologicalIsland: Fielder = parse_attribute(attribute(cls, fields(11)))
def parse (context: CIMContext): TopologicalNode =
{
implicit val ctx: CIMContext = context
implicit val bitfields: Array[Int] = Array(0)
val ret = TopologicalNode(
IdentifiedObject.parse(context),
toDouble(mask(pInjection(), 0)),
toDouble(mask(qInjection(), 1)),
mask(AngleRefTopologicalIsland(), 2),
mask(BaseVoltage(), 3),
masks(BusNameMarker(), 4),
mask(ConnectivityNodeContainer(), 5),
masks(ConnectivityNodes(), 6),
mask(ReportingGroup(), 7),
masks(SvInjection(), 8),
masks(SvVoltage(), 9),
masks(Terminal(), 10),
mask(TopologicalIsland(), 11)
)
ret.bitfields = bitfields
ret
}
def serializer: Serializer[TopologicalNode] = TopologicalNodeSerializer
}
object TopologicalNodeSerializer extends CIMSerializer[TopologicalNode]
{
def write (kryo: Kryo, output: Output, obj: TopologicalNode): Unit =
{
val toSerialize: Array[() => Unit] = Array(
() => output.writeDouble(obj.pInjection),
() => output.writeDouble(obj.qInjection),
() => output.writeString(obj.AngleRefTopologicalIsland),
() => output.writeString(obj.BaseVoltage),
() => writeList(obj.BusNameMarker, output),
() => output.writeString(obj.ConnectivityNodeContainer),
() => writeList(obj.ConnectivityNodes, output),
() => output.writeString(obj.ReportingGroup),
() => writeList(obj.SvInjection, output),
() => writeList(obj.SvVoltage, output),
() => writeList(obj.Terminal, output),
() => output.writeString(obj.TopologicalIsland)
)
IdentifiedObjectSerializer.write(kryo, output, obj.sup)
implicit val bitfields: Array[Int] = obj.bitfields
writeBitfields(output)
writeFields(toSerialize)
}
def read (kryo: Kryo, input: Input, cls: Class[TopologicalNode]): TopologicalNode =
{
val parent = IdentifiedObjectSerializer.read(kryo, input, classOf[IdentifiedObject])
implicit val bitfields: Array[Int] = readBitfields(input)
val obj = TopologicalNode(
parent,
if (isSet(0)) input.readDouble else 0.0,
if (isSet(1)) input.readDouble else 0.0,
if (isSet(2)) input.readString else null,
if (isSet(3)) input.readString else null,
if (isSet(4)) readList(input) else null,
if (isSet(5)) input.readString else null,
if (isSet(6)) readList(input) else null,
if (isSet(7)) input.readString else null,
if (isSet(8)) readList(input) else null,
if (isSet(9)) readList(input) else null,
if (isSet(10)) readList(input) else null,
if (isSet(11)) input.readString else null
)
obj.bitfields = bitfields
obj
}
}
private[ninecode] object _Topology
{
def register: List[CIMClassInfo] =
{
List(
BusNameMarker.register,
TopologicalIsland.register,
TopologicalNode.register
)
}
}
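/**
 * Hedged illustration (not part of the generated model code): constructing the case classes
 * above directly with made-up mRID strings. In normal use these objects are produced by the
 * parse methods of their companion objects while reading CIM RDF/XML.
 */
object TopologyUsageSketch
{
    def sketch (): Unit =
    {
        val island = TopologicalIsland (TopologicalNodes = List ("TopologicalNode_1", "TopologicalNode_2"))
        val node = TopologicalNode (BaseVoltage = "BaseVoltage_110kV", TopologicalIsland = "TopologicalIsland_1")
        println (s"island members: ${island.TopologicalNodes.size}, node base voltage: ${node.BaseVoltage}")
    }
}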
|
derrickoswald/CIMScala
|
CIMReader/src/main/scala/ch/ninecode/model/Topology.scala
|
Scala
|
mit
| 24,008
|
import org.apache.spark.ml.feature._
import org.apache.spark.sql.DataFrame
object FeatureBuilder {
def createVectors(
dataDFCompleted: DataFrame,
predictDFCompleted: DataFrame
): (StringIndexerModel, VectorAssembler, IndexToString, Seq[StringIndexerModel], DataFrame, DataFrame, String) = {
val numericFeatColNames = Seq("Age", "SibSp", "Parch", "Fare", "FamilySize")
val categoricalFeatColNames = Seq("Pclass", "Sex", "Embarked", "Title")
val labelColName = "SurvivedString"
val featColName = "Features"
val idColName = "PassengerId"
val idxdLabelColName = "SurvivedIndexed"
val idxdCategoricalFeatColName = categoricalFeatColNames.map(_ + "Indexed")
val allFeatColNames = numericFeatColNames ++ categoricalFeatColNames
val allIdxdFeatColNames = numericFeatColNames ++ idxdCategoricalFeatColName
val allPredictColNames = allFeatColNames ++ Seq(idColName)
val dataDFFiltered = dataDFCompleted.select(labelColName, allPredictColNames: _*)
val predictDFFiltered = predictDFCompleted.select(labelColName, allPredictColNames: _*)
val allData = dataDFFiltered.union(predictDFFiltered)
allData.cache()
val stringIndexers = categoricalFeatColNames.map { colName =>
new StringIndexer()
.setInputCol(colName)
.setOutputCol(colName + "Indexed")
.fit(allData)
}
val labelIndexer = new StringIndexer().setInputCol(labelColName).setOutputCol(idxdLabelColName).fit(allData)
// vector assembler
val assembler = new VectorAssembler().setInputCols(Array(allIdxdFeatColNames: _*)).setOutputCol(featColName)
val labelConverter = new IndexToString().setInputCol("prediction").setOutputCol("predictedLabel").setLabels(labelIndexer.labels)
(labelIndexer, assembler, labelConverter, stringIndexers, dataDFFiltered, predictDFFiltered, featColName)
}
}
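// Hedged follow-up sketch (not part of the original file): wiring the indexers and assembler
// returned by createVectors into a standard Spark ML Pipeline. The DataFrame arguments are
// assumptions for illustration only; a classifier stage would normally be appended before fitting.
object FeatureBuilderUsageSketch {
  import org.apache.spark.ml.{Pipeline, PipelineStage}
  def buildPipeline(dataDFCompleted: DataFrame, predictDFCompleted: DataFrame): Pipeline = {
    val (labelIndexer, assembler, _, stringIndexers, _, _, _) =
      FeatureBuilder.createVectors(dataDFCompleted, predictDFCompleted)
    // categorical indexers first, then the label indexer, then the vector assembler
    val stages: Array[PipelineStage] = (stringIndexers :+ labelIndexer :+ assembler).toArray
    new Pipeline().setStages(stages)
  }
}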
|
multivacplatform/multivac-kaggle-titanic
|
src/main/scala/FeatureBuilder.scala
|
Scala
|
mit
| 1,925
|
package net.tomasherman.specus.node.plugin
import net.tomasherman.specus.common.api.plugin.definitions.PluginDefinitionLoader
import net.tomasherman.specus.common.api.plugin.{PluginEventManager, SimplePluginManager}
import net.tomasherman.specus.common.api.plugin.config.PluginConfig
import akka.actor.ActorRef
import akka.actor.Actor.actorOf
import net.tomasherman.specus.node.api.plugin.NodePlugin
import net.tomasherman.specus.node.api.event.SimplePacketProcessor
import net.tomasherman.specus.node.api.event.message.AddProcessor
/**
* This file is part of Specus.
*
* Specus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Specus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with Specus. If not, see <http://www.gnu.org/licenses/>.
*
*/
class NodePluginManager(env: {
val pluginDefinitionLoader: PluginDefinitionLoader
val pluginEventManager: PluginEventManager
val pluginConfig: PluginConfig
val packetProcessorManager: ActorRef
val redis: ActorRef
}) extends SimplePluginManager[NodePlugin](env) {
def postDependencyCheck() {
this.plugins.foreach({x =>
x._2._2.packetProcessor.foreach({ x =>
val xx = x.newInstance()
val a = actorOf(new SimplePacketProcessor(xx,env))
a.start()
xx.canProcess.foreach(env.packetProcessorManager ! AddProcessor(_,a))
})
})
}
}
|
tomasherman/specus
|
node/src/main/scala/plugin/NodePluginManager.scala
|
Scala
|
gpl-3.0
| 1,801
|
package uk.ac.ncl.openlab.intake24.services.fooddb.images
sealed trait ImageServiceError {
val e: Throwable
}
case class IOError(e: Throwable) extends ImageServiceError
case class FileTypeNotAllowed(e: Throwable) extends ImageServiceError
case class ImageStorageError(e: Throwable) extends ImageServiceError
case class ImageProcessorError(e: Throwable) extends ImageServiceError
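// Hedged illustration (not part of the original file): since ImageServiceError is sealed,
// callers can pattern match exhaustively over the failure cases defined above.
object ImageServiceErrorSketch {
  def describe(error: ImageServiceError): String = error match {
    case IOError(e) => s"I/O failure: ${e.getMessage}"
    case FileTypeNotAllowed(e) => s"file type not allowed: ${e.getMessage}"
    case ImageStorageError(e) => s"storage failure: ${e.getMessage}"
    case ImageProcessorError(e) => s"image processing failure: ${e.getMessage}"
  }
}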
|
digitalinteraction/intake24
|
FoodDataServices/src/main/scala/uk/ac/ncl/openlab/intake24/services/fooddb/images/Errors.scala
|
Scala
|
apache-2.0
| 386
|
/*
Copyright 2013 Crossing-Tech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package akka.osgi.sample.activation
import akka.osgi.ActorSystemActivator
import akka.actor.{Props, ActorSystem}
import akka.osgi.sample.internal.Table
import akka.osgi.sample.service.DiningHakkersServiceImpl
import akka.osgi.sample.api.DiningHakkersService
import akka.event.{LogSource, Logging}
import org.osgi.framework.{ServiceRegistration, BundleContext}
import scala.collection.mutable.ListBuffer
class Activator extends ActorSystemActivator {
import Activator._
val services: ListBuffer[ServiceRegistration[_]] = ListBuffer()
def configure(context: BundleContext, system: ActorSystem) {
val log = Logging(system, this)
log.info("Core bundle configured")
system.actorOf(Props[Table], "table")
registerHakkersService(context, system)
log.info("Hakker service registred")
}
def registerHakkersService(context: BundleContext, system: ActorSystem) {
val hakkersService = new DiningHakkersServiceImpl(system)
services += context.registerService(classOf[DiningHakkersService], hakkersService, null)
services += context.registerService(classOf[ActorSystem], system, null)
}
override def stop(context: BundleContext) {
unregisterServices(context)
println("Hakker service unregistred")
super.stop(context)
}
def unregisterServices(context: BundleContext) {
services foreach (_.unregister())
}
override def getActorSystemName(context: BundleContext): String = "akka-osgi-sample"
}
object Activator {
implicit val logSource: LogSource[AnyRef] = new LogSource[AnyRef] {
def genString(o: AnyRef): String = o.getClass.getName
override def getClazz(o: AnyRef): Class[_] = o.getClass
}
}
|
Crossing-Tech/akka-osgi-sample
|
core/src/main/scala/akka/osgi/sample/activation/Activator.scala
|
Scala
|
apache-2.0
| 2,233
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity, TensorModule}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.serializer._
import com.intel.analytics.bigdl.utils.serializer.converters.DataConverter
import com.intel.analytics.bigdl.serialization.Bigdl.{AttrValue, BigDLModule}
import com.intel.analytics.bigdl.utils.Shape
import scala.reflect.ClassTag
import scala.reflect.runtime.universe
/**
 * The `forward(input)` reshapes the input tensor into a
 * `size(0) * size(1) * ...` tensor, taking the elements row-wise.
 *
 * @param size the reshape size
 * @param batchMode It is an optional argument. If it is set to `Some(true)`,
 * the first dimension of the input is considered the batch dimension,
 * and thus this dimension size is kept fixed. This is necessary
 * when dealing with batch sizes of one. When set to `Some(false)`,
 * it forces the entire input (including the first dimension) to be reshaped
 * to the given size. Default is `None`, which means the module considers
 * inputs with more elements than the product of the provided sizes (size(0) *
 * size(1) * ...) to be batches, and otherwise operates in no-batch mode.
*
*/
@SerialVersionUID(- 830146931795053244L)
class Reshape[T: ClassTag](
val size: Array[Int], var batchMode: Option[Boolean] = None)(
implicit ev: TensorNumeric[T]) extends TensorModule[T] {
val batchSize = new Array[Int](size.length + 1)
var nElement: Int = 1
for (i <- 1 to size.length) {
batchSize(i) = size(i - 1)
nElement *= size(i - 1)
}
// whether share the storage between input and output
// in this layer, if input is contiguous, inplace is true. otherwise, inplace is false
private var inplace: Boolean = true
override def updateOutput(input: Tensor[T]): Tensor[T] = {
if ((batchMode.nonEmpty && !batchMode.get) ||
(input.nElement() == nElement && batchMode.isEmpty && input.size(1) != 1)) {
require(input.nElement() == nElement, s"element number must match Reshape size. " +
s"But In ${this.getName()} : element number is: ${ input.nElement() } , " +
s"reshape size is: ${nElement}")
if (input.isContiguous()) output =
input.view(size)
else {
output = input.contiguous().view(size)
inplace = false
}
}
else {
require(input.nElement() == nElement * input.size(1),
s"element number must match Reshape size. " +
s"But In ${this.getName()} : element number is: ${ input.nElement() } , " +
s"reshape size is: ${ nElement * input.size(1) }")
batchSize(0) = input.size(1)
if (input.isContiguous()) {
output = input.view(batchSize)
} else {
output = input.contiguous().view(batchSize)
inplace = false
}
}
output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
if (gradOutput.isContiguous()) {
gradInput = gradOutput.view(input.size())
} else {
gradInput = gradOutput.contiguous().view(input.size())
}
gradInput
}
override def equals(obj: Any): Boolean = {
if (!super.equals(obj)) {
return false
}
if (!obj.isInstanceOf[Reshape[T]]) {
return false
}
val other = obj.asInstanceOf[Reshape[T]]
if (this.eq(other)) {
return true
}
var i = 0
while (i < batchSize.length) {
if (batchSize(i) != other.batchSize(i)) {
return false
}
i += 1
}
nElement == other.nElement &&
batchMode == other.batchMode
}
override def hashCode() : Int = {
val seed = 37
var hash = super.hashCode()
var i = 0
while (i < batchSize.length) {
hash = hash * seed + batchSize(i).hashCode()
i += 1
}
hash = hash * seed + nElement.hashCode()
hash = hash * seed + batchMode.hashCode()
hash
}
override def toString(): String = {
s"${getPrintName}(${size.mkString("x")})"
}
override def clearState(): this.type = {
if (!inplace) {
super.clearState()
}
this
}
override def computeOutputShape(inputShape: Shape): Shape = {
val input = inputShape.toSingle().toArray
val output = if ((batchMode.nonEmpty && !batchMode.get) ||
(input.product == nElement && batchMode.isEmpty && input(0) != 1)) {
size
} else {
Array(input(0)) ++ batchSize.slice(1, batchSize.length)
}
Shape(output)
}
}
object Reshape extends ModuleSerializable {
def apply[T: ClassTag](
size: Array[Int],
batchMode: Option[Boolean] = None)(implicit ev: TensorNumeric[T]) : Reshape[T] = {
new Reshape[T](size, batchMode)
}
override def doLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]) : AbstractModule[Activity, Activity, T] = {
val attrMap = context.bigdlModule.getAttrMap
val size = DataConverter.getAttributeValue(context, attrMap.get("size")).
asInstanceOf[Array[Int]]
val batchModeV = DataConverter.getAttributeValue(context, attrMap.get("batchMode")).
asInstanceOf[Int]
var batchMode : Option[Boolean] = None
if (batchModeV == 1) {
batchMode = Some(false)
} else if (batchModeV == 2) {
batchMode = Some(true)
}
Reshape(size, batchMode).asInstanceOf[AbstractModule[Activity, Activity, T]]
}
override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
reshapeBuilder : BigDLModule.Builder)
(implicit ev: TensorNumeric[T]) : Unit = {
val reshape = context.moduleData.module.asInstanceOf[Reshape[T]]
val sizeBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, sizeBuilder, reshape.size,
universe.typeOf[Array[Int]])
reshapeBuilder.putAttr("size", sizeBuilder.build)
var batchMode = 0
if (reshape.batchMode != None) {
batchMode = if (reshape.batchMode.get == false) 1 else 2
}
val batchModeBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, batchModeBuilder, batchMode,
universe.typeOf[Int])
reshapeBuilder.putAttr("batchMode", batchModeBuilder.build)
}
}
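// Hedged usage sketch (not part of the original file): how the batchMode option documented
// above changes the output shape. Tensor contents are irrelevant here; only the shapes matter.
private object ReshapeShapeSketch {
  def sketch(): Unit = {
    val plain = Reshape[Float](Array(3, 2))
    val flat = plain.forward(Tensor[Float](2, 3)) // 6 elements reshaped to 3 x 2
    val batchAware = Reshape[Float](Array(3, 2), Some(true))
    val batched = batchAware.forward(Tensor[Float](4, 2, 3)) // first dimension kept: 4 x 3 x 2
    println(flat.size().mkString("x") + " vs " + batched.size().mkString("x"))
  }
}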
|
wzhongyuan/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Reshape.scala
|
Scala
|
apache-2.0
| 7,038
|
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.algebra
import com.twitter.algebird.{SummingQueue, Semigroup, Monoid}
import com.twitter.storehaus.{ StoreProperties, JMapStore }
import org.scalacheck.Properties
object BufferingStoreProperties extends Properties("BufferingStore") {
import StoreProperties.sparseStoreTest
import MergeableStoreProperties._
import MergeableStore.enrich
property("BufferingStore [Map[Int,String]] obeys the store properties") =
sparseStoreTest { opt: Option[Map[Int, String]] =>
opt.filter(Monoid.isNonZero(_)).orElse(Some(Monoid.zero[Map[Int,String]]))
} {
newSparseStore[String, Map[Int, String]].withSummer(new SummerConstructor[String] {
def apply[V](sg: Semigroup[V]) = {
implicit val semi = sg
SummingQueue[Map[String, V]](10)
}
})
}
property("BufferingStore [Map[Int,Int]] obeys the store properties") =
sparseStoreTest { opt: Option[Map[Int, Int]] =>
opt.filter(Monoid.isNonZero(_)).orElse(Some(Monoid.zero[Map[Int,Int]]))
} {
newSparseStore[String, Map[Int, Int]].withSummer(new SummerConstructor[String] {
def apply[V](sg: Semigroup[V]) = {
implicit val semi = sg
SummingQueue[Map[String, V]](10)
}
})
}
}
|
tresata/storehaus
|
storehaus-algebra/src/test/scala/com/twitter/storehaus/algebra/BufferingStoreProperties.scala
|
Scala
|
apache-2.0
| 1,878
|
/*
* Copyright (C) 2009-2019 Lightbend Inc. <https://www.lightbend.com>
*/
// this is copy/pasted from https://github.com/akka/akka/blob/5576c233d063b3ee4cfc05d8e73c614a3dea478d/project/CrossJava.scalas
package akka
import java.io.File
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
import sbt._
import sbt.librarymanagement.SemanticSelector
import sbt.librarymanagement.VersionNumber
import akka.CrossJava.nullBlank
/*
* Tools for discovering different Java versions,
* will be in sbt 1.3.0 (https://github.com/sbt/sbt/pull/4139 et al)
* but until that time replicated here
*/
case class JavaVersion(numbers: Vector[Long], vendor: Option[String]) {
def numberStr: String = numbers.mkString(".")
def withVendor(vendor: Option[String]) = copy(vendor = vendor)
def withVendor(vendor: String) = copy(vendor = Option(vendor))
def withNumbers(numbers: Vector[Long]) = copy(numbers = numbers)
override def toString: String = {
vendor.map(_ + "@").getOrElse("") + numberStr
}
}
object JavaVersion {
val specificationVersion: String = sys.props("java.specification.version")
val version: String = sys.props("java.version")
def isJdk8: Boolean =
VersionNumber(specificationVersion).matchesSemVer(SemanticSelector(s"=1.8"))
val isJdk11orHigher: Boolean =
VersionNumber(specificationVersion).matchesSemVer(SemanticSelector(">=11"))
def apply(version: String): JavaVersion = CrossJava.parseJavaVersion(version)
def apply(numbers: Vector[Long], vendor: String): JavaVersion = new JavaVersion(numbers, Option(vendor))
def notOnJdk8[T](values: Seq[T]): Seq[T] = if (isJdk8) Seq.empty[T] else values
def sourceAndTarget(fullJavaHome: File): Seq[String] =
if (isJdk8) Seq.empty
else Seq("-source", "8", "-target", "8", "-bootclasspath", fullJavaHome + "/jre/lib/rt.jar")
}
object CrossJava {
object Keys {
val discoveredJavaHomes = settingKey[Map[String, File]]("Discovered Java home directories")
val javaHomes = settingKey[Map[String, File]]("The user-defined additional Java home directories")
val fullJavaHomes = settingKey[Map[String, File]]("Combines discoveredJavaHomes and custom javaHomes.")
}
import Keys._
val crossJavaSettings = Seq(
discoveredJavaHomes := CrossJava.discoverJavaHomes,
javaHomes := ListMap.empty,
fullJavaHomes := CrossJava.expandJavaHomes(discoveredJavaHomes.value ++ javaHomes.value)
)
// parses jabba style version number adopt@1.8
def parseJavaVersion(version: String): JavaVersion = {
def splitDot(s: String): Vector[Long] =
Option(s) match {
case Some(x) => x.split('.').toVector.filterNot(_ == "").map(_.toLong)
case _ => Vector()
}
def splitAt(s: String): Vector[String] =
Option(s) match {
case Some(x) => x.split('@').toVector
case _ => Vector()
}
splitAt(version) match {
case Vector(vendor, rest) => JavaVersion(splitDot(rest), Option(vendor))
case Vector(rest) => JavaVersion(splitDot(rest), None)
case _ => sys.error(s"Invalid JavaVersion: $version")
}
}
def discoverJavaHomes: ListMap[String, File] = {
ListMap(JavaDiscoverConfig.configs.flatMap { _.javaHomes }.sortWith(versionOrder): _*)
}
sealed trait JavaDiscoverConf {
def javaHomes: Vector[(String, File)]
}
def versionOrder(left: (_, File), right: (_, File)): Boolean =
versionOrder(left._2.getName, right._2.getName)
// Sort version strings, considering 1.8.0 < 1.8.0_45 < 1.8.0_121
@tailrec
def versionOrder(left: String, right: String): Boolean = {
val Pattern = """.*?([0-9]+)(.*)""".r
left match {
case Pattern(leftNumber, leftRest) =>
right match {
case Pattern(rightNumber, rightRest) =>
if (Integer.parseInt(leftNumber) < Integer.parseInt(rightNumber)) true
else if (Integer.parseInt(leftNumber) > Integer.parseInt(rightNumber)) false
else versionOrder(leftRest, rightRest)
case _ =>
false
}
case _ =>
true
}
}
object JavaDiscoverConfig {
private val JavaHomeDir = """(java-|jdk-?|adoptopenjdk-)(1\.)?([0-9]+).*""".r
class LinuxDiscoverConfig(base: File) extends JavaDiscoverConf {
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) => JavaVersion(nullBlank(m) + n).toString -> (base / dir)
}
}
class MacOsDiscoverConfig extends JavaDiscoverConf {
val base: File = file("/Library") / "Java" / "JavaVirtualMachines"
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) =>
JavaVersion(nullBlank(m) + n).toString -> (base / dir / "Contents" / "Home")
}
}
class WindowsDiscoverConfig extends JavaDiscoverConf {
val base: File = file("C://Program Files/Java")
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) => JavaVersion(nullBlank(m) + n).toString -> (base / dir)
}
}
// See https://github.com/shyiko/jabba
class JabbaDiscoverConfig extends JavaDiscoverConf {
val base: File = Path.userHome / ".jabba" / "jdk"
val JavaHomeDir = """([\w\-]+)\@(1\.)?([0-9]+).*""".r
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) =>
val v = JavaVersion(nullBlank(m) + n).toString
if ((base / dir / "Contents" / "Home").exists) v -> (base / dir / "Contents" / "Home")
else v -> (base / dir)
}
}
class JavaHomeDiscoverConfig extends JavaDiscoverConf {
def javaHomes: Vector[(String, File)] =
sys.env
.get("JAVA_HOME")
.map(new java.io.File(_))
.filter(_.exists())
.flatMap { javaHome =>
val base = javaHome.getParentFile
javaHome.getName match {
case dir @ JavaHomeDir(_, m, n) => Some(JavaVersion(nullBlank(m) + n).toString -> (base / dir))
case _ => None
}
}
.toVector
}
val configs = Vector(
new JabbaDiscoverConfig,
new LinuxDiscoverConfig(file("/usr") / "java"),
new LinuxDiscoverConfig(file("/usr") / "lib" / "jvm"),
new MacOsDiscoverConfig,
new WindowsDiscoverConfig,
new JavaHomeDiscoverConfig
)
}
def nullBlank(s: String): String =
if (s eq null) ""
else s
// expand Java versions 1-20 to 1.x, and vice versa, to accept both "1.8" and "8"
private val oneDot = Map((1L to 20L).toVector.flatMap { i =>
Vector(Vector(i) -> Vector(1L, i), Vector(1L, i) -> Vector(i))
}: _*)
def expandJavaHomes(hs: Map[String, File]): Map[String, File] =
hs.flatMap {
case (k, v) =>
val jv = JavaVersion(k)
if (oneDot.contains(jv.numbers))
Vector(k -> v, jv.withNumbers(oneDot(jv.numbers)).toString -> v)
else Vector(k -> v)
}
def wrapNull(a: Array[String]): Vector[String] =
if (a eq null) Vector()
else a.toVector
}
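// Hedged illustration (not part of the original file) of the helpers above; the expected
// values follow from reading the code, not from probing a real machine's JDK layout.
object CrossJavaSketch {
  def sketch(): Unit = {
    assert(CrossJava.parseJavaVersion("adopt@1.8") == JavaVersion(Vector(1L, 8L), Some("adopt")))
    assert(CrossJava.versionOrder("1.8.0", "1.8.0_45")) // 1.8.0 sorts before 1.8.0_45
    val expanded = CrossJava.expandJavaHomes(Map("8" -> file("/opt/jdk8")))
    assert(expanded.keySet == Set("8", "1.8")) // the "8" entry is mirrored as "1.8"
  }
}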
|
rcavalcanti/lagom
|
docs/project/CrossJava.scala
|
Scala
|
apache-2.0
| 7,356
|
package scala.collection.mutable;
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import org.openjdk.jmh.runner.IterationType
import org.openjdk.jol.info.GraphLayout
import benchmark._
import java.util.concurrent.TimeUnit
/** Utilities for the [[OpenHashMapBenchmark]].
*
 * The method calls are tested by looping to the size desired for the map,
 * rather than using the JMH harness, which iterates for a fixed length of time.
*/
private object OpenHashMapBenchmark {
/** Abstract state container for the `put()` bulk calling tests.
*
* Provides an array of adequately-sized, empty maps to each invocation,
* so that hash table allocation won't be done during measurement.
* Provides enough maps to make each invocation long enough to avoid timing artifacts.
* Performs a GC after re-creating the empty maps before every invocation,
* so that only the GCs caused by the invocation contribute to the measurement.
*
* Records the memory used by all the maps in the last invocation of each iteration.
*
* @tparam K type of the map keys to be used in the test
*/
@State(Scope.Thread)
private[this] abstract class BulkPutState[K](implicit keyBuilder: KeySeqBuilder[K]) {
/** A lower-bound estimate of the number of nanoseconds per `put()` call */
private[this] val nanosPerPut: Double = 5
/** Minimum number of nanoseconds per invocation, so as to avoid timing artifacts. */
private[this] val minNanosPerInvocation = 1000000 // one millisecond
/** Size of the maps created in this trial. */
private[this] var size: Int = _
/** Total number of entries in all of the `maps` combined. */
private[this] var _mapEntries: Int = _
protected def mapEntries = _mapEntries
/** Number of operations performed in the current invocation. */
private[this] var _operations: Int = _
protected def operations = _operations
/** Bytes of memory used in the object graphs of all the maps. */
private[this] var _memory: Long = _
protected def memory = _memory
/** The sequence of keys to store into a map. */
private[this] var _keys: KeySeq[K] = _
def keys() = _keys
var maps: Array[OpenHashMap[K,Int]] = null
@Setup
def threadSetup(params: BenchmarkParams) {
size = params.getParam("size").toInt
val n = math.ceil(minNanosPerInvocation / (nanosPerPut * size)).toInt
_mapEntries = size * n
_keys = keyBuilder.build(size)
maps = new Array(n)
}
@Setup(Level.Iteration)
def iterationSetup {
_operations = 0
}
@Setup(Level.Invocation)
def setup(params: IterationParams) {
for (i <- 0 until maps.length) maps(i) = new OpenHashMap[K,Int](size)
if (params.getType == IterationType.MEASUREMENT) {
_operations += _mapEntries
System.gc() // clean up after last invocation
}
}
@TearDown(Level.Iteration)
def iterationTeardown(params: IterationParams) {
if (params.getType == IterationType.MEASUREMENT) {
// limit to smaller cases to avoid OOM
_memory =
if (_mapEntries <= 1000000) GraphLayout.parseInstance(maps(0), maps.tail).totalSize
else 0
}
}
}
/** Abstract state container for the `get()` bulk calling tests.
*
* Provides a thread-scoped map of the expected size.
* Performs a GC after loading the map.
*
* @tparam K type of the map keys to be used in the test
*/
@State(Scope.Thread)
private[this] abstract class BulkGetState[K](implicit keyBuilder: KeySeqBuilder[K]) {
/** The sequence of keys to store into a map. */
private[this] var _keys: KeySeq[K] = _
def keys() = _keys
val map = new OpenHashMap[K,Int].empty
/** Load the map with keys from `1` to `size`. */
@Setup
def setup(params: BenchmarkParams) {
val size = params.getParam("size").toInt
_keys = keyBuilder.build(size)
put(map, keys, 0, size)
System.gc()
}
}
/** Abstract state container for the `get()` bulk calling tests with deleted entries.
*
* Provides a thread-scoped map of the expected size, from which entries have been removed.
* Performs a GC after loading the map.
*
* @tparam K type of the map keys to be used in the test
*/
@State(Scope.Thread)
private[this] abstract class BulkRemovedGetState[K](implicit keyBuilder: KeySeqBuilder[K]) {
/** The sequence of keys to store into a map. */
private[this] var _keys: KeySeq[K] = _
def keys() = _keys
val map = new OpenHashMap[K,Int].empty
/** Load the map with keys from `1` to `size`, removing half of them. */
@Setup
def setup(params: BenchmarkParams) {
val size = params.getParam("size").toInt
_keys = keyBuilder.build(size)
put_remove(map, keys)
System.gc()
}
}
/* In order to use `@AuxCounters` on a class hierarchy (as of JMH 1.11.3),
* it's necessary to place it on the injected (sub)class, and to make the
 * counters visible as explicit public members of that class. JMH doesn't
* scan the ancestor classes for counters.
*/
@AuxCounters
private class IntBulkPutState extends BulkPutState[Int] {
override def mapEntries = super.mapEntries
override def operations = super.operations
override def memory = super.memory
}
private class IntBulkGetState extends BulkGetState[Int]
private class IntBulkRemovedGetState extends BulkRemovedGetState[Int]
@AuxCounters
private class AnyRefBulkPutState extends BulkPutState[AnyRef] {
override def mapEntries = super.mapEntries
override def operations = super.operations
override def memory = super.memory
}
private class AnyRefBulkGetState extends BulkGetState[AnyRef]
private class AnyRefBulkRemovedGetState extends BulkRemovedGetState[AnyRef]
/** Put entries into the given map.
* Adds entries using a range of keys from the given list.
*
* @param from lowest index in the range of keys to add
* @param to highest index in the range of keys to add, plus one
*/
private[this] def put[K](map: OpenHashMap[K,Int], keys: KeySeq[K], from: Int, to: Int) {
var i = from
while (i < to) { // using a `for` expression instead adds significant overhead
map.put(keys(i), i)
i += 1
}
}
/** Put entries into the given map.
* Adds entries using all of the keys from the given list.
*/
private def put[K](map: OpenHashMap[K,Int], keys: KeySeq[K]): Unit =
put(map, keys, 0, keys.size)
/** Put entries into the given map, removing half of them as they're added.
*
* @param keys list of keys to use
*/
private def put_remove[K](map: OpenHashMap[K,Int], keys: KeySeq[K]) {
val blocks = 25 // should be a non-trivial factor of `size`
val size = keys.size
val blockSize: Int = size / blocks
var base = 0
while (base < size) {
put(map, keys, base, base + blockSize)
// remove every other entry
var i = base
while (i < base + blockSize) {
map.remove(keys(i))
i += 2
}
base += blockSize
}
}
/** Get elements from the given map. */
private def get[K](map: OpenHashMap[K,Int], keys: KeySeq[K]) = {
val size = keys.size
var i = 0
var sum = 0
while (i < size) {
sum += map.get(keys(i)).getOrElse(0)
i += 1
}
sum
}
}
/** Benchmark for the library's [[OpenHashMap]]. */
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(5)
@Threads(1)
@Warmup(iterations = 20)
@Measurement(iterations = 5)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class OpenHashMapBenchmark {
import OpenHashMapBenchmark._
@Param(Array("50", "100", "1000", "10000", "100000", "1000000", "2500000",
"5000000", "7500000", "10000000", "25000000"))
var size: Int = _
// Tests with Int keys
/** Test putting elements to a map of `Int` to `Int`. */
@Benchmark
def put_Int(state: IntBulkPutState) {
var i = 0
while (i < state.maps.length) {
put(state.maps(i), state.keys)
i += 1
}
}
/** Test putting and removing elements to a growing map of `Int` to `Int`. */
@Benchmark
def put_remove_Int(state: IntBulkPutState) {
var i = 0
while (i < state.maps.length) {
put_remove(state.maps(i), state.keys)
i += 1
}
}
/** Test getting elements from a map of `Int` to `Int`. */
@Benchmark
def get_Int_after_put(state: IntBulkGetState) =
get(state.map, state.keys)
/** Test getting elements from a map of `Int` to `Int` from which elements have been removed.
* Note that half of these queries will fail to find their keys, which have been removed.
*/
@Benchmark
def get_Int_after_put_remove(state: IntBulkRemovedGetState) =
get(state.map, state.keys)
// Tests with AnyRef keys
/** Test putting elements to a map of `AnyRef` to `Int`. */
@Benchmark
def put_AnyRef(state: AnyRefBulkPutState) {
var i = 0
while (i < state.maps.length) {
put(state.maps(i), state.keys)
i += 1
}
}
/** Test putting and removing elements to a growing map of `AnyRef` to `Int`. */
@Benchmark
def put_remove_AnyRef(state: AnyRefBulkPutState) {
var i = 0
while (i < state.maps.length) {
put_remove(state.maps(i), state.keys)
i += 1
}
}
/** Test getting elements from a map of `AnyRef` to `Int`. */
@Benchmark
def get_AnyRef_after_put(state: AnyRefBulkGetState) =
get(state.map, state.keys)
/** Test getting elements from a map of `AnyRef` to `Int` from which elements have been removed.
* Note that half of these queries will fail to find their keys, which have been removed.
*/
@Benchmark
def get_AnyRef_after_put_remove(state: AnyRefBulkRemovedGetState) =
get(state.map, state.keys)
}
|
felixmulder/scala
|
test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala
|
Scala
|
bsd-3-clause
| 9,862
|
import stainless.lang._
import stainless.annotation._
import scala.annotation.meta.field
import scala.collection.concurrent.TrieMap
object IgnoredField {
case class TrieMapWrapper[K, V](
@(ignore @field)
@(pure @field)
@extern
theMap: TrieMap[K, V]
) {
@extern
def contains(k: K): Boolean = {
theMap contains k
}
@extern
def insert(k: K, v: V): TrieMapWrapper[K, V] = {
TrieMapWrapper(theMap += (k -> v))
} ensuring { _.contains(k) }
@extern
def apply(k: K): V = {
require(contains(k))
theMap(k)
}
}
object TrieMapWrapper {
@extern
def empty[K, V]: TrieMapWrapper[K, V] = {
TrieMapWrapper(TrieMap.empty[K, V])
} ensuring { res =>
forall((k: K) => !res.contains(k))
}
}
def test = {
val wrapper = TrieMapWrapper.empty[BigInt, BigInt]
assert(!wrapper.contains(1))
assert(wrapper.insert(1, 2).contains(1))
}
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/MicroTests/IgnoredField.scala
|
Scala
|
apache-2.0
| 947
|
package net.stoerr.gappenginescalatemplate
import java.util.logging.Logger
import javax.servlet.http.{HttpServletResponse, HttpServletRequest, HttpServlet}
/**
* @author <a href="http://www.stoerr.net/">Hans-Peter Stoerr</a>
* @since 20.02.2015
*/
class HelloWorldServlet extends HttpServlet {
val logger = Logger.getLogger(getClass.toString)
override def doGet(req: HttpServletRequest, resp: HttpServletResponse) = {
resp.setHeader("Cache-Control", "no-cache, no-store, must-revalidate") // HTTP 1.1
resp.setHeader("Pragma", "no-cache") // HTTP 1.0
resp.setDateHeader("Expires", 0)
req.setAttribute("title", "Hello world page")
req.setAttribute("heading", "Hello world!")
req.setAttribute("body", <p>Some body, hu hu hu</p>)
getServletContext.getRequestDispatcher("/jsp/frame.jsp").forward(req, resp)
}
}
|
stoerr/GoogleAppengineScalaTemplate
|
src/main/scala/net/stoerr/gappenginescalatemplate/HelloWorldServlet.scala
|
Scala
|
apache-2.0
| 848
|
package demo
package components
package materialui.svgicons
import chandu0101.scalajs.react.components.materialui.MuiSvgIcon
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
object CommunicationCall {
val component = ReactComponentB[Unit]("CommunicationCall")
.render(P => {
MuiSvgIcon()(
<.svg.path(^.key := "acg", ^.svg.d := "M6.62 10.79c1.44 2.83 3.76 5.14 6.59 6.59l2.2-2.2c.27-.27.67-.36 1.02-.24 1.12.37 2.33.57 3.57.57.55 0 1 .45 1 1V20c0 .55-.45 1-1 1-9.39 0-17-7.61-17-17 0-.55.45-1 1-1h3.5c.55 0 1 .45 1 1 0 1.25.2 2.45.57 3.57.11.35.03.74-.25 1.02l-2.2 2.2z")
)
}).buildU
def apply() = component()
}
|
tpdi/scalajs-react-components
|
demo/src/main/scala/demo/components/materialui/svgicons/CommunicationCall.scala
|
Scala
|
apache-2.0
| 677
|
package com.github.mlangc.experiments.refactoring
import org.scalameter.picklers.noPickler._
import org.scalameter.api._
import scala.tools.refactoring.util.SourceWithMarker.Movement
import scala.tools.refactoring.util.SourceWithMarker.Movements
import scala.tools.refactoring.util.SourceWithMarker
object SourceWithMarkerBenchmark extends Bench.LocalTime with LocalResourceSupport {
def testSource = {
SourceWithMarker(localResourceAsString("LongScalaSource.txt"))
}
val input = Gen.single("testSource")(testSource)
def runTest(name: String, mvnt: Movement) = {
performance of name in {
measure method "apply" in {
using(input) in { src =>
require(mvnt(src).isDefined)
}
}
}
}
import Movements._
val packageDef = "package" ~ space.atLeastOnce ~ id ~ ('.' ~ id).zeroOrMore
val toEnd = until("/*END*/")
val toStartOfLastTest = toEnd ~ (until("@Test", skipping = (stringLiteral | space | comment)).backward)
runTest("any", any)
runTest("parsePreamble", commentsAndSpaces ~ packageDef ~ commentsAndSpaces ~ packageDef)
runTest("toEnd", toEnd)
runTest("parseTestBackwards", toStartOfLastTest)
}
|
mlangc/scala-refactoring-experiments
|
src/test/scala/com/github/mlangc/experiments/refactoring/SourceWithMarkerBenchmark.scala
|
Scala
|
bsd-3-clause
| 1,176
|
/*
`take` first checks if n==0. In that case we need not look at the stream at all.
*/
def take(n: Int): Stream[A] =
if (n > 0) this match {
case Cons(h, t) if n == 1 => cons(h(), Stream.empty) // we can say Stream.empty
case Cons(h, t) => cons(h(), t().take(n-1))
case _ => Stream.empty
}
else Stream() // we could equally return Stream.empty here
/*
Unlike `take`, `drop` is not incremental. That is, it doesn't generate the
answer lazily. It must traverse the first `n` elements of the stream eagerly.
*/
def drop(n: Int): Stream[A] = {
@annotation.tailrec
def go(s: Stream[A], n: Int): Stream[A] =
if (n <= 0) s
else s match {
case Cons(h,t) => go(t(), n-1)
case _ => Stream()
}
go(this, n)
}
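/*
Hedged illustration of the difference described above (assumes the Stream, cons, empty and
toList definitions from the surrounding chapter):

  Stream(1, 2, 3).take(2).toList == List(1, 2)  // incremental: later cells are forced only on demand
  Stream(1, 2, 3).drop(2).toList == List(3)     // eager: the first two cells are traversed immediately
*/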
|
fpinscala-muc/fpinscala-kdziubli
|
answerkey/laziness/02.answer.scala
|
Scala
|
mit
| 729
|
package play.boilerplate.generators
import java.io.File.{separator, separatorChar}
import play.boilerplate.parser.model.Parameter
trait StringUtils {
def decapitalize(s: String): String = {
if (s == null) null
else if (s.length == 0) ""
else {
val chars = s.toCharArray
chars(0) = chars(0).toLower
new String(chars)
}
}
def getFileName(fileName: String): String = {
val sep = if (separatorChar == 92.toChar) "\\\\" else separator
fileName.split(sep).toList.last
}
def sanitizeFileName(fileName: String): String = {
getFileName(fileName)
.replace(".yaml", "")
.replace(".json", "")
}
def objectNameFromFileName(fileName: String, obj: String, skipNotValidChars: Boolean = true): String = {
stringToValidIdentifier(sanitizeFileName(fileName), skipNotValidChars).capitalize + obj
}
def stringToValidIdentifier(str: String, skipNotValidChars: Boolean): String = {
val sb = new StringBuilder()
str.toCharArray.foldLeft {
if (!Character.isJavaIdentifierStart(str.charAt(0))) {
sb.append('_')
true
} else {
false
}
} { case (afterUnderscore, c) =>
if (Character.isJavaIdentifierPart(c)) {
if (afterUnderscore) sb.append(c.toUpper) else sb.append(c)
false
} else if (!afterUnderscore) {
if (!skipNotValidChars) sb.append('_')
true
} else {
true
}
}
sb.mkString
}
def getParameterIdentifier(param: Parameter): String = {
decapitalize(stringToValidIdentifier(param.name, skipNotValidChars = true))
}
def padTo(n: Int, s: String): String = s + " " * (n - s.length max 0)
def cleanDuplicateSlash(s: String): String = s.replaceAll("//+", "/")
def composeName(parts: String*): String = {
parts.filterNot(_.isEmpty).mkString(".")
}
def classNameToPath(packageName: String, className: String, ext: String): String = {
val path = (packageName.split('.') :+ className).filterNot(_.isEmpty).mkString(separator)
Seq(path, ext.dropWhile(_ == '.')).filterNot(_.isEmpty).mkString(".")
}
}
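// Hedged examples (not part of the original file): expected results of the helpers above,
// derived by reading the code. A Unix-style path separator is assumed for getFileName.
object StringUtilsSketch extends StringUtils {
  def sketch(): Unit = {
    assert(objectNameFromFileName("api/pet-store.yaml", "Service") == "PetStoreService")
    assert(decapitalize("PetStore") == "petStore")
    assert(padTo(8, "abc") == "abc     ")
    assert(cleanDuplicateSlash("/a//b///c") == "/a/b/c")
  }
}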
|
Romastyi/sbt-play-boilerplate
|
sbt-plugin/lib/src/main/scala/play/boilerplate/generators/StringUtils.scala
|
Scala
|
apache-2.0
| 2,123
|
package controllers
import play.api._
import play.api.mvc._
import views._
case class Box (x:Int, y: Int)
class Application extends Controller {
def index = Action { implicit request =>
Ok(views.html.report4())
}
}
|
fikrimuhal/animated-potato
|
backend/GraphicPlay8/app/controllers/Application.scala
|
Scala
|
gpl-3.0
| 225
|
package com.github.morikuni.locest.util
/** Performs persistence of data.
 *
 * @tparam A the type being managed
*/
trait Repository[A]
|
morikuni/locest
|
util/src/main/scala/com/github/morikuni/locest/util/Repository.scala
|
Scala
|
mit
| 138
|
package org.jetbrains.plugins.scala
package codeInspection
package valInTraitInspection
import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder}
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScValueDeclaration, ScVariableDeclaration}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTrait
/**
* User: Alexander Podkhalyuzin
* Date: 02.06.2009
*/
class AbstractValueInTraitInspection
extends AbstractInspection("ScalaAbstractValueInTrait", "Abstract Value in Trait") {
override def actionFor(implicit holder: ProblemsHolder): PartialFunction[PsiElement, Unit] = {
//todo: we should use dataflow analysis to get if it's safe to use declaration here
case v: ScValueDeclaration if v.getParent.isInstanceOf[ScTemplateBody] =>
v.containingClass match {
case _: ScTrait =>
holder.registerProblem(v, "Abstract value used in trait", ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
case _ =>
}
case v: ScVariableDeclaration =>
v.containingClass match {
case _: ScTrait =>
holder.registerProblem(v, "Abstract variable used in trait", ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
case _ =>
}
}
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/valInTraitInspection/AbstractValueInTraitInspection.scala
|
Scala
|
apache-2.0
| 1,357
|
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.filters
import org.mockito.ArgumentCaptor
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpecLike}
import org.scalatestplus.play.OneAppPerTest
import play.api.http.HeaderNames
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.mvc.{Result, _}
import play.api.test._
import scala.concurrent.Future
class DefaultToNoCacheFilterSpec extends WordSpecLike with Matchers with MockitoSugar with ScalaFutures with OneAppPerTest {
private trait Setup extends Results {
def action(headers: (String, String)*) = {
val mockAction = mock[(RequestHeader) => Future[Result]]
val outgoingResponse = Future.successful(Ok.withHeaders(headers:_*))
when(mockAction.apply(any())).thenReturn(outgoingResponse)
mockAction
}
def getResult(headers: (String, String)*) = {
DefaultToNoCacheFilter(action(headers:_*))(FakeRequest()).futureValue
}
}
"During result post-processing, the filter" should {
"add a cache-control header if there isn't one" in new Setup {
getResult() should be(Ok.withHeaders(CommonHeaders.NoCacheHeader))
}
"preserve the cache-control header if there is one" in new Setup {
getResult(HeaderNames.CACHE_CONTROL -> "max-age=300") should be(Ok.withHeaders(HeaderNames.CACHE_CONTROL -> "max-age=300"))
}
"leave any other headers alone adding No Cache" in new Setup {
val otherHeaders = Seq(
"header1" -> "value1",
"header2" -> "value2"
)
val expHeaders = otherHeaders :+ CommonHeaders.NoCacheHeader
getResult(otherHeaders:_*) should be(Ok.withHeaders(expHeaders:_*))
}
"preserve all headers" in new Setup {
val headers = Seq(
"header1" -> "value1",
HeaderNames.CACHE_CONTROL -> "max-age:765",
"header2" -> "value2"
)
getResult(headers:_*) should be(Ok.withHeaders(headers:_*))
}
}
}
|
hmrc/play-filters
|
src/test/scala/uk/gov/hmrc/play/filters/NoCacheByDefaultFilterSpec.scala
|
Scala
|
apache-2.0
| 2,652
|
package eu.brosbit.opos.lib
import _root_.net.liftweb.http.ResourceServer
import _root_.scala.xml.NodeSeq
import _root_.net.liftweb.http.{LiftRules}
import _root_.net.liftweb.http.js._
import JsCmds._
import JE._
import jquery.JqJE._
//import org.specs2.internal.scalaz.effect.Resource
// Allows the user to type stuff like Sorting.DSC to specify that a column should be sorted
// descending, instead of just a plain integer that the JS API expects.
object DataTableOption {
  /*
   * Adds data to render via script - replaces the data in the HTML table.
   * The headers length must match the length of each inner data list.
   */
trait BaseOption {
val emptyJsObj = new JsObj {
def props = Nil
}
def toJsObj: JsObj
}
object Sorting extends Enumeration {
type Sorting = Value
val ASC = Value("asc")
val DSC = Value("desc")
}
case class DataOption(headers: List[String], data: List[List[String]]) extends BaseOption {
override def toJsObj = {
if (headers.length == 0) emptyJsObj
else {
val colArr = JsArray(headers.map(h => JsObj("sTitle" -> Str(h))))
if (data.length == 0) {
JsObj("aoColumns" -> colArr)
}
else {
val dataArr = JsArray(data.map(d => JsArray(d.map(Str(_)): _*)): _*)
JsObj("aoColumns" -> colArr, "aaData" -> dataArr)
}
}
}
}
  /*
   * ComboBox menu in the widget. Chooses how many table rows are shown at once.
   * @param len - default number of rows shown
   * @param dispSizes - list of page sizes available to choose from
   */
case class DisplayLengthOption(len: Int, dispSizes: List[Int]) extends BaseOption {
override def toJsObj = {
val array = JsArray(dispSizes.map(Num(_)): _*)
JsObj("iDisplayLength" -> len, "aLengthMenu" -> array)
}
}
/*
* Options for datatable - look at http://datatables.net/examples/
* For example:
* "sPaginationType": "two_button",
* "bFilter": true,
* "bLengthChange": true,
*/
case class ExtraOptions(option: Map[String, String]) extends BaseOption {
def toJsObj = {
val obj = option.map(o => JsObj(o._1 -> o._2)).toSeq
obj.foldLeft[JsObj](emptyJsObj)(_ +* _)
}
}
/*
* internationalization for datatable
*/
case class LanguageOption(lang: String = "") extends BaseOption {
def toJsObj = {
if (lang == "") emptyJsObj
else {
val langFileName = "/" + LiftRules.resourceServerPath + "/datatable/language/" + lang + ".lang"
JsObj("oLanguage" -> JsObj("sUrl" -> langFileName))
}
}
}
case class ColumnNotSearchAndHidden(notsearchable: List[Int], hidden: List[Int]) extends BaseOption {
def toJsObj = {
val sSeq = notsearchable.map(Num(_)).toSeq
val hSeq = hidden.map(Num(_)).toSeq
val search = JsObj("bSearchable" -> JsFalse, "aTargets" -> JsArray(sSeq: _*))
val hidde = JsObj("bVisible" -> JsFalse, "aTargets" -> JsArray(hSeq: _*))
JsObj("aoColumnDefs" -> JsArray(search, hidde))
}
}
/*
* Sorting options
*/
case class SortingOption(sorting: Map[Int, Sorting.Sorting]) extends BaseOption {
def toJsObj = {
val obj = sorting.map(o => JsArray(Num(o._1), o._2.toString)).toSeq
JsObj("aaSorting" -> JsArray(obj: _*))
}
}
}
object DataTable {
private val emptyJsObj = new JsObj {
def props = Nil
}
import DataTableOption._
def apply(selector: String) = renderOnLoad(selector, emptyJsObj)
def apply(selector: String, options: BaseOption*) = {
val opt = options.map(opt => opt.toJsObj).toList.reduce((a, b) => a +* b)
renderOnLoad(selector, opt)
}
/**
* Initializes the widget. You have to call this in boot for the widget to work.
*/
def init() {
ResourceServer.allow({
case "datatable" :: tail => true
})
}
def renderOnLoad(selector: String, options: JsObj): NodeSeq = {
val onLoad = """ oTable = jQuery('""" + selector + """').dataTable(""" + options.toJsCmd + """); """
Script(JsRaw(onLoad))
}
def mergeSources(html: NodeSeq): NodeSeq = <link rel="stylesheet" href={"/" + LiftRules.resourceServerPath + "/datatable/themes/jquery.dataTables.css"} type="text/css" id=" " media="print, projection, screen"/> ++
<script type="text/javascript" src={"/" + LiftRules.resourceServerPath + "/datatable/jquery.dataTables.js"}/>
/**
* Transforms a regular table into a datatable
*/
def jsRender(selector: String, options: JsObj): JsExp =
JqId(selector) ~> new JsRaw("datatable(" + options.toJsCmd + ");") with JsMember
def jsRender(selector: String): JsExp = jsRender(selector, emptyJsObj)
}
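
// Illustrative usage (an assumption, not part of the original file): rendering a table
// with explicit data, a page-length menu and a language file might look like
//
//   import DataTableOption._
//   val script: NodeSeq = DataTable("#grades",
//     DataOption(List("Name", "Score"), List(List("Ala", "5"), List("Ola", "4"))),
//     DisplayLengthOption(10, List(10, 25, 50)),
//     LanguageOption("pl"))
//
// where "#grades" is the jQuery selector of an existing <table> element; remember to
// call DataTable.init() in Boot so that the bundled DataTables resources are served.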
|
mikolajs/osp
|
src/main/scala/eu/brosbit/opos/lib/DataTable.scala
|
Scala
|
agpl-3.0
| 4,660
|
package beam.agentsim.agents.memberships
import org.matsim.api.core.v01.Id
import org.matsim.api.core.v01.population.Person
import org.matsim.households.{Household, Households}
import org.matsim.vehicles.Vehicle
import scala.collection.concurrent.TrieMap
import scala.collection.{mutable, JavaConverters}
case class HouseholdMembershipAllocator(
households: Households,
implicit val population: org.matsim.api.core.v01.population.Population
) {
import beam.agentsim.agents.memberships.Memberships.RankedGroup._
val memberships: Map[Id[Person], Household] = allocateMembership()
private val vehicleAllocationsByRank: TrieMap[Id[Household], mutable.Map[Id[Person], Id[Vehicle]]] =
TrieMap()
def lookupMemberRank(id: Id[Person]): Option[Int] =
memberships(id).lookupMemberRank(id)
def lookupVehicleForRankedPerson(personId: Id[Person]): Option[Id[Vehicle]] = {
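    // Vehicles are handed out by household rank: the i-th ranked member is paired with
    // the i-th vehicle id, and members beyond the vehicle count get no allocation.
    // The per-household mapping is computed once and cached in vehicleAllocationsByRank.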
val household = memberships(personId)
vehicleAllocationsByRank
.getOrElseUpdate(
household.getId, {
val vehicleRes: mutable.Map[Id[Person], Id[Vehicle]] =
mutable.Map()
val householdVehicles =
JavaConverters
.collectionAsScalaIterable(household.getVehicleIds)
.toIndexedSeq
for (i <- householdVehicles.indices.toSet ++ household.rankedMembers.indices.toSet) {
          if (i < householdVehicles.size && i < household.rankedMembers.size) {
vehicleRes += (household
.rankedMembers(i)
.memberId -> householdVehicles(i))
}
}
vehicleRes
}
)
.get(personId)
}
private def allocateMembership(): Map[Id[Person], Household] = {
JavaConverters
.mapAsScalaMap(households.getHouseholds)
.flatMap({
case (_, hh) =>
JavaConverters
.asScalaBuffer(hh.getMemberIds)
.map(personId => personId -> hh)
})
.toMap
}
}
|
colinsheppard/beam
|
src/main/scala/beam/agentsim/agents/memberships/HouseholdMembershipAllocator.scala
|
Scala
|
gpl-3.0
| 1,965
|
package com.sksamuel.elastic4s.requests.searches.aggs
import com.sksamuel.elastic4s.requests.common.RefreshPolicy
import com.sksamuel.elastic4s.requests.searches.DateHistogramInterval
import com.sksamuel.elastic4s.requests.searches.aggs.responses.Aggregations
import com.sksamuel.elastic4s.requests.searches.aggs.responses.bucket.DateHistogram
import com.sksamuel.elastic4s.requests.searches.sort.{FieldSort, SortOrder}
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import scala.util.Try
class BucketSortPipelineAggHttpTest extends AnyFreeSpec with DockerTests with Matchers {
Try {
client.execute {
deleteIndex("bucketsortagg")
}.await
}
client.execute {
createIndex("bucketsortagg").mapping(
properties(
dateField("date"),
doubleField("value").stored(true)
)
)
}.await
client.execute(
bulk(
indexInto("bucketsortagg") fields("date" -> "2017-01-01", "value" -> 1000.0),
indexInto("bucketsortagg") fields("date" -> "2017-01-02", "value" -> 1000.0),
indexInto("bucketsortagg") fields("date" -> "2017-02-01", "value" -> 2000.0),
indexInto("bucketsortagg") fields("date" -> "2017-02-01", "value" -> 2000.0),
indexInto("bucketsortagg") fields("date" -> "2017-03-01", "value" -> 3000.0),
indexInto("bucketsortagg") fields("date" -> "2017-03-02", "value" -> 3000.0)
).refresh(RefreshPolicy.Immediate)
).await
"bucket sort pipeline agg" - {
"should return sorted buckets" in {
val resp = client.execute {
search("bucketsortagg").matchAllQuery().aggs(
dateHistogramAgg("sales_per_month", "date")
.interval(DateHistogramInterval.Month)
.subaggs(
sumAgg("sales", "value"),
bucketSortAggregation("sales_bucket_sort",
Seq(FieldSort("sales").order(SortOrder.DESC)))
)
)
}.await.result
resp.totalHits shouldBe 6
val buckets = resp.aggs.result[DateHistogram]("sales_per_month").buckets
buckets.size shouldBe 3
Aggregations(buckets.head.data).sum("sales").value shouldBe 6000.0
Aggregations(buckets(1).data).sum("sales").value shouldBe 4000.0
Aggregations(buckets(2).data).sum("sales").value shouldBe 2000.0
}
}
"should limit sorted buckets" in {
val resp = client.execute {
search("bucketsortagg").matchAllQuery().aggs(
dateHistogramAgg("sales_per_month", "date")
.interval(DateHistogramInterval.Month)
.subaggs (
sumAgg("sales", "value"),
bucketSortAggregation("sales_bucket_sort",
Seq(FieldSort("sales").order(SortOrder.DESC)))
.size(1)
.from(1)
)
)
}.await.result
resp.totalHits shouldBe 6
val buckets = resp.aggs.result[DateHistogram]("sales_per_month").buckets
buckets.size shouldBe 1
Aggregations(buckets.head.data).sum("sales").value shouldBe 4000.0
}
}
|
stringbean/elastic4s
|
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/requests/searches/aggs/BucketSortPipelineAggHttpTest.scala
|
Scala
|
apache-2.0
| 3,070
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.annotation
import scala.annotation.StaticAnnotation
import scala.annotation.meta._
/**
* A Scala annotation that specifies the Spark version when a definition was added.
* Different from the `@since` tag in JavaDoc, this annotation does not require explicit JavaDoc and
* hence works for overridden methods that inherit API documentation directly from parents.
* The limitation is that it does not show up in the generated Java API documentation.
*/
@param @field @getter @setter @beanGetter @beanSetter
private[spark] class Since(version: String) extends StaticAnnotation
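
// Illustrative usage (not part of this file): annotate the member with the release
// that introduced it, e.g.
//
//   @Since("1.6.0")
//   def someNewMethod(): Unit = ...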
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/main/scala/org/apache/spark/annotation/Since.scala
|
Scala
|
apache-2.0
| 1,407
|
package wandou.util.pinyin
import wandou.util.pinyin.format.HanyuPinyinOutputFormat
import wandou.util.pinyin.format.exception.BadHanyuPinyinOutputFormatCombination
/**
 * An object that provides several utility functions to convert Chinese characters
 * (both Simplified and Traditional) into various Chinese Romanization
* representations
*
*/
object PinyinHelper {
/**
   * Get all unformatted Hanyu Pinyin presentations of a single Chinese
   * character (both Simplified and Traditional)
   *
   * <p>
   * For example, <br/> If the input is '间', the return will be an array with
   * two Hanyu Pinyin strings: <br/> "jian1" <br/> "jian4" <br/> <br/> If the
   * input is '李', the return will be an array with a single Hanyu Pinyin
   * string: <br/> "li3"
   *
   * <p>
   * <b>Special Note</b>: If the return is "none0", the input Chinese
   * character exists in the Unicode CJK table but has no pronunciation in
   * Chinese
   *
   * @param ch
   * the given Chinese character
   *
   * @return a String array that contains all unformatted Hanyu Pinyin
   * presentations with tone numbers; null for a non-Chinese character
*
*/
def toHanyuPinyins(ch: Char): Array[String] =
getUnformattedHanyuPinyins(ch)
/**
   * Get all Hanyu Pinyin presentations of a single Chinese character (both
   * Simplified and Traditional)
   *
   * <p>
   * For example, <br/> If the input is '间', the return will be an array with
   * two Hanyu Pinyin strings: <br/> "jian1" <br/> "jian4" <br/> <br/> If the
   * input is '李', the return will be an array with a single Hanyu Pinyin
   * string: <br/> "li3"
   *
   * <p>
   * <b>Special Note</b>: If the return is "none0", the input Chinese
   * character is in the Unicode CJK table but has no pronunciation in Chinese
   *
   * @param ch
   * the given Chinese character
   * @param outputFormat
   * describes the desired format of the returned Hanyu Pinyin strings
   *
   * @return a set of Strings that contains all Hanyu Pinyin presentations with
   * tone numbers; null for a non-Chinese character
   *
   * @throws BadHanyuPinyinOutputFormatCombination
   * if an invalid combination of output formats is given
*
* @see HanyuPinyinOutputFormat
* @see BadHanyuPinyinOutputFormatCombination
*
*/
@throws(classOf[BadHanyuPinyinOutputFormatCombination])
def toHanyuPinyins(ch: Char, outputFormat: HanyuPinyinOutputFormat): Set[String] =
getFormattedHanyuPinyins(ch, outputFormat)
/**
   * Return the formatted Hanyu Pinyin representations of the given Chinese
   * character (both Simplified and Traditional).
   *
   * @param ch
   * the given Chinese character
   * @param outputFormat
   * describes the desired format of the returned Hanyu Pinyin strings
   * @return the formatted Hanyu Pinyin representations of the given character
   * as a set; null if no record is found in the lookup table.
*/
@throws(classOf[BadHanyuPinyinOutputFormatCombination])
private def getFormattedHanyuPinyins(ch: Char, outputFormat: HanyuPinyinOutputFormat): Set[String] = {
val pinyins = getUnformattedHanyuPinyins(ch)
var pinyinSet = Set[String]()
var i = 0
while (i < pinyins.length) {
pinyinSet += PinyinFormatter.formatHanyuPinyin(pinyins(i), outputFormat)
i += 1
}
pinyinSet
}
/**
* Delegate function
*
* @param ch
* the given Chinese character
* @return unformatted Hanyu Pinyin strings; null if the record is not found
*/
private def getUnformattedHanyuPinyins(ch: Char): Array[String] =
ChineseToPinyinConverter.getHanyuPinyins(ch)
/**
   * Get all unformatted Tongyong Pinyin presentations of a single Chinese
   * character (both Simplified and Traditional)
   *
   * @param ch
   * the given Chinese character
   *
   * @return a set of Strings that contains all unformatted Tongyong Pinyin
   * presentations with tone numbers; null for a non-Chinese character
*
* @see #toHanyuPinyinStringArray(char)
*
*/
def toTongyongPinyins(ch: Char): Set[String] =
convertToTargetPinyins(ch, PinyinRomanizationType.TONGYONG_PINYIN)
/**
   * Get all unformatted Wade-Giles presentations of a single Chinese
   * character (both Simplified and Traditional)
   *
   * @param ch
   * the given Chinese character
   *
   * @return a set of Strings that contains all unformatted Wade-Giles
   * presentations with tone numbers; null for a non-Chinese character
*
* @see #toHanyuPinyinStringArray(char)
*
*/
def toWadeGilesPinyins(ch: Char): Set[String] =
convertToTargetPinyins(ch, PinyinRomanizationType.WADEGILES_PINYIN)
/**
   * Get all unformatted MPS2 (Mandarin Phonetic Symbols 2) presentations of
   * a single Chinese character (both Simplified and Traditional)
   *
   * @param ch
   * the given Chinese character
   *
   * @return a set of Strings that contains all unformatted MPS2 (Mandarin
   * Phonetic Symbols 2) presentations with tone numbers; null for a
   * non-Chinese character
*
* @see #toHanyuPinyinStringArray(char)
*
*/
def toMPS2Pinyins(ch: Char): Set[String] =
convertToTargetPinyins(ch, PinyinRomanizationType.MPS2_PINYIN)
/**
   * Get all unformatted Yale Pinyin presentations of a single Chinese
   * character (both Simplified and Traditional)
   *
   * @param ch
   * the given Chinese character
   *
   * @return a set of Strings that contains all unformatted Yale Pinyin
   * presentations with tone numbers; null for a non-Chinese character
*
* @see #toHanyuPinyinStringArray(char)
*
*/
def toYalePinyins(ch: Char): Set[String] =
convertToTargetPinyins(ch, PinyinRomanizationType.YALE_PINYIN)
/**
* @param ch
* the given Chinese character
* @param targetPinyinSystem
   * indicates the target Chinese Romanization system to convert to
   * @return string representations in the target Chinese Romanization system
   * corresponding to the given Chinese character, as a set;
   * null if an error occurs
*
* @see PinyinRomanizationType
*/
private def convertToTargetPinyins(ch: Char, targetPinyinSystem: PinyinRomanizationType): Set[String] = {
val hanyuPinyins = getUnformattedHanyuPinyins(ch)
var pinyinSet = Set[String]()
var i = 0
while (i < hanyuPinyins.length) {
pinyinSet += PinyinRomanizationConverter.convertRomanizationSystem(hanyuPinyins(i), PinyinRomanizationType.HANYU_PINYIN, targetPinyinSystem)
i += 1
}
pinyinSet
}
/**
   * Get all unformatted Gwoyeu Romatzyh presentations of a single Chinese
   * character (both Simplified and Traditional)
   *
   * @param ch
   * the given Chinese character
   *
   * @return a set of Strings that contains all unformatted Gwoyeu Romatzyh
   * presentations with tone numbers; null for a non-Chinese character
*
* @see #toHanyuPinyinStringArray(char)
*
*/
def toGwoyeuRomatzyhs(ch: Char): Set[String] =
convertToGwoyeuRomatzyhs(ch)
/**
* @param ch
* the given Chinese character
*
   * @return Gwoyeu Romatzyh string representations corresponding to the given
   * Chinese character, as a set; null if an error occurs
*
* @see PinyinRomanizationType
*/
private def convertToGwoyeuRomatzyhs(ch: Char): Set[String] = {
val hanyuPinyins = getUnformattedHanyuPinyins(ch)
var pinyinSet = Set[String]()
var i = 0
while (i < hanyuPinyins.length) {
pinyinSet += GwoyeuRomatzyhConverter.convertHanyuPinyinToGwoyeuRomatzyh(hanyuPinyins(i))
i += 1
}
pinyinSet
}
}
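
// Illustrative usage (an assumption, not part of the original file):
//
//   PinyinHelper.toHanyuPinyins('间')      // Array("jian1", "jian4")
//   PinyinHelper.toWadeGilesPinyins('间')  // the same readings in Wade-Giles notation
//
// The overload taking a HanyuPinyinOutputFormat returns the readings formatted
// according to the requested tone/case options instead of raw "jian1"-style strings.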
|
wandoulabs/wandou-math
|
wandou-util/src/main/scala/wandou/util/pinyin/PinyinHelper.scala
|
Scala
|
apache-2.0
| 8,058
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.api.ml
import org.apache.spark.rdd.RDD
import java.io.File
import org.apache.spark.SparkContext
import org.apache.spark.ml.{ Estimator, Model }
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.StructType
import org.apache.spark.ml.param.{ DoubleParam, Param, ParamMap, Params }
import org.apache.sysml.runtime.matrix.MatrixCharacteristics
import org.apache.sysml.runtime.matrix.data.MatrixBlock
import org.apache.sysml.runtime.DMLRuntimeException
import org.apache.sysml.runtime.instructions.spark.utils.{ RDDConverterUtilsExt => RDDConverterUtils }
import org.apache.sysml.api.mlcontext._
import org.apache.sysml.api.mlcontext.ScriptFactory._
object LinearRegression {
final val scriptPathCG = "scripts" + File.separator + "algorithms" + File.separator + "LinearRegCG.dml"
final val scriptPathDS = "scripts" + File.separator + "algorithms" + File.separator + "LinearRegDS.dml"
}
// solver = "direct-solve" or "newton-cg" (conjugate gradient)
class LinearRegression(override val uid: String, val sc: SparkContext, val solver: String = "direct-solve")
extends Estimator[LinearRegressionModel]
with HasIcpt
with HasRegParam
with HasTol
with HasMaxOuterIter
with BaseSystemMLRegressor {
def setIcpt(value: Int) = set(icpt, value)
def setMaxIter(value: Int) = set(maxOuterIter, value)
def setRegParam(value: Double) = set(regParam, value)
def setTol(value: Double) = set(tol, value)
override def copy(extra: ParamMap): Estimator[LinearRegressionModel] = {
val that = new LinearRegression(uid, sc, solver)
copyValues(that, extra)
}
def getTrainingScript(isSingleNode: Boolean): (Script, String, String) = {
val script = dml(
ScriptsUtils.getDMLScript(
if (solver.compareTo("direct-solve") == 0) LinearRegression.scriptPathDS
else if (solver.compareTo("newton-cg") == 0) LinearRegression.scriptPathCG
else throw new DMLRuntimeException("The algorithm should be direct-solve or newton-cg")
)
).in("$X", " ")
.in("$Y", " ")
.in("$B", " ")
.in("$Log", " ")
.in("$fmt", "binary")
.in("$icpt", toDouble(getIcpt))
.in("$reg", toDouble(getRegParam))
.in("$tol", toDouble(getTol))
.in("$maxi", toDouble(getMaxOuterIte))
.out("beta_out")
(script, "X", "y")
}
def fit(X_file: String, y_file: String): LinearRegressionModel = {
mloutput = baseFit(X_file, y_file, sc)
new LinearRegressionModel(this)
}
def fit(X_mb: MatrixBlock, y_mb: MatrixBlock): LinearRegressionModel = {
mloutput = baseFit(X_mb, y_mb, sc)
new LinearRegressionModel(this)
}
def fit(df: ScriptsUtils.SparkDataType): LinearRegressionModel = {
mloutput = baseFit(df, sc)
new LinearRegressionModel(this)
}
}
class LinearRegressionModel(override val uid: String)(estimator: LinearRegression, val sc: SparkContext)
extends Model[LinearRegressionModel]
with HasIcpt
with HasRegParam
with HasTol
with HasMaxOuterIter
with BaseSystemMLRegressorModel {
override def copy(extra: ParamMap): LinearRegressionModel = {
val that = new LinearRegressionModel(uid)(estimator, sc)
copyValues(that, extra)
}
def transform_probability(X: MatrixBlock): MatrixBlock = throw new DMLRuntimeException("Unsupported method")
def transform_probability(X_file: String): String = throw new DMLRuntimeException("Unsupported method")
def baseEstimator(): BaseSystemMLEstimator = estimator
def this(estimator: LinearRegression) = {
this("model")(estimator, estimator.sc)
}
def getPredictionScript(isSingleNode: Boolean): (Script, String) =
PredictionUtils.getGLMPredictionScript(estimator.mloutput.getMatrix("beta_out"), isSingleNode)
def modelVariables(): List[String] = List[String]("beta_out")
def transform(df: ScriptsUtils.SparkDataType): DataFrame = baseTransform(df, sc, "means")
def transform(X: MatrixBlock): MatrixBlock = baseTransform(X, sc, "means")
def transform(X_file: String): String = baseTransform(X_file, sc, "means")
}
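
// Illustrative usage (an assumption, not part of the original file): training and
// scoring against a Spark DataFrame might look roughly like
//
//   val lr = new LinearRegression("lr", sc, solver = "newton-cg")
//     .setIcpt(1).setRegParam(0.01).setTol(1e-6).setMaxIter(100)
//   val model  = lr.fit(trainingDf)        // trainingDf: ScriptsUtils.SparkDataType
//   val scored = model.transform(testDf)   // appends the predicted means
//
// The exact input column conventions come from BaseSystemMLRegressor, which is not
// shown here.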
|
deroneriksson/incubator-systemml
|
src/main/scala/org/apache/sysml/api/ml/LinearRegression.scala
|
Scala
|
apache-2.0
| 4,901
|
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import akka.actor._
import akka.event.slf4j.SLF4JLogging
import org.apache.commons.vfs2._
import org.ensime.api._
import org.ensime.config.richconfig._
import org.ensime.util.Debouncer
import org.ensime.vfs._
import org.ensime.util.file._
import org.ensime.util.fileobject._
import org.ensime.util.list._
import org.ensime.util.map._
import scala.annotation.tailrec
import scala.concurrent.duration._
// mutable: lookup of user's source files are atomically updated
class SourceResolver(
config: EnsimeConfig
)(
implicit
actorSystem: ActorSystem,
vfs: EnsimeVFS
) extends FileChangeListener
with SLF4JLogging {
def fileAdded(f: FileObject) = if (relevant(f)) debouncedUpdate.call()
def fileRemoved(f: FileObject) = debouncedUpdate.call()
def fileChanged(f: FileObject) = {}
def relevant(f: FileObject): Boolean = f.getName.isFile && {
val file = f.asLocalFile
(file.isScala || file.isJava) && !file.getPath.contains(".ensime_cache")
}
def resolve(clazz: PackageName, source: RawSource): Option[FileObject] = {
@tailrec
def loop(clazzes: List[PackageName]): Option[FileObject] =
clazzes match {
case Nil => None
case h :: t =>
resolveClazz(h, source) match {
case None => loop(t)
case s @ Some(_) => s
}
}
val size = clazz.path.size
val combinations =
clazz.path.tails.flatMap(_.inits).filterNot(_.isEmpty).toList
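    // e.g. for path List("a", "b", "c") this yields every non-empty contiguous slice:
    // List(a,b,c), List(a,b), List(a), List(b,c), List(b), List(c)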
    // Quite often people put stuff into the root package,
    // so we add the empty package after the parent packages, just
    // before we try other possible packages
val combinationsWithEmpty =
(combinations.take(size) ::: List
.empty[String] :: combinations.drop(size))
.map(PackageName.apply)
loop(combinationsWithEmpty)
}
// we only support the case where RawSource has a Some(filename)
private def resolveClazz(clazz: PackageName,
source: RawSource): Option[FileObject] =
source.filename match {
case None => None
case Some(filename) =>
all.get(clazz).flatMap {
_.find(_.getName.getBaseName == filename)
}
}
def update(): Unit = {
log.debug("updating sources")
all = recalculate
}
private def scan(f: FileObject) = f.findFiles(SourceSelector) match {
case null => Nil
case res => res.toList
}
private val depSources = {
val srcJars = config.referenceSourceJars.toSet ++ {
for {
project <- config.projects
srcArchive <- project.librarySources.map(_.file.toFile)
} yield srcArchive
}
for {
srcJarFile <- srcJars.toList
// interestingly, this is able to handle zip files
srcJar = vfs.vjar(srcJarFile)
srcEntry <- scan(srcJar)
inferred = infer(srcJar, srcEntry)
      // continue to hold a reference to source jars
      // so that we can access their contents elsewhere.
      // this does mean we keep a file handle open, sorry.
//_ = vfs.nuke(srcJar)
} yield (inferred, srcEntry)
}.toMultiMapSet
private def userSources = {
for {
project <- config.projects
root <- project.sources.map(_.file.toFile)
dir = vfs.vfile(root)
file <- scan(dir)
} yield (infer(dir, file), file)
}.toMultiMapSet
private def recalculate = depSources merge userSources
private var all = recalculate
val debouncedUpdate = {
import actorSystem.dispatcher
Debouncer("SourceResolver",
actorSystem.scheduler,
delay = 5.seconds,
maxDelay = 1.hour) { () =>
this.update()
}
}
private def infer(base: FileObject, file: FileObject): PackageName = {
// getRelativeName feels the wrong way round, but this is correct
val relative = base.getName.getRelativeName(file.getName)
// vfs separator char is always /
PackageName((relative split "/").toList.init)
}
}
|
yyadavalli/ensime-server
|
core/src/main/scala/org/ensime/indexer/SourceResolver.scala
|
Scala
|
gpl-3.0
| 4,097
|