code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package gitbucket.core.util
import gitbucket.core.service.RepositoryService
import org.eclipse.jgit.api.Git
import Directory._
import StringUtil._
import ControlUtil._
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import org.eclipse.jgit.lib._
import org.eclipse.jgit.revwalk._
import org.eclipse.jgit.revwalk.filter._
import org.eclipse.jgit.treewalk._
import org.eclipse.jgit.treewalk.filter._
import org.eclipse.jgit.diff.DiffEntry.ChangeType
import org.eclipse.jgit.errors.{ConfigInvalidException, MissingObjectException}
import org.eclipse.jgit.transport.RefSpec
import java.util.Date
import org.eclipse.jgit.api.errors.{JGitInternalException, InvalidRefNameException, RefAlreadyExistsException, NoHeadException}
import org.eclipse.jgit.dircache.DirCacheEntry
import org.slf4j.LoggerFactory
/**
* Provides complex JGit operations.
*/
object JGitUtil {
private val logger = LoggerFactory.getLogger(JGitUtil.getClass)
/**
* The repository data.
*
* @param owner the user name of the repository owner
* @param name the repository name
* @param url the repository URL
* @param commitCount the commit count. Capped: if the repository has more than
*        10000 commits then this property is 10001 (see getRepositoryInfo).
* @param branchList the list of branch names
* @param tags the list of tags
*/
case class RepositoryInfo(owner: String, name: String, url: String, commitCount: Int, branchList: List[String], tags: List[TagInfo]){
// auxiliary constructor for a repository with no commits / branches / tags yet
def this(owner: String, name: String, baseUrl: String) = {
this(owner, name, s"${baseUrl}/git/${owner}/${name}.git", 0, Nil, Nil)
}
}
/**
* The file data for the file list of the repository viewer.
*
* @param id the object id
* @param isDirectory whether it is a directory (true for trees and submodule links)
* @param name the file (or directory) name
* @param message the last commit message
* @param commitId the last commit id
* @param time the last modified time
* @param author the last commit author name (taken from the author ident, not the committer)
* @param mailAddress the last commit author's mail address
* @param linkUrl the url of submodule
*/
case class FileInfo(id: ObjectId, isDirectory: Boolean, name: String, message: String, commitId: String,
time: Date, author: String, mailAddress: String, linkUrl: Option[String])
/**
 * The commit data.
 *
 * @param id the commit id
 * @param shortMessage the short message
 * @param fullMessage the full message
 * @param parents the list of parent commit id
 * @param authorTime the author time
 * @param authorName the author name
 * @param authorEmailAddress the mail address of the author
 * @param commitTime the commit time
 * @param committerName the committer name
 * @param committerEmailAddress the mail address of the committer
 */
case class CommitInfo(id: String, shortMessage: String, fullMessage: String, parents: List[String],
    authorTime: Date, authorName: String, authorEmailAddress: String,
    commitTime: Date, committerName: String, committerEmailAddress: String){

  def this(rev: org.eclipse.jgit.revwalk.RevCommit) = this(
    rev.getName,
    rev.getShortMessage,
    rev.getFullMessage,
    rev.getParents().map(_.name).toList,
    rev.getAuthorIdent.getWhen,
    rev.getAuthorIdent.getName,
    rev.getAuthorIdent.getEmailAddress,
    rev.getCommitterIdent.getWhen,
    rev.getCommitterIdent.getName,
    rev.getCommitterIdent.getEmailAddress)

  /** The first line of the commit message (or JGit's short message if shorter). */
  val summary = getSummaryMessage(fullMessage, shortMessage)

  /**
   * Everything after the first line of the commit message, if any.
   * Fix: split on a real newline character; the previous code searched for the
   * literal two-character sequence backslash-n, so the description was always None.
   */
  val description = defining(fullMessage.trim.indexOf("\n")){ i =>
    if(i >= 0){
      Some(fullMessage.trim.substring(i).trim)
    } else None
  }

  /** Whether the committer differs from the author (e.g. after cherry-pick or rebase). */
  def isDifferentFromAuthor: Boolean = authorName != committerName || authorEmailAddress != committerEmailAddress
}
/**
 * The difference data for a single changed file. Old/new text content is only
 * populated when the diff was fetched with content and the file is text;
 * image files are flagged via the *IsImage fields instead (see getDiffs).
 */
case class DiffInfo(changeType: ChangeType, oldPath: String, newPath: String, oldContent: Option[String], newContent: Option[String],
oldIsImage: Boolean, newIsImage: Boolean, oldObjectId: Option[String], newObjectId: Option[String])
/**
 * The file content data for the file content view of the repository viewer.
 *
 * @param viewType "image", "large" or "other"
 * @param content the string content
 * @param charset the character encoding
 */
case class ContentInfo(viewType: String, content: Option[String], charset: Option[String]){
  /**
   * The line separator of this content ("LF" or "CRLF").
   * Fix: look for the actual CR+LF character pair; the previous code searched
   * for the literal four-character string backslash-r-backslash-n and therefore
   * always reported "LF".
   */
  val lineSeparator: String = if(content.exists(_.indexOf("\r\n") >= 0)) "CRLF" else "LF"
}
/**
* The tag data.
*
* @param name the tag name
* @param time the tagged date (committer date of the referenced commit)
* @param id the commit id the tag points to (annotated tags are dereferenced, see getRevCommitFromId)
*/
case class TagInfo(name: String, time: Date, id: String)
/**
* The submodule data
*
* @param name the module name
* @param path the path in the repository
* @param url the repository url of this module
*/
case class SubmoduleInfo(name: String, path: String, url: String)
/** Ahead/behind commit counts and merge status of a branch relative to the default branch. */
case class BranchMergeInfo(ahead: Int, behind: Int, isMerged: Boolean)
/** Branch summary for the branch list view; mergeInfo is None for the origin repository's default branch. */
case class BranchInfo(name: String, committerName: String, commitTime: Date, committerEmailAddress:String, mergeInfo: Option[BranchMergeInfo], commitId: String)
/** Per-commit blame data; `lines` holds the 0-based line indexes attributed to this commit. */
case class BlameInfo(id: String, authorName: String, authorEmailAddress: String, authorTime:java.util.Date,
prev: Option[String], prevPath: Option[String], commitTime:java.util.Date, message:String, lines:Set[Int])
/**
 * Returns RevCommit from the commit or tag id.
 * Annotated tags are dereferenced to the commit they point at.
 *
 * @param git the Git object
 * @param objectId the ObjectId of the commit or tag
 * @return the RevCommit for the specified commit or tag
 */
def getRevCommitFromId(git: Git, objectId: ObjectId): RevCommit = {
  val revWalk = new RevWalk(git.getRepository)
  try {
    revWalk.parseAny(objectId) match {
      case r: RevTag => revWalk.parseCommit(r.getObject)
      case _         => revWalk.parseCommit(objectId)
    }
  } finally {
    // fix: release the walk even when parsing throws (the previous code
    // only disposed on the success path)
    revWalk.dispose
  }
}
/**
* Returns the repository information. It contains branch names and tag names.
* The commit count is capped at 10001, meaning "more than 10000 commits".
*/
def getRepositoryInfo(owner: String, repository: String, baseUrl: String): RepositoryInfo = {
using(Git.open(getRepositoryDir(owner, repository))){ git =>
try {
// get commit count across all refs, counting at most 10001 commits
val commitCount = git.log.all.call.iterator.asScala.map(_ => 1).take(10001).sum
RepositoryInfo(
owner, repository, s"${baseUrl}/git/${owner}/${repository}.git",
// commit count
commitCount,
// branches (short names, without the refs/heads/ prefix)
git.branchList.call.asScala.map { ref =>
ref.getName.stripPrefix("refs/heads/")
}.toList,
// tags, sorted oldest first by the referenced commit's committer date
git.tagList.call.asScala.map { ref =>
val revCommit = getRevCommitFromId(git, ref.getObjectId)
TagInfo(ref.getName.stripPrefix("refs/tags/"), revCommit.getCommitterIdent.getWhen, revCommit.getName)
}.sortBy(_.time).toList
)
} catch {
// not initialized: a repository without commits has no HEAD yet
case e: NoHeadException => RepositoryInfo(
owner, repository, s"${baseUrl}/git/${owner}/${repository}.git", 0, Nil, Nil)
}
}
}
/**
* Returns the file list of the specified path.
*
* @param git the Git object
* @param revision the branch name or commit id
* @param path the directory path (optional)
* @return the list of file information, directories first, then sorted by name
*/
def getFileList(git: Git, revision: String, path: String = "."): List[FileInfo] = {
using(new RevWalk(git.getRepository)){ revWalk =>
val objectId = git.getRepository.resolve(revision)
if(objectId==null) return Nil
val revCommit = revWalk.parseCommit(objectId)
// Opens a TreeWalk over the direct children of `path` in the given commit's tree
// and passes it to `f`; does nothing when the path does not exist in that tree.
def useTreeWalk(rev:RevCommit)(f:TreeWalk => Any): Unit = if (path == ".") {
val treeWalk = new TreeWalk(git.getRepository)
treeWalk.addTree(rev.getTree)
using(treeWalk)(f)
} else {
val treeWalk = TreeWalk.forPath(git.getRepository, path, rev.getTree)
if(treeWalk != null){
treeWalk.enterSubtree
using(treeWalk)(f)
}
}
// Collapses chains of single-child directories into one entry ("a/b/c"),
// like GitHub's compressed directory display.
@tailrec
def simplifyPath(tuple: (ObjectId, FileMode, String, Option[String], RevCommit)): (ObjectId, FileMode, String, Option[String], RevCommit) = tuple match {
case (oid, FileMode.TREE, name, _, commit ) =>
(using(new TreeWalk(git.getRepository)) { walk =>
walk.addTree(oid)
// single tree child, or None
if(walk.next() && walk.getFileMode(0) == FileMode.TREE){
Some((walk.getObjectId(0), walk.getFileMode(0), name + "/" + walk.getNameString, None, commit)).filterNot(_ => walk.next())
} else {
None
}
}) match {
case Some(child) => simplifyPath(child)
case _ => tuple
}
case _ => tuple
}
def tupleAdd(tuple:(ObjectId, FileMode, String, Option[String]), rev:RevCommit) = tuple match {
case (oid, fmode, name, opt) => (oid, fmode, name, opt, rev)
}
// Walks the commit history once, attributing to each file the most recent
// commit in which its object id last changed. `restList` carries, per file,
// the map of candidate parent commits still to be checked.
@tailrec
def findLastCommits(result:List[(ObjectId, FileMode, String, Option[String], RevCommit)],
restList:List[((ObjectId, FileMode, String, Option[String]), Map[RevCommit, RevCommit])],
revIterator:java.util.Iterator[RevCommit]): List[(ObjectId, FileMode, String, Option[String], RevCommit)] ={
if(restList.isEmpty){
result
}else if(!revIterator.hasNext){ // maybe, revCommit has only 1 log. other case, restList be empty
result ++ restList.map{ case (tuple, map) => tupleAdd(tuple, map.values.headOption.getOrElse(revCommit)) }
}else{
val newCommit = revIterator.next
val (thisTimeChecks,skips) = restList.partition{ case (tuple, parentsMap) => parentsMap.contains(newCommit) }
if(thisTimeChecks.isEmpty){
findLastCommits(result, restList, revIterator)
}else{
var nextRest = skips
var nextResult = result
// Map[(name, oid), (tuple, parentsMap)]
val rest = scala.collection.mutable.Map(thisTimeChecks.map{ t => (t._1._3 -> t._1._1) -> t }:_*)
lazy val newParentsMap = newCommit.getParents.map(_ -> newCommit).toMap
// Files whose (name, object id) still exist unchanged in this commit keep
// searching further back; the others are attributed to the child commit.
useTreeWalk(newCommit){ walk =>
while(walk.next){
rest.remove(walk.getNameString -> walk.getObjectId(0)).map{ case (tuple, _) =>
if(newParentsMap.isEmpty){
nextResult +:= tupleAdd(tuple, newCommit)
}else{
nextRest +:= tuple -> newParentsMap
}
}
}
}
rest.values.map{ case (tuple, parentsMap) =>
val restParentsMap = parentsMap - newCommit
if(restParentsMap.isEmpty){
nextResult +:= tupleAdd(tuple, parentsMap(newCommit))
}else{
nextRest +:= tuple -> restParentsMap
}
}
findLastCommits(nextResult, nextRest, revIterator)
}
}
}
// collect the direct children of `path`, resolving submodule link URLs
var fileList: List[(ObjectId, FileMode, String, Option[String])] = Nil
useTreeWalk(revCommit){ treeWalk =>
while (treeWalk.next()) {
val linkUrl =if (treeWalk.getFileMode(0) == FileMode.GITLINK) {
getSubmodules(git, revCommit.getTree).find(_.path == treeWalk.getPathString).map(_.url)
} else None
fileList +:= (treeWalk.getObjectId(0), treeWalk.getFileMode(0), treeWalk.getNameString, linkUrl)
}
}
// attribute a last-modifying commit to every entry, compress single-child
// directories, then sort: directories first, then by name
revWalk.markStart(revCommit)
val it = revWalk.iterator
val lastCommit = it.next
val nextParentsMap = Option(lastCommit).map(_.getParents.map(_ -> lastCommit).toMap).getOrElse(Map())
findLastCommits(List.empty, fileList.map(a => a -> nextParentsMap), it)
.map(simplifyPath)
.map { case (objectId, fileMode, name, linkUrl, commit) =>
FileInfo(
objectId,
fileMode == FileMode.TREE || fileMode == FileMode.GITLINK,
name,
getSummaryMessage(commit.getFullMessage, commit.getShortMessage),
commit.getName,
commit.getAuthorIdent.getWhen,
commit.getAuthorIdent.getName,
commit.getAuthorIdent.getEmailAddress,
linkUrl)
}.sortWith { (file1, file2) =>
(file1.isDirectory, file2.isDirectory) match {
case (true , false) => true
case (false, true ) => false
case _ => file1.name.compareTo(file2.name) < 0
}
}.toList
}
}
/**
 * Returns the first line of the commit message, preferring JGit's (possibly
 * shorter) short message when the first line is longer than it.
 *
 * @param fullMessage the complete commit message
 * @param shortMessage JGit's short message for the same commit
 * @return the summary line
 */
private def getSummaryMessage(fullMessage: String, shortMessage: String): String = {
  // Fix: split on a real newline; the previous code searched for the literal
  // two-character sequence backslash-n and thus never found the line break.
  val i = fullMessage.trim.indexOf("\n")
  val firstLine = if(i >= 0) fullMessage.trim.substring(0, i).trim else fullMessage
  if(firstLine.length > shortMessage.length) shortMessage else firstLine
}
/**
 * Returns the tree object name of the given revision, or None when the
 * revision cannot be resolved.
 */
def getTreeId(git: Git, revision: String): Option[String] = {
  using(new RevWalk(git.getRepository)){ revWalk =>
    Option(git.getRepository.resolve(revision)).map { resolved =>
      revWalk.parseCommit(resolved).getTree.name
    }
  }
}
/**
 * Returns every file path contained in the tree identified by treeId,
 * walking recursively (directories themselves are not listed).
 */
def getAllFileListByTreeId(git: Git, treeId: String): List[String] = {
  using(new RevWalk(git.getRepository)){ revWalk =>
    val treeObjectId = git.getRepository.resolve(treeId + "^{tree}")
    if(treeObjectId == null) Nil
    else using(new TreeWalk(git.getRepository)){ treeWalk =>
      treeWalk.addTree(treeObjectId)
      treeWalk.setRecursive(true)
      // accumulate in traversal order
      val paths = scala.collection.mutable.ListBuffer.empty[String]
      while(treeWalk.next()){
        paths += treeWalk.getPathString
      }
      paths.toList
    }
  }
}
/**
* Returns the commit list of the specified branch.
*
* @param git the Git object
* @param revision the branch name or commit id
* @param page the page number (1-); values <= 0 are treated as page 1
* @param limit the number of commit info per page. 0 (default) means unlimited.
* @param path filters by this path. default is no filter.
* @return a tuple of the commit list and whether has next, or the error message
*/
def getCommitLog(git: Git, revision: String, page: Int = 1, limit: Int = 0, path: String = ""): Either[String, (List[CommitInfo], Boolean)] = {
val fixedPage = if(page <= 0) 1 else page
// walks the iterator, skipping commits before the requested page and
// stopping once the page is full
@scala.annotation.tailrec
def getCommitLog(i: java.util.Iterator[RevCommit], count: Int, logs: List[CommitInfo]): (List[CommitInfo], Boolean) =
i.hasNext match {
case true if(limit <= 0 || logs.size < limit) => {
val commit = i.next
getCommitLog(i, count + 1, if(limit <= 0 || (fixedPage - 1) * limit <= count) logs :+ new CommitInfo(commit) else logs)
}
case _ => (logs, i.hasNext)
}
using(new RevWalk(git.getRepository)){ revWalk =>
defining(git.getRepository.resolve(revision)){ objectId =>
if(objectId == null){
Left(s"${revision} can't be resolved.")
} else {
revWalk.markStart(revWalk.parseCommit(objectId))
if(path.nonEmpty){
// restrict the walk to commits that change `path`
revWalk.setTreeFilter(AndTreeFilter.create(PathFilter.create(path), TreeFilter.ANY_DIFF))
}
Right(getCommitLog(revWalk.iterator, 0, Nil))
}
}
}
}
/**
 * Collects commits reachable from `begin` (walked newest first) until
 * `endCondition` matches, and returns them reversed (oldest first).
 * The matching commit itself is included only when `includesLastCommit` is true.
 */
def getCommitLogs(git: Git, begin: String, includesLastCommit: Boolean = false)
(endCondition: RevCommit => Boolean): List[CommitInfo] = {
@scala.annotation.tailrec
def getCommitLog(i: java.util.Iterator[RevCommit], logs: List[CommitInfo]): List[CommitInfo] =
i.hasNext match {
case true => {
val revCommit = i.next
if(endCondition(revCommit)){
if(includesLastCommit) logs :+ new CommitInfo(revCommit) else logs
} else {
getCommitLog(i, logs :+ new CommitInfo(revCommit))
}
}
case false => logs
}
using(new RevWalk(git.getRepository)){ revWalk =>
revWalk.markStart(revWalk.parseCommit(git.getRepository.resolve(begin)))
getCommitLog(revWalk.iterator, Nil).reverse
}
}
/**
* Returns the commit list between two revisions.
*
* @param git the Git object
* @param from the from revision
* @param to the to revision
* @return the commit list, oldest first, excluding the `from` commit itself
*/
// TODO swap parameters 'from' and 'to'!?
def getCommitLog(git: Git, from: String, to: String): List[CommitInfo] =
getCommitLogs(git, to)(_.getName == from)
/**
* Returns the latest RevCommit of the specified path.
*
* @param git the Git object
* @param path the path
* @param revision the branch name or commit id
* @return the latest commit, or None when the path has no history
*/
def getLatestCommitFromPath(git: Git, path: String, revision: String): Option[RevCommit] =
getLatestCommitFromPaths(git, List(path), revision).get(path)
/**
* Returns the list of latest RevCommit of the specified paths.
*
* @param git the Git object
* @param paths the list of paths
* @param revision the branch name or commit id
* @return the list of latest commit
*/
def getLatestCommitFromPaths(git: Git, paths: List[String], revision: String): Map[String, RevCommit] = {
val start = getRevCommitFromId(git, git.getRepository.resolve(revision))
paths.map { path =>
// NOTE(review): iterator.next throws NoSuchElementException when a path has
// no history at this revision — confirm callers only pass tracked paths.
val commit = git.log.add(start).addPath(path).setMaxCount(1).call.iterator.next
(path, commit)
}.toMap
}
/**
* Returns the tuple of diff of the given commit and the previous commit id.
* For an initial commit the second element is None and every file is an ADD.
*/
def getDiffs(git: Git, id: String, fetchContent: Boolean = true): (List[DiffInfo], Option[String]) = {
// collect at most the first two commits reachable from `id`
@scala.annotation.tailrec
def getCommitLog(i: java.util.Iterator[RevCommit], logs: List[RevCommit]): List[RevCommit] =
i.hasNext match {
case true if(logs.size < 2) => getCommitLog(i, logs :+ i.next)
case _ => logs
}
using(new RevWalk(git.getRepository)){ revWalk =>
revWalk.markStart(revWalk.parseCommit(git.getRepository.resolve(id)))
val commits = getCommitLog(revWalk.iterator, Nil)
val revCommit = commits(0)
if(commits.length >= 2){
// not initial commit
val oldCommit = if(revCommit.getParentCount >= 2) {
// merge commit: diff against the first parent
revCommit.getParents.head
} else {
commits(1)
}
(getDiffs(git, oldCommit.getName, id, fetchContent), Some(oldCommit.getName))
} else {
// initial commit: every file in the commit's tree is reported as an ADD
using(new TreeWalk(git.getRepository)){ treeWalk =>
treeWalk.addTree(revCommit.getTree)
val buffer = new scala.collection.mutable.ListBuffer[DiffInfo]()
while(treeWalk.next){
val newIsImage = FileUtil.isImage(treeWalk.getPathString)
buffer.append((if(!fetchContent){
DiffInfo(ChangeType.ADD, null, treeWalk.getPathString, None, None, false, newIsImage, None, Option(treeWalk.getObjectId(0)).map(_.name))
} else {
DiffInfo(ChangeType.ADD, null, treeWalk.getPathString, None,
JGitUtil.getContentFromId(git, treeWalk.getObjectId(0), false).filter(FileUtil.isText).map(convertFromByteArray),
false, newIsImage, None, Option(treeWalk.getObjectId(0)).map(_.name))
}))
}
(buffer.toList, None)
}
}
}
}
/**
 * Returns the diffs between two revisions (from..to), optionally with the
 * text content of both sides attached.
 */
def getDiffs(git: Git, from: String, to: String, fetchContent: Boolean): List[DiffInfo] = {
val reader = git.getRepository.newObjectReader
val oldTreeIter = new CanonicalTreeParser
oldTreeIter.reset(reader, git.getRepository.resolve(from + "^{tree}"))
val newTreeIter = new CanonicalTreeParser
newTreeIter.reset(reader, git.getRepository.resolve(to + "^{tree}"))
import scala.collection.JavaConverters._
// NOTE(review): enables rename/copy detection by mutating the in-memory repo
// config (never saved) — this affects later diffs on the same Repository instance.
git.getRepository.getConfig.setString("diff", null, "renames", "copies")
git.diff.setNewTree(newTreeIter).setOldTree(oldTreeIter).call.asScala.map { diff =>
val oldIsImage = FileUtil.isImage(diff.getOldPath)
val newIsImage = FileUtil.isImage(diff.getNewPath)
// images and content-less requests carry object ids only; text files carry content
if(!fetchContent || oldIsImage || newIsImage){
DiffInfo(diff.getChangeType, diff.getOldPath, diff.getNewPath, None, None, oldIsImage, newIsImage, Option(diff.getOldId).map(_.name), Option(diff.getNewId).map(_.name))
} else {
DiffInfo(diff.getChangeType, diff.getOldPath, diff.getNewPath,
JGitUtil.getContentFromId(git, diff.getOldId.toObjectId, false).filter(FileUtil.isText).map(convertFromByteArray),
JGitUtil.getContentFromId(git, diff.getNewId.toObjectId, false).filter(FileUtil.isText).map(convertFromByteArray),
oldIsImage, newIsImage, Option(diff.getOldId).map(_.name), Option(diff.getNewId).map(_.name))
}
}.toList
}
/**
 * Returns the sorted list of branch names that contain the specified commit.
 */
def getBranchesOfCommit(git: Git, commitId: String): List[String] =
  using(new RevWalk(git.getRepository)){ revWalk =>
    // "^0" peels the ref down to the commit object itself
    val commit = revWalk.parseCommit(git.getRepository.resolve(commitId + "^0"))
    val containing = git.getRepository.getAllRefs.entrySet.asScala.collect {
      case e if e.getKey.startsWith(Constants.R_HEADS) &&
                revWalk.isMergedInto(commit, revWalk.parseCommit(e.getValue.getObjectId)) =>
        e.getValue.getName.substring(Constants.R_HEADS.length)
    }
    containing.toList.sorted
  }
/**
 * Returns the list of tag names that contain the specified commit,
 * sorted in descending order.
 */
def getTagsOfCommit(git: Git, commitId: String): List[String] =
  using(new RevWalk(git.getRepository)){ revWalk =>
    // "^0" peels the ref down to the commit object itself
    val commit = revWalk.parseCommit(git.getRepository.resolve(commitId + "^0"))
    val containing = git.getRepository.getAllRefs.entrySet.asScala.collect {
      case e if e.getKey.startsWith(Constants.R_TAGS) &&
                revWalk.isMergedInto(commit, revWalk.parseCommit(e.getValue.getObjectId)) =>
        e.getValue.getName.substring(Constants.R_TAGS.length)
    }
    containing.toList.sorted.reverse
  }
/**
 * Creates a new bare repository at the given directory and enables the
 * HTTP receive-pack so the repository accepts pushes over HTTP.
 */
def initRepository(dir: java.io.File): Unit =
using(new RepositoryBuilder().setGitDir(dir).setBare.build){ repository =>
repository.create
setReceivePack(repository)
}
/**
 * Clones `from` into `to` as a bare repository and enables the HTTP receive-pack.
 */
def cloneRepository(from: java.io.File, to: java.io.File): Unit =
using(Git.cloneRepository.setURI(from.toURI.toString).setDirectory(to).setBare(true).call){ git =>
setReceivePack(git.getRepository)
}
/** Returns true when the repository has no commits yet (HEAD cannot be resolved). */
def isEmpty(git: Git): Boolean = git.getRepository.resolve(Constants.HEAD) == null
/** Enables "http.receivepack" in the repository config so pushes over HTTP are accepted. */
private def setReceivePack(repository: org.eclipse.jgit.lib.Repository): Unit =
defining(repository.getConfig){ config =>
config.setBoolean("http", null, "receivepack", true)
config.save
}
/**
 * Resolves the branch to display: the requested revision (or the configured
 * default branch when no revision is given), falling back to the first branch
 * in the list. Returns None when nothing resolves.
 */
def getDefaultBranch(git: Git, repository: RepositoryService.RepositoryInfo,
    revstr: String = ""): Option[(ObjectId, String)] = {
  val candidates: Seq[String] =
    Seq(if(revstr.isEmpty) repository.repository.defaultBranch else revstr) ++
      repository.branchList.headOption
  candidates
    .map(rev => (git.getRepository.resolve(rev), rev))
    .find(_._1 != null)
}
/**
 * Creates a new branch starting at `fromBranch`.
 *
 * @return Right with a success message, or Left with a user-facing error message
 */
def createBranch(git: Git, fromBranch: String, newBranch: String) = {
try {
git.branchCreate().setStartPoint(fromBranch).setName(newBranch).call()
Right("Branch created.")
} catch {
case e: RefAlreadyExistsException => Left("Sorry, that branch already exists.")
// JGitInternalException occurs when new branch name is 'a' and the branch whose name is 'a/*' exists.
case _: InvalidRefNameException | _: JGitInternalException => Left("Sorry, that name is invalid.")
}
}
/**
 * Creates an index (dircache) entry for the given path, file mode and object id.
 */
def createDirCacheEntry(path: String, mode: FileMode, objectId: ObjectId): DirCacheEntry = {
val entry = new DirCacheEntry(path)
entry.setFileMode(mode)
entry.setObjectId(objectId)
entry
}
/**
 * Inserts a new commit object with the given tree and moves `ref` to point at it.
 *
 * @param git the Git object
 * @param inserter the object inserter; flushed and released here
 * @param headId the parent commit id, or null for an initial commit
 * @param treeId the tree to commit
 * @param ref the full ref name to update (e.g. "refs/heads/master")
 * @param fullName the author/committer name
 * @param mailAddress the author/committer mail address
 * @param message the commit message
 * @return the id of the newly created commit
 */
def createNewCommit(git: Git, inserter: ObjectInserter, headId: AnyObjectId, treeId: AnyObjectId,
ref: String, fullName: String, mailAddress: String, message: String): ObjectId = {
val newCommit = new CommitBuilder()
newCommit.setCommitter(new PersonIdent(fullName, mailAddress))
newCommit.setAuthor(new PersonIdent(fullName, mailAddress))
newCommit.setMessage(message)
if(headId != null){
newCommit.setParentIds(List(headId).asJava)
}
newCommit.setTreeId(treeId)
val newHeadId = inserter.insert(newCommit)
inserter.flush()
inserter.release()
val refUpdate = git.getRepository.updateRef(ref)
refUpdate.setNewObjectId(newHeadId)
// NOTE(review): the RefUpdate.Result is discarded, so lock failures or
// rejected updates are silently ignored — confirm callers accept that.
refUpdate.update()
newHeadId
}
/**
* Read submodule information from .gitmodules
*
* @param git the Git object
* @param tree the tree in which .gitmodules is looked up
* @return the submodule list, or Nil when the file is absent or malformed
*/
def getSubmodules(git: Git, tree: RevTree): List[SubmoduleInfo] = {
val repository = git.getRepository
getContentFromPath(git, tree, ".gitmodules", true).map { bytes =>
(try {
val config = new BlobBasedConfig(repository.getConfig(), bytes)
config.getSubsections("submodule").asScala.map { module =>
val path = config.getString("submodule", module, "path")
val url = config.getString("submodule", module, "url")
SubmoduleInfo(module, path, url)
}
} catch {
// a malformed .gitmodules is treated as "no submodules" (logged, not raised)
case e: ConfigInvalidException => {
logger.error("Failed to load .gitmodules file for " + repository.getDirectory(), e)
Nil
}
}).toList
} getOrElse Nil
}
/**
* Get object content of the given path as byte array from the Git repository.
*
* @param git the Git object
* @param revTree the rev tree
* @param path the path
* @param fetchLargeFile if false then returns None for the large file
* @return the byte array of content or None if object does not exist
*/
def getContentFromPath(git: Git, revTree: RevTree, path: String, fetchLargeFile: Boolean): Option[Array[Byte]] = {
// linear scan of the recursive tree walk for an exact path match
@scala.annotation.tailrec
def getPathObjectId(path: String, walk: TreeWalk): Option[ObjectId] = walk.next match {
case true if(walk.getPathString == path) => Some(walk.getObjectId(0))
case true => getPathObjectId(path, walk)
case false => None
}
using(new TreeWalk(git.getRepository)){ treeWalk =>
treeWalk.addTree(revTree)
treeWalk.setRecursive(true)
getPathObjectId(path, treeWalk)
} flatMap { objectId =>
getContentFromId(git, objectId, fetchLargeFile)
}
}
/**
 * Builds the ContentInfo for the file content view: images and large files
 * carry no content; other files are classified as text or binary.
 */
def getContentInfo(git: Git, path: String, objectId: ObjectId): ContentInfo = {
// Viewer
using(git.getRepository.getObjectDatabase){ db =>
val loader = db.open(objectId)
val large = FileUtil.isLarge(loader.getSize)
val viewer = if(FileUtil.isImage(path)) "image" else if(large) "large" else "other"
// content is only loaded for the "other" viewer (never for image/large)
val bytes = if(viewer == "other") JGitUtil.getContentFromId(git, objectId, false) else None
if(viewer == "other"){
if(bytes.isDefined && FileUtil.isText(bytes.get)){
// text
ContentInfo("text", Some(StringUtil.convertFromByteArray(bytes.get)), Some(StringUtil.detectEncoding(bytes.get)))
} else {
// binary
ContentInfo("binary", None, None)
}
} else {
// image or large
ContentInfo(viewer, None, None)
}
}
}
/**
 * Get object content of the given object id as byte array from the Git repository.
 *
 * @param git the Git object
 * @param id the object id
 * @param fetchLargeFile if false then returns None for the large file
 * @return the byte array of content or None if object does not exist
 */
def getContentFromId(git: Git, id: ObjectId, fetchLargeFile: Boolean): Option[Array[Byte]] = try {
  using(git.getRepository.getObjectDatabase){ db =>
    val loader = db.open(id)
    val skipLargeFile = !fetchLargeFile && FileUtil.isLarge(loader.getSize)
    if(skipLargeFile) None else Some(loader.getBytes)
  }
} catch {
  // the object does not exist in this repository
  case _: MissingObjectException => None
}
/**
 * Returns all commit ids in the specified repository, or Nil when the
 * repository has no commits yet.
 */
def getAllCommitIds(git: Git): Seq[String] =
  if(isEmpty(git)) Nil
  else git.log.all.call.iterator.asScala.map(_.name).toList
/**
 * Walks every file under the tree of the given object id (recursively),
 * invoking `f` with each path and its CanonicalTreeParser.
 */
def processTree(git: Git, id: ObjectId)(f: (String, CanonicalTreeParser) => Unit) = {
using(new RevWalk(git.getRepository)){ revWalk =>
using(new TreeWalk(git.getRepository)){ treeWalk =>
val index = treeWalk.addTree(revWalk.parseTree(id))
treeWalk.setRecursive(true)
while(treeWalk.next){
f(treeWalk.getPathString, treeWalk.getTree(index, classOf[CanonicalTreeParser]))
}
}
}
}
/**
* Returns the identifier of the root commit (or latest merge commit) of the specified branch.
*/
def getForkedCommitId(oldGit: Git, newGit: Git,
userName: String, repositoryName: String, branch: String,
requestUserName: String, requestRepositoryName: String, requestBranch: String): String =
defining(getAllCommitIds(oldGit)){ existIds =>
// stops at the first commit that already exists on `branch` of the base repo
// NOTE(review): .head throws when no such commit is found — confirm callers
// only invoke this for genuinely forked branches.
getCommitLogs(newGit, requestBranch, true) { commit =>
existIds.contains(commit.name) && getBranchesOfCommit(oldGit, commit.getName).contains(branch)
}.head.id
}
/**
* Fetch pull request contents into refs/pull/${issueId}/head and return (commitIdTo, commitIdFrom)
*/
def updatePullRequest(userName: String, repositoryName:String, branch: String, issueId: Int,
requestUserName: String, requestRepositoryName: String, requestBranch: String):(String, String) =
using(Git.open(Directory.getRepositoryDir(userName, repositoryName)),
Git.open(Directory.getRepositoryDir(requestUserName, requestRepositoryName))){ (oldGit, newGit) =>
// force-fetch the requested branch into the PR-tracking ref of the base repository
oldGit.fetch
.setRemote(Directory.getRepositoryDir(requestUserName, requestRepositoryName).toURI.toString)
.setRefSpecs(new RefSpec(s"refs/heads/${requestBranch}:refs/pull/${issueId}/head").setForceUpdate(true))
.call
val commitIdTo = oldGit.getRepository.resolve(s"refs/pull/${issueId}/head").getName
val commitIdFrom = getForkedCommitId(oldGit, newGit,
userName, repositoryName, branch,
requestUserName, requestRepositoryName, requestBranch)
(commitIdTo, commitIdFrom)
}
/**
 * Returns the last modified commit of specified path
 * @param git the Git object
 * @param startCommit the search base commit id
 * @param path the path of target file or directory
 * @return the last modified commit of specified path
 */
def getLastModifiedCommit(git: Git, startCommit: RevCommit, path: String): RevCommit =
  git.log.add(startCommit).addPath(path).setMaxCount(1).call.iterator.next
/**
 * Returns branch summaries for the repository, including ahead/behind counts
 * and merge status against the default branch.
 *
 * @param owner the repository owner
 * @param name the repository name
 * @param defaultBranch the default branch name
 * @param origin whether this repository is the origin (the default branch of
 *        an origin repository carries no merge info)
 */
def getBranches(owner: String, name: String, defaultBranch: String, origin: Boolean): Seq[BranchInfo] = {
using(Git.open(getRepositoryDir(owner, name))){ git =>
val repo = git.getRepository
// NOTE(review): getAllRefs keys are full ref names (e.g. refs/heads/master),
// so `contains(defaultBranch)` with a short name may always miss and hit the
// fallback — confirm the intended key format.
val defaultObject = if (repo.getAllRefs.keySet().contains(defaultBranch)) {
repo.resolve(defaultBranch)
} else {
git.branchList().call().iterator().next().getObjectId
}
git.branchList.call.asScala.map { ref =>
// one walk per branch; always released in the finally below
val walk = new RevWalk(repo)
try {
val defaultCommit = walk.parseCommit(defaultObject)
val branchName = ref.getName.stripPrefix("refs/heads/")
val branchCommit = if(branchName == defaultBranch){
defaultCommit
} else {
walk.parseCommit(ref.getObjectId)
}
val when = branchCommit.getCommitterIdent.getWhen
val committer = branchCommit.getCommitterIdent.getName
val committerEmail = branchCommit.getCommitterIdent.getEmailAddress
val mergeInfo = if(origin && branchName == defaultBranch){
None
} else {
// find the merge base, then count commits on each side of it
walk.reset()
walk.setRevFilter( RevFilter.MERGE_BASE )
walk.markStart(branchCommit)
walk.markStart(defaultCommit)
val mergeBase = walk.next()
walk.reset()
walk.setRevFilter(RevFilter.ALL)
Some(BranchMergeInfo(
ahead = RevWalkUtils.count(walk, branchCommit, mergeBase),
behind = RevWalkUtils.count(walk, defaultCommit, mergeBase),
isMerged = walk.isMergedInto(branchCommit, defaultCommit)))
}
BranchInfo(branchName, committer, when, committerEmail, mergeInfo, ref.getObjectId.name)
} finally {
walk.dispose();
}
}
}
}
/**
 * Returns blame information for the given file at the given revision:
 * one BlameInfo per distinct source commit, carrying the set of 0-based
 * line indexes attributed to that commit.
 */
def getBlame(git: Git, id: String, path: String): Iterable[BlameInfo] = {
Option(git.getRepository.resolve(id)).map{ commitId =>
val blamer = new org.eclipse.jgit.api.BlameCommand(git.getRepository);
blamer.setStartCommit(commitId)
blamer.setFilePath(path)
val blame = blamer.call()
var blameMap = Map[String, JGitUtil.BlameInfo]()
var idLine = List[(String, Int)]()
// one pass over the blame result: build BlameInfo per distinct commit and
// record which line belongs to which commit
val commits = 0.to(blame.getResultContents().size()-1).map{ i =>
val c = blame.getSourceCommit(i)
if(!blameMap.contains(c.name)){
blameMap += c.name -> JGitUtil.BlameInfo(
c.name,
c.getAuthorIdent.getName,
c.getAuthorIdent.getEmailAddress,
c.getAuthorIdent.getWhen,
// prev: the commit touching the source path just before this one (skip(1))
Option(git.log.add(c).addPath(blame.getSourcePath(i)).setSkip(1).setMaxCount(2).call.iterator.next)
.map(_.name),
// prevPath is only recorded when the file was renamed
if(blame.getSourcePath(i)==path){ None }else{ Some(blame.getSourcePath(i)) },
c.getCommitterIdent.getWhen,
c.getShortMessage,
Set.empty)
}
idLine :+= (c.name, i)
}
// group recorded line indexes by commit id and attach them to each BlameInfo
val limeMap = idLine.groupBy(_._1).mapValues(_.map(_._2).toSet)
blameMap.values.map{b => b.copy(lines=limeMap(b.id))}
}.getOrElse(Seq.empty)
}
/**
 * Returns sha1
 * @param owner repository owner
 * @param name repository name
 * @param revstr A git object references expression
 * @return sha1
 */
def getShaByRef(owner: String, name: String, revstr: String): Option[String] =
  using(Git.open(getRepositoryDir(owner, name))){ git =>
    Option(git.getRepository.resolve(revstr)).map(_.name)
  }
}
| intermezzo-fr/gitbucket | src/main/scala/gitbucket/core/util/JGitUtil.scala | Scala | apache-2.0 | 34,662 |
package by.pavelverk.hardwrite.http.routes
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import by.pavelverk.hardwrite.core.auth.AuthService
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import io.circe.generic.auto._
import io.circe.syntax._
import scala.concurrent.ExecutionContext
/**
 * HTTP routes under /auth: sign-in (returns a token on success, 400 otherwise)
 * and sign-up (creates the account and returns 201).
 */
class AuthRoute(authService: AuthService)(implicit executionContext: ExecutionContext) extends FailFastCirceSupport {
import StatusCodes._
import authService._
val route = pathPrefix("auth") {
// POST /auth/signIn — exchange credentials for a token
path("signIn") {
pathEndOrSingleSlash {
post {
entity(as[LoginPassword]) { loginPassword =>
complete(
signIn(loginPassword.login, loginPassword.password).map {
case Some(token) => OK -> token.asJson
case None => BadRequest -> None.asJson
}
)
}
}
}
} ~
// POST /auth/signUp — register a new account
path("signUp") {
pathEndOrSingleSlash {
post {
entity(as[UsernamePasswordEmail]) { userEntity =>
complete(Created -> signUp(userEntity.username, userEntity.email, userEntity.password))
}
}
}
}
}
// request payloads, decoded from JSON via circe's generic auto derivation
private case class LoginPassword(login: String, password: String)
private case class UsernamePasswordEmail(username: String, email: String, password: String)
}
| VerkhovtsovPavel/BSUIR_Labs | Master/back/akka-http-rest-master/src/main/scala/by/pavelverk/hardwrite/http/routes/AuthRoute.scala | Scala | mit | 1,387 |
package myexample
import com.example.protos.demo._
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SQLContext
import org.apache.spark.rdd.RDD
import scalapb.spark.Implicits._
import scalapb.spark.ProtoSQL
object RunDemo {

  /**
   * Entry point: builds a SparkSession, creates DataFrames/Datasets from
   * protobuf-generated Person messages and runs two SQL queries over them.
   *
   * Fixes: idiomatic parameter name (was `Args`), removed the unused
   * `sc` local, and the session is now stopped even when a query fails.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("ScalaPB Demo").getOrCreate()
    try {
      // DataFrame built through ScalaPB's ProtoSQL bridge
      val personsDF: DataFrame = ProtoSQL.createDataFrame(spark, testData)
      // two equivalent ways to obtain a typed Dataset[Person]
      val personsDS1: Dataset[Person] = personsDF.as[Person]
      val personsDS2: Dataset[Person] = spark.createDataset(testData)
      personsDS1.show()
      personsDS2.show()
      personsDF.createOrReplaceTempView("persons")
      spark.sql("SELECT name, age, gender, size(addresses) FROM persons").show()
      spark.sql("SELECT name, age, gender, size(addresses) FROM persons WHERE age > 30")
        .collect
        .foreach(println)
    } finally {
      // release Spark resources even if a query throws
      spark.stop()
    }
  }

  /** Sample persons used by the demo queries above. */
  val testData: Seq[Person] = Seq(
    Person().update(
      _.name := "Joe",
      _.age := 32,
      _.gender := Gender.MALE),
    Person().update(
      _.name := "Mark",
      _.age := 21,
      _.gender := Gender.MALE,
      _.addresses := Seq(
        Address(city = Some("San Francisco"), street=Some("3rd Street"))
      )),
    Person().update(
      _.name := "Steven",
      _.gender := Gender.MALE,
      _.addresses := Seq(
        Address(city = Some("San Francisco"), street=Some("5th Street")),
        Address(city = Some("Sunnyvale"), street=Some("Wolfe"))
      )),
    Person().update(
      _.name := "Batya",
      _.age := 11,
      _.gender := Gender.FEMALE))
}
| thesamet/sparksql-scalapb-test | src/main/scala/RunDemo.scala | Scala | apache-2.0 | 1,748 |
package com.eharmony.aloha.dataset
import scala.language.implicitConversions
package object implicits {
  /** Implicitly renders any value using its `toString`. */
  implicit def any2string(a: Any): String = a.toString

  /** Implicitly renders an optional value; `None` becomes the empty string. */
  implicit def opt2string(a: Option[Any]): String = a.fold("")(_.toString)
}
| eHarmony/aloha | aloha-core/src/main/scala/com/eharmony/aloha/dataset/implicits/package.scala | Scala | mit | 244 |
package org.jetbrains.plugins.scala
package codeInspection
package packageNameInspection
import com.intellij.codeInspection.{LocalQuickFix, ProblemDescriptor}
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.util.ScalaUtils
import scala.tools.scalap.scalax.util.StringUtil
/**
* User: Alexander Podkhalyuzin
* Date: 08.07.2009
*/
class ScalaRenamePackageQuickFix(file: ScalaFile, name: String) extends LocalQuickFix {

  /** Renames the file's package declaration inside a write action. */
  def applyFix(project: Project, descriptor: ProblemDescriptor): Unit = {
    val renameTask = new Runnable {
      def run: Unit = file.setPackageName(name)
    }
    ScalaUtils.runWriteAction(renameTask, project, "Rename Package QuickFix")
  }

  /** A null or empty target name means the package statement will be removed. */
  def getName: String =
    if (name == null || name.isEmpty) "Remove package statement"
    else s"Rename Package to $name"

  def getFamilyName: String = "Rename Package"
}
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/codeInspection/packageNameInspection/ScalaRenamePackageQuickFix.scala | Scala | apache-2.0 | 913 |
import breeze.linalg.DenseVector
import com.jmatio.types.MLDouble
import io.github.mandar2812.dynaml.DynaMLPipe.identityPipe
import io.github.mandar2812.dynaml.analysis.{DifferentiableMap, PartitionedVectorField, PushforwardMap, VectorField}
import io.github.mandar2812.dynaml.dataformat.MAT
import io.github.mandar2812.dynaml.kernels._
import io.github.mandar2812.dynaml.models.gp.{GPRegression, WarpedGPModel}
import io.github.mandar2812.dynaml.models.sgp.ESGPModel
import io.github.mandar2812.dynaml.optimization.{CoupledSimulatedAnnealing, GridSearch}
import io.github.mandar2812.dynaml.pipes.{DataPipe, Encoder}
import scala.util.Random
// --- Data loading -----------------------------------------------------------
// Read phase space density (PSD) profiles from a MATLAB .mat file and extract
// the PSD matrix together with its time and L-star axes.
val psdProfile = """/Users/mandar/Google Drive/CWI/PSD_data/psdprofiles_rbsp_mu_700.0_k_0.1.mat"""
val psdData = (MAT.read > MAT.content)(psdProfile)
val psd = psdData("PSD_arr").asInstanceOf[MLDouble]
val time = psdData("Time_arr").asInstanceOf[MLDouble]
val l_star = psdData("Lstar_arr").asInstanceOf[MLDouble]
val Array(rows, cols) = psd.getDimensions
// Flatten the matrix into ((time, l_star) -> log(psd)) training pairs.
val data = for(rowIndex <- 0 until rows; colIndex <- 0 until cols)
  yield (
    DenseVector(time.getReal(rowIndex, 0).toDouble, l_star.getReal(0, colIndex).toDouble),
    math.log(psd.getReal(rowIndex, colIndex).toDouble))
// Drop NaN targets, shift the time axis by 735143.0 (presumably a MATLAB
// datenum offset — TODO confirm), and shuffle before splitting.
val filteredData =
  Random.shuffle(
    data
      .filterNot(_._2.isNaN)
      .map(c => (c._1 - DenseVector(735143.0,0.0), c._2))
      .toStream)
// First 1000 shuffled points for training, last 1000 for evaluation.
val (training, test) = (filteredData.take(1000), filteredData.takeRight(1000))
implicit val trans = DataPipe((s: Stream[(DenseVector[Double], Double)]) => s.toSeq)
implicit val ev = VectorField(2)
// --- Kernels ----------------------------------------------------------------
val tKernel = new TStudentKernel(0.01)
//tKernel.block_all_hyper_parameters
val mlpKernel = new MLPKernel(1.25, 1.5)
// Encoder mapping a flat hyper-parameter map ("c_i"/"s_i" keys) to a pair of
// (centers, scales) vectors and back; used by the spectral-mixture kernel.
val enc = Encoder[Map[String, Double], (DenseVector[Double], DenseVector[Double])](
  (c: Map[String, Double]) => {
    val (centerConf, scaleConf) = (
      c.filter(k => k._1.contains("c")).map(k => (k._1.split("_").last.toInt, k._2)),
      c.filter(k => k._1.contains("s")).map(k => (k._1.split("_").last.toInt, k._2)))
    (
      DenseVector.tabulate[Double](centerConf.size)(i => centerConf(i)),
      DenseVector.tabulate[Double](scaleConf.size)(i => scaleConf(i)))
  },
  (vecs: (DenseVector[Double], DenseVector[Double])) => {
    vecs._1.toArray.zipWithIndex.map((c) => ("c_"+c._2, c._1)).toMap ++
      vecs._2.toArray.zipWithIndex.map((c) => ("s_"+c._2, c._1)).toMap
  }
)
val gaussianSMKernel = GaussianSpectralKernel(DenseVector(2.5, 2.5), DenseVector(0.5, 10.0), enc)
val kernel = mlpKernel + tKernel
val noise = new DiracKernel(1.0)
// --- Models -----------------------------------------------------------------
// A plain GP regression (on the spectral kernel) and a skew-GP (on mlp + t).
val gpModel = new GPRegression(gaussianSMKernel, noise, training)
val sgpModel = ESGPModel(kernel, noise, DataPipe((x: DenseVector[Double]) => 0.0), 1.5, 0.5)(training)
implicit val detImpl = identityPipe[Double]
// Pushforward h: warps through x -> exp(-x); h1 is its inverse direction.
val h: PushforwardMap[Double, Double, Double] = PushforwardMap(
  DataPipe((x: Double) => math.exp(-x)),
  DifferentiableMap(
    (x: Double) => -math.log(x),
    (x: Double) => -1.0/x)
)
val h1: PushforwardMap[Double, Double, Double] = PushforwardMap(
  DataPipe((x: Double) => -math.log(x)),
  DifferentiableMap(
    (x: Double) => math.exp(-x),
    (x: Double) => -math.exp(-x))
)
implicit val pVF = PartitionedVectorField(1000, 1000)
implicit val t = Encoder(
  identityPipe[Seq[(DenseVector[Double], Double)]],
  identityPipe[Seq[(DenseVector[Double], Double)]])
// --- Hyper-parameter search --------------------------------------------------
// Starting configuration: kernel + noise states plus the skew-GP's own knobs.
val startConf = kernel.effective_state ++ noise.effective_state ++ Map("skewness" -> 1.5, "cutoff" -> 0.5)
val wGP = new WarpedGPModel(gpModel)(h)
// Grid search is constructed but only CSA is actually run below.
val gs = new GridSearch[sgpModel.type ](sgpModel).setGridSize(2).setStepSize(0.45).setLogScale(true)
val csa =
  new CoupledSimulatedAnnealing[sgpModel.type](sgpModel).setGridSize(1).setStepSize(0.5).setLogScale(true).setMaxIterations(40)
val (optModel, conf) = csa.optimize(startConf, Map())
// Collect (prediction, target) pairs on the held-out set.
val res = optModel.test(test).map(c => (c._3, c._2))
| mandar2812/PlasmaML | vanAllen/scripts/testPSDModel.scala | Scala | lgpl-2.1 | 3,838 |
/**
* Copyright 2013-2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.paypal.cascade.http.tests.resource
import akka.actor.Actor
/**
* DevNullActor is an Actor that accepts all messages and does nothing
*/
class DevNullActor extends Actor {
  // Accepts every incoming message and discards it without any processing.
  override def receive: Receive = {
    case _ => ()
  }
}
| 2rs2ts/cascade | http/src/test/scala/com/paypal/cascade/http/tests/resource/DevNullActor.scala | Scala | apache-2.0 | 835 |
package endpoints
package documented
package delegate
/**
* Interpreter for [[algebra.Assets]] that ignores information related
* to documentation and delegates to another [[endpoints.algebra.Assets]] interpreter.
*/
trait Assets
  extends algebra.Assets
    with Endpoints {
  // The underlying (non-documented) interpreter all operations forward to.
  val delegate: endpoints.algebra.Assets
  // Reuse the delegate's concrete types so values pass through unchanged.
  type AssetRequest = delegate.AssetRequest
  type AssetPath = delegate.AssetPath
  type AssetResponse = delegate.AssetResponse
  // Note: the `name` argument is intentionally not forwarded — the delegate's
  // assetSegments is invoked without it, as this interpreter ignores naming
  // used only for documentation purposes.
  def assetSegments(name: String): Path[AssetPath] = delegate.assetSegments
  // Both documentation arguments are dropped: this interpreter discards
  // documentation-related information and delegates the actual endpoint.
  def assetsEndpoint(url: Url[AssetPath], documentation: String, notFoundDocumentation: String): Endpoint[AssetRequest, AssetResponse] =
    delegate.assetsEndpoint(url)
  def digests: Map[String, String] = delegate.digests
}
| Krever/endpoints | openapi/openapi/src/main/scala/endpoints/documented/delegate/Assets.scala | Scala | mit | 760 |
package fs2
package io
package file
import scala.concurrent.ExecutionContext
import java.nio.ByteBuffer
import java.nio.channels.{AsynchronousFileChannel, FileChannel, FileLock}
import cats.effect.{Effect, Sync}
import cats.implicits._
/**
* Provides the ability to read/write/lock/inspect a file in the effect `F`.
*
* To construct a `FileHandle`, use the methods in the [[fs2.io.file.pulls]] object.
*/
trait FileHandle[F[_]] {
  /** Opaque type representing an exclusive lock on a file. */
  type Lock
  /**
   * Force any updates for the underlying file to storage.
   * @param metaData If true, also attempts to force file metadata updates to storage.
   */
  def force(metaData: Boolean): F[Unit]
  /**
   * Acquire an exclusive lock on the underlying file.
   * @return a lock object which can be used to unlock the file.
   */
  def lock: F[Lock]
  /**
   * Acquire a lock on the specified region of the underlying file.
   * @param position the start of the region to lock.
   * @param size the size of the region to lock.
   * @param shared to request a shared lock across process boundaries (may be converted to an exclusive lock on some operating systems).
   * @return a lock object which can be used to unlock the region.
   */
  def lock(position: Long, size: Long, shared: Boolean): F[Lock]
  /**
   * Read the specified number of bytes at a particular offset.
   * The built-in implementations return `None` once the end of the file has
   * been reached, and an empty chunk when zero bytes were read.
   * @param numBytes the number of bytes to read.
   * @param offset the offset from the start of the file.
   * @return a number of bytes from the file (at most, numBytes in size).
   */
  def read(numBytes: Int, offset: Long): F[Option[Chunk[Byte]]]
  /**
   * Report the current size of the file.
   * @return the size of the file.
   */
  def size: F[Long]
  /**
   * Truncate the underlying file to the specified size.
   * @param size the size of the file after truncation.
   */
  def truncate(size: Long): F[Unit]
  /**
   * Attempt to acquire an exclusive lock on the underlying file.
   * @return if the lock could be acquired, a lock object which can be used to unlock the file.
   */
  def tryLock: F[Option[Lock]]
  /**
   * Attempt to acquire a lock on the specified region of the underlying file.
   * @param position the start of the region to lock.
   * @param size the size of the region to lock.
   * @param shared to request a shared lock across process boundaries (may be converted to an exclusive lock on some operating systems).
   * @return if the lock could be acquired, a lock object which can be used to unlock the region.
   */
  def tryLock(position: Long, size: Long, shared: Boolean): F[Option[Lock]]
  /**
   * Unlock the (exclusive or regional) lock represented by the supplied `Lock`.
   * @param lock the lock object which represents the locked file or region.
   */
  def unlock(lock: Lock): F[Unit]
  /**
   * Write the specified bytes at a particular offset.
   * @param bytes the bytes to write to the `FileHandle`.
   * @param offset the offset at which to write the bytes.
   * @return the number of bytes written.
   */
  def write(bytes: Chunk[Byte], offset: Long): F[Int]
}
private[file] object FileHandle {

  /**
   * Creates a `FileHandle[F]` backed by a `java.nio.channels.AsynchronousFileChannel`.
   *
   * Asynchronous channel operations are bridged into `F` through a
   * `java.nio.channels.CompletionHandler` (see `asyncCompletionHandler`).
   */
  private[file] def fromAsynchronousFileChannel[F[_]](
      chan: AsynchronousFileChannel)(implicit F: Effect[F], ec: ExecutionContext): FileHandle[F] =
    new FileHandle[F] {
      type Lock = FileLock

      override def force(metaData: Boolean): F[Unit] =
        F.delay(chan.force(metaData))

      override def lock: F[Lock] =
        asyncCompletionHandler[F, Lock](handler => chan.lock(null, handler))

      override def lock(position: Long, size: Long, shared: Boolean): F[Lock] =
        asyncCompletionHandler[F, Lock](handler => chan.lock(position, size, shared, null, handler))

      override def read(numBytes: Int, offset: Long): F[Option[Chunk[Byte]]] =
        F.delay(ByteBuffer.allocate(numBytes)).flatMap { buffer =>
          asyncCompletionHandler[F, Integer](handler => chan.read(buffer, offset, null, handler)).map { bytesRead =>
            // A negative count signals end-of-file; zero yields an empty chunk.
            if (bytesRead < 0) None
            else if (bytesRead == 0) Some(Chunk.empty)
            else Some(Chunk.bytes(buffer.array, 0, bytesRead))
          }
        }

      override def size: F[Long] =
        F.delay(chan.size)

      override def truncate(size: Long): F[Unit] =
        F.delay { chan.truncate(size); () }

      override def tryLock: F[Option[Lock]] =
        F.delay(chan.tryLock()).map(Option.apply)

      override def tryLock(position: Long, size: Long, shared: Boolean): F[Option[Lock]] =
        F.delay(chan.tryLock(position, size, shared)).map(Option.apply)

      override def unlock(f: Lock): F[Unit] =
        F.delay(f.release())

      override def write(bytes: Chunk[Byte], offset: Long): F[Int] =
        asyncCompletionHandler[F, Integer](handler =>
          chan.write(bytes.toBytes.toByteBuffer, offset, null, handler)).map(_.toInt)
    }

  /**
   * Creates a `FileHandle[F]` backed by a blocking `java.nio.channels.FileChannel`;
   * every operation is suspended in `F` via `Sync#delay`.
   */
  private[file] def fromFileChannel[F[_]](chan: FileChannel)(implicit F: Sync[F]): FileHandle[F] =
    new FileHandle[F] {
      type Lock = FileLock

      override def force(metaData: Boolean): F[Unit] =
        F.delay(chan.force(metaData))

      override def lock: F[Lock] =
        F.delay(chan.lock)

      override def lock(position: Long, size: Long, shared: Boolean): F[Lock] =
        F.delay(chan.lock(position, size, shared))

      override def read(numBytes: Int, offset: Long): F[Option[Chunk[Byte]]] =
        F.delay(ByteBuffer.allocate(numBytes)).flatMap { buffer =>
          F.delay(chan.read(buffer, offset)).map { bytesRead =>
            // Same EOF convention as the asynchronous variant above.
            if (bytesRead < 0) None
            else if (bytesRead == 0) Some(Chunk.empty)
            else Some(Chunk.bytes(buffer.array, 0, bytesRead))
          }
        }

      override def size: F[Long] =
        F.delay(chan.size)

      override def truncate(size: Long): F[Unit] =
        F.delay { chan.truncate(size); () }

      override def tryLock: F[Option[Lock]] =
        F.delay(Option(chan.tryLock()))

      override def tryLock(position: Long, size: Long, shared: Boolean): F[Option[Lock]] =
        F.delay(Option(chan.tryLock(position, size, shared)))

      override def unlock(f: Lock): F[Unit] =
        F.delay(f.release())

      override def write(bytes: Chunk[Byte], offset: Long): F[Int] =
        F.delay(chan.write(bytes.toBytes.toByteBuffer, offset))
    }
}
| zaneli/fs2 | io/src/main/scala/fs2/io/file/FileHandle.scala | Scala | mit | 6,584 |
package com.getjenny.starchat.services
import akka.event.{Logging, LoggingAdapter}
import com.getjenny.analyzer.entities.{AnalyzersDataInternal, Context}
import com.getjenny.starchat.SCActorSystem
import com.getjenny.starchat.analyzer.analyzers.StarChatAnalyzer
import com.getjenny.starchat.entities.io.{BayesOperatorCacheServiceResponse, RefreshPolicy}
import com.getjenny.starchat.services.esclient.crud.EsCrudBase
import com.getjenny.starchat.services.esclient.{BayesOperatorCacheElasticClient, ElasticClient}
import com.getjenny.starchat.utils.Index
import org.elasticsearch.action.get.GetResponse
import org.elasticsearch.common.xcontent.XContentBuilder
import org.elasticsearch.common.xcontent.XContentFactory._
import org.elasticsearch.index.query.QueryBuilders
import scalaz.Scalaz._
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
// Cache entry persisted to Elasticsearch: a pre-computed score `value` stored
// under document id `key`.
case class BayesOperatorCacheDocument(key: String, value: Option[Double]) {
  // Serializes the document as `{"value": <score>}`; a missing value is written as 0.0.
  def toBuilder: XContentBuilder = {
    jsonBuilder()
      .startObject()
      .field("value", value.getOrElse(0d))
      .endObject()
  }
}
object BayesOperatorCacheService extends AbstractDataService {
object BayesOperatorCacheService extends AbstractDataService {
  override protected[this] val elasticClient: ElasticClient = BayesOperatorCacheElasticClient
  private[this] val indexName = Index.indexName(elasticClient.indexName, elasticClient.indexSuffix)
  private[this] val log: LoggingAdapter = Logging(SCActorSystem.system, this.getClass.getCanonicalName)
  private[this] val esCrudBase = new EsCrudBase(elasticClient, indexName)
  // Name of the operator whose evaluations are cached.
  private[this] val bayesOperator = "BayesOperator"
  // Kicks off the cache load in the background and returns immediately.
  // The eventual result is only logged via handleLoadResponse (its return
  // value is discarded). Relies on an ExecutionContext in scope — presumably
  // imported elsewhere in this file; confirm.
  def loadAsync(indexName: String): BayesOperatorCacheServiceResponse = {
    Future(performLoad(indexName))
      .onComplete {
        handleLoadResponse(indexName, _)
      }
    BayesOperatorCacheServiceResponse(indexName, status = true, "Loading async")
  }
  // Synchronous variant of loadAsync: blocks until the cache is populated.
  def load(indexName: String): BayesOperatorCacheServiceResponse = {
    handleLoadResponse(indexName, Try {
      performLoad(indexName)
    })
  }
  // Converts the outcome of performLoad (item count or failure) into a response,
  // logging failures.
  private[this] def handleLoadResponse(indexName: String, response: Try[Int]): BayesOperatorCacheServiceResponse = response match {
    case Failure(exception) =>
      log.error(exception, "Error during load bayes operator cache:")
      BayesOperatorCacheServiceResponse(indexName, status = false, s"Error while loading cache ${exception.getMessage}")
    case Success(value) =>
      BayesOperatorCacheServiceResponse(indexName, status = true, s"Loaded $value items in cache")
  }
  // Evaluates the first Bayes operator of every analyzer against every known
  // query (cross product, in parallel), stores the non-zero scores in bulk and
  // returns how many scores were cached.
  private[this] def performLoad(indexName: String): Int = {
    val analyzerMap = AnalyzerService.analyzersMap
      .getOrElse(indexName, throw new IllegalArgumentException(s"No analyzer map found for index: $indexName"))
      .analyzerMap
      .toMap
    // state -> compiled analyzer (states without a compiled analyzer are dropped).
    val analyzers = analyzerMap
      .map { case (state, decisionTableRuntimeItem) => state -> decisionTableRuntimeItem.analyzer.analyzer }
      .collect { case (state, Some(analyzer)) => state -> analyzer }
    val allQueries = analyzerMap.values
      .flatMap { x => x.queries }
    // Keep only states whose analyzer actually contains a Bayes operator.
    val allBayesOperators = analyzers
      .map { case (state, analyzer: StarChatAnalyzer) =>
        state -> analyzer.firstOccurrenceOfOperator(bayesOperator)
      }.collect { case (state, Some(bayes)) => state -> bayes }
    // Note: in the filter below the second tuple element (named `analyzer`) is
    // actually the evaluated score; zero scores are not cached.
    val scores = allQueries.par
      .flatMap(q =>
        allBayesOperators
          .map { case (state, analyzer) =>
            (indexName, state) -> analyzer.evaluate(q, AnalyzersDataInternal(Context(indexName, state))).score
          }
          .filter { case (_, analyzer) => analyzer =/= 0 }
      ).toMap
      .toList
    // Persist in batches of 1000 without forcing an index refresh.
    scores.map { case ((indexName, state), analyzer) => (indexName, state, analyzer) }
      .grouped(1000)
      .foreach(x => bulkPut(x, RefreshPolicy.`false`))
    log.info("Calculated {} scores", scores.length)
    scores.length
  }
  // Upserts a single cached score.
  def put(index: String, state: String, value: Double, refreshPolicy: RefreshPolicy.Value): Unit = {
    val key = createKey(index, state)
    val document = BayesOperatorCacheDocument(key, Some(value))
    val response = esCrudBase.update(document.key, document.toBuilder, upsert = true, refreshPolicy = refreshPolicy)
    log.info("BayesOperatorCache put - key: {}, value: {}, operation status: {}", key, value, response.status())
  }
  // Upserts a batch of (index, state, score) triples in a single bulk request.
  def bulkPut(scoresList: List[(String, String, Double)], refreshPolicy: RefreshPolicy.Value): Unit = {
    val elements = scoresList.map { case (index, state, v) => val key = createKey(index, state)
      key -> BayesOperatorCacheDocument(key, Some(v)).toBuilder
    }
    val response = esCrudBase.bulkUpdate(elements, upsert = true, refreshPolicy)
    log.info("BayesOperatorCache bulk put {} elements, operation status: {}", response.getItems.length, response.status())
  }
  // Reads a cached score; None when the document does not exist.
  def get(index: String, state: String): Option[Double] = {
    val key = createKey(index, state)
    val response = esCrudBase.read(key)
    if (response.isExists) {
      BayesOperatorCacheDocument(response).value
    } else {
      None
    }
  }
  // Returns the cached score, computing and storing it via updateFunction on a miss.
  def getOrElseUpdate(index: String, state: String,
                      refreshPolicy: RefreshPolicy.Value)(updateFunction: () => Double): Double = {
    this.get(index, state).getOrElse {
      val value = updateFunction()
      this.put(index, state, value, refreshPolicy)
      value
    }
  }
  // Document ids are "<index>-<state>".
  private[this] def createKey(index: String, state: String) = s"$index-$state"
  // Removes every cached entry (match-all delete, no forced refresh).
  def clear: Unit = {
    val response = esCrudBase.delete(QueryBuilders.matchAllQuery(), RefreshPolicy.`false`)
    log.info("BayesOperatorCache cleared {} entries", response.getDeleted)
  }
}
| GetJenny/starchat | src/main/scala/com/getjenny/starchat/services/BayesOperatorCacheService.scala | Scala | gpl-2.0 | 5,854 |
package mqfiletransfercoordinator.actors
import java.util.UUID
import scala.collection.mutable.Map
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorRef
import akka.actor.actorRef2Scala
import akka.event.LoggingReceive
import mqfiletransfercoordinator.messages.CancelTransfer
import mqfiletransfercoordinator.messages.CommandMessage
import mqfiletransfercoordinator.messages.InitiateTransfer
import mqfiletransfercoordinator.messages.TransferFailure
import mqfiletransfercoordinator.messages.TransferProgress
import mqfiletransfercoordinator.messages.TransferQuery
import mqfiletransfercoordinator.messages.TransferSuccess
import mqfiletransfercoordinator.messages.CancelTransferRequest
import mqfiletransfercoordinator.messages.TransferQueryAck
import mqfiletransfercoordinator.messages.TransferInitiated
import mqfiletransfercoordinator.messages.TransferSuccessfulReply
import mqfiletransfercoordinator.messages.TransferFailedReply
import mqfiletransfercoordinator.messages.InitiateAgentTransfer
// Mutable bookkeeping for one file transfer; `status` is updated in place as
// progress/completion messages arrive from the agents.
case class TransferRecord(transferId: String, sourceServer: String, sourcePath: String, targetServer: String, targetPath: String, var status: String, requestorQueueName: String)
/**
 * Coordinates file transfers: tracks each transfer's state in the companion's
 * `transferMap`, drives the agents via `agentCoordinator`, and notifies
 * requestors via `commandQueueProducer`.
 */
class TransferCoordinator(commandQueueProducer: ActorRef, agentCoordinator: ActorRef) extends Actor with ActorLogging {
  import TransferCoordinator._

  // NOTE(review): `transferMap` is a mutable map on the companion object; it is
  // assumed to be touched only from this actor's receive loop — confirm no
  // other actor/thread shares it.
  def receive = LoggingReceive {
    // Start a new transfer: record it, kick off the agents, ack the requestor.
    case message: InitiateTransfer => {
      val guid = UUID.randomUUID().toString()
      transferMap += (guid -> TransferRecord(guid, message.sourceServer, message.sourcePath, message.targetServer, message.targetPath, "TransferInitiated", message.requestorQueueName))
      agentCoordinator ! InitiateAgentTransfer(guid, message.sourceServer, message.sourcePath, message.targetServer, message.targetPath)
      commandQueueProducer ! TransferInitiated(guid, message.requestorQueueName, message.correlationId)
    }
    // Cancellation: mark the record and tell both source and target agents.
    case message: CancelTransferRequest => {
      val record = transferMap.get(message.transferId)
      record.foreach { record =>
        record.status = "Cancelled"
        agentCoordinator ! CancelTransfer(record.transferId, record.sourceServer)
        agentCoordinator ! CancelTransfer(record.transferId, record.targetServer)
      }
    }
    // Progress updates refresh the human-readable status string.
    // (Fixed typo: was "In Progess".)
    case message: TransferProgress => transferMap.get(message.transferId).foreach(_.status = s"In Progress(${message.segmentNumber}/${message.segmentsTotal})")
    case message: TransferSuccess => transferMap.get(message.transferId).foreach { record =>
      record.status = "Success"
      commandQueueProducer ! TransferSuccessfulReply(record.requestorQueueName, message.transferId)
    }
    case message: TransferFailure => transferMap.get(message.transferId).foreach { record =>
      record.status = "Failed"
      commandQueueProducer ! TransferFailedReply(record.requestorQueueName, message.transferId)
    }
    // Status queries answer with the current status; unknown ids are ignored.
    case message: TransferQuery => transferMap.get(message.transferId).foreach((record: TransferRecord) => commandQueueProducer ! TransferQueryAck(message.queryReplyQueueName, message.transferId, record.status))
    case _ => log.warning("Unknown message type")
  }
}
object TransferCoordinator {
  // Registry of all known transfers, keyed by transfer id.
  // NOTE(review): this is shared mutable state (scala.collection.mutable.Map,
  // imported at the top of the file); it appears to be accessed only from the
  // TransferCoordinator actor — confirm before reusing elsewhere.
  val transferMap = Map[String, TransferRecord]()
}
object test {
def foo(x: Int, y: Int) = new Object
}
object test2 {
import test.{foo => bar}
val x = bar(<caret>)
}
//x: Int, y: Int | ilinum/intellij-scala | testdata/parameterInfo/functionParameterInfo/simple/AliasedMethod.scala | Scala | apache-2.0 | 140 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.shellbase.interrupts
import java.util.concurrent.TimeoutException
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future, Promise}
import scala.util.Try
/**
* Encapsulates a thread that supports:
* - waiting for the thread to complete
* - "killing" the thread (calling interrupt(), waiting a bit, then calling stop() if needed)
*/
class KillableSingleThread[T](fn: => T) {
  // Completed exactly once with the outcome of evaluating `fn`.
  private val resultPromise = Promise[T]()

  private val thread = new Thread("killable-thread") {
    override def run(): Unit = resultPromise.tryComplete(Try(fn))
  }

  private def interrupt(): Unit = thread.interrupt()

  private def stop(): Unit = {
    // Thread.stop is deliberately used here as a last resort after interrupt()
    // failed; the promise is failed so waiters observe the termination.
    //noinspection ScalaDeprecation
    thread.stop()
    resultPromise.tryFailure(new ThreadDeath)
  }

  /** The eventual result of `fn`; failed with `ThreadDeath` if the thread was stopped. */
  def future: Future[T] = resultPromise.future

  /** Starts the underlying thread. */
  def start(): Unit = thread.start()

  /**
   * Blocks up to `waitDuration` for the computation to finish.
   * @return true if it completed (successfully or not) within the wait period.
   */
  def waitForCompletion(waitDuration: Duration): Boolean =
    try {
      Await.ready(resultPromise.future, waitDuration)
      true
    } catch {
      case _: TimeoutException => false
      // Intentionally broader than NonFatal: e.g. InterruptedException from
      // Await must also be swallowed here, mirroring the original behavior.
      case _: Throwable => future.isCompleted
    }

  /** Interrupts the thread, waits up to `gracePeriod`, then force-stops it if still running. */
  def kill(gracePeriod: Duration): Unit = {
    interrupt()
    val finishedInTime = waitForCompletion(gracePeriod)
    if (!finishedInTime) {
      stop()
    }
  }
}
| SumoLogic/shellbase | shellbase-core/src/main/scala/com/sumologic/shellbase/interrupts/KillableSingleThread.scala | Scala | apache-2.0 | 2,094 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.wiki.pages
import com.netflix.atlas.core.model.StyleVocabulary
import com.netflix.atlas.core.stacklang.Vocabulary
import com.netflix.atlas.core.stacklang.Word
import com.netflix.atlas.wiki.StackWordPage
case object DesEpicViz extends StackWordPage {
  val vocab: Vocabulary = StyleVocabulary
  // Looks up the stack word by name; `.get` throws at object initialization if
  // "des-epic-viz" is ever removed from the style vocabulary.
  val word: Word = vocab.words.find(_.name == "des-epic-viz").get
  // Stack signature rendered on the generated wiki page.
  override def signature: String =
    s"""
      |```
      |TimeSeriesExpr
      |training:Int
      |alpha:Double
      |beta:Double
      |maxPercent:Double
      |minPercent:Double
      |noise:Double -- TimeSeriesExpr
      |```
    """.stripMargin
  override def summary: String =
    """
      |Helper for configuring [[DES]] in a manner compatible with legacy epic alerts. For more
      |information see the [epic macros](DES#epic-macros) section of the DES page.
    """.stripMargin.trim
}
| brharrington/atlas | atlas-wiki/src/main/scala/com/netflix/atlas/wiki/pages/DesEpicViz.scala | Scala | apache-2.0 | 1,509 |
package org.libss.lift.util
import net.liftweb.http.js.JsCmd
/**
* Created by Kaa
* on 30.08.2016 at 00:35.
*/
/**
  * Mixed into components that require extra resources (JS/CSS) to be loaded
  * into the page head before they can operate.
  */
trait HeadComponentsLoadable {
  /**
    * @return the map of key of pack of libraries to list of library resources to be loaded
    */
  def headComponents: Map[String, List[String]]
  /**
    * @return JS to be run after all libraries components loading is done
    */
  def afterComponentsLoaded: Option[() => JsCmd] = None
}
object HeadComponents extends ResourcePathHelper {
  /** Resources of the jQuery validation-engine pack, localized to `language`. */
  def validationEngine(language: String) = {
    val resources = List(
      "/js/jquery/jquery.validationEngine.js",
      s"/js/jquery/jquery.validationEngine-$language.js",
      "/css/validation/validationEngine.jquery.css"
    )
    Map("validation-engine" -> resources.map(inClassPath))
  }
}
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.compiler
import org.junit.Test
import org.junit.Assert._
import org.junit.Assume._
import org.scalajs.testsuite.utils.AssertThrows.assertThrows
import org.scalajs.testsuite.utils.Platform._
class LongTest {
import LongTest._
/* Short builders, used for historical reasons.
* In practice, they are always called with constants, which ensures that the
* optimizer can constant-fold them away at the IR level, without even
* looking inside RuntimeLong.
*/
@inline def lg(lo: Int, hi: Int): Long =
(hi.toLong << 32) | (lo.toLong & 0xffffffffL)
@inline def lg(i: Int): Long =
i.toLong
// Helpers
@noinline def hideFromOptimizer(x: Long): Long = x
@noinline def hideDoubleFromOptimizer(x: Double): Double = x
@noinline def hideAnyFromOptimizer(x: Any): Any = x
// Common values
def MaxVal: Long = lg(0xffffffff, 0x7fffffff)
def MinVal: Long = lg(0, 0x80000000)
def IntMaxVal: Long = lg(Int.MaxValue)
def IntMinVal: Long = lg(Int.MinValue)
def IntMaxValPlus1: Long = lg(0x80000000, 0)
def IntMinValMinus1: Long = lg(2147483647, -1)
def MaxSafeDouble: Long = lg(-1, 2097151)
def TwoPow53: Long = lg(0, 2097152)
def MinSafeDouble: Long = lg(1, -2097152)
def NegTwoPow53: Long = lg(0, -2097152)
// Tests
@Test def sanity_of_equality_tests(): Unit = {
assertEquals(1958505087099L, lg(123, 456))
assertEquals(528280977864L, lg(456, 123))
assertNotEquals(17179869307L, lg(123, 456))
assertNotEquals(lg(123, 4), lg(123, 456))
assertNotEquals(1958505086977L, lg(123, 456))
assertNotEquals(lg(1, 456), lg(123, 456))
assertNotEquals(123L, lg(123, 456))
}
@Test def equals_Any(): Unit = {
@inline def test(expected: Boolean, lhs: Long, rhs: Any): Unit = {
assertEquals(expected, lhs.equals(rhs))
assertEquals(expected, hideFromOptimizer(lhs).equals(rhs))
assertEquals(expected, lhs.equals(hideAnyFromOptimizer(rhs)))
assertEquals(expected,
hideFromOptimizer(lhs).equals(hideAnyFromOptimizer(rhs)))
}
test(false, lg(0, 0), 0)
test(false, lg(0, 0), null)
test(true, lg(0, 0), lg(0, 0))
test(true, lg(123, 456), lg(123, 456))
test(true, lg(-123, 456), lg(-123, 456))
test(true, lg(-123, -456), lg(-123, -456))
test(false, lg(123, 456), lg(-123, 456))
test(false, lg(123, 456), lg(123, -456))
test(false, lg(-123, -456), lg(123, -456))
test(false, lg(-123, -456), lg(-123, 456))
}
@Test def `should_correctly_handle_literals`(): Unit = {
assertEquals(105L, 5L + 100L)
assertEquals(2147483651L, 2147483649L + 2L)
assertEquals(-8589934592L, -2147483648L * 4)
assertEquals(-18014398509482040L, 4503599627370510L * (-4))
}
@Test def `should_correctly_dispatch_unary_ops_on_Longs`(): Unit = {
val x = 10L
assertEquals(-10L, -x)
val y = 5L
assertEquals(-5L, -y)
assertEquals(5L, +y)
assertEquals(-6L, ~y)
}
@Test def `should_correctly_dispatch_binary_ops_on_Longs`(): Unit = {
assertEquals(25F, 5L * 5F, 0F)
assertEquals(1F, 5L % 4F, 0F)
assertEquals(20F, 5F * 4L, 0F)
}
@Test def `should_support_shifts_with_Longs_#622`(): Unit = {
def l(x: Long): Long = x
def i(x: Int): Int = x
assertEquals(268435455L, l(-7L) >>> 100L)
assertEquals(-1L, l(-7L) >> 100L)
assertEquals(-1L, l(-7L) >> 100)
assertEquals(268435455L, l(-7L) >>> 100)
assertEquals(-481036337152L, l(-7L) << 100L)
assertEquals(-481036337152L, l(-7L) << 100)
assertEquals(481036337152L, l(7L) << 100L)
assertEquals(549755813888L, l(8L) << 100L)
assertEquals(1152921504606846975L, l(-7L) >>> 4)
assertEquals(112, i(7) << 100)
assertEquals(-1, i(-7) >> 100)
assertEquals(268435455, i(-7) >>> 100)
assertEquals(-5, i(-65) >> 100)
assertEquals(-5, i(-65) >> 4)
}
@Test def `primitives_should_convert_to_Long`(): Unit = {
// Byte
assertEquals(112L, 112.toByte.toLong)
// Short
assertEquals(-10L, (-10).toShort.toLong)
// Char
assertEquals(65L, 'A'.toLong)
// Int
assertEquals(5L, 5.toLong)
// Long
assertEquals(10L, 10L.toLong)
// Float
assertEquals(100000L, 100000.6f.toLong)
// Double
assertEquals(100000L, 100000.6.toLong)
}
@Test def `should_support_hashCode`(): Unit = {
assertEquals(0, 0L.hashCode())
assertEquals(55, 55L.hashCode())
assertEquals(11, (-12L).hashCode())
assertEquals(10006548, 10006548L.hashCode())
assertEquals(1098747, (-1098748L).hashCode())
assertEquals(-825638905, 613354684553L.hashCode())
assertEquals(1910653900, 9863155567412L.hashCode())
assertEquals(1735398658, 3632147899696541255L.hashCode())
assertEquals(-1689438124, 7632147899696541255L.hashCode())
}
// The `##` operator uses Scala's cooperative hashing: small negative Longs
// hash like the equal Int (e.g. (-12L).## == -12, unlike hashCode which
// gives 11). Expected values precomputed on the JVM.
@Test def `should_support_hash_hash`(): Unit = {
  def check(expected: Int, value: Long): Unit =
    assertEquals(expected, value.##)

  check(0, 0L)
  check(55, 55L)
  check(-12, -12L)
  check(10006548, 10006548L)
  check(-1098748, -1098748L)
  check(1910653900, 9863155567412L)
  check(1735398658, 3632147899696541255L)
  check(-825638905, 613354684553L)
  check(-1689438124, 7632147899696541255L)
}
// The `##` of a case class wrapping a Long must match the JVM. The case-class
// hashing algorithm changed starting in Scala 2.13 (except 2.13.0-M5), so the
// expected value depends on the Scala version under test.
@Test def `should_have_correct_hash_in_case_classes`(): Unit = {
  val usesOldCaseClassHash =
    scalaVersion.startsWith("2.10.") || scalaVersion.startsWith("2.11.") ||
    scalaVersion.startsWith("2.12.") || scalaVersion == "2.13.0-M5"

  // `oldHash` is the pre-2.13 expected value, `newHash` the 2.13+ one.
  def check(oldHash: Int, newHash: Int, value: Long): Unit =
    assertEquals(if (usesOldCaseClassHash) oldHash else newHash, HashTestBox(value).##)

  check(-1669410282, 1445817443, 0L)
  check(-1561146018, 536512430, 55L)
  check(-1266055417, 2131034006, -12L)
  check(-1383472436, 713468002, 10006548L)
  check(1748124846, -1926941956, -1098748L)
  check(1291324266, 1150870245, 9863155567412L)
  check(-450677189, -1713893803, 3632147899696541255L)
  check(259268522, -1901418683, 1461126709984L)
  check(818387364, -491089524, 1L)
}
// String concatenation with Long operands must use the decimal
// representation of the Long.
@Test def `should_correctly_concat_to_string`(): Unit = {
  val twenty = 20L
  assertEquals("asdf520hello", "asdf" + 5L + twenty + "hello")
  assertEquals("20hello", twenty + "hello")
}
// String.toLong must parse values far beyond the Int range.
@Test def `string_should_convert_to_Long`(): Unit = {
  val parsed = "45678901234567890".toLong
  assertEquals(45678901234567890L, parsed)
}
// isInstanceOf/asInstanceOf on Longs, both statically typed and boxed in Any.
@Test def `should_correctly_implement_is/asInstanceOf_Longs`(): Unit = {
  val boxed: Any = 5L
  val primitive: Long = 5L

  // Statically-typed value.
  assertEquals(5L, primitive.asInstanceOf[Long])
  // models current scala behavior. See SI-1448
  assertEquals(5, primitive.asInstanceOf[Int])
  assertTrue(primitive.isInstanceOf[Long])
  assertFalse(primitive.isInstanceOf[Int])

  // Boxed (dynamically-typed) value.
  assertEquals(5L, boxed.asInstanceOf[Long])
  assertTrue(boxed.isInstanceOf[Long])
  assertFalse(boxed.isInstanceOf[Int])
}
// Cooperative numeric equality: a Long must compare equal to an Int or Char
// with the same numeric value. The literals are deliberately left inline —
// extracting them into vals or a helper would change the static types that
// the `==` dispatch under test depends on.
@Test def `should_correctly_compare_to_other_numeric_types`(): Unit = {
  assertTrue(5L == 5)
  assertTrue(5 == 5L)
  assertTrue(4 != 5L)
  assertTrue('A' == 65L) // Char compares by code point
}
// Long.hashCode() must match java.lang.Long.hashCode, defined by the JDK as
// (int)(value ^ (value >>> 32)). Expected values were precomputed on the JVM.
// `lg(lo, hi)` assembles a Long from its low and high 32-bit halves (see
// toLong below); `lg(v)` presumably sign-extends a single Int — TODO confirm
// against the helper's definition, which is outside this chunk.
@Test def hashCodeTest(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Int, x: Long): Unit = {
    assertEquals(expected, x.hashCode())
    assertEquals(expected, hideFromOptimizer(x).hashCode())
  }
  test(0, lg(0))
  test(0, lg(-1)) // lo ^ hi of -1L is 0
  test(55, lg(55))
  test(11, lg(-12))
  test(10006548, lg(10006548))
  test(1098747, lg(-1098748))
  test(957662195, lg(579906195, 461662560))
  test(-1075860794, lg(-1403218312, 327367870))
  test(1425294575, lg(-1152051636, -274640221))
  test(-1863811248, lg(1026519507, -1379463549))
  test(-881942797, lg(363765329, -557842270))
  test(548587254, lg(21652572, 569942698))
  test(-1328999812, lg(55820229, -1281708615))
  test(-1756412154, lg(-1843678104, 89453422))
  test(-529144798, lg(-1928579430, 1836700344))
  test(-1163319584, lg(-181377900, 1335444084))
  test(2070477069, lg(1189983760, 1032146717))
  test(-1718642695, lg(-1982789145, 274636318))
  test(260982265, lg(-2087901827, -1945935740))
  test(-385578983, lg(-1911332808, 1729620001))
  test(-1362397169, lg(-1920965295, 592125278))
  test(1419211160, lg(2017870028, 751907156))
  test(-1851816270, lg(1506336851, -933796127))
  test(112959880, lg(-1747722429, -1855422773))
  test(1715333902, lg(-2139132623, -431847873))
  test(-453690224, lg(739274932, -924496860))
  test(-1503679197, lg(-1482800071, 29485338))
  test(1950154296, lg(237609240, 2048220960))
  test(2037562473, lg(-431092385, -1623412426))
  test(220707473, lg(2144172772, 1927987317))
  test(1902658020, lg(971459211, 1217334127))
  test(840583449, lg(-530209544, -763367967))
  test(2065572837, lg(-1322671605, -902331922))
  test(407536450, lg(1361976000, 1231329666))
  test(-1678479110, lg(-96547475, 1640676759))
  test(-1558558486, lg(1799144078, -936998300))
  test(-110470482, lg(221720683, -195204411))
  test(992932874, lg(2080474705, 1194291803))
  test(2035378556, lg(-1962255291, -228903623))
  test(542449527, lg(-1961045404, -1421226733))
  test(-1824846728, lg(1762001719, -96661681))
  test(-985103709, lg(568630982, -458482587))
  test(37361715, lg(-1237704639, -1275053966))
  test(-1555729529, lg(936273516, -1802824213))
  test(1534845437, lg(-870754516, -1755138351))
  test(-715250396, lg(964079858, -332884522))
  test(2003953821, lg(1769001167, 503396434))
  test(1631287431, lg(811930233, 1365142270))
  test(-1393125048, lg(-280291442, 1136496326))
  test(926193137, lg(439731659, 755060794))
  test(1141998463, lg(-561661919, -1701561506))
  test(480895538, lg(1556104387, 1080665841))
  test(-849143869, lg(1931061917, -1099252386))
  test(-1840233445, lg(2086961898, -298531087))
  test(47538111, lg(-1148008529, -1186490352))
  test(540301593, lg(807317094, 271251327))
  test(1903332829, lg(1077071399, 826295290))
  test(-1325859168, lg(781949710, -1637653074))
  test(-1476869146, lg(1778433204, -839352494))
  test(84316181, lg(-2038023199, -2088719372))
  test(524038724, lg(-1764916235, -1980649039))
  test(-794988445, lg(-1796682086, 1148567289))
  test(-1285356617, lg(-1606200144, 320886535))
  test(1441713710, lg(755146140, 2028753842))
  test(365800340, lg(-1851453861, -2073516593))
  test(2130603708, lg(-543327214, -1587342674))
  test(-1414171289, lg(506958308, -1249713021))
  test(-262714124, lg(-2097389477, 1923820719))
  test(158195454, lg(-374932306, -523558320))
  test(50128093, lg(-902905695, -925752196))
  test(-825145129, lg(-397013030, 646399757))
  test(-1344834498, lg(1764398539, -956440075))
  test(-103814738, lg(-1750710329, 1852419689))
  test(-1354282241, lg(-1664538473, 864969320))
  test(1408148925, lg(-500471847, -1312439708))
  test(1910019874, lg(14748928, 1899600418))
  test(1877620608, lg(-1985642880, -431011584))
  test(-378358620, lg(494530531, -200582329))
  test(492633155, lg(-2067225228, -1718331081))
  test(-1581166836, lg(-1799546135, 897340901))
  test(174532880, lg(25821759, 200092463))
  test(-629188646, lg(403690141, -1032813241))
  test(2139225425, lg(-1843541251, -308529236))
  test(200043623, lg(1643311840, 1780391559))
  test(1992690082, lg(1531597671, 764172997))
  test(754072038, lg(638938496, 182932582))
  test(-139359279, lg(309356043, -440275494))
  test(-1669264515, lg(-541225182, 1128039519))
  test(25583899, lg(-387355169, -378598204))
  test(1822592670, lg(1787244135, 103129337))
  test(1468680630, lg(-1654639624, -890602930))
  test(2103231504, lg(-1867306675, -303043235))
  test(1159389820, lg(1255224728, 265017316))
  test(776506096, lg(119985367, 695098919))
  test(-1303579924, lg(-332671386, 1583817866))
  test(1108767081, lg(1610629865, 571880320))
  test(-1101969936, lg(727577343, -1794328817))
  test(-1022615009, lg(730759795, -394092436))
  test(-1221218252, lg(-148400203, 1074931585))
  test(410005178, lg(181091802, 314250080))
  test(1180107886, lg(-1934827635, -889463837))
  test(425308062, lg(-1067099255, -650316777))
  test(1727927187, lg(1821917070, 174468125))
  test(-759140792, lg(474121453, -830281051))
  test(1698140938, lg(-402668999, -2100801229))
  test(512144461, lg(-615008378, -976157749))
}
// Long.toString must produce the exact decimal representation, including the
// sign, across single-word values, boundary constants (Int.MaxValue ± 1,
// 2^53 ± 1 where Double loses integer precision) and arbitrary two-word
// values. Expected strings were precomputed on the JVM.
@Test def toStringTest(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: String, x: Long): Unit = {
    assertEquals(expected, x.toString())
    assertEquals(expected, hideFromOptimizer(x).toString())
  }
  test("0", lg(0))
  test("1", lg(1))
  test("-1", lg(-1))
  // Int boundary constants.
  test("2147483647", IntMaxVal)
  test("2147483648", IntMaxValPlus1)
  test("-2147483648", IntMinVal)
  test("-2147483649", IntMinValMinus1)
  test("999999999", lg(999999999))
  test("1000000000", lg(1000000000))
  // 2^53 boundary, where Double can no longer represent every integer.
  test("9007199254740991", MaxSafeDouble)
  test("9007199254740992", TwoPow53)
  test("-9007199254740991", MinSafeDouble)
  test("-9007199254740992", NegTwoPow53)
  test("-86922", lg(-86922, -1))
  test("0", lg(0, 0))
  test("-21874015", lg(-21874015, -1))
  test("-2098921896914", lg(1317110830, -489))
  test("80985205273168", lg(-698060208, 18855))
  test("-12451732102972849", lg(858389071, -2899145))
  test("3350", lg(3350, 0))
  test("-92511590195450", lg(2005360390, -21540))
  test("-2", lg(-2, -1))
  test("446248293253325286", lg(1492984294, 103900277))
  test("499596119314678396", lg(116015740, 116321286))
  test("-3205893", lg(-3205893, -1))
  test("-88762100292970", lg(1988813462, -20667))
  test("-1278004", lg(-1278004, -1))
  test("-1", lg(-1, -1))
  test("-305393", lg(-305393, -1))
  test("-2", lg(-2, -1))
  test("80295210784300943", lg(-1678336113, 18695185))
  test("5", lg(5, 0))
  test("21", lg(21, 0))
  test("64", lg(64, 0))
  test("39146094", lg(39146094, 0))
  test("-1725731", lg(-1725731, -1))
  test("-768047304243556260", lg(-874655652, -178824949))
  test("-2726923242838", lg(380990122, -635))
  test("-1781092907033", lg(1318520807, -415))
  test("-213275", lg(-213275, -1))
  test("7662405832810", lg(184176746, 1784))
  test("-154157877107", lg(460945549, -36))
  test("-929963900939521435", lg(1586508389, -216524094))
  test("-6872", lg(-6872, -1))
  test("31842553544728", lg(-333987816, 7413))
  test("567569520305426", lg(-1817926382, 132147))
  test("19649016", lg(19649016, 0))
  test("-1349346", lg(-1349346, -1))
  test("9479824673588660", lg(-1372338764, 2207193))
  test("3521781", lg(3521781, 0))
  test("1740", lg(1740, 0))
  test("0", lg(0, 0))
  test("92834698468", lg(-1654582044, 21))
  test("-80139798970631138", lg(100400158, -18659001))
  test("30058", lg(30058, 0))
  test("-611022189550002", lg(1332815438, -142265))
  test("514941281681226", lg(472694602, 119894))
  test("2454759250363", lg(-1962042949, 571))
  test("14860137468144958", lg(1595551038, 3459895))
  test("-79255", lg(-79255, -1))
  test("2290122305310796", lg(-1501556660, 533210))
  test("-755641947927852310", lg(-463451414, -175936602))
  test("-2621852156570472370", lg(-771329970, -610447526))
  test("-37956135735", lg(698569929, -9))
  test("853219", lg(853219, 0))
  test("901", lg(901, 0))
  test("4385596303898", lg(434694682, 1021))
  test("-972597865", lg(-972597865, -1))
  test("-8057379", lg(-8057379, -1))
  test("-14968", lg(-14968, -1))
  test("-98204964", lg(-98204964, -1))
  test("335479", lg(335479, 0))
  test("-429441918886", lg(54810714, -100))
  test("9798741", lg(9798741, 0))
  test("135908509698671494", lg(-896875642, 31643665))
  test("-141095409221912371", lg(233027789, -32851335))
  test("-9040837797787104", lg(-359183840, -2104985))
  test("-889", lg(-889, -1))
  // Negative low word with hi == 0 is a large positive Long.
  test("3222082994", lg(-1072884302, 0))
  test("-1454853", lg(-1454853, -1))
  test("547641844425", lg(-2113969463, 127))
  test("2528132853", lg(-1766834443, 0))
  test("242", lg(242, 0))
  test("-1655763891", lg(-1655763891, -1))
  test("82", lg(82, 0))
  test("-120254181", lg(-120254181, -1))
  test("-210088", lg(-210088, -1))
  test("-2", lg(-2, -1))
  test("250255458324299", lg(598888267, 58267))
  test("-100656997", lg(-100656997, -1))
  test("-24097181761", lg(1672622015, -6))
  test("206088", lg(206088, 0))
  test("-593", lg(-593, -1))
  test("-99542049", lg(-99542049, -1))
  test("421501", lg(421501, 0))
  test("-2", lg(-2, -1))
  test("-101", lg(-101, -1))
  test("3", lg(3, 0))
  test("14967492854", lg(2082590966, 3))
  test("-1528445803513883", lg(-86853659, -355870))
  test("26760588095306", lg(-1353126070, 6230))
  test("12452686330472", lg(1576139368, 2899))
  test("-130630407827875", lg(1022479965, -30415))
  test("-10281777615", lg(-1691843023, -3))
  test("-90497242609445", lg(2013284571, -21071))
  test("-13935178716929", lg(1990158591, -3245))
  test("-11308540", lg(-11308540, -1))
  test("545166", lg(545166, 0))
  test("-1043705339124703", lg(1778574369, -243007))
  test("510", lg(510, 0))
  test("-2485453027", lg(1809514269, -1))
  test("-15103", lg(-15103, -1))
  test("-168776672025670194", lg(-779514418, -39296382))
}
// Long.toByte must keep only the lowest 8 bits (sign-extended), i.e. it
// depends solely on the low byte of the low word — see the 0xfedcba98 case,
// which yields 0x98.toByte. Expected values precomputed on the JVM.
@Test def toByte(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Byte, x: Long): Unit = {
    assertEquals(expected, x.toByte)
    assertEquals(expected, hideFromOptimizer(x).toByte)
  }
  test(0, lg(0))
  test(-1, lg(-1))
  test(0x98.toByte, lg(0xfedcba98, 0x76543210))
  test(102, lg(-1755353242, -1245269156))
  test(77, lg(-359135667, 1391746928))
  test(-47, lg(-957203503, 1516742479))
  test(-22, lg(-1928741654, 1162703256))
  test(-113, lg(-1698228849, 1497186951))
  test(-84, lg(-68041812, -2115448390))
  test(33, lg(1534301729, 1468418695))
  test(113, lg(1101829489, -514588123))
  test(12, lg(-1437577204, 1896338488))
  test(86, lg(-857671082, -1304076936))
  test(-36, lg(-292818212, -1485650549))
  test(88, lg(1044510040, 147719255))
  test(107, lg(-1166136469, 78076997))
  test(61, lg(500131901, 248541787))
  test(99, lg(1863435363, -1465266670))
  test(-76, lg(136483252, 1662447178))
  test(0, lg(1787939584, 1303926235))
  test(-69, lg(2105657787, 845433223))
  test(26, lg(-1298285542, -1826340261))
  test(64, lg(-766959552, -326327606))
}
// Long.toShort must keep only the lowest 16 bits (sign-extended) — see the
// 0xfedcba98 case, which yields 0xba98.toShort. Expected values precomputed
// on the JVM.
@Test def toShort(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Short, x: Long): Unit = {
    assertEquals(expected, x.toShort)
    assertEquals(expected, hideFromOptimizer(x).toShort)
  }
  test(0, lg(0))
  test(-1, lg(-1))
  test(0xba98.toShort, lg(0xfedcba98, 0x76543210))
  test(-670, lg(1925512546, -812328457))
  test(-15861, lg(2028716555, -1639243756))
  test(9963, lg(-1970657557, -1904990267))
  test(18394, lg(-1012119590, -1704668195))
  test(-7956, lg(848486636, -810351120))
  test(21453, lg(2103989197, 955793808))
  test(22979, lg(-237938237, -703399620))
  test(8452, lg(666247428, -1109641927))
  test(-26563, lg(1824561213, -872828437))
  test(-5754, lg(-10950266, -1779965318))
  test(11796, lg(1251814932, -491043391))
  test(18020, lg(-117750172, -366379322))
  test(3768, lg(-2095575368, 965048164))
  test(-4579, lg(-177410531, 1454361289))
  test(-29102, lg(-359035310, -790126871))
  test(30020, lg(1486058820, 1675509542))
  test(-13051, lg(268881157, -342358099))
  test(-2720, lg(-1089211040, 747294820))
  test(4726, lg(1163661942, 1708185440))
  test(-16878, lg(-1363821038, -1952481751))
}
// Long.toInt must return exactly the low 32-bit word, discarding the high
// word — every case below expects the first argument of lg(lo, hi).
@Test def toInt(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Int, x: Long): Unit = {
    assertEquals(expected, x.toInt)
    assertEquals(expected, hideFromOptimizer(x).toInt)
  }
  test(0, lg(0))
  test(-1, lg(-1))
  test(0xfedcba98, lg(0xfedcba98, 0x76543210))
  test(-1869423218, lg(-1869423218, -5516698))
  test(450655357, lg(450655357, -521592408))
  test(-596464514, lg(-596464514, 629510497))
  test(1668957409, lg(1668957409, 1231040344))
  test(-313016061, lg(-313016061, 283507721))
  test(-406779255, lg(-406779255, 1389322213))
  test(-1125423893, lg(-1125423893, -436921025))
  test(1491309031, lg(1491309031, 948401259))
  test(360542935, lg(360542935, -1033853853))
  test(178673916, lg(178673916, -2045867551))
  test(-1167644863, lg(-1167644863, 738699232))
  test(-1852739075, lg(-1852739075, 950841298))
  test(-1965326912, lg(-1965326912, 1694989583))
  test(-141857741, lg(-141857741, -1197558189))
  test(-938893686, lg(-938893686, 1763555645))
  test(-1178638558, lg(-1178638558, 299067184))
  test(-1296424902, lg(-1296424902, -1694453755))
  test(204387309, lg(204387309, -240738711))
  test(-942136876, lg(-942136876, -527367452))
  test(-1703892744, lg(-1703892744, 240186844))
}
// Long.toLong is the identity. This also documents the lg(lo, hi) encoding:
// lg(0xfedcba98, 0x76543210) == 0x76543210fedcba98L, i.e. first argument is
// the low 32-bit word and second is the high word.
@Test def toLong(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Long, x: Long): Unit = {
    assertEquals(expected, x.toLong)
    assertEquals(expected, hideFromOptimizer(x).toLong)
  }
  test(0L, lg(0))
  test(-1L, lg(-1))
  test(0x76543210fedcba98L, lg(0xfedcba98, 0x76543210))
  test(6907420169189163269L, lg(-85753595, 1608259083))
  test(-6558938415102325809L, lg(539593679, -1527121853))
  test(-7633462319206780754L, lg(-379998034, -1777303946))
  test(-4051533910437546682L, lg(-655641274, -943321249))
  test(-3890339056676572253L, lg(1727460259, -905790147))
  test(-3091543614186826784L, lg(1824805856, -719806090))
  test(2806266116723834799L, lg(948567983, 653384746))
  test(-1741184441450532748L, lg(-957910924, -405401095))
  test(3395924718030703835L, lg(-433042213, 790675337))
  test(-7712245542997911283L, lg(889526541, -1795647094))
  test(-2751064647855401745L, lg(1316066543, -640532153))
  test(5225909624054208018L, lg(1913378322, 1216751901))
  test(1334025594846136121L, lg(-434813127, 310602037))
  test(-1574909139329823322L, lg(1689963942, -366687109))
  test(-9142211941778525044L, lg(754250892, -2128587091))
  test(-5517402195275269807L, lg(-1817691823, -1284620305))
  test(7612683537409046411L, lg(-222627957, 1772466007))
  test(-2955859733488660001L, lg(-1282993697, -688214725))
  test(462084382441397543L, lg(799857959, 107587404))
  test(8801656334077465992L, lg(2076251528, 2049295309))
}
// Long.toFloat under strict-float semantics. Expected values precomputed on
// the JVM. Only runs when strict floats are guaranteed (assumeTrue), and
// tolerates a small error in fullOpt because the Closure compiler rewrites
// the float constants on the expected side.
@Test def toFloat_strict(): Unit = {
  assumeTrue("Assumed strict floats", hasStrictFloats)
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Float, x: Long, epsilon: Float = 0.0f): Unit = {
    assertEquals(expected, x.toFloat, epsilon)
    assertEquals(expected, hideFromOptimizer(x).toFloat, epsilon)
  }
  test(0, lg(0))
  test(-1, lg(-1))
  // Closure seems to incorrectly rewrite the constant on the right :-(
  val epsilon = if (isInFullOpt) 1E4f else 0.0f
  test(9.223372E18f, MaxVal, epsilon)
  test(-9.223372E18f, MinVal, epsilon)
  test(4.7971489E18f, lg(-1026388143, 1116923232))
  test(-2.24047663E18f, lg(-1288678667, -521651607))
  test(4.59211416E18f, lg(1192262605, 1069184891))
  test(3.38942079E18f, lg(-180353617, 789161022))
  test(-6.8076878E18f, lg(-1158443188, -1585038363))
  test(7.4159717E18f, lg(906981906, 1726665521))
  test(-1.85275997E18f, lg(2042933575, -431379283))
  test(5.7344188E18f, lg(599900903, 1335148382))
  test(3.20410168E18f, lg(1458166084, 746013039))
  test(-7.2310311E18f, lg(1956524672, -1683605603))
  test(7.7151362E18f, lg(478583639, 1796320118))
  test(1.41365268E18f, lg(-1645816617, 329141676))
  test(-3.03197918E18f, lg(184187116, -705937657))
  test(-4.04287594E18f, lg(659513335, -941305424))
  test(-7.8204678E18f, lg(770505156, -1820844549))
  test(-5.9733025E18f, lg(929928858, -1390767911))
  test(1.1261721E18f, lg(-1475096259, 262207373))
  test(4.00884963E18f, lg(787691795, 933383012))
  test(-1.43511611E18f, lg(1189057493, -334139018))
  test(3.81415059E18f, lg(-618946450, 888051141))
}
// Long.toDouble must round to the nearest representable Double. Expected
// values precomputed on the JVM; the MaxVal/MinVal cases tolerate a small
// error in fullOpt because Closure rewrites the double constants.
@Test def toDouble(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Double, x: Long, epsilon: Double = 0.0): Unit = {
    assertEquals(expected, x.toDouble, epsilon)
    assertEquals(expected, hideFromOptimizer(x).toDouble, epsilon)
  }
  test(0, lg(0))
  test(-1, lg(-1))
  // Closure seems to incorrectly rewrite the constant on the right :-(
  val epsilon = if (isInFullOpt) 1E4 else 0.0
  test(9.223372036854776E18, MaxVal, epsilon)
  test(-9.223372036854776E18, MinVal, epsilon)
  test(3.4240179834317537E18, lg(-151011088, 797216310))
  test(8.5596043411285968E16, lg(-508205099, 19929381))
  test(-3.1630346897289943E18, lg(1249322201, -736451403))
  test(-4.4847682439933604E18, lg(483575860, -1044191477))
  test(-6.4014772289576371E17, lg(-1526343930, -149046007))
  test(-1.76968119148756736E18, lg(531728928, -412036011))
  test(-8.5606671350959739E18, lg(-734111585, -1993185640))
  test(-9.0403963253949932E18, lg(-1407864332, -2104881296))
  test(-6.4988752582247977E18, lg(-1712351423, -1513137310))
  test(-7.7788492399114394E17, lg(1969244733, -181115448))
  test(7.6357174849871442E18, lg(-907683842, 1777829016))
  test(1.25338659134517658E18, lg(-815927209, 291826806))
  test(-3.1910241505692349E18, lg(463523496, -742968207))
  test(7.4216510087652332E18, lg(1482622807, 1727987781))
  test(-8.189046896086654E18, lg(1170040143, -1906661060))
  test(6.8316272807487539E18, lg(-85609173, 1590612176))
  test(-8.0611115909320561E18, lg(-1212811257, -1876873801))
  test(1.7127521901359959E18, lg(-648802816, 398781194))
  test(-6.4442523492577423E18, lg(-1484519186, -1500419423))
  test(-1.71264450938175027E18, lg(-2016996893, -398756124))
}
// Double.toLong conversion semantics: NaN and ±0.0 map to 0, values at or
// beyond ±2^63 (including the infinities) saturate at MaxVal/MinVal, and
// in-range values truncate toward zero. Expected Longs precomputed on the
// JVM.
@Test def fromDouble(): Unit = {
  // Checks both the constant-folded path and, via hideDoubleFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Long, x: Double): Unit = {
    assertEquals(expected, x.toLong)
    assertEquals(expected, hideDoubleFromOptimizer(x).toLong)
  }
  // 2^63 and its representable Double neighbors.
  val twoPow63 = 9.223372036854776E18
  val twoPow63NextUp = 9.223372036854778E18
  val twoPow63NextDown = 9.2233720368547748E18
  // Specials
  test(lg(0), 0.0)
  test(lg(0), -0.0)
  test(lg(0), Double.NaN)
  test(MaxVal, Double.PositiveInfinity)
  test(MinVal, Double.NegativeInfinity)
  // Corner cases
  test(lg(0), Double.MinPositiveValue)
  test(lg(0), -Double.MinPositiveValue)
  test(MaxVal, twoPow63)
  test(MaxVal, twoPow63NextUp)
  if (!isInFullOpt) {
    // GCC incorrectly rewrites the Double constants on the rhs
    test(lg(-1024, 2147483647), twoPow63NextDown)
    test(MinVal, -twoPow63)
  }
  test(MinVal, -twoPow63NextUp)
  test(lg(1024, -2147483648), -twoPow63NextDown)
  // Absolute value too big
  test(MaxVal, 1.5623101234432471E19)
  test(MaxVal, 1.0425697303244048E19)
  test(MaxVal, 1.500625248806836E19)
  test(MinVal, -1.5623101234432471E19)
  test(MinVal, -1.0425697303244048E19)
  test(MinVal, -1.500625248806836E19)
  // Normal cases
  test(lg(-235867169, -1408375), -6.048920506403873E15)
  test(lg(-69250108, 1979931), 8.503743119053764E15)
  test(lg(-305079043, 917242), 3.939528382405885E15)
  test(lg(687182505, -933310), -4.008535239847255E15)
  test(lg(-268193171, -177333), -7.61635408727443E14)
  test(lg(-1529111384, 564485), 2.424447379938472E15)
  test(lg(1128309745, -1082296), -4.648424796281871E15)
  test(lg(-418524847, 1986827), 8.533360864252241E15)
  test(lg(615477490, -646039), -2.774715761463054E15)
  test(lg(-1546293262, 815087), 3.500774757068786E15)
  test(lg(455797153, -1037726), -4.456998776411743E15)
  test(lg(587409995, 1185272), 5.090705064274507E15)
  test(lg(-1405692887, -769407), -3.304575013039063E15)
  test(lg(667130924, 412), 1.770193656876E12)
  test(lg(632602096, -506779), -2.176598598697488E15)
  test(lg(1820137888, 955044), 4.101884566378912E15)
  test(lg(682339811, 951155), 4.085180300766691E15)
  test(lg(1394139649, -1084392), -4.657426781904383E15)
  test(lg(-677499131, 663585), 2.850079490584325E15)
  test(lg(805667746, 1417318), 6.087335263699874E15)
  test(lg(990918920, -1563103), -6.713475274360568E15)
  test(lg(-1427573595, 969167), 4.162543436756133E15)
  test(lg(-699306484, -1852353), -7.955791959986676E15)
  test(lg(-1807820942, 1218020), 5.231358553020274E15)
  test(lg(1243383338, 349241), 1.499979916805674E15)
  test(lg(-479557118, 1183372), 5.08254785441229E15)
  test(lg(1413560577, 654135), 2.809489845729537E15)
  test(lg(-2047369879, 1135596), 4.877349929065833E15)
  test(lg(-741161617, -1594192), -6.846998949739153E15)
  test(lg(-2115502919, 1443312), 6.198980017388729E15)
  test(lg(1015092168, 1152178), 4.948567844262856E15)
  test(lg(-1340352375, -863152), -3.707206656862071E15)
  test(lg(1990353383, -2017544), -8.665283507887641E15)
  test(lg(-1683508387, -666397), -2.862150709693603E15)
  test(lg(2095665836, 369587), 1.587366173692588E15)
  test(lg(229204175, 77510), 3.32903144317135E14)
  test(lg(-1988104885, 1374301), 5.902580156722507E15)
  test(lg(-1032158224, -233238), -1.001746319375376E15)
  test(lg(1321723055, -121058), -5.19938829196113E14)
  test(lg(-1959869514, -1892991), -8.130332101524554E15)
  test(lg(-1173650161, -412038), -1.769686613392113E15)
  test(lg(-1692936735, -1697943), -7.292607053441567E15)
  test(lg(-1368921565, 621023), 2.667276401109539E15)
}
// All comparison operations on Longs — compareTo (both primitive and boxed
// receiver type), equals, and the relational operators — must agree with the
// sign of the true 64-bit signed comparison. `expected` is the signum of
// compare(x, y): -1, 0 or 1.
@Test def comparisons(): Unit = {
  @inline def testInner(x: Long, y: Long, expected: Int): Unit = {
    assertEquals(expected, x.compareTo(y).signum)
    // Also via the boxed java.lang.Long overload.
    assertEquals(expected, x.compareTo(y: java.lang.Long).signum)
    assertEquals(expected == 0, x.equals(y))
    assertEquals(expected != 0, !x.equals(y))
    assertEquals(expected < 0, x < y)
    assertEquals(expected <= 0, x <= y)
    assertEquals(expected > 0, x > y)
    assertEquals(expected >= 0, x >= y)
  }
  // Runs each comparison with all four combinations of constant-folded vs.
  // runtime (hidden-from-optimizer) operands.
  @inline def test(x: Long, y: Long, expected: Int): Unit = {
    testInner(x, y, expected)
    testInner(hideFromOptimizer(x), y, expected)
    testInner(x, hideFromOptimizer(y), expected)
    testInner(hideFromOptimizer(x), hideFromOptimizer(y), expected)
  }
  test(lg(0), lg(0), 0)
  test(lg(0), lg(1), -1)
  test(lg(0), lg(-1), 1)
  test(MaxVal, MinVal, 1)
  test(MinVal, MaxVal, -1)
  // Positive and negative numbers requiring lo to be compared via unsigned
  test(lg(0x87654321, 0x654789ab), lg(0x12345678, 0x654789ab), 1)
  test(lg(0x87654321, 0x89abcdef), lg(0x12345678, 0x89abcdef), 1)
  // Whitebox corner cases
  test(lg(-1, 0), lg(0, 0), 1)
  test(lg(0, 0), lg(-1, 0), -1)
  test(lg(173547161, -1884162399), lg(173547161, -1884162399), 0)
  test(lg(-1131022787, -472928681), lg(-1131022787, -472928681), 0)
  test(lg(-1426164191, 1230100202), lg(-1426164191, 1230100202), 0)
  test(lg(-865774626, 1656835920), lg(-865774626, 1656835920), 0)
  test(lg(323675568, -725625271), lg(323675568, -725625271), 0)
  test(lg(-480943595, -1454872354), lg(-480943595, -1454872354), 0)
  test(lg(-626788852, 1037229194), lg(-626788852, 1037229194), 0)
  test(lg(-717389653, 232764759), lg(-717389653, 232764759), 0)
  test(lg(-861190423, -1233377930), lg(-861190423, -1233377930), 0)
  test(lg(-424759090, 2081288998), lg(-424759090, 2081288998), 0)
  test(lg(-1092215366, 753517982), lg(349136582, -103427916), 1)
  test(lg(363609757, -1151024787), lg(472951646, -1802702403), 1)
  test(lg(604332601, 1869576376), lg(1642523661, 1083165388), 1)
  test(lg(309732766, 1349689861), lg(1287300335, 1464464808), -1)
  test(lg(-1309668929, -965374553), lg(-1952664258, 53355972), -1)
  test(lg(1881957750, 388099413), lg(1843907319, -1819358211), 1)
  test(lg(-969542710, 864289013), lg(-1025874755, 1102102911), -1)
  test(lg(-1425636748, -220185411), lg(1184140796, 40447497), -1)
  test(lg(242386079, 452246653), lg(435337552, -956883630), 1)
  test(lg(-1007383056, 344856628), lg(-195994328, 635205577), -1)
  test(lg(-1652098619, 2042392045), lg(819672742, -2139008380), 1)
  test(lg(1423590080, 1919857862), lg(918443721, 1202178673), 1)
  test(lg(-1726296442, 302493002), lg(314727886, 1583734481), -1)
  test(lg(-2124336701, 769721099), lg(461146322, -591528218), 1)
  test(lg(1544826993, -689540243), lg(-1107003972, -1622786326), 1)
  test(lg(2050227802, 951848379), lg(-774454951, 1675192386), -1)
  test(lg(251298779, -327163776), lg(767615943, 1531730165), -1)
  test(lg(1890888425, 761833495), lg(1870917399, 2027251288), -1)
  test(lg(594868313, 126374530), lg(-1567484882, -1199917303), 1)
  test(lg(-914360997, -703435655), lg(2049249771, -1581791194), 1)
  test(lg(-732484281, -738997306), lg(1445589646, 1910084021), -1)
  test(lg(340771740, 1351224018), lg(459324247, 1301544548), 1)
  test(lg(-940710332, 1344186742), lg(-1143672211, 1112189558), 1)
  test(lg(-804347876, 364046111), lg(-4317439, -1733157379), 1)
  test(lg(914214836, -1226397169), lg(-299522125, 1393423940), -1)
  test(lg(1244546642, 1821771770), lg(44151604, -1398558064), 1)
  test(lg(-2094640323, -1469168677), lg(-263524564, 88152070), -1)
  test(lg(-124567753, -93039352), lg(-200449699, -30383890), -1)
  test(lg(161119306, -1098626173), lg(-137189625, 1289988889), -1)
  test(lg(-2052616761, 846341515), lg(-150583666, 1044666783), -1)
  test(lg(-10359669, -1628837253), lg(165345114, 1529503183), -1)
  test(lg(1717988228, 1622548180), lg(834798590, -1907713185), 1)
  test(lg(-1416372109, -353311343), lg(-722195813, -2060788759), 1)
  test(lg(980620531, -300588346), lg(-889348218, 1805452697), -1)
  test(lg(-465681479, 556544868), lg(-684386776, 724207906), -1)
  test(lg(1720493596, 1118244444), lg(2048914469, -789300492), 1)
  test(lg(-1259678249, -1557339417), lg(-1908141376, -468055129), -1)
  test(lg(1374750478, 1591281700), lg(1107931774, 1073828802), 1)
  test(lg(1307860622, -1769647645), lg(-1521056504, 1476896409), -1)
  test(lg(1870719065, -606069057), lg(1219817813, -1063559023), 1)
  test(lg(-526519712, 1166848880), lg(-748095992, 59925642), 1)
  test(lg(-1011429486, -2053277854), lg(537284118, 1714076830), -1)
  test(lg(-669104363, -107157886), lg(1647426475, -1784147450), 1)
  test(lg(-389860398, 693324889), lg(1047633230, -1757663140), 1)
  test(lg(-200206281, 96771163), lg(613429570, -1206384633), 1)
  test(lg(-1436571081, -2050819200), lg(-665572561, 644211697), -1)
  test(lg(620796821, -567816428), lg(-109412350, -624638338), 1)
  test(lg(858464866, -2104597302), lg(-987329519, 1189618105), -1)
  test(lg(-1342634556, -1517778924), lg(-693373055, 142499537), -1)
  test(lg(1839280888, -168388422), lg(-1645740821, -1967920957), 1)
}
// Bitwise complement on Longs: ~x must flip every bit of both words, i.e.
// ~lg(lo, hi) == lg(~lo, ~hi) == -(x + 1). Every case below pairs a value
// with its exact complement.
@Test def bitwise_not_~(): Unit = {
  // Checks both the constant-folded path and, via hideFromOptimizer,
  // the runtime implementation.
  @inline def test(expected: Long, x: Long): Unit = {
    assertEquals(expected, ~x)
    assertEquals(expected, ~hideFromOptimizer(x))
  }
  test(lg(1664374422, 327449892), lg(-1664374423, -327449893))
  test(lg(-2033180390, -1179462631), lg(2033180389, 1179462630))
  test(lg(-1134559214, 581653069), lg(1134559213, -581653070))
  test(lg(-304074638, -795726117), lg(304074637, 795726116))
  test(lg(-1711832787, 1153070599), lg(1711832786, -1153070600))
  test(lg(-1526506637, 966114536), lg(1526506636, -966114537))
  test(lg(4362923, 1155261397), lg(-4362924, -1155261398))
  test(lg(-1976846289, -68873334), lg(1976846288, 68873333))
  test(lg(-980717878, -1171857118), lg(980717877, 1171857117))
  test(lg(1087568370, 543704246), lg(-1087568371, -543704247))
  test(lg(466027718, 693030605), lg(-466027719, -693030606))
  test(lg(457333958, 1344424074), lg(-457333959, -1344424075))
  test(lg(-1195369388, -1211454825), lg(1195369387, 1211454824))
  test(lg(1637646574, 618600148), lg(-1637646575, -618600149))
  test(lg(1882417448, 81477816), lg(-1882417449, -81477817))
  test(lg(-755550612, -520392566), lg(755550611, 520392565))
  test(lg(-754282895, -1550447287), lg(754282894, 1550447286))
  test(lg(949172349, -708028075), lg(-949172350, 708028074))
  test(lg(1587810906, -1344614950), lg(-1587810907, 1344614949))
  test(lg(-1761617639, -353615615), lg(1761617638, 353615614))
  test(lg(-153730678, 249152220), lg(153730677, -249152221))
  test(lg(-189227914, 2071190797), lg(189227913, -2071190798))
  test(lg(-853867870, 445686068), lg(853867869, -445686069))
  test(lg(-779434875, 417640992), lg(779434874, -417640993))
  test(lg(1997707715, -1100729422), lg(-1997707716, 1100729421))
  test(lg(1171311729, -1236578928), lg(-1171311730, 1236578927))
  test(lg(-833922040, 1773972621), lg(833922039, -1773972622))
  test(lg(1414648869, 1222586075), lg(-1414648870, -1222586076))
  test(lg(1123832582, -1270176018), lg(-1123832583, 1270176017))
  test(lg(1163066309, 237396271), lg(-1163066310, -237396272))
  test(lg(-1826566063, 509270117), lg(1826566062, -509270118))
  test(lg(-450318543, 1650640099), lg(450318542, -1650640100))
  test(lg(1461907704, -27364749), lg(-1461907705, 27364748))
  test(lg(1012261256, 1691289854), lg(-1012261257, -1691289855))
  test(lg(-1929178874, 1804481536), lg(1929178873, -1804481537))
  test(lg(-888719200, -1846455123), lg(888719199, 1846455122))
  test(lg(984231682, -867292444), lg(-984231683, 867292443))
  test(lg(2105026705, -16146223), lg(-2105026706, 16146222))
  test(lg(1742028653, -1648876191), lg(-1742028654, 1648876190))
  test(lg(1922039594, -60702355), lg(-1922039595, 60702354))
  test(lg(264728648, 275960741), lg(-264728649, -275960742))
  test(lg(1237639032, -1761272007), lg(-1237639033, 1761272006))
  test(lg(1118919822, 901486922), lg(-1118919823, -901486923))
  test(lg(18001220, -1121574637), lg(-18001221, 1121574636))
  test(lg(2122002356, -1370943785), lg(-2122002357, 1370943784))
  test(lg(2006182035, -1422441078), lg(-2006182036, 1422441077))
  test(lg(1314896174, 460075839), lg(-1314896175, -460075840))
  test(lg(1829402918, -1031934892), lg(-1829402919, 1031934891))
  test(lg(-2138673173, -107590306), lg(2138673172, 107590305))
  test(lg(1382443514, -56307753), lg(-1382443515, 56307752))
}
// Verifies 64-bit bitwise OR against precomputed (expected, x, y) vectors.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def bitwise_or_|(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x | y)
assertEquals(expected, hideFromOptimizer(x) | y)
assertEquals(expected, x | hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) | hideFromOptimizer(y))
}
test(lg(1467334397, -608514), lg(1198889513, -170491266), lg(356560637, 1244673694))
test(lg(-1645778056, 796647391), lg(-1930990792, 627822941), lg(-1849669008, 185716690))
test(lg(2121785322, -3735189), lg(711185578, -154795743), lg(1446469570, -104529814))
test(lg(401988479, 1357601567), lg(356565628, 275405582), lg(380967239, 1356925723))
test(lg(-167780425, -167778583), lg(1968397619, -447093015), lg(-1242708043, 1353146913))
test(lg(-34603479, -565777), lg(-2121965024, -76133937), lg(2104409609, -1365814226))
test(lg(-537280529, -10535202), lg(1496398822, -548061626), lg(-556169301, -245689186))
test(lg(2132402169, -1093993487), lg(856203065, -1102382704), lg(1276763344, 377524977))
test(lg(500957183, -5777537), lg(474066920, -215674305), lg(366737695, 530830706))
test(lg(-1077937506, 1876426559), lg(-1543310820, 664058893), lg(1002387606, 1826081595))
test(lg(-2121745, -302649859), lg(1606847457, -857707283), lg(-82108753, 628476252))
test(lg(2113649662, -9748643), lg(703699686, -1218298019), lg(1575693246, -565500071))
test(lg(1845274268, 1608495102), lg(1281663616, 1255777790), lg(1708663964, 1604300502))
test(lg(-174066179, 1861146349), lg(-1315547660, 1726760037), lg(-442781559, 235328140))
test(lg(2139059199, -40115785), lg(2014986997, -1130692301), lg(124088654, 1637408903))
test(lg(-4195861, -679630869), lg(1653153899, 1412277603), lg(-1615398494, -682581111))
test(lg(601802239, 1937620978), lg(551077237, 1349033186), lg(597575118, 1662855120))
test(lg(-1383162189, -1107312899), lg(613289137, -1123701660), lg(-1383294317, 369006329))
test(lg(-141299717, -576585865), lg(-418175046, -593383309), lg(1468132939, 360734532))
test(lg(1998808831, -86066691), lg(1428236018, -1294026291), lg(572735565, 1213340152))
test(lg(-1680360554, -738459673), lg(-1949058688, -1013245209), lg(416580246, 300148007))
test(lg(-1073808964, -183288105), lg(-1746245220, 1427323605), lg(-1185613404, -469621610))
test(lg(1475346349, 1845485055), lg(1445648649, 701317455), lg(1407661733, 1287118327))
test(lg(-33566733, -268503975), lg(-1861500445, 764080137), lg(-33812527, -411163560))
test(lg(-286605413, 1602191341), lg(-1408712806, 393166157), lg(1323973395, 1580353248))
test(lg(-553947394, -2013546505), lg(-2072304578, -2142600249), lg(-625840402, -2018265417))
test(lg(-553746946, -140321), lg(450125308, 1742298015), lg(-999674466, -89794491))
test(lg(-16643, -68193313), lg(1239068904, -68194107), lg(-1092247939, -639552609))
test(lg(-52733444, -1159005505), lg(-2075047684, -1706497393), lg(-119858776, -1461536706))
test(lg(-121509406, 1048526839), lg(-1065293728, 1045575815), lg(943802850, 4130803))
test(lg(1844952571, -1327497834), lg(1688647147, -1327540094), lg(1767049400, -1609892586))
test(lg(-5046291, -1345721876), lg(-207425559, 231270892), lg(515004644, -1349918716))
test(lg(-1075861506, -67698709), lg(781813534, 1274454635), lg(-1814682890, -1182466103))
test(lg(2144796219, -17303617), lg(1792206347, -54265949), lg(931436592, -625499620))
test(lg(-874545153, -1611301156), lg(-1957992337, 421859924), lg(1138122674, -1896513908))
test(lg(-1218644010, -67141891), lg(-1220262128, 1790926509), lg(-2107837994, -245286664))
test(lg(-2555905, 2146160604), lg(-485426246, 2122993116), lg(-1077361187, 795578180))
test(lg(999978447, 2129346287), lg(713580935, 2059541733), lg(957494730, 1688940106))
test(lg(-836113, 1983903423), lg(-181332639, 608154803), lg(787627150, 1378378253))
test(lg(-273220891, -1242040457), lg(-944448827, -1528432780), lg(-374967708, 364320051))
test(lg(-52433921, -1615929419), lg(1822361801, -1626992863), lg(-1865553026, -1867721804))
test(lg(-1646593, -1583649), lg(-333036705, -39743141), lg(-136127263, -404241201))
test(lg(-105959457, -50406273), lg(1342309595, 143297662), lg(-1448137844, -50933699))
test(lg(-480707585, -87100434), lg(-514802766, 718197230), lg(1113082335, -259890518))
test(lg(-73693249, -555903498), lg(-476348284, -1025699402), lg(1518405435, 1545110880))
test(lg(-1646871041, -403194029), lg(-2058311589, 1135057747), lg(-1664731675, -1535754941))
test(lg(-203423937, -34342961), lg(333362997, -34482226), lg(-205173969, 1754490115))
test(lg(2083487743, -159909991), lg(2083354303, -2043490039), lg(1344953817, -195725679))
test(lg(-134268937, -680984614), lg(-942983837, -683124136), lg(909452980, -1021249590))
test(lg(-17107060, -35914117), lg(-402624124, -505696678), lg(-688199800, 2110291577))
}
// Verifies 64-bit bitwise AND against precomputed (expected, x, y) vectors.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def bitwise_and_&(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x & y)
assertEquals(expected, hideFromOptimizer(x) & y)
assertEquals(expected, x & hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) & hideFromOptimizer(y))
}
test(lg(-2012982272, 17896961), lg(-1973652216, 353474049), lg(-576365513, -1546420349))
test(lg(440467456, -805024688), lg(2054268182, -735220496), lg(-1706223071, -653894309))
test(lg(-1073741824, -2144861952), lg(-761230816, -1888512251), lg(-988806710, -256349768))
test(lg(-1977056222, -1878455803), lg(-834874333, -101893315), lg(-1964333382, -1877225849))
test(lg(-1069166300, 304091682), lg(-767041747, 1403541430), lg(-320482908, 442929698))
test(lg(193986570, 67633664), lg(1538292767, 67928849), lg(261587146, 2097883842))
test(lg(167772308, 35669040), lg(448790964, 1852174074), lg(-284620129, 35804464))
test(lg(540801, 554500096), lg(123267521, 1965916169), lg(-401979731, 588194498))
test(lg(-1878826824, 268436097), lg(-1725202754, 324931273), lg(-1240211271, 948007557))
test(lg(306780164, 8388625), lg(1044995460, -1447811559), lg(1381579300, 378161591))
test(lg(29904144, 12096051), lg(1640550232, -1980050765), lg(-1613988461, 381206391))
test(lg(-963297278, 537741320), lg(-810205145, 832395272), lg(-153237294, -1368559681))
test(lg(-2138566639, -1881372656), lg(-2087037677, -539042218), lg(-1930915595, -1879201391))
test(lg(348136448, 1461360), lg(936077102, 1888906741), lg(-590306112, 153013360))
test(lg(-2147459072, 50628864), lg(-1520343420, -480326676), lg(-1031638712, 463833361))
test(lg(-805279656, -972355264), lg(-603625122, -837874740), lg(-266310439, -433325742))
test(lg(1763723264, 1095287337), lg(2101242821, 1363798717), lg(-337523686, -1007893653))
test(lg(1296302405, 1947206722), lg(-849542331, 2084521938), lg(1866786159, -179258269))
test(lg(1275593362, 814484868), lg(1283984114, 1922846117), lg(-42342754, 948944324))
test(lg(1081520, 35397649), lg(18451376, 39592223), lg(-300891980, 43819665))
test(lg(539714600, -1617688304), lg(1772840110, -1611388521), lg(876572201, -1080057992))
test(lg(268660738, 1111507460), lg(-1792575438, 1131693597), lg(2026108738, -691967420))
test(lg(-1977139054, 2393104), lg(-1977130853, 1105495064), lg(-289941322, 37545108))
test(lg(-2145341308, -1333516032), lg(-1590955612, -1330697458), lg(-924798828, -1177272879))
test(lg(-1503395487, -299827136), lg(-285931035, -293654078), lg(-1486596765, -31342500))
test(lg(1233401994, 34091008), lg(1237743775, -1293389691), lg(1803860874, 1175174664))
test(lg(-932558672, 270533826), lg(-839976008, 900736195), lg(-362132238, -668577850))
test(lg(117477888, 473995424), lg(1202887172, 484547048), lg(793351913, -1622877017))
test(lg(302600257, -2030040226), lg(1393155525, -2025583778), lg(-1164217783, -416769026))
test(lg(145293649, 536871648), lg(-658787467, -1534848013), lg(770509273, 861439716))
test(lg(1546608834, 302001248), lg(1550840002, 1588870758), lg(2084528882, 302148833))
test(lg(201606209, -695465177), lg(481609689, -152204489), lg(1279544421, -561242137))
test(lg(608207492, -2112820352), lg(-1529763097, -1978531900), lg(641783708, -2039026814))
test(lg(270672860, -1476361723), lg(887514076, -129985897), lg(423346174, -1364800691))
test(lg(606102544, -503185240), lg(1736270961, -223672071), lg(748709016, -498985816))
test(lg(144970344, 74547586), lg(413438572, 628333003), lg(-1964689415, -2039117914))
test(lg(0, 33646849), lg(-1441786846, -952014445), lg(1364118108, 582220621))
test(lg(886489100, -1836576552), lg(-167845571, -610782244), lg(920048140, -1832380167))
test(lg(181408260, 8425760), lg(1070668735, 1223734716), lg(1255200260, 310500128))
test(lg(18633796, 1494253868), lg(565998918, 2102701486), lg(1230790357, -651115716))
test(lg(1242169472, 1074954242), lg(1259021457, -988117846), lg(-95497780, 2025257730))
test(lg(202639938, 134272082), lg(236334914, 210367602), lg(-1388488109, 672191707))
test(lg(955253125, 1994661641), lg(2029259749, 2012495659), lg(-1125022313, -17866867))
test(lg(134242336, 1377566768), lg(2078335024, -748696528), lg(-1944488853, 1455161657))
test(lg(883214088, 536873986), lg(1962270604, 747650594), lg(1051641707, -1606005365))
test(lg(203000132, 19923458), lg(504991188, 623990339), lg(-1919047324, 331123498))
test(lg(274893395, 1881151488), lg(409659995, 1887189252), lg(384277491, 1973591160))
test(lg(115235, 335685459), lg(872793907, 353626075), lg(34859627, 1988247415))
test(lg(538493100, 441057288), lg(-1407266644, 441386073), lg(1635378940, -548742904))
test(lg(839516176, 671232089), lg(844761371, 1022505085), lg(1930384912, 688275291))
}
// Verifies 64-bit bitwise XOR against precomputed (expected, x, y) vectors.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def bitwise_xor_^(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x ^ y)
assertEquals(expected, hideFromOptimizer(x) ^ y)
assertEquals(expected, x ^ hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) ^ hideFromOptimizer(y))
}
test(lg(1342248740, -313223199), lg(690404572, -1279287229), lg(2032643064, 1592473506))
test(lg(-1691405730, 274213753), lg(1880634009, 1433776255), lg(-348716857, 1160616710))
test(lg(882329013, -513228751), lg(-958227509, 287282926), lg(-227156354, -260614433))
test(lg(1416185065, -1664302164), lg(-266860160, 1815641996), lg(-1536078487, -252396512))
test(lg(-1268929640, 1388542260), lg(1278830943, 22194981), lg(-127614265, 1402065425))
test(lg(2107251545, -1588280474), lg(-865349911, -84319450), lg(-1309551184, 1538105408))
test(lg(-1128180942, 150893828), lg(-1973252863, -1969367363), lg(916708915, -2107399239))
test(lg(-721878765, 35051090), lg(2098389933, -3394272), lg(-1444158786, -35986574))
test(lg(-1863503396, 535478572), lg(533612062, -1712875225), lg(-1893500990, -2045945845))
test(lg(1732708730, -1611595623), lg(799833325, 2072025633), lg(1223390615, -462316872))
test(lg(-757432261, -1755342186), lg(570370215, 1665373667), lg(-215635812, -199487627))
test(lg(755676969, 926086823), lg(-1440978805, 1756956707), lg(-2028544094, 1603010180))
test(lg(1331057947, 1347408402), lg(-1788434031, -203193594), lg(-634323830, -1548988140))
test(lg(596183682, -256181831), lg(-1101798994, 1399594232), lg(-1646597332, -1546197695))
test(lg(1360009516, 182700672), lg(-1432962218, -1631098948), lg(-75062662, -1809535684))
test(lg(594798246, -124892913), lg(699430210, 902448324), lg(180589540, -851178037))
test(lg(-1331407219, 1819608371), lg(-1873118605, -20501824), lg(553528574, -1833816077))
test(lg(1679931669, 470452622), lg(-693963147, 616673404), lg(-1300017312, 952842738))
test(lg(1861159718, -1488989292), lg(1250421224, 1104113895), lg(610853582, -420437133))
test(lg(1056597675, -102857583), lg(-611286212, -1550148499), lg(-445979241, 1514412284))
test(lg(255992058, 1610836280), lg(1704771515, 1382796179), lg(1792974657, 845718187))
test(lg(315376042, 566682776), lg(1042258124, 728098489), lg(752081254, 178455073))
test(lg(-185728083, -2076881789), lg(-1887944331, 1039677246), lg(2073445080, -1177715779))
test(lg(22829354, 1511361245), lg(1986213921, -1875380784), lg(2000642315, -903708915))
test(lg(-1209040105, 1698106233), lg(365179043, -418125319), lg(-1574194252, -2111511936))
test(lg(-2034371369, -364230501), lg(-376038790, 1936322298), lg(1865150125, -1725716895))
test(lg(-324294323, -1435696355), lg(182372182, -1389399582), lg(-428511717, 121795327))
test(lg(-1632322296, 110394084), lg(408417754, -547668779), lg(-2031925038, -640727503))
test(lg(1545363539, -418308022), lg(1515701412, 860890032), lg(105620727, -733936646))
test(lg(-2124553361, 1571601224), lg(144626057, 2121098703), lg(-1983696154, 599907975))
test(lg(-508527758, 679546956), lg(1716685092, -647833300), lg(-2015169962, -236730016))
test(lg(-703803607, -1904715404), lg(-2016515438, -1674300757), lg(1371710907, 306998239))
test(lg(-1295788899, 1052686696), lg(-547404938, -860356684), lg(1838979051, -234273060))
test(lg(-1416482745, -1744821078), lg(1034397763, 1158948099), lg(-1774872572, -585891415))
test(lg(-420256974, -1759976200), lg(1755131065, -847055172), lg(-1905373301, 1520046660))
test(lg(-1978435977, -1613559541), lg(755114159, 1707687361), lg(-1492035880, -98945846))
test(lg(1517584033, -1108617107), lg(1110955283, -394871226), lg(407088050, 1436378667))
test(lg(1706214170, -555203143), lg(729918767, -1047522396), lg(1311993397, 527980061))
test(lg(-278231087, -1148948163), lg(-1533968339, 1826223468), lg(1274742780, -681737135))
test(lg(-204001370, 1220298027), lg(230297309, -219465279), lg(-26402437, -1168671510))
test(lg(-1169385448, -2039889677), lg(-1364422220, 1487677662), lg(350226860, -557455315))
test(lg(791138554, 668046473), lg(-1049451753, 1883174397), lg(-296389651, 1475305844))
test(lg(2103687665, 1121138741), lg(-895088167, 1303802204), lg(-1211781080, 258296169))
test(lg(-387978954, 908804328), lg(1409034242, -1162000487), lg(-1155284684, -1936324751))
test(lg(1265820840, 1142688859), lg(861082066, -475962819), lg(2015491450, -1480757658))
test(lg(1490973918, -277478122), lg(-288714491, 1935424926), lg(-1240144421, -1674954616))
test(lg(1839163014, 362842460), lg(-699164585, -731232280), lg(-1144193327, -1043673420))
test(lg(634920094, -2001579101), lg(683993930, 248552821), lg(220002260, -2040344874))
test(lg(-831642917, -817908795), lg(640417317, 298956382), lg(-398074626, -554826341))
test(lg(857398449, 1711937081), lg(-1493347776, 1187436882), lg(-1779986703, 550293355))
}
// Verifies Long left shift (<<) against precomputed vectors. The shift count is
// an Int and may be negative or huge; JVM semantics use only its low 6 bits.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def shift_left_<<(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Int): Unit = {
assertEquals(expected, x << y)
assertEquals(expected, hideFromOptimizer(x) << y)
assertEquals(expected, x << hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) << hideFromOptimizer(y))
}
test(lg(1065353216, -691528727), lg(-1875389825, 1268606893), -73329513)
test(lg(671088640, -1046568266), lg(869553861, -291578632), -339545061)
test(lg(0, 0), lg(543726956, -1753066291), -809014658)
test(lg(-754974720, -1479892363), lg(-895322669, 847749031), 1030973528)
test(lg(0, 1696595968), lg(1598039634, 819660072), 82069876)
test(lg(0, -763223040), lg(-151740279, -595601314), 503039850)
test(lg(0, -1360527360), lg(-1702267427, 1115684531), 1171866675)
test(lg(508125184, -784066052), lg(-807341493, 286689824), -1938771891)
test(lg(-551288832, 439734876), lg(-382832750, -2134078182), 1537970769)
test(lg(-1409069728, 1129787), lg(-580904341, 939559401), 1856717061)
test(lg(1711276032, 1295846454), lg(-198125160, 663832884), 1561097110)
test(lg(-1004724328, -940313723), lg(-1199332365, -1728151952), 858801923)
test(lg(-1029298112, -1523092059), lg(773140802, -181814355), 1110910853)
test(lg(536870912, 200145086), lg(1601160689, 869229832), -338843811)
test(lg(0, -1735502848), lg(-1919381932, -201750119), -813015128)
test(lg(-1727917056, 2104066035), lg(-52019067, -102802849), -2122946486)
test(lg(0, 771751936), lg(-456947922, 1170727731), 2126487160)
test(lg(0, -710836224), lg(1756719200, -1702547414), -32425558)
test(lg(0, -1073741824), lg(97072750, 409070577), 1222452733)
test(lg(0, -1182793728), lg(1177105779, 212324545), -834196361)
test(lg(0, 1543503872), lg(1395605166, -1743726419), -1762017159)
test(lg(0, -67108864), lg(703808254, 1939941481), 1042647417)
test(lg(0, 1207959552), lg(-702184622, -618243162), -753853766)
test(lg(-58458112, -1619174179), lg(-1368457662, 1747275710), 1382741393)
test(lg(0, -299542812), lg(-74885703, 1342895995), 1929734882)
test(lg(0, -1585446912), lg(-61401466, -496528012), -129147274)
test(lg(1888485376, 630678170), lg(-660169692, 1479330149), 289081298)
test(lg(0, -536870912), lg(-421237721, 1011668330), 370873533)
test(lg(0, 102137856), lg(-821818323, -2029348763), -916638609)
test(lg(0, -1073741824), lg(-1246065172, -1572087360), 1493241980)
test(lg(1156516188, -1812425640), lg(578258094, -906212820), 2074806145)
test(lg(0, 1370357760), lg(61151968, -1770168701), -2062208020)
test(lg(-402653184, 1642287002), lg(1013576541, 460756940), -902835237)
test(lg(-1744830464, 1690731362), lg(-1731171245, 771836652), 868975579)
test(lg(-417260032, 563566725), lg(1123258511, 1049676716), 575477257)
test(lg(411626816, -1915897795), lg(-779579692, 1222433667), 1238257604)
test(lg(0, -2147483648), lg(-1102469156, -543766743), 553354173)
test(lg(0, -1909156352), lg(843520587, -517185932), 1899246569)
test(lg(0, -487976960), lg(-510775647, -896837143), 1487779500)
test(lg(-1148788736, -847308273), lg(-1594115986, -186853391), -119255604)
test(lg(0, 1940424228), lg(-588635767, 1047291343), 2089738146)
test(lg(1726279680, 2137615428), lg(-1002017201, -986188138), 800913356)
test(lg(0, 1650633728), lg(1813551275, -400674286), -1609938966)
test(lg(-1207959552, 897838789), lg(-1333929801, 254558182), -1518372133)
test(lg(0, -1104224256), lg(834127324, 878312672), -923142549)
test(lg(-504160320, 305586753), lg(126340223, -2008491127), -252023418)
test(lg(0, 0), lg(510931784, -1313923431), 1174528765)
test(lg(-1449390900, -1602240664), lg(711394099, -400560166), -967606846)
test(lg(0, 1162928128), lg(1319282800, -1994311032), 1237159401)
test(lg(-1749421258, 1809275319), lg(-874710629, -1242845989), 484063041)
}
// Verifies Long unsigned (logical) right shift (>>>) against precomputed vectors.
// The shift count is an Int and may be negative or huge; JVM semantics use only
// its low 6 bits.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def shift_logical_right_>>>(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Int): Unit = {
assertEquals(expected, x >>> y)
assertEquals(expected, hideFromOptimizer(x) >>> y)
assertEquals(expected, x >>> hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) >>> hideFromOptimizer(y))
}
test(lg(1982185809, 4856), lg(88517143, 1273092247), 2099569298)
test(lg(40, 0), lg(-1987462914, 1361836721), -2053535175)
test(lg(258, 0), lg(1513792977, 1085974656), -303705162)
test(lg(-1589724844, 2), lg(-2071249600, 1411897130), 1015183069)
test(lg(827423626, 419765), lg(-1560865755, 214919778), 1191603401)
test(lg(376475826, 25773988), lg(944265510, -995896821), 485744647)
test(lg(291969293, 528), lg(1131824263, -2080089658), -386336938)
test(lg(185, 0), lg(-827478170, -1185129975), 2048537528)
test(lg(45022, 0), lg(-916869993, -1344352401), -791372688)
test(lg(587, 0), lg(588931659, -1830830904), -1259543946)
test(lg(-684574597, 28915), lg(473794659, 947514265), -1409717873)
test(lg(3, 0), lg(471518489, -940479957), -847604034)
test(lg(11, 0), lg(-818287716, 1547586919), -216455813)
test(lg(266, 0), lg(-2088976011, -2057680935), 787633143)
test(lg(-800511856, 59336150), lg(306848777, -497453644), 1584315654)
test(lg(25694, 0), lg(-1689341833, -927188015), 1300572337)
test(lg(237982231, 3229829), lg(396954515, 413418119), 1180537031)
test(lg(1319611409, 10188), lg(1478732342, 1335401807), -1668840943)
test(lg(-530293557, 9), lg(-1326271298, -1643756084), -2118687716)
test(lg(26, 0), lg(1205635051, 875594107), 350453433)
test(lg(1698203097, 57089), lg(-2049358216, -553556680), -1203541232)
test(lg(-308392901, 40188), lg(1278981121, -1661145698), 254766480)
test(lg(-1667461656, 7259908), lg(1313272948, 929268302), 1175504903)
test(lg(99018, 0), lg(1982277801, -1050318135), 629735727)
test(lg(16237, 0), lg(-610510955, 1064153335), 577897264)
test(lg(689994, 0), lg(1859860682, 1413109554), 243415787)
test(lg(4088, 0), lg(1757351444, -7991214), -1844808396)
test(lg(48441534, 0), lg(-1277568919, -1194709070), -2102413146)
test(lg(42961906, 0), lg(-1768551066, 1342559), 365466523)
test(lg(1946, 0), lg(1051996382, -213518283), -717261067)
test(lg(-605712863, 10), lg(451444747, -1380034334), -675522340)
test(lg(8, 0), lg(605006440, -1956088854), 192236860)
test(lg(-152492078, 258), lg(-384174131, -2122615661), -1278414057)
test(lg(-1650335224, 9146646), lg(-1579022332, -1953425763), 2134440904)
test(lg(175996054, 0), lg(-433112808, -1479030417), -1873327132)
test(lg(771890457, 0), lg(-1786180708, 385945228), 1526047775)
test(lg(868056695, -1200391723), lg(868056695, -1200391723), 93595840)
test(lg(88233, 0), lg(1335240662, -1403745666), 1625850351)
test(lg(21, 0), lg(-681452715, -1446696044), -742234373)
test(lg(200097858, 0), lg(301750839, 1600782865), 1678034787)
test(lg(1, 0), lg(-2077889650, 445749598), 363036476)
test(lg(-1160719403, 3135), lg(-1633078438, 1644025478), -1297864237)
test(lg(27660, 0), lg(1159483779, 906375175), -1204888593)
test(lg(1096217739, 131290637), lg(179807326, 1050325098), -1598422013)
test(lg(61, 0), lg(952383136, -193355640), 415626042)
test(lg(12362394, 0), lg(972435428, -1130194211), -1259042456)
test(lg(-924965860, 8483), lg(605823642, 555993310), 1780437072)
test(lg(88, 0), lg(665774635, 184915839), 1729784373)
test(lg(27109, 0), lg(-263808048, -741669613), -204793551)
test(lg(-5828381, 10), lg(-954198224, 369053217), 768150041)
}
// Verifies Long arithmetic (sign-extending) right shift (>>) against precomputed
// vectors. The shift count is an Int and may be negative or huge; JVM semantics
// use only its low 6 bits.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def shift_arithmetic_right_>>(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Int): Unit = {
assertEquals(expected, x >> y)
assertEquals(expected, hideFromOptimizer(x) >> y)
assertEquals(expected, x >> hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) >> hideFromOptimizer(y))
}
test(lg(144041519, 2813487), lg(-1780076655, 720252680), -1316031160)
test(lg(1519, 0), lg(234061537, 796729805), 1452874739)
test(lg(-935479627, 124), lg(1523206972, 1046748891), 1356453463)
test(lg(-15335, -1), lg(1866043067, -2009962307), 393061105)
test(lg(5, 0), lg(89507691, 183545611), -1980770119)
test(lg(-1283367734, 14309038), lg(-1062312593, 1831556953), 1545082311)
test(lg(523169438, 0), lg(-1568293714, 523169438), -2119005984)
test(lg(-1704853904, -731301), lg(-2013675422, -748851607), 511130378)
test(lg(345569760, -46), lg(-521585277, -770402055), -1176556648)
test(lg(1777038301, 61), lg(-145701849, 257587932), -1512809002)
test(lg(-51, -1), lg(-973180026, -1694110170), 2083093369)
test(lg(-5, -1), lg(1761120319, -539393529), -207994821)
test(lg(-587262921, -3246345), lg(-30904807, -1662128199), -638486135)
test(lg(-10706, -1), lg(1812122560, -701571284), 611632432)
test(lg(7484398, 100362842), lg(119750375, 1605805472), 244039684)
test(lg(1, 0), lg(269986751, 1459449758), -439796226)
test(lg(7, 0), lg(-1969890020, 2011804532), -652735044)
test(lg(-2130588861, 98), lg(-1582649974, 826310885), 613066583)
test(lg(-669931160, -697), lg(756433442, -1459944907), -775565931)
test(lg(933146972, -1), lg(1678061064, -1680910162), -531660641)
test(lg(1601141595, 1298147), lg(1870355258, 332325727), -434372344)
test(lg(-1047936567, -129548), lg(1886551280, -2122502046), -763866098)
test(lg(-72307, -1), lg(-1169141408, -592336405), -1841005139)
test(lg(72262, 0), lg(686282122, 295988927), 69079212)
test(lg(-1582088844, -23862710), lg(1825529126, -1527213400), 1371712838)
test(lg(70395261, 0), lg(633149491, 1126324183), 1948323684)
test(lg(-329, -1), lg(-363762029, -1377253181), -1243200330)
test(lg(1924403917, -21), lg(-1694234908, -689608667), 728732313)
test(lg(-62655, -1), lg(1319661865, -2053067582), -777879057)
test(lg(-1472236443, 19900875), lg(-1472236443, 19900875), 373478400)
test(lg(-1, -1), lg(-1719111010, -1766452468), 942391743)
test(lg(5131, 0), lg(-624682758, 1345231635), -813574478)
test(lg(9, 0), lg(1316519660, 314590421), -641829383)
test(lg(-14492, -1), lg(-1380652891, -474856510), -920501329)
test(lg(40, 0), lg(-2084688189, 1352268039), -177471111)
test(lg(-868447412, 13901269), lg(507881044, 1779362534), -508943033)
test(lg(-37529, -1), lg(1742323077, -1229747072), 401183471)
test(lg(376386, 0), lg(346182810, 770838817), 797274667)
test(lg(-1822, -1), lg(828281422, -477411393), 1298272370)
test(lg(1021967080, -2560), lg(-341778503, -671026265), 532386578)
test(lg(-1683940185, 34921), lg(-1907127360, 1144311248), -2131012273)
test(lg(-121723, -1), lg(756366897, -1994294687), -1642432978)
test(lg(-644688038, 9473), lg(-1363894143, 1241756453), 1681307793)
test(lg(-278047, -1), lg(1708006412, -1138876437), 2010442220)
test(lg(872834, 0), lg(-664430929, 446891142), -1707024855)
test(lg(-1, -1), lg(-1904131429, -938887), -829231944)
test(lg(-2101780246, 11998), lg(-1043053889, 1572668786), 309495249)
test(lg(-11427, -1), lg(563683687, -1497656119), -176819791)
test(lg(201, 0), lg(-627312011, 421917318), 2056663541)
test(lg(-104838948, -3), lg(-904956287, -543423347), -617227620)
}
// Verifies Long unary negation against precomputed vectors, including the
// corner cases 0, ±1, MaxVal (whose negation is MinVal + 1) and MinVal
// (which negates to itself due to two's-complement overflow).
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def negate_-(): Unit = {
// Each vector is asserted twice so that both the compile-time constant-folded
// path and the runtime path (operand hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long): Unit = {
assertEquals(expected, -x)
assertEquals(expected, -hideFromOptimizer(x))
}
test(lg(0), lg(0))
test(lg(1), lg(-1))
test(lg(-1), lg(1))
test(lg(1, -2147483648), MaxVal)
test(MinVal, MinVal)
test(lg(0, -1), lg(0, 1))
test(lg(792771844, -1518464955), lg(-792771844, 1518464954))
test(lg(1313283210, -1172119606), lg(-1313283210, 1172119605))
test(lg(-1034897743, -341494686), lg(1034897743, 341494685))
test(lg(-924881290, 1614058538), lg(924881290, -1614058539))
test(lg(-1636891236, -1405401040), lg(1636891236, 1405401039))
test(lg(2044349674, -477271433), lg(-2044349674, 477271432))
test(lg(1426086684, -1493816436), lg(-1426086684, 1493816435))
test(lg(-2125201680, 1667846199), lg(2125201680, -1667846200))
test(lg(161054645, -1272528725), lg(-161054645, 1272528724))
test(lg(-1013390126, -1323844683), lg(1013390126, 1323844682))
test(lg(-1028806094, -691441881), lg(1028806094, 691441880))
test(lg(1060422114, -11477649), lg(-1060422114, 11477648))
test(lg(1366334123, -2046238761), lg(-1366334123, 2046238760))
test(lg(1307711795, 940346049), lg(-1307711795, -940346050))
test(lg(421687960, -250174762), lg(-421687960, 250174761))
test(lg(379452754, -843386803), lg(-379452754, 843386802))
test(lg(-1251296999, 1144268297), lg(1251296999, -1144268298))
test(lg(-690359429, -1676679602), lg(690359429, 1676679601))
test(lg(1952563749, -882544420), lg(-1952563749, 882544419))
test(lg(-1420900897, -1865273591), lg(1420900897, 1865273590))
test(lg(115947827, -832851217), lg(-115947827, 832851216))
test(lg(-1834973959, -1423776005), lg(1834973959, 1423776004))
test(lg(1376766876, 1519617584), lg(-1376766876, -1519617585))
test(lg(-1845217535, 724725865), lg(1845217535, -724725866))
test(lg(-1133294381, 699400553), lg(1133294381, -699400554))
test(lg(113507585, 615978889), lg(-113507585, -615978890))
test(lg(-1839784424, 1163726652), lg(1839784424, -1163726653))
test(lg(1065777168, 1301742163), lg(-1065777168, -1301742164))
test(lg(334075220, -1058529734), lg(-334075220, 1058529733))
test(lg(1443112398, 1148167880), lg(-1443112398, -1148167881))
test(lg(1647739462, 12310882), lg(-1647739462, -12310883))
test(lg(1461318149, 518941731), lg(-1461318149, -518941732))
test(lg(56833825, -162898592), lg(-56833825, 162898591))
test(lg(-680096727, -1760413869), lg(680096727, 1760413868))
test(lg(461541717, -1103626950), lg(-461541717, 1103626949))
test(lg(1287248387, 1483137214), lg(-1287248387, -1483137215))
test(lg(-1681467124, -1197977023), lg(1681467124, 1197977022))
test(lg(-310946355, 885055747), lg(310946355, -885055748))
test(lg(-717629012, -1299204708), lg(717629012, 1299204707))
test(lg(800584851, 350245993), lg(-800584851, -350245994))
test(lg(1911014238, -441020786), lg(-1911014238, 441020785))
test(lg(-1647080824, -1197295589), lg(1647080824, 1197295588))
test(lg(-925751968, -479541400), lg(925751968, 479541399))
test(lg(-656919119, 1574890072), lg(656919119, -1574890073))
test(lg(-1833364814, 432106462), lg(1833364814, -432106463))
test(lg(-315730911, -1990201785), lg(315730911, 1990201784))
test(lg(1218524771, -572482048), lg(-1218524771, 572482047))
test(lg(276668811, 2002398729), lg(-276668811, -2002398730))
test(lg(1489416833, 834462753), lg(-1489416833, -834462754))
test(lg(2066446588, 688546120), lg(-2066446588, -688546121))
}
// Verifies Long addition against precomputed (expected, x, y) vectors, covering
// carry propagation between the low and high 32-bit words.
// NOTE(review): lg appears to build a Long from (lo, hi) 32-bit halves — defined elsewhere; confirm.
@Test def plus_+(): Unit = {
// Each vector is asserted four ways so that both the compile-time constant-folded
// path and the runtime path (operands hidden behind hideFromOptimizer) are covered.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x + y)
assertEquals(expected, hideFromOptimizer(x) + y)
assertEquals(expected, x + hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) + hideFromOptimizer(y))
}
test(lg(802149732, -566689627), lg(-202981355, -566689628), lg(1005131087, 0))
test(lg(902769101, 1674149440), lg(1153016325, 1674149440), lg(-250247224, -1))
test(lg(1128646485, -1965159800), lg(1701699755, -1965159800), lg(-573053270, -1))
test(lg(66936416, -973893589), lg(-1183294843, -973893590), lg(1250231259, 0))
test(lg(-155818001, 449544496), lg(-2145882999, 449544496), lg(1990064998, 0))
test(lg(-1244599644, -917980205), lg(-528276750, -917980205), lg(-716322894, -1))
test(lg(580594010, 1794016499), lg(-1061043923, 1794016498), lg(1641637933, 0))
test(lg(-1874551871, 1883156001), lg(-315483661, 1883156001), lg(-1559068210, -1))
test(lg(-611587809, 95409025), lg(-1899047326, 95409025), lg(1287459517, 0))
test(lg(-1393747885, 1167571449), lg(-705065818, 1167571449), lg(-688682067, -1))
test(lg(1135734754, -607437553), lg(-192210545, -607437554), lg(1327945299, 0))
test(lg(545472170, -2007097641), lg(11453726, -2007097641), lg(534018444, 0))
test(lg(-1984029353, -1191350400), lg(1809973610, -1191350400), lg(500964333, 0))
test(lg(1031291620, 108684756), lg(972641234, 108684756), lg(58650386, 0))
test(lg(-1375760766, 127758048), lg(-1511325903, 127758048), lg(135565137, 0))
test(lg(640679472, 429508922), lg(-942832491, 429508921), lg(1583511963, 0))
test(lg(-820503583, -594798242), lg(1500842230, -594798242), lg(1973621483, 0))
test(lg(1875301895, 910473912), lg(-1088230684, 910473912), lg(-1331434717, -1))
test(lg(-1755864971, 378724963), lg(798219431, 378724963), lg(1740882894, 0))
test(lg(468052904, -683558197), lg(-1763683665, -683558197), lg(-2063230727, -1))
test(lg(-1488850347, -1636478025), lg(627629519, -1636478024), lg(-2116479866, -1))
test(lg(915882407, -338305025), lg(-526665240, -338305026), lg(1442547647, 0))
test(lg(-950882103, -466473801), lg(-1265295286, -466473801), lg(314413183, 0))
test(lg(-673278223, -1417005301), lg(-1412852606, -1417005301), lg(739574383, 0))
test(lg(-1565299836, -2035157269), lg(708993121, -2035157269), lg(2020674339, 0))
test(lg(638729196, 1182702858), lg(847269791, 1182702858), lg(-208540595, -1))
test(lg(-1453651445, -1902383955), lg(97084677, -1902383954), lg(-1550736122, -1))
test(lg(1116569659, -606967004), lg(-267181534, -606967005), lg(1383751193, 0))
test(lg(529048030, 1063184820), lg(-904322265, 1063184819), lg(1433370295, 0))
test(lg(-499260224, 101142421), lg(1841727454, 101142421), lg(1953979618, 0))
test(lg(1452864874, 1045175929), lg(-1716387490, 1045175929), lg(-1125714932, -1))
test(lg(982736721, 1506316757), lg(-1020814821, 1506316756), lg(2003551542, 0))
test(lg(-1478064805, 1107506955), lg(467820886, 1107506956), lg(-1945885691, -1))
test(lg(1436947166, -57552832), lg(-103701719, -57552833), lg(1540648885, 0))
test(lg(3887456, -414981457), lg(1280780483, -414981457), lg(-1276893027, -1))
test(lg(939083871, 606376864), lg(-1505747919, 606376864), lg(-1850135506, -1))
test(lg(-1161495325, -606274238), lg(-1797917239, -606274238), lg(636421914, 0))
test(lg(2146013782, 52949338), lg(-551974000, 52949338), lg(-1596979514, -1))
test(lg(-159062053, -623553409), lg(484182807, -623553408), lg(-643244860, -1))
test(lg(1680160313, 371486519), lg(1170065239, 371486519), lg(510095074, 0))
test(lg(-2071737549, -251530660), lg(553737773, -251530660), lg(1669491974, 0))
test(lg(793877651, -324566030), lg(1363264202, -324566030), lg(-569386551, -1))
test(lg(1897556965, 1255689015), lg(1461362302, 1255689015), lg(436194663, 0))
test(lg(-540868058, 718534179), lg(-1463314706, 718534179), lg(922446648, 0))
test(lg(2547531, -716998232), lg(-1684072850, -716998233), lg(1686620381, 0))
test(lg(-1709813271, -2086072551), lg(-183257712, -2086072551), lg(-1526555559, -1))
test(lg(-2134341942, -1223154956), lg(-485818523, -1223154956), lg(-1648523419, -1))
test(lg(1634619686, -1934382665), lg(392330048, -1934382665), lg(1242289638, 0))
test(lg(-1409927090, -75135322), lg(1907808353, -75135322), lg(977231853, 0))
test(lg(-1393001322, 1362535802), lg(88305723, 1362535803), lg(-1481307045, -1))
}
/**
 * Regression tests for `Long` subtraction (`x - y`).
 *
 * Each `test` call checks one pre-computed case in all four combinations of
 * plain vs. `hideFromOptimizer`-wrapped operands. `hideFromOptimizer` is
 * defined elsewhere in this file; presumably it prevents the compiler/linker
 * from constant-folding its argument so that both the compile-time and the
 * runtime code paths of the Long implementation are exercised — TODO confirm
 * against the helper's definition.
 *
 * NOTE(review): `lg(lo, hi)` appears to build a Long from its low and high
 * 32-bit words, and `lg(v)` from a single int — both defined elsewhere in the
 * file; verify. The operand values below look machine-generated; do not edit
 * individual cases by hand.
 */
@Test def minus_-(): Unit = {
// Asserts `expected == x - y` for every constant/non-constant operand mix.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x - y)
assertEquals(expected, hideFromOptimizer(x) - y)
assertEquals(expected, x - hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) - hideFromOptimizer(y))
}
// Whitebox corner case
test(lg(-1), lg(0), lg(1))
test(lg(1318078695, 462416044), lg(406229717, 462416044), lg(-911848978, -1))
test(lg(459412414, 466142261), lg(873646396, 466142261), lg(414233982, 0))
test(lg(1749422706, -573388520), lg(-2077914189, -573388520), lg(467630401, 0))
test(lg(855866353, -1980988131), lg(-789253983, -1980988132), lg(-1645120336, -1))
test(lg(1858485462, 1825277273), lg(-482388232, 1825277273), lg(1954093602, 0))
test(lg(1211608504, -1077757379), lg(-1616159373, -1077757379), lg(1467199419, 0))
test(lg(-1391411781, -1825579414), lg(-105778670, -1825579414), lg(1285633111, 0))
test(lg(1573921037, -2018677385), lg(1306759468, -2018677385), lg(-267161569, -1))
test(lg(2075838974, -289291128), lg(618139116, -289291128), lg(-1457699858, -1))
test(lg(600013127, -1980710784), lg(1736445522, -1980710784), lg(1136432395, 0))
test(lg(-558434179, 21136449), lg(-1970971750, 21136449), lg(-1412537571, -1))
test(lg(-343650116, 229693364), lg(-1491842755, 229693364), lg(-1148192639, -1))
test(lg(1686071974, -2064363005), lg(2125082313, -2064363005), lg(439010339, 0))
test(lg(-1587252411, -1887690341), lg(922634658, -1887690341), lg(-1785080227, -1))
test(lg(-992416688, 1754335328), lg(478015362, 1754335329), lg(1470432050, 0))
test(lg(1718268050, -845578935), lg(-1788952896, -845578935), lg(787746350, 0))
test(lg(1316319511, -1479013672), lg(-1177368338, -1479013672), lg(1801279447, 0))
test(lg(1568876561, -2147323821), lg(1761081661, -2147323821), lg(192205100, 0))
test(lg(-1122491731, 1604940224), lg(261772552, 1604940225), lg(1384264283, 0))
test(lg(1556996455, 1018615990), lg(-1441241840, 1018615990), lg(1296729001, 0))
test(lg(-52258673, -155632234), lg(907527568, -155632233), lg(959786241, 0))
test(lg(1911811399, 1534910973), lg(1509034771, 1534910973), lg(-402776628, -1))
test(lg(1234505303, -718856464), lg(-344668006, -718856465), lg(-1579173309, -1))
test(lg(1263823751, 1792314521), lg(-2096618226, 1792314521), lg(934525319, 0))
test(lg(-1901870284, -977488448), lg(1861956484, -977488448), lg(-531140528, -1))
test(lg(170060904, -1532994269), lg(-691455907, -1532994270), lg(-861516811, -1))
test(lg(-417244722, -946809431), lg(-693769914, -946809431), lg(-276525192, -1))
test(lg(1392505816, -834216711), lg(-1698674051, -834216711), lg(1203787429, 0))
test(lg(339105023, -930632047), lg(1453492556, -930632047), lg(1114387533, 0))
test(lg(1588670098, -422836102), lg(-516102112, -422836103), lg(-2104772210, -1))
test(lg(-1793332542, 1839759286), lg(1194707556, 1839759286), lg(-1306927198, -1))
test(lg(-1933743595, -1652840750), lg(1188016800, -1652840750), lg(-1173206901, -1))
test(lg(1172675504, 1790839027), lg(-1268512415, 1790839027), lg(1853779377, 0))
test(lg(-2038245078, 275932678), lg(-777434907, 275932678), lg(1260810171, 0))
test(lg(-640120196, 658575618), lg(607917442, 658575619), lg(1248037638, 0))
test(lg(-939204613, -2089057829), lg(-1490388970, -2089057829), lg(-551184357, -1))
test(lg(-2089897031, 992436418), lg(-1342917439, 992436418), lg(746979592, 0))
test(lg(-767046771, -1192540532), lg(-1045496394, -1192540532), lg(-278449623, -1))
test(lg(735191894, -683257085), lg(1555450000, -683257085), lg(820258106, 0))
test(lg(2026420598, 481753248), lg(1022728181, 481753248), lg(-1003692417, -1))
test(lg(-2132649422, 1411964223), lg(2028304312, 1411964223), lg(-134013562, -1))
test(lg(1346424260, -217374406), lg(704117341, -217374406), lg(-642306919, -1))
test(lg(-692878557, 278237510), lg(313351245, 278237511), lg(1006229802, 0))
test(lg(-1545280043, 2054685372), lg(2076724262, 2054685372), lg(-672962991, -1))
test(lg(1156651977, 261806288), lg(1990098163, 261806288), lg(833446186, 0))
test(lg(-244547539, 1626774417), lg(1425435353, 1626774418), lg(1669982892, 0))
test(lg(-125857115, -1714068645), lg(2084724465, -1714068645), lg(-2084385716, -1))
test(lg(-2124426763, -543675020), lg(-1799809279, -543675020), lg(324617484, 0))
test(lg(-2145169231, -602489858), lg(1972622018, -602489858), lg(-177176047, -1))
test(lg(408960051, 967789979), lg(883147297, 967789979), lg(474187246, 0))
}
/**
 * Regression tests for `Long` multiplication (`x * y`).
 *
 * Each `test` call checks one pre-computed case in all four combinations of
 * plain vs. `hideFromOptimizer`-wrapped operands. `hideFromOptimizer` is
 * defined elsewhere in this file; presumably it prevents constant folding so
 * that both the compile-time and the runtime multiplication paths are
 * exercised — TODO confirm against the helper's definition.
 *
 * NOTE(review): `lg(lo, hi)` appears to build a Long from its low and high
 * 32-bit words — defined elsewhere in the file; verify. The operand values
 * below look machine-generated; do not edit individual cases by hand.
 */
@Test def times_*(): Unit = {
// Asserts `expected == x * y` for every constant/non-constant operand mix.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x * y)
assertEquals(expected, hideFromOptimizer(x) * y)
assertEquals(expected, x * hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) * hideFromOptimizer(y))
}
// Operands given as (lo, hi) word pairs
test(lg(-1056314208, 1039912134), lg(-1436299491, 1172705251), lg(1721031968, 0))
test(lg(15417694, -1235494072), lg(-1754547158, 1592794750), lg(-850659149, -1))
test(lg(-1312839754, -486483117), lg(-582562130, 1508550574), lg(-2054981347, -1))
test(lg(-377676239, 1969822597), lg(-517256163, 1107889737), lg(324089381, 0))
test(lg(-1426078720, -1379092277), lg(1862517504, -2146745095), lg(2043533548, 0))
test(lg(-1611894400, 514550890), lg(-1341087062, 93674761), lg(1272468928, 0))
test(lg(88803236, -172420721), lg(-1911825604, 1026411170), lg(244738503, 0))
test(lg(1486387579, 668666773), lg(2102189793, 425022510), lg(750432219, 0))
test(lg(913918418, 2124658288), lg(-1628887094, 2043879870), lg(-1367964491, -1))
test(lg(-1067082241, 864193319), lg(454909009, -1096315634), lg(-461844145, -1))
test(lg(949541055, 403324299), lg(-1346593793, -331776468), lg(1495188289, 0))
test(lg(-232871624, -1943313306), lg(39946028, -363039140), lg(-1134101206, -1))
test(lg(-528828160, -1884969955), lg(769959254, -432157368), lg(-488368768, -1))
test(lg(913322937, -2105457977), lg(1975078475, 1181124823), lg(-1852476533, -1))
test(lg(1594278208, 943829214), lg(-2118478876, -1521449422), lg(-235907376, -1))
test(lg(-50678328, 2146883835), lg(-192590815, -1552754278), lg(990887112, 0))
test(lg(1779498513, -1732099612), lg(-74714605, 386143916), lg(1634792395, 0))
test(lg(982209626, 857499597), lg(1839773441, -590412588), lg(799604314, 0))
test(lg(1806268816, -990479821), lg(1395571130, -1228992407), lg(1440046952, 0))
test(lg(1683728223, -957382628), lg(-1094818235, 1759139279), lg(-156634285, -1))
test(lg(-1590791694, 595489480), lg(853844787, 525523561), lg(600761926, 0))
test(lg(1353714367, 146465211), lg(-903115469, 793487771), lg(1986597957, 0))
test(lg(1421874569, -1462441210), lg(-830036223, 830164681), lg(-1711884663, -1))
test(lg(-962035602, -2086325336), lg(1514898873, 1802395563), lg(1763957470, 0))
test(lg(213232144, -1084932179), lg(-1931885288, 136587512), lg(-241565738, -1))
test(lg(-915935202, 1495104097), lg(571274323, 1264898114), lg(1823828906, 0))
test(lg(1116543789, -1473151538), lg(-15708939, -2105030313), lg(48280153, 0))
test(lg(-1230228445, -570579388), lg(1792017337, -1626094957), lg(301685947, 0))
test(lg(1335719116, 1447187791), lg(-1942632452, -691115342), lg(-889918259, -1))
test(lg(1398640985, -1330552693), lg(-683458011, -1409200935), lg(-996910555, -1))
test(lg(-402621042, 1775759707), lg(562125786, -1303526635), lg(-1761056509, -1))
test(lg(129149596, -78429064), lg(2115902292, -1194658096), lg(-1549721205, -1))
test(lg(1706925885, 1413499189), lg(1852083423, 330104035), lg(1414822755, 0))
test(lg(-722178384, 1850552711), lg(-1623207532, 1442771787), lg(-948878276, -1))
test(lg(545021767, -1389368834), lg(-898643831, 773279296), lg(1294488911, 0))
test(lg(1541594150, 820379725), lg(421823854, 802578424), lg(1394107269, 0))
test(lg(-279324848, 1175391379), lg(1589092022, 237831212), lg(-763790472, -1))
test(lg(2089067814, 975727054), lg(-1247207721, -370556328), lg(1449901386, 0))
test(lg(-1977714127, -377823390), lg(109386811, 368962517), lg(1406834819, 0))
test(lg(1759713497, -312922364), lg(2135299059, -798752868), lg(-1861488893, -1))
test(lg(1030024362, -795941843), lg(-695671854, 1917612060), lg(2083344781, 0))
test(lg(-704748314, 388197332), lg(250669253, -442179349), lg(-552836178, -1))
test(lg(758103782, -158300478), lg(1237744278, 206295616), lg(-1547545223, -1))
test(lg(-629736326, 810097466), lg(492775518, 1691641907), lg(1172634963, 0))
test(lg(610754048, 1997636055), lg(-1549380722, 49835026), lg(-1645815552, -1))
test(lg(1696857284, 1549588995), lg(1850430325, -1942955614), lg(-295254732, -1))
test(lg(-66011146, -376837532), lg(-1276671498, -1984743584), lg(-1583554303, -1))
test(lg(2033040344, -167450557), lg(-2127158934, -2058421178), lg(1620104636, 0))
test(lg(-1886196376, -31345953), lg(69958717, -772556465), lg(21655944, 0))
test(lg(-38147573, -1269583268), lg(406538265, -107036516), lg(2077087683, 0))
// Operands given as plain 64-bit literals
test(8433193943336928478L, -304510477859059605L, -504694402761190L)
test(-12731773183499098L, -253162060478L, 50291L)
test(0L, 0L, -13850059L)
test(7569251612557229982L, -8660470952582643L, -874L)
test(-11988L, -1332L, 9L)
test(-8243580206627053600L, -29568141078178L, 1526544L)
test(2184313243348463060L, 328926390054L, -3638668370L)
test(46841680L, 205L, 228496L)
test(740594256954004614L, -19460467868573L, -81306407837343422L)
test(1686350941924289808L, 515501176792L, -14676235751610L)
test(6344118389112076765L, 414866483823975467L, 470120246452409879L)
test(-11349L, -117L, 97L)
test(-15402750L, 30L, -513425L)
test(-1358824991029065112L, 1375729456898L, 623181770548L)
test(15724552950015L, 80638733077L, 195L)
test(53496875011846994L, -12740384618206L, -4199L)
test(4976657084923555180L, 521743856055513645L, -2105465236503908L)
test(8L, -8L, -1L)
test(619912072L, -34L, -18232708L)
test(-4326473264912647477L, -4512416881041611L, 16599101951L)
test(1570555712220296245L, -2433585405235L, -645367L)
test(17255933394228520L, -774590L, -22277506028L)
test(5693979142683511208L, 36307900L, -19149614702538L)
test(5058564788733665886L, 22169162314093L, 18534166L)
test(-1912529786602316571L, -3848931303L, 82157326906201261L)
test(-5528746562555987920L, 47326191440L, -39094697833L)
test(-2254385599727553792L, -14641542L, -3714491797523081344L)
test(7866519297L, 771303L, 10199L)
test(0L, 0L, -14806105L)
test(47573376L, 3964448L, 12L)
test(-2609453654630496L, 77505454872L, -33668L)
test(-3686637842539497440L, 14929228532112L, 6555913938L)
test(4853210716974444062L, 431219964946864070L, 1181669L)
test(-14100245424035L, 2820049084807L, -5L)
test(8725676311339308590L, -16830140929953126L, 93975690486771L)
test(2367313232506909772L, 206188838L, 1249841574949634L)
test(-8124607316971866814L, 16674057030L, 1104000290638571L)
test(6446979988520042261L, -64710169253973867L, -23079009995647L)
test(654519384576L, 31096512L, 21048L)
test(153203910181224144L, 22L, 6963814099146552L)
test(1436126772314869678L, 1260318190682L, 1123567398313107L)
test(-75661570L, -5L, 15132314L)
test(6685324216344409292L, 9099845427374L, -126975734L)
test(-7100290L, 5L, -1420058L)
test(-1940696769116022576L, -28274L, 68638918056024L)
test(-7932625013377175292L, 193355246244L, 36593871833L)
test(-7L, 1L, -7L)
test(-48355929429178192L, 66789957775108L, -724L)
test(-10521672279471L, 10521672279471L, -1L)
test(4095350372293300139L, -8747667231979L, -105906241L)
// Random power of 2 tests
// (each case appears twice with the operands swapped, checking commutativity)
test(105129441230848L, 100259248L, 1048576L)
test(105129441230848L, 1048576L, 100259248L)
test(1297036692682702848L, 72L, 18014398509481984L)
test(1297036692682702848L, 18014398509481984L, 72L)
test(-11947131749269504L, -22253267L, 536870912L)
test(-11947131749269504L, 536870912L, -22253267L)
test(8659858730206101504L, 23022568162358L, 8388608L)
test(8659858730206101504L, 8388608L, 23022568162358L)
test(207805415948288L, 1548271L, 134217728L)
test(207805415948288L, 134217728L, 1548271L)
test(0L, -55880L, -9223372036854775808L)
test(0L, -9223372036854775808L, -55880L)
test(-2199023255552L, -1L, 2199023255552L)
test(-2199023255552L, 2199023255552L, -1L)
test(851968L, 13L, 65536L)
test(851968L, 65536L, 13L)
test(-17592186044416L, -1L, 17592186044416L)
test(-17592186044416L, 17592186044416L, -1L)
test(-7000097952840548352L, 222527207082L, 4398046511104L)
test(-7000097952840548352L, 4398046511104L, 222527207082L)
test(2449958197289549824L, 34L, 72057594037927936L)
test(2449958197289549824L, 72057594037927936L, 34L)
test(-16623318531928064L, -4058427375959L, 4096L)
test(-16623318531928064L, 4096L, -4058427375959L)
test(-8006274237557899264L, -2214324316485807900L, 281474976710656L)
test(-8006274237557899264L, 281474976710656L, -2214324316485807900L)
test(-61568356188160L, -14335L, 4294967296L)
test(-61568356188160L, 4294967296L, -14335L)
test(-3101185093760L, -48456017090L, 64L)
test(-3101185093760L, 64L, -48456017090L)
test(-20250091264L, -158203838L, 128L)
test(-20250091264L, 128L, -158203838L)
test(-118778880L, -115995L, 1024L)
test(-118778880L, 1024L, -115995L)
test(-4052041596928L, -483041L, 8388608L)
test(-4052041596928L, 8388608L, -483041L)
test(2511601217189183488L, 186114971352L, 35184372088832L)
test(2511601217189183488L, 35184372088832L, 186114971352L)
test(258064524401082304L, 8064516387533822L, 32L)
test(258064524401082304L, 32L, 8064516387533822L)
test(-13379618472345600L, -816627104025L, 16384L)
test(-13379618472345600L, 16384L, -816627104025L)
test(-56908316672L, -106L, 536870912L)
test(-56908316672L, 536870912L, -106L)
test(0L, 0L, 4096L)
test(0L, 4096L, 0L)
test(-513636086841344L, -61230193L, 8388608L)
test(-513636086841344L, 8388608L, -61230193L)
test(-3894698884001169408L, 41500264L, 8796093022208L)
test(-3894698884001169408L, 8796093022208L, 41500264L)
test(1055744L, 32992L, 32L)
test(1055744L, 32L, 32992L)
test(558955633836032L, 65071L, 8589934592L)
test(558955633836032L, 8589934592L, 65071L)
test(-7994738298998226944L, 76048351L, 137438953472L)
test(-7994738298998226944L, 137438953472L, 76048351L)
test(-4L, -2L, 2L)
test(-4L, 2L, -2L)
test(-4259162954240L, -8318677645L, 512L)
test(-4259162954240L, 512L, -8318677645L)
test(2922836158163451904L, -60717806L, 2251799813685248L)
test(2922836158163451904L, 2251799813685248L, -60717806L)
test(0L, 0L, 8192L)
test(0L, 8192L, 0L)
test(5531246788608L, 164844L, 33554432L)
test(5531246788608L, 33554432L, 164844L)
test(8574853690513424384L, -678234761L, 72057594037927936L)
test(8574853690513424384L, 72057594037927936L, -678234761L)
test(-2828260565988671488L, -103657850088L, 2251799813685248L)
test(-2828260565988671488L, 2251799813685248L, -103657850088L)
test(-9223372036854775808L, 138748537820112453L, -9223372036854775808L)
test(-9223372036854775808L, -9223372036854775808L, 138748537820112453L)
test(0L, 0L, 17179869184L)
test(0L, 17179869184L, 0L)
test(5526109039206858752L, -2489041709915087415L, 2147483648L)
test(5526109039206858752L, 2147483648L, -2489041709915087415L)
test(9663676416L, 9L, 1073741824L)
test(9663676416L, 1073741824L, 9L)
test(648518346341351424L, 379085341609132041L, 72057594037927936L)
test(648518346341351424L, 72057594037927936L, 379085341609132041L)
test(-4118880446592909312L, 218625848802439L, 65536L)
test(-4118880446592909312L, 65536L, 218625848802439L)
test(-7094878839486021632L, -422887732952L, 16777216L)
test(-7094878839486021632L, 16777216L, -422887732952L)
test(20894747459584L, 77839L, 268435456L)
test(20894747459584L, 268435456L, 77839L)
test(-5624995934585749504L, -3953804003778L, 2251799813685248L)
test(-5624995934585749504L, 2251799813685248L, -3953804003778L)
test(76001593786368L, 141564L, 536870912L)
test(76001593786368L, 536870912L, 141564L)
test(5942088864628736L, 11068003L, 536870912L)
test(5942088864628736L, 536870912L, 11068003L)
test(-417996242432L, -816398911L, 512L)
test(-417996242432L, 512L, -816398911L)
test(10122752L, 79084L, 128L)
test(10122752L, 128L, 79084L)
test(712964571136L, 166L, 4294967296L)
test(712964571136L, 4294967296L, 166L)
test(-3013501831155286016L, -120218862620908531L, 4294967296L)
test(-3013501831155286016L, 4294967296L, -120218862620908531L)
}
@Test def divide_/(): Unit = {
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x / y)
assertEquals(expected, hideFromOptimizer(x) / y)
assertEquals(expected, x / hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) / hideFromOptimizer(y))
}
test(IntMaxValPlus1, IntMinVal, lg(-1))
test(lg(-1), IntMinVal, IntMaxValPlus1)
test(IntMinVal, IntMaxValPlus1, lg(-1))
test(lg(-1), IntMaxValPlus1, IntMinVal)
test(lg(1, -2147483648), MaxVal, lg(-1))
test(MinVal, MinVal, lg(1))
test(MinVal, MinVal, lg(-1))
// int32, int32
test(lg(1, 0), lg(-10426835, -1), lg(-6243356, -1))
test(lg(-291, -1), lg(49659080, 0), lg(-170373, -1))
test(lg(3, 0), lg(97420, 0), lg(27521, 0))
test(lg(26998, 0), lg(-9881291, -1), lg(-366, -1))
test(lg(0, 0), lg(-40, -1), lg(81, 0))
test(lg(0, 0), lg(-6007, -1), lg(-326806, -1))
test(lg(-1, -1), lg(202, 0), lg(-112, -1))
test(lg(0, 0), lg(0, 0), lg(47, 0))
test(lg(323816, 0), lg(22667160, 0), lg(70, 0))
test(lg(0, 0), lg(254, 0), lg(-307349204, -1))
test(lg(0, 0), lg(-17, -1), lg(-44648, -1))
test(lg(-40, -1), lg(39646, 0), lg(-976, -1))
test(lg(0, 0), lg(9, 0), lg(315779722, 0))
test(lg(0, 0), lg(-2674, -1), lg(-3051991, -1))
test(lg(0, 0), lg(-37697, -1), lg(2015928, 0))
test(lg(0, 0), lg(-13, -1), lg(-31, -1))
test(lg(0, 0), lg(6, 0), lg(-334, -1))
test(lg(8, 0), lg(-15989, -1), lg(-1918, -1))
test(lg(8746, 0), lg(-113261535, -1), lg(-12950, -1))
test(lg(55322, 0), lg(-6362112, -1), lg(-115, -1))
test(lg(0, 0), lg(455, 0), lg(13919, 0))
test(lg(36190, 0), lg(293468259, 0), lg(8109, 0))
test(lg(1, 0), lg(-48287007, -1), lg(-27531186, -1))
test(lg(349634, 0), lg(1048904, 0), lg(3, 0))
test(lg(0, 0), lg(-34, -1), lg(3949717, 0))
test(lg(-1, -1), lg(1449, 0), lg(-983, -1))
test(lg(-18537151, -1), lg(18537151, 0), lg(-1, -1))
test(lg(0, 0), lg(14037, 0), lg(23645, 0))
test(lg(-4, -1), lg(1785, 0), lg(-398, -1))
test(lg(0, 0), lg(346, 0), lg(2198158, 0))
test(lg(-802, -1), lg(-3517419, -1), lg(4381, 0))
test(lg(-6, -1), lg(6, 0), lg(-1, -1))
test(lg(39, 0), lg(-822, -1), lg(-21, -1))
test(lg(0, 0), lg(3629, 0), lg(282734, 0))
test(lg(-92367, -1), lg(-278856469, -1), lg(3019, 0))
test(lg(0, 0), lg(-13, -1), lg(37, 0))
test(lg(0, 0), lg(-4, -1), lg(47150459, 0))
test(lg(0, 0), lg(-26, -1), lg(-210691, -1))
test(lg(0, 0), lg(-21294, -1), lg(156839456, 0))
test(lg(0, 0), lg(-5, -1), lg(-25644, -1))
test(lg(0, 0), lg(-1009, -1), lg(28100, 0))
test(lg(-857, -1), lg(16282815, 0), lg(-18989, -1))
test(lg(-7, -1), lg(-2201086, -1), lg(276963, 0))
test(lg(-300, -1), lg(11412578, 0), lg(-37989, -1))
test(lg(0, 0), lg(8406900, 0), lg(239727371, 0))
test(lg(0, 0), lg(-1, -1), lg(-479069, -1))
test(lg(0, 0), lg(4, 0), lg(-21776, -1))
test(lg(-16812960, -1), lg(-16812960, -1), lg(1, 0))
test(lg(0, 0), lg(10873, 0), lg(57145, 0))
test(lg(0, 0), lg(-1, -1), lg(-7, -1))
// int32, int53
test(lg(0, 0), lg(-6975858, -1), lg(42227636, 14))
test(lg(0, 0), lg(-1, -1), lg(370644892, 82735))
test(lg(0, 0), lg(43, 0), lg(-1602218381, 49))
test(lg(0, 0), lg(4063968, 0), lg(973173538, 23810))
test(lg(0, 0), lg(-388987094, -1), lg(-241988155, 1723))
test(lg(0, 0), lg(5939808, 0), lg(-1882484681, 12))
test(lg(0, 0), lg(7, 0), lg(-385609304, 1342))
test(lg(0, 0), lg(-1175803932, -1), lg(297649103, 2408))
test(lg(0, 0), lg(464610492, 0), lg(829919518, 2777))
test(lg(0, 0), lg(214483, 0), lg(1502817270, 8078))
// int32, big
test(lg(0, 0), lg(211494165, 0), lg(1365318534, 14804989))
test(lg(0, 0), lg(5353, 0), lg(-1032992082, -394605386))
test(lg(0, 0), lg(2926, 0), lg(26982087, -226814570))
test(lg(0, 0), lg(-6, -1), lg(-1339229562, -580578613))
test(lg(0, 0), lg(-8, -1), lg(-108570365, 4920615))
test(lg(0, 0), lg(-585878041, -1), lg(551925027, -1296114209))
test(lg(0, 0), lg(-4, -1), lg(474545806, 64068407))
test(lg(0, 0), lg(34, 0), lg(-137127086, -18652281))
test(lg(0, 0), lg(785315, 0), lg(-881374655, 29722835))
test(lg(0, 0), lg(713146, 0), lg(1442548271, 2727525))
// int53, int32
test(lg(-578207, -1), lg(397755625, 53271), lg(-395701427, -1))
test(lg(-560062154, 0), lg(-1680186460, 2), lg(3, 0))
test(lg(-926675094, 18), lg(1514942014, 56), lg(3, 0))
test(lg(-162400270, -1), lg(713597492, 1154), lg(-30524, -1))
test(lg(-9, -1), lg(2028377478, 1), lg(-691707459, -1))
test(lg(135006, 0), lg(1387175556, 73), lg(2332622, 0))
test(lg(-200274428, -13), lg(1756997282, 1397), lg(-116, -1))
test(lg(1125157, 0), lg(-1655346723, 0), lg(2346, 0))
test(lg(997096, 0), lg(198249458, 5686), lg(24492497, 0))
test(lg(1369365326, -302), lg(873090497, 11162), lg(-37, -1))
test(lg(-2166511, -1), lg(360057887, 3519), lg(-6976354, -1))
test(lg(1680790298, -2), lg(1115898639, 48), lg(-30, -1))
test(lg(92036331, 1), lg(154624251, 955), lg(935, 0))
test(lg(23215066, 0), lg(806830498, 1063), lg(196698, 0))
test(lg(-13221428, -1), lg(-220365267, 21359), lg(-6938757, -1))
test(lg(-973041595, -2009), lg(759822848, 648657), lg(-323, -1))
test(lg(171873494, 1659), lg(-1180673754, 486098), lg(293, 0))
test(lg(1583541189, 785), lg(1387172319, 769661), lg(980, 0))
test(lg(-917576, -1), lg(-305851327, 2), lg(-13709, -1))
test(lg(456092, 0), lg(577374631, 17), lg(161353, 0))
test(lg(404991630, 376), lg(809983260, 752), lg(2, 0))
test(lg(495082175, 39), lg(495082175, 39), lg(1, 0))
test(lg(90893135, 0), lg(1455620681, 30929), lg(1461502, 0))
test(lg(799104733, 0), lg(1388707384, 34362), lg(184688, 0))
test(lg(1094556328, -70011), lg(2105854641, 140021), lg(-2, -1))
test(lg(-1819673734, 1), lg(1310105355, 427420), lg(271150, 0))
test(lg(-119338773, -6), lg(-236557650, 35455), lg(-7052, -1))
test(lg(32825, 0), lg(-1127581476, 0), lg(96492, 0))
test(lg(-57018115, -1), lg(2004387480, 7243), lg(-545624, -1))
test(lg(-5950946, -1), lg(381447319, 2213), lg(-1597249, -1))
test(lg(-811421531, -4249), lg(-1860702702, 12744), lg(-3, -1))
test(lg(4741011, 0), lg(-548164065, 6487), lg(5877480, 0))
test(lg(-1064193809, 45), lg(-476290317, 131491), lg(2874, 0))
test(lg(228327608, 0), lg(499912484, 1), lg(21, 0))
test(lg(99111506, 0), lg(-1509435894, 8467), lg(366943, 0))
test(lg(-1209485521, -1), lg(-1580093356, 5), lg(-20, -1))
test(lg(-319956618, -1), lg(1299112295, 55074), lg(-739295, -1))
test(lg(-62197, -1), lg(-1405948570, 43), lg(-3015755, -1))
test(lg(9087, 0), lg(1405130313, 57), lg(27093454, 0))
test(lg(345582531, 0), lg(-1804200888, 1989226), lg(24722497, 0))
test(lg(-1424974, -1), lg(-1642507127, 886), lg(-2672324, -1))
test(lg(1991351, 0), lg(-1276796892, 35), lg(77004, 0))
test(lg(1193137, 0), lg(-1200759296, 816), lg(2939970, 0))
test(lg(573585390, 0), lg(399171813, 123795), lg(926969, 0))
test(lg(1683063904, -942), lg(1649267984, 229752), lg(-244, -1))
test(lg(-6019138, -1), lg(-387146187, 7364), lg(-5255245, -1))
test(lg(-123416174, 28), lg(149703916, 19121), lg(660, 0))
test(lg(-40732946, -1), lg(-1582312743, 7920), lg(-835168, -1))
test(lg(715821610, 298), lg(1431643220, 596), lg(2, 0))
test(lg(-570078780, -1), lg(-1717918737, 8458), lg(-63727, -1))
// int53, int53
test(lg(1, 0), lg(-1232398900, 28871), lg(13989713, 22345))
test(lg(0, 0), lg(-916994839, 12266), lg(1713571419, 15301))
test(lg(32, 0), lg(1133414946, 229), lg(256531666, 7))
test(lg(368, 0), lg(134792921, 3907), lg(-1656790262, 10))
test(lg(1, 0), lg(1532393452, 52260), lg(-701373106, 31864))
test(lg(0, 0), lg(193990135, 1460), lg(867607428, 6918))
test(lg(0, 0), lg(867672590, 1), lg(-1315044816, 987593))
test(lg(0, 0), lg(-978844610, 2), lg(720710523, 209))
test(lg(0, 0), lg(-297570329, 1), lg(-2127979750, 195738))
test(lg(0, 0), lg(-1035330427, 5), lg(-2091513925, 70))
test(lg(0, 0), lg(1037142987, 15), lg(-485498951, 30819))
test(lg(0, 0), lg(744551901, 15), lg(-604684037, 1587))
test(lg(67766, 0), lg(1341710951, 232724), lg(1864827988, 3))
test(lg(694, 0), lg(-409318148, 157818), lg(517165426, 227))
test(lg(1, 0), lg(1908192460, 110512), lg(-61974596, 95795))
test(lg(0, 0), lg(946490654, 498), lg(-1889366637, 1163))
test(lg(12, 0), lg(1765257877, 34422), lg(728455544, 2851))
test(lg(0, 0), lg(-1725136864, 84), lg(1122821677, 14720))
test(lg(1, 0), lg(1854803780, 2), lg(-302860117, 1))
test(lg(131, 0), lg(380756581, 107), lg(-806772264, 0))
test(lg(0, 0), lg(1868292481, 1134), lg(691774521, 33775))
test(lg(0, 0), lg(-1515810361, 98), lg(2038289788, 198))
test(lg(315, 0), lg(-1943767475, 31777), lg(-1513506636, 100))
test(lg(0, 0), lg(1508904915, 18), lg(1834666309, 976))
test(lg(1, 0), lg(1430753947, 3772), lg(-1853122145, 3615))
test(lg(2340149, 0), lg(-1654852151, 1195820), lg(-2100231332, 0))
test(lg(0, 0), lg(1011710080, 18), lg(-616681449, 57))
test(lg(14, 0), lg(-495370429, 356832), lg(-34555439, 25233))
test(lg(131, 0), lg(744211838, 511), lg(-475809581, 3))
test(lg(0, 0), lg(1135128265, 67), lg(163864249, 972))
test(lg(1, 0), lg(954856869, 5120), lg(1474096435, 3606))
test(lg(0, 0), lg(1544045220, 1), lg(85376495, 2353))
test(lg(8, 0), lg(1367437144, 53), lg(2010850631, 6))
test(lg(0, 0), lg(-1398730804, 13), lg(-2055007528, 52))
test(lg(0, 0), lg(1598156017, 13), lg(-1006929331, 160))
test(lg(0, 0), lg(738323529, 41), lg(-1508093984, 10361))
test(lg(0, 0), lg(-1788797806, 31), lg(588557582, 575930))
test(lg(76, 0), lg(-913009845, 1002), lg(204577043, 13))
test(lg(0, 0), lg(1908599465, 6), lg(1058868127, 3383))
test(lg(0, 0), lg(-634312634, 75), lg(-850292534, 332928))
test(lg(0, 0), lg(-1679695022, 148), lg(-1395453213, 912))
test(lg(0, 0), lg(456310936, 71), lg(487720864, 1590813))
test(lg(0, 0), lg(-1724925398, 0), lg(-273170277, 38))
test(lg(0, 0), lg(-6742076, 15), lg(192793866, 175))
test(lg(50, 0), lg(337939061, 2094205), lg(880147944, 41142))
test(lg(0, 0), lg(-998413092, 0), lg(-1758700885, 29))
test(lg(0, 0), lg(1986052307, 3), lg(-2092246422, 47))
test(lg(0, 0), lg(-109615093, 1), lg(-2066395387, 20016))
test(lg(127, 0), lg(-1147373454, 901), lg(313439710, 7))
test(lg(0, 0), lg(-792716629, 66379), lg(2017337246, 250513))
// int53, big
test(lg(0, 0), lg(291278707, 13808), lg(941639833, -14430466))
test(lg(0, 0), lg(-857819626, 204588), lg(-1909684886, -709519130))
test(lg(0, 0), lg(-978105991, 7435), lg(-306472275, 158306339))
test(lg(0, 0), lg(75049741, 248171), lg(-1574105194, 64879257))
test(lg(0, 0), lg(136051120, 621), lg(-1671784392, 102642869))
test(lg(0, 0), lg(-448460356, 2858), lg(71740423, -16715717))
test(lg(0, 0), lg(-1266403435, 2), lg(-1022999838, 25812014))
test(lg(0, 0), lg(552733494, 22), lg(241731505, -33191170))
test(lg(0, 0), lg(1366167794, 115591), lg(191854687, -2136953))
test(lg(0, 0), lg(1329114439, 80951), lg(-51187101, 1471052997))
// big, int32
test(lg(422668131, 6), lg(-1495113094, 168518701), lg(27633219, 0))
test(lg(932715295, 204683), lg(-1211847018, -609137255), lg(-2976, -1))
test(lg(189814434, 0), lg(-457166837, -15040808), lg(-340331202, -1))
test(lg(-1116045071, -1131771), lg(-104570473, -117704108), lg(104, 0))
test(lg(-784306379, 14408), lg(453828098, -10187034), lg(-707, -1))
test(lg(-284027201, 2002401), lg(1911518920, 168201762), lg(84, 0))
test(lg(-862273257, -2), lg(610589058, 36481453), lg(-30381877, -1))
test(lg(-761280647, -71), lg(410700182, 503953004), lg(-7181145, -1))
test(lg(-1212582262, -2538), lg(194917334, -8806907), lg(3471, 0))
test(lg(-1201233065, 4), lg(852311155, 9671380), lg(2048884, 0))
test(lg(1324107666, 0), lg(-1028681544, 4163983), lg(13506586, 0))
test(lg(-354367044, 6361111), lg(-708734088, 12722223), lg(2, 0))
test(lg(-292170842, -76359), lg(1693696214, 18402294), lg(-241, -1))
test(lg(2104544550, -41349584), lg(-1932788158, 206747917), lg(-5, -1))
test(lg(-1928473941, -17816), lg(1427262980, -60732866), lg(3409, 0))
test(lg(-1929237164, -681), lg(-677896940, 2512898), lg(-3693, -1))
test(lg(1550060300, -35), lg(-926729663, -9677195), lg(279372, 0))
test(lg(-1706875941, 0), lg(-405257725, -2271799), lg(-3770075, -1))
test(lg(1540708852, 10909), lg(-1893733008, -6491069), lg(-595, -1))
test(lg(-1563665409, -358), lg(-1343018634, -2584815), lg(7233, 0))
test(lg(278715917, -374389), lg(-1224507547, 122799570), lg(-328, -1))
test(lg(1421525100, 0), lg(-2082712791, -15998594), lg(-48337828, -1))
test(lg(1574832373, -2193811), lg(-2147318181, -32907160), lg(15, 0))
test(lg(-1260116915, -61610), lg(1074158039, 118905936), lg(-1930, -1))
test(lg(130856059, -15612), lg(1270835097, -2201288), lg(141, 0))
test(lg(-110248455, 2347), lg(320077861, -446108079), lg(-189997, -1))
test(lg(-1659387265, 122), lg(1075676628, 54005547), lg(440453, 0))
test(lg(-144903831, 18), lg(-1800001035, 54578889), lg(2877683, 0))
test(lg(-1312994937, -23952), lg(-654120591, 33364168), lg(-1393, -1))
test(lg(-178073210, -1), lg(302695822, -2432394), lg(58667176, 0))
test(lg(1316938460, 142), lg(523451067, -54366538), lg(-382038, -1))
test(lg(-1457978633, 17556853), lg(-78968601, 52670560), lg(3, 0))
test(lg(-1760960552, 505129611), lg(-773046192, -1010259224), lg(-2, -1))
test(lg(1210355204, 2314), lg(1515488136, -21874592), lg(-9452, -1))
test(lg(-1625685934, 862807773), lg(-1043595428, -1725615548), lg(-2, -1))
test(lg(184379181, 4), lg(-1217231978, 1516494005), lg(375097846, 0))
test(lg(1243945230, 0), lg(-1873413508, -236381131), lg(-816152673, -1))
test(lg(-1540093941, -876), lg(265593875, 26513736), lg(-30289, -1))
test(lg(-1304692919, 543912), lg(106204837, -839801203), lg(-1544, -1))
test(lg(-806250591, 23), lg(815576040, -55524975), lg(-2331779, -1))
test(lg(-2106907248, -3), lg(-2053929476, -1795047022), lg(720742474, 0))
test(lg(893100234, -124), lg(1552099699, 65024502), lg(-525272, -1))
test(lg(-1109915706, 1255), lg(-194253417, -12405472), lg(-9879, -1))
test(lg(-1177955013, 0), lg(412309016, 112344162), lg(154800321, 0))
test(lg(-1975688052, -51023804), lg(343591192, -102047607), lg(2, 0))
test(lg(-728332094, -309956), lg(1756765281, 8058834), lg(-26, -1))
test(lg(10173004, 1227), lg(1762668787, -960735493), lg(-782994, -1))
test(lg(1157067129, 5766), lg(1523935530, -109345767), lg(-18963, -1))
test(lg(1226263794, 42306948), lg(-1256703941, 1438436241), lg(34, 0))
test(lg(1502167534, -439314), lg(-444491016, -6150392), lg(14, 0))
// big, int53
test(lg(88399, 0), lg(-1883357942, 360257606), lg(1478768728, 4075))
test(lg(-45459, -1), lg(-1991900757, -48856999), lg(-1087694619, 1074))
test(lg(4395497, 0), lg(518426119, 218946975), lg(-808940852, 49))
test(lg(3198134, 0), lg(-946567777, 600381050), lg(-1165957306, 187))
test(lg(470, 0), lg(257885254, 845979705), lg(792779187, 1798424))
test(lg(92, 0), lg(1278680372, 6485140), lg(1376461023, 70263))
test(lg(167728, 0), lg(1445602310, 420550818), lg(1397186900, 2507))
test(lg(25700177, 0), lg(1822058703, 522114268), lg(1355449555, 20))
test(lg(-35822646, -1), lg(532749659, -130990067), lg(-1474774415, 3))
test(lg(-348, -1), lg(1329707986, -2121642), lg(-63366094, 6086))
test(lg(-2179, -1), lg(1028585430, -118524228), lg(1655878874, 54392))
test(lg(1187, 0), lg(203502475, 42252914), lg(36519512, 35581))
test(lg(3223, 0), lg(341088508, 35053507), lg(917391400, 10874))
test(lg(23608500, 0), lg(1454135412, 69933847), lg(-162213744, 2))
test(lg(7286803, 0), lg(1674604578, 10565585), lg(1932570831, 1))
test(lg(-137450, -1), lg(-1910257093, -16610962), lg(-640594227, 120))
test(lg(114592, 0), lg(1080864951, 17606069), lg(-1542196664, 153))
test(lg(61, 0), lg(-1419644278, 13937517), lg(-919779905, 227700))
test(lg(-247360, -1), lg(-1958380469, -855713410), lg(1631833189, 3459))
test(lg(-61725, -1), lg(1951473618, -4122677), lg(-899615165, 66))
test(lg(2226, 0), lg(1521276132, 182952467), lg(346742782, 82171))
test(lg(-997, -1), lg(-1003647481, -7808320), lg(-228453385, 7826))
test(lg(36, 0), lg(-875689390, 4467236), lg(-590010750, 120938))
test(lg(56005, 0), lg(1189085620, 611543209), lg(1619962756, 10919))
test(lg(-90057, -1), lg(-1072173311, -18503031), lg(1971480267, 205))
test(lg(-9, -1), lg(767303802, -3407362), lg(-339044225, 352939))
test(lg(62240, 0), lg(427996893, 482974074), lg(-736462105, 7759))
test(lg(-1774, -1), lg(842450255, -4396651), lg(859272322, 2477))
test(lg(-153400, -1), lg(1640433988, -2618618), lg(302672196, 17))
test(lg(2145, 0), lg(-361322518, 63967358), lg(-1922353888, 29810))
test(lg(106042, 0), lg(-1774479550, 43276853), lg(472456506, 408))
test(lg(-381407, -1), lg(-1756338345, -38928780), lg(283612141, 102))
test(lg(1217514, 0), lg(-495049835, 37161263), lg(-2052025512, 30))
test(lg(-17, -1), lg(1606509747, -10876159), lg(1068727249, 635715))
test(lg(4880327, 0), lg(-1857686692, 1918485655), lg(454913535, 393))
test(lg(-1023070, -1), lg(-502107392, -511268482), lg(-1118977400, 499))
test(lg(439, 0), lg(-909192131, 45216813), lg(1442986382, 102923))
test(lg(2171202, 0), lg(259184089, 14858724), lg(-671961291, 6))
test(lg(-5332527, -1), lg(1737846340, -614952982), lg(1379175047, 115))
test(lg(-435180, -1), lg(-406629212, -528407898), lg(973577032, 1214))
test(lg(27837, 0), lg(-597461306, 538945619), lg(-1867966522, 19360))
test(lg(-396, -1), lg(-1906945200, -371170760), lg(151858506, 936902))
test(lg(-115583279, -1), lg(-1366510, -207691415), lg(-872314548, 1))
test(lg(-6783543, -1), lg(-1280665444, -104856505), lg(1964875665, 15))
test(lg(-1464006069, -1), lg(897601097, -1352132581), lg(-328204224, 0))
test(lg(11599107, 0), lg(-496529216, 32992512), lg(-668292521, 2))
test(lg(842, 0), lg(1819966537, 311969505), lg(-879441284, 370147))
test(lg(43514, 0), lg(433235702, 408255734), lg(573404298, 9382))
test(lg(-230, -1), lg(1693350453, -4127304), lg(-1671879801, 17931))
test(lg(249094, 0), lg(-492682302, 64433722), lg(-1408841594, 258))
// big, big
test(lg(-10, -1), lg(1450795502, -706709103), lg(742056886, 64843937))
test(lg(0, 0), lg(-392893244, 72026637), lg(1419676270, 875736789))
test(lg(-2, -1), lg(-1861146463, 8382761), lg(-724412724, -3000735))
test(lg(0, 0), lg(1373482238, 23344691), lg(1835527248, -294342355))
test(lg(-37, -1), lg(1956796392, 107480459), lg(-560958184, -2839471))
test(lg(3, 0), lg(422228275, 30436377), lg(-2023395425, 8226201))
test(lg(-3, -1), lg(1747624836, -215352612), lg(-1349940168, 58723974))
test(lg(2, 0), lg(-583006891, 16111063), lg(1853686630, 5479773))
test(lg(0, 0), lg(1498104050, 7322401), lg(-407388940, 2141575618))
test(lg(5, 0), lg(1943726712, 869895175), lg(-627430826, 169278540))
test(lg(0, 0), lg(1872895982, 98966340), lg(1347573135, 529034148))
test(lg(-2, -1), lg(16010610, 187913494), lg(-848952152, -81951424))
test(lg(0, 0), lg(830929771, -4393252), lg(1829525088, 52659897))
test(lg(22, 0), lg(-2093526384, 133319293), lg(-464927151, 6049576))
test(lg(0, 0), lg(1056318793, 13467735), lg(1970348162, -672507521))
test(lg(0, 0), lg(-28853693, -169722715), lg(-83877421, 770900857))
test(lg(-27, -1), lg(1743854071, -302158995), lg(80117835, 11113120))
test(lg(-6, -1), lg(635796581, -146765250), lg(441664676, 23716738))
test(lg(0, 0), lg(-1048312948, -37662905), lg(1319664078, 208772026))
test(lg(0, 0), lg(-784292680, -14102823), lg(2037268040, 744987722))
test(lg(176, 0), lg(-1116104092, -2073525743), lg(1766685765, -11731135))
test(lg(0, 0), lg(-1991687284, 19448294), lg(-1731357606, -202272807))
test(lg(6, 0), lg(-2042068328, -52956481), lg(370482897, -7759903))
test(lg(1, 0), lg(334395247, 1906338595), lg(342095090, 1248830168))
test(lg(0, 0), lg(-309616588, 44123460), lg(2040055580, -476494291))
test(lg(0, 0), lg(137178123, 36336421), lg(-360221107, -515689970))
test(lg(0, 0), lg(-422856762, -16760844), lg(-334268074, -43984484))
test(lg(0, 0), lg(-24820293, 25823996), lg(390711705, 288223876))
test(lg(0, 0), lg(1170265006, 2998984), lg(-134995170, -2123267074))
test(lg(0, 0), lg(-1501380980, -6088910), lg(-1175861016, -56027408))
test(lg(-56, -1), lg(307880183, 196786483), lg(-1107761890, -3480429))
test(lg(0, 0), lg(-588606997, -37732967), lg(-1124435958, -77404915))
test(lg(108, 0), lg(90560661, 990295925), lg(731139348, 9165999))
test(lg(0, 0), lg(46312609, -28251908), lg(1279863155, -519028300))
test(lg(0, 0), lg(1123427761, 55212863), lg(-1081219733, 233090714))
test(lg(0, 0), lg(1447869812, -3646400), lg(-1237950546, -27122943))
test(lg(-13, -1), lg(-1399920635, 110072031), lg(-398678056, -8069387))
test(lg(0, 0), lg(513704441, 14319377), lg(-796719013, 260081997))
test(lg(8, 0), lg(166886349, -190148673), lg(68245235, -21656365))
test(lg(0, 0), lg(-1594024534, -144937584), lg(177399758, 200473672))
test(lg(-1, -1), lg(447753993, -23591908), lg(1399162166, 12505918))
test(lg(0, 0), lg(1500283330, 5361180), lg(348398676, 156400271))
test(lg(-1, -1), lg(-216115001, 670826068), lg(1759253954, -470062110))
test(lg(0, 0), lg(-1251659767, 18831569), lg(-669341445, -34474821))
test(lg(31, 0), lg(817032953, 218701872), lg(-176557210, 6899121))
test(lg(-19, -1), lg(1365998269, 613319842), lg(319204438, -30758748))
test(lg(0, 0), lg(-428500325, 6610536), lg(-46648893, -105360271))
test(lg(0, 0), lg(784528299, -6958267), lg(1370662827, -774132635))
test(lg(-2, -1), lg(-769114167, 137614183), lg(-929091402, -67103082))
test(lg(8, 0), lg(1810734914, 124115952), lg(1149563530, 15197570))
}
@Test def divisionByZero(): Unit = {
// The same division is exercised through three code paths: written
// literally at the call site, through a method the optimizer may not
// inline, and through one it may. All must throw ArithmeticException.
@noinline def uninlinedDiv(a: Long, b: Long): Long = a / b
@inline def inlinedDiv(a: Long, b: Long): Long = a / b
@inline def check(dividend: Long): Unit = {
assertThrows(classOf[ArithmeticException], dividend / 0L)
assertThrows(classOf[ArithmeticException], uninlinedDiv(dividend, 0L))
assertThrows(classOf[ArithmeticException], inlinedDiv(dividend, 0L))
}
check(0L)
check(1L)
check(43L)
check(-3L)
// This expression can be constant-folded by scalac itself.
assertThrows(classOf[ArithmeticException], 5L / 0L)
}
@Test def modulo_%(): Unit = {
// Checks `x % y` against a precomputed expected value. Each case is
// evaluated four times: with both operands visible to the optimizer,
// and with either or both hidden, so that compile-time and runtime
// lowering paths of Long remainder are all exercised.
@inline def test(expected: Long, x: Long, y: Long): Unit = {
assertEquals(expected, x % y)
assertEquals(expected, hideFromOptimizer(x) % y)
assertEquals(expected, x % hideFromOptimizer(y))
assertEquals(expected, hideFromOptimizer(x) % hideFromOptimizer(y))
}
// Boundary values around Int.MinValue/Int.MaxValue and Long.MinValue/MaxValue.
test(lg(0), IntMinVal, lg(-1))
test(lg(0), IntMinVal, IntMaxValPlus1)
test(lg(0), IntMaxValPlus1, lg(-1))
test(lg(0), IntMaxValPlus1, IntMinVal)
test(lg(0), MaxVal, lg(-1))
test(lg(0), MinVal, lg(1))
test(lg(0), MinVal, lg(-1))
test(lg(-1, 2147483647), MaxVal, MinVal)
test(lg(0), MaxVal, MaxVal)
test(lg(0), MinVal, MinVal)
test(lg(-1), MinVal, MaxVal)
// Section labels below classify operand magnitudes (dividend, divisor):
// values fitting in 32 bits, in 53 bits (exact in a JS double), or "big".
// int32, int32
test(lg(880, 0), lg(880, 0), lg(-219594, -1))
test(lg(-27, -1), lg(-49125, -1), lg(98, 0))
test(lg(-1194, -1), lg(-1922504, -1), lg(4195, 0))
test(lg(3, 0), lg(3, 0), lg(7963, 0))
test(lg(-626, -1), lg(-626, -1), lg(-484628621, -1))
test(lg(11315, 0), lg(11315, 0), lg(-3914076, -1))
test(lg(26241, 0), lg(15712341, 0), lg(-1045740, -1))
test(lg(-507, -1), lg(-855439, -1), lg(5213, 0))
test(lg(-259, -1), lg(-101026259, -1), lg(-500, -1))
test(lg(27720977, 0), lg(27720977, 0), lg(-42317657, -1))
test(lg(1, 0), lg(25954, 0), lg(-3, -1))
test(lg(6724180, 0), lg(338447650, 0), lg(-8505730, -1))
test(lg(10488, 0), lg(23967, 0), lg(-13479, -1))
test(lg(1, 0), lg(885202, 0), lg(-3, -1))
test(lg(0, 0), lg(692795590, 0), lg(-10, -1))
test(lg(-1, -1), lg(-1, -1), lg(156, 0))
test(lg(388, 0), lg(388, 0), lg(189523294, 0))
test(lg(352, 0), lg(352, 0), lg(-3257, -1))
test(lg(-9, -1), lg(-9, -1), lg(14653, 0))
test(lg(-1, -1), lg(-258745, -1), lg(8, 0))
test(lg(-21023, -1), lg(-206976653, -1), lg(34321, 0))
test(lg(-1, -1), lg(-1, -1), lg(-971, -1))
test(lg(59, 0), lg(59, 0), lg(388, 0))
test(lg(0, 0), lg(-7, -1), lg(1, 0))
test(lg(12, 0), lg(77, 0), lg(13, 0))
test(lg(224246, 0), lg(224246, 0), lg(719055, 0))
test(lg(-61296, -1), lg(-61296, -1), lg(-135723660, -1))
test(lg(549465, 0), lg(6897809, 0), lg(793543, 0))
test(lg(45, 0), lg(45, 0), lg(984210147, 0))
test(lg(0, 0), lg(-64, -1), lg(1, 0))
test(lg(2, 0), lg(379611734, 0), lg(4, 0))
test(lg(0, 0), lg(0, 0), lg(-263, -1))
test(lg(29, 0), lg(29, 0), lg(-117, -1))
test(lg(24, 0), lg(245094, 0), lg(-70, -1))
test(lg(0, 0), lg(0, 0), lg(5, 0))
test(lg(2, 0), lg(2, 0), lg(47787927, 0))
test(lg(-124, -1), lg(-124, -1), lg(-22714040, -1))
test(lg(412, 0), lg(412, 0), lg(-17176, -1))
test(lg(-11860, -1), lg(-11860, -1), lg(9506787, 0))
test(lg(-31, -1), lg(-31, -1), lg(-1544676, -1))
test(lg(-3, -1), lg(-1990315281, -1), lg(-7, -1))
test(lg(99, 0), lg(99, 0), lg(-277, -1))
test(lg(-86, -1), lg(-29227, -1), lg(-161, -1))
test(lg(106, 0), lg(106, 0), lg(-47032956, -1))
test(lg(18, 0), lg(18, 0), lg(510836179, 0))
test(lg(2, 0), lg(3543112, 0), lg(10, 0))
test(lg(534271, 0), lg(3547603, 0), lg(-1506666, -1))
test(lg(-16361, -1), lg(-16361, -1), lg(10637613, 0))
test(lg(8, 0), lg(606879016, 0), lg(-16, -1))
test(lg(-1, -1), lg(-1, -1), lg(46424570, 0))
// int32, int53
test(lg(-3, -1), lg(-3, -1), lg(206801065, 1))
test(lg(-57756, -1), lg(-57756, -1), lg(-1211050362, 13))
test(lg(0, 0), lg(0, 0), lg(-475702596, 10040))
test(lg(423524, 0), lg(423524, 0), lg(-2084961556, 16))
test(lg(38317, 0), lg(38317, 0), lg(-1699004544, 24))
test(lg(60291, 0), lg(60291, 0), lg(-458289291, 56))
test(lg(1, 0), lg(1, 0), lg(-1247681936, 1229953))
test(lg(296788, 0), lg(296788, 0), lg(183245860, 52))
test(lg(-2005515, -1), lg(-2005515, -1), lg(331735459, 17))
test(lg(-179812, -1), lg(-179812, -1), lg(-853047550, 5154))
test(lg(-3678, -1), lg(-3678, -1), lg(1751271067, 243605))
test(lg(-93867, -1), lg(-93867, -1), lg(-1925367590, 42))
test(lg(7600917, 0), lg(7600917, 0), lg(-1807424604, 95574))
test(lg(300012, 0), lg(300012, 0), lg(1951216728, 101))
test(lg(-6347, -1), lg(-6347, -1), lg(-438713154, 23))
test(lg(-41, -1), lg(-41, -1), lg(-1211982116, 459))
test(lg(3425, 0), lg(3425, 0), lg(-1580976156, 2))
test(lg(-25, -1), lg(-25, -1), lg(200240265, 25993))
test(lg(-8303, -1), lg(-8303, -1), lg(1353761386, 1921))
test(lg(274032571, 0), lg(274032571, 0), lg(1455543028, 255))
test(lg(-3, -1), lg(-3, -1), lg(1143775281, 729))
test(lg(-1124428, -1), lg(-1124428, -1), lg(-521284400, 339))
test(lg(-2, -1), lg(-2, -1), lg(-303859962, 2524))
test(lg(1, 0), lg(1, 0), lg(-402000545, 1))
test(lg(107013504, 0), lg(107013504, 0), lg(157604607, 3))
test(lg(4976822, 0), lg(4976822, 0), lg(-2046021074, 2230))
test(lg(-1, -1), lg(-1, -1), lg(-306200858, 41))
test(lg(80396, 0), lg(80396, 0), lg(-409002766, 13))
test(lg(937638, 0), lg(937638, 0), lg(-697219650, 26))
test(lg(756, 0), lg(756, 0), lg(-948806692, 1700920))
test(lg(5, 0), lg(5, 0), lg(646021801, 21350))
test(lg(262831839, 0), lg(262831839, 0), lg(1086270794, 10633))
test(lg(-2146273993, -1), lg(-2146273993, -1), lg(-1539129401, 0))
test(lg(59799, 0), lg(59799, 0), lg(1910837623, 102082))
test(lg(-5347, -1), lg(-5347, -1), lg(1965292799, 18))
test(lg(926, 0), lg(926, 0), lg(1939309159, 104206))
test(lg(1, 0), lg(1, 0), lg(1651864405, 1233))
test(lg(334, 0), lg(334, 0), lg(581635234, 20))
test(lg(-61747, -1), lg(-61747, -1), lg(-842193425, 1497))
test(lg(-1, -1), lg(-1, -1), lg(758739794, 79508))
test(lg(59605313, 0), lg(59605313, 0), lg(-1162319751, 0))
test(lg(12267518, 0), lg(12267518, 0), lg(1340161110, 568352))
test(lg(19230695, 0), lg(19230695, 0), lg(1844291137, 21))
test(lg(3950296, 0), lg(3950296, 0), lg(-848670202, 243))
test(lg(503276, 0), lg(503276, 0), lg(-1756374670, 1))
test(lg(30880536, 0), lg(30880536, 0), lg(-1380766565, 51064))
test(lg(5659804, 0), lg(5659804, 0), lg(-725339057, 1))
test(lg(11882277, 0), lg(11882277, 0), lg(243727355, 7))
test(lg(371783010, 0), lg(371783010, 0), lg(630143580, 14001))
test(lg(840, 0), lg(840, 0), lg(-1719362098, 109))
// int32, big
test(lg(-267334310, -1), lg(-267334310, -1), lg(1537718115, -134598983))
test(lg(57, 0), lg(57, 0), lg(-1668867109, -10100325))
test(lg(30332, 0), lg(30332, 0), lg(-615310153, -90004876))
test(lg(187, 0), lg(187, 0), lg(-590535223, 8244144))
test(lg(-2, -1), lg(-2, -1), lg(2125719729, 390762530))
test(lg(-4252915, -1), lg(-4252915, -1), lg(2070489053, 23484863))
test(lg(-2, -1), lg(-2, -1), lg(37507428, 96913792))
test(lg(10, 0), lg(10, 0), lg(-533680689, -79923599))
test(lg(-14, -1), lg(-14, -1), lg(-930313329, 2972085))
test(lg(-20155233, -1), lg(-20155233, -1), lg(-49989774, -25498857))
test(lg(-406, -1), lg(-406, -1), lg(2109762544, 126098611))
test(lg(43, 0), lg(43, 0), lg(598811771, 154269509))
test(lg(-4830, -1), lg(-4830, -1), lg(-1043650540, -2874494))
test(lg(-4271, -1), lg(-4271, -1), lg(-950378080, -106126516))
test(lg(126, 0), lg(126, 0), lg(-877412093, -90804729))
test(lg(40445345, 0), lg(40445345, 0), lg(-1461218790, 6749169))
test(lg(-1, -1), lg(-1, -1), lg(1776909778, 28425796))
test(lg(-2123811, -1), lg(-2123811, -1), lg(-51805125, 44153129))
test(lg(-25650126, -1), lg(-25650126, -1), lg(-1317209725, -16141386))
test(lg(30, 0), lg(30, 0), lg(712479950, 158765535))
test(lg(2494211, 0), lg(2494211, 0), lg(-432472367, 21859989))
test(lg(100937174, 0), lg(100937174, 0), lg(212873269, -74778594))
test(lg(901687, 0), lg(901687, 0), lg(-1225225931, -512562107))
test(lg(-422854, -1), lg(-422854, -1), lg(-1361503923, -98826041))
test(lg(2, 0), lg(2, 0), lg(386622050, -9945722))
test(lg(-465211, -1), lg(-465211, -1), lg(-418132599, -160175963))
test(lg(63, 0), lg(63, 0), lg(-1330189832, 180061391))
test(lg(47, 0), lg(47, 0), lg(1439978282, -16520554))
test(lg(233450563, 0), lg(233450563, 0), lg(-328511972, 377539644))
test(lg(-134912, -1), lg(-134912, -1), lg(1349244684, -12612862))
test(lg(-95441, -1), lg(-95441, -1), lg(511120357, 16112596))
test(lg(-1160726496, -1), lg(-1160726496, -1), lg(-913371934, -9441145))
test(lg(-502, -1), lg(-502, -1), lg(-1021329523, -377728463))
test(lg(3313324, 0), lg(3313324, 0), lg(-67454848, 442297818))
test(lg(-145, -1), lg(-145, -1), lg(-1010112762, 29724438))
test(lg(-19091, -1), lg(-19091, -1), lg(-1944488998, -173788926))
test(lg(-3331910, -1), lg(-3331910, -1), lg(2144172121, 73505274))
test(lg(56622, 0), lg(56622, 0), lg(-1451372835, 5219178))
test(lg(0, 0), lg(0, 0), lg(556032035, 32471322))
test(lg(800, 0), lg(800, 0), lg(-1649243607, 2299368))
test(lg(86949, 0), lg(86949, 0), lg(794150820, -1384562176))
test(lg(10, 0), lg(10, 0), lg(-790693444, 1000869239))
test(lg(-333236, -1), lg(-333236, -1), lg(-1020207444, 125043716))
test(lg(-598, -1), lg(-598, -1), lg(-93061561, -329975227))
test(lg(-19, -1), lg(-19, -1), lg(-1096862531, 163621631))
test(lg(465328283, 0), lg(465328283, 0), lg(-21925149, -52057346))
test(lg(-25837, -1), lg(-25837, -1), lg(677002620, 8643698))
test(lg(-383633650, -1), lg(-383633650, -1), lg(1609519787, 8262009))
test(lg(-66, -1), lg(-66, -1), lg(1917139359, 239618524))
test(lg(1676620, 0), lg(1676620, 0), lg(910745834, 82765572))
// int53, int32
test(lg(15827410, 0), lg(1244623439, 3), lg(-231372097, -1))
test(lg(15118, 0), lg(-1392787378, 124), lg(-20252, -1))
test(lg(11, 0), lg(578165055, 72), lg(13, 0))
test(lg(42298679, 0), lg(-1836745385, 3), lg(-95630157, -1))
test(lg(17447610, 0), lg(-1766124150, 29), lg(-45315780, -1))
test(lg(0, 0), lg(540281958, 253606), lg(-11, -1))
test(lg(51980, 0), lg(-442404110, 7696), lg(1489246, 0))
test(lg(2, 0), lg(-631827526, 1455), lg(8, 0))
test(lg(5125741, 0), lg(1266390909, 49), lg(-34627848, -1))
test(lg(77691, 0), lg(-453014259, 21413), lg(149449, 0))
test(lg(521867604, 0), lg(1573062436, 653), lg(671211684, 0))
test(lg(14579368, 0), lg(-21113520, 0), lg(177469767, 0))
test(lg(0, 0), lg(-262825676, 31), lg(1, 0))
test(lg(24027362, 0), lg(-163968426, 1), lg(33341027, 0))
test(lg(6792805, 0), lg(668741217, 14380), lg(-11334498, -1))
test(lg(9, 0), lg(808041281, 1818), lg(-10, -1))
test(lg(204, 0), lg(-1601247507, 25), lg(-235, -1))
test(lg(61089, 0), lg(-1577206289, 0), lg(1618642, 0))
test(lg(289305533, 0), lg(863396135, 503), lg(-321808286, -1))
test(lg(7272892, 0), lg(-900149281, 55), lg(15166197, 0))
test(lg(3, 0), lg(1802954050, 3593), lg(7, 0))
test(lg(12036, 0), lg(800669146, 41901), lg(-20591, -1))
test(lg(29, 0), lg(-1055636867, 39), lg(48, 0))
test(lg(0, 0), lg(-491067123, 14), lg(1, 0))
test(lg(260441364, 0), lg(1420289126, 67), lg(1010219079, 0))
test(lg(3936541, 0), lg(1338756461, 32), lg(-4427443, -1))
test(lg(183313645, 0), lg(-820843233, 778), lg(-273780418, -1))
test(lg(91783, 0), lg(-1033566360, 561225), lg(-156677, -1))
test(lg(5, 0), lg(-1567070603, 38), lg(-8, -1))
test(lg(11214823, 0), lg(-1649343541, 185302), lg(-19368267, -1))
test(lg(75719, 0), lg(-591434325, 76351), lg(94212, 0))
test(lg(10941, 0), lg(235794528, 55), lg(17599, 0))
test(lg(5331, 0), lg(-763589741, 116), lg(-14942, -1))
test(lg(1, 0), lg(-1283158225, 237055), lg(-2, -1))
test(lg(24400, 0), lg(1537105400, 29108), lg(-37848, -1))
test(lg(95, 0), lg(-56778611, 994650), lg(-170, -1))
test(lg(9836, 0), lg(-2057746932, 7), lg(-10100, -1))
test(lg(30255783, 0), lg(1365793356, 12), lg(-38454651, -1))
test(lg(417, 0), lg(-2128793438, 4), lg(6825, 0))
test(lg(0, 0), lg(1667515072, 8), lg(2, 0))
test(lg(257, 0), lg(420324337, 980), lg(-845, -1))
test(lg(82991, 0), lg(-771084081, 8204), lg(105392, 0))
test(lg(691256, 0), lg(-332377894, 1), lg(882238, 0))
test(lg(0, 0), lg(1749263284, 11), lg(-20, -1))
test(lg(4, 0), lg(347303218, 1234317), lg(-13, -1))
test(lg(150, 0), lg(1199079324, 17271), lg(11033, 0))
test(lg(14, 0), lg(1196217208, 13), lg(-23, -1))
test(lg(256216433, 0), lg(-1078128939, 0), lg(740155481, 0))
test(lg(45583, 0), lg(-1354463473, 3691), lg(-63588, -1))
test(lg(459, 0), lg(-1255896801, 1469630), lg(-502, -1))
// int53, int53
test(lg(1805177178, 1), lg(1805177178, 1), lg(-1293833696, 410))
test(lg(-583440651, 2), lg(647007072, 1811985), lg(1091239449, 3))
test(lg(1346307032, 1), lg(1346307032, 1), lg(-672335266, 33))
test(lg(858355422, 81), lg(858355422, 81), lg(1490435172, 162402))
test(lg(744276027, 1), lg(-1299053281, 6330), lg(1042770708, 1))
test(lg(29273105, 0), lg(-88774269, 25), lg(775537355, 1))
test(lg(383200445, 2), lg(-962613261, 4309), lg(-529185362, 5))
test(lg(-171009725, 445), lg(-171009725, 445), lg(-1167557775, 307982))
test(lg(8166883, 15498), lg(1848497503, 78519), lg(1533824479, 15755))
test(lg(-1752533311, 17), lg(-1752533311, 17), lg(1904799096, 73566))
test(lg(-1641266817, 46), lg(-1641266817, 46), lg(-31936789, 751199))
test(lg(-350685679, 656), lg(-637954451, 32352), lg(-10259599, 1131))
test(lg(-1671876486, 0), lg(-1657673170, 122149), lg(-534342412, 0))
test(lg(-660565679, 235), lg(-660565679, 235), lg(-897090894, 14655))
test(lg(-1798560222, 612), lg(-1798560222, 612), lg(-236039758, 2924))
test(lg(-28767936, 5704), lg(1010899296, 62798), lg(-1974205776, 9515))
test(lg(-2004786867, 4), lg(1206965517, 91420), lg(880030876, 7))
test(lg(712148070, 3), lg(712148070, 3), lg(472319826, 2838))
test(lg(-1275175525, 44), lg(-1275175525, 44), lg(162799342, 861329))
test(lg(1187224322, 14), lg(-516916094, 191396), lg(-1920802608, 30))
test(lg(-1461747946, 0), lg(-1627551726, 4499), lg(1200735793, 1))
test(lg(453535447, 39039), lg(453535447, 39039), lg(520791957, 141909))
test(lg(216221627, 20), lg(216221627, 20), lg(-781572865, 8131))
test(lg(1611884803, 23), lg(-1999221053, 528), lg(1107934896, 25))
test(lg(1722095012, 0), lg(-701225584, 44), lg(-1403297482, 0))
test(lg(-232837834, 5049), lg(-232837834, 5049), lg(1000581509, 15836))
test(lg(-82376749, 239), lg(-82376749, 239), lg(-163409376, 7688))
test(lg(2063025646, 2), lg(941363778, 110), lg(336092572, 3))
test(lg(721574845, 383), lg(1004884706, 1133), lg(283309861, 750))
test(lg(-2004547354, 47), lg(1436404594, 1595), lg(1522987410, 70))
test(lg(1696970595, 8), lg(1696970595, 8), lg(-1168832286, 4163))
test(lg(-2033329312, 6), lg(-1244970780, 32), lg(394179266, 13))
test(lg(1864629418, 1), lg(1864629418, 1), lg(528888491, 970677))
test(lg(1596298266, 43057), lg(-1763600443, 962032), lg(1535552275, 102108))
test(lg(1181714932, 5), lg(1181714932, 5), lg(1296434411, 26359))
test(lg(-2140209952, 7), lg(1535735456, 276446), lg(-1930593680, 7))
test(lg(-1703068243, 11), lg(2079501385, 97596), lg(-1803771626, 21))
test(lg(-1025858772, 33402), lg(286993796, 174379), lg(656426284, 70488))
test(lg(-578045904, 11724), lg(221015334, 1635766), lg(-2014306775, 270673))
test(lg(-2080784768, 56), lg(-2103734262, 977), lg(-22949494, 920))
test(lg(-922083739, 29), lg(-922083739, 29), lg(2040148267, 19160))
test(lg(-1728890579, 468), lg(-559850131, 11989), lg(1366001936, 2880))
test(lg(1341547600, 13), lg(-1071198220, 2182), lg(1526886260, 17))
test(lg(-896451936, 45), lg(-896451936, 45), lg(2132477227, 164356))
test(lg(-1538011120, 53), lg(-561327714, 1420), lg(-368698210, 151))
test(lg(1880884956, 621), lg(2112956103, 118429), lg(-374507565, 859))
test(lg(902909663, 0), lg(380445410, 8), lg(-1822479769, 1))
test(lg(-652149100, 56), lg(-1867274924, 105813), lg(175641312, 79))
test(lg(-991170416, 37), lg(-991170416, 37), lg(1740161397, 88122))
test(lg(-31602776, 1), lg(-31602776, 1), lg(-503633567, 241909))
// int53, big
test(lg(-930109303, 3), lg(-930109303, 3), lg(1606982787, 925386547))
test(lg(-717668907, 16251), lg(-717668907, 16251), lg(2079100937, 7825426))
test(lg(265990345, 3), lg(265990345, 3), lg(-1140922127, -3108870))
test(lg(-1181318422, 1), lg(-1181318422, 1), lg(1489652251, 75207246))
test(lg(380276439, 59), lg(380276439, 59), lg(-1062351234, -3631372))
test(lg(1080382784, 7211), lg(1080382784, 7211), lg(572850722, -139092025))
test(lg(2020323378, 316), lg(2020323378, 316), lg(1716930349, -16333391))
test(lg(1302118364, 5), lg(1302118364, 5), lg(-442067036, 1941456592))
test(lg(-641137972, 602), lg(-641137972, 602), lg(1134212295, -135713760))
test(lg(-761172703, 499), lg(-761172703, 499), lg(769981236, 12756336))
test(lg(1601268090, 610), lg(1601268090, 610), lg(448513898, -160887452))
test(lg(-16483553, 0), lg(-16483553, 0), lg(-1253549192, -1748027086))
test(lg(-1284021361, 241), lg(-1284021361, 241), lg(13275221, -3818882))
test(lg(1499414278, 26), lg(1499414278, 26), lg(570654893, -17498947))
test(lg(-368610421, 5074), lg(-368610421, 5074), lg(685701351, 31070898))
test(lg(1200134796, 70), lg(1200134796, 70), lg(1230376618, -2490370))
test(lg(1537764087, 64483), lg(1537764087, 64483), lg(-1252591472, 66761881))
test(lg(-1981129198, 15), lg(-1981129198, 15), lg(1937978150, 8201544))
test(lg(32422964, 200), lg(32422964, 200), lg(2051327691, -20319622))
test(lg(1404616230, 30), lg(1404616230, 30), lg(-748420073, -120320053))
test(lg(-1860381107, 38), lg(-1860381107, 38), lg(392948122, 60098039))
test(lg(1050519262, 106431), lg(1050519262, 106431), lg(361773491, -6329760))
test(lg(460136491, 1681770), lg(460136491, 1681770), lg(1399049044, 759923035))
test(lg(2065599344, 11089), lg(2065599344, 11089), lg(-465681057, 3484544))
test(lg(1849358428, 418531), lg(1849358428, 418531), lg(1023666326, 3435570))
test(lg(1292603836, 80), lg(1292603836, 80), lg(-1114872574, 250120091))
test(lg(1456627133, 194844), lg(1456627133, 194844), lg(-1256385160, 59427917))
test(lg(-568179858, 160), lg(-568179858, 160), lg(1142846538, 154324747))
test(lg(-2133580755, 203337), lg(-2133580755, 203337), lg(111334842, 12695612))
test(lg(1961218705, 6687), lg(1961218705, 6687), lg(-245612957, 134017780))
test(lg(335350966, 55096), lg(335350966, 55096), lg(-1815119598, -120983980))
test(lg(-767561503, 211), lg(-767561503, 211), lg(554589640, -7873602))
test(lg(1476687067, 3767), lg(1476687067, 3767), lg(552659809, -753378142))
test(lg(-1107393223, 30), lg(-1107393223, 30), lg(-78383575, -52663801))
test(lg(607313614, 2), lg(607313614, 2), lg(-234099925, 59184919))
test(lg(-1542671184, 616882), lg(-1542671184, 616882), lg(1370026838, -45628731))
test(lg(525616384, 1001), lg(525616384, 1001), lg(1995646126, -11226360))
test(lg(2109958916, 21549), lg(2109958916, 21549), lg(-419960245, -115959896))
test(lg(-450913111, 32140), lg(-450913111, 32140), lg(-99267096, -3640047))
test(lg(1515870052, 198), lg(1515870052, 198), lg(1415757861, -110282301))
test(lg(124639649, 865615), lg(124639649, 865615), lg(-1354782388, 2569606))
test(lg(557119825, 7205), lg(557119825, 7205), lg(683150209, -15864187))
test(lg(992846513, 1385110), lg(992846513, 1385110), lg(1578961851, -8380578))
test(lg(1081385155, 4176), lg(1081385155, 4176), lg(1892231070, 31130825))
test(lg(-738492748, 8), lg(-738492748, 8), lg(-431212066, 687916944))
test(lg(-1448153936, 8101), lg(-1448153936, 8101), lg(-584523654, -4814205))
test(lg(-713251055, 243), lg(-713251055, 243), lg(261411225, 31444708))
test(lg(881178812, 47057), lg(881178812, 47057), lg(823893049, -5940358))
test(lg(-506817388, 0), lg(-506817388, 0), lg(-465610822, 10559551))
test(lg(-420315839, 112832), lg(-420315839, 112832), lg(-686319219, -666166549))
// big, int32
test(lg(-3, -1), lg(-412174169, -319069709), lg(-6, -1))
test(lg(464005, 0), lg(1634601702, 814446468), lg(825883, 0))
test(lg(34559370, 0), lg(-1005992901, 2694218), lg(108493743, 0))
test(lg(-286379, -1), lg(1534700309, -630528658), lg(-506616, -1))
test(lg(-62, -1), lg(-456613426, -23298167), lg(-206, -1))
test(lg(386945695, 0), lg(857770611, 2618490), lg(1225551197, 0))
test(lg(270232, 0), lg(2127943654, 2768088), lg(-291653, -1))
test(lg(277129, 0), lg(1085973072, 3470797), lg(-29714535, -1))
test(lg(15, 0), lg(1536124828, 1268901218), lg(-121, -1))
test(lg(1, 0), lg(371220141, 34588968), lg(2, 0))
test(lg(46669, 0), lg(-1712997009, 187259899), lg(129274, 0))
test(lg(-1508, -1), lg(586579000, -243530833), lg(-31235, -1))
test(lg(0, 0), lg(1745775262, -400161972), lg(-1, -1))
test(lg(-1680, -1), lg(-1564631310, -56487209), lg(2626, 0))
test(lg(53, 0), lg(-1848745069, 11533547), lg(59, 0))
test(lg(-1699972, -1), lg(-1415791920, -26215621), lg(-2142359, -1))
test(lg(-200041, -1), lg(-481609933, -25891343), lg(483607, 0))
test(lg(-13123232, -1), lg(-889674017, -4084771), lg(428648085, 0))
test(lg(0, 0), lg(1587465684, -367383975), lg(7, 0))
test(lg(-4528, -1), lg(811562260, -335104547), lg(5502, 0))
test(lg(-71, -1), lg(2107357891, -10075787), lg(110, 0))
test(lg(0, 0), lg(-1356326655, 5174156), lg(-1, -1))
test(lg(7872112, 0), lg(-1794856776, 3059124), lg(-29413816, -1))
test(lg(-37, -1), lg(-1118254374, -3629384), lg(-85, -1))
test(lg(14227, 0), lg(288539563, 70814306), lg(-14561, -1))
test(lg(-49, -1), lg(-719069745, -128562664), lg(-256, -1))
test(lg(6101, 0), lg(1530955727, 15829469), lg(195494, 0))
test(lg(-6, -1), lg(2144004402, -5408490), lg(11, 0))
test(lg(-137624717, -1), lg(-1766192560, -17443468), lg(-168087095, -1))
test(lg(-3592, -1), lg(-524619138, -371121095), lg(4765, 0))
test(lg(4335, 0), lg(-1960083221, 176122524), lg(-5564, -1))
test(lg(-271754, -1), lg(1528631102, -597885631), lg(-413908, -1))
test(lg(-361112, -1), lg(-1513123614, -30582360), lg(-496311, -1))
test(lg(-4, -1), lg(-1975522255, -46421733), lg(29, 0))
test(lg(414436, 0), lg(-1715879325, 3072313), lg(438221, 0))
test(lg(0, 0), lg(-1321015849, -300384564), lg(1, 0))
test(lg(-454, -1), lg(-1088390706, -277354665), lg(-1237, -1))
test(lg(586891857, 0), lg(-1012773943, 223943652), lg(707359548, 0))
test(lg(2, 0), lg(1097288344, 26740237), lg(-3, -1))
test(lg(-24053960, -1), lg(-1121404205, -87484234), lg(80229261, 0))
test(lg(-79944815, -1), lg(-1503637931, -163703901), lg(-983334452, -1))
test(lg(2600110, 0), lg(2012820970, 445991475), lg(1035472980, 0))
test(lg(74, 0), lg(2015362538, 2985510), lg(-148, -1))
test(lg(0, 0), lg(1764134228, 50881407), lg(-1, -1))
test(lg(106, 0), lg(-523555853, 77167937), lg(-563, -1))
test(lg(0, 0), lg(1531888651, -2389306), lg(1, 0))
test(lg(659, 0), lg(-181277952, 32599207), lg(-729, -1))
test(lg(968, 0), lg(223126732, 88838488), lg(13378, 0))
test(lg(920991, 0), lg(670834629, 46037187), lg(922370, 0))
test(lg(2462152, 0), lg(1098978850, 6541822), lg(-8405198, -1))
// big, int53
test(lg(1057995305, 4748), lg(2008672965, 41566313), lg(313991275, 18390))
test(lg(-1074209653, 18), lg(1922552561, 28139870), lg(-2083633557, 19))
test(lg(1480601143, -11310), lg(843627074, -173776705), lg(1451117493, 14364))
test(lg(-691687452, -38), lg(204865470, -6692402), lg(-645190286, 413))
test(lg(-1218791457, -31), lg(952830559, -214594684), lg(-1778162360, 378))
test(lg(-281609960, -1292), lg(1673740333, -69274846), lg(-1549261605, 2390))
test(lg(-860426348, 1), lg(-1276804811, 367022678), lg(-678111623, 11))
test(lg(-1244563205, -1264), lg(-1331527548, -33013551), lg(-1975438267, 2961))
test(lg(-935830326, 135167), lg(1067523314, 72606174), lg(-1716982106, 255179))
test(lg(-2025081444, -42140), lg(-937134490, -32649070), lg(-804857990, 57507))
test(lg(85696931, 194), lg(108363299, 1224097478), lg(1137551776, 281))
test(lg(-385517902, -5258), lg(-1965834834, -11053948), lg(-942300324, 6487))
test(lg(-755355475, 2268), lg(-3151939, 171473802), lg(-2071379940, 3914))
test(lg(-676865399, -663), lg(1465781759, -970108425), lg(-1251607207, 3003))
test(lg(2042443783, -22321), lg(919308511, -1689158617), lg(658566728, 36406))
test(lg(-903837593, 31415), lg(-418485001, 1000432592), lg(-1653953022, 31957))
test(lg(496274972, -48207), lg(-880302655, -14116770), lg(913871933, 118223))
test(lg(1210119082, -104892), lg(-525597278, -3790314), lg(2133284776, 127083))
test(lg(473810731, -5), lg(-393124913, -28106221), lg(958070140, 159))
test(lg(-1912903061, 25777), lg(6929245, 2749730), lg(1462129294, 43237))
test(lg(1099532724, -19), lg(708024745, -15568245), lg(1288198049, 56))
test(lg(920504149, 6836), lg(487601139, 13603229), lg(723875593, 45021))
test(lg(1778080723, 29), lg(-2070321133, 115478389), lg(-1799479616, 75))
test(lg(-720480381, 2735), lg(-307180735, 3049800), lg(1043781053, 3319))
test(lg(1473972065, -1), lg(-1073877839, -6538577), lg(-1408649838, 0))
test(lg(-1389255096, -200), lg(-1892822171, -1698321438), lg(96164237, 514))
test(lg(857386403, 29656), lg(-674980011, 2764943), lg(-445529419, 65125))
test(lg(-419043446, -22164), lg(2003347800, -46928389), lg(368897711, 128159))
test(lg(-1599543668, -6569), lg(-1929871429, -241628283), lg(202358381, 7645))
test(lg(581185953, 1), lg(419719197, 661188517), lg(2112360098, 1))
test(lg(-1880704128, 171407), lg(1092830824, 1600823129), lg(-1827462760, 172800))
test(lg(1210159480, -13), lg(-836779994, -27475595), lg(-417527207, 16))
test(lg(807846066, 1), lg(-1759597755, 9157722), lg(-987185779, 1))
test(lg(949995673, 1), lg(-1097231525, 20092165), lg(1106421078, 1))
test(lg(-712450167, 7), lg(390678483, 3835040), lg(1221250555, 14))
test(lg(1129531033, -4), lg(-284334384, -18425278), lg(-1111448031, 6))
test(lg(2094997010, 3022), lg(-233961390, 53260849), lg(-613558136, 3663))
test(lg(-496446555, 540290), lg(-3383211, 8039036), lg(-1668680584, 749874))
test(lg(1280740603, -9472), lg(804358887, -189240235), lg(179665302, 12347))
test(lg(2127427912, 6), lg(208769744, 280071599), lg(-325433064, 14))
test(lg(-722136158, -1), lg(-1527711901, -51564742), lg(-1019145455, 0))
test(lg(-1603688570, -2), lg(-159182038, -2145592347), lg(-483720705, 15))
test(lg(-256578646, 177817), lg(1059926378, 477886379), lg(924988992, 543468))
test(lg(1286157765, 80885), lg(-1800046387, 119696078), lg(436524799, 94037))
test(lg(251450065, 19154), lg(-822280387, 44882065), lg(-940828508, 22947))
test(lg(1310986115, 209), lg(1465101985, 269803551), lg(-1953360551, 334))
test(lg(1436855439, -5), lg(-567675197, -8838663), lg(1903221047, 6))
test(lg(296887390, -17), lg(689376065, -22622471), lg(1534988921, 63))
test(lg(1577958450, -39), lg(-2017356377, -57717216), lg(-1390284125, 42))
test(lg(661387374, 344542), lg(-128715878, 982583003), lg(2004099318, 988167))
// big, big
test(lg(-320078007, 205603273), lg(-320078007, 205603273), lg(2020227799, -360928021))
test(lg(408769930, -2221999), lg(-800732960, -371808530), lg(744251542, -11199592))
test(lg(1575977183, -2441606), lg(-56774921, -32434115), lg(1413374280, -2726592))
test(lg(-1897285736, 18894093), lg(1667937500, 228622683), lg(-243248020, 69909529))
test(lg(-1333815518, 2097776), lg(-1333815518, 2097776), lg(-1750106076, 18608702))
test(lg(-789967161, -4640836), lg(-162800691, -117885498), lg(-709007774, 8711127))
test(lg(-1909427145, -2824029), lg(-1909427145, -2824029), lg(2028036056, -660713154))
test(lg(14077923, 63046905), lg(14077923, 63046905), lg(-688765214, 375445962))
test(lg(272760540, 19525127), lg(272760540, 19525127), lg(-396955631, 848435537))
test(lg(-600396362, 406643261), lg(-600396362, 406643261), lg(-1533973181, 491661310))
test(lg(1801834226, 200420454), lg(1801834226, 200420454), lg(-1889418050, -328758068))
test(lg(361053022, 54544094), lg(1170836790, 510289402), lg(202445942, 113936327))
test(lg(1369752396, -3152427), lg(-378923036, -1036580478), lg(905093048, 5526353))
test(lg(1458911735, 21273958), lg(-2137034353, 1455139814), lg(1665353214, 27574343))
test(lg(-1350216191, -3821167), lg(-1350216191, -3821167), lg(-1333339390, -4746360))
test(lg(1166542449, -1370750), lg(-1289646201, -5193401), lg(1838778646, -3822651))
test(lg(301867174, 5185218), lg(301867174, 5185218), lg(157012848, -15464466))
test(lg(512572633, 48335882), lg(467711834, 155069651), lg(-44860799, 106733768))
test(lg(1624269582, 11007763), lg(1624269582, 11007763), lg(-158694824, -491219717))
test(lg(-1015519521, -163989350), lg(-1015519521, -163989350), lg(1652525166, 530116116))
test(lg(-2127450406, -89864400), lg(2001612518, -452587333), lg(1115217917, 90680733))
test(lg(-761803769, -6085789), lg(1039524645, -86121932), lg(1131434363, 13339357))
test(lg(-1922291990, 6439098), lg(-1922291990, 6439098), lg(-1083372307, -20634200))
test(lg(1508171882, 126457), lg(1408756974, 235847122), lg(-1813277898, -9066180))
test(lg(-496706473, -2657930), lg(1121009342, -1533788016), lg(-1724900447, -5821788))
test(lg(-1626361260, -113469353), lg(-1626361260, -113469353), lg(1216987736, -817139415))
test(lg(-433139577, -182483493), lg(-433139577, -182483493), lg(1019490766, -595625160))
test(lg(-1118452074, 1653764), lg(793542905, 198273616), lg(-82759497, -2621599))
test(lg(-1199275184, 1262327), lg(425605214, 249789222), lg(392156278, 6716943))
test(lg(213473729, 11660532), lg(213473729, 11660532), lg(-547058106, 894811834))
test(lg(-1550227391, 2847368), lg(-1550227391, 2847368), lg(-1996700003, 689370771))
test(lg(-1014778289, -3747071), lg(-144234222, -54239417), lg(-1102770075, -7213193))
test(lg(524484467, 15124083), lg(524484467, 15124083), lg(-1101379967, -39968226))
test(lg(-919997306, 2085072), lg(314758022, 5390195), lg(-1234755328, -3305123))
test(lg(580679232, -10426812), lg(580679232, -10426812), lg(-1964013803, -1738507605))
test(lg(225658926, -4189255), lg(1670083752, -254253193), lg(722212413, -125031969))
test(lg(-495749254, -1833207), lg(-1744001445, -5443198), lg(1248252191, 3609991))
test(lg(-1481543825, 608612), lg(-1786439869, 137339199), lg(1821158508, 2909161))
test(lg(1026706952, -6267613), lg(1273422584, -284542935), lg(1626032463, -17392208))
test(lg(-855876173, -4928311), lg(-513801887, -32580141), lg(-342074286, 27651829))
test(lg(-1027906958, 55543678), lg(-1027906958, 55543678), lg(-1936394792, 928937151))
test(lg(-1793811005, -17787029), lg(251585986, -50474191), lg(-2045396991, 32687162))
test(lg(-356034186, -2235041), lg(66679938, -917589429), lg(2124767660, -3454168))
test(lg(-924611099, -76507846), lg(-599564184, -209788131), lg(-325046915, 133280284))
test(lg(838338995, -12983151), lg(838338995, -12983151), lg(-842402530, 19411056))
test(lg(747658762, 18528439), lg(1444498155, 520850879), lg(851271837, 23920116))
test(lg(-2028924578, -3124146), lg(2096765386, -117024114), lg(-1726450785, -5694999))
test(lg(2056903464, -4954201), lg(-425905039, -180148939), lg(-1397064581, -15926795))
test(lg(-2055992988, 596420), lg(-920215872, 219325473), lg(1357686103, 54682263))
test(lg(1279110660, -10784541), lg(1279110660, -10784541), lg(278869448, 758126792))
}
// Verifies that Long modulo by zero throws ArithmeticException on every code path the
// Scala.js optimizer can take: a direct `%`, a call through a @noinline barrier, a call
// through an @inline helper, and an expression scalac itself can constant-fold.
@Test def moduloByZero(): Unit = {
// @noinline forces a real method call; @inline forces the optimizer to expand the body.
@noinline def modNoInline(x: Long, y: Long): Long = x % y
@inline def modInline(x: Long, y: Long): Long = x % y
@inline def test(x: Long): Unit = {
assertThrows(classOf[ArithmeticException], x % 0L)
assertThrows(classOf[ArithmeticException], modNoInline(x, 0L))
assertThrows(classOf[ArithmeticException], modInline(x, 0L))
}
test(0L)
test(1L)
test(43L)
test(-3L)
// Eligible for constant folding by scalac itself
assertThrows(classOf[ArithmeticException], 5L % 0L)
}
}
object LongTest {
// Test fixture: a case class wrapping a single Long value (exercises the
// compiler-generated equals/hashCode for Long fields).
case class HashTestBox(long: Long)
}
| nicolasstucki/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/compiler/LongTest.scala | Scala | apache-2.0 | 145,155 |
package com.tvunetworks.test
/**
* @author RichardYao
* @date 2017-04-22
*/
object UseObject {
// Demonstrates extractor objects: `UseObject(5)` invokes `apply` (yielding 10), and the
// pattern `case UseObject(num)` below invokes `unapply` on that value.
def main(args: Array[String]) {
println(ExtractorTest.unapply("richardyao"))
// apply(5) is called here, so obj == 10
val obj = UseObject(5)
println(obj)
obj match {
/**
 * When a match statement is used against an extractor object, its unapply
 * method is invoked automatically.
 */
case UseObject(num) => println(obj+" is " + num + " twice more")
case _ => println("Cannot count")
}
}
def apply(x: Int) = x*2
def unapply(z: Int): Option[Int] = if (z%2 == 0) Some(z/2) else None
} | richard-yao/ScalaTest | ScalaTest/src/main/scala/com/tvunetworks/test/UseObject.scala | Scala | apache-2.0 | 576 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.ref.WeakReference
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Tests.TEST_MEMORY
import org.apache.spark.io.CompressionCodec
import org.apache.spark.memory.MemoryTestingUtils
import org.apache.spark.util.CompletionIterator
class ExternalAppendOnlyMapSuite extends SparkFunSuite
with LocalSparkContext
with Eventually
with Matchers{
import TestUtils.{assertNotSpilled, assertSpilled}
// Every codec exercised by the compression spill tests below.
private val allCompressionCodecs = CompressionCodec.ALL_COMPRESSION_CODECS
// Aggregator functions shared by the maps in this suite: each key's values are
// accumulated into an ArrayBuffer.
private def createCombiner[T](i: T) = ArrayBuffer[T](i)
private def mergeValue[T](buffer: ArrayBuffer[T], i: T): ArrayBuffer[T] = buffer += i
private def mergeCombiners[T](buf1: ArrayBuffer[T], buf2: ArrayBuffer[T]): ArrayBuffer[T] =
buf1 ++= buf2
// Builds a map backed by a fake TaskContext so memory/spill behavior can be driven
// without a real running task.
private def createExternalMap[T] = {
val context = MemoryTestingUtils.fakeTaskContext(sc.env)
new ExternalAppendOnlyMap[T, T, ArrayBuffer[T]](
createCombiner[T], mergeValue[T], mergeCombiners[T], context = context)
}
// Builds a SparkConf tuned for these tests; spill compression is enabled iff `codec`
// is supplied.
private def createSparkConf(loadDefaults: Boolean, codec: Option[String] = None): SparkConf = {
val conf = new SparkConf(loadDefaults)
// Make the Java serializer write a reset instruction (TC_RESET) after each object to test
// for a bug we had with bytes written past the last object in a batch (SPARK-2792)
conf.set(SERIALIZER_OBJECT_STREAM_RESET, 1)
conf.set(SERIALIZER, "org.apache.spark.serializer.JavaSerializer")
conf.set(SHUFFLE_SPILL_COMPRESS, codec.isDefined)
conf.set(SHUFFLE_COMPRESS, codec.isDefined)
codec.foreach { c => conf.set(IO_COMPRESSION_CODEC, c) }
// Ensure that we actually have multiple batches per spill file
conf.set(SHUFFLE_SPILL_BATCH_SIZE, 10L)
conf
}
// A single key/value lands in the map as a one-element buffer.
test("single insert") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insert(1, 10)
val it = map.iterator
assert(it.hasNext)
val kv = it.next()
assert(kv._1 === 1 && kv._2 === ArrayBuffer[Int](10))
assert(!it.hasNext)
sc.stop()
}
// Distinct keys each keep their own buffer.
test("multiple insert") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insert(1, 10)
map.insert(2, 20)
map.insert(3, 30)
val it = map.iterator
assert(it.hasNext)
assert(it.toSet === Set[(Int, ArrayBuffer[Int])](
(1, ArrayBuffer[Int](10)),
(2, ArrayBuffer[Int](20)),
(3, ArrayBuffer[Int](30))))
sc.stop()
}
// Repeated keys ("collision" here means key re-insertion) merge into one buffer per key.
test("insert with collision") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insertAll(Seq(
(1, 10),
(2, 20),
(3, 30),
(1, 100),
(2, 200),
(1, 1000)))
val it = map.iterator
assert(it.hasNext)
val result = it.toSet[(Int, ArrayBuffer[Int])].map(kv => (kv._1, kv._2.toSet))
assert(result === Set[(Int, Set[Int])](
(1, Set[Int](10, 100, 1000)),
(2, Set[Int](20, 200)),
(3, Set[Int](30))))
sc.stop()
}
// Iteration order must be independent of insertion order: three maps filled with the
// same pairs in different orders must iterate identically.
test("ordering") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map1 = createExternalMap[Int]
map1.insert(1, 10)
map1.insert(2, 20)
map1.insert(3, 30)
val map2 = createExternalMap[Int]
map2.insert(2, 20)
map2.insert(3, 30)
map2.insert(1, 10)
val map3 = createExternalMap[Int]
map3.insert(3, 30)
map3.insert(1, 10)
map3.insert(2, 20)
val it1 = map1.iterator
val it2 = map2.iterator
val it3 = map3.iterator
var kv1 = it1.next()
var kv2 = it2.next()
var kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
kv1 = it1.next()
kv2 = it2.next()
kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
kv1 = it1.next()
kv2 = it2.next()
kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
sc.stop()
}
// null unboxed to Int is 0 on the JVM, so "null" keys/values behave as key/value 0.
test("null keys and values") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
val nullInt = null.asInstanceOf[Int]
map.insert(1, 5)
map.insert(2, 6)
map.insert(3, 7)
map.insert(4, nullInt)
map.insert(nullInt, 8)
map.insert(nullInt, nullInt)
val result = map.iterator.toSet[(Int, ArrayBuffer[Int])].map(kv => (kv._1, kv._2.sorted))
assert(result === Set[(Int, Seq[Int])](
(1, Seq[Int](5)),
(2, Seq[Int](6)),
(3, Seq[Int](7)),
(4, Seq[Int](nullInt)),
(nullInt, Seq[Int](nullInt, 8))
))
sc.stop()
}
// End-to-end: reduceByKey/groupByKey go through the external map via RDD aggregation.
test("simple aggregator") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
// reduceByKey
val rdd = sc.parallelize(1 to 10).map(i => (i%2, 1))
val result1 = rdd.reduceByKey(_ + _).collect()
assert(result1.toSet === Set[(Int, Int)]((0, 5), (1, 5)))
// groupByKey
val result2 = rdd.groupByKey().collect().map(x => (x._1, x._2.toList)).toSet
assert(result2.toSet === Set[(Int, Seq[Int])]
((0, List[Int](1, 1, 1, 1, 1)), (1, List[Int](1, 1, 1, 1, 1))))
sc.stop()
}
// cogroup over keys present in one, both, or neither side.
test("simple cogroup") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val rdd1 = sc.parallelize(1 to 4).map(i => (i, i))
val rdd2 = sc.parallelize(1 to 4).map(i => (i%2, i))
val result = rdd1.cogroup(rdd2).collect()
result.foreach { case (i, (seq1, seq2)) =>
i match {
case 0 => assert(seq1.toSet === Set[Int]() && seq2.toSet === Set[Int](2, 4))
case 1 => assert(seq1.toSet === Set[Int](1) && seq2.toSet === Set[Int](1, 3))
case 2 => assert(seq1.toSet === Set[Int](2) && seq2.toSet === Set[Int]())
case 3 => assert(seq1.toSet === Set[Int](3) && seq2.toSet === Set[Int]())
case 4 => assert(seq1.toSet === Set[Int](4) && seq2.toSet === Set[Int]())
}
}
sc.stop()
}
// Baseline spilling test: no compression, no encryption.
test("spilling") {
testSimpleSpilling()
}
/**
 * Runs the simple spilling test once per supported compression codec, optionally with
 * I/O encryption enabled, re-throwing any failure with the offending codec named.
 *
 * Fixed: this method used deprecated procedure syntax (`def f(...) { ... }`); it now
 * declares `: Unit =` explicitly, consistent with the rest of this file.
 */
private def testSimpleSpillingForAllCodecs(encrypt: Boolean): Unit = {
  // Keep track of which compression codec we're using to report in test failure messages
  var lastCompressionCodec: Option[String] = None
  try {
    allCompressionCodecs.foreach { c =>
      lastCompressionCodec = Some(c)
      testSimpleSpilling(Some(c), encrypt)
    }
  } catch {
    // Include compression codec used in test failure message
    // We need to catch Throwable here because assertion failures are not covered by Exceptions
    case t: Throwable =>
      val compressionMessage = lastCompressionCodec
        .map { c => "with compression using codec " + c }
        .getOrElse("without compression")
      val newException = new Exception(s"Test failed $compressionMessage:\n\n${t.getMessage}")
      newException.setStackTrace(t.getStackTrace)
      throw newException
  }
}
// Exercise spilling under every codec, without and then with I/O encryption.
test("spilling with compression") {
testSimpleSpillingForAllCodecs(encrypt = false)
}
test("spilling with compression and encryption") {
testSimpleSpillingForAllCodecs(encrypt = true)
}
/**
 * Test spilling through simple aggregations and cogroups.
 * If a compression codec is provided, use it. Otherwise, do not compress spills.
 */
private def testSimpleSpilling(codec: Option[String] = None, encrypt: Boolean = false): Unit = {
val size = 1000
val conf = createSparkConf(loadDefaults = true, codec) // Load defaults for Spark home
// Force a spill after size/4 elements so each operation below must spill.
conf.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, size / 4)
conf.set(IO_ENCRYPTION_ENABLED, encrypt)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
// reduceByKey: keys i/2 with max value — value for key k must be 2k+1.
assertSpilled(sc, "reduceByKey") {
val result = sc.parallelize(0 until size)
.map { i => (i / 2, i) }.reduceByKey(math.max).collect()
assert(result.length === size / 2)
result.foreach { case (k, v) =>
val expected = k * 2 + 1
assert(v === expected, s"Value for $k was wrong: expected $expected, got $v")
}
}
// groupByKey: each key k groups exactly {2k, 2k+1}.
assertSpilled(sc, "groupByKey") {
val result = sc.parallelize(0 until size).map { i => (i / 2, i) }.groupByKey().collect()
assert(result.length == size / 2)
result.foreach { case (i, seq) =>
val actual = seq.toSet
val expected = Set(i * 2, i * 2 + 1)
assert(actual === expected, s"Value for $i was wrong: expected $expected, got $actual")
}
}
// cogroup: both sides carry the same pairs, so both groups must match.
assertSpilled(sc, "cogroup") {
val rdd1 = sc.parallelize(0 until size).map { i => (i / 2, i) }
val rdd2 = sc.parallelize(0 until size).map { i => (i / 2, i) }
val result = rdd1.cogroup(rdd2).collect()
assert(result.length === size / 2)
result.foreach { case (i, (seq1, seq2)) =>
val actual1 = seq1.toSet
val actual2 = seq2.toSet
val expected = Set(i * 2, i * 2 + 1)
assert(actual1 === expected, s"Value 1 for $i was wrong: expected $expected, got $actual1")
assert(actual2 === expected, s"Value 2 for $i was wrong: expected $expected, got $actual2")
}
}
sc.stop()
}
// Forcing a spill before the iterator has been requested must be a no-op (returns 0 bytes).
test("ExternalAppendOnlyMap shouldn't fail when forced to spill before calling its iterator") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, size / 2)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[String]
val consumer = createExternalMap[String]
map.insertAll((1 to size).iterator.map(_.toString).map(i => (i, i)))
assert(map.spill(10000, consumer) == 0L)
}
// Keys with genuinely colliding hashCodes must stay distinct across spills.
test("spilling with hash collisions") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, size / 2)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[String]
// Each pair hashes to the value in the trailing comment.
val collisionPairs = Seq(
("Aa", "BB"), // 2112
("to", "v1"), // 3707
("variants", "gelato"), // -1249574770
("Teheran", "Siblings"), // 231609873
("misused", "horsemints"), // 1069518484
("isohel", "epistolaries"), // -1179291542
("righto", "buzzards"), // -931102253
("hierarch", "crinolines"), // -1732884796
("inwork", "hypercatalexes"), // -1183663690
("wainages", "presentencing"), // 240183619
("trichothecenes", "locular"), // 339006536
("pomatoes", "eructation") // 568647356
)
collisionPairs.foreach { case (w1, w2) =>
// String.hashCode is documented to use a specific algorithm, but check just in case
assert(w1.hashCode === w2.hashCode)
}
map.insertAll((1 to size).iterator.map(_.toString).map(i => (i, i)))
collisionPairs.foreach { case (w1, w2) =>
map.insert(w1, w2)
map.insert(w2, w1)
}
assert(map.numSpills > 0, "map did not spill")
// A map of collision pairs in both directions
val collisionPairsMap = (collisionPairs ++ collisionPairs.map(_.swap)).toMap
// Avoid map.size or map.iterator.length because this destructively sorts the underlying map
var count = 0
val it = map.iterator
while (it.hasNext) {
val kv = it.next()
val expectedValue = ArrayBuffer[String](collisionPairsMap.getOrElse(kv._1, kv._1))
assert(kv._2.equals(expectedValue))
count += 1
}
assert(count === size + collisionPairs.size * 2)
sc.stop()
}
// All keys hash to just 0 or 1; values for equal keys must still be merged (SPARK-2043).
test("spilling with many hash collisions") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, size / 2)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val context = MemoryTestingUtils.fakeTaskContext(sc.env)
val map =
new ExternalAppendOnlyMap[FixedHashObject, Int, Int](_ => 1, _ + _, _ + _, context = context)
// Insert 10 copies each of lots of objects whose hash codes are either 0 or 1. This causes
// problems if the map fails to group together the objects with the same code (SPARK-2043).
for (i <- 1 to 10) {
for (j <- 1 to size) {
map.insert(FixedHashObject(j, j % 2), 1)
}
}
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
var count = 0
while (it.hasNext) {
val kv = it.next()
assert(kv._2 === 10)
count += 1
}
assert(count === size)
sc.stop()
}
// Int.MaxValue is used internally as a sentinel-prone boundary; iteration must not break.
test("spilling with hash collisions using the Int.MaxValue key") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, size / 2)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
(1 to size).foreach { i => map.insert(i, i) }
map.insert(Int.MaxValue, Int.MaxValue)
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
while (it.hasNext) {
// Should not throw NoSuchElementException
it.next()
}
sc.stop()
}
// Spilled data containing "null" (zero-valued) keys/values must iterate cleanly.
test("spilling with null keys and values") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, size / 2)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((1 to size).iterator.map(i => (i, i)))
map.insert(null.asInstanceOf[Int], 1)
map.insert(1, null.asInstanceOf[Int])
map.insert(null.asInstanceOf[Int], null.asInstanceOf[Int])
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
while (it.hasNext) {
// Should not throw NullPointerException
it.next()
}
sc.stop()
}
// Regression test for SPARK-22713: after a forced spill mid-iteration, the iterator must
// keep working AND the in-memory map must become garbage-collectable (checked via a
// WeakReference that must clear after System.gc()).
// NOTE(review): `it.next` (no parens) and `.seq` are deprecated idioms; left unchanged here.
test("SPARK-22713 spill during iteration leaks internal map") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((0 until size).iterator.map(i => (i / 10, i)))
assert(map.numSpills == 0, "map was not supposed to spill")
val it = map.iterator
assert(it.isInstanceOf[CompletionIterator[_, _]])
// org.apache.spark.util.collection.AppendOnlyMap.destructiveSortedIterator returns
// an instance of an anonymous Iterator class.
val underlyingMapRef = WeakReference(map.currentMap)
{
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(!tmpIsNull)
}
val first50Keys = for ( _ <- 0 until 50) yield {
val (k, vs) = it.next
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
assert(map.numSpills == 0)
// Force a spill mid-iteration; remaining elements must come from the spill file.
map.spill(Long.MaxValue, null)
// these asserts try to show that we're no longer holding references to the underlying map.
// it'd be nice to use something like
// https://github.com/scala/scala/blob/2.13.x/test/junit/scala/tools/testing/AssertUtil.scala
// (lines 69-89)
// assert(map.currentMap == null)
eventually(timeout(5.seconds), interval(200.milliseconds)) {
System.gc()
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(tmpIsNull)
}
val next50Keys = for ( _ <- 0 until 50) yield {
val (k, vs) = it.next
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
assert(!it.hasNext)
val keys = (first50Keys ++ next50Keys).sorted
assert(keys == (0 until 100))
}
// Once the iterator is fully consumed, the map must drop all references to its internal
// structure (currentMap becomes null and the WeakReference clears after GC).
test("drop all references to the underlying map once the iterator is exhausted") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((0 until size).iterator.map(i => (i / 10, i)))
assert(map.numSpills == 0, "map was not supposed to spill")
val underlyingMapRef = WeakReference(map.currentMap)
{
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(!tmpIsNull)
}
val it = map.iterator
assert( it.isInstanceOf[CompletionIterator[_, _]])
val keys = it.map{
case (k, vs) =>
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
.toList
.sorted
assert(it.isEmpty)
assert(keys == (0 until 100).toList)
assert(map.numSpills == 0)
// these asserts try to show that we're no longer holding references to the underlying map.
// it'd be nice to use something like
// https://github.com/scala/scala/blob/2.13.x/test/junit/scala/tools/testing/AssertUtil.scala
// (lines 69-89)
assert(map.currentMap == null)
eventually {
Thread.sleep(500)
System.gc()
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(tmpIsNull)
}
assert(it.toList.isEmpty)
}
// Peak execution memory must be recorded both when the aggregation stays in memory and
// when it spills.
test("SPARK-22713 external aggregation updates peak execution memory") {
val spillThreshold = 1000
val conf = createSparkConf(loadDefaults = false)
.set(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD, spillThreshold)
sc = new SparkContext("local", "test", conf)
// No spilling
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "external map without spilling") {
assertNotSpilled(sc, "verify peak memory") {
sc.parallelize(1 to spillThreshold / 2, 2).map { i => (i, i) }.reduceByKey(_ + _).count()
}
}
// With spilling
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "external map with spilling") {
assertSpilled(sc, "verify peak memory") {
sc.parallelize(1 to spillThreshold * 3, 2).map { i => (i, i) }.reduceByKey(_ + _).count()
}
}
}
// Large aggregation under a tight memory budget: must complete (by spilling) rather
// than fail with OOM.
test("force to spill for external aggregation") {
val conf = createSparkConf(loadDefaults = false)
.set(MEMORY_STORAGE_FRACTION, 0.999)
.set(TEST_MEMORY, 471859200L)
.set(SHUFFLE_SORT_BYPASS_MERGE_THRESHOLD, 0)
sc = new SparkContext("local", "test", conf)
val N = 200000
sc.parallelize(1 to N, 2)
.map { i => (i, i) }
.groupByKey()
.reduceByKey(_ ++ _)
.count()
}
}
| witgo/spark | core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala | Scala | apache-2.0 | 19,891 |
package com.twitter.util.reflect
import com.twitter.util.Memoize
import java.lang.reflect.{ParameterizedType, TypeVariable, Type => JavaType}
import scala.reflect.api.TypeCreator
import scala.reflect.runtime.universe._
object Types {
private[this] val PRODUCT: Type = typeOf[Product] // cached: every case class is a Product
private[this] val OPTION: Type = typeOf[Option[_]] // cached: excluded from isCaseClass
private[this] val LIST: Type = typeOf[List[_]] // cached: excluded from isCaseClass
/**
* Returns `true` if the given class type is considered a case class.
* True if:
* - is assignable from PRODUCT and
* - not assignable from OPTION nor LIST and
* - is not a Tuple and
* - class symbol is case class.
*/
val isCaseClass: Class[_] => Boolean = Memoize { clazz: Class[_] =>
val tpe = asTypeTag(clazz).tpe
val classSymbol = tpe.typeSymbol.asClass
// A case class here is: a Product that is not Option/List, not a TupleN,
// and whose class symbol reports isCaseClass.
tpe <:< PRODUCT &&
!(tpe <:< OPTION || tpe <:< LIST) &&
!clazz.getName.startsWith("scala.Tuple") &&
classSymbol.isCaseClass
}
/**
* This is the negation of [[Types.isCaseClass]]
* Determine if a given class type is not a case class.
* Returns `true` if it is NOT considered a case class.
*/
def notCaseClass[T](clazz: Class[T]): Boolean = !isCaseClass(clazz)
/**
* Convert from the given `Class[T]` to a `TypeTag[T]` in the runtime universe.
* =Usage=
* {{{
* val clazz: Class[T] = ???
* val tag: TypeTag[T] = Types.asTypeTag(clazz)
* }}}
*
* @param clazz the class for which to build the resultant [[TypeTag]]
* @return a `TypeTag[T]` representing the given `Class[T]`.
*/
def asTypeTag[T](clazz: Class[_ <: T]): TypeTag[T] = {
// Mirror for the class's own loader so the symbol lookup resolves correctly.
val clazzMirror = runtimeMirror(clazz.getClassLoader)
val tpe = clazzMirror.classSymbol(clazz).toType
// TypeCreator re-materializes `tpe` in whichever universe asks for it; this one
// only supports the mirror the tag was created with.
val typeCreator = new TypeCreator() {
def apply[U <: scala.reflect.api.Universe with scala.Singleton](
m: scala.reflect.api.Mirror[U]
): U#Type = {
if (clazzMirror != m) throw new RuntimeException("wrong mirror")
else tpe.asInstanceOf[U#Type]
}
}
TypeTag[T](clazzMirror, typeCreator)
}
/**
* Return the runtime class from a given [[TypeTag]].
* =Usage=
* {{{
* val clazz: Class[T] = Types.runtimeClass[T]
* }}}
*
* or pass in the implict TypeTag explicitly:
*
* {{{
* val tag: TypeTag[T] = ???
* val clazz: Class[T] = Types.runtimeClass(tag)
* }}}
*
*
* @note the given [[TypeTag]] must be from the runtime universe otherwise an
* [[IllegalArgumentException]] will be thrown.
*
* @tparam T the [[TypeTag]] and expected class type of the returned class.
* @return the runtime class of the given [[TypeTag]].
*/
def runtimeClass[T](implicit tag: TypeTag[T]): Class[T] = {
val clazzMirror = runtimeMirror(getClass.getClassLoader)
// Tags created by a non-runtime universe (e.g. a macro universe) cannot be resolved here.
if (clazzMirror != tag.mirror) {
throw new IllegalArgumentException("TypeTag is not from runtime universe.")
}
tag.mirror.runtimeClass(tag.tpe).asInstanceOf[Class[T]]
}
/**
* If `thisTypeTag` [[TypeTag]] has the same TypeSymbol as `thatTypeTag` [[TypeTag]].
*
* @param thatTypeTag [[TypeTag]] to compare
* @param thisTypeTag [[TypeTag]] to compare
* @tparam T type of thisTypeTag
* @return true if the TypeSymbols are equivalent.
* @see [[scala.reflect.api.Symbols]]
*/
def equals[T](thatTypeTag: TypeTag[_])(implicit thisTypeTag: TypeTag[T]): Boolean =
thisTypeTag.tpe.typeSymbol == thatTypeTag.tpe.typeSymbol // symbol identity only; type arguments are not compared
/**
* If the given [[java.lang.reflect.Type]] is parameterized, return an Array of the
* type parameter names. E.g., `Map[T, U]` returns, `Array("T", "U")`.
*
* The use case is when we are trying to match the given type's type parameters to
* a set of bindings that are stored keyed by the type name, e.g. if the given type
* is a `Map[K, V]` we want to be able to look up the binding for the key `K` at runtime
* during reflection operations e.g., if the K type is bound to a String we want to
* be able use that when further processing this type. This type of operation most typically
* happens with Jackson reflection handling of case classes, which is a specialized case
* and thus the utility of this method may not be broadly applicable and therefore is limited
* in visibility.
*/
/**
 * Names of the type parameters/arguments carried by a [[java.lang.reflect.Type]]:
 * actual argument names for a parameterized type, the variable's own name for a
 * type variable, declared parameter names for a raw class, and empty otherwise.
 */
private[twitter] def parameterizedTypeNames(javaType: JavaType): Array[String] = {
  javaType match {
    case parameterized: ParameterizedType =>
      parameterized.getActualTypeArguments.map(_.getTypeName)
    case variable: TypeVariable[_] =>
      Array(variable.getTypeName)
    case clazz: Class[_] =>
      clazz.getTypeParameters.map(_.getName)
    case _ =>
      Array.empty[String]
  }
}
}
| twitter/util | util-reflect/src/main/scala/com/twitter/util/reflect/Types.scala | Scala | apache-2.0 | 4,678 |
package chrome.storage.bindings
import chrome.events.bindings.Event
import scala.scalajs.js.annotation.JSName
import scala.scalajs.js
/**
 * Facade for the StorageChange objects delivered to `chrome.storage.onChanged` listeners.
 * Fixed: a facade whose members are `= js.native` must itself be annotated `@js.native`
 * (deprecated without it since Scala.js 0.6.13, a compile error in Scala.js 1.x).
 */
@js.native
class StorageChange extends js.Object {
  val oldValue: js.UndefOr[js.Any] = js.native // previous value, if any
  val newValue: js.UndefOr[js.Any] = js.native // new value, if any
}
/**
 * Facade for a `chrome.storage` StorageArea (sync/local/managed).
 * Fixed: added the required `@js.native` annotation (see StorageChange note); all
 * members are implemented by the browser.
 */
@js.native
class StorageArea extends js.Object {
  def get(keys: js.UndefOr[js.Any] = js.undefined, callback: js.Function1[Map[String, js.Any], _]): Unit = js.native
  def getBytesInUse(keys: js.UndefOr[js.Any] = js.undefined, callback: js.Function1[Int, _]): Unit = js.native
  def set(items: Map[String, js.Any], callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def remove(keys: js.Any, callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def clear(callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
}
/**
 * Facade for the global `chrome.storage` API object.
 * Fixed: added the required `@js.native` annotation.
 * NOTE(review): Scala.js 1.x replaces @JSName on top-level objects with @JSGlobal;
 * @JSName is kept here for compatibility with the 0.6.x toolchain this file targets.
 */
@js.native
@JSName("chrome.storage")
object Storage extends js.Object {
  val onChanged: Event[js.Function2[Map[String, StorageChange], String, _]] = js.native
  val sync: StorageArea = js.native
  val local: StorageArea = js.native
  val managed: StorageArea = js.native
}
| amsayk/scala-js-chrome | bindings/src/main/scala/chrome/storage/bindings/Storage.scala | Scala | mit | 1,106 |
class A { def m() { var x = new (,) } }
object Main { def main(args: Array[String]) { } }
| tobast/compil-petitscala | tests/syntax/bad/testfile-expr10-1.scala | Scala | gpl-3.0 | 90 |
/**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.spark.ethereum
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.zuinnote.hadoop.ethereum.format.mapreduce._
/**
* Author: Jörn Franke <zuinnote@gmail.com>
*
*/
package object block {
/**
 * Adds a method, `ethereumBlockFile`, to SQLContext that allows reading Ethereum blockchain data as Ethereum blocks.
 */
implicit class EthereumBlockContext(sqlContext: SQLContext) extends Serializable {
/**
 * Loads Ethereum blockchain data as a DataFrame of blocks.
 *
 * @param filePath path to the blockchain file(s)
 * @param maxBlockSize maximum size in bytes of a single Ethereum block (library default)
 * @param useDirectBuffer whether to parse using direct byte buffers (library default)
 */
def ethereumBlockFile(
filePath: String,
maxBlockSize: Integer = AbstractEthereumRecordReader.DEFAULT_MAXSIZE_ETHEREUMBLOCK,
useDirectBuffer: Boolean = AbstractEthereumRecordReader.DEFAULT_USEDIRECTBUFFER
): DataFrame = {
val ethereumBlockRelation = EthereumBlockRelation(filePath, maxBlockSize, useDirectBuffer)(sqlContext)
sqlContext.baseRelationToDataFrame(ethereumBlockRelation)
}
}
}
| ZuInnoTe/spark-hadoopcryptoledger-ds | src/main/scala/org/zuinnote/spark/ethereum/block/package.scala | Scala | apache-2.0 | 1,527 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geotools
import com.vividsolutions.jts.geom._
import org.geotools.geometry.jts.JTSFactoryFinder
import org.geotools.referencing.GeodeticCalculator
/**
* The object provides convenience methods for common operations on geometries.
*/
object GeometryUtils {
val geoFactory = JTSFactoryFinder.getGeometryFactory // shared JTS geometry factory
val zeroPoint = geoFactory.createPoint(new Coordinate(0,0)) // point at the origin (0, 0)
/** Convert meters to dec degrees based on widest point in dec degrees of circles at bounding box corners */
def distanceDegrees(geometry: Geometry, meters: Double): Double = {
geometry match {
// For a point, measure directly against the farthest point `meters` away.
case p: Point => geometry.distance(farthestPoint(p, meters))
// For anything else, fall back to the envelope-corner estimate.
case _ => distanceDegrees(geometry.getEnvelopeInternal, meters)
}
}
/** Convert meters to dec degrees based on widest point in dec degrees of circles at envelope corners */
def distanceDegrees(env: Envelope, meters: Double): Double =
// Evaluate at all four envelope corners and take the widest result.
List(
distanceDegrees(geoFactory.createPoint(new Coordinate(env.getMaxX, env.getMaxY)), meters),
distanceDegrees(geoFactory.createPoint(new Coordinate(env.getMaxX, env.getMinY)), meters),
distanceDegrees(geoFactory.createPoint(new Coordinate(env.getMinX, env.getMinY)), meters),
distanceDegrees(geoFactory.createPoint(new Coordinate(env.getMinX, env.getMaxY)), meters)
).max
/** Farthest point based on widest point in dec degrees of circle */
def farthestPoint(startPoint: Point, meters: Double) = {
// Travel `meters` due east (azimuth 90) geodetically from the start point.
val calc = new GeodeticCalculator()
calc.setStartingGeographicPoint(startPoint.getX, startPoint.getY)
calc.setDirection(90, meters)
val dest2D = calc.getDestinationGeographicPoint
geoFactory.createPoint(new Coordinate(dest2D.getX, dest2D.getY))
}
/**
 * Classic unfold: repeatedly applies `f` to a state, collecting the produced elements
 * until `f` returns None.
 *
 * Fixed: the previous implementation recursed on the call stack and could overflow for
 * long expansions; this version uses an explicit accumulator with @tailrec.
 *
 * @param seed initial state
 * @param f    step function returning the next element and state, or None to stop
 * @return the elements in production order
 */
def unfoldRight[A, B](seed: B)(f: B => Option[(A, B)]): List[A] = {
  @scala.annotation.tailrec
  def loop(state: B, acc: List[A]): List[A] = f(state) match {
    case None => acc.reverse
    case Some((a, next)) => loop(next, a :: acc)
  }
  loop(seed, Nil)
}
/** Adds way points to Seq[Coordinates] so that they remain valid with Spatial4j, useful for BBOX */
def addWayPoints(coords: Seq[Coordinate]): List[Coordinate] =
unfoldRight(coords) {
case Seq() => None
case Seq(pt) => Some((pt, Seq()))
// When consecutive points are more than 120 degrees of longitude apart, insert an
// intermediate point 120 degrees along so no segment exceeds that span.
case Seq(first, second, rest @ _*) => second.x - first.x match {
case dx if dx > 120 =>
Some((first, new Coordinate(first.x + 120, first.y) +: second +: rest))
case dx if dx < -120 =>
Some((first, new Coordinate(first.x - 120, first.y) +: second +: rest))
case _ => Some((first, second +: rest))
}
}
/**
 * Returns the rough bounds of a geometry
 *
 * @param geometry geometry
 * @return (xmin, ymin, xmax, ymax)
 */
def bounds(geometry: Geometry): (Double, Double, Double, Double) = {
// Delegates to the geometry's internal (cached) envelope.
val env = geometry.getEnvelopeInternal
(env.getMinX, env.getMinY, env.getMaxX, env.getMaxY)
}
/**
 * Evaluates the complexity of a geometry. Will return true if the geometry is a point or
 * a rectangular polygon without interior holes.
 *
 * @param geometry geometry
 * @return
 */
def isRectangular(geometry: Geometry): Boolean = geometry match {
case _: Point => true
// Rectangular iff: no holes, every vertex on the envelope edge, all axis-aligned segments.
case p: Polygon => noInteriorRings(p) && noCutouts(p) && allRightAngles(p)
case _ => false
}
// checks that there are no interior holes
private def noInteriorRings(p: Polygon): Boolean = p.getNumInteriorRing == 0
// checks that all points are on the exterior envelope of the polygon
private def noCutouts(p: Polygon): Boolean = {
val (xmin, ymin, xmax, ymax) = {
val env = p.getEnvelopeInternal
(env.getMinX, env.getMinY, env.getMaxX, env.getMaxY)
}
// every vertex must lie on one of the four envelope edges
p.getCoordinates.forall(c => c.x == xmin || c.x == xmax || c.y == ymin || c.y == ymax)
}
// checks that there aren't any angled lines
private def allRightAngles(p: Polygon): Boolean =
p.getCoordinates.sliding(2).forall { case Array(left, right) => left.x == right.x || left.y == right.y }
/**
 * Whether the segment between the two points crosses the International Date Line,
 * detected as a longitude difference of at least 180 degrees.
 *
 * @param point1 first endpoint of the segment
 * @param point2 second endpoint of the segment
 * @return true when the segment crosses the IDL
 */
def crossesIDL(point1:Coordinate, point2:Coordinate): Boolean = {
  val lonSpan = point1.x - point2.x
  math.abs(lonSpan) >= 180
}
/**
 * Latitude at which a segment crosses the International Date Line. The caller must
 * have already established (e.g. via crossesIDL) that the segment does cross it.
 *
 * The far endpoint is shifted by 360 degrees of longitude so the segment becomes
 * continuous, then the crossing latitude is computed at +/-180.
 *
 * @param point1 first endpoint of the segment
 * @param point2 second endpoint of the segment
 * @return latitude of the IDL crossing
 */
def calcIDLIntercept(point1: Coordinate, point2: Coordinate): Double = {
  val (unwrapped, crossLon) =
    if (point1.x < 0) (new Coordinate(point2.x - 360, point2.y), -180d)
    else (new Coordinate(point2.x + 360, point2.y), 180d)
  calcCrossLat(point1, unwrapped, crossLon)
}
/**
 * Latitude at which the line through the two points reaches the given longitude,
 * by simple linear interpolation (slope-intercept form).
 *
 * @param point1 first point defining the line
 * @param point2 second point defining the line
 * @param crossLon longitude at which to evaluate the line
 * @return latitude of the line at crossLon
 */
def calcCrossLat(point1: Coordinate, point2: Coordinate, crossLon: Double): Double = {
  val slope = (point1.y - point2.y) / (point1.x - point2.x)
  val intercept = point1.y - slope * point1.x
  slope * crossLon + intercept
}
}
| jahhulbert-ccri/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/GeometryUtils.scala | Scala | apache-2.0 | 5,904 |
package org.apache.spark.ml.tuning
import com.github.fommil.netlib.F2jBLAS
import org.apache.spark.annotation.Experimental
import org.apache.spark.ml.evaluation.Evaluator
import org.apache.spark.ml.param._
import org.apache.spark.ml.util.Identifiable
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.types.StructType
/**
 * Params for [[Benchmarker]] and [[BenchmarkModel]].
 *
 * The estimator, candidate parameter maps and evaluator come from
 * [[ValidatorParams]]; this trait only adds the repetition count.
 */
private[ml] trait BenchmarkerParams extends ValidatorParams {

  /**
   * Param for number of times for benchmark. Must be >= 1.
   * Default: 1
   * @group param
   */
  val numTimes: IntParam = new IntParam(this, "numTimes",
    "number of times for benchmark (>= 1)", ParamValidators.gtEq(1))

  /** @group getParam */
  def getNumTimes: Int = $(numTimes)

  // A single repetition by default; callers raise this to smooth out timing noise.
  setDefault(numTimes -> 1)
}
/**
 * :: Experimental ::
 * Benchmark estimator pipelines: fits every candidate [[ParamMap]] `numTimes` times,
 * records average training and evaluation wall-clock runtimes (milliseconds), and
 * returns the model trained with the fastest (by training time) parameter set.
 */
@Experimental
class Benchmarker(override val uid: String) extends Estimator[BenchmarkModel]
  with BenchmarkerParams {

  def this() = this(Identifiable.randomUID("benchmark"))

  private val f2jBLAS = new F2jBLAS

  /** @group setParam */
  def setEstimator(value: Estimator[_]): this.type = set(estimator, value)

  /** @group setParam */
  def setEstimatorParamMaps(value: Array[ParamMap]): this.type = set(estimatorParamMaps, value)

  /** @group setParam */
  def setEvaluator(value: Evaluator): this.type = set(evaluator, value)

  /** @group setParam */
  def setNumTimes(value: Int): this.type = set(numTimes, value)

  /**
   * Fits every candidate parameter map `numTimes` times, timing both the `fit` call and
   * the subsequent `transform` + `evaluate` pass, then wraps the fastest-trained model
   * in a [[BenchmarkModel]] together with the per-candidate average runtimes.
   */
  override def fit(dataset: Dataset[_]): BenchmarkModel = {
    val schema = dataset.schema
    transformSchema(schema, logging = true)
    val est = $(estimator)
    val eval = $(evaluator)
    val epm = $(estimatorParamMaps)
    val numModels = epm.length
    val models = new Array[Model[_]](numModels)
    // Millisecond totals accumulated across repetitions; averaged in place below.
    val trainingRuntimes = new Array[Double](numModels)
    val evaluationRuntimes = new Array[Double](numModels)
    (1 to getNumTimes).foreach { index =>
      // multi-model training
      logDebug(s"Train $index times with multiple sets of parameters.")
      var i = 0
      while (i < numModels) {
        var tic = System.currentTimeMillis()
        models(i) = est.fit(dataset, epm(i)).asInstanceOf[Model[_]]
        trainingRuntimes(i) += System.currentTimeMillis() - tic

        tic = System.currentTimeMillis()
        val metric = eval.evaluate(models(i).transform(dataset, epm(i)))
        evaluationRuntimes(i) += System.currentTimeMillis() - tic

        logDebug(s"Got metric $metric for model trained with ${epm(i)}.")
        i += 1
      }
    }
    // Scale the accumulated totals by 1/numTimes in place to obtain averages.
    f2jBLAS.dscal(numModels, 1.0 / $(numTimes), trainingRuntimes, 1)
    f2jBLAS.dscal(numModels, 1.0 / $(numTimes), evaluationRuntimes, 1)
    logInfo(s"Average training runtimes: ${trainingRuntimes.toSeq}")
    logInfo(s"Average evaluation runtimes: ${evaluationRuntimes.toSeq}")
    // "Fastest" is judged on average training runtime only.
    val (fastestRuntime, fastestIndex) = trainingRuntimes.zipWithIndex.minBy(_._1)
    logInfo(s"Fastest set of parameters:\\n${epm(fastestIndex)}")
    logInfo(s"Fastest training runtime: $fastestRuntime.")
    copyValues(new BenchmarkModel(uid, models(fastestIndex), trainingRuntimes, evaluationRuntimes).setParent(this))
  }

  override def transformSchema(schema: StructType): StructType = {
    validateParams()
    $(estimator).transformSchema(schema)
  }

  /** Sanity check: every candidate ParamMap must be applicable to the estimator. */
  def validateParams(): Unit = {
    val est = $(estimator)
    for (paramMap <- $(estimatorParamMaps)) {
      est.copy(paramMap)
    }
  }

  override def copy(extra: ParamMap): Benchmarker = {
    val copied = defaultCopy(extra).asInstanceOf[Benchmarker]
    if (copied.isDefined(estimator)) {
      copied.setEstimator(copied.getEstimator.copy(extra))
    }
    if (copied.isDefined(evaluator)) {
      copied.setEvaluator(copied.getEvaluator.copy(extra))
    }
    copied
  }
}
/**
 * :: Experimental ::
 * Result of a [[Benchmarker]] run: the model trained with the fastest parameter set,
 * plus per-candidate average training/evaluation runtimes (milliseconds).
 */
@Experimental
class BenchmarkModel private[ml](
    override val uid: String,
    val fastestModel: Model[_],
    val avgTrainingRuntimes: Array[Double],
    val avgEvaluationRuntimes: Array[Double])
  extends Model[BenchmarkModel] with BenchmarkerParams {

  /** Delegates transformation to the wrapped fastest model. */
  override def transform(dataset: Dataset[_]): DataFrame = {
    transformSchema(dataset.schema, logging = true)
    fastestModel.transform(dataset)
  }

  /** The output schema is whatever the fastest model produces. */
  override def transformSchema(schema: StructType): StructType =
    fastestModel.transformSchema(schema)

  /** Deep copy: clones the wrapped model and both runtime arrays. */
  override def copy(extra: ParamMap): BenchmarkModel = {
    val duplicate = new BenchmarkModel(
      uid,
      fastestModel.copy(extra).asInstanceOf[Model[_]],
      avgTrainingRuntimes.clone(),
      avgEvaluationRuntimes.clone())
    copyValues(duplicate, extra).setParent(parent)
  }
}
| saurfang/spark-knn | spark-knn-examples/src/main/scala/org/apache/spark/ml/tuning/Benchmarker.scala | Scala | apache-2.0 | 4,878 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, RowOrdering, SortOrder}
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.util.CompletionIterator
import org.apache.spark.util.collection.ExternalSorter
/**
 * A reference sort implementation used to compare against our normal sort.
 *
 * Sorts each partition with a spillable [[ExternalSorter]] using a row ordering
 * built from `sortOrder`. Test-only: correctness reference, not performance.
 */
case class ReferenceSort(
    sortOrder: Seq[SortOrder],
    global: Boolean,
    child: SparkPlan)
  extends UnaryExecNode {

  // A global sort requires rows range-partitioned by the sort key; otherwise any distribution works.
  override def requiredChildDistribution: Seq[Distribution] =
    if (global) OrderedDistribution(sortOrder) :: Nil else UnspecifiedDistribution :: Nil

  protected override def doExecute(): RDD[InternalRow] = {
    child.execute().mapPartitions( { iterator =>
      val ordering = RowOrdering.create(sortOrder, child.output)
      val sorter = new ExternalSorter[InternalRow, Null, InternalRow](
        TaskContext.get(), ordering = Some(ordering))
      // Rows must be copied: the upstream iterator may reuse a single mutable row object.
      sorter.insertAll(iterator.map(r => (r.copy(), null)))
      val baseIterator = sorter.iterator.map(_._1)
      val context = TaskContext.get()
      // Propagate the sorter's spill/memory accounting into this task's metrics.
      context.taskMetrics().incDiskBytesSpilled(sorter.diskBytesSpilled)
      context.taskMetrics().incMemoryBytesSpilled(sorter.memoryBytesSpilled)
      context.taskMetrics().incPeakExecutionMemory(sorter.peakMemoryUsedBytes)
      // Release sorter resources once the output iterator is fully consumed.
      CompletionIterator[InternalRow, Iterator[InternalRow]](baseIterator, sorter.stop())
    }, preservesPartitioning = true)
  }

  override def output: Seq[Attribute] = child.output

  override def outputOrdering: Seq[SortOrder] = sortOrder

  override def outputPartitioning: Partitioning = child.outputPartitioning

  override protected def withNewChildInternal(newChild: SparkPlan): ReferenceSort =
    copy(child = newChild)
}
| mahak/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/ReferenceSort.scala | Scala | apache-2.0 | 2,677 |
package com.sksamuel.elastic4s
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import org.scalatest.Matchers
import org.scalatest.matchers.{Matcher, MatchResult}
/**
 * Mix-in providing ScalaTest matchers that compare strings as parsed JSON trees,
 * so formatting and key-order differences are ignored.
 */
trait JsonSugar extends Matchers {

  protected val mapper = new ObjectMapper with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)

  /** Matches when the actual string parses to the same JSON tree as the given classpath resource. */
  def matchJsonResource(resourceName: String) = new JsonResourceMatcher(resourceName)

  /** Matches when the actual string parses to the same JSON tree as `right`. */
  def matchJson(right: String) = new Matcher[String] {
    override def apply(left: String): MatchResult = {
      withClue(s"expected JSON [$right] ") {
        right should not be null
      }

      // Bug fix: the expected tree must be parsed from `right` (the expected JSON),
      // not from `left`; the original parsed `left` twice, so the matcher compared
      // the actual JSON against itself and could never fail.
      val expectedJson = mapper.readTree(right)
      val actualJson = mapper.readTree(left)

      MatchResult(
        expectedJson == actualJson,
        s"$actualJson did not match [$right]: $expectedJson",
        s"$actualJson did match [$right]: $expectedJson"
      )
    }
  }

  /** Matcher comparing actual JSON against a document loaded from the classpath. */
  class JsonResourceMatcher(resourceName: String) extends Matcher[String] {
    override def apply(left: String): MatchResult = {
      val jsonResource = getClass.getResource(resourceName)

      withClue(s"expected JSON resource [$resourceName] ") {
        jsonResource should not be null
      }

      val expectedJson = mapper.readTree(jsonResource)
      val actualJson = mapper.readTree(left)

      MatchResult(
        expectedJson == actualJson,
        s"$actualJson did not match resource [$resourceName]: $expectedJson",
        s"$actualJson did match resource [$resourceName]: $expectedJson"
      )
    }
  }
}
| ulric260/elastic4s | elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/JsonSugar.scala | Scala | apache-2.0 | 1,665 |
/*
* Copyright (C) 2014 - 2020 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.dfasdl.utils
/**
 * An enumeration of possible element types.
 */
object ElementType extends Enumeration {
  type ElementType = Value

  // Declaration order fixes the ids (0 through 4), matching the original definition.
  val DataElement: ElementType       = Value
  val ExpressionElement: ElementType = Value
  val RootElement: ElementType       = Value
  val StructuralElement: ElementType = Value
  val UnknownElement: ElementType    = Value
}
| DFASDL/dfasdl-utils | src/main/scala/org/dfasdl/utils/ElementType.scala | Scala | agpl-3.0 | 997 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2017-2021
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.pipeline
import io.techcode.streamy.StreamyTestSystem
import org.scalatest.concurrent.Eventually.eventually
/**
 * Pipeline manager spec.
 *
 * Smoke test: constructing the manager against the test actor system must not throw;
 * teardown is handled by [[StreamyTestSystem]].
 */
class PipelineManagerSpec extends StreamyTestSystem {

  "Pipeline manager" should {
    "be started and stopped" in {
      // Obtaining the manager for `system` is the whole assertion: any failure throws.
      PipelineManager(system)
    }
  }
}
| amannocci/streamy | core/src/test/scala/io/techcode/streamy/pipeline/PipelineManagerSpec.scala | Scala | mit | 1,489 |
package io.getquill.context.cassandra.encoding
import io.getquill.MappedEncoding
/**
 * Implicit derivations of [[CassandraMapper]] from user-supplied [[MappedEncoding]]s.
 * High-priority rules: identity for already-supported types, then single-step
 * encode/decode lifts. Recursive (multi-step) lifts live in the low-priority trait.
 */
trait CassandraMapperConversions extends CassandraMapperConversionsLowPriorityImplicits {
  // A value whose type Cassandra supports directly maps to itself.
  implicit def cassandraIdentityMapper[Cas](implicit cas: CassandraType[Cas]): CassandraMapper[Cas, Cas] =
    CassandraMapper(identity)

  // Encode: lift a MappedEncoding T => Cas into a CassandraMapper[T, Cas].
  implicit def cassandraMapperEncode[T, Cas](
    implicit
    m: MappedEncoding[T, Cas],
    cas: CassandraType[Cas]
  ): CassandraMapper[T, Cas] = CassandraMapper(m.f)

  // Decode: lift a MappedEncoding Cas => T into a CassandraMapper[Cas, T].
  implicit def cassandraMapperDecode[T, Cas](
    implicit
    m: MappedEncoding[Cas, T],
    cas: CassandraType[Cas]
  ): CassandraMapper[Cas, T] = CassandraMapper(m.f)
}
/**
 * Low-priority recursive derivations: compose a user [[MappedEncoding]] with an
 * existing [[CassandraMapper]], so chains of encodings (I => O => Cas) resolve
 * without ambiguity against the direct rules above.
 */
trait CassandraMapperConversionsLowPriorityImplicits {
  // Encode recursively: first apply the encoding I => O, then the mapper O => Cas.
  implicit def cassandraMapperEncodeRec[I, O, Cas](
    implicit
    me: MappedEncoding[I, O],
    cm: CassandraMapper[O, Cas]
  ): CassandraMapper[I, Cas] = CassandraMapper(me.f.andThen(cm.f))

  // Decode recursively: first map Cas => I, then apply the encoding I => O.
  implicit def cassandraMapperDecodeRec[I, O, Cas](
    implicit
    m: MappedEncoding[I, O],
    cm: CassandraMapper[Cas, I]
  ): CassandraMapper[Cas, O] = CassandraMapper(cm.f.andThen(m.f))
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.io.File
import java.net.URL
import java.nio.ByteBuffer
import java.util.Locale
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable
import scala.util.{Failure, Success}
import scala.util.control.NonFatal
import org.json4s.DefaultFormats
import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.worker.WorkerWatcher
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.resource.ResourceInformation
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.resource.ResourceProfile._
import org.apache.spark.resource.ResourceUtils._
import org.apache.spark.rpc._
import org.apache.spark.scheduler.{ExecutorLossReason, TaskDescription}
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.serializer.SerializerInstance
import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, SignalUtils, ThreadUtils, Utils}
/**
 * Executor-side RPC endpoint for coarse-grained scheduling: registers with the driver,
 * launches and kills tasks on request, reports task status updates back to the driver,
 * and handles shutdown and SIGPWR-triggered decommissioning.
 */
private[spark] class CoarseGrainedExecutorBackend(
    override val rpcEnv: RpcEnv,
    driverUrl: String,
    executorId: String,
    bindAddress: String,
    hostname: String,
    cores: Int,
    userClassPath: Seq[URL],
    env: SparkEnv,
    resourcesFileOpt: Option[String],
    resourceProfile: ResourceProfile)
  extends IsolatedRpcEndpoint with ExecutorBackend with Logging {

  import CoarseGrainedExecutorBackend._

  private implicit val formats = DefaultFormats

  // Set once a stop has been requested, so a later driver disconnect is not treated as a failure.
  private[this] val stopping = new AtomicBoolean(false)
  var executor: Executor = null
  // True after decommissioning started; task launches received afterwards are reported back.
  @volatile private var decommissioned = false
  @volatile var driver: Option[RpcEndpointRef] = None

  // If this CoarseGrainedExecutorBackend is changed to support multiple threads, then this may need
  // to be changed so that we don't share the serializer instance across threads
  private[this] val ser: SerializerInstance = env.closureSerializer.newInstance()

  // Resources discovered (or read from resourcesFileOpt) during onStart; sent with registration.
  private var _resources = Map.empty[String, ResourceInformation]

  /**
   * Map each taskId to the information about the resource allocated to it, Please refer to
   * [[ResourceInformation]] for specifics.
   * Exposed for testing only.
   */
  private[executor] val taskResources = new mutable.HashMap[Long, Map[String, ResourceInformation]]

  /**
   * Registers the SIGPWR decommission handler, discovers resources, and asynchronously
   * registers this executor with the driver; any failure exits the process.
   */
  override def onStart(): Unit = {
    logInfo("Registering PWR handler.")
    SignalUtils.register("PWR", "Failed to register SIGPWR handler - " +
      "disabling decommission feature.")(decommissionSelf)

    logInfo("Connecting to driver: " + driverUrl)
    try {
      _resources = parseOrFindResources(resourcesFileOpt)
    } catch {
      case NonFatal(e) =>
        exitExecutor(1, "Unable to create executor due to " + e.getMessage, e)
    }
    rpcEnv.asyncSetupEndpointRefByURI(driverUrl).flatMap { ref =>
      // This is a very fast action so we can use "ThreadUtils.sameThread"
      driver = Some(ref)
      ref.ask[Boolean](RegisterExecutor(executorId, self, hostname, cores, extractLogUrls,
        extractAttributes, _resources, resourceProfile.id))
    }(ThreadUtils.sameThread).onComplete {
      case Success(_) =>
        // Registration acknowledged: finish initialization on the endpoint's own thread.
        self.send(RegisteredExecutor)
      case Failure(e) =>
        exitExecutor(1, s"Cannot register with driver: $driverUrl", e, notifyDriver = false)
    }(ThreadUtils.sameThread)
  }

  /**
   * Create a classLoader for use for resource discovery. The user could provide a class
   * as a substitute for the default one so we have to be able to load it from a user specified
   * jar.
   */
  private def createClassLoader(): MutableURLClassLoader = {
    val currentLoader = Utils.getContextOrSparkClassLoader
    val urls = userClassPath.toArray
    if (env.conf.get(EXECUTOR_USER_CLASS_PATH_FIRST)) {
      new ChildFirstURLClassLoader(urls, currentLoader)
    } else {
      new MutableURLClassLoader(urls, currentLoader)
    }
  }

  // visible for testing
  def parseOrFindResources(resourcesFileOpt: Option[String]): Map[String, ResourceInformation] = {
    // use a classloader that includes the user classpath in case they specified a class for
    // resource discovery
    val urlClassLoader = createClassLoader()
    logDebug(s"Resource profile id is: ${resourceProfile.id}")
    Utils.withContextClassLoader(urlClassLoader) {
      val resources = getOrDiscoverAllResourcesForResourceProfile(
        resourcesFileOpt,
        SPARK_EXECUTOR_PREFIX,
        resourceProfile,
        env.conf)
      logResourceInfo(SPARK_EXECUTOR_PREFIX, resources)
      resources
    }
  }

  // Collects SPARK_LOG_URL_* environment variables, lower-casing the suffix as the key.
  def extractLogUrls: Map[String, String] = {
    val prefix = "SPARK_LOG_URL_"
    sys.env.filterKeys(_.startsWith(prefix))
      .map(e => (e._1.substring(prefix.length).toLowerCase(Locale.ROOT), e._2))
  }

  // Collects SPARK_EXECUTOR_ATTRIBUTE_* environment variables, upper-casing the suffix.
  def extractAttributes: Map[String, String] = {
    val prefix = "SPARK_EXECUTOR_ATTRIBUTE_"
    sys.env.filterKeys(_.startsWith(prefix))
      .map(e => (e._1.substring(prefix.length).toUpperCase(Locale.ROOT), e._2))
  }

  // Message loop: driver commands arrive here one at a time (isolated endpoint).
  override def receive: PartialFunction[Any, Unit] = {
    case RegisteredExecutor =>
      logInfo("Successfully registered with driver")
      try {
        executor = new Executor(executorId, hostname, env, userClassPath, isLocal = false,
          resources = _resources)
        driver.get.send(LaunchedExecutor(executorId))
      } catch {
        case NonFatal(e) =>
          exitExecutor(1, "Unable to create executor due to " + e.getMessage, e)
      }

    case LaunchTask(data) =>
      if (executor == null) {
        exitExecutor(1, "Received LaunchTask command but executor was null")
      } else {
        if (decommissioned) {
          // The task is still launched below; the driver is only reminded of decommissioning.
          logError("Asked to launch a task while decommissioned.")
          driver match {
            case Some(endpoint) =>
              logInfo("Sending DecommissionExecutor to driver.")
              endpoint.send(DecommissionExecutor(executorId))
            case _ =>
              logError("No registered driver to send Decommission to.")
          }
        }
        val taskDesc = TaskDescription.decode(data.value)
        logInfo("Got assigned task " + taskDesc.taskId)
        // Remember the task's resource assignment so statusUpdate can report it back.
        taskResources(taskDesc.taskId) = taskDesc.resources
        executor.launchTask(this, taskDesc)
      }

    case KillTask(taskId, _, interruptThread, reason) =>
      if (executor == null) {
        exitExecutor(1, "Received KillTask command but executor was null")
      } else {
        executor.killTask(taskId, interruptThread, reason)
      }

    case StopExecutor =>
      stopping.set(true)
      logInfo("Driver commanded a shutdown")
      // Cannot shutdown here because an ack may need to be sent back to the caller. So send
      // a message to self to actually do the shutdown.
      self.send(Shutdown)

    case Shutdown =>
      stopping.set(true)
      new Thread("CoarseGrainedExecutorBackend-stop-executor") {
        override def run(): Unit = {
          // executor.stop() will call `SparkEnv.stop()` which waits until RpcEnv stops totally.
          // However, if `executor.stop()` runs in some thread of RpcEnv, RpcEnv won't be able to
          // stop until `executor.stop()` returns, which becomes a dead-lock (See SPARK-14180).
          // Therefore, we put this line in a new thread.
          executor.stop()
        }
      }.start()

    case UpdateDelegationTokens(tokenBytes) =>
      logInfo(s"Received tokens of ${tokenBytes.length} bytes")
      SparkHadoopUtil.get.addDelegationTokens(tokenBytes, env.conf)
  }

  // A driver disconnect during normal operation is fatal; during shutdown it is expected.
  override def onDisconnected(remoteAddress: RpcAddress): Unit = {
    if (stopping.get()) {
      logInfo(s"Driver from $remoteAddress disconnected during shutdown")
    } else if (driver.exists(_.address == remoteAddress)) {
      exitExecutor(1, s"Driver $remoteAddress disassociated! Shutting down.", null,
        notifyDriver = false)
    } else {
      logWarning(s"An unknown ($remoteAddress) driver disconnected.")
    }
  }

  /**
   * Forwards a task's state change (with its resource assignment) to the driver,
   * releasing the resource bookkeeping once the task reaches a terminal state.
   */
  override def statusUpdate(taskId: Long, state: TaskState, data: ByteBuffer): Unit = {
    val resources = taskResources.getOrElse(taskId, Map.empty[String, ResourceInformation])
    val msg = StatusUpdate(executorId, taskId, state, data, resources)
    if (TaskState.isFinished(state)) {
      taskResources.remove(taskId)
    }
    driver match {
      case Some(driverRef) => driverRef.send(msg)
      case None => logWarning(s"Drop $msg because has not yet connected to driver")
    }
  }

  /**
   * This function can be overloaded by other child classes to handle
   * executor exits differently. For e.g. when an executor goes down,
   * back-end may not want to take the parent process down.
   */
  protected def exitExecutor(code: Int,
                             reason: String,
                             throwable: Throwable = null,
                             notifyDriver: Boolean = true) = {
    val message = "Executor self-exiting due to : " + reason
    if (throwable != null) {
      logError(message, throwable)
    } else {
      logError(message)
    }

    if (notifyDriver && driver.nonEmpty) {
      driver.get.send(RemoveExecutor(executorId, new ExecutorLossReason(reason)))
    }

    System.exit(code)
  }

  // SIGPWR handler: marks this executor decommissioned and notifies driver and executor.
  // Returns true when the signal was handled (even partially).
  private def decommissionSelf(): Boolean = {
    logInfo("Decommissioning self w/sync")
    try {
      decommissioned = true
      // Tell master we are are decommissioned so it stops trying to schedule us
      if (driver.nonEmpty) {
        driver.get.askSync[Boolean](DecommissionExecutor(executorId))
      } else {
        logError("No driver to message decommissioning.")
      }
      if (executor != null) {
        executor.decommission()
      }
      logInfo("Done decommissioning self.")
      // Return true since we are handling a signal
      true
    } catch {
      case e: Exception =>
        logError(s"Error ${e} during attempt to decommission self")
        false
    }
  }
}
/**
 * Companion: process entry point. Parses command-line arguments, bootstraps the
 * executor's SparkEnv from properties fetched from the driver, and registers the
 * backend endpoint with the RpcEnv.
 */
private[spark] object CoarseGrainedExecutorBackend extends Logging {

  // Message used internally to start the executor when the driver successfully accepted the
  // registration request.
  case object RegisteredExecutor

  // Parsed command-line configuration for one executor process.
  case class Arguments(
      driverUrl: String,
      executorId: String,
      bindAddress: String,
      hostname: String,
      cores: Int,
      appId: String,
      workerUrl: Option[String],
      userClassPath: mutable.ListBuffer[URL],
      resourcesFileOpt: Option[String],
      resourceProfileId: Int)

  def main(args: Array[String]): Unit = {
    val createFn: (RpcEnv, Arguments, SparkEnv, ResourceProfile) =>
      CoarseGrainedExecutorBackend = { case (rpcEnv, arguments, env, resourceProfile) =>
      new CoarseGrainedExecutorBackend(rpcEnv, arguments.driverUrl, arguments.executorId,
        arguments.bindAddress, arguments.hostname, arguments.cores, arguments.userClassPath, env,
        arguments.resourcesFileOpt, resourceProfile)
    }
    run(parseArguments(args, this.getClass.getCanonicalName.stripSuffix("$")), createFn)
    System.exit(0)
  }

  /**
   * Bootstraps the executor: fetches driver properties over a temporary RpcEnv,
   * creates the executor SparkEnv, registers the backend endpoint (built by
   * `backendCreateFn`), and blocks until the RpcEnv terminates.
   */
  def run(
      arguments: Arguments,
      backendCreateFn: (RpcEnv, Arguments, SparkEnv, ResourceProfile) =>
        CoarseGrainedExecutorBackend): Unit = {

    Utils.initDaemon(log)

    SparkHadoopUtil.get.runAsSparkUser { () =>
      // Debug code
      Utils.checkHost(arguments.hostname)

      // Bootstrap to fetch the driver's Spark properties.
      val executorConf = new SparkConf
      val fetcher = RpcEnv.create(
        "driverPropsFetcher",
        arguments.bindAddress,
        arguments.hostname,
        -1,
        executorConf,
        new SecurityManager(executorConf),
        numUsableCores = 0,
        clientMode = true)

      // Retry driver endpoint resolution a few times; rethrow only on the last attempt.
      var driver: RpcEndpointRef = null
      val nTries = 3
      for (i <- 0 until nTries if driver == null) {
        try {
          driver = fetcher.setupEndpointRefByURI(arguments.driverUrl)
        } catch {
          case e: Throwable => if (i == nTries - 1) {
            throw e
          }
        }
      }

      val cfg = driver.askSync[SparkAppConfig](RetrieveSparkAppConfig(arguments.resourceProfileId))
      val props = cfg.sparkProperties ++ Seq[(String, String)](("spark.app.id", arguments.appId))
      fetcher.shutdown()

      // Create SparkEnv using properties we fetched from the driver.
      val driverConf = new SparkConf()
      for ((key, value) <- props) {
        // this is required for SSL in standalone mode
        if (SparkConf.isExecutorStartupConf(key)) {
          driverConf.setIfMissing(key, value)
        } else {
          driverConf.set(key, value)
        }
      }

      cfg.hadoopDelegationCreds.foreach { tokens =>
        SparkHadoopUtil.get.addDelegationTokens(tokens, driverConf)
      }

      driverConf.set(EXECUTOR_ID, arguments.executorId)
      val env = SparkEnv.createExecutorEnv(driverConf, arguments.executorId, arguments.bindAddress,
        arguments.hostname, arguments.cores, cfg.ioEncryptionKey, isLocal = false)

      env.rpcEnv.setupEndpoint("Executor",
        backendCreateFn(env.rpcEnv, arguments, env, cfg.resourceProfile))
      arguments.workerUrl.foreach { url =>
        // Standalone mode: watch the worker so the executor dies with it (fate-sharing).
        env.rpcEnv.setupEndpoint("WorkerWatcher", new WorkerWatcher(env.rpcEnv, url))
      }
      env.rpcEnv.awaitTermination()
    }
  }

  /**
   * Parses the executor command line into [[Arguments]]; prints usage and exits
   * on unknown options or missing required values.
   */
  def parseArguments(args: Array[String], classNameForEntry: String): Arguments = {
    var driverUrl: String = null
    var executorId: String = null
    var bindAddress: String = null
    var hostname: String = null
    var cores: Int = 0
    var resourcesFileOpt: Option[String] = None
    var appId: String = null
    var workerUrl: Option[String] = None
    val userClassPath = new mutable.ListBuffer[URL]()
    var resourceProfileId: Int = DEFAULT_RESOURCE_PROFILE_ID

    var argv = args.toList
    while (!argv.isEmpty) {
      argv match {
        case ("--driver-url") :: value :: tail =>
          driverUrl = value
          argv = tail
        case ("--executor-id") :: value :: tail =>
          executorId = value
          argv = tail
        case ("--bind-address") :: value :: tail =>
          bindAddress = value
          argv = tail
        case ("--hostname") :: value :: tail =>
          hostname = value
          argv = tail
        case ("--cores") :: value :: tail =>
          cores = value.toInt
          argv = tail
        case ("--resourcesFile") :: value :: tail =>
          resourcesFileOpt = Some(value)
          argv = tail
        case ("--app-id") :: value :: tail =>
          appId = value
          argv = tail
        case ("--worker-url") :: value :: tail =>
          // Worker url is used in spark standalone mode to enforce fate-sharing with worker
          workerUrl = Some(value)
          argv = tail
        case ("--user-class-path") :: value :: tail =>
          userClassPath += new URL(value)
          argv = tail
        case ("--resourceProfileId") :: value :: tail =>
          resourceProfileId = value.toInt
          argv = tail
        case Nil =>
        case tail =>
          // scalastyle:off println
          System.err.println(s"Unrecognized options: ${tail.mkString(" ")}")
          // scalastyle:on println
          printUsageAndExit(classNameForEntry)
      }
    }

    if (hostname == null) {
      hostname = Utils.localHostName()
      log.info(s"Executor hostname is not provided, will use '$hostname' to advertise itself")
    }

    if (driverUrl == null || executorId == null || cores <= 0 || appId == null) {
      printUsageAndExit(classNameForEntry)
    }

    if (bindAddress == null) {
      bindAddress = hostname
    }

    Arguments(driverUrl, executorId, bindAddress, hostname, cores, appId, workerUrl,
      userClassPath, resourcesFileOpt, resourceProfileId)
  }

  // Writes the usage banner to stderr and terminates the process with exit code 1.
  private def printUsageAndExit(classNameForEntry: String): Unit = {
    // scalastyle:off println
    System.err.println(
      s"""
      |Usage: $classNameForEntry [options]
      |
      | Options are:
      |   --driver-url <driverUrl>
      |   --executor-id <executorId>
      |   --bind-address <bindAddress>
      |   --hostname <hostname>
      |   --cores <cores>
      |   --resourcesFile <fileWithJSONResourceInformation>
      |   --app-id <appid>
      |   --worker-url <workerUrl>
      |   --user-class-path <url>
      |   --resourceProfileId <id>
      |""".stripMargin)
    // scalastyle:on println
    System.exit(1)
  }
}
| goldmedal/spark | core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala | Scala | apache-2.0 | 17,168 |
package com.arcusys.valamis.slide.service.export
import java.io.{ ByteArrayInputStream, File, FileInputStream, InputStream }
import com.arcusys.learn.liferay.services.FileEntryServiceHelper
import com.arcusys.valamis.file.service.FileService
import com.arcusys.valamis.lesson.generator.tincan.file.TinCanRevealJSPackageGeneratorContract
import com.arcusys.valamis.lesson.generator.tincan.file.html.TinCanQuestionViewGenerator
import com.arcusys.valamis.questionbank.model.{Answer, Question}
import com.arcusys.valamis.questionbank.service.QuestionService
import com.arcusys.valamis.quiz.model._
import com.arcusys.valamis.quiz.service.QuizService
import com.arcusys.valamis.slide.model.{SlideElementModel, SlideSetModel, SlideModel, SlideEntityType}
import com.arcusys.valamis.slide.service.{SlideElementServiceContract, SlideSetServiceContract, SlideServiceContract}
import com.arcusys.valamis.uri.model.ValamisURIType
import com.arcusys.valamis.uri.service.URIServiceContract
import com.arcusys.valamis.util.mustache.Mustache
import com.arcusys.valamis.util.JsonSupport._
import com.escalatesoft.subcut.inject.{ BindingModule, Injectable }
/** Contract for exporting slide sets as TinCan (xAPI) packages. */
trait SlideSetPublisherContract {
  // Builds the TinCan zip for the slide set; learnPortletPath points at the portlet's web assets.
  def composeTinCanPackage(slideSetId: Long, learnPortletPath: String, title: String, description: String): File
  // Imports an existing quiz into a new slide set.
  def importFromQuiz(id: Long, quizId: Int)
}
class SlideSetPublisher(implicit val bindingModule: BindingModule)
extends Injectable
with SlideSetExportUtils
with SlideSetPublisherContract {
  // Assembles the final TinCan (xAPI) zip from the collected files.
  private val tinCanRevealJSPackageGenerator = inject[TinCanRevealJSPackageGeneratorContract]
  private lazy val slideService = inject[SlideServiceContract]
  private lazy val slideSetService = inject[SlideSetServiceContract]
  private lazy val slideElementService = inject[SlideElementServiceContract]
  protected lazy val questionService = inject[QuestionService]
  protected lazy val fileService = inject[FileService]
  // Renders question HTML for the exported package (isPreview = false).
  private val tincanQuestionViewGenerator = new TinCanQuestionViewGenerator(isPreview = false)
  private lazy val uriService = inject[URIServiceContract]
  private lazy val quizService = inject[QuizService]

  // Loads a bundled classpath resource, e.g. the Reveal.js page template.
  private def getResourceInputStream(name: String) = Thread.currentThread.getContextClassLoader.getResourceAsStream(name)
  // Mustache template for the package's index.html, read once on first use.
  private lazy val indexTemplate = new Mustache(scala.io.Source.fromInputStream(getResourceInputStream("tincan/revealjs.html")).mkString)

  // Third-party JS shipped with every package.
  private val vendorJSFileNames =
    "jquery.min.js" ::
      "reveal.min.js" ::
      "jquery-ui-1.10.4.custom.min.js" ::
      "jquery.ui.widget.js" ::
      "lodash.min.js" ::
      "backbone-min.js" ::
      "backbone.marionette_new.min.js" ::
      "backbone.service.js" ::
      "mustache.min.js" ::
      Nil

  // Valamis slide-player JS shipped with every package.
  private val slideSetJSFileNames =
    "Urls.js" ::
      "valamis-slides-editor/helper.js" ::
      "valamis-slides-editor/loadTemplates.js" ::
      "valamis-slides-editor/slideService.js" ::
      "valamis-slides-editor/slideElementService.js" ::
      "question-manager/models/AnswerModel.js" ::
      "question-manager/models/QuestionModel.js" ::
      "valamis-slides-editor/TinCanPackageRenderer.js" ::
      "valamis-slides-editor/TinCanPackageGenericItem.js" ::
      Nil

  // Scripts bundled from this module's resources (common/ on the classpath).
  private val commonJSFileNames =
    "base.js" ::
      Nil

  // Stylesheets shipped with every package.
  private val slideSetCSSFileNames =
    "reveal.min.css" ::
      "video-js.min.css" ::
      "katex.min.css" ::
      "valamis.css" ::
      "valamis_slides.css" ::
      "theme/valamis_slides.css" ::
      Nil
  /**
   * Builds a TinCan (xAPI) zip package for the slide set: resolves statement URIs on each
   * slide, renders the Reveal.js index page from the Mustache template, and collects every
   * JS/CSS/font/question resource the standalone player needs.
   */
  override def composeTinCanPackage(slideSetId: Long, learnPortletPath: String, title: String, description: String): File = {
    // Replace verb/category ids on each slide with resolved "uri/content" strings.
    val slides = slideService.getBySlideSetId(slideSetId).map { slide =>
      val statementVerbWithName = slide.statementVerb
        .flatMap(uriService.getById(_, ValamisURIType.Verb))
        .map(x => x.uri + "/" + x.content) match {
        case None => slide.statementVerb
        // NOTE(review): this appends the last path segment of `value` onto `value` itself,
        // duplicating it (e.g. ".../answered" -> ".../answeredanswered") — confirm intended.
        case Some(value) => Some(value + value.substring(value.lastIndexOf("/") + 1))
      }
      val statementCategoryWithName = slide.statementCategoryId
        .flatMap(uriService.getById(_, ValamisURIType.Category))
        .map(x => x.uri + "/" + x.content)
      SlideModel(slide.id,
        slide.title,
        slide.bgColor,
        slide.bgImage,
        slide.leftSlideId,
        slide.topSlideId,
        slide.slideElements,
        slide.slideSetId,
        statementVerbWithName,
        slide.statementObject,
        statementCategoryWithName)
    }
    // Element types present in the set decide which optional assets get bundled.
    val slideTypes = slides.map(_.slideElements).flatMap(x => x.map(_.slideEntityType)).distinct
    val additionalJSFileNames = slideTypes.collect {
      case SlideEntityType.Video => "video.js"
      case SlideEntityType.Math => "katex.min.js"
    }
    val fontFiles = filesFromDirectory(List(learnPortletPath + "fonts/"), None, true)
    // PDF viewer assets are only needed when at least one slide embeds a PDF.
    val previewResourceFiles = if (slideTypes contains SlideEntityType.Pdf) filesFromDirectory(List(learnPortletPath + "preview-resources/pdf/"), None, true) else Nil
    val questions = getRequiredQuestions(slides)
    val slidesQuestions = slides.flatMap { slide =>
      slide.slideElements.filter { e => e.slideEntityType == "question" }
    }
    // Root activity URI for the package, carrying title/description as JSON content.
    val URI = {
      val uriContent = Option(Map("title" -> title, "description" -> description).toJson.get)
      uriService.createLocal(ValamisURIType.Course, uriContent)
    }
    // Render index.html from the Mustache template with all asset references inlined.
    val index = new ByteArrayInputStream(indexTemplate.render(
      Map(
        "title" -> title,
        "slidesJson" -> slides.toJson.get,
        "isSlideJsonAvailable" -> true,
        "includeVendorFiles" -> (additionalJSFileNames ::: vendorJSFileNames).map(fileName => "js/" + fileName),
        "includeCommonFiles" -> commonJSFileNames.map(fileName => "js/" + fileName),
        "includeFiles" -> slideSetJSFileNames.map(fileName => "js/" + fileName),
        "includeCSS" -> slideSetCSSFileNames.map(fileName => "css/" + fileName),
        "includeFonts" -> fontFiles.map(file => "fonts/" + file._1.replace(learnPortletPath, "")),
        "rootActivityId" -> URI.uri,
        "scoreLimit" -> 0.7
      ) ++ getQuestionsMap(questions, slidesQuestions)
    ).getBytes)
    // Gather every file for the zip; portlet-relative paths are rewritten to package paths.
    val filesToAdd: List[(String, InputStream)] =
      ("index.html" -> index) ::
        getRequiredFiles(slides) :::
        (additionalJSFileNames ::: vendorJSFileNames).map(fileName => "js/" + fileName -> new FileInputStream(learnPortletPath + "js2.0/vendor/" + fileName)) :::
        commonJSFileNames.map(fileName => "js/" + fileName -> getResourceInputStream("common/" + fileName)) :::
        previewResourceFiles.map(file => file._1.replaceAll(learnPortletPath, "") -> file._2) :::
        slideSetJSFileNames.map(fileName => "js/" + fileName -> new FileInputStream(learnPortletPath + "js2.0/" + fileName)) :::
        fontFiles.map(file => file._1.replaceAll(learnPortletPath, "") -> file._2) :::
        slideSetCSSFileNames.map(fileName => "css/" + fileName -> new FileInputStream(learnPortletPath + "css2.0/" + fileName))

    tinCanRevealJSPackageGenerator.composePackage(omitFileDuplicates(filesToAdd), URI.uri, title, description)
  }
/**
 * Bundles the three question-related inputs consumed by the index template:
 * serialized view models, extracted scripts, and HTML markup templates.
 */
private def getQuestionsMap(questions: List[Question[Answer]], slidesQuestions: List[SlideElementModel]): Map[String, Any] = {
  val json = getQuestionsJSON(questions, slidesQuestions)
  val scripts = getQuestionScripts(questions, slidesQuestions)
  val templates = getQuestionMarkupTemplates(questions, slidesQuestions)
  Map(
    "questionsJson" -> json,
    "questionScripts" -> scripts,
    "questionMarkupTemplates" -> templates
  )
}
/**
 * Serializes the question view models to a JSON string, tagging each model
 * with its "questionType" code so the client can pick the right renderer.
 */
private def getQuestionsJSON(questions: List[Question[Answer]], slidesQuestions: List[SlideElementModel]): String = {
  val viewModels = questions.map { question =>
    val autoShowAnswer = getQuestionAutoShowAnswer(slidesQuestions, question.id)
    val model = tincanQuestionViewGenerator.getViewModelFromQuestion(question, autoShowAnswer)
    model + ("questionType" -> question.questionTypeCode)
  }
  viewModels.toJson.get
}
/**
 * Extracts the first inline <script> block from each question's rendered
 * HTML; yields "" when a question's markup contains no script.
 */
private def getQuestionScripts(questions: List[Question[Answer]], slidesQuestions: List[SlideElementModel]) = {
  // Hoisted out of the loop: the original recompiled this regex for every
  // question. (?s) lets '.' span newlines so multi-line scripts match.
  val scriptRegex = "(?s)(<script.*>.*</script>)".r
  questions.map { question =>
    val html = tincanQuestionViewGenerator.getHTMLByQuestionId(
      question,
      getQuestionAutoShowAnswer(slidesQuestions, question.id))
    scriptRegex.findFirstMatchIn(html)
      .map(_.group(1))
      .getOrElse("")
  }
}
/**
 * Wraps each question's inner <section> markup in a text/html template tag
 * whose id encodes the question type and id (e.g. "ChoiceQuestionTemplate7").
 */
private def getQuestionMarkupTemplates(questions: List[Question[Answer]], slidesQuestions: List[SlideElementModel]) = {
  // Hoisted out of the loop: the original recompiled this regex for every
  // question. (?s) lets '.' span newlines in multi-line markup.
  val sectionRegex = "(?s)(<section.*>.*</section>)".r
  questions.map { question =>
    // Template id prefix derived from the question type code.
    val questionTypeString = question.questionTypeCode match {
      case 0 => "ChoiceQuestion"
      case 1 => "ShortAnswerQuestion"
      case 2 => "NumericQuestion"
      case 3 => "PositioningQuestion"
      case 4 => "MatchingQuestion"
      case 5 => "EssayQuestion"
      case 6 => "EmbeddedAnswerQuestion"
      case 7 => "CategorizationQuestion"
      case 8 => "PlainText"
      case 9 => "PurePlainText"
      case _ => ""
    }
    // Grab the <section> block and strip the wrapping section tags, leaving
    // only the inner HTML for the template body.
    val questionMarkup = sectionRegex
      .findFirstMatchIn(tincanQuestionViewGenerator.getHTMLByQuestionId(question, getQuestionAutoShowAnswer(slidesQuestions, question.id)))
      .map(_.group(1))
      .getOrElse("")
      .replaceAll("<(/)*section.*>", "")
    "<script type='text/html' id='" + questionTypeString + "Template" + question.id + "'>" + questionMarkup + "</script>"
  }
}
/**
 * Whether the slide element referencing `questionId` should auto-reveal the
 * correct answer. Returns false when no matching slide element exists or the
 * flag is unset (the original `.filter(...).head` threw NoSuchElementException
 * when the question was absent).
 *
 * NOTE(review): assumes slideQuestion.content holds a numeric question id;
 * Integer.parseInt still throws on non-numeric content — confirm upstream
 * invariant.
 */
def getQuestionAutoShowAnswer(slideQuestions: List[SlideElementModel], questionId: Int): Boolean = {
  slideQuestions
    .find { slideQuestion => Integer.parseInt(slideQuestion.content) == questionId }
    .flatMap(_.notifyCorrectAnswer)
    .getOrElse(false)
}
/**
 * Collects (relative-path -> stream) pairs from every directory in `dirPaths`.
 *
 * @param dirPaths    absolute directory paths to scan
 * @param dirName     optional prefix overriding each directory's own name
 * @param isRecursive whether to descend into subdirectories
 */
private def filesFromDirectory(dirPaths: List[String], dirName: Option[String] = None, isRecursive: Boolean = false): List[(String, FileInputStream)] = {
  // foldLeft replaces the original's var + side-effecting `map` (whose result
  // was discarded) while keeping the exact accumulation order: files from
  // later directories end up at the front of the list.
  dirPaths.foldLeft(List.empty[(String, FileInputStream)]) { (acc, dirPath) =>
    val dir = new File(dirPath)
    listFilesForFolder(dirName.getOrElse(dir.getName), dir, isRecursive) ++ acc
  }
}
/**
 * Lists the files in `folder` as (prefix-qualified name -> open stream) pairs.
 *
 * Recursive mode joins path segments with "/"; the non-recursive branch
 * historically concatenates prefix and file name with NO separator — that
 * quirk is preserved because callers may rely on it.
 *
 * @param prefix      path prefix prepended to each entry name
 * @param folder      directory to list
 * @param isRecursive whether to descend into subdirectories
 */
private def listFilesForFolder(prefix: String, folder: File, isRecursive: Boolean): List[(String, FileInputStream)] = {
  // File.listFiles returns null when the path does not denote a readable
  // directory; guard so that case yields Nil instead of an NPE.
  val entries = Option(folder.listFiles).map(_.toList).getOrElse(Nil)
  entries.foldLeft(List.empty[(String, FileInputStream)]) { (acc, fileEntry) =>
    if (fileEntry.isDirectory) {
      if (isRecursive) listFilesForFolder(prefix + "/" + fileEntry.getName, fileEntry, isRecursive) ++ acc
      else acc // non-recursive mode skips subdirectories
    } else {
      val separator = if (isRecursive) "/" else ""
      ((prefix + separator + fileEntry.getName) -> new FileInputStream(fileEntry)) :: acc
    }
  }
}
/**
 * Recursively converts `questions` into a linked chain of persisted slides.
 *
 * Consumes the head of `questions` on each call: creates a slide, wires its
 * left/top neighbour links depending on the question kind and on the kind of
 * the previously created slide, then recurses on the remaining questions.
 *
 * @param questions         remaining quiz questions to turn into slides
 * @param previousSlide     slide created for the previous "normal" question, if any
 * @param previousSlideId   id of the slide the new one should link from
 * @param previousSlideType "normal" or "pptx" — NOTE(review): any other value
 *                          reaching the inner match below throws a MatchError
 * @param slideSetId        owning slide set
 */
private def addSlide(
questions: List[QuizQuestion],
previousSlide: Option[SlideModel],
previousSlideId: Option[Long],
previousSlideType: String,
slideSetId: Long): Unit = {
if(questions.nonEmpty) {
// One question is consumed per call; the recursion handles the tail.
val question = questions.head
// Create the slide first so its generated id is available below.
val createdSlide = slideService.create(
SlideModel(
leftSlideId = previousSlideId,
slideSetId = slideSetId)
)
question match {
// "Normal" kinds: link the new slide to the right of the previous slide
// and attach the question content as a slide element.
case q@(_: QuestionBankQuizQuestion |
_: PlainTextQuizQuestion |
_: RevealJSQuizQuestion |
_: ExternalQuizQuestion |
_: PDFQuizQuestion |
_: DLVideoQuizQuestion) =>
slideService.update(
SlideModel(
createdSlide.id,
leftSlideId = previousSlide.flatMap(_.id),
topSlideId = None,
slideSetId = slideSetId
)
)
addNormalSlideElement(q, createdSlide.id.get)
addSlide(questions diff List(question), Some(createdSlide), createdSlide.id, previousSlideType = "normal", slideSetId)
// PPTX questions become background images; consecutive PPTX slides are
// stacked vertically (topSlideId) under the first of the run.
case q: PPTXQuizQuestion =>
// Copy the PPTX-derived image into the new slide's own file folder.
fileService.copyFile(
"quizData" + q.quizID.toString,
q.file,
"slide_" + createdSlide.id.get,
q.file,
false
)
// CSS background-image value pointing at the copied file.
val url = "url(\\"/delegate/files/images?folderId=slide_" + createdSlide.id.get + "&file=" + q.file + "\\") contain"
// NOTE(review): non-exhaustive — only "normal" and "pptx" are handled.
previousSlideType match {
case "normal" =>
// First PPTX after a normal slide: link horizontally.
slideService.update(
SlideModel(
createdSlide.id,
bgImage = Some(url),
leftSlideId = previousSlideId,
topSlideId = None,
slideSetId = slideSetId
)
)
addSlide(questions diff List(question), Some(createdSlide), createdSlide.id, previousSlideType = "pptx", slideSetId)
case "pptx" =>
// PPTX after PPTX: link vertically; previousSlide is carried through
// so a following normal slide links from the top of the column.
slideService.update(
SlideModel(
createdSlide.id,
bgImage = Some(url),
leftSlideId = None,
topSlideId = previousSlideId,
slideSetId = slideSetId
)
)
addSlide(questions diff List(question), previousSlide, createdSlide.id, previousSlideType = "pptx", slideSetId)
}
case _ => throw new Exception("unsupported question type")
}
}
}
/**
 * Persists the slide element representing `question` on slide `slideId`.
 * Width/height/content/entity type depend on the concrete question subtype.
 *
 * NOTE(review): case order may be significant if these subtypes are related
 * by inheritance (e.g. PlainRevealJSQuizQuestion vs RevealJSQuizQuestion) —
 * confirm against the QuizQuestion hierarchy before reordering. Unknown
 * subtypes fall through the trailing `case _` and persist nothing.
 */
private def addNormalSlideElement(question: QuizQuestion, slideId: Long) = {
question match {
case q: QuestionBankQuizQuestion =>
// Stored by question id; rendered from the question bank at play time.
addSlideElement("800", "auto", q.question.id.toString, "question", slideId)
case q: PlainTextQuizQuestion =>
addSlideElement("800", "auto", q.text, "text", slideId)
case q: PlainRevealJSQuizQuestion =>
addSlideElement("800", "auto", q.content, "text", slideId)
case q: RevealJSQuizQuestion =>
addSlideElement("800", "auto", q.content, "text", slideId)
case q: ExternalQuizQuestion =>
// Embedded YouTube URLs get the video player; everything else an iframe.
if (q.url.contains("youtube.com/embed/"))
addSlideElement("640", "360", q.url, "video", slideId)
else
addSlideElement("800", "600", q.url, "iframe", slideId)
case q: PDFQuizQuestion =>
// Copy the PDF into this slide's quizData folder, then point the
// bundled pdf.js viewer at it.
fileService.copyFile(
"quizData" + q.quizID.toString,
q.filename,
"quizData" + slideId,
q.filename,
false
)
val url = "/learn-portlet/preview-resources/pdf/web/viewer.html?file=/learn-portlet/SCORMData/files/quizData" +
slideId + "/" + q.filename
addSlideElement("800", "600", url, "pdf", slideId)
case q: DLVideoQuizQuestion =>
// Resolve the Liferay document-library entry and build its public URL.
val fileEntry = FileEntryServiceHelper.getFileEntry(q.uuid, q.groupId.get)
val groupId = fileEntry.getGroupId
val filename = fileEntry.getTitle
val fileExtension = fileEntry.getExtension
val folderId = fileEntry.getFolderId
val url = "/documents/" + groupId + "/" + folderId + "/" + filename + "/" +
q.uuid + "?groupId=" + q.groupId.get + "&ext=" + fileExtension
addSlideElement("640", "360", url, "video", slideId)
case _ =>
}
}
/**
 * Persists a single slide element of the given entity type on `slideId`.
 * Width/height are stored as strings (e.g. "800", "auto") as the model expects.
 */
private def addSlideElement(width: String, height: String, content: String, slideElementType: String, slideId: Long) = {
  val element = SlideElementModel(
    width = width,
    height = height,
    content = content,
    slideEntityType = slideElementType,
    slideId = slideId)
  slideElementService.create(element)
}
/**
 * Imports all questions of quiz `quizId` into slide set `id`: copies the quiz
 * logo (if any), stores it on the slide set, and converts every question —
 * categorized and uncategorized — into slides.
 *
 * NOTE(review): slideSetService.getById(id).get still throws if the slide set
 * does not exist; callers appear to guarantee it — confirm.
 */
override def importFromQuiz(id: Long, quizId: Int) = {
  // foldLeft replaces the original's var + side-effecting `map` (whose result
  // was bound to an unused val). Accumulation order is preserved exactly:
  // questions of later categories come first, uncategorized questions last.
  val questions = quizService.getCategories(quizId, None).foldLeft(
    quizService.getQuestionsByCategory(quizId, None)) { (acc, category) =>
    quizService.getQuestionsByCategory(quizId, Some(category.id)) ++ acc
  }
  val slideSet = slideSetService.getById(id).get
  val quiz = quizService.getQuiz(quizId)
  if (quiz.logo.nonEmpty) {
    // Reuse the quiz logo as the slide set logo.
    fileService.copyFile(
      "quiz_logo_" + quizId,
      quiz.logo,
      "slideset_logo_" + id,
      quiz.logo,
      false
    )
  }
  slideSetService.update(
    SlideSetModel(
      Some(id),
      slideSet.title,
      slideSet.description,
      slideSet.courseId,
      Some(quiz.logo),
      List())
  )
  addSlide(questions.toList, None, None, previousSlideType = "normal", slideSet.id.get)
}
}
| icacic/Valamis | valamis-slide/src/main/scala/com/arcusys/valamis/slide/service/export/SlideSetPublisher.scala | Scala | gpl-3.0 | 16,176 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.couchbase.planner
import slamdata.Predef._
import quasar.physical.couchbase._
import quasar.physical.couchbase.N1QL._
import quasar.qscript, qscript.{ReduceFunc, ReduceFuncs => RF}
import matryoshka._
import matryoshka.implicits._
import scalaz._, Scalaz._
/**
 * Plans QScript reduction functions into N1QL aggregate expressions.
 */
final class ReduceFuncPlanner[T[_[_]]: CorecursiveT, F[_]: Applicative] extends Planner[T, F, ReduceFunc] {
// Embeds the translated node and lifts it into F.
def plan: AlgebraM[F, ReduceFunc, T[N1QL]] = planʹ >>> (_.embed.η[F])
@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
val planʹ: Transform[T[N1QL], ReduceFunc, N1QL] = {
// Arbitrary permits any value from the group; MIN is a deterministic pick.
case RF.Arbitrary(a1) => Min(a1)
case RF.Avg(a1) => Avg(a1)
case RF.Count(a1) => Count(a1)
// NOTE(review): First/Last are unimplemented — ??? throws
// NotImplementedError if these reductions are ever planned.
case RF.First(a1) => ???
case RF.Last(a1) => ???
case RF.Max(a1) => Max(a1)
case RF.Min(a1) => Min(a1)
case RF.Sum(a1) => Sum(a1)
// UnshiftArray/UnshiftMap rebuild structure: array aggregation and
// single-entry object construction respectively.
case RF.UnshiftArray(a1) => ArrAgg(a1)
case RF.UnshiftMap(a1, a2) => Obj(List(a1 -> a2))
}
}
| jedesah/Quasar | couchbase/src/main/scala/quasar/physical/couchbase/planner/ReduceFuncPlanner.scala | Scala | apache-2.0 | 1,634 |
import sbt._
object Dependencies {
  // Extra resolvers on top of Maven Central.
  // Fixed: the spray repo was declared over plain HTTP; modern sbt refuses
  // insecure HTTP resolvers and the repository is served over HTTPS.
  val resolutionRepos = Seq(
    "spray repo" at "https://repo.spray.io/",
    "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
  )
  // Versions
  val myScalaVersion = "2.10.4"
  val akkaVersion = "2.2.0-RC1"
  val sprayVersion = "1.2-M8"
  val specs2Version = "1.14"
  val slf4jVersion = "1.7.5"
  val json4sVersion = "3.2.4"
  val logbackVersion = "1.0.12"
  val sprayJsonVersion = "1.2.5"
  // Helpers tagging a list of modules with an ivy configuration scope.
  def compile (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "compile")
  def provided (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
  def test (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "test")
  def runtime (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "runtime")
  def container (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "container")
  val sprayClient = "io.spray" % "spray-client" % sprayVersion
  val akkaActor = "com.typesafe.akka" %% "akka-actor" % akkaVersion
  val akkaSlf4 = "com.typesafe.akka" %% "akka-slf4j" % akkaVersion
  val specs2 = "org.specs2" %% "specs2" % specs2Version
  // Logging
  val slf4jApi = "org.slf4j" % "slf4j-api" % slf4jVersion
  val logback = "ch.qos.logback" % "logback-classic" % logbackVersion
  val logbackcore = "ch.qos.logback" % "logback-core" % logbackVersion
  // Json
  val json4sNative = "org.json4s" %% "json4s-native" % json4sVersion
  val sprayJson = "io.spray" %% "spray-json" % sprayJsonVersion
  // Dependency groups
  val logDependencies = Seq(akkaSlf4, slf4jApi, logback, logbackcore)
  val akkaDependencies = Seq(akkaActor)
  val testDependencies = Seq(specs2 % "test")
  val dcHandleCheckpointsDependencies = Seq(sprayClient, json4sNative, sprayJson) ++ akkaDependencies ++ logDependencies ++ testDependencies
}
} | MagnusAk78/dynamic-checklist-server | tools/project/Dependencies.scala | Scala | gpl-3.0 | 1,971 |
/*
Copyright 2016 Tunalytics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
"http://www.apache.org/licenses/LICENSE-2.0".
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.tunalytics.loader.transformer.topology.signals
/**
* Signal value.
*
* For testing purposes! To be removed in production-ready version.
*/
case class Signal(message: Message) {
  // TODO: remove this class in production-ready version
  // Redundant `val` on the case-class parameter dropped; string interpolation
  // instead of '+' concatenation — the rendered output is unchanged.
  override def toString: String = s"[Signal (message = ${message.text})]"
}
| tunalytics/loader | transformer/src/main/scala/org/tunalytics/loader/transformer/topology/signals/Signal.scala | Scala | apache-2.0 | 948 |
/*
* Part of NDLA learningpath-api.
* Copyright (C) 2016 NDLA
*
* See LICENSE
*
*/
package no.ndla.learningpathapi.integration
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import no.ndla.learningpathapi.LearningpathApiProperties._
/**
 * Cake-pattern component exposing a HikariCP connection pool to mixers.
 */
trait DataSource {
// Concrete pool instance supplied by the implementing component.
val dataSource: HikariDataSource
}
object DataSource {
  /**
   * Builds a HikariCP pool from the LearningpathApiProperties meta-store
   * settings (PostgreSQL host, port, database, schema, credentials, pool size).
   */
  def getHikariDataSource: HikariDataSource = {
    val config = new HikariConfig()
    config.setJdbcUrl(s"jdbc:postgresql://$MetaServer:$MetaPort/$MetaResource")
    config.setUsername(MetaUserName)
    config.setPassword(MetaPassword)
    config.setSchema(MetaSchema)
    config.setMaximumPoolSize(MetaMaxConnections)
    new HikariDataSource(config)
  }
}
| NDLANO/learningpath-api | src/main/scala/no/ndla/learningpathapi/integration/DataSource.scala | Scala | gpl-3.0 | 761 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet
import java.nio.file.{Files, Paths, StandardCopyOption}
import java.sql.{Date, Timestamp}
import java.time._
import java.util.Locale
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.mapreduce.{JobContext, TaskAttemptContext}
import org.apache.parquet.HadoopReadOptions
import org.apache.parquet.column.{Encoding, ParquetProperties}
import org.apache.parquet.example.data.{Group, GroupWriter}
import org.apache.parquet.example.data.simple.SimpleGroup
import org.apache.parquet.hadoop._
import org.apache.parquet.hadoop.api.WriteSupport
import org.apache.parquet.hadoop.api.WriteSupport.WriteContext
import org.apache.parquet.hadoop.metadata.CompressionCodecName
import org.apache.parquet.hadoop.util.HadoopInputFile
import org.apache.parquet.io.api.RecordConsumer
import org.apache.parquet.schema.{MessageType, MessageTypeParser}
import org.apache.spark.{SPARK_VERSION_SHORT, SparkConf, SparkException, SparkUpgradeException}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.{InternalRow, ScalaReflection}
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, UnsafeRow}
import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils}
import org.apache.spark.sql.execution.datasources.SQLHadoopMapReduceCommitProtocol
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy._
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
// Write support class for nested groups: ParquetWriter initializes GroupWriteSupport
// with an empty configuration (it is after all not intended to be used in this way?)
// and members are private so we need to make our own in order to pass the schema
// to the writer.
/**
 * Minimal WriteSupport passing an explicit schema to ParquetWriter.
 * GroupWriteSupport's members are private and it is initialized with an empty
 * configuration, so tests need this wrapper to inject the schema directly.
 */
private[parquet] class TestGroupWriteSupport(schema: MessageType) extends WriteSupport[Group] {
  // Assigned in prepareForWrite before any write; `_` is the idiomatic
  // Scala default initializer (null for reference types, same as before).
  var groupWriter: GroupWriter = _

  override def prepareForWrite(recordConsumer: RecordConsumer): Unit = {
    groupWriter = new GroupWriter(recordConsumer, schema)
  }

  // No extra key/value metadata: an empty map alongside the fixed schema.
  override def init(configuration: Configuration): WriteContext = {
    new WriteContext(schema, new java.util.HashMap[String, String]())
  }

  override def write(record: Group): Unit = {
    groupWriter.write(record)
  }
}
/**
* A test suite that tests basic Parquet I/O.
*/
class ParquetIOSuite extends QueryTest with ParquetTest with SharedSparkSession {
import testImplicits._
// Suite-wide Spark configuration enabling the Intel OAP columnar plugin
// with off-heap memory and columnar codegen features tuned for these tests.
override def sparkConf: SparkConf =
super.sparkConf
.setAppName("test")
.set("spark.sql.parquet.columnarReaderBatchSize", "4096")
.set("spark.sql.sources.useV1SourceList", "avro")
.set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
.set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
//.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "false")
.set("spark.sql.columnar.window", "false")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
/**
 * Writes `data` to a Parquet file, reads it back, and checks the contents
 * row-by-row against the original tuples.
 *
 * @param data case-class/tuple rows to round-trip through Parquet
 */
protected def checkParquetFile[T <: Product : ClassTag: TypeTag](data: Seq[T]): Unit = {
withParquetDataFrame(data.toDF())(r => checkAnswer(r, data.map(Row.fromTuple)))
}
ignore("basic data types (without binary)") {
val data = (1 to 4).map { i =>
(i % 2 == 0, i, i.toLong, i.toFloat, i.toDouble)
}
checkParquetFile(data)
}
ignore("raw binary") {
val data = (1 to 4).map(i => Tuple1(Array.fill(3)(i.toByte)))
withParquetDataFrame(data.toDF()) { df =>
assertResult(data.map(_._1.mkString(",")).sorted) {
df.collect().map(_.getAs[Array[Byte]](0).mkString(",")).sorted
}
}
}
test("SPARK-11694 Parquet logical types are not being tested properly") {
val parquetSchema = MessageTypeParser.parseMessageType(
"""message root {
| required int32 a(INT_8);
| required int32 b(INT_16);
| required int32 c(DATE);
| required int32 d(DECIMAL(1,0));
| required int64 e(DECIMAL(10,0));
| required binary f(UTF8);
| required binary g(ENUM);
| required binary h(DECIMAL(32,0));
| required fixed_len_byte_array(32) i(DECIMAL(32,0));
| required int64 j(TIMESTAMP_MILLIS);
| required int64 k(TIMESTAMP_MICROS);
|}
""".stripMargin)
val expectedSparkTypes = Seq(ByteType, ShortType, DateType, DecimalType(1, 0),
DecimalType(10, 0), StringType, StringType, DecimalType(32, 0), DecimalType(32, 0),
TimestampType, TimestampType)
withTempPath { location =>
val path = new Path(location.getCanonicalPath)
val conf = spark.sessionState.newHadoopConf()
writeMetadata(parquetSchema, path, conf)
readParquetFile(path.toString)(df => {
val sparkTypes = df.schema.map(_.dataType)
assert(sparkTypes === expectedSparkTypes)
})
}
}
ignore("string") {
val data = (1 to 4).map(i => Tuple1(i.toString))
// Property spark.sql.parquet.binaryAsString shouldn't affect Parquet files written by Spark SQL
// as we store Spark SQL schema in the extra metadata.
withSQLConf(SQLConf.PARQUET_BINARY_AS_STRING.key -> "false")(checkParquetFile(data))
withSQLConf(SQLConf.PARQUET_BINARY_AS_STRING.key -> "true")(checkParquetFile(data))
}
testStandardAndLegacyModes("fixed-length decimals") {
def makeDecimalRDD(decimal: DecimalType): DataFrame = {
spark
.range(1000)
// Parquet doesn't allow column names with spaces, have to add an alias here.
// Minus 500 here so that negative decimals are also tested.
.select((('id - 500) / 100.0) cast decimal as 'dec)
.coalesce(1)
}
val combinations = Seq((5, 2), (1, 0), (1, 1), (18, 10), (18, 17), (19, 0), (38, 37))
for ((precision, scale) <- combinations) {
withTempPath { dir =>
val data = makeDecimalRDD(DecimalType(precision, scale))
data.write.parquet(dir.getCanonicalPath)
readParquetFile(dir.getCanonicalPath) { df => {
checkAnswer(df, data.collect().toSeq)
}}
}
}
}
ignore("date type") {
def makeDateRDD(): DataFrame =
sparkContext
.parallelize(0 to 1000)
.map(i => Tuple1(DateTimeUtils.toJavaDate(i)))
.toDF()
.select($"_1")
withTempPath { dir =>
val data = makeDateRDD()
data.write.parquet(dir.getCanonicalPath)
readParquetFile(dir.getCanonicalPath) { df =>
checkAnswer(df, data.collect().toSeq)
}
}
}
testStandardAndLegacyModes("map") {
val data = (1 to 4).map(i => Tuple1(Map(i -> s"val_$i")))
checkParquetFile(data)
}
testStandardAndLegacyModes("array") {
val data = (1 to 4).map(i => Tuple1(Seq(i, i + 1)))
checkParquetFile(data)
}
testStandardAndLegacyModes("array and double") {
val data = (1 to 4).map(i => (i.toDouble, Seq(i.toDouble, (i + 1).toDouble)))
checkParquetFile(data)
}
testStandardAndLegacyModes("struct") {
val data = (1 to 4).map(i => Tuple1((i, s"val_$i")))
withParquetDataFrame(data.toDF()) { df =>
// Structs are converted to `Row`s
checkAnswer(df, data.map { case Tuple1(struct) =>
Row(Row(struct.productIterator.toSeq: _*))
})
}
}
testStandardAndLegacyModes("array of struct") {
val data = (1 to 4).map { i =>
Tuple1(
Seq(
Tuple1(s"1st_val_$i"),
Tuple1(s"2nd_val_$i")
)
)
}
withParquetDataFrame(data.toDF()) { df =>
// Structs are converted to `Row`s
checkAnswer(df, data.map { case Tuple1(array) =>
Row(array.map(struct => Row(struct.productIterator.toSeq: _*)))
})
}
}
testStandardAndLegacyModes("array of nested struct") {
val data = (1 to 4).map { i =>
Tuple1(
Seq(
Tuple1(
Tuple1(s"1st_val_$i")),
Tuple1(
Tuple1(s"2nd_val_$i"))
)
)
}
withParquetDataFrame(data.toDF()) { df =>
// Structs are converted to `Row`s
checkAnswer(df, data.map { case Tuple1(array) =>
Row(array.map { case Tuple1(Tuple1(str)) => Row(Row(str))})
})
}
}
testStandardAndLegacyModes("nested struct with array of array as field") {
val data = (1 to 4).map(i => Tuple1((i, Seq(Seq(s"val_$i")))))
withParquetDataFrame(data.toDF()) { df =>
// Structs are converted to `Row`s
checkAnswer(df, data.map { case Tuple1(struct) =>
Row(Row(struct.productIterator.toSeq: _*))
})
}
}
testStandardAndLegacyModes("nested map with struct as key type") {
val data = (1 to 4).map { i =>
Tuple1(
Map(
(i, s"kA_$i") -> s"vA_$i",
(i, s"kB_$i") -> s"vB_$i"
)
)
}
withParquetDataFrame(data.toDF()) { df =>
// Structs are converted to `Row`s
checkAnswer(df, data.map { case Tuple1(m) =>
Row(m.map { case (k, v) => Row(k.productIterator.toSeq: _*) -> v })
})
}
}
testStandardAndLegacyModes("nested map with struct as value type") {
val data = (1 to 4).map { i =>
Tuple1(
Map(
s"kA_$i" -> ((i, s"vA_$i")),
s"kB_$i" -> ((i, s"vB_$i"))
)
)
}
withParquetDataFrame(data.toDF()) { df =>
// Structs are converted to `Row`s
checkAnswer(df, data.map { case Tuple1(m) =>
Row(m.mapValues(struct => Row(struct.productIterator.toSeq: _*)))
})
}
}
ignore("nulls") {
val allNulls = (
null.asInstanceOf[java.lang.Boolean],
null.asInstanceOf[Integer],
null.asInstanceOf[java.lang.Long],
null.asInstanceOf[java.lang.Float],
null.asInstanceOf[java.lang.Double])
withParquetDataFrame((allNulls :: Nil).toDF()) { df =>
val rows = df.collect()
assert(rows.length === 1)
assert(rows.head === Row(Seq.fill(5)(null): _*))
}
}
ignore("nones") {
val allNones = (
None.asInstanceOf[Option[Int]],
None.asInstanceOf[Option[Long]],
None.asInstanceOf[Option[String]])
withParquetDataFrame((allNones :: Nil).toDF()) { df =>
val rows = df.collect()
assert(rows.length === 1)
assert(rows.head === Row(Seq.fill(3)(null): _*))
}
}
test("SPARK-10113 Support for unsigned Parquet logical types") {
val parquetSchema = MessageTypeParser.parseMessageType(
"""message root {
| required int32 c(UINT_32);
|}
""".stripMargin)
withTempPath { location =>
val path = new Path(location.getCanonicalPath)
val conf = spark.sessionState.newHadoopConf()
writeMetadata(parquetSchema, path, conf)
val errorMessage = intercept[Throwable] {
spark.read.parquet(path.toString).printSchema()
}.toString
assert(errorMessage.contains("Parquet type not supported"))
}
}
test("SPARK-11692 Support for Parquet logical types, JSON and BSON (embedded types)") {
val parquetSchema = MessageTypeParser.parseMessageType(
"""message root {
| required binary a(JSON);
| required binary b(BSON);
|}
""".stripMargin)
val expectedSparkTypes = Seq(StringType, BinaryType)
withTempPath { location =>
val path = new Path(location.getCanonicalPath)
val conf = spark.sessionState.newHadoopConf()
writeMetadata(parquetSchema, path, conf)
val sparkTypes = spark.read.parquet(path.toString).schema.map(_.dataType)
assert(sparkTypes === expectedSparkTypes)
}
}
test("compression codec") {
val hadoopConf = spark.sessionState.newHadoopConf()
def compressionCodecFor(path: String, codecName: String): String = {
val codecs = for {
footer <- readAllFootersWithoutSummaryFiles(new Path(path), hadoopConf)
block <- footer.getParquetMetadata.getBlocks.asScala
column <- block.getColumns.asScala
} yield column.getCodec.name()
assert(codecs.distinct === Seq(codecName))
codecs.head
}
val data = (0 until 10).map(i => (i, i.toString))
def checkCompressionCodec(codec: CompressionCodecName): Unit = {
withSQLConf(SQLConf.PARQUET_COMPRESSION.key -> codec.name()) {
withParquetFile(data) { path =>
assertResult(spark.conf.get(SQLConf.PARQUET_COMPRESSION).toUpperCase(Locale.ROOT)) {
compressionCodecFor(path, codec.name())
}
}
}
}
// Checks default compression codec
checkCompressionCodec(
CompressionCodecName.fromConf(spark.conf.get(SQLConf.PARQUET_COMPRESSION)))
checkCompressionCodec(CompressionCodecName.UNCOMPRESSED)
checkCompressionCodec(CompressionCodecName.GZIP)
checkCompressionCodec(CompressionCodecName.SNAPPY)
}
ignore("read raw Parquet file") {
def makeRawParquetFile(path: Path): Unit = {
val schema = MessageTypeParser.parseMessageType(
"""
|message root {
| required boolean _1;
| required int32 _2;
| required int64 _3;
| required float _4;
| required double _5;
|}
""".stripMargin)
val testWriteSupport = new TestGroupWriteSupport(schema)
/**
* Provide a builder for constructing a parquet writer - after PARQUET-248 directly
* constructing the writer is deprecated and should be done through a builder. The default
* builders include Avro - but for raw Parquet writing we must create our own builder.
*/
class ParquetWriterBuilder() extends
ParquetWriter.Builder[Group, ParquetWriterBuilder](path) {
override def getWriteSupport(conf: Configuration) = testWriteSupport
override def self() = this
}
val writer = new ParquetWriterBuilder().build()
(0 until 10).foreach { i =>
val record = new SimpleGroup(schema)
record.add(0, i % 2 == 0)
record.add(1, i)
record.add(2, i.toLong)
record.add(3, i.toFloat)
record.add(4, i.toDouble)
writer.write(record)
}
writer.close()
}
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "part-r-0.parquet")
makeRawParquetFile(path)
readParquetFile(path.toString) { df =>
checkAnswer(df, (0 until 10).map { i =>
Row(i % 2 == 0, i, i.toLong, i.toFloat, i.toDouble) })
}
}
}
ignore("write metadata") {
val hadoopConf = spark.sessionState.newHadoopConf()
withTempPath { file =>
val path = new Path(file.toURI.toString)
val fs = FileSystem.getLocal(hadoopConf)
val schema = StructType.fromAttributes(ScalaReflection.attributesFor[(Int, String)])
writeMetadata(schema, path, hadoopConf)
assert(fs.exists(new Path(path, ParquetFileWriter.PARQUET_COMMON_METADATA_FILE)))
assert(fs.exists(new Path(path, ParquetFileWriter.PARQUET_METADATA_FILE)))
val expectedSchema = new SparkToParquetSchemaConverter().convert(schema)
val actualSchema = readFooter(path, hadoopConf).getFileMetaData.getSchema
actualSchema.checkContains(expectedSchema)
expectedSchema.checkContains(actualSchema)
}
}
ignore("save - overwrite") {
withParquetFile((1 to 10).map(i => (i, i.toString))) { file =>
val newData = (11 to 20).map(i => (i, i.toString))
newData.toDF().write.format("parquet").mode(SaveMode.Overwrite).save(file)
readParquetFile(file) { df =>
checkAnswer(df, newData.map(Row.fromTuple))
}
}
}
ignore("save - ignore") {
val data = (1 to 10).map(i => (i, i.toString))
withParquetFile(data) { file =>
val newData = (11 to 20).map(i => (i, i.toString))
newData.toDF().write.format("parquet").mode(SaveMode.Ignore).save(file)
readParquetFile(file) { df =>
checkAnswer(df, data.map(Row.fromTuple))
}
}
}
test("save - throw") {
val data = (1 to 10).map(i => (i, i.toString))
withParquetFile(data) { file =>
val newData = (11 to 20).map(i => (i, i.toString))
val errorMessage = intercept[Throwable] {
newData.toDF().write.format("parquet").mode(SaveMode.ErrorIfExists).save(file)
}.getMessage
assert(errorMessage.contains("already exists"))
}
}
ignore("save - append") {
val data = (1 to 10).map(i => (i, i.toString))
withParquetFile(data) { file =>
val newData = (11 to 20).map(i => (i, i.toString))
newData.toDF().write.format("parquet").mode(SaveMode.Append).save(file)
readParquetFile(file) { df =>
checkAnswer(df, (data ++ newData).map(Row.fromTuple))
}
}
}
test("SPARK-6315 regression test") {
// Spark 1.1 and prior versions write Spark schema as case class string into Parquet metadata.
// This has been deprecated by JSON format since 1.2. Notice that, 1.3 further refactored data
// types API, and made StructType.fields an array. This makes the result of StructType.toString
// different from prior versions: there's no "Seq" wrapping the fields part in the string now.
val sparkSchema =
"StructType(Seq(StructField(a,BooleanType,false),StructField(b,IntegerType,false)))"
// The Parquet schema is intentionally made different from the Spark schema. Because the new
// Parquet data source simply falls back to the Parquet schema once it fails to parse the Spark
// schema. By making these two different, we are able to assert the old style case class string
// is parsed successfully.
val parquetSchema = MessageTypeParser.parseMessageType(
"""message root {
| required int32 c;
|}
""".stripMargin)
withTempPath { location =>
val extraMetadata = Map(ParquetReadSupport.SPARK_METADATA_KEY -> sparkSchema.toString)
val path = new Path(location.getCanonicalPath)
val conf = spark.sessionState.newHadoopConf()
writeMetadata(parquetSchema, path, conf, extraMetadata)
readParquetFile(path.toString) { df =>
assertResult(df.schema) {
StructType(
StructField("a", BooleanType, nullable = true) ::
StructField("b", IntegerType, nullable = true) ::
Nil)
}
}
}
}
test("SPARK-8121: spark.sql.parquet.output.committer.class shouldn't be overridden") {
withSQLConf(SQLConf.FILE_COMMIT_PROTOCOL_CLASS.key ->
classOf[SQLHadoopMapReduceCommitProtocol].getCanonicalName) {
val extraOptions = Map(
SQLConf.OUTPUT_COMMITTER_CLASS.key -> classOf[ParquetOutputCommitter].getCanonicalName,
SQLConf.PARQUET_OUTPUT_COMMITTER_CLASS.key ->
classOf[JobCommitFailureParquetOutputCommitter].getCanonicalName
)
withTempPath { dir =>
val message = intercept[SparkException] {
spark.range(0, 1).write.options(extraOptions).parquet(dir.getCanonicalPath)
}.getCause.getMessage
assert(message === "Intentional exception for testing purposes")
}
}
}
test("SPARK-6330 regression test") {
// In 1.3.0, save to fs other than file: without configuring core-site.xml would get:
// IllegalArgumentException: Wrong FS: hdfs://..., expected: file:///
intercept[Throwable] {
spark.read.parquet("file:///nonexistent")
}
val errorMessage = intercept[Throwable] {
spark.read.parquet("hdfs://nonexistent")
}.toString
assert(errorMessage.contains("UnknownHostException"))
}
// SPARK-7837: when commitTask() fails, abortTask() also runs; neither path may
// close the output writer a second time (doing so used to cause an NPE).
test("SPARK-7837 Do not close output writer twice when commitTask() fails") {
  withSQLConf(SQLConf.FILE_COMMIT_PROTOCOL_CLASS.key ->
    classOf[SQLHadoopMapReduceCommitProtocol].getCanonicalName) {
    // Using a output committer that always fail when committing a task, so that both
    // `commitTask()` and `abortTask()` are invoked.
    val extraOptions = Map[String, String](
      SQLConf.PARQUET_OUTPUT_COMMITTER_CLASS.key ->
        classOf[TaskCommitFailureParquetOutputCommitter].getCanonicalName
    )
    // Before fixing SPARK-7837, the following code results in an NPE because both
    // `commitTask()` and `abortTask()` try to close output writers.
    withTempPath { dir =>
      val m1 = intercept[SparkException] {
        spark.range(1).coalesce(1).write.options(extraOptions).parquet(dir.getCanonicalPath)
      }.getCause.getMessage
      assert(m1.contains("Intentional exception for testing purposes"))
    }
    // Same check for the dynamic-partition write path.
    withTempPath { dir =>
      val m2 = intercept[SparkException] {
        val df = spark.range(1).select('id as 'a, 'id as 'b).coalesce(1)
        df.write.partitionBy("a").options(extraOptions).parquet(dir.getCanonicalPath)
      }.getCause.getMessage
      assert(m2.contains("Intentional exception for testing purposes"))
    }
  }
}
// SPARK-11044: verifies that the writer-version option is actually honored.
// (Title previously carried a stray trailing space; trimmed.)
test("SPARK-11044 Parquet writer version fixed as version1") {
  withSQLConf(SQLConf.FILE_COMMIT_PROTOCOL_CLASS.key ->
    classOf[SQLHadoopMapReduceCommitProtocol].getCanonicalName) {
    // For dictionary encoding, Parquet changes the encoding types according to its writer
    // version. So, this test checks one of the encoding types in order to ensure that
    // the file is written with writer version2.
    val extraOptions = Map[String, String](
      // Write a Parquet file with writer version2.
      ParquetOutputFormat.WRITER_VERSION -> ParquetProperties.WriterVersion.PARQUET_2_0.toString,
      // By default, dictionary encoding is enabled from Parquet 1.2.0 but
      // it is enabled just in case.
      ParquetOutputFormat.ENABLE_DICTIONARY -> "true"
    )
    val hadoopConf = spark.sessionState.newHadoopConfWithOptions(extraOptions)
    withSQLConf(ParquetOutputFormat.JOB_SUMMARY_LEVEL -> "ALL") {
      withTempPath { dir =>
        val path = s"${dir.getCanonicalPath}/part-r-0.parquet"
        // Low-cardinality column so Parquet chooses dictionary encoding.
        spark.range(1 << 16).selectExpr("(id % 4) AS i")
          .coalesce(1).write.options(extraOptions).mode("overwrite").parquet(path)
        val blockMetadata = readFooter(new Path(path), hadoopConf).getBlocks.asScala.head
        val columnChunkMetadata = blockMetadata.getColumns.asScala.head
        // If the file is written with version2, this should include
        // Encoding.RLE_DICTIONARY type. For version1, it is Encoding.PLAIN_DICTIONARY
        assert(columnChunkMetadata.getEncodings.contains(Encoding.RLE_DICTIONARY))
      }
    }
  }
}
// Null handling across reader batch boundaries: rows 0..149 are NULL, rows
// 150..199 hold "a"; count() must skip nulls both before and after a round trip.
test("null and non-null strings") {
  // Create a dataset where the first values are NULL and then some non-null values. The
  // number of non-nulls needs to be bigger than the ParquetReader batch size.
  val data: Dataset[String] = spark.range(200).map (i =>
    if (i < 150) null
    else "a"
  )
  val df = data.toDF("col")
  assert(df.agg("col" -> "count").collect().head.getLong(0) == 50)
  withTempPath { dir =>
    val path = s"${dir.getCanonicalPath}/data"
    df.write.parquet(path)
    readParquetFile(path) { df2 =>
      assert(df2.agg("col" -> "count").collect().head.getLong(0) == 50)
    }
  }
}

// The following ignored cases read pre-generated resource files covering each
// physical layout decimals can be stored in, all with dictionary encoding.
ignore("read dictionary encoded decimals written as INT32") {
  withAllParquetReaders {
    checkAnswer(
      // Decimal column in this file is encoded using plain dictionary
      readResourceParquetFile("test-data/dec-in-i32.parquet"),
      spark.range(1 << 4).select('id % 10 cast DecimalType(5, 2) as 'i32_dec))
  }
}

ignore("read dictionary encoded decimals written as INT64") {
  withAllParquetReaders {
    checkAnswer(
      // Decimal column in this file is encoded using plain dictionary
      readResourceParquetFile("test-data/dec-in-i64.parquet"),
      spark.range(1 << 4).select('id % 10 cast DecimalType(10, 2) as 'i64_dec))
  }
}

ignore("read dictionary encoded decimals written as FIXED_LEN_BYTE_ARRAY") {
  withAllParquetReaders {
    checkAnswer(
      // Decimal column in this file is encoded using plain dictionary
      readResourceParquetFile("test-data/dec-in-fixed-len.parquet"),
      spark.range(1 << 4).select('id % 10 cast DecimalType(10, 2) as 'fixed_len_dec))
  }
}

ignore("read dictionary and plain encoded timestamp_millis written as INT64") {
  withAllParquetReaders {
    checkAnswer(
      // timestamp column in this file is encoded using combination of plain
      // and dictionary encodings.
      readResourceParquetFile("test-data/timemillis-in-i64.parquet"),
      (1 to 3).map(i => Row(new java.sql.Timestamp(10))))
  }
}

// SPARK-12589: rows obtained via copy() must remain stable, and both the
// vectorized and non-vectorized readers must hash string rows identically.
ignore("SPARK-12589 copy() on rows returned from reader works for strings") {
  withTempPath { dir =>
    val data = (1, "abc") ::(2, "helloabcde") :: Nil
    data.toDF().write.parquet(dir.getCanonicalPath)
    var hash1: Int = 0
    var hash2: Int = 0
    // First pass (vectorized off) records the hashes; second pass (vectorized
    // on) must reproduce them exactly.
    (false :: true :: Nil).foreach { v =>
      withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> v.toString) {
        val df = spark.read.parquet(dir.getCanonicalPath)
        val rows = df.queryExecution.toRdd.map(_.copy()).collect()
        val unsafeRows = rows.map(_.asInstanceOf[UnsafeRow])
        if (!v) {
          hash1 = unsafeRows(0).hashCode()
          hash2 = unsafeRows(1).hashCode()
        } else {
          assert(hash1 == unsafeRows(0).hashCode())
          assert(hash2 == unsafeRows(1).hashCode())
        }
      }
    }
  }
}
// Drives VectorizedParquetRecordReader directly (bypassing the Spark plan) and
// checks full, partial, reordered, and empty projections.
test("VectorizedParquetRecordReader - direct path read") {
  val data = (0 to 10).map(i => (i, (i + 'a').toChar.toString))
  withTempPath { dir =>
    spark.createDataFrame(data).repartition(1).write.parquet(dir.getCanonicalPath)
    val file = SpecificParquetRecordReaderBase.listDirectory(dir).get(0)
    // Builds a fresh reader with the session's current vectorization settings;
    // replaces the construction boilerplate that was repeated in each section.
    def newReader(): VectorizedParquetRecordReader = {
      val conf = sqlContext.conf
      new VectorizedParquetRecordReader(
        conf.offHeapColumnVectorEnabled, conf.parquetVectorizedReaderBatchSize)
    }
    // Full projection (null column list = all columns). `locally` gives each
    // section its own scope without relying on bare-block semicolon tricks.
    locally {
      val reader = newReader()
      try {
        reader.initialize(file, null)
        val result = mutable.ArrayBuffer.empty[(Int, String)]
        while (reader.nextKeyValue()) {
          val row = reader.getCurrentValue.asInstanceOf[InternalRow]
          val v = (row.getInt(0), row.getString(1))
          result += v
        }
        assert(data.toSet == result.toSet)
      } finally {
        reader.close()
      }
    }
    // Project just one column
    locally {
      val reader = newReader()
      try {
        reader.initialize(file, ("_2" :: Nil).asJava)
        val result = mutable.ArrayBuffer.empty[(String)]
        while (reader.nextKeyValue()) {
          val row = reader.getCurrentValue.asInstanceOf[InternalRow]
          result += row.getString(0)
        }
        assert(data.map(_._2).toSet == result.toSet)
      } finally {
        reader.close()
      }
    }
    // Project columns in opposite order
    locally {
      val reader = newReader()
      try {
        reader.initialize(file, ("_2" :: "_1" :: Nil).asJava)
        val result = mutable.ArrayBuffer.empty[(String, Int)]
        while (reader.nextKeyValue()) {
          val row = reader.getCurrentValue.asInstanceOf[InternalRow]
          val v = (row.getString(0), row.getInt(1))
          result += v
        }
        assert(data.map { x => (x._2, x._1) }.toSet == result.toSet)
      } finally {
        reader.close()
      }
    }
    // Empty projection: only the row count is observable.
    locally {
      val reader = newReader()
      try {
        reader.initialize(file, List[String]().asJava)
        var result = 0
        while (reader.nextKeyValue()) {
          result += 1
        }
        assert(result == data.length)
      } finally {
        reader.close()
      }
    }
  }
}
// Verifies that every supported partition-column type survives the vectorized
// reader's constant-column (initBatch) path unchanged.
test("VectorizedParquetRecordReader - partition column types") {
  withTempPath { dir =>
    Seq(1).toDF().repartition(1).write.parquet(dir.getCanonicalPath)
    val dataTypes =
      Seq(StringType, BooleanType, ByteType, ShortType, IntegerType, LongType,
        FloatType, DoubleType, DecimalType(25, 5), DateType, TimestampType)
    // One constant value per type above, in Spark's internal representation
    // (UTF8String for strings, days/micros for date/timestamp).
    val constantValues =
      Seq(
        UTF8String.fromString("a string"),
        true,
        1.toByte,
        2.toShort,
        3,
        Long.MaxValue,
        0.25.toFloat,
        0.75D,
        Decimal("1234.23456"),
        DateTimeUtils.fromJavaDate(java.sql.Date.valueOf("2015-01-01")),
        DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf("2015-01-01 23:50:59.123")))
    dataTypes.zip(constantValues).foreach { case (dt, v) =>
      val schema = StructType(StructField("pcol", dt) :: Nil)
      val conf = sqlContext.conf
      val vectorizedReader = new VectorizedParquetRecordReader(
        conf.offHeapColumnVectorEnabled, conf.parquetVectorizedReaderBatchSize)
      val partitionValues = new GenericInternalRow(Array(v))
      val file = SpecificParquetRecordReaderBase.listDirectory(dir).get(0)
      try {
        vectorizedReader.initialize(file, null)
        vectorizedReader.initBatch(schema, partitionValues)
        vectorizedReader.nextKeyValue()
        val row = vectorizedReader.getCurrentValue.asInstanceOf[InternalRow]
        // Use `GenericMutableRow` by explicitly copying rather than `ColumnarBatch`
        // in order to use get(...) method which is not implemented in `ColumnarBatch`.
        // Column 0 is the data column from the file; column 1 is the appended
        // partition column under test.
        val actual = row.copy().get(1, dt)
        val expected = v
        assert(actual == expected)
      } finally {
        vectorizedReader.close()
      }
    }
  }
}
test("SPARK-18433: Improve DataSource option keys to be more case-insensitive") {
  withSQLConf(SQLConf.PARQUET_COMPRESSION.key -> "snappy") {
    // The mixed-case "Compression" key must still override the session-level
    // "snappy" setting, proving option-key lookup is case-insensitive.
    val parquetOptions =
      new ParquetOptions(Map("Compression" -> "uncompressed"), spark.sessionState.conf)
    assert(parquetOptions.compressionCodecClassName == "UNCOMPRESSED")
  }
}
// SPARK-23173: columns declared non-nullable in a user schema but absent from
// the JSON input come back as null; writing such rows to Parquet must not fail.
// (Title typo fixed: "with and" -> "with an".)
ignore("SPARK-23173 Writing a file with data converted from JSON with an incorrect user schema") {
  withTempPath { file =>
    val jsonData =
      """{
        | "a": 1,
        | "c": "foo"
        |}
        |""".stripMargin
    // "b" is declared non-nullable but missing from the input, so from_json
    // yields null for it despite the schema.
    val jsonSchema = new StructType()
      .add("a", LongType, nullable = false)
      .add("b", StringType, nullable = false)
      .add("c", StringType, nullable = false)
    spark.range(1).select(from_json(lit(jsonData), jsonSchema) as "input")
      .write.parquet(file.getAbsolutePath)
    checkAnswer(spark.read.parquet(file.getAbsolutePath), Seq(Row(Row(1, null, "foo"))))
  }
}
// Every Parquet file written by Spark must carry the writing Spark version in
// its key/value footer metadata.
test("Write Spark version into Parquet metadata") {
  withTempPath { dir =>
    val path = dir.getAbsolutePath
    spark.range(1).repartition(1).write.parquet(path)
    val file = SpecificParquetRecordReaderBase.listDirectory(dir).get(0)
    val conf = new Configuration()
    val hadoopInputFile = HadoopInputFile.fromPath(new Path(file), conf)
    val parquetReadOptions = HadoopReadOptions.builder(conf).build()
    val m = ParquetFileReader.open(hadoopInputFile, parquetReadOptions)
    // Close the reader even if metadata extraction throws; the original code
    // leaked the open file handle on failure.
    val metaData =
      try m.getFileMetaData.getKeyValueMetaData
      finally m.close()
    assert(metaData.get(SPARK_VERSION_METADATA_KEY) === SPARK_VERSION_SHORT)
  }
}
// It generates input files for the test below:
// "SPARK-31159: compatibility with Spark 2.4 in reading dates/timestamps"
// Run manually under Spark 2.4.5 to (re)produce the checked-in resource files;
// kept ignored because it writes into the source tree.
ignore("SPARK-31806: generate test files for checking compatibility with Spark 2.4") {
  val resourceDir = "sql/core/src/test/resources/test-data"
  val version = "2_4_5"
  val N = 8
  // Writes `in` cast to type `t` as a single Parquet part-file and copies it
  // into the resource directory under `dstFile`.
  def save(
      in: Seq[(String, String)],
      t: String,
      dstFile: String,
      options: Map[String, String] = Map.empty): Unit = {
    withTempDir { dir =>
      in.toDF("dict", "plain")
        .select($"dict".cast(t), $"plain".cast(t))
        .repartition(1)
        .write
        .mode("overwrite")
        .options(options)
        .parquet(dir.getCanonicalPath)
      Files.copy(
        dir.listFiles().filter(_.getName.endsWith(".snappy.parquet")).head.toPath,
        Paths.get(resourceDir, dstFile),
        StandardCopyOption.REPLACE_EXISTING)
    }
  }
  // Pre-Gregorian (before 1582) values are exactly the ones affected by
  // calendar rebasing; one file per timestamp output type plus a date file.
  DateTimeTestUtils.withDefaultTimeZone(DateTimeTestUtils.LA) {
    withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> DateTimeTestUtils.LA.getId) {
      save(
        (1 to N).map(i => ("1001-01-01", s"1001-01-0$i")),
        "date",
        s"before_1582_date_v$version.snappy.parquet")
      withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> "TIMESTAMP_MILLIS") {
        save(
          (1 to N).map(i => ("1001-01-01 01:02:03.123", s"1001-01-0$i 01:02:03.123")),
          "timestamp",
          s"before_1582_timestamp_millis_v$version.snappy.parquet")
      }
      val usTs = (1 to N).map(i => ("1001-01-01 01:02:03.123456", s"1001-01-0$i 01:02:03.123456"))
      withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> "TIMESTAMP_MICROS") {
        save(usTs, "timestamp", s"before_1582_timestamp_micros_v$version.snappy.parquet")
      }
      withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> "INT96") {
        // Comparing to other logical types, Parquet-MR chooses dictionary encoding for the
        // INT96 logical type because it consumes less memory for small column cardinality.
        // Huge parquet files doesn't make sense to place to the resource folder. That's why
        // we explicitly set `parquet.enable.dictionary` and generate two files w/ and w/o
        // dictionary encoding.
        save(
          usTs,
          "timestamp",
          s"before_1582_timestamp_int96_plain_v$version.snappy.parquet",
          Map("parquet.enable.dictionary" -> "false"))
        save(
          usTs,
          "timestamp",
          s"before_1582_timestamp_int96_dict_v$version.snappy.parquet",
          Map("parquet.enable.dictionary" -> "true"))
      }
    }
  }
}
// Reads a mix of Spark 2.4 files (from resources) and freshly written 3.0
// files (rebased and non-rebased) and checks they all round-trip correctly.
ignore("SPARK-31159: compatibility with Spark 2.4 in reading dates/timestamps") {
  val N = 8
  // test reading the existing 2.4 files and new 3.0 files (with rebase on/off) together.
  // `checkDefaultLegacyRead` encodes the expected default behavior for the 2.4
  // file: fail (no version metadata) or succeed (version metadata present).
  def checkReadMixedFiles[T](
      fileName: String,
      catalystType: String,
      rowFunc: Int => (String, String),
      toJavaType: String => T,
      checkDefaultLegacyRead: String => Unit,
      tsOutputType: String = "TIMESTAMP_MICROS"): Unit = {
    withTempPaths(2) { paths =>
      paths.foreach(_.delete())
      val path2_4 = getResourceParquetFilePath("test-data/" + fileName)
      val path3_0 = paths(0).getCanonicalPath
      val path3_0_rebase = paths(1).getCanonicalPath
      val df = Seq.tabulate(N)(rowFunc).toDF("dict", "plain")
        .select($"dict".cast(catalystType), $"plain".cast(catalystType))
      withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> tsOutputType) {
        checkDefaultLegacyRead(path2_4)
        // By default we should fail to write ancient datetime values.
        val e = intercept[SparkException](df.write.parquet(path3_0))
        assert(e.getCause.getCause.getCause.isInstanceOf[SparkUpgradeException])
        withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE.key -> CORRECTED.toString) {
          df.write.mode("overwrite").parquet(path3_0)
        }
        withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE.key -> LEGACY.toString) {
          df.write.parquet(path3_0_rebase)
        }
      }
      // For Parquet files written by Spark 3.0, we know the writer info and don't need the
      // config to guide the rebase behavior.
      withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_READ.key -> LEGACY.toString) {
        checkAnswer(
          spark.read.format("parquet").load(path2_4, path3_0, path3_0_rebase),
          // Each logical row appears once per file, hence tabulate(3).
          (0 until N).flatMap { i =>
            val (dictS, plainS) = rowFunc(i)
            Seq.tabulate(3) { _ =>
              Row(toJavaType(dictS), toJavaType(plainS))
            }
          })
      }
    }
  }
  def failInRead(path: String): Unit = {
    val e = intercept[SparkException](spark.read.parquet(path).collect())
    assert(e.getCause.isInstanceOf[SparkUpgradeException])
  }
  def successInRead(path: String): Unit = spark.read.parquet(path).collect()
  Seq(
    // By default we should fail to read ancient datetime values when parquet files don't
    // contain Spark version.
    "2_4_5" -> failInRead _,
    "2_4_6" -> successInRead _).foreach { case (version, checkDefaultRead) =>
    withAllParquetReaders {
      checkReadMixedFiles(
        s"before_1582_date_v$version.snappy.parquet",
        "date",
        (i: Int) => ("1001-01-01", s"1001-01-0${i + 1}"),
        java.sql.Date.valueOf,
        checkDefaultRead)
      checkReadMixedFiles(
        s"before_1582_timestamp_micros_v$version.snappy.parquet",
        "timestamp",
        (i: Int) => ("1001-01-01 01:02:03.123456", s"1001-01-0${i + 1} 01:02:03.123456"),
        java.sql.Timestamp.valueOf,
        checkDefaultRead)
      checkReadMixedFiles(
        s"before_1582_timestamp_millis_v$version.snappy.parquet",
        "timestamp",
        (i: Int) => ("1001-01-01 01:02:03.123", s"1001-01-0${i + 1} 01:02:03.123"),
        java.sql.Timestamp.valueOf,
        checkDefaultRead,
        tsOutputType = "TIMESTAMP_MILLIS")
      // INT96 is a legacy timestamp format and we always rebase the seconds for it.
      Seq("plain", "dict").foreach { enc =>
        checkAnswer(readResourceParquetFile(
          s"test-data/before_1582_timestamp_int96_${enc}_v$version.snappy.parquet"),
          Seq.tabulate(N) { i =>
            Row(
              java.sql.Timestamp.valueOf("1001-01-01 01:02:03.123456"),
              java.sql.Timestamp.valueOf(s"1001-01-0${i + 1} 01:02:03.123456"))
          })
      }
    }
  }
}
// Ancient timestamps written in LEGACY rebase mode must read back correctly
// under any reader-side rebase mode, for every timestamp output type and for
// both dictionary and plain encodings.
ignore("SPARK-31159: rebasing timestamps in write") {
  val N = 8
  Seq(false, true).foreach { dictionaryEncoding =>
    Seq(
      // (output type, value written, value seen when rebase is skipped).
      // INT96 is rebased unconditionally, so its "nonRebased" equals the input.
      ("TIMESTAMP_MILLIS", "1001-01-01 01:02:03.123", "1001-01-07 01:09:05.123"),
      ("TIMESTAMP_MICROS", "1001-01-01 01:02:03.123456", "1001-01-07 01:09:05.123456"),
      ("INT96", "1001-01-01 01:02:03.123456", "1001-01-01 01:02:03.123456")
    ).foreach { case (outType, tsStr, nonRebased) =>
      withClue(s"output type $outType") {
        withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> outType) {
          withTempPath { dir =>
            val path = dir.getAbsolutePath
            withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE.key -> LEGACY.toString) {
              Seq.tabulate(N)(_ => tsStr).toDF("tsS")
                .select($"tsS".cast("timestamp").as("ts"))
                .repartition(1)
                .write
                .option("parquet.enable.dictionary", dictionaryEncoding)
                .parquet(path)
            }
            withAllParquetReaders {
              // The file metadata indicates if it needs rebase or not, so we can always get the
              // correct result regardless of the "rebase mode" config.
              Seq(LEGACY, CORRECTED, EXCEPTION).foreach { mode =>
                withSQLConf(
                  SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_READ.key -> mode.toString) {
                  checkAnswer(
                    spark.read.parquet(path),
                    Seq.tabulate(N)(_ => Row(Timestamp.valueOf(tsStr))))
                }
              }
              // Force to not rebase to prove the written datetime values are rebased
              // and we will get wrong result if we don't rebase while reading.
              withSQLConf("spark.test.forceNoRebase" -> "true") {
                checkAnswer(
                  spark.read.parquet(path),
                  Seq.tabulate(N)(_ => Row(Timestamp.valueOf(nonRebased))))
              }
            }
          }
        }
      }
    }
  }
}
// Date counterpart of the timestamp-rebasing test above: ancient dates written
// in LEGACY rebase mode must read back correctly under any reader-side mode.
ignore("SPARK-31159: rebasing dates in write") {
  val N = 8
  Seq(false, true).foreach { dictionaryEncoding =>
    withTempPath { dir =>
      val path = dir.getAbsolutePath
      withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_WRITE.key -> LEGACY.toString) {
        Seq.tabulate(N)(_ => "1001-01-01").toDF("dateS")
          .select($"dateS".cast("date").as("date"))
          .repartition(1)
          .write
          .option("parquet.enable.dictionary", dictionaryEncoding)
          .parquet(path)
      }
      withAllParquetReaders {
        // The file metadata indicates if it needs rebase or not, so we can always get the
        // correct result regardless of the "rebase mode" config.
        Seq(LEGACY, CORRECTED, EXCEPTION).foreach { mode =>
          // Fixed: this is a Parquet test, so vary the Parquet rebase config.
          // The original toggled LEGACY_AVRO_REBASE_MODE_IN_READ, which has no
          // effect on Parquet reads, so the mode loop was testing nothing
          // (compare the sibling timestamp test above).
          withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_MODE_IN_READ.key -> mode.toString) {
            checkAnswer(
              spark.read.parquet(path),
              Seq.tabulate(N)(_ => Row(Date.valueOf("1001-01-01"))))
          }
        }
        // Force to not rebase to prove the written datetime values are rebased and we will get
        // wrong result if we don't rebase while reading.
        withSQLConf("spark.test.forceNoRebase" -> "true") {
          checkAnswer(
            spark.read.parquet(path),
            Seq.tabulate(N)(_ => Row(Date.valueOf("1001-01-07"))))
        }
      }
    }
  }
}
// Committer that always fails at job-commit time; used by the SPARK-8121 test
// to prove the Parquet-specific committer option is honored.
class JobCommitFailureParquetOutputCommitter(outputPath: Path, context: TaskAttemptContext)
  extends ParquetOutputCommitter(outputPath, context) {

  override def commitJob(jobContext: JobContext): Unit =
    throw new RuntimeException("Intentional exception for testing purposes")
}
// Committer that always fails at task-commit time; used by the SPARK-7837 test
// to force both commitTask() and abortTask() to run.
class TaskCommitFailureParquetOutputCommitter(outputPath: Path, context: TaskAttemptContext)
  extends ParquetOutputCommitter(outputPath, context) {

  override def commitTask(context: TaskAttemptContext): Unit =
    throw new RuntimeException("Intentional exception for testing purposes")
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala | Scala | apache-2.0 | 44,035 |
package com.arcusys.valamis.lesson.generator.util
/** Helpers for locating and rewriting "SCORMData/" resource references in lesson markup. */
object ResourceHelpers {

  /** Extracts all resource paths referenced after a "SCORMData/" prefix in `sourceText`
    * (e.g. image URLs embedded in question text). Matching is case-insensitive; each
    * result is the path fragment between "SCORMData/" and the closing double quote,
    * in order of appearance. */
  def fetchResources(sourceText: String): Array[String] =
    ("""(?i)(?<=SCORMData/)(.+?)(?=")""".r findAllIn sourceText).toArray

  /** Rewrites quoted URLs so they become relative to the SCORMData root: removes
    * everything between an opening double quote and "SCORMData/" — including the
    * "SCORMData/" prefix itself — case-insensitively. */
  def skipContextPathURL(source: String): String =
    """(?i)(?<=")([^"]*?)SCORMData/""".r replaceAllIn (source, "")

  /** Opens a bundled classpath resource under "common/" via the current thread's
    * context class loader. Returns null when the resource does not exist (the
    * underlying getResourceAsStream contract). */
  def getCommonResourceByName(fileName: String): java.io.InputStream =
    Thread.currentThread.getContextClassLoader.getResourceAsStream("common/" + fileName)
}
| ViLPy/Valamis | valamis-lesson-generator/src/main/scala/com/arcusys/valamis/lesson/generator/util/ResourceHelpers.scala | Scala | lgpl-3.0 | 533 |
package tu.model.knowledge.domain
import tu.model.knowledge._
import tu.model.knowledge.annotator.AnnotatedPhrase
import tu.exception.UnexpectedException
import org.slf4j.LoggerFactory
/**
* @author max
* Date: 8/30/12
* Time: 7:54 AM
*/
/**
 * A resource tagging a set of concepts and the links between them.
 *
 * @param _content     concept content keyed by URI (a typed K-line)
 * @param __links      mutable list of links between concepts
 * @param _uri         identity of this resource
 * @param _probability confidence attached to this resource
 */
class ConceptTag(val _content: TypedKLine[Concept],
                 var __links: List[ConceptLink],
                 val _uri: KnowledgeURI,
                 val _probability: Probability = new Probability())
  extends Resource(_uri, _probability) {

  // Read-only accessor over the mutable link list.
  def links = __links

  // Reconstructs an empty-content tag from a serialized key/value map
  // (URI and probability only; content and links start empty).
  def this(map: Map[String, String]) = {
    this(
      TypedKLine[Concept](Constant.CONTENT),
      List[ConceptLink](),
      new KnowledgeURI(map),
      new Probability(map)
    )
  }
}
object ConceptTag {
  val log = LoggerFactory.getLogger(this.getClass)

  /**
   * Loads a ConceptTag reachable from `parentId` via the (key, linkType) edge.
   * Throws UnexpectedException when the KB has no such child.
   */
  def load(kb: KB, parentId: KBNodeId, key: String, linkType: String): ConceptTag = {
    val selfMap = kb.loadChild(parentId, key, linkType)
    if (selfMap.isEmpty) {
      log.error("Concept not loaded for link {}/{} for {}", List(key, linkType, parentId.toString))
      throw new UnexpectedException("Concept not loaded for link " + key + "/" + linkType + " for " + parentId.toString)
    }
    val ID = new KBNodeId(selfMap)
    // Materializes raw child maps into URI -> Concept entries.
    def oneList(items: Map[String, Map[String, String]]): Map[KnowledgeURI, Concept] = {
      items.keys.foldLeft(Map[KnowledgeURI, Concept]()) {
        (acc, uri) => acc + Pair(KnowledgeURI(uri,true), new Concept(items(uri)))
      }
    }
    // Same as oneList but loads full AnnotatedPhrase children (recursive load).
    def oneListPhrases(items: Map[String, Map[String, String]]): Map[KnowledgeURI, AnnotatedPhrase] = {
      items.keys.foldLeft(Map[KnowledgeURI, AnnotatedPhrase]()) {
        (acc, uri) => acc + Pair(KnowledgeURI(uri,true), AnnotatedPhrase.load(kb, ID, uri, Constant.SENTENCES_LINK_NAME))
      }
    }
    val content =
      TypedKLine[Concept](
        Constant.CONTENT,
        oneList(kb.loadChildrenMap(ID, Constant.GENERALISATION_LINK_NAME))
      )
    val linksSourceMap = kb.loadChildrenMap(ID, Constant.CONCEPT_LINK_SOURCE_NAME)
    // NOTE(review): destinations are loaded with CONCEPT_LINK_SOURCE_NAME as
    // well, so linksDestinationMap is identical to linksSourceMap and every
    // link gets the same concept at both ends. This looks like a copy-paste
    // bug — presumably a destination-link constant was intended; verify
    // against the Constant definitions.
    val linksDestinationMap = kb.loadChildrenMap(ID, Constant.CONCEPT_LINK_SOURCE_NAME)
    val conceptLinkList: List[ConceptLink] =
      linksSourceMap.keys.foldLeft(List[ConceptLink]()) {
        (acc, uri) => ConceptLink(new Concept(linksSourceMap(uri)), new Concept(linksDestinationMap(uri)), uri) :: acc
      }
    new ConceptTag(content,
      conceptLinkList,
      new KnowledgeURI(selfMap),
      new Probability(selfMap)
    )
  }
}
| keskival/2 | model.knowledge/src/main/scala/tu/model/knowledge/domain/ConceptTag.scala | Scala | gpl-3.0 | 2,469 |
package pl.touk.nussknacker.engine.management.streaming
import org.apache.flink.api.scala._
import org.scalatest.{FunSuite, Matchers}
import pl.touk.nussknacker.engine.{ModelData, ProcessingTypeConfig}
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.build.ScenarioBuilder
import pl.touk.nussknacker.engine.definition.SignalDispatcher
import pl.touk.nussknacker.engine.management.{FlinkQueryableClient, FlinkStreamingDeploymentManagerProvider}
import pl.touk.nussknacker.engine.spel.Implicits._
import scala.concurrent.ExecutionContext.Implicits._
// Integration test (Docker-backed, see StreamingDockerTest): deploys a scenario
// with a lock transformer, queries its Flink queryable state, then releases the
// lock via a signal and observes the state flip.
class FlinkStreamingDeploymentManagerQueryableStateTest extends FunSuite with Matchers with StreamingDockerTest {
  //see DevConfigCreator
  val oneElementValue = "One element"

  override protected def classPath: List[String] = ClassPaths.scalaClasspath

  test("fetch queryable state for all keys") {
    kafkaClient.createTopic("esp.signals")
    val lockProcess = ScenarioBuilder
      .streaming("queryableStateProc1")
      .parallelism(1)
      .source("start", "oneSource")
      .customNode("lock", "lockOutput", "lockStreamTransformer", "input" -> "#input")
      .emptySink("sink", "monitor")
    val version = ProcessVersion(VersionId.initialVersionId, ProcessName(lockProcess.id), ProcessId(1), "user1", None)
    deployProcessAndWaitIfRunning(lockProcess, version)
    // The Flink job id is needed to address the queryable-state endpoint.
    val jobId = deploymentManager.findJobStatus(version.processName).futureValue
      .flatMap(_.deploymentId).get.value

    val processingTypeConfig = ProcessingTypeConfig.read(configWithHostKafka)
    val modelData = ModelData(processingTypeConfig)
    val client = new FlinkStreamingDeploymentManagerProvider()
      .createQueryableClient(processingTypeConfig.deploymentConfig).get.asInstanceOf[FlinkQueryableClient]
    // Reads the lock flag for our single key from Flink queryable state.
    def queryState(): Boolean = client.fetchState[java.lang.Boolean](
      jobId = jobId,
      queryName = "single-lock-state",
      key = oneElementValue).map(Boolean.box(_)).futureValue

    // Lock is held after deployment...
    eventually {
      queryState() shouldBe true
    }

    //see RemoveLockProcessSignalFactory for details
    SignalDispatcher
      .dispatchSignal(modelData)("removeLockSignal",
        lockProcess.id, Map("lockId" -> oneElementValue))

    // ...and released once the signal is processed.
    eventually {
      queryState() shouldBe false
    }
  }
}
| TouK/nussknacker | engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerQueryableStateTest.scala | Scala | apache-2.0 | 2,377 |
/*
* Copyright 2015 Tsukasa Kitachi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sbtjooq.codegen.internal
import java.util.Properties
import scala.collection.JavaConverters._
import scala.xml.{NodeSeq, Text}
object VariableExpander {

  // A handler turns a variable's value into the XML nodes it expands to;
  // values it does not cover fall back to plain text (see `fallback`).
  type Handler = PartialFunction[Any, NodeSeq]

  // Builds an expander over `vars`: looks the name up and renders the value via
  // `handler`, defaulting to text rendering for unhandled values.
  // NOTE(review): the result relies on placeholder-lambda expansion and the
  // externally defined VariableExpander type; presumably it is a
  // String => Option[NodeSeq] function — confirm against the type alias.
  def apply(vars: Map[String, Any], handler: Handler = defaultHandler): VariableExpander =
    vars.get(_).map(handler.applyOrElse(_, fallback))

  // Default handler: expands java.util.Properties into <property><key/><value/>
  // elements, one per entry.
  def defaultHandler: Handler = {
    case props: Properties =>
      props.entrySet().asScala.map { e =>
        <property>
          <key>{e.getKey}</key>
          <value>{e.getValue}</value>
        </property>
      }.toSeq
  }

  // Last-resort rendering: the value's toString as a text node.
  private def fallback: Any => NodeSeq =
    x => Text(x.toString)
}
| kxbmap/sbt-jooq | codegen/src/main/scala/sbtjooq/codegen/internal/VariableExpander.scala | Scala | apache-2.0 | 1,262 |
package com.tritondigital.counters
import java.util.concurrent.TimeUnit
import _root_.akka.actor.{Actor, ActorSystem, Props}
import _root_.akka.pattern.Patterns
import com.codahale.metrics._
import com.tritondigital.counters.codahale.CodahaleMetricsConverters
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.control.NonFatal
/**
* Thread safe implementation of Metrics, using Codahale for computing stats.
*/
class SimpleMetrics(actorSystem: ActorSystem) extends Metrics with MetricsProvider with Logging {
  import actorSystem.dispatcher

  // Internal protocol of the aggregator actor. All mutations arrive as
  // messages, so the mutable Codahale state below is confined to one actor
  // (this is what makes the class thread-safe).
  private case object GetMetrics
  private case class SetGaugeValue(key: MetricKey, value: Double)
  private case class IncrementCounter(key: MetricKey, incrementBy: Long)
  private case class UpdateTimer(key: MetricKey, duration: Long, unit: TimeUnit)
  private case class MarkMeter(key: MetricKey, times: Long)
  private case class UpdateHistogram(key: MetricKey, value: Long)

  private val actor = actorSystem.actorOf(Props(new AggregatorActor))

  /** Records the latest value of the gauge identified by `name` and `tags`. */
  def setGaugeValue(name: String, value: Double, tags: Tag*): Unit = {
    actor ! SetGaugeValue(MetricKey(name, tags), value)
  }

  /** Increments the counter by `incrementBy`. */
  def incrementCounter(name: String, incrementBy: Long, tags: Tag*) {
    if (log.isTraceEnabled) log.trace(s"Incrementing $name")
    actor ! IncrementCounter(MetricKey(name, tags), incrementBy)
  }

  /** Increments the counter by one. */
  def incrementCounter(name: String, tags: Tag*) {
    incrementCounter(name, 1, tags: _*)
  }

  /** Records a duration expressed in `unit` for the given timer. */
  def updateTimer(name: String, duration: Long, unit: TimeUnit, tags: Tag*) {
    if (log.isTraceEnabled) log.trace(s"Updating $name")
    actor ! UpdateTimer(MetricKey(name, tags), duration, unit)
  }

  /** Records the elapsed time since `startInMillis` (epoch millis) for the given timer. */
  def updateTimer(name: String, startInMillis: Long, tags: Tag*) {
    updateTimer(name, System.currentTimeMillis() - startInMillis, TimeUnit.MILLISECONDS, tags: _*)
  }

  /** Marks a single occurrence on the meter. */
  def markMeter(name: String, tags: Tag*) {
    markMeter(name, 1, tags: _*)
  }

  /** Marks `times` occurrences on the meter. */
  def markMeter(name: String, times: Long, tags: Tag*) {
    if (log.isTraceEnabled) log.trace(s"Marking $name")
    actor ! MarkMeter(MetricKey(name, tags), times)
  }

  /** Records a sample value into the histogram. */
  def updateHistogram(name: String, value: Long, tags: Tag*) {
    if (log.isTraceEnabled) log.trace(s"Updating $name")
    actor ! UpdateHistogram(MetricKey(name, tags), value)
  }

  /**
   * Takes a snapshot of all accumulated metrics.
   *
   * Any non-fatal failure (including an ask timeout after 60 seconds) is
   * swallowed and logged, yielding an empty snapshot instead of a failed
   * future.
   */
  def provide =
    Patterns
      .ask(actor, GetMetrics, 60.second)
      .mapTo[Iterable[Metric]]
      .recover {
        case NonFatal(ex) =>
          log.warn("Took more than 60 seconds to gather metrics. You probably have too much metrics...", ex)
          Iterable.empty[Metric]
      }

  /** Single-threaded owner of all mutable Codahale registries. */
  private class AggregatorActor extends Actor {
    private val gauges = collection.mutable.Map.empty[MetricKey, Double]
    private val counters = collection.mutable.Map.empty[MetricKey, Counter]
    private val meters = collection.mutable.Map.empty[MetricKey, Meter]
    private val histograms = collection.mutable.Map.empty[MetricKey, Histogram]
    private val timers = collection.mutable.Map.empty[MetricKey, Timer]

    def receive = {
      case SetGaugeValue(key, value) =>
        gauges += key -> value
      case IncrementCounter(key, incrementBy) =>
        getCounter(key).inc(incrementBy)
        if (log.isTraceEnabled) log.trace(s"Incremented $key")
      case UpdateTimer(key, duration, unit) =>
        getTimer(key).update(duration, unit)
        if (log.isTraceEnabled) log.trace(s"Updated $key")
      case MarkMeter(key, times) =>
        getMeter(key).mark(times)
        if (log.isTraceEnabled) log.trace(s"Marked $key")
      case UpdateHistogram(key, value) =>
        getHistogram(key).update(value)
        if (log.isTraceEnabled) log.trace(s"Updated $key")
      case GetMetrics =>
        sender ! toMetrics
    }

    // getOrElseUpdate replaces the original contains/insert/apply sequence;
    // safe because these maps are only ever touched from this actor.
    private def getCounter(key: MetricKey): Counter =
      counters.getOrElseUpdate(key, new Counter)

    private def getMeter(key: MetricKey): Meter =
      meters.getOrElseUpdate(key, new Meter)

    private def getHistogram(key: MetricKey): Histogram =
      histograms.getOrElseUpdate(key, new Histogram(new ExponentiallyDecayingReservoir))

    private def getTimer(key: MetricKey): Timer =
      timers.getOrElseUpdate(key, new Timer)

    // Converts all Codahale state into the library's Metric representation.
    private def toMetrics = {
      import CodahaleMetricsConverters._
      val gaugeMetrics = gauges
        .map { case (key, value) => Metric(key.name, value, key.tags) }
      val counterMetrics = counters
        .map { case (key, counter) => counterToMetric(counter, key.name, key.tags) }
      val meterMetrics = meters
        .flatMap { case (key, meter) => meterToMetrics(meter, key.name, key.tags) }
      val histogramMetrics = histograms
        .flatMap { case (key, histogram) => histogramToMetrics(histogram, key.name, key.tags) }
      val timerMetrics = timers
        .flatMap { case (key, timer) => timerToMetrics(timer, key.name, key.tags) }
      gaugeMetrics ++ counterMetrics ++ meterMetrics ++ histogramMetrics ++ timerMetrics
    }
  }
}
| tritondigital/tritondigital-counters | src/main/scala/com/tritondigital/counters/SimpleMetrics.scala | Scala | apache-2.0 | 5,463 |
// Compiler regression test (scalac ticket t3877): lazy vals defined inside
// while / do-while loop bodies must be re-initialized on every iteration.
// The specific loop constructs and early `return`s are the point of the test;
// they must not be "simplified" away.
object Test {
  val LIMIT = 10
  // Each test increments `d` via a per-iteration lazy val; `i` bounds the loop
  // to avoid an infinite loop if the lazy val were (incorrectly) cached.
  def test1: Unit = {
    var d = 2
    var i = 0 // avoid infinite loops
    while (d < LIMIT && i < LIMIT) {
      lazy val b = d + 1
      d = b
      i += 1
      println("test1: " + d)
    }
  }
  // Same as test1 but exits via `return` from an unconditional while.
  def test2: Unit = {
    var d = 2
    var i = 0
    while (true) {
      lazy val b = d + 1
      d = b
      i += 1
      println("test2: " + d)
      if (d >= LIMIT || i >= LIMIT)
        return
    }
  }
  // do-while variant with the condition in the loop test.
  def test3: Unit = {
    var d = 2
    var i = 0
    do {
      lazy val b = d + 1
      d = b
      i += 1
      println("test3: " + d)
    } while (d < LIMIT && i < LIMIT)
  }
  // do-while variant that exits via `return`.
  def test4: Unit = {
    var d = 2
    var i = 0
    do {
      lazy val b = d + 1
      d = b
      i += 1
      println("test4: " + d)
      if (d >= LIMIT || i >= LIMIT)
        return
    } while (true)
  }
  // Nested while loops, each with its own per-iteration lazy val.
  def test5: Unit = {
    var d = 2
    var i = 0
    while (d < LIMIT && i < LIMIT) {
      lazy val b = d + 1
      d = b
      i += 1
      println("test5.1: " + d)
      var e = 2
      var j = 0
      while (e < LIMIT && j < LIMIT) {
        lazy val f = e + 1
        e = f
        j += 1
        println("test5.2: " + e)
      }
    }
  }
  def main(args: Array[String]): Unit = {
    test1
    test2
    test3
    test4
    test5
  }
}
| yusuke2255/dotty | tests/run/t3877.scala | Scala | bsd-3-clause | 1,284 |
/*
* Copyright 2016 Combined Conditional Access Development, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ccadllc.cedi.config
import shapeless.{ ::, HNil }
// Compile-time-only checks: the explicit type ascriptions verify that the
// parser combinators infer the expected tupled / HList / case-class types.
// Nothing here is executed (note the ??? body below).
object CompilationTest {
  import ConfigParser._

  // `~` accumulates into flat tuples rather than nesting.
  val a: ConfigParser[(String, Int)] = string("foo") ~ int("bar")
  val b: ConfigParser[(String, Int, Boolean)] = a ~ bool("baz")
  val c: ConfigParser[(String, Int, Boolean, Int)] = b ~ int("qux")
  // `::` builds shapeless HLists...
  val d: ConfigParser[String :: Int :: Boolean :: HNil] = string("foo") :: int("bar") :: bool("baz")
  case class Qux(foo: String, bar: Int, baz: Boolean)
  // ...which convert to case classes via `as`.
  val e: ConfigParser[Qux] = d.as[Qux]
  val f: ConfigParser[Qux] = subconfig("a.b.c") { derived[Qux] }
  case class Quux(a: Qux, b: Qux)
  // Ensures an implicit ConfigParser[Qux] doesn't conflict with deriving a Qux parser
  implicit def quxParser: ConfigParser[Qux] = ??? // never invoked; its presence is the test
  val g: ConfigParser[Quux] = derived[Quux]
}
| ccadllc/cedi-config | core/src/test/scala/com/ccadllc/cedi/config/CompilationTest.scala | Scala | apache-2.0 | 1,420 |
package ee.cone.c4gate
import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor._
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble._
import ee.cone.c4di.c4multi
import ee.cone.c4proto.ToByteString
/** Assembly that attaches a single [[HiRateTx]] transform, keyed by the constant id "HiRateTx". */
@c4assemble("HiRateTxApp") class HiRateAssembleBase(factory: HiRateTxFactory) {
  // NOTE(review): the @c4assemble macro appears to generate join logic from this
  // method's exact shape (name, parameter kinds, return type) -- confirm before
  // changing the signature.
  def joinPosts(
    key: SrcId,
    firstborn: Each[S_Firstborn]
  ): Values[(SrcId, TxTransform)] = List(WithPK(factory.create("HiRateTx")))
}
/**
 * Transform that publishes the current wall-clock time (epoch millis rendered as a
 * string) under the HTTP path "/time" each time it runs.
 */
@c4multi("HiRateTxApp") final case class HiRateTx(srcId: SrcId)(
  publisher: Publisher,
  txAdd: LTxAdd,
) extends TxTransform with LazyLogging {
  def transform(local: Context): Context = {
    val timeStr = System.currentTimeMillis.toString
    logger.info(s"start handling $timeStr")
    val bytes = ToByteString(timeStr)
    // NOTE(review): the second argument (_+1000*60) presumably schedules expiry or
    // the next publication one minute later -- confirm against Publisher.publish
    txAdd.add(publisher.publish(ByPathHttpPublication("/time",Nil,bytes),_+1000*60))(local)
  }
}
/*
@assemble class HiRateAssemble {
def joinPosts(
key: SrcId,
post: Each[HttpPost]
): Values[(SrcId, TxTransform)] =
List(WithPK(TestDelayHttpPostHandler(post.srcId, post)))
}
case class HiRateTx(srcId: SrcId, post: HttpPost) extends TxTransform with LazyLogging {
def transform(local: Context): Context = {
logger.info(s"start handling $srcId")
concurrent.blocking{
Thread.sleep(1000)
}
logger.info(s"finish handling $srcId")
txAdd.add(delete(post))(local)
}
}*/
| conecenter/c4proto | base_examples/src/main/scala/ee/cone/c4gate/HiRateTxApp.scala | Scala | apache-2.0 | 1,477 |
package com.gilt.handlebars.scala.parser
import scala.util.parsing.combinator._
import scala.util.parsing.input.CharSequenceReader
object HandlebarsGrammar {
  /** Shared grammar instance configured with the default mustache delimiters. */
  protected val grammar = new HandlebarsGrammar("{{" -> "}}")

  /** Parses a complete template into a [[Program]]. */
  def apply(input: String): grammar.ParseResult[Program] = grammar.apply(input)

  /** Parses a single path expression into an [[Identifier]]. */
  def path(input: String): grammar.ParseResult[Identifier] = grammar.parseAll(grammar.path, input)
}
/**
 * Parser-combinator grammar for the Handlebars templating language.
 *
 * The ordering of alternatives (`|`) throughout this grammar is significant:
 * more specific productions must be tried before more general ones.
 *
 * @param delimiters the opening and closing mustache delimiters, e.g. ("{{", "}}")
 */
class HandlebarsGrammar(delimiters: (String, String)) extends JavaTokenParsers {

  // Only unicode line/paragraph separators are skippable; ordinary spaces and
  // newlines are significant template content and are consumed explicitly via `pad`.
  override protected val whiteSpace = """[\\u2028\\u2029\\u0085]+""".r

  /** Parses a complete template; empty input yields a program with a single empty Content node. */
  def apply(input: String) = {
    if (input.isEmpty) {
      Success(Program(List(Content(input))), new CharSequenceReader(""))
    } else {
      parseAll(root, input)
    }
  }

  def root = program

  // A program is a statement list, optionally split into a main section and an
  // inverse ("else") section.
  def program: Parser[Program] = {
    inverse ~ statements ^^ {
      case _ ~ section => Program(Nil, Some(Program(section)))
    } |
    statements ~ inverse ~ statements ^^ {
      case control ~ _ ~ flip => Program(control, Some(Program(flip)))
    } |
    statements ~ inverse ^^ {
      case section ~ _ => Program(section)
    } |
    statements ^^ { Program(_) } |
    inverse ^^^ { Program(Nil) }
  }

  def statements = rep1(statement)

  def statement = {
    inverseBlock |
    block |
    mustache |
    partial |
    CONTENT ^^ { Content(_) } |
    comment
  }

  // {{^name}}...{{/name}} - renders its body when the value is falsy
  def inverseBlock = blockify("^") ^^ {
    case (stache, Some(prog)) => Block(stache, prog.inverse.getOrElse(Program(Nil)), Some(prog))
    case (stache, None) => Block(stache, Program(Nil), None)
  }

  // {{#name}}...{{/name}}
  def block = blockify("#") ^^ {
    case (stache, Some(prog)) => Block(stache, prog, prog.inverse)
    case (stache, None) => Block(stache, Program(Nil), None)
  }

  // {{expr}} escapes HTML; {{&expr}} and {{{expr}}} emit the raw value
  def mustache: Parser[Mustache] = {
    mustachify(pad(inMustache)) ^^ { mustacheable(_) } |
    mustachify("&" ~> pad(inMustache)) ^^ { mustacheable(_, true) } |
    mustachify("{" ~> pad(inMustache) <~ "}") ^^ { mustacheable(_, true) }
  }

  // {{> partialName [context]}}
  def partial: Parser[Partial] = mustachify(">" ~> pad( partialName ~ opt(whiteSpace ~> path) )) ^^ {
    case (name ~ contextOpt) => Partial(name, contextOpt)
  }

  // The interior of a mustache: a path/helper, optional positional params, optional hash
  def inMustache: Parser[(IdentifierNode, List[ValueNode], Option[HashNode])] = {
    path ~ params ~ hash ^^ {
      case (id ~ params ~ hash) => (id, params, Some(hash))
    } |
    path ~ hash ^^ {
      case (id ~ hash) => (id, Nil, Some(hash))
    } |
    path ~ params ^^ {
      case (id ~ params) => (id, params, None)
    } |
    path ^^ { (_ , Nil, None) } |
    dataName ^^ { (_ , Nil, None) } |
    failure("Invalid Mustache")
  }

  def params = rep1(whiteSpace ~> param)

  def hash = rep1(whiteSpace ~> hashSegment) ^^ {
    pairs:List[(String, ValueNode)] => HashNode(pairs.toMap)
  }

  // key=value pair inside a mustache
  def hashSegment = (ID ~ EQUALS ~ param) ^^ {
    case (i ~ _ ~ p) => (i, p) // plain tuple literal; the deprecated `Pair` alias is gone from modern Scala
  }

  def partialName = (path | STRING | INTEGER) ^^ { PartialName(_) }

  def param = STRING |
    INTEGER |
    BOOLEAN |
    path |
    dataName

  // @data references, e.g. {{@index}}; "." and ".." are not valid data names
  def dataName = "@" ~> not("." | "..") ~> simplePath ^^ { DataNode(_) }

  def path = not("else") ~> rep1sep(ID <~ not(EQUALS) | PARENT | SELF, SEPARATOR) ^^ { Identifier(_) }
  def simplePath = not("else") ~> rep1sep(ID <~ not(EQUALS), SEPARATOR) ^^ { Identifier(_) }

  def inverse = mustachify( pad("^" | "else") )

  def comment = mustachify("!" ~> CONTENT) ^^ { Comment(_) }

  /**
   * Parses an opening mustache with the given prefix, an optional body program, and a
   * closing mustache, failing when the closing path does not match the opening one.
   */
  def blockify(prefix: Parser[String]): Parser[(Mustache, Option[Program])] = {
    blockstache(prefix) ~ opt(program) ~ mustachify("/" ~> pad(path)) >> {
      case (mustache ~ _ ~ close) if close != mustache.path => failure(mustache.path.string + " doesn't match " +
        close.string)
      case (mustache ~ programOpt ~ _) => success((mustache, programOpt))
    }
  }

  def blockstache(prefix: Parser[String]) = mustachify(prefix ~> pad(inMustache)) ^^ {
    mustacheable(_)
  }

  /** Builds a Mustache node from the parsed interior, marking it unescaped when requested. */
  def mustacheable(tuple: (IdentifierNode, List[ValueNode], Option[HashNode]),
      unescape: Boolean = false): Mustache = {
    tuple match {
      case (id, params, Some(hash)) => Mustache(id, params, hash, unescape)
      case (id, params, None) => Mustache(id, params, unescaped = unescape)
    }
  }

  /** Wraps a parser in the open/close delimiters. */
  def mustachify[T](parser: Parser[T]): Parser[T] = OPEN ~> parser <~ CLOSE

  /** Allows optional surrounding whitespace, which is otherwise significant. */
  def pad[T](id: Parser[T]): Parser[T] = opt(whiteSpace) ~> id <~ opt(whiteSpace)

  val STRING = stringLiteral ^^ { s:String => StringParameter(s.stripPrefix("\\"").stripSuffix("\\"")) }
  val INTEGER = wholeNumber ^^ { n:String => IntegerParameter(n.toInt) }
  val BOOLEAN = {
    "true" ^^^ { BooleanParameter(true) } |
    "false" ^^^ { BooleanParameter(false) }
  }
  val EQUALS = "="
  // Bare identifier, bracketed segment ([anything but ']']), or a Java-style ident
  val ID = """[^\\s!"#%-,\\.\\/;->@\\[-\\^`\\{-~]+""".r | ("[" ~> """[^\\]]*""".r <~ "]") | ident
  val SEPARATOR = "/" | "."
  val PARENT = ".."
  val SELF = "."
  val OPEN = delimiters._1
  val CLOSE = delimiters._2
  val ESCAPE = "\\\\"
  // Raw content: escaped delimiters, or any character that does not start a delimiter
  val CONTENT = rep1((ESCAPE ~> (OPEN | CLOSE) | not(OPEN | CLOSE) ~> ".|\\r|\\n".r)) ^^ { t => t.mkString("") }
}
| QiaoBuTang/handlebars.scala | src/main/scala/com/gilt/handlebars/scala/parser/HandlebarsGrammar.scala | Scala | apache-2.0 | 4,992 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.cache
import scala.ref.SoftReference
/**
 * Creates a per-thread cache of values wrapped in SoftReferences, which allows them to be reclaimed
 * by garbage-collection when needed.
 *
 * Because each thread sees only its own underlying map, no synchronization is required, but
 * values stored by one thread are not visible to any other thread.
 *
 * @tparam K key type
 * @tparam V value type
 */
class SoftThreadLocalCache[K, V <: AnyRef] extends scala.collection.mutable.Map[K, V]
    with scala.collection.mutable.MapLike[K, V, SoftThreadLocalCache[K, V]] {

  // one backing map per thread; entries hold soft references so the GC can reclaim values
  protected [cache] val cache = new ThreadLocal[scala.collection.mutable.Map[K, SoftReference[V]]] {
    override def initialValue: scala.collection.mutable.Map[K, SoftReference[V]] =
      scala.collection.mutable.Map.empty
  }

  // absent either because the key was never set or because the soft reference was cleared
  override def get(key: K): Option[V] = cache.get().get(key).flatMap(_.get)

  override def getOrElseUpdate(key: K, op: => V): V = get(key) match {
    case Some(values) => values
    case None =>
      val values = op
      cache.get().put(key, new SoftReference[V](values))
      values
  }

  override def +=(kv: (K, V)): this.type = {
    cache.get() += ((kv._1, new SoftReference[V](kv._2)))
    this
  }

  override def -=(key: K): this.type = {
    // previously the removed reference was also dereferenced and the result discarded;
    // removal alone is all that is required here
    cache.get().remove(key)
    this
  }

  override def empty: SoftThreadLocalCache[K, V] = new SoftThreadLocalCache[K, V]()

  override def iterator: Iterator[(K, V)] =
    // dereference each soft reference exactly once: the previous
    // `withFilter(_._2.get.isDefined)` + `.get.get` pattern could throw if the GC
    // cleared the reference between the check and the dereference
    cache.get().iterator.flatMap { case (k, ref) => ref.get.map(v => (k, v)) }
}
| elahrvivaz/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/cache/SoftThreadLocalCache.scala | Scala | apache-2.0 | 1,868 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Core Spark functionality. [[spark.SparkContext]] serves as the main entry point to Spark, while
 * [[spark.RDD]] is the data type representing a distributed collection, and provides most
 * parallel operations.
 *
 * In addition, [[spark.PairRDDFunctions]] contains operations available only on RDDs of key-value
 * pairs, such as `groupByKey` and `join`; [[spark.DoubleRDDFunctions]] contains operations
 * available only on RDDs of Doubles; and [[spark.SequenceFileRDDFunctions]] contains operations
 * available on RDDs that can be saved as SequenceFiles. These operations are automatically
 * available on any RDD of the right type (e.g. RDD[(Int, Int)]) through implicit conversions
 * when you `import spark.SparkContext._`.
 */
package object spark {
  // Intentionally empty: this package object exists only to carry the package-level scaladoc
}
| wgpshashank/spark | core/src/main/scala/spark/package.scala | Scala | apache-2.0 | 1,597 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.scalar.tests
import org.apache.ignite.Ignition
import org.apache.ignite.scalar.scalar
import org.apache.ignite.scalar.scalar._
import org.junit.runner.RunWith
import org.scalatest._
import org.scalatest.junit.JUnitRunner
/**
 * Tests for `affinityRun..` and `affinityCall..` methods.
 *
 * Each spec starts an Ignite node from the spring config and runs three
 * affinity-routed closures, asserting that all three executed exactly once
 * (via a shared distributed atomic counter).
 */
@RunWith(classOf[JUnitRunner])
class ScalarAffinityRoutingSpec extends FlatSpec with ShouldMatchers with BeforeAndAfterAll {
    /** Path to the Spring XML used to configure the test grid. */
    private val CFG = "modules/scalar/src/test/resources/spring-cache.xml"

    /** Cache name. */
    private val CACHE_NAME = "partitioned_tx"

    "affinityRun$ method" should "run correctly" in scalar(CFG) {
        // `.get` asserts that the configured cache actually exists on the grid
        val c = cache$[Int, Int](CACHE_NAME).get

        // c += (0 -> 0)
        // c += (1 -> 1)
        // c += (2 -> 2)

        // distributed counter shared by the closures below (created if absent)
        val cnt = Ignition.ignite.atomicLong("affinityRun", 0, true)

        ignite$.affinityRun$(CACHE_NAME, 0, () => { cnt.incrementAndGet() }, null)
        ignite$.affinityRun$(CACHE_NAME, 1, () => { cnt.incrementAndGet() }, null)
        ignite$.affinityRun$(CACHE_NAME, 2, () => { cnt.incrementAndGet() }, null)

        assert(cnt.get === 3)
    }

    "affinityRunAsync$ method" should "run correctly" in scalar(CFG) {
        // `.get` asserts that the configured cache actually exists on the grid
        val c = cache$[Int, Int](CACHE_NAME).get

        // c += (0 -> 0)
        // c += (1 -> 1)
        // c += (2 -> 2)

        val cnt = Ignition.ignite.atomicLong("affinityRunAsync", 0, true)

        // `.get` blocks until each async closure completes before asserting
        ignite$.affinityRunAsync$(CACHE_NAME, 0, () => { cnt.incrementAndGet() }, null).get
        ignite$.affinityRunAsync$(CACHE_NAME, 1, () => { cnt.incrementAndGet() }, null).get
        ignite$.affinityRunAsync$(CACHE_NAME, 2, () => { cnt.incrementAndGet() }, null).get

        assert(cnt.get === 3)
    }
}
| irudyak/ignite | modules/scalar/src/test/scala/org/apache/ignite/scalar/tests/ScalarAffinityRoutingSpec.scala | Scala | apache-2.0 | 2,521 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.io.fs
import java.io.{InputStream, OutputStream}
import com.typesafe.scalalogging.LazyLogging
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.io.PathUtils
import org.locationtech.geomesa.utils.io.fs.FileSystemDelegate.FileHandle
/**
 * Abstraction over a file system (local, HDFS, etc), providing lookup and
 * wildcard expansion of file handles
 */
trait FileSystemDelegate extends LazyLogging {

  /**
   * Get the handle for a given file, which may or may not exist
   *
   * @param path path
   * @return
   */
  def getHandle(path: String): FileHandle

  /**
   * Expand wildcards, recurse into directories, etc
   *
   * @param path input path
   * @return any files found in the interpreted path
   */
  def interpretPath(path: String): Seq[FileHandle]
}
object FileSystemDelegate {

  /**
   * Creation mode for files
   *
   * `Create` - file must not exist, else throw `FileAlreadyExists`
   * `Overwrite` - existing file will be truncated, else throw `FileNotFoundException`
   * `Append` - existing file will be appended, else throw `FileNotFoundException`
   * `Create|Overwrite` - if file exists, create it, else truncate it
   * `Create|Append` - if file exists, create it, else append it
   * `CreateParents` - combined with `Create`, if parent folder does not exist, create it
   */
  object CreateMode {
    // distinct bit flags, combined via `|` on CreateMode
    val Create    : CreateMode = new CreateMode(0x01)
    val Overwrite : CreateMode = new CreateMode(0x02)
    val Append    : CreateMode = new CreateMode(0x04)
  }

  // value class wrapping the bit mask, so mode checks have no allocation cost
  class CreateMode(val flag: Int) extends AnyVal {
    // union of the two modes' flags
    def |(other: CreateMode): CreateMode = new CreateMode(flag | other.flag)
    def create: Boolean = (flag & CreateMode.Create.flag) != 0
    def append: Boolean = (flag & CreateMode.Append.flag) != 0
    def overwrite: Boolean = (flag & CreateMode.Overwrite.flag) != 0
    // Append and Overwrite are mutually exclusive; at least one mode bit must be set
    def validate(): Unit = {
      if (append && overwrite) {
        throw new IllegalArgumentException("Can't specify both append and overwrite")
      } else if (!append && !overwrite && !create) {
        throw new IllegalArgumentException("Must specify at least one of create, append or overwrite")
      }
    }
  }

  /**
   * Abstraction over a readable file
   */
  trait FileHandle {

    /**
     * The file extension, minus any compression or zipping
     *
     * @return
     */
    lazy val format: String = PathUtils.getUncompressedExtension(path)

    /**
     * Path to the underlying file represented by this object
     *
     * @return
     */
    def path: String

    /**
     * Does the file exist or not
     *
     * @return
     */
    def exists: Boolean

    /**
     * File length (size), in bytes
     *
     * @return
     */
    def length: Long

    /**
     * Open an input stream to read the underlying file. Archive formats (tar, zip) will return multiple streams,
     * one per archive entry, along with the name of the entry. The iterator of input streams should only be
     * closed once all the input streams have been processed. The individual streams will be closed when the
     * overall iterator is closed, although they may be closed individually if desired
     *
     * @return
     */
    def open: CloseableIterator[(Option[String], InputStream)]

    /**
     * Open the file for writing
     *
     * @param mode write mode
     * @param createParents if the file does not exist, create its parents. Note that this only makes sense
     *                      with `CreateMode.Create`
     */
    def write(mode: CreateMode, createParents: Boolean = false): OutputStream

    /**
     * Delete the file
     *
     * @param recursive if the file is a directory, recursively delete its contents
     */
    def delete(recursive: Boolean = false): Unit
  }
}
| locationtech/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/io/fs/FileSystemDelegate.scala | Scala | apache-2.0 | 4,225 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.sql
import slamdata.Predef._
import quasar._, RenderTree.ops._
import matryoshka.Recursive
import monocle.macros.Lenses
import scalaz._, Scalaz._
/**
 * A SQL expression together with the scope statements (imports and function
 * declarations) that precede it.
 *
 * @param expr the expression
 * @param scope the statements in scope for the expression
 */
@Lenses final case class ScopedExpr[T](expr: T, scope: List[Statement[T]]) {
  /** Applies an effectful function to the expression, leaving the scope untouched. */
  def mapExpressionM[M[_]:Functor](f: T => M[T]): M[ScopedExpr[T]] =
    f(expr).map(ScopedExpr(_, scope))
  /** The import statements contained in the scope. */
  def imports: List[Import[T]] =
    scope.collect { case i: Import[_] => i }
  /** The function declarations contained in the scope. */
  def defs: List[FunctionDecl[T]] =
    scope.collect { case d: FunctionDecl[_] => d }
  /** Pretty-prints the scope (terminated by ";"), if non-empty, followed by the expression. */
  def pprint(implicit T: Recursive.Aux[T, Sql]): String = {
    val scopeString = if (scope.isEmpty) "" else scope.pprint + ";\\n"
    scopeString + sql.pprint(expr)
  }
}
object ScopedExpr {
  // Renders the scope and expression as two children of a single labelled node
  implicit def renderTree[T:RenderTree]: RenderTree[ScopedExpr[T]] =
    new RenderTree[ScopedExpr[T]] {
      def render(sExpr: ScopedExpr[T]) =
        NonTerminal("Sql Scoped Expr" :: Nil, None, List(sExpr.scope.render, sExpr.expr.render))
    }

  // Traverses the expression first, then each statement in the scope
  implicit val traverse: Traverse[ScopedExpr] = new Traverse[ScopedExpr] {
    def traverseImpl[G[_]:Applicative,A,B](ba: ScopedExpr[A])(f: A => G[B]): G[ScopedExpr[B]] =
      (f(ba.expr) |@| ba.scope.traverse(_.traverse(f)))(ScopedExpr(_, _))
  }

  // Structural equality on the (expr, scope) pair
  implicit def equal[T: Equal]: Equal[ScopedExpr[T]] = Equal.equalBy(s => (s.expr, s.scope))
}
| drostron/quasar | frontend/src/main/scala/quasar/sql/ScopedExpr.scala | Scala | apache-2.0 | 1,904 |
package com.asto.dop.core.module.collect
import com.asto.dop.core.entity.VisitEntity
import scala.collection.mutable
/**
 * Event channel for saved [[VisitEntity]] instances: subscribers registered on
 * this Publisher are notified synchronously whenever an entity is published.
 */
class VisitEntitySaveEvent extends mutable.Publisher[VisitEntity] {
  /** Forwards the given entity to every registered subscriber. */
  def pub(entity: VisitEntity): Unit = publish(entity)
}
| zj-lingxin/dop-core | src/main/scala/com/asto/dop/core/module/collect/CollectEvent.scala | Scala | mit | 274 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.api
import java.util.Objects
import org.locationtech.geomesa.features.kryo.KryoFeatureSerializer
import org.locationtech.geomesa.index.api.QueryPlan.{FeatureReducer, ResultsToFeatures}
import org.locationtech.geomesa.index.geotools.GeoMesaDataStore
import org.locationtech.geomesa.index.iterators.IteratorCache
import org.locationtech.geomesa.index.utils.Explainer
import org.locationtech.geomesa.index.utils.Reprojection.QueryReferenceSystems
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/**
 * Plan for querying a GeoMesaDataStore
 *
 * @tparam DS type of this data store
 */
trait QueryPlan[DS <: GeoMesaDataStore[DS]] {

  /**
   * Type of raw results returned from the underlying database
   */
  type Results

  /**
   * Reference back to the strategy
   *
   * @return
   */
  def filter: FilterStrategy

  /**
   * Runs the query plan against the underlying database
   *
   * Note: callers are expected to close the returned iterator when done
   *
   * @param ds data store - provides connection object and metadata
   * @return
   */
  def scan(ds: DS): CloseableIterator[Results]

  /**
   * Transform results coming back from a raw scan into features
   *
   * @return
   */
  def resultsToFeatures: ResultsToFeatures[Results]

  /**
   * Optional reduce step for simple features coming back
   *
   * @return
   */
  def reducer: Option[FeatureReducer]

  /**
   * Sort fields and reverse order flags (true flips order to descending, false keeps order as ascending)
   *
   * @return
   */
  def sort: Option[Seq[(String, Boolean)]]

  /**
   * Max features to return from the scan
   *
   * @return
   */
  def maxFeatures: Option[Int]

  /**
   * Geometry projection
   *
   * @return
   */
  def projection: Option[QueryReferenceSystems]

  /**
   * Explains details on how this query plan will be executed
   *
   * @param explainer explainer to use for explanation
   * @param prefix prefix for explanation lines, used for nesting explanations
   */
  def explain(explainer: Explainer, prefix: String = ""): Unit
}
object QueryPlan {

  /**
   * Convert scan results to simple features. Must have a zero-arg constructor to allow re-creation from
   * a serialized form.
   *
   * Converters are encouraged to also allow direct instantiation via an alternate constructor, as
   * serialization is generally only used for map/reduce jobs. Similarly, state is encouraged to be
   * lazily evaluated.
   *
   * @tparam T result type
   */
  trait ResultsToFeatures[T] extends SerializableState {

    /**
     * Simple feature type that will be returned from `apply`
     *
     * @return
     */
    def schema: SimpleFeatureType

    /**
     * Convert a result to a feature
     *
     * @param result result
     * @return
     */
    def apply(result: T): SimpleFeature
  }

  object ResultsToFeatures {

    /**
     * Serialize a results to features as a string
     *
     * @param obj object
     * @tparam T result type
     * @return
     */
    def serialize[T](obj: ResultsToFeatures[T]): String = SerializableState.serialize(obj)

    /**
     * Deserialize a results to features from a string
     *
     * @param serialized serialized object
     * @tparam T result type
     * @return
     */
    def deserialize[T](serialized: String): ResultsToFeatures[T] = SerializableState.deserialize(serialized)

    /**
     * Empty results to features used in placeholders - don't invoke `apply` on the result.
     *
     * @tparam T result type
     * @return
     */
    def empty[T]: ResultsToFeatures[T] = EmptyResultsToFeatures.asInstanceOf[ResultsToFeatures[T]]

    /**
     * Identity function
     *
     * @return
     */
    def identity(sft: SimpleFeatureType): ResultsToFeatures[SimpleFeature] = new IdentityResultsToFeatures(sft)

    /**
     * For 'empty' query plans - don't invoke `apply`
     */
    object EmptyResultsToFeatures extends ResultsToFeatures[Void] {
      // deliberately inert: no state, null schema, and `apply` always returns null
      override val state: Map[String, String] = Map.empty
      override def init(state: Map[String, String]): Unit = {}
      override def schema: SimpleFeatureType = null
      override def apply(result: Void): SimpleFeature = null
    }

    /**
     * Identity function - for situations where features are already deserialized
     *
     * @param sft simple feature type
     */
    class IdentityResultsToFeatures(private var sft: SimpleFeatureType) extends ResultsToFeatures[SimpleFeature] {

      def this() = this(null) // no-arg constructor required for serialization

      // the feature type round-trips through its name + encoded spec
      override def state: Map[String, String] = Map(
        "name" -> sft.getTypeName,
        "spec" -> SimpleFeatureTypes.encodeType(sft, includeUserData = true)
      )

      override def init(state: Map[String, String]): Unit =
        sft = SimpleFeatureTypes.createType(state("name"), state("spec"))

      override def schema: SimpleFeatureType = sft

      override def apply(result: SimpleFeature): SimpleFeature = result

      def canEqual(other: Any): Boolean = other.isInstanceOf[IdentityResultsToFeatures]

      override def equals(other: Any): Boolean = other match {
        case that: IdentityResultsToFeatures if that.canEqual(this) => sft == that.sft
        case _ => false
      }

      // standard 31-based hash combine over the constituent fields
      override def hashCode(): Int = {
        val state = Seq(sft)
        state.map(Objects.hashCode).foldLeft(0)((a, b) => 31 * a + b)
      }
    }
  }

  /**
   * Client-side reduce for the results of a scan. Must have a zero-arg constructor to allow re-creation from
   * a serialized form.
   *
   * Reducers are encouraged to also allow direct instantiation via an alternate constructor, as
   * serialization is generally only used for map/reduce jobs. Similarly, state is encouraged to be
   * lazily evaluated.
   */
  trait FeatureReducer extends SerializableState {

    /**
     * Reduce the results of a scan
     *
     * @param features features
     * @return
     */
    def apply(features: CloseableIterator[SimpleFeature]): CloseableIterator[SimpleFeature]
  }

  object FeatureReducer {

    /**
     * Serialize a feature reducer
     *
     * @param obj object to serialize
     * @return
     */
    def serialize(obj: FeatureReducer): String = SerializableState.serialize(obj)

    /**
     * Deserialize a feature reducer
     *
     * @param serialized serialized object
     * @return
     */
    def deserialize(serialized: String): FeatureReducer = SerializableState.deserialize(serialized)
  }

  /**
   * Abstract base class for converting the results from a normal feature index
   *
   * @param index index
   * @param sft simple feature type returned from the scan
   * @tparam T result type
   */
  abstract class IndexResultsToFeatures[T](
      protected var index: GeoMesaFeatureIndex[_, _],
      protected var sft: SimpleFeatureType
    ) extends ResultsToFeatures[T] {

    // null until either the primary constructor supplies an index or `init` is called
    protected var serializer: KryoFeatureSerializer = if (index == null) { null } else { createSerializer }

    override def init(state: Map[String, String]): Unit = {
      val spec = state("spec")
      sft = SimpleFeatureTypes.createType(state("name"), spec)
      // "isft" is only present when the index's type differs from the returned type
      index = state.get("isft") match {
        case None => IteratorCache.index(sft, spec, state("idx"))
        case Some(isft) => IteratorCache.index(IteratorCache.sft(isft), isft, state("idx"))
      }
      serializer = createSerializer
    }

    override def state: Map[String, String] = {
      val base = Map(
        "name" -> sft.getTypeName,
        "spec" -> SimpleFeatureTypes.encodeType(sft, includeUserData = true),
        "idx"  -> index.identifier
      )
      // only encode the index's type separately when it differs from the returned type
      if (index.sft == sft) { base } else {
        base.updated("isft", SimpleFeatureTypes.encodeType(index.sft, includeUserData = true))
      }
    }

    override def schema: SimpleFeatureType = sft

    protected def createSerializer: KryoFeatureSerializer = {
      val builder = KryoFeatureSerializer.builder(sft)
      // feature ids may be stored in the serialized value or derived from the row key
      if (index.serializedWithId) { builder.withId.build() } else { builder.withoutId.build() }
    }

    def canEqual(other: Any): Boolean = other.isInstanceOf[IndexResultsToFeatures[T]]

    override def equals(other: Any): Boolean = other match {
      case that: IndexResultsToFeatures[T] if that.canEqual(this) =>
        sft == that.sft && {
          if (index == null) { that.index == null } else if (that.index == null) { false } else {
            index.identifier == that.index.identifier && index.sft == that.index.sft
          }
        }
      case _ => false
    }

    // standard 31-based hash combine over the constituent fields
    override def hashCode(): Int = {
      val state = Seq(index, sft)
      state.map(Objects.hashCode).foldLeft(0)((a, b) => 31 * a + b)
    }
  }
}
| elahrvivaz/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/api/QueryPlan.scala | Scala | apache-2.0 | 9,301 |
package in.suhj.eridown
import org.scalatest.FunSuite
/**
 * List parsing tests exercising Parser.render against CommonMark-style list
 * examples: list items containing paragraphs, indented code blocks and block
 * quotes, nesting, ordered-list start numbers, marker changes, and loose vs.
 * tight list behaviour.
 *
 * NOTE(review): the `\\n` sequences below are literal backslash-n in this text,
 * which looks double-escaped (likely a data-extraction artifact) — confirm
 * against the upstream file, where these are presumably real `\n` escapes.
 * NOTE(review): several adjacent tests appear to share identical names (e.g.
 * the pairs around "- one" / "  two"); FunSuite rejects duplicate test names at
 * construction time, so presumably interior whitespace differs in the original
 * — verify.
 */
class ListTest extends FunSuite {
  test("A paragraph\\nwith two lines.\\n\\n indented code\\n\\n> A block quote.") {
    assert(Parser.render("""A paragraph
      |with two lines.
      |
      | indented code
      |
      |> A block quote.""".stripMargin.trim) ==
      """<p>A paragraph
        |with two lines.</p><pre><code>indented code</code></pre><blockquote><p>A block quote.</p></blockquote>""".stripMargin.trim)
  }
  test("1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.") {
    assert(Parser.render("""1. A paragraph
      | with two lines.
      |
      | indented code
      |
      | > A block quote.""".stripMargin.trim) ==
      """<ol><li><p>A paragraph
        |with two lines.</p><pre><code>indented code</code></pre><blockquote><p>A block quote.</p></blockquote></li></ol>""".stripMargin.trim)
  }
  test("- one\\n\\n two") {
    assert(Parser.render("- one\\n\\n two") == "<ul><li>one</li></ul><p>two</p>")
  }
  test("- one\\n\\n  two") {
    assert(Parser.render("- one\\n\\n  two") == "<ul><li><p>one</p><p>two</p></li></ul>")
  }
  test(" - one\\n\\n two") {
    assert(Parser.render(" - one\\n\\n two") == "<ul><li>one</li></ul><pre><code> two</code></pre>")
  }
  test(" - one\\n\\n  two") {
    assert(Parser.render(" - one\\n\\n  two") == "<ul><li><p>one</p><p>two</p></li></ul>")
  }
  test(" > > 1. one\\n>>\\n>> two") {
    assert(Parser.render(" > > 1. one\\n>>\\n>> two") == "<blockquote><blockquote><ol><li><p>one</p><p>two</p></li></ol></blockquote></blockquote>")
  }
  test(">>- one\\n>>\\n > > two") {
    assert(Parser.render(">>- one\\n>>\\n > > two") == "<blockquote><blockquote><ul><li>one</li></ul><p>two</p></blockquote></blockquote>")
  }
  test("-one\\n\\n2.two") {
    assert(Parser.render("-one\\n\\n2.two") == "<p>-one</p><p>2.two</p>")
  }
  test("- foo\\n\\n\\n bar") {
    assert(Parser.render("- foo\\n\\n\\n bar") == "<ul><li><p>foo</p><p>bar</p></li></ul>")
  }
  test("1. foo\\n\\n ```\\n bar\\n ```\\n\\n baz\\n\\n > bam") {
    assert(Parser.render("1. foo\\n\\n ```\\n bar\\n ```\\n\\n baz\\n\\n > bam") ==
      "<ol><li><p>foo</p><pre><code>bar</code></pre><p>baz</p><blockquote><p>bam</p></blockquote></li></ol>")
  }
  test("- Foo\\n\\n bar\\n\\n\\n baz") {
    assert(Parser.render("- Foo\\n\\n bar\\n\\n\\n baz") ==
      "<ul><li><p>Foo</p><pre><code>bar\\n\\n\\nbaz</code></pre></li></ul>")
  }
  // Ordered list start numbers: at most nine digits are honoured
  test("123456789. ok") {
    assert(Parser.render("123456789. ok") ==
      "<ol start=\\"123456789\\"><li>ok</li></ol>")
  }
  test("1234567890. not ok") {
    assert(Parser.render("1234567890. not ok") ==
      "<p>1234567890. not ok</p>")
  }
  test("0. ok") {
    assert(Parser.render("0. ok") == "<ol start=\\"0\\"><li>ok</li></ol>")
  }
  test("003. ok") {
    assert(Parser.render("003. ok") == "<ol start=\\"3\\"><li>ok</li></ol>")
  }
  test("-1. not ok") {
    assert(Parser.render("-1. not ok") == "<p>-1. not ok</p>")
  }
  test("- foo\\n\\n bar") {
    assert(Parser.render("- foo\\n\\n bar") == "<ul><li><p>foo</p><pre><code>bar</code></pre></li></ul>")
  }
  test(" 10. foo\\n\\n bar") {
    assert(Parser.render(" 10. foo\\n\\n bar") == "<ol start=\\"10\\"><li><p>foo</p><pre><code>bar</code></pre></li></ol>")
  }
  test(" indented code\\n\\nparagraph\\n\\n more code") {
    assert(Parser.render(" indented code\\n\\nparagraph\\n\\n more code") == "<pre><code>indented code</code></pre><p>paragraph</p><pre><code>more code</code></pre>")
  }
  test("1. indented code\\n\\n paragraph\\n\\n more code") {
    assert(Parser.render("1. indented code\\n\\n paragraph\\n\\n more code") ==
      "<ol><li><pre><code> indented code</code></pre><p>paragraph</p><pre><code>more code</code></pre></li></ol>")
  }
  test(" foo\\n\\nbar") {
    assert(Parser.render(" foo\\n\\nbar") == "<p>foo</p><p>bar</p>")
  }
  test("- foo\\n\\n bar") {
    assert(Parser.render("- foo\\n\\n bar") == "<ul><li>foo</li></ul><p>bar</p>")
  }
  test("- foo\\n\\n  bar") {
    assert(Parser.render("- foo\\n\\n  bar") == "<ul><li><p>foo</p><p>bar</p></li></ul>")
  }
  // Empty list items and items starting on the following line
  test("-\\n foo\\n-\\n ```\\n bar\\n ```\\n-\\n baz") {
    assert(Parser.render("-\\n foo\\n-\\n ```\\n bar\\n ```\\n-\\n baz") ==
      "<ul><li>foo</li><li><pre><code>bar</code></pre></li><li><pre><code>baz</code></pre></li></ul>")
  }
  test("- \\n foo") {
    assert(Parser.render("- \\n foo") == "<ul><li>foo</li></ul>")
  }
  test("-\\n\\n foo") {
    assert(Parser.render("-\\n\\n foo") == "<ul><li></li></ul><p>foo</p>")
  }
  test("- foo\\n-\\n- bar") {
    assert(Parser.render("- foo\\n-\\n- bar") == "<ul><li>foo</li><li></li><li>bar</li></ul>")
  }
  test("- foo\\n- \\n- bar") {
    assert(Parser.render("- foo\\n- \\n- bar") == "<ul><li>foo</li><li></li><li>bar</li></ul>")
  }
  test("1. foo\\n2.\\n3. bar") {
    assert(Parser.render("1. foo\\n2.\\n3. bar") == "<ol><li>foo</li><li></li><li>bar</li></ol>")
  }
  test("*") {
    assert(Parser.render("*") == "<ul><li></li></ul>")
  }
  test("foo\\n*\\n\\nfoo\\n1.") {
    assert(Parser.render("foo\\n*\\n\\nfoo\\n1.") == "<p>foo\\n*</p><p>foo\\n1.</p>")
  }
  // Increasing leading indentation of the same example; four spaces turn the
  // whole thing into an indented code block
  test(" 1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.") {
    assert(Parser.render(" 1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.")
      == "<ol><li><p>A paragraph\\nwith two lines.</p><pre><code>indented code</code></pre><blockquote><p>A block quote.</p></blockquote></li></ol>")
  }
  test("  1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.") {
    assert(Parser.render("  1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.")
      == "<ol><li><p>A paragraph\\nwith two lines.</p><pre><code>indented code</code></pre><blockquote><p>A block quote.</p></blockquote></li></ol>")
  }
  test("   1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.") {
    assert(Parser.render("   1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.")
      == "<ol><li><p>A paragraph\\nwith two lines.</p><pre><code>indented code</code></pre><blockquote><p>A block quote.</p></blockquote></li></ol>")
  }
  test("    1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.") {
    assert(Parser.render("    1. A paragraph\\n with two lines.\\n\\n indented code\\n\\n > A block quote.")
      == """<pre><code>1. A paragraph
        | with two lines.
        |
        | indented code
        |
        | > A block quote.</code></pre>""".stripMargin)
  }
  test(" 1. A paragraph\\nwith two lines.\\n\\n indented code\\n\\n > A block quote.") {
    assert(Parser.render(" 1. A paragraph\\nwith two lines.\\n\\n indented code\\n\\n > A block quote.")
      ==
      """<ol><li><p>A paragraph
        |with two lines.</p><pre><code>indented code</code></pre><blockquote><p>A block quote.</p></blockquote></li></ol>""".stripMargin)
  }
  test(" 1. A paragraph\\n with two lines.") {
    assert(Parser.render(" 1. A paragraph\\n with two lines.")
      == "<ol><li>A paragraph\\nwith two lines.</li></ol>")
  }
  // Lazy continuation lines inside nested quote/list structures
  test("> 1. > Blockquote\\ncontinued here.") {
    assert(Parser.render("> 1. > Blockquote\\ncontinued here.")
      == "<blockquote><ol><li><blockquote><p>Blockquote\\ncontinued here.</p></blockquote></li></ol></blockquote>")
  }
  test("> 1. > Blockquote\\n> continued here.") {
    assert(Parser.render("> 1. > Blockquote\\n> continued here.")
      == "<blockquote><ol><li><blockquote><p>Blockquote\\ncontinued here.</p></blockquote></li></ol></blockquote>")
  }
  // Nesting depends on indentation relative to the parent item's content column
  test("- foo\\n - bar\\n - baz\\n - boo") {
    assert(Parser.render("- foo\\n - bar\\n - baz\\n - boo")
      == "<ul><li>foo<ul><li>bar<ul><li>baz<ul><li>boo</li></ul></li></ul></li></ul></li></ul>")
  }
  test("- foo\\n - bar\\n - baz\\n - boo (flat)") {
    assert(Parser.render("- foo\\n - bar\\n - baz\\n - boo")
      == "<ul><li>foo</li><li>bar</li><li>baz</li><li>boo</li></ul>")
  }
  test("10) foo\\n - bar") {
    assert(Parser.render("10) foo\\n - bar")
      == "<ol start=\\"10\\"><li>foo<ul><li>bar</li></ul></li></ol>")
  }
  test("10) foo\\n - bar (sibling)") {
    assert(Parser.render("10) foo\\n - bar")
      == "<ol start=\\"10\\"><li>foo</li></ol><ul><li>bar</li></ul>")
  }
  test("- - foo") {
    assert(Parser.render("- - foo")
      == "<ul><li><ul><li>foo</li></ul></li></ul>")
  }
  test("1. - 2. foo") {
    assert(Parser.render("1. - 2. foo")
      == "<ol><li><ul><li><ol start=\\"2\\"><li>foo</li></ol></li></ul></li></ol>")
  }
  test("- # Foo\\n- Bar\\n ---\\n baz") {
    assert(Parser.render("- # Foo\\n- Bar\\n ---\\n baz")
      == "<ul>\\n<li>\\n<h1>Foo</h1>\\n</li>\\n<li>\\n<h2>Bar</h2>\\nbaz</li>\\n</ul>")
  }
  // Changing the bullet/delimiter starts a new list
  test("- foo\\n- bar\\n+ baz") {
    assert(Parser.render("- foo\\n- bar\\n+ baz")
      == "<ul><li>foo</li><li>bar</li></ul><ul><li>baz</li></ul>")
  }
  test("1. foo\\n2. bar\\n3) baz") {
    assert(Parser.render("1. foo\\n2. bar\\n3) baz")
      == "<ol><li>foo</li><li>bar</li></ol><ol start=\\"3\\"><li>baz</li></ol>")
  }
  test("Foo\\n- bar\\n- baz") {
    assert(Parser.render("Foo\\n- bar\\n- baz")
      == "<p>Foo</p><ul><li>bar</li><li>baz</li></ul>")
  }
  test("The number of windows in my house is\\n14. The number of doors is 6.") {
    assert(Parser.render("The number of windows in my house is\\n14. The number of doors is 6.")
      == "<p>The number of windows in my house is\\n14. The number of doors is 6.</p>")
  }
  test("The number of windows in my house is\\n1. The number of doors is 6.") {
    assert(Parser.render("The number of windows in my house is\\n1. The number of doors is 6.")
      == "<p>The number of windows in my house is</p><ol><li>The number of doors is 6.</li></ol>")
  }
  // Loose lists: blank lines between items wrap item content in <p>
  test("- foo\\n\\n- bar\\n\\n\\n- baz") {
    assert(Parser.render("- foo\\n\\n- bar\\n\\n\\n- baz")
      == "<ul><li><p>foo</p></li><li><p>bar</p></li><li><p>baz</p></li></ul>")
  }
  test("- foo\\n - bar\\n - baz\\n\\n\\n bim") {
    assert(Parser.render("- foo\\n - bar\\n - baz\\n\\n\\n bim")
      == "<ul><li>foo<ul><li>bar<ul><li><p>baz</p><p>bim</p></li></ul></li></ul></li></ul>")
  }
  // An HTML comment separates two lists
  test("- foo\\n- bar\\n\\n<!-- -->\\n\\n- baz\\n- bim") {
    assert(Parser.render("- foo\\n- bar\\n\\n<!-- -->\\n\\n- baz\\n- bim")
      == "<ul><li>foo</li><li>bar</li></ul><!-- --><ul><li>baz</li><li>bim</li></ul>")
  }
  test("- foo\\n\\n notcode\\n\\n- foo\\n\\n<!-- -->\\n\\n code") {
    assert(Parser.render("- foo\\n\\n notcode\\n\\n- foo\\n\\n<!-- -->\\n\\n code")
      == "<ul><li><p>foo</p><p>notcode</p></li><li><p>foo</p></li></ul><!-- --><pre><code>code</code></pre>")
  }
  test("- a\\n - b\\n - c\\n - d\\n - e\\n - f\\n - g\\n - h\\n- i") {
    assert(Parser.render("- a\\n - b\\n - c\\n - d\\n - e\\n - f\\n - g\\n - h\\n- i")
      == "<ul><li>a</li><li>b</li><li>c</li><li>d</li><li>e</li><li>f</li><li>g</li><li>h</li><li>i</li></ul>")
  }
  test("1. a\\n\\n 2. b\\n\\n 3. c") {
    assert(Parser.render("1. a\\n\\n 2. b\\n\\n 3. c")
      == "<ol><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li></ol>")
  }
  test("- a\\n- b\\n\\n- c") {
    assert(Parser.render("- a\\n- b\\n\\n- c")
      == "<ul><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li></ul>")
  }
  test("* a\\n*\\n\\n* c") {
    assert(Parser.render("* a\\n*\\n\\n* c")
      == "<ul><li><p>a</p></li><li></li><li><p>c</p></li></ul>")
  }
  test("- a\\n- b\\n\\n c\\n- d") {
    assert(Parser.render("- a\\n- b\\n\\n c\\n- d")
      == "<ul><li><p>a</p></li><li><p>b</p><p>c</p></li><li><p>d</p></li></ul>")
  }
  test("- a\\n- b\\n\\n [ref]: /url\\n- d") {
    assert(Parser.render("- a\\n- b\\n\\n [ref]: /url\\n- d")
      == "<ul><li><p>a</p></li><li><p>b</p></li><li><p>d</p></li></ul>")
  }
  test("- a\\n- ```\\n b\\n\\n\\n ```\\n- c") {
    assert(Parser.render("- a\\n- ```\\n b\\n\\n\\n ```\\n- c")
      == "<ul><li>a</li><li><pre><code>b\\n\\n</code></pre></li><li>c</li></ul>")
  }
  test("- a\\n - b\\n\\n c\\n- d") {
    assert(Parser.render("- a\\n - b\\n\\n c\\n- d")
      == "<ul><li>a<ul><li><p>b</p><p>c</p></li></ul></li><li>d</li></ul>")
  }
  test("* a\\n > b\\n >\\n* c") {
    assert(Parser.render("* a\\n > b\\n >\\n* c")
      == "<ul><li>a<blockquote><p>b</p></blockquote></li><li>c</li></ul>")
  }
  test("- a\\n > b\\n ```\\n c\\n ```\\n- d") {
    assert(Parser.render("- a\\n > b\\n ```\\n c\\n ```\\n- d")
      == "<ul><li>a<blockquote><p>b</p></blockquote><pre><code>c</code></pre></li><li>d</li></ul>")
  }
  test("- a") {
    assert(Parser.render("- a")
      == "<ul><li>a</li></ul>")
  }
  test("- a\\n - b") {
    assert(Parser.render("- a\\n - b")
      == "<ul><li>a<ul><li>b</li></ul></li></ul>")
  }
  test("1. ```\\n foo\\n ```\\n\\n bar") {
    assert(Parser.render("1. ```\\n foo\\n ```\\n\\n bar")
      == "<ol><li><pre><code>foo</code></pre><p>bar</p></li></ol>")
  }
  test("* foo\\n * bar\\n\\n baz") {
    assert(Parser.render("* foo\\n * bar\\n\\n baz")
      == "<ul><li><p>foo</p><ul><li>bar</li></ul><p>baz</p></li></ul>")
  }
  test("- a\\n - b\\n - c\\n\\n- d\\n - e\\n - f") {
    assert(Parser.render("- a\\n - b\\n - c\\n\\n- d\\n - e\\n - f")
      == "<ul><li><p>a</p><ul><li>b</li><li>c</li></ul></li><li><p>d</p><ul><li>e</li><li>f</li></ul></li></ul>")
  }
} | raon0211/eridown | src/test/scala/in/suhj/eridown/ListTest.scala | Scala | mit | 14,697 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spark
import org.apache.ignite.Ignite
import org.apache.ignite.cache.query.SqlFieldsQuery
import org.apache.ignite.internal.IgnitionEx
import org.apache.ignite.spark.AbstractDataFrameSpec.{DEFAULT_CACHE, TEST_CONFIG_FILE, checkOptimizationResult, enclose}
import org.apache.spark.sql.ignite.IgniteSparkSession
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import java.lang.{Long ⇒ JLong}
/**
* === Not supported by Spark ===
* CHAR
* DIFFERENCE
* HEXTORAW
* RAWTOHEX
* REGEXP_LIKE
* SOUNDEX
* STRINGDECODE
* STRINGENCODE
* STRINGTOUTF8
* UTF8TOSTRING
* XMLATTR
* XMLNODE
* XMLCOMMENT
* XMLCDATA
* XMLSTARTDOC
* XMLTEXT
* TO_CHAR - The function that can format a timestamp, a number, or text.
* ====== This functions in spark master but not in release =====
* LEFT
* RIGHT
* INSERT
* REPLACE
*/
@RunWith(classOf[JUnitRunner])
class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {

  // Spark session backed by an Ignite client node; initialized in beforeAll()
  var igniteSession: IgniteSparkSession = _

  // Each test issues a Spark SQL query, asserts the SQL that the Ignite
  // optimizer pushes down (checkOptimizationResult), and verifies the returned
  // rows (checkQueryData). Fixture rows are created in createStringTable below.
  describe("Supported optimized string functions") {
    it("LENGTH") {
      val df = igniteSession.sql("SELECT LENGTH(str) FROM strings WHERE id <= 3")
      checkOptimizationResult(df, "SELECT CAST(LENGTH(str) AS INTEGER) as \\"length(str)\\" FROM strings " +
        "WHERE id is not null AND id <= 3")
      val data = (3, 3, 6)
      checkQueryData(df, data)
    }
    it("RTRIM") {
      val df = igniteSession.sql("SELECT RTRIM(str) FROM strings WHERE id = 3")
      checkOptimizationResult(df, "SELECT RTRIM(str) FROM strings WHERE id is not null AND id = 3")
      val data = Tuple1("AAA")
      checkQueryData(df, data)
    }
    it("LTRIM") {
      val df = igniteSession.sql("SELECT LTRIM(str) FROM strings WHERE id = 4")
      checkOptimizationResult(df, "SELECT LTRIM(str) FROM strings WHERE id is not null AND id = 4")
      val data = Tuple1("AAA")
      checkQueryData(df, data)
    }
    it("TRIM") {
      val df = igniteSession.sql("SELECT TRIM(str) FROM strings WHERE id = 5")
      checkOptimizationResult(df, "SELECT TRIM(str) FROM strings WHERE id is not null AND id = 5")
      val data = Tuple1("AAA")
      checkQueryData(df, data)
    }
    it("LOWER") {
      val df = igniteSession.sql("SELECT LOWER(str) FROM strings WHERE id = 2")
      checkOptimizationResult(df, "SELECT LOWER(str) FROM strings WHERE id is not null AND id = 2")
      val data = Tuple1("aaa")
      checkQueryData(df, data)
    }
    it("UPPER") {
      val df = igniteSession.sql("SELECT UPPER(str) FROM strings WHERE id = 1")
      checkOptimizationResult(df, "SELECT UPPER(str) FROM strings WHERE id is not null AND id = 1")
      val data = Tuple1("AAA")
      checkQueryData(df, data)
    }
    // Nested calls must also be pushed down as a whole
    it("LOWER(RTRIM)") {
      val df = igniteSession.sql("SELECT LOWER(RTRIM(str)) FROM strings WHERE id = 3")
      checkOptimizationResult(df, "SELECT LOWER(RTRIM(str)) FROM strings WHERE id is not null AND id = 3")
      val data = Tuple1("aaa")
      checkQueryData(df, data)
    }
    it("LOCATE") {
      val df = igniteSession.sql("SELECT LOCATE('D', str) FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT LOCATE('D', str, 1) FROM strings WHERE id is not null AND id = 6")
      val data = Tuple1(4)
      checkQueryData(df, data)
    }
    it("LOCATE - 2") {
      val df = igniteSession.sql("SELECT LOCATE('A', str) FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT LOCATE('A', str, 1) FROM strings WHERE id is not null AND id = 6")
      val data = Tuple1(1)
      checkQueryData(df, data)
    }
    // Spark's instr() maps to H2/Ignite POSITION with swapped arguments
    it("POSITION") {
      val df = igniteSession.sql("SELECT instr(str, 'BCD') FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT POSITION('BCD', str) as \\"instr(str, BCD)\\" FROM strings " +
        "WHERE id is not null AND id = 6")
      val data = Tuple1(2)
      checkQueryData(df, data)
    }
    it("CONCAT") {
      val df = igniteSession.sql("SELECT concat(str, 'XXX') FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT concat(str, 'XXX') FROM strings WHERE id is not null AND id = 6")
      val data = Tuple1("ABCDEFXXX")
      checkQueryData(df, data)
    }
    it("RPAD") {
      val df = igniteSession.sql("SELECT RPAD(str, 10, 'X') FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT RPAD(str, 10, 'X') FROM strings WHERE id is not null AND id = 6")
      val data = Tuple1("ABCDEFXXXX")
      checkQueryData(df, data)
    }
    it("LPAD") {
      val df = igniteSession.sql("SELECT LPAD(str, 10, 'X') FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT LPAD(str, 10, 'X') FROM strings WHERE id is not null AND id = 6")
      val data = Tuple1("XXXXABCDEF")
      checkQueryData(df, data)
    }
    it("REPEAT") {
      val df = igniteSession.sql("SELECT REPEAT(str, 2) FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT REPEAT(str, 2) FROM strings WHERE id is not null AND id = 6")
      val data = Tuple1("ABCDEFABCDEF")
      checkQueryData(df, data)
    }
    it("SUBSTRING") {
      val df = igniteSession.sql("SELECT SUBSTRING(str, 4, 3) FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT SUBSTR(str, 4, 3) as \\"SUBSTRING(str, 4, 3)\\" FROM strings " +
        "WHERE id is not null AND id = 6")
      val data = Tuple1("DEF")
      checkQueryData(df, data)
    }
    it("SPACE") {
      val df = igniteSession.sql("SELECT SPACE(LENGTH(str)) FROM strings WHERE id = 1")
      checkOptimizationResult(df, "SELECT SPACE(CAST(LENGTH(str) AS INTEGER)) as \\"SPACE(LENGTH(str))\\" " +
        "FROM strings WHERE id is not null AND id = 1")
      val data = Tuple1("   ")
      checkQueryData(df, data)
    }
    it("ASCII") {
      val df = igniteSession.sql("SELECT ASCII(str) FROM strings WHERE id = 7")
      checkOptimizationResult(df, "SELECT ASCII(str) FROM strings WHERE id is not null AND id = 7")
      val data = Tuple1(50)
      checkQueryData(df, data)
    }
    it("REGEXP_REPLACE") {
      val df = igniteSession.sql("SELECT REGEXP_REPLACE(str, '(\\\\\\\\d+)', 'num') FROM strings WHERE id = 7")
      checkOptimizationResult(df, "SELECT REGEXP_REPLACE(str, '(\\\\d+)', 'num') FROM strings " +
        "WHERE id is not null AND id = 7")
      val data = Tuple1("num")
      checkQueryData(df, data)
    }
    // id = 8 has a NULL str, so only the non-null argument appears in the result
    it("CONCAT_WS") {
      val df = igniteSession.sql("SELECT id, CONCAT_WS(', ', str, 'after') FROM strings " +
        "WHERE id >= 7 AND id <= 8")
      checkOptimizationResult(df, "SELECT id, CONCAT_WS(', ', str, 'after') FROM strings " +
        "WHERE id is not null AND id >= 7 AND id <= 8")
      val data = (
        (7, "222, after"),
        (8, "after"))
      checkQueryData(df, data)
    }
    it("TRANSLATE") {
      val df = igniteSession.sql("SELECT id, TRANSLATE(str, 'DEF', 'ABC') FROM strings WHERE id = 6")
      checkOptimizationResult(df, "SELECT id, TRANSLATE(str, 'DEF', 'ABC') FROM strings " +
        "WHERE id is not null AND id = 6")
      val data = Tuple1((6, "ABCABC"))
      checkQueryData(df, data)
    }
  }

  // Creates and populates the `strings` fixture table; note row 8 has a NULL str
  def createStringTable(client: Ignite, cacheName: String): Unit = {
    val cache = client.cache(cacheName)
    cache.query(new SqlFieldsQuery(
      """
        | CREATE TABLE strings (
        | id LONG,
        | str VARCHAR,
        | PRIMARY KEY (id)) WITH "backups=1"
      """.stripMargin)).getAll
    val qry = new SqlFieldsQuery("INSERT INTO strings (id, str) values (?, ?)")
    cache.query(qry.setArgs(1L.asInstanceOf[JLong], "aaa")).getAll
    cache.query(qry.setArgs(2L.asInstanceOf[JLong], "AAA")).getAll
    cache.query(qry.setArgs(3L.asInstanceOf[JLong], "AAA ")).getAll
    cache.query(qry.setArgs(4L.asInstanceOf[JLong], " AAA")).getAll
    cache.query(qry.setArgs(5L.asInstanceOf[JLong], " AAA ")).getAll
    cache.query(qry.setArgs(6L.asInstanceOf[JLong], "ABCDEF")).getAll
    cache.query(qry.setArgs(7L.asInstanceOf[JLong], "222")).getAll
    cache.query(qry.setArgs(8L.asInstanceOf[JLong], null)).getAll
  }

  // Creates the fixture table and starts the Ignite-backed Spark session as a
  // client node ("client-2") using the shared test configuration
  override protected def beforeAll(): Unit = {
    super.beforeAll()
    createStringTable(client, DEFAULT_CACHE)
    val configProvider = enclose(null) (x ⇒ () ⇒ {
      val cfg = IgnitionEx.loadConfiguration(TEST_CONFIG_FILE).get1()
      cfg.setClientMode(true)
      cfg.setIgniteInstanceName("client-2")
      cfg
    })
    igniteSession = IgniteSparkSession.builder()
      .config(spark.sparkContext.getConf)
      .igniteConfigProvider(configProvider)
      .getOrCreate()
  }
}
| irudyak/ignite | modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationStringFuncSpec.scala | Scala | apache-2.0 | 10,284 |
package edu.gemini.ags.conf
import edu.gemini.ags.api.AgsMagnitude.{MagnitudeCalc, MagnitudeTable}
import edu.gemini.ags.gems.GemsMagnitudeTable
import edu.gemini.spModel.core.Site
import edu.gemini.spModel.core.Site.{GN, GS}
import edu.gemini.spModel.gemini.altair.AltairParams.Mode._
import edu.gemini.spModel.gemini.altair.{InstAltair, AltairAowfsGuider}
import edu.gemini.spModel.gemini.flamingos2.Flamingos2OiwfsGuideProbe
import edu.gemini.spModel.gemini.gems.Canopus
import edu.gemini.spModel.gemini.gmos.GmosOiwfsGuideProbe
import edu.gemini.spModel.gemini.gnirs.GnirsOiwfsGuideProbe
import edu.gemini.spModel.gemini.gsaoi.GsaoiOdgw
import edu.gemini.spModel.gemini.nici.NiciOiwfsGuideProbe
import edu.gemini.spModel.gemini.nifs.NifsOiwfsGuideProbe
import edu.gemini.spModel.gemini.niri.NiriOiwfsGuideProbe
import edu.gemini.spModel.guide.GuideProbe
import edu.gemini.spModel.obs.context.ObsContext
import edu.gemini.spModel.rich.shared.immutable._
import edu.gemini.spModel.target.obsComp.PwfsGuideProbe
import scalaz._
import Scalaz._
object ProbeLimitsTable {

  // Okay, for now load the configuration from the classpath. At some point
  // this will become user-changeable and supplied as an argument.
  private val ConfFile = "Guide Limits - OT Config.csv"

  /**
   * Loads the guide-probe magnitude-limits configuration from the classpath,
   * returning either an error message or the parsed magnitude table.
   */
  def load(): String \/ MagnitudeTable = {
    // Option(...) maps a missing resource (null) to None
    val is = Option(this.getClass.getResourceAsStream(ConfFile))
    try {
      for {
        s <- is.toRightDisjunction(s"Could not find $ConfFile")
        t <- new ProbeLimitsParser().read(s).map(ProbeLimitsTable(_))
      } yield t
    } finally {
      // Always release the stream, whether parsing succeeded or not
      is.foreach(_.close())
    }
  }

  /** Like [[load]], but throws a RuntimeException on failure. */
  def loadOrThrow(): MagnitudeTable =
    load().fold(msg => throw new RuntimeException(msg), identity)
}
/**
 * Magnitude table backed by the parsed probe-limits configuration.
 *
 * @param tab map from a probe/site/mode limits identifier to its calculator
 */
case class ProbeLimitsTable(tab: CalcMap) extends MagnitudeTable {

  /**
   * Returns the magnitude calculator for the given observing context and
   * guide probe, if one is configured.
   */
  def apply(ctx: ObsContext, probe: GuideProbe): Option[MagnitudeCalc] =
    // Deferring GeMS to the old implementation until we understand
    // what is supposed to happen.
    probe match {
      case _: GsaoiOdgw   => GemsMagnitudeTable(ctx, probe)
      case _: Canopus.Wfs => GemsMagnitudeTable(ctx, probe)
      case _              =>
        for {
          s  <- ctx.getSite.asScalaOpt
          id <- lookup(s, ctx, probe)
          ct <- tab.get(id)
        } yield ct
    }

  /**
   * Maps a (site, probe) pair — and, for Altair, the AO mode — to the limits
   * identifier used to key `tab`. Returns None for unsupported combinations.
   */
  private def lookup(site: Site, ctx: ObsContext, probe: GuideProbe): Option[MagLimitsId] = {
    (site, probe) match {
      case (GN, AltairAowfsGuider.instance)         =>
        // `collect` with a typed pattern replaces filter(isInstanceOf) +
        // asInstanceOf: same behavior, no unchecked cast.
        ctx.getAOComponent.asScalaOpt.collect { case altair: InstAltair => altair }.flatMap { altair =>
          altair.getMode match {
            case NGS | NGS_FL => Some(AltairNgs)
            case LGS          => Some(AltairLgs)
            case _            => None
          }
        }
      case (GS, Flamingos2OiwfsGuideProbe.instance) => Some(F2Oiwfs)
      case (GN, GmosOiwfsGuideProbe.instance)       => Some(GmosNOiwfs)
      case (GS, GmosOiwfsGuideProbe.instance)       => Some(GmosSOiwfs)
      case (GN, GnirsOiwfsGuideProbe.instance)      => Some(GnirsOiwfs)
      case (GN, NifsOiwfsGuideProbe.instance)       => Some(NifsOiwfs)
      case (GN, NiriOiwfsGuideProbe.instance)       => Some(NiriOiwfs)
      case (GN, PwfsGuideProbe.pwfs1)               => Some(GnPwfs1)
      case (GN, PwfsGuideProbe.pwfs2)               => Some(GnPwfs2)
      case (GS, PwfsGuideProbe.pwfs1)               => Some(GsPwfs1)
      case (GS, PwfsGuideProbe.pwfs2)               => Some(GsPwfs2)
      case (GS, NiciOiwfsGuideProbe.instance)       => Some(NiciOiwfs)
      case _                                        => None
    }
  }
}
| arturog8m/ocs | bundle/edu.gemini.ags/src/main/scala/edu/gemini/ags/conf/ProbeLimitsTable.scala | Scala | bsd-3-clause | 3,595 |
package scalan.graphs
import scalan.primitives.PrimitiveExamples
/**
* Created by afilippov on 2/17/15.
*/
trait GraphExamples extends GraphsDsl with PrimitiveExamples {

  // Builds an adjacency-list graph from staged inputs and returns its vertex count
  lazy val fromAndToAdj = fun { in: Rep[(NestedCollectionFlat[Int],NestedCollectionFlat[Double])] =>
    val links = in._1
    val edge_vals = in._2
    val vertex_vals = UnitCollection(links.length)
    val graph = AdjacencyGraph.fromAdjacencyList(vertex_vals, edge_vals, links)
    graph.vertexNum
  }

  // Builds an incidence-matrix graph and returns its vertex count
  lazy val fromAndToInc = fun { in: Rep[(Collection[Double],Int)] =>
    val incMatrix = in._1
    val vertexNum = in._2
    val vertex_vals = UnitCollection(vertexNum)
    val graph = IncidenceGraph.fromAdjacencyMatrix(vertex_vals, incMatrix, vertexNum)
    graph.vertexNum
  }

  // Sentinel values used in the parent array produced by MST_prime / MSF_prime
  val UNVISITED = -2
  val NO_PARENT= -1

  // Monoid selecting the (weight, (from, to)) triple with the smallest weight;
  // identity is "no edge" (MaxValue everywhere)
  lazy val MinWeightMonoid = RepMonoid[(Double,(Int,Int))]("MinWeight", (Double.MaxValue,(Int.MaxValue, Int.MaxValue)), true) {
    (t1, t2) => IF (t1._1 < t2._1) {t1} ELSE t2
  }

  /**
   * Prim-style minimum spanning tree growth: repeatedly picks the lightest edge
   * leaving the current front and records the parent of the newly reached vertex
   * in `out`, until no outgoing edges remain. Returns the updated parent array.
   */
  def MST_prime(g: Rep[Graph[Unit,Double]], startFront: Rep[Front], out: Coll[Int]): Coll[Int] = {
    def stopCondition(front: Rep[Front], unused: Any) = (g.outEdgesOf(front).length === 0)
    def step(front: Rep[Front], out: Coll[Int]) = {
      val outEdges = g.outEdgesOf(front)
      val minEdge = outEdges.map({ edge => Pair(edge.value, Pair(edge.fromId, edge.toId))}).reduce(MinWeightMonoid)
      // NOTE(review): minEdge is Rep[(Double,(Int,Int))]; ._2/._3 rely on
      // scalan's flattened nested-tuple accessors (fromId, toId) — confirm.
      val from = minEdge._2
      val to = minEdge._3
      (front.append(to), out.update(to, from))
    }
    from(startFront, out).until(stopCondition)(step)._2
  }

  /**
   * Minimum spanning forest: runs MST_prime from each still-unvisited root
   * until every vertex is assigned a parent (roots get NO_PARENT).
   */
  def MSF_prime(g: Rep[Graph[Unit,Double]], startFront: Rep[Front]) = {
    val startRoot = toRep(0);
    val out = Collection.replicate(g.vertexNum, UNVISITED)
    val outIndexes = Collection.indexRange(g.vertexNum)
    val result = from(startRoot, out).until((root, _) => (root < 0) ) { (root, out) =>
      val front = startFront.append(root)
      val newOut = MST_prime(g, front, out.update(root, NO_PARENT))
      val remain = (outIndexes zip newOut).filter( x => x._2 === UNVISITED)
      // next root is the first unvisited vertex, or -1 when the forest is complete
      val newStart = IF (remain.length > 0) THEN remain(0)._1 ELSE toRep(-1)
      (newStart, newOut)
    }
    result._2
  }

  // Minimal reproduction example: out-edge targets of a single-node front
  def fallingTest(g: Rep[Graph[Unit,Double]], startFront: Rep[Front]) = {
    val front = startFront.append(0)
    g.outEdgesOf(front).map(_.toId)
  }

  // MST over an adjacency-list graph, collection-based front
  lazy val mstFunAdj = fun { in: Rep[(NestedCollectionFlat[Int],NestedCollectionFlat[Double])] =>
    val links = in._1
    val edge_vals = in._2
    val vertex_vals = UnitCollection(links.length)
    val graph = AdjacencyGraph.fromAdjacencyList(vertex_vals, edge_vals, links)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNode(0, graph.vertexNum)
    MST_prime(graph, startFront, out_in)
  }

  // Same as mstFunAdj but with a map-based front
  lazy val mstFunAdjMap = fun { in: Rep[(NestedCollectionFlat[Int],NestedCollectionFlat[Double])] =>
    val links = in._1
    val edge_vals = in._2
    val vertex_vals = UnitCollection(links.length)
    val graph = AdjacencyGraph.fromAdjacencyList(vertex_vals, edge_vals, links)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNodeMap(0, graph.vertexNum)
    MST_prime(graph, startFront, out_in)
  }

  // MST over an incidence-matrix graph, collection-based front
  lazy val mstFunInc = fun { in: Rep[(Collection[Double], Int)] =>
    val incMatrix = in._1
    val vertexNum = in._2
    val vertex_vals = UnitCollection(vertexNum)
    val graph = IncidenceGraph.fromAdjacencyMatrix(vertex_vals, incMatrix, vertexNum)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNode(0, graph.vertexNum)
    MST_prime(graph, startFront, out_in)
  }

  // Same as mstFunInc but with a map-based front
  lazy val mstFunIncMap = fun { in: Rep[(Collection[Double], Int)] =>
    val incMatrix = in._1
    val vertexNum = in._2
    val vertex_vals = UnitCollection(vertexNum)
    val graph = IncidenceGraph.fromAdjacencyMatrix(vertex_vals, incMatrix, vertexNum)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNodeMap(0, graph.vertexNum)
    MST_prime(graph, startFront, out_in)
  }

  // Flat-array variant (values + segment offsets/lengths), returning a plain array
  lazy val mstFun1Adj = fun { in: Rep[(Array[Int], (Array[Double], (Array[Int], Array[Int])))] =>
    val segments = Collection.fromArray(in._3) zip Collection.fromArray(in._4)
    val links = NestedCollectionFlat(Collection.fromArray(in._1), segments)
    val edge_vals = NestedCollectionFlat(Collection.fromArray(in._2), segments)
    val vertex_vals = UnitCollection(segments.length)
    val graph = AdjacencyGraph.fromAdjacencyList(vertex_vals, edge_vals, links)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNode(0, graph.vertexNum)
    val res = MST_prime(graph, startFront, out_in)
    res.arr
  }

  // Flat-array variant with a map-based front
  lazy val mstFun1AdjMap = fun { in: Rep[(Array[Int], (Array[Double], (Array[Int], Array[Int])))] =>
    val segments = Collection.fromArray(in._3) zip Collection.fromArray(in._4)
    val links = NestedCollectionFlat(Collection.fromArray(in._1), segments)
    val edge_vals = NestedCollectionFlat(Collection.fromArray(in._2), segments)
    val vertex_vals = UnitCollection(segments.length)
    val graph = AdjacencyGraph.fromAdjacencyList(vertex_vals, edge_vals, links)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNodeMap(0, graph.vertexNum)
    val res = MST_prime(graph, startFront, out_in)
    res.arr
  }

  /*lazy val msfFunAdjMap = fun { in: Rep[(Array[Int], (Array[Double], (Array[Int], Array[Int])))] =>
    val segments = Collection.fromArray(in._3) zip Collection.fromArray(in._4)
    val links = NestedCollectionFlat(Collection.fromArray(in._1), segments)
    val edge_vals = NestedCollectionFlat(Collection.fromArray(in._2), segments)
    val vertex_vals = UnitCollection(segments.length)
    val graph = AdjacencyGraph.fromAdjacencyList(vertex_vals, edge_vals, links)
    val startFront = Front.emptyMapBasedFront(graph.vertexNum)
    val res = MSF_prime(graph, startFront)
    res.arr
  } */

  // Staged prefix-scan over an int array
  lazy val scanFun = fun { in: Rep[Array[Int]] => in.scan }

  // Incidence-matrix flat-array variant, collection-based front
  lazy val mstFun1Inc = fun { in: Rep[(Array[Double], Int)] =>
    val incMatrix = Collection.fromArray(in._1)
    val vertex_vals = UnitCollection(in._2)
    val graph = IncidenceGraph.fromAdjacencyMatrix(vertex_vals, incMatrix, in._2)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNode(0, graph.vertexNum)
    val res = MST_prime(graph, startFront, out_in)
    res.arr
  }

  // Incidence-matrix flat-array variant, map-based front
  lazy val mstFun1IncMap = fun { in: Rep[(Array[Double], Int)] =>
    val incMatrix = Collection.fromArray(in._1)
    val vertex_vals = UnitCollection(in._2)
    val graph = IncidenceGraph.fromAdjacencyMatrix(vertex_vals, incMatrix, in._2)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.fromStartNodeMap(0, graph.vertexNum)
    val res = MST_prime(graph, startFront, out_in)
    res.arr
  }

  /* lazy val msfFunIncMap = fun { in: Rep[(Array[Double], Int)] =>
    val incMatrix = Collection.fromArray(in._1)
    val vertex_vals = UnitCollection(in._2)
    val graph = IncidenceGraph.fromAdjacencyMatrix(vertex_vals, incMatrix, in._2)
    val out_in = Collection.replicate(graph.vertexNum, UNVISITED).update(0, NO_PARENT)
    val startFront = Front.emptyMapBasedFront(graph.vertexNum)
    val res = MSF_prime(graph, startFront)
    res.arr
  } */
}
| scalan/scalan | graphs/src/test/scala/scalan/graphs/GraphExamples.scala | Scala | apache-2.0 | 7,583 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*package com.webtrends.harness.component.http
import akka.testkit.{TestKit, TestProbe}
import org.specs2.mutable.SpecificationWithJUnit
import spray.http.StatusCodes
import spray.httpx.RequestBuilding
import spray.testkit.Specs2RouteTest
class MetricsDirectivesSpec extends SpecificationWithJUnit
with Specs2RouteTest
with RequestBuilding
with MetricsDirectives {
val metric = Timer("group.subgroup.name.scope")
"the metrics directives" should {
"allow for timer to return a response" in {
val probe = new TestProbe(system)
MetricsEventBus.subscribe(probe.ref)
implicit val service = new MetricsAdapter {}
Get("/test") ~> time(metric) {
complete("good")
} ~> check {
status === StatusCodes.OK
}
}
"allow for timing a call" in {
val probe = new TestProbe(system)
MetricsEventBus.subscribe(probe.ref)
implicit val service = new MetricsAdapter {}
Get("/test") ~> time(metric) {
complete("good")
}
probe.expectMsgClass(classOf[TimerObservation])
success
}
}
step {
TestKit.shutdownActorSystem(system)
}
}*/
| mjwallin1/wookiee-spray | src/test/scala/com/webtrends/harness/component/spray/directive/MetricsDirectivesSpec.scala | Scala | apache-2.0 | 1,896 |
/**
* Copyright (c) 2013-2015 Patrick Nicolas - Scala for Machine Learning - All rights reserved
*
* The source code in this file is provided by the author for the sole purpose of illustrating the
* concepts and algorithms presented in "Scala for Machine Learning". It should not be used to
* build commercial applications.
* ISBN: 978-1-783355-874-2 Packt Publishing.
* Unless required by applicable law or agreed to in writing, software is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* Version 0.98
*/
package org.scalaml.workflow
/**
 * Package object introducing two examples of workflow components built with dynamic
 * dependency injection:<br>
 * - Dynamically configurable preprocessing module <b>PreprocessingModule</b><br>
 * - Dynamically configurable clustering module <b>ClusteringModule</b><br>
 * The object itself is intentionally empty; it exists only to anchor this
 * package-level documentation.
 * @note Scala for Machine Learning - Chapter 2 Getting started / Designing a workflow
 */
package object module { }
// --------------------------------------- EOF ----------------------------------------- | batermj/algorithm-challenger | books/cs/machine-learning/scala-for-machine-learning/1rst-edition/original-src-from-the-book/src/main/scala/org/scalaml/workflow/module/package.scala | Scala | apache-2.0 | 1,120 |
package com.tutuur.ducksoup.meta
/**
* @author Zale
*/
/**
 * Immutable image metadata record.
 *
 * @param id     image identifier; must be exactly [[Image.IdLength]] characters
 * @param md5    MD5 checksum of the image content
 * @param width  image width in pixels
 * @param height image height in pixels
 * @param format image format name, normalized to upper case (e.g. "PNG")
 */
case class Image(id: String, md5: String, width: Int, height: Int, format: String) {
  // Format names must already be upper-cased by the caller.
  assert(format == format.toUpperCase)
  // Use the companion constant instead of a duplicated literal `8`, so the
  // invariant and the published constant cannot drift apart.
  assert(id.length == Image.IdLength)
}

object Image {
  /** Required length of an image id (enforced by the class invariant). */
  val IdLength = 8

  /** Tupled constructor, convenient for mapping 5-tuples (e.g. DB rows). */
  def tupled = Function.tupled(Image.apply _)
}
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.io
import java.io.BufferedReader
import java.io.Closeable
import java.io.FileInputStream
import java.io.InputStream
import java.io.InputStreamReader
import java.io.StringReader
import scala.collection.immutable.PagedSeq
import scala.io.Codec
import scala.util.parsing.input.CharSequenceReader
import scala.util.parsing.input.PagedSeqReader
import scala.util.parsing.input.Reader
import java.io.File
/** Represents the input for a parser, abstracting over various types of IO resources.
 *
 *  For parsers that use the parser combinators from the Scala SDK,
 *  this trait offers a convenient `asParserInput` method. Other types of parsers
 *  may use the `java.io.Reader` provided by the `asReader` method.
 *
 *  All instances produced by the companion's factory methods additionally
 *  implement `java.io.Closeable`.
 *
 *  @author Jens Halm
 */
trait Input {

  /** The input as a `java.io.Reader`.
   */
  def asReader: java.io.Reader

  /** The input as a Reader instance for the parser combinators of the Scala SDK.
   */
  def asParserInput: Reader[Char]

}
/** Factory methods for creating Input instances from different types of IO resources.
 *
 *  @author Jens Halm
 */
object Input {

  /** Input backed by an in-memory string; closing is a no-op. */
  private class FromString (source: String) extends Input with Closeable {
    def asReader = new StringReader(source)
    def asParserInput = new CharSequenceReader(source)
    def close = ()
  }

  /** Input backed by an arbitrary character reader; closing closes the reader. */
  private class FromReader (val asReader: java.io.Reader) extends Input with Closeable {
    def asParserInput = new PagedSeqReader(PagedSeq.fromReader(asReader))
    def close = asReader.close
  }

  /** Creates a new Input instance from the specified string.
   */
  def fromString (source: String): Input with Closeable = new FromString(source)

  /** Creates a new Input instance for the file with the specified name.
   *
   *  @param name the name of the file
   *  @param codec the character encoding of the file, if not specified the platform default will be used.
   */
  def fromFile (name: String)(implicit codec: Codec): Input with Closeable = fromFile(new File(name))(codec)

  /** Creates a new Input instance for the specified file.
   *
   *  @param file the file to use as input
   *  @param codec the character encoding of the file, if not specified the platform default will be used.
   */
  def fromFile (file: File)(implicit codec: Codec): Input with Closeable = fromStream(new FileInputStream(file))(codec)

  /** Creates a new Input instance for the specified InputStream.
   *
   *  @param stream the stream to read character data from
   *  @param codec the character encoding used by the text input, if not specified the platform default will be used.
   */
  def fromStream (stream: InputStream)(implicit codec: Codec): Input with Closeable = {
    val decoded = new InputStreamReader(stream, codec.decoder)
    fromReader(new BufferedReader(decoded))
  }

  /** Creates a new Input instance for the specified Reader.
   */
  def fromReader (reader: java.io.Reader): Input with Closeable = new FromReader(reader)

}
package singleton
import java.util.UUID
/**
 * Concrete singleton implementation; instantiable only via its companion
 * because the constructor is private.
 * @author lmignot
 */
class LazySingleton private extends Singleton {
  // Identity is fixed once, at construction time.
  private val uuid: String = UUID.randomUUID.toString

  override def getId: String = uuid
}
/**
 * LazySingleton concept
 * <p>
 * The single instance is created on first access to `getInstance`.
 *
 * The original hand-rolled `var` + null-check was not thread-safe: two
 * threads calling `getInstance` concurrently could each observe `null`
 * and construct separate instances. A `lazy val` keeps the same
 * on-demand semantics while the JVM guarantees the initializer runs
 * exactly once.
 */
object LazySingleton {

  // Thread-safe lazy initialization; replaces the racy `var _instance` pattern.
  private lazy val instance: Singleton = new LazySingleton

  /** Returns the singleton instance, creating it on first access. */
  def getInstance: Singleton = instance
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.nio.ByteBuffer
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv}
import org.apache.spark.storage.TaskResultBlockId
/**
 * Removes the TaskResult from the BlockManager before delegating to a normal TaskResultGetter.
 *
 * Used to test the case where a BlockManager evicts the task result (or dies) before the
 * TaskResult is retrieved.
 */
class ResultDeletingTaskResultGetter(sparkEnv: SparkEnv, scheduler: TaskSchedulerImpl)
  extends TaskResultGetter(sparkEnv, scheduler) {
  // Flipped to true after the first (and only) deliberate result deletion.
  var removedResult = false

  override def enqueueSuccessfulTask(
    taskSetManager: TaskSetManager, tid: Long, serializedData: ByteBuffer) {
    if (!removedResult) {
      // Only remove the result once, since we'd like to test the case where the task eventually
      // succeeds.
      serializer.get().deserialize[TaskResult[_]](serializedData) match {
        case IndirectTaskResult(blockId) =>
          // Simulate eviction: drop the block the result points at.
          sparkEnv.blockManager.master.removeBlock(blockId)
        case directResult: DirectTaskResult[_] =>
          // This getter is only meaningful for indirect results; a direct
          // result here indicates a broken test setup.
          taskSetManager.abort("Internal error: expect only indirect results")
      }
      // Deserialization advanced the buffer's position; rewind so the
      // delegated call below can re-read the same bytes.
      serializedData.rewind()
      removedResult = true
    }
    super.enqueueSuccessfulTask(taskSetManager, tid, serializedData)
  }
}
/**
 * Tests related to handling task results (both direct and indirect).
 */
class TaskResultGetterSuite extends FunSuite with BeforeAndAfter with BeforeAndAfterAll
  with LocalSparkContext {

  override def beforeAll {
    // Set the Akka frame size to be as small as possible (it must be an integer, so 1 is as small
    // as we can make it) so the tests don't take too long.
    System.setProperty("spark.akka.frameSize", "1")
  }

  override def afterAll {
    // Restore global state so other suites are unaffected.
    System.clearProperty("spark.akka.frameSize")
  }

  test("handling results smaller than Akka frame size") {
    sc = new SparkContext("local", "test")
    // A tiny result fits in a single Akka message (direct result path).
    val result = sc.parallelize(Seq(1), 1).map(x => 2 * x).reduce((x, y) => x)
    assert(result === 2)
  }

  test("handling results larger than Akka frame size") {
    sc = new SparkContext("local", "test")
    val akkaFrameSize =
      sc.env.actorSystem.settings.config.getBytes("akka.remote.netty.tcp.maximum-frame-size").toInt
    // An array of `akkaFrameSize` Ints serializes larger than the frame size,
    // forcing the result through the block manager (indirect result path).
    val result = sc.parallelize(Seq(1), 1).map(x => 1.to(akkaFrameSize).toArray).reduce((x, y) => x)
    assert(result === 1.to(akkaFrameSize).toArray)

    val RESULT_BLOCK_ID = TaskResultBlockId(0)
    assert(sc.env.blockManager.master.getLocations(RESULT_BLOCK_ID).size === 0,
      "Expect result to be removed from the block manager.")
  }

  test("task retried if result missing from block manager") {
    // Set the maximum number of task failures to > 0, so that the task set isn't aborted
    // after the result is missing.
    sc = new SparkContext("local[1,2]", "test")
    // If this test hangs, it's probably because no resource offers were made after the task
    // failed.
    val scheduler: TaskSchedulerImpl = sc.taskScheduler match {
      case clusterScheduler: TaskSchedulerImpl =>
        clusterScheduler
      case _ =>
        assert(false, "Expect local cluster to use ClusterScheduler")
        throw new ClassCastException
    }
    // Install the getter that deletes the first result, so the first attempt
    // fails and the task must be retried.
    scheduler.taskResultGetter = new ResultDeletingTaskResultGetter(sc.env, scheduler)
    val akkaFrameSize =
      sc.env.actorSystem.settings.config.getBytes("akka.remote.netty.tcp.maximum-frame-size").toInt
    val result = sc.parallelize(Seq(1), 1).map(x => 1.to(akkaFrameSize).toArray).reduce((x, y) => x)
    assert(result === 1.to(akkaFrameSize).toArray)

    // Make sure two tasks were run (one failed one, and a second retried one).
    assert(scheduler.nextTaskId.get() === 2)
  }
}
| dotunolafunmiloye/spark | core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala | Scala | apache-2.0 | 4,571 |
package records.benchmark
import org.scalameter.api._
import scala.tools.nsc.util._
import records.Rec
// Measures how long the compiler takes to typecheck/compile repeated record
// field accesses of varying field index, for a record of Create.maxSize fields.
//
// NOTE(review): the object name contains a typo ("Benhcmark"). Renaming it
// would also require updating the fully-qualified reference embedded in the
// generated source below, and possibly external benchmark-runner references,
// so it is deliberately left unchanged here.
object RecordsCompileTimeBenhcmark extends RecordsBenchmarkSuite with TypecheckingBenchmarkingSuite {
  // rec is referenced from the generated code
  val rec = Create.Rec(Create.maxSize)

  // Builds the source to be compiled by the benchmark: accesses field
  // `f<fPos>` of `rec`, repeated `Create.repetitionsInCompile` times.
  def source(fPos: Int) = s"""|
                              |import records.Rec
                              |object A {
                              |  val rec = records.benchmark.RecordsCompileTimeBenhcmark.rec
                              |  ${(0 until Create.repetitionsInCompile).map(_ => " rec.f" + fPos).mkString("\\n")}
                              |}
                              |""".stripMargin

  performance of "Records compile time" in {
    // Field access on records is resolved during type checking, so stopping
    // after the typer isolates the typechecking cost.
    measure method s"access size ${Create.maxSize} until typer" in {
      using(fieldIndexes)
        .setUp(_ => setupCompiler(List("-Ystop-after:typer")))
        .in { x => compileSource(source(x)) }
    }

    // Full compilation pipeline for comparison.
    measure method s"access size ${Create.maxSize}" in {
      using(fieldIndexes)
        .setUp(_ => setupCompiler())
        .in { x => compileSource(source(x)) }
    }
  }
}
| scala-records/scala-records-benchmarks | src/test/scala/records/benchmark/RecordsCompileTimeBenchmark.scala | Scala | bsd-3-clause | 987 |
package org.jetbrains.plugins.scala
package codeInspection.etaExpansion
import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder}
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.codeInspection.collections.MethodRepr
import org.jetbrains.plugins.scala.codeInspection.etaExpansion.ConvertibleToMethodValueInspection._
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection, InspectionBundle}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.ScFunctionType
import org.jetbrains.plugins.scala.lang.psi.types.result.Success
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
/**
* Nikolay.Tropin
* 5/30/13
*/
object ConvertibleToMethodValueInspection {
  /** User-visible inspection name, resolved from the message bundle. */
  val inspectionName = InspectionBundle.message("convertible.to.method.value.name")
  /** Stable id used to reference this inspection in IDE configuration. */
  val inspectionId = "ConvertibleToMethodValue"
}
/**
 * Inspection that flags expressions which can be rewritten as a plain method
 * value, e.g. `foo(_)` or `foo _` where `foo` alone suffices. For each
 * candidate it pre-computes the replacement and verifies (via type
 * conformance) that the rewrite would not change the expression's type
 * before reporting a problem with a quick fix.
 */
class ConvertibleToMethodValueInspection extends AbstractInspection(inspectionId, inspectionName){

  def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
    // Case 1: a call whose arguments are all underscores, e.g. `foo(_, _)` —
    // candidate for dropping the anonymous-function wrapper.
    case MethodRepr(expr, _, Some(_), args) =>
      if (args.nonEmpty && args.forall(arg => arg.isInstanceOf[ScUnderscoreSection] && ScUnderScoreSectionUtil.isUnderscore(arg)))
        registerProblem(holder, expr, InspectionBundle.message("convertible.to.method.value.anonymous.hint"))
    // Case 2: an eta-expansion `foo _` — candidate for dropping the underscore.
    case und: ScUnderscoreSection if und.bindingExpr.isDefined =>
      // Skip `foo _` used as default value of a parameter of a class with type
      // parameters — type inference differs there, so the rewrite is unsafe.
      val isInParameterOfParameterizedClass = PsiTreeUtil.getParentOfType(und, classOf[ScClassParameter]) match {
        case null => false
        case cp => cp.containingClass.hasTypeParameters
      }
      if (!isInParameterOfParameterizedClass)
        registerProblem(holder, und, InspectionBundle.message("convertible.to.method.value.eta.hint"))
  }

  // Reports the problem only if at least one candidate replacement preserves
  // the expression's type.
  private def registerProblem(holder: ProblemsHolder, expr: ScExpression, hint: String) {
    possibleReplacements(expr).find(isSuitableForReplace(expr, _)).foreach { replacement =>
      holder.registerProblem(expr, inspectionName,
        ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
        new ConvertibleToMethodValueQuickFix(expr, replacement, hint))
    }
  }

  // Extracts the bare method reference text (without argument list /
  // underscore), for the shapes of expression the inspection handles.
  private def methodWithoutArgumentsText(expr: ScExpression): Seq[String] = expr match {
    case call: ScMethodCall => Seq(call.getEffectiveInvokedExpr.getText)
    case ScInfixExpr(_, oper, right) if !ScalaNamesUtil.isOperatorName(oper.refName) =>
      // For alphanumeric infix calls, drop the right operand on a copy so the
      // original PSI tree is untouched.
      val infixCopy = expr.copy.asInstanceOf[ScInfixExpr]
      infixCopy.getNode.removeChild(infixCopy.rOp.getNode)
      Seq(infixCopy.getText)
    case und: ScUnderscoreSection => und.bindingExpr.map(_.getText).toSeq
    case _ => Seq.empty
  }

  // Type-checks the candidate replacement in the original context and accepts
  // it only when old and new expressions conform to the expected function
  // type (or, absent an expected type, have equivalent types for `... _`).
  private def isSuitableForReplace(oldExpr: ScExpression, newExprText: String): Boolean = {
    val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(newExprText, oldExpr.getContext, oldExpr)
    oldExpr.expectedType(fromUnderscore = false) match {
      case Some(expectedType) if ScFunctionType.isFunctionType(expectedType) =>
        def conformsExpected(expr: ScExpression): Boolean = expr.getType().getOrAny conforms expectedType
        conformsExpected(oldExpr) && conformsExpected(newExpr) && oldExpr.getType().getOrAny.conforms(newExpr.getType().getOrNothing)
      case None if newExprText endsWith "_" =>
        (oldExpr.getType(), newExpr.getType()) match {
          case (Success(oldType, _), Success(newType, _)) => oldType.equiv(newType)
          case _ => false
        }
      case _ => false
    }
  }

  // Candidates: the bare method reference, plus (if not already present) the
  // explicitly eta-expanded form with a trailing ` _`.
  private def possibleReplacements(expr: ScExpression): Seq[String] = {
    val withoutArguments = methodWithoutArgumentsText(expr)
    val withUnderscore =
      if (expr.getText endsWith "_") Nil
      else withoutArguments.map(_ + " _")

    withoutArguments ++ withUnderscore
  }
}
/**
 * Quick fix that replaces the flagged expression with its method-value form.
 *
 * @param expr        the expression reported by the inspection
 * @param replacement replacement text, already validated by the inspection
 * @param hint        user-visible description of the fix
 */
class ConvertibleToMethodValueQuickFix(expr: ScExpression, replacement: String, hint: String)
        extends AbstractFixOnPsiElement(hint, expr){

  def doApplyFix(project: Project) {
    val element = getElement
    if (element.isValid) {
      val replacementExpr = ScalaPsiElementFactory.createExpressionFromText(replacement, element.getManager)
      element.replaceExpression(replacementExpr, removeParenthesis = true)
    }
  }
}
| double-y/translation-idea-plugin | src/org/jetbrains/plugins/scala/codeInspection/etaExpansion/ConvertibleToMethodValueInspection.scala | Scala | apache-2.0 | 4,485 |
package moe.pizza.evewho
import org.http4s.client.blaze.PooledHttp1Client
/**
 * Created by Andi on 20/01/2016.
 *
 * Example program: fetches the member list of a hard-coded alliance from the
 * Evewho API, prints the response and its size, then shuts down the client.
 */
object EvewhoExample extends App {
  val evewho = new Evewho()
  implicit val client = PooledHttp1Client()
  try {
    val paistis = evewho.allianceList(1983809465).unsafePerformSync
    println(paistis)
    println(paistis.characters.size)
  } finally {
    // Always release the client's connection pool, even when the request
    // throws — previously a failed request skipped shutdown and leaked the
    // pool (keeping its threads, and potentially the JVM, alive).
    client.shutdown.unsafePerformSync
  }
}
| xxpizzaxx/pizza-eveapi | src/main/scala/moe/pizza/evewho/EvewhoExample.scala | Scala | mit | 389 |
package edu.neu.coe.scala.numerics
/**
 * Minimal abstraction over something that can yield a value of type X.
 *
 * @author scalaprof
 * @tparam X the type of the wrapped value
 */
trait Valuable[X] {
  /** Returns the underlying value. */
  def get: X
}
/*
* ******************************************************************************
* * Copyright (C) 2013 Christopher Harris (Itszuvalex)
* * Itszuvalex@gmail.com
* *
* * This program is free software; you can redistribute it and/or
* * modify it under the terms of the GNU General Public License
* * as published by the Free Software Foundation; either version 2
* * of the License, or (at your option) any later version.
* *
* * This program is distributed in the hope that it will be useful,
* * but WITHOUT ANY WARRANTY; without even the implied warranty of
* * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* * GNU General Public License for more details.
* *
* * You should have received a copy of the GNU General Public License
* * along with this program; if not, write to the Free Software
* * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* *****************************************************************************
*/
package com.itszuvalex.femtocraft.common.gui
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.inventory.{IInventory, Slot}
import net.minecraft.item.ItemStack
import net.minecraft.util.IIcon
object DisplaySlot {
  // Icon shown for slots that refuse item placement. Declared here as a
  // public var initialised to null and never assigned in this file —
  // presumably set externally during texture/icon registration; TODO confirm
  // the assigning caller.
  var noPlaceDisplayIcon: IIcon = null
}
/**
 * Read-only inventory slot: items can neither be placed into it nor taken
 * from it, so its contents are purely for display.
 */
class DisplaySlot(par1iInventory: IInventory, par2: Int, par3: Int, par4: Int) extends Slot(par1iInventory, par2, par3, par4) {

  /** No item is ever valid for placement in a display slot. */
  override def isItemValid(stack: ItemStack): Boolean = false

  /** Silently ignore attempts to store a stack. */
  override def putStack(stack: ItemStack) {
  }

  /** The displayed stack can never be removed by a player. */
  override def canTakeStack(player: EntityPlayer): Boolean = false
}
| Itszuvalex/Femtocraft-alpha-1 | src/main/java/com/itszuvalex/femtocraft/common/gui/DisplaySlot.scala | Scala | gpl-2.0 | 1,614 |
/*
* FolderFrameImpl.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.mellite.impl.document
import de.sciss.desktop.{FileDialog, Menu, OptionPane}
import de.sciss.equal.Implicits._
import de.sciss.file.File
import de.sciss.lucre.edit.UndoManager
import de.sciss.lucre.expr.CellView
import de.sciss.lucre.synth.{Txn => STxn}
import de.sciss.lucre.{Copy, Txn, Folder => LFolder}
import de.sciss.mellite.impl.{WindowImpl, WorkspaceWindowImpl}
import de.sciss.mellite.{ActionCloseAllWorkspaces, Application, FolderEditorView, FolderFrame, FolderView, Mellite, UniverseHandler, WindowPlacement}
import de.sciss.proc
import de.sciss.proc.{Durable, Workspace}
import java.io.{DataOutputStream, FileOutputStream}
import scala.concurrent.Future
import scala.swing.{Action, Component, SequentialContainer}
/**
 * Factory and implementation for folder (workspace-content) windows.
 * All methods run inside a transaction `T`.
 */
object FolderFrameImpl {
  /**
   * Returns the (possibly cached) folder frame for `folder`, creating it
   * via `newInstance` on a cache miss.
   */
  def apply[T <: STxn[T]](name: CellView[T, String],
                          folder: LFolder[T],
                          isWorkspaceRoot: Boolean)(implicit tx: T,
                                                    handler: UniverseHandler[T]): FolderFrame[T] =
    handler(folder, FolderFrame)(newInstance(name, folder, isWorkspaceRoot = isWorkspaceRoot))

  // Builds the editor view with a fresh undo manager, wraps it in a frame,
  // and initializes the frame with the (observable) title.
  private def newInstance[T <: STxn[T]](name: CellView[T, String], folder: LFolder[T],
                                        isWorkspaceRoot: Boolean)
                                       (implicit tx: T, handler: UniverseHandler[T]): FolderFrame[T] = {
    implicit val undoMgr: UndoManager[T] = UndoManager()
    val view = FolderEditorView(folder)
    val res = new FrameImpl[T](view, name, isWorkspaceRoot = isWorkspaceRoot)
    res.init().setTitle(name)
  }

  /**
   * Inserts a "duplicate" item bound to `action` into the application's
   * Edit menu for the given window. No-op if the Edit menu is not a group.
   */
  def addDuplicateAction[T <: STxn[T]](w: WindowImpl[T], action: Action): Unit =
    Application.windowHandler.menuFactory.get("edit") match {
      case Some(mEdit: Menu.Group) =>
        val itDup = Menu.Item("duplicate", action)
        mEdit.add(Some(w.window), itDup) // XXX TODO - should be insert-after "Select All"
      case _ =>
    }

  /**
   * Concrete folder frame. When `isWorkspaceRoot` is true the frame
   * represents the workspace itself: closing it closes the workspace, it
   * cannot be cloned into a new window, and it offers binary export.
   */
  private final class FrameImpl[T <: STxn[T]](val view: FolderEditorView[T], name: CellView[T, String],
                                              isWorkspaceRoot: Boolean)
                                             (implicit val handler: UniverseHandler[T])
    extends WorkspaceWindowImpl[T] with FolderFrame[T] {

    // The workspace root window is unique; only sub-folder frames can clone.
    override def supportsNewWindow: Boolean = !isWorkspaceRoot

    override def newWindow()(implicit tx: T): FolderFrame[T] =
      newInstance(name, view.obj, isWorkspaceRoot = isWorkspaceRoot)

    def folderView: FolderView[T] = view.peer

    def bottomComponent: Component with SequentialContainer = view.bottomComponent

    // Exports the root folder's contents into a self-contained binary
    // workspace file chosen by the user.
    private object actionExportBinaryWorkspace extends scala.swing.Action("Export Binary Workspace...") {
      // Shows a save dialog, appends the "<ext>.bin" suffix when missing, and
      // confirms/overwrites an existing file. Returns None if the user
      // cancels or an existing file cannot be deleted.
      private def selectFile(): Option[File] = {
        val fileDlg = FileDialog.save(title = "Binary Workspace File")
        fileDlg.show(Some(window)).flatMap { file0 =>
          import de.sciss.file._
          val name = file0.name
          // NOTE(review): `ext` presumably yields only the last suffix (e.g.
          // "bin"), in which case comparing it to s"${proc.Workspace.ext}.bin"
          // can never match — verify de.sciss.file's `ext` semantics.
          val file = if (file0.ext.toLowerCase == s"${proc.Workspace.ext}.bin")
            file0
          else
            file0.parent / s"$name.${proc.Workspace.ext}.bin"
          if (!file.exists()) Some(file) else {
            val optOvr = OptionPane.confirmation(
              message = s"File ${file.path} already exists.\\nAre you sure you want to overwrite it?",
              optionType = OptionPane.Options.OkCancel,
              messageType = OptionPane.Message.Warning
            )
            val fullTitle = "Export Binary Workspace"
            optOvr.title = fullTitle
            val resOvr = optOvr.show()
            val isOk = resOvr === OptionPane.Result.Ok
            if (!isOk) None else if (file.delete()) Some(file) else {
              val optUnable = OptionPane.message(
                message = s"Unable to delete existing file ${file.path}",
                messageType = OptionPane.Message.Error
              )
              optUnable.title = fullTitle
              optUnable.show()
              None
            }
          }
        }
      }

      override def apply(): Unit =
        selectFile().foreach { f =>
          type Out = Durable.Txn
          // Create an empty in-memory (blob) workspace, copy every root-level
          // object into it across the two transaction systems, and serialize
          // the whole workspace to a byte array.
          val ws = Workspace.Blob.empty(meta = Mellite.meta)
          val blob = Txn.copy[T, Out, Array[Byte]] { (txIn0: T, txOut: Out) => {
            implicit val txIn: T = txIn0
            val context = Copy[T, Out]()(txIn, txOut)
            val fIn = view.peer.root()
            val fOut = ws.root(txOut)
            fIn.iterator.foreach { in =>
              val out = context(in)
              fOut.addLast(out)(txOut)
            }
            context.finish()
            ws.toByteArray(txOut)
          }} (view.cursor, ws.cursor)
          // println(s"blob size = ${blob.length}")
          // Write the serialized workspace; the stream is closed even on error.
          val fOut = new FileOutputStream(f)
          try {
            val dOut = new DataOutputStream(fOut)
            dOut.write(blob)
            dOut.flush()
          } finally {
            fOut.close()
          }
        }
    }

    override protected def initGUI(): Unit = {
      super.initGUI()
      addDuplicateAction (this, view.actionDuplicate)
      // Only the workspace root offers export; it is bound to the existing
      // "file.bounce" menu item.
      if (isWorkspaceRoot) {
        val mf = window.handler.menuFactory
        // bindMenus("file.bounce" -> actionExportBinaryWorkspace)
        mf.get("file.bounce") match {
          case Some(it: Menu.ItemLike[_]) =>
            it.bind(window, actionExportBinaryWorkspace)
          case _ =>
        }
      }

      // bindMenus("actions.debug-print" -> Action(null) {
      //   folderView.cursor.step { implicit tx =>
      //     val program: Ex[Boolean] = {
      //       import de.sciss.lucre.expr.ExImport._
      //       import de.sciss.lucre.expr.graph._
      //       val f = "in".attr(Folder())
      //       f.nonEmpty
      //     }
      //     val newObj = BooleanObj.newProgram[T](program)
      //     newObj.name = "in.nonEmpty"
      //     folderView.obj.addLast(newObj)
      //   }
      // })
    }

    // Top-center placement on screen.
    override protected def placement: WindowPlacement = WindowPlacement(0.5f, 0.0f)

    // Closing the workspace root delegates to the close-workspace action
    // (which may prompt the user); other folder frames close normally.
    override protected def performClose(): Future[Unit] = if (!isWorkspaceRoot) super.performClose() else {
      saveViewState()
      import view.universe.workspace
      ActionCloseAllWorkspaces.tryClose(workspace, Some(window))
    }
  }
}
/*
* Copyright 2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package influxdbreporter.core.metrics.push
import java.util.concurrent.TimeUnit
import com.codahale.metrics.Clock
import influxdbreporter.core.Tag
import influxdbreporter.core.metrics.Metric._
import scala.annotation.varargs
import scala.language.postfixOps
/**
 * Handle returned by `Timer.time`; call `stop` to end the measurement,
 * optionally attaching additional tags to it.
 */
sealed trait TimerContext {

  /** Stops the measurement, merging the given tags into the report. */
  @varargs def stop(tags: Tag*): Unit = {
    stopWithTags(tags)
  }

  // cause of: https://issues.scala-lang.org/browse/SI-9013, https://issues.scala-lang.org/browse/SI-1459
  protected def stopWithTags(tags: Seq[Tag]): Unit
}
/**
 * Tag-aware timer metric. Measurements are either taken via `time`/`stop`
 * (using the provided clock) or reported directly via `calculatedTime`.
 */
class Timer(clock: Clock) extends TagRelatedPushingMetric[CodahaleTimer] {

  def this() = this(Clock.defaultClock())

  /** Starts a measurement; stop the returned context to record it. */
  @varargs def time(tags: Tag*): TimerContext = new TimerContextImpl(tags.toList, this, clock)

  /** Records an externally measured duration. */
  @varargs def calculatedTime(time: Long, unit: TimeUnit, tags: Tag*): Unit =
    increaseMetric(tags.toList, _.update(time, unit))

  override protected def createMetric(): CodahaleTimer = new CodahaleTimer()

  // Records an elapsed time (in nanoseconds, as produced by the clock ticks).
  private def notify(tags: List[Tag], time: Long): Unit =
    increaseMetric(tags, _.update(time, TimeUnit.NANOSECONDS))

  /**
   * Context capturing the start tick; on stop it reports the elapsed ticks
   * together with the union of start-time and stop-time tags.
   */
  private class TimerContextImpl(startingTags: List[Tag], listener: Timer, clock: Clock)
    extends TimerContext {

    private val startTick: Long = clock.getTick

    override protected def stopWithTags(tags: Seq[Tag]): Unit =
      listener.notify(tags.toList ::: startingTags, clock.getTick - startTick)
  }
}
| TouK/influxdb-reporter | core/src/main/scala/influxdbreporter/core/metrics/push/Timer.scala | Scala | apache-2.0 | 2,018 |
object Spawn {
  // Deliberately exits with a non-zero status so callers observe a failure
  // (sbt scripted-test fixture). sys.exit delegates to Runtime.exit, exactly
  // like System.exit.
  def main(args: Array[String]): Unit = sys.exit(1)
}
| mdedetrich/sbt | sbt/src/sbt-test/run/error/changes/RunExplicitFailure.scala | Scala | bsd-3-clause | 71 |
package scryetek.vecmath
/**
 * Mutable 3-component single-precision vector.
 *
 * Most operations come in two flavours: a pure one returning a fresh `Vec3`
 * (symbolic operators, `-ed` names) and an in-place one taking an `out`
 * parameter that defaults to `this`. Because `out` may alias `this`, the
 * in-place methods compute all arguments before writing — keep that order
 * when modifying this class.
 */
@inline
final class Vec3(var x: Float, var y: Float, var z: Float) {
  /** Creates the zero vector. */
  @inline
  def this() = this(0, 0, 0)

  /** Sets the components (unspecified ones keep their value); returns `this`. */
  @inline
  def set(x: Float = this.x, y: Float = this.y, z: Float = this.z) = {
    this.x = x
    this.y = y
    this.z = z
    this
  }

  /** Copies the components of `v` into this vector; returns `this`. */
  @inline
  def set(v: Vec3): Vec3 =
    set(v.x, v.y, v.z)

  /** Adds two vectors. */
  @inline
  def +(v: Vec3): Vec3 =
    Vec3(x + v.x, y + v.y, z + v.z)

  /** Adds this vector to another vector into the target output vector. */
  @inline
  def add(v: Vec3, out: Vec3 = this): Vec3 =
    out.set(x + v.x, y + v.y, z + v.z)

  /** Adds this vector to another vector into the target output vector. */
  @inline
  def add(x: Float, y: Float, z: Float): Vec3 =
    add(x, y, z, this)

  /** Adds this vector to another vector into the target output vector. */
  @inline
  def add(x: Float, y: Float, z: Float, out: Vec3): Vec3 =
    out.set(this.x + x, this.y + y, this.z + z)

  /** Subtracts two vectors. */
  @inline
  def -(v: Vec3): Vec3 =
    Vec3(x - v.x, y - v.y, z - v.z)

  /** Subtracts a vector from this vector into the given output vector. */
  @inline
  def sub(v: Vec3, out: Vec3 = this): Vec3 =
    out.set(x - v.x, y - v.y, z - v.z)

  /** Subtracts a vector from this vector into the given output vector. */
  @inline
  def sub(x: Float, y: Float, z: Float, out: Vec3): Vec3 =
    out.set(this.x - x, this.y - y, this.z - z)

  /** Subtracts a vector from this vector into the given output vector. */
  @inline
  def sub(x: Float, y: Float, z: Float): Vec3 =
    sub(x, y, z, this)

  /** The dot product of two vectors. */
  @inline
  def *(v: Vec3): Float =
    x*v.x + y*v.y + z*v.z

  /** Returns the vector scaled by the given scalar. */
  @inline
  def *(s: Float): Vec3 =
    Vec3(x*s, y*s, z*s)

  /** Scales this vector by the given scalar, into the target output vector. */
  def scale(s: Float, out: Vec3 = this): Vec3 =
    out.set(x*s, y*s, z*s)

  /** Returns the vector divided by the given scalar. */
  @inline
  def /(s: Float): Vec3 = {
    // Multiply by the reciprocal: one division instead of three.
    val f = 1/s
    Vec3(x*f, y*f, z*f)
  }

  /** Divides this vector by the given scalar, into the target output vector. */
  @inline
  def div(s: Float, out: Vec3 = this): Vec3 =
    scale(1/s, out)

  /** Returns the negated vector. */
  @inline
  def unary_- =
    Vec3(-x, -y, -z)

  /** Negates this vector into the target output vector. */
  @inline
  def negate(out: Vec3 = this) =
    out.set(-x, -y, -z)

  /** Returns the squared magnitude (length<sup>2</sup>) of this vector. */
  @inline
  def magSqr = x*x + y*y + z*z

  /** Returns the magnitude (length) of this vector. */
  @inline
  def magnitude = math.sqrt(magSqr).toFloat

  /** Returns the normalized vector.
   *  NOTE(review): no zero check — a zero vector yields NaN components.
   */
  @inline
  def normalized = this / magnitude

  /** Normalizes this vector into the target output vector. */
  @inline
  def normalize(out: Vec3 = this) =
    out.set(this).div(magnitude)

  /** Returns the cross product of two vectors. */
  @inline
  def ⨯(v: Vec3): Vec3 =
    Vec3(y*v.z - z*v.y, z*v.x - x*v.z, x*v.y - y*v.x)

  /** Returns the cross product of two vectors. */
  @inline
  def crossed(v: Vec3): Vec3 = this ⨯ v

  /** Cross products this vector and another into the target output vector.
   *  Safe when `out eq this`: all components are computed before `set` writes.
   */
  @inline
  def cross(v: Vec3, out: Vec3 = this) =
    out.set(y*v.z - z*v.y, z*v.x - x*v.z, x*v.y - y*v.x)

  /** Returns the component-wise maximum of the two vectors. */
  @inline
  def max(v: Vec3): Vec3 =
    Vec3(v.x max x, v.y max y, v.z max z)

  /** Returns the component-wise minimum of the two vectors. */
  @inline
  def min(v: Vec3): Vec3 =
    Vec3(v.x min x, v.y min y, v.z min z)

  /**
   * Return the quaternion that will align this vector with another.
   * NOTE(review): degenerate for exactly opposite vectors — cross product and
   * w both become 0, so `normalized` divides by zero; confirm whether callers
   * can hit that case.
   */
  def angleBetween(b: Vec3): Quat = {
    val mag = magnitude*b.magnitude
    val qx = y*b.z - z*b.y
    val qy = z*b.x - x*b.z
    val qz = x*b.y - y*b.x
    val qw = mag+(this*b)
    Quat(qx, qy, qz, qw).normalized
  }

  /**
   * Return a vector reflecting this vector about the given normal.
   * @note the normal must be normalized.
   */
  def reflected(normal: Vec3): Vec3 =
    normal * 2*(this*normal) - this

  /**
   * Destructively reflect this vector about the given normal.
   * Safe when `out eq this`: `scale` is computed before any write.
   */
  def reflect(normal: Vec3, out: Vec3 = this): Vec3 = {
    val scale = 2*(this*normal)
    out.set(normal.x*scale-x, normal.y*scale-y, normal.z*scale-z)
  }

  /**
   * Returns the linear interpolation of this vector with another, with t ranging from 0..1
   */
  @inline
  def lerp(q: Vec3, t: Float): Vec3 =
    Vec3(x + t*(q.x-x),
         y + t*(q.y-y),
         z + t*(q.z-z))

  /**
   * Destructively places the linear interpolation of this vector with another into out, with t ranging from 0..1
   */
  def lerp(q: Vec3, t: Float, out: Vec3): Vec3 =
    out.set(x + t*(q.x-x),
            y + t*(q.y-y),
            z + t*(q.z-z))

  /** Returns a copy, with any specified components replaced. */
  @inline
  def copy(x: Float = x, y: Float = y, z: Float = z): Vec3 =
    Vec3(x, y, z)

  override def toString =
    s"Vec3(${x}f,${y}f,${z}f)"

  override def equals(o: Any): Boolean = o match {
    case v: Vec3 => x == v.x && y == v.y && z == v.z
    case _ => false
  }

  // Kept consistent with equals (component-wise); note that mutating a vector
  // used as a hash key invalidates its bucket.
  override def hashCode: Int =
    x.hashCode()*19 + y.hashCode()*23 + z.hashCode()*29
}
object Vec3 {
  /** Creates a vector from the given components. */
  def apply(x: Float, y: Float, z: Float): Vec3 = new Vec3(x, y, z)

  /** Creates the zero vector. */
  def apply(): Vec3 = new Vec3()
}
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart.graphics
import java.awt.Graphics2D
import com.netflix.atlas.core.model.TimeSeq
/**
 * Renders a time series as a vertical band: for every time step whose value
 * is not (nearly) zero, a full-height rectangle is filled between the x
 * positions of the previous and current step.
 */
case class TimeSeriesSpan(style: Style, ts: TimeSeq, xaxis: TimeAxis) extends Element {

  def draw(g: Graphics2D, x1: Int, y1: Int, x2: Int, y2: Int): Unit = {
    style.configure(g)
    val step = ts.step
    val xscale = xaxis.scale(x1, x2)
    var time = xaxis.start
    while (time < xaxis.end) {
      // Rectangle spans from the previous step's x position to this one's.
      val left = xscale(time - step)
      val right = xscale(time)
      if (!com.netflix.atlas.core.util.Math.isNearlyZero(ts(time))) {
        g.fillRect(left, y1, right - left, y2 - y1)
      }
      time += step
    }
  }
}
| rspieldenner/atlas | atlas-chart/src/main/scala/com/netflix/atlas/chart/graphics/TimeSeriesSpan.scala | Scala | apache-2.0 | 1,219 |
package spray.servlet31
import spray.servlet.{Initializer, ConnectorSettings}
import javax.servlet.http.{HttpServlet, HttpServletResponse, HttpServletRequest}
import akka.actor.{ActorRef, ActorSystem}
import akka.event.{Logging, LoggingAdapter}
import akka.spray.RefUtils
import javax.servlet.{AsyncEvent, AsyncListener, AsyncContext}
/**
 * Connector servlet bridging the Servlet 3.1 async API with a spray service actor.
 *
 * Each request is suspended via `startAsync` and handed off to the configured
 * service actor; timeout and error events are handled by the attached
 * [[AsyncContextListener]]. All collaborators are resolved from servlet-context
 * attributes populated by spray's [[Initializer]].
 */
class Servlet31ConnectorServlet extends HttpServlet {
  var system: ActorSystem = _
  var serviceActor: ActorRef = _
  // Receives timeout notifications; defaults to the service actor when unconfigured.
  var timeoutHandler: ActorRef = _
  implicit var settings: ConnectorSettings = _
  implicit var log: LoggingAdapter = _
  /**
   * The same as Spray Servlet 3.0: pulls the actor system, service actor and
   * settings out of the servlet context and validates them.
   */
  override def init() {
    import Initializer._
    system = getServletContext.getAttribute(SystemAttrName).asInstanceOf[ActorSystem]
    serviceActor = getServletContext.getAttribute(ServiceActorAttrName).asInstanceOf[ActorRef]
    settings = getServletContext.getAttribute(SettingsAttrName).asInstanceOf[ConnectorSettings]
    timeoutHandler = if (settings.timeoutHandler.isEmpty) serviceActor else system.actorFor(settings.timeoutHandler)
    require(system != null, "No ActorSystem configured")
    require(serviceActor != null, "No ServiceActor configured")
    require(settings != null, "No ConnectorSettings configured")
    require(RefUtils.isLocal(serviceActor), "The serviceActor must live in the same JVM as the Servlet30ConnectorServlet")
    require(RefUtils.isLocal(timeoutHandler), "The timeoutHandler must live in the same JVM as the Servlet30ConnectorServlet")
    log = Logging(system, this.getClass)
    log.info("Initialized Servlet API 3.1 <=> Spray Connector")
  }
  /**
   * Service returns quickly, to free HTTP thread pool.
   * @param hsRequest servlet request.
   * @param hsResponse servlet response.
   */
  override def service(hsRequest: HttpServletRequest, hsResponse: HttpServletResponse) {
    def requestStringForLog: String = "%s request to '%s'" format(hsRequest.getMethod, ModelConverter.rebuildUri(hsRequest))
    val asyncContext = hsRequest.startAsync()
    asyncContext.setTimeout(settings.requestTimeout.toMillis)
    // NOTE(review): per the Servlet spec, AsyncListener.onStartAsync fires only when
    // async processing is RE-initiated on the same request, not for the startAsync()
    // call the listener was registered under — confirm the responder is actually
    // created for the first async cycle.
    asyncContext.addListener(new AsyncContextListener(asyncContext, requestStringForLog))
  }
  /**
   * Listen to timeout and error events.
   * @param asyncContext the context that this listener belongs to.
   * @param requestStringForLog friendly string that represents the request. Used for logging.
   */
  private class AsyncContextListener(private val asyncContext: AsyncContext,
                                     private val requestStringForLog: String) extends AsyncListener {
    /**
     * onStartAsync should be run quick enough for this to be set before a timeout.
     */
    var responder: Responder = null
    /**
     * A timeout happened. Functionality copied from Spray Servlet 3.0
     * @param event timeout event data.
     */
    def onTimeout(event: AsyncEvent) {
      log.warning("Timeout of {}", requestStringForLog)
      // May still be null if onStartAsync has not run yet (see class comment).
      if (responder != null) responder.callTimeout(timeoutHandler)
      asyncContext.complete()
    }
    /**
     * An error happened. We log it.
     * @param event error event data.
     */
    def onError(event: AsyncEvent) {
      event.getThrowable match {
        case null ⇒ log.error("Unspecified Error during async processing of {}", requestStringForLog)
        case ex ⇒ log.error(ex, "Error during async processing of {}", requestStringForLog)
      }
    }
    /**
     * Parse request headers and hooks to read listener. Responder will take charge once read is completed.
     * @param event startAsync event data.
     */
    def onStartAsync(event: AsyncEvent) {
      val hsRequest = asyncContext.getRequest.asInstanceOf[HttpServletRequest]
      val futureRequest = ModelConverter.toHttpRequest(hsRequest)
      responder = new Responder(system, log, settings, asyncContext, requestStringForLog,
        futureRequest, serviceActor)
    }
    /**
     * We do nothing here.
     * @param event complete event data.
     */
    def onComplete(event: AsyncEvent) {
      log.debug("onComplete event of {}", requestStringForLog)
    }
  }
  /**
   * The same as Spray Servlet 3.0: shuts the actor system down with the servlet.
   */
  override def destroy() {
    if (!system.isTerminated) {
      system.shutdown()
      system.awaitTermination()
    }
  }
}
| lukiano/spray-servlet31 | src/main/scala/spray/servlet31/Servlet31ConnectorServlet.scala | Scala | mit | 4,272 |
package ca.uwaterloo.gsd.rangeFix
import java.io.OutputStreamWriter
import java.lang.Runtime
import java.io.InputStreamReader
import java.io.BufferedReader
import java.io.BufferedWriter
import java.io.Writer
import java.io.FileWriter
import scala.collection._
/** Sink for the SMT solver command trace. */
trait TraceWriter {
  def write(content:String)
  def flush()
}

/**
 * Forwards to the underlying writer AND appends every chunk to the trace file
 * named by [[TraceWriter.fileName]] (the file is opened and closed per write).
 */
class EnabledTraceWriter(w:Writer) extends TraceWriter {
  def write(content:String) = {
    val f = new FileWriter(TraceWriter.fileName, true)
    f.write(content)
    f.close
    w.write(content)
  }
  def flush() = w.flush()
}

/** Pass-through writer used when tracing is disabled. */
class DisabledTraceWriter(w:Writer) extends TraceWriter {
  def write(content:String) = { w.write(content) }
  def flush() = w.flush()
}

/** Global trace configuration. NOTE(review): mutable globals, not thread-safe. */
object TraceWriter {
  var fileName:String = "temp.txt"
  var traceEnabled = false
}
class Z3 {
val fixedParams = Array(CompilationOptions.Z3_PATH, "-in", "-smt2")
val parameters = if (CompilationOptions.THREAD_NUMBER == 1) fixedParams else fixedParams ++ List("PAR_NUM_THREADS=" + CompilationOptions.THREAD_NUMBER)
val p = Runtime.getRuntime().exec(parameters)
val orgWriter = new BufferedWriter(new OutputStreamWriter(p.getOutputStream()))
val writer = if (TraceWriter.traceEnabled) new EnabledTraceWriter(orgWriter) else new DisabledTraceWriter(orgWriter)
val reader = new BufferedReader(new InputStreamReader(p.getInputStream()))
  /**
   * Declares one SMT constant per (name, type) pair via `declare-const`.
   * The repeated `reader.ready() == false` assertions verify the solver produced
   * no unexpected output between commands (errors would show up on stdout).
   */
  def declareVariables(vars: Iterable[(String, SMTType)]) {
    assert(reader.ready() == false, reader.readLine)
    for (v <- vars) {
      assert(reader.ready() == false, reader.readLine)
      writer.write("(declare-const ")
      writer.write(v._1 + " "
        + v._2.toString);
      writer.write(")\\n")
      assert(reader.ready() == false, reader.readLine)
    }
    // writer.flush()
    assert(reader.ready() == false, reader.readLine)
  }
  // assuming no string
  /** Asserts the given expression in the solver. */
  def assertConstraint(constraint: SMTExpression) {
    assertConstraint(constraint.toString)
  }
  /** Asserts a raw SMT-LIB constraint string. */
  def assertConstraint(constraint: String) {
    assert(reader.ready() == false, reader.readLine)
    // println("### asserting ### " + constraint)
    writer.write("(assert " + constraint + ")\\n")
    assert(reader.ready() == false, constraint + " => " + reader.readLine())
  }
  /** Asserts an expression under a name, so it can appear in unsat cores. */
  def assertNamedConstraint(constraint:SMTExpression, name:String) {
    assertNamedConstraint(constraint.toString, name)
  }
  /** Asserts a raw constraint string under a name (SMT-LIB `:named` annotation). */
  def assertNamedConstraint(constraint:String, name:String) {
    assert(reader.ready() == false, reader.readLine)
    writer.write("(assert (! " + constraint + " :named " + name + "))\\n")
    // writer.flush()
    assert(reader.ready() == false)
  }
  /** Declares the given algebraic datatypes, skipping empty declarations. */
  def declareTypes(types: Iterable[SMTType]) {
    assert(reader.ready() == false, reader.readLine)
    val texts = types.map(_.toDeclaration).filter(_ != "")
    if (texts.size == 0) return
    writer.write("(declare-datatypes () (" + texts.reduceLeft(_ + _) + "))\\n")
    // writer.flush()
    assert(reader.ready() == false)
  }
  /** Sends the definition of the given function to the solver. */
  def declareFunc(func: SMTFuncDefine) {
    assert(reader.ready() == false, reader.readLine)
    writer.write(func.toDefString + "\\n")
    // writer.flush()
    assert(reader.ready() == false, reader.readLine)
  }
  /** Pushes a new assertion scope onto the solver stack. */
  def push() {
    assert(reader.ready() == false, reader.readLine)
    writer.write("(push)\\n")
    // writer.flush()
    assert(reader.ready() == false)
  }
  /** Pops the current assertion scope, discarding assertions made inside it. */
  def pop() {
    assert(reader.ready() == false, reader.readLine)
    writer.write("(pop)\\n")
    // writer.flush()
    assert(reader.ready() == false, reader.readLine)
  }
  /**
   * Runs `check-sat`. Returns true for "sat", false for "unsat";
   * any other solver answer fails the assertion.
   */
  def checkSat(): Boolean = {
    assert(reader.ready() == false, reader.readLine)
    writer.write("(check-sat)\\n")
    writer.flush()
    val line = reader.readLine();
    assert(reader.ready() == false, reader.readLine)
    if (line == "sat") true
    else {
      assert(line == "unsat", line)
      false
    }
  }
  /** Enables unsat-core production; must be set before the relevant assertions. */
  def enableUnsatCore() {
    assert(reader.ready() == false, reader.readLine)
    writer.write("(set-option :produce-unsat-cores true)\\n")
    // writer.flush()
    assert(reader.ready() == false)
  }
  /**
   * Computes a minimal unsat core over `vars` by first obtaining a core and then
   * greedily removing one member at a time: a member belongs to the minimal core
   * iff the remaining members become satisfiable without it.
   * Returns None when the assertions are satisfiable.
   */
  def getMinimalUnsatCore(vars: Iterable[String]): Option[Traversable[String]] =
  {
    val core = getUnsatCore(vars)
    if (core.isDefined) {
      var toTest = core.get
      val newCore = mutable.ListBuffer[String]()
      while (toTest.size > 0) {
        push()
        try {
          // assert vars except toTest.head
          def assertVar(v:String) { assertNamedConstraint(v, exprNamePrefix + v) }
          newCore foreach assertVar
          toTest.tail foreach assertVar
          // if toTest.head is part of the minimal core
          if (checkSat) newCore += toTest.head
          // remove toTest.head
          toTest = toTest.tail
        }
        finally {
          pop()
        }
      }
      Some(newCore)
    }
    else core
  }

  // Prefix used to name asserted variables so solver core entries can be mapped back.
  val exprNamePrefix = "__ex__"

  /**
   * Asserts each of `vars` under a generated name inside a temporary scope and
   * returns the unsat core with the name prefix stripped; None when satisfiable.
   */
  def getUnsatCore(vars: Iterable[String]): Option[Traversable[String]] =
  {
    assert(reader.ready() == false, reader.readLine)
    push()
    try {
      for (v <- vars)
        assertNamedConstraint(v, exprNamePrefix + v)
      if (checkSat) {
        None
      }
      else {
        val result = getUnsatCore()
        result.map(_.map(name => {assert(name.size > exprNamePrefix.size, name); name.substring(exprNamePrefix.size)}))
      }
    }
    finally {
      pop()
      assert(reader.ready() == false, reader.readLine)
    }
  }

  // None means sat
  /**
   * Issues `get-unsat-core` and parses the parenthesised name list.
   * Returns None when the solver reports that no core is available (i.e. sat).
   */
  def getUnsatCore(): Option[Traversable[String]] = {
    assert(reader.ready() == false, reader.readLine)
    // println("### get-core ### " + varstr)
    writer.write("(get-unsat-core)\\n")
    writer.flush()
    val line = reader.readLine()
    assert(reader.ready() == false, reader.readLine)
    val pattern = "(error \\"line \\\\d+ column \\\\d+: unsat core is not available\\")".r
    // println("### result ### " + line)
    if (pattern.findFirstIn(line) == line) return None
    assert(line.length() >= 2, line)
    assert(!line.startsWith("(error "), line)
    val trimmedLine = line.substring(1, line.length - 1)
    if (trimmedLine.size == 0) Some(List()) else Some(trimmedLine.split(" "))
  }
  //wj begin
  /**
   * Issues `get-model` and reads the reply character-by-character until the outer
   * parentheses balance. Returns the model body (with the leading "(model " and
   * trailing ")" stripped) together with the number of top-level entries (as a string).
   */
  def getValValueMap(): (String,String) = {
    assert(reader.ready() == false, reader.readLine)
    writer.write("(get-model)\\n")
    writer.flush()
    var strline:StringBuffer=new StringBuffer(80)
    var flag=true
    var n = false
    var r = false
    var braceNum=0
    var group=0
    while (flag) {
      val character = reader.read()
      if (!n && character.toChar == '\\n')
        n = true
      if (!r && character.toChar == '\\r')
        r = true
      if (character.toChar == '(')
        braceNum = braceNum + 1
      else if (character.toChar == ')') {
        braceNum = braceNum - 1
        if (braceNum == 0) { // parentheses fully matched: finished reading the reply
          // these reads consume the trailing newline / carriage-return characters
          if (n)
            reader.read()
          if (r)
            reader.read()
          flag = false
        } else if (braceNum == 1)
          group = group + 1
      }
      if (character != 13 && character != 10) { // skip CR and LF characters
        strline.append(character.toChar)
      }
    }
    val line = strline.toString()
    assert(reader.ready() == false, reader.readLine)
    (line.substring(9, line.length - 1), group.toString)
  }
  //wj end
def exit() {
writer.write("(exit)\\n")
try{
writer.flush()
}
catch{
case _ => //may have been closed
}
p.waitFor()
}
}
| matachi/rangeFix | src/main/scala/fixGeneration/Z3.scala | Scala | mit | 7,603 |
package tryp
import macros._
import annotation.StaticAnnotation
/** Annotation whose expansion is delegated to the [[ExportAnn.process]] macro. */
class export
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro ExportAnn.process
}

/** Annotation expanded by the [[ExportNamesAnn.process]] macro. */
class exportNames
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro ExportNamesAnn.process
}

/** Annotation expanded by the [[ExportTypesAnn.process]] macro. */
class exportTypes
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro ExportTypesAnn.process
}

/** Annotation expanded by the [[ExportValsAnn.process]] macro. */
class exportVals
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro ExportValsAnn.process
}

/** Annotation expanded by the [[IntegrateAnn.process]] macro. */
class integrate
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro IntegrateAnn.process
}

/** Annotation expanded by the [[AnnoM.process]] macro. */
class anno
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro AnnoM.process
}

/** Annotation expanded by the [[AnnoWrap.process]] macro. */
class annowrap
extends StaticAnnotation
{
  def macroTransform(annottees: Any*) = macro AnnoWrap.process
}
| tek/pulsar | macros/src/annotations.scala | Scala | mit | 844 |
package models
import models.daos._
import play.api.db.slick.DatabaseConfigProvider
import javax.inject.Inject
import scala.concurrent.Future
/**
 * Slick-backed data access for the users table.
 *
 * @param dbConfigProvider Play's injected Slick database configuration provider
 */
class AdminUsers @Inject()(protected val dbConfigProvider: DatabaseConfigProvider) extends DAOSlick {
  import driver.api._
  private val Users = slickUsers
  /** Runs an asynchronous query returning every row of the users table. */
  def all() = db.run(Users.result)
} | hectorgool/fara | app/models/AdminUsers.scala | Scala | apache-2.0 | 343 |
/*
* This file is part of the ToolXiT project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolxit.bibtex
import scala.util.parsing.combinator.RegexParsers
import scala.annotation.tailrec
/**
* @author Lucas Satabin
*
*/
/**
 * Helpers for dissecting BibTeX title/author strings into pseudo-letters
 * (plain characters, `{...}` blocks and `{\command ...}` special letters).
 */
object StringUtils {

  // NOTE(review): returns a structural (anonymous) type, so these calls go
  // through reflection — consider an implicit value class if this is hot.
  implicit def char2testable(c: Char) = new {
    def isAlphaNumeric = c.toString.matches("[a-zA-Z0-9]")
    // BibTeX treats digits as "lower case" for case-of-first-letter purposes.
    def isBibTeXLower =
      if (c.isDigit)
        true
      else
        c.isLower
  }

  object StringParser extends StringParser

  /** Combinator parser turning a raw BibTeX string into a list of [[Word]]s. */
  class StringParser extends RegexParsers {

    // Whitespace is significant inside blocks, so handle it explicitly.
    override def skipWhitespace = false

    // A string is a whitespace-separated list of words; a bare comma is a word.
    lazy val string: Parser[List[Word]] =
      repsep(word | "," ^^^ SimpleWord(List(CharacterLetter(','))), "\\\\s+".r)

    lazy val word: Parser[Word] = composedword | simpleword

    lazy val simpleword: Parser[SimpleWord] = rep1(pseudoLetter) ^^ SimpleWord

    // Words joined by '-' or '~' form a composed word.
    lazy val composedword: Parser[ComposedWord] =
      simpleword ~ sep ~ word ^^ {
        case first ~ sep ~ second => ComposedWord(first, second, sep)
      }

    lazy val pseudoLetter: Parser[PseudoLetter] = special | block | character

    lazy val character: Parser[CharacterLetter] =
      "[^-~\\\\{}\\\\s,]".r ^^ (s => CharacterLetter(s.charAt(0)))

    lazy val sep: Parser[CharacterLetter] =
      "[-~]".r ^^ (s => CharacterLetter(s.charAt(0)))

    // A `{...}` group; may nest and may contain whitespace.
    lazy val block: Parser[BlockLetter] =
      "{" ~>
        rep(block | character
          | "\\\\s".r ^^ (s => CharacterLetter(s.charAt(0)))) <~ "}" ^^ BlockLetter

    // A `{\command arg}` special letter, e.g. accents like {\'e}.
    lazy val special: Parser[SpecialLetter] =
      "{\\\\" ~> ("'|\\"|´|`|\\\\^|~|[^\\\\s{}'\\"´`^~]+".r <~ "\\\\s*".r) ~
        opt(block ^^ (s => (true, s.parts.mkString))
          | ("\\\\s*[^{}\\\\s]+\\\\s*".r ^^ (s => (false, s.trim)))) <~ "}" ^^ {
        case spec ~ Some((braces, char)) => SpecialLetter(spec, Some(char), braces)
        case spec ~ None => SpecialLetter(spec, None, false)
      }
  }

  /* returns the first non brace character at level 0 if any */
  def firstCharacter(str: Word): Option[Char] = {
    @tailrec
    def findFirst(letters: List[PseudoLetter]): Option[Char] = letters match {
      case (_: BlockLetter) :: tail =>
        findFirst(tail)
      case SpecialLetter(spec, _, _) :: _ if spec.contains((c: Char) => c.isLetter) =>
        spec.find(_.isLetter)
      case SpecialLetter(_, Some(char), _) :: _ =>
        char.find(_.isAlphaNumeric)
      case CharacterLetter(c) :: _ if c.isLetter =>
        Some(c)
      case _ :: tail =>
        findFirst(tail)
      case Nil => None
    }
    findFirst(str.letters)
  }

  /** True when the word's first significant character is BibTeX-lowercase. */
  def isFirstCharacterLower(str: Word) =
    firstCharacter(str).map(_.isBibTeXLower).getOrElse(false)
}
/** A lexical unit of a BibTeX string: a character, a `{...}` block or a special letter. */
sealed trait PseudoLetter {
  // True when this letter consists entirely of whitespace.
  val whitespace_? : Boolean
}

/** A single plain character. */
final case class CharacterLetter(char: Char) extends PseudoLetter {
  override def toString = char.toString
  val whitespace_? = char.toString.matches("\\\\s+")
}

/** A `{...}` group; whitespace iff every contained part is whitespace. */
final case class BlockLetter(parts: List[PseudoLetter]) extends PseudoLetter {
  override def toString = parts.mkString("{", "", "}")
  val whitespace_? = parts.forall(_.whitespace_?)
}

/**
 * A `{\command arg}` special letter (accents, ligatures, ...).
 *
 * @param command the TeX command name (without the backslash)
 * @param arg the optional argument
 * @param withBraces whether the argument was itself wrapped in braces
 */
final case class SpecialLetter(command: String, arg: Option[String], withBraces: Boolean) extends PseudoLetter {
  override def toString = {
    val argument = arg match {
      case Some(a) if withBraces => "{" + a + "}"
      case Some(a) => a
      case None => ""
    }
    "{\\\\" + command + argument + "}"
  }

  /** Returns the UTF8 representation of this special letter if known */
  def toUTF8: Option[CharacterLetter] = SpecialCharacters(this).map(CharacterLetter)

  val whitespace_? = false
}
/** A word: a sequence of pseudo-letters with a BibTeX-aware length. */
trait Word {
  val letters: List[PseudoLetter]
  val length: Int
}

/** Two words joined by a separator ('-' or '~'). */
final case class ComposedWord(first: Word, second: Word, sep: CharacterLetter) extends Word {
  val letters = first.letters ++ List(sep) ++ second.letters
  val length = first.length + second.length + 1
  override def toString = "" + first + sep + second
}

/**
 * A plain word. Its length counts characters, counting special letters only at
 * brace level 0 and block contents per character, per BibTeX conventions.
 */
final case class SimpleWord(letters: List[PseudoLetter]) extends Word {

  def this(str: String) = this(str.toCharArray.map(CharacterLetter).toList)

  val length = letters.foldLeft(0) { (result, current) =>
    def internalCount(letter: PseudoLetter, depth: Int): Int = letter match {
      case _: CharacterLetter => 1
      case _: SpecialLetter if depth == 0 =>
        // only special characters at brace level 0 count
        1
      case BlockLetter(parts) =>
        parts.map(internalCount(_, depth + 1)).sum
      case _ => 0
    }
    result + internalCount(current, 0)
  }

  override def toString = letters.mkString
}

/** A whitespace-separated sequence of words. */
final case class Sentence(words: List[Word]) {
  override def toString = words.mkString(" ")
} | gnieh/toolxit-bibtex | src/main/scala/toolxit/bibtex/StringUtils.scala | Scala | apache-2.0 | 5,091 |
package hoconspring
import java.{lang => jl, util => ju}
import com.typesafe.config._
import org.springframework.beans.factory.annotation.Qualifier
import org.springframework.beans.factory.config.ConstructorArgumentValues.ValueHolder
import org.springframework.beans.factory.config.{BeanDefinitionHolder, ConstructorArgumentValues, RuntimeBeanNameReference, RuntimeBeanReference}
import org.springframework.beans.factory.support._
import org.springframework.beans.{MutablePropertyValues, PropertyValue}
import org.springframework.core.io.Resource
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
class HoconBeanDefinitionReader(registry: BeanDefinitionRegistry)
extends AbstractBeanDefinitionReader(registry) {
import com.typesafe.config.ConfigValueType._
import hoconspring.AttrNames._
  // Pimp ConfigValue with typed extraction via the HoconType type class.
  private implicit class ConfigValueExtensions(value: ConfigValue) {
    def as[T: HoconType] =
      implicitly[HoconType[T]].get(value)
  }

  // Maps %autowire attribute values to Spring autowire-mode constants;
  // unknown values fail fast with a descriptive error.
  private val autowireMapping = AutowireMapping.withDefault {
    v => throw new IllegalArgumentException(s"Invalid value $v for $AutowireAttr attribute")
  }

  // Same for the %dependency-check attribute.
  private val dependencyCheckMapping = DependencyCheckMapping.withDefault {
    v => throw new IllegalArgumentException(s"Invalid value $v for $DependencyCheckAttr attribute")
  }
private def setup[T](t: T)(setupFunc: T => Any) = {
setupFunc(t)
t
}
private def iterate(obj: ConfigObject)
(attrFun: (String, ConfigValue) => Any)
(propFun: (String, ConfigValue) => Any) =
obj.foreach {
case (key, _) if key.startsWith("_") =>
case (key, value) if key.startsWith("%") => attrFun(key, value)
case (key, value) => propFun(key, value)
}
  /**
   * Validates an object's "%"-attributes against the required/allowed sets and,
   * unless `props` is true, rejects plain properties. Failures throw with the
   * config origin for diagnostics.
   */
  private def validateObj(required: Set[String] = Set.empty,
    requiredAny: Set[String] = Set.empty,
    allowed: Set[String] = Set.empty,
    props: Boolean = false)(obj: ConfigObject): Unit = {
    require(required.forall(obj.containsKey),
      s"Attributes ${required.mkString(", ")} must be present in object at ${obj.origin.description}")
    require(requiredAny.isEmpty || requiredAny.exists(obj.containsKey),
      s"At least one of ${requiredAny.mkString(", ")} must be present in object at ${obj.origin.description}")
    val allAllowed = required ++ requiredAny ++ allowed
    iterate(obj) { (key, value) =>
      if (!allAllowed.contains(key))
        badAttr(key, value)
    } { (key, value) =>
      if (!props)
        badProp(key, value)
    }
  }

  /** The plain (non-"%", non-"_") properties of the object. */
  private def getProps(obj: ConfigObject) =
    obj.filterKeys(k => !k.startsWith("%") && !k.startsWith("_"))

  private def badAttr(key: String, value: ConfigValue) =
    throw new IllegalArgumentException(s"Unexpected attribute $key at ${value.origin.description}")

  private def badProp(key: String, value: ConfigValue) =
    throw new IllegalArgumentException(s"Unexpected property $key at ${value.origin.description}")

  /** Matches objects carrying at least one bean-definition attribute (other than %meta). */
  private object BeanDefinition {
    val BeanOnlyAttrs = BeanAttrs - MetaAttr
    def unapply(obj: ConfigObject) =
      if (BeanOnlyAttrs.exists(obj.as[ConfigObject].keySet.contains)) Some(obj) else None
  }

  /** Extractor base: matches objects containing the given attribute key. */
  private class ObjectWithAttributePresentExtractor(elementAttr: String) {
    def unapply(obj: ConfigObject) =
      if (obj.containsKey(elementAttr)) Some(obj) else None
  }

  // One extractor per recognized special form.
  private object ListDefinition extends ObjectWithAttributePresentExtractor(ListAttr)
  private object ArrayDefinition extends ObjectWithAttributePresentExtractor(ArrayAttr)
  private object SetDefinition extends ObjectWithAttributePresentExtractor(SetAttr)
  private object PropertiesDefinition extends ObjectWithAttributePresentExtractor(PropsAttr)
  private object ValueDefinition extends ObjectWithAttributePresentExtractor(ValueAttr)
  private object BeanReference extends ObjectWithAttributePresentExtractor(RefAttr)
  private object BeanNameReference extends ObjectWithAttributePresentExtractor(IdrefAttr)
  private object RawConfig extends ObjectWithAttributePresentExtractor(ConfigAttr)
  /**
   * Converts an arbitrary HOCON value into the corresponding Spring bean-definition
   * element, dispatching on which marker attribute is present. Extractor order
   * matters: bean definitions are tried before references, collections, etc.
   */
  private def read(value: ConfigValue): Any = value match {
    case BeanDefinition(obj) =>
      val bd = readBean(obj)
      // A named inner bean is wrapped in a holder so Spring keeps the name.
      obj.get(NameAttr).as[Option[String]] match {
        case Some(name) => new BeanDefinitionHolder(bd, name)
        case None => bd
      }
    case BeanReference(obj) => readRef(obj)
    case BeanNameReference(obj) => readIdref(obj)
    case ArrayDefinition(obj) => readArray(obj)
    case ListDefinition(obj) => readList(obj)
    case SetDefinition(obj) => readSet(obj)
    case PropertiesDefinition(obj) => readProperties(obj)
    case RawConfig(obj) => readRawConfig(obj)
    case obj: ConfigObject => readMap(obj)
    case list: ConfigList => readRawList(list)
    case _ => value.unwrapped
  }
  /** `%ref` — a runtime reference to another bean (optionally in the parent factory). */
  private def readRef(obj: ConfigObject) = {
    validateObj(required = Set(RefAttr), allowed = Set(ParentAttr))(obj)
    new RuntimeBeanReference(obj.get(RefAttr).as[String], obj.get(ParentAttr).as[Option[Boolean]].getOrElse(false))
  }

  /** `%idref` — injects the referenced bean's NAME (validated), not the bean itself. */
  private def readIdref(obj: ConfigObject) = {
    validateObj(required = Set(IdrefAttr))(obj)
    new RuntimeBeanNameReference(obj.get(IdrefAttr).as[String])
  }

  /** `%list` — a managed list with optional merging and element type. */
  private def readList(obj: ConfigObject) = {
    validateObj(required = Set(ListAttr), allowed = Set(MergeAttr, ValueTypeAttr))(obj)
    setup(new ManagedList[Any]) { list =>
      list.addAll(obj.get(ListAttr).as[ConfigList].map(read))
      list.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
      list.setElementTypeName(obj.get(ValueTypeAttr).as[Option[String]].orNull)
    }
  }

  /** `%array` — a managed array with optional merging and element type. */
  private def readArray(obj: ConfigObject) = {
    validateObj(required = Set(ArrayAttr), allowed = Set(MergeAttr, ValueTypeAttr))(obj)
    val elements = obj.get(ArrayAttr).as[ConfigList]
    val valueType = obj.get(ValueTypeAttr).as[Option[String]].getOrElse("")
    val result = new ManagedArray(valueType, elements.size)
    result.addAll(elements.map(v => read(v).asInstanceOf[AnyRef]))
    result.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
    result
  }

  /** `%set` — a managed set with optional merging and element type. */
  private def readSet(obj: ConfigObject) = {
    validateObj(required = Set(SetAttr), allowed = Set(MergeAttr, ValueTypeAttr))(obj)
    setup(new ManagedSet[Any]) { set =>
      set.addAll(obj.get(SetAttr).as[ConfigList].map(read))
      set.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
      set.setElementTypeName(obj.get(ValueTypeAttr).as[Option[String]].orNull)
    }
  }

  /** A bare HOCON list with no marker attributes — read each element recursively. */
  private def readRawList(list: ConfigList) = {
    setup(new ManagedList[Any]) { ml =>
      ml.addAll(list.map(read))
    }
  }

  /**
   * A plain object — read as a managed map. Non-string keys can be supplied via
   * the `%entries` list of {%key, %value} objects; plain properties become
   * string-keyed entries.
   */
  private def readMap(obj: ConfigObject) = {
    validateObj(allowed = Set(MergeAttr, KeyTypeAttr, ValueTypeAttr, EntriesAttr), props = true)(obj)
    setup(new ManagedMap[Any, Any]) { mm =>
      mm.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
      mm.setKeyTypeName(obj.get(KeyTypeAttr).as[Option[String]].orNull)
      mm.setValueTypeName(obj.get(ValueTypeAttr).as[Option[String]].orNull)
      obj.get(EntriesAttr).as[Option[ConfigList]].getOrElse(ju.Collections.emptyList).foreach {
        case obj: ConfigObject =>
          validateObj(required = Set(KeyAttr, ValueAttr))(obj)
          mm.put(read(obj.get(KeyAttr)), read(obj.get(ValueAttr)))
        case _ =>
          throw new IllegalArgumentException(s"Required an object at ${obj.origin.description}")
      }
      getProps(obj).foreach {
        case (key, value) => mm.put(key, read(value))
      }
    }
  }

  /** `%props` — java.util.Properties; only scalar values are allowed. */
  private def readProperties(obj: ConfigObject) = {
    validateObj(required = Set(PropsAttr), allowed = Set(MergeAttr))(obj)
    setup(new ManagedProperties) { mp =>
      mp.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
      obj.get(PropsAttr).as[Option[Config]].getOrElse(ConfigFactory.empty).entrySet.foreach {
        case entry if Set(STRING, NUMBER, BOOLEAN).contains(entry.getValue.valueType) =>
          mp.setProperty(entry.getKey, entry.getValue.unwrapped.toString)
        case entry => throw new IllegalArgumentException(s"Bad prop definition at ${entry.getValue.origin.description}")
      }
    }
  }

  /** `%config` — injects the raw Config subtree unconverted. */
  private def readRawConfig(obj: ConfigObject) = {
    validateObj(required = Set(ConfigAttr))(obj)
    obj.get(ConfigAttr).as[Config]
  }
  /**
   * Builds a GenericBeanDefinition from a HOCON object: every recognized "%"
   * attribute maps onto the corresponding Spring bean-definition setting, while
   * plain properties become either constructor arguments (when `%construct` is
   * true) or bean property values.
   */
  private def readBean(obj: ConfigObject) = {
    val bd = new GenericBeanDefinition
    val cargs = new ConstructorArgumentValues
    val propertyValues = new MutablePropertyValues
    bd.setConstructorArgumentValues(cargs)
    bd.setPropertyValues(propertyValues)
    // Keep the config origin so Spring error messages point at the HOCON source.
    bd.setResourceDescription(obj.origin.description)

    // Indexed args go to their slot; unindexed ones are generic.
    def addConstructorArg(idxAndValue: (Option[Int], ValueHolder)) = idxAndValue match {
      case (Some(idx), valueHolder) => cargs.addIndexedArgumentValue(idx, valueHolder)
      case (None, valueHolder) => cargs.addGenericArgumentValue(valueHolder)
    }

    obj.get(AbstractAttr).as[Option[Boolean]].foreach(bd.setAbstract)
    obj.get(AutowireCandidateAttr).as[Option[Boolean]].foreach(bd.setAutowireCandidate)
    obj.get(AutowireAttr).as[Option[String]].map(autowireMapping).foreach(bd.setAutowireMode)
    obj.get(ClassAttr).as[Option[String]].foreach(bd.setBeanClassName)
    readConstructorArgs(obj.get(ConstructorArgsAttr)).foreach(addConstructorArg)
    obj.get(DependencyCheckAttr).as[Option[String]].map(dependencyCheckMapping).foreach(bd.setDependencyCheck)
    obj.get(DescriptionAttr).as[Option[String]].foreach(bd.setDescription)
    obj.get(DestroyMethodAttr).as[Option[String]].foreach(bd.setDestroyMethodName)
    obj.get(DependsOnAttr).as[Option[ju.List[String]]].map(_.asScala.toArray).foreach(bd.setDependsOn)
    obj.get(FactoryBeanAttr).as[Option[String]].foreach(bd.setFactoryBeanName)
    obj.get(FactoryMethodAttr).as[Option[String]].foreach(bd.setFactoryMethodName)
    obj.get(InitMethodAttr).as[Option[String]].foreach(bd.setInitMethodName)
    bd.setLazyInit(obj.get(LazyInitAttr).as[Option[Boolean]].getOrElse(false))
    obj.get(LookupMethodsAttr).as[Option[ConfigObject]].foreach { obj =>
      validateObj(props = true)(obj)
      getProps(obj).foreach {
        case (key, value) => bd.getMethodOverrides.addOverride(new LookupOverride(key, value.as[String]))
      }
    }
    obj.get(MetaAttr).as[Option[ConfigObject]].getOrElse(ConfigFactory.empty.root).foreach {
      case (mkey, mvalue) => bd.setAttribute(mkey, mvalue.as[String])
    }
    obj.get(ParentAttr).as[Option[String]].foreach(bd.setParentName)
    obj.get(PrimaryAttr).as[Option[Boolean]].foreach(bd.setPrimary)
    obj.get(QualifiersAttr).as[Option[ju.List[ConfigObject]]].getOrElse(ju.Collections.emptyList).foreach { obj =>
      bd.addQualifier(readQualifier(obj))
    }
    obj.get(ReplacedMethodsAttr).as[Option[ju.List[ConfigObject]]].getOrElse(ju.Collections.emptyList).foreach { obj =>
      bd.getMethodOverrides.addOverride(readReplacedMethod(obj))
    }
    obj.get(ScopeAttr).as[Option[String]].foreach(bd.setScope)

    // Plain properties: constructor args when %construct, property values otherwise.
    val construct = obj.get(ConstructAttr).as[Option[Boolean]].getOrElse(false)
    getProps(obj).foreach {
      case (key, value) =>
        if (construct) {
          addConstructorArg(readConstructorArg(value, Some(key)))
        } else {
          propertyValues.addPropertyValue(readPropertyValue(key, value))
        }
    }
    bd
  }
  /**
   * Reads an autowire qualifier: optional `%type` (defaults to @Qualifier),
   * optional `%value` and arbitrary extra attributes.
   */
  private def readQualifier(obj: ConfigObject) = {
    validateObj(allowed = Set(TypeAttr, ValueAttr), props = true)(obj)
    val acq = new AutowireCandidateQualifier(obj.get(TypeAttr).as[Option[String]].getOrElse(classOf[Qualifier].getName))
    obj.get(ValueAttr).as[Option[String]].foreach(acq.setAttribute(AutowireCandidateQualifier.VALUE_KEY, _))
    getProps(obj).foreach {
      case (key, value) => acq.setAttribute(key, value.as[String])
    }
    acq
  }

  /**
   * Reads a replaced-method override: required `%name` and `%replacer`,
   * plus optional `%arg-types` to disambiguate overloads.
   */
  private def readReplacedMethod(obj: ConfigObject) = {
    validateObj(required = Set(NameAttr, ReplacerAttr), allowed = Set(ArgTypesAttr))(obj)
    val replaceOverride = new ReplaceOverride(obj.get(NameAttr).as[String], obj.get(ReplacerAttr).as[String])
    obj.get(ArgTypesAttr).as[Option[ju.List[String]]].getOrElse(ju.Collections.emptyList).foreach(replaceOverride.addTypeIdentifier)
    replaceOverride
  }
  /**
   * Reads the `%constructor-args` value: either a list (positional/indexed args)
   * or an object (named args, where each key becomes the argument name).
   * Absent value yields no arguments.
   */
  private def readConstructorArgs(value: ConfigValue) = {
    value.as[Option[Either[ConfigList, ConfigObject]]] match {
      case Some(Left(list)) =>
        list.iterator.asScala.map(configValue => readConstructorArg(configValue))
      case Some(Right(obj)) =>
        validateObj(props = true)(obj)
        getProps(obj).iterator.map { case (name, configValue) =>
          val (idxOpt, holder) = readConstructorArg(configValue)
          holder.setName(name)
          (idxOpt, holder)
        }
      case None =>
        Iterator.empty
    }
  }

  /**
   * Reads a single constructor argument. A `%value` wrapper may carry an explicit
   * index, type and name; `forcedName` (from object-form args) wins over `%name`.
   * Returns the optional index plus the ValueHolder.
   */
  private def readConstructorArg(value: ConfigValue, forcedName: Option[String] = None) = value match {
    case ValueDefinition(obj) =>
      validateObj(required = Set(ValueAttr), allowed = Set(IndexAttr, TypeAttr, NameAttr))(obj)
      val vh = new ValueHolder(read(obj.get(ValueAttr)))
      obj.get(TypeAttr).as[Option[String]].foreach(vh.setType)
      (forcedName orElse obj.get(NameAttr).as[Option[String]]).foreach(vh.setName)
      val indexOpt = obj.get(IndexAttr).as[Option[Int]]
      (indexOpt, vh)
    case _ =>
      val vh = new ValueHolder(read(value))
      forcedName.foreach(vh.setName)
      (None, vh)
  }

  /**
   * Reads a bean property value; a `%value` wrapper additionally allows
   * attaching `%meta` attributes to the property.
   */
  private def readPropertyValue(name: String, value: ConfigValue) = value match {
    case ValueDefinition(obj) =>
      validateObj(required = Set(ValueAttr), allowed = Set(MetaAttr))(obj)
      val pv = new PropertyValue(name, read(obj.get(ValueAttr)))
      obj.get(MetaAttr).as[Option[ConfigObject]].getOrElse(ConfigFactory.empty.root).foreach {
        case (mkey, mvalue) => pv.setAttribute(mkey, mvalue.as[String])
      }
      pv
    case _ =>
      new PropertyValue(name, read(value))
  }
  /**
   * Registers every property of the `beans` object as a bean definition and
   * returns the number of definitions registered. Failures are wrapped with the
   * bean name and config origin for diagnostics.
   */
  private def readBeans(obj: ConfigObject) = {
    validateObj(props = true)(obj)
    val beanDefs = getProps(obj).iterator.flatMap {
      case (key, value) =>
        try {
          value.as[Option[ConfigObject]].map(obj => (key, readBean(obj)))
        } catch {
          case e: Exception => throw new RuntimeException(
            s"Could not read definition of bean $key at ${value.origin.description}", e)
        }
    }.toVector
    beanDefs.foreach((registry.registerBeanDefinition _).tupled)
    beanDefs.size
  }

  /** Registers each `aliases` entry as alias -> target-bean-name. */
  private def readAliases(obj: ConfigObject): Unit = {
    validateObj(props = true)(obj)
    getProps(obj).foreach {
      case (key, value) => value.as[Option[String]].foreach(registry.registerAlias(_, key))
    }
  }

  /**
   * Entry point: reads the optional top-level `beans` and `aliases` objects
   * from the config and returns the number of bean definitions registered.
   */
  def loadBeanDefinitions(config: Config): Int = {
    val beans = if (config.hasPath("beans")) config.getObject("beans") else ConfigFactory.empty.root
    val aliases = if (config.hasPath("aliases")) config.getObject("aliases") else ConfigFactory.empty.root
    val result = readBeans(beans)
    readAliases(aliases)
    result
  }

  /** Loads from a Spring Resource, resolving HOCON substitutions first. */
  def loadBeanDefinitions(resource: Resource) =
    loadBeanDefinitions(ConfigFactory.parseURL(resource.getURL).resolve)
| ghik/hocon-spring | src/main/scala/hoconspring/HoconBeanDefinitionReader.scala | Scala | apache-2.0 | 15,143 |
package com.avsystem.scex.parsing
import com.avsystem.commons.misc.{AbstractValueEnum, AbstractValueEnumCompanion, EnumCtx}
import scala.annotation.tailrec
import scala.collection.immutable.SortedMap
/** Which side of an edit a length modification is attached to (Left or Right). */
final class Binding(implicit enumCtx: EnumCtx) extends AbstractValueEnum
object Binding extends AbstractValueEnumCompanion[Binding] {
  final val Left, Right: Value = new Binding
}

/**
 * A length change at `offset` in the original text: `amount` > 0 inserts,
 * < 0 removes, and `binding` says which side the change belongs to.
 */
case class Modification(offset: Int, amount: Int, binding: Binding)

/** A plain string carrying a binding, prependable to a [[PString]]. */
case class Bound(str: String, binding: Binding) {
  // Prepend this bound string, recording it as an insertion at the PString's start.
  def +(pstr: PString): PString =
    PString(str, pstr.beg, pstr.beg, Vector(Modification(pstr.beg, str.length, binding))) + pstr
}
/**
 * A "positioned string": a rewriting result together with the original span
 * [beg, end) it came from and the length modifications applied, from which a
 * position mapping between original and result offsets can be derived.
 */
case class PString(result: String, beg: Int, end: Int, mods: Vector[Modification]) {

  // Bidirectional original <-> result offset mapping, computed on first use.
  lazy val positionMapping: PositionMapping = {
    // A non-zero beg is modeled as removing the leading [0, beg) prefix.
    val normalizedMods = if (beg > 0) Modification(0, -beg, Binding.Right) :: mods.toList else mods.toList
    val (shiftMapping, reverseShiftMapping) = PString.computeMapping(normalizedMods, Nil, Nil)
    new ShiftInfoPositionMapping(shiftMapping, reverseShiftMapping)
  }

  /**
   * Concatenates two PStrings covering non-overlapping, ordered spans; any gap
   * between them is recorded as a removal bound to the right.
   */
  def +(other: PString): PString = other match {
    case PString("", _, _, Vector()) =>
      this
    case PString(otherResult, otherBeg, otherEnd, otherMods) =>
      require(end <= otherBeg)
      val newMods =
        if (end == otherBeg)
          mods ++ otherMods
        else
          (mods :+ Modification(end, end - otherBeg, Binding.Right)) ++ otherMods
      PString(result + otherResult, beg, otherEnd, newMods)
  }

  /** Appends a bound string, recorded as an insertion at this PString's end. */
  def +(other: Bound): PString =
    PString(result + other.str, beg, end, mods :+ Modification(end, other.str.length, other.binding))

  /** Appends the PString when present, otherwise returns this unchanged. */
  def +(otherOpt: Option[PString]): PString =
    otherOpt match {
      case Some(pstr) => this + pstr
      case None => this
    }

  /** Replaces the whole result: a removal of the old text plus an insertion of the new. */
  def replaceWith(replacement: String, binding: Binding): PString =
    copy(result = replacement, mods =
      Modification(beg, -result.length, binding) +: Modification(beg, replacement.length, binding) +: mods)

  /** Swaps the result text without touching span or modifications. */
  def withResult(result: String): PString =
    copy(result = result)
}
object PString {
  /**
    * Folds a list of [[Modification]]s into two sorted offset -> `ShiftInfo`
    * maps: a forward mapping (original position -> shift) and a reverse one
    * (transformed position -> negated shift). `acc`/`racc` are accumulators
    * kept newest-first so a modification at a repeated offset can be merged
    * into the head entry; they are always grown in lock-step.
    */
  @tailrec private[scex] def computeMapping(
    mods: List[Modification],
    acc: List[(Int, ShiftInfo)],
    racc: List[(Int, ShiftInfo)]
  ): (SortedMap[Int, ShiftInfo], SortedMap[Int, ShiftInfo]) =
    (mods, acc, racc) match {
      case (Modification(offset, amount, binding) :: tail, (prevOffset, prevInfo) :: accTail, (rprevOffset, rprevInfo) :: raccTail) =>
        // Same offset as the previous entry: merge; otherwise open a new
        // entry seeded with the total shift accumulated so far.
        val newAcc = if (offset == prevOffset)
          (prevOffset, prevInfo.update(amount, binding)) :: accTail
        else
          (offset, ShiftInfo(prevInfo.totalShift, amount, binding)) :: acc
        // Translate the original offset into transformed-string coordinates
        // before recording the reverse (negated) shift.
        val roffset = offset + (if (offset == prevOffset) prevInfo.totalPrevShift else prevInfo.totalShift)
        val newRacc = if (roffset == rprevOffset)
          (rprevOffset, rprevInfo.update(-amount, binding)) :: raccTail
        else
          (roffset, ShiftInfo(rprevInfo.totalShift, -amount, binding)) :: racc
        computeMapping(tail, newAcc, newRacc)
      case (Modification(offset, amount, binding) :: tail, Nil, Nil) =>
        // First modification: seed both accumulators symmetrically.
        computeMapping(tail, List((offset, ShiftInfo(0, amount, binding))), List((offset, ShiftInfo(0, -amount, binding))))
      case (Nil, _, _) =>
        (SortedMap(acc: _*), SortedMap(racc: _*))
      case tuple =>
        // acc and racc always grow together, so one being empty while the
        // other is not indicates a programming error.
        throw new IllegalArgumentException(tuple.toString())
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode
import org.apache.flink.table.planner.runtime.utils.TimeTestUtil.EventTimeProcessOperator
import org.apache.flink.table.planner.runtime.utils.UserDefinedFunctionTestUtils.{CountNullNonNull, CountPairs, LargerThanCount}
import org.apache.flink.table.planner.runtime.utils.{StreamingWithStateTestBase, TestData, TestingAppendSink}
import org.apache.flink.types.Row
import org.junit.Assert._
import org.junit._
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import scala.collection.mutable
@RunWith(classOf[Parameterized])
class OverWindowITCase(mode: StateBackendMode) extends StreamingWithStateTestBase(mode) {
val data = List(
(1L, 1, "Hello"),
(2L, 2, "Hello"),
(3L, 3, "Hello"),
(4L, 4, "Hello"),
(5L, 5, "Hello"),
(6L, 6, "Hello"),
(7L, 7, "Hello World"),
(8L, 8, "Hello World"),
(20L, 20, "Hello World"))
@Test
def testProcTimeBoundedPartitionedRowsOver(): Unit = {
val t = failingDataSource(TestData.tupleData5)
.toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
tEnv.registerTable("MyTable", t)
val sqlQuery = "SELECT a, " +
" SUM(c) OVER (" +
" PARTITION BY a ORDER BY proctime ROWS BETWEEN 4 PRECEDING AND CURRENT ROW), " +
" MIN(c) OVER (" +
" PARTITION BY a ORDER BY proctime ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) " +
"FROM MyTable"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"1,0,0",
"2,1,1",
"2,3,1",
"3,3,3",
"3,7,3",
"3,12,3",
"4,6,6",
"4,13,6",
"4,21,6",
"4,30,6",
"5,10,10",
"5,21,10",
"5,33,10",
"5,46,10",
"5,60,10")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeBoundedPartitionedRowsOverWithBulitinProctime(): Unit = {
val t = failingDataSource(TestData.tupleData5).toTable(tEnv, 'a, 'b, 'c, 'd, 'e)
tEnv.registerTable("MyTable", t)
val sqlQuery = "SELECT a, " +
" SUM(c) OVER (" +
" PARTITION BY a ORDER BY proctime() ROWS BETWEEN 4 PRECEDING AND CURRENT ROW), " +
" MIN(c) OVER (" +
" PARTITION BY a ORDER BY proctime() ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) " +
"FROM MyTable"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"1,0,0",
"2,1,1",
"2,3,1",
"3,3,3",
"3,7,3",
"3,12,3",
"4,6,6",
"4,13,6",
"4,21,6",
"4,30,6",
"5,10,10",
"5,21,10",
"5,33,10",
"5,46,10",
"5,60,10")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeBoundedPartitionedRowsOverWithBuiltinProctime(): Unit = {
val t = failingDataSource(TestData.tupleData5).toTable(tEnv, 'a, 'b, 'c, 'd, 'e)
tEnv.registerTable("MyTable", t)
val sqlQuery = "SELECT a, " +
" SUM(c) OVER (" +
" PARTITION BY a ORDER BY proctime() ROWS BETWEEN 4 PRECEDING AND CURRENT ROW), " +
" MIN(c) OVER (" +
" PARTITION BY a ORDER BY proctime() ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) " +
"FROM MyTable"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"1,0,0",
"2,1,1",
"2,3,1",
"3,3,3",
"3,7,3",
"3,12,3",
"4,6,6",
"4,13,6",
"4,21,6",
"4,30,6",
"5,10,10",
"5,21,10",
"5,33,10",
"5,46,10",
"5,60,10")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeBoundedNonPartitionedRowsOver(): Unit = {
val t = failingDataSource(TestData.tupleData5)
.toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
tEnv.registerTable("MyTable", t)
val sqlQuery = "SELECT a, " +
" first_value(d) OVER (" +
" ORDER BY proctime ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), " +
" last_value(d) OVER (" +
" ORDER BY proctime ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), " +
" SUM(c) OVER (" +
" ORDER BY proctime ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), " +
" MIN(c) OVER (" +
" ORDER BY proctime ROWS BETWEEN 10 PRECEDING AND CURRENT ROW) " +
"FROM MyTable"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"1,Hallo,Hallo,0,0",
"2,Hallo,Hallo Welt,1,0",
"2,Hallo,Hallo Welt wie,3,0",
"3,Hallo,Hallo Welt wie gehts?,6,0",
"3,Hallo,ABC,10,0",
"3,Hallo,BCD,15,0",
"4,Hallo,CDE,21,0",
"4,Hallo,DEF,28,0",
"4,Hallo,EFG,36,0",
"4,Hallo,FGH,45,0",
"5,Hallo,GHI,55,0",
"5,Hallo Welt,HIJ,66,1",
"5,Hallo Welt wie,IJK,77,2",
"5,Hallo Welt wie gehts?,JKL,88,3",
"5,ABC,KLM,99,4")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeUnboundedPartitionedRangeOver(): Unit = {
val t1 = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c, 'proctime.proctime)
tEnv.registerTable("T1", t1)
val sqlQuery = "SELECT " +
"c, " +
"first_value(b) OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding)," +
"last_value(b) OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding)," +
"count(a) OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding), " +
"sum(a) OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding) " +
"from T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello World,7,7,1,7", "Hello World,7,8,2,15", "Hello World,7,20,3,35",
"Hello,1,1,1,1", "Hello,1,2,2,3", "Hello,1,3,3,6", "Hello,1,4,4,10", "Hello,1,5,5,15",
"Hello,1,6,6,21")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeUnboundedPartitionedRowsOver(): Unit = {
val t1 = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c, 'proctime.proctime)
tEnv.registerTable("T1", t1)
val sql =
"""
|SELECT c, sum1, maxnull
|FROM (
| SELECT c,
| max(cast(null as varchar)) OVER
| (PARTITION BY c ORDER BY proctime ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW)
| as maxnull,
| sum(1) OVER
| (PARTITION BY c ORDER BY proctime ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW)
| as sum1
| FROM T1
|)
""".stripMargin
val sink = new TestingAppendSink
tEnv.sqlQuery(sql).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello World,1,null", "Hello World,2,null", "Hello World,3,null",
"Hello,1,null", "Hello,2,null", "Hello,3,null", "Hello,4,null",
"Hello,5,null", "Hello,6,null")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeUnboundedNonPartitionedRangeOver(): Unit = {
val t1 = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c, 'proctime.proctime)
tEnv.registerTable("T1", t1)
val sqlQuery = "SELECT " +
"c, " +
"count(a) OVER (ORDER BY proctime RANGE UNBOUNDED preceding), " +
"sum(a) OVER (ORDER BY proctime RANGE UNBOUNDED preceding) " +
"from T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello World,7,28", "Hello World,8,36", "Hello World,9,56",
"Hello,1,1", "Hello,2,3", "Hello,3,6", "Hello,4,10", "Hello,5,15", "Hello,6,21")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeUnboundedNonPartitionedRowsOver(): Unit = {
val t1 = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c, 'proctime.proctime)
tEnv.registerTable("T1", t1)
val sqlQuery = "SELECT " +
"count(a) OVER (ORDER BY proctime ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW) " +
"from T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List("1", "2", "3", "4", "5", "6", "7", "8", "9")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeBoundedPartitionedRangeOver(): Unit = {
val data: Seq[Either[(Long, (Long, Int, String)), Long]] = Seq(
Left((1500L, (1L, 15, "Hello"))),
Left((1600L, (1L, 16, "Hello"))),
Left((1000L, (1L, 1, "Hello"))),
Left((2000L, (2L, 2, "Hello"))),
Right(1000L),
Left((2000L, (2L, 2, "Hello"))),
Left((2000L, (2L, 3, "Hello"))),
Left((3000L, (3L, 3, "Hello"))),
Right(2000L),
Left((4000L, (4L, 4, "Hello"))),
Right(3000L),
Left((5000L, (5L, 5, "Hello"))),
Right(5000L),
Left((6000L, (6L, 6, "Hello"))),
Left((6500L, (6L, 65, "Hello"))),
Right(7000L),
Left((9000L, (6L, 9, "Hello"))),
Left((9500L, (6L, 18, "Hello"))),
Left((9000L, (6L, 9, "Hello"))),
Right(10000L),
Left((10000L, (7L, 7, "Hello World"))),
Left((11000L, (7L, 17, "Hello World"))),
Left((11000L, (7L, 77, "Hello World"))),
Right(12000L),
Left((14000L, (7L, 18, "Hello World"))),
Right(14000L),
Left((15000L, (8L, 8, "Hello World"))),
Right(17000L),
Left((20000L, (20L, 20, "Hello World"))),
Right(19000L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Long, Int, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
tEnv.registerFunction("LTCNT", new LargerThanCount)
val sqlQuery = "SELECT " +
" c, b, " +
" LTCNT(a, CAST('4' AS BIGINT)) OVER (PARTITION BY c ORDER BY rowtime RANGE " +
" BETWEEN INTERVAL '1' SECOND PRECEDING AND CURRENT ROW), " +
" COUNT(a) OVER (PARTITION BY c ORDER BY rowtime RANGE " +
" BETWEEN INTERVAL '1' SECOND PRECEDING AND CURRENT ROW), " +
" SUM(a) OVER (PARTITION BY c ORDER BY rowtime RANGE " +
" BETWEEN INTERVAL '1' SECOND PRECEDING AND CURRENT ROW)" +
" FROM T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello,1,0,1,1",
"Hello,15,0,2,2",
"Hello,16,0,3,3",
"Hello,2,0,6,9",
"Hello,3,0,6,9",
"Hello,2,0,6,9",
"Hello,3,0,4,9",
"Hello,4,0,2,7",
"Hello,5,1,2,9",
"Hello,6,2,2,11",
"Hello,65,2,2,12",
"Hello,9,2,2,12",
"Hello,9,2,2,12",
"Hello,18,3,3,18",
"Hello World,17,3,3,21",
"Hello World,7,1,1,7",
"Hello World,77,3,3,21",
"Hello World,18,1,1,7",
"Hello World,8,2,2,15",
"Hello World,20,1,1,20")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeBoundedPartitionedRowsOver(): Unit = {
val data: Seq[Either[(Long, (Long, Int, String)), Long]] = Seq(
Left((1L, (1L, 1, "Hello"))),
Left((2L, (2L, 2, "Hello"))),
Left((1L, (1L, 1, "Hello"))),
Left((2L, (2L, 2, "Hello"))),
Left((2L, (2L, 2, "Hello"))),
Left((1L, (1L, 1, "Hello"))),
Left((3L, (7L, 7, "Hello World"))),
Left((1L, (7L, 7, "Hello World"))),
Left((1L, (7L, 7, "Hello World"))),
Right(2L),
Left((3L, (3L, 3, "Hello"))),
Left((4L, (4L, 4, "Hello"))),
Left((5L, (5L, 5, "Hello"))),
Left((6L, (6L, 6, "Hello"))),
Left((20L, (20L, 20, "Hello World"))),
Right(6L),
Left((8L, (8L, 8, "Hello World"))),
Left((7L, (7L, 7, "Hello World"))),
Right(20L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Long, Int, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
tEnv.registerFunction("LTCNT", new LargerThanCount)
val sqlQuery = "SELECT " +
" c, a, " +
" LTCNT(a, CAST('4' AS BIGINT)) " +
" OVER (PARTITION BY c ORDER BY rowtime ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), " +
" COUNT(1) " +
" OVER (PARTITION BY c ORDER BY rowtime ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), " +
" SUM(a) " +
" OVER (PARTITION BY c ORDER BY rowtime ROWS BETWEEN 2 PRECEDING AND CURRENT ROW) " +
"FROM T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello,1,0,1,1", "Hello,1,0,2,2", "Hello,1,0,3,3",
"Hello,2,0,3,4", "Hello,2,0,3,5", "Hello,2,0,3,6",
"Hello,3,0,3,7", "Hello,4,0,3,9", "Hello,5,1,3,12",
"Hello,6,2,3,15",
"Hello World,7,1,1,7", "Hello World,7,2,2,14", "Hello World,7,3,3,21",
"Hello World,7,3,3,21", "Hello World,8,3,3,22", "Hello World,20,3,3,35")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeBoundedNonPartitionedRangeOver(): Unit = {
val data: Seq[Either[(Long, (Long, Int, String)), Long]] = Seq(
Left((1500L, (1L, 15, "Hello"))),
Left((1600L, (1L, 16, "Hello"))),
Left((1000L, (1L, 1, "Hello"))),
Left((2000L, (2L, 2, "Hello"))),
Right(1000L),
Left((2000L, (2L, 2, "Hello"))),
Left((2000L, (2L, 3, "Hello"))),
Left((3000L, (3L, 3, "Hello"))),
Right(2000L),
Left((4000L, (4L, 4, "Hello"))),
Right(3000L),
Left((5000L, (5L, 5, "Hello"))),
Right(5000L),
Left((6000L, (6L, 6, "Hello"))),
Left((6500L, (6L, 65, "Hello"))),
Right(7000L),
Left((9000L, (6L, 9, "Hello"))),
Left((9500L, (6L, 18, "Hello"))),
Left((9000L, (6L, 9, "Hello"))),
Right(10000L),
Left((10000L, (7L, 7, "Hello World"))),
Left((11000L, (7L, 17, "Hello World"))),
Left((11000L, (7L, 77, "Hello World"))),
Right(12000L),
Left((14000L, (7L, 18, "Hello World"))),
Right(14000L),
Left((15000L, (8L, 8, "Hello World"))),
Right(17000L),
Left((20000L, (20L, 20, "Hello World"))),
Right(19000L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Long, Int, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
val sqlQuery = "SELECT " +
" c, b, " +
" COUNT(a) " +
" OVER (ORDER BY rowtime RANGE BETWEEN INTERVAL '1' SECOND PRECEDING AND CURRENT ROW), " +
" SUM(a) " +
" OVER (ORDER BY rowtime RANGE BETWEEN INTERVAL '1' SECOND PRECEDING AND CURRENT ROW) " +
" FROM T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello,1,1,1", "Hello,15,2,2", "Hello,16,3,3",
"Hello,2,6,9", "Hello,3,6,9", "Hello,2,6,9",
"Hello,3,4,9",
"Hello,4,2,7",
"Hello,5,2,9",
"Hello,6,2,11", "Hello,65,2,12",
"Hello,9,2,12", "Hello,9,2,12", "Hello,18,3,18",
"Hello World,7,4,25", "Hello World,17,3,21", "Hello World,77,3,21", "Hello World,18,1,7",
"Hello World,8,2,15",
"Hello World,20,1,20")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeBoundedNonPartitionedRowsOver(): Unit = {
val data: Seq[Either[(Long, (Long, Int, String)), Long]] = Seq(
Left((2L, (2L, 2, "Hello"))),
Left((2L, (2L, 2, "Hello"))),
Left((1L, (1L, 1, "Hello"))),
Left((1L, (1L, 1, "Hello"))),
Left((2L, (2L, 2, "Hello"))),
Left((1L, (1L, 1, "Hello"))),
Left((20L, (20L, 20, "Hello World"))), // early row
Right(3L),
Left((2L, (2L, 2, "Hello"))), // late row
Left((3L, (3L, 3, "Hello"))),
Left((4L, (4L, 4, "Hello"))),
Left((5L, (5L, 5, "Hello"))),
Left((6L, (6L, 6, "Hello"))),
Left((7L, (7L, 7, "Hello World"))),
Right(7L),
Left((9L, (9L, 9, "Hello World"))),
Left((8L, (8L, 8, "Hello World"))),
Left((8L, (8L, 8, "Hello World"))),
Right(20L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Long, Int, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
val sqlQuery = "SELECT " +
"c, a, " +
" COUNT(a) OVER (ORDER BY rowtime ROWS BETWEEN 2 preceding AND CURRENT ROW), " +
" SUM(a) OVER (ORDER BY rowtime ROWS BETWEEN 2 preceding AND CURRENT ROW) " +
"FROM T1"
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
"Hello,1,1,1", "Hello,1,2,2", "Hello,1,3,3",
"Hello,2,3,4", "Hello,2,3,5", "Hello,2,3,6",
"Hello,3,3,7",
"Hello,4,3,9", "Hello,5,3,12",
"Hello,6,3,15", "Hello World,7,3,18",
"Hello World,8,3,21", "Hello World,8,3,23",
"Hello World,9,3,25",
"Hello World,20,3,37")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeUnBoundedPartitionedRangeOver(): Unit = {
val sqlQuery = "SELECT a, b, c, " +
" LTCNT(b, CAST('4' AS BIGINT)) OVER(" +
" PARTITION BY a ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" SUM(b) OVER (" +
" PARTITION BY a ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" COUNT(b) OVER (" +
" PARTITION BY a ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" AVG(b) OVER (" +
" PARTITION BY a ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" MAX(b) OVER (" +
" PARTITION BY a ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" MIN(b) OVER (" +
" PARTITION BY a ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) " +
"FROM T1"
val data: Seq[Either[(Long, (Int, Long, String)), Long]] = Seq(
Left(14000005L, (1, 1L, "Hi")),
Left(14000000L, (2, 1L, "Hello")),
Left(14000002L, (1, 1L, "Hello")),
Left(14000002L, (1, 2L, "Hello")),
Left(14000002L, (1, 3L, "Hello world")),
Left(14000003L, (2, 2L, "Hello world")),
Left(14000003L, (2, 3L, "Hello world")),
Right(14000020L),
Left(14000021L, (1, 4L, "Hello world")),
Left(14000022L, (1, 5L, "Hello world")),
Left(14000022L, (1, 6L, "Hello world")),
Left(14000022L, (1, 7L, "Hello world")),
Left(14000023L, (2, 4L, "Hello world")),
Left(14000023L, (2, 5L, "Hello world")),
Right(14000030L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Int, Long, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
tEnv.registerFunction("LTCNT", new LargerThanCount)
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
s"1,1,Hello,0,6,3,${6/3},3,1",
s"1,2,Hello,0,6,3,${6/3},3,1",
s"1,3,Hello world,0,6,3,${6/3},3,1",
s"1,1,Hi,0,7,4,${7/4},3,1",
s"2,1,Hello,0,1,1,${1/1},1,1",
s"2,2,Hello world,0,6,3,${6/3},3,1",
s"2,3,Hello world,0,6,3,${6/3},3,1",
s"1,4,Hello world,0,11,5,${11/5},4,1",
s"1,5,Hello world,3,29,8,${29/8},7,1",
s"1,6,Hello world,3,29,8,${29/8},7,1",
s"1,7,Hello world,3,29,8,${29/8},7,1",
s"2,4,Hello world,1,15,5,${15/5},5,1",
s"2,5,Hello world,1,15,5,${15/5},5,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeUnBoundedPartitionedRowsOver(): Unit = {
val sqlQuery = "SELECT a, b, c, " +
"LTCNT(b, CAST('4' AS BIGINT)) over(" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"SUM(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"count(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"avg(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"max(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"min(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row) " +
"from T1"
val data: Seq[Either[(Long, (Int, Long, String)), Long]] = Seq(
Left(14000005L, (1, 1L, "Hi")),
Left(14000000L, (2, 1L, "Hello")),
Left(14000002L, (3, 1L, "Hello")),
Left(14000003L, (1, 2L, "Hello")),
Left(14000004L, (1, 3L, "Hello world")),
Left(14000007L, (3, 2L, "Hello world")),
Left(14000008L, (2, 2L, "Hello world")),
Right(14000010L),
Left(14000012L, (1, 5L, "Hello world")),
Left(14000021L, (1, 6L, "Hello world")),
Left(14000023L, (2, 5L, "Hello world")),
Right(14000020L),
Left(14000024L, (3, 5L, "Hello world")),
Left(14000026L, (1, 7L, "Hello world")),
Left(14000025L, (1, 8L, "Hello world")),
Left(14000022L, (1, 9L, "Hello world")),
Right(14000030L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Int, Long, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
tEnv.registerFunction("LTCNT", new LargerThanCount)
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = mutable.MutableList(
s"1,2,Hello,0,2,1,${2/1},2,2",
s"1,3,Hello world,0,5,2,${5/2},3,2",
s"1,1,Hi,0,6,3,${6/3},3,1",
s"2,1,Hello,0,1,1,${1/1},1,1",
s"2,2,Hello world,0,3,2,${3/2},2,1",
s"3,1,Hello,0,1,1,${1/1},1,1",
s"3,2,Hello world,0,3,2,${3/2},2,1",
s"1,5,Hello world,1,11,4,${11/4},5,1",
s"1,6,Hello world,2,17,5,${17/5},6,1",
s"1,9,Hello world,3,26,6,${26/6},9,1",
s"1,8,Hello world,4,34,7,${34/7},9,1",
s"1,7,Hello world,5,41,8,${41/8},9,1",
s"2,5,Hello world,1,8,3,${8/3},5,1",
s"3,5,Hello world,1,8,3,${8/3},5,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeUnBoundedNonPartitionedRangeOver(): Unit = {
val sqlQuery = "SELECT a, b, c, " +
" SUM(b) OVER (ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" COUNT(b) OVER (ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" AVG(b) OVER (ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" MAX(b) OVER (ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" MIN(b) OVER (ORDER BY rowtime RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) " +
"FROM T1"
val data: Seq[Either[(Long, (Int, Long, String)), Long]] = Seq(
Left(14000005L, (1, 1L, "Hi")),
Left(14000000L, (2, 1L, "Hello")),
Left(14000002L, (1, 1L, "Hello")),
Left(14000002L, (1, 2L, "Hello")),
Left(14000002L, (1, 3L, "Hello world")),
Left(14000003L, (2, 2L, "Hello world")),
Left(14000003L, (2, 3L, "Hello world")),
Right(14000020L),
Left(14000021L, (1, 4L, "Hello world")),
Left(14000022L, (1, 5L, "Hello world")),
Left(14000022L, (1, 6L, "Hello world")),
Left(14000022L, (1, 7L, "Hello world")),
Left(14000023L, (2, 4L, "Hello world")),
Left(14000023L, (2, 5L, "Hello world")),
Right(14000030L))
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Int, Long, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = List(
s"2,1,Hello,1,1,${1/1},1,1",
s"1,1,Hello,7,4,${7/4},3,1",
s"1,2,Hello,7,4,${7/4},3,1",
s"1,3,Hello world,7,4,${7/4},3,1",
s"2,2,Hello world,12,6,${12/6},3,1",
s"2,3,Hello world,12,6,${12/6},3,1",
s"1,1,Hi,13,7,${13/7},3,1",
s"1,4,Hello world,17,8,${17/8},4,1",
s"1,5,Hello world,35,11,${35/11},7,1",
s"1,6,Hello world,35,11,${35/11},7,1",
s"1,7,Hello world,35,11,${35/11},7,1",
s"2,4,Hello world,44,13,${44/13},7,1",
s"2,5,Hello world,44,13,${44/13},7,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeUnBoundedNonPartitionedRowsOver(): Unit = {
val sqlQuery = "SELECT a, b, c, " +
" SUM(b) OVER (ORDER BY rowtime ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" COUNT(b) OVER (ORDER BY rowtime ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" AVG(b) OVER (ORDER BY rowtime ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" MAX(b) OVER (ORDER BY rowtime ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), " +
" MIN(b) OVER (ORDER BY rowtime ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) " +
"FROM T1"
val data: Seq[Either[(Long, (Int, Long, String)), Long]] = Seq(
Left(14000005L, (1, 1L, "Hi")),
Left(14000000L, (2, 2L, "Hello")),
Left(14000002L, (3, 5L, "Hello")),
Left(14000003L, (1, 3L, "Hello")),
Left(14000004L, (3, 7L, "Hello world")),
Left(14000007L, (4, 9L, "Hello world")),
Left(14000008L, (5, 8L, "Hello world")),
Right(14000010L),
// this element will be discard because it is late
Left(14000008L, (6, 8L, "Hello world")),
Right(14000020L),
Left(14000021L, (6, 8L, "Hello world")),
Right(14000030L)
)
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Int, Long, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = mutable.MutableList(
s"2,2,Hello,2,1,${2/1},2,2",
s"3,5,Hello,7,2,${7/2},5,2",
s"1,3,Hello,10,3,${10/3},5,2",
s"3,7,Hello world,17,4,${17/4},7,2",
s"1,1,Hi,18,5,${18/5},7,1",
s"4,9,Hello world,27,6,${27/6},9,1",
s"5,8,Hello world,35,7,${35/7},9,1",
s"6,8,Hello world,43,8,${43/8},9,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
/** test sliding event-time unbounded window with partition by **/
@Test
def testRowTimeUnBoundedPartitionedRowsOver2(): Unit = {
val sqlQuery = "SELECT a, b, c, " +
"SUM(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"count(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"avg(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"max(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row), " +
"min(b) over (" +
"partition by a order by rowtime rows between unbounded preceding and current row) " +
"from T1"
val data: Seq[Either[(Long, (Int, Long, String)), Long]] = Seq(
Left(14000005L, (1, 1L, "Hi")),
Left(14000000L, (2, 1L, "Hello")),
Left(14000002L, (3, 1L, "Hello")),
Left(14000003L, (1, 2L, "Hello")),
Left(14000004L, (1, 3L, "Hello world")),
Left(14000007L, (3, 2L, "Hello world")),
Left(14000008L, (2, 2L, "Hello world")),
Right(14000010L),
// the next 3 elements are late
Left(14000008L, (1, 4L, "Hello world")),
Left(14000008L, (2, 3L, "Hello world")),
Left(14000008L, (3, 3L, "Hello world")),
Left(14000012L, (1, 5L, "Hello world")),
Right(14000020L),
Left(14000021L, (1, 6L, "Hello world")),
// the next 3 elements are late
Left(14000019L, (1, 6L, "Hello world")),
Left(14000018L, (2, 4L, "Hello world")),
Left(14000018L, (3, 4L, "Hello world")),
Left(14000022L, (2, 5L, "Hello world")),
Left(14000022L, (3, 5L, "Hello world")),
Left(14000024L, (1, 7L, "Hello world")),
Left(14000023L, (1, 8L, "Hello world")),
Left(14000021L, (1, 9L, "Hello world")),
Right(14000030L)
)
val source = failingDataSource(data)
val t1 = source.transform("TimeAssigner", new EventTimeProcessOperator[(Int, Long, String)])
.setParallelism(source.parallelism)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("T1", t1)
val sink = new TestingAppendSink
tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
env.execute()
val expected = List(
s"1,2,Hello,2,1,${2/1},2,2",
s"1,3,Hello world,5,2,${5/2},3,2",
s"1,1,Hi,6,3,${6/3},3,1",
s"2,1,Hello,1,1,${1/1},1,1",
s"2,2,Hello world,3,2,${3/2},2,1",
s"3,1,Hello,1,1,${1/1},1,1",
s"3,2,Hello world,3,2,${3/2},2,1",
s"1,5,Hello world,11,4,${11/4},5,1",
s"1,6,Hello world,17,5,${17/5},6,1",
s"1,9,Hello world,26,6,${26/6},9,1",
s"1,8,Hello world,34,7,${34/7},9,1",
s"1,7,Hello world,41,8,${41/8},9,1",
s"2,5,Hello world,8,3,${8/3},5,1",
s"3,5,Hello world,8,3,${8/3},5,1"
)
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeDistinctUnboundedPartitionedRowsOver(): Unit = {
val t = failingDataSource(TestData.tupleData5)
.toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
tEnv.registerTable("MyTable", t)
val sqlQuery = "SELECT a, " +
" COUNT(e) OVER (" +
" PARTITION BY a ORDER BY proctime RANGE UNBOUNDED preceding), " +
" SUM(DISTINCT e) OVER (" +
" PARTITION BY a ORDER BY proctime RANGE UNBOUNDED preceding), " +
" MIN(DISTINCT e) OVER (" +
" PARTITION BY a ORDER BY proctime RANGE UNBOUNDED preceding) " +
"FROM MyTable"
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List(
"1,1,1,1",
"2,1,2,2",
"2,2,3,1",
"3,1,2,2",
"3,2,2,2",
"3,3,5,2",
"4,1,2,2",
"4,2,3,1",
"4,3,3,1",
"4,4,3,1",
"5,1,1,1",
"5,2,4,1",
"5,3,4,1",
"5,4,6,1",
"5,5,6,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowTimeDistinctUnboundedPartitionedRangeOverWithNullValues(): Unit = {
val data = List(
(1L, 1, null),
(2L, 1, null),
(3L, 2, null),
(4L, 1, "Hello"),
(5L, 1, "Hello"),
(6L, 2, "Hello"),
(7L, 1, "Hello World"),
(8L, 2, "Hello World"),
(9L, 2, "Hello World"),
(10L, 1, null))
// for sum aggregation ensure that every time the order of each element is consistent
env.setParallelism(1)
val table = failingDataSource(data)
.assignAscendingTimestamps(_._1)
.toTable(tEnv, 'a, 'b, 'c, 'rowtime.rowtime)
tEnv.registerTable("MyTable", table)
tEnv.registerFunction("CntNullNonNull", new CountNullNonNull)
val sqlQuery = "SELECT " +
" c, " +
" b, " +
" COUNT(DISTINCT c) " +
" OVER (PARTITION BY b ORDER BY rowtime RANGE UNBOUNDED preceding), " +
" CntNullNonNull(DISTINCT c) " +
" OVER (PARTITION BY b ORDER BY rowtime RANGE UNBOUNDED preceding)" +
"FROM " +
" MyTable"
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List(
"null,1,0,0|1", "null,1,0,0|1", "null,2,0,0|1", "null,1,2,2|1",
"Hello,1,1,1|1", "Hello,1,1,1|1", "Hello,2,1,1|1",
"Hello World,1,2,2|1", "Hello World,2,2,2|1", "Hello World,2,2,2|1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeDistinctBoundedPartitionedRowsOver(): Unit = {
val t = failingDataSource(TestData.tupleData5)
.toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
tEnv.registerTable("MyTable", t)
val sqlQuery = "SELECT a, " +
" SUM(DISTINCT e) OVER (" +
" PARTITION BY a ORDER BY proctime ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), " +
" MIN(DISTINCT e) OVER (" +
" PARTITION BY a ORDER BY proctime ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), " +
" COLLECT(DISTINCT e) OVER (" +
" PARTITION BY a ORDER BY proctime ROWS BETWEEN 3 PRECEDING AND CURRENT ROW) " +
"FROM MyTable"
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List(
"1,1,1,{1=1}",
"2,2,2,{2=1}",
"2,3,1,{1=1, 2=1}",
"3,2,2,{2=1}",
"3,2,2,{2=1}",
"3,5,2,{2=1, 3=1}",
"4,2,2,{2=1}",
"4,3,1,{1=1, 2=1}",
"4,3,1,{1=1, 2=1}",
"4,3,1,{1=1, 2=1}",
"5,1,1,{1=1}",
"5,4,1,{1=1, 3=1}",
"5,4,1,{1=1, 3=1}",
"5,6,1,{1=1, 2=1, 3=1}",
"5,5,2,{2=1, 3=1}")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testProcTimeDistinctPairWithNulls(): Unit = {
  // Pairs deliberately containing nulls in either position, to verify that the
  // user-defined PairCount aggregate treats null as a distinct value.
  val data = List(
    ("A", null),
    ("A", null),
    ("B", null),
    (null, "Hello"),
    ("A", "Hello"),
    ("A", "Hello"),
    (null, "Hello World"),
    (null, "Hello World"),
    ("A", "Hello World"),
    ("B", "Hello World"))
  // Parallelism 1 keeps arrival order deterministic so the unsorted
  // assertEquals at the bottom is valid.
  env.setParallelism(1)
  val table = env.fromCollection(data).toTable(tEnv, 'a, 'b, 'proctime.proctime)
  tEnv.registerTable("MyTable", table)
  tEnv.registerFunction("PairCount", new CountPairs)
  // Same aggregate with and without DISTINCT over an unbounded proctime range.
  val sqlQuery = "SELECT a, b, " +
    " PairCount(a, b) OVER (ORDER BY proctime RANGE UNBOUNDED preceding), " +
    " PairCount(DISTINCT a, b) OVER (ORDER BY proctime RANGE UNBOUNDED preceding) " +
    "FROM MyTable"
  val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
  val sink = new TestingAppendSink
  result.addSink(sink)
  env.execute()
  // Third column: running count of all pairs; fourth: running count of
  // distinct pairs (null-containing pairs counted as their own values).
  val expected = List(
    "A,null,1,1",
    "A,null,2,1",
    "B,null,3,2",
    "null,Hello,4,3",
    "A,Hello,5,4",
    "A,Hello,6,4",
    "null,Hello World,7,5",
    "null,Hello World,8,5",
    "A,Hello World,9,6",
    "B,Hello World,10,7")
  assertEquals(expected, sink.getAppendResults)
}
}
| GJL/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/OverWindowITCase.scala | Scala | apache-2.0 | 36,421 |
package de.uniulm.dds.defaultimpl
import java.util.Comparator
import de.uniulm.dds.base.Variable
/**
 * Configuration parameters for contexts in the default implementation.
 * Created by Felix on 03.06.2014.
 *
 * @param variableOrder the comparator that specifies variable order. For variables <code>a</code> and <code>b</code>,
 *                      <code>a</code> will always come before <code>b</code> in a diagram of this context iff
 *                      <code>variableOrder.compare(a, b) < 0</code>. The comparator must be consistent with equals.
 * @param restrictCacheSize the size of the restrict cache (defaults to 1,000,000 entries)
 * @param binaryOpCacheSize the size of the binary operation cache (defaults to 1,000,000 entries)
 * @param unaryOpCacheSize the size of the unary operation cache (defaults to 1,000,000 entries)
 * @tparam V the type of the elements of variable domains
 * @tparam T the type of leaf values of the diagrams
 */
case class Parameters[V, T](variableOrder: Comparator[Variable[V]], restrictCacheSize: Int = 1000000, binaryOpCacheSize: Int = 1000000, unaryOpCacheSize: Int = 1000000)
| uulm-ai/scadd | src/main/scala/de/uniulm/dds/defaultimpl/Parameters.scala | Scala | mit | 1,039 |
package TAPL
import TAPL.Lib._
/** Supplies a parser for kinds (K). */
trait KParser[K] {
  val pK: Parser[K]
}

/** Combined parser interface for expressions (E), types (T) and kinds (K). */
trait ETKParser[E, T, K] extends ETParser[E, T] with KParser[K]
/**
 * The omega layer: kinds, universal/existential types with kind annotations,
 * type-level abstraction/application, and type abstraction/application in terms.
 */
object Omega {

  /** Object-algebra interface for omega constructs over expressions E, types T and kinds K. */
  trait Alg[E, T, K] {
    def KnStar(): K
    def KnArr(k1: K, k2: K): K
    def TyAll(x: String, k: K, t: T): T
    def TySome(x: String, k: K, t: T): T
    def TmTAbs(x: String, k: K, e: E): E
    def TmTApp(e: E, t: T): E
    def TyAbs(x: String, k: K, t: T): T
    def TyApp(t1: T, t2: T): T
  }

  /** Pretty-printing algebra: renders every construct as a String. */
  trait Print extends Alg[String, String, String] {
    def KnStar(): String = "Star"
    def KnArr(k1: String, k2: String): String = k1 + "=>" + k2
    def TyAll(x: String, k: String, t: String): String = "All " + x + ":" + k + "." + t
    def TySome(x: String, k: String, t: String): String = "{Some " + x + ":" + k + "," + t + "}"
    def TmTAbs(x: String, k: String, e: String): String = "\\\\(" + x + ":" + k + ")." + e
    def TmTApp(e: String, t: String): String = e + " [" + t + "]"
    def TyAbs(x: String, k: String, t: String): String = "\\\\(" + x + ":" + k + ")." + t
    def TyApp(t1: String, t2: String): String = "[" + t1 + " " + t2 + "]"
  }

  /** Parsing algebra: builds the expression/type/kind parsers for the omega constructs. */
  trait Parse[E, T, K] extends ETKParser[E, T, K] {
    lexical.reserved += ("Star", "All", "Some")
    lexical.delimiters += ("=>", ":", ".", ",", "{", "}")

    val alg: Alg[E, T, K]

    // Type abstraction in terms (\\X:K.e) and type application (e [T]).
    val pOmegaE: Parser[E] =
      "\\\\" ~> ucid ~ (":" ~> pK) ~ ("." ~> pE) ^^ { case x ~ kn ~ ex => alg.TmTAbs(x, kn, ex) } |||
        pE ~ ("[" ~> pT <~ "]") ^^ { case ex ~ ty => alg.TmTApp(ex, ty) }

    // Universal (All), existential ({Some ...}), type-level lambda and application.
    val pOmegaT: Parser[T] =
      "All" ~> ucid ~ (":" ~> pK) ~ ("." ~> pT) ^^ { case x ~ kn ~ ty => alg.TyAll(x, kn, ty) } |||
        ("{" ~> "Some" ~> ucid ~ (":" ~> pK) ~ ("," ~> pT) <~ "}") ^^ { case x ~ kn ~ ty => alg.TySome(x, kn, ty) } |||
        "\\\\" ~> ucid ~ (":" ~> pK) ~ ("." ~> pT) ^^ { case x ~ kn ~ ty => alg.TyAbs(x, kn, ty) } |||
        pT ~ pT ^^ { case t1 ~ t2 => alg.TyApp(t1, t2) }

    // Kinds: Star, arrow kinds, and parenthesised kinds.
    val pOmegaK: Parser[K] =
      "Star" ^^ { _ => alg.KnStar() } |||
        pK ~ ("=>" ~> pK) ^^ { case k1 ~ k2 => alg.KnArr(k1, k2) } |||
        "(" ~> pK <~ ")"
  }
}
/** Composes the full language: simply-typed core, packages, references and the omega layer. */
object FullOmega {

  trait Alg[E, T, K] extends Simple.Alg[E, T] with Pack.Alg[E, T] with Ref.Alg[E, T] with Omega.Alg[E, T, K]

  trait Print extends Alg[String, String, String] with Simple.Print with Pack.Print with Ref.Print with Omega.Print

  trait Parse[E, T, K] extends Simple.Parse[E, T] with Pack.Parse[E, T]
    with Ref.Parse[E, T] with Omega.Parse[E, T, K] {
    override val alg: Alg[E, T, K]

    // Each combined parser is the alternation of the sub-language parsers.
    val pFullOmegaE: Parser[E] = pSimpleE ||| pPackE ||| pRefE ||| pOmegaE
    val pFullOmegaT: Parser[T] = pSimpleT ||| pRefT ||| pOmegaT
    val pFullOmegaK: Parser[K] = pOmegaK

    // Tie the recursive knots: pE/pT/pK used inside the sub-parsers resolve here.
    override val pE: Parser[E] = pFullOmegaE
    override val pT: Parser[T] = pFullOmegaT
    override val pK: Parser[K] = pFullOmegaK
  }
}
/** Convenience entry points for parsing FullOmega programs. */
object TestFullOmega {
  /** Parses `inp` as an expression, interpreting it with the given algebra. */
  def parseWithAlg[E, T, K](inp: String)(a: FullOmega.Alg[E, T, K]): E = {
    val p = new FullOmega.Parse[E, T, K] {
      override val alg: FullOmega.Alg[E, T, K] = a
    }
    parse(p.pE)(inp)
  }

  /** Parses `inp` and prints it back using the pretty-printing algebra. */
  def parseAndPrint(inp: String): Unit = println(parseWithAlg(inp)(new FullOmega.Print {}))
}
} | hy-zhang/parser | Scala/Parser/src/TAPL/FullOmega.scala | Scala | bsd-3-clause | 3,154 |
package org.hammerlab.guacamole.reads
import htsjdk.samtools.SAMRecord
import org.hammerlab.genomics.reference.{ContigName, Locus}
/**
 * Details of the mate read alignment.
 *
 * @param contigName contig/chromosome the mate read is aligned to
 * @param start 0-based start position of the mate read
 * @param inferredInsertSize insert size between the read pair, if defined
 * @param isPositiveStrand whether the mate is aligned to the positive strand
 */
case class MateAlignmentProperties(contigName: ContigName,
                                   start: Locus,
                                   inferredInsertSize: Option[Int],
                                   isPositiveStrand: Boolean)
object MateAlignmentProperties {
  /**
   * Builds mate-alignment details from a SAM record.
   *
   * @param record the SAM record whose mate fields are read
   * @return None when the mate is unmapped, otherwise the mate's alignment properties
   */
  def apply(record: SAMRecord): Option[MateAlignmentProperties] =
    if (record.getMateUnmappedFlag)
      None
    else {
      // samtools coordinates are 1-based; this codebase is 0-based.
      val zeroBasedStart = record.getMateAlignmentStart - 1
      // An inferred insert size of 0 means "not available" in SAM.
      val insertSize = Some(record.getInferredInsertSize).filter(_ != 0)
      Some(
        MateAlignmentProperties(
          contigName = record.getMateReferenceName,
          start = zeroBasedStart,
          inferredInsertSize = insertSize,
          isPositiveStrand = !record.getMateNegativeStrandFlag
        )
      )
    }
}
| hammerlab/guacamole | src/main/scala/org/hammerlab/guacamole/reads/MateAlignmentProperties.scala | Scala | apache-2.0 | 1,306 |
package mesosphere.marathon
package raml
/**
 * Conversions between the state-layer, RAML and protobuf representations of
 * [[mesosphere.marathon.state.KillSelection]].
 */
trait KillSelectionConversion {

  // RAML -> state
  implicit val ramlKillSelectionRead = Reads[KillSelection, state.KillSelection] {
    case KillSelection.YoungestFirst => state.KillSelection.YoungestFirst
    case KillSelection.OldestFirst => state.KillSelection.OldestFirst
  }

  // state -> RAML
  implicit val ramlKillSelectionWrite = Writes[state.KillSelection, KillSelection] {
    case state.KillSelection.OldestFirst => KillSelection.OldestFirst
    case state.KillSelection.YoungestFirst => KillSelection.YoungestFirst
  }

  // protobuf -> RAML; protobuf enums may grow, so unknown values fail loudly.
  implicit val protoRamlKillSelection = Writes[Protos.KillSelection, KillSelection] {
    case Protos.KillSelection.YoungestFirst => KillSelection.YoungestFirst
    case Protos.KillSelection.OldestFirst => KillSelection.OldestFirst
    case badKillSelection => throw new IllegalStateException(s"unsupported kill selection $badKillSelection")
  }
}

object KillSelectionConversion extends KillSelectionConversion
| guenter/marathon | src/main/scala/mesosphere/marathon/raml/KillSelectionConversion.scala | Scala | apache-2.0 | 1,088 |
package com.twitter.zipkin.sampler
import java.util.Random
import com.twitter.app.App
import com.twitter.finagle.Service
import com.twitter.util.Await.{ready, result}
import com.twitter.util.Future
import com.twitter.util.Time.now
import com.twitter.zipkin.Constants
import com.twitter.zipkin.common.{Annotation, Endpoint, Span}
import com.twitter.zipkin.storage.InMemorySpanStore
import com.twitter.zipkin.storage.SpanStore.toScalaFunc
import org.apache.curator.framework.CuratorFrameworkFactory.newClient
import org.apache.curator.retry.RetryOneTime
import org.apache.curator.test.TestingServer
import org.junit._
import org.scalactic.Tolerance
import org.scalatest.junit.JUnitSuite
/**
 * Integration tests for the adaptive sampler against an in-process ZooKeeper
 * (see the companion object for the shared fixture and lifecycle hooks).
 */
class AdaptiveSamplerTest extends JUnitSuite with Tolerance {
  import AdaptiveSamplerTest._

  @Test def sampleRateReadFromZookeeper() {
    val spanStore = new InMemorySpanStore
    // Simulates an existing sample rate, set from zookeeper
    client.setData().forPath("/sampleRate", Array[Byte]('0','.','9'))
    result(sampler.apply(hundredSpans, Service.mk(spanStore)))
    // A 0.9 rate over 100 spans should store roughly 90; sampling is
    // probabilistic, hence the tolerance.
    assert(spanStore.spans.size === (90 +- 10)) // TODO: see if there's a way to tighten this up!
  }

  @Test def exportsStoreRateToZookeeperOnInterval() {
    result(sampler.apply(hundredSpans, Service.mk(_ => Future.Unit)))
    // Until the update interval, we'll see a store rate of zero
    assert(getLocalStoreRate === 0)
    // Await until update interval passes (1 second + fudge)
    Thread.sleep(TestAdaptiveSampler.asUpdateFreq().inMillis) // let the update interval pass
    // since update frequency is secondly, the rate exported to ZK will be the amount stored * 60
    assert(getLocalStoreRate === hundredSpans.size * 60)
  }

  // Reset ZooKeeper state before each test so tests don't leak into each other.
  @Before def clear() {
    // default to always sample
    client.setData().forPath("/sampleRate", Array[Byte]('1','.','0'))
    // remove any storage rate members
    val groupMembers = client.getChildren().forPath("/storeRates")
    if (!groupMembers.isEmpty) {
      client.setData().forPath("/storeRates/" + groupMembers.get(0), Array[Byte]('0'))
    }
  }
}
/** Shared test fixture: embedded ZooKeeper, curator client and sampler under test. */
object AdaptiveSamplerTest {
  /** Makes a hundred spans, with realistic, random trace ids */
  val hundredSpans = {
    val ann = Annotation(now.inMicroseconds, Constants.ServerRecv, Some(Endpoint(127 << 24 | 1, 8080, "service")))
    val proto = Span(1L, "get", 1L, annotations = List(ann))
    new Random().longs(100).toArray.toSeq.map(id => proto.copy(traceId = id, id = id))
  }

  object TestAdaptiveSampler extends App with AdaptiveSampler

  val zookeeper = new TestingServer()
  lazy val client = newClient(zookeeper.getConnectString, new RetryOneTime(200 /* ms */))
  lazy val sampler = TestAdaptiveSampler.newAdaptiveSamplerFilter()
    .asInstanceOf[AdaptiveSamplerFilter]

  @BeforeClass def beforeAll() {
    zookeeper.start()
    client.start()

    // AdaptiveSampler doesn't create these!
    client.createContainers("/election")
    client.createContainers("/storeRates")
    client.createContainers("/sampleRate")
    client.createContainers("/targetStoreRate")

    TestAdaptiveSampler.nonExitingMain(Array(
      "-zipkin.sampler.adaptive.basePath", "", // shorten for test readability
      "-zipkin.sampler.adaptive.updateFreq", "1.second", // least possible value
      "-zipkin.zookeeper.location", zookeeper.getConnectString
    ))
    ready(TestAdaptiveSampler)

    // prime zookeeper data, to make sure connection-concerns don't fail tests
    result(sampler.apply(hundredSpans, Service.mk(_ => Future.Unit)))
    Thread.sleep(TestAdaptiveSampler.asUpdateFreq().inMillis) // let the update interval pass
  }

  @AfterClass def afterAll() {
    client.close()
    zookeeper.close()
    ready(TestAdaptiveSampler.close())
  }

  /** Twitter's zookeeper group is where you store the same value as a child node */
  def getLocalStoreRate = {
    val groupMember = client.getChildren().forPath("/storeRates").get(0)
    val data = client.getData().forPath("/storeRates/" + groupMember)
    if (data.length == 0) 0 else Integer.parseInt(new String(data))
  }
}
| prat0318/zipkin | zipkin-sampler/src/test/scala/com/twitter/zipkin/sampler/AdaptiveSamplerTest.scala | Scala | apache-2.0 | 4,062 |
package com.joescii.typeprog
/** Compile-time checks for type-level list reduction over Peano-encoded naturals. */
object TList2Spec {
  // Naturals built by repeated application of the successor NatN to Nat0.
  type Nat1 = NatN[Nat0]
  type Nat2 = NatN[Nat1]
  type Nat3 = NatN[Nat2]
  type Nat6 = NatN[NatN[NatN[Nat3]]]
  // A type-level list holding Nat1, Nat2 and Nat3.
  type L = Nat1 :: Nat2 :: Nat3 :: TNil
  // Compile-time proof that reducing L yields Nat6 (presumably a sum: 1+2+3=6).
  implicitly[L#reduce =:= Nat6]
}
| joescii/type-prog-impress | src/test/scala/com.joescii.typeprog/TList2Spec.scala | Scala | apache-2.0 | 237 |
/* Copyright 2012-2015 Micronautics Research Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License. */
package com.micronautics.aws
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.services.cloudfront.{AmazonCloudFront, AmazonCloudFrontClientBuilder}
import com.amazonaws.services.cloudfront.model._
import com.micronautics.cache.{Memoizer, Memoizer0, Memoizer2}
import java.util.concurrent.atomic.AtomicBoolean
import scala.jdk.CollectionConverters._
import scala.collection.mutable
import scala.util.{Failure, Success, Try}
object CloudFront {
  // One minute in milliseconds; used as the polling interval while waiting for
  // a distribution's status to leave "InProgress" (see removeOrigin).
  val oneMinute: Long = 1L * 60L * 1000L

  def apply: CloudFront = new CloudFront
}
/**
 * Wraps the AWS CloudFront client with memoized lookups relating CloudFront
 * distributions to S3 buckets, plus enable/disable, invalidation and removal
 * operations. Mutating operations set `cacheIsDirty`; callers decide when to
 * call `clearCaches()`.
 */
class CloudFront extends CFImplicits with S3Implicits {
  import CloudFront._
  import com.amazonaws.services.s3.model.Bucket

  implicit val cf: CloudFront = this
  implicit val cfClient: AmazonCloudFront = AmazonCloudFrontClientBuilder.standard.build

  // Flags that a mutating call may have invalidated the memoized values below.
  val cacheIsDirty = new AtomicBoolean(false)

  protected val _distributions: Memoizer0[List[DistributionSummary]] =
    Memoizer(cfClient.listDistributions(new ListDistributionsRequest).getDistributionList.getItems.asScala.toList)

  // All distribution summaries for this AWS account (memoized at first access).
  val distributions: List[DistributionSummary] = _distributions.apply

  protected val _bucketNamesWithDistributions: Memoizer0[List[String]] =
    Memoizer(
      for {
        distribution <- distributions
        item <- distribution.getOrigins.getItems.asScala // value: s"S3-$lcBucketName"
      } yield item.getId.substring(3) // strip the "S3-" prefix to recover the bucket name
    )

  /** @return List of bucket names that have CloudFront distributions for this AWS account */
  // id.getDomainName value: s"$lcBucketName.s3.amazonaws.com"
  val bucketNamesWithDistributions: List[String] =
    _bucketNamesWithDistributions.apply

  protected val _bucketsWithDistributions: Memoizer[S3, List[Bucket]] = Memoizer( s3 =>
    for {
      bucketName <- bucketNamesWithDistributions
      bucket <- s3.findByName(bucketName).toList
    } yield bucket
  )

  def bucketsWithDistributions(implicit s3: S3): List[Bucket] =
    _bucketsWithDistributions(s3)

  /** @return List of CloudFront distributions for the specified bucket */
  protected val _distributionsFor: Memoizer2[Bucket, S3, List[DistributionSummary]] =
    Memoizer { (bucket, s3) =>
      val (_, bucketOriginId) = RichBucket(bucket)(s3).safeNames
      val distributionSummaries: List[DistributionSummary] = distributions.filter { distribution =>
        val origins: mutable.Seq[Origin] = distribution.getOrigins.getItems.asScala
        origins.exists(_.getId == bucketOriginId)
      }
      distributionSummaries
    }

  def distributionsFor(bucket: Bucket)
                      (implicit s3: S3): List[DistributionSummary] =
    _distributionsFor.apply(bucket, s3)

  // Drops all memoized lookups and resets the dirty flag.
  def clearCaches(): Unit = {
    _bucketNamesWithDistributions.clear()
    _bucketsWithDistributions.clear()
    _distributions.clear()
    _distributionsFor.clear()
    cacheIsDirty.set(false)
  }

  /** Enable/disable all distributions for the given bucketName
    * @return list of UpdateDistributionResult for distributions that were enabled */
  def enableAllDistributions(bucket: Bucket, newStatus: Boolean=true)
                            (implicit s3: S3): List[UpdateDistributionResult] = {
    val distributions: List[DistributionSummary] = distributionsFor(bucket)
    distributions.flatMap { implicit distributionSummary =>
      val configResult: GetDistributionConfigResult = distributionSummary.configResult
      // Only distributions that are currently disabled are touched.
      if (!configResult.getDistributionConfig.getEnabled) {
        configResult.getDistributionConfig.setEnabled(newStatus)
        val distributionETag = configResult.getETag // Is this in the proper sequence?
        val updateRequest = new UpdateDistributionRequest(configResult.getDistributionConfig, distributionSummary.getId, distributionETag)
        val result = cfClient.updateDistribution(updateRequest)
        cacheIsDirty.set(true)
        Some(result)
      } else None
    }
  }

  /** Enable/disable the most recently created distribution for the given bucketName
    * @return Some(UpdateDistributionResult) if distributions was enabled, else None */
  def enableLastDistribution(bucket: Bucket, newStatus: Boolean=true)
                            (implicit s3: S3): Option[UpdateDistributionResult] = {
    val distributions: Seq[DistributionSummary] = distributionsFor(bucket)
    distributions.lastOption.flatMap { implicit distributionSummary =>
      val configResult: GetDistributionConfigResult = distributionSummary.configResult
      if (!configResult.getDistributionConfig.getEnabled) {
        configResult.getDistributionConfig.setEnabled(newStatus)
        val distributionETag = configResult.getETag // Is this in the proper sequence?
        val updateRequest = new UpdateDistributionRequest(configResult.getDistributionConfig, distributionSummary.getId, distributionETag)
        val result = cfClient.updateDistribution(updateRequest)
        cacheIsDirty.set(true)
        Some(result)
      } else None
    }
  }

  /** Invalidate asset in all bucket distributions where it is present.
    * @param assetPath The path of the objects to invalidate, relative to the distribution and must begin with a slash (/).
    *                  If the path is a directory, all assets within in are invalidated
    * @return number of asset invalidations */
  def invalidate(bucket: Bucket, assetPath: String)
                (implicit s3: S3): Int =
    invalidateMany(bucket, List(assetPath))

  /** Invalidate asset in all bucket distributions where it is present.
    * @param assetPaths The path of the objects to invalidate, relative to the distribution and must begin with a slash (/).
    *                   If the path is a directory, all assets within in are invalidated
    * @return number of asset invalidations
    * @see http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/cloudfront/model/InvalidationBatch.html#InvalidationBatch(com.amazonaws.services.cloudfront.model.Paths,%20java.lang.String) */
  def invalidateMany(bucket: Bucket, assetPaths: List[String])(implicit s3: S3): Int = {
    // Only invalidate paths that actually exist in the bucket.
    val foundAssets: List[String] = assetPaths.filter(bucket.oneObjectData(_).isDefined)
    val foundPaths: Paths = new Paths().withItems(foundAssets.asJava).withQuantity(foundAssets.size)
    // One invalidation batch per distribution; each contributes 1 to the count.
    val counts: List[Int] = distributionsFor(bucket) map { distributionSummary =>
      import com.amazonaws.services.cloudfront.model.CreateInvalidationRequest
      val invalidationBatch = new InvalidationBatch(foundPaths, uuid)
      // distributionSummary.getId returns distribution.getId
      cfClient.createInvalidation(new CreateInvalidationRequest().withDistributionId(distributionSummary.getId).withInvalidationBatch(invalidationBatch))
      1
    }
    val sum = counts.sum
    if (sum>0) cacheIsDirty.set(true)
    sum
  }

  /** Remove the most recently created distribution for the given bucketName.
    * Can take 15 minutes to an hour to return. */
  def removeDistribution(bucket: Bucket)
                        (implicit s3: S3): Boolean =
    distributionsFor(bucket).lastOption.exists { implicit distSummary =>
      val distConfigResult = cfClient.getDistributionConfig(new GetDistributionConfigRequest().withId(distSummary.getId))
      val result = distSummary.originItems.map { removeOrigin(distSummary, distConfigResult, _) }.forall { _.isSuccess }
      if (result) cacheIsDirty.set(true)
      result
    }

  // Disables the distribution if needed (blocking, polling once a minute until the
  // status leaves "InProgress"), then deletes it using the latest ETag.
  def removeOrigin(distSummary: DistributionSummary, distConfigResult: GetDistributionConfigResult, origin: Origin): Try[Boolean] = {
    val domainName: String = origin.getDomainName
    val distributionETag: String = distConfigResult.getETag
    val config: DistributionConfig = distConfigResult.getDistributionConfig
    // The explanation of how to find the eTag is wrong in the docs: http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/DeleteDistribution.html
    // I think the correct explanation is that the eTag from the most recent GET or PUT operation is what is actually required
    val eTag = if (distConfigResult.getDistributionConfig.getEnabled) {
      Logger.debug(s"Disabling distribution of $domainName with id $$distributionId and ETag $distributionETag")
      config.setEnabled(false)
      Logger.debug(s"Distribution config after disabling=${ jsonPrettyPrint(config) }")
      val updateRequest = new UpdateDistributionRequest(config, distSummary.getId, distributionETag)
      val updateResult: UpdateDistributionResult = cfClient.updateDistribution(updateRequest)
      val updateETag = updateResult.getETag
      Logger.debug(s"Update result ETag = $updateETag; enabled=${ distSummary.enabled }; status=${ distSummary.status }")
      var i = 1
      while (distSummary.status == "InProgress") {
        Thread.sleep(oneMinute) // TODO don't tie up a thread like this
        Logger.debug(s"  $i: Distribution enabled=${ distSummary.enabled }; status=InProgress")
        i = i + 1
      }
      updateETag
    } else {
      Logger.debug(s"Distribution of $domainName with id ${ distSummary.getId } and ETag $distributionETag was already disabled.")
      distributionETag
    }
    // Fails with: Distribution of scalacoursesdemo.s3.amazonaws.com with id E1ALVO6LY3X3XE and ETag E21ZQTZDDOETEA:
    // The distribution you are trying to delete has not been disabled.
    try {
      Logger.debug(s"Deleting distribution of $domainName with id ${ distSummary.getId } and ETag $eTag.")
      distSummary.delete(eTag)
      cacheIsDirty.set(true)
      Success(true)
    } catch {
      case nsde: NoSuchDistributionException =>
        Failure(nsde.prefixMsg(s"Distribution of $domainName with id ${ distSummary.getId } and ETag $distributionETag does not exist"))
      case e: Exception =>
        Failure(e.prefixMsg(s"Distribution of $domainName with id ${ distSummary.getId } and ETag $distributionETag: ${e.getMessage}"))
    }
  }

  def tryConfig(id: String): Try[DistributionConfig] = findDistributionById(id).map(_.getDistributionConfig)

  def findDistributionById(id: String): Try[Distribution] = try {
    Success(cfClient.getDistribution(new GetDistributionRequest().withId(id)).getDistribution)
  } catch {
    case nsde: NoSuchDistributionException =>
      Failure(nsde.prefixMsg(s"Distribution with id $id does not exist"))
    case e: Exception =>
      Failure(e.prefixMsg(s"Distribution of with id $id: ${ e.getMessage }"))
  }
}
/** Implicit/enriched wrappers over the AWS CloudFront model classes. */
trait CFImplicits {
  import com.amazonaws.services.s3.model.Bucket

  object RichDistribution {
    // Default minimum TTL for new distributions, in seconds.
    var minimumCacheTime: Long = 60 * 60 // one hour

    /** Create a new distribution for the given S3 bucket */
    def apply(
      bucket: Bucket,
      priceClass: PriceClass=PriceClass.PriceClass_All,
      minimumCacheTime: Long=minimumCacheTime
    )(implicit
      awsCredentials: AWSCredentials,
      cfClient: AmazonCloudFront,
      s3: S3
    ): Distribution = {
      val (lcBucketName, bucketOriginId) = bucket.safeNames
      // Minimal distribution config: no aliases, no custom cache behaviors,
      // no logging, GET/HEAD only, forwarding all cookies but no query strings.
      val aliases = new Aliases().withQuantity(0)
      val allowedMethods = new AllowedMethods().withItems(Method.GET, Method.HEAD).withQuantity(2)
      val cacheBehaviors = new CacheBehaviors().withQuantity(0)
      val cookiePreference = new CookiePreference().withForward(ItemSelection.All)
      val loggingConfig = new LoggingConfig()
        .withEnabled(false).withIncludeCookies(false)
        .withPrefix("").withBucket("")
      val s3OriginConfig = new S3OriginConfig().withOriginAccessIdentity("")
      val trustedSigners = new TrustedSigners().withEnabled(false).withQuantity(0)
      val forwardedValues = new ForwardedValues()
        .withCookies(cookiePreference)
        .withQueryString(false)
      val defaultCacheBehavior = new DefaultCacheBehavior()
        .withAllowedMethods(allowedMethods)
        .withForwardedValues(forwardedValues)
        .withMinTTL(minimumCacheTime)
        .withTargetOriginId(bucketOriginId)
        .withTrustedSigners(trustedSigners)
        .withViewerProtocolPolicy(ViewerProtocolPolicy.AllowAll)
      val origin = new Origin()
        .withDomainName(s"$lcBucketName.s3.amazonaws.com")
        .withS3OriginConfig(s3OriginConfig)
        .withId(bucketOriginId)
      val items = List(origin).asJava
      val origins = new Origins().withItems(items).withQuantity(items.size)
      // CallerReference must be unique per request; nanoTime serves that purpose here.
      val distributionConfig = new DistributionConfig()
        .withAliases(aliases)
        .withCacheBehaviors(cacheBehaviors)
        .withCallerReference(System.nanoTime.toString)
        .withComment("")
        .withDefaultCacheBehavior(defaultCacheBehavior)
        .withDefaultRootObject("index.html")
        .withEnabled(true)
        .withLogging(loggingConfig)
        .withOrigins(origins)
        .withPriceClass(priceClass)
      val cdr = new CreateDistributionRequest().withDistributionConfig(distributionConfig)
      Logger.debug(s"cdr=${jsonPrettyPrint(cdr)}")
      try {
        val result: CreateDistributionResult = cfClient.createDistribution(cdr)
        result.getDistribution
      } catch {
        case nsoe: NoSuchOriginException =>
          throw new Exception(s"Origin with domain name '${ origin.getDomainName }' and id '${ origin.getId }' does not exist", nsoe)
      }
    }
  }

  /** Adds convenience accessors and lifecycle helpers to [[DistributionSummary]]. */
  implicit class RichDistributionSummary(distributionSummary: DistributionSummary)
                                        (implicit cfClient: AmazonCloudFront) {
    def config: DistributionConfig = configResult.getDistributionConfig

    // Fetches the current config (and ETag) from CloudFront on every call.
    def configResult: GetDistributionConfigResult = {
      val getDistributionConfigRequest = new GetDistributionConfigRequest().withId(distributionSummary.getId)
      cfClient.getDistributionConfig(getDistributionConfigRequest)
    }

    def enabled: Boolean = {
      Logger.debug("Getting enabled from distributionSummary")
      distributionSummary.getEnabled
    }

    // Deletes the distribution; the caller must supply the ETag from the most
    // recent GET/PUT (see CloudFront.removeOrigin).
    def delete(eTag: String): Unit = {
      Logger.debug(s"Deleting distribution with id ${ distributionSummary.getId }")
      val deleteDistributionRequest = new DeleteDistributionRequest().withId(distributionSummary.getId).withIfMatch(eTag)
      Logger.debug(s"deleteDistributionRequest=${ jsonPrettyPrint(deleteDistributionRequest) }")
      cfClient.deleteDistribution(deleteDistributionRequest)
      ()
    }

    def eTag: String = configResult.getETag

    def originItems: List[Origin] = config.getOrigins.getItems.asScala.toList

    def removeOrigin(distConfigResult: GetDistributionConfigResult, origin: Origin)(implicit cf: CloudFront): Try[Boolean] =
      cf.removeOrigin(distributionSummary, distConfigResult, origin)

    def status: String = {
      Logger.debug(s"Getting status from distributionSummary with id ${ distributionSummary.getId }")
      distributionSummary.getStatus
    }
  }
}
| mslinn/awslib_scala | src/main/scala/com/micronautics/aws/CloudFront.scala | Scala | mit | 15,266 |
import wop.game.{Point, WopState}
import wop.game.WopState.{InProgress, Player}
import wop.game.ai.WopSolver
import scala.annotation.tailrec
/**
 * Interactive console driver: plays a human against the WopSolver AI,
 * alternating until the game reaches a terminal state.
 */
object RunSolver extends App {
  implicit case object Time extends WopState.TimeProvider {
    def currentTime: Long = 0
  }

  // Prompts and reads a point as two whitespace/newline-separated ints.
  // Uses scala.io.StdIn: the bare Predef readInt/readChar are deprecated and
  // removed in Scala 2.13.
  private def readPoint(prompt: String): Point = {
    println(prompt)
    (scala.io.StdIn.readInt(), scala.io.StdIn.readInt())
  }

  /**
   * Game loop: the AI moves when it is `whoAI`'s turn, otherwise the human is
   * prompted; invalid human input re-prompts without advancing the state.
   * Returns None when the game (or the solver) cannot proceed further.
   */
  @tailrec def loop(state: WopState, whoAI: Player): Option[WopState] = {
    println(state)
    state match {
      case s: InProgress if s.player == whoAI =>
        WopSolver.minMax(state) match {
          case Some(p) =>
            s(p) match {
              case Right(newState) => loop(newState, whoAI)
              case x =>
                println("i dont know", x)
                None
            }
          case x =>
            println("i dont know", x)
            None
        }
      case prevState: WopState.Turn =>
        prevState(readPoint("input local x,y")) match {
          case Right(newState) => loop(newState, whoAI)
          case _ =>
            println("wrong input")
            loop(prevState, whoAI)
        }
      case prevState: WopState.Select =>
        prevState(readPoint("input global x,y")) match {
          case Right(newState) => loop(newState, whoAI)
          case _ =>
            println("wrong input")
            loop(prevState, whoAI)
        }
      case _ => None
    }
  }

  /** Asks which side the AI plays; re-prompts on anything other than 'X' or 'O'. */
  @tailrec def whoAI : Player = {
    println("Which are AI X or O?")
    scala.io.StdIn.readChar() match {
      case 'X' => Player.P1
      case 'O' => Player.P2
      case _ => whoAI
    }
  }

  loop(WopState.initial, whoAI)
}
| ognick/wizards-of-portal | core/src/main/scala/RunSolver.scala | Scala | gpl-3.0 | 1,688 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kudu.backup
import java.util.concurrent.TimeUnit
import org.apache.kudu.Type
import org.apache.kudu.client.AsyncKuduScanner.ReadMode
import org.apache.kudu.client._
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.kudu.util.HybridTimeUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.{Partition, SparkContext, TaskContext}
import org.apache.yetus.audience.{InterfaceAudience, InterfaceStability}
import scala.collection.JavaConverters._
/**
 * RDD that backs up a Kudu table: one Spark partition per Kudu scan token,
 * scanning at a fixed snapshot timestamp for application consistency.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
class KuduBackupRDD private[kudu](@transient val table: KuduTable,
                                  @transient val options: KuduBackupOptions,
                                  val kuduContext: KuduContext,
                                  @transient val sc: SparkContext
                                 ) extends RDD[Row](sc, Nil) {

  // TODO: Split large tablets into smaller scan tokens?
  override protected def getPartitions: Array[Partition] = {
    val client = kuduContext.syncClient

    // Set a hybrid time for the scan to ensure application consistency.
    val timestampMicros = TimeUnit.MILLISECONDS.toMicros(options.timestampMs)
    val hybridTime = HybridTimeUtil.physicalAndLogicalToHTTimestamp(timestampMicros, 0)

    // Create the scan tokens for each partition.
    val tokens = client.newScanTokenBuilder(table)
      .cacheBlocks(false)
      // TODO: Use fault tolerant scans to get mostly.
      // ordered results when KUDU-2466 is fixed.
      // .setFaultTolerant(true)
      .replicaSelection(ReplicaSelection.CLOSEST_REPLICA)
      .readMode(ReadMode.READ_AT_SNAPSHOT)
      .snapshotTimestampRaw(hybridTime)
      .batchSizeBytes(options.scanBatchSize)
      .scanRequestTimeout(options.scanRequestTimeout)
      .prefetching(options.scanPrefetching)
      .build()

    // One Spark partition per scan token, pinned to the tablet leader's host.
    tokens.asScala.zipWithIndex.map {
      case (token, index) =>
        // TODO: Support backups from any replica or followers only.
        // Always run on the leader for data locality.
        val leaderLocation = token.getTablet.getLeaderReplica.getRpcHost
        KuduBackupPartition(index, token.serialize(), Array(leaderLocation))
    }.toArray
  }

  // TODO: Do we need a custom spark partitioner for any guarantees?
  // override val partitioner = None

  // Deserializes this partition's scan token into a scanner and streams rows from it.
  override def compute(part: Partition, taskContext: TaskContext): Iterator[Row] = {
    val client: KuduClient = kuduContext.syncClient
    val partition: KuduBackupPartition = part.asInstanceOf[KuduBackupPartition]
    // TODO: Get deletes and updates for incremental backups.
    val scanner = KuduScanToken.deserializeIntoScanner(partition.scanToken, client)
    new RowIterator(scanner)
  }

  override def getPreferredLocations(partition: Partition): Seq[String] = {
    partition.asInstanceOf[KuduBackupPartition].locations
  }
}
// One Spark partition of the backup: a serialized Kudu scan token plus the
// preferred host locations (the tablet leader) for data locality.
private case class KuduBackupPartition(index: Int,
                                       scanToken: Array[Byte],
                                       locations: Array[String]) extends Partition
/**
 * This iterator wraps a KuduScanner, converts the returned RowResults into a
 * Spark Row, and allows iterating over those scanned results.
 *
 * The Spark RDD abstraction has an abstract compute method, implemented in KuduBackupRDD,
 * that takes the job partitions and task context and expects to return an Iterator[Row].
 * This implementation facilitates that.
 */
private class RowIterator(private val scanner: KuduScanner) extends Iterator[Row] {
  // Batch currently being drained; null until the first fetch.
  private var currentIterator: RowResultIterator = _

  override def hasNext: Boolean = {
    // Keep fetching batches while the scanner has more rows and we have no
    // batch yet, or the current batch is drained. (Equivalent to the original
    // two-clause condition, but the final check below is null-safe: previously
    // a scanner that never had rows caused a NullPointerException here.)
    while (scanner.hasMoreRows &&
        (currentIterator == null || !currentIterator.hasNext)) {
      if (TaskContext.get().isInterrupted()) {
        throw new RuntimeException("KuduBackup spark task interrupted")
      }
      currentIterator = scanner.nextRows()
    }
    currentIterator != null && currentIterator.hasNext
  }

  // TODO: Use a more "raw" encoding for efficiency?
  // Extracts column i from the row result as the corresponding JVM value,
  // or null when the cell is null.
  private def get(rowResult: RowResult, i: Int): Any = {
    if (rowResult.isNull(i)) null
    else rowResult.getColumnType(i) match {
      case Type.BOOL => rowResult.getBoolean(i)
      case Type.INT8 => rowResult.getByte(i)
      case Type.INT16 => rowResult.getShort(i)
      case Type.INT32 => rowResult.getInt(i)
      case Type.INT64 => rowResult.getLong(i)
      case Type.UNIXTIME_MICROS => rowResult.getTimestamp(i)
      case Type.FLOAT => rowResult.getFloat(i)
      case Type.DOUBLE => rowResult.getDouble(i)
      case Type.STRING => rowResult.getString(i)
      case Type.BINARY => rowResult.getBinaryCopy(i)
      case Type.DECIMAL => rowResult.getDecimal(i)
      case _ => throw new RuntimeException(s"Unsupported column type: ${rowResult.getColumnType(i)}")
    }
  }

  // TODO: There may be an old KuduRDD implementation where we did some
  // sort of zero copy/object pool pattern for performance (we could use that here).
  override def next(): Row = {
    val rowResult = currentIterator.next()
    val columnCount = rowResult.getColumnProjection.getColumnCount
    val columns = Array.ofDim[Any](columnCount)
    for (i <- 0 until columnCount) {
      columns(i) = get(rowResult, i)
    }
    Row.fromSeq(columns)
  }
}
| EvilMcJerkface/kudu | java/kudu-backup/src/main/scala/org/apache/kudu/backup/KuduBackupRDD.scala | Scala | apache-2.0 | 6,093 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtInteger}
case class B8(value: Int) extends CtBoxIdentifier(name = "Annuities, annual payments and discounts not arising from loan relationships and from which income tax has not been deducted") with CtInteger
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B8.scala | Scala | apache-2.0 | 894 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.planner.validate._
/**
 * Base class for the sort-direction wrappers [[Asc]] and [[Desc]]. Validation
 * requires the ordered expression to be a named field reference.
 */
abstract class Ordering extends UnaryExpression {
  override private[flink] def validateInput(): ValidationResult = {
    if (!child.isInstanceOf[NamedExpression]) {
      // Fix: the message previously read "Sort should only based on field reference"
      // (missing "be"); the interpolator prefix was also unnecessary.
      ValidationFailure("Sort should only be based on field references")
    } else {
      ValidationSuccess
    }
  }
}
/** Ascending sort order over the wrapped child expression. */
case class Asc(child: PlannerExpression) extends Ordering {
  override def toString: String = s"($child).asc"
  override private[flink] def resultType: TypeInformation[_] = child.resultType
}
/** Descending sort order over the wrapped child expression. */
case class Desc(child: PlannerExpression) extends Ordering {
  override def toString: String = s"($child).desc"
  override private[flink] def resultType: TypeInformation[_] = child.resultType
}
| apache/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/expressions/ordering.scala | Scala | apache-2.0 | 1,643 |
package debop4s.core
import java.lang.{Double => JDouble, Float => JFloat, Integer => JInt, Long => JLong}
import debop4s.core.utils.Strings
/**
 * CorePackageFunSuite
 *
 * Exercises the `asXxx` conversion helpers added by the debop4s.core package
 * object to strings and boxed Java primitives. The tests consistently check
 * that a `null` boxed value converts to a zero/empty default rather than
 * throwing (presumably the helpers null-check internally — TODO confirm).
 *
 * @author sunghyouk.bae@gmail.com
 */
class CorePackageFunSuite extends AbstractCoreFunSuite {

  // String, Long and Double sources all convert to Int.
  test("int convert") {
    "1".asInt shouldBe 1
    1L.asInt shouldBe 1
    1.0.asInt shouldBe 1
  }

  // A null Character converts to the NUL character.
  test("asChar") {
    val one:java.lang.Character = 'A'
    val empty:java.lang.Character = null
    one.asChar shouldEqual 'A'
    empty.asChar shouldEqual '\\u0000'
    'A'.asInt.asChar shouldEqual 'A'
  }

  test("asByte") {
    val one:java.lang.Byte = 0x01.toByte
    val empty:java.lang.Byte = null
    one.asByte shouldEqual 1
    empty.asByte shouldEqual 0
    12.asByte shouldEqual 12
  }

  test("asShort") {
    val one:java.lang.Short = 1.toShort
    val empty:java.lang.Short = null
    one.asShort shouldEqual 1
    empty.asShort shouldEqual 0
    "123".asShort shouldEqual 123
  }

  test("asInt") {
    val one:Integer = 1
    val empty:Integer = null
    one.asInt shouldEqual 1
    empty.asInt shouldEqual 0
    "123".asInt shouldEqual 123
  }

  test("asLong") {
    val one:JLong = 1L
    val empty:JLong = null
    one.asLong shouldEqual 1L
    empty.asLong shouldEqual 0L
    "123".asLong shouldEqual 123L
  }

  test("asFloat") {
    val one:JFloat = 1.23f
    val empty:JFloat = null
    one.asFloat shouldEqual 1.23f
    empty.asFloat shouldEqual 0.0f
    "123.04".asFloat shouldEqual 123.04f
  }

  test("asDouble") {
    val one:JDouble = 1.23d
    val empty:JDouble = null
    one.asDouble shouldEqual 1.23d
    empty.asDouble shouldEqual 0.0d
    "123.04".asDouble shouldEqual 123.04d
  }

  // A null reference renders as Strings.NULL_STR rather than throwing.
  test("asString") {
    val one:Integer = 1
    val empty:Integer = null
    one.asString shouldEqual "1"
    empty.asString shouldEqual Strings.NULL_STR
    123.asString shouldEqual "123"
  }

  // TODO: asDateTime and asDate need to be updated from the kesti sources.
  //  test("asDateTime") {
  //    0.asDateTime() shouldEqual Some(new DateTime(0))
  //    val now = DateTime.now
  //    now.getMillis.asDateTime() shouldEqual Some(now)
  //
  //    val a = null:DateTime
  //    a.asDateTime() shouldEqual None
  //    None.asDateTime() shouldEqual None
  //
  //    new Date(now.getMillis).asDateTime() shouldEqual Some(now)
  //
  //    "2015-10-14".asDateTime() shouldEqual Some(new DateTime(2015, 10, 14, 0, 0))
  //  }
  //
  //  test("asDate") {
  //    0.asDate() shouldEqual Some(new Date(0))
  //    val now = new Date()
  //    now.getTime().asDate() shouldEqual Some(now)
  //
  //    val a = null:Date
  //    a.asDate() shouldEqual None
  //    None.asDate() shouldEqual None
  //
  //    new DateTime(now.getTime).asDate() shouldEqual Some(now)
  //  }
}
| debop/debop4s | debop4s-core/src/test/scala/debop4s/core/CorePackageFunSuite.scala | Scala | apache-2.0 | 2,721 |
package com.jancy.mateusz.sug.free
import com.jancy.mateusz.sug.free.impl.custom.{Registration, Runner}
object Main {

  /**
   * Program entry point: interprets the free-monad account-creation program
   * with the custom Runner interpreter and prints its result.
   *
   * Uses an explicit main method instead of `extends App` to avoid the
   * DelayedInit initialization-order pitfalls of the App trait. The JVM
   * entry point (Main.main) is unchanged for callers.
   */
  def main(args: Array[String]): Unit = {
    val result = Runner.run(Registration.createAccount("mateusz", "jancy"))
    println(result)
  }
}
| mateuszjancy/scalaz-free-in-example | src/main/scala/com/jancy/mateusz/sug/free/Main.scala | Scala | apache-2.0 | 226 |
package com.cloudray.scalapress.plugin.ecommerce.controller.admin
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation.{ModelAttribute, RequestMethod, RequestMapping}
import org.springframework.beans.factory.annotation.Autowired
import scala.Array
import javax.servlet.http.HttpServletRequest
import com.cloudray.scalapress.plugin.ecommerce.{ShoppingPlugin, ShoppingPluginDao}
import com.cloudray.scalapress.theme.MarkupDao
import com.cloudray.scalapress.item.controller.admin.MarkupPopulator
import com.cloudray.scalapress.util.EnumPopulator
import com.cloudray.scalapress.item.StockMethod
import com.cloudray.scalapress.framework.ScalapressContext
/**
 * Spring MVC controller for editing the shopping plugin settings in the
 * back office, mapped under backoffice/plugin/shopping.
 *
 * NOTE(review): @Autowired is placed on the class rather than a constructor;
 * this appears to be the project's convention for constructor injection —
 * confirm against the other controllers.
 *
 * @author Stephen Samuel
 */
@Controller
@RequestMapping(Array("backoffice/plugin/shopping"))
@Autowired
class ShoppingPluginController(val context: ScalapressContext,
                               val markupDao: MarkupDao,
                               val shoppingPluginDao: ShoppingPluginDao) extends MarkupPopulator with EnumPopulator {

  // GET: renders the edit form; the bound plugin comes from the
  // @ModelAttribute("plugin") factory method below.
  @RequestMapping(produces = Array("text/html"), method = Array(RequestMethod.GET))
  def edit(req: HttpServletRequest,
           @ModelAttribute("plugin") plugin: ShoppingPlugin) = "admin/plugin/shopping/plugin.vm"

  // POST: persists the form-bound plugin, then re-renders the same form.
  @RequestMapping(produces = Array("text/html"), method = Array(RequestMethod.POST))
  def save(req: HttpServletRequest, @ModelAttribute("plugin") plugin: ShoppingPlugin) = {
    shoppingPluginDao.save(plugin)
    edit(req, plugin)
  }

  // Model attributes exposed to every request handled by this controller.
  @ModelAttribute("plugin") def plugin = shoppingPluginDao.get
  @ModelAttribute("stockMethods") def stockMethods = populate(StockMethod.values)
}
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/plugin/ecommerce/controller/admin/ShoppingPluginController.scala | Scala | apache-2.0 | 1,642 |
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs
import akka.actor.{ActorContext, ActorSystem, ActorRefFactory}
import akka.util.Timeout
import scala.concurrent.duration._
import scala.language.implicitConversions
import scala.language.postfixOps
package object httpclient {

  /** Resolves the ActorSystem behind any supported ActorRefFactory implementation. */
  implicit def refFactoryToSystem(refFactory: ActorRefFactory): ActorSystem =
    refFactory match {
      case system: ActorSystem => system
      case context: ActorContext => context.system
      case other =>
        throw new IllegalArgumentException(s"Cannot create HttpClient with ActorRefFactory Impl ${other.getClass}")
    }

  /**
   * Pads a request timeout so the surrounding ask does not expire before the
   * request itself: +100ms under one second, +1s over ten seconds, and +10%
   * in between.
   */
  implicit class RequestToAskTimeout(val requestTimeout: Timeout) extends AnyVal {
    def askTimeout: Timeout = {
      val duration = requestTimeout.duration
      if (duration < 1.second) Timeout(duration + 100.millis)
      else if (duration > 10.seconds) Timeout(duration + 1.second)
      else Timeout((duration.toMicros * 1.1).toLong.micros)
    }
  }
}
| tutufool/squbs | squbs-httpclient/src/main/scala/org/squbs/httpclient/package.scala | Scala | apache-2.0 | 1,462 |
package de.fuberlin.wiwiss.silk.execution
import de.fuberlin.wiwiss.silk.entity.{Path, Link, Index, Entity}
import de.fuberlin.wiwiss.silk.linkagerule.LinkageRule
import de.fuberlin.wiwiss.silk.cache.Partition
import de.fuberlin.wiwiss.silk.util.DPair
import methods.MultiBlock
import scala.math.{min, max, abs}
/**
 * The execution method determines how a linkage rule is executed.
 */
trait ExecutionMethod {

  /**
   * Generates an index for a single entity.
   */
  def indexEntity(entity: Entity, rule: LinkageRule): Index = Index.default

  /**
   * Generates comparison pairs from two partitions.
   */
  def comparisonPairs(sourcePartition: Partition, targetPartition: Partition, full: Boolean) = new Traversable[DPair[Entity]] {

    /** Applies f to every source/target entity pair whose indices match. */
    def foreach[U](f: DPair[Entity] => U) {
      for (sourceIndex <- 0 until sourcePartition.size) {
        // In the symmetric (non-full) case, start after the source index so each
        // unordered pair is produced only once.
        val firstTarget = if (full) 0 else sourceIndex + 1
        for (targetIndex <- firstTarget until targetPartition.size) {
          // Only entities whose blocking indices overlap are compared.
          if (sourcePartition.indices(sourceIndex) matches targetPartition.indices(targetIndex)) {
            f(DPair(sourcePartition.entities(sourceIndex), targetPartition.entities(targetIndex)))
          }
        }
      }
    }
  }
}
/** Companion providing the default execution method. */
object ExecutionMethod {

  /** Returns the default execution method. */
  def apply(): ExecutionMethod = new methods.MultiBlock()
}
package com.webtrends.harness.http
import java.net.{HttpURLConnection, URL}
import akka.actor.{Props, ActorSystem}
import akka.testkit.TestKit
import com.webtrends.harness.TestKitSpecificationWithJUnit
class InternalHttpSpec extends TestKitSpecificationWithJUnit(ActorSystem("test")) with InternalHttpClient {

  // Local server under test, bound on a fixed port for the duration of the spec.
  val port = 8123
  val path = "http://127.0.0.1:" + port + "/"
  val httpActor = system.actorOf(Props(classOf[SimpleHttpServer], port))

  "Test handlers" should {
    "handle the get path /ping" in {
      val pingUrl = new URL(path + "ping")
      val connection = pingUrl.openConnection().asInstanceOf[HttpURLConnection]
      val response = getResponseContent(connection)
      // The server answers 200 with a body that starts with "pong:".
      response.status mustEqual "200"
      response.content.length must be > 0
      response.content.substring(0, 5) mustEqual "pong:"
    }
  }

  // Tear down the actor system once all examples have run.
  step {
    TestKit.shutdownActorSystem(system)
  }
}
| davis20/wookiee | wookiee-core/src/test/scala/com/webtrends/harness/http/InternalHttpSpec.scala | Scala | apache-2.0 | 859 |
import org.scalatest.{Matchers, FunSuite}
/** @version 1.0.0 */
class EtlTest extends FunSuite with Matchers {

  // Each case feeds a score -> letters map into Etl.transform and expects
  // the inverted lower-cased letter -> score map back.

  test("a single letter") {
    Etl.transform(Map(1 -> Seq("A"))) shouldBe Map("a" -> 1)
  }

  test("single score with multiple letters") {
    pending
    val expected = Map("a" -> 1, "e" -> 1, "i" -> 1, "o" -> 1, "u" -> 1)
    Etl.transform(Map(1 -> Seq("A", "E", "I", "O", "U"))) shouldBe expected
  }

  test("multiple scores with multiple letters") {
    pending
    val expected = Map("a" -> 1, "d" -> 2, "e" -> 1, "g" -> 2)
    Etl.transform(Map(1 -> Seq("A", "E"), 2 -> Seq("D", "G"))) shouldBe expected
  }

  test("multiple scores with differing numbers of letters") {
    pending
    val input = Map(
      1 -> Seq("A", "E", "I", "O", "U", "L", "N", "R", "S", "T"),
      2 -> Seq("D", "G"),
      3 -> Seq("B", "C", "M", "P"),
      4 -> Seq("F", "H", "V", "W", "Y"),
      5 -> Seq("K"),
      8 -> Seq("J", "X"),
      10 -> Seq("Q", "Z"))
    val expected = Map(
      "a" -> 1, "b" -> 3, "c" -> 3, "d" -> 2, "e" -> 1, "f" -> 4, "g" -> 2,
      "h" -> 4, "i" -> 1, "j" -> 8, "k" -> 5, "l" -> 1, "m" -> 3, "n" -> 1,
      "o" -> 1, "p" -> 3, "q" -> 10, "r" -> 1, "s" -> 1, "t" -> 1, "u" -> 1,
      "v" -> 4, "w" -> 4, "x" -> 8, "y" -> 4, "z" -> 10)
    Etl.transform(input) shouldBe expected
  }
}
/*
* konpare
* Copyright (C) 2015 Alexander Fefelov <https://github.com/alexanderfefelov>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.github.alexanderfefelov.konpare.syntax.subject.config
import com.github.alexanderfefelov.konpare.syntax.{Syntax, Subject}
object JumboFrame extends Subject {

  /**
   * Parses a "config jumbo_frame ..." command and records the per-port state
   * in the model, e.g.:
   *
   *   config jumbo_frame ports 1-28 state enable
   *
   * For that line, data is List("ports", "1-28", "state", "enable") and one
   * model entry "jumbo_frame=<port>=state" -> "enable" is added per port in
   * the expanded range.
   *
   * NOTE(review): data.head / data(3) assume at least four tokens — TODO
   * confirm the caller guarantees this for jumbo_frame commands.
   */
  override def process2(data: List[String], model: collection.mutable.Map[String, String]) = {
    data.head match {
      case Syntax.PARAMETER_PORTS =>
        data(3) match {
          case Syntax.VALUE_ENABLE | Syntax.VALUE_DISABLE =>
            Syntax.expandRange(data(1)).foreach( i =>
              model += s"${Syntax.SUBJECT_JUMBO_FRAME}=$i=${data(2)}" -> data(3)
            )
          case _ => // unknown state value: ignored, matching the tolerant style below
        }
      // Fix: the outer match was non-exhaustive, so any parameter other than
      // "ports" raised a MatchError; ignore unknown parameters instead, which
      // is consistent with how the inner match treats unknown values.
      case _ =>
    }
  }
}
package ecommerce.sales.app
import akka.actor._
import akka.japi.Util.immutableSeq
import scala.collection.immutable.Seq
import scala.concurrent.duration.{FiniteDuration, MILLISECONDS}
trait SalesFrontConfiguration {
this: Actor =>
object httpService {
val interface = appConfig.getString("http-service.interface")
val port = appConfig.getInt("http-service.port")
val askTimeout = FiniteDuration(appConfig.getDuration("http-service.ask-timeout", MILLISECONDS), MILLISECONDS)
}
lazy val contactPoints: Seq[String] = immutableSeq(appConfig.getStringList("backend-contact-points"))
private val appConfig = config.getConfig("app")
def config = context.system.settings.config
} | odd/ddd-leaven-akka-v2 | sales/write-front/src/main/scala/ecommerce/sales/app/SalesFrontConfiguration.scala | Scala | mit | 714 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content rather than a deeper analysis.