package algorithms.warmup;
import java.util.Scanner;
/*
* Problem Statement:
* You're given an array containing integer values. You need to print the
* fraction of positive numbers, negative numbers and zeroes relative to the
* total number of elements. Print each fraction correct to 3 decimal places.
*
* Input Format:
* First line contains N, which is the size of the array.
* Next line contains N integers A1,A2,A3,...,AN, separated by space.
*
* Constraints:
* 1 <= N <= 100
* -100 <= Ai <= 100
*
* Output Format:
* Output three values on separate lines: the fraction of positive numbers,
* negative numbers and zeroes relative to the total number of elements,
* respectively, correct to 3 decimal places.
*
* Sample Input:
* 6
* -4 3 -9 0 4 1
*
* Sample Output:
* 0.500
* 0.333
* 0.167
*/
public class PlusMinus {
public static void main(String[] args) {
Scanner sc = new Scanner(System.in);
int numElements = sc.nextInt();
int numPos = 0;
int numNeg = 0;
int numZero = 0;
//int num;
for (int i = 0; i < numElements; i++) {
int num = sc.nextInt();
if (num > 0) {
numPos++;
} else if (num < 0) {
numNeg++;
} else {
numZero++;
}
}
sc.close();
double percentPos = (double) numPos / numElements;
double percentNeg = (double) numNeg / numElements;
double percentZero = (double) numZero / numElements;
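// Worked example (from the sample above): for 6 values with 3 positive, 2 negative
// and 1 zero, the printed fractions are 3/6 = 0.500, 2/6 = 0.333 and 1/6 = 0.167.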
System.out.printf("%.3f %n", percentPos);
System.out.printf("%.3f %n", percentNeg);
System.out.printf("%.3f", percentZero);
}
}
|
package ibis.smartsockets.virtual.modules.direct;
import ibis.smartsockets.SmartSocketsProperties;
import ibis.smartsockets.direct.DirectServerSocket;
import ibis.smartsockets.direct.DirectSocket;
import ibis.smartsockets.direct.DirectSocketAddress;
import ibis.smartsockets.direct.DirectSocketFactory;
import ibis.smartsockets.util.TypedProperties;
import ibis.smartsockets.virtual.NonFatalIOException;
import ibis.smartsockets.virtual.VirtualSocket;
import ibis.smartsockets.virtual.VirtualSocketAddress;
import ibis.smartsockets.virtual.modules.AbstractDirectModule;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
public class Direct extends AbstractDirectModule {
private static int DEFAULT_CONNECT_TIMEOUT = 3000;
private final DirectSocketFactory direct;
private AcceptThread acceptThread;
private DirectServerSocket server;
private int defaultSendBuffer;
private int defaultReceiveBuffer;
private boolean count = false;
private class AcceptThread extends Thread {
AcceptThread() {
super("DirectModule AcceptThread");
setDaemon(true);
}
public void run() {
while (true) {
handleAccept();
}
}
}
public Direct(DirectSocketFactory direct) {
super("ConnectModule(Direct)", false);
// Store the direct socket factory for later use
this.direct = direct;
}
public void initModule(TypedProperties properties) throws Exception {
// Retrieve the value of the port property (if set). Default value
// is '0' (any available port).
int port = 0;
// if (properties != null) {
// port = properties.getIntProperty(
// SmartSocketsProperties.PORT, 0);
// TODO: why the default ??
TypedProperties p = SmartSocketsProperties.getDefaultProperties();
count = p.booleanProperty(SmartSocketsProperties.DIRECT_COUNT, false);
int backlog =
p.getIntProperty(SmartSocketsProperties.DIRECT_BACKLOG, 100);
defaultReceiveBuffer = p.getIntProperty(
SmartSocketsProperties.DIRECT_RECEIVE_BUFFER, -1);
defaultSendBuffer = p.getIntProperty(
SmartSocketsProperties.DIRECT_SEND_BUFFER, -1);
// Create a server socket to accept incoming connections.
HashMap <String, String> prop = new HashMap<String, String>(3);
prop.put("PortForwarding", "yes");
prop.put("ForwardingMayFail", "yes");
prop.put("SameExternalPort", "no");
try {
if (logger.isDebugEnabled()) {
logger.debug(module + ": Creating ServerSocket on port " + port);
}
server = direct.createServerSocket(port, backlog,
defaultReceiveBuffer, prop);
if (logger.isInfoEnabled()) {
logger.info(module + ": ServerSocket created: "
+ server.getAddressSet());
}
} catch (IOException e) {
logger.info(module + ": Failed to initialize direct module "
+ port, e);
throw e;
//throw new Exception("Failed to initialize direct module: "
// + e.getMessage() + ")", e);
}
if (logger.isInfoEnabled()) {
logger.info(module + ": Starting AcceptThread");
}
// Finally start a thread to handle the incoming connections.
acceptThread = new AcceptThread();
acceptThread.start();
}
public void startModule() throws Exception {
// nothing to do here...
}
public DirectSocketAddress getAddresses() {
return server.getAddressSet();
}
/*
private boolean checkTarget(SocketAddressSet target) {
// TODO: implement
return true;
}
private void handleSocket(DirectSocket s) {
DataInputStream in = null;
DataOutputStream out = null;
if (logger.isDebugEnabled()) {
logger.debug(name + ": Got incoming connection on " + s);
}
try {
in = new DataInputStream(s.getInputStream());
out = new DataOutputStream(s.getOutputStream());
SocketAddressSet target = new SocketAddressSet(in.readUTF());
int targetPort = in.readInt();
if (logger.isDebugEnabled()) {
logger.debug(name + ": Target port " + targetPort);
}
// First check if we are the desired target machine...
if (!checkTarget(target)) {
out.write(WRONG_MACHINE);
out.flush();
DirectSocketFactory.close(s, out, in);
if (logger.isDebugEnabled()) {
logger.debug(name + ": Connection failed, WRONG machine!");
}
return;
}
// Next check if the port exists locally
VirtualServerSocket vss = parent.getServerSocket(targetPort);
if (vss == null) {
out.write(PORT_NOT_FOUND);
out.flush();
DirectSocketFactory.close(s, out, in);
if (logger.isDebugEnabled()) {
logger.debug(name + ": Connection failed, PORT not found!");
}
return;
}
if (logger.isDebugEnabled()) {
logger.debug(name + ": Connection seems OK, checking is " +
"server is willing to accept");
}
// Next check if the serverSocket is willing to accept
DirectVirtualSocket dvs = new DirectVirtualSocket(
new VirtualSocketAddress(target, targetPort), s, out, in, null);
boolean accept = vss.incomingConnection(dvs);
if (!accept) {
out.write(CONNECTION_REJECTED);
out.flush();
DirectSocketFactory.close(s, out, in);
if (logger.isDebugEnabled()) {
logger.debug(name + ": Connection failed, REJECTED!");
}
return;
}
} catch (Exception e) {
logger.warn(name + ": Got exception during connection setup!", e);
DirectSocketFactory.close(s, out, in);
}
}
*/
void handleAccept() {
try {
handleAccept(server.accept());
} catch (IOException e) {
logger.warn(module + ": Got exception while waiting " +
"for connection!, waiting one second, then retrying", e);
try {
Thread.sleep(1000);
} catch (Exception e2) {
//IGNORE
}
}
}
public VirtualSocket connect(VirtualSocketAddress target, int timeout,
Map<String, Object> properties) throws NonFatalIOException {
// outgoingConnectionAttempts++;
int sendBuffer = defaultSendBuffer;
int receiveBuffer = defaultReceiveBuffer;
if (properties != null) {
Integer tmp = (Integer) properties.get("sendbuffer");
if (tmp != null) {
sendBuffer = tmp;
}
tmp = (Integer) properties.get("receivebuffer");
if (tmp != null) {
receiveBuffer = tmp;
}
}
try {
DirectSocket s = direct.createSocket(target.machine(), timeout, 0,
sendBuffer, receiveBuffer, properties, false,
target.port());
// Next, we wrap the direct socket in a virtual socket and return it.
// Any exceptions thrown here are forwarded to the user. Note that
// the connection setup is not complete yet, but the rest of it is
// in generic code.
return createVirtualSocket(target, s);
} catch (IOException e) {
// Failed to create the connection, but other modules may be more
// successful.
// failedOutgoingConnections++;
throw new NonFatalIOException(e);
}
}
public boolean matchAdditionalRuntimeRequirements(Map<String, ?> requirements) {
// No additional properties, so always matches requirements.
return true;
}
// Called when incoming connections are accepted
protected VirtualSocket createVirtualSocket(VirtualSocketAddress a,
DirectSocket s, OutputStream out, InputStream in) {
return new DirectVirtualSocket(a, s, out, in, count, null);
}
private VirtualSocket createVirtualSocket(VirtualSocketAddress a,
DirectSocket s) throws IOException {
InputStream in = null;
OutputStream out = null;
try {
if (s != null) {
in = s.getInputStream();
out = s.getOutputStream();
}
return new DirectVirtualSocket(a, s, out, in, count, null);
} catch (IOException e) {
// This module worked fine, but we got a 'normal' exception while
// connecting (i.e., because the other side refused the connection).
// There is no use trying other modules.
DirectSocketFactory.close(s, out, in);
throw e;
}
}
@Override
public int getDefaultTimeout() {
return DEFAULT_CONNECT_TIMEOUT;
}
}
|
package interdroid.vdb.persistence.impl;
import interdroid.vdb.persistence.api.DirtyCheckoutException;
import interdroid.vdb.persistence.api.MergeInProgressException;
import interdroid.vdb.persistence.api.MergeInfo;
import interdroid.vdb.persistence.api.VdbCheckout;
import interdroid.vdb.persistence.api.VdbInitializer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.CharBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.eclipse.jgit.api.AddCommand;
import org.eclipse.jgit.api.CommitCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.ConcurrentRefUpdateException;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.api.errors.NoFilepatternException;
import org.eclipse.jgit.api.errors.NoHeadException;
import org.eclipse.jgit.api.errors.NoMessageException;
import org.eclipse.jgit.api.errors.WrongRepositoryStateException;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import android.database.sqlite.SQLiteDatabase;
@SuppressWarnings("deprecation")
public class VdbCheckoutImpl implements VdbCheckout {
private static final Logger logger = LoggerFactory.getLogger(VdbCheckoutImpl.class);
private static final String SCHEMA_FILE = "schema";
private final VdbRepositoryImpl parentRepo_;
private final Repository gitRepo_;
private final String checkoutName_;
private final File checkoutDir_;
private MergeInfo mergeInfo_;
private SQLiteDatabase db_;
private boolean wasDeleted_;
/**
* We protect access to the sqlite database by using this lock.
* The read/write lock DOES NOT correspond to reading or writing
* the database.
*
* Instead - the read lock is used for accessing the database both
* for ro or rw modes, while the write lock is used for exclusively
* locking the checkout directory for commits.
*/
private final ReentrantReadWriteLock accessLock_ = new ReentrantReadWriteLock();
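// In other words (summarising the methods below): getDatabase()/releaseDatabase()
// take and release the read lock, while commit() and delete() take the write lock
// so they run with exclusive access to the checkout directory.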
private static final String BRANCH_REF_PREFIX = Constants.R_HEADS;
private static final String SQLITEDB = "sqlite.db";
private static final String MERGEINFO = "MERGE_INFO";
public VdbCheckoutImpl(VdbRepositoryImpl parentRepo, String checkoutName)
{
parentRepo_ = parentRepo;
checkoutName_ = checkoutName;
checkoutDir_ = new File(parentRepo.getRepositoryDir(), checkoutName);
gitRepo_ = parentRepo.getGitRepository(checkoutName);
if (!checkoutDir_.isDirectory()) { // assume it's already checked out
throw new RuntimeException("Not checked out yet.");
}
loadMergeInfo();
}
@Override
public synchronized void commit(String authorName, String authorEmail, String msg)
throws IOException, MergeInProgressException
{
checkDeletedState();
if (logger.isDebugEnabled())
logger.debug("commit on " + checkoutName_);
if (mergeInfo_ != null && !mergeInfo_.resolved_) {
throw new MergeInProgressException();
}
try {
if (!accessLock_.writeLock().tryLock(5, TimeUnit.SECONDS)) {
throw new RuntimeException("Timeout waiting for the locked database for commit.");
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
try {
commitImpl(authorName, authorEmail, msg);
} finally {
accessLock_.writeLock().unlock();
}
}
private synchronized void commitImpl(String authorName, String authorEmail, String msg)
throws IOException, MergeInProgressException
{
if (mergeInfo_ != null && !mergeInfo_.resolved_) {
throw new MergeInProgressException();
}
Git git = new Git(gitRepo_);
CommitCommand commit = git.commit();
AddCommand add = git.add();
add.addFilepattern(SQLITEDB);
add.addFilepattern(SCHEMA_FILE);
try {
add.call();
} catch (NoFilepatternException e) {
throw new IOException(e);
}
PersonIdent author = new PersonIdent(authorName, authorEmail);
commit.setAuthor(author);
commit.setCommitter(author);
RevCommit revision;
try {
revision = commit.call();
} catch (Exception e) {
throw new IOException(e);
}
if (mergeInfo_ != null) {
// successfully committed the merge, get back to normal mode
mergeInfo_ = null;
saveMergeInfo();
detachMergeDatabases();
}
if (logger.isDebugEnabled())
logger.debug("Succesfully committed revision "
+ revision.getName().toString() + " on branch "
+ checkoutName_);
}
public static VdbCheckoutImpl createMaster(VdbRepositoryImpl parentRepo,
VdbInitializer initializer)
throws IOException
{
VdbCheckoutImpl branch = null;
if (logger.isDebugEnabled())
logger.debug("Creating master for: " + parentRepo.getName());
File masterDir = new File(parentRepo.getRepositoryDir(), Constants.MASTER);
if (!masterDir.mkdirs()) {
throw new IOException("Unable to create directory: " + masterDir.getCanonicalPath());
}
if (initializer != null) {
SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(new File(masterDir, SQLITEDB), null);
db.setVersion(1);
initializer.onCreate(db);
File schema = new File(masterDir, SCHEMA_FILE);
if (!schema.createNewFile()) {
throw new RuntimeException("Unable to create schema file");
}
FileOutputStream fos = new FileOutputStream(schema);
fos.write(initializer.getSchema().getBytes("utf8"));
fos.close();
branch = new VdbCheckoutImpl(parentRepo, Constants.MASTER);
branch.setDb(db);
try {
branch.commit("Versioning Daemon", "vd@localhost", "Initial schema-only version.");
} catch (MergeInProgressException e) {
// should never happen because we're surely not in merge mode
throw new RuntimeException(e);
}
}
return branch;
}
private synchronized void setDb(SQLiteDatabase db) {
db_ = db;
}
private synchronized void openDatabase()
{
if (db_ == null) {
db_ = SQLiteDatabase.openDatabase(new File(checkoutDir_, SQLITEDB).getAbsolutePath(),
null /* cursor factory */, SQLiteDatabase.OPEN_READWRITE);
try {
attachMergeDatabases();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
private synchronized void detachMergeDatabases()
{
db_.execSQL("DETACH DATABASE base");
db_.execSQL("DETACH DATABASE ours");
db_.execSQL("DETACH DATABASE theirs");
}
private synchronized void attachMergeDatabases() throws IOException
{
openDatabase();
MergeInfo mergeInfo = getMergeInfo();
if (mergeInfo != null) {
// mergeInfo.baseCommit = mergeInfo.ourCommit = mergeInfo.theirCommit
// = gitRepo_.resolve(BRANCH_REF_PREFIX + checkoutName_).getName();
File baseCheckout = parentRepo_.checkoutCommit(mergeInfo.baseCommit_);
File oursCheckout = parentRepo_.checkoutCommit(mergeInfo.ourCommit_);
File theirsCheckout = parentRepo_.checkoutCommit(mergeInfo.theirCommit_);
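// Attach the three checkouts so merge queries on this connection can refer to
// their tables as base.*, ours.* and theirs.*.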
db_.execSQL("ATTACH DATABASE '" + new File(baseCheckout, SQLITEDB).getAbsolutePath()
+ "' AS base");
db_.execSQL("ATTACH DATABASE '" + new File(oursCheckout, SQLITEDB).getAbsolutePath()
+ "' AS ours");
db_.execSQL("ATTACH DATABASE '" + new File(theirsCheckout, SQLITEDB).getAbsolutePath()
+ "' AS theirs");
}
}
private synchronized SQLiteDatabase getDatabase() {
openDatabase();
try {
if (accessLock_.readLock().tryLock(5, TimeUnit.SECONDS)) {
return db_;
}
} catch (InterruptedException e) {
logger.warn("Ignoring interupted exception: ", e);
}
throw new RuntimeException("Timeout waiting for the locked database.");
}
@Override
public synchronized SQLiteDatabase getReadOnlyDatabase() throws IOException {
checkDeletedState();
return getDatabase();
}
@Override
public synchronized SQLiteDatabase getReadWriteDatabase() throws IOException {
checkDeletedState();
return getDatabase();
}
@Override
public synchronized void releaseDatabase() {
checkDeletedState();
accessLock_.readLock().unlock();
}
private synchronized void loadMergeInfo()
{
File infoFile = new File(checkoutDir_, MERGEINFO);
try {
FileInputStream fis = new FileInputStream(infoFile);
ObjectInputStream ois = new ObjectInputStream(fis);
mergeInfo_ = (MergeInfo) ois.readObject();
ois.close();
} catch (FileNotFoundException e) {
mergeInfo_ = null;
} catch (IOException e) {
throw new RuntimeException("Error while reading MergeInformation from "
+ infoFile, e);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Error while reading MergeInformation from "
+ infoFile, e);
}
}
private synchronized void saveMergeInfo()
{
File infoFile = new File(checkoutDir_, MERGEINFO);
if (mergeInfo_ == null) {
if(!infoFile.delete()) {
logger.warn("Error deleting: {}", infoFile);
}
} else {
try {
FileOutputStream fos = new FileOutputStream(infoFile);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(mergeInfo_);
oos.close();
} catch (FileNotFoundException e) {
throw new RuntimeException("Could not open " + infoFile.getAbsolutePath()
+ " for writing");
} catch (IOException e) {
throw new RuntimeException("Error while reading MergeInformation from "
+ infoFile, e);
}
}
}
@Override
public synchronized MergeInfo getMergeInfo()
{
checkDeletedState();
return (mergeInfo_ != null) ? mergeInfo_.clone() : null;
}
@Override
public synchronized void doneMerge() throws IllegalStateException
{
checkDeletedState();
if (mergeInfo_ == null) {
throw new IllegalStateException("Branch was not in merge mode.");
}
mergeInfo_.resolved_ = true;
saveMergeInfo();
}
@Override
public synchronized void revert() throws IOException
{
checkDeletedState();
try {
Runtime.getRuntime().exec(new String[] {"rm", "-r",
checkoutDir_.getAbsolutePath()}).waitFor();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupt not allowed.");
}
parentRepo_.checkoutBranch(checkoutName_);
mergeInfo_ = null;
}
@Override
public synchronized void startMerge(String theirSha1)
throws MergeInProgressException, DirtyCheckoutException, IOException
{
checkDeletedState();
if (mergeInfo_ != null) {
throw new MergeInProgressException();
}
// TODO(emilian): throw DirtyCheckoutException
MergeInfo info = new MergeInfo();
try {
AnyObjectId theirCommit = gitRepo_.resolve(theirSha1);
AnyObjectId ourCommit = gitRepo_.getRef(BRANCH_REF_PREFIX + checkoutName_).getObjectId();
info.theirCommit_ = theirCommit.getName();
info.ourCommit_ = ourCommit.getName();
info.baseCommit_ =
parentRepo_.getMergeBase(theirCommit, ourCommit).getId().getName();
info.resolved_ = false;
} catch (IOException e) {
throw new RuntimeException(e);
}
// Only now save the merge state to the member variable to prevent invalid merge
// state in case part of the above operations fail.
mergeInfo_ = info;
saveMergeInfo();
attachMergeDatabases();
}
private void checkDeletedState()
{
if (wasDeleted_) {
throw new IllegalStateException("This checkout was deleted.");
}
}
private void doDelete(File path) throws IOException
{
if (path.isDirectory()) {
for (File child : path.listFiles()) {
doDelete(child);
}
}
if (!path.delete()) {
throw new IOException("Could not delete " + path);
}
}
@Override
public void delete()
{
checkDeletedState();
if (logger.isDebugEnabled())
logger.debug("delete called for " + checkoutName_);
try {
if (!accessLock_.writeLock().tryLock(5, TimeUnit.SECONDS)) {
throw new RuntimeException("Timeout waiting for exclusive lock on database.");
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
try {
wasDeleted_ = true;
doDelete(checkoutDir_);
} catch (IOException e) {
throw new RuntimeException("Could not delete checkout.", e);
} finally {
accessLock_.writeLock().unlock();
parentRepo_.releaseCheckout(checkoutName_);
}
}
@Override
public String getSchema() throws IOException {
File schema = new File(checkoutDir_, SCHEMA_FILE);
if (!schema.canRead()) {
throw new RuntimeException("Unable to read schema file");
}
BufferedReader reader = new BufferedReader(new FileReader(schema));
CharBuffer target = null;
try {
target = CharBuffer.allocate((int) schema.length());
reader.read(target);
target.flip(); // rewind the buffer so toString() returns what was just read
} finally {
reader.close();
}
return target == null ? "" : target.toString();
}
}
|
package org.apache.lucene.analysis;
import java.io.IOException;
/**
* Normalizes token text to lower case.
*
* @version $Id$
*/
public final class LowerCaseFilter extends TokenFilter {
public LowerCaseFilter(TokenStream in) {
super(in);
}
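// Illustrative usage (not part of this class): wrap any TokenStream, e.g.
// TokenStream stream = new LowerCaseFilter(someTokenizer);
// each Token returned by next() then has its termText lower-cased.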
public final Token next() throws IOException {
Token t = input.next();
if (t == null)
return null;
t.termText = t.termText.toLowerCase();
return t;
}
}
|
package me.teaisaweso.client.graphmanagers;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import me.teaisaweso.client.EdgeDrawable;
import me.teaisaweso.client.VertexDrawable;
import me.teaisaweso.shared.Edge;
import me.teaisaweso.shared.Graph;
import me.teaisaweso.shared.Vertex;
import me.teaisaweso.shared.VertexDirection;
public class GraphManager2dImpl implements GraphManager2d {
private List<Vertex> mVertices = new ArrayList<Vertex>();
private List<Edge> mEdges = new ArrayList<Edge>();
private Map<Vertex, VertexDrawable> mVertexRenderMap = new HashMap<Vertex, VertexDrawable>();
protected GraphManager2dImpl() {
}
@Override
public Graph getUnderlyingGraph() {
// TODO Auto-generated method stub
return null;
}
@Override
public void addVertex(Vertex v, int xPosition, int yPosition, int size) {
mVertices.add(v);
// left and top are x and y - size/2
int halfSize = size / 2;
int left = xPosition - halfSize;
int top = yPosition - halfSize;
mVertexRenderMap.put(v, new VertexDrawable(left, top, size, size, v.getLabel()));
}
@Override
public void removeVertex(Vertex v) {
mVertices.remove(v);
mVertexRenderMap.remove(v);
}
@Override
public void moveVertexTo(Vertex v, int xPosition, int yPosition) {
VertexDrawable vd = mVertexRenderMap.get(v);
int halfWidth = vd.getWidth() / 2;
int halfHeight = vd.getHeight() / 2;
int left = xPosition - halfWidth;
int top = yPosition - halfHeight;
vd.updateBoundingRectangle(left, top, vd.getWidth(), vd.getHeight());
}
@Override
public void scaleVertex(Vertex v, int newSize) {
VertexDrawable vd = mVertexRenderMap.get(v);
// scale around the vertex centre so the drawable does not drift
int newLeft = vd.getCenterX() - newSize / 2;
int newTop = vd.getCenterY() - newSize / 2;
int newWidth = newSize;
int newHeight = newSize;
vd.updateBoundingRectangle(newLeft, newTop, newWidth, newHeight);
}
@Override
public void addEdge(Vertex v1, Vertex v2, VertexDirection dir) {
mEdges.add(new Edge(v1, v2, dir));
}
@Override
public Collection<VertexDrawable> getVertexDrawables() {
return mVertexRenderMap.values();
}
@Override
public Collection<EdgeDrawable> getEdgeDrawables() {
List<EdgeDrawable> result = new ArrayList<EdgeDrawable>(mEdges.size());
for (Edge e : mEdges) {
VertexDrawable v1 = mVertexRenderMap.get(e.getFromVertex());
VertexDrawable v2 = mVertexRenderMap.get(e.getToVertex());
int l1 = v1.getCenterX();
int l2 = v2.getCenterX();
int t1 = v1.getCenterY();
int t2 = v2.getCenterY();
// swap l1 and t1 with l2 and t2 if we're entering the "from" node
// NOTE: that's an in place swap algorithm using xor
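// e.g. (illustrative): (l1, t1) = (10, 20) and (l2, t2) = (30, 40) become
// (30, 40) and (10, 20) after the six XOR assignments below.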
if (e.enters(e.getFromVertex())) {
l1 ^= l2;
l2 ^= l1;
l1 ^= l2;
t1 ^= t2;
t2 ^= t1;
t1 ^= t2;
}
result.add(new EdgeDrawable(l1, t1, l2, t2, e.hasDirection()));
}
return result;
}
@Override
public void removeEdge(Edge e) {
mEdges.remove(e);
}
@Override
public void removeAllEdges(Vertex v1, Vertex v2) {
List<Edge> toDelete = new ArrayList<Edge>();
for (Edge e : mEdges) {
if ((e.enters(v1) || e.exits(v1)) && (e.enters(v2) || e.exits(v2))) {
toDelete.add(e);
}
}
mEdges.removeAll(toDelete);
}
}
|
package de.team33.patterns.random.tarvos;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static java.util.Collections.unmodifiableMap;
final class Types {
private static final Map<Type, Collection<Type>> MATCHING = newMatching();
private Types() {
}
private static Map<Type, Collection<Type>> newMatching() {
final Map<Type, Collection<Type>> result = new HashMap<>();
for (final Primary primary : Primary.values()) {
result.put(primary.type, primary.matching);
result.put(primary.boxed, primary.matching);
}
return unmodifiableMap(result);
}
static Naming naming(final Type type) {
final Class<? extends Type> typeClass = type.getClass();
return Stream.of(Naming.values())
.filter(value -> value.typeClass.isAssignableFrom(typeClass))
.findAny()
.orElseThrow(() -> new NoSuchElementException(format("No entry found for type <%s>",
typeClass)));
}
static Object defaultValue(final Type type) {
return Stream.of(Primary.values())
.filter(primary -> type.equals(primary.type))
.findAny()
.map(primary -> primary.value)
.orElse(null);
}
static boolean isMatching(final Type desired, final Type found) {
return Optional.ofNullable(MATCHING.get(found))
.orElseGet(() -> singleton(found))
.contains(desired);
}
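// For example (illustrative): isMatching(int.class, Integer.class) and
// isMatching(Integer.class, int.class) are both true, because both keys map to the
// [int, Integer] pair above, while defaultValue(int.class) yields 0 and
// defaultValue(String.class) yields null.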
@SuppressWarnings("PackageVisibleField")
private enum Primary {
BOOLEAN(boolean.class, Boolean.class, false),
BYTE(byte.class, Byte.class, (byte) 0),
SHORT(short.class, Short.class, (short) 0),
INT(int.class, Integer.class, 0),
LONG(long.class, Long.class, 0L),
FLOAT(float.class, Float.class, 0.0f),
DOUBLE(double.class, Double.class, 0.0),
CHAR(char.class, Character.class, '\0');
final Type type;
final Type boxed;
final List<Type> matching;
final Object value;
<T> Primary(final Class<T> type, final Class<T> boxed, final T value) {
this.type = type;
this.boxed = boxed;
this.value = value;
this.matching = asList(type, boxed);
}
}
enum Naming {
CLASS(Class.class, Class::getSimpleName, type -> ""),
PARAMETERIZED(ParameterizedType.class, Naming::toSimpleName, Naming::toParameters),
OTHER(Type.class, Type::getTypeName, type -> "");
private final Class<?> typeClass;
@SuppressWarnings("rawtypes")
private final Function toSimpleName;
@SuppressWarnings("rawtypes")
private final Function toParameters;
<T extends Type> Naming(final Class<T> typeClass,
final Function<T, String> toSimpleName,
final Function<T, String> toParameters) {
this.typeClass = typeClass;
this.toSimpleName = toSimpleName;
this.toParameters = toParameters;
}
private static String toParameters(final ParameterizedType type) {
return Arrays.stream(type.getActualTypeArguments())
.map(pType -> naming(pType).simpleName(pType))
.collect(Collectors.joining(", ", "<", ">"));
}
private static String toSimpleName(final ParameterizedType type) {
final Type rawType = type.getRawType();
return naming(rawType).simpleName(rawType);
}
final String simpleName(final Type type) {
//noinspection unchecked
return (String) toSimpleName.apply(type);
}
final String parameterizedName(final Type type) {
//noinspection unchecked
return simpleName(type) + toParameters.apply(type);
}
}
}
|
package org.aesh.readline.action.mappings;
import org.aesh.readline.InputProcessor;
import org.aesh.readline.ReadlineFlag;
import org.aesh.readline.action.Action;
import org.aesh.readline.terminal.Key;
import org.aesh.utils.Config;
public class EndOfFile implements Action {
private int EOFCounter = 0;
private int ignoreEOFSize = -1;
@Override
public String name() {
return "eof";
}
@Override
public void accept(InputProcessor inputProcessor) {
//always do this first
if(ignoreEOFSize < 0) {
ignoreEOFSize = inputProcessor.flags().getOrDefault(ReadlineFlag.IGNORE_EOF, 0);
}
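// With an empty buffer this behaves much like bash's ignoreeof: when IGNORE_EOF is
// set to N, the counter below absorbs the first N consecutive ctrl-d presses and the
// connection is only closed on press N + 1 (a reading of the logic below).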
//if buffer.length > 0 delete-char
if(inputProcessor.buffer().buffer().length() > 0) {
new DeleteChar().accept(inputProcessor);
}
else {
//reset EOFCounter if prev key != ctrl-d
if(EOFCounter > 0 && inputProcessor.editMode().prevKey() != null &&
inputProcessor.editMode().prevKey().getCodePointAt(0) != Key.CTRL_D.getFirstValue())
EOFCounter = 0;
if(ignoreEOFSize > EOFCounter)
EOFCounter++;
else {
//we got a eof, close the connection and call finish
inputProcessor.connection().write(Config.getLineSeparator());
inputProcessor.connection().close();
}
}
}
}
|
package org.jivesoftware.wildfire.ldap;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.Log;
import org.jivesoftware.wildfire.user.UserNotFoundException;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import java.net.URLEncoder;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Centralized administration of LDAP connections. The {@link #getInstance()} method
* should be used to get an instance. The following properties configure this manager:
*
* <ul>
* <li>ldap.host</li>
* <li>ldap.port</li>
* <li>ldap.baseDN</li>
* <li>ldap.alternateBaseDN</li>
* <li>ldap.adminDN</li>
* <li>ldap.adminPassword</li>
* <li>ldap.usernameField -- default value is "uid".</li>
* <li>ldap.nameField -- default value is "cn".</li>
* <li>ldap.emailField -- default value is "mail".</li>
* <li>ldap.searchFilter -- the filter used to load the list of users. When defined, it
* will be used with the default filter, which is "([usernameField]={0})" where
* [usernameField] is the value of ldap.usernameField.</li>
* <li>ldap.groupNameField</li>
* <li>ldap.groupMemberField</li>
* <li>ldap.groupDescriptionField</li>
* <li>ldap.posixMode</li>
* <li>ldap.groupSearchFilter</li>
* <li>ldap.debugEnabled</li>
* <li>ldap.sslEnabled</li>
* <li>ldap.autoFollowReferrals</li>
* <li>ldap.initialContextFactory -- if this value is not specified,
* "com.sun.jndi.ldap.LdapCtxFactory" will be used.</li>
* <li>ldap.connectionPoolEnabled -- true if an LDAP connection pool should be used.
* False if not set.</li>
* </ul>
*
* @author Matt Tucker
*/
public class LdapManager {
private static LdapManager instance;
static {
// Create a special Map implementation to wrap XMLProperties. We only implement
// the get, put, and remove operations, since those are the only ones used. Using a Map
// makes it easier to perform LdapManager testing.
Map<String, String> properties = new Map<String, String>() {
public String get(Object key) {
return JiveGlobals.getXMLProperty((String)key);
}
public String put(String key, String value) {
JiveGlobals.setProperty(key, value);
// Always return null since XMLProperties doesn't support the normal semantics.
return null;
}
public String remove(Object key) {
JiveGlobals.deleteProperty((String)key);
// Always return null since XMLProperties doesn't support the normal semantics.
return null;
}
public int size() {
return 0;
}
public boolean isEmpty() {
return false;
}
public boolean containsKey(Object key) {
return false;
}
public boolean containsValue(Object value) {
return false;
}
public void putAll(Map<? extends String, ? extends String> t) {
}
public void clear() {
}
public Set<String> keySet() {
return null;
}
public Collection<String> values() {
return null;
}
public Set<Entry<String, String>> entrySet() {
return null;
}
};
instance = new LdapManager(properties);
}
private Collection<String> hosts = new ArrayList<String>();
private int port;
private String usernameField;
private String nameField;
private String emailField;
private String baseDN;
private String alternateBaseDN = null;
private String adminDN = null;
private String adminPassword;
private boolean ldapDebugEnabled = false;
private boolean sslEnabled = false;
private String initialContextFactory;
private boolean followReferrals = false;
private boolean connectionPoolEnabled = true;
private String searchFilter = null;
private boolean subTreeSearch;
private boolean encloseUserDN;
private String groupNameField;
private String groupMemberField;
private String groupDescriptionField;
private boolean posixMode = false;
private String groupSearchFilter = null;
private Pattern userDNPattern;
private Map<String, String> properties;
/**
* Provides singleton access to an instance of the LdapManager class.
*
* @return an LdapManager instance.
*/
public static LdapManager getInstance() {
return instance;
}
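/*
 * Illustrative usage sketch (not part of this class), using methods defined below:
 *
 *   LdapManager manager = LdapManager.getInstance();
 *   String userDN = manager.findUserDN("mtucker");              // relative to baseDN, may throw
 *   boolean ok = manager.checkAuthentication(userDN, "secret");
 */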
/**
* Constructs a new LdapManager instance. Typically, {@link #getInstance()} should be
* called instead of this method. LdapManager instances should only be created directly
* for testing purposes.
*
* @param properties the Map that contains properties used by the LDAP manager, such as
* LDAP host and base DN.
*/
public LdapManager(Map<String, String> properties) {
this.properties = properties;
String host = properties.get("ldap.host");
// Parse the property and check if many hosts were defined. Hosts can be separated
// by commas or white spaces
StringTokenizer st = new StringTokenizer(host, " ,\t\n\r\f");
while (st.hasMoreTokens()) {
hosts.add(st.nextToken());
}
String portStr = properties.get("ldap.port");
port = 389;
if (portStr != null) {
try {
this.port = Integer.parseInt(portStr);
}
catch (NumberFormatException nfe) {
Log.error(nfe);
}
}
usernameField = properties.get("ldap.usernameField");
if (usernameField == null) {
usernameField = "uid";
}
baseDN = properties.get("ldap.baseDN");
if (baseDN == null) {
baseDN = "";
}
alternateBaseDN = properties.get("ldap.alternateBaseDN");
nameField = properties.get("ldap.nameField");
if (nameField == null) {
nameField = "cn";
}
emailField = properties.get("ldap.emailField");
if (emailField == null) {
emailField = "mail";
}
connectionPoolEnabled = true;
String connectionPoolStr = properties.get("ldap.connectionPoolEnabled");
if (connectionPoolStr != null) {
connectionPoolEnabled = Boolean.valueOf(connectionPoolStr);
}
searchFilter = properties.get("ldap.searchFilter");
subTreeSearch = true;
String subTreeStr = properties.get("ldap.subTreeSearch");
if (subTreeStr != null) {
subTreeSearch = Boolean.valueOf(subTreeStr);
}
groupNameField = properties.get("ldap.groupNameField");
if (groupNameField == null) {
groupNameField = "cn";
}
groupMemberField = properties.get("ldap.groupMemberField");
if (groupMemberField ==null) {
groupMemberField = "member";
}
groupDescriptionField = properties.get("ldap.groupDescriptionField");
if (groupDescriptionField == null) {
groupDescriptionField = "description";
}
posixMode = false;
String posixStr = properties.get("ldap.posixMode");
if (posixStr != null) {
posixMode = Boolean.valueOf(posixStr);
}
groupSearchFilter = properties.get("ldap.groupSearchFilter");
adminDN = properties.get("ldap.adminDN");
if (adminDN != null && adminDN.trim().equals("")) {
adminDN = null;
}
adminPassword = properties.get("ldap.adminPassword");
ldapDebugEnabled = false;
String ldapDebugStr = properties.get("ldap.debugEnabled");
if (ldapDebugStr != null) {
ldapDebugEnabled = Boolean.valueOf(ldapDebugStr);
}
sslEnabled = false;
String sslEnabledStr = properties.get("ldap.sslEnabled");
if (sslEnabledStr != null) {
sslEnabled = Boolean.valueOf(sslEnabledStr);
}
followReferrals = false;
String followReferralsStr = properties.get("ldap.autoFollowReferrals");
if (followReferralsStr != null) {
followReferrals = Boolean.valueOf(followReferralsStr);
}
encloseUserDN = true;
String encloseUserStr = properties.get("ldap.encloseUserDN");
if (encloseUserStr != null) {
encloseUserDN = Boolean.valueOf(encloseUserStr);
}
// Set the pattern used to enclose userDN values in quotes.
userDNPattern = Pattern.compile("(=)([^\\\"][^=]*[^\\\"])(?:,|$)");
this.initialContextFactory = properties.get("ldap.initialContextFactory");
if (initialContextFactory != null) {
try {
Class.forName(initialContextFactory);
}
catch (ClassNotFoundException cnfe) {
Log.error("Initial context factory class failed to load: " + initialContextFactory +
". Using default initial context factory class instead.");
initialContextFactory = "com.sun.jndi.ldap.LdapCtxFactory";
}
}
// Use default value if none was set.
else {
initialContextFactory = "com.sun.jndi.ldap.LdapCtxFactory";
}
StringBuilder buf = new StringBuilder();
buf.append("Created new LdapManager() instance, fields:\n");
buf.append("\t host: ").append(hosts).append("\n");
buf.append("\t port: ").append(port).append("\n");
buf.append("\t usernamefield: ").append(usernameField).append("\n");
buf.append("\t baseDN: ").append(baseDN).append("\n");
buf.append("\t alternateBaseDN: ").append(alternateBaseDN).append("\n");
buf.append("\t nameField: ").append(nameField).append("\n");
buf.append("\t emailField: ").append(emailField).append("\n");
buf.append("\t adminDN: ").append(adminDN).append("\n");
buf.append("\t adminPassword: ").append(adminPassword).append("\n");
buf.append("\t searchFilter: ").append(searchFilter).append("\n");
buf.append("\t subTreeSearch:").append(subTreeSearch).append("\n");
buf.append("\t ldapDebugEnabled: ").append(ldapDebugEnabled).append("\n");
buf.append("\t sslEnabled: ").append(sslEnabled).append("\n");
buf.append("\t initialContextFactory: ").append(initialContextFactory).append("\n");
buf.append("\t connectionPoolEnabled: ").append(connectionPoolEnabled).append("\n");
buf.append("\t autoFollowReferrals: ").append(followReferrals).append("\n");
buf.append("\t groupNameField: ").append(groupNameField).append("\n");
buf.append("\t groupMemberField: ").append(groupMemberField).append("\n");
buf.append("\t groupDescriptionField: ").append(groupDescriptionField).append("\n");
buf.append("\t posixMode: ").append(posixMode).append("\n");
buf.append("\t groupSearchFilter: ").append(groupSearchFilter).append("\n");
if (Log.isDebugEnabled()) {
Log.debug(buf.toString());
}
if (ldapDebugEnabled) {
System.err.println(buf.toString());
}
}
/**
* Returns a DirContext for the LDAP server that can be used to perform
* lookups and searches using the default base DN. The context uses the
* admin login that is defined by <tt>adminDN</tt> and <tt>adminPassword</tt>.
*
* @return a connection to the LDAP server.
* @throws NamingException if there is an error making the LDAP connection.
*/
public LdapContext getContext() throws NamingException {
return getContext(baseDN);
}
/**
* Returns a DirContext for the LDAP server that can be used to perform
* lookups and searches using the specified base DN. The context uses the
* admin login that is defined by <tt>adminDN</tt> and <tt>adminPassword</tt>.
*
* @param baseDN the base DN to use for the context.
* @return a connection to the LDAP server.
* @throws NamingException if there is an error making the LDAP connection.
*/
public LdapContext getContext(String baseDN) throws NamingException {
boolean debug = Log.isDebugEnabled();
if (debug) {
Log.debug("Creating a DirContext in LdapManager.getContext()...");
}
// Set up the environment for creating the initial context
Hashtable<String, Object> env = new Hashtable<String, Object>();
env.put(Context.INITIAL_CONTEXT_FACTORY, initialContextFactory);
env.put(Context.PROVIDER_URL, getProviderURL(baseDN));
if (sslEnabled) {
env.put("java.naming.ldap.factory.socket",
"org.jivesoftware.util.SimpleSSLSocketFactory");
env.put(Context.SECURITY_PROTOCOL, "ssl");
}
// Use simple authentication to connect as the admin.
if (adminDN != null) {
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, adminDN);
if (adminPassword != null) {
env.put(Context.SECURITY_CREDENTIALS, adminPassword);
}
}
// No login information so attempt to use anonymous login.
else {
env.put(Context.SECURITY_AUTHENTICATION, "none");
}
if (ldapDebugEnabled) {
env.put("com.sun.jndi.ldap.trace.ber", System.err);
}
if (connectionPoolEnabled) {
env.put("com.sun.jndi.ldap.connect.pool", "true");
}
if (followReferrals) {
env.put(Context.REFERRAL, "follow");
}
if (debug) {
Log.debug("Created hashtable with context values, attempting to create context...");
}
// Create new initial context
LdapContext context = new InitialLdapContext(env, null);
if (debug) {
Log.debug("... context created successfully, returning.");
}
return context;
}
/**
* Returns true if the user is able to successfully authenticate against
* the LDAP server. The "simple" authentication protocol is used.
*
* @param userDN the user's dn to authenticate (relative to <tt>baseDN</tt>).
* @param password the user's password.
* @return true if the user successfully authenticates.
*/
public boolean checkAuthentication(String userDN, String password) {
boolean debug = Log.isDebugEnabled();
if (debug) {
Log.debug("In LdapManager.checkAuthentication(userDN, password), userDN is: " + userDN + "...");
}
DirContext ctx = null;
try {
// See if the user authenticates.
Hashtable<String, Object> env = new Hashtable<String, Object>();
env.put(Context.INITIAL_CONTEXT_FACTORY, initialContextFactory);
env.put(Context.PROVIDER_URL, getProviderURL(baseDN));
if (sslEnabled) {
env.put("java.naming.ldap.factory.socket",
"org.jivesoftware.util.SimpleSSLSocketFactory");
env.put(Context.SECURITY_PROTOCOL, "ssl");
}
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, userDN + "," + baseDN);
env.put(Context.SECURITY_CREDENTIALS, password);
// Specify timeout to be 10 seconds, only on non-SSL since SSL connections
// break with a timeout.
if (!sslEnabled) {
env.put("com.sun.jndi.ldap.connect.timeout", "10000");
}
if (ldapDebugEnabled) {
env.put("com.sun.jndi.ldap.trace.ber", System.err);
}
if (followReferrals) {
env.put(Context.REFERRAL, "follow");
}
if (debug) {
Log.debug("Created context values, attempting to create context...");
}
ctx = new InitialDirContext(env);
if (debug) {
Log.debug("... context created successfully, returning.");
}
}
catch (NamingException ne) {
// If an alt baseDN is defined, attempt a lookup there.
if (alternateBaseDN != null) {
try {
if (ctx != null) {
ctx.close();
}
}
catch (Exception e) {
Log.error(e);
}
try {
// See if the user authenticates.
Hashtable<String, Object> env = new Hashtable<String, Object>();
// Use a custom initial context factory if specified. Otherwise, use the default.
env.put(Context.INITIAL_CONTEXT_FACTORY, initialContextFactory);
env.put(Context.PROVIDER_URL, getProviderURL(alternateBaseDN));
if (sslEnabled) {
env.put("java.naming.ldap.factory.socket", "org.jivesoftware.util.SimpleSSLSocketFactory");
env.put(Context.SECURITY_PROTOCOL, "ssl");
}
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, userDN + "," + alternateBaseDN);
env.put(Context.SECURITY_CREDENTIALS, password);
// Specify timeout to be 10 seconds, only on non-SSL since SSL connections
// break with a timeout.
if (!sslEnabled) {
env.put("com.sun.jndi.ldap.connect.timeout", "10000");
}
if (ldapDebugEnabled) {
env.put("com.sun.jndi.ldap.trace.ber", System.err);
}
if (followReferrals) {
env.put(Context.REFERRAL, "follow");
}
if (debug) {
Log.debug("Created context values, attempting to create context...");
}
ctx = new InitialDirContext(env);
}
catch (NamingException e) {
if (debug) {
Log.debug("Caught a naming exception when creating InitialContext", ne);
}
return false;
}
}
else {
if (debug) {
Log.debug("Caught a naming exception when creating InitialContext", ne);
}
return false;
}
}
finally {
try {
if (ctx != null) {
ctx.close();
}
}
catch (Exception e) {
Log.error(e);
}
}
return true;
}
/**
* Finds a user's dn using their username. Normally, this search will
* be performed using the field "uid", but this can be changed by setting
* the <tt>usernameField</tt> property.<p>
*
* Searches are performed over all subtrees relative to the <tt>baseDN</tt>.
* If the search fails in the <tt>baseDN</tt> then another search will be
* performed in the <tt>alternateBaseDN</tt>. For example, if the <tt>baseDN</tt>
* is "o=jivesoftware, o=com" and we do a search for "mtucker", then we might
* find a userDN of "uid=mtucker,ou=People". This kind of searching is a good
* thing since it doesn't make the assumption that all user records are stored
* in a flat structure. However, it does add the requirement that "uid" field
* (or the other field specified) must be unique over the entire subtree from
* the <tt>baseDN</tt>. For example, it's entirely possible to create two dn's
* in your LDAP directory with the same uid: "uid=mtucker,ou=People" and
* "uid=mtucker,ou=Administrators". In such a case, it's not possible to
* uniquely identify a user, so this method will throw an error.<p>
*
* The dn that's returned is relative to the default <tt>baseDN</tt>.
*
* @param username the username to lookup the dn for.
* @return the dn associated with <tt>username</tt>.
* @throws Exception if the search for the dn fails.
*/
public String findUserDN(String username) throws Exception {
try {
return findUserDN(username, baseDN);
}
catch (Exception e) {
if (alternateBaseDN != null) {
return findUserDN(username, alternateBaseDN);
}
else {
throw e;
}
}
}
/**
* Finds a user's dn using their username in the specified baseDN. Normally, this search
* will be performed using the field "uid", but this can be changed by setting
* the <tt>usernameField</tt> property.<p>
*
* Searches are performed over all sub-trees relative to the <tt>baseDN</tt> unless
* sub-tree searching has been disabled. For example, if the <tt>baseDN</tt> is
* "o=jivesoftware, o=com" and we do a search for "mtucker", then we might find a userDN of
* "uid=mtucker,ou=People". This kind of searching is a good thing since
* it doesn't make the assumption that all user records are stored in a flat
* structure. However, it does add the requirement that "uid" field (or the
* other field specified) must be unique over the entire subtree from the
* <tt>baseDN</tt>. For example, it's entirely possible to create two dn's
* in your LDAP directory with the same uid: "uid=mtucker,ou=People" and
* "uid=mtucker,ou=Administrators". In such a case, it's not possible to
* uniquely identify a user, so this method will throw an error.<p>
*
* The DN that's returned is relative to the <tt>baseDN</tt>.
*
* @param username the username to lookup the dn for.
* @param baseDN the base DN to use for this search.
* @return the dn associated with <tt>username</tt>.
* @throws Exception if the search for the dn fails.
* @see #findUserDN(String) to search using the default baseDN and alternateBaseDN.
*/
public String findUserDN(String username, String baseDN) throws Exception {
boolean debug = Log.isDebugEnabled();
if (debug) {
Log.debug("Trying to find a user's DN based on their username. " + usernameField + ": " + username
+ ", Base DN: " + baseDN + "...");
}
DirContext ctx = null;
try {
ctx = getContext(baseDN);
if (debug) {
Log.debug("Starting LDAP search...");
}
// Search for the dn based on the username.
SearchControls constraints = new SearchControls();
// If sub-tree searching is enabled (default is true) then search the entire tree.
if (subTreeSearch) {
constraints.setSearchScope(SearchControls.SUBTREE_SCOPE);
}
// Otherwise, only search a single level.
else {
constraints.setSearchScope(SearchControls.ONELEVEL_SCOPE);
}
constraints.setReturningAttributes(new String[] { usernameField });
NamingEnumeration answer = ctx.search("", getSearchFilter(), new String[] {username},
constraints);
if (debug) {
Log.debug("... search finished");
}
if (answer == null || !answer.hasMoreElements()) {
if (debug) {
Log.debug("User DN based on username '" + username + "' not found.");
}
throw new UserNotFoundException("Username " + username + " not found");
}
String userDN = ((SearchResult)answer.next()).getName();
// Make sure there are no more search results. If there are, then
// the username isn't unique on the LDAP server (a perfectly possible
// scenario since only fully qualified dn's need to be unique).
// There really isn't a way to handle this, so throw an exception.
// The baseDN must be set correctly so that this doesn't happen.
if (answer.hasMoreElements()) {
if (debug) {
Log.debug("Search for userDN based on username '" + username + "' found multiple " +
"responses, throwing exception.");
}
throw new UserNotFoundException("LDAP username lookup for " + username +
" matched multiple entries.");
}
// Close the enumeration.
answer.close();
// All other methods assume that userDN is not a full LDAP string.
// However if a referral was followed this is not the case. The
// following code converts a referral back to a "partial" LDAP string.
if (userDN.startsWith("ldap://")) {
userDN = userDN.replace("," + baseDN, "");
userDN = userDN.substring(userDN.lastIndexOf("/") + 1);
}
if (encloseUserDN) {
// Enclose userDN values between "
// eg. cn=John\, Doe,ou=People --> cn="John\, Doe",ou="People"
Matcher matcher = userDNPattern.matcher(userDN);
userDN = matcher.replaceAll("$1\"$2\",");
if (userDN.endsWith(",")) {
userDN = userDN.substring(0, userDN.length() - 1);
}
}
return userDN;
}
catch (Exception e) {
if (debug) {
Log.debug("Exception thrown when searching for userDN based on username '" + username + "'", e);
}
throw e;
}
finally {
try { ctx.close(); }
catch (Exception ignored) {
// Ignore.
}
}
}
/**
* Returns a properly encoded URL for use as the PROVIDER_URL.
* If the encoding fails then the URL will contain the raw base dn.
*
* @param baseDN the base dn to use in the URL.
* @return the properly encoded URL for use in as PROVIDER_URL.
*/
private String getProviderURL(String baseDN) {
StringBuffer ldapURL = new StringBuffer();
try {
baseDN = URLEncoder.encode(baseDN, "UTF-8");
// The java.net.URLEncoder class encodes spaces as +, but they need to be %20
baseDN = baseDN.replaceAll("\\+", "%20");
}
catch (java.io.UnsupportedEncodingException e) {
// UTF-8 is not supported, fall back to using raw baseDN
}
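// Build one space-separated URL per host, e.g. (illustrative):
// "ldap://host1:389/o%3Dexample ldap://host2:389/o%3Dexample "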
for (String host : hosts) {
// Create a correctly-encoded ldap URL for the PROVIDER_URL
ldapURL.append("ldap:
ldapURL.append(host);
ldapURL.append(":");
ldapURL.append(port);
ldapURL.append("/");
ldapURL.append(baseDN);
ldapURL.append(" ");
}
return ldapURL.toString();
}
/**
* Returns the LDAP server hosts; e.g. <tt>localhost</tt> or
* <tt>machine.example.com</tt>, etc. This value is stored as the Jive
* Property <tt>ldap.host</tt>.
*
* @return the LDAP server host names.
*/
public Collection<String> getHosts() {
return hosts;
}
/**
* Sets the list of LDAP server hosts; e.g., <tt>localhost</tt> or
* <tt>machine.example.com</tt>, etc. This value is stored as the Jive
* Property <tt>ldap.host</tt> using a comma as a delimiter for each host.<p>
*
* Note that all LDAP servers have to share the same configuration.
*
* @param hosts the LDAP servers host names.
*/
public void setHosts(Collection<String> hosts) {
this.hosts = hosts;
StringBuilder hostProperty = new StringBuilder();
for (String host : hosts) {
hostProperty.append(host).append(",");
}
if (!hosts.isEmpty()) {
// Remove the last comma
hostProperty.setLength(hostProperty.length()-1);
}
properties.put("ldap.host", hostProperty.toString());
}
/**
* Returns the LDAP server port number. The default is 389. This value is
* stored as the Jive Property <tt>ldap.port</tt>.
*
* @return the LDAP server port number.
*/
public int getPort() {
return port;
}
/**
* Sets the LDAP server port number. The default is 389. This value is
* stored as the Jive property <tt>ldap.port</tt>.
*
* @param port the LDAP server port number.
*/
public void setPort(int port) {
this.port = port;
properties.put("ldap.port", Integer.toString(port));
}
/**
* Returns true if LDAP connection debugging is turned on. When on, trace
* information about BER buffers sent and received by the LDAP provider is
* written to System.out. Debugging is turned off by default.
*
* @return true if LDAP debugging is turned on.
*/
public boolean isDebugEnabled() {
return ldapDebugEnabled;
}
/**
* Sets whether LDAP connection debugging is turned on. When on, trace
* information about BER buffers sent and received by the LDAP provider is
* written to System.out. Debugging is turned off by default.
*
* @param debugEnabled true if debugging should be turned on.
*/
public void setDebugEnabled(boolean debugEnabled) {
this.ldapDebugEnabled = debugEnabled;
properties.put("ldap.ldapDebugEnabled", Boolean.toString(debugEnabled));
}
/**
* Returns true if LDAP connection is via SSL or not. SSL is turned off by default.
*
* @return true if SSL connections are enabled or not.
*/
public boolean isSslEnabled() {
return sslEnabled;
}
/**
* Sets whether the connection to the LDAP server should be made via ssl or not.
*
* @param sslEnabled true if ssl should be enabled, false otherwise.
*/
public void setSslEnabled(boolean sslEnabled) {
this.sslEnabled = sslEnabled;
properties.put("ldap.sslEnabled", Boolean.toString(sslEnabled));
}
/**
* Returns the LDAP field name that the username lookup will be performed
* on. By default this is "uid".
*
* @return the LDAP field that the username lookup will be performed on.
*/
public String getUsernameField() {
return usernameField;
}
/**
* Sets the LDAP field name that the username lookup will be performed on.
* By default this is "uid".
*
* @param usernameField the LDAP field that the username lookup will be
* performed on.
*/
public void setUsernameField(String usernameField) {
this.usernameField = usernameField;
if (usernameField == null) {
properties.remove("ldap.usernameField");
this.usernameField = "uid";
}
else {
properties.put("ldap.usernameField", usernameField);
}
}
/**
* Returns the LDAP field name that the user's name is stored in. By default
* this is "cn". Another common value is "displayName".
*
* @return the LDAP field that corresponds to the user's name.
*/
public String getNameField() {
return nameField;
}
/**
* Sets the LDAP field name that the user's name is stored in. By default
* this is "cn". Another common value is "displayName".
*
* @param nameField the LDAP field that corresponds to the user's name.
*/
public void setNameField(String nameField) {
this.nameField = nameField;
if (nameField == null) {
properties.remove("ldap.nameField");
}
else {
properties.put("ldap.nameField", nameField);
}
}
/**
* Returns the LDAP field name that the user's email address is stored in.
* By default this is "mail".
*
* @return the LDAP field that corresponds to the user's email
* address.
*/
public String getEmailField() {
return emailField;
}
/**
* Sets the LDAP field name that the user's email address is stored in.
* By default this is "mail".
*
* @param emailField the LDAP field that corresponds to the user's
* email address.
*/
public void setEmailField(String emailField) {
this.emailField = emailField;
if (emailField == null) {
properties.remove("ldap.emailField");
}
else {
properties.put("ldap.emailField", emailField);
}
}
/**
* Returns the starting DN that searches for users will be performed with.
* Searches will be performed on the entire sub-tree under the base DN.
*
* @return the starting DN used for performing searches.
*/
public String getBaseDN() {
return baseDN;
}
/**
* Sets the starting DN that searches for users will be performed with.
* Searches will be performed on the entire sub-tree under the base DN.
*
* @param baseDN the starting DN used for performing searches.
*/
public void setBaseDN(String baseDN) {
this.baseDN = baseDN;
properties.put("ldap.baseDN", baseDN);
}
/**
* Returns the alternate starting DN that searches for users will be performed with.
* Searches will be performed on the entire sub-tree under the alternate base DN after
* they are performed on the main base DN.
*
* @return the alternate starting DN used for performing searches. If no alternate
* DN is set, this method will return <tt>null</tt>.
*/
public String getAlternateBaseDN() {
return alternateBaseDN;
}
/**
* Sets the alternate starting DN that searches for users will be performed with.
* Searches will be performed on the entire sub-tree under the alternate base DN after
* they are performed on the main base dn.
*
* @param alternateBaseDN the alternate starting DN used for performing searches.
*/
public void setAlternateBaseDN(String alternateBaseDN) {
this.alternateBaseDN = alternateBaseDN;
if (alternateBaseDN == null) {
properties.remove("ldap.alternateBaseDN");
}
else {
properties.put("ldap.alternateBaseDN", alternateBaseDN);
}
}
/**
     * Returns the starting admin DN that searches for admins will be performed with.
     * Searches will be performed on the entire sub-tree under the admin DN.
*
* @return the starting DN used for performing searches.
*/
public String getAdminDN() {
return adminDN;
}
/**
     * Sets the starting admin DN that searches for admins will be performed with.
     * Searches will be performed on the entire sub-tree under the admin DN.
*
* @param adminDN the starting DN used for performing admin searches.
*/
public void setAdminDN(String adminDN) {
this.adminDN = adminDN;
properties.put("ldap.adminDN", adminDN);
}
    /**
     * Returns the admin password for the LDAP server we're connecting to.
     *
     * @return the admin password for the LDAP server we're connecting to.
     */
public String getAdminPassword() {
return adminPassword;
}
/**
* Sets the admin password for the LDAP server we're connecting to.
*
* @param adminPassword the admin password for the LDAP server we're
* connecting to.
*/
public void setAdminPassword(String adminPassword) {
this.adminPassword = adminPassword;
properties.put("ldap.adminPassword", adminPassword);
}
/**
* Returns the filter used for searching the directory for users, which includes
* the default filter (username field search) plus any custom-defined search filter.
*
* @return the search filter.
*/
public String getSearchFilter() {
StringBuilder filter = new StringBuilder();
if (searchFilter == null) {
filter.append("(").append(usernameField).append("={0})");
}
else {
filter.append("(&(").append(usernameField).append("={0})");
filter.append(searchFilter).append(")");
}
return filter.toString();
}
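    // Illustrative example (the custom filter value below is hypothetical, not taken from this
    // class): with the default usernameField "uid" and no custom search filter, getSearchFilter()
    // returns "(uid={0})"; with a custom searchFilter of "(objectClass=inetOrgPerson)" it returns
    // "(&(uid={0})(objectClass=inetOrgPerson))".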
/**
* Sets the search filter appended to the default filter when searching for users.
*
* @param searchFilter the search filter appended to the default filter
* when searching for users.
*/
public void setSearchFilter(String searchFilter) {
this.searchFilter = searchFilter;
properties.put("ldap.searchFilter", searchFilter);
}
/**
* Returns true if the entire tree under the base DN will be searched (recursive search)
* when doing LDAP queries (finding users, groups, etc). When false, only a single level
     * under the base DN will be searched. The default is <tt>true</tt>, which is the best
     * option for most LDAP setups. In only a few cases will the directory be set up in such
* a way that it's better to do single level searching.
*
* @return true if the entire tree under the base DN will be searched.
*/
public boolean isSubTreeSearch() {
return subTreeSearch;
}
/**
* Sets whether the entire tree under the base DN will be searched (recursive search)
* when doing LDAP queries (finding users, groups, etc). When false, only a single level
     * under the base DN will be searched. The default is <tt>true</tt>, which is the best
     * option for most LDAP setups. In only a few cases will the directory be set up in such
* a way that it's better to do single level searching.
*
* @param subTreeSearch true if the entire tree under the base DN will be searched.
*/
public void setSubTreeSearch(boolean subTreeSearch) {
this.subTreeSearch = subTreeSearch;
properties.put("ldap.subTreeSearch", String.valueOf(subTreeSearch));
}
/**
* Returns true if LDAP referrals will automatically be followed when found.
*
* @return true if LDAP referrals are automatically followed.
*/
public boolean isFollowReferralsEnabled() {
return followReferrals;
}
/**
* Sets whether LDAP referrals should be automatically followed.
*
* @param followReferrals true if LDAP referrals should be automatically followed.
*/
public void setFollowReferralsEnabled(boolean followReferrals) {
this.followReferrals = followReferrals;
properties.put("ldap.autoFollowReferrals", String.valueOf(followReferrals));
}
/**
* Returns the field name used for groups.
* Value of groupNameField defaults to "cn".
*
* @return the field used for groups.
*/
public String getGroupNameField() {
return groupNameField;
}
/**
* Sets the field name used for groups.
*
* @param groupNameField the field used for groups.
*/
public void setGroupNameField(String groupNameField) {
this.groupNameField = groupNameField;
properties.put("ldap.groupNameField", groupNameField);
}
/**
* Return the field used to list members within a group.
* Value of groupMemberField defaults to "member".
*
* @return the field used to list members within a group.
*/
public String getGroupMemberField() {
return groupMemberField;
}
/**
* Sets the field used to list members within a group.
* Value of groupMemberField defaults to "member".
*
* @param groupMemberField the field used to list members within a group.
*/
public void setGroupmemberField(String groupMemberField) {
this.groupMemberField = groupMemberField;
properties.put("ldap.groupMemberField", groupMemberField);
}
/**
* Return the field used to describe a group.
* Value of groupDescriptionField defaults to "description".
*
* @return the field used to describe a group.
*/
public String getGroupDescriptionField() {
return groupDescriptionField;
}
/**
* Sets the field used to describe a group.
* Value of groupDescriptionField defaults to "description".
*
* @param groupDescriptionField the field used to describe a group.
*/
public void setGroupDescriptionField(String groupDescriptionField) {
this.groupDescriptionField = groupDescriptionField;
properties.put("ldap.groupDescriptionField", groupDescriptionField);
}
/**
* Return true if the LDAP server is operating in Posix mode. By default
* false is returned. When in Posix mode, users are stored within a group
* by their username alone. When not enabled, users are stored in a group using
* their entire DN.
*
* @return true if posix mode is being used by the LDAP server.
*/
public boolean isPosixMode() {
return posixMode;
}
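    // Illustrative example (the names and DN below are hypothetical): in Posix mode a group lists
    // a member simply as "jsmith", while in non-Posix mode the same member is listed by its full
    // DN, e.g. "uid=jsmith,ou=People,dc=example,dc=com".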
/**
* Sets whether the LDAP server is operating in Posix mode. When in Posix mode,
* users are stored within a group by their username alone. When not enabled,
* users are stored in a group using their entire DN.
*
* @param posixMode true if posix mode is being used by the LDAP server.
*/
public void setPosixMode(boolean posixMode) {
this.posixMode = posixMode;
properties.put("ldap.posixMode", String.valueOf(posixMode));
}
/**
* Returns the filter used for searching the directory for groups, which includes
* the default filter plus any custom-defined search filter.
*
* @return the search filter when searching for groups.
*/
public String getGroupSearchFilter() {
StringBuilder groupFilter = new StringBuilder();
if (groupSearchFilter == null) {
groupFilter.append("(").append(groupNameField).append("={0})");
}
else {
groupFilter.append("(&(").append(groupNameField).append("={0})");
groupFilter.append(groupSearchFilter).append(")");
}
return groupFilter.toString();
}
/**
* Sets the search filter appended to the default filter when searching for groups.
*
* @param groupSearchFilter the search filter appended to the default filter
* when searching for groups.
*/
public void setGroupSearchFilter(String groupSearchFilter) {
this.groupSearchFilter = groupSearchFilter;
properties.put("ldap.groupSearchFilter", groupSearchFilter);
}
}
|
package relex.concurrent;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import relex.RelationExtractor;
import relex.algs.SentenceAlgorithmApplier;
import relex.anaphora.Antecedents;
import relex.anaphora.Hobbs;
import relex.corpus.DocSplitter;
import relex.corpus.DocSplitterFactory;
import relex.corpus.GateEntityMaintainer;
import relex.entity.EntityMaintainer;
import relex.parser.LinkParser;
import relex.parser.LinkParserClient;
import relex.parser.LinkParserSocketClient;
import relex.tree.PhraseMarkup;
public class ParallelRelationExtractor {
private static final String DEFAULT_HOST = "localhost";
public static final int FIRST_PORT = 9000;
public static final int CLIENT_POOL_SIZE = 2;
private ExecutorService exec;
private BlockingQueue<LinkParserClient> pool;
private LinkedBlockingQueue<Future<RelexTaskResult>> results;
public int count = 0;
private boolean stop;
// Single-threaded processors
/** Entity detector */
private GateEntityMaintainer entityDetector;
/** Antecedents used in anaphora resolution */
public Antecedents antecedents;
/** Anaphora resolution */
private Hobbs hobbs;
// Thread-safe processors
/** Syntactic processing */
private LinkParser linkParser;
/** Semantic (RelEx) processing */
private SentenceAlgorithmApplier sentenceAlgorithmApplier;
/** Penn tree-bank style phrase structure markup. */
private PhraseMarkup phraseMarkup;
public ParallelRelationExtractor(){
initializePool();
results = new LinkedBlockingQueue<Future<RelexTaskResult>>();
entityDetector = new GateEntityMaintainer();
entityDetector.initialize();
linkParser = new LinkParser();
sentenceAlgorithmApplier = new SentenceAlgorithmApplier();
sentenceAlgorithmApplier.read(new File(RelationExtractor.DEFAULT_ALGS_FILE));
phraseMarkup = new PhraseMarkup();
antecedents = new Antecedents();
hobbs = new Hobbs(antecedents);
}
/**
* Initialize the pool of LinkParserClients, creating CLIENT_POOL_SIZE instances,
     * which connect to ports FIRST_PORT, FIRST_PORT+1, ..., FIRST_PORT+(CLIENT_POOL_SIZE-1).
*/
private void initializePool() {
exec = Executors.newFixedThreadPool(CLIENT_POOL_SIZE); // thread pool
pool = new ArrayBlockingQueue<LinkParserClient>(CLIENT_POOL_SIZE);
for (int i = 0 ; i < CLIENT_POOL_SIZE; i++){
LinkParserClient lpc = new LinkParserSocketClient(DEFAULT_HOST, FIRST_PORT+i);
try {
pool.put(lpc);
            } catch (InterruptedException e) {
                // Interrupted while handing a parser client to the pool; the client is dropped.
            }
}
}
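    // For example, with the constants above (DEFAULT_HOST = "localhost", FIRST_PORT = 9000,
    // CLIENT_POOL_SIZE = 2) this creates two LinkParserSocketClients, connecting to
    // localhost:9000 and localhost:9001.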
/**
* Submit a new sentence to be processed, blocking if no resources are available.
     * Results are obtained by calling take(), and are returned in order of submission.
     *
     * @param sentence The sentence to be processed.
* @throws InterruptedException
*/
public void push(String sentence) throws InterruptedException{
push(sentence, null);
}
/**
* Submit a new sentence to be processed, blocking if no resources are available.
     * Results are obtained by calling take(), and are returned in order of submission.
     *
     * @param sentence The sentence to be processed.
     * @param entityMaintainer The optional entity maintainer used for the document.
* @throws InterruptedException
*/
public void push(String sentence, EntityMaintainer entityMaintainer) throws InterruptedException{
if (entityMaintainer == null) {
entityMaintainer = entityDetector.process(sentence);
}
LinkParserClient lpc = pool.take();
Callable<RelexTaskResult> callable =
new RelexTask(count++, sentence,
entityMaintainer,
linkParser,
sentenceAlgorithmApplier,
phraseMarkup, lpc, pool);
Future<RelexTaskResult> submit = exec.submit(callable);
results.add(submit);
}
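    // Sketch of the intended producer/consumer usage (it mirrors main() below; take() is
    // protected, so the consuming side runs inside this class or a subclass):
    //   pre.push("First sentence.");
    //   pre.push("Second sentence.");
    //   RelexTaskResult first = pre.take();  // results come back in submission order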
/**
     * Returns the next result, in order of submission, blocking until it
     * is ready.
*
* @return The next result
* @throws InterruptedException
* @throws ExecutionException
*/
protected RelexTaskResult take() throws InterruptedException, ExecutionException {
Future<RelexTaskResult> first = results.take();
RelexTaskResult taskResult = first.get();
// Perform anaphora resolution
hobbs.addParse(taskResult.result);
hobbs.resolve(taskResult.result);
return taskResult;
}
/**
* Clear out the cache of old sentences.
*
* The Anaphora resolver keeps a list of sentences previously seen,
* so that anaphora resolution can be done. When starting the parse
* of a new text, this cache needs to be cleaned out. This is the
* way to do so.
*/
public void clear()
{
antecedents.clear();
hobbs = new Hobbs(antecedents);
}
/**
* Stop accepting requests, and shutdown the thread pool after all
* remaining requests are done.
*/
public void shutdown(){
stop = true;
exec.shutdown();
}
/**
     * @return true if more sentences may still be accepted (i.e., shutdown() has not
     * been called) or there are still pending results
*/
protected boolean isRunning() {
return !stop || !results.isEmpty();
}
/**
* Unit test. Read a text file and process its sentences in parallel.
     * Assumes link-grammar servers are running on DEFAULT_HOST,
     * listening on ports FIRST_PORT, FIRST_PORT+1, ..., FIRST_PORT+(CLIENT_POOL_SIZE-1).
*
* @param args The text file to be read
* @throws IOException
* @throws InterruptedException
*/
public static void main(final String[] args) throws IOException, InterruptedException {
final long t = System.currentTimeMillis();
final ParallelRelationExtractor pre = new ParallelRelationExtractor();
// Producer - submits sentences from a file
new Thread(new Runnable(){
public void run() {
DocSplitter ds = DocSplitterFactory.create();
try {
// Read entire file
StringBuilder sb = new StringBuilder();
BufferedReader in = new BufferedReader(new FileReader(args[0]));
String line = in.readLine();
while (line!=null){
sb.append(" "+line+" ");
line = in.readLine();
}
in.close();
// Break text into sentences and submit
ds.addText(sb.toString());
sb = null;
String sentence = ds.getNextSentence();
while (sentence!=null){
pre.push(sentence);
sentence = ds.getNextSentence();
}
} catch (Exception e) {
e.printStackTrace();
}
pre.clear(); // Clear anaphora resolution cache; do this when changing documents
pre.shutdown(); // end all threads in the pool after finishing all requests
}}).start();
// Consumer - print the results, in the original order
new Thread(new Runnable(){
public void run() {
try {
while(pre.isRunning()){
System.out.println(pre.take());
}
} catch (Exception e) {
e.printStackTrace();
}
System.out.println("Elapsed time: "+((System.currentTimeMillis() - t)/1000)+" s");
}
}).start();
}
}
|
package at.fhtw.mcs.controller;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ResourceBundle;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.Line;
import javax.sound.sampled.Mixer;
import at.fhtw.mcs.model.Format;
import at.fhtw.mcs.model.Track;
import at.fhtw.mcs.util.AudioOuput;
import at.fhtw.mcs.util.TrackFactory;
import at.fhtw.mcs.util.TrackFactory.UnsupportedFormatException;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.control.ProgressBar;
import javafx.scene.control.RadioMenuItem;
import javafx.scene.control.Toggle;
import javafx.scene.control.ToggleGroup;
import javafx.scene.layout.Region;
import javafx.scene.layout.VBox;
import javafx.scene.text.Text;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
/**
* Controller class for Root.fxml
*/
public class RootController implements Initializable {
/*
* Where to move those? ResourceBundles?
*/
private static final String ICON_PAUSE = "||";
private static final String ICON_PLAY = "";
@FXML
private VBox vboxTracks;
@FXML
private Menu menuOutputDevices;
@FXML
private MenuItem menuItemQuit;
@FXML
private MenuItem menuItemAddTrack;
@FXML
private MenuItem menuItemAbout;
@FXML
private Button buttonPlayPause;
@FXML
private Button buttonStop;
@FXML
private Button buttonAddTrack;
@FXML
private Text textCurrentTime;
@FXML
private Text textTotalTime;
@FXML
private ProgressBar progressBarTime;
private ResourceBundle bundle;
private Stage stage;
private List<Track> tracks = new ArrayList<>();
public RootController(Stage stage) {
this.stage = stage;
}
@Override
public void initialize(URL viewSource, ResourceBundle translations) {
this.bundle = translations;
// 'x -> functionCall' is a minimalistic Java8 lambda
menuItemQuit.setOnAction(e -> Platform.exit());
menuItemAddTrack.setOnAction(this::handleAddTrack);
menuItemAbout.setOnAction(this::handleAbout);
// TODO: inline lambdas vs methods?
buttonPlayPause.setOnAction(e -> {
tracks.forEach(Track::togglePlayPause);
buttonPlayPause.setText(ICON_PLAY.equals(buttonPlayPause.getText()) ? ICON_PAUSE : ICON_PLAY);
});
buttonStop.setOnAction(e -> {
tracks.forEach(Track::stop);
buttonPlayPause.setText(ICON_PLAY);
});
buttonAddTrack.setOnAction(this::handleAddTrack);
ToggleGroup group = new ToggleGroup();
//@formatter:off
Arrays.stream(AudioSystem.getMixerInfo())
.filter(RootController::isOutputMixerInfo)
.forEach(info -> {
RadioMenuItem radio = new RadioMenuItem();
radio.setText(String.format("%s (%s)", info.getName(), info.getDescription()));
radio.setUserData(info);
radio.setToggleGroup(group);
radio.setSelected(info.equals(AudioOuput.getSelectedMixerInfo()));
menuOutputDevices.getItems().add(radio);
});
//@formatter:on
group.selectedToggleProperty().addListener(new ChangeListener<Toggle>() {
public void changed(ObservableValue<? extends Toggle> value, Toggle previousSelection,
Toggle newSelection) {
/*
* When modifying grouped RadioMenuItems, this is invoked twice:
* 1) oldValue and null 2) null and newValue
*/
if (newSelection != null) {
AudioOuput.setSelectedMixerInfo((Mixer.Info) newSelection.getUserData());
tracks.forEach(Track::reload);
}
}
});
}
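    // A mixer counts as an output device here if it can supply a Clip line, i.e. if it can be
    // used for playback rather than capture.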
private static boolean isOutputMixerInfo(Mixer.Info info) {
return AudioSystem.getMixer(info).isLineSupported(new Line.Info(Clip.class));
}
private void updateTime() {
// TODO: for now, we'll assume only checking the first track is ok.
Track track = tracks.get(0);
long currentMicroseconds = track.getCurrentMicroseconds();
long totalMicroseconds = track.getTotalMicroseconds();
progressBarTime.setProgress((double) currentMicroseconds / totalMicroseconds);
textCurrentTime.setText(formatTimeString(currentMicroseconds));
// TODO: reset playPause button; doesn't work
// if (currentMicroseconds == totalMicroseconds) {
        // buttonPlayPause.setText(ICON_PLAY);
        // }
    }
private void handleAddTrack(ActionEvent event) {
FileChooser chooser = new FileChooser();
/*
* TODO: should restrict file types! but maybe don't hardcode, rather
* 'ask' a responsible class what file types are allowed?
*/
chooser.setTitle("TRANSLATE ME");
File file = chooser.showOpenDialog(stage);
if (file == null) {
return;
}
Track track;
try {
track = TrackFactory.loadTrack(file.getAbsolutePath());
} catch (UnsupportedFormatException e) {
this.showErrorUnsupportedFormat(e.getFormat(), e.getAudioFormat());
return;
}
// Things to be done for first track
if (tracks.isEmpty()) {
long totalMicroseconds = track.getTotalMicroseconds();
String timeString = formatTimeString(totalMicroseconds);
textTotalTime.setText(timeString);
// TODO: config parameter
long updateFrequencyMs = 100;
Timer timer = new Timer(true);
/*
* Reading the documentation of timer.schedule(...), it seems like
* there's no danger of timer-execution-congestion when a time
* invocation blocks: "[...]each execution is scheduled relative to
* the actual execution time of the previous execution."
*/
timer.schedule(new TimerTask() {
@Override
public void run() {
long prevMillis = System.currentTimeMillis();
updateTime();
long elapsedMs = System.currentTimeMillis() - prevMillis;
if (elapsedMs >= updateFrequencyMs) {
System.err.println(String.format(
"Warning: Time update (%dms) took longer than the update frequency (%dms).", elapsedMs,
updateFrequencyMs));
}
}
}, 0, updateFrequencyMs);
}
tracks.add(track);
try {
FXMLLoader loader = new FXMLLoader();
loader.setController(new TrackController(track));
loader.setLocation(getClass().getClassLoader().getResource("views/Track.fxml"));
loader.setResources(bundle);
vboxTracks.getChildren().add(loader.load());
} catch (IOException e) {
e.printStackTrace();
}
buttonPlayPause.setDisable(false);
buttonStop.setDisable(false);
}
private String formatTimeString(long totalMicroseconds) {
long minutes = TimeUnit.MICROSECONDS.toMinutes(totalMicroseconds);
long seconds = TimeUnit.MICROSECONDS.toSeconds(totalMicroseconds) % 60;
return String.format("%d:%02d", minutes, seconds);
}
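    // Example: formatTimeString(125_000_000L) formats 125 seconds of audio as "2:05".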
private void handleAbout(ActionEvent event) {
Alert alertAbout = new Alert(AlertType.INFORMATION);
alertAbout.setTitle(bundle.getString("about.title"));
alertAbout.setHeaderText(null);
alertAbout.setContentText(bundle.getString("about.contentText"));
((Label) alertAbout.getDialogPane().getChildren().get(1)).setWrapText(false);
alertAbout.getDialogPane().setPrefHeight(Region.USE_COMPUTED_SIZE);
// TODO: auto-resize to content
alertAbout.getDialogPane().setPrefWidth(700);
alertAbout.showAndWait();
}
private void showErrorUnsupportedFormat(Format format, AudioFormat audioFormat) {
Alert alertError = new Alert(AlertType.ERROR);
alertError.setTitle(bundle.getString("errorUnsupportedFormat.title"));
alertError.setHeaderText(null);
String errorText;
switch (format) {
case AIFF:
case WAV:
if (audioFormat.getSampleSizeInBits() == 24) {
errorText = bundle.getString("errorUnsupportedFormat.content24bit");
} else {
errorText = bundle.getString("errorUnsupportedFormat.contentDefault");
}
break;
case MP3:
errorText = bundle.getString("errorUnsupportedFormat.contentMp3");
break;
default:
errorText = bundle.getString("errorUnsupportedFormat.contentDefault");
break;
}
errorText += bundle.getString("errorUnsupportedFormat.supportedFormats");
alertError.setContentText(errorText);
alertError.getDialogPane().setPrefHeight(Region.USE_COMPUTED_SIZE);
alertError.getDialogPane().setPrefWidth(700);
alertError.showAndWait();
}
}
|
package banane.io.pdb.validator;
import banane.io.pdb.model.Hero;
import org.springframework.stereotype.Component;
import org.springframework.validation.Errors;
import org.springframework.validation.ValidationUtils;
import org.springframework.validation.Validator;
@Component
public class HeroValidator implements Validator {
@Override
public boolean supports(Class<?> clazz) {
return Hero.class.equals(clazz);
}
@Override
public void validate(Object target, Errors errors) {
Hero hero = (Hero) target;
ValidationUtils.rejectIfEmptyOrWhitespace(errors, "username", "NotEmpty");
if (hero.getUsername().length() < 6 || hero.getUsername().length() > 32) {
errors.rejectValue("username", "Size.userForm.username");
}
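        // Stat budget check: agility, intelligence and strength must add up to exactly 15 points.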
if(hero.getAgility() + hero.getIntelligence() + hero.getStrength() != 15) {
errors.rejectValue("agility", "Size.userForm.username");
}
}
}
|
package blacksmyth.stopwatch;
import blacksmyth.stopwatch.model.StopWatchModel;
import blacksmyth.stopwatch.model.StopWatchModelBuilder;
import blacksmyth.stopwatch.presenter.StopWatchPresenter;
import blacksmyth.stopwatch.presenter.StopWatchPresenterBuilder;
import blacksmyth.stopwatch.view.StopWatchView;
import blacksmyth.stopwatch.view.swing.SwingViewBuilder;
/**
* An implementation of the Builder pattern that uses the MVP pattern and Dependency Injection
* to create a fully functional StopWatch application.
*/
public final class StopWatchBuilder {
private StopWatchModel model;
private StopWatchView view;
private StopWatchPresenter presenter;
public static StopWatchView build() {
StopWatchBuilder builder = new StopWatchBuilder();
builder.addPresenter().addAndBindModel().addAndBindView();
return builder.getView();
}
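    // Minimal usage sketch: obtaining a fully wired view is a single call,
    //   StopWatchView view = StopWatchBuilder.build();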
private StopWatchBuilder addPresenter() {
presenter = StopWatchPresenterBuilder.build();
return this;
}
private StopWatchBuilder addAndBindModel() {
model = StopWatchModelBuilder.build();
bindModel();
return this;
}
private void bindModel() {
presenter.setModel(model);
model.addObserver(presenter);
}
private StopWatchBuilder addAndBindView() {
view = SwingViewBuilder.build();
bindView();
return this;
}
private void bindView() {
presenter.setView(view);
view.addObserver(presenter);
}
public StopWatchView getView() {
return view;
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package net.cantab.hayward.george.OCS.Parsing;
import VASSAL.Info;
import VASSAL.build.GameModule;
import VASSAL.command.Command;
import VASSAL.launch.BasicModule;
import VASSAL.launch.EditorWindow;
import VASSAL.launch.Launcher;
import VASSAL.launch.PlayerWindow;
import VASSAL.tools.ArchiveWriter;
import VASSAL.tools.icon.IconFactory;
import VASSAL.tools.menu.MacOSXMenuManager;
import VASSAL.tools.menu.MenuBarProxy;
import VASSAL.tools.menu.MenuManager;
import java.io.File;
import java.io.IOException;
import java.util.zip.ZipFile;
import javax.swing.JFrame;
import javax.swing.JMenuBar;
import net.cantab.hayward.george.OCS.Statics;
import org.apache.commons.lang3.SystemUtils;
/**
*
* @author george
*/
public class ScenarioBuilder extends Launcher {
static File load;
static File text;
static File save;
public static void main(String[] args) {
if (args.length != 4) {
System.err.println("Bad number of arguments " + args.length);
System.exit(0);
}
load = new File(args[1]);
text = new File(args[2]);
save = new File(args[3]);
new ScenarioBuilder(new String[]{args[0], "--standalone"});
}
protected ScenarioBuilder(String[] args) {
// the ctor is protected to enforce that it's called via main()
super(args);
}
protected MenuManager createMenuManager() {
return SystemUtils.IS_OS_MAC ? new MacOSXMenuManager() : new MyMenuManager();
}
protected void launch()
throws IOException {
        new IconFactory(); // initialise the icon factory
final ArchiveWriter archive = new ArchiveWriter(new ZipFile(
lr.module.getPath()));
GameModule.init(new GameModule(archive));
Command c = GameModule.getGameModule().getGameState().decodeSavedGame(
load);
c.execute();
Statics.readingTextFile = true;
ParseText p = new ParseText(text);
p.parse();
Statics.readingTextFile = false;
try {
GameModule.getGameModule().getGameState().saveGame(save);
        } catch (IOException e) {
            // Saving the game failed; nothing more to do, we exit immediately below.
        }
System.exit(0);
}
private static class MyMenuManager extends MenuManager {
private final MenuBarProxy editorBar = new MenuBarProxy();
private final MenuBarProxy playerBar = new MenuBarProxy();
@Override
public JMenuBar getMenuBarFor(JFrame fc) {
if (fc instanceof PlayerWindow) {
return playerBar.createPeer();
} else if (fc instanceof EditorWindow) {
return editorBar.createPeer();
} else {
return null;
}
}
@Override
public MenuBarProxy getMenuBarProxyFor(JFrame fc) {
if (fc instanceof PlayerWindow) {
return playerBar;
} else if (fc instanceof EditorWindow) {
return editorBar;
} else {
return null;
}
}
}
}
|
package ch.openech.transaction;
import java.util.List;
import org.minimalj.backend.Backend;
import org.minimalj.backend.Persistence;
import org.minimalj.security.Subject;
import org.minimalj.transaction.criteria.By;
import org.minimalj.util.CloneHelper;
import org.minimalj.util.IdUtils;
import org.minimalj.util.StringUtils;
import ch.openech.frontend.preferences.OpenEchPreferences;
import ch.openech.model.organisation.Organisation;
import ch.openech.model.organisation.OrganisationIdentification;
import ch.openech.model.person.Person;
import ch.openech.model.person.PersonIdentification;
public class EchPersistence {
public static Person getByIdentification(PersonIdentification personIdentification) {
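        // Lookup strategy, in order: (1) a 36-character openEch local id is read directly as the
        // person's id; (2) otherwise the vn value is used for a search; (3) otherwise a search by
        // officialName is matched on firstName/officialName (restricted to the local id prefix
        // when one is known).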
String localId = null;
if (personIdentification.technicalIds.localId.openEch()) {
localId = personIdentification.technicalIds.localId.personId;
if (StringUtils.isBlank(localId)) {
localId = null;
}
if (localId != null && localId.length() == 36) {
Person person = Backend.read(Person.class, localId);
if (person != null) {
return person;
}
}
}
if (personIdentification.vn != null && personIdentification.vn.value != null) {
List<Person> persons = Backend.read(Person.class, By.search(personIdentification.vn.value, Person.SEARCH_BY_VN) , 1);
if (localId != null) {
for (Person person : persons) {
if (IdUtils.getCompactIdString(person).startsWith(localId)) {
return person;
}
}
} else {
if (!persons.isEmpty()) return persons.get(0);
}
}
List<Person> persons = Backend.read(Person.class, By.search(personIdentification.officialName), 500);
for (Person person : persons) {
if (localId == null || IdUtils.getCompactIdString(person).startsWith(localId)) {
if (StringUtils.equals(person.firstName, personIdentification.firstName)) {
if (StringUtils.equals(person.officialName, personIdentification.officialName)) {
return Backend.read(Person.class, person.id);
}
}
}
}
return null;
}
public static Organisation getByIdentification(Persistence persistence, OrganisationIdentification organisationIdentification) {
String localId = null;
if (organisationIdentification.technicalIds.localId.openEch()) {
localId = organisationIdentification.technicalIds.localId.personId;
if (StringUtils.isBlank(localId)) {
localId = null;
}
if (localId != null && localId.length() == 36) {
Organisation organisation = persistence.read(Organisation.class, localId);
if (organisation != null) {
return organisation;
}
}
}
List<Organisation> organisations = persistence.read(Organisation.class, By.field(Organisation.$.uid.value, organisationIdentification.uid.value), 2);
if (organisations.isEmpty()) {
organisations = persistence.read(Organisation.class, By.field(Organisation.$.organisationName, organisationIdentification.organisationName), 2);
}
for (Organisation organisation : organisations) {
if (localId == null || IdUtils.getCompactIdString(organisation).startsWith(localId)) {
return organisation;
}
}
return null;
}
public static OpenEchPreferences getPreferences() {
Subject subject = Subject.getSubject();
if (subject != null && !StringUtils.isEmpty(subject.getName())) {
List<OpenEchPreferences> preferences = Backend.read(OpenEchPreferences.class, By.field(OpenEchPreferences.$.user, subject.getName()), 2);
if (preferences.size() > 1) {
throw new IllegalStateException("Too many preference rows for " + subject.getName());
} else if (preferences.size() == 1) {
return preferences.get(0);
}
}
return CloneHelper.newInstance(OpenEchPreferences.class);
}
public static void savePreferences(OpenEchPreferences preferences) {
if (Subject.getSubject() != null) {
preferences.user = Subject.getSubject().getName();
if (preferences.id != null) {
Backend.update(preferences);
} else {
Backend.insert(preferences);
}
}
}
}
|
package org.biojava.bio.structure.align.helper;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class JointFragments {
double rms;
List<int[]> idxlist;
public JointFragments(){
idxlist = new ArrayList<int[]>();
rms = 999;
}
public void setIdxlist(List<int[]> idxs) {
Iterator<int[]> iter = idxs.iterator();
while (iter.hasNext()){
            int[] e = iter.next();
idxlist.add(e);
}
}
public double getRms() {
return rms;
}
public void setRms(double rms) {
this.rms = rms;
}
public List<int[]> getIdxlist(){
return idxlist;
}
public void add(int p1, int p2,int start,int end){
//System.out.println("JointFragments add " +p1 + " " + p2 + " " + start + " " + end);
for ( int k = start;k< end ; k++){
//e = (f[0]+k,f[1]+k)
int[] e = new int[] {p1+k,p2+k};
// check if already known ...
Iterator<int[]> iter = idxlist.iterator();
while (iter.hasNext()){
                int[] kno = iter.next();
                if ((kno[0] == e[0]) && (kno[1] == e[1])) {
                    System.err.println("already known index pair!! how is this possible? " + e[0] + " " + e[1]);
}
}
idxlist.add(e);
}
}
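    // Example: add(3, 7, 0, 2) appends the aligned index pairs {3,7} and {4,8} to idxlist.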
public String toString(){
String s = "Joint Fragment idxlist len: " +idxlist.size();
return s;
}
}
|
package cn.momia.mapi.api.course;
import cn.momia.api.course.CouponServiceApi;
import cn.momia.api.course.CourseServiceApi;
import cn.momia.api.course.OrderServiceApi;
import cn.momia.api.course.SubjectServiceApi;
import cn.momia.api.course.dto.course.Course;
import cn.momia.api.course.dto.subject.Subject;
import cn.momia.api.course.dto.subject.SubjectSku;
import cn.momia.api.course.dto.comment.UserCourseComment;
import cn.momia.api.course.dto.subject.SubjectOrder;
import cn.momia.api.user.UserServiceApi;
import cn.momia.api.user.dto.Contact;
import cn.momia.api.user.dto.User;
import cn.momia.common.core.dto.PagedList;
import cn.momia.common.core.http.MomiaHttpResponse;
import cn.momia.common.webapp.config.Configuration;
import cn.momia.mapi.api.AbstractApi;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
@RestController
@RequestMapping("/v1/subject")
public class SubjectV1Api extends AbstractApi {
@Autowired private CourseServiceApi courseServiceApi;
@Autowired private SubjectServiceApi subjectServiceApi;
@Autowired private CouponServiceApi couponServiceApi;
@Autowired private OrderServiceApi orderServiceApi;
@Autowired private UserServiceApi userServiceApi;
@RequestMapping(method = RequestMethod.GET)
public MomiaHttpResponse get(@RequestParam long id) {
if (id <= 0) return MomiaHttpResponse.FAILED("ID");
Subject subject = subjectServiceApi.get(id);
completeLargeImg(subject);
PagedList<Course> courses = courseServiceApi.query(id, 0, 2);
completeMiddleCoursesImgs(courses.getList());
PagedList<UserCourseComment> comments = subjectServiceApi.queryCommentsBySubject(id, 0, 2);
completeCourseCommentsImgs(comments.getList());
JSONObject responseJson = new JSONObject();
responseJson.put("subject", subject);
responseJson.put("courses", courses);
if (!comments.getList().isEmpty()) responseJson.put("comments", comments);
return MomiaHttpResponse.SUCCESS(responseJson);
}
@RequestMapping(value = "/list", method = RequestMethod.GET)
public MomiaHttpResponse list(@RequestParam(value = "city") int cityId) {
if (cityId < 0) return MomiaHttpResponse.FAILED("ID");
List<Subject> subjects = subjectServiceApi.list(cityId);
for (Subject subject : subjects) {
completeLargeImg(subject);
}
return MomiaHttpResponse.SUCCESS(subjects);
}
@RequestMapping(value = "/course", method = RequestMethod.GET)
public MomiaHttpResponse listCourses(@RequestParam long id,
@RequestParam(value = "pid", required = false, defaultValue = "0") long packageId,
@RequestParam(required = false, defaultValue = "0") int age,
@RequestParam(required = false, defaultValue = "0") int sort,
@RequestParam int start) {
if (id <= 0) return MomiaHttpResponse.FAILED("ID");
if (start < 0) return MomiaHttpResponse.FAILED("start");
// FIXME Magic Number
PagedList<Course> courses = courseServiceApi.query(id, packageId, 1, 100, 0, start, Configuration.getInt("PageSize.Course"));
completeMiddleCoursesImgs(courses.getList());
JSONObject responseJson = new JSONObject();
responseJson.put("ages", buildAgeRanges());
responseJson.put("sorts", buildSortTypes());
responseJson.put("currentAge", 0);
responseJson.put("currentSort", 0);
responseJson.put("courses", courses);
return MomiaHttpResponse.SUCCESS(responseJson);
}
private JSONArray buildAgeRanges() {
JSONArray ageRanges = new JSONArray();
JSONObject ageRange = new JSONObject();
ageRange.put("id", 0);
ageRange.put("min", 1);
ageRange.put("max", 100);
ageRange.put("text", "");
ageRanges.add(ageRange);
return ageRanges;
}
private JSONArray buildSortTypes() {
JSONArray sortTypes = new JSONArray();
JSONObject sortType = new JSONObject();
sortType.put("id", 0);
sortType.put("text", "");
sortTypes.add(sortType);
return sortTypes;
}
@RequestMapping(value = "/course/bookable", method = RequestMethod.GET)
public MomiaHttpResponse listBookableCourses(@RequestParam(value = "pid") long packageId, @RequestParam int start) {
if (packageId <= 0) return MomiaHttpResponse.FAILED("ID");
if (start < 0) return MomiaHttpResponse.FAILED("start");
// FIXME Magic Number
PagedList<Course> courses = courseServiceApi.queryBookable(packageId, start, Configuration.getInt("PageSize.Course"));
completeMiddleCoursesImgs(courses.getList());
JSONObject responseJson = new JSONObject();
responseJson.put("ages", buildAgeRanges());
responseJson.put("sorts", buildSortTypes());
responseJson.put("currentAge", 0);
responseJson.put("currentSort", 0);
responseJson.put("courses", courses);
return MomiaHttpResponse.SUCCESS(responseJson);
}
@RequestMapping(value = "/comment/list", method = RequestMethod.GET)
public MomiaHttpResponse listComments(@RequestParam long id, @RequestParam int start) {
if (id <= 0) return MomiaHttpResponse.FAILED("ID");
if (start < 0) return MomiaHttpResponse.FAILED("start");
PagedList<UserCourseComment> pageComments = subjectServiceApi.queryCommentsBySubject(id, start, Configuration.getInt("PageSize.CourseComment"));
completeCourseCommentsImgs(pageComments.getList());
return MomiaHttpResponse.SUCCESS(pageComments);
}
@RequestMapping(value = "/sku", method = RequestMethod.GET)
public MomiaHttpResponse sku(@RequestParam String utoken, @RequestParam long id) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (id <= 0) return MomiaHttpResponse.FAILED("ID");
Subject subject = subjectServiceApi.get(id);
List<SubjectSku> skus = subjectServiceApi.querySkus(id);
Contact contact = userServiceApi.getContact(utoken);
List<SubjectSku> subjectSkus = new ArrayList<SubjectSku>();
for (SubjectSku sku : skus) {
if (subject.getType() == Subject.Type.NORMAL) {
if (sku.getCourseId() <= 0) subjectSkus.add(sku);
} else if (subject.getType() == Subject.Type.TRIAL) {
if (sku.getCourseId() > 0) subjectSkus.add(sku);
}
}
JSONObject responseJson = new JSONObject();
responseJson.put("skus", subjectSkus);
responseJson.put("contact", contact);
return MomiaHttpResponse.SUCCESS(responseJson);
}
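    // Illustrative request body for the order endpoint below (all field values are hypothetical):
    //   {"contact":{"name":"Alice","mobile":"13800000000"},
    //    "skus":[{"id":11,"subjectId":7,"count":2}]}
    // The handler flattens the contact into "contact"/"mobile", copies the subjectId, and expands
    // each sku into `count` package entries of the form {"skuId":11} before placing the order.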
@RequestMapping(value = "/order", method = RequestMethod.POST)
public MomiaHttpResponse order(@RequestParam String utoken, @RequestParam String order) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (StringUtils.isBlank(order)) return MomiaHttpResponse.FAILED("");
JSONObject orderJson = JSON.parseObject(order);
User user = userServiceApi.get(utoken);
orderJson.put("userId", user.getId());
JSONObject contactJson = orderJson.getJSONObject("contact");
orderJson.put("contact", contactJson.getString("name"));
orderJson.put("mobile", contactJson.getString("mobile"));
JSONArray packagesJson = new JSONArray();
JSONArray skusJson = orderJson.getJSONArray("skus");
for (int i = 0; i < skusJson.size(); i++) {
JSONObject skuJson = skusJson.getJSONObject(i);
orderJson.put("subjectId", skuJson.getLong("subjectId"));
int count = skuJson.getInteger("count");
for (int j = 0; j < count; j++) {
JSONObject packageJson = new JSONObject();
packageJson.put("skuId", skuJson.getLong("id"));
packagesJson.add(packageJson);
}
}
orderJson.put("packages", packagesJson);
return MomiaHttpResponse.SUCCESS(orderServiceApi.placeOrder(orderJson));
}
@RequestMapping(value = "/order/delete", method = RequestMethod.POST)
public MomiaHttpResponse deleteOrder(@RequestParam String utoken, @RequestParam(value = "oid") long orderId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (orderId <= 0) return MomiaHttpResponse.FAILED("ID");
if (!orderServiceApi.deleteOrder(utoken, orderId)) return MomiaHttpResponse.FAILED("");
return MomiaHttpResponse.SUCCESS;
}
@RequestMapping(value = "/order/refund", method = RequestMethod.POST)
public MomiaHttpResponse refund(@RequestParam String utoken,
@RequestParam(value = "oid") long orderId,
@RequestParam(required = false, defaultValue = "0") BigDecimal fee,
@RequestParam(required = false, defaultValue = "") String message) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (orderId <= 0) return MomiaHttpResponse.FAILED("ID");
if (new BigDecimal(0).compareTo(fee) >= 0) return MomiaHttpResponse.FAILED("");
if (!orderServiceApi.applyRefundOrder(utoken, orderId, fee, message)) return MomiaHttpResponse.FAILED("");
return MomiaHttpResponse.SUCCESS;
}
@RequestMapping(value = "/order/coupon", method = RequestMethod.GET)
public MomiaHttpResponse coupon(@RequestParam String utoken,
@RequestParam(value = "oid") long orderId,
@RequestParam(value = "coupon") long userCouponId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (orderId <= 0) return MomiaHttpResponse.FAILED("ID");
if (userCouponId <= 0) return MomiaHttpResponse.FAILED("ID");
return MomiaHttpResponse.SUCCESS(couponServiceApi.coupon(utoken, orderId, userCouponId));
}
@RequestMapping(value = "/order/detail", method = RequestMethod.GET)
public MomiaHttpResponse orderDetail(@RequestParam String utoken, @RequestParam(value = "oid") long orderId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (orderId <= 0) return MomiaHttpResponse.FAILED("ID");
SubjectOrder order = orderServiceApi.get(utoken, orderId);
order.setCover(completeMiddleImg(order.getCover()));
return MomiaHttpResponse.SUCCESS(order);
}
}
|
package org.callimachusproject.behaviours;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.apache.http.util.EntityUtils;
import org.callimachusproject.client.HttpUriClient;
import org.callimachusproject.client.HttpUriEntity;
import org.callimachusproject.concepts.ScriptBundle;
import org.callimachusproject.server.exceptions.GatewayTimeout;
import org.callimachusproject.server.exceptions.InternalServerError;
import org.callimachusproject.traits.CalliObject;
import org.openrdf.OpenRDFException;
import org.openrdf.annotations.Sparql;
import com.google.javascript.jscomp.CheckLevel;
import com.google.javascript.jscomp.CommandLineRunner;
import com.google.javascript.jscomp.CompilationLevel;
import com.google.javascript.jscomp.Compiler;
import com.google.javascript.jscomp.CompilerOptions;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
import com.google.javascript.jscomp.DiagnosticGroups;
import com.google.javascript.jscomp.Result;
import com.google.javascript.jscomp.SourceFile;
public abstract class ScriptBundleSupport implements ScriptBundle, CalliObject {
/** very short lived cache to avoid race condition */
private static final Map<String, Callable<String>> cache = new HashMap<String, Callable<String>>();
@Override
public String calliGetBundleSource() throws GatewayTimeout, IOException, OpenRDFException {
HttpUriClient client = this.getHttpClient();
List<SourceFile> scripts = new ArrayList<SourceFile>();
for (Object ext : getCalliScriptsAsList()) {
String url = ext.toString();
String code = getJavaScriptCode(client, url);
scripts.add(SourceFile.fromCode(url, code));
}
StringBuilder sb = new StringBuilder();
for (SourceFile script : scripts) {
sb.append(script.getCode()).append("\n");
}
return sb.toString();
}
@Override
public String calliGetMinifiedBundle() throws Exception {
final int minification = this.getMinification();
if (minification < 1)
return calliGetBundleSource();
String uri = this.getResource().stringValue();
final HttpUriClient client = this.getHttpClient();
final List<String> scripts = new ArrayList<String>(getCalliScriptsAsList());
Callable<String> future;
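        // Concurrent requests for the same bundle share one compression run: the first caller
        // installs a memoizing Callable keyed by the bundle URI, later callers reuse it, and the
        // entry is removed again once the result has been obtained.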
synchronized (cache) {
future = cache.get(uri);
if (future == null) {
cache.put(uri, future = new Callable<String>() {
private String result;
public synchronized String call() throws Exception {
if (result == null)
return result = compress(minification, scripts, client);
return result;
}
});
}
}
String source = future.call();
synchronized (cache) {
cache.remove(uri);
}
return "// @source: " + uri + "?source\n" + source;
}
@Sparql("PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema
+ "SELECT DISTINCT (str(?script) AS ?url)\n"
+ "WHERE { {$this ?one ?script FILTER (regex(str(?one), \"
+ "UNION {$this ?two ?script FILTER (regex(str(?two), \"#_\\\\d\\\\d$\"))}\n"
+ "UNION {$this ?three ?script FILTER (regex(str(?three), \"#_\\\\d\\\\d\\\\d+$\"))}\n"
+ "UNION {?member rdfs:member ?script FILTER (?member = $this)}\n"
+ "} ORDER BY ?member ?three ?two ?one")
protected abstract List<String> getCalliScriptsAsList();
static String compress(int minification, List<String> links, HttpUriClient client)
throws IOException {
final List<SourceFile> scripts = new ArrayList<SourceFile>();
for (String url : links) {
String code = getJavaScriptCode(client, url);
scripts.add(SourceFile.fromCode(url, code));
}
Compiler compiler = new Compiler();
CompilerOptions options = new CompilerOptions();
options.setLanguageIn(LanguageMode.ECMASCRIPT5);
options.setLanguageOut(LanguageMode.ECMASCRIPT5);
options.setWarningLevel(DiagnosticGroups.NON_STANDARD_JSDOC, CheckLevel.OFF);
getCompilationLevel(minification).setOptionsForCompilationLevel(options);
List<SourceFile> externals = CommandLineRunner.getDefaultExterns();
Result result = compiler.compile(externals, scripts, options);
if (result.errors != null && result.errors.length > 0) {
throw new InternalServerError(result.errors[0].toString());
}
return compiler.toSource();
}
private int getMinification() {
int result = Integer.MAX_VALUE;
for (Number number : getCalliMinified()) {
if (number.intValue() < result) {
result = number.intValue();
}
}
if (result == Integer.MAX_VALUE)
return 2;
return result;
}
private static CompilationLevel getCompilationLevel(int minification) {
if (minification == 1)
return CompilationLevel.WHITESPACE_ONLY;
if (minification == 2)
return CompilationLevel.SIMPLE_OPTIMIZATIONS;
return CompilationLevel.ADVANCED_OPTIMIZATIONS;
}
private static String getJavaScriptCode(HttpUriClient client, String url) throws IOException {
HttpUriEntity entity = client.getEntity(url, "text/javascript");
return EntityUtils.toString(entity, "UTF-8");
}
}
|
package cn.mutils.app.javadoc;
import cn.mutils.app.javadoc.model.*;
import cn.mutils.app.javadoc.sort.ClassDocInfoComparator;
import cn.mutils.app.javadoc.sort.ClassDocInfoTreeComparator;
import cn.mutils.app.javadoc.sort.FieldDocInfoComparator;
import cn.mutils.app.javadoc.sort.MethodDocInfoComparator;
import cn.mutils.app.javadoc.util.IOUtil;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.*;
public class JavaDocReporter {
private List<ClassDocInfo> mClassDocInfoList;
private List<ClassDocInfo> mClassDocInfoTreeList;
private PackageTreeNode mPackageTreeNodeRoot;
private Map<String, PackageTreeNode> mPackageTreeNodeMap;
private OverviewTreeNode mOverviewTreeNodeRoot;
private Map<String, OverviewTreeNode> mOverviewTreeNodeMap;
public JavaDocReporter(List<ClassDocInfo> classDocInfoList) {
Collections.sort(classDocInfoList, new ClassDocInfoComparator());
mClassDocInfoList = classDocInfoList;
mClassDocInfoTreeList = new ArrayList<ClassDocInfo>(mClassDocInfoList);
Collections.sort(mClassDocInfoTreeList, new ClassDocInfoTreeComparator());
buildPackageTree();
buildOverviewTree();
}
private void buildOverviewTree() {
mOverviewTreeNodeRoot = new OverviewTreeNode();
if (mOverviewTreeNodeMap != null) {
mOverviewTreeNodeMap.clear();
} else {
mOverviewTreeNodeMap = new HashMap<String, OverviewTreeNode>();
}
mOverviewTreeNodeMap.put(mOverviewTreeNodeRoot.name, mOverviewTreeNodeRoot);
Map<String, Object> packageScannedMap = new HashMap<String, Object>();
String packageName = " ";
for (ClassDocInfo classDocInfo : mClassDocInfoTreeList) {
if (!packageName.equals(classDocInfo.packageName)) {
buildOverviewTreePackage(classDocInfo.packageName, packageScannedMap);
}
buildOverviewTreeClass(classDocInfo);
packageName = classDocInfo.packageName;
}
mOverviewTreeNodeRoot.resetAllIds(-1);
}
private void buildOverviewTreePackage(String packageName, Map<String, Object> packageScannedMap) {
if (packageName.isEmpty()) {
return;
}
String[] packageElements = packageName.split("\\.");
for (int i = 0, size = packageElements.length; i < size; i++) {
String package4i = makePackage(packageElements, i);
if (packageScannedMap.containsKey(package4i)) {
continue;
}
packageScannedMap.put(package4i, null);
PackageTreeNode node = mPackageTreeNodeMap.get(package4i);
if (node == null) {
continue;
}
OverviewTreeNode overviewTreeNode = null;
if (node.isTogetherWithParent()) {
overviewTreeNode = mOverviewTreeNodeMap.get(node.parent.name);
if (overviewTreeNode == null) {
continue;
}
overviewTreeNode.name += ".";
} else {
overviewTreeNode = new OverviewTreeNode();
OverviewTreeNode overviewNodeParent = mOverviewTreeNodeMap.get(node.parent.name);
if (overviewNodeParent == null) {
continue;
}
overviewTreeNode.parent = overviewNodeParent;
overviewTreeNode.parent.children.add(overviewTreeNode);
}
mOverviewTreeNodeMap.put(package4i, overviewTreeNode);
overviewTreeNode.name += packageElements[i];
overviewTreeNode.title = package4i;
}
}
private void buildOverviewTreeClass(ClassDocInfo classDocInfo) {
String packageName = classDocInfo.packageName;
OverviewTreeNode nodeParent = mOverviewTreeNodeMap.get(packageName);
if (nodeParent == null) {
return;
}
OverviewTreeNode node = new OverviewTreeNode(classDocInfo.simpleName, nodeParent);
if (!classDocInfo.comment.isEmpty()) {
String lineTitle = getFirstLineForTitle(classDocInfo.comment);
if (lineTitle != null && lineTitle.length() < 25) {
if (lineTitle.indexOf('<') == -1 && lineTitle.indexOf('>') == -1) {
node.comment = " " + lineTitle;
}
}
}
node.title = classDocInfo.name;
nodeParent.children.add(node);
mOverviewTreeNodeMap.put(classDocInfo.name, node);
}
private void buildPackageTree() {
mPackageTreeNodeRoot = new PackageTreeNode();
if (mPackageTreeNodeMap != null) {
mPackageTreeNodeMap.clear();
} else {
mPackageTreeNodeMap = new HashMap<String, PackageTreeNode>();
}
mPackageTreeNodeMap.put(mPackageTreeNodeRoot.name, mPackageTreeNodeRoot);
for (ClassDocInfo classDocInfo : mClassDocInfoTreeList) {
String packageName = classDocInfo.packageName;
PackageTreeNode node = mPackageTreeNodeMap.get(packageName);
if (node != null) {
node.classes.add(classDocInfo);
continue;
}
if (packageName == null || packageName.isEmpty()) {
mPackageTreeNodeRoot.classes.add(classDocInfo);
continue;
}
PackageTreeNode parentNode = mPackageTreeNodeRoot;
String[] packageElements = packageName.split("\\.");
for (int i = 0, size = packageElements.length; i < size; i++) {
String package4i = makePackage(packageElements, i);
PackageTreeNode node4i = mPackageTreeNodeMap.get(package4i);
if (node4i == null) {
node4i = new PackageTreeNode(package4i, parentNode);
mPackageTreeNodeMap.put(package4i, node4i);
parentNode.children.add(node4i);
}
parentNode = node4i;
}
}
}
    /**
     * Writes the collected documentation to a single markdown file.
     *
     * @param docPath path of the markdown file to write
     */
public void report(String docPath) {
if (mClassDocInfoList.isEmpty()) {
return;
}
Writer writer = null;
try {
FileOutputStream fis = new FileOutputStream(docPath);
writer = new OutputStreamWriter(fis, "UTF-8");
exportTree(writer);
String packageName = " ";
for (ClassDocInfo classDocInfo : mClassDocInfoList) {
export(writer, classDocInfo, packageName);
packageName = classDocInfo.packageName;
}
writer.flush();
} catch (Exception e) {
e.printStackTrace();
System.exit(-1);
} finally {
IOUtil.closeQuietly(writer);
}
}
private void exportTree(Writer writer) throws Exception {
writer.write("## Overview ##\n<pre>\n");
exportTree(writer, mOverviewTreeNodeRoot);
writer.write("</pre>\n");
}
private void exportTree(Writer writer, OverviewTreeNode node) throws Exception {
if (!node.name.isEmpty()) {
OverviewTreeNode[] parents = node.parents();
            for (int i = parents.length - 1; i >= 0; i--) {
OverviewTreeNode parentNode = parents[i];
if (parentNode.name.isEmpty()) {
continue;
}
if (parentNode.isLastOne()) {
writer.write(" ");
} else {
writer.write(" ");
}
}
if (node.isLastOne()) {
writer.write("");
} else {
writer.write("");
}
writer.write(" <a href=\"#id" + node.id + "\" title=\"" + node.title + "\">");
if (node.isPackage()) {
writer.write("<b>" + node.name + "</b>");
} else {
writer.write(node.name);
}
writer.write("</a>");
if (!node.comment.isEmpty()) {
writer.write(node.comment);
}
writer.write("\n");
}
for (OverviewTreeNode subNode : node.children) {
exportTree(writer, subNode);
}
}
private static String makePackage(String[] packages, int packageIndex) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i <= packageIndex; i++) {
sb.append(packages[i]);
if (i != packageIndex) {
sb.append(".");
}
}
return sb.toString();
}
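    // Example: makePackage(new String[]{"cn", "mutils", "app"}, 1) returns "cn.mutils".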
private static String getFirstLineForTitle(String string) {
if (string == null) {
return null;
}
if (string.isEmpty()) {
return null;
}
int firstIndexOfLine = string.indexOf('\n');
if (firstIndexOfLine == -1) {
return string;
} else {
int secondIndexOfLine = string.indexOf('\n', firstIndexOfLine + 1);
if (secondIndexOfLine == -1) {
return string.substring(0, firstIndexOfLine);
}
String secondLine = string.substring(firstIndexOfLine + 1, secondIndexOfLine);
if (secondLine.length() == 0) {
return string.substring(0, firstIndexOfLine);
}
return null;
}
}
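    // Behaviour sketch: a single-line comment is returned as-is; "First line\n\nMore text" and
    // "First line\nSecond line" both yield "First line"; once the comment has a non-empty second
    // line followed by further text (e.g. "One\nTwo\nThree"), null is returned and no title is used.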
private void export(Writer writer, ClassDocInfo doc, String lastPackage) throws Exception {
System.out.println("export class " + doc.name + " ...");
boolean isNewPackage = !doc.packageName.equals(lastPackage);
if (isNewPackage) {
String packageName = doc.packageName;
if (packageName.isEmpty()) {
packageName = "(default)";
} else {
OverviewTreeNode node = mOverviewTreeNodeMap.get(doc.packageName);
writer.write("\n<a name=\"id" + node.id + "\"></a>\n");
packageName = packageName.replace("_", "\\_");
}
writer.write("\n## " + packageName + " ##\n");
}
OverviewTreeNode node = mOverviewTreeNodeMap.get(doc.name);
writer.write("\n<a name=\"id" + node.id + "\"></a>\n");
writer.write("\n
if (!doc.comment.isEmpty()) {
writer.write("\n" + transformComment(doc.comment) + "\n");
}
Collections.sort(doc.fields, new FieldDocInfoComparator());
for (FieldDocInfo fieldDocInfo : doc.fields) {
export(writer, fieldDocInfo);
}
Collections.sort(doc.methods, new MethodDocInfoComparator());
for (MethodDocInfo methodDocInfo : doc.methods) {
export(writer, methodDocInfo);
}
writer.write("\n
System.out.println("export class " + doc.name + " success");
}
private static void export(Writer writer, FieldDocInfo doc) throws Exception {
writer.write("\n<br/><b>" + doc.name.replace("_", "\\_") + "</b> : " + doc.type.replace("_", "\\_") + "\n");
if (!doc.comment.isEmpty()) {
writer.write("\n" + transformComment(doc.comment) + "\n");
}
System.out.println("export field " + doc.name + " success");
}
private static void export(Writer writer, MethodDocInfo doc) throws Exception {
writer.write("\n<br/><b>" + doc.name.replace("_", "\\_") + "</b>" + doc.parameters.replace("_", "\\_") + "\n");
if (!doc.comment.isEmpty()) {
writer.write("\n" + transformComment(doc.comment) + "\n");
}
System.out.println("export method " + doc.name + " success");
}
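    // Example: transformComment("Summary line\nDetails here") produces
    // "> Summary line<br/>\n> Details here" - every line is prefixed with "> " and, outside of
    // <pre>/<code> blocks, all but the last line get a trailing "<br/>".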
private static String transformComment(String comment) {
StringBuilder sb = new StringBuilder();
String[] lines = comment.split("\n");
boolean isPreStatement = false;
boolean isCodeStatement = false;
for (int i = 0, length = lines.length; i < length; i++) {
String line = lines[i];
line = line.replace("<U>", "");
line = line.replace("</U>", "");
line = line.replace("<S>", "");
line = line.replace("</S>", "");
int indexOfPreBegin = line.lastIndexOf("<pre>");
int indexOfPreEnd = line.lastIndexOf("</pre>");
int indexOfCodeBegin = line.lastIndexOf("<code>");
int indexOfCodeEnd = line.lastIndexOf("</code>");
if (indexOfPreEnd != -1 && indexOfPreEnd > indexOfPreBegin) {
isPreStatement = false;
} else {
if (indexOfPreBegin != -1 && indexOfPreBegin > indexOfPreEnd) {
isPreStatement = true;
}
}
if (indexOfCodeEnd != -1 && indexOfCodeEnd > indexOfCodeBegin) {
isCodeStatement = false;
} else {
if (indexOfCodeBegin != -1 && indexOfCodeBegin > indexOfCodeEnd) {
isCodeStatement = true;
}
}
sb.append("> ");
sb.append(line);
if (i != length - 1) {
if (!isPreStatement && !isCodeStatement) {
sb.append("<br/>");
}
sb.append("\n");
}
}
return sb.toString();
}
}
|
package co.carrotsword.shorten_sql;
import java.io.InputStream;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author carrotsword
* @since 15/02/07
*/
public class PreparedQx implements AutoCloseable {
PreparedStatement statement;
PreparedQx(Connection connection, String sql) throws SQLException {
statement = connection.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
}
@Override
public void close() throws SQLException {
if (statement == null)
return;
statement.close();
}
public void setParameters(List<Qp<?>> parameters) throws SQLException {
for (int i = 0; i < parameters.size(); i++) {
parameters.get(i).setToStatement(i + 1, statement);
}
}
public int update() throws SQLException {
return statement.executeUpdate();
}
public List<ResultMap> list(int offset, int length) throws SQLException {
statement.setFetchDirection(ResultSet.FETCH_FORWARD);
statement.setMaxRows(length);
statement.setFetchSize(length);
ResultSet resultSet = statement.executeQuery();
resultSet.setFetchDirection(ResultSet.FETCH_FORWARD);
resultSet.absolute(offset+1);
List<ResultMap> result = new ArrayList<>(length);
int limit = (length == 0) ? Integer.MAX_VALUE : length;
for (int i = 0; i < limit && resultSet.next(); i++) {
ResultMap map = new ResultMap();
ResultSetMetaData metaData = resultSet.getMetaData();
for (int j = 0; j < metaData.getColumnCount(); j++) {
String colName = metaData.getColumnLabel(j + 1);
map.put(colName.toLowerCase(), resultSet.getObject(colName));
}
result.add(map);
}
return result;
}
public boolean execute() throws SQLException {
return statement.execute();
}
public InputStream stream(int index) throws SQLException {
ResultSet resultSet = statement.executeQuery();
if(resultSet.next()){
int columnType = resultSet.getMetaData().getColumnType(index+1);
InputStream inputStream;
switch(columnType){
case Types.CLOB:
case Types.NCLOB:
inputStream = resultSet.getAsciiStream(index+1);
break;
default:
inputStream = resultSet.getBinaryStream(index+1);
}
return new ProxyInputStream(inputStream, statement);
}
return null;
}
}
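/*
* Example usage (a minimal sketch; assumes an open java.sql.Connection named
* "connection", that Qp.of(...) is a hypothetical factory for a parameter
* wrapper, and that ResultMap exposes Map-style get() on the lower-cased
* column labels built in list()):
*
* try (PreparedQx qx = new PreparedQx(connection,
* "SELECT id, name FROM users WHERE status = ?")) {
* List<Qp<?>> params = new ArrayList<>();
* params.add(Qp.of("ACTIVE"));
* qx.setParameters(params);
* // fetch a page of up to 10 rows, skipping past the given offset
* for (ResultMap row : qx.list(10, 10)) {
* System.out.println(row.get("name"));
* }
* }
*/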
|
package codechicken.lib.render;
import codechicken.lib.vec.Matrix4;
import com.google.common.collect.ImmutableMap;
import net.minecraft.client.renderer.block.model.ItemCameraTransforms.TransformType;
import net.minecraftforge.common.model.TRSRTransformation;
import javax.vecmath.Matrix4f;
import javax.vecmath.Vector3f;
public class TransformUtils {
private static final TRSRTransformation flipX = new TRSRTransformation(null, null, new Vector3f(-1, 1, 1), null);
public static final CCModelState DEFAULT_BLOCK;
public static final CCModelState DEFAULT_ITEM;
public static final CCModelState DEFAULT_TOOL;
public static final CCModelState DEFAULT_BOW;
static {
TRSRTransformation thirdPerson = get(0, 2.5f, 0, 75, 45, 0, 0.375f);
TRSRTransformation firstPerson;
ImmutableMap.Builder<TransformType, TRSRTransformation> defaultBlockBuilder = ImmutableMap.builder();
defaultBlockBuilder.put(TransformType.GUI, get(0, 0, 0, 30, 225, 0, 0.625f));
defaultBlockBuilder.put(TransformType.GROUND, get(0, 3, 0, 0, 0, 0, 0.25f));
defaultBlockBuilder.put(TransformType.FIXED, get(0, 0, 0, 0, 0, 0, 0.5f));
defaultBlockBuilder.put(TransformType.THIRD_PERSON_RIGHT_HAND, thirdPerson);
defaultBlockBuilder.put(TransformType.THIRD_PERSON_LEFT_HAND, leftify(thirdPerson));
defaultBlockBuilder.put(TransformType.FIRST_PERSON_RIGHT_HAND, get(0, 0, 0, 0, 45, 0, 0.4f));
defaultBlockBuilder.put(TransformType.FIRST_PERSON_LEFT_HAND, get(0, 0, 0, 0, 225, 0, 0.4f));
DEFAULT_BLOCK = new CCModelState(defaultBlockBuilder.build());
thirdPerson = get(0, 3, 1, 0, 0, 0, 0.55f);
firstPerson = get(1.13f, 3.2f, 1.13f, 0, -90, 25, 0.68f);
ImmutableMap.Builder<TransformType, TRSRTransformation> defaultItemBuilder = ImmutableMap.builder();
defaultItemBuilder.put(TransformType.GROUND, get(0, 2, 0, 0, 0, 0, 0.5f));
defaultItemBuilder.put(TransformType.HEAD, get(0, 13, 7, 0, 180, 0, 1));
defaultItemBuilder.put(TransformType.THIRD_PERSON_RIGHT_HAND, thirdPerson);
defaultItemBuilder.put(TransformType.THIRD_PERSON_LEFT_HAND, leftify(thirdPerson));
defaultItemBuilder.put(TransformType.FIRST_PERSON_RIGHT_HAND, firstPerson);
defaultItemBuilder.put(TransformType.FIRST_PERSON_LEFT_HAND, leftify(firstPerson));
DEFAULT_ITEM = new CCModelState(defaultItemBuilder.build());
ImmutableMap.Builder<TransformType, TRSRTransformation> defaultToolBuilder = ImmutableMap.builder();
defaultToolBuilder.put(TransformType.THIRD_PERSON_RIGHT_HAND, get(0, 4, 0.5F, 0, -90, 55, 0.85F));
defaultToolBuilder.put(TransformType.THIRD_PERSON_LEFT_HAND, get(0, 4, 0.5f, 0, 90, -55, 0.85f));
defaultToolBuilder.put(TransformType.FIRST_PERSON_RIGHT_HAND, get(1.13f, 3.2f, 1.13f, 0, -90, 25, 0.68f));
defaultToolBuilder.put(TransformType.FIRST_PERSON_LEFT_HAND, get(1.13f, 3.2f, 1.13f, 0, 90, -25, 0.68f));
DEFAULT_TOOL = new CCModelState(defaultToolBuilder.build());
ImmutableMap.Builder<TransformType, TRSRTransformation> defaultBowBuilder = ImmutableMap.builder();
defaultBowBuilder.put(TransformType.THIRD_PERSON_RIGHT_HAND, get(-1F, -2F, 2.5F, -80, 260, -40, 0.9F));
defaultBowBuilder.put(TransformType.THIRD_PERSON_LEFT_HAND, get(-1F, -2F, 2.5F, -80, -280, 40, 0.9f));
defaultBowBuilder.put(TransformType.FIRST_PERSON_RIGHT_HAND, get(1.13F, 3.2F, 1.13F, 0, -90, 25, 0.68f));
defaultBowBuilder.put(TransformType.FIRST_PERSON_LEFT_HAND, get(1.13f, 3.2f, 1.13f, 0, 90, -25, 0.68f));
DEFAULT_BOW = new CCModelState(defaultBowBuilder.build());
}
public static TRSRTransformation get(float tx, float ty, float tz, float rx, float ry, float rz, float s) {
return TRSRTransformation.blockCenterToCorner(new TRSRTransformation(new Vector3f(tx / 16, ty / 16, tz / 16), TRSRTransformation.quatFromXYZDegrees(new Vector3f(rx, ry, rz)), new Vector3f(s, s, s), null));
}
public static TRSRTransformation leftify(TRSRTransformation transform) {
return TRSRTransformation.blockCenterToCorner(flipX.compose(TRSRTransformation.blockCornerToCenter(transform)).compose(flipX));
}
public static TRSRTransformation fromMatrix4(Matrix4 matrix4){
TRSRTransformation transformation = new TRSRTransformation(matrix4.toMatrix4f());
transformation.getLeftRot();//Call something to cause it to gen properly.
return transformation;
}
}
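/*
* Example (sketch): building a custom CCModelState the same way the static
* initializer above does, using get(tx, ty, tz, rx, ry, rz, scale) for the
* right hand and leftify(...) to mirror it for the left hand.
*
* ImmutableMap.Builder<TransformType, TRSRTransformation> builder = ImmutableMap.builder();
* TRSRTransformation thirdPerson = TransformUtils.get(0, 2.5f, 0, 75, 45, 0, 0.375f);
* builder.put(TransformType.THIRD_PERSON_RIGHT_HAND, thirdPerson);
* builder.put(TransformType.THIRD_PERSON_LEFT_HAND, TransformUtils.leftify(thirdPerson));
* builder.put(TransformType.GUI, TransformUtils.get(0, 0, 0, 30, 225, 0, 0.625f));
* CCModelState state = new CCModelState(builder.build());
*/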
|
package com.areen.jlib.gui.fcb;
import com.areen.jlib.gui.ColorArrowUI;
import com.areen.jlib.tuple.Pair;
import com.areen.jlib.util.Sise;
import java.awt.event.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.*;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.JTextComponent;
import javax.swing.text.PlainDocument;
public class ComboBoxFilter extends PlainDocument {
private JComboBox comboBox;
private JTextComponent comboBoxEditor;
private FilteredComboBoxModel comboBoxModel;
/**
* Every call to setSelectedItem() in the combo-box will provoke another
* call to the insertString, with the newly selected item. We prevent this
* by seting "selecting" to true before we call setPattern(). Subsequent
* calls to remove/insertString should be ignored.
*/
boolean selecting = false;
boolean hidePopupOnFocusLoss;
private boolean arrowKeyPressed = false;
private boolean keyPressed = false;
private boolean finish = false;
private boolean inPreparation;
private int selectedIndex;
private Object pickedItem;
private Object pickedKey;
/**
* This value is set by cell-editors to inform ComboBoxFilter whether to
* execute pick value or not during the preparation phase (prepare()
* method).
*/
private boolean triggeredByKeyPress = false;
private int previousItemCount;
/**
* We have to store the popup menu's dimension so we can fix incorrect popup
* size during the filtering process.
*/
private int popupMenuWidth;
private int popupMenuHeight;
static final Logger LOGGER = Logger.getLogger(ComboBoxFilter.class.getCanonicalName());
/**
* This constructor adds filtering capability to the given JComboBox object
* argComboBox. It will also assign appropriate model to the combo-box, one
* that has setPattern() method. It will also set the argComboBox to be
* editable.
*
* @param argComboBox
* @param argComboBoxModel
*/
public ComboBoxFilter(final JComboBox argComboBox, FilteredComboBoxModel argComboBoxModel) {
comboBox = argComboBox;
comboBox.setEditable(true);
fixComboBoxArrowUI();
comboBoxModel = argComboBoxModel;
comboBox.setModel(comboBoxModel);
// If initially an item is selected, we will assume that item is a picked item.
comboBox.putClientProperty("item-picked", Boolean.TRUE);
/*
if (comboBox.getSelectedItem() == null) {
comboBox.putClientProperty("item-picked", Boolean.TRUE);
} else {
comboBox.putClientProperty("item-picked", Boolean.TRUE);
}
*
*/
comboBoxEditor = (JTextComponent) comboBox.getEditor().getEditorComponent();
comboBoxEditor.setDocument(this);
// let's add a key listener to the editor
comboBoxEditor.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
LOGGER.info("keyPressed()");
int keyCode = e.getKeyCode();
System.out.println(keyCode);
/*
* In the case user presses SHIFT, CTRL, ALT keys, or <ANY>+TAB,
* we return immediately.
*/
//int modifiers = e.getModifiersEx();
if (keyCode == KeyEvent.VK_SHIFT
|| (keyCode == KeyEvent.VK_ALT)
|| (keyCode == KeyEvent.VK_CONTROL)
|| (keyCode == KeyEvent.VK_WINDOWS)
|| (keyCode == KeyEvent.VK_CONTEXT_MENU)
|| (keyCode == KeyEvent.VK_TAB && e.isShiftDown())) {
keyPressed = false;
return;
}
keyPressed = true;
boolean isTableCellEditor = false;
Object tmp = comboBox.getClientProperty("JComboBox.isTableCellEditor");
if (tmp != null) {
isTableCellEditor = tmp.equals(Boolean.TRUE);
}
if (comboBox.isDisplayable()) {
comboBox.setPopupVisible(true);
}
arrowKeyPressed = false;
finish = false;
int currentIndex = comboBox.getSelectedIndex();
switch (keyCode) {
case KeyEvent.VK_TAB:
LOGGER.info("TAB!");
finish = true;
comboBoxModel.setReadyToFinish(false);
if (!isTableCellEditor()) {
String txt = updateFcbEditor();
}
if ((comboBox.getSelectedItem() == null)) {
/*
* if TAB is pressed, but nothing is selected, and
* the picked item is not null, that means the user
* pressed tab when there was an empty list of
* items (typically when the user typed something
* that does not exist in the list of items). In
* this case we cancel the editing.
*/
comboBoxModel.setCancelled(true);
}
if (comboBox.getSelectedItem() != null) {
if (pickedItem == comboBox.getSelectedItem()) {
/*
* We cancel the editing when the picked item is
* the same as the item that is currently
* selected in the combo-box, because we do not
* want to trigger a database change (there is
* no need to update to the same value).
*/
comboBoxModel.setCancelled(true);
} else {
pickedItem = comboBox.getSelectedItem();
pickedKey = comboBoxModel.getKeyOfTheSelectedItem().toString();
} // else
}
break;
case KeyEvent.VK_ESCAPE:
if (isTableCellEditor()) {
comboBoxModel.setCancelled(true);
comboBox.setSelectedItem(pickedItem);
} else {
ComboBoxFilter.this.setText(pickedKey.toString());
} // else
break;
case KeyEvent.VK_ENTER:
finish = true;
comboBoxModel.setReadyToFinish(false); // we expect cell editor
String txt = updateFcbEditor();
boolean pa = comboBoxModel.isAnyPatternAllowed();
boolean ma = comboBoxModel.isMultiSelectionAllowed();
if ((pa || ma)) {
if (txt == null) {
// do nothing
} else {
// we have to update the text here because updateFcbEditor won't
setText(txt);
pickedItem = comboBox.getSelectedItem();
pickedKey = txt;
comboBoxModel.setPickedItem(pickedItem);
comboBoxModel.setPickedKey(txt);
} //else
} else {
if (txt == null) {
// if user types a string that has no match, we select the last picked item.
comboBox.setSelectedItem(pickedItem);
// After all events are processed, alert the user
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
String eol = System.getProperty("line.separator");
JOptionPane.showMessageDialog(comboBox,
"Invalid option.");
} // run() method
}); // Runnable (anonymous) implementation
} else {
// setText(txt); // No need because updateFcbEditor will do it in this case
pickedItem = comboBox.getSelectedItem();
pickedKey = txt;
comboBoxModel.setPickedItem(pickedItem);
comboBoxModel.setPickedKey(txt);
} // else
} // else
break;
case KeyEvent.VK_UP:
arrowKeyPressed = true;
if (isTableCellEditor) {
/*
* For some reason, the JTable is stealing keyboard
* events and preventing us from moving up/down,
* so we have to select proper values manually.
* If selection did not move, we will manually
* select appropriate item.
*/
if ((selectedIndex == currentIndex) && (currentIndex > 0)) {
comboBox.setSelectedIndex(currentIndex - 1);
selectedIndex = currentIndex - 1;
} else {
selectedIndex = currentIndex;
} // else
// we set this to false ONLY if the combo box is a cell editor!
arrowKeyPressed = false;
}
break;
case KeyEvent.VK_DOWN:
arrowKeyPressed = true;
if (isTableCellEditor && comboBox.isPopupVisible()) {
/*
* For some reason, the JTable is stealing keyboard
* events and preventing us from moving up/down,
* so we have to select proper values manually.
* If selection did not move, we will manually
* select appropriate item.
*/
if ((selectedIndex == currentIndex)
&& (currentIndex < comboBox.getItemCount() - 1)) {
comboBox.setSelectedIndex(currentIndex + 1);
selectedIndex = currentIndex + 1;
} else {
selectedIndex = currentIndex;
} // else
// we set this to false ONLY if the combo box is a cell editor!
arrowKeyPressed = false;
}
break;
default:
selectedIndex = currentIndex;
} // switch
keyPressed = false;
} // keyPressed() method
});
// Bug 5100422 on Java 1.5: Editable JComboBox won't hide popup when tabbing out
hidePopupOnFocusLoss = System.getProperty("java.version").startsWith("1.5");
// Highlight whole text when focus gets lost
comboBoxEditor.addFocusListener(new FocusAdapter() {
@Override
public void focusLost(FocusEvent e) {
boolean pa = comboBoxModel.isAnyPatternAllowed();
boolean ma = comboBoxModel.isMultiSelectionAllowed();
LOGGER.info("focusLost()");
if (pickedKey != null && !(pa || ma)) {
// When combo-box loses focus, we need to set the text to the selected
setText(pickedKey.toString());
} else {
if (!(pa || ma)) {
setText("");
}
} // else
// Workaround for Bug 5100422 - Hide Popup on focus loss
if (hidePopupOnFocusLoss) {
ComboBoxFilter.this.comboBox.setPopupVisible(false);
}
comboBoxModel.setReadyToFinish(false);
} // focusLost() method
@Override
public void focusGained(FocusEvent fe) {
if (!isTableCellEditor()) {
super.focusGained(fe);
comboBoxEditor.selectAll();
}
} // focusGained() method
});
/*
* The following PopupMenuListener is needed to store the initial
* Dimension of the combobox popup.
*/
comboBox.addPopupMenuListener(new PopupMenuListener() {
@Override
public void popupMenuWillBecomeVisible(PopupMenuEvent pme) {
/*
* NOTE: Dimension returned by getSize() method is actually the
* dimension of the combo-box popup menu! It is not the size of
* the JTextComponent object! :)
*/
JComboBox box = (JComboBox) pme.getSource();
popupMenuWidth = box.getSize().width;
popupMenuHeight = box.getSize().height;
} // popupMenuWillBecomeVisible() method
@Override
public void popupMenuWillBecomeInvisible(PopupMenuEvent pme) {
LOGGER.info("hiding popup...");
comboBox.putClientProperty("item-picked", Boolean.FALSE);
} // popupMenuWillBecomeInvisible() method
@Override
public void popupMenuCanceled(PopupMenuEvent pme) {
// do nothing
} // popupMenuCanceled() method
});
if (!isTableCellEditor()) {
comboBoxEditor.setFocusTraversalKeysEnabled(false);
Action myAction = new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
// Nothing is needed here - all we want is to skip focusLost() call when user presses TAB
comboBoxEditor.transferFocus();
} // actionPerformed() method
};
comboBoxEditor.getActionMap().put("tab-action", myAction);
comboBoxEditor.getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT)
.put(KeyStroke.getKeyStroke("TAB"), "tab-action");
}
Object selected = comboBox.getSelectedItem();
pickedItem = selected;
Object tmp = comboBoxModel.getKeyOfTheSelectedItem();
if (tmp instanceof String) {
pickedKey = tmp.toString();
} else {
pickedKey = tmp;
} // else
selectedIndex = comboBox.getSelectedIndex();
if (selected != null) {
setText(comboBoxModel.getKeyOfTheSelectedItem().toString());
}
} // ComboBoxFilter() method
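/*
* Typical wiring (a sketch; assumes FilteredComboBoxModel can be built from
* the items to offer, e.g. via a constructor taking them):
*
* JComboBox box = new JComboBox();
* FilteredComboBoxModel model = new FilteredComboBoxModel(items);
* ComboBoxFilter filter = new ComboBoxFilter(box, model);
*
* The constructor above makes the combo-box editable, installs "model" as its
* model and replaces the editor's Document with the filter itself, so typing
* in the editor re-filters the popup list.
*/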
/**
* User typically will call this method from a table cell editor when we
* want to "inform" filtered combo-box that we want to re-filter before we
* actually start editing. The reason for this is when user goes to another
* cell, the filtered set of entries from the previous cell will not apply.
*
* @param argPattern
*/
public void prepare(Object argPattern) {
LOGGER.info("prepare(" + argPattern + ")");
if (argPattern == null) {
// If the previous value is null, we simply exit this method.
pickedItem = null;
pickedKey = null;
comboBox.setSelectedItem(null);
return;
}
inPreparation = true;
selecting = true;
comboBoxModel.setCancelled(false);
if (isTableCellEditor()) {
comboBoxModel.setReadyToFinish(false);
}
String pat = argPattern.toString();
/*
* If the editing is triggered by a key-press in a JTable, then the
* argPattern contains a string in SISE format, so we have to extract
* they key pressed and the previous value.
*/
String key = "";
if (isTriggeredByKeyPress()) {
String[] strs = Sise.units(pat);
/*
* In the case the cell's value was a NULL, then strs will have only
* one element. In that case we set pat to be a null, and do not set
* the pickedItem.
*/
if (strs.length == 2) {
pat = strs[1];
} else {
pat = null;
} // else
key = strs[0];
}
try {
if (argPattern == null) {
setText("");
} else {
if (pat != null) {
setText(pat.trim());
}
} // else
if (pat == null) {
pickedItem = null;
pickedKey = null;
} else {
// Cell's value was not a null, so we can execute filter so the combo-box updates the
// selected item to be what was previously selected.
filterTheModel();
pickedItem = comboBox.getSelectedItem();
pickedKey = comboBoxModel.getKeyOfTheSelectedItem();
} // else
// Finally, when we have selected the item that was previously selected, now we can insert
// the character user typed inside a JTable
if (isTriggeredByKeyPress()) {
setText(key);
filterTheModel();
}
} catch (BadLocationException ex) {
Logger.getLogger(ComboBoxFilter.class.getName()).log(Level.SEVERE, null, ex);
} // catch
if (isTableCellEditor()) {
comboBoxModel.setReadyToFinish(false);
}
inPreparation = false;
LOGGER.info("prepare done.");
} // prepare() method
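/*
* Example (sketch): how a table cell editor could use prepare() before
* editing starts; "startedByTyping" and "previousCellValue" are assumed to be
* supplied by the editor. When editing was triggered by a key press, the
* value is expected as two SISE units (the typed key, then the previous cell
* value) separated by Sise.UNIT_SEPARATOR_STRING, as handled above.
*
* filter.setTriggeredByKeyPress(startedByTyping);
* filter.prepare(previousCellValue);
*/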
private void clearTextSelection() {
if (comboBoxEditor.getSelectedText() != null) {
// There is selected text; remove the selection. On Windows, text may become selected by default.
LOGGER.info("SELECTED TEXT: " + comboBoxEditor.getSelectedText());
int pos = comboBoxEditor.getCaretPosition();
comboBoxEditor.select(0, 0);
// return caret position to the original place
comboBoxEditor.setCaretPosition(pos);
}
} // clearTextSelection() method
private void setText(String text) {
try {
// remove all text and insert the completed string
super.remove(0, getLength());
super.insertString(0, text, null);
} catch (BadLocationException e) {
throw new RuntimeException(e.toString());
} // catch
} // setText() method
/**
* This method is a leftover from the previous version of the
* ComboBoxFilter. Should be removed after the testing phase.
*/
private void highlightCompletedText(int start) {
comboBoxEditor.setCaretPosition(getLength());
comboBoxEditor.moveCaretPosition(start);
} // highlightCompletedText() method
@Override
public void insertString(int offs, String str, AttributeSet a) throws BadLocationException {
LOGGER.info("insertString(" + offs + ", " + str + ")");
//LOGGER.info("insertString(" + selecting + ", " + arrowKeyPressed + ")");
// return immediately when selecting an item
if (selecting) {
return;
}
if (arrowKeyPressed) {
arrowKeyPressed = false;
comboBox.putClientProperty("item-picked", Boolean.FALSE);
return;
}
boolean itemPicked = false; // we need this value because item-picked may change due to chain
// of events
// inform action-performed listeners that the item has been picked so they may update
// some other components
if (keyPressed) {
itemPicked = false;
} else {
itemPicked = true;
} // else
boolean isPicked = (Boolean) comboBox.getClientProperty("item-picked");
comboBox.putClientProperty("item-picked", itemPicked || isPicked);
// insert the string into the document
if (str.contains(Sise.UNIT_SEPARATOR_STRING)) {
LOGGER.info("%%%%%%%%%%%%%");
System.out.println(str);
// we got a string in the Sise format, that must be because user picked an item with a mouse
// in that case, we will take the key component (SISE unit) and put that instead.
String[] strs = Sise.units(str);
int idx = comboBoxModel.getKeyIndex();
if (isTableCellEditor()) {
comboBoxModel.setReadyToFinish(true);
}
// This is an ArrayIndexOutOfBoundsException "fix". When the user presses SPACE + ENTER we sometimes get
// an error, because strs is an empty array!
if (strs.length > 0) {
super.insertString(offs, strs[idx], a);
} else {
super.insertString(offs, "", a);
} // else
// We have to filter after the user selects an item with the mouse.
// WARNING: here we rely on the FilteredComboBoxModel's setPattern() method to select the
// exact match - ie the item that user picked with the mouse.
filterTheModel();
if (itemPicked) {
pickedItem = comboBox.getSelectedItem();
Object tmp = comboBoxModel.getKeyOfTheSelectedItem();
if (tmp instanceof String) {
pickedKey = tmp.toString();
} else {
pickedKey = tmp;
} // else
}
comboBox.putClientProperty("item-picked", Boolean.TRUE);
comboBoxModel.setPickedItem(pickedItem);
comboBoxModel.setPickedKey(pickedKey);
return;
} else {
// otherwise, insert the whole string
super.insertString(offs, str, a);
Object obj = comboBoxModel.getSelectedItem();
if (obj != null) {
if (itemPicked && !(obj instanceof Pair) && !(obj instanceof Object[])) {
comboBoxModel.setPickedItem(obj);
comboBoxModel.setPickedKey(obj);
}
}
} // else
if (finish) {
return;
}
filterTheModel();
} // insertString() method
@Override
public void remove(int offs, int len) throws BadLocationException {
LOGGER.log(Level.INFO, "remove({0}, {1})", new Object[]{offs, len});
LOGGER.info("remove(" + selecting + ", " + arrowKeyPressed + ")");
// return immediately when selecting an item
if (selecting) {
// remove() is called whenever setSelectedItem() or setSelectedIndex() are called. They may be
// called during the filtering process, so we do not want to remove while in the middle.
return;
}
if (arrowKeyPressed) {
if (isTableCellEditor()) {
// if the remove() has been called while user navigates through the combobox list, we do not
// filter. when user navigates via arrow keys, remove() is always called first, followed by
// the insertString. However, sometimes table steals the event, and causes trouble.
// As remove() is called first, here we check if correct value has been selected after user
// presses UP/DOWN
int currentIndex = comboBox.getSelectedIndex();
if (selectedIndex != currentIndex) {
// they are not equal, fix it
comboBox.setSelectedIndex(selectedIndex);
}
}
return;
}
// remove the string from the document
super.remove(offs, len);
if (finish) {
// user pressed ENTER so in the case remove is called we do not filter the model.
return;
}
// finally, do the filter
filterTheModel();
} // remove() method
/**
* Use this method when you need to programmatically pick an item.
* @param argItem
*/
public void pickItem(Object argItem) {
Object obj = comboBoxModel.getKeyOfAnItem(argItem);
if ((argItem == null) && (obj == null)) {
return;
}
pickedItem = argItem;
pickedKey = obj.toString();
} // pickItem() method
/**
* This method calls the setPattern() method and starts the filtering.
*
* It also sets the previousItemCount variable to hold the previous number
* of filtered items.
*/
private void filterTheModel() throws BadLocationException {
// we have to "guard" the call to comboBoxModel.setPattern() with selecting set to true, then false
selecting = true;
boolean oldValue = comboBoxModel.isReadyToFinish();
comboBoxModel.setReadyToFinish(false); // we must set this to false during the filtering
previousItemCount = comboBox.getItemCount(); /// store the number of items before filtering
String pattern = getText(0, getLength());
//LOGGER.info("filterTheModel(): " + pattern);
comboBoxModel.setPattern(pattern);
clearTextSelection();
fixPopupSize();
comboBoxModel.setReadyToFinish(oldValue); // restore the value
selecting = false;
selectedIndex = comboBox.getSelectedIndex();
//LOGGER.info("SELECTED AFTER:" + comboBox.getSelectedItem());
} // filterTheModel() method
/**
* Use this method whenever you need to determine if the comboBox is used as
* a cell editor or not.
*
* @return boolean Value indicating whether comboBox is a cell editor (TRUE)
* or not (FALSE).
*/
private boolean isTableCellEditor() {
boolean isTableCellEditor = false;
Object tmp = comboBox.getClientProperty("JComboBox.isTableCellEditor");
if (tmp != null) {
isTableCellEditor = tmp.equals(Boolean.TRUE);
}
return isTableCellEditor;
} // isTableCellEditor() method
/**
* This method is used internally to fix the popup-menu size. Apparently
* JComboBox has a bug and does not calculate the proper height of the
* popup.
*
* The first time popup menu is shown, ComboBoxFilter stores the dimension,
* and re-adjusts the width to the original value all the time. Reason for
* this is that we do not want to have different widths while user types
* something.
*/
private void fixPopupSize() {
if (inPreparation) {
return;
}
LOGGER.info("fixPopupSize()");
int maxRows = comboBox.getMaximumRowCount();
if ((previousItemCount < maxRows) || (comboBox.getItemCount() < maxRows)) {
// do this only when we have less than maxRows items, to prevent the flickering.
// this is a hack and is actually the easiest solution to the JComboBox's popup resizing problem.
if (comboBox.isPopupVisible()) {
comboBox.setPopupVisible(false);
comboBox.setPopupVisible(true);
}
}
} // fixPopupSize() method
/**
* If the l&f is system, and OS is Windows, we have to fix the arrow UI of
* the combo box, when editing is enabled. This method is responsible for
* doing that.
*/
private void fixComboBoxArrowUI() {
String scn = UIManager.getSystemLookAndFeelClassName();
if (System.getProperty("os.name").startsWith("Windows")
&& UIManager.getLookAndFeel().getClass().getCanonicalName().equals(scn)) {
System.err.println("DEBUG: fixing the combo-box's arrow");
comboBox.setUI(ColorArrowUI.createUI(comboBox));
}
} // fixComboBoxArrowUI() method
private String updateFcbEditor() {
Object obj = comboBoxModel.getKeyOfTheSelectedItem();
String txt = null;
if (obj != null) {
txt = obj.toString();
}
if (txt != null) {
if (!comboBoxModel.isAnyPatternAllowed() || !comboBoxModel.isMultiSelectionAllowed()) {
/* In the case when *any* pattern is allowed, or all we want is to get a
* list of items that match, then we do not update the comboBox editor
* component with the newly selected item's key. */
if (!isTableCellEditor()) {
setText(txt);
}
}
}
return txt;
} // updateFcbEditor() method
/**
*
* @return
*/
public boolean isTriggeredByKeyPress() {
return triggeredByKeyPress;
} // isTriggeredByKeyPress() method
/**
*
* @param argTriggeredByKeyPress
*/
public void setTriggeredByKeyPress(boolean argTriggeredByKeyPress) {
triggeredByKeyPress = argTriggeredByKeyPress;
} // setTriggeredByKeyPress() method
/**
*
* @return
*/
public Object getPickedItem() {
return pickedItem;
} // getPickedItem() method
/**
*
* @return
*/
public Object getPickedKey() {
return pickedKey;
} // getPickedKey() method
} // ComboBoxFilter class
|
package com.bladecoder.ink.runtime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import com.bladecoder.ink.runtime.CallStack.Thread;
/**
* All story state information is included in the StoryState class, including
* global variables, read counts, the pointer to the current point in the story,
* the call stack (for tunnels, functions, etc), and a few other smaller bits
* and pieces. You can save the current state using the json serialisation
* functions ToJson and LoadJson.
*/
public class StoryState {
/**
* The current version of the state save file JSON-based format.
*/
public static final int kInkSaveStateVersion = 7;
public static final int kMinCompatibleLoadVersion = 6;
// REMEMBER! REMEMBER! REMEMBER!
// When adding state, update the Copy method and serialisation
// REMEMBER! REMEMBER! REMEMBER!
private List<RTObject> outputStream;
private CallStack callStack;
private List<Choice> currentChoices;
private List<String> currentErrors;
private int currentTurnIndex;
private boolean didSafeExit;
private RTObject divertedTargetObject;
private List<RTObject> evaluationStack;
private Story story;
private int storySeed;
private int previousRandom;
private HashMap<String, Integer> turnIndices;
private VariablesState variablesState;
private HashMap<String, Integer> visitCounts;
private String currentText;
// Temporary state only, during externally called function evaluation
private boolean isExternalFunctionEvaluation;
private CallStack originalCallstack;
private int originalEvaluationStackHeight;
private boolean outputStreamTextDirty = true;
private boolean outputStreamTagsDirty = true;
private List<String> currentTags;
StoryState(Story story) {
this.story = story;
outputStream = new ArrayList<RTObject>();
outputStreamDirty();
evaluationStack = new ArrayList<RTObject>();
callStack = new CallStack(story.getRootContentContainer());
variablesState = new VariablesState(callStack, story.getListDefinitions());
visitCounts = new HashMap<String, Integer>();
turnIndices = new HashMap<String, Integer>();
currentTurnIndex = -1;
// Seed the shuffle random numbers
long timeSeed = System.currentTimeMillis();
storySeed = new Random(timeSeed).nextInt() % 100;
previousRandom = 0;
currentChoices = new ArrayList<Choice>();
goToStart();
}
int getCallStackDepth() {
return callStack.getDepth();
}
void addError(String message) {
// TODO: Could just add to output?
if (currentErrors == null) {
currentErrors = new ArrayList<String>();
}
currentErrors.add(message);
}
// Warning: Any RTObject content referenced within the StoryState will
// be re-referenced rather than cloned. This is generally okay though since
// RTObjects are treated as immutable after they've been set up.
// (e.g. we don't edit a Runtime.Text after it's been created and added.)
// I wonder if there's a sensible way to enforce that..??
StoryState copy() {
StoryState copy = new StoryState(story);
copy.getOutputStream().addAll(outputStream);
outputStreamDirty();
copy.currentChoices.addAll(currentChoices);
if (hasError()) {
copy.currentErrors = new ArrayList<String>();
copy.currentErrors.addAll(currentErrors);
}
copy.callStack = new CallStack(callStack);
copy.originalCallstack = new CallStack(originalCallstack);
copy.variablesState = new VariablesState(copy.callStack, story.getListDefinitions());
copy.variablesState.copyFrom(variablesState);
copy.evaluationStack.addAll(evaluationStack);
copy.originalEvaluationStackHeight = originalEvaluationStackHeight;
if (getDivertedTargetObject() != null)
copy.setDivertedTargetObject(divertedTargetObject);
copy.setPreviousContentObject(getPreviousContentObject());
copy.visitCounts = new HashMap<String, Integer>(visitCounts);
copy.turnIndices = new HashMap<String, Integer>(turnIndices);
copy.currentTurnIndex = currentTurnIndex;
copy.storySeed = storySeed;
copy.previousRandom = previousRandom;
copy.setDidSafeExit(didSafeExit);
return copy;
}
Container currentContainer() {
return callStack.getCurrentElement().currentContainer;
}
int currentGlueIndex() {
for (int i = outputStream.size() - 1; i >= 0; i--) {
RTObject c = outputStream.get(i);
Glue glue = c instanceof Glue ? (Glue) c : null;
if (glue != null)
return i;
else if (c instanceof ControlCommand) // e.g. BeginString
break;
}
return -1;
}
String getCurrentText() {
if (outputStreamTextDirty) {
StringBuilder sb = new StringBuilder();
for (RTObject outputObj : outputStream) {
StringValue textContent = null;
if (outputObj instanceof StringValue)
textContent = (StringValue) outputObj;
if (textContent != null) {
sb.append(textContent.value);
}
}
currentText = sb.toString();
outputStreamTextDirty = false;
}
return currentText;
}
/**
* Ends the current ink flow, unwrapping the callstack but without affecting
* any variables. Useful if the ink is (say) in the middle of a nested tunnel,
* and you want it to reset so that you can divert elsewhere using
* ChoosePathString(). Otherwise, after finishing the content you diverted
* to, it would continue where it left off. Calling this is equivalent to
* calling -> END in ink.
*/
public void forceEnd() throws Exception {
while (callStack.canPopThread())
callStack.popThread();
while (callStack.canPop())
callStack.pop();
currentChoices.clear();
setCurrentContentObject(null);
setPreviousContentObject(null);
setDidSafeExit(true);
}
RTObject getCurrentContentObject() {
return callStack.getCurrentElement().getCurrentRTObject();
}
Path getCurrentPath() {
if (getCurrentContentObject() == null)
return null;
return getCurrentContentObject().getPath();
}
List<String> getCurrentTags() {
if (outputStreamTagsDirty) {
currentTags = new ArrayList<String>();
for (RTObject outputObj : outputStream) {
Tag tag = null;
if (outputObj instanceof Tag)
tag = (Tag) outputObj;
if (tag != null) {
currentTags.add(tag.getText());
}
}
outputStreamTagsDirty = false;
}
return currentTags;
}
boolean getInExpressionEvaluation() {
return callStack.getCurrentElement().inExpressionEvaluation;
}
/**
* Object representation of full JSON state. Usually you should use LoadJson
* and ToJson since they serialise directly to String for you. But it may be
* useful to get the object representation so that you can integrate it into
* your own serialisation system.
*/
public HashMap<String, Object> getJsonToken() throws Exception {
HashMap<String, Object> obj = new HashMap<String, Object>();
HashMap<String, Object> choiceThreads = null;
for (Choice c : currentChoices) {
c.originalChoicePath = c.getchoicePoint().getPath().getComponentsString();
c.originalThreadIndex = c.getThreadAtGeneration().threadIndex;
if (callStack.getThreadWithIndex(c.originalThreadIndex) == null) {
if (choiceThreads == null)
choiceThreads = new HashMap<String, Object>();
choiceThreads.put(Integer.toString(c.originalThreadIndex), c.getThreadAtGeneration().jsonToken());
}
}
if (choiceThreads != null)
obj.put("choiceThreads", choiceThreads);
obj.put("callstackThreads", callStack.getJsonToken());
obj.put("variablesState", variablesState.getjsonToken());
obj.put("evalStack", Json.listToJArray(evaluationStack));
obj.put("outputStream", Json.listToJArray(outputStream));
obj.put("currentChoices", Json.listToJArray(currentChoices));
if (getDivertedTargetObject() != null)
obj.put("currentDivertTarget", getDivertedTargetObject().getPath().getComponentsString());
obj.put("visitCounts", Json.intHashMapToJObject(visitCounts));
obj.put("turnIndices", Json.intHashMapToJObject(turnIndices));
obj.put("turnIdx", currentTurnIndex);
obj.put("storySeed", storySeed);
obj.put("previousRandom", previousRandom);
obj.put("inkSaveVersion", kInkSaveStateVersion);
// Not using this right now, but could do in future.
obj.put("inkFormatVersion", Story.inkVersionCurrent);
return obj;
}
RTObject getPreviousContentObject() {
return callStack.getcurrentThread().previousContentRTObject;
}
public HashMap<String, Integer> getVisitCounts() {
return visitCounts;
}
void goToStart() {
callStack.getCurrentElement().currentContainer = story.mainContentContainer();
callStack.getCurrentElement().currentContentIndex = 0;
}
boolean hasError() {
return currentErrors != null && currentErrors.size() > 0;
}
boolean inStringEvaluation() {
for (int i = outputStream.size() - 1; i >= 0; i--) {
ControlCommand cmd = outputStream.get(i) instanceof ControlCommand ? (ControlCommand) outputStream.get(i)
: null;
if (cmd != null && cmd.getCommandType() == ControlCommand.CommandType.BeginString) {
return true;
}
}
return false;
}
/**
* Loads a previously saved state in JSON format.
*
* @param json
* The JSON String to load.
*/
public void loadJson(String json) throws Exception {
setJsonToken(SimpleJson.textToHashMap(json));
}
List<Choice> getCurrentChoices() {
if (canContinue())
return new ArrayList<Choice>();
return currentChoices;
}
List<Choice> getGeneratedChoices() {
return currentChoices;
}
boolean canContinue() {
return getCurrentContentObject() != null && !hasError();
}
List<String> getCurrentErrors() {
return currentErrors;
}
List<RTObject> getOutputStream() {
return outputStream;
}
CallStack getCallStack() {
return callStack;
}
VariablesState getVariablesState() {
return variablesState;
}
List<RTObject> getEvaluationStack() {
return evaluationStack;
}
int getStorySeed() {
return storySeed;
}
void setStorySeed(int s) {
storySeed = s;
}
int getPreviousRandom() {
return previousRandom;
}
void setPreviousRandom(int i) {
previousRandom = i;
}
HashMap<String, Integer> getTurnIndices() {
return turnIndices;
}
int getCurrentTurnIndex() {
return currentTurnIndex;
}
boolean outputStreamContainsContent() {
for (RTObject content : outputStream) {
if (content instanceof StringValue)
return true;
}
return false;
}
Glue matchRightGlueForLeftGlue(Glue leftGlue) {
if (!leftGlue.isLeft())
return null;
for (int i = outputStream.size() - 1; i >= 0; i--) {
RTObject c = outputStream.get(i);
Glue g = null;
if (c instanceof Glue)
g = (Glue) c;
if (g != null && g.isRight() && g.getParent() == leftGlue.getParent()) {
return g;
} else if (c instanceof ControlCommand) // e.g. BeginString
break;
}
return null;
}
boolean outputStreamEndsInNewline() {
if (outputStream.size() > 0) {
for (int i = outputStream.size() - 1; i >= 0; i--) {
RTObject obj = outputStream.get(i);
if (obj instanceof ControlCommand) // e.g. BeginString
break;
StringValue text = outputStream.get(i) instanceof StringValue ? (StringValue) outputStream.get(i)
: null;
if (text != null) {
if (text.isNewline())
return true;
else if (text.isNonWhitespace())
break;
}
}
}
return false;
}
RTObject peekEvaluationStack() {
return evaluationStack.get(evaluationStack.size() - 1);
}
RTObject popEvaluationStack() {
RTObject obj = evaluationStack.get(evaluationStack.size() - 1);
evaluationStack.remove(evaluationStack.size() - 1);
return obj;
}
List<RTObject> popEvaluationStack(int numberOfObjects) throws Exception {
if (numberOfObjects > evaluationStack.size()) {
throw new Exception("trying to pop too many objects");
}
List<RTObject> popped = new ArrayList<RTObject>(
evaluationStack.subList(evaluationStack.size() - numberOfObjects, evaluationStack.size()));
evaluationStack.subList(evaluationStack.size() - numberOfObjects, evaluationStack.size()).clear();
return popped;
}
void pushEvaluationStack(RTObject obj) {
// Include metadata about the origin List for set values when
// they're used, so that lower level functions can make use
// of the origin list to get related items, or make comparisons
// with the integer values etc.
ListValue listValue = null;
if (obj instanceof ListValue)
listValue = (ListValue) obj;
if (listValue != null) {
// Update origin when the list has something to indicate the list
// origin
InkList rawList = listValue.getValue();
List<String> names = rawList.getOriginNames();
if (names != null) {
ArrayList<ListDefinition> origins = new ArrayList<ListDefinition>();
for (String n : names) {
ListDefinition def = story.getListDefinitions().getDefinition(n);
if (!origins.contains(def))
origins.add(def);
}
rawList.origins = origins;
}
}
evaluationStack.add(obj);
}
// Push to output stream, but split out newlines in text for consistency
// in dealing with them later.
void pushToOutputStream(RTObject obj) {
StringValue text = obj instanceof StringValue ? (StringValue) obj : null;
if (text != null) {
List<StringValue> listText = trySplittingHeadTailWhitespace(text);
if (listText != null) {
for (StringValue textObj : listText) {
pushToOutputStreamIndividual(textObj);
}
return;
}
}
pushToOutputStreamIndividual(obj);
}
void pushToOutputStreamIndividual(RTObject obj) {
Glue glue = obj instanceof Glue ? (Glue) obj : null;
StringValue text = obj instanceof StringValue ? (StringValue) obj : null;
boolean includeInOutput = true;
if (glue != null) {
// Found matching left-glue for right-glue? Close it.
Glue matchingRightGlue = null;
if (glue.isLeft())
matchingRightGlue = matchRightGlueForLeftGlue(glue);
// Left/Right glue is auto-generated for inline expressions
// where we want to absorb newlines but only in a certain direction.
// "Bi" glue is written by the user in their ink with <>
if (glue.isLeft() || glue.isBi()) {
trimNewlinesFromOutputStream(matchingRightGlue);
}
// New right-glue
includeInOutput = glue.isBi() || glue.isRight();
} else if (text != null) {
if (currentGlueIndex() != -1) {
// Absorb any new newlines if there's existing glue
// in the output stream.
// Also trim any extra whitespace (spaces/tabs) if so.
if (text.isNewline()) {
trimFromExistingGlue();
includeInOutput = false;
}
// Able to completely reset when non-whitespace is appended
else if (text.isNonWhitespace()) {
removeExistingGlue();
}
} else if (text.isNewline()) {
if (outputStreamEndsInNewline() || !outputStreamContainsContent())
includeInOutput = false;
}
}
if (includeInOutput) {
outputStream.add(obj);
}
outputStreamDirty();
}
// Only called when non-whitespace is appended
void removeExistingGlue() {
for (int i = outputStream.size() - 1; i >= 0; i--) {
RTObject c = outputStream.get(i);
if (c instanceof Glue) {
outputStream.remove(i);
} else if (c instanceof ControlCommand) {
// BeginString
break;
}
}
outputStreamDirty();
}
void outputStreamDirty() {
outputStreamTextDirty = true;
outputStreamTagsDirty = true;
}
void resetErrors() {
currentErrors = null;
}
void resetOutput() {
outputStream.clear();
outputStreamDirty();
}
// Don't make public since the method needs to be wrapped in Story for visit
// counting
void setChosenPath(Path path) throws Exception {
// Changing direction, assume we need to clear current set of choices
currentChoices.clear();
setCurrentPath(path);
currentTurnIndex++;
}
void startExternalFunctionEvaluation(Container funcContainer, Object[] arguments) throws Exception {
// We'll start a new callstack, so keep hold of the original,
// as well as the evaluation stack so we know if the function
// returned something
originalCallstack = callStack;
originalEvaluationStackHeight = evaluationStack.size();
// Create a new base call stack element.
callStack = new CallStack(funcContainer);
callStack.getCurrentElement().type = PushPopType.Function;
// Change the callstack the variableState is looking at to be
// this temporary function evaluation one. We'll restore it afterwards
variablesState.setCallStack(callStack);
// By setting ourselves in external function evaluation mode,
// we're saying it's okay to end the flow without a Done or End,
// but with a ~ return instead.
isExternalFunctionEvaluation = true;
passArgumentsToEvaluationStack(arguments);
}
void passArgumentsToEvaluationStack(Object[] arguments) throws Exception {
// Pass arguments onto the evaluation stack
if (arguments != null) {
for (int i = 0; i < arguments.length; i++) {
if (!(arguments[i] instanceof Integer || arguments[i] instanceof Float
|| arguments[i] instanceof String)) {
throw new Exception(
"ink arguments when calling EvaluateFunction / ChoosePathStringWithParameters must be int, float or string");
}
pushEvaluationStack(Value.create(arguments[i]));
}
}
}
boolean tryExitExternalFunctionEvaluation() {
if (isExternalFunctionEvaluation && callStack.getElements().size() == 1
&& callStack.getCurrentElement().type == PushPopType.Function) {
setCurrentContentObject(null);
didSafeExit = true;
return true;
}
return false;
}
Object completeExternalFunctionEvaluation() {
// Do we have a returned value?
// Potentially pop multiple values off the stack, in case we need
// to clean up after ourselves (e.g. caller of EvaluateFunction may
// have passed too many arguments, and we currently have no way to check
// for that)
RTObject returnedObj = null;
while (evaluationStack.size() > originalEvaluationStackHeight) {
RTObject poppedObj = popEvaluationStack();
if (returnedObj == null)
returnedObj = poppedObj;
}
// Restore our own state
callStack = originalCallstack;
originalCallstack = null;
originalEvaluationStackHeight = 0;
// Restore the callstack that the variablesState uses
variablesState.setCallStack(callStack);
// What did we get back?
if (returnedObj != null) {
if (returnedObj instanceof Void)
return null;
// Some kind of value, if not void
Value<?> returnVal = null;
if (returnedObj instanceof Value)
returnVal = (Value<?>) returnedObj;
// DivertTargets get returned as the string of components
// (rather than a Path, which isn't public)
if (returnVal.getValueType() == ValueType.DivertTarget) {
return returnVal.getValueObject().toString();
}
// Other types can just have their exact object type:
// int, float, string. VariablePointers get returned as strings.
return returnVal.getValueObject();
}
return null;
}
void setCurrentContentObject(RTObject value) {
callStack.getCurrentElement().setcurrentRTObject(value);
}
void setCurrentPath(Path value) throws Exception {
if (value != null)
setCurrentContentObject(story.contentAtPath(value));
else
setCurrentContentObject(null);
}
void setInExpressionEvaluation(boolean value) {
callStack.getCurrentElement().inExpressionEvaluation = value;
}
@SuppressWarnings("unchecked")
public void setJsonToken(HashMap<String, Object> value) throws StoryException, Exception {
HashMap<String, Object> jObject = value;
Object jSaveVersion = jObject.get("inkSaveVersion");
if (jSaveVersion == null) {
throw new StoryException("ink save format incorrect, can't load.");
} else if ((int) jSaveVersion < kMinCompatibleLoadVersion) {
throw new StoryException("Ink save format isn't compatible with the current version (saw '" + jSaveVersion
+ "', but minimum is " + kMinCompatibleLoadVersion + "), so can't load.");
}
callStack.setJsonToken((HashMap<String, Object>) jObject.get("callstackThreads"), story);
variablesState.setjsonToken((HashMap<String, Object>) jObject.get("variablesState"));
evaluationStack = Json.jArrayToRuntimeObjList((List<Object>) jObject.get("evalStack"));
outputStream = Json.jArrayToRuntimeObjList((List<Object>) jObject.get("outputStream"));
outputStreamDirty();
currentChoices = Json.jArrayToRuntimeObjList((List<Object>) jObject.get("currentChoices"));
Object currentDivertTargetPath = jObject.get("currentDivertTarget");
if (currentDivertTargetPath != null) {
Path divertPath = new Path(currentDivertTargetPath.toString());
setDivertedTargetObject(story.contentAtPath(divertPath));
}
visitCounts = Json.jObjectToIntHashMap((HashMap<String, Object>) jObject.get("visitCounts"));
turnIndices = Json.jObjectToIntHashMap((HashMap<String, Object>) jObject.get("turnIndices"));
currentTurnIndex = (int) jObject.get("turnIdx");
storySeed = (int) jObject.get("storySeed");
previousRandom = (int) jObject.get("previousRandom");
Object jChoiceThreadsObj = jObject.get("choiceThreads");
HashMap<String, Object> jChoiceThreads = (HashMap<String, Object>) jChoiceThreadsObj;
for (Choice c : currentChoices) {
c.setChoicePoint((ChoicePoint) story.contentAtPath(new Path(c.originalChoicePath)));
Thread foundActiveThread = callStack.getThreadWithIndex(c.originalThreadIndex);
if (foundActiveThread != null) {
c.setThreadAtGeneration(foundActiveThread);
} else {
HashMap<String, Object> jSavedChoiceThread = (HashMap<String, Object>) jChoiceThreads
.get(Integer.toString(c.originalThreadIndex));
c.setThreadAtGeneration(new CallStack.Thread(jSavedChoiceThread, story));
}
}
}
void setPreviousContentObject(RTObject value) {
callStack.getcurrentThread().previousContentRTObject = value;
}
/**
* Exports the current state to json format, in order to save the game.
*
* @return The save state in json format.
*/
public String toJson() throws Exception {
return SimpleJson.HashMapToText(getJsonToken());
}
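/*
* Example (sketch): a save/load round trip, assuming the enclosing Story
* exposes its StoryState via getState().
*
* String saved = story.getState().toJson(); // persist this string somewhere
* // ... later, with the same story content loaded ...
* story.getState().loadJson(saved);
*/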
void trimFromExistingGlue() {
int i = currentGlueIndex();
while (i < outputStream.size()) {
StringValue txt = outputStream.get(i) instanceof StringValue ? (StringValue) outputStream.get(i) : null;
if (txt != null && !txt.isNonWhitespace())
outputStream.remove(i);
else
i++;
}
outputStreamDirty();
}
void trimNewlinesFromOutputStream(Glue rightGlueToStopAt) {
int removeWhitespaceFrom = -1;
int rightGluePos = -1;
boolean foundNonWhitespace = false;
// Work back from the end, and try to find the point where
// we need to start removing content. There are two ways:
// - Start from the matching right-glue (because we just saw a
// left-glue)
// - Simply work backwards to find the first newline in a String of
// whitespace
int i = outputStream.size() - 1;
while (i >= 0) {
RTObject obj = outputStream.get(i);
ControlCommand cmd = obj instanceof ControlCommand ? (ControlCommand) obj : null;
StringValue txt = obj instanceof StringValue ? (StringValue) obj : null;
Glue glue = obj instanceof Glue ? (Glue) obj : null;
if (cmd != null || (txt != null && txt.isNonWhitespace())) {
foundNonWhitespace = true;
if (rightGlueToStopAt == null)
break;
} else if (rightGlueToStopAt != null && glue == rightGlueToStopAt) {
rightGluePos = i;
break;
} else if (txt != null && txt.isNewline() && !foundNonWhitespace) {
removeWhitespaceFrom = i;
}
i--;
}
// Remove the whitespace
if (removeWhitespaceFrom >= 0) {
i = removeWhitespaceFrom;
while (i < outputStream.size()) {
StringValue text = outputStream.get(i) instanceof StringValue ? (StringValue) outputStream.get(i)
: null;
if (text != null) {
outputStream.remove(i);
} else {
i++;
}
}
}
// Remove the glue (it will come before the whitespace,
// so index is still valid)
// Also remove any other non-matching right glues that come after,
// since they'll have lost their matching glues already
if (rightGlueToStopAt != null && rightGluePos > -1) {
i = rightGluePos;
while (i < outputStream.size()) {
if (outputStream.get(i) instanceof Glue && ((Glue) outputStream.get(i)).isRight()) {
outputStream.remove(i);
} else {
i++;
}
}
}
}
// At both the start and the end of the String, split out the new lines like
// " \n \n \n the String \n is awesome \n \n "
// Excess newlines are converted into single newlines, and spaces discarded.
// Outside spaces are significant and retained. "Interior" newlines within
// the main String are ignored, since this is for the purpose of gluing
// only.
// - If no splitting is necessary, null is returned.
// - A newline on its own is returned in a list for consistency.
List<StringValue> trySplittingHeadTailWhitespace(StringValue single) {
String str = single.value;
int headFirstNewlineIdx = -1;
int headLastNewlineIdx = -1;
for (int i = 0; i < str.length(); ++i) {
char c = str.charAt(i);
if (c == '\n') {
if (headFirstNewlineIdx == -1)
headFirstNewlineIdx = i;
headLastNewlineIdx = i;
} else if (c == ' ' || c == '\t')
continue;
else
break;
}
int tailLastNewlineIdx = -1;
int tailFirstNewlineIdx = -1;
for (int i = 0; i < str.length(); ++i) {
char c = str.charAt(i);
if (c == '\n') {
if (tailLastNewlineIdx == -1)
tailLastNewlineIdx = i;
tailFirstNewlineIdx = i;
} else if (c == ' ' || c == '\t')
continue;
else
break;
}
// No splitting to be done?
if (headFirstNewlineIdx == -1 && tailLastNewlineIdx == -1)
return null;
List<StringValue> listTexts = new ArrayList<StringValue>();
int innerStrStart = 0;
int innerStrEnd = str.length();
if (headFirstNewlineIdx != -1) {
if (headFirstNewlineIdx > 0) {
StringValue leadingSpaces = new StringValue(str.substring(0, headFirstNewlineIdx));
listTexts.add(leadingSpaces);
}
listTexts.add(new StringValue("\n"));
innerStrStart = headLastNewlineIdx + 1;
}
if (tailLastNewlineIdx != -1) {
innerStrEnd = tailFirstNewlineIdx;
}
if (innerStrEnd > innerStrStart) {
String innerStrText = str.substring(innerStrStart, innerStrEnd);
listTexts.add(new StringValue(innerStrText));
}
if (tailLastNewlineIdx != -1 && tailFirstNewlineIdx > headLastNewlineIdx) {
listTexts.add(new StringValue("\n"));
if (tailLastNewlineIdx < str.length() - 1) {
int numSpaces = (str.length() - tailLastNewlineIdx) - 1;
StringValue trailingSpaces = new StringValue(
str.substring(tailLastNewlineIdx + 1, numSpaces + tailLastNewlineIdx + 1));
listTexts.add(trailingSpaces);
}
}
return listTexts;
}
/**
* Gets the visit/read count of a particular Container at the given path.
* For a knot or stitch, that path String will be in the form:
*
* knot knot.stitch
*
* @return The number of times the specific knot or stitch has been
* encountered by the ink engine.
*
* @param pathString
* The dot-separated path String of the specific knot or stitch.
*
*/
public int visitCountAtPathString(String pathString) {
Integer visitCountOut = visitCounts.get(pathString);
if (visitCountOut != null)
return visitCountOut;
return 0;
}
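/*
* Example (sketch): querying read counts with the dot-separated path form
* described above; "state" is a StoryState and the knot/stitch names are
* illustrative.
*
* int introReads = state.visitCountAtPathString("intro");
* int stitchReads = state.visitCountAtPathString("intro.first_meeting");
*/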
public RTObject getDivertedTargetObject() {
return divertedTargetObject;
}
public void setDivertedTargetObject(RTObject divertedTargetObject) {
this.divertedTargetObject = divertedTargetObject;
}
public boolean isDidSafeExit() {
return didSafeExit;
}
public void setDidSafeExit(boolean didSafeExit) {
this.didSafeExit = didSafeExit;
}
void setCallStack(CallStack cs) {
callStack = cs;
}
}
|
package org.mozilla.mozstumbler.service.datahandling;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.os.Environment;
import android.provider.BaseColumns;
import android.util.Log;
import org.mozilla.mozstumbler.BuildConfig;
/** Used by Provider */
public class Database extends SQLiteOpenHelper {
private static final String LOGTAG = Database.class.getName();
private static final int DATABASE_VERSION = 2;
private static final String DATABASE_NAME = "stumbler.db";
static final String TABLE_REPORTS = "reports";
static final String TABLE_STATS = "stats";
public Database(Context context) {
super(context,
// Uncomment for dev use to get db in public location
// (BuildConfig.DEBUG)? Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS) + "/" + DATABASE_NAME :
DATABASE_NAME,
null, DATABASE_VERSION);
}
@Override
public void onCreate(SQLiteDatabase db) {
createTableReports(db);
db.execSQL("CREATE TABLE " + TABLE_STATS + " ("
+ BaseColumns._ID + " INTEGER PRIMARY KEY,"
+ DatabaseContract.StatsColumns.KEY + " VARCHAR(80) UNIQUE NOT NULL,"
+ DatabaseContract.StatsColumns.VALUE + " TEXT NOT NULL)");
db.insertWithOnConflict(TABLE_STATS, null, DatabaseContract.Stats.values(DatabaseContract.Stats.KEY_LAST_UPLOAD_TIME, "0"), SQLiteDatabase.CONFLICT_REPLACE);
db.insertWithOnConflict(TABLE_STATS, null, DatabaseContract.Stats.values(DatabaseContract.Stats.KEY_OBSERVATIONS_SENT, "0"), SQLiteDatabase.CONFLICT_REPLACE);
db.insertWithOnConflict(TABLE_STATS, null, DatabaseContract.Stats.values(DatabaseContract.Stats.KEY_WIFIS_SENT, "0"), SQLiteDatabase.CONFLICT_REPLACE);
db.insertWithOnConflict(TABLE_STATS, null, DatabaseContract.Stats.values(DatabaseContract.Stats.KEY_CELLS_SENT, "0"), SQLiteDatabase.CONFLICT_REPLACE);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
Log.d(LOGTAG, "onUpgrade() from " + oldVersion + " to " + newVersion);
int version = oldVersion;
switch (version) {
case 1:
db.execSQL("DROP TABLE IF EXISTS " + TABLE_REPORTS);
createTableReports(db);
version = 2;
}
if (version != DATABASE_VERSION) {
db.execSQL("DROP TABLE IF EXISTS " + TABLE_REPORTS);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_STATS);
onCreate(db);
}
}
private void createTableReports(SQLiteDatabase db) {
db.execSQL("CREATE TABLE " + TABLE_REPORTS + " ("
+ BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
+ DatabaseContract.ReportsColumns.TIME + " INTEGER NOT NULL,"
+ DatabaseContract.ReportsColumns.LAT + " REAL NOT NULL,"
+ DatabaseContract.ReportsColumns.LON + " REAL NOT NULL,"
+ DatabaseContract.ReportsColumns.ALTITUDE + " INTEGER,"
+ DatabaseContract.ReportsColumns.ACCURACY + " INTEGER,"
+ DatabaseContract.ReportsColumns.RADIO + " VARCHAR(8) NOT NULL,"
+ DatabaseContract.ReportsColumns.CELL + " TEXT NOT NULL,"
+ DatabaseContract.ReportsColumns.WIFI + " TEXT NOT NULL,"
+ DatabaseContract.ReportsColumns.CELL_COUNT + " INTEGER NOT NULL,"
+ DatabaseContract.ReportsColumns.WIFI_COUNT + " INTEGER NOT NULL,"
+ DatabaseContract.ReportsColumns.RETRY_NUMBER + " INTEGER NOT NULL DEFAULT 0)");
}
}
|
package org.mtransit.android.commons.provider;
import java.net.HttpURLConnection;
import java.net.SocketException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.mtransit.android.commons.ArrayUtils;
import org.mtransit.android.commons.CollectionUtils;
import org.mtransit.android.commons.HtmlUtils;
import org.mtransit.android.commons.LocaleUtils;
import org.mtransit.android.commons.MTLog;
import org.mtransit.android.commons.PackageManagerUtils;
import org.mtransit.android.commons.PreferenceUtils;
import org.mtransit.android.commons.R;
import org.mtransit.android.commons.SqlUtils;
import org.mtransit.android.commons.TimeUtils;
import org.mtransit.android.commons.UriUtils;
import org.mtransit.android.commons.data.POI;
import org.mtransit.android.commons.data.POIStatus;
import org.mtransit.android.commons.data.RouteTripStop;
import org.mtransit.android.commons.data.Schedule;
import org.mtransit.android.commons.data.ServiceUpdate;
import org.mtransit.android.commons.helpers.MTDefaultHandler;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import android.annotation.SuppressLint;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.text.TextUtils;
@SuppressLint("Registered")
public class NextBusProvider extends MTContentProvider implements ServiceUpdateProviderContract, StatusProviderContract {
private static final String TAG = NextBusProvider.class.getSimpleName();
@Override
public String getLogTag() {
return TAG;
}
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
private static final String PREF_KEY_AGENCY_LAST_UPDATE_MS = NextBusDbHelper.PREF_KEY_AGENCY_LAST_UPDATE_MS;
public static UriMatcher getNewUriMatcher(String authority) {
UriMatcher URI_MATCHER = new UriMatcher(UriMatcher.NO_MATCH);
ServiceUpdateProvider.append(URI_MATCHER, authority);
StatusProvider.append(URI_MATCHER, authority);
return URI_MATCHER;
}
private static UriMatcher uriMatcher = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static UriMatcher getURIMATCHER(Context context) {
if (uriMatcher == null) {
uriMatcher = getNewUriMatcher(getAUTHORITY(context));
}
return uriMatcher;
}
private static String authority = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getAUTHORITY(Context context) {
if (authority == null) {
authority = context.getResources().getString(R.string.next_bus_authority);
}
return authority;
}
private static String targetAuthority = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getTARGET_AUTHORITY(Context context) {
if (targetAuthority == null) {
targetAuthority = context.getResources().getString(R.string.next_bus_for_poi_authority);
}
return targetAuthority;
}
private static Uri authorityUri = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static Uri getAUTHORITY_URI(Context context) {
if (authorityUri == null) {
authorityUri = UriUtils.newContentUri(getAUTHORITY(context));
}
return authorityUri;
}
private static String agencyTag = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getAGENCY_TAG(Context context) {
if (agencyTag == null) {
agencyTag = context.getResources().getString(R.string.next_bus_agency_tag);
}
return agencyTag;
}
private static String textLanguageCode = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getTEXT_LANGUAGE_CODE(Context context) {
if (textLanguageCode == null) {
textLanguageCode = context.getResources().getString(R.string.next_bus_messages_text_language_code);
}
return textLanguageCode;
}
private static String textSecondaryLanguageCode = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getTEXT_SECONDARY_LANGUAGE_CODE(Context context) {
if (textSecondaryLanguageCode == null) {
textSecondaryLanguageCode = context.getResources().getString(R.string.next_bus_messages_text_secondary_language_code);
}
return textSecondaryLanguageCode;
}
private static String textBoldWords = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getTEXT_BOLD_WORDS(Context context) {
if (textBoldWords == null) {
textBoldWords = context.getResources().getString(R.string.next_bus_messages_text_bold_words);
}
return textBoldWords;
}
private static String textSecondaryBoldWords = null;
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public static String getTEXT_SECONDARY_BOLD_WORDS(Context context) {
if (textSecondaryBoldWords == null) {
textSecondaryBoldWords = context.getResources().getString(R.string.next_bus_messages_text_secondary_bold_words);
}
return textSecondaryBoldWords;
}
private static final long SERVICE_UPDATE_MIN_DURATION_BETWEEN_REFRESH_IN_MS = TimeUnit.MINUTES.toMillis(10);
private static final long SERVICE_UPDATE_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1);
@Override
public long getMinDurationBetweenServiceUpdateRefreshInMs(boolean inFocus) {
if (inFocus) {
return SERVICE_UPDATE_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS;
}
return SERVICE_UPDATE_MIN_DURATION_BETWEEN_REFRESH_IN_MS;
}
private static final long SERVICE_UPDATE_MAX_VALIDITY_IN_MS = TimeUnit.DAYS.toMillis(1);
@Override
public long getServiceUpdateMaxValidityInMs() {
return SERVICE_UPDATE_MAX_VALIDITY_IN_MS;
}
private static final long SERVICE_UPDATE_VALIDITY_IN_MS = TimeUnit.HOURS.toMillis(1);
private static final long SERVICE_UPDATE_VALIDITY_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(10);
@Override
public long getServiceUpdateValidityInMs(boolean inFocus) {
if (inFocus) {
return SERVICE_UPDATE_VALIDITY_IN_FOCUS_IN_MS;
}
return SERVICE_UPDATE_VALIDITY_IN_MS;
}
@Override
public String getServiceUpdateDbTableName() {
return NextBusDbHelper.T_NEXT_BUS_SERVICE_UPDATE;
}
@Override
public void cacheServiceUpdates(ArrayList<ServiceUpdate> newServiceUpdates) {
ServiceUpdateProvider.cacheServiceUpdatesS(this, newServiceUpdates);
}
@Override
public ArrayList<ServiceUpdate> getCachedServiceUpdates(ServiceUpdateProvider.ServiceUpdateFilter serviceUpdateFilter) {
if (serviceUpdateFilter.getPoi() == null || !(serviceUpdateFilter.getPoi() instanceof RouteTripStop)) {
MTLog.w(this, "getCachedServiceUpdates() > no service update (poi null or not RTS)");
return null;
}
RouteTripStop rts = (RouteTripStop) serviceUpdateFilter.getPoi();
ArrayList<ServiceUpdate> serviceUpdates = new ArrayList<ServiceUpdate>();
HashSet<String> targetUUIDs = getTargetUUIDs(rts);
for (String targetUUID : targetUUIDs) {
ArrayList<ServiceUpdate> cachedServiceUpdates = ServiceUpdateProvider.getCachedServiceUpdatesS(this, targetUUID);
serviceUpdates.addAll(cachedServiceUpdates);
}
enhanceRTServiceUpdateForStop(serviceUpdates, rts);
return serviceUpdates;
}
private void enhanceRTServiceUpdateForStop(ArrayList<ServiceUpdate> serviceUpdates, RouteTripStop rts) {
try {
if (CollectionUtils.getSize(serviceUpdates) > 0) {
for (ServiceUpdate serviceUpdate : serviceUpdates) {
serviceUpdate.setTargetUUID(rts.getUUID()); // route trip service update targets stop
}
}
} catch (Exception e) {
MTLog.w(this, e, "Error while trying to enhance route trip service update for stop!");
}
}
private HashSet<String> getTargetUUIDs(RouteTripStop rts) {
HashSet<String> targetUUIDs = new HashSet<String>();
targetUUIDs.add(getAgencyTargetUUID(rts.getAuthority()));
targetUUIDs.add(getAgencyRouteTagTargetUUID(rts.getAuthority(), getRouteTag(rts)));
targetUUIDs.add(getAgencyRouteStopTagTargetUUID(rts.getAuthority(), getRouteTag(rts), getStopTag(rts)));
return targetUUIDs;
}
public String getRouteTag(RouteTripStop rts) {
return rts.getRoute().getShortName();
}
public String getStopTag(RouteTripStop rts) {
return rts.getStop().getCode();
}
public String cleanStopTag(String stopTag) {
return stopTag;
}
protected static String getAgencyRouteTagTargetUUID(String agencyAuthority, String routeTag) {
return POI.POIUtils.getUUID(agencyAuthority, routeTag);
}
protected static String getAgencyRouteStopTagTargetUUID(String agencyAuthority, String routeTag, String stopTag) {
return POI.POIUtils.getUUID(agencyAuthority, routeTag, stopTag);
}
protected static String getAgencyTargetUUID(String agencyAuthority) {
return POI.POIUtils.getUUID(agencyAuthority);
}
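// Illustrative sketch (hypothetical values, not a real configuration): a stop lookup fans out to
// three cached target UUIDs, one per level. For authority "org.example.agency", route short name
// "501" and stop code "12345" this would be:
//   getAgencyTargetUUID("org.example.agency")                              // agency-wide messages
//   getAgencyRouteTagTargetUUID("org.example.agency", "501")               // route-level messages
//   getAgencyRouteStopTagTargetUUID("org.example.agency", "501", "12345")  // stop-level messages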
@Override
public boolean purgeUselessCachedServiceUpdates() {
return ServiceUpdateProvider.purgeUselessCachedServiceUpdates(this);
}
@Override
public boolean deleteCachedServiceUpdate(Integer serviceUpdateId) {
return ServiceUpdateProvider.deleteCachedServiceUpdate(this, serviceUpdateId);
}
@Override
public boolean deleteCachedServiceUpdate(String targetUUID, String sourceId) {
return ServiceUpdateProvider.deleteCachedServiceUpdate(this, targetUUID, sourceId);
}
@Override
public ArrayList<ServiceUpdate> getNewServiceUpdates(ServiceUpdateProvider.ServiceUpdateFilter serviceUpdateFilter) {
if (serviceUpdateFilter == null || serviceUpdateFilter.getPoi() == null || !(serviceUpdateFilter.getPoi() instanceof RouteTripStop)) {
MTLog.w(this, "getNewServiceUpdates() > no new service update (filter null or poi null or not RTS): %s", serviceUpdateFilter);
return null;
}
RouteTripStop rts = (RouteTripStop) serviceUpdateFilter.getPoi();
updateAgencyServiceUpdateDataIfRequired(rts.getAuthority(), serviceUpdateFilter.isInFocusOrDefault());
ArrayList<ServiceUpdate> cachedServiceUpdates = getCachedServiceUpdates(serviceUpdateFilter);
if (CollectionUtils.getSize(cachedServiceUpdates) == 0) {
String agencyTargetUUID = getAgencyTargetUUID(rts.getAuthority());
cachedServiceUpdates = ArrayUtils.asArrayList(getServiceUpdateNone(agencyTargetUUID));
enhanceRTServiceUpdateForStop(cachedServiceUpdates, rts); // convert to stop service update
}
return cachedServiceUpdates;
}
public ServiceUpdate getServiceUpdateNone(String agencyTargetUUID) {
return new ServiceUpdate(null, agencyTargetUUID, TimeUtils.currentTimeMillis(), getServiceUpdateMaxValidityInMs(), null, null,
ServiceUpdate.SEVERITY_NONE, AGENCY_SOURCE_ID, AGENCY_SOURCE_LABEL, getServiceUpdateLanguage());
}
private static final String AGENCY_SOURCE_ID = "next_bus_com_messages";
private static final String AGENCY_SOURCE_LABEL = "NextBus";
private void updateAgencyServiceUpdateDataIfRequired(String targetAuthority, boolean inFocus) {
long lastUpdateInMs = PreferenceUtils.getPrefLcl(getContext(), PREF_KEY_AGENCY_LAST_UPDATE_MS, 0L);
long minUpdateMs = Math.min(getServiceUpdateMaxValidityInMs(), getServiceUpdateValidityInMs(inFocus));
long nowInMs = TimeUtils.currentTimeMillis();
if (lastUpdateInMs + minUpdateMs > nowInMs) {
return;
}
updateAgencyServiceUpdateDataIfRequiredSync(targetAuthority, lastUpdateInMs, inFocus);
}
private synchronized void updateAgencyServiceUpdateDataIfRequiredSync(String targetAuthority, long lastUpdateInMs, boolean inFocus) {
if (PreferenceUtils.getPrefLcl(getContext(), PREF_KEY_AGENCY_LAST_UPDATE_MS, 0L) > lastUpdateInMs) {
return; // too late, another thread already updated
}
long nowInMs = TimeUtils.currentTimeMillis();
boolean deleteAllRequired = false;
if (lastUpdateInMs + getServiceUpdateMaxValidityInMs() < nowInMs) {
deleteAllRequired = true; // too old to display
}
long minUpdateMs = Math.min(getServiceUpdateMaxValidityInMs(), getServiceUpdateValidityInMs(inFocus));
if (deleteAllRequired || lastUpdateInMs + minUpdateMs < nowInMs) {
updateAllAgencyServiceUpdateDataFromWWW(targetAuthority, deleteAllRequired); // try to update
}
}
private void updateAllAgencyServiceUpdateDataFromWWW(String targetAuthority, boolean deleteAllRequired) {
boolean deleteAllDone = false;
if (deleteAllRequired) {
deleteAllAgencyServiceUpdateData();
deleteAllDone = true;
}
ArrayList<ServiceUpdate> newServiceUpdates = loadAgencyServiceUpdateDataFromWWW(targetAuthority);
if (newServiceUpdates != null) { // empty is OK
long nowInMs = TimeUtils.currentTimeMillis();
if (!deleteAllDone) {
deleteAllAgencyServiceUpdateData();
}
cacheServiceUpdates(newServiceUpdates);
PreferenceUtils.savePrefLcl(getContext(), PREF_KEY_AGENCY_LAST_UPDATE_MS, nowInMs, true); // sync
} // else keep whatever we have until max validity reached
}
private static final String AGENCY_URL_PART_1_BEFORE_AGENCY_TAG = "http://webservices.nextbus.com/service/publicXMLFeed?command=messages&a=";
private static String getAgencyUrlString(Context context) {
return new StringBuilder()
.append(AGENCY_URL_PART_1_BEFORE_AGENCY_TAG)
.append(getAGENCY_TAG(context))
.toString();
}
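// Example (the agency tag "sf-muni" is a hypothetical placeholder, not the configured value):
// getAgencyUrlString() would return
// "http://webservices.nextbus.com/service/publicXMLFeed?command=messages&a=sf-muni"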
private ArrayList<ServiceUpdate> loadAgencyServiceUpdateDataFromWWW(String targetAuthority) {
try {
String urlString = getAgencyUrlString(getContext());
MTLog.i(this, "Loading from '%s'...", urlString);
URL url = new URL(urlString);
URLConnection urlc = url.openConnection();
HttpURLConnection httpUrlConnection = (HttpURLConnection) urlc;
switch (httpUrlConnection.getResponseCode()) {
case HttpURLConnection.HTTP_OK:
long newLastUpdateInMs = TimeUtils.currentTimeMillis();
SAXParserFactory spf = SAXParserFactory.newInstance();
SAXParser sp = spf.newSAXParser();
XMLReader xr = sp.getXMLReader();
NextBusMessagesDataHandler handler = new NextBusMessagesDataHandler(this, newLastUpdateInMs, getTARGET_AUTHORITY(getContext()),
getServiceUpdateMaxValidityInMs(), getTEXT_LANGUAGE_CODE(getContext()), getTEXT_SECONDARY_LANGUAGE_CODE(getContext()),
getTEXT_BOLD_WORDS(getContext()), getTEXT_SECONDARY_BOLD_WORDS(getContext()));
xr.setContentHandler(handler);
xr.parse(new InputSource(urlc.getInputStream()));
return handler.getServiceUpdates();
default:
MTLog.w(this, "ERROR: HTTP URL-Connection Response Code %s (Message: %s)", httpUrlConnection.getResponseCode(),
httpUrlConnection.getResponseMessage());
return null;
}
} catch (UnknownHostException uhe) {
if (MTLog.isLoggable(android.util.Log.DEBUG)) {
MTLog.w(this, uhe, "No Internet Connection!");
} else {
MTLog.w(this, "No Internet Connection!");
}
return null;
} catch (SocketException se) {
MTLog.w(TAG, se, "No Internet Connection!");
return null;
} catch (Exception e) { // Unknown error
MTLog.e(TAG, e, "INTERNAL ERROR: Unknown Exception");
return null;
}
}
private int deleteAllAgencyServiceUpdateData() {
int affectedRows = 0;
SQLiteDatabase db = null;
try {
db = getDBHelper().getWritableDatabase();
String selection = new StringBuilder().append(ServiceUpdateProvider.ServiceUpdateColumns.T_SERVICE_UPDATE_K_SOURCE_ID).append("=").append('\'')
.append(AGENCY_SOURCE_ID).append('\'').toString();
affectedRows = db.delete(getServiceUpdateDbTableName(), selection, null);
} catch (Exception e) {
MTLog.w(this, e, "Error while deleting all agency service update data!");
} finally {
SqlUtils.closeQuietly(db);
}
return affectedRows;
}
private static String serviceUpdateLanguage = null;
@Override
public String getServiceUpdateLanguage() {
if (serviceUpdateLanguage == null) {
String newServiceUpdateLanguage = Locale.ENGLISH.getLanguage();
if (LocaleUtils.isFR()) {
if (getTEXT_LANGUAGE_CODE(getContext()).contains(Locale.FRENCH.getLanguage())
|| getTEXT_SECONDARY_LANGUAGE_CODE(getContext()).contains(Locale.FRENCH.getLanguage())) {
newServiceUpdateLanguage = Locale.FRENCH.getLanguage();
}
}
serviceUpdateLanguage = newServiceUpdateLanguage;
}
return serviceUpdateLanguage;
}
private static final long NEXT_BUS_STATUS_MAX_VALIDITY_IN_MS = TimeUnit.MINUTES.toMillis(30);
private static final long NEXT_BUS_STATUS_VALIDITY_IN_MS = TimeUnit.MINUTES.toMillis(5);
private static final long NEXT_BUS_STATUS_VALIDITY_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1);
private static final long NEXT_BUS_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_MS = TimeUnit.MINUTES.toMillis(2);
private static final long NEXT_BUS_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1);
@Override
public long getMinDurationBetweenRefreshInMs(boolean inFocus) {
if (inFocus) {
return NEXT_BUS_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS;
}
return NEXT_BUS_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_MS;
}
@Override
public long getStatusValidityInMs(boolean inFocus) {
if (inFocus) {
return NEXT_BUS_STATUS_VALIDITY_IN_FOCUS_IN_MS;
}
return NEXT_BUS_STATUS_VALIDITY_IN_MS;
}
@Override
public long getStatusMaxValidityInMs() {
return NEXT_BUS_STATUS_MAX_VALIDITY_IN_MS;
}
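// Taken together, the constants above mean: a status in focus is refreshed at most once per minute
// and considered fresh for 1 minute; out of focus, it is refreshed at most once every 2 minutes and
// considered fresh for 5 minutes; in all cases a cached status older than 30 minutes is discarded.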
@Override
public void cacheStatus(POIStatus newStatusToCache) {
StatusProvider.cacheStatusS(this, newStatusToCache);
}
@Override
public POIStatus getCachedStatus(StatusFilter statusFilter) {
if (!(statusFilter instanceof Schedule.ScheduleStatusFilter)) {
MTLog.w(this, "getNewStatus() > Can't find new schecule whithout schedule filter!");
return null;
}
Schedule.ScheduleStatusFilter scheduleStatusFilter = (Schedule.ScheduleStatusFilter) statusFilter;
RouteTripStop rts = scheduleStatusFilter.getRouteTripStop();
String targetUUID = getAgencyRouteStopTagTargetUUID(rts.getAuthority(), getRouteTag(rts), getStopTag(rts));
POIStatus cachedStatus = StatusProvider.getCachedStatusS(this, targetUUID);
if (cachedStatus != null) {
cachedStatus.setTargetUUID(rts.getUUID()); // target RTS UUID instead of custom NextBus Route & Stop tags
}
return cachedStatus;
}
@Override
public boolean purgeUselessCachedStatuses() {
return StatusProvider.purgeUselessCachedStatuses(this);
}
@Override
public boolean deleteCachedStatus(int cachedStatusId) {
return StatusProvider.deleteCachedStatus(this, cachedStatusId);
}
@Override
public String getStatusDbTableName() {
return NextBusDbHelper.T_NEXT_BUS_STATUS;
}
@Override
public int getStatusType() {
return POI.ITEM_STATUS_TYPE_SCHEDULE;
}
@Override
public POIStatus getNewStatus(StatusFilter statusFilter) {
if (!(statusFilter instanceof Schedule.ScheduleStatusFilter)) {
MTLog.w(this, "getNewStatus() > Can't find new schecule whithout schedule filter!");
return null;
}
Schedule.ScheduleStatusFilter scheduleStatusFilter = (Schedule.ScheduleStatusFilter) statusFilter;
RouteTripStop rts = scheduleStatusFilter.getRouteTripStop();
loadPredictionsFromWWW(rts.getStop().getId(), rts.isDecentOnly() ? getRouteTag(rts) : null);
return getCachedStatus(statusFilter);
}
private static final String PREDICTION_URL_PART_1_BEFORE_AGENCY_TAG = "http://webservices.nextbus.com/service/publicXMLFeed?command=predictions&a=";
private static final String PREDICTION_URL_PART_2_BEFORE_STOP_ID = "&stopId=";
private static String getPredictionUrlString(Context context, int stopId) {
return new StringBuilder()
.append(PREDICTION_URL_PART_1_BEFORE_AGENCY_TAG)
.append(getAGENCY_TAG(context))
.append(PREDICTION_URL_PART_2_BEFORE_STOP_ID)
.append(stopId)
.toString();
}
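// Example (agency tag "sf-muni" and stop id 15551 are hypothetical placeholders):
// getPredictionUrlString(context, 15551) would return
// "http://webservices.nextbus.com/service/publicXMLFeed?command=predictions&a=sf-muni&stopId=15551"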
private void loadPredictionsFromWWW(int stopId, String decentOnlyRouteTag) {
try {
String urlString = getPredictionUrlString(getContext(), stopId);
MTLog.i(this, "Loading from '%s'...", urlString);
URL url = new URL(urlString);
URLConnection urlc = url.openConnection();
HttpURLConnection httpUrlConnection = (HttpURLConnection) urlc;
switch (httpUrlConnection.getResponseCode()) {
case HttpURLConnection.HTTP_OK:
long newLastUpdateInMs = TimeUtils.currentTimeMillis();
SAXParserFactory spf = SAXParserFactory.newInstance();
SAXParser sp = spf.newSAXParser();
XMLReader xr = sp.getXMLReader();
NextBusPredictionsDataHandler handler = new NextBusPredictionsDataHandler(this, newLastUpdateInMs, decentOnlyRouteTag);
xr.setContentHandler(handler);
xr.parse(new InputSource(urlc.getInputStream()));
Collection<POIStatus> statuses = handler.getStatuses();
Collection<String> targetUUIDs = handler.getStatusesTargetUUIDs();
StatusProvider.deleteCachedStatus(this, targetUUIDs);
for (POIStatus status : statuses) {
StatusProvider.cacheStatusS(this, status);
}
return;
default:
MTLog.w(this, "ERROR: HTTP URL-Connection Response Code %s (Message: %s)", httpUrlConnection.getResponseCode(),
httpUrlConnection.getResponseMessage());
return;
}
} catch (UnknownHostException uhe) {
if (MTLog.isLoggable(android.util.Log.DEBUG)) {
MTLog.w(this, uhe, "No Internet Connection!");
} else {
MTLog.w(this, "No Internet Connection!");
}
return;
} catch (SocketException se) {
MTLog.w(TAG, se, "No Internet Connection!");
return;
} catch (Exception e) { // Unknown error
MTLog.e(TAG, e, "INTERNAL ERROR: Unknown Exception");
return;
}
}
@Override
public boolean onCreateMT() {
ping();
return true;
}
@Override
public void ping() {
PackageManagerUtils.removeModuleLauncherIcon(getContext());
}
private static NextBusDbHelper dbHelper;
private static int currentDbVersion = -1;
private NextBusDbHelper getDBHelper(Context context) {
if (dbHelper == null) { // initialize
dbHelper = getNewDbHelper(context);
currentDbVersion = getCurrentDbVersion();
} else { // reset
try {
if (currentDbVersion != getCurrentDbVersion()) {
dbHelper.close();
dbHelper = null;
return getDBHelper(context);
}
} catch (Exception e) { // fail if locked, will try again later
MTLog.w(this, e, "Can't check DB version!");
}
}
return dbHelper;
}
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public int getCurrentDbVersion() {
return NextBusDbHelper.getDbVersion(getContext());
}
/**
* Override if multiple {@link NextBusProvider} implementations in same app.
*/
public NextBusDbHelper getNewDbHelper(Context context) {
return new NextBusDbHelper(context.getApplicationContext());
}
@Override
public UriMatcher getURI_MATCHER() {
return getURIMATCHER(getContext());
}
@Override
public Uri getAuthorityUri() {
return getAUTHORITY_URI(getContext());
}
@Override
public Context getContentProviderContext() {
return getContext();
}
@Override
public SQLiteOpenHelper getDBHelper() {
return getDBHelper(getContext());
}
@Override
public Cursor queryMT(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
Cursor cursor = ServiceUpdateProvider.queryS(this, uri, selection);
if (cursor != null) {
return cursor;
}
cursor = StatusProvider.queryS(this, uri, selection);
if (cursor != null) {
return cursor;
}
throw new IllegalArgumentException(String.format("Unknown URI (query): '%s'", uri));
}
@Override
public String getTypeMT(Uri uri) {
String type = ServiceUpdateProvider.getTypeS(this, uri);
if (type != null) {
return type;
}
type = StatusProvider.getTypeS(this, uri);
if (type != null) {
return type;
}
throw new IllegalArgumentException(String.format("Unknown URI (type): '%s'", uri));
}
@Override
public int deleteMT(Uri uri, String selection, String[] selectionArgs) {
MTLog.w(this, "The delete method is not available.");
return 0;
}
@Override
public int updateMT(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
MTLog.w(this, "The update method is not available.");
return 0;
}
@Override
public Uri insertMT(Uri uri, ContentValues values) {
MTLog.w(this, "The insert method is not available.");
return null;
}
private static class NextBusPredictionsDataHandler extends MTDefaultHandler {
private static final String TAG = NextBusProvider.TAG + ">" + NextBusPredictionsDataHandler.class.getSimpleName();
@Override
public String getLogTag() {
return TAG;
}
private static final String BODY = "body";
private static final String PREDICTIONS = "predictions";
private static final String PREDICTIONS_ROUTE_TAG = "routeTag";
private static final String PREDICTIONS_STOP_TAG = "stopTag";
private static final String DIRECTION = "direction";
private static final String PREDICTION = "prediction";
private static final String PREDICTION_EPOCH_TIME = "epochTime";
private static final String MESSAGE = "message";
private static long PROVIDER_PRECISION_IN_MS = TimeUnit.SECONDS.toMillis(10);
private String decentOnlyRouteTag = null;
private String currentLocalName = BODY;
private String currentRouteTag = null;
private String currentStopTag = null;
private HashSet<Long> currentPredictionEpochTimes = new HashSet<Long>();
private HashSet<POIStatus> statuses = new HashSet<POIStatus>();
private HashSet<String> statusesTargetUUIDs = new HashSet<String>();
private NextBusProvider provider;
private String authority;
private long lastUpdateInMs;
public NextBusPredictionsDataHandler(NextBusProvider provider, long lastUpdateInMs, String decentOnlyRouteTag) {
this.provider = provider;
this.authority = NextBusProvider.getTARGET_AUTHORITY(this.provider.getContext());
this.lastUpdateInMs = lastUpdateInMs;
this.decentOnlyRouteTag = decentOnlyRouteTag;
}
public Collection<POIStatus> getStatuses() {
return this.statuses;
}
public Collection<String> getStatusesTargetUUIDs() {
return this.statusesTargetUUIDs;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
super.startElement(uri, localName, qName, attributes);
this.currentLocalName = localName;
if (BODY.equals(this.currentLocalName)) {
this.currentRouteTag = null;
this.currentStopTag = null;
} else if (PREDICTIONS.equals(this.currentLocalName)) {
this.currentRouteTag = attributes.getValue(PREDICTIONS_ROUTE_TAG);
this.currentStopTag = this.provider.cleanStopTag(attributes.getValue(PREDICTIONS_STOP_TAG));
this.currentPredictionEpochTimes.clear();
} else if (DIRECTION.equals(this.currentLocalName)) {
this.currentPredictionEpochTimes.clear();
} else if (PREDICTION.equals(this.currentLocalName)) {
try {
Long epochTime = Long.valueOf(attributes.getValue(PREDICTION_EPOCH_TIME));
if (epochTime != null) {
this.currentPredictionEpochTimes.add(TimeUtils.timeToTheTensSecondsMillis(epochTime));
}
} catch (Exception e) {
MTLog.w(this, e, "Error while reading prediction epoch time!");
}
} else if (MESSAGE.equals(this.currentLocalName)) { // ignore
} else {
MTLog.w(this, "startElement() > Unexpected element '%s'", this.currentLocalName);
}
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
super.characters(ch, start, length);
try {
String string = new String(ch, start, length);
if (TextUtils.isEmpty(string)) {
return;
}
if (BODY.equals(this.currentLocalName)) { // ignore
} else if (PREDICTIONS.equals(this.currentLocalName)) { // ignore
} else if (DIRECTION.equals(this.currentLocalName)) { // ignore
} else if (PREDICTION.equals(this.currentLocalName)) { // ignore
} else {
MTLog.w(this, "characters() > Unexpected name '%s'! while parsing '%s'", this.currentLocalName, string);
}
} catch (Exception e) {
MTLog.w(this, e, "Error while parsing '%s' value '%s, %s, %s'!", this.currentLocalName, ch, start, length);
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
super.endElement(uri, localName, qName);
if (PREDICTION.equals(localName)) {
if (TextUtils.isEmpty(this.currentRouteTag) || TextUtils.isEmpty(this.currentStopTag)) {
return;
}
String targetUUID = NextBusProvider.getAgencyRouteStopTagTargetUUID(this.authority, this.currentRouteTag, this.currentStopTag);
Schedule newSchedule = new Schedule(targetUUID, this.lastUpdateInMs, this.provider.getStatusMaxValidityInMs(), this.lastUpdateInMs,
PROVIDER_PRECISION_IN_MS, this.currentRouteTag.equals(this.decentOnlyRouteTag));
for (Long epochTime : this.currentPredictionEpochTimes) {
newSchedule.addTimestampWithoutSort(new Schedule.Timestamp(epochTime));
}
this.statuses.add(newSchedule);
this.statusesTargetUUIDs.add(targetUUID);
}
}
}
public static class NextBusMessagesDataHandler extends MTDefaultHandler {
private static final String TAG = NextBusProvider.TAG + ">" + NextBusMessagesDataHandler.class.getSimpleName();
@Override
public String getLogTag() {
return TAG;
}
private static final String BODY = "body";
private static final String ROUTE = "route";
private static final String ROUTE_TAG = "tag";
private static final String ROUTE_TAG_ALL = "all";
private static final String MESSAGE = "message";
private static final String MESSAGE_ID = "id";
private static final String MESSAGE_PRIORITY = "priority";
private static final String MESSAGE_PRIORITY_NORMAL = "Normal";
private static final String MESSAGE_PRIORITY_LOW = "Low";
private static final String ROUTE_CONFIGURED_FOR_MESSAGE = "routeConfiguredForMessage";
private static final String ROUTE_CONFIGURED_FOR_MESSAGE_TAG = "tag";
private static final String STOP = "stop";
private static final String STOP_TAG = "tag";
private static final String TEXT = "text";
private static final String TEXT_SECONDARY_LANGUAGE = "textSecondaryLanguage";
private static final String INTERVAL = "interval";
private String currentLocalName = BODY;
private boolean currentRouteAll = false;
private long newLastUpdateInMs;
private long serviceUpdateMaxValidityInMs;
private ArrayList<ServiceUpdate> serviceUpdates = new ArrayList<ServiceUpdate>();
private String authority;
private String currentRouteTag = null;
private String currentRouteConfiguredForMessageRouteTag = null;
private HashMap<String, HashSet<String>> currentRouteConfiguredForMessage = new HashMap<String, HashSet<String>>();
private StringBuilder currentTextSb = new StringBuilder();
private StringBuilder currentTextSecondaryLanguageSb = new StringBuilder();
private String textLanguageCode;
private String textSecondaryLanguageCode;
private Pattern textBoldWords;
private Pattern textSecondaryBoldWords;
private HashMap<String, HashSet<String>> textMessageIdTargetUUID = new HashMap<String, HashSet<String>>();
private HashMap<String, HashSet<String>> textSecondaryMessageIdTargetUUID = new HashMap<String, HashSet<String>>();
private String currentMessageId;
private String currentMessagePriority;
private NextBusProvider provider;
public NextBusMessagesDataHandler(NextBusProvider provider, long newLastUpdateInMs, String authority, long serviceUpdateMaxValidityInMs,
String textLanguageCode, String textSecondaryLanguageCode, String textBoldWords, String textSecondaryBoldWords) {
this.provider = provider;
this.newLastUpdateInMs = newLastUpdateInMs;
this.authority = authority;
this.serviceUpdateMaxValidityInMs = serviceUpdateMaxValidityInMs;
this.textLanguageCode = textLanguageCode;
this.textSecondaryLanguageCode = textSecondaryLanguageCode;
try {
this.textBoldWords = Pattern.compile(textBoldWords, Pattern.CASE_INSENSITIVE);
} catch (Exception e) {
MTLog.w(this, e, "Error while compiling text bold regex pattern!");
}
try {
this.textSecondaryBoldWords = Pattern.compile(textSecondaryBoldWords, Pattern.CASE_INSENSITIVE);
} catch (Exception e) {
MTLog.w(this, e, "Error while compiling text bold regex pattern!");
}
}
public ArrayList<ServiceUpdate> getServiceUpdates() {
return this.serviceUpdates;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
super.startElement(uri, localName, qName, attributes);
this.currentLocalName = localName;
if (BODY.equals(this.currentLocalName)) {
} else if (ROUTE.equals(this.currentLocalName)) {
String routeTag = attributes.getValue(ROUTE_TAG);
if (ROUTE_TAG_ALL.equals(routeTag)) {
this.currentRouteTag = null;
this.currentRouteAll = true;
} else {
this.currentRouteTag = routeTag;
this.currentRouteAll = false;
}
this.currentMessagePriority = null;
this.currentMessageId = null;
} else if (MESSAGE.equals(this.currentLocalName)) {
this.currentMessagePriority = attributes.getValue(MESSAGE_PRIORITY);
this.currentMessageId = attributes.getValue(MESSAGE_ID);
if (!this.textMessageIdTargetUUID.containsKey(this.currentMessageId)) {
this.textMessageIdTargetUUID.put(this.currentMessageId, new HashSet<String>());
}
if (!this.textSecondaryMessageIdTargetUUID.containsKey(this.currentMessageId)) {
this.textSecondaryMessageIdTargetUUID.put(this.currentMessageId, new HashSet<String>());
}
this.currentRouteConfiguredForMessageRouteTag = null;
this.currentRouteConfiguredForMessage.clear();
this.currentTextSb = new StringBuilder();
this.currentTextSecondaryLanguageSb = new StringBuilder();
} else if (ROUTE_CONFIGURED_FOR_MESSAGE.equals(this.currentLocalName)) {
String routeTag = attributes.getValue(ROUTE_CONFIGURED_FOR_MESSAGE_TAG);
this.currentRouteConfiguredForMessageRouteTag = routeTag;
if (!this.currentRouteConfiguredForMessage.containsKey(this.currentRouteConfiguredForMessageRouteTag)) {
this.currentRouteConfiguredForMessage.put(this.currentRouteConfiguredForMessageRouteTag, new HashSet<String>());
}
} else if (STOP.equals(this.currentLocalName)) {
String stopTag = this.provider.cleanStopTag(attributes.getValue(STOP_TAG));
this.currentRouteConfiguredForMessage.get(this.currentRouteConfiguredForMessageRouteTag).add(stopTag);
} else if (TEXT.equals(this.currentLocalName)) { // ignore
} else if (TEXT_SECONDARY_LANGUAGE.equals(this.currentLocalName)) { // ignore
} else if (INTERVAL.equals(this.currentLocalName)) { // ignore
} else {
MTLog.w(this, "startElement() > Unexpected element '%s'", this.currentLocalName);
}
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
super.characters(ch, start, length);
try {
String string = new String(ch, start, length);
if (TextUtils.isEmpty(string)) {
return;
}
if (BODY.equals(this.currentLocalName)) { // ignore
} else if (ROUTE.equals(this.currentLocalName)) { // ignore
} else if (MESSAGE.equals(this.currentLocalName)) { // ignore
} else if (ROUTE_CONFIGURED_FOR_MESSAGE.equals(this.currentLocalName)) { // ignore
} else if (STOP.equals(this.currentLocalName)) { // ignore
} else if (TEXT.equals(this.currentLocalName)) {
this.currentTextSb.append(string);
} else if (TEXT_SECONDARY_LANGUAGE.equals(this.currentLocalName)) {
this.currentTextSecondaryLanguageSb.append(string);
} else {
MTLog.w(this, "characters() > Unexpected name '%s'! while parsing '%s'", this.currentLocalName, string);
}
} catch (Exception e) {
MTLog.w(this, e, "Error while parsing '%s' value '%s, %s, %s'!", this.currentLocalName, ch, start, length);
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
super.endElement(uri, localName, qName);
if (MESSAGE.equals(localName)) {
if (this.currentTextSb.length() == 0 && this.currentTextSecondaryLanguageSb.length() == 0) {
return; // no message
}
String textHtml = enhanceHtml(this.currentTextSb.toString(), this.textBoldWords);
String textSecondaryHtml = enhanceHtml(this.currentTextSecondaryLanguageSb.toString(), this.textSecondaryBoldWords);
if (this.currentRouteConfiguredForMessage.size() > 0) { // ROUTE(s)
for (String routeTag : this.currentRouteConfiguredForMessage.keySet()) {
if (this.currentRouteConfiguredForMessage.get(routeTag).size() == 0) {
String targetUUID = NextBusProvider.getAgencyRouteTagTargetUUID(this.authority, routeTag);
int severity = findRouteSeverity();
addServiceUpdates(targetUUID, severity, textHtml, textSecondaryHtml);
} else {
for (String stopTag : this.currentRouteConfiguredForMessage.get(routeTag)) {
String targetUUID = NextBusProvider.getAgencyRouteStopTagTargetUUID(this.authority, routeTag, stopTag);
int severity = findStopPriority();
addServiceUpdates(targetUUID, severity, textHtml, textSecondaryHtml);
}
}
}
} else if (this.currentRouteTag != null) {
String targetUUID = NextBusProvider.getAgencyRouteTagTargetUUID(this.authority, this.currentRouteTag);
int severity = findAgencySeverity();
addServiceUpdates(targetUUID, severity, textHtml, textSecondaryHtml);
} else if (this.currentRouteAll) { // AGENCY
String targetUUID = NextBusProvider.getAgencyTargetUUID(this.authority);
int severity = findAgencySeverity();
addServiceUpdates(targetUUID, severity, textHtml, textSecondaryHtml);
} else {
MTLog.w(this, "Unexpected combination of tags!");
}
}
}
private int findStopPriority() {
if (MESSAGE_PRIORITY_NORMAL.equals(this.currentMessagePriority)) {
return ServiceUpdate.SEVERITY_WARNING_POI;
} else if (MESSAGE_PRIORITY_LOW.equals(this.currentMessagePriority)) {
return ServiceUpdate.SEVERITY_INFO_POI;
}
MTLog.w(this, "endElement() > unexpected message priority: %s", this.currentMessagePriority);
return ServiceUpdate.SEVERITY_WARNING_UNKNOWN; // default
}
private int findRouteSeverity() {
if (MESSAGE_PRIORITY_NORMAL.equals(this.currentMessagePriority)) {
return ServiceUpdate.SEVERITY_WARNING_RELATED_POI;
} else if (MESSAGE_PRIORITY_LOW.equals(this.currentMessagePriority)) {
return ServiceUpdate.SEVERITY_INFO_RELATED_POI;
}
MTLog.w(this, "endElement() > unexpected message priority: %s", this.currentMessagePriority);
return ServiceUpdate.SEVERITY_WARNING_UNKNOWN; // default
}
private int findAgencySeverity() {
if (MESSAGE_PRIORITY_NORMAL.equals(this.currentMessagePriority)) {
return ServiceUpdate.SEVERITY_WARNING_AGENCY;
} else if (MESSAGE_PRIORITY_LOW.equals(this.currentMessagePriority)) {
return ServiceUpdate.SEVERITY_INFO_AGENCY;
}
MTLog.w(this, "endElement() > unexpected message priority: %s", this.currentMessagePriority);
return ServiceUpdate.SEVERITY_WARNING_UNKNOWN; // default
}
private void addServiceUpdates(String targetUUID, int severity, String textHtml, String textSecondaryHtml) {
if (this.currentTextSb.length() > 0) {
if (!this.textMessageIdTargetUUID.get(this.currentMessageId).contains(targetUUID)) {
this.serviceUpdates.add(new ServiceUpdate(null, targetUUID, this.newLastUpdateInMs, this.serviceUpdateMaxValidityInMs, this.currentTextSb
.toString(), textHtml, severity, AGENCY_SOURCE_ID, AGENCY_SOURCE_LABEL, this.textLanguageCode));
this.textMessageIdTargetUUID.get(this.currentMessageId).add(targetUUID);
}
}
if (this.currentTextSecondaryLanguageSb.length() > 0) {
if (!this.textSecondaryMessageIdTargetUUID.get(this.currentMessageId).contains(targetUUID)) {
this.serviceUpdates.add(new ServiceUpdate(null, targetUUID, this.newLastUpdateInMs, this.serviceUpdateMaxValidityInMs,
this.currentTextSecondaryLanguageSb.toString(), textSecondaryHtml, severity, AGENCY_SOURCE_ID, AGENCY_SOURCE_LABEL,
this.textSecondaryLanguageCode));
this.textSecondaryMessageIdTargetUUID.get(this.currentMessageId).add(targetUUID);
}
}
}
private String enhanceHtml(String originalHtml, Pattern boldWords) {
if (TextUtils.isEmpty(originalHtml)) {
return originalHtml;
}
try {
String html = originalHtml;
html = enhanceHtmlBold(html, boldWords);
return html;
} catch (Exception e) {
MTLog.w(this, e, "Error while trying to enhance HTML (using original)!");
return originalHtml;
}
}
private static final String CLEAN_BOLD_REPLACEMENT = HtmlUtils.applyBold("$1");
private String enhanceHtmlBold(String html, Pattern regex) {
if (regex == null || TextUtils.isEmpty(html)) {
return html;
}
try {
return regex.matcher(html).replaceAll(CLEAN_BOLD_REPLACEMENT);
} catch (Exception e) {
MTLog.w(this, e, "Error while making text bold!");
return html;
}
}
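// Sketch of the intended effect (the pattern and message are hypothetical, and this assumes
// HtmlUtils.applyBold("$1") wraps the matched group in bold markup): with a bold-words pattern of
// "(detour|delay)", enhanceHtmlBold("Expect a delay on Main St", pattern) would return the same
// sentence with "delay" wrapped in bold.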
}
public static class NextBusDbHelper extends MTSQLiteOpenHelper {
private static final String TAG = NextBusDbHelper.class.getSimpleName();
@Override
public String getLogTag() {
return TAG;
}
/**
* Override if multiple {@link NextBusDbHelper} implementations in same app.
*/
protected static final String DB_NAME = "nextbus.db";
/**
* Override if multiple {@link NextBusDbHelper} implementations in same app.
*/
protected static final String PREF_KEY_AGENCY_LAST_UPDATE_MS = "pNextBusMessagesLastUpdate";
public static final String T_NEXT_BUS_SERVICE_UPDATE = ServiceUpdateProvider.ServiceUpdateDbHelper.T_SERVICE_UPDATE;
private static final String T_NEXT_BUS_SERVICE_UPDATE_SQL_CREATE = ServiceUpdateProvider.ServiceUpdateDbHelper.getSqlCreate(T_NEXT_BUS_SERVICE_UPDATE);
private static final String T_NEXT_BUS_SERVICE_UPDATE_SQL_DROP = SqlUtils.getSQLDropIfExistsQuery(T_NEXT_BUS_SERVICE_UPDATE);
public static final String T_NEXT_BUS_STATUS = StatusDbHelper.T_STATUS;
private static final String T_NEXT_BUS_STATUS_SQL_CREATE = StatusDbHelper.getSqlCreate(T_NEXT_BUS_STATUS);
private static final String T_NEXT_BUS_STATUS_SQL_DROP = SqlUtils.getSQLDropIfExistsQuery(T_NEXT_BUS_STATUS);
private static int dbVersion = -1;
/**
* Override if multiple {@link NextBusDbHelper} in same app.
*/
public static int getDbVersion(Context context) {
if (dbVersion < 0) {
dbVersion = context.getResources().getInteger(R.integer.next_bus_db_version);
}
return dbVersion;
}
private Context context;
public NextBusDbHelper(Context context) {
super(context, DB_NAME, null, getDbVersion(context));
this.context = context;
}
@Override
public void onCreateMT(SQLiteDatabase db) {
initAllDbTables(db);
}
@Override
public void onUpgradeMT(SQLiteDatabase db, int oldVersion, int newVersion) {
db.execSQL(T_NEXT_BUS_SERVICE_UPDATE_SQL_DROP);
db.execSQL(T_NEXT_BUS_STATUS_SQL_DROP);
PreferenceUtils.savePrefLcl(this.context, PREF_KEY_AGENCY_LAST_UPDATE_MS, 0L, true);
initAllDbTables(db);
}
public boolean isDbExist(Context context) {
return SqlUtils.isDbExist(context, DB_NAME);
}
private void initAllDbTables(SQLiteDatabase db) {
db.execSQL(T_NEXT_BUS_SERVICE_UPDATE_SQL_CREATE);
db.execSQL(T_NEXT_BUS_STATUS_SQL_CREATE);
}
}
}
|
package com.codepoetics.protonpack;
import java.util.*;
import java.util.function.*;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* A source of Streams that can be repeatedly streamed.
* @param <T> The type over which the streamable's streams stream.
*/
public interface Streamable<T> extends Supplier<Stream<T>> {
/**
* Gets an empty streamable.
* @param <T> The type of the values that aren't in the streamable's streams.
* @return An empty streamable.
*/
static <T> Streamable<T> empty() {
return Stream::empty;
}
/**
* For converting a method reference to a no-arg method that returns a stream into a Streamable.
* @param streamable Anything that can be cast to a Streamable.
* @param <T> The type over which the streamable's streams stream.
* @return The streamable.
*/
static <T> Streamable<T> of(Supplier<Stream<T>> streamable) {
return streamable::get;
}
/**
* Create a streamable that produces streams over an array of items.
* @param items The items that the streamable's streams will stream.
* @param <T> The type of the values in the array.
* @return The streamable.
*/
@SafeVarargs
static <T> Streamable<T> of(T... items) {
return () -> Stream.of(items);
}
/**
* Create a streamable that produces streams over a collection of items.
* @param collection The items that the streamable's streams will stream.
* @param <T> The type of the values in the collection.
* @return The streamable.
*/
static <T> Streamable<T> of(Collection<T> collection) {
return collection::stream;
}
/**
* Create a streamable that produces streams over an iterable of items.
* @param iterable The items that the streamable's streams will stream.
* @param <T> The type of the values in the iterable.
* @return The streamable.
*/
static <T> Streamable<T> of(Iterable<T> iterable) {
return () -> StreamUtils.stream(iterable);
}
/**
* Create a streamable that produces streams of 0 or 1 elements over an optional value.
* @param optional The optional item that the streamable's streams will stream.
* @param <T> The type of the optional.
* @return The streamable.
*/
static <T> Streamable<T> of(Optional<T> optional) {
return () -> StreamUtils.stream(optional);
}
/**
* Concatenate a series of streamables together.
* @param streamables The streamables to concatenate.
* @param <T> The type of the streamables.
* @return A streamable which streams over the concatenation of the streams produced by all the source streamables.
*/
@SafeVarargs
static <T> Streamable<T> ofAll(Streamable<T>...streamables) {
return Stream.of(streamables).reduce(Streamable::concat).orElseGet(Streamable::empty);
}
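// Minimal usage sketch (hypothetical values): unlike a Stream, a Streamable can be consumed
// repeatedly because every call to stream()/get() produces a fresh Stream.
//   Streamable<String> names = Streamable.of("alice", "bob").concat(Streamable.of("carol"));
//   long count = names.stream().count();                          // 3
//   List<String> upper = names.map(String::toUpperCase).toList(); // ["ALICE", "BOB", "CAROL"]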
/**
* Synonym for "get"
* @return A stream over the streamable
*/
default Stream<T> stream() {
return get();
}
/**
* Concatenate this streamable with another streamable.
* @param streamable The streamable to concatenate.
* @return A concatenated streamable, which streams over the concatenation of the streams produced by its source streamables.
*/
default Streamable<T> concat(Streamable<T> streamable) {
return () -> Stream.concat(stream(), streamable.stream());
}
/**
* Create a streamable that transforms the streams produced by this streamable with a stream transformer.
* @param transformer The transformer to apply to this streamable's streams.
* @param <T2> The type of the streams produced by the transformation.
* @return A streamable which produces the transformed streams.
*/
default <T2> Streamable<T2> transform(Function<Stream<T>, Stream<T2>> transformer) {
return () -> transformer.apply(stream());
}
/**
* Transform this streamable's streams with the supplied map.
* @param f The map to apply.
* @param <T2> The mapped-to type.
* @return A streamable which produces the transformed streams.
*/
default <T2> Streamable<T2> map(Function<? super T, ? extends T2> f) {
return transform(s -> s.map(f));
}
/**
* Transform this streamable's streams with the supplied flatmap.
* @param f The flatmap to apply.
* @param <T2> The flatmapped-to type.
* @return A streamable which produces the transformed streams.
*/
default <T2> Streamable<T2> flatMap(Function<? super T, Stream<? extends T2>> f) {
return transform(s -> s.flatMap(f));
}
/**
* Transform this streamable's streams with the supplied filter predicate.
* @param predicate The filter predicate to apply.
* @return A streamable which produces the transformed streams.
*/
default Streamable<T> filter(Predicate<? super T> predicate) {
return transform(s -> s.filter(predicate));
}
/**
* Transform this streamable's streams with the supplied filter predicate, rejecting items which match the predicate.
* @param predicate The filter predicate to apply.
* @return A streamable which produces the transformed streams.
*/
default Streamable<T> reject(Predicate<? super T> predicate) {
return transform(s -> s.filter(predicate.negate()));
}
/**
* Transform this streamable's streams by sorting them.
* @param comparator The comparator to use in sorting.
* @return A streamable which produces the transformed streams.
*/
default Streamable<T> sorted(Comparator<? super T> comparator) {
return () -> stream().sorted(comparator);
}
/**
* Transform this streamable's streams by skipping elements
* @param n The number of elements to skip
* @return A streamable which produces the transformed streams.
*/
default Streamable<T> skip(long n) {
return () -> stream().skip(n);
}
/**
* Transform this streamable's streams by limiting the number of elements they can contain.
* @param n The number of elements to limit to.
* @return A streamable which produces the transformed streams.
*/
default Streamable<T> limit(long n) {
return () -> stream().limit(n);
}
/**
* Stream this streamable, and call forEach on the resulting stream with the supplied action.
* @param action The action to apply to each stream element.
*/
default void forEach(Consumer<T> action) {
stream().forEach(action);
}
/**
* Stream this streamable, and call forEach on the resulting stream in order with the supplied action.
* @param action The action to apply to each stream element.
*/
default void forEachOrdered(Consumer<T> action) {
stream().forEachOrdered(action);
}
/**
* Stream this streamable, and collect the stream with the supplied collector.
* @param collector The collector to use to collect streamed values.
* @param <O> The output type of the collector.
* @return The collected result.
*/
default <O> O collect(Collector<T, ?, O> collector) {
return stream().collect(collector);
}
/**
* Stream this streamable, and collect the stream to a list.
* @return The collected result.
*/
default List<T> toList() {
return collect(Collectors.toList());
}
/**
* Stream this streamable, and collect the stream to a set.
* @return The collected result.
*/
default Set<T> toSet() {
return collect(Collectors.toSet());
}
/**
* Stream this streamable, and collect the stream to a map, extracting keys with the supplied index function.
* @param indexFunction The function to use to extract keys from the streamed values.
* @param <K> The type of the keys.
* @return The collected result.
*/
default <K> Map<K, T> toMap(Function<? super T, ? extends K> indexFunction) {
return collect(Collectors.toMap(indexFunction, v -> v));
}
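// Example (hypothetical values): Streamable.of("a", "bb", "ccc").toMap(String::length)
// yields {1="a", 2="bb", 3="ccc"}, each value indexed by the key the function extracts from it.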
/**
* Stream this streamable, and collect the stream to a map, extracting keys and values with the supplied functions.
* @param keyFunction The function to use to extract keys from the stream.
* @param valueFunction The function to use to extract values from the stream.
* @param <K> The type of the keys.
* @param <V> The type of the values.
* @return The collected result.
*/
default <K, V> Map<K, V> toMap(Function<? super T, ? extends K> keyFunction, Function<? super T, ? extends V> valueFunction) {
return collect(Collectors.toMap(keyFunction, valueFunction));
}
/**
* Stream this streamable, and collect the stream to an array.
* @param arrayConstructor A function that will construct a new empty array of the required size.
* @return The collected result.
*/
default T[] toArray(IntFunction<T[]> arrayConstructor) {
return stream().toArray(arrayConstructor);
}
}
|
package org.transtruct.cmthunes.ircbot.applets;
import java.io.*;
import java.net.*;
import java.util.*;
import org.htmlparser.*;
import org.htmlparser.lexer.*;
import org.htmlparser.util.*;
import org.transtruct.cmthunes.util.*;
import org.transtruct.cmthunes.irc.*;
public class GroupHugApplet implements BotApplet {
private URL url;
private FixedBlockingBuffer<String> confessions;
private String errorMessage;
private Flag error;
private Thread confessionFetcher;
private List<Integer> page_numbers;
private class GetConfessions implements Runnable {
@Override
public void run() {
while(true) {
try {
int page = page_numbers.remove(0);
page_numbers.add(page);
url = new URL(String.format("http://archive.grouphug.us/frontpage?page=%d", page));
} catch(MalformedURLException e) {
e.printStackTrace();
}
/* Wait for error flag to be cleared */
GroupHugApplet.this.error.waitUninterruptiblyFor(false);
GroupHugApplet.this.populateConfessions();
}
}
}
public GroupHugApplet() {
Random r = new Random();
this.page_numbers = new ArrayList<Integer>();
this.page_numbers.add(0);
for(int i = 1; i <= 200; i++) {
this.page_numbers.add(r.nextInt(this.page_numbers.size()), i);
}
this.confessions = new FixedBlockingBuffer<String>(10);
this.errorMessage = null;
this.error = new Flag();
this.confessionFetcher = new Thread(new GetConfessions());
this.confessionFetcher.setDaemon(true);
this.confessionFetcher.start();
}
private void populateConfessions() {
HttpURLConnection connection;
Lexer lexer;
try {
connection = (HttpURLConnection) this.url.openConnection();
lexer = new Lexer(connection);
} catch(IOException e) {
this.errorMessage = "Could not establish a connection to grouphug.us";
this.error.set();
return;
} catch(ParserException e) {
this.errorMessage = "Parser error";
this.error.set();
return;
}
Node node = null;
int state = 0;
String confessionId = "";
while(true) {
try {
node = lexer.nextNode();
if(node == null) {
break;
}
if(node instanceof Tag) {
Tag tag = (Tag) node;
String tagId = tag.getAttribute("id");
String tagName = tag.getRawTagName();
String tagClass = tag.getAttribute("class");
switch(state) {
case 0:
if(tagId != null && tagId.startsWith("node-")) {
state = 1;
}
break;
case 1:
if(tagName != null && tagName.equals("a")) {
node = lexer.nextNode();
if(node != null && node instanceof Text) {
confessionId = ((Text) node).getText();
}
} else if((tagName != null && tagClass != null)
&& (tagName.equals("div") && tagClass.contains("content"))) {
StringBuffer confessionBuffer = new StringBuffer();
while(true) {
node = lexer.nextNode();
if(node == null) {
break;
}
if(node instanceof Tag) {
tagName = ((Tag) node).getRawTagName();
if(tagName.equals("/div")) {
break;
}
if(tagName.equals("/p")) {
confessionBuffer.append("\n");
}
}
if(node instanceof Text) {
String confessionPart = ((Text) node).getText();
confessionPart = Translate.decode(confessionPart);
confessionPart = confessionPart.replaceAll("\u2019", "'");
confessionBuffer.append(confessionPart.trim());
}
}
if(confessionBuffer.length() > 0) {
String confession = String.format("%s: %s", confessionId,
confessionBuffer.toString());
this.confessions.add(confession.trim());
}
state = 0;
}
break;
}
}
} catch(ParserException e) {
this.errorMessage = "Parser error";
this.error.set();
return;
}
}
}
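/*
 * For reference, the state machine above effectively expects markup of roughly this shape
 * (hypothetical values, inferred only from the tags and attributes the parser looks for):
 *
 *   <div id="node-12345">
 *     <a href="...">#12345</a>
 *     <div class="content"><p>confession text</p></div>
 *   </div>
 */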
public String getConfession() {
if(this.error.isSet()) {
String msg = this.errorMessage;
this.error.clear();
return msg;
} else if(this.confessionFetcher.isAlive()) {
return this.confessions.get();
} else {
return "Fetching thread died";
}
}
public void run(IRCChannel channel, IRCUser from, String command, String[] args, String unparsed) {
String confession = this.getConfession();
String[] parts = BotAppletUtil.blockFormat(confession, 300, 10);
channel.writeMultiple(parts);
}
}
|
package com.conveyal.r5.analyst.broker;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
import com.amazonaws.services.ec2.model.*;
import com.conveyal.r5.analyst.cluster.GenericClusterRequest;
import com.conveyal.r5.common.JsonUtilities;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;
import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.TObjectLongMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TObjectLongHashMap;
import org.glassfish.grizzly.http.server.Request;
import org.glassfish.grizzly.http.server.Response;
import org.glassfish.grizzly.http.util.HttpStatus;
import com.conveyal.r5.analyst.cluster.AnalystWorker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
/**
* This class tracks incoming requests from workers to consume Analyst tasks, and attempts to match those
* requests to enqueued tasks. It aims to draw tasks fairly from all users, and fairly from all jobs within each user,
* while attempting to respect the graph affinity of each worker (give it tasks that require the same graph it has been
* working on recently).
*
* When no work is available or no workers are available, the polling functions return immediately, avoiding spin-wait.
* When they are receiving no work, workers are expected to disconnect and re-poll occasionally, on the order of 30
* seconds. This serves as a signal to the broker that they are still alive and waiting.
*
* TODO if there is a backlog of work (the usual case when jobs are lined up) workers will constantly change graphs.
* Because (at least currently) two users never share the same graph, we can get by with pulling tasks cyclically or
* randomly from all the jobs, and just actively shaping the number of workers with affinity for each graph by forcing
* some of them to accept tasks on graphs other than the one they have declared affinity for.
*
* This could be thought of as "affinity homeostasis". We will constantly keep track of the ideal proportion of workers
* by graph (based on active queues), and the true proportion of consumers by graph (based on incoming requests) then
* we can decide when a worker's graph affinity should be ignored and what it should be forced to.
*
* It may also be helpful to mark jobs every time they are skipped in the LRU queue. Each time a job is serviced,
* it is taken out of the queue and put at its end. Jobs that have not been serviced float to the top.
*
* TODO: occasionally purge closed connections from workersByCategory
* TODO: worker catalog and graph affinity homeostasis
* TODO: catalog of recently seen consumers by affinity with IP: response.getRequest().getRemoteAddr();
*/
public class Broker implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(Broker.class);
public final CircularList<Job> jobs = new CircularList<>();
/** The most tasks to deliver to a worker at a time. */
public final int MAX_TASKS_PER_WORKER = 8;
/**
* How long to give workers to start up (in ms) before assuming that they have started (and starting more
* on a given graph if they haven't).
*/
public static final long WORKER_STARTUP_TIME = 60 * 60 * 1000;
private int nWaitingConsumers = 0; // including some that might be closed
private int nextTaskId = 0;
/** Maximum number of workers allowed */
private int maxWorkers;
private static final ObjectMapper mapper = JsonUtilities.objectMapper;
private long nextRedeliveryCheckTime = System.currentTimeMillis();
/*static {
mapper.registerModule(AgencyAndIdSerializer.makeModule());
mapper.registerModule(QualifiedModeSetSerializer.makeModule());
mapper.registerModule(JavaLocalDateSerializer.makeModule());
mapper.registerModule(TraverseModeSetSerializer.makeModule());
}*/
/** The configuration for this broker. */
private final Properties brokerConfig;
/** The configuration that will be applied to workers launched by this broker. */
private Properties workerConfig;
/** Keeps track of all the workers that have contacted this broker recently asking for work. */
protected WorkerCatalog workerCatalog = new WorkerCatalog();
/**
* Requests that are not part of a job and can "cut in line" in front of jobs for immediate execution.
* When a high priority task is first received, we attempt to send it to a worker right away via
* the side channels. If that doesn't work, we put them here to be picked up the next time a worker
* is available via normal task distribution channels.
*/
private ArrayListMultimap<WorkerCategory, GenericClusterRequest> stalledHighPriorityTasks = ArrayListMultimap.create();
/**
* High priority requests that have just come and are about to be sent down a single point channel.
* They are put here for just 100 ms so that any that arrive together are batched to the same worker.
* If we didn't do this, two requests arriving at basically the same time could get fanned out to
* two different workers because the second came in in between closing the side channel and the worker
* reopening it.
*/
private Multimap<WorkerCategory, GenericClusterRequest> newHighPriorityTasks = ArrayListMultimap.create();
/** Priority requests that have already been farmed out to workers, and are awaiting a response. */
private TIntObjectMap<Response> highPriorityResponses = new TIntObjectHashMap<>();
/** Outstanding requests from workers for tasks, grouped by worker graph affinity. */
Map<WorkerCategory, Deque<Response>> workersByCategory = new HashMap<>();
/**
* Side channels used to send single point requests to workers, cutting in front of any other work on said workers.
* We use a TreeMultimap because it is ordered, and the wrapped response defines an order based on
* machine ID. This way, the same machine will tend to get all single point work for a graph,
* so multiple machines won't stay alive to do single point work.
*/
private Multimap<WorkerCategory, WrappedResponse> singlePointChannels = TreeMultimap.create();
/** should we work offline */
private boolean workOffline;
private AmazonEC2 ec2;
private Timer timer = new Timer();
private String workerName, project;
/**
* keep track of which graphs we have launched workers on and how long ago we launched them,
* so that we don't re-request workers which have been requested.
*/
private TObjectLongMap<WorkerCategory> recentlyRequestedWorkers = new TObjectLongHashMap<>();
    // TODO: maybe keep a queue of operations to complete (delete, enqueue, etc.) to avoid synchronizing all these methods?
public Broker (Properties brokerConfig, String addr, int port) {
// print out date on startup so that CloudWatch logs has a unique fingerprint
LOG.info("Analyst broker starting at {}", LocalDateTime.now().format(DateTimeFormatter.ISO_DATE_TIME));
this.brokerConfig = brokerConfig;
        // Default to offline mode when the property is absent. Boolean.parseBoolean(null) silently returns
        // false, so the raw property value must be checked rather than the parsed boolean.
        String workOfflineProperty = brokerConfig.getProperty("work-offline");
        this.workOffline = workOfflineProperty == null || Boolean.parseBoolean(workOfflineProperty);
if (!workOffline) {
// create a config for the AWS workers
workerConfig = new Properties();
if (this.brokerConfig.getProperty("worker-config") != null) {
// load the base worker configuration if specified
try {
File f = new File(this.brokerConfig.getProperty("worker-config"));
FileInputStream fis = new FileInputStream(f);
workerConfig.load(fis);
fis.close();
} catch (IOException e) {
LOG.error("Error loading base worker configuration", e);
}
}
workerConfig.setProperty("broker-address", addr);
workerConfig.setProperty("broker-port", "" + port);
if (brokerConfig.getProperty("statistics-queue") != null)
workerConfig.setProperty("statistics-queue", brokerConfig.getProperty("statistics-queue"));
workerConfig.setProperty("graphs-bucket", brokerConfig.getProperty("graphs-bucket"));
workerConfig.setProperty("pointsets-bucket", brokerConfig.getProperty("pointsets-bucket"));
// Tell the workers to shut themselves down automatically
workerConfig.setProperty("auto-shutdown", "true");
}
// TODO what are these for?
workerName = brokerConfig.getProperty("worker-name") != null ? brokerConfig.getProperty("worker-name") : "analyst-worker";
project = brokerConfig.getProperty("project") != null ? brokerConfig.getProperty("project") : "analyst";
this.maxWorkers = brokerConfig.getProperty("max-workers") != null ? Integer.parseInt(brokerConfig.getProperty("max-workers")) : 4;
ec2 = new AmazonEC2Client();
// default to current region when running in EC2
com.amazonaws.regions.Region r = Regions.getCurrentRegion();
if (r != null)
ec2.setRegion(r);
}
/**
* Enqueue a task for execution ASAP, planning to return the response over the same HTTP connection.
* Low-reliability, no re-delivery.
*/
public synchronized void enqueuePriorityTask (GenericClusterRequest task, Response response) {
boolean workersAvailable = workersAvailable(task.getWorkerCategory());
if (!workersAvailable) {
createWorkersInCategory(task.getWorkerCategory());
// chances are it won't be done in 30 seconds, but we want to poll frequently to avoid issues with phasing
try {
response.setHeader("Retry-After", "30");
response.setStatus(202, "No workers available in this category, please retry shortly");
Writer resWriter = response.getWriter();
JsonUtilities.objectMapper.writeValue(resWriter, new ClusterStatus(ClusterStatus.Status.CLUSTER_STARTING_UP));
resWriter.close();
} catch (IOException e) {
LOG.error("Could not finish high-priority task, 202 response", e);
}
}
// if we're in offline mode, enqueue anyhow to kick the cluster to build the graph
// note that this will mean that requests get delivered multiple times in offline mode,
// so some unnecessary computation takes place
if (workersAvailable || workOffline) {
task.taskId = nextTaskId++;
newHighPriorityTasks.put(task.getWorkerCategory(), task);
            // Only suspend the response when workers are available; otherwise a 202 has already been sent above.
if (workersAvailable) highPriorityResponses.put(task.taskId, response);
// wait 100ms to deliver to workers in case another request comes in almost simultaneously
timer.schedule(new TimerTask() {
@Override
public void run() {
deliverHighPriorityTasks(task.getWorkerCategory());
}
}, 100);
}
// do not notify task delivery thread just yet as we haven't put anything in the task delivery queue yet.
}
/** Attempt to deliver high priority tasks via side channels, or move them into normal channels if need be. */
public synchronized void deliverHighPriorityTasks (WorkerCategory category) {
Collection<GenericClusterRequest> tasks = newHighPriorityTasks.get(category);
if (tasks.isEmpty())
// someone got here first
return;
// try to deliver via side channels
Collection<WrappedResponse> wrs = singlePointChannels.get(category);
if (!wrs.isEmpty()) {
// there is (probably) a single point machine waiting to receive this
WrappedResponse wr = wrs.iterator().next();
try {
wr.response.setContentType("application/json");
OutputStream os = wr.response.getOutputStream();
mapper.writeValue(os, tasks);
os.close();
wr.response.resume();
newHighPriorityTasks.removeAll(category);
return;
} catch (Exception e) {
LOG.info("Failed to deliver single point job via side channel, reverting to normal channel", e);
} finally {
// remove responses whether they are dead or alive
removeSinglePointChannel(category, wr);
}
}
// if we got here we didn't manage to send it via side channel, put it in the rotation for normal channels
// not using putAll as it retains a link to the original collection and then we get a concurrent modification exception later.
tasks.forEach(t -> stalledHighPriorityTasks.put(category, t));
LOG.info("No side channel available for graph {}, delivering {} tasks via normal channel",
category, tasks.size());
newHighPriorityTasks.removeAll(category);
// wake up delivery thread
notify();
}
/** Enqueue some tasks for queued execution possibly much later. Results will be saved to S3. */
public synchronized void enqueueTasks (List<GenericClusterRequest> tasks) {
Job job = findJob(tasks.get(0)); // creates one if it doesn't exist
if (!workersAvailable(job.getWorkerCategory())) {
createWorkersInCategory(job.getWorkerCategory());
}
for (GenericClusterRequest task : tasks) {
task.taskId = nextTaskId++;
job.addTask(task);
LOG.debug("Enqueued task id {} in job {}", task.taskId, job.jobId);
if (!task.graphId.equals(job.workerCategory.graphId)) {
LOG.error("Task graph ID {} does not match job: {}.", task.graphId, job.workerCategory);
}
if (!task.workerVersion.equals(job.workerCategory.workerVersion)) {
LOG.error("Task R5 commit {} does not match job: {}.", task.workerVersion, job.workerCategory);
}
}
// Wake up the delivery thread if it's waiting on input.
// This wakes whatever thread called wait() while holding the monitor for this Broker object.
notify();
}
public boolean workersAvailable (WorkerCategory category) {
// Ensure we don't assign work to dead workers.
workerCatalog.purgeDeadWorkers();
if (workOffline) {
return !workerCatalog.workersByGraph.get(category.graphId).isEmpty();
} else {
return !workerCatalog.workersByCategory.get(category).isEmpty();
}
}
/** Create workers for a given job, if need be */
public void createWorkersInCategory (WorkerCategory category) {
String clientToken = UUID.randomUUID().toString().replaceAll("-", "");
if (workOffline) {
LOG.info("Work offline enabled, not creating workers for {}", category);
return;
}
if (workerCatalog.observationsByWorkerId.size() >= maxWorkers) {
LOG.warn("{} workers already started, not starting more; jobs will not complete on {}", maxWorkers, category);
return;
}
// If workers have already been started up, don't repeat the operation.
if (recentlyRequestedWorkers.containsKey(category)
&& recentlyRequestedWorkers.get(category) >= System.currentTimeMillis() - WORKER_STARTUP_TIME){
LOG.info("Workers still starting on {}, not starting more", category);
return;
}
// TODO: compute
int nWorkers = 1;
// There are no workers on this graph with the right worker commit, start some.
LOG.info("Starting {} workers as there are none on {}", nWorkers, category);
RunInstancesRequest req = new RunInstancesRequest();
req.setImageId(brokerConfig.getProperty("ami-id"));
req.setInstanceType(InstanceType.valueOf(brokerConfig.getProperty("worker-type")));
req.setSubnetId(brokerConfig.getProperty("subnet-id"));
// even if we can't get all the workers we want at least get some
req.setMinCount(1);
req.setMaxCount(nWorkers);
// It's fine to just modify the worker config without a protective copy because this method is synchronized.
workerConfig.setProperty("initial-graph-id", category.graphId);
workerConfig.setProperty("worker-version", category.workerVersion);
// Tell the worker where to get its R5 JAR. This is a Conveyal S3 bucket with HTTP access turned on.
String workerDownloadUrl = String.format("http://r5-builds.s3-website-eu-west-1.amazonaws.com/%s.jar",
category.workerVersion);
workerConfig.setProperty("download-url", workerDownloadUrl);
// This is the R5 broker, so always start R5 workers (rather than OTP workers).
workerConfig.setProperty("main-class", AnalystWorker.class.getName());
ByteArrayOutputStream cfg = new ByteArrayOutputStream();
try {
workerConfig.store(cfg, "Worker config");
cfg.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
// Send the config to the new workers as EC2 "user data"
String userData = new String(Base64.getEncoder().encode(cfg.toByteArray()));
req.setUserData(userData);
if (brokerConfig.getProperty("worker-iam-role") != null)
req.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(brokerConfig.getProperty("worker-iam-role")));
// launch into a VPC if desired
if (brokerConfig.getProperty("subnet") != null)
req.setSubnetId(brokerConfig.getProperty("subnet"));
// allow us to retry request at will
req.setClientToken(clientToken);
// allow machine to shut itself completely off
req.setInstanceInitiatedShutdownBehavior(ShutdownBehavior.Terminate);
RunInstancesResult res = ec2.runInstances(req);
res.getReservation().getInstances().forEach(i -> {
Collection<Tag> tags = Arrays.asList(
new Tag("name", workerName),
new Tag("project", project)
);
i.setTags(tags);
});
recentlyRequestedWorkers.put(category, System.currentTimeMillis());
LOG.info("Requesting {} workers", nWorkers);
}
/** Consumer long-poll operations are enqueued here. */
public synchronized void registerSuspendedResponse(WorkerCategory category, Response response) {
// Add this worker to our catalog, tracking its graph affinity and the last time it was seen.
String workerId = response.getRequest().getHeader(AnalystWorker.WORKER_ID_HEADER);
if (workerId != null && !workerId.isEmpty()) {
workerCatalog.catalog(workerId, category);
} else {
LOG.error("Worker did not supply a unique ID for itself . Ignoring it.");
return;
}
        // Shelve this suspended response in a queue grouped by graph affinity.
Deque<Response> deque = workersByCategory.get(category);
if (deque == null) {
deque = new ArrayDeque<>();
workersByCategory.put(category, deque);
}
deque.addLast(response);
nWaitingConsumers += 1;
// Wake up the delivery thread if it's waiting on consumers.
// This is whatever thread called wait() while holding the monitor for this Broker object.
notify();
}
/** When we notice that a long poll connection has closed, we remove it here. */
public synchronized boolean removeSuspendedResponse(WorkerCategory category, Response response) {
Deque<Response> deque = workersByCategory.get(category);
if (deque == null) {
return false;
}
if (deque.remove(response)) {
nWaitingConsumers -= 1;
LOG.debug("Removed closed connection from queue.");
return true;
}
return false;
}
/**
* Register an HTTP connection that can be used to send single point requests directly to
* workers, bypassing normal task distribution channels.
*/
public synchronized void registerSinglePointChannel (WorkerCategory category, WrappedResponse response) {
singlePointChannels.put(category, response);
// No need to notify as the side channels are not used by the normal task delivery loop.
}
/**
* Remove a single point channel because the connection was closed.
*/
public synchronized boolean removeSinglePointChannel (WorkerCategory category, WrappedResponse response) {
return singlePointChannels.remove(category, response);
}
/**
* See if any jobs have undelivered tasks that should be re-enqueued for delivery.
*/
private void checkRedelivery() {
for (Job job : jobs) {
job.redeliver();
}
}
private boolean noUndeliveredTasks() {
for (Job job : jobs) {
if (!job.tasksAwaitingDelivery.isEmpty()) {
return false;
}
}
// No jobs have any tasks waiting for delivery, but high priority tasks that were not delivered via the side
// channel are stored outside the jobs.
return stalledHighPriorityTasks.isEmpty();
}
/**
* This method checks whether there are any high-priority tasks or normal job tasks and attempts to match them with
* waiting workers.
*
* It blocks by calling wait() whenever it has nothing to do (when no tasks or workers available). It is awakened
* whenever new tasks come in or when a worker (re-)connects.
*
* This whole function is synchronized because wait() must be called within a synchronized block. When wait() is
* called, the monitor is released and other threads listening for worker connections or added jobs can act.
*/
public synchronized void deliverTasks() throws InterruptedException {
// See if any tasks failed and need to be re-enqueued.
checkRedelivery();
// Wait until there are some undelivered tasks.
while (noUndeliveredTasks()) {
LOG.debug("Task delivery thread is going to sleep, there are no tasks waiting for delivery.");
// Thread will be notified when tasks are added or there are new incoming consumer connections.
wait();
// If a worker connected while there were no tasks queued for delivery,
// we need to check if any should be re-delivered.
checkRedelivery();
}
LOG.debug("Task delivery thread is awake and there are some undelivered tasks.");
while (nWaitingConsumers == 0) {
LOG.debug("Task delivery thread is going to sleep, there are no consumers waiting.");
// Thread will be notified when tasks are added or there are new incoming consumer connections.
wait();
}
LOG.debug("Task delivery thread awake; consumers are waiting and tasks are available");
// Loop over all jobs and send them to consumers
// This makes for an as-fair-as-possible allocation: jobs are fairly allocated between
// workers on their graph.
// start with high-priority tasks
HIGHPRIORITY: for (Map.Entry<WorkerCategory, Collection<GenericClusterRequest>> e : stalledHighPriorityTasks
.asMap().entrySet()) {
// the collection is an arraylist with the most recently added at the end
WorkerCategory workerCategory = e.getKey();
Collection<GenericClusterRequest> tasks = e.getValue();
// See if there are any workers that requested tasks in this category.
// Don't respect graph affinity when working offline; we can't arbitrarily start more workers.
Deque<Response> consumers;
if (!workOffline) {
consumers = workersByCategory.get(workerCategory);
} else {
Optional<Deque<Response>> opt = workersByCategory.values().stream().filter(c -> !c.isEmpty()).findFirst();
if (opt.isPresent()) consumers = opt.get();
else consumers = null;
}
if (consumers == null || consumers.isEmpty()) {
LOG.warn("No worker found for {}, needed for {} high-priority tasks", workerCategory, tasks.size());
continue HIGHPRIORITY;
}
Iterator<GenericClusterRequest> taskIt = tasks.iterator();
while (taskIt.hasNext() && !consumers.isEmpty()) {
Response consumer = consumers.pop();
// package tasks into a job
Job job = new Job("HIGH PRIORITY");
job.workerCategory = workerCategory;
for (int i = 0; i < MAX_TASKS_PER_WORKER && taskIt.hasNext(); i++) {
job.addTask(taskIt.next());
taskIt.remove();
}
// TODO inefficiency here: we should mix single point and multipoint in the same response
deliver(job, consumer);
                nWaitingConsumers -= 1;
}
}
// deliver low priority tasks
while (nWaitingConsumers > 0) {
// ensure we advance at least one; advanceToElement will not advance if the predicate passes
// for the first element.
jobs.advance();
// find a job that both has visible tasks and has available workers
// We don't respect graph affinity when working offline, because we can't start more workers
Job current;
if (!workOffline) {
current = jobs.advanceToElement(job -> !job.tasksAwaitingDelivery.isEmpty() &&
workersByCategory.containsKey(job.workerCategory) &&
!workersByCategory.get(job.workerCategory).isEmpty());
}
else {
current = jobs.advanceToElement(e -> !e.tasksAwaitingDelivery.isEmpty());
}
// nothing to see here
if (current == null) break;
Deque<Response> consumers;
if (!workOffline)
consumers = workersByCategory.get(current.workerCategory);
else {
Optional<Deque<Response>> opt = workersByCategory.values().stream().filter(c -> !c.isEmpty()).findFirst();
if (opt.isPresent()) consumers = opt.get();
else consumers = null;
}
// deliver this job to only one consumer
// This way if there are multiple workers and multiple jobs the jobs will be fairly distributed, more or less
deliver(current, consumers.pop());
            nWaitingConsumers -= 1;
}
// TODO: graph switching
// we've delivered everything we can, prevent anything else from happening until something changes
wait();
}
/**
* This uses a linear search through jobs, which should not be problematic unless there are thousands of
     * simultaneous jobs. TODO task IDs should really not be sequential integers, should they?
* @return a Job object that contains the given task ID.
*/
public Job getJobForTask (int taskId) {
for (Job job : jobs) {
if (job.containsTask(taskId)) {
return job;
}
}
return null;
}
/**
* Attempt to hand some tasks from the given job to a waiting consumer connection.
* The write will fail if the consumer has closed the connection but it hasn't been removed from the connection
* queue yet. This can happen because the Broker methods are synchronized, and the removal action may be waiting
* to get the monitor while we are trying to distribute tasks here.
* @return whether the handoff succeeded.
*/
public synchronized boolean deliver (Job job, Response response) {
// Check up-front whether the connection is still open.
if (!response.getRequest().getRequest().getConnection().isOpen()) {
LOG.debug("Consumer connection was closed. It will be removed.");
return false;
}
// Get up to N tasks from the tasksAwaitingDelivery deque
List<GenericClusterRequest> tasks = new ArrayList<>();
while (tasks.size() < MAX_TASKS_PER_WORKER && !job.tasksAwaitingDelivery.isEmpty()) {
tasks.add(job.tasksAwaitingDelivery.poll());
}
// Attempt to deliver the tasks to the given consumer.
try {
response.setStatus(HttpStatus.OK_200);
OutputStream out = response.getOutputStream();
mapper.writeValue(out, tasks);
response.resume();
} catch (IOException e) {
// The connection was probably closed by the consumer, but treat it as a server error.
LOG.debug("Consumer connection caused IO error, it will be removed.");
response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR_500);
response.resume();
// Delivery failed, put tasks back on (the end of) the queue.
job.tasksAwaitingDelivery.addAll(tasks);
return false;
}
LOG.debug("Delivery of {} tasks succeeded.", tasks.size());
job.lastDeliveryTime = System.currentTimeMillis();
return true;
}
/**
* Take a normal (non-priority) task out of a job queue, marking it as completed so it will not be re-delivered.
* TODO maybe use unique delivery receipts instead of task IDs to handle redelivered tasks independently
* @return whether the task was found and removed.
*/
public synchronized boolean markTaskCompleted (int taskId) {
Job job = getJobForTask(taskId);
if (job == null) {
LOG.error("Could not find a job containing task {}, and therefore could not mark the task as completed.", taskId);
return false;
}
job.completedTasks.add(taskId);
return true;
}
/**
     * Marks the specified priority request as completed, and returns the suspended Response object for the connection
     * that submitted the priority request (the UI), which is probably still waiting to receive a result back over the
     * same connection. An HttpHandler thread can then pump data from the DELETE body back to the origin of the request,
* without blocking the broker thread.
* TODO rename to "deregisterSuspendedProducer" and "deregisterSuspendedConsumer" ?
*/
public synchronized Response deletePriorityTask (int taskId) {
return highPriorityResponses.remove(taskId);
}
/** This is the broker's main event loop. */
@Override
public void run() {
while (true) {
try {
deliverTasks();
} catch (InterruptedException e) {
LOG.info("Task pump thread was interrupted.");
return;
}
}
}
/** Find the job that should contain a given task, creating that job if it does not exist. */
public Job findJob (GenericClusterRequest task) {
Job job = findJob(task.jobId);
if (job != null) {
return job;
}
job = new Job(task.jobId);
job.workerCategory = new WorkerCategory(task.graphId, task.workerVersion);
jobs.insertAtTail(job);
return job;
}
/** Find the job for the given jobId, returning null if that job does not exist. */
public Job findJob (String jobId) {
for (Job job : jobs) {
if (job.jobId.equals(jobId)) {
return job;
}
}
return null;
}
/** Delete the job with the given ID. */
public synchronized boolean deleteJob (String jobId) {
Job job = findJob(jobId);
if (job == null) return false;
return jobs.remove(job);
}
/** Returns whether this broker is tracking any jobs that have unfinished tasks. */
public synchronized boolean anyJobsActive() {
for (Job job : jobs) {
if (!job.isComplete()) return true;
}
return false;
}
/**
* We wrap responses in a class that has a machine ID, and then put them in a TreeSet so that
* the machine with the lowest ID on a given graph always gets single-point work. The reason
* for this is so that a single machine will tend to get single-point work and thus we don't
* unnecessarily keep multiple multipoint machines alive.
*/
public static class WrappedResponse implements Comparable<WrappedResponse> {
public final Response response;
public final String machineId;
public WrappedResponse(Request request, Response response) {
this.response = response;
this.machineId = request.getHeader(AnalystWorker.WORKER_ID_HEADER);
}
@Override public int compareTo(WrappedResponse wrappedResponse) {
return this.machineId.compareTo(wrappedResponse.machineId);
}
}
}
|
package org.usfirst.frc.team4930.robot.commands;
import org.usfirst.frc.team4930.robot.Robot;
import edu.wpi.first.wpilibj.command.Command;
/**
 * ArmController Command
 *
 * Declares that this command requires the designated "arm" subsystem and drives it from the joystick.
 *
 * @author Thomas
 */
public class ArmController extends Command {
public ArmController() {
requires(Robot.arm);
}
protected void initialize() {
}
protected void execute() {
Robot.arm.moveCan(Robot.oi.joystick2.getY());
}
protected boolean isFinished() {
return false;
}
protected void end() {
Robot.arm.stop();
}
protected void interrupted() {
Robot.arm.stop();
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.datatorrent.netlet;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datatorrent.common.util.Slice;
import com.datatorrent.netlet.Listener.ClientListener;
import com.datatorrent.netlet.util.CircularBuffer;
/**
* <p>Abstract AbstractClient class.</p>
*
* @author Chetan Narsude <chetan@datatorrent.com>
* @since 0.3.2
*/
public abstract class AbstractClient implements ClientListener
{
private static final int THROWABLES_COLLECTION_SIZE = 4;
public static final int MAX_SENDBUFFER_SIZE = 32 * 1024;
protected CircularBuffer<NetletThrowable> throwables;
protected final ByteBuffer writeBuffer;
protected final CircularBuffer<Slice> freeBuffer;
protected CircularBuffer<Slice> sendBuffer4Offers, sendBuffer4Polls;
protected boolean write = true;
protected SelectionKey key;
public boolean isConnected()
{
return key.isValid() && ((SocketChannel)key.channel()).isConnected();
}
public AbstractClient(int writeBufferSize, int sendBufferSize)
{
this(ByteBuffer.allocateDirect(writeBufferSize), sendBufferSize);
this.throwables = new CircularBuffer<NetletThrowable>(THROWABLES_COLLECTION_SIZE);
}
public AbstractClient(int sendBufferSize)
{
this(8 * 1 * 1024, sendBufferSize);
this.throwables = new CircularBuffer<NetletThrowable>(THROWABLES_COLLECTION_SIZE);
}
public AbstractClient()
{
this(8 * 1 * 1024, 1024);
this.throwables = new CircularBuffer<NetletThrowable>(THROWABLES_COLLECTION_SIZE);
}
public AbstractClient(ByteBuffer writeBuffer, int sendBufferSize)
{
this.throwables = new CircularBuffer<NetletThrowable>(THROWABLES_COLLECTION_SIZE);
this.writeBuffer = writeBuffer;
if (sendBufferSize == 0) {
sendBufferSize = 1024;
}
else if (sendBufferSize % 1024 > 0) {
sendBufferSize += 1024 - (sendBufferSize % 1024);
}
sendBuffer4Polls = sendBuffer4Offers = new CircularBuffer<Slice>(sendBufferSize, 10);
freeBuffer = new CircularBuffer<Slice>(sendBufferSize, 10);
}
@Override
public void registered(SelectionKey key)
{
this.key = key;
}
@Override
public void connected()
{
write = false;
}
@Override
public void disconnected()
{
write = true;
}
@Override
public final void read() throws IOException
{
SocketChannel channel = (SocketChannel)key.channel();
int read;
if ((read = channel.read(buffer())) > 0) {
this.read(read);
}
else if (read == -1) {
try {
channel.close();
}
finally {
disconnected();
unregistered(key);
key.attach(Listener.NOOP_CLIENT_LISTENER);
}
}
else {
logger.debug("{} read 0 bytes", this);
}
}
public void suspendRead()
{
key.interestOps(key.interestOps() & ~SelectionKey.OP_READ);
}
public void resumeRead()
{
key.interestOps(key.interestOps() | SelectionKey.OP_READ);
}
@Override
public final void write() throws IOException
{
/*
* at first when we enter this function, our buffer is in fill mode.
*/
int remaining, size;
if ((size = sendBuffer4Polls.size()) > 0 && (remaining = writeBuffer.remaining()) > 0) {
do {
Slice f = sendBuffer4Polls.peekUnsafe();
if (remaining <= f.length) {
writeBuffer.put(f.buffer, f.offset, remaining);
f.offset += remaining;
f.length -= remaining;
break;
}
else {
writeBuffer.put(f.buffer, f.offset, f.length);
remaining -= f.length;
freeBuffer.offer(sendBuffer4Polls.pollUnsafe());
}
}
while (--size > 0);
}
/*
* switch to the read mode!
*/
writeBuffer.flip();
SocketChannel channel = (SocketChannel)key.channel();
while ((remaining = writeBuffer.remaining()) > 0) {
remaining -= channel.write(writeBuffer);
if (remaining > 0) {
/*
* switch back to the fill mode.
*/
writeBuffer.compact();
return;
}
else if (size > 0) {
/*
* switch back to the write mode.
*/
writeBuffer.clear();
remaining = writeBuffer.capacity();
do {
Slice f = sendBuffer4Polls.peekUnsafe();
if (remaining <= f.length) {
writeBuffer.put(f.buffer, f.offset, remaining);
f.offset += remaining;
f.length -= remaining;
break;
}
else {
writeBuffer.put(f.buffer, f.offset, f.length);
remaining -= f.length;
freeBuffer.offer(sendBuffer4Polls.pollUnsafe());
}
}
while (--size > 0);
/*
* switch to the read mode.
*/
writeBuffer.flip();
}
}
/*
* switch back to fill mode.
*/
writeBuffer.clear();
synchronized (this) {
if (sendBuffer4Polls.isEmpty()) {
if (sendBuffer4Offers == sendBuffer4Polls) {
key.interestOps(key.interestOps() & ~SelectionKey.OP_WRITE);
write = false;
}
else {
sendBuffer4Polls = sendBuffer4Offers;
}
}
}
}
public boolean send(byte[] array)
{
return send(array, 0, array.length);
}
public boolean send(byte[] array, int offset, int len)
{
Slice f;
if (freeBuffer.isEmpty()) {
f = new Slice(array, offset, len);
}
else {
f = freeBuffer.pollUnsafe();
f.buffer = array;
f.offset = offset;
f.length = len;
}
if (sendBuffer4Offers.offer(f)) {
synchronized (this) {
if (!write) {
key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
write = true;
}
}
return true;
}
if (sendBuffer4Offers.capacity() != MAX_SENDBUFFER_SIZE) {
synchronized (this) {
if (sendBuffer4Offers == sendBuffer4Polls) {
logger.debug("allocating new sendBuffer4Offers of size {} for {}", sendBuffer4Offers.size(), this);
sendBuffer4Offers = new CircularBuffer<Slice>(sendBuffer4Offers.capacity() << 1);
sendBuffer4Offers.add(f);
if (!write) {
key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
write = true;
}
return true;
}
}
}
logger.debug("sendBuffer for Offers = {}, socket = {}", sendBuffer4Offers, key.channel());
return false;
}
@Override
public void handleException(Exception cce, DefaultEventLoop el)
{
logger.debug("Collecting exception in {}", throwables.size(), cce);
throwables.offer(NetletThrowable.Util.rewrap(cce, el));
}
public abstract ByteBuffer buffer();
public abstract void read(int len);
@Override
public synchronized void unregistered(SelectionKey key)
{
final CircularBuffer<Slice> SEND_BUFFER = sendBuffer4Offers;
sendBuffer4Offers = new CircularBuffer<Slice>(0)
{
@Override
public boolean isEmpty()
{
return SEND_BUFFER.isEmpty();
}
@Override
public boolean offer(Slice e)
{
throw new RuntimeException("client does not own the socket any longer!");
}
@Override
public int size()
{
return SEND_BUFFER.size();
}
@Override
public Slice pollUnsafe()
{
return SEND_BUFFER.pollUnsafe();
}
@Override
public Slice peekUnsafe()
{
return SEND_BUFFER.peekUnsafe();
}
};
}
private static final Logger logger = LoggerFactory.getLogger(AbstractClient.class);
}
|
package org.usfirst.frc157.ProtoBot2017.commands;
import org.opencv.core.Point;
import org.usfirst.frc157.ProtoBot2017.Robot;
import org.usfirst.frc157.ProtoBot2017.subsystems.Drive.DriveMode;
import org.usfirst.frc157.ProtoBot2017.subsystems.Vision;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
public class AlignForShot extends Command {
private final double PID_P = 0.05; //note currently no I or D
public enum ShotRangeCommand
{
NEAR,
FAR,
AUTO
}
public enum AcquisitionType
{
LEFT,
RIGHT,
AUTO,
NONE
}
private enum TargettingState
{
ROTATE,
RANGE,
ACQUIRE,
STOP
}
ShotRangeCommand selectedRange;
Point spot;
DriveMode preCommandDriveMode;
AcquisitionType acquisitionType;
Vision.VisionTarget lastTarget;
public AlignForShot(ShotRangeCommand inSelectedRange) {
selectedRange = inSelectedRange;
// Use requires() here to declare subsystem dependencies
requires(Robot.drive);
acquisitionType = AcquisitionType.NONE;
}
public AlignForShot(ShotRangeCommand inSelectedRange, AcquisitionType acquisitionType) {
selectedRange = inSelectedRange;
// Use requires() here to declare subsystem dependencies
requires(Robot.drive);
this.acquisitionType = acquisitionType;
}
// Called just before this Command runs the first time
protected void initialize() {
System.out.println("Starting AlignForShot(" + selectedRange.toString() + ")");
switch(selectedRange)
{
case NEAR:
{
Robot.vision.setVisionMode(Vision.VisionMode.FIND_BOILER, Vision.BoilerRange.NEAR);
spot = Robot.vision.getBoilerTargetCenter(Vision.BoilerRange.NEAR);
} break;
case FAR:
{
Robot.vision.setVisionMode(Vision.VisionMode.FIND_BOILER, Vision.BoilerRange.FAR);
spot = Robot.vision.getBoilerTargetCenter(Vision.BoilerRange.FAR);
} break;
case AUTO:
{
Robot.vision.setVisionMode(Vision.VisionMode.FIND_BOILER, Robot.vision.getBoilerTargetRange());
spot = Robot.vision.getBoilerTargetCenter(Robot.vision.getBoilerTargetRange());
} break;
}
preCommandDriveMode = Robot.drive.getDriveMode();
Robot.vision.storePictures();
Robot.drive.setDriveMode(DriveMode.ROBOT_RELATIVE);
state = TargettingState.STOP;
lastTarget = Robot.vision.getTarget();
}
private static double ROTATION_TOLERANCE = 10;
private static double ROT_FRACTION = 0.05;
private static double ROT_SPEED = 0.125;
private static double DISTANCE_TOLERANCE = 10;
private static double DIST_FRACTION = 0.05;
private static double DIST_SPEED = 0.125;
private static double ACQ_SPEED = 0.125;
private double dRot;
private double rotTime;
private double dDist;
private double distTime;
private double acquireTime;
private TargettingState state;
double stateChangeTime;
// Called repeatedly when this Command is scheduled to run
protected void execute() {
Vision.VisionTarget target = Robot.vision.getTarget();
if((target.loopCount != lastTarget.loopCount) && (target.inRange == true) && (state == TargettingState.STOP))
{
// Got new target update
// sort out how far to move this time
dRot = target.x - spot.x;
dDist = target.y - spot.y;
if(Math.abs(dRot) >= ROTATION_TOLERANCE)
{
state = TargettingState.ROTATE;
rotTime = dRot * ROT_FRACTION;
stateChangeTime = Timer.getFPGATimestamp();
}
else if(Math.abs(dDist) > DISTANCE_TOLERANCE)
{
state = TargettingState.RANGE;
distTime = dDist * DIST_FRACTION;
stateChangeTime = Timer.getFPGATimestamp();
}
else
{
state = TargettingState.STOP;
stateChangeTime = Timer.getFPGATimestamp();
}
}
else if(target.inRange == false)
{
// new target is not found
state = TargettingState.ACQUIRE;
stateChangeTime = Timer.getFPGATimestamp();
}
else
{
// work with old target
// so no changes to anything
}
switch(state)
{
case ROTATE:
{
if(Timer.getFPGATimestamp() > stateChangeTime + rotTime)
{
Robot.drive.driveBot(0, 0, ROT_SPEED * Math.signum(dRot));
}
else
{
state = TargettingState.STOP;
stateChangeTime = Timer.getFPGATimestamp();
}
} break;
case RANGE:
{
if(Timer.getFPGATimestamp() > stateChangeTime + distTime)
{
Robot.drive.driveBot(0, DIST_SPEED * Math.signum(dDist), 0);
}
else
{
state = TargettingState.STOP;
stateChangeTime = Timer.getFPGATimestamp();
}
} break;
case ACQUIRE:
{
double cmdRot = ACQ_SPEED;
switch(acquisitionType)
{
case LEFT:
{
// turn left
cmdRot = cmdRot;
}
break;
case RIGHT:
{
// turn right
cmdRot = -cmdRot;
}
break;
case AUTO:
{
switch(DriverStation.getInstance().getAlliance())
{
case Red:
{
// turn right
cmdRot = -cmdRot;
} break;
case Blue:
{
//turn left
cmdRot = cmdRot;
} break;
case Invalid:
{
// it's not valid, but we should do something.
// turn left
cmdRot = cmdRot;
} break;
}
}
break;
case NONE:
{
// Should never get here but turn left at half speed anyway
cmdRot = cmdRot/2;
}
break;
}
Robot.drive.driveBot(0, 0, cmdRot);
if(target.inRange == true)
{
state = TargettingState.STOP;
stateChangeTime = Timer.getFPGATimestamp();
}
} break;
case STOP:
{
Robot.drive.driveBot(0, 0, 0); // stop - consider setting brake mode on for this
} break;
}
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
return false;
}
// Called once after isFinished returns true
protected void end() {
Robot.drive.setDriveMode(preCommandDriveMode);
System.out.println("AlignForShot.end()");
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
Robot.drive.setDriveMode(preCommandDriveMode);
System.out.println("AlignForShot.interrupted()");
}
}
|
package com.forter.monitoring;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import com.aphyr.riemann.client.RiemannClient;
import com.forter.monitoring.utils.Discovery;
import com.google.common.base.Throwables;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
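/**
 * Reports TestNG results to Riemann as events. One typical way to enable a listener like this
 * (an illustrative sketch, not taken from this repository) is via testng.xml:
 * <pre>
 * {@code
 * <listeners>
 *   <listener class-name="com.forter.monitoring.RiemannListener"/>
 * </listeners>
 * }
 * </pre>
 */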
public class RiemannListener extends TestListenerAdapter{
private String riemannIP;
private String machineName;
private final int riemannPort = 5555;
private final int eventTTL = 20;
private RiemannClient client;
private String description;
public void connect() {
if (client == null) {
try {
machineName = Discovery.instance().getMachineName();
riemannIP = Discovery.instance().getRiemannIP(machineName);
client = RiemannClient.tcp(riemannIP, riemannPort);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
try {
// initializes client, connection is actually async
client.connect();
}
catch (IOException e) {
throw Throwables.propagate(e);
}
}
}
    public void sendEvent(ITestResult tr, String state) {
        // Only report when running on AWS; otherwise the Riemann client is never connected and must not be used.
        if (!Discovery.instance().isAWS()) {
            return;
        }
        connect();
        if (state.equals("failed")) {
            StringWriter errors = new StringWriter();
            tr.getThrowable().printStackTrace(new PrintWriter(errors));
            description = errors.toString();
        } else {
            description = null;
        }
        client.event().
                service(machineName + " " + tr.getInstanceName() + "-" + tr.getName()).
                state(state).
                tags("javatests").
                description(description).
                ttl(eventTTL).
                send();
    }
@Override
public void onTestFailure(ITestResult tr) {
sendEvent(tr, "failed");
}
@Override
public void onConfigurationFailure(ITestResult tr) {
sendEvent(tr, "failed");
}
@Override
public void onConfigurationSkip(ITestResult tr) {
sendEvent(tr, "skipped");
}
@Override
public void onConfigurationSuccess(ITestResult tr) {
sendEvent(tr, "passed");
}
@Override
public void onTestSkipped(ITestResult tr) {
sendEvent(tr, "skipped");
}
@Override
public void onTestSuccess(ITestResult tr) {
sendEvent(tr, "passed");
}
}
|
package com.github.aesteve.vertx.nubes;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.AuthProvider;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.sockjs.SockJSHandlerOptions;
import io.vertx.ext.web.templ.TemplateEngine;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import com.github.aesteve.vertx.nubes.auth.AuthMethod;
import com.github.aesteve.vertx.nubes.context.RateLimit;
import com.github.aesteve.vertx.nubes.exceptions.MissingConfigurationException;
import com.github.aesteve.vertx.nubes.handlers.AnnotationProcessorRegistry;
import com.github.aesteve.vertx.nubes.handlers.Processor;
import com.github.aesteve.vertx.nubes.marshallers.PayloadMarshaller;
import com.github.aesteve.vertx.nubes.reflections.RouteRegistry;
import com.github.aesteve.vertx.nubes.reflections.injectors.annot.AnnotatedParamInjectorRegistry;
import com.github.aesteve.vertx.nubes.reflections.injectors.typed.TypedParamInjectorRegistry;
import com.github.aesteve.vertx.nubes.services.ServiceRegistry;
import io.vertx.ext.web.templ.impl.HandlebarsTemplateEngineImpl;
import io.vertx.ext.web.templ.impl.JadeTemplateEngineImpl;
import io.vertx.ext.web.templ.impl.MVELTemplateEngineImpl;
import io.vertx.ext.web.templ.impl.ThymeleafTemplateEngineImpl;
public class Config {
private Config() {
bundlesByLocale = new HashMap<>();
globalHandlers = new ArrayList<>();
templateEngines = new HashMap<>();
sockJSOptions = new SockJSHandlerOptions();
marshallers = new HashMap<>();
}
public JsonObject json;
public String srcPackage;
public List<String> controllerPackages;
public List<String> fixturePackages;
public String verticlePackage;
public String domainPackage;
public RateLimit rateLimit;
public String webroot;
public String assetsPath;
public String tplDir;
public boolean displayErrors;
public Vertx vertx;
public AuthProvider authProvider;
public AuthMethod authMethod;
public String i18nDir;
public AnnotationProcessorRegistry apRegistry;
public Map<Class<? extends Annotation>, Set<Handler<RoutingContext>>> annotationHandlers;
public Map<Class<?>, Processor> typeProcessors;
public TypedParamInjectorRegistry typeInjectors;
public AnnotatedParamInjectorRegistry annotInjectors;
public ServiceRegistry serviceRegistry;
public RouteRegistry routeRegistry;
public Map<Class<?>, Handler<RoutingContext>> paramHandlers;
public Map<String, Handler<RoutingContext>> aopHandlerRegistry;
public Map<Locale, ResourceBundle> bundlesByLocale;
public List<Handler<RoutingContext>> globalHandlers;
public Map<String, TemplateEngine> templateEngines;
public SockJSHandlerOptions sockJSOptions;
public Map<String, PayloadMarshaller> marshallers;
/**
* TODO : check config instead of throwing exceptions
*
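     * <p>
     * A minimal usage sketch (key names are taken from the code below; the values and the
     * {@code vertx} instance are illustrative only):
     *
     * <pre>
     * {@code
     * JsonObject json = new JsonObject()
     *         .put("src-package", "com.example.app")
     *         .put("controller-packages", new JsonArray().add("com.example.app.controllers"))
     *         .put("templates", new JsonArray().add("hbs").add("thymeleaf"))
     *         .put("throttling", new JsonObject()
     *                 .put("count", 100).put("time-frame", 1).put("time-unit", "MINUTES"))
     *         .put("display-errors", false);
     * Config config = Config.fromJsonObject(json, vertx);
     * }
     * </pre>
     *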
* @param json
* @return config
*/
@SuppressWarnings("unchecked")
public static Config fromJsonObject(JsonObject json, Vertx vertx) {
Config instance = new Config();
JsonObject services;
JsonArray templates;
instance.json = json;
instance.srcPackage = json.getString("src-package","src.package");
instance.vertx = vertx;
instance.i18nDir = json.getString("i18nDir", "web/i18n/");
if (!instance.i18nDir.endsWith("/")) {
instance.i18nDir = instance.i18nDir + "/";
}
JsonArray controllers = json.getJsonArray("controller-packages",new JsonArray().add(instance.srcPackage + ".controllers"));
instance.controllerPackages = controllers.getList();
instance.verticlePackage = json.getString("verticle-package",instance.srcPackage + ".verticles");
// instance.domainPackage = json.getString("domain-package",instance.srcPackage + ".domains");
JsonArray fixtures = json.getJsonArray("fixture-packages",new JsonArray().add(instance.srcPackage + ".fixtures"));
instance.fixturePackages = fixtures.getList();
//register services included in config
services = json.getJsonObject("services", new JsonObject());
instance.serviceRegistry = new ServiceRegistry(vertx);
for (Map.Entry<String, Object> service : services) {
String name = service.getKey();
String className = (String)service.getValue();
try {
Class<?> clazz = Class.forName(className);
instance.serviceRegistry.registerService(name, clazz.newInstance());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
e.printStackTrace();
}
}
templates = json.getJsonArray("templates", new JsonArray());
//Register templateEngines for extensions added in config
if(templates.contains("hbs")) {
instance.templateEngines.put("hbs", new HandlebarsTemplateEngineImpl());
}
if(templates.contains("jade")) {
instance.templateEngines.put("jade", new JadeTemplateEngineImpl());
}
if(templates.contains("templ")){
instance.templateEngines.put("templ", new MVELTemplateEngineImpl());
}
if(templates.contains("thymeleaf")){
instance.templateEngines.put("html", new ThymeleafTemplateEngineImpl());
}
JsonObject rateLimitJson = json.getJsonObject("throttling");
if (rateLimitJson != null) {
int count = rateLimitJson.getInteger("count");
int value = rateLimitJson.getInteger("time-frame");
TimeUnit timeUnit = TimeUnit.valueOf(rateLimitJson.getString("time-unit"));
instance.rateLimit = new RateLimit(count, value, timeUnit);
}
instance.webroot = json.getString("webroot", "web/assets");
instance.assetsPath = json.getString("static-path", "/assets");
instance.tplDir = json.getString("views-dir", "web/views");
instance.displayErrors = json.getBoolean("display-errors", Boolean.FALSE);
// TODO : read sockJSOptions from config
return instance;
}
public ResourceBundle getResourceBundle(Locale loc) {
return bundlesByLocale.get(loc);
}
}
|
package com.github.davidmoten.rx;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.github.davidmoten.rx.internal.operators.OperatorBufferEmissions;
import com.github.davidmoten.rx.internal.operators.OperatorDoOnNth;
import com.github.davidmoten.rx.internal.operators.OperatorFromTransformer;
import com.github.davidmoten.rx.internal.operators.OperatorOrderedMerge;
import com.github.davidmoten.rx.internal.operators.TransformerStateMachine;
import com.github.davidmoten.rx.util.MapWithIndex;
import com.github.davidmoten.rx.util.MapWithIndex.Indexed;
import com.github.davidmoten.rx.util.Pair;
import rx.Observable;
import rx.Observable.Operator;
import rx.Observable.Transformer;
import rx.Observer;
import rx.functions.Action1;
import rx.functions.Action2;
import rx.functions.Func0;
import rx.functions.Func1;
import rx.functions.Func2;
import rx.functions.Func3;
public final class Transformers {
public static <T, R> Operator<R, T> toOperator(
Func1<? super Observable<T>, ? extends Observable<R>> function) {
return OperatorFromTransformer.toOperator(function);
}
public static <T extends Number> Transformer<T, Statistics> collectStats() {
return new Transformer<T, Statistics>() {
@Override
public Observable<Statistics> call(Observable<T> o) {
return o.scan(Statistics.create(), Functions.collectStats());
}
};
}
public static <T, R extends Number> Transformer<T, Pair<T, Statistics>> collectStats(
final Func1<? super T, ? extends R> function) {
return new Transformer<T, Pair<T, Statistics>>() {
@Override
public Observable<Pair<T, Statistics>> call(Observable<T> source) {
return source.scan(Pair.create((T) null, Statistics.create()),
new Func2<Pair<T, Statistics>, T, Pair<T, Statistics>>() {
@Override
public Pair<T, Statistics> call(Pair<T, Statistics> pair, T t) {
return Pair.create(t, pair.b().add(function.call(t)));
}
}).skip(1);
}
};
}
public static <T extends Comparable<? super T>> Transformer<T, T> sort() {
return new Transformer<T, T>() {
@Override
public Observable<T> call(Observable<T> o) {
return o.toSortedList().flatMapIterable(Functions.<List<T>> identity());
}
};
}
public static <T> Transformer<T, T> sort(final Comparator<? super T> comparator) {
return new Transformer<T, T>() {
@Override
public Observable<T> call(Observable<T> o) {
return o.toSortedList(Functions.toFunc2(comparator))
.flatMapIterable(Functions.<List<T>> identity());
}
};
}
public static <T> Transformer<T, Set<T>> toSet() {
return new Transformer<T, Set<T>>() {
@Override
public Observable<Set<T>> call(Observable<T> o) {
return o.collect(new Func0<Set<T>>() {
@Override
public Set<T> call() {
return new HashSet<T>();
}
}, new Action2<Set<T>, T>() {
@Override
public void call(Set<T> set, T t) {
set.add(t);
}
});
}
};
}
public static <T> Transformer<T, Indexed<T>> mapWithIndex() {
return MapWithIndex.instance();
}
/**
* Returns a {@link Transformer} that allows processing of the source stream
* to be defined in a state machine where transitions of the state machine
* may also emit items to downstream that are buffered if necessary when
* backpressure is requested. <code>flatMap</code> is part of the processing
* chain so the source may experience requests for more items than are
* strictly required by the endpoint subscriber.
*
* <p>
* Internally this transformer uses {@link Observable#scan} emitting a
* stream of new states composed with emissions from the transition to each
* state and {@link Observable#flatMap} to emit the recorded emissions with
* backpressure.
*
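     * <p>
     * A minimal usage sketch (hypothetical variable names, Java 8 lambdas used for brevity) that
     * emits the running total of an integer stream:
     *
     * <pre>
     * {@code
     * Observable<Integer> totals = Observable.just(1, 2, 3)
     *         .compose(Transformers.<Integer, Integer, Integer> stateMachine(
     *                 () -> 0,
     *                 (sum, x, observer) -> {
     *                     int newSum = sum + x;
     *                     observer.onNext(newSum);
     *                     return newSum;
     *                 },
     *                 (sum, observer) -> {
     *                     // nothing buffered in the state, so nothing to flush on completion
     *                 }));
     * // totals emits 1, 3, 6
     * }
     * </pre>
     *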
* @param initialStateFactory
* the factory to create the initial state of the state machine.
* @param transition
* defines state transitions and consequent emissions to
* downstream when an item arrives from upstream
* @param completionAction
* defines activity that should happen based on the final state
* just before downstream <code>onCompleted()</code> is called.
* For example any buffered emissions in state could be emitted
* at this point. Don't call <code>observer.onCompleted()</code>
* as it is called for you after the action completes.
* @param <State>
* the class representing the state of the state machine
* @param <In>
* the input observable type
* @param <Out>
* the output observable type
* @throws NullPointerException
* if {@code initialStateFactory} or {@code transition},or
* {@code completionAction} is null
* @return a backpressure supporting transformer that implements the state
* machine specified by the parameters
*/
public static <State, In, Out> Transformer<In, Out> stateMachine(
Func0<State> initialStateFactory,
Func3<? super State, ? super In, ? super Observer<Out>, ? extends State> transition,
Action2<? super State, ? super Observer<Out>> completionAction) {
return TransformerStateMachine.<State, In, Out> create(initialStateFactory, transition,
completionAction);
}
/**
* Returns a {@link Transformer} that allows processing of the source stream
* to be defined in a state machine where transitions of the state machine
* may also emit items to downstream that are buffered if necessary when
* backpressure is requested. <code>flatMap</code> is part of the processing
* chain so the source may experience requests for more items than are
* strictly required by the endpoint subscriber.
*
* <p>
* Internally this transformer uses {@link Observable#scan} emitting a
* stream of new states composed with emissions from the transition to each
* state and {@link Observable#flatMap} to emit the recorded emissions with
* backpressure.
*
* @param initialState
* the initial state of the state machine.
* @param transition
* defines state transitions and consequent emissions to
* downstream when an item arrives from upstream
* @param completionAction
* defines activity that should happen based on the final state
* just before downstream <code>onCompleted()</code> is called.
* For example any buffered emissions in state could be emitted
* at this point. Don't call <code>observer.onCompleted()</code>
* as it is called for you after the action completes.
* @param <State>
* the class representing the state of the state machine
* @param <In>
* the input observable type
* @param <Out>
* the output observable type
* @throws NullPointerException
* if {@code transition} or {@code completionAction} is null
* @return a backpressure supporting transformer that implements the state
* machine specified by the parameters
*/
public static <State, In, Out> Transformer<In, Out> stateMachine(State initialState,
Func3<? super State, ? super In, ? super Observer<Out>, ? extends State> transition,
Action2<? super State, ? super Observer<Out>> completionAction) {
Func0<State> f = Functions.constant0(initialState);
return TransformerStateMachine.<State, In, Out> create(f, transition, completionAction);
}
/**
* Returns a {@link Transformer} that allows processing of the source stream
* to be defined in a state machine where transitions of the state machine
* may also emit items to downstream that are buffered if necessary when
* backpressure is requested. <code>flatMap</code> is part of the processing
* chain so the source may experience requests for more items than are
* strictly required by the endpoint subscriber. This overload uses a do
* nothing {@code completionAction} which may leave some emissions recorded
* in State as unemitted.
*
* <p>
* Internally this transformer uses {@link Observable#scan} emitting a
* stream of new states composed with emissions from the transition to each
* state and {@link Observable#flatMap} to emit the recorded emissions with
* backpressure.
*
* @param initialState
* the initial state of the state machine.
* @param transition
* defines state transitions and consequent emissions to
* downstream when an item arrives from upstream
* @param <State>
* the class representing the state of the state machine
* @param <In>
* the input observable type
* @param <Out>
* the output observable type
* @throws NullPointerException
* if {@code initialState} or {@code transition} is null
* @return a backpressure supporting transformer that implements the state
* machine specified by the parameters
*/
public static <State, In, Out> Transformer<In, Out> stateMachine(State initialState,
Func3<? super State, ? super In, ? super Observer<Out>, ? extends State> transition) {
Func0<State> f = Functions.constant0(initialState);
return TransformerStateMachine.<State, In, Out> create(f, transition,
new Action2<State, Observer<Out>>() {
@Override
public void call(State state, Observer<Out> observer) {
// do nothing
}
});
}
@SuppressWarnings("unchecked")
public static <T> Transformer<T, T> bufferEmissions() {
return (Transformer<T, T>) BufferEmissionsHolder.INSTANCE;
}
// holder lazy singleton pattern
private static class BufferEmissionsHolder {
static Transformer<Object, Object> INSTANCE = new Transformer<Object, Object>() {
@Override
public Observable<Object> call(Observable<Object> o) {
return o.lift(new OperatorBufferEmissions<Object>());
}
};
}
/**
* Returns the source {@link Observable} merged with the <code>other</code>
* observable using the given {@link Comparator} for order. A precondition
* is that the source and other are already ordered. This transformer does
* not support backpressure but its inputs must support backpressure. If you
* need backpressure support then compose with
* <code>.onBackpressureXXX</code>.
*
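     * <p>
     * A minimal sketch (hypothetical variable names, Java 8 lambda for the comparator):
     *
     * <pre>
     * {@code
     * Observable<Integer> odds = Observable.just(1, 3, 5);
     * Observable<Integer> evens = Observable.just(2, 4, 6);
     * odds.compose(Transformers.orderedMergeWith(evens, (a, b) -> Integer.compare(a, b)));
     * // emits 1, 2, 3, 4, 5, 6
     * }
     * </pre>
     *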
* @param other
* the other already ordered observable
* @param comparator
* the ordering to use
* @param <T>
* the generic type of the objects being compared
* @return merged and ordered observable
*/
public static final <T> Transformer<T, T> orderedMergeWith(final Observable<T> other,
final Func2<? super T, ? super T, Integer> comparator) {
return new Transformer<T, T>() {
@Override
public Observable<T> call(Observable<T> source) {
return source.lift(new OperatorOrderedMerge<T>(other, comparator));
}
};
}
/**
* Returns a {@link Transformer} that returns an {@link Observable} that is
* a buffering of the source Observable into lists of sequential items that
* are equal.
*
* <p>
* For example, the stream
* {@code Observable.just(1, 1, 2, 2, 1).compose(toListUntilChanged())}
* would emit {@code [1,1], [2], [1]}.
*
* @param <T>
* the generic type of the source Observable
* @return transformer as above
*/
public static <T> Transformer<T, List<T>> toListUntilChanged() {
Func2<Collection<T>, T, Boolean> equal = HolderEquals.instance();
return toListWhile(equal);
}
private static class HolderEquals {
private static final Func2<Collection<Object>, Object, Boolean> INSTANCE = new Func2<Collection<Object>, Object, Boolean>() {
@Override
public Boolean call(Collection<Object> list, Object t) {
return list.isEmpty() || list.iterator().next().equals(t);
}
};
@SuppressWarnings("unchecked")
static <T> Func2<Collection<T>, T, Boolean> instance() {
return (Func2<Collection<T>, T, Boolean>) (Func2<?, ?, Boolean>) INSTANCE;
}
}
/**
* Returns a {@link Transformer} that returns an {@link Observable} that is
* a buffering of the source Observable into lists of sequential items that
* satisfy the condition {@code condition}.
*
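     * <p>
     * For example (a sketch using a Java 8 lambda),
     * {@code Observable.just(1, 1, 2).compose(Transformers.<Integer> toListWhile((list, t) -> list.size() < 2))}
     * would emit {@code [1, 1], [2]}.
     *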
* @param condition
* condition function that must return true if an item is to be
* part of the list being prepared for emission
* @param <T>
* the generic type of the source Observable
* @return transformer as above
*/
public static <T> Transformer<T, List<T>> toListWhile(
final Func2<? super List<T>, ? super T, Boolean> condition) {
Func0<List<T>> initialState = new Func0<List<T>>() {
@Override
public List<T> call() {
return new ArrayList<T>();
}
};
Action2<List<T>, T> collect = new Action2<List<T>, T>() {
@Override
public void call(List<T> list, T n) {
list.add(n);
}
};
return collectWhile(initialState, collect, condition);
}
public static <T, R extends Collection<T>> Transformer<T, R> collectWhile(
final Func0<R> factory, final Action2<? super R, ? super T> collect) {
return collectWhile(factory, collect, HolderEquals.<T> instance());
}
public static <T, R extends Collection<T>> Transformer<T, R> collectWhile(
final Func0<R> factory, final Action2<? super R, ? super T> collect,
final Func2<? super R, ? super T, Boolean> condition) {
Func3<R, T, Observer<R>, R> transition = new Func3<R, T, Observer<R>, R>() {
@Override
public R call(R collection, T t, Observer<R> observer) {
if (condition.call(collection, t)) {
collect.call(collection, t);
return collection;
} else {
observer.onNext(collection);
R r = factory.call();
collect.call(r, t);
return r;
}
}
};
Action2<R, Observer<R>> completionAction = new Action2<R, Observer<R>>() {
@Override
public void call(R collection, Observer<R> observer) {
if (!collection.isEmpty()) {
observer.onNext(collection);
}
}
};
return Transformers.stateMachine(factory, transition, completionAction);
}
/**
* Returns a {@link Transformer} that applied to a source {@link Observable}
* calls the given action on the {@code n}th onNext emission.
*
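     * <p>
     * For example (a sketch using a Java 8 method reference),
     * {@code Observable.just("a", "b", "c").compose(Transformers.<String> doOnNext(2, System.out::println))}
     * prints only {@code "b"}.
     *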
* @param n
* the 1-based count of onNext to do the action on
* @param action
* is performed on {@code n}th onNext.
* @return Transformer that applied to a source Observable calls the given
* action on the nth onNext emission.
*/
public static <T> Transformer<T, T> doOnNext(final int n, final Action1<? super T> action) {
return new Transformer<T, T>() {
@Override
public Observable<T> call(Observable<T> o) {
return o.lift(OperatorDoOnNth.create(action, n));
}
};
}
/**
 * Returns a {@link Transformer} that, applied to a source {@link Observable},
* calls the given action on the first onNext emission.
*
* @param action
* is performed on first onNext.
* @return Transformer that applied to a source Observable calls the given
* action on the first onNext emission.
*/
public static <T> Transformer<T, T> doOnFirst(final Action1<? super T> action) {
return doOnNext(1, action);
}
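    /**
     * Returns a {@link Transformer} that ignores all emissions of the source
     * Observable and, once the source completes, continues with the emissions
     * of {@code next}.
     *
     * @param next
     *            the Observable whose emissions follow the source's completion
     * @param <T>
     *            the generic type of the source Observable
     * @param <R>
     *            the generic type of {@code next}
     * @return transformer as above
     */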
public static <R, T> Transformer<T, R> ignoreElementsThen(final Observable<R> next) {
return new Transformer<T, R>() {
@SuppressWarnings("unchecked")
@Override
public Observable<R> call(Observable<T> source) {
return ((Observable<R>) (Observable<?>) source.ignoreElements()).concatWith(next);
}
};
}
}
|
package com.github.emmanueltouzery.crony;
import java.time.DayOfWeek;
import java.time.ZonedDateTime;
import java.time.Month;
import javaslang.collection.List;
import javaslang.collection.Set;
import javaslang.control.Validation;
/**
* The cron class represents a cron specification.
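 * <p>
 * Illustrative usage (a sketch assuming the standard five-field crontab
 * format "minute hour day-of-month month day-of-week"):
 * <pre>{@code
 * Validation<String, Cron> cron = Cron.parseCronString("0 12 * * *");
 * boolean atNoon = cron.get().isMatch(ZonedDateTime.parse("2020-06-01T12:00:00Z"));
 * }</pre>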
*/
public class Cron
{
/**
* the month part of the cron specification
*/
public final MonthSpec monthSpec;
/**
* the day of month part of the cron specification
*/
public final DayOfMonthSpec dayOfMonthSpec;
/**
* the day of week part of the cron specification
*/
public final DayOfWeekSpec dayOfWeekSpec;
/**
* the hour of day part of the cron specification
*/
public final HourSpec hourSpec;
/**
* the minute of day part of the cron specification
*/
public final MinSpec minSpec;
private Cron(MinSpec minSpec,
HourSpec hourSpec,
DayOfMonthSpec dayOfMonthSpec,
MonthSpec monthSpec,
DayOfWeekSpec dayOfWeekSpec) {
this.monthSpec = monthSpec;
this.dayOfMonthSpec = dayOfMonthSpec;
this.dayOfWeekSpec = dayOfWeekSpec;
this.hourSpec = hourSpec;
this.minSpec = minSpec;
}
/**
 * Programmatically create a Cron specification.
* @param minutes minutes of the hour (0-59)
* @param hours hours of the day (0-23)
* @param daysOfMonth the days of the month (1-31). To specify the
* last day of the month, use {@link DayOfMonthSpec#LAST_DAY_OF_MONTH}
* @param months months of the year
* @param daysOfWeek the days of the week
* @return a Cron object or an error message
*/
public static Validation<String, Cron> build(
Set<Integer> minutes, Set<Integer> hours,
Set<Integer> daysOfMonth, Set<Month> months,
Set<DayOfWeek> daysOfWeek) {
return Validation.combine(
MinSpec.build(minutes),
HourSpec.build(hours),
DayOfMonthSpec.build(daysOfMonth),
MonthSpec.build(months),
DayOfWeekSpec.build(daysOfWeek))
.ap(Cron::new).leftMap(l -> l.mkString(", "));
}
/**
* Build a Cron specification from a cron string specification
* @param cronString a cron string specification to parse
* @return a Cron object or an error message
*/
public static Validation<String, Cron> parseCronString(String cronString) {
return Javaslang.splitValidate(cronString, " ", 5)
.flatMap(pieces ->
Validation.combine(
MinSpec.parse(pieces[0]),
HourSpec.parse(pieces[1]),
DayOfMonthSpec.parse(pieces[2]),
MonthSpec.parse(pieces[3]),
DayOfWeekSpec.parse(pieces[4]))
.ap(Cron::new).leftMap(l -> l.mkString(", ")));
}
/**
* Generate a cron string specification from the cron specification.
* @return a cron string specification as used in crontab
*/
public String toCronString() {
return List.of(minSpec.minutes,
hourSpec.hours,
dayOfMonthSpec.daysOfMonthFormattedSet(),
monthSpec.monthsFormattedSet(),
dayOfWeekSpec.daysOfWeekFormattedSet())
.map(set -> set.isEmpty() ? "*" : set.mkString(","))
.mkString(" ");
}
/**
* Will return true if the date time given is a match
* for this cron specification (in other words if
* execution would trigger at that exact date and time)
* @param dateTime the date time to test
* @return true if the cron would execute at that dateTime
*/
public boolean isMatch(ZonedDateTime dateTime) {
return dateTime.getSecond() == 0 &&
dateTime.getNano() == 0 &&
minSpec.isMatch(dateTime) &&
hourSpec.isMatch(dateTime) &&
isDayMatch(dateTime);
}
/*package*/ boolean isDayMatch(ZonedDateTime datetime) {
        // The day of a command's execution can be specified in two fields:
        // day of month and day of week. If both fields are restricted
        // (i.e., do not contain the "*" character), the command will be run
        // when either field matches the current time.
boolean bothDayMonthWeekSpecified =
!dayOfMonthSpec.monthDays.isEmpty() && !dayOfWeekSpec.days.isEmpty();
boolean isDayMatch = bothDayMonthWeekSpecified
? dayOfMonthSpec.isMatch(datetime) || dayOfWeekSpec.isMatch(datetime)
: dayOfMonthSpec.isMatch(datetime) && dayOfWeekSpec.isMatch(datetime);
return monthSpec.isMatch(datetime) && isDayMatch;
}
}
|
package com.google.sps.data;
import com.google.maps.GeoApiContext;
import com.google.maps.PlacesApi;
import com.google.maps.NearbySearchRequest;
import com.google.maps.TextSearchRequest;
import com.google.maps.model.LatLng;
import com.google.maps.model.LocationType;
import com.google.maps.model.Photo;
import com.google.maps.model.PlaceType;
import com.google.maps.model.RankBy;
import com.google.maps.model.Geometry;
import com.google.maps.model.PlacesSearchResponse;
import com.google.maps.model.PlacesSearchResult;
import com.google.maps.model.PlaceDetails;
import com.google.maps.PlaceDetailsRequest;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.api.services.customsearch.model.Search;
import com.google.api.services.customsearch.model.Result;
import com.google.api.services.customsearch.Customsearch;
import com.google.api.services.customsearch.CustomsearchRequestInitializer;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.maps.errors.ApiException;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.EntityNotFoundException;
import io.github.cdimascio.dotenv.Dotenv;
import java.util.List;
import java.util.LinkedList;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.logging.Logger;
import java.security.GeneralSecurityException;
import java.io.IOException;
import java.lang.Integer;
import java.util.Iterator;
/** BusinessesService object representing the businesses
 * component of the webapp.
**/
public class BusinessesService {
private Dotenv dotenv = Dotenv.configure().filename("env").load();
private final String KEY = dotenv.get("APIKEY");
private final static Logger LOGGER =
Logger.getLogger(BusinessesService.class.getName());
private final int ALLOWED_SEARCH_REQUESTS = 3;
private final int MAX_ALLOWED_TEXT_SEARCH_RADIUS = 50000;
  private final int RADIUS_MULTIPLIER = 4;
private final int MAX_NUMBER_OF_RESULTS_PER_REQUEST = 20;
private final int MIN_FOLLOWERS = 50000;
private final int SMALL_BUSINESSES_DISPLAYED = 15;
private final String START_SUBSTRING = "| ";
private final String END_SUBSTRING = "followers";
private final int ALLOWED_NUMBER_OF_MATCHING_BUSINESSES = 5;
private final String BIG_BUSINESSES_DATABASE = "BigBusinesses";
private final String SMALL_BUSINESSES_DATABASE = "SmallBusinesses";
private LatLng latLng;
private List<Listing> allBusinesses;
/** Create a new Businesses instance
* @param allBusinesses businesses from SmallCityService
**/
public BusinessesService(List<Listing> allBusinesses) {
this.allBusinesses = allBusinesses;
}
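  /** Searches the Places API with a text query for the given product near
   * the given location, widening the search radius until enough results are
   * found, and appends each result (with its website URL when available) to
   * the list of businesses.
   * @param mapLocation the location to search around
   * @param product the search term
   * @return the accumulated list of business listings
   **/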
public List<Listing>
getBusinessesFromTextSearch(MapLocation mapLocation, String product) {
latLng = new LatLng(mapLocation.lat, mapLocation.lng);
final GeoApiContext context = new GeoApiContext.Builder()
.apiKey(KEY)
.build();
TextSearchRequest request = PlacesApi.textSearchQuery(context, product);
int radius = 1500;
try {
PlacesSearchResponse response = request.location(latLng)
.radius(radius)
.await();
while (response.results.length < MAX_NUMBER_OF_RESULTS_PER_REQUEST &&
radius < MAX_ALLOWED_TEXT_SEARCH_RADIUS) {
        radius *= RADIUS_MULTIPLIER;
response = request.location(latLng).radius(radius).await();
}
for (int i=0; i<ALLOWED_SEARCH_REQUESTS; i++) {
for(PlacesSearchResult place : response.results) {
String url = getUrlFromPlaceDetails(context, place.placeId);
addListingToBusinesses(place, url);
}
//Maximum of 2 next token requests allowed
if (i < 2) {
Thread.sleep(2000); // Required delay before next API request
response = PlacesApi
.textSearchNextPage(context, response.nextPageToken).await();
}
}
} catch(Exception e) {
LOGGER.warning(e.getMessage());
}
return allBusinesses;
}
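  /** Searches the Places API for stores near the given location, ranked by
   * distance, and appends each result (with its website URL when available)
   * to the list of businesses.
   * @param mapLocation the location to search around
   * @return the accumulated list of business listings
   **/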
public List<Listing> getBusinessesFromNearbySearch(MapLocation mapLocation) {
latLng = new LatLng(mapLocation.lat, mapLocation.lng);
final GeoApiContext context = new GeoApiContext.Builder()
.apiKey(KEY)
.build();
NearbySearchRequest request = PlacesApi.nearbySearchQuery(context, latLng);
try {
PlacesSearchResponse response = request.type(PlaceType.STORE)
.rankby(RankBy.DISTANCE)
.await();
for (int i=0; i<ALLOWED_SEARCH_REQUESTS; i++) {
for(PlacesSearchResult place : response.results) {
String url = getUrlFromPlaceDetails(context, place.placeId);
addListingToBusinesses(place, url);
}
//Maximum of 2 next token requests allowed
if (i < 2) {
Thread.sleep(2000); // Required delay before next API request
response = PlacesApi
.nearbySearchNextPage(context, response.nextPageToken).await();
}
}
} catch(Exception e) {
LOGGER.warning(e.getMessage());
}
return allBusinesses;
}
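  /** Fetches the Place Details for the given place ID and returns its
   * website if one is set, otherwise its Google Maps URL, or an empty
   * string if the request fails.
   **/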
private String getUrlFromPlaceDetails(GeoApiContext context, String placeId) {
try {
PlaceDetails result =
new PlaceDetailsRequest(context).placeId(placeId).await();
if (result.website != null) {
return (result.website.toString());
}
else {
return (result.url.toString());
}
} catch(Exception e) {
LOGGER.warning(e.getMessage());
}
// Place Details failure
return "";
}
private void addListingToBusinesses(PlacesSearchResult place, String url) {
String name = place.name;
String formattedAddress;
if (place.vicinity != null) {
formattedAddress = place.vicinity;
}
else {
formattedAddress = place.formattedAddress;
}
Geometry geometry = place.geometry;
MapLocation placeLocation =
new MapLocation(geometry.location.lat, geometry.location.lng);
double rating = place.rating;
Photo photos[] = place.photos;
String types[] = place.types;
allBusinesses.add(new Listing(name, formattedAddress,
placeLocation, rating, photos, types, url));
}
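  /** Removes businesses classified as big from the accumulated results,
   * stopping once SMALL_BUSINESSES_DISPLAYED small businesses have been
   * kept; any remaining entries are left unchecked.
   * @return the filtered list of business listings
   **/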
public List<Listing> removeBigBusinessesFromResults() {
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
Iterator<Listing> businesses = allBusinesses.iterator();
String businessName = "";
int numberOfSmallBusinesses = 0;
while (businesses.hasNext() &&
(numberOfSmallBusinesses < SMALL_BUSINESSES_DISPLAYED)) {
Listing currentBusiness = businesses.next();
if (determineIfTheCurrentBusinessIsBig(currentBusiness)) {
businesses.remove();
} else {
numberOfSmallBusinesses++;
}
}
return allBusinesses;
}
private boolean determineIfTheCurrentBusinessIsBig(Listing currentBusiness) {
if (checkIfBusinessInDatabase(currentBusiness, SMALL_BUSINESSES_DATABASE)) {
return false;
} else if (checkIfBusinessInDatabase(currentBusiness, BIG_BUSINESSES_DATABASE)) {
return true;
}
if (checkNumberOfSimilarBusinesses(currentBusiness) ||
checkBusinessThroughLinkedin(currentBusiness)) {
addBusinessToDatabase(currentBusiness, BIG_BUSINESSES_DATABASE);
return true;
}
addBusinessToDatabase(currentBusiness, SMALL_BUSINESSES_DATABASE);
return false;
}
private boolean checkIfBusinessInDatabase(Listing currentBusiness,
String databaseEntry) {
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
try {
String businessName =
(String) datastore.get(KeyFactory.createKey(
databaseEntry,
currentBusiness.getName()))
.getProperty("Business");
return true;
} catch (EntityNotFoundException e) {
// Sometimes the small or big business will be in the database,
      // but if not, additional checks will follow to determine its size.
LOGGER.warning(e.getMessage());
}
return false;
}
private boolean checkNumberOfSimilarBusinesses(Listing currentBusiness) {
GeoApiContext context =
new GeoApiContext.Builder().apiKey(KEY).build();
TextSearchRequest request =
new TextSearchRequest(context).query(currentBusiness.getName())
.location(latLng).radius(50000);
PlacesSearchResult[] similarBusinessesInTheArea = {};
try {
similarBusinessesInTheArea = request.await().results;
} catch(IOException | InterruptedException | ApiException e ) {
LOGGER.warning(e.getMessage());
}
return checkNumberOfSimilarBusinessesInTheArea(currentBusiness,
similarBusinessesInTheArea);
}
private boolean checkNumberOfSimilarBusinessesInTheArea(Listing currentBusiness,
PlacesSearchResult[] similarBusinessesInTheArea) {
int numberOfMatchingBusinesses = 0;
int i = 0;
while (i < similarBusinessesInTheArea.length
&& numberOfMatchingBusinesses < ALLOWED_NUMBER_OF_MATCHING_BUSINESSES) {
if (similarBusinessesInTheArea[i].name.equals(currentBusiness.getName())
&& !similarBusinessesInTheArea[i].formattedAddress
.equals(currentBusiness.getFormattedAddress())) {
numberOfMatchingBusinesses++;
}
i++;
}
if (numberOfMatchingBusinesses >= ALLOWED_NUMBER_OF_MATCHING_BUSINESSES) {
return true;
}
return false;
}
private boolean checkBusinessThroughLinkedin(Listing currentBusiness) {
String searchEngineID = dotenv.get("CX");
List<Result> searchJsonResults = new ArrayList<>();
int companyFollowers = 0;
try {
Customsearch cs = new Customsearch.Builder(
GoogleNetHttpTransport.newTrustedTransport(),
JacksonFactory.getDefaultInstance(), null)
.setApplicationName("linkedinSearch")
.setGoogleClientRequestInitializer(new CustomsearchRequestInitializer(KEY))
.build();
Customsearch.Cse.List list = cs.cse().list(currentBusiness.getName())
.setCx(searchEngineID);
searchJsonResults = list.execute().getItems();
} catch (GeneralSecurityException | IOException e) {
LOGGER.warning(e.getMessage());
}
if (searchJsonResults != null && searchJsonResults.size() != 0) {
Result linkedinBusiness = searchJsonResults.get(0);
String businessDescription =
(String) linkedinBusiness.getPagemap().get("metatags").get(0).get("og:description");
if (businessDescription.contains(START_SUBSTRING)
&& businessDescription.contains(END_SUBSTRING)) {
String followers = businessDescription.substring(
businessDescription.indexOf(START_SUBSTRING) + 2,
businessDescription.indexOf(END_SUBSTRING) - 1);
try {
companyFollowers = Integer.parseInt(followers.replaceAll(",", ""));
} catch (NumberFormatException e) {
// Sometimes businessDescription does not contain a string that
// follows a certain pattern, so the string of the company followers
// is not a number
LOGGER.warning(e.getMessage());
}
}
if (companyFollowers > MIN_FOLLOWERS) {
return true;
}
}
return false;
}
private void addBusinessToDatabase(Listing currentBusiness, String databaseEntry) {
String title = "Business";
String businessTypes = "BusinessTypes";
String address = "Address";
String rating = "Rating";
String photos = "Photos";
Entity businessEntity = new Entity(databaseEntry, currentBusiness.getName());
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
businessEntity.setProperty(title, currentBusiness.getName());
businessEntity.setProperty(address, currentBusiness.getFormattedAddress());
businessEntity.setProperty(rating, currentBusiness.getRating());
businessEntity.setProperty(businessTypes,
Arrays.asList(currentBusiness.getBusinessTypes()));
datastore.put(businessEntity);
}
}
|
package com.hivemq.spi.services;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricRegistry;
import com.hivemq.spi.annotations.Nullable;
import com.hivemq.spi.metrics.HiveMQMetric;
import java.util.Map;
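/**
 * A service for looking up HiveMQ metrics in a type-safe way and for
 * accessing the underlying {@link MetricRegistry}.
 */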
public interface MetricService {
/**
* Returns a specific HiveMQ metric. If the metric does not exist, this method will return
* <code>null</code>.
* <p/>
* For a list of all available metrics, refer to the {@link com.hivemq.spi.metrics.HiveMQMetrics} constant class.
*
* @param metric the metric
* @param <T> the metric type
* @return the metric (if available) or <code>null</code>
*/
@Nullable
<T extends Metric> T getHiveMQMetric(HiveMQMetric<T> metric);
/**
* Returns the metric registry of HiveMQ.
*
* @return the metric registry
*/
MetricRegistry getMetricRegistry();
}
|
package com.impossibl.postgres.system;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Version {
//private static final Pattern VERSION_PATTERN = compile("(\\d+)(?:\\.(\\d+)(?:\\.(\\d+))?)?");
private static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(?:\\.(\\d+))?(?:\\.(\\d+))?");
private static final HashMap<Version,Version> all = new HashMap<Version,Version>();
int major;
Integer minor;
Integer revision;
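  /**
   * Parses a version string of the form "major[.minor[.revision]]".
   *
   * @param versionString the string to parse
   * @return the shared Version instance, or null if no version number is found
   */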
public static Version parse(String versionString) {
Matcher matcher = VERSION_PATTERN.matcher(versionString);
if(!matcher.find())
return null;
int major = Integer.parseInt(matcher.group(1));
Integer minor = matcher.group(2) != null ? Integer.parseInt(matcher.group(2)) : null;
Integer revision = matcher.group(3) != null ? Integer.parseInt(matcher.group(3)) : null;
return get(major, minor, revision);
}
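  /**
   * Returns the shared Version instance for the given components, creating
   * and caching it if it has not been seen before.
   */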
public static synchronized Version get(int major, Integer minor, Integer revision) {
Version test = new Version(major, minor, revision);
Version found = all.get(test);
if(found == null) {
all.put(test, test);
found = test;
}
return found;
}
private Version(int major, Integer minor, Integer revision) {
if(minor == null && revision != null)
throw new IllegalArgumentException();
this.major = major;
this.minor = minor;
this.revision = revision;
}
public int getMajor() {
return major;
}
public Integer getMinor() {
return minor;
}
public Integer getRevision() {
return revision;
}
public boolean compatible(Version current) {
return compatible(current.major, current.minor, current.revision);
}
public boolean compatible(int major, Integer minor, Integer revision) {
return this.major >= major
&& (minor == null || this.minor == null || minor >= this.minor)
&& (revision == null || this.revision == null || revision >= this.revision);
}
public boolean equals(Version current) {
return equals(current.major, current.minor, current.revision);
}
public boolean equals(int major, Integer minor, Integer revision) {
return this.major == major && (minor == null || minor.equals(this.minor)) && (revision == null || revision.equals(this.revision));
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(major);
if(minor != null)
sb.append( "." + minor);
if(revision != null)
sb.append("." + revision);
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + major;
result = prime * result + ((minor == null) ? 0 : minor.hashCode());
result = prime * result + ((revision == null) ? 0 : revision.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Version other = (Version) obj;
if (major != other.major)
return false;
if (minor == null) {
if (other.minor != null)
return false;
}
else if (!minor.equals(other.minor))
return false;
if (revision == null) {
if (other.revision != null)
return false;
}
else if (!revision.equals(other.revision))
return false;
return true;
}
}
|
package com.joelj.collections;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
public class LinkedArrayList<T> implements Iterable<T> {
public static final int DEFAULT_BLOCK_SIZE = 16;
private int size;
private int blockCount;
private final int blockSize;
private Block<T> head;
private Block<T> tail;
/**
* Creates an instance of {@link LinkedArrayList} with the given block size and wraps it in a {@link List}.
	 * Note: The returned instance fails the check: {@code returnedObject instanceof LinkedArrayList} since only a wrapper is returned.
*
* @param blockSize The size of each underlying array block.
* The bigger this value, the better random access performs, but the worse remove operations perform.
* @param <T> The type of elements contained in the list.
* @return A {@link List} wrapper. See above notice about {@code instanceOf} checks.
*/
public static <T> List<T> createWithBlockSize(int blockSize) {
LinkedArrayList<T> rawWithBlockSize = createRawWithBlockSize(blockSize);
return LinkedArrayListImpl.wrap(rawWithBlockSize);
}
/**
* Same as {@link #createWithBlockSize(int)}, but uses DEFAULT_BLOCK_SIZE for the block size.
*/
public static <T> List<T> create() {
return createWithBlockSize(DEFAULT_BLOCK_SIZE);
}
public static <T> LinkedArrayList<T> createRawWithBlockSize(int blockSize) {
return new LinkedArrayList<T>(blockSize);
}
public static <T> LinkedArrayList<T> createRaw() {
return createRawWithBlockSize(DEFAULT_BLOCK_SIZE);
}
private LinkedArrayList(int blockSize) {
if(blockSize <= 0) {
blockSize = DEFAULT_BLOCK_SIZE;
}
this.blockSize = blockSize;
init(blockSize);
}
private void init(int blockSize) {
this.size = 0;
this.tail = new Block<T>(blockSize);
this.head = tail;
this.blockCount = 1;
}
/**
* Allocates a block of memory for more entries.
*/
public void allocateBlock() {
Block<T> newBlock = new Block<T>(blockSize);
tail.next = newBlock;
tail = newBlock;
blockCount++;
}
/**
* Adds the given value to the list, allocating new memory if it needs to.
*/
public void add(T toAdd) {
if(toAdd == null) {
throw new NullPointerException("cannot add null to LinkedArrayList");
}
if(!tail.add(toAdd)) {
allocateBlock();
if(!tail.add(toAdd)) {
throw new IllegalStateException("We just allocated a new block but couldn't add to it. This line wasn't expected to ever be possible to be it.");
}
}
size++;
}
/**
* Replaces the value at the given index with the given value.
* @param index The index to replace.
* @param toAdd The item to add at the given index.
	 * @return The value at the given index BEFORE it is replaced.
*/
public T replace(int index, T toAdd) {
if(toAdd == null) {
throw new NullPointerException("cannot add null to LinkedArrayList");
}
Pair<Block<T>, Integer> blockIndex = getBlockIndex(index);
Block<T> block = blockIndex.getFirst();
Integer blockIndexValue = blockIndex.getSecond();
T result = block.get(blockIndexValue);
block.array[blockIndexValue] = toAdd;
return result;
}
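	/**
	 * Returns the value at the given index.
	 * @param index The index to look up.
	 * @return The value at the given index.
	 * @throws IndexOutOfBoundsException if the index is negative or not less than the size.
	 */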
public T get(int index) {
Pair<Block<T>, Integer> blockIndex = getBlockIndex(index);
return blockIndex.getFirst().get(blockIndex.getSecond());
}
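	/**
	 * Removes and returns the value at the given index, shifting later values
	 * in the containing block to the left.
	 * @param index The index to remove.
	 * @return The removed value.
	 * @throws IndexOutOfBoundsException if the index is negative or not less than the size.
	 */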
public T remove(int index) {
Pair<Block<T>, Integer> blockIndex = getBlockIndex(index);
T remove = blockIndex.getFirst().remove(blockIndex.getSecond());
		size--;
return remove;
}
/**
* Gets the block and index of that block represented by the global index
* @param globalIndex The index in the overall list to get.
* @return A pair where the first element is the block that contains the element represented by the globalIndex and
* the second element represents the index in the block that contains the element represented by the globalIndex.
*/
private Pair<Block<T>, Integer> getBlockIndex(int globalIndex) {
if(globalIndex < 0 || globalIndex >= getSize()) {
throw new IndexOutOfBoundsException(""+globalIndex);
}
Block<T> current = getHead();
int currentMaxIndex = 0;
int blockIndex = globalIndex;
while(current != null) {
currentMaxIndex += current.getNextIndex();
if(globalIndex < currentMaxIndex) {
break;
} else {
blockIndex -= current.getNextIndex();
}
current = current.next;
}
assert current != null : "current should never become null";
		//I'm really not sure why IntelliJ thinks this line needs to be checked
//noinspection unchecked
return Pair.of(current, blockIndex);
}
public int getSize() {
return size;
}
public int getBlockCount() {
return blockCount;
}
/**
* @return The first Block in the list
*/
public Block<T> getHead() {
return head;
}
/**
* @return The currently last Block in the list
*/
public Block<T> getTail() {
return tail;
}
@Override
public Iterator<T> iterator() {
return new LinkedArrayBlockIterator<T>(this.getHead());
}
public void clear() {
init(this.blockSize);
}
static class Block<T> {
final T[] array;
Block<T> next;
int nextIndex;
@SuppressWarnings("unchecked")
public Block(int size) {
assert size > 0 : "size should be positive: " + size;
this.array = (T[])new Object[size];
this.nextIndex = 0;
}
/**
* The max size of the block.
* Or, in other words: the size of the underlying array.
*/
public int getMaxSize() {
return array.length;
}
/**
* The next index of the array that will be added to.
		 * While the block is not full, this.array[getNextIndex()] is null.
*/
public int getNextIndex() {
return nextIndex;
}
public boolean add(T toAdd) {
assert getNextIndex() <= getMaxSize() : "nextIndex should never pass the maxSize";
if(getNextIndex() == getMaxSize()) {
return false;
}
assert array[nextIndex] == null : "the index we're adding to should always be null";
array[nextIndex++] = toAdd;
return true;
}
public T get(int index) {
assert index < nextIndex : "shouldn't be indexing a value not in this block";
return array[index];
}
public T remove(int index) {
assert index < nextIndex : "shouldn't be removing a value not in this block";
T result = get(index);
			// Shift the tail of the block left by one to fill the gap.
			System.arraycopy(array, index + 1, array, index, nextIndex - (index + 1));
			nextIndex--;
			array[nextIndex] = null;
return result;
}
@Override
public String toString() {
return "Block{" +
"array=" + Arrays.toString(array) +
", next=" + next +
", nextIndex=" + nextIndex +
", maxSize=" + getMaxSize() +
'}';
}
}
}
|
package com.kiran.service.integration;
import com.kiran.service.exception.InvalidMove;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import java.text.DecimalFormat;
import java.util.HashMap;
/**
* @author Kiran
* @since 9/3/17
*/
@Component
public class YelpAPI {
@Value("${yelp.search.url}")
private String yelpSearchURL;
@Value("${yelp.auth}")
private String yelpAuth;
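    /**
     * Queries the Yelp search API for the given restaurant term and location
     * and returns the top five results keyed by position. Each entry holds
     * the name, url, review count, rating, image url, city and the distance
     * converted from meters to miles.
     */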
public HashMap<Integer, HashMap<String, String>> findRestaurants(String restaurant, String location) throws InterruptedException {
String termFilter = "?term="+restaurant;
String locationFilter = "&location="+location;
String sortByFilter = "&sort_by=review_count";
String URL = yelpSearchURL + termFilter+locationFilter;
try {
RestTemplate restTemplate = new RestTemplate();
HttpHeaders headers = new HttpHeaders();
headers.add("Authorization", yelpAuth);
HttpEntity<String> request = new HttpEntity<String>(headers);
ResponseEntity<String> response = restTemplate.exchange(URL, HttpMethod.GET, request, String.class);
JSONObject jObject = new JSONObject(response.getBody());
JSONArray jArray = jObject.getJSONArray("businesses");
HashMap<Integer, HashMap<String, String>> restaurantsInfo = new HashMap<>();
DecimalFormat df2 = new DecimalFormat(".
for (int i = 0; i < 5; i++) {
HashMap<String, String> oneRestaurantInfo = new HashMap<>();
oneRestaurantInfo.put("name", jArray.getJSONObject(i).getString("name"));
oneRestaurantInfo.put("url", jArray.getJSONObject(i).getString("url"));
oneRestaurantInfo.put("review", Integer.toString(jArray.getJSONObject(i).getInt("review_count")));
oneRestaurantInfo.put("rating", Double.toString(jArray.getJSONObject(i).getDouble("rating")));
oneRestaurantInfo.put("image_url", jArray.getJSONObject(i).getString("image_url"));
oneRestaurantInfo.put("location", jArray.getJSONObject(i).getJSONObject("location").getString("city"));
long distanceMeter = jArray.getJSONObject(i).getLong("distance");
double inches = (39.370078 * distanceMeter);
double miles = (inches / 63360.00);
oneRestaurantInfo.put("distance", df2.format(miles));
restaurantsInfo.put(i, oneRestaurantInfo);
}
return restaurantsInfo;
} catch (Exception ex) {
throw new InvalidMove("Something went wrong, please contact your administrator.");
}
}
}
|
package com.kodcu.config;
import javafx.collections.ObservableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.json.*;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Component
public class AsciidocConfigMerger {
private Logger logger = LoggerFactory.getLogger(AsciidocConfigMerger.class);
private final EditorConfigBean editorConfigBean;
public final Pattern attributePattern = Pattern.compile("^:(!*)(?<key>.*)(!*):(.*)$", Pattern.MULTILINE);
@Autowired
public AsciidocConfigMerger(EditorConfigBean editorConfigBean) {
this.editorConfigBean = editorConfigBean;
}
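    /**
     * Merges the converter configuration with the attributes declared in the
     * asciidoc document itself. Attributes already set in the document are
     * omitted from the generated attribute list so the document's values take
     * precedence, and a default language is appended when none is defined.
     */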
public JsonObject updateConfig(String asciidoc, JsonObject config) {
try {
Matcher matcher = attributePattern.matcher(asciidoc);
JsonObject currentAttributes = config.getJsonObject("attributes");
JsonObjectBuilder finalBuilder = Json.createObjectBuilder();
JsonArrayBuilder finalAttrBuilder = Json.createArrayBuilder();
// add converter attributes
for (Map.Entry<String, JsonValue> entry : config.entrySet()) {
String key = entry.getKey();
JsonValue value = entry.getValue();
if (!"attributes".equals(key)) {
finalBuilder.add(key, value);
}
}
// find document attributes
List<String> foundKeys = new LinkedList<>();
while (matcher.find()) {
String key = matcher.group("key");
foundKeys.add(key);
}
// add document attributes
for (Map.Entry<String, JsonValue> entry : currentAttributes.entrySet()) {
String key = entry.getKey();
JsonValue value = entry.getValue();
String finalValue = "";
if (value.getValueType() == JsonValue.ValueType.STRING) {
finalValue = ((JsonString) value).getString();
} else {
finalValue = value.toString().replace("\"", "");
}
if (!foundKeys.contains(key)) {
finalAttrBuilder.add(key + "=" + finalValue);
}
}
if (!foundKeys.contains("lang") && !currentAttributes.containsKey("lang")) {
ObservableList<String> defaultLanguage = editorConfigBean.getDefaultLanguage();
if (defaultLanguage.size() > 0) {
finalAttrBuilder.add("lang=" + defaultLanguage.get(0));
}
}
finalBuilder.add("attributes", finalAttrBuilder);
return finalBuilder.build();
} catch (Exception e) {
logger.error("Problem occured while merging options", e);
}
return config;
}
}
|
package com.microsoft.sqlserver.jdbc;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.nio.charset.Charset;
import java.security.KeyStore;
import java.security.Provider;
import java.security.Security;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import javax.xml.bind.DatatypeConverter;
final class TDS {
// TDS protocol versions
static final int VER_DENALI = 0x74000004; // TDS 7.4
static final int VER_KATMAI = 0x730B0003; // TDS 7.3B(includes null bit compression)
static final int VER_YUKON = 0x72090002; // TDS 7.2
static final int VER_UNKNOWN = 0x00000000; // Unknown/uninitialized
static final int TDS_RET_STAT = 0x79;
static final int TDS_COLMETADATA = 0x81;
static final int TDS_TABNAME = 0xA4;
static final int TDS_COLINFO = 0xA5;
static final int TDS_ORDER = 0xA9;
static final int TDS_ERR = 0xAA;
static final int TDS_MSG = 0xAB;
static final int TDS_RETURN_VALUE = 0xAC;
static final int TDS_LOGIN_ACK = 0xAD;
static final int TDS_FEATURE_EXTENSION_ACK = 0xAE;
static final int TDS_ROW = 0xD1;
static final int TDS_NBCROW = 0xD2;
static final int TDS_ENV_CHG = 0xE3;
static final int TDS_SSPI = 0xED;
static final int TDS_DONE = 0xFD;
static final int TDS_DONEPROC = 0xFE;
static final int TDS_DONEINPROC = 0xFF;
static final int TDS_FEDAUTHINFO = 0xEE;
// FedAuth
static final int TDS_FEATURE_EXT_FEDAUTH = 0x02;
static final int TDS_FEDAUTH_LIBRARY_SECURITYTOKEN = 0x01;
static final int TDS_FEDAUTH_LIBRARY_ADAL = 0x02;
static final int TDS_FEDAUTH_LIBRARY_RESERVED = 0x7F;
static final byte ADALWORKFLOW_ACTIVEDIRECTORYPASSWORD = 0x01;
static final byte ADALWORKFLOW_ACTIVEDIRECTORYINTEGRATED = 0x02;
static final byte FEDAUTH_INFO_ID_STSURL = 0x01; // FedAuthInfoData is token endpoint URL from which to acquire fed auth token
static final byte FEDAUTH_INFO_ID_SPN = 0x02; // FedAuthInfoData is the SPN to use for acquiring fed auth token
// AE constants
static final int TDS_FEATURE_EXT_AE = 0x04;
static final int MAX_SUPPORTED_TCE_VERSION = 0x01; // max version
    static final int CUSTOM_CIPHER_ALGORITHM_ID = 0; // custom cipher algorithm id
static final int AES_256_CBC = 1;
static final int AEAD_AES_256_CBC_HMAC_SHA256 = 2;
static final int AE_METADATA = 0x08;
static final int TDS_TVP = 0xF3;
static final int TVP_ROW = 0x01;
static final int TVP_NULL_TOKEN = 0xFFFF;
static final int TVP_STATUS_DEFAULT = 0x02;
static final int TVP_ORDER_UNIQUE_TOKEN = 0x10;
// TVP_ORDER_UNIQUE_TOKEN flags
static final byte TVP_ORDERASC_FLAG = 0x1;
static final byte TVP_ORDERDESC_FLAG = 0x2;
static final byte TVP_UNIQUE_FLAG = 0x4;
// TVP flags, may be used in other places
static final int FLAG_NULLABLE = 0x01;
static final int FLAG_TVP_DEFAULT_COLUMN = 0x200;
static final int FEATURE_EXT_TERMINATOR = -1;
static final String getTokenName(int tdsTokenType) {
switch (tdsTokenType) {
case TDS_RET_STAT:
return "TDS_RET_STAT (0x79)";
case TDS_COLMETADATA:
return "TDS_COLMETADATA (0x81)";
case TDS_TABNAME:
return "TDS_TABNAME (0xA4)";
case TDS_COLINFO:
return "TDS_COLINFO (0xA5)";
case TDS_ORDER:
return "TDS_ORDER (0xA9)";
case TDS_ERR:
return "TDS_ERR (0xAA)";
case TDS_MSG:
return "TDS_MSG (0xAB)";
case TDS_RETURN_VALUE:
return "TDS_RETURN_VALUE (0xAC)";
case TDS_LOGIN_ACK:
return "TDS_LOGIN_ACK (0xAD)";
case TDS_FEATURE_EXTENSION_ACK:
return "TDS_FEATURE_EXTENSION_ACK (0xAE)";
case TDS_ROW:
return "TDS_ROW (0xD1)";
case TDS_NBCROW:
return "TDS_NBCROW (0xD2)";
case TDS_ENV_CHG:
return "TDS_ENV_CHG (0xE3)";
case TDS_SSPI:
return "TDS_SSPI (0xED)";
case TDS_DONE:
return "TDS_DONE (0xFD)";
case TDS_DONEPROC:
return "TDS_DONEPROC (0xFE)";
case TDS_DONEINPROC:
return "TDS_DONEINPROC (0xFF)";
case TDS_FEDAUTHINFO:
return "TDS_FEDAUTHINFO (0xEE)";
default:
return "unknown token (0x" + Integer.toHexString(tdsTokenType).toUpperCase() + ")";
}
}
// RPC ProcIDs for use with RPCRequest (PKT_RPC) calls
static final short PROCID_SP_CURSOR = 1;
static final short PROCID_SP_CURSOROPEN = 2;
static final short PROCID_SP_CURSORPREPARE = 3;
static final short PROCID_SP_CURSOREXECUTE = 4;
static final short PROCID_SP_CURSORPREPEXEC = 5;
static final short PROCID_SP_CURSORUNPREPARE = 6;
static final short PROCID_SP_CURSORFETCH = 7;
static final short PROCID_SP_CURSOROPTION = 8;
static final short PROCID_SP_CURSORCLOSE = 9;
static final short PROCID_SP_EXECUTESQL = 10;
static final short PROCID_SP_PREPARE = 11;
static final short PROCID_SP_EXECUTE = 12;
static final short PROCID_SP_PREPEXEC = 13;
static final short PROCID_SP_PREPEXECRPC = 14;
static final short PROCID_SP_UNPREPARE = 15;
// Constants for use with cursor RPCs
static final short SP_CURSOR_OP_UPDATE = 1;
static final short SP_CURSOR_OP_DELETE = 2;
static final short SP_CURSOR_OP_INSERT = 4;
static final short SP_CURSOR_OP_REFRESH = 8;
static final short SP_CURSOR_OP_LOCK = 16;
static final short SP_CURSOR_OP_SETPOSITION = 32;
static final short SP_CURSOR_OP_ABSOLUTE = 64;
// Constants for server-cursored result sets.
// See the Engine Cursors Functional Specification for details.
static final int FETCH_FIRST = 1;
static final int FETCH_NEXT = 2;
static final int FETCH_PREV = 4;
static final int FETCH_LAST = 8;
static final int FETCH_ABSOLUTE = 16;
static final int FETCH_RELATIVE = 32;
static final int FETCH_REFRESH = 128;
static final int FETCH_INFO = 256;
static final int FETCH_PREV_NOADJUST = 512;
static final byte RPC_OPTION_NO_METADATA = (byte) 0x02;
// Transaction manager request types
static final short TM_GET_DTC_ADDRESS = 0;
static final short TM_PROPAGATE_XACT = 1;
static final short TM_BEGIN_XACT = 5;
static final short TM_PROMOTE_PROMOTABLE_XACT = 6;
static final short TM_COMMIT_XACT = 7;
static final short TM_ROLLBACK_XACT = 8;
static final short TM_SAVE_XACT = 9;
static final byte PKT_QUERY = 1;
static final byte PKT_RPC = 3;
static final byte PKT_REPLY = 4;
static final byte PKT_CANCEL_REQ = 6;
static final byte PKT_BULK = 7;
static final byte PKT_DTC = 14;
static final byte PKT_LOGON70 = 16; // 0x10
static final byte PKT_SSPI = 17;
static final byte PKT_PRELOGIN = 18; // 0x12
static final byte PKT_FEDAUTH_TOKEN_MESSAGE = 8; // Authentication token for federated authentication
static final byte STATUS_NORMAL = 0x00;
static final byte STATUS_BIT_EOM = 0x01;
static final byte STATUS_BIT_ATTENTION = 0x02;// this is called ignore bit in TDS spec
static final byte STATUS_BIT_RESET_CONN = 0x08;
// Various TDS packet size constants
static final int INVALID_PACKET_SIZE = -1;
static final int INITIAL_PACKET_SIZE = 4096;
static final int MIN_PACKET_SIZE = 512;
static final int MAX_PACKET_SIZE = 32767;
static final int DEFAULT_PACKET_SIZE = 8000;
static final int SERVER_PACKET_SIZE = 0; // Accept server's configured packet size
// TDS packet header size and offsets
static final int PACKET_HEADER_SIZE = 8;
static final int PACKET_HEADER_MESSAGE_TYPE = 0;
static final int PACKET_HEADER_MESSAGE_STATUS = 1;
static final int PACKET_HEADER_MESSAGE_LENGTH = 2;
static final int PACKET_HEADER_SPID = 4;
static final int PACKET_HEADER_SEQUENCE_NUM = 6;
static final int PACKET_HEADER_WINDOW = 7; // Reserved/Not used
// MARS header length:
// 2 byte header type
// 8 byte transaction descriptor
// 4 byte outstanding request count
static final int MARS_HEADER_LENGTH = 18; // 2 byte header type, 8 byte transaction descriptor,
static final int TRACE_HEADER_LENGTH = 26; // header length (4) + header type (2) + guid (16) + Sequence number size (4)
static final short HEADERTYPE_TRACE = 3; // trace header type
// Message header length
static final int MESSAGE_HEADER_LENGTH = MARS_HEADER_LENGTH + 4; // length includes message header itself
static final byte B_PRELOGIN_OPTION_VERSION = 0x00;
static final byte B_PRELOGIN_OPTION_ENCRYPTION = 0x01;
static final byte B_PRELOGIN_OPTION_INSTOPT = 0x02;
static final byte B_PRELOGIN_OPTION_THREADID = 0x03;
static final byte B_PRELOGIN_OPTION_MARS = 0x04;
static final byte B_PRELOGIN_OPTION_TRACEID = 0x05;
static final byte B_PRELOGIN_OPTION_FEDAUTHREQUIRED = 0x06;
static final byte B_PRELOGIN_OPTION_TERMINATOR = (byte) 0xFF;
// Login option byte 1
static final byte LOGIN_OPTION1_ORDER_X86 = 0x00;
static final byte LOGIN_OPTION1_ORDER_6800 = 0x01;
static final byte LOGIN_OPTION1_CHARSET_ASCII = 0x00;
static final byte LOGIN_OPTION1_CHARSET_EBCDIC = 0x02;
static final byte LOGIN_OPTION1_FLOAT_IEEE_754 = 0x00;
static final byte LOGIN_OPTION1_FLOAT_VAX = 0x04;
static final byte LOGIN_OPTION1_FLOAT_ND5000 = 0x08;
static final byte LOGIN_OPTION1_DUMPLOAD_ON = 0x00;
static final byte LOGIN_OPTION1_DUMPLOAD_OFF = 0x10;
static final byte LOGIN_OPTION1_USE_DB_ON = 0x00;
static final byte LOGIN_OPTION1_USE_DB_OFF = 0x20;
static final byte LOGIN_OPTION1_INIT_DB_WARN = 0x00;
static final byte LOGIN_OPTION1_INIT_DB_FATAL = 0x40;
static final byte LOGIN_OPTION1_SET_LANG_OFF = 0x00;
static final byte LOGIN_OPTION1_SET_LANG_ON = (byte) 0x80;
// Login option byte 2
static final byte LOGIN_OPTION2_INIT_LANG_WARN = 0x00;
static final byte LOGIN_OPTION2_INIT_LANG_FATAL = 0x01;
static final byte LOGIN_OPTION2_ODBC_OFF = 0x00;
static final byte LOGIN_OPTION2_ODBC_ON = 0x02;
static final byte LOGIN_OPTION2_TRAN_BOUNDARY_OFF = 0x00;
static final byte LOGIN_OPTION2_TRAN_BOUNDARY_ON = 0x04;
static final byte LOGIN_OPTION2_CACHE_CONNECTION_OFF = 0x00;
static final byte LOGIN_OPTION2_CACHE_CONNECTION_ON = 0x08;
static final byte LOGIN_OPTION2_USER_NORMAL = 0x00;
static final byte LOGIN_OPTION2_USER_SERVER = 0x10;
static final byte LOGIN_OPTION2_USER_REMUSER = 0x20;
static final byte LOGIN_OPTION2_USER_SQLREPL = 0x30;
static final byte LOGIN_OPTION2_INTEGRATED_SECURITY_OFF = 0x00;
static final byte LOGIN_OPTION2_INTEGRATED_SECURITY_ON = (byte) 0x80;
// Login option byte 3
static final byte LOGIN_OPTION3_DEFAULT = 0x00;
static final byte LOGIN_OPTION3_CHANGE_PASSWORD = 0x01;
static final byte LOGIN_OPTION3_SEND_YUKON_BINARY_XML = 0x02;
static final byte LOGIN_OPTION3_USER_INSTANCE = 0x04;
static final byte LOGIN_OPTION3_UNKNOWN_COLLATION_HANDLING = 0x08;
static final byte LOGIN_OPTION3_FEATURE_EXTENSION = 0x10;
// Login type flag (bits 5 - 7 reserved for future use)
static final byte LOGIN_SQLTYPE_DEFAULT = 0x00;
static final byte LOGIN_SQLTYPE_TSQL = 0x01;
static final byte LOGIN_SQLTYPE_ANSI_V1 = 0x02;
static final byte LOGIN_SQLTYPE_ANSI89_L1 = 0x03;
static final byte LOGIN_SQLTYPE_ANSI89_L2 = 0x04;
static final byte LOGIN_SQLTYPE_ANSI89_IEF = 0x05;
static final byte LOGIN_SQLTYPE_ANSI89_ENTRY = 0x06;
static final byte LOGIN_SQLTYPE_ANSI89_TRANS = 0x07;
static final byte LOGIN_SQLTYPE_ANSI89_INTER = 0x08;
static final byte LOGIN_SQLTYPE_ANSI89_FULL = 0x09;
static final byte LOGIN_OLEDB_OFF = 0x00;
static final byte LOGIN_OLEDB_ON = 0x10;
static final byte LOGIN_READ_ONLY_INTENT = 0x20;
static final byte LOGIN_READ_WRITE_INTENT = 0x00;
static final byte ENCRYPT_OFF = 0x00;
static final byte ENCRYPT_ON = 0x01;
static final byte ENCRYPT_NOT_SUP = 0x02;
static final byte ENCRYPT_REQ = 0x03;
static final byte ENCRYPT_INVALID = (byte) 0xFF;
static final String getEncryptionLevel(int level) {
switch (level) {
case ENCRYPT_OFF:
return "OFF";
case ENCRYPT_ON:
return "ON";
case ENCRYPT_NOT_SUP:
return "NOT SUPPORTED";
case ENCRYPT_REQ:
return "REQUIRED";
default:
return "unknown encryption level (0x" + Integer.toHexString(level).toUpperCase() + ")";
}
}
// Prelogin packet length, including the tds header,
    // version, encryption, and traceid data sessions.
// For detailed info, please check the definition of
// preloginRequest in Prelogin function.
static final byte B_PRELOGIN_MESSAGE_LENGTH = 67;
static final byte B_PRELOGIN_MESSAGE_LENGTH_WITH_FEDAUTH = 73;
// Scroll options and concurrency options lifted out
    // of the Yukon cursors spec for sp_cursoropen.
final static int SCROLLOPT_KEYSET = 1;
final static int SCROLLOPT_DYNAMIC = 2;
final static int SCROLLOPT_FORWARD_ONLY = 4;
final static int SCROLLOPT_STATIC = 8;
final static int SCROLLOPT_FAST_FORWARD = 16;
final static int SCROLLOPT_PARAMETERIZED_STMT = 4096;
final static int SCROLLOPT_AUTO_FETCH = 8192;
final static int SCROLLOPT_AUTO_CLOSE = 16384;
final static int CCOPT_READ_ONLY = 1;
final static int CCOPT_SCROLL_LOCKS = 2;
final static int CCOPT_OPTIMISTIC_CC = 4;
final static int CCOPT_OPTIMISTIC_CCVAL = 8;
final static int CCOPT_ALLOW_DIRECT = 8192;
final static int CCOPT_UPDT_IN_PLACE = 16384;
// Result set rows include an extra, "hidden" ROWSTAT column which indicates
// the overall success or failure of the row fetch operation. With a keyset
// cursor, the value in the ROWSTAT column indicates whether the row has been
// deleted from the database.
static final int ROWSTAT_FETCH_SUCCEEDED = 1;
static final int ROWSTAT_FETCH_MISSING = 2;
// ColumnInfo status
final static int COLINFO_STATUS_EXPRESSION = 0x04;
final static int COLINFO_STATUS_KEY = 0x08;
final static int COLINFO_STATUS_HIDDEN = 0x10;
final static int COLINFO_STATUS_DIFFERENT_NAME = 0x20;
final static int MAX_FRACTIONAL_SECONDS_SCALE = 7;
final static Timestamp MAX_TIMESTAMP = Timestamp.valueOf("2079-06-06 23:59:59");
final static Timestamp MIN_TIMESTAMP = Timestamp.valueOf("1900-01-01 00:00:00");
static int nanosSinceMidnightLength(int scale) {
final int[] scaledLengths = {3, 3, 3, 4, 4, 5, 5, 5};
assert scale >= 0;
assert scale <= MAX_FRACTIONAL_SECONDS_SCALE;
return scaledLengths[scale];
}
final static int DAYS_INTO_CE_LENGTH = 3;
final static int MINUTES_OFFSET_LENGTH = 2;
// Number of days in a "normal" (non-leap) year according to SQL Server.
final static int DAYS_PER_YEAR = 365;
final static int BASE_YEAR_1900 = 1900;
final static int BASE_YEAR_1970 = 1970;
final static String BASE_DATE_1970 = "1970-01-01";
static int timeValueLength(int scale) {
return nanosSinceMidnightLength(scale);
}
static int datetime2ValueLength(int scale) {
return DAYS_INTO_CE_LENGTH + nanosSinceMidnightLength(scale);
}
static int datetimeoffsetValueLength(int scale) {
return DAYS_INTO_CE_LENGTH + MINUTES_OFFSET_LENGTH + nanosSinceMidnightLength(scale);
}
// TDS is just a namespace - it can't be instantiated.
private TDS() {
}
}
class Nanos {
static final int PER_SECOND = 1000000000;
static final int PER_MAX_SCALE_INTERVAL = PER_SECOND / (int) Math.pow(10, TDS.MAX_FRACTIONAL_SECONDS_SCALE);
static final int PER_MILLISECOND = PER_SECOND / 1000;
static final long PER_DAY = 24 * 60 * 60 * (long) PER_SECOND;
private Nanos() {
}
}
// Constants relating to the historically accepted Julian-Gregorian calendar cutover date (October 15, 1582).
// Used in processing SQL Server temporal data types whose date component may precede that date.
// Scoping these constants to a class defers their initialization to first use.
class GregorianChange {
// Cutover date for a pure Gregorian calendar - that is, a proleptic Gregorian calendar with
    // Gregorian leap year behavior throughout its entire range. This is the cutover date used
// with temporal server values, which are represented in terms of number of days relative to a
// base date.
static final java.util.Date PURE_CHANGE_DATE = new java.util.Date(Long.MIN_VALUE);
// The standard Julian to Gregorian cutover date (October 15, 1582) that the JDBC temporal
// classes (Time, Date, Timestamp) assume when converting to and from their UTC milliseconds
// representations.
static final java.util.Date STANDARD_CHANGE_DATE = (new GregorianCalendar(Locale.US)).getGregorianChange();
    // A hint as to the number of days since 1/1/0001, past which we do not need to
    // rationalize the difference between SQL Server behavior (pure Gregorian)
// and Java behavior (standard Gregorian).
// Not having to rationalize the difference has a substantial (measured) performance benefit
// for temporal getters.
// The hint does not need to be exact, as long as it's later than the actual change date.
static final int DAYS_SINCE_BASE_DATE_HINT = DDC.daysSinceBaseDate(1583, 1, 1);
    // Extra days that need to be added to a pure Gregorian date, after the Gregorian
    // cutover date, to match the default Julian-Gregorian calendar date of Java.
static final int EXTRA_DAYS_TO_BE_ADDED;
static {
        // This refers to a known calendar bug in Java (filed as several bug reports for the same issue).
// The issue is fixed in JRE 1.7
// and exists in all the older versions.
// Due to the above bug, in older JVM versions(1.6 and before),
// the date calculation is incorrect at the Gregorian cut over date.
        // i.e. the next date after Oct 4th 1582 is Oct 17th 1582, whereas
// it should have been Oct 15th 1582.
// We intentionally do not make a check based on JRE version.
// If we do so, our code would break if the bug is fixed in a later update
// to an older JRE. So, we check for the existence of the bug instead.
GregorianCalendar cal = new GregorianCalendar(Locale.US);
cal.clear();
cal.set(1, 1, 577738, 0, 0, 0);// 577738 = 1+577737(no of days since epoch that brings us to oct 15th 1582)
if (cal.get(Calendar.DAY_OF_MONTH) == 15) {
// If the date calculation is correct(the above bug is fixed),
// post the default gregorian cut over date, the pure gregorian date
// falls short by two days for all dates compared to julian-gregorian date.
// so, we add two extra days for functional correctness.
// Note: other ways, in which this issue can be fixed instead of
// trying to detect the JVM bug is
// a) use unoptimized code path in the function convertTemporalToObject
// b) use cal.add api instead of cal.set api in the current optimized code path
// In both the above approaches, the code is about 6-8 times slower,
// resulting in an overall perf regression of about (10-30)% for perf test cases
EXTRA_DAYS_TO_BE_ADDED = 2;
}
else
EXTRA_DAYS_TO_BE_ADDED = 0;
}
private GregorianChange() {
}
}
// UTC/GMT time zone singleton. The enum type delays initialization until first use.
enum UTC {
INSTANCE;
static final TimeZone timeZone = new SimpleTimeZone(0, "UTC");
}
final class TDSChannel {
private static final Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Channel");
final Logger getLogger() {
return logger;
}
private final String traceID;
final public String toString() {
return traceID;
}
private final SQLServerConnection con;
private final TDSWriter tdsWriter;
final TDSWriter getWriter() {
return tdsWriter;
}
final TDSReader getReader(TDSCommand command) {
return new TDSReader(this, con, command);
}
// Socket for raw TCP/IP communications with SQL Server
private Socket tcpSocket;
// Socket for SSL-encrypted communications with SQL Server
private SSLSocket sslSocket;
// Socket providing the communications interface to the driver.
// For SSL-encrypted connections, this is the SSLSocket wrapped
// around the TCP socket. For unencrypted connections, it is
// just the TCP socket itself.
private Socket channelSocket;
// Implementation of a Socket proxy that can switch from TDS-wrapped I/O
// (using the TDSChannel itself) during SSL handshake to raw I/O over
// the TCP/IP socket.
ProxySocket proxySocket = null;
// I/O streams for raw TCP/IP communications with SQL Server
private InputStream tcpInputStream;
private OutputStream tcpOutputStream;
// I/O streams providing the communications interface to the driver.
// For SSL-encrypted connections, these are streams obtained from
// the SSL socket above. They wrap the underlying TCP streams.
// For unencrypted connections, they are just the TCP streams themselves.
private InputStream inputStream;
private OutputStream outputStream;
/** TDS packet payload logger */
private static Logger packetLogger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.DATA");
private final boolean isLoggingPackets = packetLogger.isLoggable(Level.FINEST);
final boolean isLoggingPackets() {
return isLoggingPackets;
}
// Number of TDS messages sent to and received from the server
int numMsgsSent = 0;
int numMsgsRcvd = 0;
// Last SPID received from the server. Used for logging and to tag subsequent outgoing
// packets to facilitate diagnosing problems from the server side.
private int spid = 0;
void setSPID(int spid) {
this.spid = spid;
}
int getSPID() {
return spid;
}
void resetPooledConnection() {
tdsWriter.resetPooledConnection();
}
TDSChannel(SQLServerConnection con) {
this.con = con;
traceID = "TDSChannel (" + con.toString() + ")";
this.tcpSocket = null;
this.sslSocket = null;
this.channelSocket = null;
this.tcpInputStream = null;
this.tcpOutputStream = null;
this.inputStream = null;
this.outputStream = null;
this.tdsWriter = new TDSWriter(this, con);
}
/**
* Opens the physical communications channel (TCP/IP socket and I/O streams) to the SQL Server.
*/
final void open(String host,
int port,
int timeoutMillis,
boolean useParallel,
boolean useTnir,
boolean isTnirFirstAttempt,
int timeoutMillisForFullTimeout) throws SQLServerException {
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + ": Opening TCP socket...");
SocketFinder socketFinder = new SocketFinder(traceID, con);
channelSocket = tcpSocket = socketFinder.findSocket(host, port, timeoutMillis, useParallel, useTnir, isTnirFirstAttempt,
timeoutMillisForFullTimeout);
try {
// Set socket options
tcpSocket.setTcpNoDelay(true);
tcpSocket.setKeepAlive(true);
// set SO_TIMEOUT
int socketTimeout = con.getSocketTimeoutMilliseconds();
tcpSocket.setSoTimeout(socketTimeout);
inputStream = tcpInputStream = tcpSocket.getInputStream();
outputStream = tcpOutputStream = tcpSocket.getOutputStream();
}
catch (IOException ex) {
SQLServerException.ConvertConnectExceptionToSQLServerException(host, port, con, ex);
}
}
/**
* Disables SSL on this TDS channel.
*/
void disableSSL() {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Disabling SSL...");
/*
* The mission: To close the SSLSocket and release everything that it is holding onto other than the TCP/IP socket and streams.
*
* The challenge: Simply closing the SSLSocket tries to do additional, unnecessary shutdown I/O over the TCP/IP streams that are bound to the
* socket proxy, resulting in a hang and confusing SQL Server.
*
* Solution: Rewire the ProxySocket's input and output streams (one more time) to closed streams. SSLSocket sees that the streams are already
* closed and does not attempt to do any further I/O on them before closing itself.
*/
// Create a couple of cheap closed streams
InputStream is = new ByteArrayInputStream(new byte[0]);
try {
is.close();
}
catch (IOException e) {
// No reason to expect a brand new ByteArrayInputStream not to close,
// but just in case...
logger.fine("Ignored error closing InputStream: " + e.getMessage());
}
OutputStream os = new ByteArrayOutputStream();
try {
os.close();
}
catch (IOException e) {
// No reason to expect a brand new ByteArrayOutputStream not to close,
// but just in case...
logger.fine("Ignored error closing OutputStream: " + e.getMessage());
}
// Rewire the proxy socket to the closed streams
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Rewiring proxy streams for SSL socket close");
proxySocket.setStreams(is, os);
// Now close the SSL socket. It will see that the proxy socket's streams
// are closed and not try to do any further I/O over them.
try {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Closing SSL socket");
sslSocket.close();
}
catch (IOException e) {
// Don't care if we can't close the SSL socket. We're done with it anyway.
logger.fine("Ignored error closing SSLSocket: " + e.getMessage());
}
// Do not close the proxy socket. Doing so would close our TCP socket
// to which the proxy socket is bound. Instead, just null out the reference
// to free up the few resources it holds onto.
proxySocket = null;
// Finally, with all of the SSL support out of the way, put the TDSChannel
// back to using the TCP/IP socket and streams directly.
inputStream = tcpInputStream;
outputStream = tcpOutputStream;
channelSocket = tcpSocket;
sslSocket = null;
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " SSL disabled");
}
/**
* Used during SSL handshake, this class implements an InputStream that reads SSL handshake response data (framed in TDS messages) from the TDS
* channel.
*/
private class SSLHandshakeInputStream extends InputStream {
private final TDSReader tdsReader;
private final SSLHandshakeOutputStream sslHandshakeOutputStream;
private final Logger logger;
private final String logContext;
SSLHandshakeInputStream(TDSChannel tdsChannel,
SSLHandshakeOutputStream sslHandshakeOutputStream) {
this.tdsReader = tdsChannel.getReader(null);
this.sslHandshakeOutputStream = sslHandshakeOutputStream;
this.logger = tdsChannel.getLogger();
this.logContext = tdsChannel.toString() + " (SSLHandshakeInputStream):";
}
/**
* If there is no handshake response data available to be read from existing packets then this method ensures that the SSL handshake output
* stream has been flushed to the server, and reads another packet (starting the next TDS response message).
*
* Note that simply using TDSReader.ensurePayload isn't sufficient as it does not automatically start the new response message.
*/
private void ensureSSLPayload() throws IOException {
if (0 == tdsReader.available()) {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " No handshake response bytes available. Flushing SSL handshake output stream.");
try {
sslHandshakeOutputStream.endMessage();
}
catch (SQLServerException e) {
logger.finer(logContext + " Ending TDS message threw exception:" + e.getMessage());
throw new IOException(e.getMessage());
}
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Reading first packet of SSL handshake response");
try {
tdsReader.readPacket();
}
catch (SQLServerException e) {
logger.finer(logContext + " Reading response packet threw exception:" + e.getMessage());
throw new IOException(e.getMessage());
}
}
}
public long skip(long n) throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Skipping " + n + " bytes...");
if (n <= 0)
return 0;
if (n > Integer.MAX_VALUE)
n = Integer.MAX_VALUE;
ensureSSLPayload();
try {
tdsReader.skip((int) n);
}
catch (SQLServerException e) {
logger.finer(logContext + " Skipping bytes threw exception:" + e.getMessage());
throw new IOException(e.getMessage());
}
return n;
}
private final byte oneByte[] = new byte[1];
public int read() throws IOException {
int bytesRead;
while (0 == (bytesRead = readInternal(oneByte, 0, oneByte.length)))
;
assert 1 == bytesRead || -1 == bytesRead;
return 1 == bytesRead ? (oneByte[0] & 0xFF) : -1; // mask to 0..255 per the InputStream.read() contract
}
public int read(byte[] b) throws IOException {
return readInternal(b, 0, b.length);
}
public int read(byte b[],
int offset,
int maxBytes) throws IOException {
return readInternal(b, offset, maxBytes);
}
private int readInternal(byte b[],
int offset,
int maxBytes) throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Reading " + maxBytes + " bytes...");
ensureSSLPayload();
try {
tdsReader.readBytes(b, offset, maxBytes);
}
catch (SQLServerException e) {
logger.finer(logContext + " Reading bytes threw exception:" + e.getMessage());
throw new IOException(e.getMessage());
}
return maxBytes;
}
}
/**
* Used during SSL handshake, this class implements an OutputStream that writes SSL handshake request data (framed in TDS messages) to the TDS
* channel.
*/
private class SSLHandshakeOutputStream extends OutputStream {
private final TDSWriter tdsWriter;
/** Flag indicating when it is necessary to start a new prelogin TDS message */
private boolean messageStarted;
private final Logger logger;
private final String logContext;
SSLHandshakeOutputStream(TDSChannel tdsChannel) {
this.tdsWriter = tdsChannel.getWriter();
this.messageStarted = false;
this.logger = tdsChannel.getLogger();
this.logContext = tdsChannel.toString() + " (SSLHandshakeOutputStream):";
}
public void flush() throws IOException {
// It seems that the security provider implementation in some JVMs
// (notably SunJSSE in the 6.0 JVM) likes to add spurious calls to
// flush the SSL handshake output stream during SSL handshaking.
// We need to ignore these calls because the SSL handshake payload
// needs to be completely encapsulated in TDS. The SSL handshake
// input stream always ensures that this output stream has been flushed
// before trying to read the response.
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Ignored a request to flush the stream");
}
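// endMessage is driven by SSLHandshakeInputStream.ensureSSLPayload: before the first packet of
// the server's handshake response is read, any pending handshake request bytes are flushed to
// the server as a complete TDS message.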
void endMessage() throws SQLServerException {
// We should only be asked to end the message if we have started one
assert messageStarted;
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Finishing TDS message");
// Flush any remaining bytes through the writer. Since there may be fewer bytes
// ready to send than a full TDS packet, we end the message here and start a new
// one later if additional handshake data needs to be sent.
tdsWriter.endMessage();
messageStarted = false;
}
private final byte singleByte[] = new byte[1];
public void write(int b) throws IOException {
singleByte[0] = (byte) (b & 0xFF);
writeInternal(singleByte, 0, singleByte.length);
}
public void write(byte[] b) throws IOException {
writeInternal(b, 0, b.length);
}
public void write(byte[] b,
int off,
int len) throws IOException {
writeInternal(b, off, len);
}
private void writeInternal(byte[] b,
int off,
int len) throws IOException {
try {
// Start out the handshake request in a new prelogin message. Subsequent
// writes just add handshake data to the request until flushed.
if (!messageStarted) {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Starting new TDS packet...");
tdsWriter.startMessage(null, TDS.PKT_PRELOGIN);
messageStarted = true;
}
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Writing " + len + " bytes...");
tdsWriter.writeBytes(b, off, len);
}
catch (SQLServerException e) {
logger.finer(logContext + " Writing bytes threw exception:" + e.getMessage());
throw new IOException(e.getMessage());
}
}
}
/**
* This class implements an InputStream that just forwards all of its methods to an underlying InputStream.
*
* It is more predictable than java.io.FilterInputStream, which forwards some of its read methods directly to the underlying stream, but not others.
*/
private final class ProxyInputStream extends InputStream {
private InputStream filteredStream;
ProxyInputStream(InputStream is) {
filteredStream = is;
}
final void setFilteredStream(InputStream is) {
filteredStream = is;
}
public long skip(long n) throws IOException {
long bytesSkipped;
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Skipping " + n + " bytes");
bytesSkipped = filteredStream.skip(n);
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Skipped " + n + " bytes");
return bytesSkipped;
}
public int available() throws IOException {
int bytesAvailable = filteredStream.available();
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " " + bytesAvailable + " bytes available");
return bytesAvailable;
}
private final byte oneByte[] = new byte[1];
public int read() throws IOException {
int bytesRead;
while (0 == (bytesRead = readInternal(oneByte, 0, oneByte.length)))
;
assert 1 == bytesRead || -1 == bytesRead;
return 1 == bytesRead ? (oneByte[0] & 0xFF) : -1; // mask to 0..255 per the InputStream.read() contract
}
public int read(byte[] b) throws IOException {
return readInternal(b, 0, b.length);
}
public int read(byte b[],
int offset,
int maxBytes) throws IOException {
return readInternal(b, offset, maxBytes);
}
private int readInternal(byte b[],
int offset,
int maxBytes) throws IOException {
int bytesRead;
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Reading " + maxBytes + " bytes");
try {
bytesRead = filteredStream.read(b, offset, maxBytes);
}
catch (IOException e) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " " + e.getMessage());
logger.finer(toString() + " Reading bytes threw exception:" + e.getMessage());
throw e;
}
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Read " + bytesRead + " bytes");
return bytesRead;
}
public boolean markSupported() {
boolean markSupported = filteredStream.markSupported();
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Returning markSupported: " + markSupported);
return markSupported;
}
public void mark(int readLimit) {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Marking next " + readLimit + " bytes");
filteredStream.mark(readLimit);
}
public void reset() throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Resetting to previous mark");
filteredStream.reset();
}
public void close() throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Closing");
filteredStream.close();
}
}
/**
* This class implements an OutputStream that just forwards all of its methods to an underlying OutputStream.
*
* This class essentially does what java.io.FilterOutputStream does, but is more efficient for our usage. FilterOutputStream transforms block writes
* to sequences of single-byte writes.
*/
final class ProxyOutputStream extends OutputStream {
private OutputStream filteredStream;
ProxyOutputStream(OutputStream os) {
filteredStream = os;
}
final void setFilteredStream(OutputStream os) {
filteredStream = os;
}
public void close() throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Closing");
filteredStream.close();
}
public void flush() throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Flushing");
filteredStream.flush();
}
private final byte singleByte[] = new byte[1];
public void write(int b) throws IOException {
singleByte[0] = (byte) (b & 0xFF);
writeInternal(singleByte, 0, singleByte.length);
}
public void write(byte[] b) throws IOException {
writeInternal(b, 0, b.length);
}
public void write(byte[] b,
int off,
int len) throws IOException {
writeInternal(b, off, len);
}
private void writeInternal(byte[] b,
int off,
int len) throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Writing " + len + " bytes");
filteredStream.write(b, off, len);
}
}
/**
* This class implements a Socket whose I/O streams can be switched from using a TDSChannel for I/O to using its underlying TCP/IP socket.
*
* The SSL socket binds to a ProxySocket. The initial SSL handshake is done over TDSChannel I/O streams so that the handshake payload is framed in
* TDS packets. The I/O streams are then switched to TCP/IP I/O streams using setStreams, and SSL communications continue directly over the TCP/IP
* I/O streams.
*
* Most methods other than those for getting the I/O streams are simply forwarded to the TDSChannel's underlying TCP/IP socket. Methods that
* change the socket binding or provide direct channel access are disallowed.
*/
private class ProxySocket extends Socket {
private final TDSChannel tdsChannel;
private final Logger logger;
private final String logContext;
private final ProxyInputStream proxyInputStream;
private final ProxyOutputStream proxyOutputStream;
ProxySocket(TDSChannel tdsChannel) {
this.tdsChannel = tdsChannel;
this.logger = tdsChannel.getLogger();
this.logContext = tdsChannel.toString() + " (ProxySocket):";
// Create the I/O streams
SSLHandshakeOutputStream sslHandshakeOutputStream = new SSLHandshakeOutputStream(tdsChannel);
SSLHandshakeInputStream sslHandshakeInputStream = new SSLHandshakeInputStream(tdsChannel, sslHandshakeOutputStream);
this.proxyOutputStream = new ProxyOutputStream(sslHandshakeOutputStream);
this.proxyInputStream = new ProxyInputStream(sslHandshakeInputStream);
}
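// setStreams rewires the proxy's I/O: after the SSL handshake completes, enableSSL switches the
// proxy to the raw TCP/IP streams so that SSL record I/O bypasses TDS framing; disableSSL later
// switches it to closed streams so that closing the SSLSocket does no further I/O over them.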
void setStreams(InputStream is,
OutputStream os) {
proxyInputStream.setFilteredStream(is);
proxyOutputStream.setFilteredStream(os);
}
public InputStream getInputStream() throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Getting input stream");
return proxyInputStream;
}
public OutputStream getOutputStream() throws IOException {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Getting output stream");
return proxyOutputStream;
}
// Allow methods that should just forward to the underlying TCP socket or return fixed values
public InetAddress getInetAddress() {
return tdsChannel.tcpSocket.getInetAddress();
}
public boolean getKeepAlive() throws SocketException {
return tdsChannel.tcpSocket.getKeepAlive();
}
public InetAddress getLocalAddress() {
return tdsChannel.tcpSocket.getLocalAddress();
}
public int getLocalPort() {
return tdsChannel.tcpSocket.getLocalPort();
}
public SocketAddress getLocalSocketAddress() {
return tdsChannel.tcpSocket.getLocalSocketAddress();
}
public boolean getOOBInline() throws SocketException {
return tdsChannel.tcpSocket.getOOBInline();
}
public int getPort() {
return tdsChannel.tcpSocket.getPort();
}
public int getReceiveBufferSize() throws SocketException {
return tdsChannel.tcpSocket.getReceiveBufferSize();
}
public SocketAddress getRemoteSocketAddress() {
return tdsChannel.tcpSocket.getRemoteSocketAddress();
}
public boolean getReuseAddress() throws SocketException {
return tdsChannel.tcpSocket.getReuseAddress();
}
public int getSendBufferSize() throws SocketException {
return tdsChannel.tcpSocket.getSendBufferSize();
}
public int getSoLinger() throws SocketException {
return tdsChannel.tcpSocket.getSoLinger();
}
public int getSoTimeout() throws SocketException {
return tdsChannel.tcpSocket.getSoTimeout();
}
public boolean getTcpNoDelay() throws SocketException {
return tdsChannel.tcpSocket.getTcpNoDelay();
}
public int getTrafficClass() throws SocketException {
return tdsChannel.tcpSocket.getTrafficClass();
}
public boolean isBound() {
return true;
}
public boolean isClosed() {
return false;
}
public boolean isConnected() {
return true;
}
public boolean isInputShutdown() {
return false;
}
public boolean isOutputShutdown() {
return false;
}
public String toString() {
return tdsChannel.tcpSocket.toString();
}
public SocketChannel getChannel() {
return null;
}
// Disallow calls to methods that would change the underlying TCP socket
public void bind(SocketAddress bindPoint) throws IOException {
logger.finer(logContext + " Disallowed call to bind. Throwing IOException.");
throw new IOException();
}
public void connect(SocketAddress endpoint) throws IOException {
logger.finer(logContext + " Disallowed call to connect (without timeout). Throwing IOException.");
throw new IOException();
}
public void connect(SocketAddress endpoint,
int timeout) throws IOException {
logger.finer(logContext + " Disallowed call to connect (with timeout). Throwing IOException.");
throw new IOException();
}
// Ignore calls to methods that would otherwise allow the SSL socket
// to directly manipulate the underlying TCP socket
public void close() throws IOException {
if (logger.isLoggable(Level.FINER))
logger.finer(logContext + " Ignoring close");
}
public void setReceiveBufferSize(int size) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setReceiveBufferSize size:" + size);
}
public void setSendBufferSize(int size) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setSendBufferSize size:" + size);
}
public void setReuseAddress(boolean on) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setReuseAddress");
}
public void setSoLinger(boolean on,
int linger) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setSoLinger");
}
public void setSoTimeout(int timeout) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setSoTimeout");
}
public void setTcpNoDelay(boolean on) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setTcpNoDelay");
}
public void setTrafficClass(int tc) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setTrafficClass");
}
public void shutdownInput() throws IOException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring shutdownInput");
}
public void shutdownOutput() throws IOException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring shutdownOutput");
}
public void sendUrgentData(int data) throws IOException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring sendUrgentData");
}
public void setKeepAlive(boolean on) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setKeepAlive");
}
public void setOOBInline(boolean on) throws SocketException {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Ignoring setOOBInline");
}
}
/**
* This class implements an X509TrustManager that always accepts the X509Certificate chain offered to it.
*
* A PermissiveX509TrustManager is used to "verify" the authenticity of the server when the trustServerCertificate connection property is set to
* true.
*/
private final class PermissiveX509TrustManager extends Object implements X509TrustManager {
private final TDSChannel tdsChannel;
private final Logger logger;
private final String logContext;
PermissiveX509TrustManager(TDSChannel tdsChannel) {
this.tdsChannel = tdsChannel;
this.logger = tdsChannel.getLogger();
this.logContext = tdsChannel.toString() + " (PermissiveX509TrustManager):";
}
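// This permissive trust manager is installed by enableSSL when only the login sequence is
// encrypted (ENCRYPT_OFF) or when trustServerCertificate is true; in both cases the server
// certificate chain is accepted without validation.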
public void checkClientTrusted(X509Certificate[] chain,
String authType) throws CertificateException {
if (logger.isLoggable(Level.FINER))
logger.finer(logContext + " Trusting client certificate (!)");
}
public void checkServerTrusted(X509Certificate[] chain,
String authType) throws CertificateException {
if (logger.isLoggable(Level.FINER))
logger.finer(logContext + " Trusting server certificate");
}
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
}
/**
* This class implements an X509TrustManager that performs host name validation.
*
* It validates the subject name (and subject alternative DNS names) in the server certificate against the expected host name
*/
private final class HostNameOverrideX509TrustManager extends Object implements X509TrustManager {
private final Logger logger;
private final String logContext;
private final X509TrustManager defaultTrustManager;
private String hostName;
HostNameOverrideX509TrustManager(TDSChannel tdsChannel,
X509TrustManager tm,
String hostName) {
this.logger = tdsChannel.getLogger();
this.logContext = tdsChannel.toString() + " (HostNameOverrideX509TrustManager):";
defaultTrustManager = tm;
// canonical name is in lower case so convert this to lowercase too.
this.hostName = hostName.toLowerCase();
}
// Parse name in RFC 2253 format
// Returns the common name if successful, null if failed to find the common name.
// The parser is tuned to err on the side of caution: if it sees something it cannot parse correctly, it returns null
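// Illustrative (hypothetical) inputs: for "cn=myserver.example.com,ou=eng,o=contoso" this returns
// "myserver.example.com"; for a DN with no "cn=" it returns null; a CN that starts with a quote
// but does not end with one also yields null, so validation fails safe.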
private String parseCommonName(String distinguishedName) {
int index;
// canonical name converts entire name to lowercase
index = distinguishedName.indexOf("cn=");
if (index == -1) {
return null;
}
distinguishedName = distinguishedName.substring(index + 3);
// Parse until a comma or end is reached
// Note: the parser handles a ',' inside quotes gracefully (the common name will simply fail to match, e.g. cn="Foo, bar").
// RFC 952 says that a host name cannot contain commas, but the parser should not (and will not) crash if it sees a ',' within quotes.
for (index = 0; index < distinguishedName.length(); index++) {
if (distinguishedName.charAt(index) == ',') {
break;
}
}
String commonName = distinguishedName.substring(0, index);
// strip any quotes
if (commonName.length() > 1 && ('\"' == commonName.charAt(0))) {
if ('\"' == commonName.charAt(commonName.length() - 1))
commonName = commonName.substring(1, commonName.length() - 1);
else {
// Play it safe: the name does not end with a closing quote, so return null and the common name will not match
commonName = null;
}
}
return commonName;
}
private boolean validateServerName(String nameInCert) throws CertificateException {
// Failed to get the common name from DN or empty CN
if (null == nameInCert) {
if (logger.isLoggable(Level.FINER))
logger.finer(logContext + " Failed to parse the name from the certificate or name is empty.");
return false;
}
// Verify that the name in certificate matches exactly with the host name
if (!nameInCert.equals(hostName)) {
if (logger.isLoggable(Level.FINER))
logger.finer(logContext + " The name in certificate " + nameInCert + " does not match with the server name " + hostName + ".");
return false;
}
if (logger.isLoggable(Level.FINER))
logger.finer(logContext + " The name in certificate:" + nameInCert + " validated against server name " + hostName + ".");
return true;
}
public void checkClientTrusted(X509Certificate[] chain,
String authType) throws CertificateException {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Forwarding ClientTrusted.");
defaultTrustManager.checkClientTrusted(chain, authType);
}
public void checkServerTrusted(X509Certificate[] chain,
String authType) throws CertificateException {
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " Forwarding Trusting server certificate");
defaultTrustManager.checkServerTrusted(chain, authType);
if (logger.isLoggable(Level.FINEST))
logger.finest(logContext + " default serverTrusted succeeded proceeding with server name validation");
validateServerNameInCertificate(chain[0]);
}
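// Validation order used below: the certificate subject CN is checked first; if it does not match,
// each subjectAlternativeName entry of type 2 (dNSName) is checked until one matches or the list
// is exhausted, at which point R_certNameFailed is raised.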
private void validateServerNameInCertificate(X509Certificate cert) throws CertificateException {
String nameInCertDN = cert.getSubjectX500Principal().getName("canonical");
if (logger.isLoggable(Level.FINER)) {
logger.finer(logContext + " Validating the server name:" + hostName);
logger.finer(logContext + " The DN name in certificate:" + nameInCertDN);
}
boolean isServerNameValidated;
// the name in cert is in RFC2253 format parse it to get the actual subject name
String subjectCN = parseCommonName(nameInCertDN);
isServerNameValidated = validateServerName(subjectCN);
if (!isServerNameValidated) {
Collection<List<?>> sanCollection = cert.getSubjectAlternativeNames();
if (sanCollection != null) {
// find a subjectAlternateName entry corresponding to DNS Name
for (List<?> sanEntry : sanCollection) {
if (sanEntry != null && sanEntry.size() >= 2) {
Object key = sanEntry.get(0);
Object value = sanEntry.get(1);
if (logger.isLoggable(Level.FINER)) {
logger.finer(logContext + "Key: " + key + "; KeyClass:" + (key != null ? key.getClass() : null) + ";value: " + value
+ "; valueClass:" + (value != null ? value.getClass() : null));
}
// "Note that the Collection returned may contain
// more than one name of the same type."
// So, more than one entry of dnsNameType can be present.
// Java docs guarantee that the first entry in the list will be an integer.
// 2 is the sequence no of a dnsName
if ((key != null) && (key instanceof Integer) && ((Integer) key == 2)) {
// As per RFC2459, the DNSName will be in the
// "preferred name syntax" as specified by RFC
// 1034 and the name can be in upper or lower case.
// And no significance is attached to case.
// Java docs guarantee that the second entry in the list
// will be a string for dnsName
if (value != null && value instanceof String) {
String dnsNameInSANCert = (String) value;
// convert to upper case and then to lower case in english locale
// to avoid Turkish i issues.
// Note that, this conversion was not necessary for
// cert.getSubjectX500Principal().getName("canonical");
// as the above API already does this by default as per documentation.
dnsNameInSANCert = dnsNameInSANCert.toUpperCase(Locale.US);
dnsNameInSANCert = dnsNameInSANCert.toLowerCase(Locale.US);
isServerNameValidated = validateServerName(dnsNameInSANCert);
if (isServerNameValidated) {
if (logger.isLoggable(Level.FINER)) {
logger.finer(logContext + " found a valid name in certificate: " + dnsNameInSANCert);
}
break;
}
}
if (logger.isLoggable(Level.FINER)) {
logger.finer(logContext + " the following name in certificate does not match the serverName: " + value);
}
}
}
else {
if (logger.isLoggable(Level.FINER)) {
logger.finer(logContext + " found an invalid san entry: " + sanEntry);
}
}
}
}
}
if (!isServerNameValidated) {
String msg = SQLServerException.getErrString("R_certNameFailed");
throw new CertificateException(msg);
}
}
public X509Certificate[] getAcceptedIssuers() {
return defaultTrustManager.getAcceptedIssuers();
}
}
enum SSLHandshakeState {
SSL_HANDSHAKE_NOT_STARTED,
SSL_HANDSHAKE_STARTED,
SSL_HANDSHAKE_COMPLETE
};
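// The handshake state is used in the catch block of enableSSL to distinguish the intermittent
// TLS 1.2 failure (an IOException with a truncated-response message while the handshake was in
// progress), which is surfaced as DRIVER_ERROR_INTERMITTENT_TLS_FAILED so the connection can be retried.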
/**
* Enables SSL Handshake.
*
* @param host
* Server Host Name for SSL Handshake
* @param port
* Server Port for SSL Handshake
* @throws SQLServerException
*/
void enableSSL(String host,
int port) throws SQLServerException {
// If enabling SSL fails, which it can for a number of reasons, the following items
// are used in logging information to the TDS channel logger to help diagnose the problem.
Provider tmfProvider = null; // TrustManagerFactory provider
Provider sslContextProvider = null; // SSLContext provider
Provider ksProvider = null; // KeyStore provider
String tmfDefaultAlgorithm = null; // Default algorithm (typically X.509) used by the TrustManagerFactory
SSLHandshakeState handshakeState = SSLHandshakeState.SSL_HANDSHAKE_NOT_STARTED;
boolean isFips = false;
String trustStoreType = null;
String fipsProvider = null;
// If anything in here fails, terminate the connection and throw an exception
try {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Enabling SSL...");
String trustStoreFileName = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.TRUST_STORE.toString());
String trustStorePassword = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.TRUST_STORE_PASSWORD.toString());
String hostNameInCertificate = con.activeConnectionProperties
.getProperty(SQLServerDriverStringProperty.HOSTNAME_IN_CERTIFICATE.toString());
trustStoreType = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.TRUST_STORE_TYPE.toString());
if(StringUtils.isEmpty(trustStoreType)) {
trustStoreType = SQLServerDriverStringProperty.TRUST_STORE_TYPE.getDefaultValue();
}
fipsProvider = con.activeConnectionProperties.getProperty(SQLServerDriverStringProperty.FIPS_PROVIDER.toString());
isFips = Boolean.valueOf(con.activeConnectionProperties.getProperty(SQLServerDriverBooleanProperty.FIPS.toString()));
if (isFips) {
validateFips(fipsProvider, trustStoreType, trustStoreFileName);
}
byte requestedEncryptionLevel = con.getRequestedEncryptionLevel();
assert TDS.ENCRYPT_OFF == requestedEncryptionLevel || // Login only SSL
TDS.ENCRYPT_ON == requestedEncryptionLevel; // Full SSL
byte negotiatedEncryptionLevel = con.getNegotiatedEncryptionLevel();
assert TDS.ENCRYPT_OFF == negotiatedEncryptionLevel || // Login only SSL
TDS.ENCRYPT_ON == negotiatedEncryptionLevel || // Full SSL
TDS.ENCRYPT_REQ == negotiatedEncryptionLevel; // Full SSL
// If we requested login only SSL or full SSL without server certificate validation,
// then we'll "validate" the server certificate using a naive TrustManager that trusts
// everything it sees.
TrustManager[] tm = null;
if (TDS.ENCRYPT_OFF == con.getRequestedEncryptionLevel()
|| (TDS.ENCRYPT_ON == con.getRequestedEncryptionLevel() && con.trustServerCertificate())) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " SSL handshake will trust any certificate");
tm = new TrustManager[] {new PermissiveX509TrustManager(this)};
}
// Otherwise, we'll validate the certificate using a real TrustManager obtained
// from a security provider that is capable of validating X.509 certificates.
else {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " SSL handshake will validate server certificate");
KeyStore ks = null;
// If we are using the system default trustStore and trustStorePassword
// then we can skip all of the KeyStore loading logic below.
// The security provider's implementation takes care of everything for us.
if (null == trustStoreFileName && null == trustStorePassword) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Using system default trust store and password");
}
// Otherwise either the trustStore, trustStorePassword, or both was specified.
// In that case, we need to load up a KeyStore ourselves.
else {
// First, obtain an interface to a KeyStore that can load trust material
// stored in Java Key Store (JKS) format.
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Finding key store interface");
if (isFips) {
ks = KeyStore.getInstance(trustStoreType, fipsProvider);
}
else {
ks = KeyStore.getInstance(trustStoreType);
}
ksProvider = ks.getProvider();
// Next, load up the trust store file from the specified location.
// Note: This function returns a null InputStream if the trust store cannot
// be loaded. This is by design. See the method comment and documentation
// for KeyStore.load for details.
InputStream is = loadTrustStore(trustStoreFileName);
// Finally, load the KeyStore with the trust material (if any) from the
// InputStream and close the stream.
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Loading key store");
try {
ks.load(is, (null == trustStorePassword) ? null : trustStorePassword.toCharArray());
}
finally {
// We are done with the trustStorePassword (if set). Clear it for better security.
con.activeConnectionProperties.remove(SQLServerDriverStringProperty.TRUST_STORE_PASSWORD.toString());
// We are also done with the trust store input stream.
if (null != is) {
try {
is.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.fine(toString() + " Ignoring error closing trust material InputStream...");
}
}
}
}
// Either we now have a KeyStore populated with trust material or we are using the
// default source of trust material (cacerts). Either way, we are now ready to
// use a TrustManagerFactory to create a TrustManager that uses the trust material
// to validate the server certificate.
// Next step is to get a TrustManagerFactory that can produce TrustManagers
// that understand X.509 certificates.
TrustManagerFactory tmf = null;
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Locating X.509 trust manager factory");
tmfDefaultAlgorithm = TrustManagerFactory.getDefaultAlgorithm();
tmf = TrustManagerFactory.getInstance(tmfDefaultAlgorithm);
tmfProvider = tmf.getProvider();
// Tell the TrustManagerFactory to give us TrustManagers that we can use to
// validate the server certificate using the trust material in the KeyStore.
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Getting trust manager");
tmf.init(ks);
tm = tmf.getTrustManagers();
// If hostNameInCertificate was provided, validate the certificate against it; otherwise validate against the host name. Only do this when FIPS is not enabled.
if (!isFips) {
if (null != hostNameInCertificate) {
tm = new TrustManager[] {new HostNameOverrideX509TrustManager(this, (X509TrustManager) tm[0], hostNameInCertificate)};
}
else {
tm = new TrustManager[] {new HostNameOverrideX509TrustManager(this, (X509TrustManager) tm[0], host)};
}
}
} // end if (!con.trustServerCertificate())
// Now, with a real or fake TrustManager in hand, get a context for creating
// SSL sockets through an SSL socket factory. We require at least TLS support.
SSLContext sslContext = null;
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Getting TLS or better SSL context");
sslContext = SSLContext.getInstance("TLS");
sslContextProvider = sslContext.getProvider();
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Initializing SSL context");
sslContext.init(null, tm, null);
// Got the SSL context. Now create an SSL socket over our own proxy socket
// which we can toggle between TDS-encapsulated and raw communications.
// Initially, the proxy is set to encapsulate the SSL handshake in TDS packets.
proxySocket = new ProxySocket(this);
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Creating SSL socket");
sslSocket = (SSLSocket) sslContext.getSocketFactory().createSocket(proxySocket, host, port, false); // don't close proxy when SSL socket
// is closed
// At long last, start the SSL handshake ...
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Starting SSL handshake");
// TLS 1.2 intermittent exception happens here.
handshakeState = SSLHandshakeState.SSL_HANDSHAKE_STARTED;
sslSocket.startHandshake();
handshakeState = SSLHandshakeState.SSL_HANDSHAKE_COMPLETE;
// After SSL handshake is complete, rewire proxy socket to use raw TCP/IP streams ...
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Rewiring proxy streams after handshake");
proxySocket.setStreams(inputStream, outputStream);
// ... and rewire TDSChannel to use SSL streams.
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Getting SSL InputStream");
inputStream = sslSocket.getInputStream();
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Getting SSL OutputStream");
outputStream = sslSocket.getOutputStream();
// SSL is now enabled; switch over the channel socket
channelSocket = sslSocket;
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " SSL enabled");
}
catch (Exception e) {
// Log the original exception and its source at FINER level
if (logger.isLoggable(Level.FINER))
logger.log(Level.FINER, e.getMessage(), e);
// If enabling SSL fails, the following information may help diagnose the problem.
// Do not use Level INFO or above which is sent to standard output/error streams.
// This is because, due to an intermittent TLS 1.2 connection issue, we will be retrying the connection and
// do not want to print this message in console.
if (logger.isLoggable(Level.FINER))
logger.log(Level.FINER,
"java.security path: " + JAVA_SECURITY + "\n" + "Security providers: " + Arrays.asList(Security.getProviders()) + "\n"
+ ((null != sslContextProvider) ? ("SSLContext provider info: " + sslContextProvider.getInfo() + "\n"
+ "SSLContext provider services:\n" + sslContextProvider.getServices() + "\n") : "")
+ ((null != tmfProvider) ? ("TrustManagerFactory provider info: " + tmfProvider.getInfo() + "\n") : "")
+ ((null != tmfDefaultAlgorithm) ? ("TrustManagerFactory default algorithm: " + tmfDefaultAlgorithm + "\n") : "")
+ ((null != ksProvider) ? ("KeyStore provider info: " + ksProvider.getInfo() + "\n") : "") + "java.ext.dirs: "
+ System.getProperty("java.ext.dirs"));
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_sslFailed"));
Object[] msgArgs = {e.getMessage()};
// It is important to get the localized message here, otherwise error messages won't match for different locales.
String errMsg = e.getLocalizedMessage();
// The error message may have a connection id appended to it. Extract the message only for comparison.
// This client connection id is appended in method checkAndAppendClientConnId().
if (errMsg.contains(SQLServerException.LOG_CLIENT_CONNECTION_ID_PREFIX)) {
errMsg = errMsg.substring(0, errMsg.indexOf(SQLServerException.LOG_CLIENT_CONNECTION_ID_PREFIX));
}
// Isolate the TLS1.2 intermittent connection error.
if (e instanceof IOException && (SSLHandshakeState.SSL_HANDSHAKE_STARTED == handshakeState)
&& (errMsg.equals(SQLServerException.getErrString("R_truncatedServerResponse")))) {
con.terminate(SQLServerException.DRIVER_ERROR_INTERMITTENT_TLS_FAILED, form.format(msgArgs), e);
}
else {
con.terminate(SQLServerException.DRIVER_ERROR_SSL_FAILED, form.format(msgArgs), e);
}
}
}
/**
* Validates the FIPS settings when the fips connection property is set to true.
*
* Valid FIPS settings:
* <LI>encrypt must be true
* <LI>trustServerCertificate must be false
* <LI>if a trust store is specified, fipsProvider and trustStoreType must also be present.
*
* @param fipsProvider
* FIPS provider name
* @param trustStoreType
* trust store type
* @param trustStoreFileName
* trust store file name
* @throws SQLServerException
* @since 6.1.4
*/
private void validateFips(final String fipsProvider,
final String trustStoreType,
final String trustStoreFileName) throws SQLServerException {
boolean isValid = false;
boolean isEncryptOn;
boolean isValidTrustStoreType;
boolean isValidTrustStore;
boolean isTrustServerCertificate;
boolean isValidFipsProvider;
String strError = SQLServerException.getErrString("R_invalidFipsConfig");
isEncryptOn = (TDS.ENCRYPT_ON == con.getRequestedEncryptionLevel());
// Different FIPS providers support different KeyStore types, and this also varies by JVM implementation.
isValidFipsProvider = !StringUtils.isEmpty(fipsProvider);
isValidTrustStoreType = !StringUtils.isEmpty(trustStoreType);
isValidTrustStore = !StringUtils.isEmpty(trustStoreFileName);
isTrustServerCertificate = con.trustServerCertificate();
if (isEncryptOn && !isTrustServerCertificate) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Found parameters are encrypt is true & trustServerCertificate false");
isValid = true;
if (isValidTrustStore) {
// In case of valid trust store we need to check fipsProvider and TrustStoreType.
if (!isValidFipsProvider || !isValidTrustStoreType) {
isValid = false;
strError = SQLServerException.getErrString("R_invalidFipsProviderConfig");
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " FIPS provider & TrustStoreType should pass with TrustStore.");
}
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Found FIPS parameters seems to be valid.");
}
}
else {
strError = SQLServerException.getErrString("R_invalidFipsEncryptConfig");
}
if (!isValid) {
throw new SQLServerException(strError, null, 0, null);
}
}
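// Illustrative only: using the driver's connection properties, a FIPS-compliant configuration
// would look roughly like the following (the provider name and path are hypothetical):
// encrypt=true;trustServerCertificate=false;fips=true;fipsProvider=<FIPS JSSE provider>;trustStoreType=PKCS12;trustStore=/path/to/truststore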
private final static String SEPARATOR = System.getProperty("file.separator");
private final static String JAVA_HOME = System.getProperty("java.home");
private final static String JAVA_SECURITY = JAVA_HOME + SEPARATOR + "lib" + SEPARATOR + "security";
private final static String JSSECACERTS = JAVA_SECURITY + SEPARATOR + "jssecacerts";
private final static String CACERTS = JAVA_SECURITY + SEPARATOR + "cacerts";
/**
* Loads the contents of a trust store into an InputStream.
*
* When a location to a trust store is specified, this method attempts to load that store. Otherwise, it looks for and attempts to load the
* default trust store using essentially the same logic (outlined in the JSSE Reference Guide) as the default X.509 TrustManagerFactory.
*
* @return an InputStream containing the contents of the loaded trust store, or null if the trust store cannot be loaded.
*
* Note: It is by design that this function returns null when the trust store cannot be loaded rather than throwing an exception. The
* reason is that KeyStore.load, which uses the returned InputStream, interprets a null InputStream to mean that there are no trusted
* certificates, which mirrors the behavior of the default (no trust store, no password specified) path.
*/
final InputStream loadTrustStore(String trustStoreFileName) {
FileInputStream is = null;
// First case: Trust store filename was specified
if (null != trustStoreFileName) {
try {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Opening specified trust store: " + trustStoreFileName);
is = new FileInputStream(trustStoreFileName);
}
catch (FileNotFoundException e) {
if (logger.isLoggable(Level.FINE))
logger.fine(toString() + " Trust store not found: " + e.getMessage());
// If the trustStoreFileName connection property is set, but the file is not found,
// then treat it as if the file was empty so that the TrustManager reports
// that no certificate is found.
}
}
// Second case: Trust store filename derived from javax.net.ssl.trustStore system property
else if (null != (trustStoreFileName = System.getProperty("javax.net.ssl.trustStore"))) {
try {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Opening default trust store (from javax.net.ssl.trustStore): " + trustStoreFileName);
is = new FileInputStream(trustStoreFileName);
}
catch (FileNotFoundException e) {
if (logger.isLoggable(Level.FINE))
logger.fine(toString() + " Trust store not found: " + e.getMessage());
// If the javax.net.ssl.trustStore property is set, but the file is not found,
// then treat it as if the file was empty so that the TrustManager reports
// that no certificate is found.
}
}
// Third case: No trust store specified and no system property set. Use jssecacerts/cacerts.
else {
try {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Opening default trust store: " + JSSECACERTS);
is = new FileInputStream(JSSECACERTS);
}
catch (FileNotFoundException e) {
if (logger.isLoggable(Level.FINE))
logger.fine(toString() + " Trust store not found: " + e.getMessage());
}
// No jssecacerts. Try again with cacerts...
if (null == is) {
try {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Opening default trust store: " + CACERTS);
is = new FileInputStream(CACERTS);
}
catch (FileNotFoundException e) {
if (logger.isLoggable(Level.FINE))
logger.fine(toString() + " Trust store not found: " + e.getMessage());
// No jssecacerts or cacerts. Treat it as if the trust store is empty so that
// the TrustManager reports that no certificate is found.
}
}
}
return is;
}
final int read(byte[] data,
int offset,
int length) throws SQLServerException {
try {
return inputStream.read(data, offset, length);
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.fine(toString() + " read failed:" + e.getMessage());
if (e instanceof SocketTimeoutException) {
con.terminate(SQLServerException.ERROR_SOCKET_TIMEOUT, e.getMessage(), e);
}
else {
con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, e.getMessage(), e);
}
return 0; // Keep the compiler happy.
}
}
final void write(byte[] data,
int offset,
int length) throws SQLServerException {
try {
outputStream.write(data, offset, length);
}
catch (IOException e) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " write failed:" + e.getMessage());
con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, e.getMessage(), e);
}
}
final void flush() throws SQLServerException {
try {
outputStream.flush();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " flush failed:" + e.getMessage());
con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, e.getMessage(), e);
}
}
final void close() {
if (null != sslSocket)
disableSSL();
if (null != inputStream) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Closing inputStream...");
try {
inputStream.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.log(Level.FINE, this.toString() + ": Ignored error closing inputStream", e);
}
}
if (null != outputStream) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Closing outputStream...");
try {
outputStream.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.log(Level.FINE, this.toString() + ": Ignored error closing outputStream", e);
}
}
if (null != tcpSocket) {
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + ": Closing TCP socket...");
try {
tcpSocket.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.log(Level.FINE, this.toString() + ": Ignored error closing socket", e);
}
}
}
/**
* Logs TDS packet data to the com.microsoft.sqlserver.jdbc.TDS.DATA logger
*
* @param data
* the buffer containing the TDS packet payload data to log
* @param nStartOffset
* offset into the above buffer from where to start logging
* @param nLength
* length (in bytes) of payload
* @param messageDetail
* other loggable details about the payload
*/
void logPacket(byte data[],
int nStartOffset,
int nLength,
String messageDetail) {
assert 0 <= nLength && nLength <= data.length;
assert 0 <= nStartOffset && nStartOffset <= data.length;
final char hexChars[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
final char printableChars[] = {'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', ' ', '!', '\"', '#', '$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '<', '=', '>', '?', '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J',
'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '[', '\\', ']', '^', '_', '`', 'a', 'b', 'c', 'd',
'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~', '.',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.'};
// Log message body lines have this form:
// "XX XX XX XX XX XX XX XX XX XX XX XX XX XX XX XX ................"
// 012345678911111111112222222222333333333344444444445555555555666666
// 01234567890123456789012345678901234567890123456789012345
final char lineTemplate[] = {' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ',
' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ',
' ', ' ',
'.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.'};
char logLine[] = new char[lineTemplate.length];
System.arraycopy(lineTemplate, 0, logLine, 0, lineTemplate.length);
// Logging builds up a string buffer for the entire log trace
// before writing it out. So use an initial size large enough
// that the buffer doesn't have to resize itself.
StringBuilder logMsg = new StringBuilder(messageDetail.length() + // Message detail
4 * nLength + // 2-digit hex + space + ASCII, per byte
4 * (1 + nLength / 16) + // 2 extra spaces + CR/LF, per line (16 bytes per line)
80); // Extra fluff: IP:Port, Connection #, SPID, ...
// Format the headline like so:
// /157.55.121.182:2983 Connection 1, SPID 53, Message info here ...
// Note: the log formatter itself timestamps what we write so we don't have
// to do it again here.
logMsg.append(tcpSocket.getLocalAddress().toString() + ":" + tcpSocket.getLocalPort() + " SPID:" + spid + " " + messageDetail + "\r\n");
// Fill in the body of the log message, line by line, 16 bytes per line.
int nBytesLogged = 0;
int nBytesThisLine;
while (true) {
// Fill up the line with as many bytes as we can (up to 16 bytes)
for (nBytesThisLine = 0; nBytesThisLine < 16 && nBytesLogged < nLength; nBytesThisLine++, nBytesLogged++) {
int nUnsignedByteVal = (data[nStartOffset + nBytesLogged] + 256) % 256;
logLine[3 * nBytesThisLine] = hexChars[nUnsignedByteVal / 16];
logLine[3 * nBytesThisLine + 1] = hexChars[nUnsignedByteVal % 16];
logLine[50 + nBytesThisLine] = printableChars[nUnsignedByteVal];
}
// Pad out the remainder with whitespace
for (int nBytesJustified = nBytesThisLine; nBytesJustified < 16; nBytesJustified++) {
logLine[3 * nBytesJustified] = ' ';
logLine[3 * nBytesJustified + 1] = ' ';
}
logMsg.append(logLine, 0, 50 + nBytesThisLine);
if (nBytesLogged == nLength)
break;
logMsg.append("\r\n");
}
packetLogger.finest(logMsg.toString());
}
/**
* Get the current socket SO_TIMEOUT value.
*
* @return the current socket timeout value
* @throws IOException thrown if the socket timeout cannot be read
*/
final int getNetworkTimeout() throws IOException {
return tcpSocket.getSoTimeout();
}
/**
* Set the socket SO_TIMEOUT value.
*
* @param timeout the socket timeout in milliseconds
* @throws IOException thrown if the socket timeout cannot be set
*/
final void setNetworkTimeout(int timeout) throws IOException {
tcpSocket.setSoTimeout(timeout);
}
}
/**
* SocketFinder is used to find a server socket to which a connection can be made. This class abstracts the logic of finding a socket from TDSChannel
* class.
*
* In the case when useParallel is set to true, this is achieved by trying to make parallel connections to multiple IP addresses. This class is
* responsible for spawning multiple threads and keeping track of the search result and the connected socket or exception to be thrown.
*
* In the case where multiSubnetFailover is false, we try our old logic of trying to connect to the first ip address
*
* Typical usage of this class is: SocketFinder sf = new SocketFinder(traceId, conn); Socket socket = sf.findSocket(hostName, port, timeout, ...);
*/
final class SocketFinder {
/**
* Indicates the result of a search
*/
enum Result {
UNKNOWN,// search is still in progress
SUCCESS,// found a socket
FAILURE// failed in finding a socket
}
// Thread pool - the values in the constructor are chosen based on the
// explanation given in design_connection_director_multisubnet.doc
private static final ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 5, TimeUnit.SECONDS,
new SynchronousQueue<Runnable>());
// When parallel connections are to be used, use minimum timeout slice of 1500 milliseconds.
private static final int minTimeoutForParallelConnections = 1500;
// lock used for synchronization while updating
// data within a socketFinder object
private final Object socketFinderlock = new Object();
// lock on which the parent thread would wait
// after spawning threads.
private final Object parentThreadLock = new Object();
// indicates whether the socketFinder has succeeded or failed
// in finding a socket or is still trying to find a socket
private volatile Result result = Result.UNKNOWN;
// total no of socket connector threads
// spawned by a socketFinder object
private int noOfSpawnedThreads = 0;
// no of threads that finished their socket connection
// attempts and notified socketFinder about their result
private volatile int noOfThreadsThatNotified = 0;
// If a valid connected socket is found, this value would be non-null,
// else this would be null
private volatile Socket selectedSocket = null;
// This would be one of the exceptions returned by the
// socketConnector threads
private volatile IOException selectedException = null;
// Logging variables
private static final Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.SocketFinder");
private final String traceID;
// maximum number of IP Addresses supported
private static final int ipAddressLimit = 64;
// necessary for raising exceptions so that the connection pool can be notified
private final SQLServerConnection conn;
/**
* Constructs a new SocketFinder object with appropriate traceId
*
* @param callerTraceID
* traceID of the caller
* @param sqlServerConnection
* the SQLServer connection
*/
SocketFinder(String callerTraceID,
SQLServerConnection sqlServerConnection) {
traceID = "SocketFinder(" + callerTraceID + ")";
conn = sqlServerConnection;
}
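// Overview of the strategy below: when useParallel is false and TNIR is not in play, a single
// connection attempt is made to the host name (TNIR's first attempt uses a short fixed timeout).
// Otherwise every resolved address is tried: on IBM JVMs one NIO selector races all of them;
// on other JVMs IPv4 addresses are raced via NIO first and, if that fails, IPv6 addresses are
// tried (with threads when there is more than one), with the timeout split between the two
// address families when both are present.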
/**
* Used to find a socket to which a connection can be made
*
* @param hostName
* @param portNumber
* @param timeoutInMilliSeconds
* @param useParallel
* @param useTnir
* @param isTnirFirstAttempt
* @param timeoutInMilliSecondsForFullTimeout
* @return connected socket
* @throws SQLServerException
*/
Socket findSocket(String hostName,
int portNumber,
int timeoutInMilliSeconds,
boolean useParallel,
boolean useTnir,
boolean isTnirFirstAttempt,
int timeoutInMilliSecondsForFullTimeout) throws SQLServerException {
assert timeoutInMilliSeconds != 0 : "The driver does not allow a time out of 0";
try {
InetAddress[] inetAddrs = null;
// inetAddrs is only used if useParallel is true or TNIR is true. Skip resolving address if that's not the case.
if (useParallel || useTnir) {
// Ignore TNIR if host resolves to more than 64 IPs. Make sure we are using original timeout for this.
inetAddrs = InetAddress.getAllByName(hostName);
if ((useTnir) && (inetAddrs.length > ipAddressLimit)) {
useTnir = false;
timeoutInMilliSeconds = timeoutInMilliSecondsForFullTimeout;
}
}
if (!useParallel) {
// MSF is false. TNIR could be true or false. DBMirroring could be true or false.
// For TNIR first attempt, we should do existing behavior including how host name is resolved.
if (useTnir && isTnirFirstAttempt) {
return getDefaultSocket(hostName, portNumber, SQLServerConnection.TnirFirstAttemptTimeoutMs);
}
else if (!useTnir) {
return getDefaultSocket(hostName, portNumber, timeoutInMilliSeconds);
}
}
// Code reaches here only if MSF = true or (TNIR = true and not TNIR first attempt)
if (logger.isLoggable(Level.FINER)) {
String loggingString = this.toString() + " Total no of InetAddresses: " + inetAddrs.length + ". They are: ";
for (InetAddress inetAddr : inetAddrs) {
loggingString = loggingString + inetAddr.toString() + ";";
}
logger.finer(loggingString);
}
if (inetAddrs.length > ipAddressLimit) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_ipAddressLimitWithMultiSubnetFailover"));
Object[] msgArgs = {Integer.toString(ipAddressLimit)};
String errorStr = form.format(msgArgs);
// we do not want any retry to happen here. So, terminate the connection
// as the config is unsupported.
conn.terminate(SQLServerException.DRIVER_ERROR_UNSUPPORTED_CONFIG, errorStr);
}
if (Util.isIBM()) {
timeoutInMilliSeconds = Math.max(timeoutInMilliSeconds, minTimeoutForParallelConnections);
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + "Using Java NIO with timeout:" + timeoutInMilliSeconds);
}
findSocketUsingJavaNIO(inetAddrs, portNumber, timeoutInMilliSeconds);
}
else {
LinkedList<Inet4Address> inet4Addrs = new LinkedList<Inet4Address>();
LinkedList<Inet6Address> inet6Addrs = new LinkedList<Inet6Address>();
for (InetAddress inetAddr : inetAddrs) {
if (inetAddr instanceof Inet4Address) {
inet4Addrs.add((Inet4Address) inetAddr);
}
else {
assert inetAddr instanceof Inet6Address : "Unexpected IP address " + inetAddr.toString();
inet6Addrs.add((Inet6Address) inetAddr);
}
}
// use half timeout only if both IPv4 and IPv6 addresses are present
int timeoutForEachIPAddressType;
if ((!inet4Addrs.isEmpty()) && (!inet6Addrs.isEmpty())) {
timeoutForEachIPAddressType = Math.max(timeoutInMilliSeconds / 2, minTimeoutForParallelConnections);
}
else
timeoutForEachIPAddressType = Math.max(timeoutInMilliSeconds, minTimeoutForParallelConnections);
if (!inet4Addrs.isEmpty()) {
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + "Using Java NIO with timeout:" + timeoutForEachIPAddressType);
}
// inet4Addrs.toArray(new InetAddress[0]) is the Java idiom for converting a linked list to an array of the required size
findSocketUsingJavaNIO(inet4Addrs.toArray(new InetAddress[0]), portNumber, timeoutForEachIPAddressType);
}
if (!result.equals(Result.SUCCESS)) {
// try threading logic
if (!inet6Addrs.isEmpty()) {
// do not start any threads if there is only one ipv6 address
if (inet6Addrs.size() == 1) {
return getConnectedSocket(inet6Addrs.get(0), portNumber, timeoutForEachIPAddressType);
}
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + "Using Threading with timeout:" + timeoutForEachIPAddressType);
}
findSocketUsingThreading(inet6Addrs, portNumber, timeoutForEachIPAddressType);
}
}
}
// If the thread continued execution due to timeout, the result may not be known.
// In that case, update the result to failure. Note that this case is possible
// for both IPv4 and IPv6.
// Using double-checked locking for performance reasons.
if (result.equals(Result.UNKNOWN)) {
synchronized (socketFinderlock) {
if (result.equals(Result.UNKNOWN)) {
result = Result.FAILURE;
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + " The parent thread updated the result to failure");
}
}
}
}
// After we reach this point, there is no need for synchronization any more.
// Because, the result would be known(success/failure).
// And no threads would update SocketFinder
// as their function calls would now be no-ops.
if (result.equals(Result.FAILURE)) {
if (selectedException == null) {
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString()
+ " There is no selectedException. The wait calls timed out before any connect call returned or timed out.");
}
String message = SQLServerException.getErrString("R_connectionTimedOut");
selectedException = new IOException(message);
}
throw selectedException;
}
}
catch (InterruptedException ex) {
close(selectedSocket);
SQLServerException.ConvertConnectExceptionToSQLServerException(hostName, portNumber, conn, ex);
}
catch (IOException ex) {
close(selectedSocket);
// The code below has been moved from connectHelper.
// If we do not move it, the functions open(caller of findSocket)
// and findSocket will have to
// declare both IOException and SQLServerException in the throws clause
// as we throw custom SQLServerExceptions(eg:IPAddressLimit, wrapping other exceptions
// like interruptedException) in findSocket.
// That would be a bit awkward, because connecthelper(the caller of open)
// just wraps IOException into SQLServerException and throws SQLServerException.
// Instead, it would be good to wrap all exceptions at one place - Right here, their origin.
SQLServerException.ConvertConnectExceptionToSQLServerException(hostName, portNumber, conn, ex);
}
assert result.equals(Result.SUCCESS);
assert selectedSocket != null : "Bug in code. Selected Socket cannot be null here.";
return selectedSocket;
}
/**
* This function uses Java NIO to connect to all the addresses in inetAddrs within a specified timeout. If it succeeds in connecting, it closes
* all the other open sockets and updates the result to success.
*
* @param inetAddrs
* the array of inetAddress to which connection should be made
* @param portNumber
* the port number at which connection should be made
* @param timeoutInMilliSeconds
* @throws IOException
*/
private void findSocketUsingJavaNIO(InetAddress[] inetAddrs,
int portNumber,
int timeoutInMilliSeconds) throws IOException {
// The driver does not allow a time out of zero.
// Also, the unit of time the user can specify in the driver is seconds.
// So, even if the user specifies 1 second(least value), the least possible
// value that can come here as timeoutInMilliSeconds is 500 milliseconds.
assert timeoutInMilliSeconds != 0 : "The timeout cannot be zero";
assert inetAddrs.length != 0 : "Number of inetAddresses should not be zero in this function";
Selector selector = null;
LinkedList<SocketChannel> socketChannels = new LinkedList<SocketChannel>();
SocketChannel selectedChannel = null;
try {
selector = Selector.open();
for (int i = 0; i < inetAddrs.length; i++) {
SocketChannel sChannel = SocketChannel.open();
socketChannels.add(sChannel);
// make the channel non-blocking
sChannel.configureBlocking(false);
// register the channel for connect event
int ops = SelectionKey.OP_CONNECT;
SelectionKey key = sChannel.register(selector, ops);
sChannel.connect(new InetSocketAddress(inetAddrs[i], portNumber));
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + " initiated connection to address: " + inetAddrs[i] + ", portNumber: " + portNumber);
}
long timerNow = System.currentTimeMillis();
long timerExpire = timerNow + timeoutInMilliSeconds;
// Denotes the number of channels that still need to be processed
int noOfOutstandingChannels = inetAddrs.length;
while (true) {
long timeRemaining = timerExpire - timerNow;
// if the timeout expired, or a channel is selected, or there are no more channels left to process
if ((timeRemaining <= 0) || (selectedChannel != null) || (noOfOutstandingChannels <= 0))
break;
// denotes the no of channels that are ready to be processed. i.e. they are either connected
// or encountered an exception while trying to connect
int readyChannels = selector.select(timeRemaining);
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + " no of channels ready: " + readyChannels);
// There are no real time guarantees on the time out of the select API used above.
// This check is necessary
// a) to guard against cases where the select returns faster than expected.
// b) for cases where no channels could connect within the timeout
if (readyChannels != 0) {
Set<SelectionKey> selectedKeys = selector.selectedKeys();
Iterator<SelectionKey> keyIterator = selectedKeys.iterator();
while (keyIterator.hasNext()) {
SelectionKey key = keyIterator.next();
SocketChannel ch = (SocketChannel) key.channel();
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + " processing the channel :" + ch);// this traces the IP by default
boolean connected = false;
try {
connected = ch.finishConnect();
// ch.finishConnect should either return true or throw an exception
// as we have subscribed for OP_CONNECT.
assert connected == true : "finishConnect on channel:" + ch + " cannot be false";
selectedChannel = ch;
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + " selected the channel :" + selectedChannel);
break;
}
catch (IOException ex) {
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + " the exception: " + ex.getClass() + " with message: " + ex.getMessage()
+ " occured while processing the channel: " + ch);
updateSelectedException(ex, this.toString());
// close the channel pro-actively so that we do not
// hang on to network resources
ch.close();
}
// unregister the key and remove from the selector's selectedKeys
key.cancel();
keyIterator.remove();
noOfOutstandingChannels--;
}
}
timerNow = System.currentTimeMillis();
}
}
catch (IOException ex) {
// in case of an exception, close the selected channel.
// All other channels will be closed in the finally block,
// as they need to be closed irrespective of a success/failure
close(selectedChannel);
throw ex;
}
finally {
// close the selector
// As per java docs, on selector.close(), any uncancelled keys still
// associated with this
// selector are invalidated, their channels are deregistered, and any other
// resources associated with this selector are released.
// So, it's not necessary to cancel each key again.
close(selector);
// Close all channels except the selected one.
// As we close channels pro-actively in the try block,
// it's possible that we close a channel twice.
// Closing a channel a second time is a no-op.
// This code should be in the finally block to guard against cases where
// we prematurely exit the try block due to an exception in the selector or elsewhere.
for (SocketChannel s : socketChannels) {
if (s != selectedChannel) {
close(s);
}
}
}
// if a channel was selected, make the necessary updates
if (selectedChannel != null) {
//the selectedChannel has the address that is connected successfully
//convert it to a java.net.Socket object with the address
SocketAddress iadd = selectedChannel.getRemoteAddress();
selectedSocket = new Socket();
selectedSocket.connect(iadd);
result = Result.SUCCESS;
//close the channel since it is not used anymore
selectedChannel.close();
}
}
// This method contains the old logic of connecting to
// a socket of one of the IPs corresponding to a given host name.
// In the old code below, the logic around 0 timeout has been removed as
// 0 timeout is not allowed. The code has been re-factored so that the logic
// is common for hostName or InetAddress.
private Socket getDefaultSocket(String hostName,
int portNumber,
int timeoutInMilliSeconds) throws IOException {
// Open the socket, with or without a timeout, throwing an UnknownHostException
// if there is a failure to resolve the host name to an InetSocketAddress.
// Note that Socket(host, port) throws an UnknownHostException if the host name
// cannot be resolved, but that InetSocketAddress(host, port) does not - it sets
// the returned InetSocketAddress as unresolved.
InetSocketAddress addr = new InetSocketAddress(hostName, portNumber);
return getConnectedSocket(addr, timeoutInMilliSeconds);
}
private Socket getConnectedSocket(InetAddress inetAddr,
int portNumber,
int timeoutInMilliSeconds) throws IOException {
InetSocketAddress addr = new InetSocketAddress(inetAddr, portNumber);
return getConnectedSocket(addr, timeoutInMilliSeconds);
}
private Socket getConnectedSocket(InetSocketAddress addr,
int timeoutInMilliSeconds) throws IOException {
assert timeoutInMilliSeconds != 0 : "timeout cannot be zero";
if (addr.isUnresolved())
throw new java.net.UnknownHostException();
selectedSocket = new Socket();
selectedSocket.connect(addr, timeoutInMilliSeconds);
return selectedSocket;
}
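/**
 * Tries to connect to all the given IPv6 addresses in parallel by submitting one SocketConnector per address to the thread pool. The parent
 * thread then waits on parentThreadLock until a connection succeeds, all attempts fail, or the timeout expires. Every socket other than the
 * selected one is closed in the finally block so that pending connect calls in child threads fail fast.
 *
 * @param inetAddrs
 * the list of IPv6 addresses to which connection should be made
 * @param portNumber
 * the port number at which connection should be made
 * @param timeoutInMilliSeconds
 * the timeout in milliseconds for the connection attempts
 * @throws IOException
 * @throws InterruptedException
 */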
private void findSocketUsingThreading(LinkedList<Inet6Address> inetAddrs,
int portNumber,
int timeoutInMilliSeconds) throws IOException, InterruptedException {
assert timeoutInMilliSeconds != 0 : "The timeout cannot be zero";
assert inetAddrs.isEmpty() == false : "Number of inetAddresses should not be zero in this function";
LinkedList<Socket> sockets = new LinkedList<Socket>();
LinkedList<SocketConnector> socketConnectors = new LinkedList<SocketConnector>();
try {
// create a socket, inetSocketAddress and a corresponding socketConnector per inetAddress
noOfSpawnedThreads = inetAddrs.size();
for (InetAddress inetAddress : inetAddrs) {
Socket s = new Socket();
sockets.add(s);
InetSocketAddress inetSocketAddress = new InetSocketAddress(inetAddress, portNumber);
SocketConnector socketConnector = new SocketConnector(s, inetSocketAddress, timeoutInMilliSeconds, this);
socketConnectors.add(socketConnector);
}
// acquire parent lock and spawn all threads
synchronized (parentThreadLock) {
for (SocketConnector sc : socketConnectors) {
threadPoolExecutor.execute(sc);
}
long timerNow = System.currentTimeMillis();
long timerExpire = timerNow + timeoutInMilliSeconds;
// The below loop is to guard against the spurious wake up problem
while (true) {
long timeRemaining = timerExpire - timerNow;
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + " TimeRemaining:" + timeRemaining + "; Result:" + result + "; Max. open thread count: "
+ threadPoolExecutor.getLargestPoolSize() + "; Current open thread count:" + threadPoolExecutor.getActiveCount());
}
// if there is no time left or if the result is determined, break.
// Note that a dirty read of result is totally fine here.
// Since this thread holds the parentThreadLock, even if we do a dirty
// read here, the child thread, after updating the result, would not be
// able to call notify on the parentThreadLock
// (and thus finish execution) as it would be waiting on parentThreadLock
// held by this thread(the parent thread).
// So, this thread will wait again and then be notified by the childThread.
// On the other hand, if we try to take socketFinderLock here to avoid
// dirty read, we would introduce a dead lock due to the
// reverse order of locking in updateResult method.
if (timeRemaining <= 0 || (!result.equals(Result.UNKNOWN)))
break;
parentThreadLock.wait(timeRemaining);
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + " The parent thread wokeup.");
}
timerNow = System.currentTimeMillis();
}
}
}
finally {
// Close all sockets except the selected one.
// As we close sockets pro-actively in the child threads,
// it's possible that we close a socket twice.
// Closing a socket a second time is a no-op.
// If a child thread is waiting on the connect call on a socket s,
// closing the socket s here ensures that an exception is thrown
// in the child thread immediately. This mitigates the problem
// of thread explosion by ensuring that unnecessary threads die
// quickly without waiting for "min(timeOut, 21)" seconds
for (Socket s : sockets) {
if (s != selectedSocket) {
close(s);
}
}
}
}
/**
* search result
*/
Result getResult() {
return result;
}
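// Helper methods below close network resources quietly: any IOException raised while closing
// is logged at FINE level and otherwise ignored.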
void close(Selector selector) {
if (null != selector) {
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + ": Closing Selector");
try {
selector.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.log(Level.FINE, this.toString() + ": Ignored the following error while closing Selector", e);
}
}
}
void close(Socket socket) {
if (null != socket) {
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + ": Closing TCP socket:" + socket);
try {
socket.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.log(Level.FINE, this.toString() + ": Ignored the following error while closing socket", e);
}
}
}
void close(SocketChannel socketChannel) {
if (null != socketChannel) {
if (logger.isLoggable(Level.FINER))
logger.finer(this.toString() + ": Closing TCP socket channel:" + socketChannel);
try {
socketChannel.close();
}
catch (IOException e) {
if (logger.isLoggable(Level.FINE))
logger.log(Level.FINE, this.toString() + "Ignored the following error while closing socketChannel", e);
}
}
}
/**
* Used by socketConnector threads to notify the socketFinder of their connection attempt result (a connected socket or exception). It updates the
* result, socket and exception variables of socketFinder object. This method notifies the parent thread if a socket is found or if all the
* spawned threads have notified. It also closes a socket if it is not selected for use by socketFinder.
*
* @param socket
* the SocketConnector's socket
* @param exception
* Exception that occurred in socket connector thread
* @param threadId
* Id of the calling Thread for diagnosis
*/
void updateResult(Socket socket,
IOException exception,
String threadId) {
if (result.equals(Result.UNKNOWN)) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("The following child thread is waiting for socketFinderLock:" + threadId);
}
synchronized (socketFinderlock) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("The following child thread acquired socketFinderLock:" + threadId);
}
if (result.equals(Result.UNKNOWN)) {
// if the connection was successful and no socket has been
// selected yet
if (exception == null && selectedSocket == null) {
selectedSocket = socket;
result = Result.SUCCESS;
if (logger.isLoggable(Level.FINER)) {
logger.finer("The socket of the following thread has been chosen:" + threadId);
}
}
// if an exception occurred
if (exception != null) {
updateSelectedException(exception, threadId);
}
}
noOfThreadsThatNotified++;
// if all threads notified, but the result is still unknown,
// update the result to failure
if ((noOfThreadsThatNotified >= noOfSpawnedThreads) && result.equals(Result.UNKNOWN)) {
result = Result.FAILURE;
}
if (!result.equals(Result.UNKNOWN)) {
// 1) Note that at any point of time, there is only one
// thread(parent/child thread) competing for parentThreadLock.
// 2) The only time where a child thread could be waiting on
// parentThreadLock is before the wait call in the parentThread
// 3) After the above happens, the parent thread waits to be
// notified on parentThreadLock. After being notified,
// it would be the ONLY thread competing for the lock.
// for the following reasons
// a) The parentThreadLock is taken while holding the socketFinderLock.
// So, all child threads, except one, block on socketFinderLock
// (not parentThreadLock)
// b) After parentThreadLock is notified by a child thread, the result
// would be known(Refer the double-checked locking done at the
// start of this method). So, all child threads would exit
// as no-ops and would never compete with parent thread
// for acquiring parentThreadLock
// 4) As the parent thread is the only thread that competes for the
// parentThreadLock, it need not wait to acquire the lock once it wakes
// up and gets scheduled.
// This results in better performance as it would close unnecessary
// sockets and thus help child threads die quickly.
if (logger.isLoggable(Level.FINER)) {
logger.finer("The following child thread is waiting for parentThreadLock:" + threadId);
}
synchronized (parentThreadLock) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("The following child thread acquired parentThreadLock:" + threadId);
}
parentThreadLock.notify();
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("The following child thread released parentThreadLock and notified the parent thread:" + threadId);
}
}
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("The following child thread released socketFinderLock:" + threadId);
}
}
}
/**
* Updates the selectedException if
* <p>
* a) selectedException is null
* <p>
* b) ex is a non-socketTimeoutException and selectedException is a socketTimeoutException
* <p>
* If there are multiple exceptions that are not related to socketTimeout, the first non-socketTimeout exception is picked. If all exceptions are
* related to socketTimeout, the first exception is picked. Note: This method is not thread safe. The caller should ensure thread safety.
*
* @param ex
* the IOException
* @param traceId
* the traceId of the thread
*/
public void updateSelectedException(IOException ex,
String traceId) {
boolean updatedException = false;
if (selectedException == null) {
selectedException = ex;
updatedException = true;
}
else if ((!(ex instanceof SocketTimeoutException)) && (selectedException instanceof SocketTimeoutException)) {
selectedException = ex;
updatedException = true;
}
if (updatedException) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("The selected exception is updated to the following: ExceptionType:" + ex.getClass() + "; ExceptionMessage:"
+ ex.getMessage() + "; by the following thread:" + traceId);
}
}
}
/**
* Used for tracing
*
* @return traceID string
*/
public String toString() {
return traceID;
}
}
/**
* This is used to connect a socket in a separate thread
*/
final class SocketConnector implements Runnable {
// socket on which connection attempt would be made
private final Socket socket;
// the socketFinder associated with this connector
private final SocketFinder socketFinder;
// inetSocketAddress to connect to
private final InetSocketAddress inetSocketAddress;
// timeout in milliseconds
private final int timeoutInMilliseconds;
// Logging variables
private static final Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.SocketConnector");
private final String traceID;
// Id of the thread. used for diagnosis
private final String threadID;
// a counter used to give unique IDs to each connector thread.
// this will have the id of the thread that was last created.
private static long lastThreadID = 0;
/**
* Constructs a new SocketConnector object with the associated socket and socketFinder
*/
SocketConnector(Socket socket,
InetSocketAddress inetSocketAddress,
int timeOutInMilliSeconds,
SocketFinder socketFinder) {
this.socket = socket;
this.inetSocketAddress = inetSocketAddress;
this.timeoutInMilliseconds = timeOutInMilliSeconds;
this.socketFinder = socketFinder;
this.threadID = Long.toString(nextThreadID());
this.traceID = "SocketConnector:" + this.threadID + "(" + socketFinder.toString() + ")";
}
/**
* If the search for a socket has not finished, this function tries to connect a socket (with a timeout) synchronously. It then notifies the
* socketFinder of the result of the connection attempt.
*/
public void run() {
IOException exception = null;
// Note that we do not need socketFinder lock here
// as we update nothing in socketFinder based on the condition.
// So, it's perfectly fine to make a dirty read.
SocketFinder.Result result = socketFinder.getResult();
if (result.equals(SocketFinder.Result.UNKNOWN)) {
try {
if (logger.isLoggable(Level.FINER)) {
logger.finer(
this.toString() + " connecting to InetSocketAddress:" + inetSocketAddress + " with timeout:" + timeoutInMilliseconds);
}
socket.connect(inetSocketAddress, timeoutInMilliseconds);
}
catch (IOException ex) {
if (logger.isLoggable(Level.FINER)) {
logger.finer(this.toString() + " exception:" + ex.getClass() + " with message:" + ex.getMessage()
+ " occured while connecting to InetSocketAddress:" + inetSocketAddress);
}
exception = ex;
}
socketFinder.updateResult(socket, exception, this.toString());
}
}
/**
* Used for tracing
*
* @return traceID string
*/
public String toString() {
return traceID;
}
/**
* Generates the next unique thread id.
*/
private static synchronized long nextThreadID() {
if (lastThreadID == Long.MAX_VALUE) {
if (logger.isLoggable(Level.FINER))
logger.finer("Resetting the Id count");
lastThreadID = 1;
}
else {
lastThreadID++;
}
return lastThreadID;
}
}
/**
* TDSWriter implements the client to server TDS data pipe.
*/
final class TDSWriter {
private static Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Writer");
private final String traceID;
final public String toString() {
return traceID;
}
private final TDSChannel tdsChannel;
private final SQLServerConnection con;
// Flag to indicate whether data written via writeXXX() calls
// is loggable. Data is normally loggable. But sensitive
// data, such as user credentials, should never be logged for
// security reasons.
private boolean dataIsLoggable = true;
void setDataLoggable(boolean value) {
dataIsLoggable = value;
}
private TDSCommand command = null;
// TDS message type (Query, RPC, DTC, etc.) sent at the beginning
// of every TDS message header. Value is set when starting a new
// TDS message of the specified type.
private byte tdsMessageType;
private volatile int sendResetConnection = 0;
// Size (in bytes) of the TDS packets to/from the server.
// This size is normally fixed for the life of the connection,
// but it can change once after the logon packet because packet
// size negotiation happens at logon time.
private int currentPacketSize = 0;
// Size of the TDS packet header, which is:
// byte type
// byte status
// short length
// short SPID
// byte packet
// byte window
private final static int TDS_PACKET_HEADER_SIZE = 8;
private final static byte[] placeholderHeader = new byte[TDS_PACKET_HEADER_SIZE];
// Intermediate array used to convert typically "small" values such as fixed-length types
// (byte, int, long, etc.) and Strings from their native form to bytes for sending to
// the channel buffers.
private byte valueBytes[] = new byte[256];
// Monotonically increasing packet number associated with the current message
private volatile int packetNum = 0;
// Bytes for sending decimal/numeric data
private final static int BYTES4 = 4;
private final static int BYTES8 = 8;
private final static int BYTES12 = 12;
private final static int BYTES16 = 16;
public final static int BIGDECIMAL_MAX_LENGTH = 0x11;
// is set to true when EOM is sent for the current message.
// Note that this variable will never be accessed from multiple threads
// simultaneously and so it need not be volatile
private boolean isEOMSent = false;
boolean isEOMSent() {
return isEOMSent;
}
// Packet data buffers
private ByteBuffer stagingBuffer;
private ByteBuffer socketBuffer;
private ByteBuffer logBuffer;
private CryptoMetadata cryptoMeta = null;
TDSWriter(TDSChannel tdsChannel,
SQLServerConnection con) {
this.tdsChannel = tdsChannel;
this.con = con;
traceID = "TDSWriter@" + Integer.toHexString(hashCode()) + " (" + con.toString() + ")";
}
// TDS message start/end operations
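/**
 * Begins a new TDS packet in the staging buffer: resets the log buffer (when packet logging is enabled) and writes an 8-byte placeholder
 * header that writePacketHeader() later overwrites with the real packet header once the packet length is known.
 */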
void preparePacket() throws SQLServerException {
if (tdsChannel.isLoggingPackets()) {
Arrays.fill(logBuffer.array(), (byte) 0xFE);
logBuffer.clear();
}
// Write a placeholder packet header. This will be replaced
// with the real packet header when the packet is flushed.
writeBytes(placeholderHeader);
}
/**
* Start a new TDS message.
*/
void writeMessageHeader() throws SQLServerException {
// TDS 7.2 & later:
// Include ALL_Headers/MARS header in message's first packet
// Note: The PKT_BULK message does not need this ALL_HEADERS
if ((TDS.PKT_QUERY == tdsMessageType || TDS.PKT_DTC == tdsMessageType || TDS.PKT_RPC == tdsMessageType)) {
boolean includeTraceHeader = false;
int totalHeaderLength = TDS.MESSAGE_HEADER_LENGTH;
if (TDS.PKT_QUERY == tdsMessageType || TDS.PKT_RPC == tdsMessageType) {
if (con.isDenaliOrLater() && !ActivityCorrelator.getCurrent().IsSentToServer() && Util.IsActivityTraceOn()) {
includeTraceHeader = true;
totalHeaderLength += TDS.TRACE_HEADER_LENGTH;
}
}
writeInt(totalHeaderLength); // allHeaders.TotalLength (DWORD)
writeInt(TDS.MARS_HEADER_LENGTH); // MARS header length (DWORD)
writeShort((short) 2); // allHeaders.HeaderType(MARS header) (USHORT)
writeBytes(con.getTransactionDescriptor());
writeInt(1); // marsHeader.OutstandingRequestCount
if (includeTraceHeader) {
writeInt(TDS.TRACE_HEADER_LENGTH); // trace header length (DWORD)
writeTraceHeaderData();
ActivityCorrelator.setCurrentActivityIdSentFlag(); // set the flag to indicate this ActivityId is sent
}
}
}
void writeTraceHeaderData() throws SQLServerException {
ActivityId activityId = ActivityCorrelator.getCurrent();
final byte[] actIdByteArray = Util.asGuidByteArray(activityId.getId());
long seqNum = activityId.getSequence();
writeShort(TDS.HEADERTYPE_TRACE); // trace header type
writeBytes(actIdByteArray, 0, actIdByteArray.length); // guid part of ActivityId
writeInt((int) seqNum); // sequence number of ActivityId
if (logger.isLoggable(Level.FINER))
logger.finer("Send Trace Header - ActivityID: " + activityId.toString());
}
/**
* Convenience method to prepare the TDS channel for writing and start a new TDS message.
*
* @param command
* The TDS command
* @param tdsMessageType
* The TDS message type (PKT_QUERY, PKT_RPC, etc.)
*/
void startMessage(TDSCommand command,
byte tdsMessageType) throws SQLServerException {
this.command = command;
this.tdsMessageType = tdsMessageType;
this.packetNum = 0;
this.isEOMSent = false;
this.dataIsLoggable = true;
// If the TDS packet size has changed since the last request
// (which should really only happen after the login packet)
// then allocate new buffers that are the correct size.
int negotiatedPacketSize = con.getTDSPacketSize();
if (currentPacketSize != negotiatedPacketSize) {
socketBuffer = ByteBuffer.allocate(negotiatedPacketSize).order(ByteOrder.LITTLE_ENDIAN);
stagingBuffer = ByteBuffer.allocate(negotiatedPacketSize).order(ByteOrder.LITTLE_ENDIAN);
logBuffer = ByteBuffer.allocate(negotiatedPacketSize).order(ByteOrder.LITTLE_ENDIAN);
currentPacketSize = negotiatedPacketSize;
}
socketBuffer.position(socketBuffer.limit());
stagingBuffer.clear();
preparePacket();
writeMessageHeader();
}
final void endMessage() throws SQLServerException {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Finishing TDS message");
writePacket(TDS.STATUS_BIT_EOM);
}
// If a complete request has not been sent to the server,
// the client MUST send the next packet with both ignore bit (0x02) and EOM bit (0x01)
// set in the status to cancel the request.
final boolean ignoreMessage() throws SQLServerException {
if (packetNum > 0) {
assert !isEOMSent;
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Finishing TDS message by sending ignore bit and end of message");
writePacket(TDS.STATUS_BIT_EOM | TDS.STATUS_BIT_ATTENTION);
return true;
}
return false;
}
final void resetPooledConnection() {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " resetPooledConnection");
sendResetConnection = TDS.STATUS_BIT_RESET_CONN;
}
// Primitive write operations
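// Each primitive writer below stages the value directly into stagingBuffer (little-endian) when there is room,
// mirroring the bytes into logBuffer when packet logging is enabled. When the staging buffer cannot hold the
// whole value, the value is serialized into the valueBytes scratch array and split across packets via
// writeWrappedBytes(). For example, writeInt(5) stages the bytes 05 00 00 00.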
void writeByte(byte value) throws SQLServerException {
if (stagingBuffer.remaining() >= 1) {
stagingBuffer.put(value);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.put(value);
else
logBuffer.position(logBuffer.position() + 1);
}
}
else {
valueBytes[0] = value;
writeWrappedBytes(valueBytes, 1);
}
}
void writeChar(char value) throws SQLServerException {
if (stagingBuffer.remaining() >= 2) {
stagingBuffer.putChar(value);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.putChar(value);
else
logBuffer.position(logBuffer.position() + 2);
}
}
else {
Util.writeShort((short) value, valueBytes, 0);
writeWrappedBytes(valueBytes, 2);
}
}
void writeShort(short value) throws SQLServerException {
if (stagingBuffer.remaining() >= 2) {
stagingBuffer.putShort(value);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.putShort(value);
else
logBuffer.position(logBuffer.position() + 2);
}
}
else {
Util.writeShort(value, valueBytes, 0);
writeWrappedBytes(valueBytes, 2);
}
}
void writeInt(int value) throws SQLServerException {
if (stagingBuffer.remaining() >= 4) {
stagingBuffer.putInt(value);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.putInt(value);
else
logBuffer.position(logBuffer.position() + 4);
}
}
else {
Util.writeInt(value, valueBytes, 0);
writeWrappedBytes(valueBytes, 4);
}
}
/**
* Append a real value in the TDS stream.
*
* @param value
* the data value
*/
void writeReal(Float value) throws SQLServerException {
// REAL values are written as the raw IEEE 754 bit pattern of the float,
// which writeInt emits in little-endian byte order.
writeInt(Float.floatToRawIntBits(value.floatValue()));
}
/**
* Append a double value in the TDS stream.
*
* @param value
* the data value
*/
void writeDouble(double value) throws SQLServerException {
if (stagingBuffer.remaining() >= 8) {
stagingBuffer.putDouble(value);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.putDouble(value);
else
logBuffer.position(logBuffer.position() + 8);
}
}
else {
long bits = Double.doubleToLongBits(value);
long mask = 0xFF;
int nShift = 0;
for (int i = 0; i < 8; i++) {
writeByte((byte) ((bits & mask) >> nShift));
nShift += 8;
mask = mask << 8;
}
}
}
/**
* Append a big decimal in the TDS stream.
*
* @param bigDecimalVal
* the big decimal data value
* @param srcJdbcType
* the source JDBCType
* @param precision
* the precision of the data value
* @param scale
* the scale of the column
* @throws SQLServerException
*/
void writeBigDecimal(BigDecimal bigDecimalVal,
int srcJdbcType,
int precision,
int scale) throws SQLServerException {
/*
* Length including sign byte: one 1-byte unsigned integer that represents the sign of the decimal value (0 => negative, 1 => positive),
* followed by one 4-, 8-, 12-, or 16-byte signed integer that represents the decimal value multiplied by 10^scale.
*/
/*
* setScale of all BigDecimal values based on metadata, as the scale is not sent separately for each value. Use the rounding used by the
* server. For example, for BigDecimal("0.1"), if the scale in the metadata is 0, an ArithmeticException would be thrown if RoundingMode
* is not set.
*/
bigDecimalVal = bigDecimalVal.setScale(scale, RoundingMode.HALF_UP);
// data length + 1 byte for sign
int bLength = BYTES16 + 1;
writeByte((byte) (bLength));
// Byte array to hold all the data and padding bytes.
byte[] bytes = new byte[bLength];
byte[] valueBytes = DDC.convertBigDecimalToBytes(bigDecimalVal, scale);
// removing the precision and scale information from the valueBytes array
System.arraycopy(valueBytes, 2, bytes, 0, valueBytes.length - 2);
writeBytes(bytes);
}
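/**
 * Append a SMALLDATETIME value to the TDS stream as two 2-byte integers: the number of days since the SQL Server base date
 * (January 1, 1900) followed by the number of minutes since midnight, with seconds rounded to the nearest minute.
 *
 * @param value
 * the timestamp value in java.sql.Timestamp string format
 */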
void writeSmalldatetime(String value) throws SQLServerException {
GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
long utcMillis = 0; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
java.sql.Timestamp timestampValue = java.sql.Timestamp.valueOf(value);
utcMillis = timestampValue.getTime();
// Load the calendar with the desired value
calendar.setTimeInMillis(utcMillis);
// Number of days since the SQL Server Base Date (January 1, 1900)
int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(calendar.get(Calendar.YEAR), calendar.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);
// Next, figure out the number of milliseconds since midnight of the current day.
int millisSinceMidnight = 1000 * calendar.get(Calendar.SECOND) + // Seconds into the current minute
60 * 1000 * calendar.get(Calendar.MINUTE) + // Minutes into the current hour
60 * 60 * 1000 * calendar.get(Calendar.HOUR_OF_DAY); // Hours into the current day
// The last millisecond of the current day is always rounded to the first millisecond
// of the next day because DATETIME is only accurate to 1/300th of a second.
if (1000 * 60 * 60 * 24 - 1 <= millisSinceMidnight) {
++daysSinceSQLBaseDate;
millisSinceMidnight = 0;
}
// Number of days since the SQL Server Base Date (January 1, 1900)
writeShort((short) daysSinceSQLBaseDate);
int secondsSinceMidnight = (millisSinceMidnight / 1000);
int minutesSinceMidnight = (secondsSinceMidnight / 60);
// Values that are 29.998 seconds or less are rounded down to the nearest minute
minutesSinceMidnight = ((secondsSinceMidnight % 60) > 29.998) ? minutesSinceMidnight + 1 : minutesSinceMidnight;
// Minutes since midnight
writeShort((short) minutesSinceMidnight);
}
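/**
 * Append a DATETIME value to the TDS stream as two 4-byte integers: the number of days since the SQL Server base date (January 1, 1900)
 * followed by the time of day expressed in three-hundredths of a second since midnight. Values outside the DATETIME range
 * (1/1/1753 to 12/31/9999) are rejected before anything is written.
 *
 * @param value
 * the timestamp value in java.sql.Timestamp string format
 */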
void writeDatetime(String value) throws SQLServerException {
GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
long utcMillis = 0; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
int subSecondNanos = 0;
java.sql.Timestamp timestampValue = java.sql.Timestamp.valueOf(value);
utcMillis = timestampValue.getTime();
subSecondNanos = timestampValue.getNanos();
// Load the calendar with the desired value
calendar.setTimeInMillis(utcMillis);
// Number of days there have been since the SQL Base Date.
// These are based on SQL Server algorithms
int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(calendar.get(Calendar.YEAR), calendar.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);
// Number of milliseconds since midnight of the current day.
int millisSinceMidnight = (subSecondNanos + Nanos.PER_MILLISECOND / 2) / Nanos.PER_MILLISECOND + // Millis into the current second
1000 * calendar.get(Calendar.SECOND) + // Seconds into the current minute
60 * 1000 * calendar.get(Calendar.MINUTE) + // Minutes into the current hour
60 * 60 * 1000 * calendar.get(Calendar.HOUR_OF_DAY); // Hours into the current day
// The last millisecond of the current day is always rounded to the first millisecond
// of the next day because DATETIME is only accurate to 1/300th of a second.
if (1000 * 60 * 60 * 24 - 1 <= millisSinceMidnight) {
++daysSinceSQLBaseDate;
millisSinceMidnight = 0;
}
// Last-ditch verification that the value is in the valid range for the
// DATETIMEN TDS data type (1/1/1753 to 12/31/9999). If it's not, then
// throw an exception now so that statement execution is safely canceled.
// Attempting to put an invalid value on the wire would result in a TDS
// exception, which would close the connection.
// These are based on SQL Server algorithms
if (daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1753, 1, TDS.BASE_YEAR_1900)
|| daysSinceSQLBaseDate >= DDC.daysSinceBaseDate(10000, 1, TDS.BASE_YEAR_1900)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
Object[] msgArgs = {SSType.DATETIME};
throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
}
// Number of days since the SQL Server Base Date (January 1, 1900)
writeInt(daysSinceSQLBaseDate);
// Milliseconds since midnight (at a resolution of three hundredths of a second)
writeInt((3 * millisSinceMidnight + 5) / 10);
}
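/**
 * Append a DATE value to the TDS stream. The value carries no time component, so it is written via writeScaledTemporal() with zero
 * sub-second nanos and a scale of zero.
 *
 * @param value
 * the date value in java.sql.Date string format (yyyy-mm-dd)
 */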
void writeDate(String value) throws SQLServerException {
GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
long utcMillis = 0;
java.sql.Date dateValue = java.sql.Date.valueOf(value);
utcMillis = dateValue.getTime();
// Load the calendar with the desired value
calendar.setTimeInMillis(utcMillis);
writeScaledTemporal(calendar, 0, // subsecond nanos (none for a date value)
0, // scale (dates are not scaled)
SSType.DATE);
}
void writeTime(java.sql.Timestamp value,
int scale) throws SQLServerException {
GregorianCalendar calendar = initializeCalender(TimeZone.getDefault());
long utcMillis = 0; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
int subSecondNanos = 0;
utcMillis = value.getTime();
subSecondNanos = value.getNanos();
// Load the calendar with the desired value
calendar.setTimeInMillis(utcMillis);
writeScaledTemporal(calendar, subSecondNanos, scale, SSType.TIME);
}
void writeDateTimeOffset(Object value,
int scale,
SSType destSSType) throws SQLServerException {
GregorianCalendar calendar = null;
TimeZone timeZone = TimeZone.getDefault(); // Time zone to associate with the value in the Gregorian calendar
long utcMillis = 0; // Value to which the calendar is to be set (in milliseconds 1/1/1970 00:00:00 GMT)
int subSecondNanos = 0;
int minutesOffset = 0;
microsoft.sql.DateTimeOffset dtoValue = (microsoft.sql.DateTimeOffset) value;
utcMillis = dtoValue.getTimestamp().getTime();
subSecondNanos = dtoValue.getTimestamp().getNanos();
minutesOffset = dtoValue.getMinutesOffset();
// If the target data type is DATETIMEOFFSET, then use UTC for the calendar that
// will hold the value, since writeRPCDateTimeOffset expects a UTC calendar.
// Otherwise, when converting from DATETIMEOFFSET to other temporal data types,
// use a local time zone determined by the minutes offset of the value, since
// the writers for those types expect local calendars.
timeZone = (SSType.DATETIMEOFFSET == destSSType) ? UTC.timeZone : new SimpleTimeZone(minutesOffset * 60 * 1000, "");
calendar = new GregorianCalendar(timeZone, Locale.US);
calendar.setLenient(true);
calendar.clear();
calendar.setTimeInMillis(utcMillis);
writeScaledTemporal(calendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);
writeShort((short) minutesOffset);
}
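/**
 * Append a java.time.OffsetDateTime value as a DATETIMEOFFSET in the TDS stream. The zone offset is reduced to whole minutes, the
 * sub-second nanos are padded to nine digits, the local date/time fields are loaded into a UTC calendar adjusted by the local time zone
 * and the minutes offset, and finally the scaled temporal value is written followed by the 2-byte minutes offset.
 */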
void writeOffsetDateTimeWithTimezone(OffsetDateTime offsetDateTimeValue,
int scale) throws SQLServerException {
GregorianCalendar calendar = null;
TimeZone timeZone;
long utcMillis = 0;
int subSecondNanos = 0;
int minutesOffset = 0;
try {
// offsetTimeValue.getOffset() returns a ZoneOffset object which has only hours and minutes
// components. So the result of the division will be an integer always. SQL Server also supports
// offsets in minutes precision.
minutesOffset = offsetDateTimeValue.getOffset().getTotalSeconds() / 60;
}
catch (Exception e) {
throw new SQLServerException(SQLServerException.getErrString("R_zoneOffsetError"), null, // SQLState is null as this error is generated in
// the driver
0, // Use 0 instead of DriverError.NOT_SET to use the correct constructor
e);
}
subSecondNanos = offsetDateTimeValue.getNano();
// writeScaledTemporal() expects subSecondNanos with 9 digits of precision,
// but getNano() on the OffsetDateTime returns a precision based on the nanoseconds read from the csv,
// so pad with zeros to match the expectation of writeScaledTemporal()
int padding = 9 - String.valueOf(subSecondNanos).length();
while (padding > 0) {
subSecondNanos = subSecondNanos * 10;
padding--;
}
// For TIME_WITH_TIMEZONE, use UTC for the calendar that will hold the value
timeZone = UTC.timeZone;
// The behavior is similar to microsoft.sql.DateTimeOffset
// In Timestamp format, only YEAR needs to have 4 digits. The leading zeros for the rest of the fields can be omitted.
String offDateTimeStr = String.format("%04d", offsetDateTimeValue.getYear()) + '-' + offsetDateTimeValue.getMonthValue() + '-'
+ offsetDateTimeValue.getDayOfMonth() + ' ' + offsetDateTimeValue.getHour() + ':' + offsetDateTimeValue.getMinute() + ':'
+ offsetDateTimeValue.getSecond();
utcMillis = Timestamp.valueOf(offDateTimeStr).getTime();
calendar = initializeCalender(timeZone);
calendar.setTimeInMillis(utcMillis);
// Local timezone value in minutes
int minuteAdjustment = ((TimeZone.getDefault().getRawOffset()) / (60 * 1000));
// check if date is in day light savings and add daylight saving minutes
if (TimeZone.getDefault().inDaylightTime(calendar.getTime()))
minuteAdjustment += (TimeZone.getDefault().getDSTSavings()) / (60 * 1000);
// If the local time is negative, then a positive minutesOffset must be subtracted from the calendar
minuteAdjustment += (minuteAdjustment < 0) ? (minutesOffset * (-1)) : minutesOffset;
calendar.add(Calendar.MINUTE, minuteAdjustment);
writeScaledTemporal(calendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);
writeShort((short) minutesOffset);
}
void writeOffsetTimeWithTimezone(OffsetTime offsetTimeValue,
int scale) throws SQLServerException {
GregorianCalendar calendar = null;
TimeZone timeZone;
long utcMillis = 0;
int subSecondNanos = 0;
int minutesOffset = 0;
try {
// offsetTimeValue.getOffset() returns a ZoneOffset object which has only hours and minutes
// components. So the result of the division will be an integer always. SQL Server also supports
// offsets in minutes precision.
minutesOffset = offsetTimeValue.getOffset().getTotalSeconds() / 60;
}
catch (Exception e) {
throw new SQLServerException(SQLServerException.getErrString("R_zoneOffsetError"), null, // SQLState is null as this error is generated in
// the driver
0, // Use 0 instead of DriverError.NOT_SET to use the correct constructor
e);
}
subSecondNanos = offsetTimeValue.getNano();
// writeScaledTemporal() expects subSecondNanos with 9 digits of precision,
// but getNano() on the OffsetDateTime returns a precision based on the nanoseconds read from the csv,
// so pad with zeros to match the expectation of writeScaledTemporal()
int padding = 9 - String.valueOf(subSecondNanos).length();
while (padding > 0) {
subSecondNanos = subSecondNanos * 10;
padding--;
}
// For TIME_WITH_TIMEZONE, use UTC for the calendar that will hold the value
timeZone = UTC.timeZone;
// Using TDS.BASE_YEAR_1900, based on SQL Server behavior:
// if the date only contains a time part, the returned year is 1900, the base year.
// In Timestamp format, leading zeros for the fields can be omitted.
String offsetTimeStr = TDS.BASE_YEAR_1900 + "-01-01" + ' ' + offsetTimeValue.getHour() + ':' + offsetTimeValue.getMinute() + ':'
+ offsetTimeValue.getSecond();
utcMillis = Timestamp.valueOf(offsetTimeStr).getTime();
calendar = initializeCalender(timeZone);
calendar.setTimeInMillis(utcMillis);
int minuteAdjustment = (TimeZone.getDefault().getRawOffset()) / (60 * 1000);
// check if date is in day light savings and add daylight saving minutes to Local timezone(in minutes)
if (TimeZone.getDefault().inDaylightTime(calendar.getTime()))
minuteAdjustment += ((TimeZone.getDefault().getDSTSavings()) / (60 * 1000));
// If the local time is negative, then a positive minutesOffset must be subtracted from the calendar
minuteAdjustment += (minuteAdjustment < 0) ? (minutesOffset * (-1)) : minutesOffset;
calendar.add(Calendar.MINUTE, minuteAdjustment);
writeScaledTemporal(calendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);
writeShort((short) minutesOffset);
}
void writeLong(long value) throws SQLServerException {
if (stagingBuffer.remaining() >= 8) {
stagingBuffer.putLong(value);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.putLong(value);
else
logBuffer.position(logBuffer.position() + 8);
}
}
else {
valueBytes[0] = (byte) ((value >> 0) & 0xFF);
valueBytes[1] = (byte) ((value >> 8) & 0xFF);
valueBytes[2] = (byte) ((value >> 16) & 0xFF);
valueBytes[3] = (byte) ((value >> 24) & 0xFF);
valueBytes[4] = (byte) ((value >> 32) & 0xFF);
valueBytes[5] = (byte) ((value >> 40) & 0xFF);
valueBytes[6] = (byte) ((value >> 48) & 0xFF);
valueBytes[7] = (byte) ((value >> 56) & 0xFF);
writeWrappedBytes(valueBytes, 8);
}
}
void writeBytes(byte[] value) throws SQLServerException {
writeBytes(value, 0, value.length);
}
void writeBytes(byte[] value,
int offset,
int length) throws SQLServerException {
assert length <= value.length;
int bytesWritten = 0;
int bytesToWrite;
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Writing " + length + " bytes");
while ((bytesToWrite = length - bytesWritten) > 0) {
if (0 == stagingBuffer.remaining())
writePacket(TDS.STATUS_NORMAL);
if (bytesToWrite > stagingBuffer.remaining())
bytesToWrite = stagingBuffer.remaining();
stagingBuffer.put(value, offset + bytesWritten, bytesToWrite);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.put(value, offset + bytesWritten, bytesToWrite);
else
logBuffer.position(logBuffer.position() + bytesToWrite);
}
bytesWritten += bytesToWrite;
}
}
void writeWrappedBytes(byte value[],
int valueLength) throws SQLServerException {
// This function should only be used to write a value that is longer than
// what remains in the current staging buffer. However, the value must
// be short enough to fit in an empty buffer.
assert valueLength <= value.length;
assert stagingBuffer.remaining() < valueLength;
assert valueLength <= stagingBuffer.capacity();
// Fill any remaining space in the staging buffer
int remaining = stagingBuffer.remaining();
if (remaining > 0) {
stagingBuffer.put(value, 0, remaining);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.put(value, 0, remaining);
else
logBuffer.position(logBuffer.position() + remaining);
}
}
writePacket(TDS.STATUS_NORMAL);
// After swapping, the staging buffer should once again be empty, so the
// remainder of the value can be written to it.
stagingBuffer.put(value, remaining, valueLength - remaining);
if (tdsChannel.isLoggingPackets()) {
if (dataIsLoggable)
logBuffer.put(value, remaining, valueLength - remaining);
else
logBuffer.position(logBuffer.position() + remaining);
}
}
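// Writes a Java String to the TDS stream as UTF-16LE: each char is split into its low and high bytes in the
// valueBytes scratch array and flushed in chunks via writeBytes().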
void writeString(String value) throws SQLServerException {
int charsCopied = 0;
int length = value.length();
while (charsCopied < length) {
int bytesToCopy = 2 * (length - charsCopied);
if (bytesToCopy > valueBytes.length)
bytesToCopy = valueBytes.length;
int bytesCopied = 0;
while (bytesCopied < bytesToCopy) {
char ch = value.charAt(charsCopied++);
valueBytes[bytesCopied++] = (byte) ((ch >> 0) & 0xFF);
valueBytes[bytesCopied++] = (byte) ((ch >> 8) & 0xFF);
}
writeBytes(valueBytes, 0, bytesCopied);
}
}
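/**
 * Writes the contents of an InputStream to the TDS stream in chunks of up to four packets' worth of bytes. When writeChunkSizes is true,
 * each chunk is preceded by its 4-byte length (PLP-style chunking, ending with a zero-length chunk). If an advertised length was supplied
 * and the actual number of bytes read does not match it, the request is cancelled with a length-mismatch error.
 */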
void writeStream(InputStream inputStream,
long advertisedLength,
boolean writeChunkSizes) throws SQLServerException {
assert DataTypes.UNKNOWN_STREAM_LENGTH == advertisedLength || advertisedLength >= 0;
long actualLength = 0;
final byte[] streamByteBuffer = new byte[4 * currentPacketSize];
int bytesRead = 0;
int bytesToWrite;
do {
// Read in next chunk
for (bytesToWrite = 0; -1 != bytesRead && bytesToWrite < streamByteBuffer.length; bytesToWrite += bytesRead) {
try {
bytesRead = inputStream.read(streamByteBuffer, bytesToWrite, streamByteBuffer.length - bytesToWrite);
}
catch (IOException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
Object[] msgArgs = {e.toString()};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
}
if (-1 == bytesRead)
break;
// Check for invalid bytesRead returned from InputStream.read
if (bytesRead < 0 || bytesRead > streamByteBuffer.length - bytesToWrite) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
Object[] msgArgs = {SQLServerException.getErrString("R_streamReadReturnedInvalidValue")};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
}
}
// Write it out
if (writeChunkSizes)
writeInt(bytesToWrite);
writeBytes(streamByteBuffer, 0, bytesToWrite);
actualLength += bytesToWrite;
}
while (-1 != bytesRead || bytesToWrite > 0);
// If we were given an input stream length that we had to match and
// the actual stream length did not match then cancel the request.
if (DataTypes.UNKNOWN_STREAM_LENGTH != advertisedLength && actualLength != advertisedLength) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_mismatchedStreamLength"));
Object[] msgArgs = {Long.valueOf(advertisedLength), Long.valueOf(actualLength)};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_LENGTH_MISMATCH, DriverError.NOT_SET);
}
}
/*
* Adding another function for writing non-unicode reader instead of re-factoring the writeReader() for performance efficiency. As this method
* will only be used in bulk copy, it needs to be efficient. Note: Any changes in algorithm/logic should propagate to both writeReader() and
* writeNonUnicodeReader().
*/
void writeNonUnicodeReader(Reader reader,
long advertisedLength,
boolean isDestBinary,
Charset charSet) throws SQLServerException {
assert DataTypes.UNKNOWN_STREAM_LENGTH == advertisedLength || advertisedLength >= 0;
long actualLength = 0;
char[] streamCharBuffer = new char[currentPacketSize];
// The unicode version, writeReader() allocates a byte buffer that is 4 times the currentPacketSize, not sure why.
byte[] streamByteBuffer = new byte[currentPacketSize];
int charsRead = 0;
int charsToWrite;
int bytesToWrite;
String streamString;
do {
// Read in next chunk
for (charsToWrite = 0; -1 != charsRead && charsToWrite < streamCharBuffer.length; charsToWrite += charsRead) {
try {
charsRead = reader.read(streamCharBuffer, charsToWrite, streamCharBuffer.length - charsToWrite);
}
catch (IOException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
Object[] msgArgs = {e.toString()};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
}
if (-1 == charsRead)
break;
// Check for invalid bytesRead returned from Reader.read
if (charsRead < 0 || charsRead > streamCharBuffer.length - charsToWrite) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
Object[] msgArgs = {SQLServerException.getErrString("R_streamReadReturnedInvalidValue")};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
}
}
if (!isDestBinary) {
// Write it out
// This also writes the PLP_TERMINATOR token after all the data in the stream is sent.
// The Do-While loop goes on one more time as charsToWrite is greater than 0 for the last chunk, and
// in this last round the only thing that is written is an int value of 0, which is the PLP Terminator token(0x00000000).
writeInt(charsToWrite);
for (int charsCopied = 0; charsCopied < charsToWrite; ++charsCopied) {
if (null == charSet) {
streamByteBuffer[charsCopied] = (byte) (streamCharBuffer[charsCopied] & 0xFF);
}
else {
// encoding as per collation
streamByteBuffer[charsCopied] = new String(streamCharBuffer[charsCopied] + "").getBytes(charSet)[0];
}
}
writeBytes(streamByteBuffer, 0, charsToWrite);
}
else {
bytesToWrite = charsToWrite;
if (0 != charsToWrite)
bytesToWrite = charsToWrite / 2;
streamString = new String(streamCharBuffer);
byte[] bytes = ParameterUtils.HexToBin(streamString.trim());
writeInt(bytesToWrite);
writeBytes(bytes, 0, bytesToWrite);
}
actualLength += charsToWrite;
}
while (-1 != charsRead || charsToWrite > 0);
// If we were given an input stream length that we had to match and
// the actual stream length did not match then cancel the request.
if (DataTypes.UNKNOWN_STREAM_LENGTH != advertisedLength && actualLength != advertisedLength) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_mismatchedStreamLength"));
Object[] msgArgs = {Long.valueOf(advertisedLength), Long.valueOf(actualLength)};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_LENGTH_MISMATCH, DriverError.NOT_SET);
}
}
/*
* Note: There is another method with same code logic for non unicode reader, writeNonUnicodeReader(), implemented for performance efficiency. Any
* changes in algorithm/logic should propagate to both writeReader() and writeNonUnicodeReader().
*/
void writeReader(Reader reader,
long advertisedLength,
boolean writeChunkSizes) throws SQLServerException {
assert DataTypes.UNKNOWN_STREAM_LENGTH == advertisedLength || advertisedLength >= 0;
long actualLength = 0;
char[] streamCharBuffer = new char[2 * currentPacketSize];
byte[] streamByteBuffer = new byte[4 * currentPacketSize];
int charsRead = 0;
int charsToWrite;
do {
// Read in next chunk
for (charsToWrite = 0; -1 != charsRead && charsToWrite < streamCharBuffer.length; charsToWrite += charsRead) {
try {
charsRead = reader.read(streamCharBuffer, charsToWrite, streamCharBuffer.length - charsToWrite);
}
catch (IOException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
Object[] msgArgs = {e.toString()};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
}
if (-1 == charsRead)
break;
// Check for invalid bytesRead returned from Reader.read
if (charsRead < 0 || charsRead > streamCharBuffer.length - charsToWrite) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorReadingStream"));
Object[] msgArgs = {SQLServerException.getErrString("R_streamReadReturnedInvalidValue")};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET);
}
}
// Write it out
if (writeChunkSizes)
writeInt(2 * charsToWrite);
// Convert from Unicode characters to bytes
// Note: The following inlined code is much faster than the equivalent
// call to (new String(streamCharBuffer)).getBytes("UTF-16LE") because it
// saves a conversion to String and use of Charset in that conversion.
for (int charsCopied = 0; charsCopied < charsToWrite; ++charsCopied) {
streamByteBuffer[2 * charsCopied] = (byte) ((streamCharBuffer[charsCopied] >> 0) & 0xFF);
streamByteBuffer[2 * charsCopied + 1] = (byte) ((streamCharBuffer[charsCopied] >> 8) & 0xFF);
}
writeBytes(streamByteBuffer, 0, 2 * charsToWrite);
actualLength += charsToWrite;
}
while (-1 != charsRead || charsToWrite > 0);
// If we were given an input stream length that we had to match and
// the actual stream length did not match then cancel the request.
if (DataTypes.UNKNOWN_STREAM_LENGTH != advertisedLength && actualLength != advertisedLength) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_mismatchedStreamLength"));
Object[] msgArgs = {Long.valueOf(advertisedLength), Long.valueOf(actualLength)};
error(form.format(msgArgs), SQLState.DATA_EXCEPTION_LENGTH_MISMATCH, DriverError.NOT_SET);
}
}
GregorianCalendar initializeCalender(TimeZone timeZone) {
GregorianCalendar calendar = null;
// Create the calendar that will hold the value. For DateTimeOffset values, the calendar's
// time zone is UTC. For other values, the calendar's time zone is a local time zone.
calendar = new GregorianCalendar(timeZone, Locale.US);
// Set the calendar lenient to allow setting the DAY_OF_YEAR and MILLISECOND fields
// to roll other fields to their correct values.
calendar.setLenient(true);
// Clear the calendar of any existing state. The state of a new Calendar object always
// reflects the current date, time, DST offset, etc.
calendar.clear();
return calendar;
}
final void error(String reason,
SQLState sqlState,
DriverError driverError) throws SQLServerException {
assert null != command;
command.interrupt(reason);
throw new SQLServerException(reason, sqlState, driverError, null);
}
/**
* Sends an attention signal to the server, if necessary, to tell it to stop processing the current command on this connection.
*
* If no packets of the command's request have yet been sent to the server, then no attention signal needs to be sent. The interrupt will be
* handled entirely by the driver.
*
* This method does not need synchronization as it does not manipulate interrupt state and writing is guaranteed to occur only from one thread at
* a time.
*/
final boolean sendAttention() throws SQLServerException {
// If any request packets were already written to the server then send an
// attention signal to the server to tell it to ignore the request or
// cancel its execution.
if (packetNum > 0) {
// Ideally, we would want to add the following assert here.
// But to add that the variable isEOMSent would have to be made
// volatile as this piece of code would be reached from multiple
// threads. So, not doing it to avoid perf hit. Note that
// isEOMSent would be updated in writePacket every time an EOM is sent
// assert isEOMSent;
if (logger.isLoggable(Level.FINE))
logger.fine(this + ": sending attention...");
++tdsChannel.numMsgsSent;
startMessage(command, TDS.PKT_CANCEL_REQ);
endMessage();
return true;
}
return false;
}
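// Completes the packet currently held in the staging buffer: writes the real packet header, flushes it to the
// channel and, when the EOM status bit is set, flushes the swapped buffer as well so the whole message reaches
// the socket. This is also where SSL is disabled after the first login packet when encryption was negotiated
// for login only.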
private void writePacket(int tdsMessageStatus) throws SQLServerException {
final boolean atEOM = (TDS.STATUS_BIT_EOM == (TDS.STATUS_BIT_EOM & tdsMessageStatus));
final boolean isCancelled = ((TDS.PKT_CANCEL_REQ == tdsMessageType)
|| ((tdsMessageStatus & TDS.STATUS_BIT_ATTENTION) == TDS.STATUS_BIT_ATTENTION));
// Before writing each packet to the channel, check if an interrupt has occurred.
if (null != command && (!isCancelled))
command.checkForInterrupt();
writePacketHeader(tdsMessageStatus | sendResetConnection);
sendResetConnection = 0;
flush(atEOM);
// If this is the last packet then flush the remainder of the request
// through the socket. The first flush() call ensured that data currently
// waiting in the socket buffer was sent, flipped the buffers, and started
// sending data from the staging buffer (flipped to be the new socket buffer).
// This flush() call ensures that all remaining data in the socket buffer is sent.
if (atEOM) {
flush(atEOM);
isEOMSent = true;
++tdsChannel.numMsgsSent;
}
// If we just sent the first login request packet and SSL encryption was enabled
// for login only, then disable SSL now.
if (TDS.PKT_LOGON70 == tdsMessageType && 1 == packetNum && TDS.ENCRYPT_OFF == con.getNegotiatedEncryptionLevel()) {
tdsChannel.disableSSL();
}
// Notify the currently associated command (if any) that we have written the last
// of the response packets to the channel.
if (null != command && (!isCancelled) && atEOM)
command.onRequestComplete();
}
private void writePacketHeader(int tdsMessageStatus) {
int tdsMessageLength = stagingBuffer.position();
++packetNum;
// Write the TDS packet header back at the start of the staging buffer
stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_TYPE, tdsMessageType);
stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_STATUS, (byte) tdsMessageStatus);
stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH, (byte) ((tdsMessageLength >> 8) & 0xFF)); // Note: message length is 16 bits,
stagingBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH + 1, (byte) ((tdsMessageLength >> 0) & 0xFF)); // written BIG ENDIAN
stagingBuffer.put(TDS.PACKET_HEADER_SPID, (byte) ((tdsChannel.getSPID() >> 8) & 0xFF)); // Note: SPID is 16 bits,
stagingBuffer.put(TDS.PACKET_HEADER_SPID + 1, (byte) ((tdsChannel.getSPID() >> 0) & 0xFF)); // written BIG ENDIAN
stagingBuffer.put(TDS.PACKET_HEADER_SEQUENCE_NUM, (byte) (packetNum % 256));
stagingBuffer.put(TDS.PACKET_HEADER_WINDOW, (byte) 0); // Window (Reserved/Not used)
// Write the header to the log buffer too if logging.
if (tdsChannel.isLoggingPackets()) {
logBuffer.put(TDS.PACKET_HEADER_MESSAGE_TYPE, tdsMessageType);
logBuffer.put(TDS.PACKET_HEADER_MESSAGE_STATUS, (byte) tdsMessageStatus);
logBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH, (byte) ((tdsMessageLength >> 8) & 0xFF)); // Note: message length is 16 bits,
logBuffer.put(TDS.PACKET_HEADER_MESSAGE_LENGTH + 1, (byte) ((tdsMessageLength >> 0) & 0xFF)); // written BIG ENDIAN
logBuffer.put(TDS.PACKET_HEADER_SPID, (byte) ((tdsChannel.getSPID() >> 8) & 0xFF)); // Note: SPID is 16 bits,
logBuffer.put(TDS.PACKET_HEADER_SPID + 1, (byte) ((tdsChannel.getSPID() >> 0) & 0xFF)); // written BIG ENDIAN
logBuffer.put(TDS.PACKET_HEADER_SEQUENCE_NUM, (byte) (packetNum % 256));
logBuffer.put(TDS.PACKET_HEADER_WINDOW, (byte) 0); // Window (Reserved/Not used);
}
}
void flush(boolean atEOM) throws SQLServerException {
// First, flush any data left in the socket buffer.
tdsChannel.write(socketBuffer.array(), socketBuffer.position(), socketBuffer.remaining());
socketBuffer.position(socketBuffer.limit());
// If there is data in the staging buffer that needs to be written
// to the socket, the socket buffer is now empty, so swap buffers
// and start writing data from the staging buffer.
if (stagingBuffer.position() >= TDS_PACKET_HEADER_SIZE) {
// Swap the packet buffers ...
ByteBuffer swapBuffer = stagingBuffer;
stagingBuffer = socketBuffer;
socketBuffer = swapBuffer;
// ... and prepare to send data from the new socket
// buffer (the old staging buffer).
// We need to use flip() rather than rewind() here so that
// the socket buffer's limit is properly set for the last
// packet, which may be shorter than the other packets.
socketBuffer.flip();
stagingBuffer.clear();
// If we are logging TDS packets then log the packet we're about
// to send over the wire now.
if (tdsChannel.isLoggingPackets()) {
tdsChannel.logPacket(logBuffer.array(), 0, socketBuffer.limit(),
this.toString() + " sending packet (" + socketBuffer.limit() + " bytes)");
}
// Prepare for the next packet
if (!atEOM)
preparePacket();
// Finally, start sending data from the new socket buffer.
tdsChannel.write(socketBuffer.array(), socketBuffer.position(), socketBuffer.remaining());
socketBuffer.position(socketBuffer.limit());
}
}
// Composite write operations
/**
* Write out elements common to all RPC values.
*
* @param sName
* the optional parameter name
* @param bOut
* boolean true if the value that follows is being registered as an output parameter
* @param tdsType
* TDS type of the value that follows
*/
void writeRPCNameValType(String sName,
boolean bOut,
TDSType tdsType) throws SQLServerException {
int nNameLen = 0;
if (null != sName)
nNameLen = sName.length() + 1; // The @ prefix is required for the param
writeByte((byte) nNameLen); // param name len
if (nNameLen > 0) {
writeChar('@');
writeString(sName);
}
if (null != cryptoMeta)
writeByte((byte) (bOut ? 1 | TDS.AE_METADATA : 0 | TDS.AE_METADATA)); // status
else
writeByte((byte) (bOut ? 1 : 0)); // status
writeByte(tdsType.byteValue()); // type
}
/**
* Append a boolean value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param booleanValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCBit(String sName,
Boolean booleanValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.BITN);
writeByte((byte) 1); // max length of datatype
if (null == booleanValue) {
writeByte((byte) 0); // len of data bytes
}
else {
writeByte((byte) 1); // length of datatype
writeByte((byte) (booleanValue.booleanValue() ? 1 : 0));
}
}
/**
* Append a byte value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param byteValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCByte(String sName,
Byte byteValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.INTN);
writeByte((byte) 1); // max length of datatype
if (null == byteValue) {
writeByte((byte) 0); // len of data bytes
}
else {
writeByte((byte) 1); // length of datatype
writeByte(byteValue.byteValue());
}
}
/**
* Append a short value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param shortValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCShort(String sName,
Short shortValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.INTN);
writeByte((byte) 2); // max length of datatype
if (null == shortValue) {
writeByte((byte) 0); // len of data bytes
}
else {
writeByte((byte) 2); // length of datatype
writeShort(shortValue.shortValue());
}
}
/**
* Append an int value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param intValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCInt(String sName,
Integer intValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.INTN);
writeByte((byte) 4); // max length of datatype
if (null == intValue) {
writeByte((byte) 0); // len of data bytes
}
else {
writeByte((byte) 4); // length of datatype
writeInt(intValue.intValue());
}
}
/**
* Append a long value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param longValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCLong(String sName,
Long longValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.INTN);
writeByte((byte) 8); // max length of datatype
if (null == longValue) {
writeByte((byte) 0); // len of data bytes
}
else {
writeByte((byte) 8); // length of datatype
writeLong(longValue.longValue());
}
}
/**
* Append a real value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param floatValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCReal(String sName,
Float floatValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.FLOATN);
// Data and length
if (null == floatValue) {
writeByte((byte) 4); // max length
writeByte((byte) 0); // actual length (0 == null)
}
else {
writeByte((byte) 4); // max length
writeByte((byte) 4); // actual length
writeInt(Float.floatToRawIntBits(floatValue.floatValue()));
}
}
/**
* Append a double value in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param doubleValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCDouble(String sName,
Double doubleValue,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.FLOATN);
int l = 8;
writeByte((byte) l); // max length of datatype
// Data and length
if (null == doubleValue) {
writeByte((byte) 0); // len of data bytes
}
else {
writeByte((byte) l); // len of data bytes
long bits = Double.doubleToLongBits(doubleValue.doubleValue());
long mask = 0xFF;
int nShift = 0;
for (int i = 0; i < 8; i++) {
writeByte((byte) ((bits & mask) >> nShift));
nShift += 8;
mask = mask << 8;
}
}
}
/**
* Append a big decimal in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param bdValue
* the data value
* @param nScale
* the desired scale
* @param bOut
* boolean true if the data value is being registered as an output parameter
*/
void writeRPCBigDecimal(String sName,
BigDecimal bdValue,
int nScale,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.DECIMALN);
writeByte((byte) 0x11); // maximum length
writeByte((byte) SQLServerConnection.maxDecimalPrecision); // precision
byte[] valueBytes = DDC.convertBigDecimalToBytes(bdValue, nScale);
writeBytes(valueBytes, 0, valueBytes.length);
}
/**
* Appends a standard v*max header for RPC parameter transmission.
*
* @param headerLength
* the total length of the PLP data block.
* @param isNull
* true if the value is NULL.
* @param collation
* The SQL collation associated with the value that follows the v*max header. Null for non-textual types.
*/
void writeVMaxHeader(long headerLength,
boolean isNull,
SQLCollation collation) throws SQLServerException {
// Send v*max length indicator 0xFFFF.
writeShort((short) 0xFFFF);
// Send collation if requested.
if (null != collation)
collation.writeCollation(this);
// Handle the null case here; for a null value only the null header is sent.
if (isNull) {
// Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
writeLong(0xFFFFFFFFFFFFFFFFL);
}
else if (DataTypes.UNKNOWN_STREAM_LENGTH == headerLength) {
// Append v*max length.
// UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
writeLong(0xFFFFFFFFFFFFFFFEL);
// NOTE: Don't send the first chunk length, this will be calculated by caller.
}
else {
// For v*max types with known length, length is <totallength8><chunklength4>
// We're sending same total length as chunk length (as we're sending 1 chunk).
writeLong(headerLength);
}
}
/**
* Utility for internal writeRPCString calls
*/
void writeRPCStringUnicode(String sValue) throws SQLServerException {
writeRPCStringUnicode(null, sValue, false, null);
}
/**
* Writes a string value as Unicode for RPC
*
* @param sName
* the optional parameter name
* @param sValue
* the data value
* @param bOut
* boolean true if the data value is being registered as an output parameter
* @param collation
* the collation of the data value
*/
void writeRPCStringUnicode(String sName,
String sValue,
boolean bOut,
SQLCollation collation) throws SQLServerException {
boolean bValueNull = (sValue == null);
int nValueLen = bValueNull ? 0 : (2 * sValue.length());
boolean isShortValue = nValueLen <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
// Textual RPC requires a collation. If none is provided, as is the case when
// the SSType is non-textual, then use the database collation by default.
if (null == collation)
collation = con.getDatabaseCollation();
// Use PLP encoding on Yukon and later with long values and OUT parameters
boolean usePLP = (!isShortValue || bOut);
if (usePLP) {
writeRPCNameValType(sName, bOut, TDSType.NVARCHAR);
// Handle Yukon v*max type header here.
writeVMaxHeader(nValueLen, // Length
bValueNull, // Is null?
collation);
// Send the data.
if (!bValueNull) {
if (nValueLen > 0) {
writeInt(nValueLen);
writeString(sValue);
}
// Send the terminator PLP chunk.
writeInt(0);
}
}
else // non-PLP type
{
// Write maximum length of data
if (isShortValue) {
writeRPCNameValType(sName, bOut, TDSType.NVARCHAR);
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
}
else {
writeRPCNameValType(sName, bOut, TDSType.NTEXT);
writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
}
collation.writeCollation(this);
// Data and length
if (bValueNull) {
writeShort((short) -1); // actual len
}
else {
// Write actual length of data
if (isShortValue)
writeShort((short) nValueLen);
else
writeInt(nValueLen);
// If length is zero, we're done.
if (0 != nValueLen)
writeString(sValue); // data
}
}
}
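/**
 * Writes a table-valued parameter (TVP) in RPC transmission format: the status byte, the TVP type token, the
 * three-part type name (database, schema, type name), the column metadata plus optional order/unique metadata
 * (or the null token for a null TVP), an end-of-metadata token, and finally the row data.
 */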
void writeTVP(TVP value) throws SQLServerException {
if (!value.isNull()) {
writeByte((byte) 0); // status
}
else {
// Default TVP
writeByte((byte) TDS.TVP_STATUS_DEFAULT); // default TVP
}
writeByte((byte) TDS.TDS_TVP);
/*
* TVP_TYPENAME = DbName OwningSchema TypeName
*/
// Database where TVP type resides
if (null != value.getDbNameTVP()) {
writeByte((byte) value.getDbNameTVP().length());
writeString(value.getDbNameTVP());
}
else
writeByte((byte) 0x00); // empty DB name
// Schema where TVP type resides
if (null != value.getOwningSchemaNameTVP()) {
writeByte((byte) value.getOwningSchemaNameTVP().length());
writeString(value.getOwningSchemaNameTVP());
}
else
writeByte((byte) 0x00); // empty Schema name
// TVP type name
if (null != value.getTVPName()) {
writeByte((byte) value.getTVPName().length());
writeString(value.getTVPName());
}
else
writeByte((byte) 0x00); // empty TVP name
if (!value.isNull()) {
writeTVPColumnMetaData(value);
// optional OrderUnique metadata
writeTvpOrderUnique(value);
}
else {
writeShort((short) TDS.TVP_NULL_TOKEN);
}
// TVP_END_TOKEN
writeByte((byte) 0x00);
try {
writeTVPRows(value);
}
catch (NumberFormatException e) {
throw new SQLServerException(SQLServerException.getErrString("R_TVPInvalidColumnValue"), e);
}
catch (ClassCastException e) {
throw new SQLServerException(SQLServerException.getErrString("R_TVPInvalidColumnValue"), e);
}
}
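/**
 * Writes the TVP_ROW tokens for every row of the table-valued parameter. When the TVP is backed by a
 * server-cursored ResultSet on the same connection, the current TDS headers and command state are cached and
 * each row is sent (and its response checked) individually, since fetching the next source row overwrites this
 * writer's state.
 */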
void writeTVPRows(TVP value) throws SQLServerException {
boolean isShortValue, isNull;
int dataLength;
boolean tdsWritterCached = false;
ByteBuffer cachedTVPHeaders = null;
TDSCommand cachedCommand = null;
boolean cachedRequestComplete = false;
boolean cachedInterruptsEnabled = false;
boolean cachedProcessedResponse = false;
if (!value.isNull()) {
// If the preparedStatement and the ResultSet were created by the same connection and the TVP is backed by that ResultSet
// via a server cursor, the tdsWriter of the calling preparedStatement is overwritten by the SQLServerResultSet#next() method when fetching new rows.
// Therefore, we need to send TVP data row by row before fetching new row.
if (TVPType.ResultSet == value.tvpType) {
if ((null != value.sourceResultSet) && (value.sourceResultSet instanceof SQLServerResultSet)) {
SQLServerResultSet sourceResultSet = (SQLServerResultSet) value.sourceResultSet;
SQLServerStatement src_stmt = (SQLServerStatement) sourceResultSet.getStatement();
int resultSetServerCursorId = sourceResultSet.getServerCursorId();
if (con.equals(src_stmt.getConnection()) && 0 != resultSetServerCursorId) {
cachedTVPHeaders = ByteBuffer.allocate(stagingBuffer.capacity()).order(stagingBuffer.order());
cachedTVPHeaders.put(stagingBuffer.array(), 0, stagingBuffer.position());
cachedCommand = this.command;
cachedRequestComplete = command.getRequestComplete();
cachedInterruptsEnabled = command.getInterruptsEnabled();
cachedProcessedResponse = command.getProcessedResponse();
tdsWritterCached = true;
if (sourceResultSet.isForwardOnly()) {
sourceResultSet.setFetchSize(1);
}
}
}
}
Map<Integer, SQLServerMetaData> columnMetadata = value.getColumnMetadata();
Iterator<Entry<Integer, SQLServerMetaData>> columnsIterator;
while (value.next()) {
// restore command and TDS header, which have been overwritten by value.next()
if (tdsWritterCached) {
command = cachedCommand;
stagingBuffer.clear();
logBuffer.clear();
writeBytes(cachedTVPHeaders.array(), 0, cachedTVPHeaders.position());
}
Object[] rowData = value.getRowData();
// ROW
writeByte((byte) TDS.TVP_ROW);
columnsIterator = columnMetadata.entrySet().iterator();
int currentColumn = 0;
while (columnsIterator.hasNext()) {
Map.Entry<Integer, SQLServerMetaData> columnPair = columnsIterator.next();
// If useServerDefault is set, client MUST NOT emit TvpColumnData for the associated column
if (columnPair.getValue().useServerDefault) {
currentColumn++;
continue;
}
JDBCType jdbcType = JDBCType.of(columnPair.getValue().javaSqlType);
String currentColumnStringValue = null;
Object currentObject = null;
if (null != rowData) {
// if rowData has value for the current column, retrieve it. If not, current column will stay null.
if (rowData.length > currentColumn) {
currentObject = rowData[currentColumn];
if (null != currentObject) {
currentColumnStringValue = String.valueOf(currentObject);
}
}
}
try {
switch (jdbcType) {
case BIGINT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 8);
writeLong(Long.valueOf(currentColumnStringValue).longValue());
}
break;
case BIT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 1);
writeByte((byte) (Boolean.valueOf(currentColumnStringValue).booleanValue() ? 1 : 0));
}
break;
case INTEGER:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 4);
writeInt(Integer.valueOf(currentColumnStringValue).intValue());
}
break;
case SMALLINT:
case TINYINT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 2); // length of datatype
writeShort(Short.valueOf(currentColumnStringValue).shortValue());
}
break;
case DECIMAL:
case NUMERIC:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) TDSWriter.BIGDECIMAL_MAX_LENGTH); // maximum length
BigDecimal bdValue = new BigDecimal(currentColumnStringValue);
/*
* setScale of all BigDecimal values based on metadata, as the scale is not sent separately for each individual value. Use
* the rounding used in the server. Say, for BigDecimal("0.1"), if the scale in the metadata is 0, then an ArithmeticException
* would be thrown if the RoundingMode is not set
*/
bdValue = bdValue.setScale(columnPair.getValue().scale, RoundingMode.HALF_UP);
byte[] valueBytes = DDC.convertBigDecimalToBytes(bdValue, bdValue.scale());
// 1-byte for sign and 16-byte for integer
byte[] byteValue = new byte[17];
// removing the precision and scale information from the valueBytes array
System.arraycopy(valueBytes, 2, byteValue, 0, valueBytes.length - 2);
writeBytes(byteValue);
}
break;
case DOUBLE:
if (null == currentColumnStringValue)
writeByte((byte) 0); // len of data bytes
else {
writeByte((byte) 8); // len of data bytes
long bits = Double.doubleToLongBits(Double.valueOf(currentColumnStringValue).doubleValue());
long mask = 0xFF;
int nShift = 0;
for (int i = 0; i < 8; i++) {
writeByte((byte) ((bits & mask) >> nShift));
nShift += 8;
mask = mask << 8;
}
}
break;
case FLOAT:
case REAL:
if (null == currentColumnStringValue)
writeByte((byte) 0); // actual length (0 == null)
else {
writeByte((byte) 4); // actual length
writeInt(Float.floatToRawIntBits(Float.valueOf(currentColumnStringValue).floatValue()));
}
break;
case DATE:
case TIME:
case TIMESTAMP:
case DATETIMEOFFSET:
case TIMESTAMP_WITH_TIMEZONE:
case TIME_WITH_TIMEZONE:
case CHAR:
case VARCHAR:
case NCHAR:
case NVARCHAR:
case LONGVARCHAR:
case LONGNVARCHAR:
case SQLXML:
isShortValue = (2L * columnPair.getValue().precision) <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
isNull = (null == currentColumnStringValue);
dataLength = isNull ? 0 : currentColumnStringValue.length() * 2;
if (!isShortValue) {
// check null
if (isNull)
// Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
writeLong(0xFFFFFFFFFFFFFFFFL);
else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
// Append v*max length.
// UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
writeLong(0xFFFFFFFFFFFFFFFEL);
else
// For v*max types with known length, length is <totallength8><chunklength4>
writeLong(dataLength);
if (!isNull) {
if (dataLength > 0) {
writeInt(dataLength);
writeString(currentColumnStringValue);
}
// Send the terminator PLP chunk.
writeInt(0);
}
}
else {
if (isNull)
writeShort((short) -1); // actual len
else {
writeShort((short) dataLength);
writeString(currentColumnStringValue);
}
}
break;
case BINARY:
case VARBINARY:
case LONGVARBINARY:
// Handle conversions as done in other types.
isShortValue = columnPair.getValue().precision <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
isNull = (null == currentObject);
if (currentObject instanceof String)
dataLength = isNull ? 0 : (toByteArray(currentObject.toString())).length;
else
dataLength = isNull ? 0 : ((byte[]) currentObject).length;
if (!isShortValue) {
// check null
if (isNull)
// Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
writeLong(0xFFFFFFFFFFFFFFFFL);
else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
// Append v*max length.
// UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
writeLong(0xFFFFFFFFFFFFFFFEL);
else
// For v*max types with known length, length is <totallength8><chunklength4>
writeLong(dataLength);
if (!isNull) {
if (dataLength > 0) {
writeInt(dataLength);
if (currentObject instanceof String)
writeBytes(toByteArray(currentObject.toString()));
else
writeBytes((byte[]) currentObject);
}
// Send the terminator PLP chunk.
writeInt(0);
}
}
else {
if (isNull)
writeShort((short) -1); // actual len
else {
writeShort((short) dataLength);
if (currentObject instanceof String)
writeBytes(toByteArray(currentObject.toString()));
else
writeBytes((byte[]) currentObject);
}
}
break;
default:
assert false : "Unexpected JDBC type " + jdbcType.toString();
}
}
catch (IllegalArgumentException e) {
throw new SQLServerException(SQLServerException.getErrString("R_errorConvertingValue"), e);
}
catch (ArrayIndexOutOfBoundsException e) {
throw new SQLServerException(SQLServerException.getErrString("R_CSVDataSchemaMismatch"), e);
}
currentColumn++;
}
// send this row, read its response (throw exception in case of errors) and reset command status
if (tdsWritterCached) {
// TVP_END_TOKEN
writeByte((byte) 0x00);
writePacket(TDS.STATUS_BIT_EOM);
TDSReader tdsReader = tdsChannel.getReader(command);
int tokenType = tdsReader.peekTokenType();
if (TDS.TDS_ERR == tokenType) {
StreamError databaseError = new StreamError();
databaseError.setFromTDS(tdsReader);
SQLServerException.makeFromDatabaseError(con, null, databaseError.getMessage(), databaseError, false);
}
command.setInterruptsEnabled(true);
command.setRequestComplete(false);
}
}
}
// restore the cached command status that was overwritten while sending the rows
if (tdsWritterCached) {
command.setRequestComplete(cachedRequestComplete);
command.setInterruptsEnabled(cachedInterruptsEnabled);
command.setProcessedResponse(cachedProcessedResponse);
}
else {
// TVP_END_TOKEN
writeByte((byte) 0x00);
}
}
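// Parses a hexadecimal string into its binary byte array representation.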
private static byte[] toByteArray(String s) {
return DatatypeConverter.parseHexBinary(s);
}
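/**
 * Writes the TVP_COLMETADATA section: the column count followed by, for each column, the user type, the flags
 * (nullable, plus the default-column flag when useServerDefault is set), the TYPE_INFO for the column's TDS
 * type, and an empty column name as required for TVPs.
 */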
void writeTVPColumnMetaData(TVP value) throws SQLServerException {
boolean isShortValue;
// TVP_COLMETADATA
writeShort((short) value.getTVPColumnCount());
Map<Integer, SQLServerMetaData> columnMetadata = value.getColumnMetadata();
Iterator<Entry<Integer, SQLServerMetaData>> columnsIterator = columnMetadata.entrySet().iterator();
/*
* TypeColumnMetaData = UserType Flags TYPE_INFO ColName ;
*/
while (columnsIterator.hasNext()) {
Map.Entry<Integer, SQLServerMetaData> pair = columnsIterator.next();
JDBCType jdbcType = JDBCType.of(pair.getValue().javaSqlType);
boolean useServerDefault = pair.getValue().useServerDefault;
// ULONG ; UserType of column
// The value will be 0x0000 with the exceptions of TIMESTAMP (0x0050) and alias types (greater than 0x00FF).
writeInt(0);
/*
* Flags = fNullable ; Column is nullable - %x01 fCaseSen -- Ignored ; usUpdateable -- Ignored ; fIdentity ; Column is identity column -
* %x10 fComputed ; Column is computed - %x20 usReservedODBC -- Ignored ; fFixedLenCLRType-- Ignored ; fDefault ; Column is default value
* - %x200 usReserved -- Ignored ;
*/
short flags = TDS.FLAG_NULLABLE;
if (useServerDefault) {
flags |= TDS.FLAG_TVP_DEFAULT_COLUMN;
}
writeShort(flags);
// Type info
switch (jdbcType) {
case BIGINT:
writeByte(TDSType.INTN.byteValue());
writeByte((byte) 8); // max length of datatype
break;
case BIT:
writeByte(TDSType.BITN.byteValue());
writeByte((byte) 1); // max length of datatype
break;
case INTEGER:
writeByte(TDSType.INTN.byteValue());
writeByte((byte) 4); // max length of datatype
break;
case SMALLINT:
case TINYINT:
writeByte(TDSType.INTN.byteValue());
writeByte((byte) 2); // max length of datatype
break;
case DECIMAL:
case NUMERIC:
writeByte(TDSType.NUMERICN.byteValue());
writeByte((byte) 0x11); // maximum length
writeByte((byte) pair.getValue().precision);
writeByte((byte) pair.getValue().scale);
break;
case DOUBLE:
writeByte(TDSType.FLOATN.byteValue());
writeByte((byte) 8); // max length of datatype
break;
case FLOAT:
case REAL:
writeByte(TDSType.FLOATN.byteValue());
writeByte((byte) 4); // max length of datatype
break;
case DATE:
case TIME:
case TIMESTAMP:
case DATETIMEOFFSET:
case TIMESTAMP_WITH_TIMEZONE:
case TIME_WITH_TIMEZONE:
case CHAR:
case VARCHAR:
case NCHAR:
case NVARCHAR:
case LONGVARCHAR:
case LONGNVARCHAR:
case SQLXML:
writeByte(TDSType.NVARCHAR.byteValue());
isShortValue = (2L * pair.getValue().precision) <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
// Use PLP encoding on Yukon and later with long values
if (!isShortValue) // PLP
{
// Handle Yukon v*max type header here.
writeShort((short) 0xFFFF);
con.getDatabaseCollation().writeCollation(this);
}
else // non PLP
{
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
con.getDatabaseCollation().writeCollation(this);
}
break;
case BINARY:
case VARBINARY:
case LONGVARBINARY:
writeByte(TDSType.BIGVARBINARY.byteValue());
isShortValue = pair.getValue().precision <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
// Use PLP encoding on Yukon and later with long values
if (!isShortValue) // PLP
// Handle Yukon v*max type header here.
writeShort((short) 0xFFFF);
else // non PLP
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
break;
default:
assert false : "Unexpected JDBC type " + jdbcType.toString();
}
// Column name - must be null (from TDS - TVP_COLMETADATA)
writeByte((byte) 0x00);
// [TVP_ORDER_UNIQUE]
// [TVP_COLUMN_ORDERING]
}
}
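/**
 * Writes the optional TVP_ORDER_UNIQUE token listing the columns that carry a sort order and/or unique-key flag.
 * Nothing is written when no column has such flags.
 */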
void writeTvpOrderUnique(TVP value) throws SQLServerException {
/*
* TVP_ORDER_UNIQUE = TVP_ORDER_UNIQUE_TOKEN (Count <Count>(ColNum OrderUniqueFlags))
*/
Map<Integer, SQLServerMetaData> columnMetadata = value.getColumnMetadata();
Iterator<Entry<Integer, SQLServerMetaData>> columnsIterator = columnMetadata.entrySet().iterator();
LinkedList<TdsOrderUnique> columnList = new LinkedList<TdsOrderUnique>();
while (columnsIterator.hasNext()) {
byte flags = 0;
Map.Entry<Integer, SQLServerMetaData> pair = columnsIterator.next();
SQLServerMetaData metaData = pair.getValue();
if (SQLServerSortOrder.Ascending == metaData.sortOrder)
flags = TDS.TVP_ORDERASC_FLAG;
else if (SQLServerSortOrder.Descending == metaData.sortOrder)
flags = TDS.TVP_ORDERDESC_FLAG;
if (metaData.isUniqueKey)
flags |= TDS.TVP_UNIQUE_FLAG;
// Remember this column if any flags were set
if (0 != flags)
columnList.add(new TdsOrderUnique(pair.getKey(), flags));
}
// Write flagged columns
if (!columnList.isEmpty()) {
writeByte((byte) TDS.TVP_ORDER_UNIQUE_TOKEN);
writeShort((short) columnList.size());
for (TdsOrderUnique column : columnList) {
writeShort((short) (column.columnOrdinal + 1));
writeByte(column.flags);
}
}
}
private class TdsOrderUnique {
int columnOrdinal;
byte flags;
TdsOrderUnique(int ordinal,
byte flags) {
this.columnOrdinal = ordinal;
this.flags = flags;
}
}
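// Accessors for the crypto metadata used when writing encrypted (Always Encrypted) values, e.g. during bulk copy.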
void setCryptoMetaData(CryptoMetadata cryptoMetaForBulk) {
this.cryptoMeta = cryptoMetaForBulk;
}
CryptoMetadata getCryptoMetaData() {
return cryptoMeta;
}
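/**
 * Writes an encrypted value as a variable-length byte array, choosing the short, PLP, or image-style length
 * prefix based on the ciphertext length, followed by the actual length, the data, and, for PLP values, the
 * terminator chunk.
 */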
void writeEncryptedRPCByteArray(byte bValue[]) throws SQLServerException {
boolean bValueNull = (bValue == null);
long nValueLen = bValueNull ? 0 : bValue.length;
boolean isShortValue = (nValueLen <= DataTypes.SHORT_VARTYPE_MAX_BYTES);
boolean isPLP = (!isShortValue) && (nValueLen <= DataTypes.MAX_VARTYPE_MAX_BYTES);
// Handle Shiloh types here.
if (isShortValue) {
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
}
else if (isPLP) {
writeShort((short) DataTypes.SQL_USHORTVARMAXLEN);
}
else {
writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
}
// Data and length
if (bValueNull) {
writeShort((short) -1); // actual len
}
else {
if (isShortValue) {
writeShort((short) nValueLen); // actual len
}
else if (isPLP) {
writeLong(nValueLen); // actual length
}
else {
writeInt((int) nValueLen); // actual len
}
// If length is zero, we're done.
if (0 != nValueLen) {
if (isPLP) {
writeInt((int) nValueLen);
}
writeBytes(bValue);
}
if (isPLP) {
writeInt(0); // PLP_TERMINATOR, 0x00000000
}
}
}
void writeEncryptedRPCPLP() throws SQLServerException {
writeShort((short) DataTypes.SQL_USHORTVARMAXLEN);
writeLong((long) 0); // actual length
writeInt(0); // PLP_TERMINATOR, 0x00000000
}
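/**
 * Writes the encryption metadata that follows an encrypted parameter value: cipher algorithm id, encryption
 * type, the column encryption key's database id, CEK id, CEK version and metadata version, and the
 * normalization rule version.
 */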
void writeCryptoMetaData() throws SQLServerException {
writeByte(cryptoMeta.cipherAlgorithmId);
writeByte(cryptoMeta.encryptionType.getValue());
writeInt(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).databaseId);
writeInt(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).cekId);
writeInt(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).cekVersion);
writeBytes(cryptoMeta.cekTableEntry.getColumnEncryptionKeyValues().get(0).cekMdVersion);
writeByte(cryptoMeta.normalizationRuleVersion);
}
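/**
 * Appends a byte array value in RPC transmission format, selecting the TDS type (BIGVARBINARY/IMAGE,
 * BIGVARCHAR/TEXT or NVARCHAR/NTEXT) from the JDBC type and using PLP (v*max) encoding for long values and OUT
 * parameters. Encrypted values are sent as BIGVARBINARY without collation.
 */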
void writeRPCByteArray(String sName,
byte bValue[],
boolean bOut,
JDBCType jdbcType,
SQLCollation collation) throws SQLServerException {
boolean bValueNull = (bValue == null);
int nValueLen = bValueNull ? 0 : bValue.length;
boolean isShortValue = (nValueLen <= DataTypes.SHORT_VARTYPE_MAX_BYTES);
// Use PLP encoding on Yukon and later with long values and OUT parameters
boolean usePLP = (!isShortValue || bOut);
TDSType tdsType;
if (null != cryptoMeta) {
// send encrypted data as BIGVARBINARY
tdsType = (isShortValue || usePLP) ? TDSType.BIGVARBINARY : TDSType.IMAGE;
collation = null;
}
else
switch (jdbcType) {
case BINARY:
case VARBINARY:
case LONGVARBINARY:
case BLOB:
default:
tdsType = (isShortValue || usePLP) ? TDSType.BIGVARBINARY : TDSType.IMAGE;
collation = null;
break;
case CHAR:
case VARCHAR:
case LONGVARCHAR:
case CLOB:
tdsType = (isShortValue || usePLP) ? TDSType.BIGVARCHAR : TDSType.TEXT;
if (null == collation)
collation = con.getDatabaseCollation();
break;
case NCHAR:
case NVARCHAR:
case LONGNVARCHAR:
case NCLOB:
tdsType = (isShortValue || usePLP) ? TDSType.NVARCHAR : TDSType.NTEXT;
if (null == collation)
collation = con.getDatabaseCollation();
break;
}
writeRPCNameValType(sName, bOut, tdsType);
if (usePLP) {
// Handle Yukon v*max type header here.
writeVMaxHeader(nValueLen, bValueNull, collation);
// Send the data.
if (!bValueNull) {
if (nValueLen > 0) {
writeInt(nValueLen);
writeBytes(bValue);
}
// Send the terminator PLP chunk.
writeInt(0);
}
}
else // non-PLP type
{
// Handle Shiloh types here.
if (isShortValue) {
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
}
else {
writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
}
if (null != collation)
collation.writeCollation(this);
// Data and length
if (bValueNull) {
writeShort((short) -1); // actual len
}
else {
if (isShortValue)
writeShort((short) nValueLen); // actual len
else
writeInt(nValueLen); // actual len
// If length is zero, we're done.
if (0 != nValueLen)
writeBytes(bValue);
}
}
}
/**
* Append a timestamp in RPC transmission format as a SQL Server DATETIME data type
*
* @param sName
* the optional parameter name
* @param cal
* Pure Gregorian calendar containing the timestamp, including its associated time zone
* @param subSecondNanos
* the sub-second nanoseconds (0 - 999,999,999)
* @param bOut
* boolean true if the data value is being registered as an output parameter
*
*/
void writeRPCDateTime(String sName,
GregorianCalendar cal,
int subSecondNanos,
boolean bOut) throws SQLServerException {
assert (subSecondNanos >= 0) && (subSecondNanos < Nanos.PER_SECOND) : "Invalid subNanoSeconds value: " + subSecondNanos;
assert (cal != null) || (cal == null && subSecondNanos == 0) : "Invalid subNanoSeconds value when calendar is null: " + subSecondNanos;
writeRPCNameValType(sName, bOut, TDSType.DATETIMEN);
writeByte((byte) 8); // max length of datatype
if (null == cal) {
writeByte((byte) 0); // len of data bytes
return;
}
writeByte((byte) 8); // len of data bytes
// We need to extract the Calendar's current date & time in terms
// of the number of days since the SQL Base Date (1/1/1900) plus
// the number of milliseconds since midnight in the current day.
// We cannot rely on any pre-calculated value for the number of
// milliseconds in a day or the number of milliseconds since the
// base date to do this because days with DST changes are shorter
// or longer than "normal" days.
// ASSUMPTION: We assume we are dealing with a GregorianCalendar here.
// If not, we have no basis in which to compare dates. E.g. if we
// are dealing with a Chinese Calendar implementation which does not
// use the same value for Calendar.YEAR as the GregorianCalendar,
// we cannot meaningfully compute a value relative to 1/1/1900.
// First, figure out how many days there have been since the SQL Base Date.
// These are based on SQL Server algorithms
int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);
// Next, figure out the number of milliseconds since midnight of the current day.
int millisSinceMidnight = (subSecondNanos + Nanos.PER_MILLISECOND / 2) / Nanos.PER_MILLISECOND + // Millis into the current second
1000 * cal.get(Calendar.SECOND) + // Seconds into the current minute
60 * 1000 * cal.get(Calendar.MINUTE) + // Minutes into the current hour
60 * 60 * 1000 * cal.get(Calendar.HOUR_OF_DAY); // Hours into the current day
// The last millisecond of the current day is always rounded to the first millisecond
// of the next day because DATETIME is only accurate to 1/300th of a second.
if (millisSinceMidnight >= 1000 * 60 * 60 * 24 - 1) {
++daysSinceSQLBaseDate;
millisSinceMidnight = 0;
}
// Last-ditch verification that the value is in the valid range for the
// DATETIMEN TDS data type (1/1/1753 to 12/31/9999). If it's not, then
// throw an exception now so that statement execution is safely canceled.
// Attempting to put an invalid value on the wire would result in a TDS
// exception, which would close the connection.
// These are based on SQL Server algorithms
if (daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1753, 1, TDS.BASE_YEAR_1900)
|| daysSinceSQLBaseDate >= DDC.daysSinceBaseDate(10000, 1, TDS.BASE_YEAR_1900)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
Object[] msgArgs = {SSType.DATETIME};
throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
}
// And put it all on the wire...
// Number of days since the SQL Server Base Date (January 1, 1900)
writeInt(daysSinceSQLBaseDate);
// Milliseconds since midnight (at a resolution of three hundredths of a second)
writeInt((3 * millisSinceMidnight + 5) / 10);
}
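// Appends a TIME value in RPC transmission format: TIMEN type info with the requested scale, then either a zero
// data length for a null value or the scaled time-of-day value.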
void writeRPCTime(String sName,
GregorianCalendar localCalendar,
int subSecondNanos,
int scale,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.TIMEN);
writeByte((byte) scale);
if (null == localCalendar) {
writeByte((byte) 0);
return;
}
writeByte((byte) TDS.timeValueLength(scale));
writeScaledTemporal(localCalendar, subSecondNanos, scale, SSType.TIME);
}
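// Appends a DATE value in RPC transmission format: DATEN type info, then either a zero data length for a null
// value or the days-into-the-Common-Era value.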
void writeRPCDate(String sName,
GregorianCalendar localCalendar,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.DATEN);
if (null == localCalendar) {
writeByte((byte) 0);
return;
}
writeByte((byte) TDS.DAYS_INTO_CE_LENGTH);
writeScaledTemporal(localCalendar, 0, // subsecond nanos (none for a date value)
0, // scale (dates are not scaled)
SSType.DATE);
}
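// Appends an encrypted TIME value: the scaled time is encrypted and sent as BIGVARBINARY, followed by the
// plaintext type info (TIMEN and scale) and the crypto metadata. Not supported when sendTimeAsDatetime is enabled.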
void writeEncryptedRPCTime(String sName,
GregorianCalendar localCalendar,
int subSecondNanos,
int scale,
boolean bOut) throws SQLServerException {
if (con.getSendTimeAsDatetime()) {
throw new SQLServerException(SQLServerException.getErrString("R_sendTimeAsDateTimeForAE"), null);
}
writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);
if (null == localCalendar)
writeEncryptedRPCByteArray(null);
else
writeEncryptedRPCByteArray(writeEncryptedScaledTemporal(localCalendar, subSecondNanos, scale, SSType.TIME, (short) 0));
writeByte(TDSType.TIMEN.byteValue());
writeByte((byte) scale);
writeCryptoMetaData();
}
void writeEncryptedRPCDate(String sName,
GregorianCalendar localCalendar,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);
if (null == localCalendar)
writeEncryptedRPCByteArray(null);
else
writeEncryptedRPCByteArray(writeEncryptedScaledTemporal(localCalendar, 0, // subsecond nanos (none for a date value)
0, // scale (dates are not scaled)
SSType.DATE, (short) 0));
writeByte(TDSType.DATEN.byteValue());
writeCryptoMetaData();
}
void writeEncryptedRPCDateTime(String sName,
GregorianCalendar cal,
int subSecondNanos,
boolean bOut,
JDBCType jdbcType) throws SQLServerException {
assert (subSecondNanos >= 0) && (subSecondNanos < Nanos.PER_SECOND) : "Invalid subNanoSeconds value: " + subSecondNanos;
assert (cal != null) || (cal == null && subSecondNanos == 0) : "Invalid subNanoSeconds value when calendar is null: " + subSecondNanos;
writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);
if (null == cal)
writeEncryptedRPCByteArray(null);
else
writeEncryptedRPCByteArray(getEncryptedDateTimeAsBytes(cal, subSecondNanos, jdbcType));
if (JDBCType.SMALLDATETIME == jdbcType) {
writeByte(TDSType.DATETIMEN.byteValue());
writeByte((byte) 4);
}
else {
writeByte(TDSType.DATETIMEN.byteValue());
writeByte((byte) 8);
}
writeCryptoMetaData();
}
// getEncryptedDateTimeAsBytes is called if jdbcType/ssType is SMALLDATETIME or DATETIME
byte[] getEncryptedDateTimeAsBytes(GregorianCalendar cal,
int subSecondNanos,
JDBCType jdbcType) throws SQLServerException {
int daysSinceSQLBaseDate = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), TDS.BASE_YEAR_1900);
// Next, figure out the number of milliseconds since midnight of the current day.
int millisSinceMidnight = (subSecondNanos + Nanos.PER_MILLISECOND / 2) / Nanos.PER_MILLISECOND + // Millis into the current second
1000 * cal.get(Calendar.SECOND) + // Seconds into the current minute
60 * 1000 * cal.get(Calendar.MINUTE) + // Minutes into the current hour
60 * 60 * 1000 * cal.get(Calendar.HOUR_OF_DAY); // Hours into the current day
// The last millisecond of the current day is always rounded to the first millisecond
// of the next day because DATETIME is only accurate to 1/300th of a second.
if (millisSinceMidnight >= 1000 * 60 * 60 * 24 - 1) {
++daysSinceSQLBaseDate;
millisSinceMidnight = 0;
}
if (JDBCType.SMALLDATETIME == jdbcType) {
int secondsSinceMidnight = (millisSinceMidnight / 1000);
int minutesSinceMidnight = (secondsSinceMidnight / 60);
// Values that are 29.998 seconds or less are rounded down to the nearest minute
minutesSinceMidnight = ((secondsSinceMidnight % 60) > 29.998) ? minutesSinceMidnight + 1 : minutesSinceMidnight;
// minutesSinceMidnight for (23:59:30)
int maxMinutesSinceMidnight_SmallDateTime = 1440;
// Verification that the smalldatetime value is within the valid range of 1900.01.01 to 2079.06.06.
// Unencrypted smalldatetime does not allow insertion of 2079.06.06 23:59:59 because it is rounded up
// to 2079.06.07 00:00:00; therefore, we also check minutesSinceMidnight for that boundary. If the value is not within the valid range,
// throw an exception now so that statement execution is safely canceled.
// 157 is the calculated day of year for 06-06; 1440 is the minutes since midnight that 23:59:30 rounds up to.
if ((daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1900, 1, TDS.BASE_YEAR_1900)
|| daysSinceSQLBaseDate > DDC.daysSinceBaseDate(2079, 157, TDS.BASE_YEAR_1900))
|| (daysSinceSQLBaseDate == DDC.daysSinceBaseDate(2079, 157, TDS.BASE_YEAR_1900)
&& minutesSinceMidnight >= maxMinutesSinceMidnight_SmallDateTime)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
Object[] msgArgs = {SSType.SMALLDATETIME};
throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
}
ByteBuffer days = ByteBuffer.allocate(2).order(ByteOrder.LITTLE_ENDIAN);
days.putShort((short) daysSinceSQLBaseDate);
ByteBuffer seconds = ByteBuffer.allocate(2).order(ByteOrder.LITTLE_ENDIAN);
seconds.putShort((short) minutesSinceMidnight);
byte[] value = new byte[4];
System.arraycopy(days.array(), 0, value, 0, 2);
System.arraycopy(seconds.array(), 0, value, 2, 2);
return SQLServerSecurityUtility.encryptWithKey(value, cryptoMeta, con);
}
else if (JDBCType.DATETIME == jdbcType) {
// Last-ditch verification that the value is in the valid range for the
// DATETIMEN TDS data type (1/1/1753 to 12/31/9999). If it's not, then
// throw an exception now so that statement execution is safely canceled.
// Attempting to put an invalid value on the wire would result in a TDS
// exception, which would close the connection.
// These are based on SQL Server algorithms
// And put it all on the wire...
if (daysSinceSQLBaseDate < DDC.daysSinceBaseDate(1753, 1, TDS.BASE_YEAR_1900)
|| daysSinceSQLBaseDate >= DDC.daysSinceBaseDate(10000, 1, TDS.BASE_YEAR_1900)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
Object[] msgArgs = {SSType.DATETIME};
throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
}
// Number of days since the SQL Server Base Date (January 1, 1900)
ByteBuffer days = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
days.putInt(daysSinceSQLBaseDate);
ByteBuffer seconds = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
seconds.putInt((3 * millisSinceMidnight + 5) / 10);
byte[] value = new byte[8];
System.arraycopy(days.array(), 0, value, 0, 4);
System.arraycopy(seconds.array(), 0, value, 4, 4);
return SQLServerSecurityUtility.encryptWithKey(value, cryptoMeta, con);
}
assert false : "Unexpected JDBCType type " + jdbcType;
return null;
}
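// Appends an encrypted DATETIME2 value: the scaled date/time is encrypted and sent as BIGVARBINARY, followed by
// the plaintext DATETIME2N type info (scale) and the crypto metadata.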
void writeEncryptedRPCDateTime2(String sName,
GregorianCalendar localCalendar,
int subSecondNanos,
int scale,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);
if (null == localCalendar)
writeEncryptedRPCByteArray(null);
else
writeEncryptedRPCByteArray(writeEncryptedScaledTemporal(localCalendar, subSecondNanos, scale, SSType.DATETIME2, (short) 0));
writeByte(TDSType.DATETIME2N.byteValue());
writeByte((byte) (scale));
writeCryptoMetaData();
}
void writeEncryptedRPCDateTimeOffset(String sName,
GregorianCalendar utcCalendar,
int minutesOffset,
int subSecondNanos,
int scale,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.BIGVARBINARY);
if (null == utcCalendar)
writeEncryptedRPCByteArray(null);
else {
assert 0 == utcCalendar.get(Calendar.ZONE_OFFSET);
writeEncryptedRPCByteArray(
writeEncryptedScaledTemporal(utcCalendar, subSecondNanos, scale, SSType.DATETIMEOFFSET, (short) minutesOffset));
}
writeByte(TDSType.DATETIMEOFFSETN.byteValue());
writeByte((byte) (scale));
writeCryptoMetaData();
}
void writeRPCDateTime2(String sName,
GregorianCalendar localCalendar,
int subSecondNanos,
int scale,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.DATETIME2N);
writeByte((byte) scale);
if (null == localCalendar) {
writeByte((byte) 0);
return;
}
writeByte((byte) TDS.datetime2ValueLength(scale));
writeScaledTemporal(localCalendar, subSecondNanos, scale, SSType.DATETIME2);
}
void writeRPCDateTimeOffset(String sName,
GregorianCalendar utcCalendar,
int minutesOffset,
int subSecondNanos,
int scale,
boolean bOut) throws SQLServerException {
writeRPCNameValType(sName, bOut, TDSType.DATETIMEOFFSETN);
writeByte((byte) scale);
if (null == utcCalendar) {
writeByte((byte) 0);
return;
}
assert 0 == utcCalendar.get(Calendar.ZONE_OFFSET);
writeByte((byte) TDS.datetimeoffsetValueLength(scale));
writeScaledTemporal(utcCalendar, subSecondNanos, scale, SSType.DATETIMEOFFSET);
writeShort((short) minutesOffset);
}
/**
* Returns subSecondNanos rounded to the maximum precision supported. The maximum fractional scale is MAX_FRACTIONAL_SECONDS_SCALE (7).
* E.g. 1: passing 456,790,123 returns 456,790,100.
* E.g. 2: passing 456,790,150 returns 456,790,200.
* E.g. 3: passing 999,999,951 returns 1,000,000,000.
* This is done to ensure consistent rounding behaviour in setters and getters. Bug #507919
*/
private int getRoundedSubSecondNanos(int subSecondNanos) {
int roundedNanos = ((subSecondNanos + (Nanos.PER_MAX_SCALE_INTERVAL / 2)) / Nanos.PER_MAX_SCALE_INTERVAL) * Nanos.PER_MAX_SCALE_INTERVAL;
return roundedNanos;
}
/**
* Writes to the TDS channel a temporal value as an instance of one of the scaled temporal SQL types: DATE, TIME, DATETIME2, or
* DATETIMEOFFSET.
*
* @param cal
* Calendar representing the value to write, except for any sub-second nanoseconds
* @param subSecondNanos
* the sub-second nanoseconds (0 - 999,999,999)
* @param scale
* the scale (in digits: 0 - 7) to use for the sub-second nanos component
* @param ssType
* the SQL Server data type (DATE, TIME, DATETIME2, or DATETIMEOFFSET)
*
* @throws SQLServerException
* if an I/O error occurs or if the value is not in the valid range
*/
private void writeScaledTemporal(GregorianCalendar cal,
int subSecondNanos,
int scale,
SSType ssType) throws SQLServerException {
assert con.isKatmaiOrLater();
assert SSType.DATE == ssType || SSType.TIME == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: "
+ ssType;
// First, for types with a time component, write the scaled nanos since midnight
if (SSType.TIME == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
assert subSecondNanos >= 0;
assert subSecondNanos < Nanos.PER_SECOND;
assert scale >= 0;
assert scale <= TDS.MAX_FRACTIONAL_SECONDS_SCALE;
int secondsSinceMidnight = cal.get(Calendar.SECOND) + 60 * cal.get(Calendar.MINUTE) + 60 * 60 * cal.get(Calendar.HOUR_OF_DAY);
// Scale nanos since midnight to the desired scale, rounding the value as necessary
long divisor = Nanos.PER_MAX_SCALE_INTERVAL * (long) Math.pow(10, TDS.MAX_FRACTIONAL_SECONDS_SCALE - scale);
// The scaledNanos variable represents the fractional seconds of the value at the scale
// indicated by the scale variable. So, for example, scaledNanos = 3 means 300 nanoseconds
// at scale TDS.MAX_FRACTIONAL_SECONDS_SCALE, but 3000 nanoseconds at
// TDS.MAX_FRACTIONAL_SECONDS_SCALE - 1
long scaledNanos = ((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos) + divisor / 2) / divisor;
// SQL Server rounding behavior indicates that it always rounds up unless
// we are at the max value of the type (not merely at the end of every day), in which case it truncates.
// If rounding nanos to the specified scale rolls the value to the next day ...
if (Nanos.PER_DAY / divisor == scaledNanos) {
// If the type is time, always truncate
if (SSType.TIME == ssType) {
--scaledNanos;
}
// If the type is datetime2 or datetimeoffset, truncate only if it's the max value supported
else {
assert SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: " + ssType;
// ... then bump the date, provided that the resulting date is still within
// the valid date range.
// Extreme edge case (literally, the VERY edge...):
// If nanos overflow rolls the date value out of range (that is, we have a value
// a few nanoseconds later than 9999-12-31 23:59:59) then truncate the nanos
// instead of rolling.
// This case is very likely never hit by "real world" applications, but exists
// here as a security measure to ensure that such values don't result in a
// connection-closing TDS exception.
cal.add(Calendar.SECOND, 1);
if (cal.get(Calendar.YEAR) <= 9999) {
scaledNanos = 0;
}
else {
cal.add(Calendar.SECOND, -1);
--scaledNanos;
}
}
}
// Encode the scaled nanos to TDS
int encodedLength = TDS.nanosSinceMidnightLength(scale);
byte[] encodedBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);
writeBytes(encodedBytes);
}
// Second, for types with a date component, write the days into the Common Era
if (SSType.DATE == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
// Computation of the number of days into the Common Era assumes that
// the DAY_OF_YEAR field reflects a pure Gregorian calendar - one that
// uses Gregorian leap year rules across the entire range of dates.
// For the DAY_OF_YEAR field to accurately reflect pure Gregorian behavior,
// we need to use a pure Gregorian calendar for dates that are Julian dates
// under a standard Gregorian calendar and for (Gregorian) dates later than
// the cutover date in the cutover year.
if (cal.getTimeInMillis() < GregorianChange.STANDARD_CHANGE_DATE.getTime()
|| cal.getActualMaximum(Calendar.DAY_OF_YEAR) < TDS.DAYS_PER_YEAR) {
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH);
int date = cal.get(Calendar.DATE);
// Set the cutover as early as possible (pure Gregorian behavior)
cal.setGregorianChange(GregorianChange.PURE_CHANGE_DATE);
// Initialize the date field by field (preserving the "wall calendar" value)
cal.set(year, month, date);
}
int daysIntoCE = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), 1);
// Last-ditch verification that the value is in the valid range for the
// DATE/DATETIME2/DATETIMEOFFSET TDS data type (1/1/0001 to 12/31/9999).
// If it's not, then throw an exception now so that statement execution
// is safely canceled. Attempting to put an invalid value on the wire
// would result in a TDS exception, which would close the connection.
if (daysIntoCE < 0 || daysIntoCE >= DDC.daysSinceBaseDate(10000, 1, 1)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
Object[] msgArgs = {ssType};
throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
}
byte encodedBytes[] = new byte[3];
encodedBytes[0] = (byte) ((daysIntoCE >> 0) & 0xFF);
encodedBytes[1] = (byte) ((daysIntoCE >> 8) & 0xFF);
encodedBytes[2] = (byte) ((daysIntoCE >> 16) & 0xFF);
writeBytes(encodedBytes);
}
}
/**
* Writes to the TDS channel a temporal value as an instance of one of the scaled temporal SQL types: DATE, TIME, DATETIME2, or
* DATETIMEOFFSET.
*
* @param cal
* Calendar representing the value to write, except for any sub-second nanoseconds
* @param subSecondNanos
* the sub-second nanoseconds (0 - 999,999,999)
* @param scale
* the scale (in digits: 0 - 7) to use for the sub-second nanos component
* @param ssType
* the SQL Server data type (DATE, TIME, DATETIME2, or DATETIMEOFFSET)
* @param minutesOffset
* the offset value for DATETIMEOFFSET
* @throws SQLServerException
* if an I/O error occurs or if the value is not in the valid range
*/
byte[] writeEncryptedScaledTemporal(GregorianCalendar cal,
int subSecondNanos,
int scale,
SSType ssType,
short minutesOffset) throws SQLServerException {
assert con.isKatmaiOrLater();
assert SSType.DATE == ssType || SSType.TIME == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: "
+ ssType;
// store the time and minutesOffset portion of DATETIME2 and DATETIMEOFFSET to be used with date portion
byte encodedBytesForEncryption[] = null;
int secondsSinceMidnight = 0;
long divisor = 0;
long scaledNanos = 0;
// First, for types with a time component, write the scaled nanos since midnight
if (SSType.TIME == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
assert subSecondNanos >= 0;
assert subSecondNanos < Nanos.PER_SECOND;
assert scale >= 0;
assert scale <= TDS.MAX_FRACTIONAL_SECONDS_SCALE;
secondsSinceMidnight = cal.get(Calendar.SECOND) + 60 * cal.get(Calendar.MINUTE) + 60 * 60 * cal.get(Calendar.HOUR_OF_DAY);
// Scale nanos since midnight to the desired scale, rounding the value as necessary
divisor = Nanos.PER_MAX_SCALE_INTERVAL * (long) Math.pow(10, TDS.MAX_FRACTIONAL_SECONDS_SCALE - scale);
// The scaledNanos variable represents the fractional seconds of the value at the scale
// indicated by the scale variable. So, for example, scaledNanos = 3 means 300 nanoseconds
// at scale TDS.MAX_FRACTIONAL_SECONDS_SCALE, but 3000 nanoseconds at
// TDS.MAX_FRACTIONAL_SECONDS_SCALE - 1
scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos) + divisor / 2) / divisor)
* divisor / 100;
// For an encrypted time value, SQL Server cannot do the rounding or casting itself,
// so the driver needs to cast it before encryption.
if (SSType.TIME == ssType && 864000000000L <= scaledNanos) {
scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos)) / divisor) * divisor / 100;
}
// SQL Server rounding behavior indicates that it always rounds up unless
// we are at the max value of the type (not merely at the end of every day), in which case it truncates.
// If rounding nanos to the specified scale rolls the value to the next day ...
if (Nanos.PER_DAY / divisor == scaledNanos) {
// If the type is time, always truncate
if (SSType.TIME == ssType) {
--scaledNanos;
}
// If the type is datetime2 or datetimeoffset, truncate only if it's the max value supported
else {
assert SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType : "Unexpected SSType: " + ssType;
// ... then bump the date, provided that the resulting date is still within
// the valid date range.
// Extreme edge case (literally, the VERY edge...):
// If nanos overflow rolls the date value out of range (that is, we have a value
// a few nanoseconds later than 9999-12-31 23:59:59) then truncate the nanos
// instead of rolling.
// This case is very likely never hit by "real world" applications, but exists
// here as a security measure to ensure that such values don't result in a
// connection-closing TDS exception.
cal.add(Calendar.SECOND, 1);
if (cal.get(Calendar.YEAR) <= 9999) {
scaledNanos = 0;
}
else {
cal.add(Calendar.SECOND, -1);
--scaledNanos;
}
}
}
// Encode the scaled nanos to TDS
int encodedLength = TDS.nanosSinceMidnightLength(TDS.MAX_FRACTIONAL_SECONDS_SCALE);
byte[] encodedBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);
if (SSType.TIME == ssType) {
byte[] cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytes, cryptoMeta, con);
return cipherText;
}
else if (SSType.DATETIME2 == ssType) {
// for DATETIME2, send both the date and time parts together for encryption
encodedBytesForEncryption = new byte[encodedLength + 3];
System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, 0, encodedBytes.length);
}
else if (SSType.DATETIMEOFFSET == ssType) {
// for DATETIMEOFFSET, send the date, time and offset parts together for encryption
encodedBytesForEncryption = new byte[encodedLength + 5];
System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, 0, encodedBytes.length);
}
}
// Second, for types with a date component, write the days into the Common Era
if (SSType.DATE == ssType || SSType.DATETIME2 == ssType || SSType.DATETIMEOFFSET == ssType) {
// Computation of the number of days into the Common Era assumes that
// the DAY_OF_YEAR field reflects a pure Gregorian calendar - one that
// uses Gregorian leap year rules across the entire range of dates.
// For the DAY_OF_YEAR field to accurately reflect pure Gregorian behavior,
// we need to use a pure Gregorian calendar for dates that are Julian dates
// under a standard Gregorian calendar and for (Gregorian) dates later than
// the cutover date in the cutover year.
if (cal.getTimeInMillis() < GregorianChange.STANDARD_CHANGE_DATE.getTime()
|| cal.getActualMaximum(Calendar.DAY_OF_YEAR) < TDS.DAYS_PER_YEAR) {
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH);
int date = cal.get(Calendar.DATE);
// Set the cutover as early as possible (pure Gregorian behavior)
cal.setGregorianChange(GregorianChange.PURE_CHANGE_DATE);
// Initialize the date field by field (preserving the "wall calendar" value)
cal.set(year, month, date);
}
int daysIntoCE = DDC.daysSinceBaseDate(cal.get(Calendar.YEAR), cal.get(Calendar.DAY_OF_YEAR), 1);
// Last-ditch verification that the value is in the valid range for the
// DATE/DATETIME2/DATETIMEOFFSET TDS data type (1/1/0001 to 12/31/9999).
// If it's not, then throw an exception now so that statement execution
// is safely canceled. Attempting to put an invalid value on the wire
// would result in a TDS exception, which would close the connection.
if (daysIntoCE < 0 || daysIntoCE >= DDC.daysSinceBaseDate(10000, 1, 1)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_valueOutOfRange"));
Object[] msgArgs = {ssType};
throw new SQLServerException(form.format(msgArgs), SQLState.DATA_EXCEPTION_DATETIME_FIELD_OVERFLOW, DriverError.NOT_SET, null);
}
byte encodedBytes[] = new byte[3];
encodedBytes[0] = (byte) ((daysIntoCE >> 0) & 0xFF);
encodedBytes[1] = (byte) ((daysIntoCE >> 8) & 0xFF);
encodedBytes[2] = (byte) ((daysIntoCE >> 16) & 0xFF);
byte[] cipherText;
if (SSType.DATE == ssType) {
cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytes, cryptoMeta, con);
}
else if (SSType.DATETIME2 == ssType) {
// For the max value, do not round up; cast instead.
if (3652058 == daysIntoCE) { // 9999-12-31
if (864000000000L == scaledNanos) { // 24:00:00 in nanoseconds
// does not round up
scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos)) / divisor)
* divisor / 100;
int encodedLength = TDS.nanosSinceMidnightLength(TDS.MAX_FRACTIONAL_SECONDS_SCALE);
byte[] encodedNanoBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);
// for DATETIME2, send both the date and time parts together for encryption
encodedBytesForEncryption = new byte[encodedLength + 3];
System.arraycopy(encodedNanoBytes, 0, encodedBytesForEncryption, 0, encodedNanoBytes.length);
}
}
// Copy the 3 byte date value
System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, (encodedBytesForEncryption.length - 3), 3);
cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytesForEncryption, cryptoMeta, con);
}
else {
// For the max value, do not round up; cast instead.
if (3652058 == daysIntoCE) { // 9999-12-31
if (864000000000L == scaledNanos) { // 24:00:00 in nanoseconds
// does not round up
scaledNanos = (((long) Nanos.PER_SECOND * secondsSinceMidnight + getRoundedSubSecondNanos(subSecondNanos)) / divisor)
* divisor / 100;
int encodedLength = TDS.nanosSinceMidnightLength(TDS.MAX_FRACTIONAL_SECONDS_SCALE);
byte[] encodedNanoBytes = scaledNanosToEncodedBytes(scaledNanos, encodedLength);
// for DATETIMEOFFSET, send the date, time and offset parts together for encryption
encodedBytesForEncryption = new byte[encodedLength + 5];
System.arraycopy(encodedNanoBytes, 0, encodedBytesForEncryption, 0, encodedNanoBytes.length);
}
}
// Copy the 3 byte date value
System.arraycopy(encodedBytes, 0, encodedBytesForEncryption, (encodedBytesForEncryption.length - 5), 3);
// Copy the 2 byte minutesOffset value
System.arraycopy(ByteBuffer.allocate(Short.SIZE / Byte.SIZE).order(ByteOrder.LITTLE_ENDIAN).putShort(minutesOffset).array(), 0,
encodedBytesForEncryption, (encodedBytesForEncryption.length - 2), 2);
cipherText = SQLServerSecurityUtility.encryptWithKey(encodedBytesForEncryption, cryptoMeta, con);
}
return cipherText;
}
// Invalid type ssType. This condition should never happen.
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_unknownSSType"));
Object[] msgArgs = {ssType};
SQLServerException.makeFromDriverError(null, null, form.format(msgArgs), null, true);
return null;
}
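// Illustrative sketch (hypothetical helper, not part of the driver): the 3-byte
// "days into the Common Era" value written above is little-endian. For example,
// 9999-12-31 is 3652058 days into CE (0x37B9DA), so it is encoded as the bytes
// {0xDA, 0xB9, 0x37}; this helper reverses that encoding.
private static int decodeDaysIntoCEExample(byte[] encodedBytes) {
return (encodedBytes[0] & 0xFF)
| ((encodedBytes[1] & 0xFF) << 8)
| ((encodedBytes[2] & 0xFF) << 16);
}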
private byte[] scaledNanosToEncodedBytes(long scaledNanos,
int encodedLength) {
byte encodedBytes[] = new byte[encodedLength];
for (int i = 0; i < encodedLength; i++)
encodedBytes[i] = (byte) ((scaledNanos >> (8 * i)) & 0xFF);
return encodedBytes;
}
/**
* Append the data in a stream in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param stream
* is the stream
* @param streamLength
* length of the stream (may be unknown)
* @param bOut
boolean true if the data value is being registered as an output parameter
* @param jdbcType
* The JDBC type used to determine whether the value is textual or non-textual.
* @param collation
* The SQL collation associated with the value. Null for non-textual SQL Server types.
* @throws SQLServerException
*/
void writeRPCInputStream(String sName,
InputStream stream,
long streamLength,
boolean bOut,
JDBCType jdbcType,
SQLCollation collation) throws SQLServerException {
assert null != stream;
assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength >= 0;
// Send long values and values with unknown length
// using PLP chunking on Yukon and later.
boolean usePLP = (DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength > DataTypes.SHORT_VARTYPE_MAX_BYTES);
if (usePLP) {
assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength <= DataTypes.MAX_VARTYPE_MAX_BYTES;
writeRPCNameValType(sName, bOut, jdbcType.isTextual() ? TDSType.BIGVARCHAR : TDSType.BIGVARBINARY);
// Handle Yukon v*max type header here.
writeVMaxHeader(streamLength, false, jdbcType.isTextual() ? collation : null);
}
// Send non-PLP in all other cases
else {
// If the length of the InputStream is unknown then we need to buffer the entire stream
// in memory so that we can determine its length and send that length to the server
// before the stream data itself.
if (DataTypes.UNKNOWN_STREAM_LENGTH == streamLength) {
// Create ByteArrayOutputStream with initial buffer size of 8K to handle typical
// binary field sizes more efficiently. Note we can grow beyond 8000 bytes.
ByteArrayOutputStream baos = new ByteArrayOutputStream(8000);
streamLength = 0L;
// Since Shiloh is limited to 64K TDS packets, that's a good upper bound on the maximum
// length of InputStream we should try to handle before throwing an exception.
long maxStreamLength = 65535L * con.getTDSPacketSize();
try {
byte buff[] = new byte[8000];
int bytesRead;
while (streamLength < maxStreamLength && -1 != (bytesRead = stream.read(buff, 0, buff.length))) {
baos.write(buff, 0, bytesRead); // buffer only the bytes actually read
streamLength += bytesRead;
}
}
catch (IOException e) {
throw new SQLServerException(e.getMessage(), SQLState.DATA_EXCEPTION_NOT_SPECIFIC, DriverError.NOT_SET, e);
}
if (streamLength >= maxStreamLength) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_invalidLength"));
Object[] msgArgs = {Long.valueOf(streamLength)};
SQLServerException.makeFromDriverError(null, null, form.format(msgArgs), "", true);
}
assert streamLength <= Integer.MAX_VALUE;
stream = new ByteArrayInputStream(baos.toByteArray(), 0, (int) streamLength);
}
assert 0 <= streamLength && streamLength <= DataTypes.IMAGE_TEXT_MAX_BYTES;
boolean useVarType = streamLength <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
writeRPCNameValType(sName, bOut,
jdbcType.isTextual() ? (useVarType ? TDSType.BIGVARCHAR : TDSType.TEXT) : (useVarType ? TDSType.BIGVARBINARY : TDSType.IMAGE));
// Write maximum length, optional collation, and actual length
if (useVarType) {
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
if (jdbcType.isTextual())
collation.writeCollation(this);
writeShort((short) streamLength);
}
else {
writeInt(DataTypes.IMAGE_TEXT_MAX_BYTES);
if (jdbcType.isTextual())
collation.writeCollation(this);
writeInt((int) streamLength);
}
}
// Write the data
writeStream(stream, streamLength, usePLP);
}
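// Illustrative sketch (hypothetical helper, not part of the driver): buffering a stream of
// unknown length, as done above, so that its byte count can be sent before the data itself.
// Note that only the bytes actually read in each pass are copied into the buffer.
private static byte[] bufferUnknownLengthStreamExample(InputStream stream) throws IOException {
ByteArrayOutputStream buffered = new ByteArrayOutputStream(8000);
byte[] buff = new byte[8000];
int bytesRead;
while (-1 != (bytesRead = stream.read(buff, 0, buff.length)))
buffered.write(buff, 0, bytesRead);
return buffered.toByteArray();
}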
/**
* Append the XML data in a stream in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param stream
* is the stream
* @param streamLength
* length of the stream (may be unknown)
* @param bOut
boolean true if the data value is being registered as an output parameter
* @throws SQLServerException
*/
void writeRPCXML(String sName,
InputStream stream,
long streamLength,
boolean bOut) throws SQLServerException {
assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength >= 0;
assert DataTypes.UNKNOWN_STREAM_LENGTH == streamLength || streamLength <= DataTypes.MAX_VARTYPE_MAX_BYTES;
writeRPCNameValType(sName, bOut, TDSType.XML);
writeByte((byte) 0); // No schema
// Handle null here and return, we're done here if it's null.
if (null == stream) {
// Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
writeLong(0xFFFFFFFFFFFFFFFFL);
}
else if (DataTypes.UNKNOWN_STREAM_LENGTH == streamLength) {
// Append v*max length.
// UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
writeLong(0xFFFFFFFFFFFFFFFEL);
// NOTE: Don't send the first chunk length, this will be calculated by caller.
}
else {
// For v*max types with known length, length is <totallength8><chunklength4>
// We're sending same total length as chunk length (as we're sending 1 chunk).
writeLong(streamLength);
}
if (null != stream)
// Write the data
writeStream(stream, streamLength, true);
}
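// Illustrative sketch (hypothetical helper, not part of the driver): the 8-byte v*max
// length header written above. 0xFFFFFFFFFFFFFFFF marks a null value,
// 0xFFFFFFFFFFFFFFFE marks an unknown (PLP chunked) length, and any other value is the
// known total length in bytes.
private static long vMaxLengthHeaderExample(boolean isNull, long knownLength) {
if (isNull)
return 0xFFFFFFFFFFFFFFFFL;
return (DataTypes.UNKNOWN_STREAM_LENGTH == knownLength) ? 0xFFFFFFFFFFFFFFFEL : knownLength;
}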
/**
* Append the data in a character reader in RPC transmission format.
*
* @param sName
* the optional parameter name
* @param re
* the reader
* @param reLength
* the reader data length (in characters)
* @param bOut
boolean true if the data value is being registered as an output parameter
* @param collation
* The SQL collation associated with the value. Null for non-textual SQL Server types.
* @throws SQLServerException
*/
void writeRPCReaderUnicode(String sName,
Reader re,
long reLength,
boolean bOut,
SQLCollation collation) throws SQLServerException {
assert null != re;
assert DataTypes.UNKNOWN_STREAM_LENGTH == reLength || reLength >= 0;
// Textual RPC requires a collation. If none is provided, as is the case when
// the SSType is non-textual, then use the database collation by default.
if (null == collation)
collation = con.getDatabaseCollation();
// Send long values and values with unknown length
// using PLP chunking on Yukon and later.
boolean usePLP = (DataTypes.UNKNOWN_STREAM_LENGTH == reLength || reLength > DataTypes.SHORT_VARTYPE_MAX_CHARS);
if (usePLP) {
assert DataTypes.UNKNOWN_STREAM_LENGTH == reLength || reLength <= DataTypes.MAX_VARTYPE_MAX_CHARS;
writeRPCNameValType(sName, bOut, TDSType.NVARCHAR);
// Handle Yukon v*max type header here.
writeVMaxHeader((DataTypes.UNKNOWN_STREAM_LENGTH == reLength) ? DataTypes.UNKNOWN_STREAM_LENGTH : 2 * reLength, // Length (in bytes)
false, collation);
}
// Send non-PLP in all other cases
else {
// Length must be known if we're not sending PLP-chunked data. Yukon is handled above.
// For Shiloh, this is enforced in DTV by converting the Reader to some other length-
// prefixed value in the setter.
assert 0 <= reLength && reLength <= DataTypes.NTEXT_MAX_CHARS;
// For non-PLP values, use the long NTEXT type rather than the short NVARCHAR
// type if the data is too long to fit in the latter, or if we don't know the length up
// front and so have to assume that it might be too long.
boolean useVarType = reLength <= DataTypes.SHORT_VARTYPE_MAX_CHARS;
writeRPCNameValType(sName, bOut, useVarType ? TDSType.NVARCHAR : TDSType.NTEXT);
// Write maximum length, collation, and actual length of the data
if (useVarType) {
writeShort((short) DataTypes.SHORT_VARTYPE_MAX_BYTES);
collation.writeCollation(this);
writeShort((short) (2 * reLength));
}
else {
writeInt(DataTypes.NTEXT_MAX_CHARS);
collation.writeCollation(this);
writeInt((int) (2 * reLength));
}
}
// Write the data
writeReader(re, reLength, usePLP);
}
}
/**
* TDSPacket provides a mechanism for chaining TDS response packets together in a singly-linked list.
*
* Having both the link and the data in the same class allows TDSReader marks (see below) to automatically hold onto exactly as much response data as
* they need, and no more. Java reference semantics ensure that a mark holds onto its referenced packet and subsequent packets (through next
* references). When all marked references to a packet go away, the packet, and any linked unmarked packets, can be reclaimed by GC.
*/
final class TDSPacket {
final byte[] header = new byte[TDS.PACKET_HEADER_SIZE];
final byte[] payload;
int payloadLength;
volatile TDSPacket next;
final public String toString() {
return "TDSPacket(SPID:" + Util.readUnsignedShortBigEndian(header, TDS.PACKET_HEADER_SPID) + " Seq:" + header[TDS.PACKET_HEADER_SEQUENCE_NUM]
+ ")";
}
TDSPacket(int size) {
payload = new byte[size];
payloadLength = 0;
next = null;
}
final boolean isEOM() {
return TDS.STATUS_BIT_EOM == (header[TDS.PACKET_HEADER_MESSAGE_STATUS] & TDS.STATUS_BIT_EOM);
}
};
/**
* TDSReaderMark encapsulates a fixed position in the response data stream.
*
* Response data is quantized into a linked chain of packets. A mark refers to a specific location in a specific packet and relies on Java's reference
* semantics to automatically keep all subsequent packets accessible until the mark is destroyed.
*/
final class TDSReaderMark {
final TDSPacket packet;
final int payloadOffset;
TDSReaderMark(TDSPacket packet,
int payloadOffset) {
this.packet = packet;
this.payloadOffset = payloadOffset;
}
}
/**
* TDSReader encapsulates the TDS response data stream.
*
* Bytes are read from SQL Server into a FIFO of packets. Reader methods traverse the packets to access the data.
*/
final class TDSReader {
private final static Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Reader");
final private String traceID;
final public String toString() {
return traceID;
}
private final TDSChannel tdsChannel;
private final SQLServerConnection con;
private final TDSCommand command;
final TDSCommand getCommand() {
assert null != command;
return command;
}
final SQLServerConnection getConnection() {
return con;
}
private TDSPacket currentPacket = new TDSPacket(0);
private TDSPacket lastPacket = currentPacket;
private int payloadOffset = 0;
private int packetNum = 0;
private boolean isStreaming = true;
private boolean useColumnEncryption = false;
private boolean serverSupportsColumnEncryption = false;
private final byte valueBytes[] = new byte[256];
private static final AtomicInteger lastReaderID = new AtomicInteger(0);
private static int nextReaderID() {
return lastReaderID.incrementAndGet();
}
TDSReader(TDSChannel tdsChannel,
SQLServerConnection con,
TDSCommand command) {
this.tdsChannel = tdsChannel;
this.con = con;
this.command = command; // may be null
// If the logging level is less detailed than FINE, we will not generate proper reader IDs.
if (logger.isLoggable(Level.FINE))
traceID = "TDSReader@" + nextReaderID() + " (" + con.toString() + ")";
else
traceID = con.toString();
if (con.isColumnEncryptionSettingEnabled()) {
useColumnEncryption = true;
}
serverSupportsColumnEncryption = con.getServerSupportsColumnEncryption();
}
final boolean isColumnEncryptionSettingEnabled() {
return useColumnEncryption;
}
final boolean getServerSupportsColumnEncryption() {
return serverSupportsColumnEncryption;
}
final void throwInvalidTDS() throws SQLServerException {
if (logger.isLoggable(Level.SEVERE))
logger.severe(toString() + " got unexpected value in TDS response at offset:" + payloadOffset);
con.throwInvalidTDS();
}
final void throwInvalidTDSToken(String tokenName) throws SQLServerException {
if (logger.isLoggable(Level.SEVERE))
logger.severe(toString() + " got unexpected value in TDS response at offset:" + payloadOffset);
con.throwInvalidTDSToken(tokenName);
}
/**
* Ensures that payload data is available to be read, automatically advancing to (and possibly reading) the next packet.
*
* @return true if additional data is available to be read false if no more data is available
*/
private boolean ensurePayload() throws SQLServerException {
if (payloadOffset == currentPacket.payloadLength)
if (!nextPacket())
return false;
assert payloadOffset < currentPacket.payloadLength;
return true;
}
/**
* Advance (and possibly read) the next packet.
*
* @return true if additional data is available to be read false if no more data is available
*/
private boolean nextPacket() throws SQLServerException {
assert null != currentPacket;
// Shouldn't call this function unless we're at the end of the current packet...
TDSPacket consumedPacket = currentPacket;
assert payloadOffset == consumedPacket.payloadLength;
// If no buffered packets are left then maybe we can read one...
// This action must be synchronized against another thread calling
// readAllPackets() to read in ALL of the remaining packets of the current response.
if (null == consumedPacket.next) {
readPacket();
if (null == consumedPacket.next)
return false;
}
// Advance to that packet. If we are streaming through the
// response, then unlink the current packet from the next
// before moving to allow the packet to be reclaimed.
TDSPacket nextPacket = consumedPacket.next;
if (isStreaming) {
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Moving to next packet -- unlinking consumed packet");
consumedPacket.next = null;
}
currentPacket = nextPacket;
payloadOffset = 0;
return true;
}
/**
* Reads the next packet of the TDS channel.
*
* This method is synchronized to guard against simultaneously reading packets from one thread that is processing the response and another thread
* that is trying to buffer it with TDSCommand.detach().
*/
synchronized final boolean readPacket() throws SQLServerException {
if (null != command && !command.readingResponse())
return false;
// The number of messages received should always be less than the number of messages sent.
// If the server has been notified of an interrupt, the received count may lag by
// more than one message.
assert tdsChannel.numMsgsRcvd < tdsChannel.numMsgsSent : "numMsgsRcvd:" + tdsChannel.numMsgsRcvd + " should be less than numMsgsSent:"
+ tdsChannel.numMsgsSent;
TDSPacket newPacket = new TDSPacket(con.getTDSPacketSize());
// First, read the packet header.
for (int headerBytesRead = 0; headerBytesRead < TDS.PACKET_HEADER_SIZE;) {
int bytesRead = tdsChannel.read(newPacket.header, headerBytesRead, TDS.PACKET_HEADER_SIZE - headerBytesRead);
if (bytesRead < 0) {
if (logger.isLoggable(Level.FINER))
logger.finer(toString() + " Premature EOS in response. packetNum:" + packetNum + " headerBytesRead:" + headerBytesRead);
con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, ((0 == packetNum && 0 == headerBytesRead)
? SQLServerException.getErrString("R_noServerResponse") : SQLServerException.getErrString("R_truncatedServerResponse")));
}
headerBytesRead += bytesRead;
}
// Header size is a 2 byte unsigned short integer in big-endian order.
int packetLength = Util.readUnsignedShortBigEndian(newPacket.header, TDS.PACKET_HEADER_MESSAGE_LENGTH);
// Make sure the packet length is properly bounded and compute the length of the packet payload.
if (packetLength < TDS.PACKET_HEADER_SIZE || packetLength > con.getTDSPacketSize()) {
logger.warning(toString() + " TDS header contained invalid packet length:" + packetLength + "; packet size:" + con.getTDSPacketSize());
throwInvalidTDS();
}
newPacket.payloadLength = packetLength - TDS.PACKET_HEADER_SIZE;
// Just grab the SPID for logging (another big-endian unsigned short).
tdsChannel.setSPID(Util.readUnsignedShortBigEndian(newPacket.header, TDS.PACKET_HEADER_SPID));
// Packet header looks good enough.
// When logging, copy the packet header to the log buffer.
byte[] logBuffer = null;
if (tdsChannel.isLoggingPackets()) {
logBuffer = new byte[packetLength];
System.arraycopy(newPacket.header, 0, logBuffer, 0, TDS.PACKET_HEADER_SIZE);
}
// Now for the payload...
for (int payloadBytesRead = 0; payloadBytesRead < newPacket.payloadLength;) {
int bytesRead = tdsChannel.read(newPacket.payload, payloadBytesRead, newPacket.payloadLength - payloadBytesRead);
if (bytesRead < 0)
con.terminate(SQLServerException.DRIVER_ERROR_IO_FAILED, SQLServerException.getErrString("R_truncatedServerResponse"));
payloadBytesRead += bytesRead;
}
++packetNum;
lastPacket.next = newPacket;
lastPacket = newPacket;
// When logging, append the payload to the log buffer and write out the whole thing.
if (tdsChannel.isLoggingPackets()) {
System.arraycopy(newPacket.payload, 0, logBuffer, TDS.PACKET_HEADER_SIZE, newPacket.payloadLength);
tdsChannel.logPacket(logBuffer, 0, packetLength,
this.toString() + " received Packet:" + packetNum + " (" + newPacket.payloadLength + " bytes)");
}
// If end of message, then bump the count of messages received and disable
// interrupts. If an interrupt happened prior to disabling, then expect
// to read the attention ack packet as well.
if (newPacket.isEOM()) {
++tdsChannel.numMsgsRcvd;
// Notify the command (if any) that we've reached the end of the response.
if (null != command)
command.onResponseEOM();
}
return true;
}
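// Illustrative sketch (hypothetical helper, not part of the driver): the TDS packet header
// stores the total packet length (header plus payload) as a 2-byte big-endian unsigned
// integer, so header bytes {0x10, 0x00} at the length offset denote a 4096-byte packet.
private static int packetLengthFromHeaderExample(byte[] header, int lengthOffset) {
return ((header[lengthOffset] & 0xFF) << 8) | (header[lengthOffset + 1] & 0xFF);
}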
final TDSReaderMark mark() {
TDSReaderMark mark = new TDSReaderMark(currentPacket, payloadOffset);
isStreaming = false;
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Buffering from: " + mark.toString());
return mark;
}
final void reset(TDSReaderMark mark) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Resetting to: " + mark.toString());
currentPacket = mark.packet;
payloadOffset = mark.payloadOffset;
}
final void stream() {
isStreaming = true;
}
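// Illustrative sketch (hypothetical, not used by the driver): the typical mark/reset
// pattern a caller uses to look ahead in the token stream and then rewind. While a mark
// is held, packets are buffered rather than streamed, so they remain reachable.
private void markResetUsageExample() throws SQLServerException {
TDSReaderMark readerMark = mark(); // stop streaming; buffer packets from here on
int tokenType = peekTokenType(); // read ahead as needed
if (-1 == tokenType)
return;
reset(readerMark); // rewind to the marked position
stream(); // resume streaming; buffered packets become reclaimable
}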
/**
* Returns the number of bytes that can be read (or skipped over) from this TDSReader without blocking by the next caller of a method for this
* TDSReader.
*
* @return the actual number of bytes available.
*/
final int available() {
// The number of bytes that can be read without blocking is just the number
// of bytes that are currently buffered. That is the number of bytes left
// in the current packet plus the number of bytes in the remaining packets.
int available = currentPacket.payloadLength - payloadOffset;
for (TDSPacket packet = currentPacket.next; null != packet; packet = packet.next)
available += packet.payloadLength;
return available;
}
/**
*
* @return number of bytes available in the current packet
*/
final int availableCurrentPacket() {
/*
* The number of bytes that can be read from the current chunk, not including any next chunk that is already buffered. This lets the driver
* confirm whether the next chunk received is a new packet or just a continuation
*/
int available = currentPacket.payloadLength - payloadOffset;
return available;
}
final int peekTokenType() throws SQLServerException {
// Check whether we're at EOF
if (!ensurePayload())
return -1;
// Peek at the current byte (don't increment payloadOffset!)
return currentPacket.payload[payloadOffset] & 0xFF;
}
final short peekStatusFlag() throws SQLServerException {
// skip the current byte (the TDS token type) and peek at the status flag (USHORT)
if (payloadOffset + 3 <= currentPacket.payloadLength) {
short value = Util.readShort(currentPacket.payload, payloadOffset + 1);
return value;
}
// Per the TDS protocol, a DONE token is always followed by its status flag;
// throw an exception if the status flag is not available
throwInvalidTDS();
return 0;
}
final int readUnsignedByte() throws SQLServerException {
// Ensure that we have a packet to read from.
if (!ensurePayload())
throwInvalidTDS();
return currentPacket.payload[payloadOffset++] & 0xFF;
}
final short readShort() throws SQLServerException {
if (payloadOffset + 2 <= currentPacket.payloadLength) {
short value = Util.readShort(currentPacket.payload, payloadOffset);
payloadOffset += 2;
return value;
}
return Util.readShort(readWrappedBytes(2), 0);
}
final int readUnsignedShort() throws SQLServerException {
if (payloadOffset + 2 <= currentPacket.payloadLength) {
int value = Util.readUnsignedShort(currentPacket.payload, payloadOffset);
payloadOffset += 2;
return value;
}
return Util.readUnsignedShort(readWrappedBytes(2), 0);
}
final String readUnicodeString(int length) throws SQLServerException {
int byteLength = 2 * length;
byte bytes[] = new byte[byteLength];
readBytes(bytes, 0, byteLength);
return Util.readUnicodeString(bytes, 0, byteLength, con);
}
final char readChar() throws SQLServerException {
return (char) readShort();
}
final int readInt() throws SQLServerException {
if (payloadOffset + 4 <= currentPacket.payloadLength) {
int value = Util.readInt(currentPacket.payload, payloadOffset);
payloadOffset += 4;
return value;
}
return Util.readInt(readWrappedBytes(4), 0);
}
final int readIntBigEndian() throws SQLServerException {
if (payloadOffset + 4 <= currentPacket.payloadLength) {
int value = Util.readIntBigEndian(currentPacket.payload, payloadOffset);
payloadOffset += 4;
return value;
}
return Util.readIntBigEndian(readWrappedBytes(4), 0);
}
final long readUnsignedInt() throws SQLServerException {
return readInt() & 0xFFFFFFFFL;
}
final long readLong() throws SQLServerException {
if (payloadOffset + 8 <= currentPacket.payloadLength) {
long value = Util.readLong(currentPacket.payload, payloadOffset);
payloadOffset += 8;
return value;
}
return Util.readLong(readWrappedBytes(8), 0);
}
final void readBytes(byte[] value,
int valueOffset,
int valueLength) throws SQLServerException {
for (int bytesRead = 0; bytesRead < valueLength;) {
// Ensure that we have a packet to read from.
if (!ensurePayload())
throwInvalidTDS();
// Figure out how many bytes to copy from the current packet
// (the lesser of the remaining value bytes and the bytes left in the packet).
int bytesToCopy = valueLength - bytesRead;
if (bytesToCopy > currentPacket.payloadLength - payloadOffset)
bytesToCopy = currentPacket.payloadLength - payloadOffset;
// Copy some bytes from the current packet to the destination value.
if (logger.isLoggable(Level.FINEST))
logger.finest(toString() + " Reading " + bytesToCopy + " bytes from offset " + payloadOffset);
System.arraycopy(currentPacket.payload, payloadOffset, value, valueOffset + bytesRead, bytesToCopy);
bytesRead += bytesToCopy;
payloadOffset += bytesToCopy;
}
}
final byte[] readWrappedBytes(int valueLength) throws SQLServerException {
assert valueLength <= valueBytes.length;
readBytes(valueBytes, 0, valueLength);
return valueBytes;
}
final Object readDecimal(int valueLength,
TypeInfo typeInfo,
JDBCType jdbcType,
StreamType streamType) throws SQLServerException {
if (valueLength > valueBytes.length) {
logger.warning(toString() + " Invalid value length:" + valueLength);
throwInvalidTDS();
}
readBytes(valueBytes, 0, valueLength);
return DDC.convertBigDecimalToObject(Util.readBigDecimal(valueBytes, valueLength, typeInfo.getScale()), jdbcType, streamType);
}
final Object readMoney(int valueLength,
JDBCType jdbcType,
StreamType streamType) throws SQLServerException {
BigInteger bi;
switch (valueLength) {
case 8: // money
{
int intBitsHi = readInt();
int intBitsLo = readInt();
if (JDBCType.BINARY == jdbcType) {
byte value[] = new byte[8];
Util.writeIntBigEndian(intBitsHi, value, 0);
Util.writeIntBigEndian(intBitsLo, value, 4);
return value;
}
bi = BigInteger.valueOf(((long) intBitsHi << 32) | (intBitsLo & 0xFFFFFFFFL));
break;
}
case 4: // smallmoney
if (JDBCType.BINARY == jdbcType) {
byte value[] = new byte[4];
Util.writeIntBigEndian(readInt(), value, 0);
return value;
}
bi = BigInteger.valueOf(readInt());
break;
default:
throwInvalidTDS();
return null;
}
return DDC.convertBigDecimalToObject(new BigDecimal(bi, 4), jdbcType, streamType);
}
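// Illustrative sketch (hypothetical helper, not part of the driver): MONEY and SMALLMONEY
// are fixed-point values with four decimal digits of scale, so a raw integer of 10000
// represents 1.0000 and 15000 represents 1.5000.
private static BigDecimal moneyFromRawValueExample(long rawValue) {
return new BigDecimal(BigInteger.valueOf(rawValue), 4);
}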
final Object readReal(int valueLength,
JDBCType jdbcType,
StreamType streamType) throws SQLServerException {
if (4 != valueLength)
throwInvalidTDS();
return DDC.convertFloatToObject(Float.intBitsToFloat(readInt()), jdbcType, streamType);
}
final Object readFloat(int valueLength,
JDBCType jdbcType,
StreamType streamType) throws SQLServerException {
if (8 != valueLength)
throwInvalidTDS();
return DDC.convertDoubleToObject(Double.longBitsToDouble(readLong()), jdbcType, streamType);
}
final Object readDateTime(int valueLength,
Calendar appTimeZoneCalendar,
JDBCType jdbcType,
StreamType streamType) throws SQLServerException {
// Build and return the right kind of temporal object.
int daysSinceSQLBaseDate;
int ticksSinceMidnight;
int msecSinceMidnight;
switch (valueLength) {
case 8:
// SQL datetime is 4 bytes for days since SQL Base Date
// (January 1, 1900 00:00:00 GMT) and 4 bytes for
// the number of three hundredths (1/300) of a second
// since midnight.
daysSinceSQLBaseDate = readInt();
ticksSinceMidnight = readInt();
if (JDBCType.BINARY == jdbcType) {
byte value[] = new byte[8];
Util.writeIntBigEndian(daysSinceSQLBaseDate, value, 0);
Util.writeIntBigEndian(ticksSinceMidnight, value, 4);
return value;
}
msecSinceMidnight = (ticksSinceMidnight * 10 + 1) / 3; // Convert to msec (1 tick = 1/300 sec, about 3 1/3 msec)
break;
case 4:
// SQL smalldatetime has less precision. It stores 2 bytes
// for the days since SQL Base Date and 2 bytes for minutes
// after midnight.
daysSinceSQLBaseDate = readUnsignedShort();
ticksSinceMidnight = readUnsignedShort();
if (JDBCType.BINARY == jdbcType) {
byte value[] = new byte[4];
Util.writeShortBigEndian((short) daysSinceSQLBaseDate, value, 0);
Util.writeShortBigEndian((short) ticksSinceMidnight, value, 2);
return value;
}
msecSinceMidnight = ticksSinceMidnight * 60 * 1000; // Convert to msec (1 tick = 1 min = 60,000 msec)
break;
default:
throwInvalidTDS();
return null;
}
// Convert the DATETIME/SMALLDATETIME value to the desired Java type.
// The last argument is the scale, which is ignored for fixed-scale DATETIME/SMALLDATETIME types.
return DDC.convertTemporalToObject(jdbcType, SSType.DATETIME, appTimeZoneCalendar, daysSinceSQLBaseDate, msecSinceMidnight, 0);
}
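// Illustrative sketch (hypothetical helper, not part of the driver): DATETIME stores the
// time of day as ticks of 1/300 second, so 300 ticks equal exactly 1000 msec; the
// (ticks * 10 + 1) / 3 form used above rounds to the nearest millisecond.
private static int datetimeTicksToMillisExample(int ticksSinceMidnight) {
return (ticksSinceMidnight * 10 + 1) / 3;
}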
final Object readDate(int valueLength,
Calendar appTimeZoneCalendar,
JDBCType jdbcType) throws SQLServerException {
if (TDS.DAYS_INTO_CE_LENGTH != valueLength)
throwInvalidTDS();
// Initialize the date fields to their appropriate values.
int localDaysIntoCE = readDaysIntoCE();
// Convert the DATE value to the desired Java type.
return DDC.convertTemporalToObject(jdbcType, SSType.DATE, appTimeZoneCalendar, localDaysIntoCE, 0, // midnight local to app time zone
0); // scale (ignored for DATE)
}
final Object readTime(int valueLength,
TypeInfo typeInfo,
Calendar appTimeZoneCalendar,
JDBCType jdbcType) throws SQLServerException {
if (TDS.timeValueLength(typeInfo.getScale()) != valueLength)
throwInvalidTDS();
// Read the value from the server
long localNanosSinceMidnight = readNanosSinceMidnight(typeInfo.getScale());
// Convert the TIME value to the desired Java type.
return DDC.convertTemporalToObject(jdbcType, SSType.TIME, appTimeZoneCalendar, 0, localNanosSinceMidnight, typeInfo.getScale());
}
final Object readDateTime2(int valueLength,
TypeInfo typeInfo,
Calendar appTimeZoneCalendar,
JDBCType jdbcType) throws SQLServerException {
if (TDS.datetime2ValueLength(typeInfo.getScale()) != valueLength)
throwInvalidTDS();
// Read the value's constituent components
long localNanosSinceMidnight = readNanosSinceMidnight(typeInfo.getScale());
int localDaysIntoCE = readDaysIntoCE();
// Convert the DATETIME2 value to the desired Java type.
return DDC.convertTemporalToObject(jdbcType, SSType.DATETIME2, appTimeZoneCalendar, localDaysIntoCE, localNanosSinceMidnight,
typeInfo.getScale());
}
final Object readDateTimeOffset(int valueLength,
TypeInfo typeInfo,
JDBCType jdbcType) throws SQLServerException {
if (TDS.datetimeoffsetValueLength(typeInfo.getScale()) != valueLength)
throwInvalidTDS();
// The nanos since midnight and days into Common Era parts of DATETIMEOFFSET values
// are in UTC. Use the minutes offset part to convert to local.
long utcNanosSinceMidnight = readNanosSinceMidnight(typeInfo.getScale());
int utcDaysIntoCE = readDaysIntoCE();
int localMinutesOffset = readShort();
// Convert the DATETIMEOFFSET value to the desired Java type.
return DDC.convertTemporalToObject(jdbcType, SSType.DATETIMEOFFSET,
new GregorianCalendar(new SimpleTimeZone(localMinutesOffset * 60 * 1000, ""), Locale.US), utcDaysIntoCE, utcNanosSinceMidnight,
typeInfo.getScale());
}
private int readDaysIntoCE() throws SQLServerException {
byte value[] = new byte[TDS.DAYS_INTO_CE_LENGTH];
readBytes(value, 0, value.length);
int daysIntoCE = 0;
for (int i = 0; i < value.length; i++)
daysIntoCE |= ((value[i] & 0xFF) << (8 * i));
// Theoretically should never encounter a value that is outside of the valid date range
if (daysIntoCE < 0)
throwInvalidTDS();
return daysIntoCE;
}
// Scale multipliers used to convert variable-scaled temporal values to a fixed 100ns scale.
// Using this array is measurably faster than using Math.pow(10, ...)
private final static int[] SCALED_MULTIPLIERS = {10000000, 1000000, 100000, 10000, 1000, 100, 10, 1};
private long readNanosSinceMidnight(int scale) throws SQLServerException {
assert 0 <= scale && scale <= TDS.MAX_FRACTIONAL_SECONDS_SCALE;
byte value[] = new byte[TDS.nanosSinceMidnightLength(scale)];
readBytes(value, 0, value.length);
long hundredNanosSinceMidnight = 0;
for (int i = 0; i < value.length; i++)
hundredNanosSinceMidnight |= (value[i] & 0xFFL) << (8 * i);
hundredNanosSinceMidnight *= SCALED_MULTIPLIERS[scale];
if (!(0 <= hundredNanosSinceMidnight && hundredNanosSinceMidnight < Nanos.PER_DAY / 100))
throwInvalidTDS();
return 100 * hundredNanosSinceMidnight;
}
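// Illustrative sketch (hypothetical helper, not part of the driver): a TIME value with
// scale 3 is stored in milliseconds, so 01:00:00.000 arrives as 3,600,000; multiplying by
// SCALED_MULTIPLIERS[3] (10000) gives 36,000,000,000 hundred-nanosecond units, and the
// final result is 3,600,000,000,000 nanoseconds since midnight.
private static long scaledValueToNanosExample(long storedValue, int scale) {
return 100L * storedValue * SCALED_MULTIPLIERS[scale];
}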
final static String guidTemplate = "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN";
final Object readGUID(int valueLength,
JDBCType jdbcType,
StreamType streamType) throws SQLServerException {
// GUIDs must be exactly 16 bytes
if (16 != valueLength)
throwInvalidTDS();
// Read in the GUID's binary value
byte guid[] = new byte[16];
readBytes(guid, 0, 16);
switch (jdbcType) {
case CHAR:
case VARCHAR:
case LONGVARCHAR:
case GUID: {
StringBuilder sb = new StringBuilder(guidTemplate.length());
for (int i = 0; i < 4; i++) {
sb.append(Util.hexChars[(guid[3 - i] & 0xF0) >> 4]);
sb.append(Util.hexChars[guid[3 - i] & 0x0F]);
}
sb.append('-');
for (int i = 0; i < 2; i++) {
sb.append(Util.hexChars[(guid[5 - i] & 0xF0) >> 4]);
sb.append(Util.hexChars[guid[5 - i] & 0x0F]);
}
sb.append('-');
for (int i = 0; i < 2; i++) {
sb.append(Util.hexChars[(guid[7 - i] & 0xF0) >> 4]);
sb.append(Util.hexChars[guid[7 - i] & 0x0F]);
}
sb.append('-');
for (int i = 0; i < 2; i++) {
sb.append(Util.hexChars[(guid[8 + i] & 0xF0) >> 4]);
sb.append(Util.hexChars[guid[8 + i] & 0x0F]);
}
sb.append('-');
for (int i = 0; i < 6; i++) {
sb.append(Util.hexChars[(guid[10 + i] & 0xF0) >> 4]);
sb.append(Util.hexChars[guid[10 + i] & 0x0F]);
}
try {
return DDC.convertStringToObject(sb.toString(), Encoding.UNICODE.charset(), jdbcType, streamType);
}
catch (UnsupportedEncodingException e) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_errorConvertingValue"));
throw new SQLServerException(form.format(new Object[] {"UNIQUEIDENTIFIER", jdbcType}), null, 0, e);
}
}
default: {
if (StreamType.BINARY == streamType || StreamType.ASCII == streamType)
return new ByteArrayInputStream(guid);
return guid;
}
}
}
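// Worked example (illustrative): the wire format handled above stores the first three GUID
// fields little-endian and the last two big-endian, so the raw bytes
// 00 11 22 33 44 55 66 77 88 99 AA BB CC DD EE FF
// render as the string "33221100-5544-7766-8899-AABBCCDDEEFF".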
/**
* Reads a multi-part table name from TDS and returns it as an array of Strings.
*/
final SQLIdentifier readSQLIdentifier() throws SQLServerException {
// Multi-part names should have between 1 and 4 parts
int numParts = readUnsignedByte();
if (!(1 <= numParts && numParts <= 4))
throwInvalidTDS();
// Each part is a length-prefixed Unicode string
String[] nameParts = new String[numParts];
for (int i = 0; i < numParts; i++)
nameParts[i] = readUnicodeString(readUnsignedShort());
// Build the identifier from the name parts
SQLIdentifier identifier = new SQLIdentifier();
identifier.setObjectName(nameParts[numParts - 1]);
if (numParts >= 2)
identifier.setSchemaName(nameParts[numParts - 2]);
if (numParts >= 3)
identifier.setDatabaseName(nameParts[numParts - 3]);
if (4 == numParts)
identifier.setServerName(nameParts[numParts - 4]);
return identifier;
}
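// Worked example (illustrative): a four-part name arrives in the order
// {server, database, schema, object}, so the parts {"srv", "db", "dbo", "tbl"} map to
// serverName="srv", databaseName="db", schemaName="dbo", objectName="tbl".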
final SQLCollation readCollation() throws SQLServerException {
SQLCollation collation = null;
try {
collation = new SQLCollation(this);
}
catch (UnsupportedEncodingException e) {
con.terminate(SQLServerException.DRIVER_ERROR_INVALID_TDS, e.getMessage(), e);
// not reached
}
return collation;
}
final void skip(int bytesToSkip) throws SQLServerException {
assert bytesToSkip >= 0;
while (bytesToSkip > 0) {
// Ensure that we have a packet to read from.
if (!ensurePayload())
throwInvalidTDS();
int bytesSkipped = bytesToSkip;
if (bytesSkipped > currentPacket.payloadLength - payloadOffset)
bytesSkipped = currentPacket.payloadLength - payloadOffset;
bytesToSkip -= bytesSkipped;
payloadOffset += bytesSkipped;
}
}
final void TryProcessFeatureExtAck(boolean featureExtAckReceived) throws SQLServerException {
// in case of redirection, do not check if TDS_FEATURE_EXTENSION_ACK is received or not.
if (null != this.con.getRoutingInfo()) {
return;
}
if (isColumnEncryptionSettingEnabled() && !featureExtAckReceived)
throw new SQLServerException(this, SQLServerException.getErrString("R_AE_NotSupportedByServer"), null, 0, false);
}
}
/**
* Timer for use with Commands that support a timeout.
*
* Once started, the timer runs for the prescribed number of seconds unless stopped. If the timer runs out, it interrupts its associated Command with
* a reason like "timed out".
*/
final class TimeoutTimer implements Runnable {
private static final String threadGroupName = "mssql-jdbc-TimeoutTimer";
private final int timeoutSeconds;
private final TDSCommand command;
private volatile Future<?> task;
private static final ExecutorService executor = Executors.newCachedThreadPool(new ThreadFactory() {
private final ThreadGroup tg = new ThreadGroup(threadGroupName);
private final String threadNamePrefix = tg.getName() + "-";
private final AtomicInteger threadNumber = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(tg, r, threadNamePrefix + threadNumber.incrementAndGet());
t.setDaemon(true);
return t;
}
});
private volatile boolean canceled = false;
TimeoutTimer(int timeoutSeconds,
TDSCommand command) {
assert timeoutSeconds > 0;
assert null != command;
this.timeoutSeconds = timeoutSeconds;
this.command = command;
}
final void start() {
task = executor.submit(this);
}
final void stop() {
task.cancel(true);
canceled = true;
}
public void run() {
int secondsRemaining = timeoutSeconds;
try {
// Poll every second while time is left on the timer.
// Return if/when the timer is canceled.
do {
if (canceled)
return;
Thread.sleep(1000);
}
while (--secondsRemaining > 0);
}
catch (InterruptedException e) {
// re-interrupt the current thread, in order to restore the thread's interrupt status.
Thread.currentThread().interrupt();
return;
}
// If the timer wasn't canceled before it ran out of
// time then interrupt the registered command.
try {
command.interrupt(SQLServerException.getErrString("R_queryTimedOut"));
}
catch (SQLServerException e) {
// Unfortunately, there's nothing we can do if we
// fail to time out the request. There is no way
// to report back what happened.
command.log(Level.FINE, "Command could not be timed out. Reason: " + e.getMessage());
}
}
}
/**
* TDSCommand encapsulates an interruptable TDS conversation.
*
* A conversation may consist of one or more TDS request and response messages. A command may be interrupted at any point, from any thread, and for
* any reason. Acknowledgement and handling of an interrupt is fully encapsulated by this class.
*
* Commands may be created with an optional timeout (in seconds). Timeouts are implemented as a form of interrupt, where the interrupt event occurs
* when the timeout period expires. Currently, only the time to receive the response from the channel counts against the timeout period.
*/
abstract class TDSCommand {
abstract boolean doExecute() throws SQLServerException;
final static Logger logger = Logger.getLogger("com.microsoft.sqlserver.jdbc.internals.TDS.Command");
private final String logContext;
final String getLogContext() {
return logContext;
}
private String traceID;
final public String toString() {
if (traceID == null)
traceID = "TDSCommand@" + Integer.toHexString(hashCode()) + " (" + logContext + ")";
return traceID;
}
final void log(Level level,
String message) {
logger.log(level, toString() + ": " + message);
}
// Optional timer that is set if the command was created with a non-zero timeout period.
// When the timer expires, the command is interrupted.
private final TimeoutTimer timeoutTimer;
// TDS channel accessors
// These are set/reset at command execution time.
// Volatile ensures visibility to execution thread and interrupt thread
private volatile TDSWriter tdsWriter;
private volatile TDSReader tdsReader;
protected TDSWriter getTDSWriter(){
return tdsWriter;
}
// Lock to ensure atomicity when manipulating more than one of the following
// shared interrupt state variables below.
private final Object interruptLock = new Object();
// Flag set when this command starts execution, indicating that it is
// ready to respond to interrupts; and cleared when its last response packet is
// received, indicating that it is no longer able to respond to interrupts.
// If the command is interrupted after interrupts have been disabled, then the
// interrupt is ignored.
private volatile boolean interruptsEnabled = false;
protected boolean getInterruptsEnabled() {
return interruptsEnabled;
}
protected void setInterruptsEnabled(boolean interruptsEnabled) {
synchronized (interruptLock) {
this.interruptsEnabled = interruptsEnabled;
}
}
// Flag set to indicate that an interrupt has happened.
private volatile boolean wasInterrupted = false;
private boolean wasInterrupted() {
return wasInterrupted;
}
// The reason for the interrupt.
private volatile String interruptReason = null;
// Flag set when this command's request to the server is complete.
// If a command is interrupted before its request is complete, it is the executing
// thread's responsibility to send the attention signal to the server if necessary.
// After the request is complete, the interrupting thread must send the attention signal.
private volatile boolean requestComplete;
protected boolean getRequestComplete() {
return requestComplete;
}
protected void setRequestComplete(boolean requestComplete) {
synchronized (interruptLock) {
this.requestComplete = requestComplete;
}
}
// Flag set when an attention signal has been sent to the server, indicating that a
// TDS packet containing the attention ack message is to be expected in the response.
// This flag is cleared after the attention ack message has been received and processed.
private volatile boolean attentionPending = false;
boolean attentionPending() {
return attentionPending;
}
// Flag set when this command's response has been processed. Until this flag is set,
// there may be unprocessed information left in the response, such as transaction
// ENVCHANGE notifications.
private volatile boolean processedResponse;
protected boolean getProcessedResponse() {
return processedResponse;
}
protected void setProcessedResponse(boolean processedResponse) {
synchronized (interruptLock) {
this.processedResponse = processedResponse;
}
}
// Flag set when this command's response is ready to be read from the server and cleared
// after its response has been received, but not necessarily processed, up to and including
// any attention ack. The command's response is read either on demand as it is processed,
// or by detaching.
private volatile boolean readingResponse;
final boolean readingResponse() {
return readingResponse;
}
/**
* Creates this command with an optional timeout.
*
* @param logContext
* the string describing the context for this command.
* @param timeoutSeconds
* (optional) the time before which the command must complete before it is interrupted. A value of 0 means no timeout.
*/
TDSCommand(String logContext,
int timeoutSeconds) {
this.logContext = logContext;
this.timeoutTimer = (timeoutSeconds > 0) ? (new TimeoutTimer(timeoutSeconds, this)) : null;
}
/**
* Executes this command.
*
* @param tdsWriter
* @param tdsReader
* @throws SQLServerException
* on any error executing the command, including cancel or timeout.
*/
boolean execute(TDSWriter tdsWriter,
TDSReader tdsReader) throws SQLServerException {
this.tdsWriter = tdsWriter;
this.tdsReader = tdsReader;
assert null != tdsReader;
try {
return doExecute(); // Derived classes implement the execution details
}
catch (SQLServerException e) {
try {
// If command execution threw an exception for any reason before the request
// was complete then interrupt the command (it may already be interrupted)
// and close it out to ensure that any response to the error/interrupt
// is processed.
// no point in trying to cancel on a closed connection.
if (!requestComplete && !tdsReader.getConnection().isClosed()) {
interrupt(e.getMessage());
onRequestComplete();
close();
}
}
catch (SQLServerException interruptException) {
if (logger.isLoggable(Level.FINE))
logger.fine(this.toString() + ": Ignoring error in sending attention: " + interruptException.getMessage());
}
// Throw the original exception even if the attempt to interrupt (for example,
// sending a cancel to the server) fails.
throw e;
}
}
/**
* Provides sane default response handling.
*
* This default implementation just consumes everything in the response message.
*/
void processResponse(TDSReader tdsReader) throws SQLServerException {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Processing response");
try {
TDSParser.parse(tdsReader, getLogContext());
}
catch (SQLServerException e) {
if (SQLServerException.DRIVER_ERROR_FROM_DATABASE != e.getDriverErrorCode())
throw e;
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Ignoring error from database: " + e.getMessage());
}
}
/**
* Clears this command from the TDS channel so that another command can execute.
*
* This method does not process the response. It just buffers it in memory, including any attention ack that may be present.
*/
final void detach() throws SQLServerException {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": detaching...");
// Read any remaining response packets from the server.
// This operation may be timed out or cancelled from another thread.
while (tdsReader.readPacket())
;
// Postcondition: the entire response has been read
assert !readingResponse;
}
final void close() {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": closing...");
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": processing response...");
while (!processedResponse) {
try {
processResponse(tdsReader);
}
catch (SQLServerException e) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": close ignoring error processing response: " + e.getMessage());
if (tdsReader.getConnection().isSessionUnAvailable()) {
processedResponse = true;
attentionPending = false;
}
}
}
if (attentionPending) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": processing attention ack...");
try {
TDSParser.parse(tdsReader, "attention ack");
}
catch (SQLServerException e) {
if (tdsReader.getConnection().isSessionUnAvailable()) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": giving up on attention ack after connection closed by exception: " + e);
attentionPending = false;
}
else {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": ignored exception: " + e);
}
}
// If the parser returns to us without processing the expected attention ack,
// then assume that no attention ack is forthcoming from the server and
// terminate the connection to prevent any other command from executing.
if (attentionPending) {
logger.severe(this + ": expected attn ack missing or not processed; terminating connection...");
try {
tdsReader.throwInvalidTDS();
}
catch (SQLServerException e) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": ignored expected invalid TDS exception: " + e);
assert tdsReader.getConnection().isSessionUnAvailable();
attentionPending = false;
}
}
}
// Postcondition:
// Response has been processed and there is no attention pending -- the command is closed.
// Of course the connection may be closed too, but the command is done regardless...
assert processedResponse && !attentionPending;
}
/**
* Interrupts execution of this command, typically from another thread.
*
* Only the first interrupt has any effect. Subsequent interrupts are ignored. Interrupts are also ignored until enabled. If interrupting the
* command requires an attention signal to be sent to the server, then this method sends that signal if the command's request is already complete.
*
* Signalling mechanism is "fire and forget". It is up to either the execution thread or, possibly, a detaching thread, to ensure that any pending
* attention ack later will be received and processed.
*
* @param reason
* the reason for the interrupt, typically cancel or timeout.
* @throws SQLServerException
* if interrupting fails for some reason. This call does not throw the reason for the interrupt.
*/
void interrupt(String reason) throws SQLServerException {
// Multiple, possibly simultaneous, interrupts may occur.
// Only the first one should be recognized and acted upon.
synchronized (interruptLock) {
if (interruptsEnabled && !wasInterrupted()) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": Raising interrupt for reason:" + reason);
wasInterrupted = true;
interruptReason = reason;
if (requestComplete)
attentionPending = tdsWriter.sendAttention();
}
}
}
private boolean interruptChecked = false;
/**
* Checks once whether an interrupt has occurred, and, if it has, throws an exception indicating that fact.
*
* Any calls after the first to check for interrupts are no-ops. This method is called periodically from this command's execution thread to notify
* the app when an interrupt has happened.
*
* It should only be called from places where consistent behavior can be ensured after the exception is thrown. For example, it should not be
* called at arbitrary times while processing the response, as doing so could leave the response token stream in an inconsistent state. Currently,
* response processing only checks for interrupts after every result or OUT parameter.
*
* Request processing checks for interrupts before writing each packet.
*
* @throws SQLServerException
* if this command was interrupted, throws the reason for the interrupt.
*/
final void checkForInterrupt() throws SQLServerException {
// Throw an exception with the interrupt reason if this command was interrupted.
// Note that the interrupt reason may be null. Checking whether the
// command was interrupted does not require the interrupt lock since only one
// of the shared state variables is being manipulated; interruptChecked is not
// shared with the interrupt thread.
if (wasInterrupted() && !interruptChecked) {
interruptChecked = true;
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": throwing interrupt exception, reason: " + interruptReason);
throw new SQLServerException(interruptReason, SQLState.STATEMENT_CANCELED, DriverError.NOT_SET, null);
}
}
/**
* Notifies this command when no more request packets are to be sent to the server.
*
* After the last packet has been sent, the only way to interrupt the request is to send an attention signal from the interrupt() method.
*
* Note that this method is called when the request completes normally (last packet sent with EOM bit) or when it completes after being
* interrupted (0 or more packets sent with no EOM bit).
*/
final void onRequestComplete() throws SQLServerException {
assert !requestComplete;
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": request complete");
synchronized (interruptLock) {
requestComplete = true;
// If this command was interrupted before its request was complete then
// we need to send the attention signal if necessary. Note that if no
// attention signal is sent (i.e. no packets were sent to the server before
// the interrupt happened), then don't expect an attention ack or any
// other response.
if (!interruptsEnabled) {
assert !attentionPending;
assert !processedResponse;
assert !readingResponse;
processedResponse = true;
}
else if (wasInterrupted()) {
if (tdsWriter.isEOMSent()) {
attentionPending = tdsWriter.sendAttention();
readingResponse = attentionPending;
}
else {
assert !attentionPending;
readingResponse = tdsWriter.ignoreMessage();
}
processedResponse = !readingResponse;
}
else {
assert !attentionPending;
assert !processedResponse;
readingResponse = true;
}
}
}
/**
* Notifies this command when the last packet of the response has been read.
*
* When the last packet is read, interrupts are disabled. If an interrupt occurred prior to disabling that caused an attention signal to be sent
* to the server, then an extra packet containing the attention ack is read.
*
* This ensures that on return from this method, the TDS channel is clear of all response packets for this command.
*
* Note that this method is called for the attention ack message itself as well, so we need to be sure not to expect more than one attention
* ack...
*/
final void onResponseEOM() throws SQLServerException {
boolean readAttentionAck = false;
// Atomically disable interrupts and check for a previous interrupt requiring
// an attention ack to be read.
synchronized (interruptLock) {
if (interruptsEnabled) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": disabling interrupts");
// Determine whether we still need to read the attention ack packet.
// When a command is interrupted, Yukon (and later) always sends a response
// containing at least a DONE(ERROR) token before it sends the attention ack,
// even if the command's request was not complete.
readAttentionAck = attentionPending;
interruptsEnabled = false;
}
}
// If an attention packet needs to be read then read it. This should
// be done outside of the interrupt lock to avoid unnecessarily blocking
// interrupting threads. Note that it is remotely possible that the call
// to readPacket won't actually read anything if the attention ack was
// already read by TDSCommand.detach(), in which case this method could
// be called from multiple threads, leading to a benign race to clear the
// readingResponse flag.
if (readAttentionAck)
tdsReader.readPacket();
readingResponse = false;
}
/**
* Notifies this command when the end of its response token stream has been reached.
*
* After this call, we are guaranteed that tokens in the response have been processed.
*/
final void onTokenEOF() {
processedResponse = true;
}
/**
* Notifies this command when the attention ack (a DONE token with a special flag) has been processed.
*
* After this call, the attention ack should no longer be expected.
*/
final void onAttentionAck() {
assert attentionPending;
attentionPending = false;
}
/**
* Starts sending this command's TDS request to the server.
*
* @param tdsMessageType
* the type of the TDS message (RPC, QUERY, etc.)
* @return the TDS writer used to write the request.
* @throws SQLServerException
* on any error, including acknowledgement of an interrupt.
*/
final TDSWriter startRequest(byte tdsMessageType) throws SQLServerException {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": starting request...");
// Start this command's request message
try {
tdsWriter.startMessage(this, tdsMessageType);
}
catch (SQLServerException e) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": starting request: exception: " + e.getMessage());
throw e;
}
// (Re)initialize this command's interrupt state for its current execution.
// To ensure atomically consistent behavior, do not leave the interrupt lock
// until interrupts have been (re)enabled.
synchronized (interruptLock) {
requestComplete = false;
readingResponse = false;
processedResponse = false;
attentionPending = false;
wasInterrupted = false;
interruptReason = null;
interruptsEnabled = true;
}
return tdsWriter;
}
/**
* Finishes the TDS request and then starts reading the TDS response from the server.
*
* @return the TDS reader used to read the response.
* @throws SQLServerException
* if there is any kind of error.
*/
final TDSReader startResponse() throws SQLServerException {
return startResponse(false);
}
final TDSReader startResponse(boolean isAdaptive) throws SQLServerException {
// Finish sending the request message. If this command was interrupted
// at any point before endMessage() returns, then endMessage() throws an
// exception with the reason for the interrupt. Request interrupts
// are disabled by the time endMessage() returns.
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": finishing request");
try {
tdsWriter.endMessage();
}
catch (SQLServerException e) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this + ": finishing request: endMessage threw exception: " + e.getMessage());
throw e;
}
// If command execution is subject to timeout then start timing until
// the server returns the first response packet.
if (null != timeoutTimer) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Starting timer...");
timeoutTimer.start();
}
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Reading response...");
try {
// Wait for the server to execute the request and read the first packet
// (responseBuffering=adaptive) or all packets (responseBuffering=full)
// of the response.
if (isAdaptive) {
tdsReader.readPacket();
}
else {
while (tdsReader.readPacket())
;
}
}
catch (SQLServerException e) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Exception reading response: " + e.getMessage());
throw e;
}
finally {
// If command execution was subject to timeout then stop timing as soon
// as the server returns the first response packet or errors out.
if (null != timeoutTimer) {
if (logger.isLoggable(Level.FINEST))
logger.finest(this.toString() + ": Stopping timer...");
timeoutTimer.stop();
}
}
return tdsReader;
}
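// Illustrative sketch (hypothetical, not used by the driver): the typical shape of a
// concrete command's doExecute() implementation -- start the request, write its payload,
// then switch to reading and processing the response.
private void typicalDoExecuteExample(byte tdsMessageType) throws SQLServerException {
TDSWriter requestWriter = startRequest(tdsMessageType); // interrupts become enabled here
// ... write the request payload using requestWriter ...
TDSReader responseReader = startResponse(); // sends EOM and starts the timeout timer, if any
processResponse(responseReader); // consume the response token stream
}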
}
/**
* UninterruptableTDSCommand encapsulates an uninterruptable TDS conversation.
*
* TDSCommands have interruptability built in. However, some TDSCommands such as DTC commands, connection commands, cursor close and prepared
* statement handle close shouldn't be interruptable. This class provides a base implementation for such commands.
*/
abstract class UninterruptableTDSCommand extends TDSCommand {
UninterruptableTDSCommand(String logContext) {
super(logContext, 0);
}
final void interrupt(String reason) throws SQLServerException {
// Interrupting an uninterruptable command is a no-op. That is,
// it can happen, but it should have no effect.
logger.finest(toString() + " Ignoring interrupt of uninterruptable TDS command; Reason:" + reason);
}
}
|
package protocolsupport.protocol.pipeline.common;
import java.io.IOException;
import java.nio.channels.ClosedChannelException;
import java.text.MessageFormat;
import java.util.HashSet;
import org.bukkit.Bukkit;
import org.bukkit.plugin.java.JavaPlugin;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.timeout.ReadTimeoutException;
import protocolsupport.ProtocolSupport;
import protocolsupport.api.events.ConnectionCloseEvent;
import protocolsupport.api.events.ConnectionOpenEvent;
import protocolsupport.api.events.PlayerDisconnectEvent;
import protocolsupport.protocol.ConnectionImpl;
import protocolsupport.protocol.storage.ProtocolStorage;
import protocolsupport.zplatform.ServerPlatform;
import protocolsupport.zplatform.network.NetworkManagerWrapper;
public class LogicHandler extends ChannelDuplexHandler {
private static final HashSet<Class<? extends Throwable>> ignoreExceptions = new HashSet<>();
static {
ignoreExceptions.add(ClosedChannelException.class);
ignoreExceptions.add(ReadTimeoutException.class);
ignoreExceptions.add(IOException.class);
}
private final ConnectionImpl connection;
public LogicHandler(ConnectionImpl connection) {
this.connection = connection;
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (connection.handlePacketReceive(msg)) {
super.channelRead(ctx, msg);
}
}
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
if (connection.handlePacketSend(msg)) {
super.write(ctx, msg, promise);
} else {
promise.setSuccess();
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable e) throws Exception {
if (ServerPlatform.get().getMiscUtils().isDebugging() && !ignoreExceptions.contains(e.getClass())) {
super.exceptionCaught(ctx, new NetworkException(e, connection));
} else {
super.exceptionCaught(ctx, e);
}
}
private static final class NetworkException extends Exception {
private static final long serialVersionUID = 1L;
public NetworkException(Throwable original, ConnectionImpl connection) {
super(MessageFormat.format(
"ProtocolSupport(buildinfo: {0}): Network exception occured(address: {1}, username: {2}, version: {3})",
JavaPlugin.getPlugin(ProtocolSupport.class).getBuildInfo(),
connection.getAddress(),
connection.getNetworkManagerWrapper().getUserName(),
connection.getVersion()
), original);
}
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
super.channelActive(ctx);
Bukkit.getPluginManager().callEvent(new ConnectionOpenEvent(connection));
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
super.channelInactive(ctx);
NetworkManagerWrapper networkmanager = connection.getNetworkManagerWrapper();
String username = networkmanager.getUserName();
if (username != null) {
Bukkit.getPluginManager().callEvent(new PlayerDisconnectEvent(connection, username));
}
Bukkit.getPluginManager().callEvent(new ConnectionCloseEvent(connection));
ProtocolStorage.removeConnection(networkmanager.getRawAddress());
}
}
|
package com.robohorse.pagerbullet;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.support.annotation.IntDef;
import android.support.v4.content.ContextCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
public class PagerBullet extends FrameLayout {
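// Matches every character that is not a digit or a dot; used to strip units (e.g. "dp")
// from the panelHeightInDp attribute value before parsing it as a float.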
private static final String DIGIT_PATTERN = "[^0-9.]";
private static final int DEFAULT_INDICATOR_OFFSET_VALUE = 20;
private int offset = DEFAULT_INDICATOR_OFFSET_VALUE;
private ViewPager viewPager;
private TextView textIndicator;
private LinearLayout layoutIndicator;
private View indicatorContainer;
private int activeColorTint;
private int inactiveColorTint;
public PagerBullet(Context context) {
super(context);
init(context);
}
public PagerBullet(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
setAttributes(context, attrs);
}
public PagerBullet(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context);
setAttributes(context, attrs);
}
private void setAttributes(Context context, AttributeSet attrs) {
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.PagerBullet);
String heightValue = typedArray.getString(R.styleable.PagerBullet_panelHeightInDp);
if (null != heightValue) {
heightValue = heightValue.replaceAll(DIGIT_PATTERN, "");
float height = Float.parseFloat(heightValue);
FrameLayout.LayoutParams params = (LayoutParams) indicatorContainer.getLayoutParams();
params.height = Math.round(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, height,
getResources().getDisplayMetrics()));
indicatorContainer.requestLayout();
}
typedArray.recycle();
}
public void setIndicatorTintColorScheme(int activeColorTint, int inactiveColorTint) {
this.activeColorTint = activeColorTint;
this.inactiveColorTint = inactiveColorTint;
invalidateBullets();
}
public void setTextSeparatorOffset(int offset) {
this.offset = offset;
}
public void setAdapter(final PagerAdapter adapter) {
viewPager.setAdapter(adapter);
invalidateBullets(adapter);
}
public void setCurrentItem(int position) {
viewPager.setCurrentItem(position);
setIndicatorItem(position);
}
public ViewPager getViewPager() {
return viewPager;
}
public void addOnPageChangeListener(ViewPager.OnPageChangeListener onPageChangeListener) {
viewPager.addOnPageChangeListener(onPageChangeListener);
}
public void invalidateBullets() {
PagerAdapter adapter = viewPager.getAdapter();
if (null != adapter) {
invalidateBullets(adapter);
}
}
public void invalidateBullets(PagerAdapter adapter) {
final boolean hasSeparator = hasSeparator();
textIndicator.setVisibility(hasSeparator ? VISIBLE : INVISIBLE);
layoutIndicator.setVisibility(hasSeparator ? INVISIBLE : VISIBLE);
if (!hasSeparator) {
initIndicator(adapter.getCount());
}
setIndicatorItem(viewPager.getCurrentItem());
}
public void setIndicatorVisibility(boolean visibility) {
indicatorContainer.setVisibility(visibility ? VISIBLE : INVISIBLE);
}
private void init(Context context) {
LayoutInflater layoutInflater = LayoutInflater.from(context);
View rootView = layoutInflater.inflate(R.layout.item_view_pager, this);
indicatorContainer = rootView.findViewById(R.id.pagerBulletIndicatorContainer);
textIndicator = (TextView) indicatorContainer.findViewById(R.id.pagerBulletIndicatorText);
layoutIndicator = (LinearLayout) indicatorContainer.findViewById(R.id.pagerBulletIndicator);
viewPager = (ViewPager) rootView.findViewById(R.id.viewPagerBullet);
viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
}
@Override
public void onPageSelected(int position) {
setIndicatorItem(position);
}
@Override
public void onPageScrollStateChanged(int state) {
}
});
}
private void initIndicator(int count) {
layoutIndicator.removeAllViews();
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT
);
int margin = Math.round(getContext().getResources()
.getDimension(R.dimen.pager_bullet_indicator_dot_margin));
params.setMargins(margin, 0, margin, 0);
Drawable drawableInactive = ContextCompat.getDrawable(getContext(),
R.drawable.inactive_dot);
for (int i = 0; i < count; i++) {
ImageView imageView = new ImageView(getContext());
imageView.setImageDrawable(drawableInactive);
layoutIndicator.addView(imageView, params);
}
}
private void setIndicatorItem(int index) {
if (!hasSeparator()) {
setItemBullet(index);
} else {
setItemText(index);
}
}
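/**
 * Decides which indicator style to use: once the adapter holds more items than the
 * configured offset (20 by default, see {@link #setTextSeparatorOffset(int)}), the bullet
 * dots are replaced by the "current / total" text indicator.
 *
 * @return true if the text indicator should be shown instead of the bullet dots
 */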
private boolean hasSeparator() {
PagerAdapter pagerAdapter = viewPager.getAdapter();
return null != pagerAdapter && pagerAdapter.getCount() > offset;
}
private void setItemText(int index) {
PagerAdapter adapter = viewPager.getAdapter();
if (null != adapter) {
final int count = adapter.getCount();
textIndicator.setText(String.format(getContext()
.getString(R.string.pager_bullet_separator), index + 1, count));
}
}
private void setItemBullet(int selectedPosition) {
Drawable drawableInactive = ContextCompat.getDrawable(getContext(), R.drawable.inactive_dot);
drawableInactive = wrapTintDrawable(drawableInactive, inactiveColorTint);
Drawable drawableActive = ContextCompat.getDrawable(getContext(), R.drawable.active_dot);
drawableActive = wrapTintDrawable(drawableActive, activeColorTint);
final int indicatorItemsCount = layoutIndicator.getChildCount();
for (int position = 0; position < indicatorItemsCount; position++) {
ImageView imageView = (ImageView) layoutIndicator.getChildAt(position);
if (position != selectedPosition) {
imageView.setImageDrawable(drawableInactive);
} else {
imageView.setImageDrawable(drawableActive);
}
}
}
public static Drawable wrapTintDrawable(Drawable sourceDrawable, int color) {
if (color != 0) {
Drawable wrapDrawable = DrawableCompat.wrap(sourceDrawable);
DrawableCompat.setTint(wrapDrawable, color);
wrapDrawable.setBounds(0, 0, wrapDrawable.getIntrinsicWidth(),
wrapDrawable.getIntrinsicHeight());
return wrapDrawable;
} else {
return sourceDrawable;
}
}
}
|
package com.sidie88.IndocyberTest;
import java.math.BigDecimal;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.zkoss.bind.annotation.BindingParam;
import org.zkoss.bind.annotation.Command;
import org.zkoss.bind.annotation.Init;
import org.zkoss.bind.annotation.NotifyChange;
import org.zkoss.zhtml.Messagebox;
import org.zkoss.zk.ui.select.annotation.VariableResolver;
import org.zkoss.zk.ui.select.annotation.Wire;
import org.zkoss.zk.ui.select.annotation.WireVariable;
import org.zkoss.zul.ListModelList;
import org.zkoss.zul.Textbox;
import com.sidie88.IndocyberTest.entity.Invoice;
import com.sidie88.IndocyberTest.entity.InvoiceDetails;
import com.sidie88.IndocyberTest.entity.Product;
import com.sidie88.IndocyberTest.services.InvoiceService;
import com.sidie88.IndocyberTest.services.ProductService;
@VariableResolver(org.zkoss.zkplus.spring.DelegatingVariableResolver.class)
public class MyViewModel {
@WireVariable
private ProductService productService;
@WireVariable
private InvoiceService invoiceService;
private ListModelList<Product> productListModel;
private ListModelList<InvoiceDetails> invoiceListModel;
private String message;
private String invoiceNo;
private Date transDate;
private String customer;
private String searchBox;
private String total;
public String getSearchBox() {
return searchBox;
}
public void setSearchBox(String searchBox) {
this.searchBox = searchBox;
}
public String getInvoiceNo() {
return invoiceNo;
}
public void setInvoiceNo(String invoiceNo) {
this.invoiceNo = invoiceNo;
}
public Date getTransDate() {
return transDate;
}
public void setTransDate(Date transDate) {
this.transDate = transDate;
}
@Init
public void init() {
initForm();
}
public ListModelList<Product> getProductListModel() {
return productListModel;
}
public void setProductListModel(ListModelList<Product> productListModel) {
this.productListModel = productListModel;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public String getCustomer() {
return customer;
}
public void setCustomer(String customer) {
this.customer = customer;
}
@Command
public void searchProduct() {
productListModel.clear();
List<Product> list = productService.getListSearchProduct(searchBox);
productListModel.addAll(list);
}
@Command @NotifyChange({"total"})
public void addToCart(@BindingParam("p") Product p) {
if (p == null) {
return;
}
InvoiceDetails ids = new InvoiceDetails(p.getProductId(),
p.getProductName(), p.getPrice(), 1);
if (invoiceListModel.contains(ids)) {
InvoiceDetails invoiceDetails = invoiceListModel
.get(invoiceListModel.indexOf(ids));
invoiceListModel.remove(invoiceDetails);
invoiceDetails.setQuantity(invoiceDetails.getQuantity() + 1);
invoiceDetails.setSubTotal(invoiceDetails.getPrice().multiply(
new BigDecimal(invoiceDetails.getQuantity())));
invoiceListModel.add(invoiceDetails);
} else {
ids.setSubTotal(ids.getPrice().multiply(
new BigDecimal(ids.getQuantity())));
invoiceListModel.add(ids);
}
}
@Command
public void removeFromCart(@BindingParam("inv") InvoiceDetails invD) {
if (invD == null) {
return;
}
if (invoiceListModel.contains(invD)) {
InvoiceDetails invoiceDetails = invoiceListModel
.get(invoiceListModel.indexOf(invD));
invoiceListModel.remove(invoiceDetails);
if (invoiceDetails.getQuantity() > 1) {
invoiceDetails.setQuantity(invoiceDetails.getQuantity() - 1);
invoiceListModel.add(invoiceDetails);
}
} else {
invoiceListModel.remove(invD);
}
}
@Command @NotifyChange({"invoiceNo","total"})
public void saveInvoice() throws Exception{
try {
if(invoiceListModel.size()==0){
throw new Exception("Please Add Invoice Items First");
}
if(transDate==null){
throw new Exception("Please Set Transaction Date Fist");
}
Invoice invoice = new Invoice();
invoice.setInvoiceNo(invoiceNo);
invoice.setCustomer(customer);
invoice.setTransDate(transDate);
invoice.setInvoiceDetails(invoiceListModel);
invoiceService.addInvoice(invoice);
Messagebox.show("Invoice save succesfully");
reset();
} catch (Exception e) {
Messagebox.show(e.getMessage());
e.printStackTrace();
}
}
public ListModelList<InvoiceDetails> getInvoiceListModel() {
return invoiceListModel;
}
public void setInvoiceListModel(
ListModelList<InvoiceDetails> invoiceListModel) {
this.invoiceListModel = invoiceListModel;
}
public String getTotal() {
BigDecimal amount = BigDecimal.ZERO;
for (InvoiceDetails i : invoiceListModel) {
amount = amount.add(i.getSubTotal());
}
return amount.toString();
}
public void setTotal(String total) {
this.total = total;
}
private void initForm() {
List<Product> prodList = productService.getListProduct();
productListModel = new ListModelList<Product>(prodList);
invoiceListModel = new ListModelList<InvoiceDetails>();
reset();
}
private void reset(){
invoiceListModel.clear();
invoiceNo = generateInvoiceNo();
customer = "";
}
private String generateInvoiceNo(){
DateTime jodaTime = new DateTime();
DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyyMMddHHmm"); // yyyy = calendar year; Joda's YYYY is the week-based year
Integer sequence = invoiceService.getCountInvoice()+1;
return String.format("%s%s", formatter.print(jodaTime), sequence.toString());
}
}
|
package com.skcraft.smes.util.setup;
import com.skcraft.smes.SMES;
import com.skcraft.smes.item.ItemMeta;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
public class SMESItems {
public static ItemMeta itemDusts;
public static ItemStack dustYttrium = null;
public static ItemStack dustBarium = null;
public static ItemStack dustNiobium = null;
public static void preInit() {
initItems();
registerItems();
}
public static void init() {
registerRecipes();
}
private static void initItems() {
SMES.log.info("Composing items...");
itemDusts = new ItemMeta("dust");
dustYttrium = itemDusts.addMetaItem(0, new ItemMeta.MetaItem("dustYttrium", null, EnumRarity.common, true), true, true);
dustBarium = itemDusts.addMetaItem(1, new ItemMeta.MetaItem("dustBarium", null, EnumRarity.common, true), true, true);
dustNiobium = itemDusts.addMetaItem(2, new ItemMeta.MetaItem("dustNiobium", null, EnumRarity.common, true), true, true);
SMES.log.info("Items created");
}
private static void registerItems() {
GameRegistry.registerItem(itemDusts, SMES.PREFIX + "dust");
}
private static void registerRecipes() {}
// public static Item itemYttriumDust;
// public static Item itemBariumDust;
// public static Item itemNiobiumDust;
// public static void preInit() {
// SMES.log.info("Initializing items...");
// itemYttriumDust = new ItemSMESDust().setUnlocalizedName("dustYttrium");
// registerItem(itemYttriumDust);
// itemBariumDust = new ItemSMESDust().setUnlocalizedName("dustBarium");
// registerItem(itemBariumDust);
// itemNiobiumDust = new ItemSMESDust().setUnlocalizedName("dustNiobium");
// registerItem(itemNiobiumDust);
// SMES.log.info("Items initialized");
// private static void registerItem(Item item, String suffix) {
// String name = item.getUnlocalizedName().replace("item." + SMES.PREFIX, "") + suffix;
// GameRegistry.registerItem(item, name);
// private static void registerItem(Item item) {
// registerItem(item, "");
}
|
package smp.components.staff;
import java.util.ArrayList;
import java.util.Set;
import smp.ImageIndex;
import smp.ImageLoader;
import smp.SoundfontLoader;
import smp.components.Values;
import smp.components.InstrumentIndex;
import smp.components.staff.sequences.StaffAccidental;
import smp.components.staff.sequences.StaffNote;
import smp.components.staff.sequences.StaffNoteLine;
import smp.components.topPanel.ButtonLine;
import smp.stateMachine.Settings;
import smp.stateMachine.StateMachine;
import javafx.collections.ObservableList;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.image.ImageView;
import javafx.scene.input.KeyCode;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.StackPane;
public class StaffInstrumentEventHandler_Hack implements EventHandler<Event> {
/** The line number of this note, on the screen. */
private int line;
/** The position of this note. */
private int position;
/** Whether the mouse is in the frame or not. */
private static boolean focus = false;
/**
* This is the list of image notes that we have. These should all be
* ImageView-type objects.
*/
private static ObservableList<Node> theImages;
/** The StackPane that will display sharps, flats, etc. */
private static ObservableList<Node> accList;
/**
* This is the <code>ImageView</code> object responsible for displaying the
* silhouette of the note that we are about to place on the staff.
*/
private static ImageView silhouette = new ImageView();
/** The pointer to the staff object that this handler is linked to. */
private Staff theStaff;
/**
* This is the <code>ImageView</code> object responsible for displaying the
* silhouette of the sharp / flat of the note that we are about to place on
* the staff.
*/
private static ImageView accSilhouette;
/** The topmost image of the instrument. */
private StaffNote theStaffNote;
/**
* This is the image that holds the different types of sharps/flats etc.
*/
private StaffAccidental accidental;
/** This is the ImageLoader class. */
private static ImageLoader il;
/** This is the amount that we want to sharp / flat / etc. a note. */
private static int acc = 0;
/**
* Constructor for this StaffInstrumentEventHandler. This creates a handler that is
* linked to a Staff and an ImageLoader; the StackPane and position are resolved from the mouse coordinates.
*
* -@param stPane
* The StackPane that we are interested in.
* This will be updated whenever the mouse moves to a new stackpane.
* -@param acc
* The accidental display pane.
* This will be updated whenever the mouse moves to a new stackpane.
* -@param pos
* The position that this handler is located on the staff.
* This will be updated whenever the mouse moves.
* -@param l
* The line of this event handler. Typically between 0 and 9.
* This will be updated whenever the mouse moves.
* @param s
* The pointer to the Staff object that this event handler is
* linked to.
*/
public StaffInstrumentEventHandler_Hack(Staff s, ImageLoader i) {
// disableAllStackPanes();
il = i;
// position = pos;
// line = l;
// theImages = stPane.getChildren();//-
// accList = acc.getChildren();//-
theStaff = s;
accSilhouette = new ImageView();
if ((Settings.debug & 0b10) == 0b10) {
// System.out.println("Line: " + l);
// System.out.println("Position: " + pos);
}
}
/**
* Disable all the stack panes. Called when the first mouse event in the
* scene is registered. It is called at that point because the stackpanes
* may not have been initialized before then.
*/
private void disableAllStackPanes() {
for (int index = 0; index < Values.NOTELINES_IN_THE_WINDOW; index++) {
for (int i = 0; i < Values.NOTES_IN_A_LINE; i++) {
StackPane[] noteAndAcc = theStaff.getNoteMatrix().getNote(index, i);
noteAndAcc[0].setDisable(true);
}
}
}
@Override
public void handle(Event event) {
if(event instanceof MouseEvent){
int lineTmp = getLine(((MouseEvent)event).getX());
int positionTmp = getPosition(((MouseEvent)event).getY());
//invalid
if(lineTmp < 0 || positionTmp < 0) {//MOUSE_EXITED
InstrumentIndex theInd = ButtonLine.getSelectedInstrument();
mouseExited(theInd);
return;
}
//new note
if(line != lineTmp || position != positionTmp){
line = lineTmp;
position = positionTmp;
StackPane[] noteAndAcc = theStaff.getNoteMatrix().getNote(line, position);
if(!noteAndAcc[0].isDisabled())
disableAllStackPanes();
theImages = noteAndAcc[0].getChildren();
accList = noteAndAcc[1].getChildren();
}
}
InstrumentIndex theInd = ButtonLine.getSelectedInstrument();
//Drag-add notes, hold e to drag-remove notes
if (event instanceof MouseEvent && ((MouseEvent) event).isPrimaryButtonDown()) {
MouseButton b = ((MouseEvent) event).getButton();
if (b == MouseButton.PRIMARY)
leftMousePressed(theInd);
else if (b == MouseButton.SECONDARY)
rightMousePressed(theInd);
event.consume();
StateMachine.setSongModified(true);
}
else if (event.getEventType() == MouseEvent.MOUSE_PRESSED) {
MouseButton b = ((MouseEvent) event).getButton();
if (b == MouseButton.PRIMARY)
leftMousePressed(theInd);
else if (b == MouseButton.SECONDARY)
rightMousePressed(theInd);
event.consume();
StateMachine.setSongModified(true);
} else if (event.getEventType() == MouseEvent.MOUSE_MOVED) {//was MOUSE_ENTERED
focus = true;
mouseEntered(theInd);
event.consume();
} else if (event.getEventType() == MouseEvent.MOUSE_EXITED) {
focus = false;
mouseExited(theInd);
event.consume();
}
}
/**
* The method that is called when the left mouse button is pressed. This is
* generally the signal to add an instrument to that line.
*
* @param theInd
* The InstrumentIndex corresponding to what instrument is
* currently selected.
*/
private void leftMousePressed(InstrumentIndex theInd) {
if (StateMachine.getButtonsPressed().contains(KeyCode.E)) {
removeNote();
} else {
placeNote(theInd);
}
}
/**
* Places a note where the mouse currently is.
*
* @param theInd
* The <code>InstrumentIndex</code> that we are going to use to
* place this note.
*/
private void placeNote(InstrumentIndex theInd) {
boolean mute = StateMachine.isMutePressed();
boolean muteA = StateMachine.isMuteAPressed();
if (!mute && !muteA)
playSound(theInd, position, acc);
theStaffNote = new StaffNote(theInd, position, acc);
theStaffNote.setMuteNote(muteA ? 2 : mute ? 1 : 0);
if (!mute && !muteA) {
theStaffNote.setImage(il.getSpriteFX(theInd.imageIndex()));
} else if (mute) {
theStaffNote.setImage(il.getSpriteFX(theInd.imageIndex().alt()));
} else if (muteA) {
theStaffNote.setImage(il.getSpriteFX(theInd.imageIndex()
.silhouette()));
}
accidental = new StaffAccidental(theStaffNote);
accidental.setImage(il.getSpriteFX(Staff.switchAcc(acc)));
theImages.remove(silhouette);
accList.remove(accSilhouette);
if (!theImages.contains(theStaffNote))
theImages.add(theStaffNote);
if (!accList.contains(accidental))
accList.add(accidental);
StaffNoteLine temp = theStaff.getSequence().getLine(
line + StateMachine.getMeasureLineNum());
if (temp.isEmpty()) {
temp.setVolumePercent(((double) Values.DEFAULT_VELOCITY)
/ Values.MAX_VELOCITY);
}
if (!temp.contains(theStaffNote))
temp.add(theStaffNote);
StaffVolumeEventHandler sveh = theStaff.getNoteMatrix().getVolHandler(
line);
sveh.updateVolume();
theStaff.redraw();
}
/**
* The method that is called when the right mouse button is pressed. This is
* generally the signal to remove the instrument from that line.
*
* @param theInd
* The InstrumentIndex corresponding to what instrument is
* currently selected. (currently not actually used, but can be
* extended later to selectively remove instruments.)
*/
private void rightMousePressed(InstrumentIndex theInd) {
removeNote();
}
/**
* This removes a note.
*/
private void removeNote() {
theImages.remove(silhouette);
accList.remove(accSilhouette);
if (!theImages.isEmpty())
theImages.remove(theImages.size() - 1);
if (!accList.isEmpty())
accList.remove(0);
StaffNoteLine temp = theStaff.getSequence().getLine(
line + StateMachine.getMeasureLineNum());
if (!temp.isEmpty()) {
ArrayList<StaffNote> nt = temp.getNotes();
for (int i = nt.size() - 1; i >= 0; i--) {
StaffNote s = nt.get(i);
if (s.getPosition() == position) {
nt.remove(i);
break;
}
}
}
if (temp.isEmpty()) {
StaffVolumeEventHandler sveh = theStaff.getNoteMatrix()
.getVolHandler(line);
sveh.setVolumeVisible(false);
}
theStaff.redraw();
}
/**
* The method that is called when the mouse enters the object.
*
* @param theInd
* The InstrumentIndex corresponding to what instrument is
* currently selected.
*/
private void mouseEntered(InstrumentIndex theInd) {
// StateMachine.setFocusPane(this);
// theStaff.getNoteMatrix().setFocusPane(this);
updateAccidental();
silhouette.setImage(il.getSpriteFX(theInd.imageIndex().silhouette()));
if (!theImages.contains(silhouette))
theImages.add(silhouette);
accSilhouette.setImage(il
.getSpriteFX(Staff.switchAcc(acc).silhouette()));
if (!accList.contains(accSilhouette))
accList.add(accSilhouette);
silhouette.setVisible(true);
accSilhouette.setVisible(true);
}
/**
* The method that is called when the mouse exits the object.
*
* @param theInd
* The InstrumentIndex corresponding to what instrument is
* currently selected.
*/
private void mouseExited(InstrumentIndex theInd) {
if(silhouette.getImage() != null)
theImages.remove(silhouette);
if(accSilhouette.getImage() != null)
accList.remove(accSilhouette);
}
/**
* Updates how much we want to sharp / flat a note.
*/
public static void updateAccidental() {
if (!focus)
return;
Set<KeyCode> bp = StateMachine.getButtonsPressed();
boolean ctrl = bp.contains(KeyCode.CONTROL);
boolean shift = bp.contains(KeyCode.SHIFT);
boolean alt = bp.contains(KeyCode.ALT) || bp.contains(KeyCode.ALT_GRAPH);
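// Modifier combinations map to accidentals:
// Ctrl+Alt -> double flat (-2), Ctrl+Shift -> double sharp (+2),
// Shift -> sharp (+1), Ctrl or Alt alone -> flat (-1), nothing -> natural (0).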
if (alt && ctrl)
acc = -2;
else if (ctrl && shift)
acc = 2;
else if (shift)
acc = 1;
else if (alt || ctrl)
acc = -1;
else
acc = 0;
switch (acc) {
case 2:
accSilhouette.setImage(il.getSpriteFX(ImageIndex.DOUBLESHARP_SIL));
break;
case 1:
accSilhouette.setImage(il.getSpriteFX(ImageIndex.SHARP_SIL));
break;
case -1:
accSilhouette.setImage(il.getSpriteFX(ImageIndex.FLAT_SIL));
break;
case -2:
accSilhouette.setImage(il.getSpriteFX(ImageIndex.DOUBLEFLAT_SIL));
break;
default:
accSilhouette.setVisible(false);
break;
}
if (acc != 0)
accSilhouette.setVisible(true);
if (acc != 0 && !accList.contains(accSilhouette))
accList.add(accSilhouette);
if (acc != 0 && !theImages.contains(silhouette)) {
theImages.add(silhouette);
silhouette.setImage(il.getSpriteFX(ButtonLine
.getSelectedInstrument().imageIndex().silhouette()));
silhouette.setVisible(true);
}
//Cannot use this in a static context... will fix this later
// if ((Settings.debug & 0b01) == 0b01) {
// System.out.println(this);
}
/**
* Called whenever we request a redraw of the staff.
*/
public void redraw() {
if (!focus)
return;
InstrumentIndex ind = ButtonLine.getSelectedInstrument();
mouseExited(ind);
mouseEntered(ind);
}
/**
* Plays a sound given an index and a position.
*
* @param theInd
* The index at which this instrument is located at.
* @param pos
* The position at which this note is located at.
* @param acc
* The sharp / flat that we want to play this note at.
*/
private static void playSound(InstrumentIndex theInd, int pos, int acc) {
SoundfontLoader.playSound(Values.staffNotes[pos].getKeyNum(), theInd,
acc);
}
/**
* Sets the amount that we want to sharp / flat a note.
*
* @param accidental
* Any integer between -2 and 2.
*/
public void setAcc(int accidental) {
acc = accidental;
}
/**
* @return The amount that a note is to be offset from its usual position.
*/
public int getAcc() {
return acc;
}
/**
* @return The line that this handler is located on.
*/
public int getLine() {
return line;
}
/**
* @return Whether the mouse is currently in the frame.
*/
public boolean hasMouse() {
return focus;
}
@Override
public String toString() {
String out = "Line: " + (StateMachine.getMeasureLineNum() + line)
+ "\nPosition: " + position + "\nAccidental: " + acc;
return out;
}
/**
*
* @param x mouse pos
* @return line in the current window based on x coord
*/
private static int getLine(double x){
if(x < 135 || x > 775)
return -1;
return (((int)x - 135) / 64);
}
/**
*
* @param y mouse pos
* @return note position based on y coord
*/
private static int getPosition(double y){
if(y < 66 || y >= 354)
return -1;
return Values.NOTES_IN_A_LINE - (((int)y - 66) / 16) - 1;
}
}
|
package com.walkertribe.ian.iface;
import java.util.Collections;
import java.util.List;
import com.walkertribe.ian.protocol.ArtemisPacket;
import com.walkertribe.ian.protocol.ArtemisPacketException;
import com.walkertribe.ian.protocol.core.world.ObjectUpdatePacket;
import com.walkertribe.ian.world.ArtemisObject;
/**
* Object which reports the results of a packet parsing attempt.
* @author rjwut
*/
public class ParseResult {
private ArtemisPacket packet;
private List<ListenerMethod> interestedPacketListeners = Collections.emptyList();
private List<ListenerMethod> interestedObjectListeners = Collections.emptyList();
private ArtemisPacketException exception;
ParseResult() {
// make constructor accessible only to the package
}
/**
* Returns the packet object generated by the parse attempt.
*/
public ArtemisPacket getPacket() {
return packet;
}
/**
* Sets the packet object generated by the parse attempt.
*/
void setPacket(ArtemisPacket packet) {
this.packet = packet;
}
/**
* Adds a ListenerMethod that is interested in the packet.
*/
void setPacketListeners(List<ListenerMethod> listeners) {
interestedPacketListeners = listeners;
}
/**
* Adds a ListenerMethod that is interested in the objects in the packet.
* (Applies only to ObjectUpdatePackets.)
*/
void setObjectListeners(List<ListenerMethod> listeners) {
interestedObjectListeners = listeners;
}
/**
* Return any exception that occurred while parsing the packet. Note that
* this doesn't handle exceptions that occur before the payload is read,
* since in that case, IAN will not have gotten far enough along to even
* produce a ParseResult.
*/
public ArtemisPacketException getException() {
return exception;
}
/**
* Sets the exception that occurred during the parsing of this packet. This
* is only for non-fatal exceptions. A fatal exception (one occuring before
* the payload can be read) should cause the exception to be thrown
* instead.
*/
void setException(ArtemisPacketException exception) {
this.exception = exception;
}
/**
* Convenience method for isInterestingPacket() || isContainsInterestingObject().
*/
public boolean isInteresting() {
return !(interestedPacketListeners.isEmpty() && interestedObjectListeners.isEmpty());
}
/**
* Returns true if the packet was of interest to any listeners. Note that in
* the case of an ObjectUpdatePacket, there may be listeners that aren't
* interested in the packet itself, but are interested in certain types of
* objects the packet may contain. Thus, it's entirely possible for
* isInterestingPacket() to return false while isContainsInterestingObject()
* returns true.
*/
public boolean isInterestingPacket() {
return !interestedPacketListeners.isEmpty();
}
/**
* Returns true if the packet is an ObjectUpdatePacket and it contains an
* object of interest to one or more listeners.
*/
public boolean isContainsInterestingObject() {
return !interestedObjectListeners.isEmpty();
}
/**
* Fire the listeners that were interested in this packet or its contents.
*/
public void fireListeners() {
if (packet == null) {
return;
}
for (ListenerMethod method : interestedPacketListeners) {
method.offer(packet);
}
if (packet instanceof ObjectUpdatePacket) {
ObjectUpdatePacket ouPacket = (ObjectUpdatePacket) packet;
for (ArtemisObject obj : ouPacket.getObjects()) {
for (ListenerMethod method : interestedObjectListeners) {
method.offer(obj);
}
}
}
}
}
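// Hedged usage sketch (illustrative only, not part of IAN's public API): the packet reader
// builds a ParseResult per packet, attaches the interested listeners, then fires them.
//
// ParseResult result = ...; // produced by the reader after a parse attempt
// if (result.isInteresting()) {
//     result.fireListeners(); // notifies packet listeners and, for ObjectUpdatePackets, object listeners
// }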
|
package com.wandrell.velocity.tool;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.regex.Pattern;
import org.apache.maven.doxia.site.decoration.DecorationModel;
import org.apache.velocity.tools.ToolContext;
import org.apache.velocity.tools.config.DefaultKey;
import org.apache.velocity.tools.generic.SafeConfig;
import org.apache.velocity.tools.generic.ValueParser;
import org.codehaus.plexus.util.xml.Xpp3Dom;
@DefaultKey("config")
public final class ConfigTool extends SafeConfig {
/**
* Identifier for the current file.
* <p>
* This is a slug created from the current file's name.
*/
private String fileId;
/**
* Regex for multiple hyphens.
*/
private final Pattern multipleHyphen = Pattern.compile("-+");
/**
* Regex for non-latin characters.
*/
private final Pattern nonLatin = Pattern.compile("[^\\w-]");
/**
* Skin configuration node.
* <p>
* This is the {@code <skinConfig>} located in the site.xml file, inside the
* {@code <custom>} node.
*/
private Xpp3Dom skinConfig = new Xpp3Dom("");
/**
* Regex for whitespaces.
*/
private final Pattern whitespace = Pattern.compile("[\\s]");
/**
* Constructs an instance of the {@code ConfigTool}.
*/
public ConfigTool() {
super();
}
/**
* Returns a configuration's node property.
* <p>
* This will be the data on the site.xml file where the node is called like
* the property.
* <p>
* Thanks to Velocity, instead of using {@code $config.get("myproperty")},
* this method can be called as a getter by using {@code $config.myproperty}
* .
* <p>
* The method looks for the property in the {@code <skinConfig>} node of the
* site.xml file. If it is not found there, then the {@code null} value is
* returned.
*
* @param property
* the property being queried
* @return the value assigned to the property in the page or the global
* properties
*/
public final Xpp3Dom get(final String property) {
Xpp3Dom value; // Node with the property's value
checkNotNull(property, "Received a null pointer as property");
// Looks for it in the global properties
value = getSkinConfig().getChild(property);
return value;
}
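// Hedged usage sketch: thanks to Velocity's getter resolution a template can read a
// <skinConfig> entry either way (the "topNav" property name below is an illustrative
// assumption, not a predefined key):
//
// #if( $config.isTrue("topNav") )
// ## equivalent lookups: $config.topNav and $config.get("topNav")
// #end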
/**
* Returns the file identifier.
* <p>
* This is the slugged current file name.
* <p>
* It can be called through Velocity with the command {@code $config.fileId}
* .
*
* @return the file identifier
*/
public final String getFileId() {
return fileId;
}
/**
* Returns the boolean value of a property's value.
* <p>
* This will transform whatever value the property has assigned to a
* boolean.
*
* @param property
* the property to check
* @return the property's value transformed to a boolean
*/
public final Boolean isTrue(final String property) {
final Xpp3Dom value; // Node with the property's value
final Boolean result; // Value transformed to a boolean
checkNotNull(property, "Received a null pointer as property");
value = get(property);
if (value == null) {
result = false;
} else {
result = Boolean.valueOf(value.getValue());
}
return result;
}
/**
* Returns the regular expression for multiple hyphens.
*
* @return the regular expression for multiple hyphens
*/
private final Pattern getMultipleHyphenPattern() {
return multipleHyphen;
}
/**
* Returns the non-latin characters regular expression.
*
* @return the non-latin characters regular expression
*/
private final Pattern getNonLatinPattern() {
return nonLatin;
}
/**
* Returns the skin config node.
*
* @return the skin config node
*/
private final Xpp3Dom getSkinConfig() {
return skinConfig;
}
/**
* Returns the regular expression for whitespaces.
*
* @return the regular expression for whitespaces
*/
private final Pattern getWhitespacePattern() {
return whitespace;
}
/**
* Loads the file identifier from the velocity tools context.
* <p>
* This is generated from the file's name.
*
* @param context
* the Velocity tools context
*/
private final void loadFileId(final ToolContext context) {
final Integer lastDot; // Location of the extension dot
final Object currentFileObj; // File's name as received
String currentFile; // File's name
if (context.containsKey(ConfigToolConstants.CURRENT_FILE_NAME_KEY)) {
currentFileObj = context
.get(ConfigToolConstants.CURRENT_FILE_NAME_KEY);
if (currentFileObj == null) {
setFileId("");
} else {
currentFile = String.valueOf(currentFileObj);
// Drops the extension
lastDot = currentFile.lastIndexOf('.');
if (lastDot >= 0) {
currentFile = currentFile.substring(0, lastDot);
}
// File name is slugged
setFileId(slug(currentFile));
}
} else {
setFileId("");
}
}
/**
* Processes the decoration model, acquiring the skin and page
* configuration.
* <p>
* The decoration model are the contents of the site.xml file.
*
* @param model
* decoration data
*/
private final void processDecoration(final DecorationModel model) {
final Object customObj; // Object for the <custom> node
final Xpp3Dom customNode; // <custom> node
final Xpp3Dom skinNode; // <skinConfig> node
customObj = model.getCustom();
if (customObj instanceof Xpp3Dom) {
// This is the <custom> node in the site.xml file
customNode = (Xpp3Dom) customObj;
// Acquires <skinConfig> node
skinNode = customNode.getChild(ConfigToolConstants.SKIN_KEY);
if (skinNode == null) {
setSkinConfig(new Xpp3Dom(""));
} else {
setSkinConfig(skinNode);
}
}
}
/**
* Sets the identifier for the current file.
*
* @param id
* the identifier for the current file
*/
private final void setFileId(final String id) {
fileId = id;
}
/**
* Sets the skin config node.
*
* @param config
* the skin config node.
*/
private final void setSkinConfig(final Xpp3Dom config) {
skinConfig = config;
}
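/**
 * Slugs the received text: slashes, backslashes, dots and underscores become hyphens, runs
 * of hyphens are collapsed, whitespace is replaced by hyphens, non-latin characters are
 * dropped and the result is lower-cased. For example, "My Page_Name" becomes "my-page-name".
 *
 * @param text
 *            the text to slug
 * @return the slugged text
 */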
private final String slug(final String text) {
final String separator; // Separator for swapping whitespaces
String corrected; // Modified string
checkNotNull(text, "Received a null pointer as the text");
separator = "-";
corrected = text.replace('/', '-').replace('\\', '-').replace('.', '-')
.replace('_', '-');
// Collapses runs of hyphens into a single one
corrected = getMultipleHyphenPattern().matcher(corrected)
.replaceAll(separator);
// Removes white spaces
corrected = getWhitespacePattern().matcher(corrected)
.replaceAll(separator);
// Removes non-latin characters
corrected = getNonLatinPattern().matcher(corrected).replaceAll("");
return corrected.toLowerCase();
}
@Override
protected final void configure(final ValueParser values) {
final Object velocityContext; // Value from the parser
final ToolContext ctxt; // Casted context
final Object decorationObj; // Value of the decoration key
checkNotNull(values, "Received a null pointer as values");
velocityContext = values.get(ConfigToolConstants.VELOCITY_CONTEXT_KEY);
if (velocityContext instanceof ToolContext) {
ctxt = (ToolContext) velocityContext;
loadFileId(ctxt);
decorationObj = ctxt.get(ConfigToolConstants.DECORATION_KEY);
if (decorationObj instanceof DecorationModel) {
processDecoration((DecorationModel) decorationObj);
}
}
}
}
|
package au.com.windyroad.hyperstate.server;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.SpringApplicationContextLoader;
import org.springframework.boot.test.WebIntegrationTest;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import au.com.windyroad.hyperstate.core.EntityRepository;
import au.com.windyroad.hyperstate.core.Relationship;
import au.com.windyroad.hyperstate.core.Resolver;
import au.com.windyroad.hyperstate.server.entities.Account;
import au.com.windyroad.hyperstate.server.entities.AccountProperties;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
@ContextConfiguration(classes = {
HyperstateTestConfiguration.class }, loader = SpringApplicationContextLoader.class)
@SpringApplicationConfiguration(classes = { HyperstateTestConfiguration.class })
@WebIntegrationTest({ "server.port=0", "management.port=0" })
public class StepDefs {
public final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
@Autowired
private Resolver resolver;
@Autowired
CloseableHttpAsyncClient httpAsyncClient;
@Autowired
HyperstateTestConfiguration config;
@Autowired
EntityRepository repository;
@Autowired
ApplicationContext context;
private Account currentEntity;
private AccountBuilder currentAccountBuilder;
class AccountBuilder {
private AccountProperties properties;
private String[] expectedActions;
private Map<String, String> expectedLinks = new HashMap<String, String>();
private String path;
public AccountBuilder(AccountProperties accountProperties) {
this.properties = accountProperties;
}
public void setExpectedActions(String[] actions) {
this.expectedActions = actions;
}
public void setPath(String path) {
this.path = path;
}
public void setExpectedLinkAddress(String rel, String path) {
this.expectedLinks.put(rel, path);
}
public CompletableFuture<Account> build(String path)
throws InterruptedException, ExecutionException {
Account entity = new Account(context, repository, path, properties,
"The Account");
String[] actionNames = entity.getActions().stream()
.map(a -> a.getLabel()).collect(Collectors.toList())
.toArray(new String[] {});
assertThat(actionNames, equalTo(expectedActions));
Map<String, String> actualLinks = new HashMap<>();
entity.getLinks().stream().forEach(nav -> {
for (String rel : nav.getNature()) {
actualLinks.put(rel, nav.getLink().getPath());
}
});
assertThat(actualLinks, equalTo(expectedLinks));
return repository.save(entity);
}
}
@Given("^an \"([^\"]*)\" domain entity with$")
public void an_domain_entity_with(String entityName,
Map<String, String> properties) throws Throwable {
assertThat(entityName, equalTo("Account"));
assertThat(properties.keySet(), contains("username", "creationDate"));
currentAccountBuilder = new AccountBuilder(new AccountProperties(
properties.get("username"), properties.get("creationDate")));
}
@Given("^it has no actions$")
public void it_has_no_actions() throws Throwable {
currentAccountBuilder.setExpectedActions(new String[] {});
}
@Given("^it's only link is self link referencing \"([^\"]*)\"$")
public void it_s_only_link_is_self_link_referencing(String path)
throws Throwable {
currentAccountBuilder.setExpectedLinkAddress(Relationship.SELF, path);
}
@Given("^it is exposed at \"([^\"]*)\"$")
public void it_is_exposed_at(String path) throws Throwable {
currentAccountBuilder.build(path);
}
@When("^request is made to \"([^\"]*)\"$")
public void request_is_made_to(String path) throws Throwable {
currentEntity = resolver.get(path, Account.class).get();
}
@Then("^the response will be an? \"([^\"]*)\" domain entity with$")
public void the_response_will_be_an_domain_entity_with(String type,
Map<String, String> properties) throws Throwable {
assertThat(currentEntity.getNatures(), hasItem(type));
assertThat(properties.keySet(), contains("username", "creationDate"));
assertThat(currentEntity.getProperties().getUsername(),
equalTo(properties.get("username")));
assertThat(currentEntity.getProperties().getCreationDate(),
equalTo(properties.get("creationDate")));
}
@Then("^it will have no actions$")
public void it_will_have_no_actions() throws Throwable {
assertThat(currentEntity.getActions(), empty());
}
@Then("^it will have no links apart from \"([^\"]*)\"$")
public void it_will_have_no_links_apart_from(String rel) throws Throwable {
assertThat(currentEntity.getLinks().size(), equalTo(1));
assertThat(currentEntity.getLinks().asList().get(0).getNature(),
hasItemInArray(rel));
}
@Then("^it will have a self link referencing \"([^\"]*)\"$")
public void it_will_have_a_self_link_referencing(String path)
throws Throwable {
assertThat(currentEntity.getLink(Relationship.SELF).getPath(),
endsWith(path));
}
}
|
package de.bettinggame.adapter;
import de.bettinggame.domain.Game;
import de.bettinggame.domain.TournamentLevel;
import de.bettinggame.domain.repository.GameRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.servlet.ModelAndView;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.stream.Collectors;
@Controller
public class GameController implements AbstractController {
@Autowired
private GameRepository gameRepository;
@GetMapping("/game")
public ModelAndView list() {
ModelAndView mav = new ModelAndView("game/all-games");
List<Game> games = gameRepository.findByOrderByStarttime();
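// Group the chronologically ordered games by tournament level; a TreeMap keeps the levels
// in their natural order for the view.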
Map<TournamentLevel, List<Game>> matchesByLevel = games.stream()
.collect(Collectors.groupingBy(Game::getLevel, TreeMap::new,
Collectors.mapping(Function.identity(), Collectors.toList())));
mav.addObject("gamesByLevel", matchesByLevel);
return mav;
}
}
|
package br.com.rcmoutinho.javatohtml.core.tag;
import static br.com.rcmoutinho.javatohtml.core.ElementTestUtil.countSupportedTagToAppend;
import static br.com.rcmoutinho.javatohtml.core.ElementTestUtil.countSupportedTagToPrepend;
import static br.com.rcmoutinho.javatohtml.core.ElementTestUtil.countUnsupportedTagExceptionToAppend;
import static br.com.rcmoutinho.javatohtml.core.ElementTestUtil.countUnsupportedTagExceptionToPrepend;
import static br.com.rcmoutinho.javatohtml.core.ElementTestUtil.testStringToAppend;
import static br.com.rcmoutinho.javatohtml.core.ElementTestUtil.testStringToPrepend;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import br.com.rcmoutinho.javatohtml.core.Element;
import br.com.rcmoutinho.javatohtml.core.ElementUtils;
/**
* Unit test for {@link A}.
*
* @rcmoutinho
* @author rodrigo.moutinho
* @date 15 de nov de 2016
* @email rcm1989@gmail.com
*/
public class ATest {
private Class<? extends Element<?>> implClass;
private List<Class<? extends Element<?>>> supportedElements;
private List<Class<? extends Element<?>>> notSupportedElements;
@Before
public void beforeTesting() {
this.implClass = A.class;
this.supportedElements = new A().getSupportedElements();
this.notSupportedElements = ElementUtils.getAllImplementedElements();
this.notSupportedElements.removeAll(this.supportedElements);
}
@Test
public void checkSupportedElementsToAppend() throws InstantiationException, IllegalAccessException {
int supportedTagCount = countSupportedTagToAppend(this.implClass, this.supportedElements);
assertEquals(supportedTagCount, this.supportedElements.size());
}
@Test
public void checkSupportedElementsToPrepend() throws InstantiationException, IllegalAccessException {
int supportedTagCount = countSupportedTagToPrepend(this.implClass, this.supportedElements);
assertEquals(supportedTagCount, this.supportedElements.size());
}
public void checkSupportedStringToAppend() {
testStringToAppend(this.implClass);
}
public void checkSupportedStringToPrepend() {
testStringToPrepend(this.implClass);
}
@Test
public void checkUnsupportedElementsToAppend() {
int unsupportedTagCount = countUnsupportedTagExceptionToAppend(new A(), this.notSupportedElements);
assertEquals(unsupportedTagCount, this.notSupportedElements.size());
}
@Test
public void checkUnsupportedElementsToPrepend() {
int unsupportedTagCount = countUnsupportedTagExceptionToPrepend(new A(), this.notSupportedElements);
assertEquals(unsupportedTagCount, this.notSupportedElements.size());
}
@Test
public void emptyAttrsGenerateSimpleTag() {
A a = new A().download("").href("").hrefLang("").media("").rel("").target("").type("");
assertEquals("<a></a>", a.toHtml());
}
@Test
public void fullAttrsBuildedInOrder() {
A a = new A().download("#").href("#").hrefLang("#").media("#").rel("#").target("#").type("#");
assertEquals("<a download=\"#\" href=\"#\" hrefLang=\"#\" media=\"#\" rel=\"#\" target=\"#\" type=\"#\"></a>",
a.toHtml());
}
@Test
public void textWithHref() {
A a = new A().href("#anchor").append("testing");
assertEquals("<a href=\"#anchor\">testing</a>", a.toHtml());
}
}
|
package de.cubeisland.engine.i18n;
import de.cubeisland.engine.i18n.language.ClonedLanguage;
import de.cubeisland.engine.i18n.language.Language;
import de.cubeisland.engine.i18n.language.SourceLanguage;
import de.cubeisland.engine.i18n.translation.TranslationLoader;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
public class I18nService
{
private final SourceLanguage sourceLanguage;
private final TranslationLoader tLoader;
private final LanguageLoader lLoader;
private final Locale defaultLocale;
private final Map<Locale, Language> languages = new HashMap<Locale, Language>();
public I18nService(SourceLanguage source, TranslationLoader tLoader, LanguageLoader lLoader, Locale defaultLocale)
{
this.sourceLanguage = source;
this.tLoader = tLoader;
this.lLoader = lLoader;
this.defaultLocale = defaultLocale;
this.languages.put(this.getSourceLanguage().getLocale(), this.getSourceLanguage());
}
public SourceLanguage getSourceLanguage()
{
return sourceLanguage;
}
public TranslationLoader getTranslationLoader()
{
return tLoader;
}
public LanguageLoader getLanguageLoader()
{
return lLoader;
}
public Locale getDefaultLocale()
{
return defaultLocale;
}
public Language getLanguage(Locale locale) throws TranslationLoadingException, DefinitionLoadingException
{
if (locale == null)
{
throw new NullPointerException("The locale must not be null!");
}
Language result = this.languages.get(locale);
if (result == null)
{
Language language = this.lLoader.loadLanguage(this, locale);
if (language == null)
{
return null;
}
result = language;
if (result instanceof ClonedLanguage)
{
Language original = ((ClonedLanguage)result).getOriginal();
this.languages.put(original.getLocale(), original);
}
this.languages.put(locale, result);
}
return result;
}
public Language getDefaultLanguage() throws TranslationLoadingException, DefinitionLoadingException
{
Language language = this.getLanguage(this.defaultLocale);
if (language == null)
{
language = this.getSourceLanguage();
}
return language;
}
public Collection<Language> getLoadedLanguages()
{
return this.languages.values();
}
public String translate(String toTranslate)
{
return this.translate(this.getDefaultLocale(), toTranslate);
}
public String translate(Locale locale, String toTranslate)
{
return this.translateN(locale, toTranslate, 0);
}
public String translateN(String toTranslate, int n)
{
return this.translateN(this.getDefaultLocale(), toTranslate, n);
}
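/**
 * Translates the given message for the given locale, picking the plural form for n.
 * The lookup falls back in this order: the requested locale (and its base locale),
 * the default locale, and finally the source language.
 *
 * @param locale the target locale
 * @param toTranslate the message to translate
 * @param n the amount used to select the plural form (0 selects the singular form)
 * @return the translated message
 */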
public String translateN(Locale locale, String toTranslate, int n)
{
try
{
String translated = this.translate0(locale, toTranslate, n, true);
if (translated == null)
{
// Fallback to Default
translated = this.translate0(this.getDefaultLocale(), toTranslate, n, false);
}
if (translated == null)
{
// Fallback to Source
if (n == 0)
{
translated = this.getSourceLanguage().getTranslation(toTranslate);
}
else
{
translated = this.getSourceLanguage().getTranslation(toTranslate, n);
}
}
return translated;
}
catch (DefinitionLoadingException e)
{
throw new TranslationException(e);
}
catch (TranslationLoadingException e)
{
throw new TranslationException(e);
}
}
private String translate0(Locale locale, String toTranslate, int n, boolean fallbackToBaseLocale) throws DefinitionLoadingException, TranslationLoadingException
{
Language language = this.getLanguage(locale);
if (language != null)
{
if (n == 0)
{
return language.getTranslation(toTranslate);
}
return language.getTranslation(toTranslate, n);
}
else if (fallbackToBaseLocale && !locale.getLanguage().toLowerCase().equals(locale.getCountry().toLowerCase()))
{
// Search BaseLocale
return this.translate0(new Locale(locale.getLanguage(), locale.getLanguage().toUpperCase()), toTranslate, n, false);
}
return null;
}
}
|
package com.angkorteam.fintech.installation;
import java.io.File;
import java.io.FileInputStream;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import javax.sql.DataSource;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.angkorteam.fintech.Constants;
import com.angkorteam.fintech.MifosDataSourceManager;
import com.angkorteam.framework.spring.JdbcTemplate;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
public class ERDData {
private static final int CHAR_WIDTH = 8;
private static final BigDecimal LINE_HEIGHT = new BigDecimal(13.96875d);
private static final BigDecimal TOP_PADDING = new BigDecimal(4d);
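/**
 * Generates a yEd-compatible GraphML entity-relationship diagram of the Mifos tenant schema:
 * one "big entity" node per table (optionally filtered through the "tables" system property)
 * listing its columns and key markers; table relationships are loaded from erd.json.
 */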
@Test
public void initERDData() throws Exception {
File fintechFile = new File(FileUtils.getUserDirectory(), ".xml/fintech.properties.xml");
Properties properties = new Properties();
try (FileInputStream inputStream = FileUtils.openInputStream(fintechFile)) {
properties.loadFromXML(inputStream);
}
BasicDataSource platformDataSource = new BasicDataSource();
platformDataSource.setUsername(properties.getProperty("app.jdbc.username"));
platformDataSource.setPassword(properties.getProperty("app.jdbc.password"));
platformDataSource.setUrl(properties.getProperty("app.jdbc.url"));
platformDataSource.setDriverClassName(properties.getProperty("app.jdbc.driver"));
String mifosUrl = properties.getProperty("mifos.url");
MifosDataSourceManager dataSourceManager = new MifosDataSourceManager();
dataSourceManager.setDelegate(platformDataSource);
dataSourceManager.setMifosUrl(mifosUrl);
dataSourceManager.afterPropertiesSet();
DataSource dataSource = dataSourceManager.getDataSource(Constants.AID);
Map<String, String> tableDictionary = new HashMap<>();
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
List<String> tables = queryForTables(jdbcTemplate);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document document = dBuilder.newDocument();
Element graphmlElement = document.createElement("graphml");
graphmlElement.setAttribute("xmlns", "http://graphml.graphdrawing.org/xmlns");
graphmlElement.setAttribute("xmlns:java", "http:
graphmlElement.setAttribute("xmlns:sys", "http:
graphmlElement.setAttribute("xmlns:x", "http:
graphmlElement.setAttribute("xmlns:xsi", "http:
graphmlElement.setAttribute("xmlns:y", "http:
graphmlElement.setAttribute("xmlns:yed", "http:
graphmlElement.setAttribute("xsi:schemaLocation", "http:
document.appendChild(graphmlElement);
Comment comment = document.createComment("Created by yEd 3.17.1");
graphmlElement.appendChild(comment);
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("attr.name", "Description");
keyElement.setAttribute("attr.type", "string");
keyElement.setAttribute("for", "graph");
keyElement.setAttribute("id", "d0");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("yfiles.type", "portgraphics");
keyElement.setAttribute("for", "port");
keyElement.setAttribute("id", "d1");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("yfiles.type", "portgeometry");
keyElement.setAttribute("for", "port");
keyElement.setAttribute("id", "d2");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("yfiles.type", "portuserdata");
keyElement.setAttribute("for", "port");
keyElement.setAttribute("id", "d3");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("attr.type", "string");
keyElement.setAttribute("for", "node");
keyElement.setAttribute("id", "d4");
keyElement.setAttribute("attr.name", "url");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("attr.type", "string");
keyElement.setAttribute("for", "node");
keyElement.setAttribute("id", "d5");
keyElement.setAttribute("attr.name", "description");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("yfiles.type", "nodegraphics");
keyElement.setAttribute("for", "node");
keyElement.setAttribute("id", "d6");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("yfiles.type", "resources");
keyElement.setAttribute("for", "graphml");
keyElement.setAttribute("id", "d7");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("attr.type", "string");
keyElement.setAttribute("for", "edge");
keyElement.setAttribute("id", "d8");
keyElement.setAttribute("attr.name", "url");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("attr.type", "string");
keyElement.setAttribute("for", "edge");
keyElement.setAttribute("id", "d9");
keyElement.setAttribute("attr.name", "description");
graphmlElement.appendChild(keyElement);
}
{
Element keyElement = document.createElement("key");
keyElement.setAttribute("yfiles.type", "edgegraphics");
keyElement.setAttribute("for", "edge");
keyElement.setAttribute("id", "d10");
graphmlElement.appendChild(keyElement);
}
Element graphElement = document.createElement("graph");
graphElement.setAttribute("edgedefault", "directed");
graphElement.setAttribute("id", "G");
graphmlElement.appendChild(graphElement);
Element dataD0Element = document.createElement("data");
dataD0Element.setAttribute("key", "d0");
graphElement.appendChild(dataD0Element);
List<String> process = new ArrayList<>();
List<String> filters = new ArrayList<>();
boolean hasFilter = false;
if (System.getProperty("tables") != null && !"".equals(System.getProperty("tables"))) {
hasFilter = true;
String temp = System.getProperty("tables");
for (String filter : temp.split(",")) {
filter = StringUtils.trimToEmpty(filter);
if (!"".equals(filter)) {
if (!filters.contains(filter)) {
filters.add(filter);
}
}
}
}
String json = FileUtils.readFileToString(new File("src/main/resources/erd.json"), "UTF-8");
Gson gson = new Gson();
Map<String, List<ErdVO>> erds = gson.fromJson(json, new TypeToken<Map<String, List<ErdVO>>>() {
}.getType());
if (hasFilter) {
for (Entry<String, List<ErdVO>> erd : erds.entrySet()) {
if (filters.contains(erd.getKey())) {
if (!process.contains(erd.getKey())) {
process.add(erd.getKey());
}
for (ErdVO vo : erd.getValue()) {
if (!process.contains(vo.getReferenceTo().getTableName())) {
process.add(vo.getReferenceTo().getTableName());
}
}
} else {
for (ErdVO vo : erd.getValue()) {
if (filters.contains(vo.getReferenceTo().getTableName())) {
if (!process.contains(erd.getKey())) {
process.add(erd.getKey());
}
// if (!process.contains(vo.getReferenceTo().getTableName())) {
// process.add(vo.getReferenceTo().getTableName());
}
}
}
}
} else {
for (Entry<String, List<ErdVO>> erd : erds.entrySet()) {
if (!process.contains(erd.getKey())) {
process.add(erd.getKey());
}
for (ErdVO vo : erd.getValue()) {
if (!process.contains(vo.getReferenceTo().getTableName())) {
process.add(vo.getReferenceTo().getTableName());
}
}
}
}
int index = 0;
for (String table : tables) {
if (hasFilter && !process.contains(table)) {
continue;
}
List<Map<String, Object>> fields = queryForFields(jdbcTemplate, table);
int maxWidth = 0;
maxWidth = Math.max(maxWidth, table.length() * CHAR_WIDTH);
List<String> lines = new LinkedList<>();
for (Map<String, Object> field : fields) {
String name = (String) field.get("field");
String type = (String) field.get("type");
String key = (String) field.get("key");
String commonType = parseType(name, type);
String prefix = " -";
String suffix = "";
if ("PRI".equals(key)) {
prefix = " *";
suffix = "(PK)";
}
if ("UNI".equals(key)) {
prefix = " -";
suffix = "(UQ)";
}
String line = prefix + " " + name + suffix + " :: " + commonType;
maxWidth = Math.max(maxWidth, (line.length() * CHAR_WIDTH) + 20);
lines.add(line);
}
BigDecimal height = LINE_HEIGHT.multiply(new BigDecimal(fields.size() + 1)).add(TOP_PADDING);
tableDictionary.put(table, "n" + index);
Element nodeElement = document.createElement("node");
nodeElement.setAttribute("id", "n" + index);
graphElement.appendChild(nodeElement);
Element dataD6Element = document.createElement("data");
dataD6Element.setAttribute("key", "d6");
nodeElement.appendChild(dataD6Element);
Element genericNodeElement = document.createElement("y:GenericNode");
genericNodeElement.setAttribute("configuration", "com.yworks.entityRelationship.big_entity");
dataD6Element.appendChild(genericNodeElement);
Element geometryElement = document.createElement("y:Geometry");
geometryElement.setAttribute("height", String.valueOf(height.add(new BigDecimal(26))));
geometryElement.setAttribute("width", String.valueOf(maxWidth));
genericNodeElement.appendChild(geometryElement);
Element fillElement = document.createElement("y:Fill");
fillElement.setAttribute("color", "#E8EEF7");
fillElement.setAttribute("color2", "#B7C9E3");
fillElement.setAttribute("transparent", "false");
genericNodeElement.appendChild(fillElement);
Element borderStyleElement = document.createElement("y:BorderStyle");
borderStyleElement.setAttribute("color", "#000000");
borderStyleElement.setAttribute("type", "line");
borderStyleElement.setAttribute("width", "1.0");
genericNodeElement.appendChild(borderStyleElement);
{
Element nodeLabelElement = document.createElement("y:NodeLabel");
nodeLabelElement.setAttribute("alignment", "center");
nodeLabelElement.setAttribute("autoSizePolicy", "content");
nodeLabelElement.setAttribute("backgroundColor", "#B7C9E3");
nodeLabelElement.setAttribute("configuration", "com.yworks.entityRelationship.label.name");
nodeLabelElement.setAttribute("fontFamily", "Dialog");
nodeLabelElement.setAttribute("fontSize", "12");
nodeLabelElement.setAttribute("fontStyle", "plain");
nodeLabelElement.setAttribute("hasLineColor", "false");
nodeLabelElement.setAttribute("horizontalTextPosition", "center");
nodeLabelElement.setAttribute("iconTextGap", "4");
nodeLabelElement.setAttribute("modelName", "internal");
nodeLabelElement.setAttribute("modelPosition", "t");
nodeLabelElement.setAttribute("textColor", "#000000");
nodeLabelElement.setAttribute("verticalTextPosition", "bottom");
nodeLabelElement.setAttribute("visible", "true");
nodeLabelElement.setTextContent(table);
genericNodeElement.appendChild(nodeLabelElement);
}
{
Element nodeLabelElement = document.createElement("y:NodeLabel");
nodeLabelElement.setAttribute("alignment", "left");
nodeLabelElement.setAttribute("autoSizePolicy", "node_size");
nodeLabelElement.setAttribute("borderDistance", "3.0");
nodeLabelElement.setAttribute("configuration", "com.yworks.entityRelationship.label.attributes");
nodeLabelElement.setAttribute("fontFamily", "Dialog");
nodeLabelElement.setAttribute("fontSize", "12");
nodeLabelElement.setAttribute("fontStyle", "plain");
nodeLabelElement.setAttribute("hasBackgroundColor", "false");
nodeLabelElement.setAttribute("hasLineColor", "false");
nodeLabelElement.setAttribute("horizontalTextPosition", "center");
nodeLabelElement.setAttribute("iconTextGap", "4");
nodeLabelElement.setAttribute("modelName", "internal");
nodeLabelElement.setAttribute("modelPosition", "lt");
nodeLabelElement.setAttribute("textColor", "#000000");
nodeLabelElement.setAttribute("verticalTextPosition", "top");
nodeLabelElement.setAttribute("visible", "true");
StringBuffer xml = new StringBuffer();
xml.append("\n");
xml.append("\n");
for (int p = 0; p < lines.size(); p++) {
String line = lines.get(p);
if (p == lines.size() - 1) {
xml.append(line);
} else {
xml.append(line).append("\n");
}
}
nodeLabelElement.setTextContent(xml.toString());
genericNodeElement.appendChild(nodeLabelElement);
}
Element stylePropertiesElement = document.createElement("y:StyleProperties");
genericNodeElement.appendChild(stylePropertiesElement);
Element propertyElement = document.createElement("y:Property");
propertyElement.setAttribute("class", "java.lang.Boolean");
propertyElement.setAttribute("name", "y.view.ShadowNodePainter.SHADOW_PAINTING");
propertyElement.setAttribute("value", "true");
stylePropertiesElement.appendChild(propertyElement);
index++;
}
int edge = 0;
for (Entry<String, List<ErdVO>> erd : erds.entrySet()) {
List<ErdVO> values = erd.getValue();
String sourceTable = erd.getKey();
for (ErdVO value : values) {
String sourceField = "";
if (value.getFieldName().size() == 1) {
sourceField = value.getFieldName().get(0);
} else {
sourceField = "(" + StringUtils.join(value.getFieldName(), "/") + ")";
}
String source = tableDictionary.get(sourceTable);
String targetTable = value.getReferenceTo().getTableName();
String targetField = value.getReferenceTo().getFieldName();
String target = tableDictionary.get(targetTable);
if (hasFilter) {
if (target == null || source == null) {
continue;
}
} else {
if (target == null) {
throw new RuntimeException("could not find " + targetTable + " table in data dictionary");
}
if (source == null) {
throw new RuntimeException("could not find " + sourceTable + " table in data dictionary");
}
}
String linked = sourceTable + "." + sourceField + " <=> " + targetTable + "." + targetField;
Element edgeElement = document.createElement("edge");
edgeElement.setAttribute("id", "e" + edge);
edgeElement.setAttribute("source", source);
edgeElement.setAttribute("target", target);
graphElement.appendChild(edgeElement);
Element dataD10Element = document.createElement("data");
dataD10Element.setAttribute("key", "d10");
edgeElement.appendChild(dataD10Element);
Element polyLineEdgeElement = document.createElement("y:PolyLineEdge");
dataD10Element.appendChild(polyLineEdgeElement);
Element pathElement = document.createElement("y:Path");
pathElement.setAttribute("sx", "0.0");
pathElement.setAttribute("sy", "0.0");
pathElement.setAttribute("tx", "0.0");
pathElement.setAttribute("ty", "0.0");
polyLineEdgeElement.appendChild(pathElement);
Element lineStyleElement = document.createElement("y:LineStyle");
lineStyleElement.setAttribute("color", "#000000");
lineStyleElement.setAttribute("type", "line");
lineStyleElement.setAttribute("width", "1.0");
polyLineEdgeElement.appendChild(lineStyleElement);
Element arrowsElement = document.createElement("y:Arrows");
arrowsElement.setAttribute("source", "none");
arrowsElement.setAttribute("target", "none");
polyLineEdgeElement.appendChild(arrowsElement);
Element edgeLabelElement = document.createElement("y:EdgeLabel");
edgeLabelElement.setAttribute("alignment", "center");
edgeLabelElement.setAttribute("configuration", "AutoFlippingLabel");
edgeLabelElement.setAttribute("distance", "2.0");
edgeLabelElement.setAttribute("fontFamily", "Dialog");
edgeLabelElement.setAttribute("fontSize", "12");
edgeLabelElement.setAttribute("fontStyle", "plain");
edgeLabelElement.setAttribute("hasBackgroundColor", "false");
edgeLabelElement.setAttribute("hasLineColor", "false");
edgeLabelElement.setAttribute("horizontalTextPosition", "center");
edgeLabelElement.setAttribute("iconTextGap", "4");
edgeLabelElement.setAttribute("modelName", "custom");
edgeLabelElement.setAttribute("preferredPlacement", "anywhere");
edgeLabelElement.setAttribute("ratio", "0.5");
edgeLabelElement.setAttribute("textColor", "#000000");
edgeLabelElement.setAttribute("verticalTextPosition", "bottom");
edgeLabelElement.setAttribute("visible", "true");
edgeLabelElement.setTextContent(linked);
polyLineEdgeElement.appendChild(edgeLabelElement);
Element labelModelElement = document.createElement("y:LabelModel");
edgeLabelElement.appendChild(labelModelElement);
Element rotatedDiscreteEdgeLabelModelElement = document.createElement("y:RotatedDiscreteEdgeLabelModel");
rotatedDiscreteEdgeLabelModelElement.setAttribute("angle", "0.0");
rotatedDiscreteEdgeLabelModelElement.setAttribute("autoRotationEnabled", "true");
rotatedDiscreteEdgeLabelModelElement.setAttribute("candidateMask", "18");
rotatedDiscreteEdgeLabelModelElement.setAttribute("distance", "2.0");
rotatedDiscreteEdgeLabelModelElement.setAttribute("positionRelativeToSegment", "false");
labelModelElement.appendChild(rotatedDiscreteEdgeLabelModelElement);
Element modelParameterElement = document.createElement("y:ModelParameter");
edgeLabelElement.appendChild(modelParameterElement);
Element rotatedDiscreteEdgeLabelModelParameterElement = document.createElement("y:RotatedDiscreteEdgeLabelModelParameter");
rotatedDiscreteEdgeLabelModelParameterElement.setAttribute("position", "head");
modelParameterElement.appendChild(rotatedDiscreteEdgeLabelModelParameterElement);
Element preferredPlacementDescriptorElement = document.createElement("y:PreferredPlacementDescriptor");
preferredPlacementDescriptorElement.setAttribute("angle", "0.0");
preferredPlacementDescriptorElement.setAttribute("angleOffsetOnRightSide", "0");
preferredPlacementDescriptorElement.setAttribute("angleReference", "absolute");
preferredPlacementDescriptorElement.setAttribute("angleRotationOnRightSide", "co");
preferredPlacementDescriptorElement.setAttribute("distance", "-1.0");
preferredPlacementDescriptorElement.setAttribute("frozen", "true");
preferredPlacementDescriptorElement.setAttribute("placement", "anywhere");
preferredPlacementDescriptorElement.setAttribute("side", "anywhere");
preferredPlacementDescriptorElement.setAttribute("sideReference", "relative_to_edge_flow");
edgeLabelElement.appendChild(preferredPlacementDescriptorElement);
Element bendStyleElement = document.createElement("y:BendStyle");
bendStyleElement.setAttribute("smoothed", "true");
polyLineEdgeElement.appendChild(bendStyleElement);
edge++;
}
}
Element dataD7Element = document.createElement("data");
dataD7Element.setAttribute("key", "d7");
graphmlElement.appendChild(dataD7Element);
Element resourcesElement = document.createElement("y:Resources");
dataD7Element.appendChild(resourcesElement);
// write the content into xml file
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
DOMSource source = new DOMSource(document);
File outputFile = new File("target/erd.graphml");
StreamResult result = new StreamResult(outputFile);
transformer.transform(source, result);
System.out.println("[INFO] ERD is generated to " + outputFile.getAbsolutePath());
}
public String parseType(String name, String type) {
String commonType = "";
if ("tinyint(1)".equals(type) || "bit(1)".equals(type)) {
commonType = "boolean";
} else if ("int(11) unsigned".equals(type) || "smallint(4)".equals(type) || "int(4)".equals(type) || "int(2)".equals(type) || "int(3)".equals(type) || "int(15)".equals(type) || "tinyint(3)".equals(type) || "smallint(3)".equals(type) || "bigint(10)".equals(type) || "smallint(11)".equals(type) || "tinyint(2)".equals(type) || "int(20)".equals(type) || "smallint(5)".equals(type) || "smallint(2)".equals(type) || "int(10)".equals(type) || "bigint(20) unsigned".equals(type) || "tinyint(4)".equals(type) || "int(11)".equals(type) || "bigint(20)".equals(type) || "int(5)".equals(type)) {
commonType = "number";
} else if ("smallint(1)".equals(type) || "int(1)".equals(type)) {
if (name.contains("allow") || name.startsWith("is_") || name.startsWith("can_")) {
commonType = "boolean";
} else {
commonType = "number";
}
} else if ("timestamp".equals(type) || "datetime".equals(type)) {
commonType = "datetime";
} else if ("decimal(10,2)".equals(type) || "decimal(20,2)".equals(type) || "decimal(19,5)".equals(type) || "decimal(5,2)".equals(type) || "decimal(20,6)".equals(type) || "decimal(10,8) unsigned".equals(type) || "decimal(19,6)".equals(type)) {
commonType = "decimal";
} else if ("date".equals(type)) {
commonType = "date";
} else if ("time".equals(type)) {
commonType = "time";
} else if ("blob".equals(type) || "longtext".equals(type)) {
commonType = "binary";
} else if ("varchar(1)".equals(type) || "varchar(4096)".equals(type) || "varchar(128)".equals(type) || "varchar(256)".equals(type) || "varchar(11)".equals(type) || "varchar(2)".equals(type) || "varchar(4000)".equals(type) || "varchar(32)".equals(type) || "varchar(102)".equals(type) || "varchar(4)".equals(type) || "varchar(70)".equals(type) || "varchar(2000)".equals(type) || "varchar(25)".equals(type) || "smallint(6)".equals(type) || "varchar(150)".equals(type) || "varchar(300)".equals(type) || "varchar(3)".equals(type) || "varchar(250)".equals(type) || "varchar(200)".equals(type) || "text".equals(type) || "varchar(1000)".equals(type) || "varchar(10)".equals(type) || "varchar(45)".equals(type) || "varchar(50)".equals(type) || "varchar(20)".equals(type) || "varchar(255)".equals(type) || "varchar(100)".equals(type) || "varchar(500)".equals(type)) {
commonType = "text";
} else {
commonType = type;
}
return commonType;
}
public List<Map<String, Object>> queryForFields(JdbcTemplate jdbcTemplate, String table) {
return jdbcTemplate.queryForList("DESC " + table);
}
public List<String> queryForTables(JdbcTemplate jdbcTemplate) {
return jdbcTemplate.queryForList("show tables", String.class);
}
}
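// Usage sketch (hedged: the class's entry point is not shown in this excerpt, so how it is
// launched is an assumption). Grounded in the code above: the optional "tables" system property
// takes a comma-separated list and limits the diagram to those tables plus tables directly
// related to them through erd.json; erd.json is read from src/main/resources, and the GraphML
// output is written to target/erd.graphml, which opens directly in yEd.
//
//   java -Dtables=customer,transaction -cp ... <generator-main-class>   // <generator-main-class> is hypothetical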
|
package de.rwth.idsg.velocity.domain;
import de.rwth.idsg.velocity.domain.login.User;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.hibernate.annotations.Type;
import org.joda.time.LocalDate;
import javax.persistence.*;
import java.io.Serializable;
import java.util.Set;
@Entity
@DiscriminatorValue("customer")
@Table(name="T_CUSTOMER",
indexes = {
@Index(columnList="address_id", unique = true) })
@EqualsAndHashCode(of = {"customerId"}, callSuper = false)
@ToString(includeFieldNames = true)
public class Customer extends User implements Serializable {
@Column(name = "customer_id")
@Getter @Setter
private String customerId;
@Column(name = "card_id")
@Getter @Setter
private String cardId;
@Column(name = "first_name")
@Getter @Setter
private String firstname;
@Column(name = "last_name")
@Getter @Setter
private String lastname;
@OneToOne(cascade = CascadeType.ALL)
@JoinColumn(name = "address_id")
@Getter @Setter
private Address address;
@Type(type = "org.jadira.usertype.dateandtime.joda.PersistentLocalDate")
@Column(name = "birthday")
@Getter @Setter
private LocalDate birthday;
@Column(name = "mail_address")
@Getter @Setter
private String mailAddress;
@Column(name = "is_activated")
@Getter @Setter
private Boolean isActivated;
@Column(name = "in_transaction")
@Getter @Setter
private Boolean inTransaction;
@Column(name = "card_pin")
@Getter @Setter
private Integer cardPin;
@OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER, mappedBy = "customer", orphanRemoval = true)
@Getter @Setter
private Set<Transaction> transactions;
@PrePersist
public void prePersist() {
if (inTransaction == null) {
inTransaction = false;
}
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.asascience.ncsos.getCaps;
import com.asascience.ncsos.getcaps.SOSGetCapabilitiesRequestHandler;
import com.asascience.ncsos.outputformatter.SOSOutputFormatter;
import com.asascience.ncsos.service.SOSParser;
import com.asascience.ncsos.util.XMLDomUtils;
import java.io.*;
import java.util.Formatter;
import java.util.HashMap;
import static org.junit.Assert.*;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.w3c.dom.Document;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.ft.FeatureDataset;
import ucar.nc2.ft.FeatureDatasetFactoryManager;
/**
*
* @author abird
*/
public class SOSgetCapsTest {
// base location of resources
private static String baseLocalDir = null;
private static String baseTomcatDir = null;
private static String exampleOutputDir = null;
// work thredds
private static String catalinaThredds = "work/Catalina/localhost/thredds";
//imeds data
private static String imeds1 = "resources/datasets/sura/Hsig_UNDKennedy_IKE_VIMS_3D_WAVEONLY.nc";
private static String imeds5 = "resources/datasets/sura/hwm_TCOON_NAVD88.nc";
private static String imeds8 = "resources/datasets/sura/watlev_CRMS.nc";
private static String imeds13 = "resources/datasets/sura/watlev_IKE.nc";
private static String imeds15 = "resources/datasets/sura/watlev_NOAA_NAVD_PRE.nc";
//timeseries
private static String tsIncompleteMultiDimensionalMultipleStations = "resources/datasets/timeSeries-Incomplete-MultiDimensional-MultipleStations-H.2.2/timeSeries-Incomplete-MultiDimensional-MultipleStations-H.2.2.nc";
private static String tsOrthogonalMultidimenstionalMultipleStations = "resources/datasets/timeSeries-Orthogonal-Multidimenstional-MultipleStations-H.2.1/timeSeries-Orthogonal-Multidimenstional-MultipleStations-H.2.1.nc";
//ragged Array - timeseries profile
private static String RaggedSingleConventions = "resources/datasets/timeSeriesProfile-Ragged-SingleStation-H.5.3/timeSeriesProfile-Ragged-SingleStation-H.5.3.nc";
private static String RaggedMultiConventions = "resources/datasets/timeSeriesProfile-Ragged-MultipeStations-H.5.3/timeSeriesProfile-Ragged-MultipeStations-H.5.3.nc";
private static String OrthogonalMultidimensionalMultiStations = "resources/datasets/timeSeriesProfile-Orthogonal-Multidimensional-MultipeStations-H.5.1/timeSeriesProfile-Orthogonal-Multidimensional-MultipeStations-H.5.1.nc";
private static String MultiDimensionalSingleStations = "resources/datasets/timeSeriesProfile-Multidimensional-SingleStation-H.5.2/timeSeriesProfile-Multidimensional-SingleStation-H.5.2.nc";
private static String MultiDimensionalMultiStations = "resources/datasets/timeSeriesProfile-Multidimensional-MultipeStations-H.5.1/timeSeriesProfile-Multidimensional-MultipeStations-H.5.1.nc";
//point
private static String cfPoint = "resources/datasets/point-H.1/point-H.1.nc";
// profile
private static String ContiguousRaggedMultipleProfiles = "resources/datasets/profile-Contiguous-Ragged-MultipleProfiles-H.3.4/profile-Contiguous-Ragged-MultipleProfiles-H.3.4.nc";
private static String IncompleteMultiDimensionalMultipleProfiles = "resources/datasets/profile-Incomplete-MultiDimensional-MultipleProfiles-H.3.2/profile-Incomplete-MultiDimensional-MultipleProfiles-H.3.2.nc";
private static String IndexedRaggedMultipleProfiles = "resources/datasets/profile-Indexed-Ragged-MultipleProfiles-H.3.5/profile-Indexed-Ragged-MultipleProfiles-H.3.5.nc";
private static String OrthogonalMultiDimensionalMultipleProfiles = "resources/datasets/profile-Orthogonal-MultiDimensional-MultipleProfiles-H.3.1/profile-Orthogonal-MultiDimensional-MultipleProfiles-H.3.1.nc";
private static String OrthogonalSingleDimensionalSingleProfile = "resources/datasets/profile-Orthogonal-SingleDimensional-SingleProfile-H.3.3/profile-Orthogonal-SingleDimensional-SingleProfile-H.3.3.nc";
// public static String base = "C:/Users/scowan/Projects/maven/ncSOS/src/test/java/com/asascience/ncSOS/getCaps/output/";
private static String base = null;
private static String baseRequest = "request=GetCapabilities&version=1.0.0&service=sos";
// trajectories
private static String TCRMTH43 = "resources/datasets/trajectory-Contiguous-Ragged-MultipleTrajectories-H.4.3/trajectory-Contiguous-Ragged-MultipleTrajectories-H.4.3.nc";
// section
private static String SectionMultidimensionalMultiTrajectories = "resources/datasets/trajectoryProfile-Multidimensional-MultipleTrajectories-H.6.1/trajectoryProfile-Multidimensional-MultipleTrajectories-H.6.1.nc";
@BeforeClass
public static void SetupEnviron() throws FileNotFoundException {
// not really a test, just used to set up the various string values
if (base != null && baseLocalDir != null && baseTomcatDir != null && exampleOutputDir != null) {
// exit early if the environ is already set
return;
}
String container = "getCaps";
InputStream templateInputStream = null;
try {
File configFile = new File("resources/tests_config.xml");
templateInputStream = new FileInputStream(configFile);
Document configDoc = XMLDomUtils.getTemplateDom(templateInputStream);
// read from the config file
base = XMLDomUtils.getNodeValue(configDoc, container, "outputBase");
baseLocalDir = XMLDomUtils.getNodeValue(configDoc, container, "projectDir");
baseTomcatDir = XMLDomUtils.getNodeValue(configDoc, container, "tomcatLocation");
container = "examples";
exampleOutputDir = XMLDomUtils.getNodeValue(configDoc, container, "outputDir");
} finally {
if (templateInputStream != null) {
try {
templateInputStream.close();
} catch (IOException e) {
// ignore, closing..
}
}
}
File file = new File(base);
file.mkdirs();
}
private void writeOutput(HashMap<String, Object> outMap, Writer write) {
SOSOutputFormatter output = (SOSOutputFormatter)outMap.get("outputHandler");
assertNotNull("got null output", output);
output.writeOutput(write);
}
private static void fileWriter(String base, String fileName, Writer write) throws IOException {
File file = new File(base + fileName);
Writer output = new BufferedWriter(new FileWriter(file));
output.write(write.toString());
output.close();
System.out.println("Your file has been written");
}
private static String getCurrentMethod() {
final StackTraceElement[] ste = Thread.currentThread().getStackTrace();
for (int i=0; i<ste.length; i++) {
if (ste[i].getMethodName().contains(("test")))
return ste[i].getMethodName();
}
return "could not find test name";
}
@Test
public void testCanIdentifyTimeSeriesCDM() throws IOException {
NetcdfDataset dataset = NetcdfDataset.openDataset(imeds8);
SOSGetCapabilitiesRequestHandler sosget = new SOSGetCapabilitiesRequestHandler(dataset, "threddsURI-IMEDS8");
assertEquals(FeatureType.STATION, sosget.getDatasetFeatureType());
//station
}
@Test
public void testCanIdentifyTrajectoryCDM() throws IOException {
NetcdfDataset dataset = NetcdfDataset.openDataset(baseLocalDir + TCRMTH43);
SOSGetCapabilitiesRequestHandler sosget = new SOSGetCapabilitiesRequestHandler(dataset, "threddsURI");
assertEquals(FeatureType.TRAJECTORY, sosget.getDatasetFeatureType());
//trajectory
}
@Test
public void testCanProcessTrajectory() throws IOException {
NetcdfDataset dataset = NetcdfDataset.openDataset(baseLocalDir + TCRMTH43);
assertNotNull(dataset);
SOSParser md = new SOSParser();
Writer write = new CharArrayWriter();
writeOutput(md.enhance(dataset, baseRequest, baseLocalDir + TCRMTH43),write);
write.flush();
write.close();
assertFalse(write.toString().contains("Exception"));
String fileName = "trajectory-Contiguous-Ragged-MultipleTrajectories-H.4.3.xml";
fileWriter(base, fileName, write);
// write as an example
fileWriter(exampleOutputDir, "GetCapabilities-Trajectory.xml", write);
assertTrue(write.toString().contains("<ObservationOffering gml:id="));
//traj
}
// @Test
// public void testCanIdentifyTimeSeriesCDM() throws IOException {
// NetcdfDataset dataset = NetcdfDataset.openDataset(imeds8);
// SOSGetCapabilitiesRequestHandler sosget = new SOSGetCapabilitiesRequestHandler(dataset, "threddsURI-IMEDS8");
// assertEquals(FeatureType.STATION, sosget.getDatasetFeatureType());
// //station
// @Test
// public void testCanIdentifyTrajectoryCDM() throws IOException {
// NetcdfDataset dataset = NetcdfDataset.openDataset(baseLocalDir + TCRMTH43);
// SOSGetCapabilitiesRequestHandler sosget = new SOSGetCapabilitiesRequestHandler(dataset, "threddsURI");
// assertEquals(FeatureType.TRAJECTORY, sosget.getDatasetFeatureType());
// //trajectory
// @Test
// public void testCanProcessTrajectory() throws IOException {
// NetcdfDataset dataset = NetcdfDataset.openDataset(baseLocalDir + TCRMTH43);
// assertNotNull(dataset);
// SOSParser md = new SOSParser();
// Writer write = new CharArrayWriter();
// writeOutput(md.enhance(dataset, baseRequest, baseLocalDir + TCRMTH43),write);
// write.flush();
// write.close();
// assertFalse(write.toString().contains("Exception"));
// String fileName = "trajectory-Contiguous-Ragged-MultipleTrajectories-H.4.3.xml";
// fileWriter(base, fileName, write);
// assertTrue(write.toString().contains("<ObservationOffering gml:id="));
// //traj
@Test
public void testTrajLatLongCorrect() throws IOException {
NetcdfDataset dataset = NetcdfDataset.openDataset(baseLocalDir + TCRMTH43);
SOSParser md = new SOSParser();
Writer write = new CharArrayWriter();
writeOutput(md.enhance(dataset, baseRequest, baseLocalDir + TCRMTH43),write);
write.flush();
write.close();
String fileName = "trajectory-Contiguous-Ragged-MultipleTrajectories-H.4.3.xml";
fileWriter(base, fileName, write);
assertFalse(write.toString().contains("Exception"));
assertTrue(write.toString().contains("<gml:lowerCorner>3.024412155151367 -68.12552642822266</gml:lowerCorner>"));
assertTrue(write.toString().contains("<gml:upperCorner>43.00862503051758 -1.6318601369857788</gml:upperCorner>"));
//traj
}
// @Test
// public void testTrajStartEndTimeCorrect() throws IOException {
// NetcdfDataset dataset = NetcdfDataset.openDataset(baseLocalDir + TCRMTH43);
// SOSParser md = new SOSParser();
// Writer write = new CharArrayWriter();
// writeOutput(md.enhance(dataset, baseRequest, baseLocalDir + TCRMTH43),write);
// write.flush();
// write.close();
// assertFalse(write.toString().contains("Exception"));
// String fileName = "trajectory-Contiguous-Ragged-MultipleTrajectories-H.4.3.xml";
// fileWriter(base, fileName, write);
// assertTrue(write.toString().contains("<gml:beginPosition>1990-01-01T00:00:00.000Z</gml:beginPosition>"));
// assertTrue(write.toString().contains("<gml:endPosition>1990-01-01T23:00:00.000Z</gml:endPosition>"));
// //traj
// // caching doesn't quite work just yet
// @Ignore
// @Test
// public void testCacheReturnsTrueFileDoesNOTExist() throws IOException {
// fail("removed - caching temporarily unavailable");
// NetcdfDataset dataset = NetcdfDataset.openDataset(imeds13);
// SOSParser md = new SOSParser();
// Writer write = new CharArrayWriter();
// File f = new File(baseTomcatDir + catalinaThredds + "/xmlFile.xml");
// f.delete();
// writeOutput(md.enhance(dataset, baseRequest + "&useCache=true", imeds13, baseTomcatDir + catalinaThredds),write);
//// assertEquals("true", md.getCacheValue());
// f = new File(baseTomcatDir + catalinaThredds + "/watlev_IKE.xml");
// assertTrue("file watlev_IKE.xml does not exist - testCacheReturnsTrueFileDoesNotExist", f.exists());
// f.delete();
// //station
// @Ignore
// @Test
// public void testCacheReturnsTrueFileDoesExist() throws IOException {
// fail("removed - caching temporarily unavailable");
// NetcdfDataset dataset = NetcdfDataset.openDataset(imeds13);
// SOSParser md = new SOSParser();
// Writer write = new CharArrayWriter();
// writeOutput(md.enhance(dataset, baseRequest + "&useCache=true", imeds13, baseTomcatDir + catalinaThredds),write);
// File f = new File(baseTomcatDir + catalinaThredds + "/watlev_IKE.xml");
// assertTrue("file watlev_IKE.xml does not exist - testCacheReturnsTrueFileDoesExist", f.exists());
// writeOutput(md.enhance(dataset, baseRequest + "&useCache=true", imeds13, baseTomcatDir + catalinaThredds),write);
// assertTrue("file watlev_IKE.xml does not exist (test 2) - testCacheReturnsTrueFileDoesExist", f.exists());
// f.delete();
// @Ignore
// @Test
// public void testCanGetCorrectDataSetFileName() throws IOException {
// fail("removed - caching temporarily unavailable");
// NetcdfDataset dataset = NetcdfDataset.openDataset(imeds13);
// SOSParser md = new SOSParser();
//// assertEquals("/watlev_IKE.xml", md.getCacheXmlFileName(imeds13));
// @Ignore
// @Test
// public void testAddAdditionalParamForCachingDataTRUE() throws IOException {
// fail("removed - caching temporarily unavailable");
// NetcdfDataset dataset = NetcdfDataset.openDataset(imeds13);
// SOSParser md = new SOSParser();
// Writer write = new CharArrayWriter();
// String fileName = baseTomcatDir + catalinaThredds + "/watlev_IKE.xml";
// //check file exists
// File f = new File(fileName);
// f.delete();
// writeOutput(md.enhance(dataset, baseRequest + "&useCache=true", imeds13, baseTomcatDir + catalinaThredds),write);
//// HashMap<String, Object> outMap = md.enhance(dataset, null, imeds13, baseTomcatDir + catalinaThredds);
// write.flush();
// write.close();
// if (write.toString().contains("Exception")) {
// System.out.println("have exception - testAddAdditionalParamForCachingDataTRUE");
// assertFalse(write.toString().contains("Exception"));
// f = new File(fileName);
//// f.createNewFile();
// if (!f.exists()) {
// System.out.println("file does not exist - testAddAdditionalParamForCachingDataTRUE");
// assertTrue(f.exists());
// @Test
// public void testLargeDatasets() throws IOException {
//// fail("removed - test is expensive");
// NetcdfDataset dataset = NetcdfDataset.openDataset(imeds13);
// SOSParser md = new SOSParser();
// Writer write = new CharArrayWriter();
// long start = System.currentTimeMillis();
// writeOutput(md.enhance(dataset, baseRequest, imeds13),write);
// long elapsedTimeMillis = System.currentTimeMillis() - start;
// float elapsedTimeSec = elapsedTimeMillis / 1000F;
// System.out.println("Time To Complete Mil: " + elapsedTimeMillis + ": SEC: " + elapsedTimeSec);
// write.flush();
// write.close();
// assertFalse(write.toString().contains("Exception"));
// String fileName = "largeDataSetIKE.xml";
// fileWriter(base, fileName, write);
// assertTrue(write.toString().contains("<ObservationOffering gml:id="));
}
|
package de.skuzzle.jeve;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import de.skuzzle.jeve.ExceptionCallback;
import de.skuzzle.jeve.Listeners;
/**
* Implementation of basic {@link EventProvider} methods. All implementations are
* thread-safe.
*
 * <p>Note on thread safety: all publicly accessible methods are thread-safe;
 * internal and protected helper methods are not.</p>
*
* @author Simon Taddiken
* @since 1.0.0
*/
public abstract class AbstractEventProvider implements EventProvider {
/**
* Copies a list of listeners into a new list, casting each element to the target
* listener type.
*
* @param <T> Type of listeners in the result.
* @param listeners List to copy.
* @param listenerClass Target listener type.
* @return A new typed list of listeners.
*/
private static <T extends Listener> List<T> copyList(List<Object> listeners,
Class<T> listenerClass) {
final List<T> result = new ArrayList<>(listeners.size());
listeners.forEach(l -> result.add(listenerClass.cast(l)));
return result;
}
/** Holds the listener classes mapped to listener instances */
protected final Map<Class<? extends Listener>, List<Object>> listeners;
/** Default callback to handle event handler exceptions */
protected ExceptionCallback exceptionHandler;
/**
* Creates a new {@link AbstractEventProvider}.
*/
public AbstractEventProvider() {
this.listeners = new HashMap<>();
this.exceptionHandler = DEFAULT_HANDLER;
}
@Override
public <T extends Listener> Listeners<T> getListeners(Class<T> listenerClass) {
if (listenerClass == null) {
throw new IllegalArgumentException("listenerClass");
}
synchronized (this.listeners) {
final List<Object> listeners = this.listeners.get(listenerClass);
if (listeners == null) {
return Listeners.empty(this, listenerClass);
}
return new Listeners<T>(
copyList(listeners, listenerClass), listenerClass, this);
}
}
@Override
public <T extends Listener> void clearAllListeners(Class<T> listenerClass) {
synchronized (this.listeners) {
final List<Object> listeners = this.listeners.get(listenerClass);
if (listeners != null) {
final Iterator<Object> it = listeners.iterator();
while (it.hasNext()) {
this.removeInternal(listenerClass, it);
}
this.listeners.remove(listenerClass);
}
}
}
@Override
public void clearAllListeners() {
synchronized (this.listeners) {
this.listeners.forEach((k, v) -> {
final Iterator<Object> it = v.iterator();
while (it.hasNext()) {
removeInternal(k, it);
}
});
this.listeners.clear();
}
}
/**
* Internal method for removing a single listener and notifying it about the
 * removal. Prior to calling this method, the passed iterator's
 * {@link Iterator#hasNext() hasNext} method must return <code>true</code>.
*
* @param <T> Type of the listener to remove
* @param listenerClass The class of the listener to remove.
* @param it Iterator which provides the next listener to remove.
*/
protected <T extends Listener> void removeInternal(Class<T> listenerClass,
Iterator<Object> it) {
final Object next = it.next();
final T listener = listenerClass.cast(next);
it.remove();
try {
final RegistrationEvent e = new RegistrationEvent(this, listenerClass);
listener.onUnregister(e);
} catch (Exception e) {
this.handleException(this.exceptionHandler, e, listener, null);
}
}
@Override
public <T extends Listener> void addListener(Class<T> listenerClass, T listener) {
if (listenerClass == null) {
throw new IllegalArgumentException("listenerClass is null");
} else if (listener == null) {
throw new IllegalArgumentException("listener is null");
}
synchronized (this.listeners) {
List<Object> listeners = this.listeners.get(listenerClass);
if (listeners == null) {
listeners = new LinkedList<>();
this.listeners.put(listenerClass, listeners);
}
listeners.add(listener);
}
try {
final RegistrationEvent e = new RegistrationEvent(this, listenerClass);
listener.onRegister(e);
} catch (Exception e) {
this.handleException(this.exceptionHandler, e, listener, null);
}
}
@Override
public <T extends Listener> void removeListener(Class<T> listenerClass,
T listener) {
if (listenerClass == null || listener == null) {
return;
}
synchronized (this.listeners) {
final List<Object> listeners = this.listeners.get(listenerClass);
if (listeners == null) {
return;
}
listeners.remove(listener);
if (listeners.isEmpty()) {
this.listeners.remove(listenerClass);
}
}
try {
final RegistrationEvent e = new RegistrationEvent(this, listenerClass);
listener.onUnregister(e);
} catch (Exception e) {
this.handleException(this.exceptionHandler, e, listener, null);
}
}
@Override
public <L extends Listener, E extends Event<?>> void dispatch(
Class<L> listenerClass, E event, BiConsumer<L, E> bc) {
this.dispatch(listenerClass, event, bc, this.exceptionHandler);
}
@Override
public <L extends Listener, E extends Event<?>> void dispatch(
Class<L> listenerClass, E event, BiConsumer<L, E> bc, ExceptionCallback ec) {
this.checkDispatchArgs(listenerClass, event, bc, ec);
if (this.canDispatch()) {
this.notifyListeners(listenerClass, event, bc, ec);
}
}
protected <L extends Listener, E extends Event<?>> void checkDispatchArgs(
Class<L> listenerClass, E event, BiConsumer<L, E> bc, ExceptionCallback ec) {
if (listenerClass == null) {
throw new IllegalArgumentException("listenerClass is null");
} else if (event == null) {
throw new IllegalArgumentException("event is null");
} else if (bc == null) {
throw new IllegalArgumentException("bc is null");
} else if (ec == null) {
throw new IllegalArgumentException("ec is null");
}
}
@Override
public synchronized void setExceptionCallback(ExceptionCallback callBack) {
if (callBack == null) {
callBack = DEFAULT_HANDLER;
}
this.exceptionHandler = callBack;
}
/**
* Notifies all listeners registered for the provided class with the provided event.
* This method is failure tolerant and will continue notifying listeners even if one
* of them threw an exception. Exceptions are passed to the provided
* {@link ExceptionCallback}.
*
* <p>This method does not check whether this provider is ready for dispatching and
* might thus throw an exception when trying to dispatch an event while the provider
* is not ready.</p>
*
* @param <L> Type of the listeners which will be notified.
* @param <E> Type of the event which will be passed to a listener.
* @param listenerClass The class of listeners that should be notified.
* @param event The event to pass to each listener.
* @param bc The method of the listener to call.
* @param ec The callback which gets notified about exceptions.
 * @return <code>true</code> if all listeners have been notified successfully,
 * <code>false</code> if at least one listener threw an exception.
*/
protected <L extends Listener, E extends Event<?>> boolean notifyListeners(
Class<L> listenerClass, E event, BiConsumer<L, E> bc, ExceptionCallback ec) {
boolean result = true;
// HINT: getListeners is thread safe
final Listeners<L> listeners = this.getListeners(listenerClass);
for (L listener : listeners) {
try {
if (event.isHandled()) {
return result;
}
bc.accept(listener, event);
if (listener.workDone(this)) {
this.removeListener(listenerClass, listener);
}
} catch (RuntimeException e) {
result = false;
this.handleException(ec, e, listener, event);
}
}
return result;
}
/**
* Internal method for notifying the {@link ExceptionCallback}. This method swallows
* every error raised by the passed exception callback.
*
* @param ec The ExceptionCallback to handle the exception.
* @param e The occurred exception.
* @param listener The listener which caused the exception.
* @param ev The event which is currently being dispatched.
*/
protected void handleException(ExceptionCallback ec, Exception e, Listener listener,
Event<?> ev) {
try {
ec.exception(e, listener, ev);
} catch (Exception ignore) {
ignore.printStackTrace();
// where is your god now?
}
}
@Override
public void close() {
this.clearAllListeners();
}
@Override
public String toString() {
return this.listeners.toString();
}
}
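// Minimal usage sketch for the provider above. Assumptions: "UserListener" and "UserEvent" are
// hypothetical types, and "provider" is some concrete EventProvider implementation built on this
// class (AbstractEventProvider itself is abstract and relies on canDispatch() from a subclass).
//
//   provider.addListener(UserListener.class, listener);        // fires listener.onRegister(...)
//   provider.dispatch(UserListener.class, new UserEvent(src),
//           UserListener::userAdded);                           // notifies every registered UserListener
//   provider.removeListener(UserListener.class, listener);      // fires listener.onUnregister(...)
//   provider.close();                                           // clears all remaining listeners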
|
package edu.neu.ccs.pyramid.experiment;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.DataSetType;
import edu.neu.ccs.pyramid.dataset.MultiLabel;
import edu.neu.ccs.pyramid.dataset.MultiLabelClfDataSet;
import edu.neu.ccs.pyramid.dataset.TRECFormat;
import edu.neu.ccs.pyramid.eval.Accuracy;
import edu.neu.ccs.pyramid.eval.Overlap;
import edu.neu.ccs.pyramid.multilabel_classification.bmm_variant.BMMClassifier;
import edu.neu.ccs.pyramid.multilabel_classification.bmm_variant.BMMInitializer;
import edu.neu.ccs.pyramid.multilabel_classification.bmm_variant.BMMOptimizer;
import org.apache.commons.io.FileUtils;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Paths;
public class Exp211 {
private static BMMOptimizer getOptimizer(Config config, BMMClassifier bmmClassifier, MultiLabelClfDataSet trainSet){
BMMOptimizer optimizer = new BMMOptimizer(bmmClassifier,trainSet);
optimizer.setInverseTemperature(config.getDouble("em.inverseTemperature"));
optimizer.setMeanRegVariance(config.getDouble("lr.meanRegVariance"));
optimizer.setMeanRegularization(config.getBoolean("lr.meanRegularization"));
optimizer.setPriorVarianceMultiClass(config.getDouble("lr.multiClassVariance"));
optimizer.setPriorVarianceBinary(config.getDouble("lr.binaryVariance"));
optimizer.setNumIterationsBinary(config.getInt("boost.numIterationsBinary"));
optimizer.setNumIterationsMultiClass(config.getInt("boost.numIterationsMultiClass"));
optimizer.setShrinkageBinary(config.getDouble("boost.shrinkageBinary"));
optimizer.setShrinkageMultiClass(config.getDouble("boost.shrinkageMultiClass"));
return optimizer;
}
public static BMMClassifier loadBMM(Config config, MultiLabelClfDataSet trainSet, MultiLabelClfDataSet testSet) throws Exception{
int numClusters = config.getInt("mixture.numClusters");
String output = config.getString("output");
String modelName = config.getString("modelName");
BMMClassifier bmmClassifier;
if (config.getBoolean("train.warmStart")) {
bmmClassifier = BMMClassifier.deserialize(new File(output, modelName));
} else {
bmmClassifier = BMMClassifier.getBuilder()
.setNumClasses(trainSet.getNumClasses())
.setNumFeatures(trainSet.getNumFeatures())
.setNumClusters(numClusters)
.setMultiClassClassifierType(config.getString("mixture.multiClassClassifierType"))
.setBinaryClassifierType(config.getString("mixture.binaryClassifierType"))
.build();
bmmClassifier.setPredictMode(config.getString("predict.mode"));
bmmClassifier.setNumSample(config.getInt("predict.sampling.numSamples"));
bmmClassifier.setAllowEmpty(config.getBoolean("predict.allowEmpty"));
MultiLabel[] trainPredict;
MultiLabel[] testPredict;
if (config.getBoolean("train.initialize")) {
System.out.println("start initialization");
BMMOptimizer optimizer = getOptimizer(config,bmmClassifier,trainSet);
BMMInitializer.initialize(bmmClassifier, trainSet, optimizer);
System.out.println("finish initialization");
}
trainPredict = bmmClassifier.predict(trainSet);
testPredict = bmmClassifier.predict(testSet);
System.out.print("trainAcc : " + Accuracy.accuracy(trainSet.getMultiLabels(), trainPredict) + "\t");
System.out.print("trainOver: " + Overlap.overlap(trainSet.getMultiLabels(), trainPredict) + "\t");
System.out.print("testACC : " + Accuracy.accuracy(testSet.getMultiLabels(), testPredict) + "\t");
System.out.println("testOver : " + Overlap.overlap(testSet.getMultiLabels(), testPredict) + "\t");
}
return bmmClassifier;
}
public static void main(String[] args) throws Exception {
if (args.length != 1) {
throw new IllegalArgumentException("Please specify a properties file.");
}
Config config = new Config(args[0]);
System.out.println(config);
String matrixType = config.getString("input.matrixType");
MultiLabelClfDataSet trainSet;
MultiLabelClfDataSet testSet;
switch (matrixType){
case "sparse_random":
trainSet= TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"),
DataSetType.ML_CLF_SPARSE, true);
testSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"),
DataSetType.ML_CLF_SPARSE, true);
break;
case "sparse_sequential":
trainSet= TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"),
DataSetType.ML_CLF_SEQ_SPARSE, true);
testSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"),
DataSetType.ML_CLF_SEQ_SPARSE, true);
break;
default:
throw new IllegalArgumentException("unknown type");
}
int numIterations = config.getInt("em.numIterations");
String output = config.getString("output");
String modelName = config.getString("modelName");
File path = Paths.get(output, modelName).toFile();
path.mkdirs();
// FileUtils.cleanDirectory(path);
BMMClassifier bmmClassifier = loadBMM(config,trainSet,testSet);
BMMOptimizer optimizer = getOptimizer(config,bmmClassifier,trainSet);
for (int i=1;i<=numIterations;i++){
System.out.print("iter : "+i + "\t");
optimizer.iterate();
MultiLabel[] trainPredict;
MultiLabel[] testPredict;
trainPredict = bmmClassifier.predict(trainSet);
testPredict = bmmClassifier.predict(testSet);
System.out.print("objective: "+optimizer.getTerminator().getLastValue() + "\t");
System.out.print("trainAcc : "+ Accuracy.accuracy(trainSet.getMultiLabels(),trainPredict)+ "\t");
System.out.print("trainOver: "+ Overlap.overlap(trainSet.getMultiLabels(), trainPredict)+ "\t");
System.out.print("testAcc : "+ Accuracy.accuracy(testSet.getMultiLabels(),testPredict)+ "\t");
System.out.println("testOver : "+ Overlap.overlap(testSet.getMultiLabels(), testPredict)+ "\t");
if (config.getBoolean("saveModelForEachIter")) {
File serializeModel = new File(path, "iter." + i + ".model");
bmmClassifier.serialize(serializeModel);
double[][] gammas = optimizer.getGammas();
double[][] PIs = optimizer.getPIs();
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(path, "iter."+i+".gammas")));
BufferedWriter bw1 = new BufferedWriter(new FileWriter(new File(path, "iter."+i+".PIs")));
for (int n=0; n<gammas.length; n++) {
for (int k=0; k<gammas[n].length; k++) {
bw.write(gammas[n][k] + "\t");
bw1.write(PIs[n][k] + "\t");
}
bw.write("\n");
bw1.write("\n");
}
bw.close();
bw1.close();
}
}
System.out.println("history = "+optimizer.getTerminator().getHistory());
System.out.println("
System.out.println();
System.out.print("trainAcc : " + Accuracy.accuracy(bmmClassifier, trainSet) + "\t");
System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t");
System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t");
System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t");
System.out.println();
System.out.println();
// System.out.println(bmmClassifier);
if (config.getBoolean("saveModel")) {
File serializeModel = new File(path, "model");
bmmClassifier.serialize(serializeModel);
}
}
}
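// Sketch of a properties file for this experiment. The keys are the ones read above; all values
// here are placeholders, not recommended settings.
//
//   input.matrixType=sparse_random
//   input.trainData=/path/to/train
//   input.testData=/path/to/test
//   mixture.numClusters=10
//   mixture.multiClassClassifierType=lr
//   mixture.binaryClassifierType=lr
//   em.numIterations=20
//   em.inverseTemperature=1.0
//   predict.mode=sampling
//   predict.sampling.numSamples=100
//   predict.allowEmpty=false
//   train.warmStart=false
//   train.initialize=true
//   output=/path/to/output
//   modelName=bmm
//   saveModel=true
//   saveModelForEachIter=false
//   ... plus the lr.* and boost.* keys read in getOptimizer(...)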
|
package com.google.step.snippet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import com.google.step.snippet.data.Card;
import com.google.step.snippet.external.W3SchoolsClient;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public final class W3SchoolsClientTest {
private final W3SchoolsClient client = new W3SchoolsClient();
@Test
public void htmlCodeCard() {
Card actual = client.search("https:
Card expected =
new Card(
"HTML <img> Tag",
"<img src=\"img_girl.jpg\" alt=\"Girl in a jacket\" width=\"500\" height=\"600\">",
"https:
"How to insert an image:");
assertEquals(expected, actual);
}
@Test
public void jsonCodeCard() {
Card actual = client.search("https:
Card expected =
new Card(
"JSON - Introduction",
"var myObj = {name: \"John\", age: 31, city: \"New York\"}; var myJSON ="
+ " JSON.stringify(myObj); window.location = \"demo_json.php?x=\" + myJSON;",
"https:
"JSON: JavaScript Object Notation.");
assertEquals(expected, actual);
}
@Test
public void partiallyFilledCard() {
Card actual = client.search("https:
assertNull(actual);
}
@Test
public void nonexistentLink() {
Card actual = client.search("https:
assertNull(actual);
}
@Test
public void blankPageLink() {
Card actual = client.search("https:
assertNull(actual);
}
}
|
package eu.scape_project.pt.util.fs;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
* Handles the transportation of files from the local filesystem to HDFS
* and vice-versa.
*
* @author Rainer Schmidt [rschmidt13]
* @author Matthias Rella [myrho]
* @author Martin Schenck [schenck]
*/
public class HDFSFiler extends Filer {
private static Log LOG = LogFactory.getLog(HDFSFiler.class);
/**
* Hadoop Filesystem handle.
*/
private final FileSystem hdfs;
/**
* File to handle by this filer
*/
private final Path file;
HDFSFiler(URI uri) throws IOException {
this.file = new Path(uri);
hdfs = file.getFileSystem(new Configuration());
}
/**
* Copies a file or directory from the local filesystem to a remote one.
*/
private void depositDirectoryOrFile(String strSrc, String strDest) throws IOException {
File source = new File( strSrc );
if(source.isDirectory()) {
depositDirectory(strSrc, strDest);
} else {
depositFile(strSrc, strDest);
}
}
/**
* Copies a directory from the local filesystem to a remote one.
*/
private void depositDirectory(String strSrc, String strDest) throws IOException {
// Get output directory name from strSrc
File localDir = new File( strSrc );
if(!localDir.isDirectory()) {
throw new IOException("Could not find correct local output directory: " + localDir );
}
LOG.debug("Local directory is: " + localDir );
for(File localFile : localDir.listFiles()) {
depositDirectoryOrFile(localFile.getCanonicalPath(), strDest + File.separator + localFile.getName());
}
}
/**
* Copies a file from the local filesystem to a remote one.
*/
private void depositFile(String strSrc, String strDest) throws IOException {
Path src = new Path(strSrc);
Path dest = new Path(strDest);
LOG.debug("local file name is: "+src+" destination path is:" +dest);
hdfs.copyFromLocalFile(src, dest);
}
@Override
public void localize() throws IOException {
File fileRef = new File(getAbsoluteFileRef());
LOG.debug("localize " + fileRef);
new File(fileRef.getParent()).mkdirs();
Path localfile = new Path( fileRef.toString() );
if(hdfs.exists(file)) {
if( LOG.isDebugEnabled() ) {
FileStatus fs = hdfs.getFileStatus(file);
BlockLocation[] locations = hdfs.getFileBlockLocations(fs, (long)0, fs.getLen());
for( BlockLocation location : locations ) {
LOG.debug("location hosts: ");
String[] hosts = location.getHosts();
LOG.debug(" one blockLocation on: ");
for( String host : hosts ) {
LOG.debug(" host = " + host);
}
}
}
hdfs.copyToLocalFile(file, localfile);
}
}
@Override
public void delocalize() throws IOException {
this.depositDirectoryOrFile(getAbsoluteFileRef(), file.toString());
}
@Override
public void setWorkingDir(String strDir ) {
LOG.debug("setDirectory " + strDir );
File dir = new File(strDir);
if( !dir.isAbsolute() ) {
this.dir = this.getTmpDir() + strDir;
} else {
this.dir = strDir;
}
LOG.debug("this.dir = " + this.dir );
}
@Override
public String getAbsoluteFileRef() {
return this.getFullDirectory();
}
@Override
public String getRelativeFileRef() {
String path = this.getPath();
if( path.startsWith(System.getProperty("file.separator")) )
path = path.substring(1);
return path;
}
/**
* Returns the user defined directory of the file.
*/
private String getPath() {
URI uri = this.file.toUri();
String path = uri.getPath();
LOG.debug("path = " + path);
String sep = System.getProperty("file.separator");
return path.replace(Path.SEPARATOR, sep);
}
/**
* Returns working space directory with user defined directories.
*/
private String getFullDirectory() {
String sep = System.getProperty("file.separator");
String par = this.getPath();
return (this.dir.isEmpty()
? "hdfsfiler_" + file.hashCode()
: this.dir)
+ par;
}
@Override
public InputStream getInputStream() throws IOException {
return hdfs.open(file);
}
@Override
public OutputStream getOutputStream() throws IOException {
return hdfs.create(file);
}
}
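// Usage sketch (hedged: the Filer base class and the way instances are obtained are not shown in
// this file, so the factory call below is hypothetical). Grounded in the methods above:
// localize() copies the HDFS file into the local working directory, delocalize() pushes the local
// file or directory back to HDFS, and getInputStream()/getOutputStream() wrap hdfs.open()/create().
//
//   Filer filer = Filer.create("hdfs://namenode/data/input.bin");  // hypothetical factory
//   filer.setWorkingDir("job-tmp/");
//   filer.localize();                         // HDFS -> local working copy
//   // ... run a local tool against filer.getAbsoluteFileRef() ...
//   filer.delocalize();                       // local result -> HDFS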
|
package net.openhft.chronicle.core;
import org.junit.Test;
import static junit.framework.TestCase.assertTrue;
public class JvmSafepointTest {
@Test
public void testSafepoint() {
@SuppressWarnings("AnonymousHasLambdaAlternative")
Thread t = new Thread() {
public void run() {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() < start + 500) {
for (int i = 0; i < 100; i++)
if (Jvm.areOptionalSafepointsEnabled())
Jvm.optionalSafepoint();
else
Jvm.safepoint();
}
}
};
t.start();
Jvm.pause(100);
int counter = 0;
while (t.isAlive()) {
StackTraceElement[] stackTrace = t.getStackTrace();
if (stackTrace.length > 1) {
String s = stackTrace[1].toString();
if (s.contains("safepoint"))
counter++;
else if (t.isAlive())
System.out.println(s);
}
}
assertTrue("counter: " + counter, counter > 200);
}
@Test
public void safePointPerf() {
for (int t = 0; t < 8; t++) {
long start = System.nanoTime();
int count = 2_000_000;
for (int i = 0; i < count; i++)
Jvm.safepoint();
long time = System.nanoTime() - start;
if (t > 1) {
long avg = time / count;
System.out.println("avg: " + avg);
assertTrue("avg: " + avg, 2 < avg && avg < 200);
}
Jvm.pause(5);
}
}
}
|
package gov.nasa.jpl.mbee.mdk.emf;
import com.nomagic.magicdraw.core.Project;
import com.nomagic.magicdraw.core.ProjectUtilities;
import com.nomagic.uml2.ext.jmi.helpers.ModelHelper;
import com.nomagic.uml2.ext.jmi.helpers.StereotypesHelper;
import com.nomagic.uml2.ext.magicdraw.auxiliaryconstructs.mdmodels.Model;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Class;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.*;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Package;
import com.nomagic.uml2.ext.magicdraw.compositestructures.mdinternalstructures.Connector;
import com.nomagic.uml2.ext.magicdraw.compositestructures.mdinternalstructures.ConnectorEnd;
import com.nomagic.uml2.ext.magicdraw.mdprofiles.Stereotype;
import com.nomagic.uml2.ext.magicdraw.metadata.UMLPackage;
import gov.nasa.jpl.mbee.mdk.api.function.TriFunction;
import gov.nasa.jpl.mbee.mdk.api.incubating.MDKConstants;
import gov.nasa.jpl.mbee.mdk.lib.ClassUtils;
import gov.nasa.jpl.mbee.mdk.lib.Utils;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;
public class EMFExporter implements BiFunction<Element, Project, JSONObject> {
@Override
public JSONObject apply(Element element, Project project) {
return convert(element, project);
}
private static JSONObject convert(Element element, Project project) {
return convert(element, project, false);
}
private static JSONObject convert(Element element, Project project, boolean nestedValueSpecification) {
if (element == null) {
return null;
}
JSONObject jsonObject = new JSONObject();
for (PreProcessor preProcessor : PreProcessor.values()) {
if (nestedValueSpecification && preProcessor == PreProcessor.VALUE_SPECIFICATION) {
continue;
}
jsonObject = preProcessor.getFunction().apply(element, project, jsonObject);
if (jsonObject == null) {
return null;
}
}
for (EStructuralFeature eStructuralFeature : element.eClass().getEAllStructuralFeatures()) {
ExportFunction function = Arrays.stream(EStructuralFeatureOverride.values())
.filter(override -> override.getPredicate().test(element, eStructuralFeature)).map(EStructuralFeatureOverride::getFunction)
.findAny().orElse(DEFAULT_E_STRUCTURAL_FEATURE_FUNCTION);
jsonObject = function.apply(element, project, eStructuralFeature, jsonObject);
if (jsonObject == null) {
return null;
}
}
return jsonObject;
}
public static String getEID(EObject eObject) {
if (eObject == null) {
return null;
}
if (eObject instanceof Slot) {
Slot slot = (Slot) eObject;
if (slot.getOwner() == null || ((Slot) eObject).getDefiningFeature() == null) {
return null;
}
return getEID(slot.getOwner()) + "-slot-" + getEID(slot.getDefiningFeature());
}
if (eObject instanceof Model) {
Model model = (Model) eObject;
Project project = Project.getProject(model);
if (eObject == project.getModel()) {
return project.getPrimaryProject().getProjectID();
}
}
return EcoreUtil.getID(eObject);
}
private static JSONObject fillMetatype(Element e, JSONObject einfo) {
// info.put("isMetatype", false);
if (e instanceof Stereotype) {
einfo.put("isMetatype", true);
JSONArray metatypes = ((Stereotype) e).getSuperClass().stream().filter(c -> c instanceof Stereotype).map(EMFExporter::getEID).collect(Collectors.toCollection(JSONArray::new));
metatypes.addAll(StereotypesHelper.getBaseClasses((Stereotype) e).stream().map(EMFExporter::getEID).collect(Collectors.toList()));
einfo.put("metatypesId", metatypes);
}
if (e instanceof Class) {
try {
java.lang.Class c = StereotypesHelper.getClassOfMetaClass((Class) e);
if (c != null) {
einfo.put("isMetatype", true);
einfo.put("metatypes", new JSONArray());
}
} catch (Exception ex) {
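// ignore lookup failures; the element is simply not marked as a metatype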
}
}
List<Stereotype> stereotypes = StereotypesHelper.getStereotypes(e);
JSONArray applied = stereotypes.stream().map(EMFExporter::getEID).collect(Collectors.toCollection(JSONArray::new));
einfo.put("appliedStereotypeIds", applied);
return einfo;
}
private static void debugUMLPackageLiterals() {
for (Field field : UMLPackage.Literals.class.getDeclaredFields()) {
if (Modifier.isStatic(field.getModifiers())) {
try {
field.setAccessible(true);
Object o = field.get(null);
System.out.println(field.getName() + ": " + o);
if (o instanceof EReference) {
EReference eReference = (EReference) o;
System.out.println(" --- " + eReference.getEReferenceType() + " : " + eReference.getEReferenceType().getInstanceClass());
}
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
}
private enum PreProcessor {
TYPE(
(element, project, jsonObject) -> {
jsonObject.put("type", element.eClass().getName());
return jsonObject;
}
),
METATYPE(
(element, project, jsonObject) -> {
if (element instanceof Stereotype) {
jsonObject.put("isMetatype", true);
JSONArray metatypes = ((Stereotype) element).getSuperClass().stream().filter(c -> c instanceof Stereotype).map(EMFExporter::getEID).collect(Collectors.toCollection(JSONArray::new));
metatypes.addAll(StereotypesHelper.getBaseClasses((Stereotype) element).stream().map(EMFExporter::getEID).collect(Collectors.toList()));
jsonObject.put("metatypesId", metatypes);
}
if (element instanceof Class) {
try {
java.lang.Class c = StereotypesHelper.getClassOfMetaClass((Class) element);
if (c != null) {
jsonObject.put("isMetatype", true);
jsonObject.put("metatypes", new JSONArray());
}
} catch (Exception ex) {
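// ignore lookup failures; the element is simply not marked as a metatype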
}
}
List<Stereotype> stereotypes = StereotypesHelper.getStereotypes(element);
JSONArray applied = stereotypes.stream().map(EMFExporter::getEID).collect(Collectors.toCollection(JSONArray::new));
jsonObject.put("_appliedStereotypeIds", applied);
return jsonObject;
}
),
DOCUMENTATION(
(element, project, jsonObject) -> {
jsonObject.put("documentation", Utils.stripHtmlWrapper(ModelHelper.getComment(element)));
return jsonObject;
}
),
SITE_CHARACTERIZATION(
(element, project, jsonObject) -> {
if (element instanceof Package) {
jsonObject.put("_isSite", Utils.isSiteChar((Package) element));
}
return jsonObject;
}
),
VALUE_SPECIFICATION(
(element, project, jsonObject) -> element instanceof ValueSpecification ? null : jsonObject
),
CONNECTOR_END(
(element, project, jsonObject) -> element instanceof ConnectorEnd ? null : jsonObject
),
DIAGRAM(
(element, project, jsonObject) -> element instanceof Diagram ? null : jsonObject
),
COMMENT(
(element, project, jsonObject) -> {
if (!(element instanceof Comment)) {
return jsonObject;
}
Comment comment = (Comment) element;
return comment.getAnnotatedElement().size() == 1 && comment.getAnnotatedElement().iterator().next() == comment.getOwner() ? null : jsonObject;
}
),
SYNC(
(element, project, jsonObject) -> element.getID().endsWith(MDKConstants.SYNC_SYSML_ID_SUFFIX) ||
element.getOwner() != null && element.getOwner().getID().endsWith(MDKConstants.SYNC_SYSML_ID_SUFFIX) ? null : jsonObject
),
ATTACHED_PROJECT(
(element, project, jsonObject) -> ProjectUtilities.isElementInAttachedProject(element) ? null : jsonObject
);
private TriFunction<Element, Project, JSONObject, JSONObject> function;
PreProcessor(TriFunction<Element, Project, JSONObject, JSONObject> function) {
this.function = function;
}
public TriFunction<Element, Project, JSONObject, JSONObject> getFunction() {
return function;
}
}
private static final SerializationFunction DEFAULT_SERIALIZATION_FUNCTION = (object, project, eStructuralFeature) -> {
if (object == null) {
return null;
}
else if (object instanceof Collection) {
JSONArray jsonArray = new JSONArray();
for (Object o : ((Collection<?>) object)) {
Object serialized = EMFExporter.DEFAULT_SERIALIZATION_FUNCTION.apply(o, project, eStructuralFeature);
if (serialized == null && o != null) {
// failed to serialize; taking the conservative approach and returning entire thing as null
return null;
}
jsonArray.add(serialized);
}
return jsonArray;
}
else if (object instanceof ValueSpecification) {
return convert((ValueSpecification) object, project, true);
//return fillValueSpecification((ValueSpecification) object);
}
else if (eStructuralFeature instanceof EReference && object instanceof EObject) {
return EMFExporter.DEFAULT_SERIALIZATION_FUNCTION.apply(getEID(((EObject) object)), project, eStructuralFeature);
}
else if (eStructuralFeature.getEType() instanceof EDataType) {
return EcoreUtil.convertToString((EDataType) eStructuralFeature.getEType(), object);
//return ((Enumerator) object).getLiteral();
}
else if (object instanceof String || ClassUtils.isPrimitive(object)) {
return object;
}
// if we get here we have no idea what to do with this object
return null;
};
private static final ExportFunction DEFAULT_E_STRUCTURAL_FEATURE_FUNCTION = (element, project, eStructuralFeature, jsonObject) -> {
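// Skip features that are non-changeable, volatile, transient, unsettable, derived,
// or whose name starts with "_"; everything else goes through the unchecked export.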
if (!eStructuralFeature.isChangeable() || eStructuralFeature.isVolatile() || eStructuralFeature.isTransient() || eStructuralFeature.isUnsettable() || eStructuralFeature.isDerived() || eStructuralFeature.getName().startsWith("_")) {
return EMFExporter.EMPTY_E_STRUCTURAL_FEATURE_FUNCTION.apply(element, project, eStructuralFeature, jsonObject);
}
return EMFExporter.UNCHECKED_E_STRUCTURAL_FEATURE_FUNCTION.apply(element, project, eStructuralFeature, jsonObject);
};
private static final ExportFunction UNCHECKED_E_STRUCTURAL_FEATURE_FUNCTION = (element, project, eStructuralFeature, jsonObject) -> {
Object value = element.eGet(eStructuralFeature);
Object serializedValue = DEFAULT_SERIALIZATION_FUNCTION.apply(value, project, eStructuralFeature);
if (value != null && serializedValue == null) {
System.err.println("[EMF] Failed to serialize " + eStructuralFeature + " for " + element + ": " + value + " - " + value.getClass());
return jsonObject;
}
String key = eStructuralFeature.getName();
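// References to other EObjects are exported by ID, so the JSON key gets an
// "Id"/"Ids" suffix; ValueSpecifications are inlined instead and keep the plain name.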
if (eStructuralFeature instanceof EReference && EObject.class.isAssignableFrom(((EReference) eStructuralFeature).getEReferenceType().getInstanceClass())
&& !ValueSpecification.class.isAssignableFrom(((EReference) eStructuralFeature).getEReferenceType().getInstanceClass())) {
key += "Id" + (eStructuralFeature.isMany() ? "s" : "");
}
jsonObject.put(key, serializedValue);
return jsonObject;
};
private static final ExportFunction EMPTY_E_STRUCTURAL_FEATURE_FUNCTION = (element, project, eStructuralFeature, jsonObject) -> jsonObject;
private enum EStructuralFeatureOverride {
ID(
(element, eStructuralFeature) -> eStructuralFeature == element.eClass().getEIDAttribute(),
(element, project, eStructuralFeature, jsonObject) -> {
jsonObject.put("sysmlId", getEID(element));
return jsonObject;
}
),
OWNER(
(element, eStructuralFeature) -> UMLPackage.Literals.ELEMENT__OWNER == eStructuralFeature,
(element, project, eStructuralFeature, jsonObject) -> {
//UNCHECKED_E_STRUCTURAL_FEATURE_FUNCTION.apply(element, UMLPackage.Literals.ELEMENT__OWNER, jsonObject);
// safest way to prevent circular references, like with ValueSpecifications
jsonObject.put(UMLPackage.Literals.ELEMENT__OWNER.getName() + "Id", getEID(element.getOwner()));
return jsonObject;
}
),
OWNING(
(element, eStructuralFeature) -> eStructuralFeature.getName().startsWith("owning"),
EMPTY_E_STRUCTURAL_FEATURE_FUNCTION
),
OWNED(
(element, eStructuralFeature) -> eStructuralFeature.getName().startsWith("owned") && !eStructuralFeature.isOrdered(),
EMPTY_E_STRUCTURAL_FEATURE_FUNCTION
),
PACKAGED_ELEMENT(
(element, eStructuralFeature) -> UMLPackage.Literals.PACKAGE__PACKAGED_ELEMENT == eStructuralFeature || UMLPackage.Literals.COMPONENT__PACKAGED_ELEMENT == eStructuralFeature,
EMPTY_E_STRUCTURAL_FEATURE_FUNCTION
),
NAMESPACE__OWNED_DIAGRAM(
(element, eStructuralFeature) -> eStructuralFeature == UMLPackage.Literals.NAMESPACE__OWNED_DIAGRAM,
EMPTY_E_STRUCTURAL_FEATURE_FUNCTION
),
DIRECTED_RELATIONSHIP__SOURCE(
(element, eStructuralFeature) -> UMLPackage.Literals.DIRECTED_RELATIONSHIP__SOURCE == eStructuralFeature,
(element, project, eStructuralFeature, jsonObject) -> {
jsonObject.put("_" + eStructuralFeature.getName() + "Ids", DEFAULT_SERIALIZATION_FUNCTION.apply(element.eGet(eStructuralFeature), project, eStructuralFeature));
return jsonObject;
}
),
DIRECTED_RELATIONSHIP__TARGET(
(element, eStructuralFeature) -> UMLPackage.Literals.DIRECTED_RELATIONSHIP__TARGET == eStructuralFeature,
(element, project, eStructuralFeature, jsonObject) -> {
jsonObject.put("_" + eStructuralFeature.getName() + "Ids", DEFAULT_SERIALIZATION_FUNCTION.apply(element.eGet(eStructuralFeature), project, eStructuralFeature));
return jsonObject;
}
),
CONNECTOR__END(
(element, eStructuralFeature) -> eStructuralFeature == UMLPackage.Literals.CONNECTOR__END,
(element, project, eStructuralFeature, jsonObject) -> {
Connector connector = (Connector) element;
// TODO Stop using Strings @donbot
List<List<Object>> propertyPaths = connector.getEnd().stream()
.map(connectorEnd -> StereotypesHelper.hasStereotype(connectorEnd, "NestedConnectorEnd") ? StereotypesHelper.getStereotypePropertyValue(connectorEnd, "NestedConnectorEnd", "propertyPath") : null)
.map(elements -> {
if (elements == null) {
return new ArrayList<>(1);
}
List<Object> list = new ArrayList<>(elements.size() + 1);
for (Object o : elements) {
list.add(o instanceof ElementValue ? ((ElementValue) o).getElement() : o);
}
return list;
}).collect(Collectors.toList());
for (int i = 0; i < propertyPaths.size(); i++) {
propertyPaths.get(i).add(connector.getEnd().get(i).getRole());
}
jsonObject.put("_propertyPathIds", DEFAULT_SERIALIZATION_FUNCTION.apply(propertyPaths, project, eStructuralFeature));
return DEFAULT_E_STRUCTURAL_FEATURE_FUNCTION.apply(element, project, eStructuralFeature, jsonObject);
}
),
VALUE_SPECIFICATION__EXPRESSION(
(element, eStructuralFeature) -> eStructuralFeature == UMLPackage.Literals.VALUE_SPECIFICATION__EXPRESSION,
(element, project, eStructuralFeature, jsonObject) -> {
Expression expression = null;
Object object = element.eGet(UMLPackage.Literals.VALUE_SPECIFICATION__EXPRESSION);
if (object instanceof Expression) {
expression = (Expression) object;
}
jsonObject.put(UMLPackage.Literals.VALUE_SPECIFICATION__EXPRESSION.getName() + "Id", expression != null ? expression.getID() : null);
return jsonObject;
}
);
private BiPredicate<Element, EStructuralFeature> predicate;
private ExportFunction function;
EStructuralFeatureOverride(BiPredicate<Element, EStructuralFeature> predicate, ExportFunction function) {
this.predicate = predicate;
this.function = function;
}
public BiPredicate<Element, EStructuralFeature> getPredicate() {
return predicate;
}
public ExportFunction getFunction() {
return function;
}
}
@FunctionalInterface
interface SerializationFunction {
Object apply(Object object, Project project, EStructuralFeature eStructuralFeature);
}
@FunctionalInterface
interface ExportFunction {
JSONObject apply(Element element, Project project, EStructuralFeature eStructuralFeature, JSONObject jsonObject);
}
}
|
package imagej.workflowpipes.pipesentity;
import java.io.Serializable;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Iterator;
import org.json.JSONObject;
/*
* Used to represent the conf JSON entity
*/
public class Conf implements Serializable {
private Name name;
private Type type;
private Value value;
/**
*
* @param name - Conf name
* @param value - Conf value
* @param type - Conf type
*/
public Conf( Name name, Value value, Type type )
{
this.name = name;
this.type = type;
this.value = value;
}
public Type getType() {
return type;
}
public Value getValue() {
return value;
}
/**
* Returns true if this conf's name matches the given name.
* @param inputName the name to compare against
* @return true if the names match, false otherwise
*/
public boolean isName( String inputName )
{
return inputName.equals( this.name.getValue() );
}
/**
* Constructs a Conf from its name and its JSON representation.
* @param name - the conf name
* @param confJSON - JSON object used to create the new Conf
*/
public Conf( String name, JSONObject confJSON )
{
this.name = new Name( name );
this.type = new Type( confJSON.getString("type") );
System.out.println("confJSON is " + confJSON);
this.value = new Value("");
try {
this.value = new Value( confJSON.getString("value") );
}
catch (java.util.NoSuchElementException e) {
System.out.println("NoSuchElementException " + e.getMessage());
}
}
/**
* @return the JSON Object representing this object
*/
public JSONObject getJSONObject()
{
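// Illustrative output (hypothetical values): for name "threshold", value "0.5"
// and type "number" this produces { "threshold": { "value": "0.5", "type": "number" } }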
//create a new object
JSONObject json = new JSONObject();
//populate the value
json.put("value", value.getValue() );
//populate the type
json.put("type", type.getValue() );
JSONObject jsonOutput = new JSONObject();
jsonOutput.put( name.getValue(), json );
//return the object
return jsonOutput;
}
public static ArrayList<Conf> getConfs( String string )
{
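// Illustrative input (hypothetical, assuming the JSONObject parser used here accepts
// standard JSON text): "{\"threshold\":{\"value\":\"0.5\",\"type\":\"number\"}}"
// yields a single Conf named "threshold".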
ArrayList<Conf> confs = new ArrayList<Conf>();
//let class parse the input string
JSONObject json = null;
try {
json = new JSONObject( string );
} catch (ParseException e) {
e.printStackTrace();
}
//use an iterator to find all the confs in the JSONObject
Iterator i = json.keys();
while( i.hasNext() )
{
JSONObject jsonConf;
// get the name (key) value
String name = (String) i.next();
// get the next conf as JSONObject
jsonConf = json.getJSONObject( name );
//Create a conf object and add to the array
confs.add( new Conf( name, jsonConf ) );
}
return confs;
}
/**
* returns the first conf found matching the string name
* @param confName
* @param confs
* @return null if no match
*/
public static Conf getConf( String confName, ArrayList<Conf> confs )
{
//for each conf in the array
for(Conf conf:confs)
{
System.out.println("Conf :: getConf :: conf is " + conf.getJSONObject() );
//check to see if the name matches
if( conf.isName(confName))
{
//it matches so return this conf
return conf;
}
}
// no match - return null
return null;
}
public static JSONObject getJSON(ArrayList<Conf> confs) {
JSONObject json = new JSONObject();
// iterate over the confs
for ( Conf conf : confs )
{
JSONObject internalJSON = new JSONObject();
internalJSON.put( "value", conf.value.getValue() );
internalJSON.put( "type", conf.type.getValue() );
json.put( conf.name.getValue(), internalJSON );
}
return json;
}
}
|
package nallar.tickprofiler.minecraft;
import me.nallar.modpatcher.ModPatcher;
import nallar.tickprofiler.Log;
import net.minecraftforge.fml.relauncher.IFMLLoadingPlugin;
import java.util.*;
@IFMLLoadingPlugin.MCVersion("@MC_VERSION@")
@IFMLLoadingPlugin.SortingIndex(1001)
public class CoreMod implements IFMLLoadingPlugin {
static {
ModPatcher.requireVersion("1.8.9.95");
}
@Override
public String[] getASMTransformerClass() {
return new String[0];
}
@Override
public String getModContainerClass() {
return null;
}
@Override
public String getSetupClass() {
return ModPatcher.getSetupClass();
}
private Boolean spongePresent;
private boolean isSpongePresent() {
if (spongePresent == null) {
try {
Class.forName("org.spongepowered.asm.mixin.MixinEnvironment", false, CoreMod.class.getClassLoader());
spongePresent = true;
} catch (ClassNotFoundException e) {
spongePresent = false;
}
}
return spongePresent;
}
@Override
public void injectData(Map<String, Object> data) {
Log.info("TickProfiler v@MOD_VERSION@ coremod loading. Sponge present: " + isSpongePresent());
if (isSpongePresent())
ModPatcher.loadPatches(CoreMod.class.getResourceAsStream("/entityhook_sponge.xml"));
else
ModPatcher.loadPatches(CoreMod.class.getResourceAsStream("/entityhook.xml"));
// TODO: Not implemented
// ModPatcher.loadPatches(CoreMod.class.getResourceAsStream("/packethook.xml"));
}
@Override
public String getAccessTransformerClass() {
return null;
}
}
|
package net.darkhax.bookshelf.util;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
public final class WorldUtils {
/**
* Utility classes, such as this one, are not meant to be instantiated. Java
* adds an implicit public constructor to every class which does not define at
* least one explicitly. Hence this private constructor.
*/
private WorldUtils () {
throw new IllegalAccessError("Utility class");
}
/**
* Gets the display name of a world.
*
* @param world The world to get the name of.
* @return The name of the world.
*/
public static String getWorldName (World world) {
String result = "Unknown";
// TODO add more fallback options
if (world.provider != null) {
result = world.provider.getDimensionType().getName();
}
return result;
}
/**
* Gets the amount of loaded chunks.
*
* @param world The world to get the chunk count of.
* @return The amount of chunks. -1 means it was unable to get the amount.
*/
public static int getLoadedChunks (WorldServer world) {
return world.getChunkProvider() != null ? world.getChunkProvider().getLoadedChunkCount() : -1;
}
/**
* Gets the dimension id of a world.
*
* @param world The world to get the id of.
* @return The id of the world. 0 (surface) is used if none is found.
*/
public static int getDimId (WorldServer world) {
return world.provider != null ? world.provider.getDimension() : 0;
}
}
|
package net.folab.fo.bytecode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import net.folab.fo.ast.Statement;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
public class MethodGenerator implements Opcodes {
private final ClassGenerator cg;
private final Access access;
private final JavaType returnType;
private final String name;
private final JavaType[] parameterTypes;
private final List<Statement> statements;
public MethodGenerator(ClassGenerator cg, Access access,
JavaType returnType, String name, JavaType[] parameterTypes,
List<Statement> statements) {
this.cg = cg;
this.access = access;
this.returnType = returnType;
this.name = name;
this.parameterTypes = parameterTypes;
this.statements = Collections.unmodifiableList(statements);
}
public static MethodGenerator build(String name) {
return new MethodGenerator(null, Access.PUBLIC, JavaType.VOID, name,
new JavaType[0], new ArrayList<Statement>());
}
public void generate(ClassWriter cw) {
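// Build the JVM method descriptor: "(" + parameter type descriptors + ")" + return type descriptor.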
String desc = "(";
for (JavaType pt : parameterTypes) {
desc += pt.getDescName();
}
desc += ")";
desc += returnType.getDescName();
int modifier = access.modifier;
MethodVisitor mv = cw.visitMethod(
modifier, // access
name, // name
desc, // desc
null, // signature
null // exceptions
);
mv.visitCode();
StatementContext ctx = new StatementContext();
if ((modifier & ACC_STATIC) != ACC_STATIC) {
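// Instance methods carry an implicit "this" reference as their first local variable.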
ctx.addLocal("this", new JavaType(cg.getName()));
}
for (Statement statement : statements) {
statement.generate(mv, ctx);
}
mv.visitMaxs(ctx.maxStack(), ctx.maxLocals());
mv.visitEnd();
}
public MethodGenerator setClassGenerator(ClassGenerator cg) {
return new MethodGenerator(cg, access, returnType, name,
parameterTypes, statements);
}
public Access getAccessModifier() {
return access;
}
public MethodGenerator setAccessModifier(Access accessModifier) {
return new MethodGenerator(cg, accessModifier, returnType, name,
parameterTypes, statements);
}
public JavaType getReturnType() {
return returnType;
}
public MethodGenerator setReturnType(JavaType returnType) {
return new MethodGenerator(cg, access, returnType, name,
parameterTypes, statements);
}
public String getName() {
return name;
}
public JavaType[] getParameterTypes() {
return parameterTypes;
}
public MethodGenerator setParameterTypes(JavaType... parameterTypes) {
return new MethodGenerator(cg, access, returnType, name,
parameterTypes, statements);
}
public MethodGenerator addStatement(Statement statement) {
List<Statement> statements = new ArrayList<Statement>(this.statements);
statements.add(statement);
return new MethodGenerator(cg, access, returnType, name,
parameterTypes, statements);
}
}
|
package net.imagej.ui.swing.script;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.KeyEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.BufferedReader;
import java.io.CharArrayWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.Vector;
import java.util.concurrent.ExecutionException;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipException;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import javax.swing.AbstractAction;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.BadLocationException;
import javax.swing.text.Position;
import net.imagej.ui.swing.script.commands.ChooseFontSize;
import net.imagej.ui.swing.script.commands.ChooseTabSize;
import net.imagej.ui.swing.script.commands.GitGrep;
import net.imagej.ui.swing.script.commands.KillScript;
import org.fife.ui.rsyntaxtextarea.AbstractTokenMakerFactory;
import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea;
import org.fife.ui.rsyntaxtextarea.TokenMakerFactory;
import org.scijava.Context;
import org.scijava.command.CommandService;
import org.scijava.event.ContextDisposingEvent;
import org.scijava.event.EventHandler;
import org.scijava.io.IOService;
import org.scijava.log.LogService;
import org.scijava.module.ModuleException;
import org.scijava.module.ModuleService;
import org.scijava.platform.PlatformService;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.PluginInfo;
import org.scijava.plugin.PluginService;
import org.scijava.plugins.scripting.java.JavaEngine;
import org.scijava.prefs.PrefService;
import org.scijava.script.ScriptHeaderService;
import org.scijava.script.ScriptInfo;
import org.scijava.script.ScriptLanguage;
import org.scijava.script.ScriptModule;
import org.scijava.script.ScriptService;
import org.scijava.ui.CloseConfirmable;
import org.scijava.ui.UIService;
import org.scijava.util.AppUtils;
import org.scijava.util.FileUtils;
import org.scijava.util.MiscUtils;
import org.scijava.widget.FileWidget;
@SuppressWarnings("serial")
public class TextEditor extends JFrame implements ActionListener,
ChangeListener, CloseConfirmable, DocumentListener
{
private static final Set<String> TEMPLATE_PATHS = new HashSet<>();
public static final String AUTO_IMPORT_PREFS = "script.editor.AutoImport";
public static final String WINDOW_HEIGHT = "script.editor.height";
public static final String WINDOW_WIDTH = "script.editor.width";
public static final int DEFAULT_WINDOW_WIDTH = 800;
public static final int DEFAULT_WINDOW_HEIGHT = 600;
static {
// Add known script template paths.
addTemplatePath("script_templates");
// This path interferes with javadoc generation but is preserved for
// backwards compatibility
addTemplatePath("script-templates");
}
private static AbstractTokenMakerFactory tokenMakerFactory = null;
private JTabbedPane tabbed;
private JMenuItem newFile, open, save, saveas, compileAndRun, compile,
close, undo, redo, cut, copy, paste, find, replace, selectAll, kill,
gotoLine, makeJar, makeJarWithSource, removeUnusedImports, sortImports,
removeTrailingWhitespace, findNext, findPrevious, openHelp, addImport,
clearScreen, nextError, previousError, openHelpWithoutFrames, nextTab,
previousTab, runSelection, extractSourceJar, toggleBookmark,
listBookmarks, openSourceForClass, openSourceForMenuItem,
openMacroFunctions, decreaseFontSize, increaseFontSize, chooseFontSize,
chooseTabSize, gitGrep, openInGitweb, replaceTabsWithSpaces,
replaceSpacesWithTabs, toggleWhiteSpaceLabeling, zapGremlins,
savePreferences;
private RecentFilesMenuItem openRecent;
private JMenu gitMenu, tabsMenu, fontSizeMenu, tabSizeMenu, toolsMenu,
runMenu, whiteSpaceMenu;
private int tabsMenuTabsStart;
private Set<JMenuItem> tabsMenuItems;
private FindAndReplaceDialog findDialog;
private JCheckBoxMenuItem autoSave, wrapLines, tabsEmulated, autoImport;
private JTextArea errorScreen = new JTextArea();
private int compileStartOffset;
private Position compileStartPosition;
private ErrorHandler errorHandler;
private boolean respectAutoImports;
@Parameter
private Context context;
@Parameter
private LogService log;
@Parameter
private ModuleService moduleService;
@Parameter
private PlatformService platformService;
@Parameter
private IOService ioService;
@Parameter
private CommandService commandService;
@Parameter
private ScriptService scriptService;
@Parameter
private PluginService pluginService;
@Parameter
private ScriptHeaderService scriptHeaderService;
@Parameter
private UIService uiService;
@Parameter
private PrefService prefService;
private Map<ScriptLanguage, JRadioButtonMenuItem> languageMenuItems;
private JRadioButtonMenuItem noneLanguageItem;
public TextEditor(final Context context) {
super("Script Editor");
context.inject(this);
initializeTokenMakers();
loadPreferences();
// Initialize menu
final int ctrl = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask();
final int shift = ActionEvent.SHIFT_MASK;
final JMenuBar mbar = new JMenuBar();
setJMenuBar(mbar);
final JMenu file = new JMenu("File");
file.setMnemonic(KeyEvent.VK_F);
newFile = addToMenu(file, "New", KeyEvent.VK_N, ctrl);
newFile.setMnemonic(KeyEvent.VK_N);
open = addToMenu(file, "Open...", KeyEvent.VK_O, ctrl);
open.setMnemonic(KeyEvent.VK_O);
openRecent = new RecentFilesMenuItem(prefService, this);
openRecent.setMnemonic(KeyEvent.VK_R);
file.add(openRecent);
save = addToMenu(file, "Save", KeyEvent.VK_S, ctrl);
save.setMnemonic(KeyEvent.VK_S);
saveas = addToMenu(file, "Save as...", 0, 0);
saveas.setMnemonic(KeyEvent.VK_A);
file.addSeparator();
makeJar = addToMenu(file, "Export as .jar", 0, 0);
makeJar.setMnemonic(KeyEvent.VK_E);
makeJarWithSource = addToMenu(file, "Export as .jar (with source)", 0, 0);
makeJarWithSource.setMnemonic(KeyEvent.VK_X);
file.addSeparator();
close = addToMenu(file, "Close", KeyEvent.VK_W, ctrl);
mbar.add(file);
final JMenu edit = new JMenu("Edit");
edit.setMnemonic(KeyEvent.VK_E);
undo = addToMenu(edit, "Undo", KeyEvent.VK_Z, ctrl);
redo = addToMenu(edit, "Redo", KeyEvent.VK_Y, ctrl);
edit.addSeparator();
selectAll = addToMenu(edit, "Select All", KeyEvent.VK_A, ctrl);
cut = addToMenu(edit, "Cut", KeyEvent.VK_X, ctrl);
copy = addToMenu(edit, "Copy", KeyEvent.VK_C, ctrl);
paste = addToMenu(edit, "Paste", KeyEvent.VK_V, ctrl);
edit.addSeparator();
find = addToMenu(edit, "Find...", KeyEvent.VK_F, ctrl);
find.setMnemonic(KeyEvent.VK_F);
findNext = addToMenu(edit, "Find Next", KeyEvent.VK_F3, 0);
findNext.setMnemonic(KeyEvent.VK_N);
findPrevious = addToMenu(edit, "Find Previous", KeyEvent.VK_F3, shift);
findPrevious.setMnemonic(KeyEvent.VK_P);
replace = addToMenu(edit, "Find and Replace...", KeyEvent.VK_H, ctrl);
gotoLine = addToMenu(edit, "Goto line...", KeyEvent.VK_G, ctrl);
gotoLine.setMnemonic(KeyEvent.VK_G);
toggleBookmark = addToMenu(edit, "Toggle Bookmark", KeyEvent.VK_B, ctrl);
toggleBookmark.setMnemonic(KeyEvent.VK_B);
listBookmarks = addToMenu(edit, "List Bookmarks", 0, 0);
listBookmarks.setMnemonic(KeyEvent.VK_O);
edit.addSeparator();
// Font adjustments
decreaseFontSize =
addToMenu(edit, "Decrease font size", KeyEvent.VK_MINUS, ctrl);
decreaseFontSize.setMnemonic(KeyEvent.VK_D);
increaseFontSize =
addToMenu(edit, "Increase font size", KeyEvent.VK_PLUS, ctrl);
increaseFontSize.setMnemonic(KeyEvent.VK_C);
fontSizeMenu = new JMenu("Font sizes");
fontSizeMenu.setMnemonic(KeyEvent.VK_Z);
final boolean[] fontSizeShortcutUsed = new boolean[10];
final ButtonGroup buttonGroup = new ButtonGroup();
for (final int size : new int[] { 8, 10, 12, 16, 20, 28, 42 }) {
final JRadioButtonMenuItem item =
new JRadioButtonMenuItem("" + size + " pt");
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent event) {
getEditorPane().setFontSize(size);
updateTabAndFontSize(false);
}
});
for (final char c : ("" + size).toCharArray()) {
final int digit = c - '0';
if (!fontSizeShortcutUsed[digit]) {
item.setMnemonic(KeyEvent.VK_0 + digit);
fontSizeShortcutUsed[digit] = true;
break;
}
}
buttonGroup.add(item);
fontSizeMenu.add(item);
}
chooseFontSize = new JRadioButtonMenuItem("Other...", false);
chooseFontSize.setMnemonic(KeyEvent.VK_O);
chooseFontSize.addActionListener(this);
buttonGroup.add(chooseFontSize);
fontSizeMenu.add(chooseFontSize);
edit.add(fontSizeMenu);
// Add tab size adjusting menu
tabSizeMenu = new JMenu("Tab sizes");
tabSizeMenu.setMnemonic(KeyEvent.VK_T);
final ButtonGroup bg = new ButtonGroup();
for (final int size : new int[] { 2, 4, 8 }) {
final JRadioButtonMenuItem item = new JRadioButtonMenuItem("" + size);
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent event) {
getEditorPane().setTabSize(size);
updateTabAndFontSize(false);
}
});
item.setMnemonic(KeyEvent.VK_0 + (size % 10));
bg.add(item);
tabSizeMenu.add(item);
}
chooseTabSize = new JRadioButtonMenuItem("Other...", false);
chooseTabSize.setMnemonic(KeyEvent.VK_O);
chooseTabSize.addActionListener(this);
bg.add(chooseTabSize);
tabSizeMenu.add(chooseTabSize);
edit.add(tabSizeMenu);
wrapLines = new JCheckBoxMenuItem("Wrap lines");
wrapLines.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(final ChangeEvent e) {
getEditorPane().setLineWrap(wrapLines.getState());
}
});
edit.add(wrapLines);
// Add Tab inserts as spaces
tabsEmulated = new JCheckBoxMenuItem("Tab key inserts spaces");
tabsEmulated.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(final ChangeEvent e) {
getEditorPane().setTabsEmulated(tabsEmulated.getState());
}
});
edit.add(tabsEmulated);
savePreferences = addToMenu(edit, "Save Preferences", 0, 0);
edit.addSeparator();
clearScreen = addToMenu(edit, "Clear output panel", 0, 0);
clearScreen.setMnemonic(KeyEvent.VK_L);
zapGremlins = addToMenu(edit, "Zap Gremlins", 0, 0);
edit.addSeparator();
addImport = addToMenu(edit, "Add import...", 0, 0);
addImport.setMnemonic(KeyEvent.VK_I);
removeUnusedImports = addToMenu(edit, "Remove unused imports", 0, 0);
removeUnusedImports.setMnemonic(KeyEvent.VK_U);
sortImports = addToMenu(edit, "Sort imports", 0, 0);
sortImports.setMnemonic(KeyEvent.VK_S);
respectAutoImports = prefService.getBoolean(AUTO_IMPORT_PREFS, false);
autoImport =
new JCheckBoxMenuItem("Auto-import (deprecated)", respectAutoImports);
autoImport.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(final ItemEvent e) {
respectAutoImports = e.getStateChange() == ItemEvent.SELECTED;
prefService.put(AUTO_IMPORT_PREFS, respectAutoImports);
}
});
edit.add(autoImport);
mbar.add(edit);
whiteSpaceMenu = new JMenu("Whitespace");
whiteSpaceMenu.setMnemonic(KeyEvent.VK_W);
removeTrailingWhitespace =
addToMenu(whiteSpaceMenu, "Remove trailing whitespace", 0, 0);
removeTrailingWhitespace.setMnemonic(KeyEvent.VK_W);
replaceTabsWithSpaces =
addToMenu(whiteSpaceMenu, "Replace tabs with spaces", 0, 0);
replaceTabsWithSpaces.setMnemonic(KeyEvent.VK_S);
replaceSpacesWithTabs =
addToMenu(whiteSpaceMenu, "Replace spaces with tabs", 0, 0);
replaceSpacesWithTabs.setMnemonic(KeyEvent.VK_T);
toggleWhiteSpaceLabeling = new JRadioButtonMenuItem("Label whitespace");
toggleWhiteSpaceLabeling.setMnemonic(KeyEvent.VK_L);
toggleWhiteSpaceLabeling.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
getTextArea().setWhitespaceVisible(
toggleWhiteSpaceLabeling.isSelected());
}
});
whiteSpaceMenu.add(toggleWhiteSpaceLabeling);
edit.add(whiteSpaceMenu);
languageMenuItems =
new LinkedHashMap<>();
final Set<Integer> usedShortcuts = new HashSet<>();
final JMenu languages = new JMenu("Language");
languages.setMnemonic(KeyEvent.VK_L);
final ButtonGroup group = new ButtonGroup();
// get list of languages, and sort them by name
final ArrayList<ScriptLanguage> list =
new ArrayList<>(scriptService.getLanguages());
Collections.sort(list, new Comparator<ScriptLanguage>() {
@Override
public int compare(final ScriptLanguage l1, final ScriptLanguage l2) {
final String name1 = l1.getLanguageName();
final String name2 = l2.getLanguageName();
return MiscUtils.compare(name1, name2);
}
});
list.add(null);
final Map<String, ScriptLanguage> languageMap =
new HashMap<>();
for (final ScriptLanguage language : list) {
final String name =
language == null ? "None" : language.getLanguageName();
languageMap.put(name, language);
final JRadioButtonMenuItem item = new JRadioButtonMenuItem(name);
if (language == null) {
noneLanguageItem = item;
}
else {
languageMenuItems.put(language, item);
}
int shortcut = -1;
for (final char ch : name.toCharArray()) {
final int keyCode = KeyStroke.getKeyStroke(ch, 0).getKeyCode();
if (usedShortcuts.contains(keyCode)) continue;
shortcut = keyCode;
usedShortcuts.add(shortcut);
break;
}
if (shortcut > 0) item.setMnemonic(shortcut);
item.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
setLanguage(language, true);
}
});
group.add(item);
languages.add(item);
}
noneLanguageItem.setSelected(true);
mbar.add(languages);
final JMenu templates = new JMenu("Templates");
templates.setMnemonic(KeyEvent.VK_T);
addTemplates(templates);
mbar.add(templates);
runMenu = new JMenu("Run");
runMenu.setMnemonic(KeyEvent.VK_R);
compileAndRun = addToMenu(runMenu, "Compile and Run", KeyEvent.VK_R, ctrl);
compileAndRun.setMnemonic(KeyEvent.VK_R);
runSelection =
addToMenu(runMenu, "Run selected code", KeyEvent.VK_R, ctrl | shift);
runSelection.setMnemonic(KeyEvent.VK_S);
compile = addToMenu(runMenu, "Compile", KeyEvent.VK_C, ctrl | shift);
compile.setMnemonic(KeyEvent.VK_C);
autoSave = new JCheckBoxMenuItem("Auto-save before compiling");
runMenu.add(autoSave);
runMenu.addSeparator();
nextError = addToMenu(runMenu, "Next Error", KeyEvent.VK_F4, 0);
nextError.setMnemonic(KeyEvent.VK_N);
previousError = addToMenu(runMenu, "Previous Error", KeyEvent.VK_F4, shift);
previousError.setMnemonic(KeyEvent.VK_P);
runMenu.addSeparator();
kill = addToMenu(runMenu, "Kill running script...", 0, 0);
kill.setMnemonic(KeyEvent.VK_K);
kill.setEnabled(false);
mbar.add(runMenu);
toolsMenu = new JMenu("Tools");
toolsMenu.setMnemonic(KeyEvent.VK_O);
openHelpWithoutFrames =
addToMenu(toolsMenu, "Open Help for Class...", 0, 0);
openHelpWithoutFrames.setMnemonic(KeyEvent.VK_O);
openHelp =
addToMenu(toolsMenu, "Open Help for Class (with frames)...", 0, 0);
openHelp.setMnemonic(KeyEvent.VK_P);
openMacroFunctions =
addToMenu(toolsMenu, "Open Help on Macro Functions...", 0, 0);
openMacroFunctions.setMnemonic(KeyEvent.VK_H);
extractSourceJar = addToMenu(toolsMenu, "Extract source .jar...", 0, 0);
extractSourceJar.setMnemonic(KeyEvent.VK_E);
openSourceForClass =
addToMenu(toolsMenu, "Open .java file for class...", 0, 0);
openSourceForClass.setMnemonic(KeyEvent.VK_J);
openSourceForMenuItem =
addToMenu(toolsMenu, "Open .java file for menu item...", 0, 0);
openSourceForMenuItem.setMnemonic(KeyEvent.VK_M);
mbar.add(toolsMenu);
gitMenu = new JMenu("Git");
gitMenu.setMnemonic(KeyEvent.VK_G);
/*
showDiff = addToMenu(gitMenu,
"Show diff...", 0, 0);
showDiff.setMnemonic(KeyEvent.VK_D);
commit = addToMenu(gitMenu,
"Commit...", 0, 0);
commit.setMnemonic(KeyEvent.VK_C);
*/
gitGrep = addToMenu(gitMenu, "Grep...", 0, 0);
gitGrep.setMnemonic(KeyEvent.VK_G);
openInGitweb = addToMenu(gitMenu, "Open in gitweb", 0, 0);
openInGitweb.setMnemonic(KeyEvent.VK_W);
mbar.add(gitMenu);
tabsMenu = new JMenu("Tabs");
tabsMenu.setMnemonic(KeyEvent.VK_A);
nextTab = addToMenu(tabsMenu, "Next Tab", KeyEvent.VK_PAGE_DOWN, ctrl);
nextTab.setMnemonic(KeyEvent.VK_N);
previousTab =
addToMenu(tabsMenu, "Previous Tab", KeyEvent.VK_PAGE_UP, ctrl);
previousTab.setMnemonic(KeyEvent.VK_P);
tabsMenu.addSeparator();
tabsMenuTabsStart = tabsMenu.getItemCount();
tabsMenuItems = new HashSet<>();
mbar.add(tabsMenu);
// Add the editor and output area
tabbed = new JTabbedPane();
tabbed.addChangeListener(this);
open(null); // make sure the editor pane is added
tabbed.setBorder(BorderFactory.createEmptyBorder(4, 4, 4, 4));
getContentPane().setLayout(
new BoxLayout(getContentPane(), BoxLayout.Y_AXIS));
getContentPane().add(tabbed);
// for Eclipse and MS Visual Studio lovers
addAccelerator(compileAndRun, KeyEvent.VK_F11, 0, true);
addAccelerator(compileAndRun, KeyEvent.VK_F5, 0, true);
addAccelerator(nextTab, KeyEvent.VK_PAGE_DOWN, ctrl, true);
addAccelerator(previousTab, KeyEvent.VK_PAGE_UP, ctrl, true);
addAccelerator(increaseFontSize, KeyEvent.VK_EQUALS, ctrl | shift, true);
// make sure that the window is not closed by accident
addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(final WindowEvent e) {
if (!confirmClose()) return;
dispose();
}
});
addWindowFocusListener(new WindowAdapter() {
@Override
public void windowGainedFocus(final WindowEvent e) {
checkForOutsideChanges();
}
});
final Font font = new Font("Courier", Font.PLAIN, 12);
errorScreen.setFont(font);
errorScreen.setEditable(false);
errorScreen.setLineWrap(true);
setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
try {
if (SwingUtilities.isEventDispatchThread()) {
pack();
}
else {
SwingUtilities.invokeAndWait(new Runnable() {
@Override
public void run() {
pack();
}
});
}
}
catch (final Exception ie) {
/* ignore */
}
findDialog = new FindAndReplaceDialog(this);
// Save the size of the window in the preferences
addComponentListener(new ComponentAdapter() {
@Override
public void componentResized(final ComponentEvent e) {
saveWindowSizeToPrefs();
}
});
setLocationRelativeTo(null); // center on screen
open(null);
final EditorPane editorPane = getEditorPane();
editorPane.requestFocus();
}
public LogService log() { return log; }
public PlatformService getPlatformService() { return platformService; }
public JTextArea getErrorScreen() { return errorScreen; }
public void setErrorScreen(final JTextArea errorScreen) {
this.errorScreen = errorScreen;
}
public ErrorHandler getErrorHandler() { return errorHandler; }
public void setErrorHandler(final ErrorHandler errorHandler) {
this.errorHandler = errorHandler;
}
private synchronized void initializeTokenMakers() {
if (tokenMakerFactory != null) return;
tokenMakerFactory =
(AbstractTokenMakerFactory) TokenMakerFactory.getDefaultInstance();
for (final PluginInfo<SyntaxHighlighter> info : pluginService
.getPluginsOfType(SyntaxHighlighter.class))
try {
tokenMakerFactory.putMapping("text/" + info.getName(), info
.getClassName());
}
catch (final Throwable t) {
log.warn("Could not register " + info.getName(), t);
}
}
/**
* Check whether the file was edited outside of this {@link EditorPane} and
* ask the user whether to reload.
*/
public void checkForOutsideChanges() {
final EditorPane editorPane = getEditorPane();
if (editorPane.wasChangedOutside()) {
reload("The file " + editorPane.getFile().getName() +
"was changed outside of the editor");
}
}
/**
* Adds a script template path that will be scanned by future TextEditor
* instances.
*
* @param path Resource path to scan for scripts.
*/
public static void addTemplatePath(final String path) {
TEMPLATE_PATHS.add(path);
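// e.g. (hypothetical path) addTemplatePath("my_templates") makes resources under
// /my_templates/... show up in the Templates menu of editors created afterwards.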
}
@SuppressWarnings("unused")
@EventHandler
private void onEvent(final ContextDisposingEvent e) {
if (isDisplayable()) dispose();
}
/**
* Loads the preferences for the JFrame from file
*/
public void loadPreferences() {
final Dimension dim = getSize();
// If a dimension is 0 then use the default dimension size
if (0 == dim.width) {
dim.width = DEFAULT_WINDOW_WIDTH;
}
if (0 == dim.height) {
dim.height = DEFAULT_WINDOW_HEIGHT;
}
setPreferredSize(new Dimension(prefService.getInt(WINDOW_WIDTH, dim.width),
prefService.getInt(WINDOW_HEIGHT, dim.height)));
}
/**
* Saves the window size to preferences.
* <p>
* Separated from savePreferences because we always want to save the window
* size when it's resized, however, we don't want to automatically save the
* font, tab size, etc. without the user pressing "Save Preferences"
* </p>
*/
public void saveWindowSizeToPrefs() {
final Dimension dim = getSize();
prefService.put(WINDOW_HEIGHT, dim.height);
prefService.put(WINDOW_WIDTH, dim.width);
}
public final RSyntaxTextArea getTextArea() {
return getEditorPane();
}
/**
* Get the currently selected tab.
*
* @return The currently selected tab. Never null.
*/
public TextEditorTab getTab() {
int index = tabbed.getSelectedIndex();
if (index < 0) {
// should not happen, but safety first.
if (tabbed.getTabCount() == 0) {
// should not happen either, but, again, safety first.
createNewDocument();
}
// Ensure the new document is returned - otherwise we would pass
// the negative index to the getComponentAt call below.
tabbed.setSelectedIndex(0);
index = 0;
}
return (TextEditorTab) tabbed.getComponentAt(index);
}
/**
* Get tab at provided index.
*
* @param index the index of the tab.
* @return the {@link TextEditorTab} at given index or <code>null</code>.
*/
public TextEditorTab getTab(final int index) {
return (TextEditorTab) tabbed.getComponentAt(index);
}
/**
* Return the {@link EditorPane} of the currently selected
* {@link TextEditorTab}.
*
* @return the current {@link EditorPane}. Never <code>null</code>.
*/
public EditorPane getEditorPane() {
return getTab().editorPane;
}
/**
* @return {@link ScriptLanguage} used in the current {@link EditorPane}.
*/
public ScriptLanguage getCurrentLanguage() {
return getEditorPane().getCurrentLanguage();
}
public JMenuItem addToMenu(final JMenu menu, final String menuEntry,
final int key, final int modifiers)
{
final JMenuItem item = new JMenuItem(menuEntry);
menu.add(item);
if (key != 0) item.setAccelerator(KeyStroke.getKeyStroke(key, modifiers));
item.addActionListener(this);
return item;
}
protected static class AcceleratorTriplet {
JMenuItem component;
int key, modifiers;
}
protected List<AcceleratorTriplet> defaultAccelerators =
new ArrayList<>();
public void addAccelerator(final JMenuItem component, final int key,
final int modifiers)
{
addAccelerator(component, key, modifiers, false);
}
public void addAccelerator(final JMenuItem component, final int key,
final int modifiers, final boolean record)
{
if (record) {
final AcceleratorTriplet triplet = new AcceleratorTriplet();
triplet.component = component;
triplet.key = key;
triplet.modifiers = modifiers;
defaultAccelerators.add(triplet);
}
final RSyntaxTextArea textArea = getTextArea();
if (textArea != null) addAccelerator(textArea, component, key, modifiers);
}
public void addAccelerator(final RSyntaxTextArea textArea,
final JMenuItem component, final int key, final int modifiers)
{
textArea.getInputMap().put(KeyStroke.getKeyStroke(key, modifiers),
component);
textArea.getActionMap().put(component, new AbstractAction() {
@Override
public void actionPerformed(final ActionEvent e) {
if (!component.isEnabled()) return;
final ActionEvent event = new ActionEvent(component, 0, "Accelerator");
TextEditor.this.actionPerformed(event);
}
});
}
public void addDefaultAccelerators(final RSyntaxTextArea textArea) {
for (final AcceleratorTriplet triplet : defaultAccelerators)
addAccelerator(textArea, triplet.component, triplet.key,
triplet.modifiers);
}
private JMenu getMenu(final JMenu root, final String menuItemPath,
final boolean createIfNecessary)
{
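// e.g. getMenu(templatesMenu, "Tutorials/Hello_World.py", true) descends into (or
// creates) the "Tutorials" submenu and returns it; the text after the last '/' is
// left for the caller to use as the item label.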
final int slash = menuItemPath.indexOf('/');
if (slash < 0) return root;
final String menuLabel = menuItemPath.substring(0, slash);
final String rest = menuItemPath.substring(slash + 1);
for (int i = 0; i < root.getItemCount(); i++) {
final JMenuItem item = root.getItem(i);
if (item instanceof JMenu && menuLabel.equals(item.getText())) {
return getMenu((JMenu) item, rest, createIfNecessary);
}
}
if (!createIfNecessary) return null;
final JMenu subMenu = new JMenu(menuLabel);
root.add(subMenu);
return getMenu(subMenu, rest, createIfNecessary);
}
/**
* Initializes the template menu.
* <p>
* Other components can add templates simply by providing scripts in their
* resources, identified by a path of the form
* {@code /script_templates/<menu path>/<menu label>}.
* </p>
*
* @param templatesMenu the top-level menu to populate
*/
private void addTemplates(final JMenu templatesMenu) {
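// Illustrative example (assuming adjustPath keeps the directory layout): a resource
// at /script_templates/Tutorials/Hello_World.py would typically appear as
// Templates > Tutorials > "Hello_World (Python)" and, in the secondary structure,
// under Templates > [by language] > Python > Tutorials.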
for (final String templatePath : TEMPLATE_PATHS) {
for (final Map.Entry<String, URL> entry : new TreeMap<>(
FileFunctions.findResources(null, templatePath)).entrySet())
{
final String ext = FileUtils.getExtension(entry.getKey());
// try to determine the scripting language
final ScriptLanguage lang = ext.isEmpty() ? null :
scriptService.getLanguageByExtension(ext);
final String langName = lang == null ? null : lang.getLanguageName();
final String langSuffix = lang == null ? null : " (" + langName + ")";
final String path = adjustPath(entry.getKey());
// create a human-readable label
final int labelIndex = path.lastIndexOf('/') + 1;
final String label = ext.isEmpty() ? path.substring(labelIndex) :
path.substring(labelIndex, path.length() - ext.length() - 1);
final ActionListener menuListener = new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
loadTemplate(entry.getValue());
}
};
// add script to the secondary language-sorted menu structure
if (langName != null) {
final String langPath = "[by language]/" + langName + "/" + path;
final JMenu langMenu = getMenu(templatesMenu, langPath, true);
final JMenuItem langItem = new JMenuItem(label);
langMenu.add(langItem);
langItem.addActionListener(menuListener);
}
// add script to the primary Templates menu structure
final JMenu menu = getMenu(templatesMenu, path, true);
final JMenuItem item = new JMenuItem(label + langSuffix);
menu.add(item);
item.addActionListener(menuListener);
}
}
}
/**
* Loads a template file from the given resource
*
* @param url The resource to load.
*/
public void loadTemplate(final String url) {
try {
loadTemplate(new URL(url));
}
catch (final Exception e) {
log.error(e);
error("The template '" + url + "' was not found.");
}
}
public void loadTemplate(final URL url) {
final String path = url.getPath();
final String ext = FileUtils.getExtension(path);
final ScriptLanguage language =
ext.isEmpty() ? null : scriptService.getLanguageByExtension(ext);
loadTemplate(url, language);
}
public void loadTemplate(final URL url, final ScriptLanguage language) {
createNewDocument();
try {
// Load the template
final InputStream in = url.openStream();
getTextArea().read(new BufferedReader(new InputStreamReader(in)), null);
if (language != null) {
setLanguage(language);
}
final String path = url.getPath();
setEditorPaneFileName(path.substring(path.lastIndexOf('/') + 1));
}
catch (final Exception e) {
e.printStackTrace();
error("The template '" + url + "' was not found.");
}
}
public void createNewDocument() {
open(null);
}
public void createNewDocument(final String title, final String text) {
open(null);
final EditorPane editorPane = getEditorPane();
editorPane.setText(text);
setEditorPaneFileName(title);
editorPane.setLanguageByFileName(title);
updateLanguageMenu(editorPane.getCurrentLanguage());
}
/**
* Open a new editor to edit the given file, with a templateFile if the file
* does not exist yet
*/
public void createNewFromTemplate(final File file, final File templateFile) {
open(file.exists() ? file : templateFile);
if (!file.exists()) {
final EditorPane editorPane = getEditorPane();
try {
editorPane.open(file);
}
catch (final IOException e) {
handleException(e);
}
editorPane.setLanguageByFileName(file.getName());
updateLanguageMenu(editorPane.getCurrentLanguage());
}
}
public boolean fileChanged() {
return getEditorPane().fileChanged();
}
public boolean handleUnsavedChanges() {
return handleUnsavedChanges(false);
}
public boolean handleUnsavedChanges(final boolean beforeCompiling) {
if (!fileChanged()) return true;
if (beforeCompiling && autoSave.getState()) {
save();
return true;
}
switch (JOptionPane.showConfirmDialog(this, "Do you want to save changes?")) {
case JOptionPane.NO_OPTION:
// Compiled languages should not progress if their source is unsaved
return !beforeCompiling;
case JOptionPane.YES_OPTION:
if (save()) return true;
}
return false;
}
@Override
public void actionPerformed(final ActionEvent ae) {
final Object source = ae.getSource();
if (source == newFile) createNewDocument();
else if (source == open) {
final EditorPane editorPane = getEditorPane();
final File defaultDir =
editorPane.getFile() != null ? editorPane.getFile().getParentFile()
: AppUtils.getBaseDirectory("imagej.dir", TextEditor.class, null);
final File file = openWithDialog(defaultDir);
if (file != null) new Thread() {
@Override
public void run() {
open(file);
}
}.start();
return;
}
else if (source == save) save();
else if (source == saveas) saveAs();
else if (source == makeJar) makeJar(false);
else if (source == makeJarWithSource) makeJar(true);
else if (source == compileAndRun) runText();
else if (source == compile) compile();
else if (source == runSelection) runText(true);
else if (source == nextError) new Thread() {
@Override
public void run() {
nextError(true);
}
}.start();
else if (source == previousError) new Thread() {
@Override
public void run() {
nextError(false);
}
}.start();
else if (source == kill) chooseTaskToKill();
else if (source == close) if (tabbed.getTabCount() < 2) processWindowEvent(new WindowEvent(
this, WindowEvent.WINDOW_CLOSING));
else {
if (!handleUnsavedChanges()) return;
int index = tabbed.getSelectedIndex();
removeTab(index);
if (index > 0) index--;
switchTo(index);
}
else if (source == cut) getTextArea().cut();
else if (source == copy) getTextArea().copy();
else if (source == paste) getTextArea().paste();
else if (source == undo) getTextArea().undoLastAction();
else if (source == redo) getTextArea().redoLastAction();
else if (source == find) findOrReplace(false);
else if (source == findNext) findDialog.searchOrReplace(false);
else if (source == findPrevious) findDialog.searchOrReplace(false, false);
else if (source == replace) findOrReplace(true);
else if (source == gotoLine) gotoLine();
else if (source == toggleBookmark) toggleBookmark();
else if (source == listBookmarks) listBookmarks();
else if (source == selectAll) {
getTextArea().setCaretPosition(0);
getTextArea().moveCaretPosition(getTextArea().getDocument().getLength());
}
else if (source == chooseFontSize) {
commandService.run(ChooseFontSize.class, true, "editor", this);
}
else if (source == chooseTabSize) {
commandService.run(ChooseTabSize.class, true, "editor", this);
}
else if (source == addImport) {
addImport(getSelectedClassNameOrAsk());
}
else if (source == removeUnusedImports) new TokenFunctions(getTextArea())
.removeUnusedImports();
else if (source == sortImports) new TokenFunctions(getTextArea())
.sortImports();
else if (source == removeTrailingWhitespace) new TokenFunctions(
getTextArea()).removeTrailingWhitespace();
else if (source == replaceTabsWithSpaces) getTextArea()
.convertTabsToSpaces();
else if (source == replaceSpacesWithTabs) getTextArea()
.convertSpacesToTabs();
else if (source == clearScreen) {
getTab().getScreen().setText("");
}
else if (source == zapGremlins) zapGremlins();
else if (source == savePreferences) {
getEditorPane().savePreferences();
}
else if (source == openHelp) openHelp(null);
else if (source == openHelpWithoutFrames) openHelp(null, false);
else if (source == openMacroFunctions) try {
new MacroFunctions(this).openHelp(getTextArea().getSelectedText());
}
catch (final IOException e) {
handleException(e);
}
else if (source == extractSourceJar) extractSourceJar();
else if (source == openSourceForClass) {
final String className = getSelectedClassNameOrAsk();
if (className != null) try {
final String path = new FileFunctions(this).getSourcePath(className);
if (path != null) open(new File(path));
else {
final String url = new FileFunctions(this).getSourceURL(className);
try {
platformService.open(new URL(url));
}
catch (final Throwable e) {
handleException(e);
}
}
}
catch (final ClassNotFoundException e) {
error("Could not open source for class " + className);
}
}
/* TODO
else if (source == showDiff) {
new Thread() {
public void run() {
EditorPane pane = getEditorPane();
new FileFunctions(TextEditor.this).showDiff(pane.file, pane.getGitDirectory());
}
}.start();
}
else if (source == commit) {
new Thread() {
public void run() {
EditorPane pane = getEditorPane();
new FileFunctions(TextEditor.this).commit(pane.file, pane.getGitDirectory());
}
}.start();
}
*/
else if (source == gitGrep) {
final String searchTerm = getTextArea().getSelectedText();
File searchRoot = getEditorPane().getFile();
if (searchRoot == null) {
error("File was not yet saved; no location known!");
return;
}
searchRoot = searchRoot.getParentFile();
commandService.run(GitGrep.class, true, "editor", this, "searchTerm",
searchTerm, "searchRoot", searchRoot);
}
else if (source == openInGitweb) {
final EditorPane editorPane = getEditorPane();
new FileFunctions(this).openInGitweb(editorPane.getFile(), editorPane
.getGitDirectory(), editorPane.getCaretLineNumber() + 1);
}
else if (source == increaseFontSize || source == decreaseFontSize) {
getEditorPane().increaseFontSize(
(float) (source == increaseFontSize ? 1.2 : 1 / 1.2));
updateTabAndFontSize(false);
}
else if (source == nextTab) switchTabRelative(1);
else if (source == previousTab) switchTabRelative(-1);
else if (handleTabsMenu(source)) return;
}
protected boolean handleTabsMenu(final Object source) {
if (!(source instanceof JMenuItem)) return false;
final JMenuItem item = (JMenuItem) source;
if (!tabsMenuItems.contains(item)) return false;
for (int i = tabsMenuTabsStart; i < tabsMenu.getItemCount(); i++)
if (tabsMenu.getItem(i) == item) {
switchTo(i - tabsMenuTabsStart);
return true;
}
return false;
}
@Override
public void stateChanged(final ChangeEvent e) {
final int index = tabbed.getSelectedIndex();
if (index < 0) {
setTitle("");
return;
}
final EditorPane editorPane = getEditorPane(index);
editorPane.requestFocus();
checkForOutsideChanges();
toggleWhiteSpaceLabeling.setSelected(editorPane.isWhitespaceVisible());
editorPane.setLanguageByFileName(editorPane.getFileName());
updateLanguageMenu(editorPane.getCurrentLanguage());
setTitle();
}
public EditorPane getEditorPane(final int index) {
return getTab(index).editorPane;
}
public void findOrReplace(final boolean doReplace) {
findDialog.setLocationRelativeTo(this);
// override the search pattern only if something is selected
final String selection = getTextArea().getSelectedText();
if (selection != null) findDialog.setSearchPattern(selection);
findDialog.show(doReplace);
}
public void gotoLine() {
final String line =
JOptionPane.showInputDialog(this, "Line:", "Goto line...",
JOptionPane.QUESTION_MESSAGE);
if (line == null) return;
try {
gotoLine(Integer.parseInt(line));
}
catch (final BadLocationException e) {
error("Line number out of range: " + line);
}
catch (final NumberFormatException e) {
error("Invalid line number: " + line);
}
}
public void gotoLine(final int line) throws BadLocationException {
getTextArea().setCaretPosition(getTextArea().getLineStartOffset(line - 1));
}
public void toggleBookmark() {
getEditorPane().toggleBookmark();
}
public void listBookmarks() {
final Vector<Bookmark> bookmarks = new Vector<>();
for (int i = 0; i < tabbed.getTabCount(); i++) {
final TextEditorTab tab = (TextEditorTab) tabbed.getComponentAt(i);
tab.editorPane.getBookmarks(tab, bookmarks);
}
final BookmarkDialog dialog = new BookmarkDialog(this, bookmarks);
dialog.setVisible(true);
}
public boolean reload() {
return reload("Reload the file?");
}
public boolean reload(final String message) {
final EditorPane editorPane = getEditorPane();
final File file = editorPane.getFile();
if (file == null || !file.exists()) return true;
final boolean modified = editorPane.fileChanged();
final String[] options = { "Reload", "Do not reload" };
if (modified) options[0] = "Reload (discarding changes)";
switch (JOptionPane.showOptionDialog(this, message, "Reload",
JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options,
options[0])) {
case 0:
try {
editorPane.open(file);
return true;
}
catch (final IOException e) {
error("Could not reload " + file.getPath());
}
updateLanguageMenu(editorPane.getCurrentLanguage());
break;
}
return false;
}
public static boolean isBinary(final File file) {
if (file == null) return false;
// heuristic: read the first up to 8000 bytes, and say that it is binary if
// it contains a NUL
try (final FileInputStream in = new FileInputStream(file)) {
int left = 8000;
final byte[] buffer = new byte[left];
while (left > 0) {
final int count = in.read(buffer, 0, left);
if (count < 0) break;
for (int i = 0; i < count; i++)
if (buffer[i] == 0) {
return true;
}
left -= count;
}
return false;
}
catch (final IOException e) {
return false;
}
}
/**
 * Open a new tab with some content; the language extension is like ".java",
 * ".py", etc.
 */
public TextEditorTab newTab(final String content, final String language) {
String lang = language;
final TextEditorTab tab = open(null);
if (null != lang && lang.length() > 0) {
lang = lang.trim().toLowerCase();
if ('.' != lang.charAt(0)) {
lang = "." + language;
}
tab.editorPane.setLanguage(scriptService.getLanguageByName(language));
}
if (null != content) {
tab.editorPane.setText(content);
}
return tab;
}
public TextEditorTab open(final File file) {
if (isBinary(file)) {
// TODO!
throw new RuntimeException("TODO: open image using IJ2");
// return null;
}
try {
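// Reuse the currently selected tab if it is still untouched (isNew()); otherwise create a fresh tab.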
TextEditorTab tab = (tabbed.getTabCount() == 0) ? null : getTab();
final boolean wasNew = tab != null && tab.editorPane.isNew();
if (!wasNew) {
tab = new TextEditorTab(this);
context.inject(tab.editorPane);
tab.editorPane.loadPreferences();
tab.editorPane.getDocument().addDocumentListener(this);
addDefaultAccelerators(tab.editorPane);
}
synchronized (tab.editorPane) { // tab is never null at this location.
tab.editorPane.open(file);
if (wasNew) {
final int index = tabbed.getSelectedIndex() + tabsMenuTabsStart;
tabsMenu.getItem(index).setText(tab.editorPane.getFileName());
}
else {
tabbed.addTab("", tab);
switchTo(tabbed.getTabCount() - 1);
tabsMenuItems.add(addToMenu(tabsMenu, tab.editorPane.getFileName(),
0, 0));
}
setEditorPaneFileName(tab.editorPane.getFile());
try {
updateTabAndFontSize(true);
}
catch (final NullPointerException e) {
/* ignore */
}
}
if (file != null) openRecent.add(file.getAbsolutePath());
updateLanguageMenu(tab.editorPane.getCurrentLanguage());
return tab;
}
catch (final FileNotFoundException e) {
e.printStackTrace();
error("The file '" + file + "' was not found.");
}
catch (final Exception e) {
e.printStackTrace();
error("There was an error while opening '" + file + "': " + e);
}
return null;
}
public boolean saveAs() {
final EditorPane editorPane = getEditorPane();
File file = editorPane.getFile();
if (file == null) {
final File ijDir =
AppUtils.getBaseDirectory("imagej.dir", TextEditor.class, null);
file = new File(ijDir, editorPane.getFileName());
}
final File fileToSave = uiService.chooseFile(file, FileWidget.SAVE_STYLE);
if (fileToSave == null) return false;
return saveAs(fileToSave.getAbsolutePath(), true);
}
public void saveAs(final String path) {
saveAs(path, true);
}
public boolean saveAs(final String path, final boolean askBeforeReplacing) {
final File file = new File(path);
if (file.exists() &&
askBeforeReplacing &&
JOptionPane.showConfirmDialog(this, "Do you want to replace " + path +
"?", "Replace " + path + "?", JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) return false;
if (!write(file)) return false;
setEditorPaneFileName(file);
openRecent.add(path);
return true;
}
public boolean save() {
final File file = getEditorPane().getFile();
if (file == null) {
return saveAs();
}
if (!write(file)) {
return false;
}
setTitle();
return true;
}
public boolean write(final File file) {
try {
getEditorPane().write(file);
return true;
}
catch (final IOException e) {
error("Could not save " + file.getName());
e.printStackTrace();
return false;
}
}
public boolean makeJar(final boolean includeSources) {
final File file = getEditorPane().getFile();
if ((file == null || isCompiled()) && !handleUnsavedChanges(true)) {
return false;
}
String name = getEditorPane().getFileName();
final String ext = FileUtils.getExtension(name);
if (!"".equals(ext)) name = name.substring(0, name.length() - ext.length());
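// ImageJ only recognizes plugins whose name contains an underscore, so make sure one is present.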
if (name.indexOf('_') < 0) name += "_";
name += ".jar";
final File selectedFile = uiService.chooseFile(file, FileWidget.SAVE_STYLE);
if (selectedFile == null) return false;
if (selectedFile.exists() &&
JOptionPane.showConfirmDialog(this, "Do you want to replace " +
selectedFile + "?", "Replace " + selectedFile + "?",
JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) return false;
try {
makeJar(selectedFile, includeSources);
return true;
}
catch (final IOException e) {
e.printStackTrace();
error("Could not write " + selectedFile + ": " + e.getMessage());
return false;
}
}
/**
* @throws IOException
*/
public void makeJar(final File file, final boolean includeSources)
throws IOException
{
if (!handleUnsavedChanges(true)) return;
final ScriptEngine interpreter = getCurrentLanguage().getScriptEngine();
if (interpreter instanceof JavaEngine) {
final JavaEngine java = (JavaEngine) interpreter;
final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
markCompileStart();
getTab().showErrors();
new Thread() {
@Override
public void run() {
java.makeJar(getEditorPane().getFile(), includeSources, file, errors);
errorScreen.insert("Compilation finished.\n", errorScreen
.getDocument().getLength());
markCompileEnd();
}
}.start();
}
}
static void getClasses(final File directory, final List<String> paths,
final List<String> names)
{
getClasses(directory, paths, names, "");
}
static void getClasses(final File directory, final List<String> paths,
final List<String> names, final String inPrefix)
{
String prefix = inPrefix;
if (!prefix.equals("")) prefix += "/";
for (final File file : directory.listFiles())
if (file.isDirectory()) getClasses(file, paths, names, prefix +
file.getName());
else {
paths.add(file.getAbsolutePath());
names.add(prefix + file.getName());
}
}
static void writeJarEntry(final JarOutputStream out, final String name,
final byte[] buf) throws IOException
{
try {
final JarEntry entry = new JarEntry(name);
out.putNextEntry(entry);
out.write(buf, 0, buf.length);
out.closeEntry();
}
catch (final ZipException e) {
e.printStackTrace();
throw new IOException(e.getMessage());
}
}
static byte[] readFile(final String fileName) throws IOException {
final File file = new File(fileName);
try (final InputStream in = new FileInputStream(file)) {
final byte[] buffer = new byte[(int) file.length()];
// A single read() may return fewer bytes than requested, so keep reading
// until the buffer is full or the stream ends.
int offset = 0;
while (offset < buffer.length) {
final int count = in.read(buffer, offset, buffer.length - offset);
if (count < 0) break;
offset += count;
}
return buffer;
}
}
static void deleteRecursively(final File directory) {
for (final File file : directory.listFiles())
if (file.isDirectory()) deleteRecursively(file);
else file.delete();
directory.delete();
}
void setLanguage(final ScriptLanguage language) {
setLanguage(language, false);
}
void setLanguage(final ScriptLanguage language, final boolean addHeader) {
getEditorPane().setLanguage(language, addHeader);
setTitle();
updateLanguageMenu(language);
updateTabAndFontSize(true);
}
void updateLanguageMenu(final ScriptLanguage language) {
JMenuItem item = languageMenuItems.get(language);
if (item == null) item = noneLanguageItem;
if (!item.isSelected()) {
item.setSelected(true);
}
final boolean isRunnable = item != noneLanguageItem;
final boolean isCompileable =
language != null && language.isCompiledLanguage();
runMenu.setVisible(isRunnable);
compileAndRun.setText(isCompileable ? "Compile and Run" : "Run");
compileAndRun.setEnabled(isRunnable);
runSelection.setVisible(isRunnable && !isCompileable);
compile.setVisible(isCompileable);
autoSave.setVisible(isCompileable);
makeJar.setVisible(isCompileable);
makeJarWithSource.setVisible(isCompileable);
final boolean isJava =
language != null && language.getLanguageName().equals("Java");
addImport.setVisible(isJava);
removeUnusedImports.setVisible(isJava);
sortImports.setVisible(isJava);
openSourceForMenuItem.setVisible(isJava);
final boolean isMacro =
language != null && language.getLanguageName().equals("ImageJ Macro");
openMacroFunctions.setVisible(isMacro);
openSourceForClass.setVisible(!isMacro);
openHelp.setVisible(!isMacro && isRunnable);
openHelpWithoutFrames.setVisible(!isMacro && isRunnable);
nextError.setVisible(!isMacro && isRunnable);
previousError.setVisible(!isMacro && isRunnable);
final boolean isInGit = getEditorPane().getGitDirectory() != null;
gitMenu.setVisible(isInGit);
updateTabAndFontSize(false);
}
public void updateTabAndFontSize(final boolean setByLanguage) {
final EditorPane pane = getEditorPane();
if (pane.getCurrentLanguage() == null) return;
if (setByLanguage) {
if (pane.getCurrentLanguage().getLanguageName().equals("Python")) {
pane.setTabSize(4);
}
else {
// set tab size to current preferences.
pane.resetTabSize();
}
}
final int tabSize = pane.getTabSize();
boolean defaultSize = false;
for (int i = 0; i < tabSizeMenu.getItemCount(); i++) {
final JMenuItem item = tabSizeMenu.getItem(i);
if (item == chooseTabSize) {
item.setSelected(!defaultSize);
item.setText("Other" + (defaultSize ? "" : " (" + tabSize + ")") +
"...");
}
else if (tabSize == Integer.parseInt(item.getText())) {
item.setSelected(true);
defaultSize = true;
}
}
final int fontSize = (int) pane.getFontSize();
defaultSize = false;
for (int i = 0; i < fontSizeMenu.getItemCount(); i++) {
final JMenuItem item = fontSizeMenu.getItem(i);
if (item == chooseFontSize) {
item.setSelected(!defaultSize);
item.setText("Other" + (defaultSize ? "" : " (" + fontSize + ")") +
"...");
continue;
}
String label = item.getText();
if (label.endsWith(" pt")) label = label.substring(0, label.length() - 3);
if (fontSize == Integer.parseInt(label)) {
item.setSelected(true);
defaultSize = true;
}
}
wrapLines.setState(pane.getLineWrap());
tabsEmulated.setState(pane.getTabsEmulated());
}
public void setEditorPaneFileName(final String baseName) {
getEditorPane().setFileName(baseName);
}
public void setEditorPaneFileName(final File file) {
final EditorPane editorPane = getEditorPane();
editorPane.setFileName(file);
// update language menu
updateLanguageMenu(editorPane.getCurrentLanguage());
updateGitDirectory();
}
void setTitle() {
final EditorPane editorPane = getEditorPane();
final boolean fileChanged = editorPane.fileChanged();
final String fileName = editorPane.getFileName();
final String title =
(fileChanged ? "*" : "") + fileName +
(executingTasks.isEmpty() ? "" : " (Running)");
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
setTitle(title); // to the main window
// Update all tabs: could have changed
for (int i = 0; i < tabbed.getTabCount(); i++)
tabbed.setTitleAt(i, ((TextEditorTab) tabbed.getComponentAt(i))
.getTitle());
}
});
}
@Override
public synchronized void setTitle(final String title) {
super.setTitle(title);
final int index = tabsMenuTabsStart + tabbed.getSelectedIndex();
if (index < tabsMenu.getItemCount()) {
final JMenuItem item = tabsMenu.getItem(index);
if (item != null) item.setText(title);
}
}
private final ArrayList<Executer> executingTasks = new ArrayList<>();
/**
* Generic Thread that keeps a starting time stamp, sets the priority to
* normal and starts itself.
*/
public abstract class Executer extends ThreadGroup {
JTextAreaWriter output, errors;
Executer(final JTextAreaWriter output, final JTextAreaWriter errors) {
super("Script Editor Run :: " + new Date().toString());
this.output = output;
this.errors = errors;
// Store itself for later
executingTasks.add(this);
setTitle();
// Enable kill menu
kill.setEnabled(true);
// Fork a task, as a part of this ThreadGroup
new Thread(this, getName()) {
{
setPriority(Thread.NORM_PRIORITY);
start();
}
@Override
public void run() {
try {
execute();
// Wait until any children threads die:
int activeCount = getThreadGroup().activeCount();
while (activeCount > 1) {
if (isInterrupted()) break;
try {
Thread.sleep(500);
final List<Thread> ts = getAllThreads();
activeCount = ts.size();
if (activeCount <= 1) break;
log.debug("Waiting for " + ts.size() + " threads to die");
int count_zSelector = 0;
for (final Thread t : ts) {
if (t.getName().equals("zSelector")) {
count_zSelector++;
}
log.debug("THREAD: " + t.getName());
}
if (activeCount == count_zSelector + 1) {
// Do not wait on the stack slice selector thread.
break;
}
}
catch (final InterruptedException ie) {
/* ignore */
}
}
}
catch (final Throwable t) {
handleException(t);
}
finally {
executingTasks.remove(Executer.this);
try {
if (null != output) output.shutdown();
if (null != errors) errors.shutdown();
}
catch (final Exception e) {
handleException(e);
}
// Leave kill menu item enabled if other tasks are running
kill.setEnabled(executingTasks.size() > 0);
setTitle();
}
}
};
}
/** The method to extend, that will do the actual work. */
abstract void execute();
/** Fetch a list of all threads from all thread subgroups, recursively. */
List<Thread> getAllThreads() {
final ArrayList<Thread> threads = new ArrayList<>();
// From all subgroups:
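// Over-allocate the array: activeGroupCount() is only an estimate and enumerate() silently truncates if it runs out of space.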
final ThreadGroup[] tgs = new ThreadGroup[activeGroupCount() * 2 + 100];
this.enumerate(tgs, true);
for (final ThreadGroup tg : tgs) {
if (null == tg) continue;
final Thread[] ts = new Thread[tg.activeCount() * 2 + 100];
tg.enumerate(ts);
for (final Thread t : ts) {
if (null == t) continue;
threads.add(t);
}
}
// And from this group:
final Thread[] ts = new Thread[activeCount() * 2 + 100];
this.enumerate(ts);
for (final Thread t : ts) {
if (null == t) continue;
threads.add(t);
}
return threads;
}
/**
* Totally destroy/stop all threads in this and all recursive thread
* subgroups. Will remove itself from the executingTasks list.
*/
@SuppressWarnings("deprecation")
void obliterate() {
try {
// Stop printing to the screen
if (null != output) output.shutdownNow();
if (null != errors) errors.shutdownNow();
}
catch (final Exception e) {
e.printStackTrace();
}
for (final Thread thread : getAllThreads()) {
try {
thread.interrupt();
Thread.yield(); // give it a chance
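// Last resort: the deprecated stop() is unsafe in general, but it is the only way to halt user code that ignores interrupts.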
thread.stop();
}
catch (final Throwable t) {
t.printStackTrace();
}
}
executingTasks.remove(this);
}
@Override
public String toString() {
return getName();
}
}
/** Returns a list of currently executing tasks */
public List<Executer> getExecutingTasks() {
return executingTasks;
}
public void kill(final Executer executer) {
for (int i = 0; i < tabbed.getTabCount(); i++) {
final TextEditorTab tab = (TextEditorTab) tabbed.getComponentAt(i);
if (executer == tab.getExecuter()) {
tab.kill();
break;
}
}
}
/**
* Query the list of running scripts and provide a dialog to choose one and
* kill it.
*/
public void chooseTaskToKill() {
if (executingTasks.size() == 0) {
error("\nNo running scripts\n");
return;
}
commandService.run(KillScript.class, true, "editor", this);
}
/** Run the text in the text area without compiling it; compiled languages (such as Java) are run via runScript() instead. */
public void runText() {
runText(false);
}
public void runText(final boolean selectionOnly) {
if (isCompiled()) {
if (selectionOnly) {
error("Cannot run selection of compiled language!");
return;
}
if (handleUnsavedChanges(true)) runScript();
else write("Compiled languages must be saved before they can be run.");
return;
}
final ScriptLanguage currentLanguage = getCurrentLanguage();
if (currentLanguage == null) {
error("Select a language first!");
// TODO guess the language, if possible.
return;
}
markCompileStart();
try {
final TextEditorTab tab = getTab();
tab.showOutput();
execute(selectionOnly);
}
catch (final Throwable t) {
t.printStackTrace();
}
}
/** Invoke in the context of the event dispatch thread. */
private void execute(final boolean selectionOnly) throws IOException {
final TextEditorTab tab = getTab();
tab.prepare();
final JTextAreaWriter output = new JTextAreaWriter(tab.screen, log);
final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
final File file = getEditorPane().getFile();
// Pipe current text into the runScript:
final PipedInputStream pi = new PipedInputStream();
final PipedOutputStream po = new PipedOutputStream(pi);
// The Executer creates a Thread that
// does the reading from PipedInputStream
tab.setExecutor(new Executer(output, errors) {
@Override
public void execute() {
try {
evalScript(file == null ? getEditorPane().getFileName() : file
.getAbsolutePath(), new InputStreamReader(pi), output, errors);
output.flush();
errors.flush();
markCompileEnd();
}
catch (final Throwable t) {
output.flush();
errors.flush();
if (t instanceof ScriptException && t.getCause() != null &&
t.getCause().getClass().getName().endsWith("CompileError"))
{
errorScreen.append("Compilation failed");
tab.showErrors();
}
else {
handleException(t);
}
}
finally {
tab.restore();
}
}
});
// Write into PipedOutputStream
// from another Thread
try {
final String text;
if (selectionOnly) {
final String selected = tab.getEditorPane().getSelectedText();
if (selected == null) {
error("Selection required!");
text = null;
}
else text = selected + "\n"; // Ensure code blocks are terminated
}
else {
text = tab.getEditorPane().getText();
}
new Thread() {
{
setPriority(Thread.NORM_PRIORITY);
}
@Override
public void run() {
try (final PrintWriter pw = new PrintWriter(po)) {
pw.write(text);
pw.flush(); // will lock and wait in some cases
}
}
}.start();
}
catch (final Throwable t) {
t.printStackTrace();
}
finally {
// Re-enable when all text to send has been sent
tab.getEditorPane().setEditable(true);
}
}
public void runScript() {
if (isCompiled()) getTab().showErrors();
else getTab().showOutput();
markCompileStart();
final JTextAreaWriter output = new JTextAreaWriter(getTab().screen, log);
final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
final File file = getEditorPane().getFile();
new TextEditor.Executer(output, errors) {
@Override
public void execute() {
try (final Reader reader = evalScript(getEditorPane().getFile()
.getPath(), new FileReader(file), output, errors))
{
output.flush();
errors.flush();
markCompileEnd();
}
catch (final Throwable e) {
handleException(e);
}
}
};
}
public void compile() {
if (!handleUnsavedChanges(true)) return;
final ScriptEngine interpreter = getCurrentLanguage().getScriptEngine();
if (interpreter instanceof JavaEngine) {
final JavaEngine java = (JavaEngine) interpreter;
final JTextAreaWriter errors = new JTextAreaWriter(errorScreen, log);
markCompileStart();
getTab().showErrors();
new Thread() {
@Override
public void run() {
java.compile(getEditorPane().getFile(), errors);
errorScreen.insert("Compilation finished.\n", errorScreen
.getDocument().getLength());
markCompileEnd();
}
}.start();
}
}
public String getSelectedTextOrAsk(final String label) {
String selection = getTextArea().getSelectedText();
if (selection == null || selection.indexOf('\n') >= 0) {
selection =
JOptionPane.showInputDialog(this, label + ":", label + "...",
JOptionPane.QUESTION_MESSAGE);
if (selection == null) return null;
}
return selection;
}
public String getSelectedClassNameOrAsk() {
String className = getSelectedTextOrAsk("Class name");
if (className != null) className = className.trim();
return className;
}
private static void append(final JTextArea textArea, final String text) {
final int length = textArea.getDocument().getLength();
textArea.insert(text, length);
textArea.setCaretPosition(length);
}
public void markCompileStart() {
errorHandler = null;
final String started =
"Started " + getEditorPane().getFileName() + " at " + new Date() + "\n";
final int offset = errorScreen.getDocument().getLength();
append(errorScreen, started);
append(getTab().screen, started);
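// Remember where this run's output starts so that markCompileEnd() can detect new errors and scroll to them.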
compileStartOffset = errorScreen.getDocument().getLength();
try {
compileStartPosition = errorScreen.getDocument().createPosition(offset);
}
catch (final BadLocationException e) {
handleException(e);
}
ExceptionHandler.addThread(Thread.currentThread(), this);
}
public void markCompileEnd() {
if (errorHandler == null) {
errorHandler =
new ErrorHandler(getCurrentLanguage(), errorScreen,
compileStartPosition.getOffset());
if (errorHandler.getErrorCount() > 0) getTab().showErrors();
}
if (compileStartOffset != errorScreen.getDocument().getLength()) getTab()
.showErrors();
if (getTab().showingErrors) {
errorHandler.scrollToVisible(compileStartOffset);
}
}
public boolean nextError(final boolean forward) {
if (errorHandler != null && errorHandler.nextError(forward)) try {
File file = new File(errorHandler.getPath());
if (!file.isAbsolute()) file = getFileForBasename(file.getName());
errorHandler.markLine();
switchTo(file, errorHandler.getLine());
getTab().showErrors();
errorScreen.invalidate();
return true;
}
catch (final Exception e) {
handleException(e);
}
return false;
}
public void switchTo(final String path, final int lineNumber)
throws IOException
{
switchTo(new File(path).getCanonicalFile(), lineNumber);
}
public void switchTo(final File file, final int lineNumber) {
if (!editorPaneContainsFile(getEditorPane(), file)) switchTo(file);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
try {
gotoLine(lineNumber);
}
catch (final BadLocationException e) {
// ignore
}
}
});
}
public void switchTo(final File file) {
for (int i = 0; i < tabbed.getTabCount(); i++)
if (editorPaneContainsFile(getEditorPane(i), file)) {
switchTo(i);
return;
}
open(file);
}
public void switchTo(final int index) {
if (index == tabbed.getSelectedIndex()) return;
tabbed.setSelectedIndex(index);
}
private void switchTabRelative(final int delta) {
final int count = tabbed.getTabCount();
int index = ((tabbed.getSelectedIndex() + delta) % count);
if (index < 0) {
index += count;
}
switchTo(index);
}
private void removeTab(final int index) {
final int menuItemIndex = index + tabsMenuTabsStart;
tabbed.remove(index);
tabsMenuItems.remove(tabsMenu.getItem(menuItemIndex));
tabsMenu.remove(menuItemIndex);
}
boolean editorPaneContainsFile(final EditorPane editorPane, final File file) {
try {
return file != null && editorPane != null &&
editorPane.getFile() != null &&
file.getCanonicalFile().equals(editorPane.getFile().getCanonicalFile());
}
catch (final IOException e) {
return false;
}
}
public File getFile() {
return getEditorPane().getFile();
}
public File getFileForBasename(final String baseName) {
File file = getFile();
if (file != null && file.getName().equals(baseName)) return file;
for (int i = 0; i < tabbed.getTabCount(); i++) {
file = getEditorPane(i).getFile();
if (file != null && file.getName().equals(baseName)) return file;
}
return null;
}
/** Updates the git directory to the git directory of the current file. */
private void updateGitDirectory() {
final EditorPane editorPane = getEditorPane();
editorPane.setGitDirectory(new FileFunctions(this)
.getGitDirectory(editorPane.getFile()));
}
public void addImport(final String className) {
if (className != null) {
new TokenFunctions(getTextArea()).addImport(className.trim());
}
}
public void openHelp(final String className) {
openHelp(className, true);
}
/**
* @param className
* @param withFrames
*/
public void openHelp(final String className, final boolean withFrames) {
if (className == null) {
// FIXME: This cannot be right.
getSelectedClassNameOrAsk();
}
}
public void extractSourceJar() {
final File file = openWithDialog(null);
if (file != null) extractSourceJar(file);
}
public void extractSourceJar(final File file) {
try {
final FileFunctions functions = new FileFunctions(this);
final File workspace =
uiService.chooseFile(new File(System.getProperty("user.home")),
FileWidget.DIRECTORY_STYLE);
if (workspace == null) return;
final List<String> paths =
functions.extractSourceJar(file.getAbsolutePath(), workspace);
for (final String path : paths)
if (!functions.isBinaryFile(path)) {
open(new File(path));
final EditorPane pane = getEditorPane();
new TokenFunctions(pane).removeTrailingWhitespace();
if (pane.fileChanged()) save();
}
}
catch (final IOException e) {
error("There was a problem opening " + file + ": " + e.getMessage());
}
}
/* extensionMustMatch == false means extension must not match */
private File openWithDialog(final File defaultDir) {
return uiService.chooseFile(defaultDir, FileWidget.OPEN_STYLE);
}
/**
* Write a message to the output screen
*
* @param message The text to write
*/
public void write(String message) {
final TextEditorTab tab = getTab();
if (!message.endsWith("\n")) message += "\n";
tab.screen.insert(message, tab.screen.getDocument().getLength());
}
public void writeError(String message) {
getTab().showErrors();
if (!message.endsWith("\n")) message += "\n";
errorScreen.insert(message, errorScreen.getDocument().getLength());
}
private void error(final String message) {
JOptionPane.showMessageDialog(this, message);
}
public void handleException(final Throwable e) {
handleException(e, errorScreen);
getTab().showErrors();
}
public static void
handleException(final Throwable e, final JTextArea textArea)
{
final CharArrayWriter writer = new CharArrayWriter();
try (final PrintWriter out = new PrintWriter(writer)) {
e.printStackTrace(out);
for (Throwable cause = e.getCause(); cause != null; cause =
cause.getCause())
{
out.write("Caused by: ");
cause.printStackTrace(out);
}
}
textArea.append(writer.toString());
}
/**
 * Removes invalid characters and shows a dialog.
 *
 * @return The number of invalid characters found.
 */
public int zapGremlins() {
final int count = getEditorPane().zapGremlins();
final String msg =
count > 0 ? "Zap Gremlins converted " + count +
" invalid characters to spaces" : "No invalid characters found!";
JOptionPane.showMessageDialog(this, msg);
return count;
}
// -- Helper methods --
private boolean isCompiled() {
final ScriptLanguage language = getCurrentLanguage();
if (language == null) return false;
return language.isCompiledLanguage();
}
private Reader evalScript(final String filename, Reader reader,
final Writer output, final Writer errors) throws ModuleException
{
final ScriptLanguage language = getCurrentLanguage();
if (respectAutoImports) {
reader =
DefaultAutoImporters.prefixAutoImports(context, language, reader,
errors);
}
// create script module for execution
final ScriptInfo info = new ScriptInfo(context, filename, reader);
final ScriptModule module = info.createModule();
context.inject(module);
// use the currently selected language to execute the script
module.setLanguage(language);
// map stdout and stderr to the UI
module.setOutputWriter(output);
module.setErrorWriter(errors);
// execute the script
try {
moduleService.run(module, true).get();
}
catch (final InterruptedException e) {
error("Interrupted");
}
catch (final ExecutionException e) {
log.error(e);
}
return reader;
}
private String adjustPath(final String path) {
return path.replace('_', ' ');
}
@Override
public boolean confirmClose() {
while (tabbed.getTabCount() > 0) {
if (!handleUnsavedChanges()) return false;
final int index = tabbed.getSelectedIndex();
removeTab(index);
}
return true;
}
@Override
public void insertUpdate(final DocumentEvent e) {
setTitle();
checkForOutsideChanges();
}
@Override
public void removeUpdate(final DocumentEvent e) {
setTitle();
checkForOutsideChanges();
}
@Override
public void changedUpdate(final DocumentEvent e) {
setTitle();
}
}
|
package net.kwaz.chicago.pig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import java.io.IOException;
public class GeneratePigInput extends Configured implements Tool {
@Parameter(names="-i", description="Input Path (Sequence File)", required=true)
private String inputPathArg = "";
@Parameter(names="-o", description="Output Path", required=true)
private String outputOutputPathArg = "";
@Parameter(names="-h", help=true)
private boolean help = false;
public static void main(String[] args) throws Exception {
System.exit(
ToolRunner.run(
new GeneratePigInput(),
args
)
);
}
@Override
public int run(String[] arg0) throws Exception {
JCommander commander = new JCommander(this, arg0);
if (help) {
commander.usage();
return 0;
}
Path inputPath = new Path(inputPathArg);
Path outputPath = new Path(outputOutputPathArg);
Job job = Job.getInstance(getConf(), "Generate Pig Input - " + inputPath + " -> " + outputPath);
job.setJarByClass(getClass());
job.getConfiguration().set("mapreduce.job.queuename", "kwaz-queue");
job.setInputFormatClass(SequenceFileInputFormat.class);
SequenceFileInputFormat.setInputPaths(job, inputPath);
job.setMapperClass(GeneratePigInputMapper.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(NullWritable.class);
job.setNumReduceTasks(0);
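// Map-only job: the mapper emits one line of text per record, which becomes the Pig input.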
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(NullWritable.class);
job.setOutputFormatClass(TextOutputFormat.class);
TextOutputFormat.setOutputPath(job, outputPath);
addDependenciesToClasspath(new Path("dependencies"), job);
return job.waitForCompletion(true) ? 0 : 1;
}
private void addDependenciesToClasspath(Path directory, Job job) throws IOException {
FileStatus[] files;
try (FileSystem fs = FileSystem.get(job.getConfiguration())) {
files = fs.listStatus(directory);
}
// Register each dependency jar on the task classpath via the distributed cache.
if (files != null) {
for (FileStatus file : files) {
job.addFileToClassPath(file.getPath());
}
}
}
}
|
package net.spy.memcached;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ClosedSelectorException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import net.spy.memcached.compat.SpyThread;
import net.spy.memcached.compat.log.LoggerFactory;
import net.spy.memcached.ops.KeyedOperation;
import net.spy.memcached.ops.Operation;
import net.spy.memcached.ops.OperationException;
import net.spy.memcached.ops.OperationState;
import net.spy.memcached.ops.TapOperation;
import net.spy.memcached.ops.VBucketAware;
import net.spy.memcached.protocol.binary.TapAckOperationImpl;
import net.spy.memcached.util.StringUtils;
/**
* Connection to a cluster of memcached servers.
*/
public class MemcachedConnection extends SpyThread {
// The number of empty selects we'll allow before assuming we may have
// missed one and should check the current selectors. This generally
// indicates a bug, but we'll check it nonetheless.
private static final int DOUBLE_CHECK_EMPTY = 256;
// The number of empty selects we'll allow before blowing up. It's too
// easy to write a bug that causes it to loop uncontrollably. This helps
// find those bugs and often works around them.
private static final int EXCESSIVE_EMPTY = 0x1000000;
protected volatile boolean shutDown = false;
// If true, optimization will collapse multiple sequential get ops
private final boolean shouldOptimize;
protected Selector selector = null;
protected final NodeLocator locator;
protected final FailureMode failureMode;
// maximum amount of time to wait between reconnect attempts
private final long maxDelay;
private int emptySelects = 0;
private final int bufSize;
private final ConnectionFactory connectionFactory;
// AddedQueue is used to track the QueueAttachments for which operations
// have recently been queued.
protected final ConcurrentLinkedQueue<MemcachedNode> addedQueue;
// reconnectQueue contains the attachments that need to be reconnected
// The key is the time at which they are eligible for reconnect
private final SortedMap<Long, MemcachedNode> reconnectQueue;
protected volatile boolean running = true;
private final Collection<ConnectionObserver> connObservers =
new ConcurrentLinkedQueue<ConnectionObserver>();
private final OperationFactory opFact;
private final int timeoutExceptionThreshold;
private final Collection<Operation> retryOps;
protected final ConcurrentLinkedQueue<MemcachedNode> nodesToShutdown;
/**
 * Construct a memcached connection.
 *
 * @param bufSize the size of the buffer used for reading from the server
 * @param f the factory that will provide an operation queue
 * @param a the addresses of the servers to connect to
 * @param obs the initial set of connection observers
 * @param fm the failure mode to apply when a node becomes unavailable
 * @param opfactory the factory used to create and clone operations
 *
 * @throws IOException if a connection attempt fails early
 */
public MemcachedConnection(int bufSize, ConnectionFactory f,
List<InetSocketAddress> a, Collection<ConnectionObserver> obs,
FailureMode fm, OperationFactory opfactory) throws IOException {
connObservers.addAll(obs);
reconnectQueue = new TreeMap<Long, MemcachedNode>();
addedQueue = new ConcurrentLinkedQueue<MemcachedNode>();
failureMode = fm;
shouldOptimize = f.shouldOptimize();
maxDelay = f.getMaxReconnectDelay();
opFact = opfactory;
timeoutExceptionThreshold = f.getTimeoutExceptionThreshold();
selector = Selector.open();
retryOps = new ArrayList<Operation>();
nodesToShutdown = new ConcurrentLinkedQueue<MemcachedNode>();
this.bufSize = bufSize;
this.connectionFactory = f;
List<MemcachedNode> connections = createConnections(a);
locator = f.createLocator(connections);
setName("Memcached IO over " + this);
setDaemon(f.isDaemon());
start();
}
protected List<MemcachedNode> createConnections(
final Collection<InetSocketAddress> a) throws IOException {
List<MemcachedNode> connections = new ArrayList<MemcachedNode>(a.size());
for (SocketAddress sa : a) {
SocketChannel ch = SocketChannel.open();
ch.configureBlocking(false);
MemcachedNode qa =
this.connectionFactory.createMemcachedNode(sa, ch, bufSize);
int ops = 0;
ch.socket().setTcpNoDelay(!this.connectionFactory.useNagleAlgorithm());
// Initially I had attempted to skirt this by queueing every
// connect, but it considerably slowed down start time.
try {
if (ch.connect(sa)) {
getLogger().info("Connected to %s immediately", qa);
connected(qa);
} else {
getLogger().info("Added %s to connect queue", qa);
ops = SelectionKey.OP_CONNECT;
}
qa.setSk(ch.register(selector, ops, qa));
assert ch.isConnected()
|| qa.getSk().interestOps() == SelectionKey.OP_CONNECT
: "Not connected, and not wanting to connect";
} catch (SocketException e) {
getLogger().warn("Socket error on initial connect", e);
queueReconnect(qa);
}
connections.add(qa);
}
return connections;
}
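// Consistency check used only from assertions: verifies that each node's interest ops match its pending reads/writes. Always returns true so it can live inside an assert.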
private boolean selectorsMakeSense() {
for (MemcachedNode qa : locator.getAll()) {
if (qa.getSk() != null && qa.getSk().isValid()) {
if (qa.getChannel().isConnected()) {
int sops = qa.getSk().interestOps();
int expected = 0;
if (qa.hasReadOp()) {
expected |= SelectionKey.OP_READ;
}
if (qa.hasWriteOp()) {
expected |= SelectionKey.OP_WRITE;
}
if (qa.getBytesRemainingToWrite() > 0) {
expected |= SelectionKey.OP_WRITE;
}
assert sops == expected : "Invalid ops: " + qa + ", expected "
+ expected + ", got " + sops;
} else {
int sops = qa.getSk().interestOps();
assert sops == SelectionKey.OP_CONNECT
: "Not connected, and not watching for connect: " + sops;
}
}
}
getLogger().debug("Checked the selectors.");
return true;
}
/**
* MemcachedClient calls this method to handle IO over the connections.
*/
public void handleIO() throws IOException {
if (shutDown) {
throw new IOException("No IO while shut down");
}
// Deal with all of the stuff that's been added, but may not be marked
// writable.
handleInputQueue();
getLogger().debug("Done dealing with queue.");
long delay = 0;
if (!reconnectQueue.isEmpty()) {
long now = System.currentTimeMillis();
long then = reconnectQueue.firstKey();
delay = Math.max(then - now, 1);
}
getLogger().debug("Selecting with delay of %sms", delay);
assert selectorsMakeSense() : "Selectors don't make sense.";
int selected = selector.select(delay);
Set<SelectionKey> selectedKeys = selector.selectedKeys();
if (selectedKeys.isEmpty() && !shutDown) {
getLogger().debug("No selectors ready, interrupted: "
+ Thread.interrupted());
if (++emptySelects > DOUBLE_CHECK_EMPTY) {
for (SelectionKey sk : selector.keys()) {
getLogger().debug("%s has %s, interested in %s", sk, sk.readyOps(),
sk.interestOps());
if (sk.readyOps() != 0) {
getLogger().debug("%s has a ready op, handling IO", sk);
handleIO(sk);
} else {
lostConnection((MemcachedNode) sk.attachment());
}
}
assert emptySelects < EXCESSIVE_EMPTY : "Too many empty selects";
}
} else {
getLogger().debug("Selected %d, selected %d keys", selected,
selectedKeys.size());
emptySelects = 0;
for (SelectionKey sk : selectedKeys) {
handleIO(sk);
}
selectedKeys.clear();
}
// see if any connections blew up with large number of timeouts
for (SelectionKey sk : selector.keys()) {
MemcachedNode mn = (MemcachedNode) sk.attachment();
if (mn.getContinuousTimeout() > timeoutExceptionThreshold) {
getLogger().warn("%s exceeded continuous timeout threshold", sk);
lostConnection(mn);
}
}
if (!shutDown && !reconnectQueue.isEmpty()) {
attemptReconnects();
}
// rehash any operations that are in retry state
redistributeOperations(retryOps);
retryOps.clear();
// try to shutdown odd nodes
for (MemcachedNode qa : nodesToShutdown) {
if (!addedQueue.contains(qa)) {
nodesToShutdown.remove(qa);
Collection<Operation> notCompletedOperations = qa.destroyInputQueue();
if (qa.getChannel() != null) {
qa.getChannel().close();
qa.setSk(null);
if (qa.getBytesRemainingToWrite() > 0) {
getLogger().warn("Shut down with %d bytes remaining to write",
qa.getBytesRemainingToWrite());
}
getLogger().debug("Shut down channel %s", qa.getChannel());
}
redistributeOperations(notCompletedOperations);
}
}
}
// Handle any requests that have been made against the client.
private void handleInputQueue() {
if (!addedQueue.isEmpty()) {
getLogger().debug("Handling queue");
// If there's stuff in the added queue. Try to process it.
Collection<MemcachedNode> toAdd = new HashSet<MemcachedNode>();
// Transfer the queue into a hashset. There are very likely more
// additions than there are nodes.
Collection<MemcachedNode> todo = new HashSet<MemcachedNode>();
MemcachedNode qaNode = null;
while ((qaNode = addedQueue.poll()) != null) {
todo.add(qaNode);
}
// Now process the queue.
for (MemcachedNode qa : todo) {
boolean readyForIO = false;
if (qa.isActive()) {
if (qa.getCurrentWriteOp() != null) {
readyForIO = true;
getLogger().debug("Handling queued write %s", qa);
}
} else {
toAdd.add(qa);
}
qa.copyInputQueue();
if (readyForIO) {
try {
if (qa.getWbuf().hasRemaining()) {
handleWrites(qa.getSk(), qa);
}
} catch (IOException e) {
getLogger().warn("Exception handling write", e);
lostConnection(qa);
}
}
qa.fixupOps();
}
addedQueue.addAll(toAdd);
}
}
/**
* Add a connection observer.
*
* @return whether the observer was successfully added
*/
public boolean addObserver(ConnectionObserver obs) {
return connObservers.add(obs);
}
/**
* Remove a connection observer.
*
* @return true if the observer existed and now doesn't
*/
public boolean removeObserver(ConnectionObserver obs) {
return connObservers.remove(obs);
}
private void connected(MemcachedNode qa) {
assert qa.getChannel().isConnected() : "Not connected.";
int rt = qa.getReconnectCount();
qa.connected();
for (ConnectionObserver observer : connObservers) {
observer.connectionEstablished(qa.getSocketAddress(), rt);
}
}
private void lostConnection(MemcachedNode qa) {
queueReconnect(qa);
for (ConnectionObserver observer : connObservers) {
observer.connectionLost(qa.getSocketAddress());
}
}
// Handle IO for a specific selector. Any IOException will cause a
// reconnect
private void handleIO(SelectionKey sk) {
MemcachedNode qa = (MemcachedNode) sk.attachment();
try {
getLogger().debug("Handling IO for: %s (r=%s, w=%s, c=%s, op=%s)", sk,
sk.isReadable(), sk.isWritable(), sk.isConnectable(),
sk.attachment());
if (sk.isConnectable()) {
getLogger().info("Connection state changed for %s", sk);
final SocketChannel channel = qa.getChannel();
if (channel.finishConnect()) {
connected(qa);
addedQueue.offer(qa);
if (qa.getWbuf().hasRemaining()) {
handleWrites(sk, qa);
}
} else {
assert !channel.isConnected() : "connected";
}
} else {
if (sk.isValid() && sk.isReadable()) {
handleReads(sk, qa);
}
if (sk.isValid() && sk.isWritable()) {
handleWrites(sk, qa);
}
}
} catch (ClosedChannelException e) {
// Note, not all channel closes end up here
if (!shutDown) {
getLogger().info("Closed channel and not shutting down. Queueing"
+ " reconnect on %s", qa, e);
lostConnection(qa);
}
} catch (ConnectException e) {
// Failures to establish a connection should attempt a reconnect
// without signaling the observers.
getLogger().info("Reconnecting due to failure to connect to %s", qa, e);
queueReconnect(qa);
} catch (OperationException e) {
qa.setupForAuth(); // noop if !shouldAuth
getLogger().info("Reconnection due to exception handling a memcached "
+ "operation on %s. This may be due to an authentication failure.",
qa, e);
lostConnection(qa);
} catch (Exception e) {
// Any particular error processing an item should simply
// cause us to reconnect to the server.
// One cause is just network oddness or servers
// restarting, which lead here with IOException
qa.setupForAuth(); // noop if !shouldAuth
getLogger().info("Reconnecting due to exception on %s", qa, e);
lostConnection(qa);
}
qa.fixupOps();
}
private void handleWrites(SelectionKey sk, MemcachedNode qa)
throws IOException {
qa.fillWriteBuffer(shouldOptimize);
boolean canWriteMore = qa.getBytesRemainingToWrite() > 0;
while (canWriteMore) {
int wrote = qa.writeSome();
qa.fillWriteBuffer(shouldOptimize);
canWriteMore = wrote > 0 && qa.getBytesRemainingToWrite() > 0;
}
}
private void handleReads(SelectionKey sk, MemcachedNode qa)
throws IOException {
Operation currentOp = qa.getCurrentReadOp();
// If it's a tap ack there is no response
if (currentOp instanceof TapAckOperationImpl) {
qa.removeCurrentReadOp();
return;
}
ByteBuffer rbuf = qa.getRbuf();
final SocketChannel channel = qa.getChannel();
int read = channel.read(rbuf);
if (read < 0) {
if (currentOp instanceof TapOperation) {
// If we're doing tap, we won't throw an exception
currentOp.getCallback().complete();
((TapOperation) currentOp).streamClosed(OperationState.COMPLETE);
getLogger().debug("Completed read op: %s and giving the next %d bytes",
currentOp, rbuf.remaining());
Operation op = qa.removeCurrentReadOp();
assert op == currentOp : "Expected to pop " + currentOp + " got " + op;
currentOp = qa.getCurrentReadOp();
} else {
// our model is to keep the connection alive for future ops
// so we'll queue a reconnect if disconnected via an IOException
throw new IOException("Disconnected unexpectedly, will reconnect.");
}
}
while (read > 0) {
getLogger().debug("Read %d bytes", read);
rbuf.flip();
while (rbuf.remaining() > 0) {
if (currentOp == null) {
throw new IllegalStateException("No read operation.");
}
synchronized(currentOp) {
currentOp.readFromBuffer(rbuf);
if (currentOp.getState() == OperationState.COMPLETE) {
getLogger().debug("Completed read op: %s and giving the next %d "
+ "bytes", currentOp, rbuf.remaining());
Operation op = qa.removeCurrentReadOp();
assert op == currentOp : "Expected to pop " + currentOp + " got "
+ op;
} else if (currentOp.getState() == OperationState.RETRY) {
getLogger().debug("Reschedule read op due to NOT_MY_VBUCKET error: "
+ "%s ", currentOp);
((VBucketAware) currentOp).addNotMyVbucketNode(
currentOp.getHandlingNode());
Operation op = qa.removeCurrentReadOp();
assert op == currentOp : "Expected to pop " + currentOp + " got "
+ op;
retryOps.add(currentOp);
}
}
currentOp = qa.getCurrentReadOp();
}
rbuf.clear();
read = channel.read(rbuf);
}
}
// Make a debug string out of the given buffer's values
static String dbgBuffer(ByteBuffer b, int size) {
StringBuilder sb = new StringBuilder();
byte[] bytes = b.array();
for (int i = 0; i < size; i++) {
char ch = (char) bytes[i];
if (Character.isWhitespace(ch) || Character.isLetterOrDigit(ch)) {
sb.append(ch);
} else {
sb.append("\\x");
sb.append(Integer.toHexString(bytes[i] & 0xff));
}
}
return sb.toString();
}
protected void queueReconnect(MemcachedNode qa) {
if (!shutDown) {
getLogger().warn("Closing, and reopening %s, attempt %d.", qa,
qa.getReconnectCount());
if (qa.getSk() != null) {
qa.getSk().cancel();
assert !qa.getSk().isValid() : "Cancelled selection key is valid";
}
qa.reconnecting();
try {
if (qa.getChannel() != null && qa.getChannel().socket() != null) {
qa.getChannel().socket().close();
} else {
getLogger().info("The channel or socket was null for %s", qa);
}
} catch (IOException e) {
getLogger().warn("IOException trying to close a socket", e);
}
qa.setChannel(null);
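// Exponential backoff: wait 2^reconnectCount seconds, capped at maxDelay, converted to milliseconds.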
long delay = (long) Math.min(maxDelay, Math.pow(2,
qa.getReconnectCount())) * 1000;
long reconTime = System.currentTimeMillis() + delay;
// Avoid potential condition where two connections are scheduled
// for reconnect at the exact same time. This is expected to be
// a rare situation.
while (reconnectQueue.containsKey(reconTime)) {
reconTime++;
}
reconnectQueue.put(reconTime, qa);
// Need to do a little queue management.
qa.setupResend();
if (failureMode == FailureMode.Redistribute) {
redistributeOperations(qa.destroyInputQueue());
} else if (failureMode == FailureMode.Cancel) {
cancelOperations(qa.destroyInputQueue());
}
}
}
private void cancelOperations(Collection<Operation> ops) {
for (Operation op : ops) {
op.cancel();
}
}
private void redistributeOperations(Collection<Operation> ops) {
for (Operation op : ops) {
if (op.isCancelled() || op.isTimedOut()) {
continue;
}
if (op instanceof KeyedOperation) {
KeyedOperation ko = (KeyedOperation) op;
int added = 0;
for (String k : ko.getKeys()) {
for (Operation newop : opFact.clone(ko)) {
addOperation(k, newop);
added++;
}
}
assert added > 0 : "Didn't add any new operations when redistributing";
} else {
// Cancel things that don't have definite targets.
op.cancel();
}
}
}
private void attemptReconnects() throws IOException {
final long now = System.currentTimeMillis();
final Map<MemcachedNode, Boolean> seen =
new IdentityHashMap<MemcachedNode, Boolean>();
final List<MemcachedNode> rereQueue = new ArrayList<MemcachedNode>();
SocketChannel ch = null;
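// 'seen' prevents reconnecting the same node twice in one pass when it has multiple entries in the reconnect queue.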
for (Iterator<MemcachedNode> i =
reconnectQueue.headMap(now).values().iterator(); i.hasNext();) {
final MemcachedNode qa = i.next();
i.remove();
try {
if (!seen.containsKey(qa)) {
seen.put(qa, Boolean.TRUE);
getLogger().info("Reconnecting %s", qa);
ch = SocketChannel.open();
ch.configureBlocking(false);
int ops = 0;
if (ch.connect(qa.getSocketAddress())) {
connected(qa);
addedQueue.offer(qa);
getLogger().info("Immediately reconnected to %s", qa);
assert ch.isConnected();
} else {
ops = SelectionKey.OP_CONNECT;
}
qa.registerChannel(ch, ch.register(selector, ops, qa));
assert qa.getChannel() == ch : "Channel was lost.";
} else {
getLogger().debug("Skipping duplicate reconnect request for %s", qa);
}
} catch (SocketException e) {
getLogger().warn("Error on reconnect", e);
rereQueue.add(qa);
} catch (Exception e) {
getLogger().error("Exception on reconnect, lost node %s", qa, e);
} finally {
// It's possible that the code above leaks file descriptors under abnormal
// conditions (e.g. when SocketChannel.open() fails and throws an IOException),
// so always close a channel that never finished connecting.
if (ch != null && !ch.isConnected() && !ch.isConnectionPending()) {
try {
ch.close();
} catch (IOException x) {
getLogger().error("Exception closing channel: %s", qa, x);
}
}
}
}
// Requeue any fast-failed connects.
for (MemcachedNode n : rereQueue) {
queueReconnect(n);
}
}
/**
* Get the node locator used by this connection.
*/
public NodeLocator getLocator() {
return locator;
}
public void enqueueOperation(String key, Operation o) {
StringUtils.validateKey(key);
checkState();
addOperation(key, o);
}
/**
* Add an operation to the given connection.
*
* @param key the key the operation is operating upon
* @param o the operation
*/
protected void addOperation(final String key, final Operation o) {
MemcachedNode placeIn = null;
MemcachedNode primary = locator.getPrimary(key);
if (primary.isActive() || failureMode == FailureMode.Retry) {
placeIn = primary;
} else if (failureMode == FailureMode.Cancel) {
o.cancel();
} else {
// Look for another node in sequence that is ready.
for (Iterator<MemcachedNode> i = locator.getSequence(key); placeIn == null
&& i.hasNext();) {
MemcachedNode n = i.next();
if (n.isActive()) {
placeIn = n;
}
}
// If we didn't find an active node, queue it in the primary node
// and wait for it to come back online.
if (placeIn == null) {
placeIn = primary;
this.getLogger().warn(
"Could not redistribute "
+ "to another node, retrying primary node for %s.", key);
}
}
assert o.isCancelled() || placeIn != null : "No node found for key " + key;
if (placeIn != null) {
addOperation(placeIn, o);
} else {
assert o.isCancelled() : "No node found for " + key
+ " (and not immediately cancelled)";
}
}
public void insertOperation(final MemcachedNode node, final Operation o) {
o.setHandlingNode(node);
o.initialize();
node.insertOp(o);
addedQueue.offer(node);
Selector s = selector.wakeup();
assert s == selector : "Wakeup returned the wrong selector.";
getLogger().debug("Added %s to %s", o, node);
}
protected void addOperation(final MemcachedNode node, final Operation o) {
o.setHandlingNode(node);
o.initialize();
node.addOp(o);
addedQueue.offer(node);
Selector s = selector.wakeup();
assert s == selector : "Wakeup returned the wrong selector.";
getLogger().debug("Added %s to %s", o, node);
}
public void addOperations(final Map<MemcachedNode, Operation> ops) {
for (Map.Entry<MemcachedNode, Operation> me : ops.entrySet()) {
final MemcachedNode node = me.getKey();
Operation o = me.getValue();
o.setHandlingNode(node);
o.initialize();
node.addOp(o);
addedQueue.offer(node);
}
Selector s = selector.wakeup();
assert s == selector : "Wakeup returned the wrong selector.";
}
/**
* Broadcast an operation to all nodes.
*/
public CountDownLatch broadcastOperation(BroadcastOpFactory of) {
return broadcastOperation(of, locator.getAll());
}
/**
* Broadcast an operation to a specific collection of nodes.
*/
public CountDownLatch broadcastOperation(final BroadcastOpFactory of,
Collection<MemcachedNode> nodes) {
final CountDownLatch latch = new CountDownLatch(nodes.size());
for (MemcachedNode node : nodes) {
getLogger().debug("broadcast Operation: node = " + node);
Operation op = of.newOp(node, latch);
op.initialize();
node.addOp(op);
op.setHandlingNode(node);
addedQueue.offer(node);
}
Selector s = selector.wakeup();
assert s == selector : "Wakeup returned the wrong selector.";
return latch;
}
/**
* Shut down all of the connections.
*/
public void shutdown() throws IOException {
shutDown = true;
Selector s = selector.wakeup();
assert s == selector : "Wakeup returned the wrong selector.";
for (MemcachedNode qa : locator.getAll()) {
if (qa.getChannel() != null) {
qa.getChannel().close();
qa.setSk(null);
if (qa.getBytesRemainingToWrite() > 0) {
getLogger().warn("Shut down with %d bytes remaining to write",
qa.getBytesRemainingToWrite());
}
getLogger().debug("Shut down channel %s", qa.getChannel());
}
}
running = false;
selector.close();
getLogger().debug("Shut down selector %s", selector);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{MemcachedConnection to");
for (MemcachedNode qa : locator.getAll()) {
sb.append(" ");
sb.append(qa.getSocketAddress());
}
sb.append("}");
return sb.toString();
}
/**
* helper method: increase timeout count on node attached to this op.
*
* @param op
*/
public static void opTimedOut(Operation op) {
MemcachedConnection.setTimeout(op, true);
}
/**
* helper method: reset timeout counter.
*
* @param op
*/
public static void opSucceeded(Operation op) {
MemcachedConnection.setTimeout(op, false);
}
/**
* helper method: do some error checking and set timeout boolean.
*
* @param op
* @param isTimeout
*/
private static void setTimeout(Operation op, boolean isTimeout) {
try {
if (op == null || op.isTimedOutUnsent()) {
return; // op may be null in some cases, e.g. flush
}
MemcachedNode node = op.getHandlingNode();
if (node == null) {
LoggerFactory.getLogger(MemcachedConnection.class).warn(
"handling node for operation is not set");
} else {
node.setContinuousTimeout(isTimeout);
}
} catch (Exception e) {
LoggerFactory.getLogger(MemcachedConnection.class).error(e.getMessage());
}
}
protected void checkState() {
if (shutDown) {
throw new IllegalStateException("Shutting down");
}
assert isAlive() : "IO Thread is not running.";
}
/**
* Infinitely loop processing IO.
*/
@Override
public void run() {
while (running) {
try {
handleIO();
} catch (IOException e) {
logRunException(e);
} catch (CancelledKeyException e) {
logRunException(e);
} catch (ClosedSelectorException e) {
logRunException(e);
} catch (IllegalStateException e) {
logRunException(e);
}
}
getLogger().info("Shut down memcached client");
}
private void logRunException(Exception e) {
if (shutDown) {
// There are a couple types of errors that occur during the
// shutdown sequence that are considered OK. Log at debug.
getLogger().debug("Exception occurred during shutdown", e);
} else {
getLogger().warn("Problem handling memcached IO", e);
}
}
}
|
package org.alienlabs.amazon;
import java.io.UnsupportedEncodingException;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormSubmitBehavior;
import org.apache.wicket.extensions.ajax.markup.html.IndicatingAjaxButton;
import org.apache.wicket.injection.Injector;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Button;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.form.upload.FileUpload;
import org.apache.wicket.markup.html.form.upload.FileUploadField;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.Model;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.apache.wicket.util.lang.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;
import com.xuggle.mediatool.IMediaReader;
import com.xuggle.mediatool.IMediaWriter;
import com.xuggle.mediatool.ToolFactory;
import com.xuggle.mediatool.IMediaViewer;
import com.xuggle.xuggler.ICodec;
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = { "SE_INNER_CLASS",
"SIC_INNER_SHOULD_BE_STATIC_ANON" }, justification = "In Wicket, serializable inner classes are common, and as the parent Page is serialized as well, this is not a concern; it is not bad practice in Wicket")
public class ImportVideoPanel extends Panel
{
private static final Logger LOGGER = LoggerFactory.getLogger(ImportVideoPanel.class);
private static final long serialVersionUID = 1L;
final FileUploadField file;
public ImportVideoPanel(final String id)
{
super(id);
Injector.get().inject(this);
final Form<Void> form = new Form<>("form");
this.file = new FileUploadField("videoFile");
form.setMarkupId("inputForm").setOutputMarkupId(true);
form.setMaxSize(Bytes.kilobytes(5));
form.setMultiPart(true);
form.add(this.file);
this.add(form);
final Button upload = new Button("upload")
{
@Override
public void onSubmit()
{
ImportVideoPanel.LOGGER.info("trying to upload something");
//final FileUpload fupload = ImportDeckDialog.this.file.getFileUpload();
//if (fupload == null)
// No file was provided
// ImportDeckDialog.LOGGER.info("Please provide a valid file");
// return;
//else if (fupload.getSize() == 0)
// ImportDeckDialog.LOGGER.info("Please provide a non-empty file");
// return;
//else if ((fupload.getClientFileName() == null)
// || ("".equals(fupload.getClientFileName().trim()))
// || (fupload.getClientFileName().endsWith(".txt")))
// ImportDeckDialog.LOGGER.info("Please provide a valid file");
// return;
//ImportDeckDialog.LOGGER.info("uploading file: "
// + ImportDeckDialog.this.file.getFileUpload().getClientFileName());
//try
// new String(fupload.getBytes(), "UTF-8");
ImportVideoPanel.convert("/home/nostromo/test.avi", "/home/nostromo/test.wav");
//catch (final UnsupportedEncodingException e)
// ImportDeckDialog.LOGGER.info("Please provide a file encoded with UTF-8 charset");
// return;
//ImportDeckDialog.LOGGER.info("successfully added deck: "
// + fupload.getClientFileName());
ImportVideoPanel.LOGGER.info("Your file has been successfully uploaded");
}
};
form.add(upload);
}
public static void convert(String from, final String to) {
//Runtime.getRuntime().exec(
// "ffmpeg -i " + from + "-acodec pcm_s16le -ac 2 " + to);
try {
ProcessBuilder pb = new ProcessBuilder("ffmpeg", "-i", from, "-acodec", "pcm_s16le", "-ac", "2", to);
System.out.println("Run ffmpeg command");
Process process = pb.start();
int errCode = process.waitFor();
System.out.println("ffmpeg command executed, any errors? " + (errCode == 0 ? "No" : "Yes"));
System.out.println("ffpmeg Output:\n" + output(process.getInputStream()));
}
catch (Exception e) {
ImportVideoPanel.LOGGER.error("error fonverti g video to wav", e);
}
}
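// Illustrative usage (hypothetical paths): convert("/tmp/in.avi", "/tmp/out.wav")
// runs the equivalent of: ffmpeg -i /tmp/in.avi -acodec pcm_s16le -ac 2 /tmp/out.wav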
private static String output(InputStream inputStream) throws IOException {
StringBuilder sb = new StringBuilder();
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
try {
String line;
while ((line = br.readLine()) != null) {
sb.append(line).append(System.getProperty("line.separator"));
}
} finally {
br.close();
}
return sb.toString();
}
}
|
package org.basex.query.util.http;
import static org.basex.io.MimeTypes.*;
import static org.basex.query.util.Err.*;
import static org.basex.query.util.http.HTTPText.*;
import static org.basex.util.Token.*;
import java.io.*;
import java.net.*;
import java.util.*;
import org.basex.build.file.*;
import org.basex.core.*;
import org.basex.io.*;
import org.basex.io.in.*;
import org.basex.query.*;
import org.basex.query.iter.*;
import org.basex.query.util.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.query.value.map.Map;
import org.basex.query.value.node.*;
import org.basex.util.*;
import org.basex.util.list.*;
public final class HTTPPayload {
/** Payloads (may be {@code null}). */
private final ValueBuilder payloads;
/** Input stream. */
private final InputStream in;
/** Input info. */
private final InputInfo info;
/** Database properties. */
private final Prop prop;
/**
* Constructor.
* @param is input stream
* @param st only create status
* @param ii input info
* @param pr database properties
*/
public HTTPPayload(final InputStream is, final boolean st, final InputInfo ii,
final Prop pr) {
in = is;
info = ii;
prop = pr;
payloads = st ? null : new ValueBuilder();
}
/**
* Parses the HTTP payload and returns a result body element.
* @param error error flag
* @param ctype content type defined in the connection
* @param utype content type specified by the user
* @return body element
* @throws IOException I/O exception
* @throws QueryException query exception
*/
public FElem parse(final boolean error, final String ctype, final String utype)
throws IOException, QueryException {
// error: use text/plain as content type
final String ct = error ? MimeTypes.TEXT_PLAIN :
utype != null ? utype : contentType(ctype);
final FElem body;
if(MimeTypes.isMultipart(ct)) {
// multipart response
final byte[] boundary = boundary(ctype);
if(boundary == null) HC_REQ.thrw(info, "No separation boundary specified");
body = new FElem(Q_MULTIPART).add(MEDIA_TYPE, ct).add(BOUNDARY, boundary);
final ANodeList parts = new ANodeList();
extractParts(concat(DASHES, boundary), parts);
for(final ANode node : parts) body.add(node);
} else {
// single part response
body = new FElem(Q_BODY).add(MEDIA_TYPE, ct);
if(payloads != null) {
final byte[] pl = extract(ct, charset(ctype));
payloads.add(parse(pl, ct));
}
}
return body;
}
/**
* Returns all payloads.
* @return payloads
*/
public Value payloads() {
return payloads.value();
}
/**
* Extracts payload from HTTP message and returns it as a byte array encoded
* in UTF-8.
* @param ctype content type
* @param ce response content charset
* @return payload as byte array
* @throws IOException I/O Exception
*/
private byte[] extract(final String ctype, final String ce) throws IOException {
final BufferedInputStream bis = new BufferedInputStream(in);
try {
final ByteList bl = new ByteList();
for(int i; (i = bis.read()) != -1;) bl.add(i);
// In case of XML, HTML or text content type, use supplied character set
if(MimeTypes.isXML(ctype) || MimeTypes.isText(ctype))
return new TextInput(new IOContent(bl.toArray())).encoding(ce).content();
// In case of binary data, do not encode anything
return bl.toArray();
} finally {
bis.close();
}
}
/**
* Interprets a payload according to content type and returns a corresponding value.
* @param payload payload
* @param ctype content type
* @return interpreted payload
* @throws QueryException query exception
*/
private Value parse(final byte[] payload, final String ctype) throws QueryException {
try {
return value(new IOContent(payload), prop, ctype, null);
} catch(final IOException ex) {
throw HC_PARSE.thrw(info, ex);
}
}
/**
* Extracts the parts from a multipart message.
* @param sep separation boundary
* @param parts list with all parts (may be {@code null})
* @throws IOException I/O Exception
* @throws QueryException query exception
*/
private void extractParts(final byte[] sep, final ANodeList parts)
throws IOException, QueryException {
try {
// RFC 1341: Preamble is to be ignored -> read till 1st boundary
while(true) {
final byte[] l = readLine();
if(l == null) HC_REQ.thrw(info, "No body specified for http:part");
if(eq(sep, l)) break;
}
while(extractPart(sep, concat(sep, DASHES), parts));
} finally {
in.close();
}
}
/**
* Extracts a part from a multipart message.
* @param sep separation boundary
* @param end closing boundary
* @param parts list with all parts (may be {@code null})
* @return part
* @throws IOException I/O Exception
* @throws QueryException query exception
*/
private boolean extractPart(final byte[] sep, final byte[] end, final ANodeList parts)
throws IOException, QueryException {
// check if last line is reached
final byte[] line = readLine();
if(line == null || eq(line, end)) return false;
// content type of part payload - if not defined by header 'Content-Type',
// it is equal to 'text/plain' (RFC 1341)
String ctype = MimeTypes.TEXT_PLAIN, enc = null;
// extract headers
for(byte[] l = line; l != null && l.length > 0;) {
final int pos = indexOf(l, ':');
if(pos > 0) {
final byte[] key = substring(l, 0, pos);
final byte[] val = trim(substring(l, pos + 1));
if(eq(lc(key), CONTENT_TYPE_LC)) {
ctype = string(val);
enc = charset(ctype);
}
if(val.length != 0 && parts != null)
parts.add(new FElem(Q_HEADER).add(NAME, key).add(VALUE, val));
}
l = readLine();
}
if(parts != null) parts.add(new FElem(Q_BODY).add(MEDIA_TYPE, ctype));
final byte[] pl = extractPart(sep, end, enc);
if(payloads != null) payloads.add(parse(pl, ctype));
return true;
}
/**
* Reads the next line of an HTTP multipart content.
* @return line, or {@code null} if end of stream is reached
* @throws IOException I/O Exception
*/
private byte[] readLine() throws IOException {
final ByteList bl = new ByteList();
for(int b; (b = in.read()) != -1;) {
// RFC 1341: a line ends with CRLF
while(b == '\r') {
b = in.read();
if(b == '\n') return bl.toArray();
bl.add('\r');
if(b == -1) return bl.toArray();
}
bl.add(b);
}
return bl.isEmpty() ? null : bl.toArray();
}
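// Behaviour sketch: a line is terminated by CRLF only; a bare CR is kept as part of
// the current line, and end of stream without any buffered bytes yields null.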
/**
* Reads the payload of a part.
* @param sep separation boundary
* @param end closing boundary
* @param enc part content encoding
* @return payload part content
* @throws IOException I/O Exception
*/
private byte[] extractPart(final byte[] sep, final byte[] end, final String enc)
throws IOException {
final ByteList bl = new ByteList();
while(true) {
final byte[] next = readLine();
if(next == null || eq(next, sep)) break;
// RFC 1341: Epilogue is to be ignored
if(eq(next, end)) {
while(readLine() != null);
break;
}
bl.add(next).add('\n');
}
return new TextInput(new IOContent(bl.toArray())).encoding(enc).content();
}
/**
* Extracts the encapsulation boundary from the content type.
* @param ct content type
* @return boundary, or {@code null}
* @throws QueryException query exception
*/
private byte[] boundary(final String ct) throws QueryException {
int i = ct.toLowerCase(Locale.ENGLISH).indexOf(BOUNDARY_IS);
if(i == -1) HC_REQ.thrw(info, "No separation boundary specified");
String b = ct.substring(i + BOUNDARY_IS.length());
if(b.charAt(0) == '"') {
// if the boundary is enclosed in quotes, strip them
i = b.lastIndexOf('"');
b = b.substring(1, i);
}
return token(b);
}
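// Illustrative example: for a content type such as
// "multipart/mixed; boundary=\"frontier\"" this returns the token "frontier".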
/**
* Returns a map with multipart form data.
* @param ext content type extension (may be {@code null})
* @return map, or {@code null}
* @throws IOException I/O exception
* @throws QueryException query exception
*/
public HashMap<String, Value> multiForm(final String ext)
throws IOException, QueryException {
// parse boundary, create helper arrays
final byte[] b = boundary(ext);
final byte[] boundary = concat(DASHES, b), last = concat(boundary, DASHES);
HashMap<String, Value> map = new HashMap<String, Value>();
final ByteList cont = new ByteList();
int lines = -1;
String name = null, fn = null;
for(byte[] line; (line = readLine()) != null;) {
if(lines >= 0) {
if(startsWith(line, boundary)) {
Value val = map.get(name);
if(val == null && fn != null) val = Map.EMPTY;
if(fn != null && val instanceof Map) {
val = ((Map) val).insert(Str.get(fn), new B64(cont.toArray()), null);
} else {
val = Str.get(cont.toArray());
}
map.put(name, val);
cont.reset();
lines = -1;
if(eq(line, last)) break;
} else {
if(lines++ > 0) cont.add(CRLF);
cont.add(line);
}
} else {
if(startsWith(line, CONTENT_DISPOSITION)) {
name = !contains(line, token(NAME_IS)) ? null : string(line).
replaceAll("^.*?" + NAME_IS + "\"|\".*", "").replaceAll("\\[\\]", "");
fn = !contains(line, token(FILENAME_IS)) ? null :
string(line).replaceAll("^.*" + FILENAME_IS + "\"|\"$", "");
} else if(line.length == 0) {
lines = 0;
}
}
}
return map;
}
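// Sketch of the resulting map: a plain form field is stored as a Str value under its
// field name, while an uploaded file is stored as a map from its file name to the
// Base64 (B64) content.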
/**
* Returns an XQuery value for the specified content type.
* @param in input source
* @param prop database properties
* @param ctype content type
* @param ext content type extension (may be {@code null})
* @return parsed value
* @throws IOException I/O exception
* @throws QueryException query exception
*/
public static Value value(final IO in, final Prop prop, final String ctype,
final String ext) throws IOException, QueryException {
Value val = null;
if(ctype != null) {
if(Token.eq(ctype, APP_JSON, APP_JSONML)) {
final String options = ParserProp.JSONML[0] + "=" + eq(ctype, APP_JSONML);
val = new DBNode(new JSONParser(in, prop, options));
} else if(TEXT_CSV.equals(ctype)) {
val = new DBNode(new CSVParser(in, prop));
} else if(TEXT_HTML.equals(ctype)) {
val = new DBNode(new HTMLParser(in, prop));
} else if(APP_FORM_URLENCODED.equals(ctype)) {
final String enc = charset(ext);
val = Str.get(URLDecoder.decode(string(in.read()), enc == null ? UTF8 : enc));
} else if(MimeTypes.isXML(ctype)) {
val = new DBNode(in, prop);
} else if(MimeTypes.isText(ctype)) {
val = Str.get(new TextInput(in).content());
} else if(MimeTypes.isMultipart(ctype)) {
final HTTPPayload hp = new HTTPPayload(in.inputStream(), false, null, prop);
hp.extractParts(concat(DASHES, hp.boundary(ext)), null);
val = hp.payloads();
}
}
return val == null ? new B64(in.read()) : val;
}
/**
* Extracts the content from a "Content-type" header.
* @param ctype value for "Content-type" header
* @return result
*/
public static String contentType(final String ctype) {
if(ctype == null) return MimeTypes.APP_OCTET;
final int end = ctype.indexOf(';');
return end == -1 ? ctype : ctype.substring(0, end);
}
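// Illustrative example: contentType("text/html; charset=UTF-8") yields "text/html";
// a null header falls back to the octet-stream type.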
/**
* Extracts the charset from the 'Content-Type' header if present.
* @param ctype Content-Type header
* @return charset charset
*/
private static String charset(final String ctype) {
// content type is unknown
if(ctype == null) return null;
final int i = ctype.toLowerCase(Locale.ENGLISH).indexOf(CHARSET_IS);
return i == -1 ? null : ctype.substring(i + CHARSET_IS.length());
}
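// Illustrative example (assuming CHARSET_IS is "charset="):
// charset("text/plain; charset=ISO-8859-1") yields "ISO-8859-1".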
}
|
package org.dungeon.creatures;
import org.dungeon.game.Engine;
import org.dungeon.io.DLogger;
import org.dungeon.io.IO;
import org.dungeon.items.Item;
import org.dungeon.skill.Skill;
import org.dungeon.util.Constants;
import org.dungeon.util.Utils;
import java.awt.Color;
class AttackAlgorithm {
private static final double BAT_CRITICAL_MAXIMUM_LUMINOSITY = 0.5;
private static final double BEAST_HIT_RATE = 0.9;
private static final double HERO_CRITICAL_CHANCE = 0.1;
private static final double HERO_CRITICAL_CHANCE_UNARMED = 0.05;
private static final double UNDEAD_UNARMED_HIT_RATE = 0.85;
public static void attack(Creature attacker, Creature defender, String algorithmID) {
if (algorithmID.equals("BAT")) {
batAttack(attacker, defender);
} else if (algorithmID.equals("BEAST")) {
beastAttack(attacker, defender);
} else if (algorithmID.equals("CRITTER")) {
critterAttack(attacker);
} else if (algorithmID.equals("DUMMY")) {
dummyAttack(attacker);
} else if (algorithmID.equals("UNDEAD")) {
undeadAttack(attacker, defender);
} else if (algorithmID.equals("HERO")) {
heroAttack(attacker, defender);
} else {
DLogger.warning("algorithmID does not match any implemented algorithm.");
}
}
// Similar to beastAttack, but with a miss chance dependent on luminosity and a critical chance in complete darkness.
private static void batAttack(Creature attacker, Creature defender) {
double luminosity = attacker.getLocation().getLuminosity();
if (Utils.roll(0.9 - luminosity / 2)) { // As luminosity goes from 0 to 1, this hit chance goes from 0.9 down to 0.4.
int hitDamage = attacker.getAttack();
if (luminosity <= BAT_CRITICAL_MAXIMUM_LUMINOSITY) {
hitDamage *= 2;
printInflictedDamage(attacker, hitDamage, defender, true);
} else {
printInflictedDamage(attacker, hitDamage, defender, false);
}
defender.takeDamage(hitDamage);
} else {
printMiss(attacker);
}
}
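// Illustrative numbers: at luminosity 0.4 the bat hits with probability 0.7
// (0.9 - 0.4 / 2) and, being at or below 0.5, the damage is doubled as a critical hit.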
private static void beastAttack(Creature attacker, Creature defender) {
if (Utils.roll(BEAST_HIT_RATE)) {
int hitDamage = attacker.getAttack();
defender.takeDamage(hitDamage);
printInflictedDamage(attacker, hitDamage, defender, false);
} else {
printMiss(attacker);
}
}
private static void critterAttack(Creature attacker) {
if (Engine.RANDOM.nextBoolean()) {
IO.writeBattleString(attacker.getName() + " does nothing.", Color.YELLOW);
} else {
IO.writeBattleString(attacker.getName() + " tries to run away.", Color.YELLOW);
}
}
private static void dummyAttack(Creature attacker) {
IO.writeBattleString(attacker.getName() + " stands still.", Color.YELLOW);
}
private static void undeadAttack(Creature attacker, Creature defender) {
Item weapon = attacker.getWeapon();
int hitDamage;
// Check that there is a weapon and that it is not broken.
if (weapon != null && !weapon.isBroken()) {
if (weapon.rollForHit()) {
hitDamage = weapon.getDamage() + attacker.getAttack();
printInflictedDamage(attacker, hitDamage, defender, false);
weapon.decrementIntegrityByHit();
if (weapon.isBroken()) {
printWeaponBreak(weapon);
if (!weapon.isRepairable()) {
attacker.getInventory().removeItem(weapon);
}
}
} else {
printMiss(attacker);
return;
}
} else {
if (Utils.roll(UNDEAD_UNARMED_HIT_RATE)) {
hitDamage = attacker.getAttack();
printInflictedDamage(attacker, hitDamage, defender, false);
} else {
printMiss(attacker);
return;
}
}
defender.takeDamage(hitDamage);
// The inflicted damage message cannot be printed here (which would avoid code duplication), as it would then
// appear after a possible "weaponName broke" message, which would look odd.
}
private static void heroAttack(Creature attacker, Creature defender) {
Item weapon = attacker.getWeapon();
int hitDamage;
if (attacker.getSkillRotation().hasReadySkill()) {
Skill skill = attacker.getSkillRotation().getNextSkill();
hitDamage = skill.getDamage();
skill.startCoolDown();
printSkillCast(attacker, skill, defender);
} else {
// Check that there is a weapon and that it is not broken.
if (weapon != null && !weapon.isBroken()) {
if (weapon.rollForHit()) {
hitDamage = weapon.getDamage() + attacker.getAttack();
if (Utils.roll(HERO_CRITICAL_CHANCE)) {
hitDamage *= 2;
printInflictedDamage(attacker, hitDamage, defender, true);
} else {
printInflictedDamage(attacker, hitDamage, defender, false);
}
weapon.decrementIntegrityByHit();
if (weapon.isBroken()) {
printWeaponBreak(weapon);
if (!weapon.isRepairable()) {
attacker.getInventory().removeItem(weapon);
}
}
} else {
printMiss(attacker);
return;
}
} else {
hitDamage = attacker.getAttack();
if (Utils.roll(HERO_CRITICAL_CHANCE_UNARMED)) {
hitDamage *= 2;
printInflictedDamage(attacker, hitDamage, defender, true);
} else {
printInflictedDamage(attacker, hitDamage, defender, false);
}
}
}
defender.takeDamage(hitDamage);
// The inflicted damage message cannot be printed here (which would avoid code duplication), as it would then
// appear after a possible "weaponName broke" message, which would look odd.
}
/**
* Prints that a weapon broke.
*
* @param weapon the weapon that broke.
*/
private static void printWeaponBreak(Item weapon) {
IO.writeString(weapon.getName() + " broke!", Color.RED);
}
/**
* Prints a message about the inflicted damage based on the parameters.
*
* @param attacker the Creature that performed the attack.
* @param hitDamage the damage inflicted by the attacker.
* @param defender the target of the attack.
* @param criticalHit a boolean indicating if the attack was a critical hit or not.
*/
private static void printInflictedDamage(Creature attacker, int hitDamage, Creature defender, boolean criticalHit) {
StringBuilder builder = new StringBuilder();
builder.append(attacker.getName());
builder.append(" inflicted ");
builder.append(hitDamage);
builder.append(" damage points to ");
builder.append(defender.getName());
if (criticalHit) {
builder.append(" with a critical hit");
}
builder.append(".");
IO.writeBattleString(builder.toString(), attacker.getID().equals(Constants.HERO_ID) ? Color.GREEN : Color.RED);
}
/**
* Prints a message about the damage inflicted by a cast Skill.
*
* @param attacker the Creature that performed the attack.
* @param skill the Skill that was cast.
* @param defender the target of the attack.
*/
private static void printSkillCast(Creature attacker, Skill skill, Creature defender) {
String result = attacker.getName() + " casted " +
skill.getName() + " and inflicted " +
skill.getDamage() + " damage points to " +
defender.getName() + ".";
IO.writeBattleString(result, attacker.getID().equals(Constants.HERO_ID) ? Color.GREEN : Color.RED);
}
/**
* Prints a miss message.
*
* @param attacker the attacker creature.
*/
private static void printMiss(Creature attacker) {
IO.writeBattleString(attacker.getName() + " missed.", Color.YELLOW);
}
}
|
package org.embulk.output.sftp;
import com.google.common.base.Function;
import com.google.common.base.Throwables;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.impl.StandardFileSystemManager;
import org.apache.commons.vfs2.provider.sftp.IdentityInfo;
import org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder;
import org.embulk.config.ConfigException;
import org.embulk.config.TaskReport;
import org.embulk.spi.Buffer;
import org.embulk.spi.Exec;
import org.embulk.spi.FileOutput;
import org.embulk.spi.TransactionalFileOutput;
import org.embulk.spi.unit.LocalFile;
import org.slf4j.Logger;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import static org.embulk.output.sftp.SftpFileOutputPlugin.PluginTask;
public class SftpFileOutput
implements FileOutput, TransactionalFileOutput
{
private final Logger logger = Exec.getLogger(SftpFileOutput.class);
private final StandardFileSystemManager manager;
private final FileSystemOptions fsOptions;
private final String userInfo;
private final String host;
private final int port;
private final int maxConnectionRetry;
private final String pathPrefix;
private final String sequenceFormat;
private final String fileNameExtension;
private final int taskIndex;
private int fileIndex = 0;
private FileObject currentFile;
private OutputStream currentFileOutputStream;
private StandardFileSystemManager initializeStandardFileSystemManager()
{
if (!logger.isDebugEnabled()) {
// TODO: change logging format: org.apache.commons.logging.Log
System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.NoOpLog");
}
StandardFileSystemManager manager = new StandardFileSystemManager();
manager.setClassLoader(SftpFileOutput.class.getClassLoader());
try {
manager.init();
}
catch (FileSystemException e) {
logger.error(e.getMessage());
throw new ConfigException(e);
}
return manager;
}
private String initializeUserInfo(PluginTask task)
{
String userInfo = task.getUser();
if (task.getPassword().isPresent()) {
userInfo += ":" + task.getPassword().get();
}
return userInfo;
}
private FileSystemOptions initializeFsOptions(PluginTask task)
{
FileSystemOptions fsOptions = new FileSystemOptions();
try {
SftpFileSystemConfigBuilder builder = SftpFileSystemConfigBuilder.getInstance();
builder.setUserDirIsRoot(fsOptions, task.getUserDirIsRoot());
builder.setTimeout(fsOptions, task.getSftpConnectionTimeout() * 1000);
builder.setStrictHostKeyChecking(fsOptions, "no");
if (task.getSecretKeyFilePath().isPresent()) {
IdentityInfo identityInfo = new IdentityInfo(
new File((task.getSecretKeyFilePath().transform(localFileToPathString()).get())),
task.getSecretKeyPassphrase().getBytes()
);
builder.setIdentityInfo(fsOptions, identityInfo);
logger.info("set identity: {}", task.getSecretKeyFilePath().get());
}
if (task.getProxy().isPresent()) {
ProxyTask proxy = task.getProxy().get();
ProxyTask.ProxyType.setProxyType(builder, fsOptions, proxy.getType());
if (proxy.getHost().isPresent()) {
builder.setProxyHost(fsOptions, proxy.getHost().get());
builder.setProxyPort(fsOptions, proxy.getPort());
}
if (proxy.getUser().isPresent()) {
builder.setProxyUser(fsOptions, proxy.getUser().get());
}
if (proxy.getPassword().isPresent()) {
builder.setProxyPassword(fsOptions, proxy.getPassword().get());
}
if (proxy.getCommand().isPresent()) {
builder.setProxyCommand(fsOptions, proxy.getCommand().get());
}
}
}
catch (FileSystemException e) {
logger.error(e.getMessage());
throw new ConfigException(e);
}
return fsOptions;
}
SftpFileOutput(PluginTask task, int taskIndex)
{
this.manager = initializeStandardFileSystemManager();
this.userInfo = initializeUserInfo(task);
this.fsOptions = initializeFsOptions(task);
this.host = task.getHost();
this.port = task.getPort();
this.maxConnectionRetry = task.getMaxConnectionRetry();
this.pathPrefix = task.getPathPrefix();
this.sequenceFormat = task.getSequenceFormat();
this.fileNameExtension = task.getFileNameExtension();
this.taskIndex = taskIndex;
}
@Override
public void nextFile()
{
closeCurrentFile();
try {
currentFile = newSftpFile(getSftpFileUri(getOutputFilePath()));
currentFileOutputStream = newSftpOutputStream(currentFile);
logger.info("new sftp file: {}", currentFile.getPublicURIString());
}
catch (FileSystemException e) {
logger.error(e.getMessage());
Throwables.propagate(e);
}
}
@Override
public void add(final Buffer buffer)
{
if (currentFile == null) {
throw new IllegalStateException("nextFile() must be called before poll()");
}
try {
Retriable<Void> retriable = new Retriable<Void>() {
public Void execute() throws IOException
{
currentFileOutputStream.write(buffer.array(), buffer.offset(), buffer.limit());
return null;
}
};
try {
withConnectionRetry(retriable);
}
catch (Exception e) {
throw (IOException) e;
}
}
catch (IOException e) {
logger.error(e.getMessage());
Throwables.propagate(e);
}
finally {
buffer.release();
}
}
@Override
public void finish()
{
closeCurrentFile();
}
@Override
public void close()
{
closeCurrentFile();
manager.close();
}
@Override
public void abort()
{
}
@Override
public TaskReport commit()
{
return Exec.newTaskReport();
}
private void closeCurrentFile()
{
if (currentFile == null) {
return;
}
try {
currentFileOutputStream.close();
}
catch (IOException e) {
logger.info(e.getMessage());
}
try {
currentFile.close();
}
catch (FileSystemException e) {
logger.warn(e.getMessage());
}
fileIndex++;
currentFile = null;
currentFileOutputStream = null;
}
private URI getSftpFileUri(String remoteFilePath)
{
try {
return new URI("sftp", userInfo, host, port, remoteFilePath, null, null);
}
catch (URISyntaxException e) {
logger.error(e.getMessage());
throw new ConfigException(e);
}
}
private String getOutputFilePath()
{
return pathPrefix + String.format(sequenceFormat, taskIndex, fileIndex) + fileNameExtension;
}
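// Illustrative example (hypothetical config): with pathPrefix "/out/data_",
// sequenceFormat "%03d.%02d" and fileNameExtension ".csv", task 1 / file 0
// produces "/out/data_001.00.csv".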
interface Retriable<T>
{
/**
* Executes the operation and returns its result (possibly {@code null}).
* @return any return value from the operation
* @throws Exception
*/
public T execute() throws Exception;
}
private <T> T withConnectionRetry(final Retriable<T> op)
throws Exception
{
int count = 0;
while (true) {
try {
return op.execute();
}
catch (final Exception e) {
if (++count > maxConnectionRetry) {
throw e;
}
logger.warn("failed to connect sftp server: " + e.getMessage(), e);
try {
long sleepTime = ((long) Math.pow(2, count) * 1000);
logger.warn("sleep in next connection retry: {} milliseconds", sleepTime);
Thread.sleep(sleepTime); // milliseconds
}
catch (InterruptedException e1) {
// Ignore this exception because this exception is just about `sleep`.
logger.warn(e1.getMessage(), e1);
}
logger.warn("retry to connect sftp server: " + count + " times");
}
}
}
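// Retry sketch: the back-off grows exponentially (2s, 4s, 8s, ...) and the last
// exception is rethrown once the retry count exceeds maxConnectionRetry.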
private FileObject newSftpFile(final URI sftpUri)
throws FileSystemException
{
Retriable<FileObject> retriable = new Retriable<FileObject>() {
public FileObject execute() throws FileSystemException
{
FileObject file = manager.resolveFile(sftpUri.toString(), fsOptions);
if (file.getParent().exists()) {
logger.info("parent directory {} exists there", file.getParent().getPublicURIString());
}
else {
logger.info("trying to create parent directory {}", file.getParent().getPublicURIString());
file.getParent().createFolder();
}
return file;
}
};
try {
return withConnectionRetry(retriable);
}
catch (Exception e) {
throw (FileSystemException) e;
}
}
private OutputStream newSftpOutputStream(final FileObject file)
throws FileSystemException
{
Retriable<OutputStream> retriable = new Retriable<OutputStream>() {
public OutputStream execute() throws FileSystemException
{
return file.getContent().getOutputStream();
}
};
try {
return withConnectionRetry(retriable);
}
catch (Exception e) {
throw (FileSystemException) e;
}
}
private Function<LocalFile, String> localFileToPathString()
{
return new Function<LocalFile, String>()
{
public String apply(LocalFile file)
{
return file.getPath().toString();
}
};
}
}
|
package org.holmes.evaluator.support;
/**
* Represents an interval between a left boundary and a right boundary.
*
* @author diegossilveira
*/
public final class Interval<T extends Comparable<T>> {
private final T leftBoundary;
private final T rightBoundary;
private final boolean leftOpen;
private final boolean rightOpen;
private Interval(T leftBoundary, T rightBoundary, boolean leftOpen, boolean rightOpen) {
this.leftBoundary = leftBoundary;
this.rightBoundary = rightBoundary;
this.leftOpen = leftOpen;
this.rightOpen = rightOpen;
}
/**
* Creates a closed interval.
*
* @param leftBoundary
* @param rightBoundary
* @return
*/
public static <T extends Comparable<T>> Interval<T> closedInterval(T leftBoundary, T rightBoundary) {
return new Interval<T>(leftBoundary, rightBoundary, false, false);
}
/**
* Creates a left-open interval.
*
* @param leftBoundary
* @param rightBoundary
* @return
*/
public static <T extends Comparable<T>> Interval<T> leftOpenInterval(T leftBoundary, T rightBoundary) {
return new Interval<T>(leftBoundary, rightBoundary, true, false);
}
/**
* Creates a right-open interval.
*
* @param leftBoundary
* @param rightBoundary
* @return
*/
public static <T extends Comparable<T>> Interval<T> rightOpenInterval(T leftBoundary, T rightBoundary) {
return new Interval<T>(leftBoundary, rightBoundary, false, true);
}
/**
* Creates an open interval.
*
* @param leftBoundary
* @param rightBoundary
* @return
*/
public static <T extends Comparable<T>> Interval<T> openInterval(T leftBoundary, T rightBoundary) {
return new Interval<T>(leftBoundary, rightBoundary, true, true);
}
/**
* Checks if this interval contains the element.
*
* @param element
* @return
*/
public boolean contains(T element) {
return element != null && analyzeLeftBoundary(element) && analyzeRightBoundary(element);
}
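// Illustrative usage: Interval.closedInterval(1, 10).contains(10) is true,
// while Interval.openInterval(1, 10).contains(10) is false.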
private boolean analyzeLeftBoundary(T element) {
if (leftOpen) {
return element.compareTo(leftBoundary) > 0;
}
return element.compareTo(leftBoundary) >= 0;
}
private boolean analyzeRightBoundary(T element) {
if (rightOpen) {
return element.compareTo(rightBoundary) < 0;
}
return element.compareTo(rightBoundary) <= 0;
}
}
|
package org.jboss.aesh.extensions.mkdir;
import org.jboss.aesh.cl.Arguments;
import org.jboss.aesh.cl.CommandDefinition;
import org.jboss.aesh.cl.Option;
import org.jboss.aesh.console.command.Command;
import org.jboss.aesh.console.command.CommandResult;
import org.jboss.aesh.console.command.invocation.CommandInvocation;
import org.jboss.aesh.terminal.Shell;
import org.jboss.aesh.util.PathResolver;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.StringTokenizer;
/**
* A simple mkdir command.
*
* @author Helio Frota 00hf11 at gmail dot com
*/
@CommandDefinition(name = "mkdir", description = "create directory(ies), if they do not already exist.")
public class Mkdir implements Command<CommandInvocation> {
@Option(shortName = 'h', name = "help", hasValue = false, description = "display this help and exit")
private boolean help;
@Option(shortName = 'p', name = "parents", hasValue = false,
description = "make parent directories as needed")
private boolean parents;
@Option(shortName = 'v', name = "verbose", hasValue = false,
description = "print a message for each created directory")
private boolean verbose;
@Arguments
private List<String> arguments;
@Override
public CommandResult execute(CommandInvocation commandInvocation) throws IOException {
if (help) {
commandInvocation.getShell().out().println(commandInvocation.getHelpInfo("mkdir"));
return CommandResult.SUCCESS;
}
if (arguments != null && !arguments.isEmpty()) {
for (String a : arguments) {
File currentWorkingDirectory = commandInvocation.getAeshContext().getCurrentWorkingDirectory();
Shell shell = commandInvocation.getShell();
if (parents || a.contains(File.separator)) {
makeDirs(a, PathResolver.resolvePath(new File(a), currentWorkingDirectory).get(0), shell);
} else {
makeDir(PathResolver.resolvePath(new File(a), currentWorkingDirectory).get(0), shell);
}
}
}
return CommandResult.SUCCESS;
}
private void makeDir(File dir, Shell shell) {
if (!dir.exists()) {
dir.mkdir();
if (verbose) {
shell.out().println("created directory '" + dir.getName() + "'");
}
} else {
shell.out().println("cannot create directory '" + dir.getName() + "': Directory exists");
}
}
private void makeDirs(String path, File dir, Shell shell) {
if (!dir.exists()) {
dir.mkdirs();
if (verbose) {
StringTokenizer st = new StringTokenizer(path, File.separator);
String dirName = "";
while (st.hasMoreElements()) {
dirName += st.nextElement() + File.separator;
shell.out().println("created directory '" + dirName + "'");
}
}
}
}
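// Illustrative output (hypothetical invocation): "mkdir -p -v a/b" reports each created
// level, e.g. "created directory 'a/'" followed by "created directory 'a/b/'"
// (using the platform separator).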
}
|
package org.lightmare.utils.reflect;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.utils.ObjectUtils;
/**
* Class to use reflection {@link Method} calls and {@link Field} information
* sets
*
* @author levan
*
*/
public class MetaUtils {
// default values for primitives
private static final byte BYTE_DEF = 0;
private static final boolean BOOLEAN_DEF = Boolean.FALSE;
private static final char CHAR_DEF = '\u0000';
private static final short SHORT_DEF = 0;
private static final int INT_DEF = 0;
private static final long LONG_DEF = 0L;
private static final float FLOAT_DEF = 0F;
private static final double DOUBLE_DEF = 0D;
// default value for modifier
private static final int DEFAULT_MODIFIER = 0;
// Lock to modify accessible mode of AccessibleObject instances
private static final Lock ACCESSOR_LOCK = new ReentrantLock();
/**
* Sets object accessible flag as true if it is not
*
* @param accessibleObject
* @param accessible
*/
private static void setAccessible(AccessibleObject accessibleObject,
boolean accessible) {
if (ObjectUtils.notTrue(accessible)) {
boolean locked = Boolean.FALSE;
while (ObjectUtils.notTrue(locked)) {
locked = ObjectUtils.notTrue(accessibleObject.isAccessible())
&& ObjectUtils.tryLock(ACCESSOR_LOCK);
if (locked) {
try {
if (ObjectUtils
.notTrue(accessibleObject.isAccessible())) {
accessibleObject.setAccessible(Boolean.TRUE);
}
} finally {
ObjectUtils.unlock(ACCESSOR_LOCK);
}
}
}
}
}
/**
* Resets the passed {@link AccessibleObject}'s accessible flag to the passed
* boolean value if that value is false
*
* @param accessibleObject
* @param accessible
*/
private static void resetAccessible(AccessibleObject accessibleObject,
boolean accessible) {
if (ObjectUtils.notTrue(accessible)) {
boolean locked = Boolean.FALSE;
while (ObjectUtils.notTrue(locked)) {
locked = ObjectUtils.tryLock(ACCESSOR_LOCK);
if (locked) {
try {
if (accessibleObject.isAccessible()) {
accessibleObject.setAccessible(accessible);
}
} finally {
ObjectUtils.unlock(ACCESSOR_LOCK);
}
}
}
}
}
/**
* Makes the passed {@link Constructor} accessible if needed and invokes
* {@link Constructor#newInstance(Object...)} method
*
* @param constructor
* @param parameters
* @return <code>T</code>
* @throws IOException
*/
public static <T> T newInstance(Constructor<T> constructor,
Object... parameters) throws IOException {
T instance;
boolean accessible = constructor.isAccessible();
try {
setAccessible(constructor, accessible);
instance = constructor.newInstance(parameters);
} catch (InstantiationException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (InvocationTargetException ex) {
throw new IOException(ex);
} finally {
resetAccessible(constructor, accessible);
}
return instance;
}
/**
* Gets declared constructor for given {@link Class} and given parameters
*
* @param type
* @param parameterTypes
* @return {@link Constructor}
* @throws IOException
*/
public static <T> Constructor<T> getConstructor(Class<T> type,
Class<?>... parameterTypes) throws IOException {
Constructor<T> constructor;
try {
constructor = type.getDeclaredConstructor(parameterTypes);
} catch (NoSuchMethodException ex) {
throw new IOException(ex);
} catch (SecurityException ex) {
throw new IOException(ex);
}
return constructor;
}
/**
* Instantiates class by {@link Constructor} (MetaUtils
* {@link #newInstance(Constructor, Object...)}) after
* {@link MetaUtils#getConstructor(Class, Class...)} method call
*
* @param type
* @param parameterTypes
* @param parameters
* @return <code>T</code>
* @throws IOException
*/
public static <T> T callConstructor(Class<T> type,
Class<?>[] parameterTypes, Object... parameters) throws IOException {
T instance;
Constructor<T> constructor = getConstructor(type, parameterTypes);
instance = newInstance(constructor, parameters);
return instance;
}
/**
* Loads class by name
*
* @param className
* @return {@link Class}
* @throws IOException
*/
public static Class<?> classForName(String className) throws IOException {
Class<?> clazz = classForName(className, null);
return clazz;
}
/**
* Loads class by name with specific {@link ClassLoader} if it is not
* <code>null</code>
*
* @param className
* @param loader
* @return {@link Class}
* @throws IOException
*/
public static Class<?> classForName(String className, ClassLoader loader)
throws IOException {
Class<?> clazz = classForName(className, Boolean.TRUE, loader);
return clazz;
}
/**
* Loads and if initialize parameter is true initializes class by name with
* specific {@link ClassLoader} if it is not <code>null</code>
*
* @param className
* @param initialize
* @param loader
* @return {@link Class}
* @throws IOException
*/
public static Class<?> classForName(String className, boolean initialize,
ClassLoader loader) throws IOException {
Class<?> clazz;
try {
if (loader == null) {
clazz = Class.forName(className);
} else {
clazz = Class.forName(className, initialize, loader);
}
} catch (ClassNotFoundException ex) {
throw new IOException(ex);
}
return clazz;
}
/**
* Loads class by name with current {@link Thread}'s {@link ClassLoader} and
* initializes it
*
* @param className
* @return {@link Class}
* @throws IOException
*/
public static Class<?> initClassForName(String className)
throws IOException {
Class<?> clazz;
ClassLoader loader = LibraryLoader.getContextClassLoader();
clazz = classForName(className, Boolean.TRUE, loader);
return clazz;
}
/**
* Creates {@link Class} instance by {@link Class#newInstance()} method call
*
* @param clazz
* @return
*/
public static <T> T instantiate(Class<T> clazz) throws IOException {
T instance;
try {
instance = clazz.newInstance();
} catch (InstantiationException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
}
return instance;
}
/**
* Gets declared method from class
*
* @param clazz
* @param methodName
* @param parameterTypes
* @return {@link Method}
* @throws IOException
*/
public static Method getDeclaredMethod(Class<?> clazz, String methodName,
Class<?>... parameterTypes) throws IOException {
Method method;
try {
method = clazz.getDeclaredMethod(methodName, parameterTypes);
} catch (NoSuchMethodException ex) {
throw new IOException(ex);
} catch (SecurityException ex) {
throw new IOException(ex);
}
return method;
}
/**
* Gets all declared methods from class
*
* @param clazz
* @param methodName
* @param parameterTypes
* @return {@link Method}
* @throws IOException
*/
public static Method[] getDeclaredMethods(Class<?> clazz)
throws IOException {
Method[] methods;
try {
methods = clazz.getDeclaredMethods();
} catch (SecurityException ex) {
throw new IOException(ex);
}
return methods;
}
/**
* Combines the passed modifier values into a single <code>int</code> value
*
* @param modifiers
* @return <code>int</code>
*/
private static int calculateModifier(int[] modifiers) {
int modifier = DEFAULT_MODIFIER;
if (ObjectUtils.notNull(modifiers)) {
int length = modifiers.length;
int modifierValue;
for (int i = 0; i < length; i++) {
modifierValue = modifiers[i];
modifier = modifier | modifierValue;
}
}
return modifier;
}
/**
* Checks whether the passed {@link Class} declares a {@link Method} with the
* given name and modifiers
*
* @param clazz
* @param modifiers
* @param methodName
* @return <code>boolean</code>
* @throws IOException
*/
private static boolean classHasMethod(Class<?> clazz, String methodName,
int... modifiers) throws IOException {
boolean found = Boolean.FALSE;
Method[] methods = getDeclaredMethods(clazz);
int length = methods.length;
int modifier = calculateModifier(modifiers);
Method method;
for (int i = 0; i < length && ObjectUtils.notTrue(found); i++) {
method = methods[i];
found = method.getName().equals(methodName);
if (found && ObjectUtils.notEquals(modifier, DEFAULT_MODIFIER)) {
found = ((method.getModifiers() & modifier) > DEFAULT_MODIFIER);
}
}
return found;
}
/**
* Checks whether the passed {@link Class} or one of its superclasses has a
* {@link Method} with the given name and modifiers
*
* @param clazz
* @param methodName
* @param modifiers
* @return <code>boolean</code>
* @throws IOException
*/
public static boolean hasMethod(Class<?> clazz, String methodName,
int... modifiers) throws IOException {
boolean found = Boolean.FALSE;
Class<?> superClass = clazz;
while (ObjectUtils.notNull(superClass) && ObjectUtils.notTrue(found)) {
found = MetaUtils.classHasMethod(superClass, methodName, modifiers);
if (ObjectUtils.notTrue(found)) {
superClass = superClass.getSuperclass();
}
}
return found;
}
/**
* Checks whether the passed {@link Class} has a public {@link Method} with
* the given name
*
* @param clazz
* @param methodName
* @return <code>boolean</code>
* @throws IOException
*/
public static boolean hasPublicMethod(Class<?> clazz, String methodName)
throws IOException {
boolean found = MetaUtils.hasMethod(clazz, methodName, Modifier.PUBLIC);
return found;
}
/**
* Gets declared field from passed class with specified name
*
* @param clazz
* @param name
* @return {@link Field}
* @throws IOException
*/
public static Field getDeclaredField(Class<?> clazz, String name)
throws IOException {
Field field;
try {
field = clazz.getDeclaredField(name);
} catch (NoSuchFieldException ex) {
throw new IOException(ex);
} catch (SecurityException ex) {
throw new IOException(ex);
}
return field;
}
/**
* Returns passed {@link Field}'s modifier
*
* @param field
* @return <code>int</code>
*/
public static int getModifiers(Field field) {
return field.getModifiers();
}
/**
* Returns passed {@link Method}'s modifier
*
* @param method
* @return <code>int</code>
*/
public static int getModifiers(Method method) {
return method.getModifiers();
}
/**
* Returns type of passed {@link Field} invoking {@link Field#getType()}
* method
*
* @param field
* @return {@link Class}<?>
*/
public static Class<?> getType(Field field) {
return field.getType();
}
/**
* Common method to invoke {@link Method} with reflection
*
* @param method
* @param data
* @param arguments
* @return {@link Object}
* @throws IOException
*/
public static Object invoke(Method method, Object data, Object... arguments)
throws IOException {
Object value;
try {
value = method.invoke(data, arguments);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (InvocationTargetException ex) {
throw new IOException(ex);
}
return value;
}
/**
* Common method to invoke {@link Method} with reflection
*
* @param method
* @param data
* @param arguments
* @return {@link Object}
* @throws IOException
*/
public static Object invokePrivate(Method method, Object data,
Object... arguments) throws IOException {
Object value;
boolean accessible = method.isAccessible();
try {
setAccessible(method, accessible);
value = invoke(method, data, arguments);
} finally {
resetAccessible(method, accessible);
}
return value;
}
/**
* Common method to invoke static {@link Method} with reflection
*
* @param method
* @param arguments
* @return
* @throws IOException
*/
public static Object invokeStatic(Method method, Object... arguments)
throws IOException {
Object value;
try {
value = method.invoke(null, arguments);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (InvocationTargetException ex) {
throw new IOException(ex);
}
return value;
}
/**
* Common method to invoke private static {@link Method}
*
* @param method
* @param arguments
* @return
* @throws IOException
*/
public static Object invokePrivateStatic(Method method, Object... arguments)
throws IOException {
Object value;
boolean accessible = method.isAccessible();
try {
setAccessible(method, accessible);
value = invokeStatic(method, arguments);
} finally {
resetAccessible(method, accessible);
}
return value;
}
/**
* Sets a value on the passed {@link Field}, temporarily setting its
* accessible flag to Boolean.TRUE if needed
*
* @param field
* @param value
* @throws IOException
*/
public static void setFieldValue(Field field, Object data, Object value)
throws IOException {
boolean accessible = field.isAccessible();
try {
setAccessible(field, accessible);
field.set(data, value);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} finally {
resetAccessible(field, accessible);
}
}
/**
* Gets value of specific field in specific {@link Object}
*
* @param field
* @param data
* @return {@link Object}
* @throws IOException
*/
public static Object getFieldValue(Field field, Object data)
throws IOException {
Object value;
boolean accessible = field.isAccessible();
try {
setAccessible(field, accessible);
value = field.get(data);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} finally {
resetAccessible(field, accessible);
}
return value;
}
/**
* Gets value of specific static field
*
* @param field
* @return {@link Object}
* @throws IOException
*/
public static Object getFieldValue(Field field) throws IOException {
Object value = getFieldValue(field, null);
return value;
}
/**
* Gets {@link List} of all {@link Method}s from passed class annotated with
* specified annotation
*
* @param clazz
* @param annotationClass
* @return {@link List}<Method>
* @throws IOException
*/
public static List<Method> getAnnotatedMethods(Class<?> clazz,
Class<? extends Annotation> annotationClass) throws IOException {
List<Method> methods = new ArrayList<Method>();
Method[] allMethods = getDeclaredMethods(clazz);
for (Method method : allMethods) {
if (method.isAnnotationPresent(annotationClass)) {
methods.add(method);
}
}
return methods;
}
/**
* Gets {@link List} of all {@link Field}s from passed class annotated with
* specified annotation
*
* @param clazz
* @param annotationClass
* @return {@link List}<Field>
* @throws IOException
*/
public static List<Field> getAnnotatedFields(Class<?> clazz,
Class<? extends Annotation> annotationClass) throws IOException {
List<Field> fields = new ArrayList<Field>();
Field[] allFields = clazz.getDeclaredFields();
for (Field field : allFields) {
if (field.isAnnotationPresent(annotationClass)) {
fields.add(field);
}
}
return fields;
}
/**
* Gets wrapper class if passed class is primitive type
*
* @param type
* @return {@link Class}<T>
*/
public static <T> Class<T> getWrapper(Class<?> type) {
Class<T> wrapper;
if (type.isPrimitive()) {
if (type.equals(byte.class)) {
wrapper = ObjectUtils.cast(Byte.class);
} else if (type.equals(boolean.class)) {
wrapper = ObjectUtils.cast(Boolean.class);
} else if (type.equals(char.class)) {
wrapper = ObjectUtils.cast(Character.class);
} else if (type.equals(short.class)) {
wrapper = ObjectUtils.cast(Short.class);
} else if (type.equals(int.class)) {
wrapper = ObjectUtils.cast(Integer.class);
} else if (type.equals(long.class)) {
wrapper = ObjectUtils.cast(Long.class);
} else if (type.equals(float.class)) {
wrapper = ObjectUtils.cast(Float.class);
} else if (type.equals(double.class)) {
wrapper = ObjectUtils.cast(Double.class);
} else {
wrapper = ObjectUtils.cast(type);
}
} else {
wrapper = ObjectUtils.cast(type);
}
return wrapper;
}
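// Illustrative example: getWrapper(int.class) yields Integer.class, while a
// non-primitive such as String.class is returned unchanged.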
/**
* Returns the default value if the passed class is primitive, otherwise returns null
*
* @param clazz
* @return Object
*/
public static Object getDefault(Class<?> clazz) {
Object value;
if (clazz.isPrimitive()) {
if (clazz.equals(byte.class)) {
value = BYTE_DEF;
} else if (clazz.equals(boolean.class)) {
value = BOOLEAN_DEF;
} else if (clazz.equals(char.class)) {
value = CHAR_DEF;
} else if (clazz.equals(short.class)) {
value = SHORT_DEF;
} else if (clazz.equals(int.class)) {
value = INT_DEF;
} else if (clazz.equals(long.class)) {
value = LONG_DEF;
} else if (clazz.equals(float.class)) {
value = FLOAT_DEF;
} else if (clazz.equals(double.class)) {
value = DOUBLE_DEF;
} else {
value = null;
}
} else {
value = null;
}
return value;
}
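// Illustrative example: getDefault(int.class) yields 0, getDefault(boolean.class)
// yields false, and any non-primitive class yields null.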
}
|
package org.lightmare.utils.reflect;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.utils.ObjectUtils;
/**
* Class to use reflection {@link Method} calls and {@link Field} information
* sets
*
* @author levan
*
*/
public class MetaUtils {
// default values for primitives
private static byte byteDef = 0;
private static boolean booleanDef = Boolean.FALSE;
private static char charDef = '\u0000';
private static short shortDef = 0;
private static int intDef = 0;
private static long longDef = 0L;
private static float floatDef = 0F;
private static double doubleDef;
// default value for modifier
private static final int DEFAULT_MODIFIER = 0;
/**
* Sets object accessible flag as true if it is not
*
* @param accessibleObject
* @param accessible
*/
private static void setAccessible(AccessibleObject accessibleObject,
boolean accessible) {
if (ObjectUtils.notTrue(accessible)) {
synchronized (accessibleObject) {
if (ObjectUtils.notTrue(accessibleObject.isAccessible())) {
accessibleObject.setAccessible(Boolean.TRUE);
}
}
}
}
/**
* Resets the passed {@link AccessibleObject}'s accessible flag to the passed
* boolean value if that value is false
*
* @param accessibleObject
* @param accessible
*/
private static void resetAccessible(AccessibleObject accessibleObject,
boolean accessible) {
if (ObjectUtils.notTrue(accessible)) {
synchronized (accessibleObject) {
if (accessibleObject.isAccessible()) {
accessibleObject.setAccessible(accessible);
}
}
}
}
/**
* Makes the passed {@link Constructor} accessible if needed and invokes
* {@link Constructor#newInstance(Object...)} method
*
* @param constructor
* @param parameters
* @return <code>T</code>
* @throws IOException
*/
public static <T> T newInstance(Constructor<T> constructor,
Object... parameters) throws IOException {
T instance;
boolean accessible = constructor.isAccessible();
try {
setAccessible(constructor, accessible);
instance = constructor.newInstance(parameters);
} catch (InstantiationException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (InvocationTargetException ex) {
throw new IOException(ex);
} finally {
resetAccessible(constructor, accessible);
}
return instance;
}
/**
* Gets declared constructor for given {@link Class} and given parameters
*
* @param type
* @param parameterTypes
* @return {@link Constructor}
* @throws IOException
*/
public static <T> Constructor<T> getConstructor(Class<T> type,
Class<?>... parameterTypes) throws IOException {
Constructor<T> constructor;
try {
constructor = type.getDeclaredConstructor(parameterTypes);
} catch (NoSuchMethodException ex) {
throw new IOException(ex);
} catch (SecurityException ex) {
throw new IOException(ex);
}
return constructor;
}
/**
* Instantiates class by {@link Constructor} (MetaUtils
* {@link #newInstance(Constructor, Object...)}) after
* {@link MetaUtils#getConstructor(Class, Class...)} method call
*
* @param type
* @param parameterTypes
* @param parameters
* @return <code>T</code>
* @throws IOException
*/
public static <T> T callConstructor(Class<T> type,
Class<?>[] parameterTypes, Object... parameters) throws IOException {
T instance;
Constructor<T> constructor = getConstructor(type, parameterTypes);
instance = newInstance(constructor, parameters);
return instance;
}
/**
* Loads class by name
*
* @param className
* @return {@link Class}
* @throws IOException
*/
public static Class<?> classForName(String className) throws IOException {
Class<?> clazz = classForName(className, null);
return clazz;
}
/**
* Loads class by name with specific {@link ClassLoader} if it is not
* <code>null</code>
*
* @param className
* @param loader
* @return {@link Class}
* @throws IOException
*/
public static Class<?> classForName(String className, ClassLoader loader)
throws IOException {
Class<?> clazz = classForName(className, Boolean.TRUE, loader);
return clazz;
}
/**
* Loads and if initialize parameter is true initializes class by name with
* specific {@link ClassLoader} if it is not <code>null</code>
*
* @param className
* @param initialize
* @param loader
* @return {@link Class}
* @throws IOException
*/
public static Class<?> classForName(String className, boolean initialize,
ClassLoader loader) throws IOException {
Class<?> clazz;
try {
if (loader == null) {
clazz = Class.forName(className);
} else {
clazz = Class.forName(className, initialize, loader);
}
} catch (ClassNotFoundException ex) {
throw new IOException(ex);
}
return clazz;
}
/**
* Loads class by name with current {@link Thread}'s {@link ClassLoader} and
* initializes it
*
* @param className
* @return {@link Class}
* @throws IOException
*/
public static Class<?> initClassForName(String className)
throws IOException {
Class<?> clazz;
ClassLoader loader = LibraryLoader.getContextClassLoader();
clazz = classForName(className, Boolean.TRUE, loader);
return clazz;
}
/**
* Creates {@link Class} instance by {@link Class#newInstance()} method call
*
* @param clazz
* @return
*/
public static <T> T instantiate(Class<T> clazz) throws IOException {
T instance;
try {
instance = clazz.newInstance();
} catch (InstantiationException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
}
return instance;
}
/**
* Gets declared method from class
*
* @param clazz
* @param methodName
* @param parameterTypes
* @return {@link Method}
* @throws IOException
*/
public static Method getDeclaredMethod(Class<?> clazz, String methodName,
Class<?>... parameterTypes) throws IOException {
Method method;
try {
method = clazz.getDeclaredMethod(methodName, parameterTypes);
} catch (NoSuchMethodException ex) {
throw new IOException(ex);
} catch (SecurityException ex) {
throw new IOException(ex);
}
return method;
}
/**
* Gets all declared methods from class
*
* @param clazz
* @param methodName
* @param parameterTypes
* @return {@link Method}
* @throws IOException
*/
public static Method[] getDeclaredMethods(Class<?> clazz)
throws IOException {
Method[] methods;
try {
methods = clazz.getDeclaredMethods();
} catch (SecurityException ex) {
throw new IOException(ex);
}
return methods;
}
/**
* Combines the passed modifier values into a single <code>int</code> value
*
* @param modifiers
* @return <code>int</code>
*/
private static int calculateModifier(int[] modifiers) {
int modifier = DEFAULT_MODIFIER;
if (ObjectUtils.notNull(modifiers)) {
int length = modifiers.length;
int modifierValue;
for (int i = 0; i < length; i++) {
modifierValue = modifiers[i];
modifier = modifier | modifierValue;
}
}
return modifier;
}
/**
* Checks whether the passed {@link Class} declares a {@link Method} with the
* given name and modifiers
*
* @param clazz
* @param modifiers
* @param methodName
* @return <code>boolean</code>
* @throws IOException
*/
private static boolean classHasMethod(Class<?> clazz, String methodName,
int... modifiers) throws IOException {
boolean found = Boolean.FALSE;
Method[] methods = getDeclaredMethods(clazz);
int length = methods.length;
int modifier = calculateModifier(modifiers);
Method method;
for (int i = 0; i < length && ObjectUtils.notTrue(found); i++) {
method = methods[i];
found = method.getName().equals(methodName);
if (found && ObjectUtils.notEquals(modifier, DEFAULT_MODIFIER)) {
found = ((method.getModifiers() & modifier) > DEFAULT_MODIFIER);
}
}
return found;
}
/**
* Checks whether the passed {@link Class} or one of its superclasses has a
* {@link Method} with the given name and modifiers
*
* @param clazz
* @param methodName
* @param modifiers
* @return <code>boolean</code>
* @throws IOException
*/
public static boolean hasMethod(Class<?> clazz, String methodName,
int... modifiers) throws IOException {
boolean found = Boolean.FALSE;
Class<?> superClass = clazz;
while (ObjectUtils.notNull(superClass) && ObjectUtils.notTrue(found)) {
found = MetaUtils.classHasMethod(superClass, methodName, modifiers);
if (ObjectUtils.notTrue(found)) {
superClass = superClass.getSuperclass();
}
}
return found;
}
/**
* Checks whether the passed {@link Class} has a public {@link Method} with
* the given name
*
* @param clazz
* @param methodName
* @return <code>boolean</code>
* @throws IOException
*/
public static boolean hasPublicMethod(Class<?> clazz, String methodName)
throws IOException {
boolean found = MetaUtils.hasMethod(clazz, methodName, Modifier.PUBLIC);
return found;
}
/**
* Gets declared field from passed class with specified name
*
* @param clazz
* @param name
* @return {@link Field}
* @throws IOException
*/
public static Field getDeclaredField(Class<?> clazz, String name)
throws IOException {
Field field;
try {
field = clazz.getDeclaredField(name);
} catch (NoSuchFieldException ex) {
throw new IOException(ex);
} catch (SecurityException ex) {
throw new IOException(ex);
}
return field;
}
/**
* Returns passed {@link Field}'s modifier
*
* @param field
* @return <code>int</code>
*/
public static int getModifiers(Field field) {
return field.getModifiers();
}
/**
* Returns passed {@link Method}'s modifier
*
* @param method
* @return <code>int</code>
*/
public static int getModifiers(Method method) {
return method.getModifiers();
}
/**
* Returns type of passed {@link Field} invoking {@link Field#getType()}
* method
*
* @param field
* @return {@link Class}<?>
*/
public static Class<?> getType(Field field) {
return field.getType();
}
/**
* Common method to invoke {@link Method} with reflection
*
* @param method
* @param data
* @param arguments
* @return {@link Object}
* @throws IOException
*/
public static Object invoke(Method method, Object data, Object... arguments)
throws IOException {
Object value;
try {
value = method.invoke(data, arguments);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (InvocationTargetException ex) {
throw new IOException(ex);
}
return value;
}
/**
* Common method to invoke a (possibly private) {@link Method} with reflection, temporarily making it accessible
*
* @param method
* @param data
* @param arguments
* @return {@link Object}
* @throws IOException
*/
public static Object invokePrivate(Method method, Object data,
Object... arguments) throws IOException {
Object value;
boolean accessible = method.isAccessible();
try {
setAccessible(method, accessible);
value = invoke(method, data, arguments);
} finally {
resetAccessible(method, accessible);
}
return value;
}
/**
* Common method to invoke static {@link Method} with reflection
*
* @param method
* @param arguments
* @return
* @throws IOException
*/
public static Object invokeStatic(Method method, Object... arguments)
throws IOException {
Object value;
try {
value = method.invoke(null, arguments);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (InvocationTargetException ex) {
throw new IOException(ex);
}
return value;
}
/**
* Common method to invoke private static {@link Method}
*
* @param method
* @param arguments
* @return
* @throws IOException
*/
public static Object invokePrivateStatic(Method method, Object... arguments)
throws IOException {
Object value;
boolean accessible = method.isAccessible();
try {
setAccessible(method, accessible);
value = invokeStatic(method, arguments);
} finally {
resetAccessible(method, accessible);
}
return value;
}
/**
* Sets value to {@link Field} on the given object, temporarily making the
* field accessible if needed
*
* @param field
* @param data
* @param value
* @throws IOException
*/
public static void setFieldValue(Field field, Object data, Object value)
throws IOException {
boolean accessible = field.isAccessible();
try {
setAccessible(field, accessible);
field.set(data, value);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} finally {
resetAccessible(field, accessible);
}
}
/**
* Gets value of specific field in specific {@link Object}
*
* @param field
* @param data
* @return {@link Object}
* @throws IOException
*/
public static Object getFieldValue(Field field, Object data)
throws IOException {
Object value;
boolean accessible = field.isAccessible();
try {
setAccessible(field, accessible);
value = field.get(data);
} catch (IllegalArgumentException ex) {
throw new IOException(ex);
} catch (IllegalAccessException ex) {
throw new IOException(ex);
} finally {
resetAccessible(field, accessible);
}
return value;
}
/**
* Gets value of specific static field
*
* @param field
* @return {@link Object}
* @throws IOException
*/
public static Object getFieldValue(Field field) throws IOException {
Object value = getFieldValue(field, null);
return value;
}
/**
* Gets {@link List} of all {@link Method}s from passed class annotated with
* specified annotation
*
* @param clazz
* @param annotationClass
* @return {@link List}<Method>
* @throws IOException
*/
public static List<Method> getAnnotatedMethods(Class<?> clazz,
Class<? extends Annotation> annotationClass) throws IOException {
List<Method> methods = new ArrayList<Method>();
Method[] allMethods = getDeclaredMethods(clazz);
for (Method method : allMethods) {
if (method.isAnnotationPresent(annotationClass)) {
methods.add(method);
}
}
return methods;
}
/**
* Gets {@link List} of all {@link Field}s from passed class annotated with
* specified annotation
*
* @param clazz
* @param annotationClass
* @return {@link List}<Field>
* @throws IOException
*/
public static List<Field> getAnnotatedFields(Class<?> clazz,
Class<? extends Annotation> annotationClass) throws IOException {
List<Field> fields = new ArrayList<Field>();
Field[] allFields = clazz.getDeclaredFields();
for (Field field : allFields) {
if (field.isAnnotationPresent(annotationClass)) {
fields.add(field);
}
}
return fields;
}
/**
* Gets wrapper class if passed class is primitive type
*
* @param type
* @return {@link Class}<T>
*/
public static <T> Class<T> getWrapper(Class<?> type) {
Class<T> wrapper;
if (type.isPrimitive()) {
if (type.equals(byte.class)) {
wrapper = ObjectUtils.cast(Byte.class);
} else if (type.equals(boolean.class)) {
wrapper = ObjectUtils.cast(Boolean.class);
} else if (type.equals(char.class)) {
wrapper = ObjectUtils.cast(Character.class);
} else if (type.equals(short.class)) {
wrapper = ObjectUtils.cast(Short.class);
} else if (type.equals(int.class)) {
wrapper = ObjectUtils.cast(Integer.class);
} else if (type.equals(long.class)) {
wrapper = ObjectUtils.cast(Long.class);
} else if (type.equals(float.class)) {
wrapper = ObjectUtils.cast(Float.class);
} else if (type.equals(double.class)) {
wrapper = ObjectUtils.cast(Double.class);
} else {
wrapper = ObjectUtils.cast(type);
}
} else {
wrapper = ObjectUtils.cast(type);
}
return wrapper;
}
/**
* Returns the default value if the passed class is a primitive type, otherwise returns null
*
* @param clazz
* @return Object
*/
public static Object getDefault(Class<?> clazz) {
Object value;
if (clazz.isPrimitive()) {
if (clazz.equals(byte.class)) {
value = byteDef;
} else if (clazz.equals(boolean.class)) {
value = booleanDef;
} else if (clazz.equals(char.class)) {
value = charDef;
} else if (clazz.equals(short.class)) {
value = shortDef;
} else if (clazz.equals(int.class)) {
value = intDef;
} else if (clazz.equals(long.class)) {
value = longDef;
} else if (clazz.equals(float.class)) {
value = floatDef;
} else if (clazz.equals(double.class)) {
value = doubleDef;
} else {
value = null;
}
} else {
value = null;
}
return value;
}
}
|
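/*
 * Hypothetical usage sketch (not part of the original sources): it shows how the MetaUtils
 * reflection helpers above can be combined to instantiate a bean and invoke its annotated
 * methods. The @Init annotation and SampleBean class are made up for illustration, and the
 * sketch is assumed to live next to MetaUtils (package declaration and import omitted).
 */
import java.io.IOException;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.util.List;
public class MetaUtilsUsageSketch {
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)
    public @interface Init {
    }
    public static class SampleBean {
        @Init
        public void start() {
            System.out.println("started");
        }
    }
    public static void main(String[] args) throws IOException {
        // instantiate() wraps Class.newInstance() and rethrows reflection failures as IOException
        SampleBean bean = MetaUtils.instantiate(SampleBean.class);
        // collect every method carrying @Init and invoke each one on the bean
        List<Method> initializers = MetaUtils.getAnnotatedMethods(SampleBean.class, Init.class);
        for (Method method : initializers) {
            MetaUtils.invoke(method, bean);
        }
        // hasPublicMethod() walks the class hierarchy looking for a public method with the name
        System.out.println(MetaUtils.hasPublicMethod(SampleBean.class, "start"));
    }
}
|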
package org.nybatis.core.validation;
import org.nybatis.core.exception.unchecked.ParseException;
import org.nybatis.core.model.NDate;
import org.nybatis.core.util.Types;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Validator for common value validity checks
*
* @author nayasis@gmail.com
*/
public class Validator {
/**
* check whether value is in a valid date format.
*
* @param value value (format is assumed to be 'YYYY-MM-DD')
* @return true if value is in a valid date format.
*/
public static boolean isDate( String value ) {
return isDate( value, null );
}
/**
* check whether value is in a valid date format.
*
* @param value date text
* @param format date format (ex: YYYY-MM-DD HH:MI:SS)
* @return true if value is in a valid date format.
*/
public static boolean isDate( String value, String format ) {
try {
new NDate( value, format );
return true;
} catch ( ParseException e ) {
return false;
}
}
/**
* check whether value is null.
* @param value check value
* @return true if value is null.
*/
public static boolean isNull( Object value ) {
return value == null;
}
/**
* check whether value is not null.
* @param value check value
* @return true if value is not null.
*/
public static boolean isNotNull( Object value ) {
return ! isNull( value );
}
/**
* check whether value is null, empty, or consists only of spaces.
* @param value check value
* @return true if value is null, empty, or consists only of spaces.
*/
public static boolean isBlank( String value ) {
return value == null || value.length() == 0 || value.trim().length() == 0;
}
/**
* check whether value is not null, not empty, and does not consist only of spaces.
* @param value check value
* @return true if value is not null, not empty, and does not consist only of spaces.
*/
public static boolean isNotBlank( String value ) {
return ! isBlank( value );
}
/**
* check whether value is null or empty.<br>
*
* The condition used to judge emptiness depends on the type of the instance.
* <pre>
* 1. String, StringBuffer, StringBuilder : length is zero.
* 2. Map, Collection : isEmpty() is true.
* 3. Array : length is zero.
* 4. Any other type : never considered empty.
* </pre>
* @param value check value
* @return true if value is null or empty.
*/
public static boolean isEmpty( Object value ) {
if( value == null ) return true;
if( value instanceof String ) {
return ( (String) value ).length() == 0;
} else if( value instanceof StringBuffer ) {
return ( (StringBuffer) value ).length() == 0;
} else if( value instanceof StringBuilder ) {
return ( (StringBuilder) value ).length() == 0;
} else if( value instanceof Map ) {
return ( (Map) value ).isEmpty();
} else if( value instanceof Collection ) {
return ( (Collection) value ).isEmpty();
} else if( Types.isArrayOrList( value ) ) {
return Array.getLength( value ) == 0;
}
return false;
}
/**
* check whether value is neither null nor empty.<br>
*
* The condition used to judge emptiness depends on the type of the instance.
* <pre>
* 1. String, StringBuffer, StringBuilder : length is zero.
* 2. Map, Collection : isEmpty() is true.
* 3. Array : length is zero.
* 4. Any other type : never considered empty.
* </pre>
* @param value check value
* @return true if value is neither null nor empty.
*/
public static boolean isNotEmpty( Object value ) {
return ! isEmpty( value );
}
public static boolean isMatched( String value, String pattern ) {
return value != null && pattern != null && Pattern.matches( pattern, value );
}
public static boolean isNotMatched( String value, String pattern ) {
return ! isMatched( value, pattern );
}
public static boolean isFound( String value, String pattern ) {
if( value == null || pattern == null ) return false;
Pattern regexp = Pattern.compile( pattern, Pattern.MULTILINE | Pattern.DOTALL );
Matcher matcher = regexp.matcher( value );
return matcher.find();
}
public static boolean isNotFound( String value, String pattern ) {
return ! isFound( value, pattern );
}
public static boolean isFound( String value, String pattern, int flags ) {
if( value == null || pattern == null ) return false;
Pattern regexp = Pattern.compile( pattern, flags );
Matcher matcher = regexp.matcher( value );
return matcher.find();
}
public static boolean isNotFound( String value, String pattern, int flags ) {
return ! isFound( value, pattern, flags );
}
/**
* check whether value consists only of digits and the minus sign (a fixed, integer-like number).
*
* @param value check value
* @return true if value consists only of digits and '-'.
*/
public static boolean isFixedNumber( String value ) {
return isMatched( value, "^[-0-9]+$" );
}
/**
* check whether value is a non-negative fixed (integer-like) number.
* @param value check value
* @return true if value is a non-negative integer.
*/
public static boolean isPositiveFixedNumber( String value ) {
if( ! isFixedNumber( value ) ) return false;
return Long.parseLong( value ) >= 0;
}
/**
* check whether value can be parsed as a number (including decimals).
*
* @param value check value
* @return true if value can be parsed as a number.
*/
public static boolean isNumeric( String value ) {
try {
Double.parseDouble( value );
return true;
} catch( Exception e ) {
return false;
}
}
/**
* check whether value's class is a numeric type.
*
* @param value value to check
* @return true if value's class is a numeric type.
*/
public static boolean isNumericClass( Object value ) {
return value != null && isNumericClass( value.getClass() );
}
/**
* check whether the given class is a numeric type (primitive or boxed).
*
* @param klass class to check
* @return true if the class is a numeric type.
*/
public static boolean isNumericClass( Class<?> klass ) {
if( klass == null ) return false;
return (
klass == int.class ||
klass == Integer.class ||
klass == long.class ||
klass == Long.class ||
klass == short.class ||
klass == Short.class ||
klass == float.class ||
klass == Float.class ||
klass == double.class ||
klass == Double.class ||
klass == byte.class ||
klass == Byte.class ||
klass == BigDecimal.class ||
klass == BigInteger.class
);
}
/**
* check whether the object's class is boolean or Boolean.
*
* @param object instance to check
* @return true if the object's class is boolean or Boolean.
*/
public static boolean isBooleanClass( Object object ) {
if( object == null ) return false;
Class klass = object.getClass();
return ( klass == boolean.class || klass == Boolean.class );
}
/**
* check whether value contains at least one digit.
*
* @param value check value
* @return true if value contains at least one digit.
*/
public static boolean hasNumber( String value ) {
return ! isMatched( value, "^[^0-9]+$" );
}
/**
* check whether value contains at least one Korean character.
*
* @param value check value
* @return true if value contains at least one Korean character.
*/
public static boolean hasKorean( String value ) {
return ! isMatched( value, "^[^
}
/**
* check whether value consists only of Korean characters.
*
* @param value check value
* @return true if value consists only of Korean characters.
*/
public static boolean isKorean( String value ) {
return isMatched( value, "^[
}
/**
* check whether value consists only of English letters.
*
* @param value check value
* @return true if value consists only of English letters.
*/
public static boolean isEnglish( String value ) {
return isMatched( value, "^[a-zA-Z]+$" );
}
/**
* check whether value contains at least one English letter.
*
* @param value check value
* @return true if value contains at least one English letter.
*/
public static boolean hasEnglish( String value ) {
return ! isMatched( value, "^[^a-zA-Z]+$" );
}
/**
* check whether value consists only of digits and Korean characters.
*
* @param value check value
* @return true if value consists only of digits and Korean characters.
*/
public static boolean isNumberOrKorean( String value ) {
return isMatched( value, "^[0-9
}
/**
* check whether value consists only of digits and English letters.
*
* @param value check value
* @return true if value consists only of digits and English letters.
*/
public static boolean isNumberOrEnglish( String value ) {
return isMatched( value, "^[0-9a-zA-Z]+$" );
}
/**
* check whether value's pattern is email or not
*
* @param value check value
* @return true if value's pattern is email
*/
public static boolean isEmail( String value ) {
return isMatched( value, "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,6}$" );
}
/**
* Lets you replace a null (or empty) value with another value.
*
* If value is not null and not empty, it is returned as-is.
* Otherwise replaceValue is returned if it is not null;
* otherwise the first non-null element of anotherReplaceValue is returned.
*
* @param value value to examine for not-null and not-empty.
* @param replaceValue fallback value, returned if not null.
* @param anotherReplaceValue additional fallback values, examined for not-null in order.
* @return the first qualifying value, or null if none qualifies.
*/
public static <T> T nvl( T value, T replaceValue, T... anotherReplaceValue ) {
if( isNotEmpty(value) ) return value;
if( isNotNull(replaceValue) ) return replaceValue;
for( T val : anotherReplaceValue ) {
if( isNotNull( val ) ) return val;
}
return null;
}
}
|
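/*
 * Hypothetical usage sketch (not part of the original sources): it exercises a few of the
 * Validator checks defined above. The expected results noted in the comments follow directly
 * from the method contracts; the class name is made up for illustration.
 */
import org.nybatis.core.validation.Validator;
public class ValidatorUsageSketch {
    public static void main(String[] args) {
        System.out.println(Validator.isBlank("   "));                 // true : consists only of spaces
        System.out.println(Validator.isEmail("user@example.com"));    // true : matches the email pattern
        System.out.println(Validator.isFixedNumber("-42"));           // true : digits and '-' only
        System.out.println(Validator.isFound("abc 123", "\\d+"));     // true : the pattern occurs somewhere in the value
        // nvl() checks the first argument for not-empty and the fallback arguments for not-null
        System.out.println(Validator.nvl(null, "fallback"));          // prints "fallback"
    }
}
|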
package org.opentripplanner.analyst;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.commons.math3.util.FastMath;
import org.opentripplanner.analyst.core.WeightingFunction;
import java.io.IOException;
import java.io.Serializable;
/**
* A pair of parallel histograms representing how many features are located at each amount of travel time
* away from a single origin. One array contains the raw counts of features (e.g. number of places of employment
* M minutes from home) and the other array contains the weighted sums of those features accounting for their
* magnitudes (e.g. the number of jobs in all places of employment M minutes away from home).
* All time values are rounded down into 1-minute bins (0-60 seconds = minute 0, 61-120 = min 1, etc.)
*/
public class Histogram implements Serializable {
/**
* The weighting functions to be used, as an array. Generally this will be an array of 120
* functions, one to calculate cumulative accessibility for each minute in two hours.
* But any additive function can be used, and the output of the functions will be placed in
* counts and sums parallel to this array.
*/
public static WeightingFunction[] weightingFunctions;
/**
* The steepness of the logistic rolloff, basically a smoothing parameter.
* Must be negative or your results will be backwards (i.e. jobs nearby will be worth less than jobs far away).
*
* The larger it is in magnitude, the less smoothing. setting it to -2 / 60.0 yields a rolloff of about 5 minutes.
*/
// TODO: should not be final, but that means that we need to rebuild the weighting functions when it is changed.
public static final double LOGISTIC_STEEPNESS = -2 / 60.0;
static {
weightingFunctions = new WeightingFunction[120];
for (int i = 0; i < 120; i++) {
weightingFunctions[i] = new WeightingFunction.Logistic((i + 1) * 60, LOGISTIC_STEEPNESS);
}
}
/**
* The number of features that can be reached within each one-minute bin. Index 0 is 0-1 minutes, index 50 is 50-51
* minutes, etc. The features are not weighted by their magnitudes, so values represent (for example) the number of
* places of employment that can be reached rather than the total number of jobs in all those places of employment.
*/
public int[] counts;
/**
* The weighted sum of all features that can be reached within each one-minute bin.
* Index 0 is 0-1 minutes, index 50 is 50-51 minutes, etc.
* Features are weighted by their magnitudes, so values represent (for example) the total number of jobs in
* all accessible places of employment, rather than the number of places of employment.
*/
public int[] sums;
/**
* Given parallel arrays of travel times and magnitudes for any number of destination features, construct
* histograms that represent the distribution of individual features and total opportunities as a function of
* travel time. The length of the arrays containing these histograms will be equal to the maximum travel time
* specified in the original search request, in minutes.
* @param times the time at which each destination is reached. The array will be destructively sorted in place.
* @param weight the weight or magnitude of each destination reached. it is parallel to times.
*/
public Histogram (int[] times, int[] weight) {
int size = weightingFunctions.length;
// optimization: bin times and weights by seconds.
// there will often be more than one destination in a given second due to the pigeonhole principle:
// there are a lot more destinations than there are seconds
int maxSecs = Integer.MIN_VALUE;
for (int time : times) {
if (time == Integer.MAX_VALUE)
continue;
if (time > maxSecs)
maxSecs = time;
}
int[] binnedCounts = new int[maxSecs + 1];
int[] binnedWeights = new int[maxSecs + 1];
for (int i = 0; i < times.length; i++) {
if (times[i] == Integer.MAX_VALUE)
continue;
binnedCounts[times[i]] += 1;
binnedWeights[times[i]] += weight[i];
}
// we use logistic rolloff, so we want to compute the counts and sums using floating-point values before truncation
double[] tmpCounts = new double[size];
double[] tmpSums = new double[size];
for (int i = 0; i < binnedCounts.length; i++) {
if (binnedCounts[i] == 0)
continue;
for (int j = 0; j < weightingFunctions.length; j++) {
double w = weightingFunctions[j].getWeight(i);
tmpCounts[j] += w * binnedCounts[i];
tmpSums[j] += w * binnedWeights[i];
}
}
// convert to ints
counts = new int[size];
sums = new int[size];
for (int i = 0; i < weightingFunctions.length; i++) {
counts[i] = (int) FastMath.round(tmpCounts[i]);
sums[i] = (int) FastMath.round(tmpSums[i]);
}
// make density rather than cumulative
// note that counts[0] is already a density so we don't touch it
for (int i = weightingFunctions.length - 1; i > 0; i--) {
counts[i] -= counts[i - 1];
sums[i] -= sums[i - 1];
}
}
/** no-arg constructor for serialization/deserialization */
public Histogram () {}
/**
* Serialize this pair of histograms out as a JSON document using the given JsonGenerator. The format is:
* <pre> {
* sums: [],
* counts: []
* } </pre>
*/
public void writeJson(JsonGenerator jgen) throws JsonGenerationException, IOException {
// The weighted sums: total opportunities (sum of the features' magnitudes) reached during each minute
jgen.writeArrayFieldStart("sums"); {
for(int sum : sums) {
jgen.writeNumber(sum);
}
}
jgen.writeEndArray();
// The raw counts: number of features reached during each minute, ignoring their magnitudes
jgen.writeArrayFieldStart("counts"); {
for(int count : counts) {
jgen.writeNumber(count);
}
}
jgen.writeEndArray();
}
}
|
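/*
 * Hypothetical usage sketch (not part of the original sources): it builds a Histogram from a
 * handful of destinations. Travel times are in seconds, weights are the opportunity magnitudes
 * (e.g. jobs), and the resulting parallel arrays hold one-minute density bins, one per
 * weighting function. The figures used here are made up for illustration.
 */
import org.opentripplanner.analyst.Histogram;
public class HistogramUsageSketch {
    public static void main(String[] args) {
        // three destinations reached after 90 s, 300 s and 3600 s, holding 10, 5 and 20 jobs
        int[] times = { 90, 300, 3600 };
        int[] jobs = { 10, 5, 20 };
        Histogram histogram = new Histogram(times, jobs);
        System.out.println("bins: " + histogram.counts.length);           // one bin per weighting function (120)
        System.out.println("features in minute 1: " + histogram.counts[1]);
        System.out.println("jobs in minute 1: " + histogram.sums[1]);
    }
}
|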
package org.redline_rpm.header;
import org.redline_rpm.Util;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public abstract class AbstractHeader {
public interface Tag {
int NULL_ENTRY = 0;
int CHAR_ENTRY = 1;
int INT8_ENTRY = 2;
int INT16_ENTRY = 3;
int INT32_ENTRY = 4;
int INT64_ENTRY = 5;
int STRING_ENTRY = 6;
int BIN_ENTRY = 7;
int STRING_ARRAY_ENTRY = 8;
int I18NSTRING_ENTRY = 9;
int ASN1_ENTRY = 10;
int OPENPGP_ENTRY = 11;
int getCode();
int getType();
String getName();
/**
* @return true if the tag's type expects an array, false otherwise.
*/
public abstract boolean isArrayType();
}
protected static final int HEADER_HEADER_SIZE = 16;
protected static final int ENTRY_SIZE = 16;
protected static final int MAGIC_WORD = 0x8EADE801;
protected final Map< Integer, Tag> tags = new HashMap< Integer, Tag>();
protected final Map< Integer, Entry< ?>> entries = new TreeMap< Integer, Entry< ?>>();
/**
* place to put the changelog entries. we can't use entries because it is a map and each changelog
* entry shares the same tag, so later entries would overwrite earlier ones.
*/
protected final List<Entry< ?>> changelogs = new LinkedList<Entry< ?>>();
protected final Map< Entry< ?>, Integer> pending = new LinkedHashMap< Entry< ?>, Integer>();
protected int startPos;
protected int endPos;
protected abstract boolean pad();
/**
* Reads the entire header contents for this channel and returns the number of entries
* found.
* @param in the ReadableByteChannel to read
* @return the number read
* @throws IOException there was an IO error
*/
public int read( ReadableByteChannel in) throws IOException {
ByteBuffer header = Util.fill( in, HEADER_HEADER_SIZE);
int magic = header.getInt();
// TODO: Determine if this hack to fix mangled headers for some RPMs is really needed.
if ( magic == 0) {
header.compact();
Util.fill( in, header);
magic = header.getInt();
}
Util.check( MAGIC_WORD, magic);
header.getInt();
final ByteBuffer index = Util.fill( in, header.getInt() * ENTRY_SIZE);
final int total = header.getInt();
final int pad = pad() ? Util.round( total, 7) - total : 0;
final ByteBuffer data = Util.fill( in, total + pad);
int count = 0;
while ( index.remaining() >= ENTRY_SIZE) {
readEntry( index.getInt(), index.getInt(), index.getInt(), index.getInt(), data);
count++;
}
return count;
}
/**
* Writes this header section to the provided file at the current position and returns the
* required padding. The caller is responsible for adding the padding immediately after
* this data.
* @param out the WritableByteChannel to output to
* @return the number written
* @throws IOException there was an IO error
*/
public int write( WritableByteChannel out) throws IOException {
final ByteBuffer header = getHeader();
final ByteBuffer index = getIndex();
final ByteBuffer data = getData( index);
data.flip();
int pad = pad() ? Util.round( data.remaining(), 7) - data.remaining() : 0;
header.putInt( data.remaining());
Util.empty( out, ( ByteBuffer) header.flip());
Util.empty( out, ( ByteBuffer) index.flip());
Util.empty( out, data);
return pad;
}
public int count() {
return entries.size();
}
/**
* Allocates a buffer for the header structure's own header. The resulting buffer will be prefilled with
* the necessary magic data and the correct index count, but will require an integer value to
* be written with the total data section size once data writing is complete.
* This method must be invoked before building the index or data sections.
* @return a buffer containing the header
* @throws IOException there was an IO error
*/
protected ByteBuffer getHeader() throws IOException {
ByteBuffer buffer = ByteBuffer.allocate( HEADER_HEADER_SIZE);
buffer.putInt( MAGIC_WORD);
buffer.putInt( 0);
buffer.putInt( count());
return buffer;
}
/**
* Allocates a buffer for the index structure. The resulting buffer will be ready for
* writing of the entry indexes.
* This method must be invoked before building the data section, but after building the header.
* @return a buffer containing the header
* @throws IOException there was an IO error
*/
protected ByteBuffer getIndex() throws IOException {
return ByteBuffer.allocate( count() * ENTRY_SIZE);
}
/**
* Builds the data section in memory. Each entry writes its corresponding index into the provided
* index buffer and then writes its data into the returned buffer.
* @param index ByteBuffer of the index
* @return a buffer containing the entire data section.
* @throws IOException there was an IO error
*/
protected ByteBuffer getData( final ByteBuffer index) throws IOException {
int offset = 0;
final List< ByteBuffer> buffers = new LinkedList< ByteBuffer>();
final Iterator< Integer> i = entries.keySet().iterator();
index.position( 16);
final Entry< ?> first = entries.get( i.next());
Entry< ?> entry = null;
try {
while ( i.hasNext()) {
entry = entries.get( i.next());
offset = writeData( buffers, index, entry, offset);
}
// now write the changelogs
for (Entry< ?> clentry : changelogs) {
offset = writeData(buffers, index, clentry, offset);
}
index.position( 0);
offset = writeData( buffers, index, first, offset);
index.position( index.limit());
} catch ( IllegalArgumentException e) {
throw new RuntimeException( "Error while writing '" + entry + "'.", e);
}
ByteBuffer data = ByteBuffer.allocate( offset);
for ( ByteBuffer buffer : buffers) data.put( buffer);
return data;
}
protected int writeData( final Collection< ByteBuffer> buffers, final ByteBuffer index, final Entry< ?> entry, int offset) {
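// getOffset() rounds the running offset up to the entry's alignment boundary; any gap is padded with an empty buffer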
final int shift = entry.getOffset( offset) - offset;
if ( shift > 0) buffers.add( ByteBuffer.allocate( shift));
offset += shift;
final int size = entry.size();
final ByteBuffer buffer = ByteBuffer.allocate( size);
entry.index( index, offset);
if ( entry.ready()) {
entry.write( buffer);
buffer.flip();
}
else pending.put( entry, offset);
buffers.add( buffer);
return offset + size;
}
public void writePending( final FileChannel channel) {
for ( Entry< ?> entry : pending.keySet()) {
try {
ByteBuffer data = ByteBuffer.allocate( entry.size());
entry.write( data);
channel.position( Lead.LEAD_SIZE + HEADER_HEADER_SIZE + count() * ENTRY_SIZE + pending.get( entry));
Util.empty( channel, ( ByteBuffer) data.flip());
}
catch ( Exception e) {
throw new RuntimeException( "Error writing pending entry '" + entry.getTag() + "'.", e);
}
}
}
public Map< Entry< ?>, Integer> getPending() {
return pending;
}
public void removeEntry( final Entry< ?> entry) {
entries.remove( entry.getTag());
}
public Entry< ?> getEntry( final Tag tag) {
return getEntry( tag.getCode());
}
public Entry< ?> getEntry( final int tag) {
return entries.get( tag);
}
@SuppressWarnings( "unchecked")
public Entry< String[]> createEntry( Tag tag, CharSequence value) {
Entry< String[]> entry = ( Entry< String[]>) createEntry( tag.getCode(), tag.getType(), 1);
entry.setValues( new String[] { value.toString()});
return entry;
}
@SuppressWarnings( "unchecked")
public < T> Entry< T> appendChangeLogEntry( Tag tag, T values) {
Entry< T> entry = ( Entry< T>) createChangeLogEntry( tag.getCode(), tag.getType(), values.getClass().isArray() ? Array.getLength( values) : 1);
entry.setValues( values);
return entry;
}
@SuppressWarnings( "unchecked")
public Entry< String[]> appendChangeLogEntry( Tag tag, String[] values) {
Entry< String[]> entry = ( Entry< String[]>) createChangeLogEntry( tag.getCode(), tag.getType(), values.length);
entry.setValues( values);
return entry;
}
@SuppressWarnings( "unchecked")
public Entry< int[]> createEntry( Tag tag, int value) {
Entry< int[]> entry = ( Entry< int[]>) createEntry( tag.getCode(), tag.getType(), 1);
entry.setValues( new int[] { value});
return entry;
}
@SuppressWarnings( "unchecked")
public < T> Entry< T> createEntry( Tag tag, T values) {
Entry< T> entry = ( Entry< T>) createEntry( tag.getCode(), tag.getType(), values.getClass().isArray() ? Array.getLength( values) : 1);
entry.setValues( values);
return entry;
}
@SuppressWarnings( "unchecked")
public < T> Entry< T> createEntry( Tag tag, int type, T values) {
Entry< T> entry = ( Entry< T>) createEntry( tag.getCode(), type, values.getClass().isArray() ? Array.getLength( values) : 1);
entry.setValues( values);
return entry;
}
@SuppressWarnings( "unchecked")
public < T> Entry< T> createEntry( int tag, int type, T values) {
Entry< T> entry = ( Entry< T>) createEntry( tag, type, values.getClass().isArray() ? Array.getLength( values) : 1);
entry.setValues( values);
return entry;
}
/**
* Adds a pending entry to this header. This entry will have the correctly sized buffer allocated, but
* will not be written until the caller writes a value and then invokes {@link #writePending} on this
* object.
* @param tag the tag
* @param count the count
* @return the entry added
*/
public Entry< ?> addEntry( Tag tag, int count) {
return createEntry( tag.getCode(), tag.getType(), count);
}
public Entry< ?> readEntry( final int tag, final int type, final int offset, final int count, final ByteBuffer data) {
final Entry< ?> entry = createEntry( tag, type, count);
final ByteBuffer buffer = data.duplicate();
buffer.position( offset);
entry.read( buffer);
entry.setOffset( offset);
return entry;
}
public Entry< ?> createEntry( final int tag, final int type, final int count) {
final Entry< ?> entry = createEntry( type);
entry.setTag( tag);
entry.setCount( count);
entries.put( tag, entry);
return entry;
}
public Entry< ?> createChangeLogEntry( final int tag, final int type, final int count) {
final Entry< ?> entry = createEntry( type);
entry.setTag( tag);
entry.setCount( count);
changelogs.add(entry);
return entry;
}
protected Entry< ?> createEntry( int type) {
switch ( type) {
case Tag.NULL_ENTRY:
return new NullEntry();
case Tag.CHAR_ENTRY:
return new CharEntry();
case Tag.INT8_ENTRY:
return new Int8Entry();
case Tag.INT16_ENTRY:
return new Int16Entry();
case Tag.INT32_ENTRY:
return new Int32Entry();
case Tag.INT64_ENTRY:
return new Int64Entry();
case Tag.STRING_ENTRY:
return new StringEntry();
case Tag.BIN_ENTRY:
return new BinEntry();
case Tag.STRING_ARRAY_ENTRY:
return new StringArrayEntry();
case Tag.I18NSTRING_ENTRY:
return new I18NStringEntry();
default:
throw new IllegalStateException( "Unknown entry type '" + type + "'.");
}
}
public int getEndPos() {
return endPos;
}
public void setEndPos(int endPos) {
this.endPos = endPos;
}
public int getStartPos() {
return startPos;
}
public void setStartPos(int startPos) {
this.startPos = startPos;
}
public interface Entry< T> {
void setTag( int tag);
void setSize( int size);
void setCount( int count);
void setOffset( int offset);
void setValues( T values);
T getValues();
int getTag();
int getType();
int getOffset( int offset);
int size();
boolean ready();
void read( ByteBuffer buffer);
void write( ByteBuffer buffer);
void index( ByteBuffer buffer, int position);
}
public abstract class AbstractEntry< T> implements Entry< T> {
protected int size;
protected int tag;
protected int count;
protected int offset;
protected T values;
public void setTag( Tag tag) { this.tag = tag.getCode(); }
public void setTag( int tag) { this.tag = tag; }
public void setSize( int size) { this.size = size; }
public void setCount( int count) { this.count = count; }
public void setOffset( int offset) { this.offset = offset; }
/**
* Fails fast if Tag and T are not compatible.
* @param values
* @throws ClassCastException - if the type of values is not compatible with the type
* required by tag.type()
*/
protected abstract void typeCheck(T values);
/**
* @param values
* @throws ClassCastException - if the type of values is not compatible with the type
* required by tag.type()
*/
public void setValues( T values) {
if (values.getClass().isArray()) {
typeCheck(values);
}
this.values = values;
}
public T getValues() { return values; }
public int getTag() { return tag; }
public int getOffset( int offset) { return offset; }
/**
* Returns true if this entry is ready to write, indicated by the presence of
* a set of values.
* @return true if ready
*/
public boolean ready() { return values != null; }
/**
* Returns the data type of this entry.
*/
public abstract int getType();
/**
* Returns the size this entry will need in the provided data buffer to write
* it's contents, corrected for any trailing zeros to fill to a boundary.
*/
public abstract int size();
/**
* Reads this entry's values from the provided buffer using the set count.
*/
public abstract void read( final ByteBuffer buffer);
/**
* Writes this entry's values to the provided data buffer.
*/
public abstract void write( final ByteBuffer data);
/**
* Writes the index entry into the provided buffer at the current position.
*/
public void index( final ByteBuffer index, final int position) {
index.putInt( tag).putInt( getType()).putInt( position).putInt( count);
}
public String toString() {
StringBuilder builder = new StringBuilder();
if ( tags.containsKey( tag)) builder.append( tags.get( tag).getName());
else builder.append( super.toString());
builder.append( "[tag=").append( tag);
builder.append( ",type=").append( getType());
builder.append( ",count=").append( count);
builder.append( ",size=").append( size());
builder.append( ",offset=").append( offset);
builder.append( "]");
return builder.toString();
}
}
class NullEntry extends AbstractEntry< Object> {
public int getType() { return 0; }
public int size() { return 0; }
public void read( final ByteBuffer buffer) {}
public void write( final ByteBuffer data) {}
@Override
protected void typeCheck(Object values) {
return;
}
}
class CharEntry extends AbstractEntry< byte[]> {
public int getType() { return Tag.CHAR_ENTRY; }
public int size() { return count ; }
public void read( final ByteBuffer buffer) {
byte[] values = new byte[ count];
for ( int x = 0; x < count; x++) values[ x] = buffer.get();
setValues( values);
}
public void write( final ByteBuffer data) {
for ( byte c : values) data.put( c);
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
for ( byte c : values) builder.append( c);
builder.append( "\n\t");
return builder.toString();
}
@Override
protected void typeCheck(byte[] values) {
for ( @SuppressWarnings("unused") byte c : values) {/*intentionally do nothing*/}
}
}
class Int8Entry extends AbstractEntry< byte[]> {
public int getType() { return Tag.INT8_ENTRY; }
public int size() { return count; }
public void read( final ByteBuffer buffer) {
byte[] values = new byte[ count];
for ( int x = 0; x < count; x++) values[ x] = buffer.get();
setValues( values);
}
public void write( final ByteBuffer data) {
for ( byte b : values) data.put( b);
}
@Override
protected void typeCheck(byte[] values) {
for ( @SuppressWarnings("unused") byte c : values) {/*intentionally do nothing*/}
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
builder.append( "\n\t");
for ( byte b : values) builder.append( b).append( ", ");
return builder.toString();
}
}
class Int16Entry extends AbstractEntry< short[]> {
public int getOffset( int offset) { return Util.round( offset, 1); }
public int getType() { return Tag.INT16_ENTRY; }
public int size() { return count * ( Short.SIZE / 8); }
public void read( final ByteBuffer buffer) {
short[] values = new short[ count];
for ( int x = 0; x < count; x++) values[ x] = buffer.getShort();
setValues( values);
}
public void write( final ByteBuffer data) {
for ( short s : values) data.putShort( s);
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
builder.append( "\n\t");
for ( short s : values) builder.append( s & 0xFFFF).append( ", ");
return builder.toString();
}
@Override
protected void typeCheck(short[] values) {
for ( @SuppressWarnings("unused") short c : values) {/*intentionally do nothing*/}
}
}
class Int32Entry extends AbstractEntry< int[]> {
public int getOffset( int offset) { return Util.round( offset, 3); }
public int getType() { return Tag.INT32_ENTRY; }
public int size() { return count * ( Integer.SIZE / 8); }
public void read( final ByteBuffer buffer) {
int[] values = new int[ count];
for ( int x = 0; x < count; x++) values[ x] = buffer.getInt();
setValues( values);
}
public void write( final ByteBuffer data) {
for ( int i : values) data.putInt( i);
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
builder.append( "\n\t");
for ( int i : values) builder.append( i).append( ", ");
return builder.toString();
}
@Override
protected void typeCheck(int[] values) {
for ( @SuppressWarnings("unused") int c : values) {/*intentionally do nothing*/}
}
}
class Int64Entry extends AbstractEntry< long[]> {
public int getOffset( int offset) { return Util.round( offset, 7); }
public int getType() { return Tag.INT64_ENTRY; }
public int size() { return count * ( Long.SIZE / 8); }
public void read( final ByteBuffer buffer) {
long[] values = new long[ count];
for ( int x = 0; x < count; x++) values[ x] = buffer.getLong();
setValues( values);
}
public void write( final ByteBuffer data) {
for ( long l : values) data.putLong( l);
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
builder.append( "\n\t");
for ( long l : values) builder.append( l).append( ", ");
return builder.toString();
}
@Override
protected void typeCheck(long[] values) {
for ( @SuppressWarnings("unused") long c : values) {/*intentionally do nothing*/}
}
}
class StringEntry extends AbstractEntry< String[]> {
public int getType() { return Tag.STRING_ENTRY; }
public int size() {
if ( size != 0) return size;
for ( String s : values) size += Charset.forName( "UTF-8").encode( s).remaining() + 1;
return size;
}
public void read( final ByteBuffer buffer) {
String[] values = new String[ count];
for ( int x = 0; x < count; x++) {
int length = 0;
while ( buffer.get( buffer.position() + length) != 0) length++;
final ByteBuffer slice = buffer.slice();
buffer.position( buffer.position() + length + 1);
slice.limit( length);
values[ x] = Charset.forName( "UTF-8").decode( slice).toString();
}
setValues( values);
}
public void write( final ByteBuffer data) {
for ( String s : values) data.put( Charset.forName( "UTF-8").encode( s)).put(( byte) 0);
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
if ( values != null) {
for ( String s : values) {
builder.append( "\n\t");
builder.append( s);
}
}
return builder.toString();
}
@Override
protected void typeCheck(String[] values) {
for ( @SuppressWarnings("unused") String c : values) {/*intentionally do nothing*/}
}
}
class BinEntry extends AbstractEntry< byte[]> {
public int getType() { return Tag.BIN_ENTRY; }
public int size() { return count; }
public void read( final ByteBuffer buffer) {
byte[] values = new byte[ count];
buffer.get( values);
setValues( values);
}
public void write( final ByteBuffer data) {
data.put( values);
}
public String toString() {
StringBuilder builder = new StringBuilder( super.toString());
if ( values != null) {
builder.append( "\n");
Util.dump( values, builder);
}
return builder.toString();
}
@Override
protected void typeCheck(byte[] values) {
for ( @SuppressWarnings("unused") byte c : values) {/*intentionally do nothing*/}
}
}
class StringArrayEntry extends StringEntry {
public int getType() { return Tag.STRING_ARRAY_ENTRY; }
}
class I18NStringEntry extends StringEntry {
public int getType() { return Tag.I18NSTRING_ENTRY; }
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append( "Start Header ( ").append( getClass()).append( ")").append( "\n");
int count = 0;
for ( int tag : entries.keySet()) {
builder.append( count++).append( ": ").append( entries.get( tag)).append( "\n");
}
return builder.toString();
}
}
|
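/*
 * Hypothetical usage sketch (not part of the original sources): it defines a minimal concrete
 * header with a single string-typed tag to show how entries are created and serialized with
 * AbstractHeader. The tag code (1000), tag name and entry value are made up for illustration.
 */
import org.redline_rpm.header.AbstractHeader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
public class AbstractHeaderUsageSketch {
    // a throw-away header type that needs no trailing padding
    static class SimpleHeader extends AbstractHeader {
        protected boolean pad() {
            return false;
        }
    }
    // a single string-typed tag, analogous to the Tag enums used by the concrete headers
    static final AbstractHeader.Tag NAME_TAG = new AbstractHeader.Tag() {
        public int getCode() { return 1000; }
        public int getType() { return STRING_ENTRY; }
        public String getName() { return "name"; }
        public boolean isArrayType() { return false; }
    };
    public static void main(String[] args) throws IOException {
        SimpleHeader header = new SimpleHeader();
        // create a STRING_ENTRY for the tag and give it its value
        header.createEntry(NAME_TAG, AbstractHeader.Tag.STRING_ENTRY, new String[] { "example-package" });
        // serialize the header structure's header, index and data to an in-memory channel
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        WritableByteChannel channel = Channels.newChannel(bytes);
        int pad = header.write(channel);
        System.out.println("entries: " + header.count() + ", bytes: " + bytes.size() + ", pad: " + pad);
    }
}
|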
package org.robolectric;
import android.app.Application;
import android.os.Build;
import org.apache.maven.artifact.ant.DependenciesTask;
import org.apache.maven.model.Dependency;
import org.apache.tools.ant.Project;
import org.jetbrains.annotations.TestOnly;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.DisableStrictI18n;
import org.robolectric.annotation.EnableStrictI18n;
import org.robolectric.annotation.WithConstantInt;
import org.robolectric.annotation.WithConstantString;
import org.robolectric.bytecode.AndroidTranslator;
import org.robolectric.bytecode.AsmInstrumentingClassLoader;
import org.robolectric.bytecode.ClassCache;
import org.robolectric.bytecode.ClassHandler;
import org.robolectric.bytecode.JavassistInstrumentingClassLoader;
import org.robolectric.bytecode.RobolectricInternals;
import org.robolectric.bytecode.Setup;
import org.robolectric.bytecode.ShadowMap;
import org.robolectric.bytecode.ShadowWrangler;
import org.robolectric.bytecode.ZipClassCache;
import org.robolectric.internal.ParallelUniverse;
import org.robolectric.internal.ParallelUniverseInterface;
import org.robolectric.res.OverlayResourceLoader;
import org.robolectric.res.PackageResourceLoader;
import org.robolectric.res.ResName;
import org.robolectric.res.ResourceExtractor;
import org.robolectric.res.ResourceLoader;
import org.robolectric.res.ResourcePath;
import org.robolectric.res.RoutingResourceLoader;
import org.robolectric.shadows.ShadowLog;
import org.robolectric.util.AnnotationUtil;
import org.robolectric.util.DatabaseConfig.DatabaseMap;
import org.robolectric.util.DatabaseConfig.UsingDatabaseMap;
import org.robolectric.util.SQLiteMap;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import static org.fest.reflect.core.Reflection.staticField;
/**
* Installs a {@link org.robolectric.bytecode.InstrumentingClassLoader} and
* {@link org.robolectric.res.ResourceLoader} in order to
* provide a simulation of the Android runtime environment.
*/
public class RobolectricTestRunner extends BlockJUnit4ClassRunner {
private static final Project PROJECT = new Project();
private static final Map<Class<? extends RobolectricTestRunner>, EnvHolder> envHoldersByTestRunner = new HashMap<Class<? extends RobolectricTestRunner>, EnvHolder>();
private static final Map<AndroidManifest, ResourceLoader> resourceLoadersByAppManifest = new HashMap<AndroidManifest, ResourceLoader>();
private static final Map<ResourcePath, ResourceLoader> systemResourceLoaders = new HashMap<ResourcePath, ResourceLoader>();
private static Class<? extends RobolectricTestRunner> lastTestRunnerClass;
private static SdkConfig lastSdkConfig;
private static SdkEnvironment lastSdkEnvironment;
private static ShadowMap mainShadowMap;
private final EnvHolder envHolder;
private DatabaseMap databaseMap;
private TestLifecycle<Application> testLifecycle;
static {
new SecureRandom(); // this starts up the Poller SunPKCS11-Darwin thread early, outside of any Robolectric classloader
}
/**
* Creates a runner to run {@code testClass}. Looks in your working directory for your AndroidManifest.xml file
* and res directory.
*
* @param testClass the test class to be run
* @throws InitializationError if junit says so
*/
public RobolectricTestRunner(final Class<?> testClass) throws InitializationError {
super(testClass);
EnvHolder envHolder;
synchronized (envHoldersByTestRunner) {
Class<? extends RobolectricTestRunner> testRunnerClass = getClass();
envHolder = envHoldersByTestRunner.get(testRunnerClass);
if (envHolder == null) {
envHolder = new EnvHolder();
envHoldersByTestRunner.put(testRunnerClass, envHolder);
}
}
this.envHolder = envHolder;
databaseMap = setupDatabaseMap(testClass, new SQLiteMap());
}
private void assureTestLifecycle(SdkEnvironment sdkEnvironment) {
try {
ClassLoader robolectricClassLoader = sdkEnvironment.getRobolectricClassLoader();
testLifecycle = (TestLifecycle) robolectricClassLoader.loadClass(getTestLifecycleClass().getName()).newInstance();
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
public SdkEnvironment createSdkEnvironment(AndroidManifest appManifest, Config config, SdkConfig sdkConfig) {
Setup setup = createSetup();
ClassLoader robolectricClassLoader = createRobolectricClassLoader(setup, sdkConfig);
return new SdkEnvironment(appManifest, robolectricClassLoader);
}
protected ClassHandler createClassHandler(ShadowMap shadowMap) {
return new ShadowWrangler(shadowMap);
}
protected AndroidManifest createAppManifest(File baseDir) {
return new AndroidManifest(baseDir);
}
public Setup createSetup() {
return new Setup();
}
protected Class<? extends TestLifecycle> getTestLifecycleClass() {
return DefaultTestLifecycle.class;
}
protected ClassLoader createRobolectricClassLoader(Setup setup, SdkConfig sdkConfig) {
URL[] urls = artifactUrls(realAndroidDependency("android-base", sdkConfig),
realAndroidDependency("android-kxml2", sdkConfig),
realAndroidDependency("android-luni", sdkConfig),
createDependency("org.json", "json", "20080701", "jar", null),
createDependency("org.ccil.cowan.tagsoup", "tagsoup", "1.2", "jar", null)
);
ClassLoader robolectricClassLoader;
if (useAsm()) {
robolectricClassLoader = new AsmInstrumentingClassLoader(setup, urls);
} else {
ClassCache classCache = createClassCache();
AndroidTranslator androidTranslator = createAndroidTranslator(setup, classCache);
ClassLoader realSdkClassLoader = JavassistInstrumentingClassLoader.makeClassloader(this.getClass().getClassLoader(), urls);
robolectricClassLoader = new JavassistInstrumentingClassLoader(realSdkClassLoader, classCache, androidTranslator, setup);
}
return robolectricClassLoader;
}
public ClassCache createClassCache() {
final String classCachePath = System.getProperty("cached.robolectric.classes.path");
final File classCacheDirectory;
if (null == classCachePath || "".equals(classCachePath.trim())) {
classCacheDirectory = new File("./tmp");
} else {
classCacheDirectory = new File(classCachePath);
}
return new ZipClassCache(new File(classCacheDirectory, "cached-robolectric-classes.jar").getAbsolutePath(), AndroidTranslator.CACHE_VERSION);
}
public AndroidTranslator createAndroidTranslator(Setup setup, ClassCache classCache) {
return new AndroidTranslator(classCache, setup);
}
public boolean useAsm() {
return true;
}
public static void injectClassHandler(ClassLoader robolectricClassLoader, ClassHandler classHandler) {
try {
String className = RobolectricInternals.class.getName();
Class<?> robolectricInternalsClass = robolectricClassLoader.loadClass(className);
Field field = robolectricInternalsClass.getDeclaredField("classHandler");
field.setAccessible(true);
field.set(null, classHandler);
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
private URL[] artifactUrls(Dependency... dependencies) {
DependenciesTask dependenciesTask = new DependenciesTask();
configureMaven(dependenciesTask);
dependenciesTask.setProject(PROJECT);
for (Dependency dependency : dependencies) {
dependenciesTask.addDependency(dependency);
}
dependenciesTask.execute();
@SuppressWarnings("unchecked")
Hashtable<String, String> artifacts = PROJECT.getProperties();
URL[] urls = new URL[artifacts.size()];
int i = 0;
for (String path : artifacts.values()) {
try {
urls[i++] = new URL("file:/" + path);
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
return urls;
}
@SuppressWarnings("UnusedParameters")
protected void configureMaven(DependenciesTask dependenciesTask) {
// maybe you want to override this method and some settings?
}
private Dependency realAndroidDependency(String artifactId, SdkConfig sdkConfig) {
return createDependency("org.robolectric", artifactId, sdkConfig.getArtifactVersionString(), "jar", "real");
}
private Dependency createDependency(String groupId, String artifactId, String version, String type, String classifier) {
Dependency dependency = new Dependency();
dependency.setGroupId(groupId);
dependency.setArtifactId(artifactId);
dependency.setVersion(version);
dependency.setType(type);
dependency.setClassifier(classifier);
return dependency;
}
@Override
protected Statement classBlock(RunNotifier notifier) {
final Statement statement = super.classBlock(notifier);
return new Statement() {
@Override
public void evaluate() throws Throwable {
try {
statement.evaluate();
} finally {
afterClass();
}
}
};
}
@Override protected Statement methodBlock(final FrameworkMethod method) {
return new Statement() {
@Override public void evaluate() throws Throwable {
final Config config = getConfig(method.getMethod());
AndroidManifest appManifest = getAppManifest(config);
SdkEnvironment sdkEnvironment = getEnvironment(appManifest, config);
// todo: is this really needed?
Thread.currentThread().setContextClassLoader(sdkEnvironment.getRobolectricClassLoader());
Class bootstrappedTestClass = sdkEnvironment.bootstrappedClass(getTestClass().getJavaClass());
HelperTestRunner helperTestRunner;
try {
helperTestRunner = new HelperTestRunner(bootstrappedTestClass);
} catch (InitializationError initializationError) {
throw new RuntimeException(initializationError);
}
final Method bootstrappedMethod;
try {
//noinspection unchecked
bootstrappedMethod = bootstrappedTestClass.getMethod(method.getName());
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
configureShadows(sdkEnvironment, config);
setupLogging();
ParallelUniverseInterface parallelUniverseInterface = getHooksInterface(sdkEnvironment);
try {
assureTestLifecycle(sdkEnvironment);
parallelUniverseInterface.resetStaticState();
parallelUniverseInterface.setDatabaseMap(databaseMap); //Set static DatabaseMap in DBConfig
boolean strictI18n = RobolectricTestRunner.determineI18nStrictState(bootstrappedMethod);
int sdkVersion = pickReportedSdkVersion(config, sdkEnvironment);
Class<?> versionClass = sdkEnvironment.bootstrappedClass(Build.VERSION.class);
staticField("SDK_INT").ofType(int.class).in(versionClass).set(sdkVersion);
ResourcePath systemResourcePath = sdkEnvironment.getSystemResourcePath();
ResourceLoader systemResourceLoader = getSystemResourceLoader(systemResourcePath);
setUpApplicationState(bootstrappedMethod, parallelUniverseInterface, strictI18n, systemResourceLoader, sdkEnvironment);
testLifecycle.beforeTest(bootstrappedMethod);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(e);
}
final Statement statement = helperTestRunner.methodBlock(new FrameworkMethod(bootstrappedMethod));
Map<Field, Object> withConstantAnnos = getWithConstantAnnotations(bootstrappedMethod);
// todo: this try/finally probably isn't right -- should mimic RunAfters? [xw]
try {
if (withConstantAnnos.isEmpty()) {
statement.evaluate();
} else {
synchronized (this) {
setupConstants(withConstantAnnos);
statement.evaluate();
setupConstants(withConstantAnnos);
}
}
} finally {
try {
parallelUniverseInterface.tearDownApplication();
} finally {
try {
internalAfterTest(bootstrappedMethod);
} finally {
parallelUniverseInterface.resetStaticState(); // afterward too, so stuff doesn't hold on to classes?
// todo: is this really needed?
Thread.currentThread().setContextClassLoader(RobolectricTestRunner.class.getClassLoader());
}
}
}
}
};
}
private SdkEnvironment getEnvironment(final AndroidManifest appManifest, final Config config) {
final SdkConfig sdkConfig = pickSdkVersion(appManifest, config);
// keep the most recently-used SdkEnvironment strongly reachable to prevent thrashing in low-memory situations.
if (getClass().equals(lastTestRunnerClass) && sdkConfig.equals(lastSdkConfig)) {
return lastSdkEnvironment;
}
lastTestRunnerClass = null;
lastSdkConfig = null;
lastSdkEnvironment = envHolder.getSdkEnvironment(sdkConfig, new SdkEnvironment.Factory() {
@Override public SdkEnvironment create() {
return createSdkEnvironment(appManifest, config, sdkConfig);
}
});
lastTestRunnerClass = getClass();
lastSdkConfig = sdkConfig;
return lastSdkEnvironment;
}
protected SdkConfig pickSdkVersion(AndroidManifest appManifest, Config config) {
if (config != null && config.emulateSdk() != -1) {
throw new UnsupportedOperationException("Sorry, emulateSdk is not yet supported... coming soon!");
}
if (appManifest != null) {
// todo: something smarter
int useSdkVersion = appManifest.getTargetSdkVersion();
}
// right now we only have real jars for Jelly Bean aka 4.1 aka API 16
return new SdkConfig("4.1.2_r1_rc");
}
protected AndroidManifest getAppManifest(Config config) {
File appManifestBaseDir = new File(".");
synchronized (envHolder) {
AndroidManifest appManifest;
appManifest = envHolder.appManifestsByFile.get(appManifestBaseDir);
if (appManifest == null) {
appManifest = createAppManifest(appManifestBaseDir);
envHolder.appManifestsByFile.put(appManifestBaseDir, appManifest);
}
return appManifest;
}
}
public Config getConfig(Method method) {
Config methodConfig = method.getAnnotation(Config.class);
if (methodConfig == null) {
methodConfig = AnnotationUtil.defaultsFor(Config.class);
}
Config classConfig = method.getDeclaringClass().getAnnotation(Config.class);
if (classConfig == null) {
classConfig = AnnotationUtil.defaultsFor(Config.class);
}
return new Config.Implementation(classConfig, methodConfig);
}
protected void configureShadows(SdkEnvironment sdkEnvironment, Config config) {
ShadowMap shadowMap = createShadowMap();
if (config != null) {
Class<?>[] shadows = config.shadows();
if (shadows.length > 0) {
shadowMap = shadowMap.newBuilder()
.addShadowClasses(shadows)
.build();
}
}
ClassHandler classHandler = getClassHandler(sdkEnvironment, shadowMap);
injectClassHandler(sdkEnvironment.getRobolectricClassLoader(), classHandler);
}
private ClassHandler getClassHandler(SdkEnvironment sdkEnvironment, ShadowMap shadowMap) {
ClassHandler classHandler;
synchronized (sdkEnvironment) {
classHandler = sdkEnvironment.classHandlersByShadowMap.get(shadowMap);
if (classHandler == null) {
classHandler = createClassHandler(shadowMap);
}
sdkEnvironment.setCurrentClassHandler(classHandler);
}
return classHandler;
}
protected void setUpApplicationState(Method method, ParallelUniverseInterface parallelUniverseInterface, boolean strictI18n, ResourceLoader systemResourceLoader, SdkEnvironment sdkEnvironment) {
parallelUniverseInterface.setUpApplicationState(method, testLifecycle, sdkEnvironment, strictI18n, systemResourceLoader);
}
private int getTargetSdkVersion(SdkEnvironment sdkEnvironment) {
AndroidManifest appManifest = sdkEnvironment.getAppManifest();
return getTargetVersionWhenAppManifestMightBeNullWhaaa(appManifest);
}
public static int getTargetVersionWhenAppManifestMightBeNullWhaaa(AndroidManifest appManifest) {
return appManifest == null // app manifest would be null for libraries
? Build.VERSION_CODES.ICE_CREAM_SANDWICH // todo: how should we be picking this?
: appManifest.getTargetSdkVersion();
}
protected int pickReportedSdkVersion(Config config, SdkEnvironment sdkEnvironment) {
if (config != null && config.reportSdk() != -1) {
return config.reportSdk();
} else {
return getTargetSdkVersion(sdkEnvironment);
}
}
private ParallelUniverseInterface getHooksInterface(SdkEnvironment sdkEnvironment) {
try {
@SuppressWarnings("unchecked")
Class<ParallelUniverseInterface> aClass = (Class<ParallelUniverseInterface>) sdkEnvironment.getRobolectricClassLoader().loadClass(ParallelUniverse.class.getName());
return aClass.newInstance();
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
public void internalAfterTest(final Method method) {
testLifecycle.afterTest(method);
}
private void afterClass() {
testLifecycle = null;
databaseMap = null;
}
@TestOnly
boolean allStateIsCleared() {
return testLifecycle == null && databaseMap == null;
}
@Override
public Object createTest() throws Exception {
throw new UnsupportedOperationException("this should always be invoked on the HelperTestRunner!");
}
public static String determineResourceQualifiers(Method method) {
String qualifiers = "";
Config config = method.getAnnotation(Config.class);
if (config != null) {
qualifiers = config.qualifiers();
}
return qualifiers;
}
/**
   * Determines whether Robolectric should blacklist API calls that are not
   * I18N/L10N-safe.
* <p/>
* I18n-strict mode affects suitably annotated shadow methods. Robolectric will throw exceptions
* if these methods are invoked by application code. Additionally, Robolectric's ResourceLoader
* will throw exceptions if layout resources use bare string literals instead of string resource IDs.
* <p/>
* To enable or disable i18n-strict mode for specific test cases, annotate them with
* {@link org.robolectric.annotation.EnableStrictI18n} or
* {@link org.robolectric.annotation.DisableStrictI18n}.
   * <p/>
* By default, I18n-strict mode is disabled.
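   * <p/>
   * For illustration, a hypothetical test class (names are made up) could opt in at the
   * class level and opt a single method back out:
   * <pre>{@code
   * @EnableStrictI18n
   * public class MyI18nTest {
   *     @Test
   *     @DisableStrictI18n
   *     public void legacyStringLiteralCase() {
   *     }
   * }
   * }</pre>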
*
   * @param method the test method about to be run
   * @return {@code true} if i18n-strict mode should be enabled for this test
*/
public static boolean determineI18nStrictState(Method method) {
// Global
boolean strictI18n = globalI18nStrictEnabled();
// Test case class
Class<?> testClass = method.getDeclaringClass();
if (testClass.getAnnotation(EnableStrictI18n.class) != null) {
strictI18n = true;
} else if (testClass.getAnnotation(DisableStrictI18n.class) != null) {
strictI18n = false;
}
// Test case method
if (method.getAnnotation(EnableStrictI18n.class) != null) {
strictI18n = true;
} else if (method.getAnnotation(DisableStrictI18n.class) != null) {
strictI18n = false;
}
return strictI18n;
}
/**
* Default implementation of global switch for i18n-strict mode.
* To enable i18n-strict mode globally, set the system property
* "robolectric.strictI18n" to true. This can be done via java
* system properties in either Ant or Maven.
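   * <p/>
   * For example, the equivalent of passing {@code -Drobolectric.strictI18n=true} on the
   * command line can be done programmatically before the tests run:
   * <pre>{@code
   * System.setProperty("robolectric.strictI18n", "true");
   * }</pre>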
* <p/>
* Subclasses can override this method and establish their own policy
* for enabling i18n-strict mode.
*
   * @return {@code true} if the "robolectric.strictI18n" system property is set to true
*/
protected static boolean globalI18nStrictEnabled() {
return Boolean.valueOf(System.getProperty("robolectric.strictI18n"));
}
private Map<Field, Object> getWithConstantAnnotations(Method method) {
Map<Field, Object> constants = new HashMap<Field, Object>();
for (Annotation anno : method.getDeclaringClass().getAnnotations()) {
addConstantFromAnnotation(constants, anno);
}
for (Annotation anno : method.getAnnotations()) {
addConstantFromAnnotation(constants, anno);
}
return constants;
}
/**
* If the annotation is a constant redefinition, add it to the provided hash
*
   * @param constants map from field to new value, to which the redefinition is added
   * @param anno the annotation to inspect
*/
private void addConstantFromAnnotation(Map<Field, Object> constants, Annotation anno) {
try {
String name = anno.annotationType().getName();
Object newValue = null;
if (name.equals(WithConstantString.class.getName())) {
newValue = anno.annotationType().getMethod("newValue").invoke(anno);
} else if (name.equals(WithConstantInt.class.getName())) {
newValue = anno.annotationType().getMethod("newValue").invoke(anno);
} else {
return;
}
@SuppressWarnings("rawtypes")
Class classWithField = (Class) anno.annotationType().getMethod("classWithField").invoke(anno);
String fieldName = (String) anno.annotationType().getMethod("fieldName").invoke(anno);
Field field = classWithField.getDeclaredField(fieldName);
constants.put(field, newValue);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Defines static finals from the provided hash and stores the old values back
* into the hash.
* <p/>
* Call it twice with the same hash, and it puts everything back the way it was originally.
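   * <p/>
   * A sketch of the intended call pattern (the surrounding test-running code is omitted):
   * <pre>{@code
   * Map<Field, Object> constants = getWithConstantAnnotations(method);
   * setupConstants(constants); // install the new values, remember the old ones
   * try {
   *     // run the test with the redefined constants
   * } finally {
   *     setupConstants(constants); // swap the original values back in
   * }
   * }</pre>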
*
   * @param constants map from static final field to the value to install; after the call it
   *                  holds the values that were replaced
*/
private void setupConstants(Map<Field, Object> constants) {
for (Field field : constants.keySet()) {
Object newValue = constants.get(field);
Object oldValue = Robolectric.Reflection.setFinalStaticField(field, newValue);
constants.put(field, oldValue);
}
}
public static ResourceLoader getSystemResourceLoader(ResourcePath systemResourcePath) {
ResourceLoader systemResourceLoader = systemResourceLoaders.get(systemResourcePath);
if (systemResourceLoader == null) {
systemResourceLoader = createSystemResourceLoader(systemResourcePath);
systemResourceLoaders.put(systemResourcePath, systemResourceLoader);
}
return systemResourceLoader;
}
public static ResourceLoader getAppResourceLoader(ResourceLoader systemResourceLoader, final AndroidManifest appManifest) {
ResourceLoader resourceLoader = resourceLoadersByAppManifest.get(appManifest);
if (resourceLoader == null) {
resourceLoader = createAppResourceLoader(systemResourceLoader, appManifest);
resourceLoadersByAppManifest.put(appManifest, resourceLoader);
}
return resourceLoader;
}
  // this method must live on an InstrumentingClassLoader-loaded class, so it can't be on SdkEnvironment
protected static ResourceLoader createAppResourceLoader(ResourceLoader systemResourceLoader, AndroidManifest appManifest) {
List<PackageResourceLoader> appAndLibraryResourceLoaders = new ArrayList<PackageResourceLoader>();
for (ResourcePath resourcePath : appManifest.getIncludedResourcePaths()) {
appAndLibraryResourceLoaders.add(createResourceLoader(resourcePath));
}
OverlayResourceLoader overlayResourceLoader = new OverlayResourceLoader(appManifest.getPackageName(), appAndLibraryResourceLoaders);
Map<String, ResourceLoader> resourceLoaders = new HashMap<String, ResourceLoader>();
resourceLoaders.put("android", systemResourceLoader);
resourceLoaders.put(appManifest.getPackageName(), overlayResourceLoader);
return new RoutingResourceLoader(resourceLoaders);
}
public static PackageResourceLoader createResourceLoader(ResourcePath systemResourcePath) {
return new PackageResourceLoader(systemResourcePath);
}
public static PackageResourceLoader createSystemResourceLoader(ResourcePath systemResourcePath) {
return new PackageResourceLoader(systemResourcePath, new SystemResourceExtractor(systemResourcePath));
}
/*
   * Specifies which database to use for testing (e.g. H2 or SQLite).
   * This loads H2 by default; the SQLite test runner version overrides this.
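   *
   * For illustration, a hypothetical test class selecting its own map
   * (the DatabaseMap implementation name is made up):
   *
   *   @UsingDatabaseMap(MyH2DatabaseMap.class)
   *   public class MyPersistenceTest {
   *   }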
*/
protected DatabaseMap setupDatabaseMap(Class<?> testClass, DatabaseMap map) {
DatabaseMap dbMap = map;
if (testClass.isAnnotationPresent(UsingDatabaseMap.class)) {
UsingDatabaseMap usingMap = testClass.getAnnotation(UsingDatabaseMap.class);
if (usingMap.value() != null) {
dbMap = Robolectric.newInstanceOf(usingMap.value());
} else {
if (dbMap == null)
throw new RuntimeException("UsingDatabaseMap annotation value must provide a class implementing DatabaseMap");
}
}
return dbMap;
}
protected ShadowMap createShadowMap() {
synchronized (RobolectricTestRunner.class) {
if (mainShadowMap != null) return mainShadowMap;
mainShadowMap = new ShadowMap.Builder()
//.addShadowClasses(RobolectricBase.DEFAULT_SHADOW_CLASSES)
.build();
return mainShadowMap;
}
}
private void setupLogging() {
String logging = System.getProperty("robolectric.logging");
if (logging != null && ShadowLog.stream == null) {
PrintStream stream = null;
if ("stdout".equalsIgnoreCase(logging)) {
stream = System.out;
} else if ("stderr".equalsIgnoreCase(logging)) {
stream = System.err;
} else {
try {
final PrintStream file = new PrintStream(new FileOutputStream(logging));
stream = file;
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override public void run() {
try {
file.close();
} catch (Exception ignored) {
}
}
});
} catch (IOException e) {
e.printStackTrace();
}
}
ShadowLog.stream = stream;
}
}
private static class SystemResourceExtractor extends ResourceExtractor {
public SystemResourceExtractor(ResourcePath systemResourcePath) {
super(systemResourcePath);
}
@Override public synchronized ResName getResName(int resourceId) {
ResName resName = super.getResName(resourceId);
if (resName == null) {
// todo: pull in android.internal.R, remove this, and remove the "synchronized" on methods since we should then be immutable...
if ((resourceId & 0xfff00000) == 0x01000000) {
new RuntimeException("WARN: couldn't find a name for resource id " + resourceId).printStackTrace(System.out);
ResName internalResName = new ResName("android.internal", "unknown", resourceId + "");
resourceNameToId.put(internalResName, resourceId);
resourceIdToResName.put(resourceId, internalResName);
return internalResName;
}
}
return resName;
}
@Override public synchronized Integer getResourceId(ResName resName) {
return super.getResourceId(resName);
}
}
public class HelperTestRunner extends BlockJUnit4ClassRunner {
public HelperTestRunner(Class<?> testClass) throws InitializationError {
super(testClass);
}
@Override protected Object createTest() throws Exception {
Object test = super.createTest();
testLifecycle.prepareTest(test);
return test;
}
@Override public Statement classBlock(RunNotifier notifier) {
return super.classBlock(notifier);
}
@Override public Statement methodBlock(FrameworkMethod method) {
return super.methodBlock(method);
}
}
}
|
package org.rundeck.client.tool.commands;
import com.lexicalscope.jewel.cli.CommandLineInterface;
import com.lexicalscope.jewel.cli.Option;
import com.lexicalscope.jewel.cli.Unparsed;
import com.simplifyops.toolbelt.Command;
import com.simplifyops.toolbelt.CommandOutput;
import com.simplifyops.toolbelt.InputError;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import org.rundeck.client.api.RundeckApi;
import org.rundeck.client.api.model.KeyStorageItem;
import org.rundeck.client.util.Client;
import org.rundeck.client.util.Util;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.util.function.Supplier;
import java.util.stream.Collectors;
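/*
 * For illustration, some hypothetical invocations of the sub-commands defined below
 * (storage paths and file names are made up; option names match the interfaces in this class):
 *
 *   keys list -p keys/project/acme
 *   keys get -p keys/project/acme/deploy.pub -f deploy.pub
 *   keys create -p keys/project/acme/db.password -t password -P
 */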
@Command(description = "Manage Keys via the Key Storage Facility." +
"\nSpecify the path using -p/--path, or as the last argument to the command.")
public class Keys extends ApiCommand {
public Keys(final HasClient client) {
super(client);
}
public static class Path {
String path;
public Path(final String path) {
this.path = path;
}
public String keysPath() {
if (path.startsWith("keys/")) {
return path.substring(5);
}
return path;
}
@Override
public String toString() {
return path;
}
}
interface PathArgs {
@Option(shortName = "p",
longName = "path",
description = "Storage path, default: keys/",
defaultValue = "keys/")
Path getPath();
@Unparsed(defaultToNull = true, description = "Storage path", name = "PATH")
Path getPath2();
}
@CommandLineInterface(application = "list") interface ListArg extends PathArgs {
}
@Command(description = "List the keys and directories at a given path, or at the root by default.",
synonyms = {"ls"})
public boolean list(ListArg options, CommandOutput output) throws IOException, InputError {
Path path = argPath(options);
KeyStorageItem keyStorageItem = getClient().checkError(getClient().getService()
.listKeyStorage(path.keysPath()));
output.output(keyStorageItem.toBasicString());
if (keyStorageItem.getType() == KeyStorageItem.KeyItemType.directory) {
output.output(
keyStorageItem.getResources()
.stream()
.sorted()
.map(KeyStorageItem::toBasicString)
.collect(Collectors.toList()));
return true;
} else {
output.error(String.format("Path is not a directory: %s", path));
return false;
}
}
private Path argPath(final PathArgs options) {
return options.getPath2() != null ? options.getPath2() : options.getPath();
}
@CommandLineInterface(application = "info") interface Info extends PathArgs {
}
@Command(description = "Get metadata about the given path")
public void info(Info options, CommandOutput output) throws IOException, InputError {
Path path = argPath(options);
KeyStorageItem keyStorageItem = getClient().checkError(getClient().getService()
.listKeyStorage(keysPath(path
.keysPath())));
output.output(String.format("Path: %s", keyStorageItem.getPath()));
output.output(String.format("Type: %s", keyStorageItem.getType()));
// output.output(keyStorageItem.toBasicString());
if (keyStorageItem.getType() == KeyStorageItem.KeyItemType.directory) {
output.output(String.format("Directory: %d entries", keyStorageItem.getResources().size()));
} else {
output.output(String.format("Name: %s", keyStorageItem.getName()));
output.output("Metadata:");
output.output(keyStorageItem.getMetaString(" "));
}
}
/**
* Remove keys/ prefix if present
*
     * @param path the storage path, possibly prefixed with "keys/"
     *
     * @return the path with any leading "keys/" prefix removed
*/
private String keysPath(final String path) {
if (path.startsWith("keys/")) {
return path.substring(5);
}
return path;
}
@CommandLineInterface(application = "get") interface GetOpts extends PathArgs {
@Option(shortName = "f",
longName = "file",
defaultToNull = true,
description = "File path for storing the public key. If unset, the output will be written to stdout.")
File getFile();
}
@Command(description = "Get the contents of a public key")
public boolean get(GetOpts options, CommandOutput output) throws IOException, InputError {
Path path = argPath(options);
String path1 = path.keysPath();
if (path1.length() < 1) {
throw new InputError("-p/--path is required");
}
KeyStorageItem keyStorageItem = getClient().checkError(getClient().getService()
.listKeyStorage(path.keysPath()));
if (keyStorageItem.getType() != KeyStorageItem.KeyItemType.file) {
output.error(String.format("Requested path (%s) is not a file", path));
return false;
}
if (keyStorageItem.getFileType() != KeyStorageItem.KeyFileType.publicKey) {
output.error(String.format(
"Requested path (%s) is not a public key. Type: %s",
path,
keyStorageItem.getFileType()
));
return false;
}
ResponseBody body = getClient().checkError(getClient().getService().getPublicKey(path.keysPath()));
if (!Client.hasAnyMediaType(body, Client.MEDIA_TYPE_GPG_KEYS)) {
throw new IllegalStateException("Unexpected response format: " + body.contentType());
}
InputStream inputStream = body.byteStream();
File outFile = options.getFile();
if (outFile != null) {
try (FileOutputStream out = new FileOutputStream(outFile)) {
long total = Util.copyStream(inputStream, out);
output.info(String.format(
"Wrote %d bytes of %s to file %s%n",
total,
body.contentType(),
outFile
));
}
} else {
long total = Util.copyStream(inputStream, System.out);
}
return true;
}
@CommandLineInterface(application = "delete") interface Delete extends PathArgs {
}
@Command(synonyms = {"rm"}, description = "Delete the key at the given path.")
public void delete(Delete opts, CommandOutput output) throws IOException, InputError {
Path path = argPath(opts);
String path1 = path.keysPath();
if (path1.length() < 1) {
throw new InputError("-p/--path is required");
}
getClient().checkError(getClient().getService().deleteKeyStorage(path.keysPath()));
output.info(String.format("Deleted: %s", path));
}
@CommandLineInterface(application = "create") interface Upload extends PathArgs {
@Option(shortName = "t",
longName = "type",
description = "Type of key to store: publicKey,privateKey,password.")
KeyStorageItem.KeyFileType getType();
@Option(shortName = "f",
longName = "file",
description = "File path for reading the upload contents.")
File getFile();
boolean isFile();
@Option(
shortName = "P",
longName = "prompt",
description = "(password type only) prompt on console for the password value, if -f is not specified."
)
boolean isPrompt();
}
@Command(description = "Create a new key entry.")
public boolean create(Upload options, CommandOutput output) throws IOException, InputError {
Path path = argPath(options);
String path1 = path.keysPath();
if (path1.length() < 1) {
throw new InputError("-p/--path is required");
}
RequestBody requestBody = prepareKeyUpload(options);
KeyStorageItem keyStorageItem = getClient().checkError(getClient().getService()
.createKeyStorage(
path1,
requestBody
));
output.info(String.format("Created: %s", keyStorageItem.toBasicString()));
return true;
}
private RequestBody prepareKeyUpload(final Upload options) throws IOException, InputError {
MediaType contentType = getUploadContentType(options.getType());
if (null == contentType) {
throw new InputError(String.format("Type is not supported: %s", options.getType()));
}
RequestBody requestBody;
if (options.getType() != KeyStorageItem.KeyFileType.password && !options.isFile()) {
throw new InputError(String.format("File (-f) is required for type: %s", options.getType()));
}
if (options.getType() == KeyStorageItem.KeyFileType.password && !options.isFile() && !options.isPrompt()) {
throw new InputError(String.format(
"File (-f) or -p is required for type: %s",
options.getType()
));
}
if (options.isFile()) {
File input = options.getFile();
if (!input.canRead() || !input.isFile()) {
throw new InputError(String.format("File is not readable or does not exist: %s", input));
}
if (options.getType() == KeyStorageItem.KeyFileType.password) {
//read the first line of the file only, and leave off line breaks
char[] chars = null;
try (BufferedReader read = new BufferedReader(new InputStreamReader(new FileInputStream(input)))) {
String s = read.readLine();
if (null != s) {
System.err.println("Read file string: '" + s + "'");
chars = s.toCharArray();
}
}
if (chars == null || chars.length == 0) {
throw new IllegalStateException("Could not read first line of file: " + input);
}
ByteBuffer byteBuffer = Charset.forName("UTF-8").encode(CharBuffer.wrap(chars));
requestBody = RequestBody.create(
contentType,
byteBuffer.array()
);
} else {
requestBody = RequestBody.create(
contentType,
input
);
}
} else {
char[] chars = System.console().readPassword("Enter password: ");
ByteBuffer byteBuffer = Charset.forName("UTF-8").encode(CharBuffer.wrap(chars));
requestBody = RequestBody.create(
contentType,
byteBuffer.array()
);
}
return requestBody;
}
@CommandLineInterface(application = "update") interface Update extends Upload {
}
@Command(description = "Update an existing key entry")
public boolean update(Update options, CommandOutput output) throws IOException, InputError {
Path path = argPath(options);
String path1 = path.keysPath();
if (path1.length() < 1) {
throw new InputError("-p/--path is required");
}
RequestBody requestBody = prepareKeyUpload(options);
KeyStorageItem keyStorageItem = getClient().checkError(getClient().getService()
.updateKeyStorage(
path.keysPath(),
requestBody
));
output.info(String.format("Updated: %s", keyStorageItem.toBasicString()));
return true;
}
private MediaType getUploadContentType(final KeyStorageItem.KeyFileType type) {
return type == KeyStorageItem.KeyFileType.privateKey ? Client.MEDIA_TYPE_OCTET_STREAM :
type == KeyStorageItem.KeyFileType.publicKey ? Client.MEDIA_TYPE_GPG_KEYS :
type == KeyStorageItem.KeyFileType.password ? Client.MEDIA_TYPE_X_RUNDECK_PASSWORD : null;
}
}
|
package org.scijava.annotations;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.Attributes.Name;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.scijava.util.FileUtils;
/**
 * Helps work around Eclipse's lack of support for annotation processing in incremental
* build mode.
* <p>
* Eclipse has a very, let's say, "creative" way to interpret the Java
* specifications when it comes to annotation processing: while Java mandates
* that annotation processors need to be run after compiling Java classes,
* Eclipse cops out of that because it poses a challenge to its incremental
* compilation (and especially to Eclipse's attempt at compiling .class files
* even from .java sources that contain syntax errors).
* </p>
* <p>
* So we need to do something about this. Our strategy is to detect when the
* annotation index was not updated properly and just do it ourselves, whenever
* {@link Index#load(Class)} is called.
* </p>
* <p>
* Since our aim here is to compensate for Eclipse's shortcoming, we need only
* care about the scenario where the developer launches either a Java main class
* or a unit test from within Eclipse, and even then only when the annotation
* index is to be accessed.
* </p>
* <p>
* The way Eclipse launches Java main classes or unit tests, it makes a single
* {@link URLClassLoader} with all the necessary class path elements. Crucially,
* the class path elements corresponding to Eclipse projects will never point to
* {@code .jar} files but to directories. This allows us to assume that the
* annotation classes as well as the annotated classes can be loaded using that
* exact class loader, too.
* </p>
* <p>
* It is quite possible that a developer may launch a main class in a different
* project than the one which needs annotation indexing, therefore we need to
* inspect all class path elements.
* </p>
* <p>
* To provide at least a semblance of a performant component, before going all
* out and indexing the annotations, we verify that the {@code META-INF/json/}
* directory has an outdated timestamp relative to the {@code .class} files. If
* that is not the case, we may safely assume that the annotation indexes are
* up-to-date.
* </p>
* <p>
 * To avoid repeatedly re-indexing class path elements that simply do not contain
 * indexable annotations, we create the {@code META-INF/json/} directory anyway,
 * updating its timestamp to record that the annotations were indexed.
* </p>
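 * <p>
 * Typical use is a single static call, which is also what {@link #main} does:
 * </p>
 * <pre>{@code
 * EclipseHelper.updateAnnotationIndex(Thread.currentThread().getContextClassLoader());
 * }</pre>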
*
* @author Johannes Schindelin
*/
public class EclipseHelper extends DirectoryIndexer {
private static final String FORCE_ANNOTATION_INDEX_PROPERTY = "force.annotation.index";
static Set<URL> indexed = new HashSet<URL>();
private boolean bannerShown;
private static boolean debug =
"debug".equals(System.getProperty("scijava.log.level"));
private boolean autoDetectEclipse = true;
private static void debug(final String message) {
if (debug) {
System.err.println(message);
}
}
/**
* Updates the annotation index in the current Eclipse project.
* <p>
* The assumption is that Eclipse -- after failing to run the annotation
* processors correctly -- will launch any tests or main classes with a class
* path that contains the project's output directory with the {@code .class}
* files (as opposed to a {@code .jar} file). We only need to update that
* first class path element (or for tests, the first two), and only if it is a
* local directory.
* </p>
*
* @param loader the class loader whose class path to inspect
*/
public static void updateAnnotationIndex(final ClassLoader loader) {
debug("Checking class loader: " + loader);
if (loader == null ||
!(loader instanceof URLClassLoader))
{
debug("Not an URLClassLoader: " + loader);
return;
}
EclipseHelper helper = new EclipseHelper();
if (Boolean.getBoolean(FORCE_ANNOTATION_INDEX_PROPERTY)) {
helper.autoDetectEclipse = false;
}
boolean first = true;
for (final URL url : ((URLClassLoader) loader).getURLs()) {
debug("Checking URL: " + url);
if (helper.autoDetectEclipse && first) {
if (!"file".equals(url.getProtocol()) ||
(!url.getPath().endsWith("/") && !url.getPath().contains("surefire")))
{
debug("Not Eclipse because first entry is: " + url);
return;
}
first = false;
}
if (url.toString().endsWith("/./")) {
// Eclipse never adds "." to the class path
break;
}
helper.maybeIndex(url, loader);
}
updateAnnotationIndex(loader.getParent());
}
private void maybeIndex(final URL url, final ClassLoader loader) {
synchronized (indexed) {
if (indexed.contains(url)) {
return;
}
indexed.add(url);
}
if (!"file".equals(url.getProtocol())) {
debug("Not a file URL: " + url);
return;
}
String path = url.getFile();
if (!path.startsWith("/")) {
debug("Not an absolute file URL: " + url);
return;
}
if (path.endsWith(".jar")) {
/*
* To support mixed development with Eclipse and Maven, let's handle
* the case where Eclipse compiled classes, did not run the annotation
* processors, then the developer called "mvn test". In this case, we
* have a surefirebooter.jar whose manifest contains the dependencies,
* but crucially also the target/classes/ and target/test-classes/
* directories which may need to be indexed.
*/
if (!autoDetectEclipse || path.matches(".*/target/surefire/surefirebooter[0-9]*\\.jar")) try {
final JarFile jar = new JarFile(path);
Manifest manifest = jar.getManifest();
if (manifest != null) {
final String classPath =
manifest.getMainAttributes().getValue(Name.CLASS_PATH);
if (classPath != null) {
for (final String element : classPath.split(" +"))
try {
maybeIndex(new URL(url, element), loader);
}
catch (MalformedURLException e) {
e.printStackTrace();
}
}
}
}
catch (final IOException e) {
System.err.println("Warning: could not index annotations due to ");
e.printStackTrace();
}
return;
}
File directory = FileUtils.urlToFile(url);
if (!directory.isDirectory()) {
return;
}
index(directory, loader);
}
private void index(File directory, ClassLoader loader) {
debug("Directory: " + directory);
if (!directory.canWrite() || upToDate(directory) || isIJ1(directory)) {
debug("can write: " + directory.canWrite() + ", up-to-date: " +
upToDate(directory) + ", : is IJ1: " + isIJ1(directory));
return;
}
final File jsonDirectory = new File(directory, Index.INDEX_PREFIX);
try {
discoverAnnotations(directory, "", loader);
if (!jsonDirectory.exists() && !foundAnnotations()) return;
if (!bannerShown) {
System.err.println("[ECLIPSE HELPER] Indexing annotations...");
bannerShown = true;
}
write(directory);
}
catch (IOException e) {
e.printStackTrace();
}
// update the timestamp of META-INF/json/
if (jsonDirectory.isDirectory()) {
jsonDirectory.setLastModified(System.currentTimeMillis());
}
else {
jsonDirectory.mkdirs();
}
}
/**
* A hacky way of detecting whether the given directory is the root of an
* ImageJ1 codebase containing unpacked ImageJ1 classes.
*/
private boolean isIJ1(File directory) {
return new File(directory, "IJ_Props.txt").exists();
}
private boolean upToDate(final File directory) {
final File jsonDirectory = new File(directory, Index.INDEX_PREFIX);
if (!jsonDirectory.isDirectory()) {
return false;
}
return upToDate(directory, jsonDirectory.lastModified());
}
private boolean upToDate(File directory, long lastModified) {
if (directory.lastModified() > lastModified) {
return false;
}
final File[] list = directory.listFiles();
if (list != null) {
for (final File file : list) {
if (file.isFile()) {
if (file.lastModified() > lastModified) {
return false;
}
}
else if (file.isDirectory()) {
if (!upToDate(file, lastModified)) {
return false;
}
}
}
}
return true;
}
/**
 * Command-line entry point that forces the annotation index to be rebuilt.
 * <p>
 * It sets the {@code force.annotation.index} system property to {@code true} and then
 * updates the annotation index for every class path element of the current thread's
 * context class loader.
 * </p>
*/
public static void main(final String... args) {
System.setProperty(FORCE_ANNOTATION_INDEX_PROPERTY, "true");
updateAnnotationIndex(Thread.currentThread().getContextClassLoader());
}
}
|
package org.shyyko.ex.fm.reanimate;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.control.ProgressBar;
import javafx.scene.control.TableView;
import javafx.scene.input.MouseEvent;
import javafx.scene.media.Media;
import javafx.scene.media.MediaPlayer;
import javafx.scene.media.MediaView;
import javafx.stage.DirectoryChooser;
import javafx.stage.FileChooser;
import javafx.stage.Window;
import org.shyyko.ex.fm.reanimate.alternative.IsAlternative;
import org.shyyko.ex.fm.reanimate.alternative.net.zaycev.ZaycevNetAlternative;
import org.shyyko.ex.fm.reanimate.download.AudioDownloader;
import org.shyyko.ex.fm.reanimate.model.Track;
import org.shyyko.ex.fm.reanimate.xspf.XSPFParser;
import javax.swing.*;
import java.io.File;
import java.util.Arrays;
import java.util.List;
public class Controller {
public TableView tracksTableView;
public MediaView mediaView;
public ProgressBar downloadProgressBar;
public ProgressBar downloadSuccessProgressBar;
public ProgressBar downloadFailureProgressBar;
private List<Track> tracks;
private Window window;
private IsAlternative alternative;
private MediaPlayer mediaPlayer;
public void init(Window window) {
this.window = window;
alternative = new ZaycevNetAlternative();
}
@SuppressWarnings("unchecked")
public void onOpenXspfFileMenuClick(ActionEvent event) {
FileChooser fileChooser = new FileChooser();
File file = fileChooser.showOpenDialog(window);
if (file != null && file.exists()) {
XSPFParser xspfParser = new XSPFParser();
try {
tracks = xspfParser.parse(file);
tracksTableView.getItems().addAll(tracks);
} catch (Exception e) {
                showError("Unable to parse XSPF file", e);
}
}
tracksTableView.setOnMouseClicked(new EventHandler<MouseEvent>() {
@Override
public void handle(MouseEvent mouseEvent) {
if (mouseEvent.getClickCount() > 1) {
playSelectedTrack();
}
}
});
}
public void onDownloadMenuClick(ActionEvent event) {
Object selectedItem = tracksTableView.getSelectionModel().getSelectedItem();
if (selectedItem == null) {
showError("No track selected", null);
return;
}
Track selectedTrack = (Track) selectedItem;
DirectoryChooser directoryChooser = new DirectoryChooser();
File file = directoryChooser.showDialog(window);
if (file != null) {
AudioDownloader audioDownloader = new AudioDownloader(alternative);
audioDownloader.setOnProgressListener(new DownloadListener());
audioDownloader.downloadTracks(Arrays.asList(selectedTrack), file);
}
}
private void showError(String error, Exception e) {
        //todo: surface this to the user in the UI; for now just log it
        System.err.println(error);
        if (e != null) e.printStackTrace();
}
public void onDownloadAllMenuClick(ActionEvent event) {
if (tracks == null || tracks.isEmpty()) {
showError("No tracks to download", null);
return;
}
DirectoryChooser directoryChooser = new DirectoryChooser();
File file = directoryChooser.showDialog(window);
if (file != null) {
AudioDownloader audioDownloader = new AudioDownloader(alternative);
audioDownloader.setOnProgressListener(new DownloadListener());
audioDownloader.downloadTracks(tracks, file);
}
}
private void playSelectedTrack() {
Track track = (Track) tracksTableView.getSelectionModel().getSelectedItem();
try {
track = alternative.getAlternative(track);
} catch (Exception e) {
showError("Error while finding alternatives", e);
return;
}
Media media = new Media(track.getLocation());
//mediaView.setMediaPlayer(player);
if (mediaPlayer != null) {
mediaPlayer.pause();
}
mediaPlayer = new MediaPlayer(media);
mediaPlayer.setOnError(new Runnable() {
@Override
public void run() {
showError("Error while playing", mediaPlayer.getError());
}
});
mediaPlayer.play();
}
private class DownloadListener implements AudioDownloader.OnProgressListener {
@Override
public void onProgress(double progress, double success, double failure) {
downloadProgressBar.setProgress(progress);
downloadSuccessProgressBar.setProgress(success);
downloadFailureProgressBar.setProgress(failure);
}
}
}
|
package org.tndata.android.compass.model;
import java.io.Serializable;
public class User implements Serializable {
private static final long serialVersionUID = 4582633283983173348L;
private int id = -1;
private String first_name = "";
private String last_name = "";
private String full_name = "";
private String email = "";
private int userprofile_id = -1;
private String date_joined = "";
private String token = "";
private String password = "";
private String error = "";
private boolean needs_onboarding = true;
public User() {
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getFirstName() {
return first_name;
}
public void setFirstName(String first_name) {
this.first_name = first_name;
}
public String getLastName() {
return last_name;
}
public void setLastName(String last_name) {
this.last_name = last_name;
}
public String getFullName() {
return full_name;
}
public void setFullName(String full_name) {
this.full_name = full_name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public int getUserprofileId() {
return userprofile_id;
}
public void setUserprofileId(int userprofile_id) {
this.userprofile_id = userprofile_id;
}
public String getDateJoined() {
return date_joined;
}
public void setDateJoined(String date_joined) {
this.date_joined = date_joined;
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getError() {
return error;
}
public void setError(String error) {
this.error = error;
}
public void onBoardingComplete(){
needs_onboarding = false;
}
public boolean needsOnBoarding(){
return needs_onboarding;
}
}
|
package org.trancecode.xml.saxon;
import com.google.common.collect.ImmutableList;
import net.sf.saxon.Configuration;
import net.sf.saxon.event.Receiver;
import net.sf.saxon.event.TreeReceiver;
import net.sf.saxon.om.NamePool;
import net.sf.saxon.om.NamespaceIterator;
import net.sf.saxon.om.NodeInfo;
import net.sf.saxon.s9api.QName;
import net.sf.saxon.s9api.SaxonApiException;
import net.sf.saxon.s9api.XdmDestination;
import net.sf.saxon.s9api.XdmNode;
import net.sf.saxon.trans.XPathException;
/**
* A builder to create new XdmNode documents using a push API. It provides a
* facade to the lower-level Saxon Receiver API.
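 * <p>
 * A minimal usage sketch, given an existing Saxon {@code Configuration} (the element name,
 * attribute and text content are arbitrary):
 * </p>
 * <pre>{@code
 * SaxonBuilder builder = new SaxonBuilder(configuration);
 * builder.startDocument();
 * builder.startElement(new QName("root"));
 * builder.attribute(new QName("id"), "1");
 * builder.startContent();
 * builder.text("hello");
 * builder.endElement();
 * builder.endDocument();
 * XdmNode result = builder.getNode();
 * }</pre>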
*
* @see Receiver
* @see TreeReceiver
* @author Romain Deltour
*/
public class SaxonBuilder
{
private final XdmDestination destination = new XdmDestination();
private final TreeReceiver receiver;
private final NamePool namePool;
/**
* Creates a new builder based on the given Saxon configuration.
*
* @param configuration
* The Saxon configuration. In particular, the name pool of the
* configuration will be used to construct the new item codes.
*/
public SaxonBuilder(final Configuration configuration)
{
try
{
receiver = new TreeReceiver(destination.getReceiver(configuration));
receiver.setPipelineConfiguration(configuration.makePipelineConfiguration());
namePool = configuration.getNamePool();
receiver.open();
}
catch (final SaxonApiException e)
{
throw new IllegalStateException(e);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Starts a document node.
*/
public void startDocument()
{
try
{
receiver.startDocument(0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Ends the document node.
*/
public void endDocument()
{
try
{
receiver.endDocument();
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Starts a new element with the given QName.
*
* @param qname
* The QName of the new element.
*/
public void startElement(final QName qname)
{
try
{
final int nameCode = namePool.allocate(qname.getPrefix(), qname.getNamespaceURI(), qname.getLocalName());
receiver.startElement(nameCode, -1, -1, 0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
     * Starts a new element with the given QName and adds the in-scope
     * namespaces of the given node to the new element.
*
* @param qname
* The QName of the new element.
* @param nsContext
* A node whose in-scope namespaces are copied to the new
* element.
*/
public void startElement(final QName qname, final XdmNode nsContext)
{
try
{
startElement(qname);
final int[] inscopeNsCodes = NamespaceIterator.getInScopeNamespaceCodes(nsContext.getUnderlyingNode());
for (final int nsCode : inscopeNsCodes)
{
receiver.namespace(nsCode, 0);
}
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Ends the current element node.
*/
public void endElement()
{
try
{
receiver.endElement();
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Starts the content of the current element. Must be called after the
* declaration of namespaces and attributes, before adding text or node
* children to the element.
*/
public void startContent()
{
try
{
receiver.startContent();
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Adds an attribute to the current element.
*
* @param qname
* The QName of the attribute
* @param value
* The value of the attribute
*/
public void attribute(final QName qname, final String value)
{
try
{
final int nameCode = namePool.allocate(qname.getPrefix(), qname.getNamespaceURI(), qname.getLocalName());
receiver.attribute(nameCode, -1, value, 0, 0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Adds a new comment node.
*
* @param comment
* The comment text
*/
public void comment(final String comment)
{
try
{
receiver.comment(comment, 0, 0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Declares a new namespace in the current element
*
* @param prefix
* The namespace prefix
* @param uri
* The namespace URI
*/
public void namespace(final String prefix, final String uri)
{
try
{
final int nsCode = namePool.allocateNamespaceCode((prefix != null) ? prefix : "", uri);
receiver.namespace(nsCode, 0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Appends the given nodes to the document being built.
*/
public void nodes(final XdmNode... nodes)
{
nodes(ImmutableList.copyOf(nodes));
}
/**
* Appends the given nodes to the document being built.
*/
public void nodes(final Iterable<XdmNode> nodes)
{
try
{
for (final XdmNode node : nodes)
{
receiver.append(node.getUnderlyingNode(), 0, NodeInfo.NO_NAMESPACES);
}
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Adds a processing instruction.
*
* @param name
* The processing instruction name
* @param data
* The processing instruction data
*/
public void processingInstruction(final String name, final String data)
{
try
{
receiver.processingInstruction(name, data, 0, 0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Adds a text node
*
* @param text
* The text content
*/
public void text(final String text)
{
try
{
receiver.characters(text, 0, 0);
}
catch (final XPathException e)
{
throw new IllegalStateException(e);
}
}
/**
* Returns the result node built by this builder.
*/
public XdmNode getNode()
{
return destination.getXdmNode();
}
}
|
package org.wahlzeit.model;
public class LiquorPhotoFactory extends PhotoFactory{
/**
     * Makes the factory instance accessible without exposing the constructor.
     *
     * @return the shared LiquorPhotoFactory instance
*/
public static LiquorPhotoFactory getInstance(){
return instance;
}
private static LiquorPhotoFactory instance = new LiquorPhotoFactory();
public Photo createPhoto(){
return new LiquorPhoto();
}
public LiquorPhoto createPhoto(PhotoId id) {
return new LiquorPhoto(id);
}
}
|
/*
* This is free and unencumbered software released into the public domain.
*/
package org.wicketsample;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.markup.html.link.Link;
import javax.servlet.http.HttpServletRequest;
/*
import org.apache.directory.fortress.web.control.FtBookmarkablePageLink;
import org.apache.directory.fortress.web.control.SecUtils;
import org.apache.directory.fortress.core.AccessMgr;
import org.apache.directory.fortress.realm.J2eePolicyMgr;
import org.apache.wicket.spring.injection.annot.SpringBean;
*/
/**
* Base class for wicketsample project.
*
* @author Shawn McKinney
* @version $Rev$
*/
public abstract class WicketSampleBasePage extends WebPage
{
// TODO STEP: enable spring injection of fortress bean here:
/*
@SpringBean
private AccessMgr accessMgr;
@SpringBean
private J2eePolicyMgr j2eePolicyMgr;
*/
public WicketSampleBasePage()
{
// TODO STEP: uncomment call to enableFortress:
/*
try
{
SecUtils.enableFortress( this, ( HttpServletRequest ) getRequest().getContainerRequest(), j2eePolicyMgr,
accessMgr );
}
catch (org.apache.directory.fortress.core.SecurityException se)
{
String error = "WicketSampleBasePage caught security exception : " + se;
LOG.warn( error );
}
*/
// TODO STEP: change to FtBookmarkablePageLink:
add( new BookmarkablePageLink( "wspage1.link", Page1.class ) );
add( new BookmarkablePageLink( "wspage2.link", Page2.class ) );
add( new BookmarkablePageLink( "wspage3.link", Page3.class ) );
final Link actionLink = new Link( "logout.link" )
{
@Override
public void onClick()
{
setResponsePage(LogoutPage.class);
}
};
add( actionLink );
add( new Label( "footer", "This is free and unencumbered software released into the public domain." ) );
}
/**
* Used by the child pages.
*
* @param target for modal panel
* @param msg to log and display user info
*/
protected void logIt(AjaxRequestTarget target, String msg)
{
info( msg );
LOG.info( msg );
target.appendJavaScript(";alert('" + msg + "');");
}
protected static final Logger LOG = LoggerFactory.getLogger( WicketSampleBasePage.class.getName() );
}
|
package picard.fingerprint;
import com.google.cloud.storage.contrib.nio.SeekableByteChannelPrefetcher;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import htsjdk.samtools.filter.SamRecordFilter;
import htsjdk.samtools.filter.SecondaryAlignmentFilter;
import htsjdk.samtools.util.Interval;
import htsjdk.samtools.util.IntervalList;
import htsjdk.samtools.util.Log;
import htsjdk.samtools.util.SamLocusIterator;
import htsjdk.samtools.util.SequenceUtil;
import htsjdk.samtools.util.StringUtil;
import htsjdk.variant.utils.SAMSequenceDictionaryExtractor;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeLikelihoods;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;
import picard.PicardException;
import picard.util.AlleleSubsettingUtils;
import picard.util.ThreadPoolExecutorWithExceptions;
import java.io.File;
import java.io.IOException;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import static htsjdk.samtools.SamReaderFactory.Option.CACHE_FILE_BASED_INDEXES;
/**
* Major class that coordinates the activities involved in comparing genetic fingerprint
* data whether the source is from a genotyping platform or derived from sequence data.
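 * <p>
 * A minimal usage sketch (the haplotype map and VCF paths are hypothetical):
 * </p>
 * <pre>{@code
 * FingerprintChecker checker = new FingerprintChecker(new File("haplotype_map.txt"));
 * checker.setMinimumBaseQuality(20);
 * checker.setMinimumMappingQuality(10);
 * Map<String, Fingerprint> fingerprints =
 *         checker.loadFingerprints(Paths.get("genotypes.vcf"), null);
 * }</pre>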
*
* @author Tim Fennell
*/
public class FingerprintChecker {
public static final double DEFAULT_GENOTYPING_ERROR_RATE = 0.01;
public static final int DEFAULT_MINIMUM_MAPPING_QUALITY = 10;
public static final int DEFAULT_MINIMUM_BASE_QUALITY = 20;
public static final int DEFAULT_MAXIMAL_PL_DIFFERENCE = 30;
private final HaplotypeMap haplotypes;
private int minimumBaseQuality = DEFAULT_MINIMUM_BASE_QUALITY;
private int minimumMappingQuality = DEFAULT_MINIMUM_MAPPING_QUALITY;
private double genotypingErrorRate = DEFAULT_GENOTYPING_ERROR_RATE;
private int maximalPLDifference = DEFAULT_MAXIMAL_PL_DIFFERENCE;
public ValidationStringency getValidationStringency() {
return validationStringency;
}
public void setValidationStringency(final ValidationStringency validationStringency) {
this.validationStringency = validationStringency;
}
public File getReferenceFasta() { return referenceFasta;}
public void setReferenceFasta(final File referenceFasta) {
this.referenceFasta = referenceFasta;
}
private ValidationStringency validationStringency = ValidationStringency.DEFAULT_STRINGENCY;
private File referenceFasta;
private boolean allowDuplicateReads = false;
private double pLossofHet = 0;
private final Log log = Log.getInstance(FingerprintChecker.class);
/**
* Creates a fingerprint checker that will work with the set of haplotypes stored in
* the supplied file.
*/
public FingerprintChecker(final File haplotypeData) {
this.haplotypes = new HaplotypeMap(haplotypeData);
}
/**
* Creates a fingerprint checker that will work with the set of haplotyped provided.
*/
public FingerprintChecker(final HaplotypeMap haplotypes) {
this.haplotypes = haplotypes;
}
/**
* Sets the minimum base quality for bases used when computing a fingerprint from sequence data.
*/
public void setMinimumBaseQuality(final int minimumBaseQuality) {
this.minimumBaseQuality = minimumBaseQuality;
}
/**
* Sets the minimum mapping quality for reads used when computing fingerprints from sequence data.
*/
public void setMinimumMappingQuality(final int minimumMappingQuality) {
this.minimumMappingQuality = minimumMappingQuality;
}
/**
* Sets the assumed genotyping error rate used when accurate error rates are not available.
*/
public void setGenotypingErrorRate(final double genotypingErrorRate) {
this.genotypingErrorRate = genotypingErrorRate;
}
/**
* Sets the maximal difference in PL scores considered when reading PLs from a VCF.
*/
public void setmaximalPLDifference(final int maximalPLDifference) {
this.maximalPLDifference = maximalPLDifference;
}
public SAMFileHeader getHeader() {
return haplotypes.getHeader();
}
/**
* Sets whether duplicate reads should be allowed when calling genotypes from SAM files. This is
* useful when comparing read groups within a SAM file and individual read groups show artifactually
* high duplication (e.g. a single-ended read group mixed in with paired-end read groups).
*
* @param allowDuplicateReads should fingerprinting use duplicate reads?
*/
public void setAllowDuplicateReads(final boolean allowDuplicateReads) {
this.allowDuplicateReads = allowDuplicateReads;
}
    //sets the value of the probability that a genotype underwent a Loss of Heterozygosity (for tumors)
public void setpLossofHet(final double pLossofHet) {
this.pLossofHet = pLossofHet;
}
/**
* Loads genotypes from the supplied file into one or more Fingerprint objects and returns them in a
* Map of Sample->Fingerprint.
*
* @param fingerprintFile - VCF file containing genotypes for one or more samples
* @param specificSample - null to load genotypes for all samples contained in the file or the name
* of an individual sample to load (and exclude all others).
* @return a Map of Sample name to Fingerprint
*/
public Map<String, Fingerprint> loadFingerprints(final Path fingerprintFile, final String specificSample) {
SequenceUtil.assertSequenceDictionariesEqual(this.haplotypes.getHeader().getSequenceDictionary(),
VCFFileReader.getSequenceDictionary(fingerprintFile));
final VCFFileReader reader = new VCFFileReader(fingerprintFile, false);
final Map<String, Fingerprint> fingerprints;
if (reader.isQueryable()) {
fingerprints = loadFingerprintsFromQueriableReader(reader, specificSample, fingerprintFile);
} else {
log.warn("Couldn't find index for file " + fingerprintFile + " going to read through it all.");
fingerprints = loadFingerprintsFromVariantContexts(reader, specificSample, fingerprintFile);
}
//add an entry for each sample which was not fingerprinted
for (final String sample : reader.getFileHeader().getGenotypeSamples()) {
fingerprints.computeIfAbsent(sample, s -> new Fingerprint(s, fingerprintFile, null));
}
return fingerprints;
}
/**
* Loads genotypes from the supplied file into one or more Fingerprint objects and returns them in a
* Map of Sample->Fingerprint.
*
* @param fingerprintFile - VCF file containing genotypes for one or more samples
* @param specificSample - null to load genotypes for all samples contained in the file or the name
* of an individual sample to load (and exclude all others).
* @return a Map of Sample name to Fingerprint
*
*/
public Map<String, Fingerprint> loadFingerprintsFromNonIndexedVcf(final Path fingerprintFile, final String specificSample) {
final VCFFileReader reader = new VCFFileReader(fingerprintFile, false);
SequenceUtil.assertSequenceDictionariesEqual(this.haplotypes.getHeader().getSequenceDictionary(), SAMSequenceDictionaryExtractor.extractDictionary(fingerprintFile));
return loadFingerprintsFromVariantContexts(reader, specificSample, fingerprintFile);
}
/**
* Loads genotypes from the supplied file into one or more Fingerprint objects and returns them in a
* Map of Sample->Fingerprint.
*
* @param iterable - an iterable over variantContexts containing genotypes for one or more samples
* @param specificSample - null to load genotypes for all samples contained in the file or the name
* of an individual sample to load (and exclude all others).
* @param source The path of the source file used. used to emit errors, and annotate the fingerprints.
* @return a Map of Sample name to Fingerprint
*/
public Map<String, Fingerprint> loadFingerprintsFromVariantContexts(final Iterable<VariantContext> iterable, final String specificSample, final Path source) {
final Map<String, Fingerprint> fingerprints = new HashMap<>();
Set<String> samples = null;
for (final VariantContext ctx : iterable) {
// Setup the sample names set if needed
if (ctx == null) continue;
if (samples == null) {
if (specificSample != null) {
samples = new HashSet<>();
samples.add(specificSample);
} else {
samples = ctx.getSampleNames();
if (samples == null) {
log.warn("No samples found in file: " + source.toUri().toString() + ". Skipping.");
return Collections.emptyMap();
}
}
samples.forEach(s -> fingerprints.put(s, new Fingerprint(s, source, null)));
}
try {
getFingerprintFromVc(fingerprints, ctx);
} catch (final IllegalArgumentException e) {
log.warn(e,"There was a genotyping error in File: " + source.toUri().toString() + "\n" + e.getMessage());
}
}
return fingerprints;
}
/**
* Loads genotypes from the supplied file into one or more Fingerprint objects and returns them in a
* Map of Sample->Fingerprint.
*
* @param fingerprintFile - VCF file containing genotypes for one or more samples
* @param specificSample - null to load genotypes for all samples contained in the file or the name
* of an individual sample to load (and exclude all others).
* @return a Map of Sample name to Fingerprint
*/
public Map<String, Fingerprint> loadFingerprintsFromIndexedVcf(final Path fingerprintFile, final String specificSample) {
final VCFFileReader reader = new VCFFileReader(fingerprintFile, true);
return loadFingerprintsFromQueriableReader(reader, specificSample, fingerprintFile);
}
/**
* Loads genotypes from the supplied reader into one or more Fingerprint objects and returns them in a
* Map of Sample->Fingerprint.
*
* @param reader - VCF reader containing genotypes for one or more samples
* @param specificSample - null to load genotypes for all samples contained in the file or the name
* of an individual sample to load (and exclude all others).
* @param source The path of the source file used. used to emit errors.
* @return a Map of Sample name to Fingerprint
*/
public Map<String, Fingerprint> loadFingerprintsFromQueriableReader(final VCFFileReader reader, final String specificSample, final Path source) {
SequenceUtil.assertSequenceDictionariesEqual(this.haplotypes.getHeader().getSequenceDictionary(),
reader.getFileHeader().getSequenceDictionary());
final SortedSet<Snp> snps = new TreeSet<>(haplotypes.getAllSnps());
return loadFingerprintsFromVariantContexts(() ->
snps.stream().map(snp -> {
try {
return reader.query(snp.getChrom(), snp.getPos(), snp.getPos()).next();
} catch (NoSuchElementException e) {
return null;
}
}).iterator(),
specificSample, source);
}
/**
* Adds the fingerprints found in the variant Context to the map.
*
* @param fingerprints a map from Sample to fingerprint
* @param ctx the VC from which to extract (part of ) a fingerprint
*/
private void getFingerprintFromVc(final Map<String, Fingerprint> fingerprints, final VariantContext ctx) throws IllegalArgumentException {
final HaplotypeBlock h = this.haplotypes.getHaplotype(ctx.getContig(), ctx.getStart());
if (h == null) return;
final Snp snp = this.haplotypes.getSnp(ctx.getContig(), ctx.getStart());
final VariantContext usableSnp = AlleleSubsettingUtils.subsetVCToMatchSnp(ctx, snp);
if (usableSnp == null) {
return;
}
// Check the alleles from the file against the expected set of genotypes
{
boolean allelesOk = true;
for (final Allele allele : usableSnp.getAlleles()) {
final byte[] bases = allele.getBases();
if (bases.length > 1 || (bases[0] != snp.getAllele1() && bases[0] != snp.getAllele2())) {
allelesOk = false;
}
}
if (!allelesOk) {
log.warn("Problem with genotype file: Alleles "
+ usableSnp.getAlleles() + " do not match to alleles for SNP " + snp
+ " with alleles " + snp.getAlleleString());
throw new IllegalArgumentException("Alleles do not match between database and file");
}
}
for (final String sample : fingerprints.keySet()) {
final Fingerprint fp = fingerprints.get(sample);
//PLs are preferred over GTs
//TODO: this code is replicated in various places (ReconstructTriosFromVCF for example). Needs refactoring.
//TODO: add a way to force using GTs when both are available (why?)
// Get the genotype for the sample and check that it is useful
final Genotype genotype = usableSnp.getGenotype(sample);
if (genotype == null) {
throw new IllegalArgumentException("Cannot find sample " + sample + " in provided file. ");
}
if (genotype.hasPL()) {
final HaplotypeProbabilitiesFromGenotypeLikelihoods hFp = new HaplotypeProbabilitiesFromGenotypeLikelihoods(h);
                //do not modify the PL array returned by getPL() directly; it is fragile
final int[] pls = genotype.getPL();
final int[] newPLs = new int[pls.length];
for (int i = 0; i < pls.length; i++) {
newPLs[i] = Math.min(maximalPLDifference, pls[i]);
}
hFp.addToLogLikelihoods(snp, usableSnp.getAlleles(), GenotypeLikelihoods.fromPLs(newPLs).getAsVector());
fp.add(hFp);
} else {
if (genotype.isNoCall()) continue;
// TODO: when multiple genotypes are available for a Haplotype check that they
// TODO: agree. Not urgent since DownloadGenotypes already does this.
// TODO: more urgent now as we convert vcfs to haplotypeProbabilities and
// TODO: there could be different VCs with information we'd like to use...
if (fp.containsKey(h)) continue;
final boolean hom = genotype.isHom();
final byte allele = StringUtil.toUpperCase(genotype.getAllele(0).getBases()[0]);
final double halfError = this.genotypingErrorRate / 2;
final double accuracy = 1 - this.genotypingErrorRate;
final double[] probs = new double[]{
(hom && allele == snp.getAllele1()) ? accuracy : halfError,
(!hom) ? accuracy : halfError,
(hom && allele == snp.getAllele2()) ? accuracy : halfError
};
fp.add(new HaplotypeProbabilitiesFromGenotype(snp, h, probs[0], probs[1], probs[2]));
}
}
}
/**
* Takes a set of fingerprints and returns an IntervalList containing all the loci that
* can be productively examined in sequencing data to compare to one or more of the
* fingerprints.
*/
public IntervalList getLociToGenotype(final Collection<Fingerprint> fingerprints) {
final IntervalList intervals = new IntervalList(this.haplotypes.getHeader());
for (final Fingerprint fp : fingerprints) {
for (final HaplotypeProbabilities genotype : fp.values()) {
final HaplotypeBlock h = genotype.getHaplotype();
for (final Snp snp : h.getSnps()) {
intervals.add(new Interval(snp.getChrom(), snp.getPos(), snp.getPos(), false, snp.getName()));
}
}
}
return intervals.uniqued();
}
public Map<FingerprintIdDetails, Fingerprint> fingerprintVcf(final Path vcfFile) {
final Map<FingerprintIdDetails, Fingerprint> fpIdMap = new HashMap<>();
final Map<String, Fingerprint> sampleFpMap = loadFingerprints(vcfFile, null);
sampleFpMap.forEach((key, value) -> {
final FingerprintIdDetails fpId = new FingerprintIdDetails();
fpId.sample = key;
fpId.file = vcfFile.toUri().toString();
fpIdMap.put(fpId, value);
});
return fpIdMap;
}
private static final Function<SeekableByteChannel, SeekableByteChannel> seekableChannelFunction = (chan) -> {
try {
return SeekableByteChannelPrefetcher.addPrefetcher(1, chan);
} catch (IOException e) {
throw new RuntimeException("Trouble wrapping seekable stream with prefetcher.", e);
}
};
/**
* Generates a Fingerprint per read group in the supplied SAM file using the loci provided in
* the interval list.
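     * <p>
     * For example (the BAM path is hypothetical), continuing the sketch in the class-level
     * documentation where {@code checker} and {@code fingerprints} were created:
     * </p>
     * <pre>{@code
     * IntervalList loci = checker.getLociToGenotype(fingerprints.values());
     * Map<FingerprintIdDetails, Fingerprint> byReadGroup =
     *         checker.fingerprintSamFile(Paths.get("sample.bam"), loci);
     * }</pre>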
*/
public Map<FingerprintIdDetails, Fingerprint> fingerprintSamFile(final Path samFile, final IntervalList loci) {
// the seekableChannelFunction adds a buffered stream wrapper around the index reading which
// makes reading the index over NIO not hang indefinitely.
final SamReader in = SamReaderFactory.makeDefault()
.enable(SamReaderFactory.Option.CACHE_FILE_BASED_INDEXES)
.referenceSequence(referenceFasta)
.open(samFile, null, seekableChannelFunction);
SequenceUtil.assertSequenceDictionariesEqual(this.haplotypes.getHeader().getSequenceDictionary(),
in.getFileHeader().getSequenceDictionary());
final SamLocusIterator iterator = new SamLocusIterator(in, loci, in.hasIndex());
iterator.setEmitUncoveredLoci(true);
iterator.setMappingQualityScoreCutoff(this.minimumMappingQuality);
iterator.setQualityScoreCutoff(this.minimumBaseQuality);
// In some cases it is useful to allow duplicate reads to be used - the most common is in single-end
// sequence data where the duplicate marking may have been overly aggressive, and there is useful
// non-redundant data in the reads marked as "duplicates".
if (this.allowDuplicateReads) {
final List<SamRecordFilter> filters = new ArrayList<>(1);
filters.add(new SecondaryAlignmentFilter());
iterator.setSamFilters(filters);
}
final Map<SAMReadGroupRecord, FingerprintIdDetails> fingerprintIdDetailsMap = new HashMap<>();
final Map<FingerprintIdDetails, Fingerprint> fingerprintsByReadGroup = new HashMap<>();
for (final SAMReadGroupRecord rg : in.getFileHeader().getReadGroups()) {
final FingerprintIdDetails id = new FingerprintIdDetails(rg.getPlatformUnit(), samFile.toUri().toString());
id.library = rg.getLibrary();
id.sample = rg.getSample();
fingerprintIdDetailsMap.put(rg, id);
final Fingerprint fingerprint = new Fingerprint(id.sample,
samFile,
id.platformUnit);
fingerprintsByReadGroup.put(id, fingerprint);
for (final HaplotypeBlock h : this.haplotypes.getHaplotypes()) {
fingerprint.add(new HaplotypeProbabilitiesFromSequence(h));
}
}
// Set of read/template names from which we have already sampled a base and a qual. Since we assume
// that all evidence for a haplotype is independent we can't sample two or more bases from a single
// read or read-pair because they would not be independent!
final Set<String> usedReadNames = new HashSet<>(10000);
// Now go through the data at each locus and figure stuff out!
for (final SamLocusIterator.LocusInfo info : iterator) {
// if statement to avoid string building.
if (Log.isEnabled(Log.LogLevel.DEBUG)) {
log.debug("At locus " + info.toString());
}
// TODO: Filter out the locus if the allele balance doesn't make sense for either a
// TODO: 50/50 het or a hom with some errors; in HS data with deep coverage any base
// TODO: with major strand bias could cause errors
// Find the matching Snp and HaplotypeProbs
final HaplotypeBlock haplotypeBlock = this.haplotypes.getHaplotype(info.getSequenceName(), info.getPosition());
final Snp snp = this.haplotypes.getSnp(info.getSequenceName(), info.getPosition());
for (final SamLocusIterator.RecordAndOffset rec : info.getRecordAndOffsets()) {
final SAMReadGroupRecord rg = rec.getRecord().getReadGroup();
final FingerprintIdDetails details;
if (rg == null || !fingerprintIdDetailsMap.containsKey(rg)) {
final FingerprintIdDetails unknownFPDetails = createUnknownFP(samFile, rec.getRecord());
fingerprintIdDetailsMap.put(null, unknownFPDetails);
final Fingerprint fp = new Fingerprint(unknownFPDetails.sample, samFile, unknownFPDetails.platformUnit);
fingerprintsByReadGroup.put(unknownFPDetails, fp);
for (final HaplotypeBlock h : this.haplotypes.getHaplotypes()) {
fp.add(new HaplotypeProbabilitiesFromSequence(h));
}
}
if (fingerprintIdDetailsMap.containsKey(rg)) {
details = fingerprintIdDetailsMap.get(rg);
final String readName = rec.getRecord().getReadName();
if (!usedReadNames.contains(readName)) {
final HaplotypeProbabilitiesFromSequence probs = (HaplotypeProbabilitiesFromSequence) fingerprintsByReadGroup.get(details).get(haplotypeBlock);
final byte base = StringUtil.toUpperCase(rec.getReadBase());
final byte qual = rec.getBaseQuality();
probs.addToProbs(snp, base, qual);
usedReadNames.add(readName);
}
} else {
final PicardException e = new PicardException("Unknown read group: " + rg + " in file: " + samFile);
log.error(e);
throw e;
}
}
}
return fingerprintsByReadGroup;
}
private FingerprintIdDetails createUnknownFP(final Path samFile, final SAMRecord rec) {
final PicardException e = new PicardException("Found read with no readgroup: " + rec.getReadName() + " in file: " + samFile);
if (validationStringency != ValidationStringency.STRICT) {
final SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("<UNKNOWN>:::" + samFile.toUri().toString());
readGroupRecord.setLibrary("<UNKNOWN>");
readGroupRecord.setSample("<UNKNOWN>");
readGroupRecord.setPlatformUnit("<UNKNOWN>.0.ZZZ");
if (validationStringency == ValidationStringency.LENIENT) {
log.warn(e);
log.warn("further messages from this file will be suppressed");
}
return new FingerprintIdDetails(readGroupRecord, samFile.toUri().toString());
} else {
log.error(e);
throw e;
}
}
/**
* Generates a per-sample Fingerprint for the contaminant in the supplied SAM file.
* Data is aggregated by sample, not read-group.
*/
public Map<String, Fingerprint> identifyContaminant(final Path samFile, final double contamination, final int locusMaxReads) {
final Map<String, Fingerprint> fingerprintsBySample = new HashMap<>();
try (final SamReader in = SamReaderFactory.makeDefault().enable(CACHE_FILE_BASED_INDEXES).open(samFile)) {
SequenceUtil.assertSequenceDictionariesEqual(this.haplotypes.getHeader().getSequenceDictionary(),
in.getFileHeader().getSequenceDictionary());
final SamLocusIterator iterator = new SamLocusIterator(in, haplotypes.getIntervalList(), in.hasIndex());
iterator.setEmitUncoveredLoci(true);
iterator.setMappingQualityScoreCutoff(this.minimumMappingQuality);
iterator.setQualityScoreCutoff(this.minimumBaseQuality);
// In some cases it is useful to allow duplicate reads to be used - the most common is in single-end
// sequence data where the duplicate marking may have been overly aggressive, and there is useful
// non-redundant data in the reads marked as "duplicates".
if (this.allowDuplicateReads) {
final List<SamRecordFilter> filters = new ArrayList<>(1);
filters.add(new SecondaryAlignmentFilter());
iterator.setSamFilters(filters);
}
for (final SAMReadGroupRecord rg : in.getFileHeader().getReadGroups()) {
if (!fingerprintsBySample.containsKey(rg.getSample())) {
final Fingerprint fingerprint = new Fingerprint(rg.getSample(),
samFile,
rg.getSample());
for (final HaplotypeBlock h : this.haplotypes.getHaplotypes()) {
fingerprint.add(new HaplotypeProbabilitiesFromContaminatorSequence(h, contamination));
}
fingerprintsBySample.put(rg.getSample(), fingerprint);
}
}
// Set of read/template names from which we have already sampled a base and a qual. Since we assume
// that all evidence for a haplotype is independent we can't sample two or more bases from a single
// read or read-pair because they would not be independent!
final Set<String> usedReadNames = new HashSet<>(10000);
// Now go through the data at each locus and figure stuff out!
for (final SamLocusIterator.LocusInfo info : iterator) {
// Find the matching Snp and HaplotypeProbs
final HaplotypeBlock haplotypeBlock = this.haplotypes.getHaplotype(info.getSequenceName(), info.getPosition());
final Snp snp = this.haplotypes.getSnp(info.getSequenceName(), info.getPosition());
// randomly select locusMaxReads elements from the list
final List<SamLocusIterator.RecordAndOffset> recordAndOffsetList = randomSublist(info.getRecordAndPositions(), locusMaxReads);
for (final SamLocusIterator.RecordAndOffset rec : recordAndOffsetList) {
final SAMReadGroupRecord rg = rec.getRecord().getReadGroup();
if (rg == null || !fingerprintsBySample.containsKey(rg.getSample())) {
final PicardException e = new PicardException("Unknown sample: " + (rg != null ? rg.getSample() : "(null readgroup)"));
log.error(e);
throw e;
} else {
final String readName = rec.getRecord().getReadName();
if (!usedReadNames.contains(readName)) {
final HaplotypeProbabilitiesFromContaminatorSequence probs =
(HaplotypeProbabilitiesFromContaminatorSequence) fingerprintsBySample.get(rg.getSample()).get(haplotypeBlock);
final byte base = StringUtil.toUpperCase(rec.getReadBase());
final byte qual = rec.getBaseQuality();
probs.addToProbs(snp, base, qual);
usedReadNames.add(readName);
}
}
}
}
} catch (IOException e) {
log.error("Unexpected Error while reading from " + samFile + ". Trying to continue.", e.getMessage(), e.getStackTrace());
}
return fingerprintsBySample;
}
/**
* A small utility function to choose n random elements (un-shuffled) from a list
*
* @param list A list of elements
* @param n a number of elements requested from list
* @return a list of n randomly chosen (but in the original order) elements from list.
* If the list has less than n elements it is returned in its entirety.
*/
protected static <T> List<T> randomSublist(final List<T> list, final int n) {
int availableElements = list.size();
if (availableElements <= n) return list;
int stillNeeded = n;
final Random rg = new Random();
final List<T> shortList = new ArrayList<>(n);
for (final T aList : list) {
if (rg.nextDouble() < stillNeeded / (double) availableElements) {
shortList.add(aList);
stillNeeded--;
}
if (stillNeeded == 0) break; // fast out if we do not need more elements
availableElements--;
}
return shortList;
}
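// Illustrative sketch (not part of the original source): randomSublist preserves the original
// ordering of the chosen elements, so sampling 3 elements from [a, b, c, d, e] can yield
// [a, c, e] but never [c, a, e]; asking for more elements than exist returns the list itself.
//
//     final List<String> letters = Arrays.asList("a", "b", "c", "d", "e");
//     final List<String> sample = randomSublist(letters, 3);  // e.g. [a, c, e]
//     final List<String> whole  = randomSublist(letters, 10); // the same list, since 5 <= 10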
/**
* Fingerprints one or more SAM/BAM/VCF files at all available loci within the haplotype map, using multiple threads
* to speed up the processing.
*/
public Map<FingerprintIdDetails, Fingerprint> fingerprintFiles(final Collection<Path> files, final int threads,
final int waitTime, final TimeUnit waitTimeUnit) {
// Generate fingerprints from each file
final AtomicInteger filesRead = new AtomicInteger(0);
final ExecutorService executor = new ThreadPoolExecutorWithExceptions(threads);
final ExecutorCompletionService<Path> executorCompletionService = new ExecutorCompletionService<>(executor);
final IntervalList intervals = this.haplotypes.getIntervalList();
final Map<FingerprintIdDetails, Fingerprint> retval = new ConcurrentHashMap<>();
for (final Path p : files) {
executorCompletionService.submit(() -> {
if (CheckFingerprint.fileContainsReads(p)) {
retval.putAll(fingerprintSamFile(p, intervals));
} else {
retval.putAll(fingerprintVcf(p));
}
log.debug("Processed file: " + p.toUri().toString() + " (" + filesRead.get() + ")");
if (filesRead.incrementAndGet() % 100 == 0) {
log.info("Processed " + filesRead.get() + " out of " + files.size());
}
}, p);
}
executor.shutdown();
try {
executor.awaitTermination(waitTime, waitTimeUnit);
} catch (final InterruptedException ie) {
throw new PicardException("Interrupted while waiting for executor to terminate.", ie);
}
for (int i = 0; i < files.size(); i++) {
try {
executorCompletionService.take().get();
} catch (InterruptedException | ExecutionException e) {
throw new PicardException("Failed to fingerprint", e);
}
}
return retval;
}
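// Illustrative sketch (not part of the original source): fingerprintFiles() dispatches each
// input (BAM/SAM or VCF) to a worker thread and merges the per-readgroup / per-sample
// fingerprints into one map. "checker" and "inputPaths" are hypothetical names.
//
//     final Map<FingerprintIdDetails, Fingerprint> fps =
//             checker.fingerprintFiles(inputPaths, 4, 1, TimeUnit.DAYS);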
/**
* Top level method to take a set of one or more SAM files and one or more Genotype files and compare
* each read group in each SAM file to each set of fingerprint genotypes.
*
* @param samFiles the list of SAM files to fingerprint
* @param genotypeFiles the list of genotype files from which to pull fingerprint genotypes
* @param specificSample an optional single sample whose genotypes to load from the supplied files
* @param ignoreReadGroups aggregate data into one fingerprint per file, instead of splitting by RG
*/
public List<FingerprintResults> checkFingerprints(final List<Path> samFiles,
final List<Path> genotypeFiles,
final String specificSample,
final boolean ignoreReadGroups) {
// Load the fingerprint genotypes
final List<Fingerprint> expectedFingerprints = new LinkedList<>();
for (final Path p : genotypeFiles) {
expectedFingerprints.addAll(loadFingerprints(p, specificSample).values());
}
if (expectedFingerprints.isEmpty()) {
throw new IllegalStateException("Could not find any fingerprints in: " + genotypeFiles);
}
final List<FingerprintResults> resultsList = new ArrayList<>();
final IntervalList intervals = getLociToGenotype(expectedFingerprints);
// Fingerprint the SAM files and calculate the results
for (final Path p : samFiles) {
final Map<FingerprintIdDetails, Fingerprint> fingerprintsByReadGroup = fingerprintSamFile(p, intervals);
if (ignoreReadGroups) {
final Fingerprint combinedFp = new Fingerprint(specificSample, p, null);
fingerprintsByReadGroup.values().forEach(combinedFp::merge);
final FingerprintResults results = new FingerprintResults(p, null, specificSample);
for (final Fingerprint expectedFp : expectedFingerprints) {
final MatchResults result = calculateMatchResults(combinedFp, expectedFp, 0, pLossofHet);
results.addResults(result);
}
resultsList.add(results);
} else {
for (final FingerprintIdDetails rg : fingerprintsByReadGroup.keySet()) {
final FingerprintResults results = new FingerprintResults(p, rg.platformUnit, rg.sample);
for (final Fingerprint expectedFp : expectedFingerprints) {
final MatchResults result = calculateMatchResults(fingerprintsByReadGroup.get(rg), expectedFp, 0, pLossofHet);
results.addResults(result);
}
resultsList.add(results);
}
}
}
return resultsList;
}
/**
* Top level method to take a set of one or more observed genotype (VCF) files and one or more expected genotype (VCF) files, compare
* one or more samples in the observed genotype files with one or more in the expected files, and generate results for each combination.
*
* @param observedGenotypeFiles The list of genotype files containing observed calls, from which to pull fingerprint genotypes
* @param expectedGenotypeFiles The list of genotype files containing expected calls, from which to pull fingerprint genotypes
* @param observedSample an optional single sample whose genotypes to load from the observed genotype file (if null, use all)
* @param expectedSample an optional single sample whose genotypes to load from the expected genotype file (if null, use all)
*/
public List<FingerprintResults> checkFingerprintsFromPaths(final List<Path> observedGenotypeFiles,
final List<Path> expectedGenotypeFiles,
final String observedSample,
final String expectedSample) {
// Load the expected fingerprint genotypes
final List<Fingerprint> expectedFingerprints = new ArrayList<>();
for (final Path p : expectedGenotypeFiles) {
expectedFingerprints.addAll(loadFingerprints(p, expectedSample).values());
}
if (expectedFingerprints.isEmpty()) {
throw new IllegalStateException("Could not find any fingerprints in: " + expectedGenotypeFiles);
}
final List<FingerprintResults> resultsList = new ArrayList<>();
for (final Path p : observedGenotypeFiles) {
final Map<String, Fingerprint> observedFingerprintsBySample = loadFingerprints(p, observedSample);
if (observedFingerprintsBySample.isEmpty()) {
throw new IllegalStateException("Found no fingerprints in observed genotypes file: " + observedGenotypeFiles);
}
for (final String sample : observedFingerprintsBySample.keySet()) {
final FingerprintResults results = new FingerprintResults(p, null, sample);
for (final Fingerprint expectedFp : expectedFingerprints) {
final MatchResults result = calculateMatchResults(observedFingerprintsBySample.get(sample), expectedFp, 0, pLossofHet);
results.addResults(result);
}
resultsList.add(results);
}
}
return resultsList;
}
public static MatchResults calculateMatchResults(final Fingerprint observedFp, final Fingerprint expectedFp, final double minPExpected, final double pLoH) {
return calculateMatchResults(observedFp, expectedFp, minPExpected, pLoH, true, true);
}
/**
* Compares two fingerprints and calculates a MatchResults object which contains detailed
* information about the match (or mismatch) between fingerprints including the LOD score
* for whether or not the two are likely from the same sample.
* <p>
* If comparing sequencing data to genotype data then the sequencing data should be passed
* as the observedFp and the genotype data as the expectedFp in order to get the best output.
* <p>
* In cases where the most likely genotypes from the two fingerprints do not match,
* lExpectedSample is Max(the actual pExpectedSample, minPExpected).
*/
public static MatchResults calculateMatchResults(final Fingerprint observedFp, final Fingerprint expectedFp, final double minPExpected, final double pLoH, final boolean calculateLocusInfo, final boolean calculateTumorAwareLod) {
final List<LocusResult> locusResults = calculateLocusInfo ? new ArrayList<>() : null;
double llThisSample = 0;
double llOtherSample = 0;
double lodExpectedSampleTumorNormal = 0;
double lodExpectedSampleNormalTumor = 0;
final double lminPExpected = Math.log10(minPExpected);
for (final HaplotypeProbabilities probs2 : expectedFp.values()) {
final HaplotypeBlock haplotypeBlock = probs2.getHaplotype();
final HaplotypeProbabilities probs1 = observedFp.get(haplotypeBlock);
if (probs1 == null) continue;
final HaplotypeProbabilityOfNormalGivenTumor prob1AssumingDataFromTumor;
final HaplotypeProbabilityOfNormalGivenTumor prob2AssumingDataFromTumor;
if (calculateTumorAwareLod) {
prob1AssumingDataFromTumor = new HaplotypeProbabilityOfNormalGivenTumor(probs1, pLoH);
prob2AssumingDataFromTumor = new HaplotypeProbabilityOfNormalGivenTumor(probs2, pLoH);
} else {
prob1AssumingDataFromTumor = null;
prob2AssumingDataFromTumor = null;
}
// If one is from genotype data we'd like to report the output relative
// to the genotyped SNP instead of against a random SNP from the haplotype
final Snp snp = probs2.getRepresentativeSnp();
if (calculateLocusInfo) {
final DiploidGenotype externalGenotype = probs2.getMostLikelyGenotype(snp);
final LocusResult lr = new LocusResult(snp,
externalGenotype,
probs1.getMostLikelyGenotype(snp),
probs1.getObsAllele1(),
probs1.getObsAllele2(),
probs1.getLodMostProbableGenotype(),
// expected sample log-likelihood
probs1.shiftedLogEvidenceProbabilityGivenOtherEvidence(probs2),
// random sample log-likelihood
probs1.shiftedLogEvidenceProbability(),
// probs1 is tumor probs2 is normal, correct sample lod
calculateTumorAwareLod ? prob1AssumingDataFromTumor.shiftedLogEvidenceProbabilityGivenOtherEvidence(probs2) -
prob1AssumingDataFromTumor.shiftedLogEvidenceProbability() : 0,
// probs1 is normal probs2 is tumor, correct sample lod
calculateTumorAwareLod ? probs1.shiftedLogEvidenceProbabilityGivenOtherEvidence(prob2AssumingDataFromTumor) -
probs1.shiftedLogEvidenceProbability() : 0);
locusResults.add(lr);
}
if (probs1.hasEvidence() && probs2.hasEvidence()) {
//TODO: what's the mathematics behind the lminPexpected?
llThisSample += Math.max(lminPExpected,
probs1.shiftedLogEvidenceProbabilityGivenOtherEvidence(probs2));
llOtherSample += probs1.shiftedLogEvidenceProbability();
if (calculateTumorAwareLod) {
lodExpectedSampleTumorNormal += prob1AssumingDataFromTumor.shiftedLogEvidenceProbabilityGivenOtherEvidence(probs2) -
prob1AssumingDataFromTumor.shiftedLogEvidenceProbability();
lodExpectedSampleNormalTumor += probs1.shiftedLogEvidenceProbabilityGivenOtherEvidence(prob2AssumingDataFromTumor) -
probs1.shiftedLogEvidenceProbability();
}
}
}
// TODO: prune the set of LocusResults for things that are too close together?
return new MatchResults(expectedFp.getSource(), expectedFp.getSample(), llThisSample, llOtherSample, lodExpectedSampleTumorNormal, lodExpectedSampleNormalTumor, locusResults);
}
/**
* Compares two fingerprints and calculates a MatchResults object which contains detailed
* information about the match (or mismatch) between fingerprints including the LOD score
* for whether or not the two are likely from the same sample.
* <p>
* If comparing sequencing data to genotype data then the sequencing data should be passed
* as the observedFp and the genotype data as the expectedFp in order to get the best output.
*/
public static MatchResults calculateMatchResults(final Fingerprint observedFp, final Fingerprint expectedFp) {
return calculateMatchResults(observedFp, expectedFp, 0, 0);
}
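// Illustrative sketch (not part of the original source): comparing a sequencing-derived
// fingerprint against a genotype-derived one. The returned MatchResults carries the summed
// log-likelihoods for "same sample" versus "different sample" computed above; observedFp and
// expectedFp are hypothetical Fingerprint instances, e.g. obtained from fingerprintSamFile()
// and loadFingerprints() respectively.
//
//     final MatchResults mr = calculateMatchResults(observedFp, expectedFp);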
}
|
/*
* @ControllerHelper 1.0 19/02/13. Sistema Integral de Gestion Hospitalaria
*/
package py.una.med.base.util;
import static py.una.med.base.util.Checker.notNull;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.TimeZone;
import javax.annotation.Nonnull;
import javax.el.ELContext;
import javax.el.ExpressionFactory;
import javax.el.MethodExpression;
import javax.el.ValueExpression;
import javax.faces.FacesException;
import javax.faces.application.FacesMessage;
import javax.faces.application.FacesMessage.Severity;
import javax.faces.component.UIComponent;
import javax.faces.component.UIComponentBase;
import javax.faces.component.UIInput;
import javax.faces.component.UISelectOne;
import javax.faces.component.UIViewRoot;
import javax.faces.component.html.HtmlOutputText;
import javax.faces.context.FacesContext;
import javax.faces.convert.DateTimeConverter;
import org.hibernate.exception.ConstraintViolationException;
import org.richfaces.component.UICalendar;
import org.richfaces.component.UIColumn;
import org.richfaces.component.UIExtendedDataTable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import py.una.med.base.math.Quantity;
import py.una.med.base.reports.Column;
import com.google.common.annotations.VisibleForTesting;
/**
* Class that implements general functionality for manipulating views; it
* provides features that integrate all parts of the system. It is a
* singleton shared by all sessions.
*
* @author Arturo Volpe
* @author Nathalia Ochoa
* @since 2.0 08/02/2013
* @version 2.0 19/02/2013
*/
@Component
public class ControllerHelper {
private static final String NULL_SEVERITY_IS_NOT_ALLOWED = "Null severity is not allowed";
private static final String EMPTY_STRING = "";
private static final String EL_VALUE_PROPERTY = "value";
@Autowired
private UniqueHelper uniqueHelper;
@Autowired
private I18nHelper i18nHelper;
public void createGlobalFacesMessage(final Severity severity,
@Nonnull final String summary, @Nonnull final String detail) {
createGlobalSimpleMessage(severity, getString(notNull(summary)),
getString(notNull(detail)));
}
public void createGlobalFacesMessage(final Severity severity,
final String summary) {
createGlobalSimpleMessage(severity, getString(summary), null);
}
public void createGlobalFacesMessageSimple(final Severity severity,
final String summary) {
createGlobalSimpleMessage(severity, summary, null);
}
public void createGlobalSimpleMessage(@Nonnull final Severity severity,
final String summary, final String detail) {
notNull(severity, NULL_SEVERITY_IS_NOT_ALLOWED);
String sum = summary == null ? EMPTY_STRING : summary;
String det = detail == null ? EMPTY_STRING : detail;
createFacesMessageSimple(severity, sum, det, null);
}
@Deprecated
public String getMessage(final String code) {
return i18nHelper.getString(code);
}
/**
* Creates a message for a specific component.
*
* @param severity
* severity, see {@link FacesMessage}
* @param summary
* internationalized key of the summary
* @param detail
* internationalized key of the detail
* @param componentId
* name of the component,
* {@link ControllerHelper#getClientId(String)}
*/
public void createFacesMessage(final Severity severity,
final String summary, final String detail, final String componentId) {
createFacesMessageSimple(severity, getString(summary),
getString(detail), componentId);
}
public void createFacesMessageSimple(@Nonnull final Severity severity,
final String summary, final String detail, final String componentId) {
notNull(severity, NULL_SEVERITY_IS_NOT_ALLOWED);
addMessage(severity, summary, detail, componentId);
}
public void addInfoMessage(@Nonnull final String summary,
final Object ... params) {
addMessage(FacesMessage.SEVERITY_INFO,
i18nHelper.getString(summary, params), null, null);
}
/**
* Adds a message with warn severity to a component.
*
* @see I18nHelper#getString(String, Object...)
* @param id
* identifier of the component (may or may not be a client id).
* @param summary
* summary of the message
* @param detail
* detail of the message
*/
public void addWarnMessage(@Nonnull final String id,
@Nonnull final String summary, final String detail) {
addMessage(FacesMessage.SEVERITY_WARN, summary, detail, id);
}
/**
* Adds an internationalized message with <b>warn</b> severity.
*
* @param summary
* @param detail
*/
public void addGlobalWarnMessage(@Nonnull final String summary,
final String detail) {
addMessage(FacesMessage.SEVERITY_WARN, summary, detail, null);
}
/**
* Adds a message with <b>warn</b> severity.
*
* <p>
* The strings passed in must already be internationalized.
* </p>
*
* @param summary
* @param detail
*/
public void addSimpleGlobalWarnMessage(@Nonnull final String summary,
final String detail) {
addMessage(FacesMessage.SEVERITY_WARN, summary, detail, null);
}
private void addMessage(FacesMessage.Severity severity, String summary,
String detail, String id) {
String realId = id;
if (realId != null && realId.indexOf(':') == -1) {
realId = getClientId(id);
}
getContext().addMessage(realId,
new FacesMessage(severity, summary, detail));
}
/**
* Returns the clientId for a component with the given id. This is needed
* because JSF generates keys different from the ones configured on the
* components, so that the same ID is not repeated. For example, if a form
* contains a label, with formID and labelID as their respective IDs, JSF
* will generate identifiers like formID for the form and formID:labelID
* for the label; this method receives "labelID" as a parameter and returns
* "formID:labelID".
*
* @param id
* view id of the element to look up
* @return client-side id of the component
*/
public String getClientId(final String id) {
UIComponent c = findComponent(id);
if (c == null) {
throw new IllegalArgumentException(
"NO se encontro comoponente con id " + id);
}
return c.getClientId(getContext());
}
/**
* Given an ID (see {@link ControllerHelper#getClientId(String)}), returns
* the component it belongs to.
*
* @param id
* client id used to obtain the component
* @return view component
*/
public UIComponent findComponent(final String id) {
FacesContext context = getContext();
UIViewRoot root = context.getViewRoot();
return findComponent(root, id);
}
/**
* Returns an EL expression corresponding to a method.
*
* @param valueExpression
* string representing the expression.
* @param expectedReturnType
* class of the type the expression is expected to return
* @param expectedParamTypes
* classes of the parameters the method is expected to receive
*
* @return the corresponding {@link MethodExpression}
*/
public MethodExpression createMethodExpression(
final String valueExpression, final Class<?> expectedReturnType,
final Class<?> ... expectedParamTypes) {
MethodExpression methodExpression = null;
try {
FacesContext fc = getContext();
ExpressionFactory factory = fc.getApplication()
.getExpressionFactory();
methodExpression = factory.createMethodExpression(
fc.getELContext(), valueExpression, expectedReturnType,
expectedParamTypes);
} catch (Exception e) {
throw new FacesException("Method expression '" + valueExpression
+ "' could not be created.", e);
}
return methodExpression;
}
/**
* Scans the columns.xhtml file, where the columns displayed in the grid
* are defined, and returns them.
*
* @return list of columns -> [header, field]
*/
public List<Column> getColumns() {
String id = "idListEntities";
List<Column> columns = new LinkedList<Column>();
UIExtendedDataTable table = (UIExtendedDataTable) findComponent(id);
for (UIComponent ui : table.getChildren()) {
if (ui instanceof UIColumn) {
Column toAdd = buildColumn(ui);
if (toAdd != null) {
columns.add(buildColumn(ui));
}
}
}
return columns;
}
/**
* Builds a {@link Column} from the given UI column component.
*
* @param ui
*/
private Column buildColumn(UIComponent ui) {
ValueExpression expressionHeader = ((HtmlOutputText) ((UIColumn) ui)
.getHeader()).getValueExpression(EL_VALUE_PROPERTY);
String header = ELParser.getHeaderColumn(expressionHeader
.getExpressionString());
for (UIComponent children : ui.getChildren()) {
if (children instanceof HtmlOutputText) {
HtmlOutputText text = (HtmlOutputText) children;
ValueExpression expression = text
.getValueExpression(EL_VALUE_PROPERTY);
String field = ELParser.getFieldByExpression(expression
.getExpressionString());
return new Column(header, field);
}
}
return null;
}
public Exception convertException(final Exception e, final Class<?> clazz) {
if (e instanceof ConstraintViolationException) {
return uniqueHelper.createUniqueException(e, clazz);
}
return e;
}
public void updateModel(String componentID) {
// Get the component by its id
UIComponent formulario = findComponent(componentID);
updateModel(formulario);
}
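// Illustrative sketch (not part of the original source): pushes the submitted values of a
// form's inputs into the model without going through the full JSF validation phase.
// "formID" is a hypothetical view id of a form component (see getClientId above).
//
//     controllerHelper.updateModel("formID");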
private void updateModel(UIComponent formulario) {
FacesContext context = getContext();
ELContext elContext = getContext().getELContext();
Iterator<UIComponent> iter = formulario.getFacetsAndChildren();
while (iter.hasNext()) {
UIComponent component = iter.next();
// If it is a submittable (input-like) value
if (component instanceof UISelectOne) {
UISelectOne com = (UISelectOne) component;
Object newValue = com.getSubmittedValue();
ValueExpression value = com
.getValueExpression(EL_VALUE_PROPERTY);
if (newValue != null && com.getConverter() != null) {
// If a converter is defined, then use that
// converter to obtain the value
newValue = com.getConverter().getAsObject(context, com,
newValue.toString());
}
value.setValue(elContext, newValue);
} else if (component instanceof UICalendar) {
UICalendar com = (UICalendar) component;
Object newValue = com.getSubmittedValue();
if (newValue != null) {
ValueExpression value = com
.getValueExpression(EL_VALUE_PROPERTY);
newValue = getConverter().getAsObject(context, component,
newValue.toString());
value.setValue(elContext, newValue);
}
} else if (component instanceof UIInput
&& !(component instanceof UICalendar)) {
UIInput com = (UIInput) component;
Object newValue = com.getSubmittedValue();
ValueExpression value = com
.getValueExpression(EL_VALUE_PROPERTY);
if (value.getType(elContext).equals(Quantity.class)) {
if (StringUtils.isValid(newValue)) {
newValue = new Quantity((String) newValue);
} else {
newValue = Quantity.ZERO;
}
}
if (newValue instanceof String
&& !StringUtils.isValid(newValue)) {
newValue = null;
}
value.setValue(elContext, newValue);
}
updateModel(component);
}
}
private DateTimeConverter getConverter() {
DateTimeConverter converter = new DateTimeConverter();
converter.setPattern(FormatProvider.DATE_FORMAT);
converter.setTimeZone(TimeZone.getDefault());
return converter;
}
@VisibleForTesting
protected String getRealId(String id) {
if (id.indexOf(':') != -1) {
return getClientId(id);
}
return id;
}
/**
* Finds component with the given id
*/
@VisibleForTesting
protected UIComponent findComponent(final UIComponent c, final String id) {
if (id.equals(c.getId())) {
return c;
}
Iterator<UIComponent> kids = c.getFacetsAndChildren();
while (kids.hasNext()) {
UIComponent found = findComponent(kids.next(), id);
if (found != null) {
return found;
}
}
return null;
}
private String getString(final String code) {
return i18nHelper.getString(code);
}
/**
* Returns the current context.
*
* @return the current {@link FacesContext}
*/
@VisibleForTesting
protected FacesContext getContext() {
return FacesContext.getCurrentInstance();
}
}
|
package redis.clients.jedis;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import redis.clients.jedis.exceptions.JedisConnectionException;
import redis.clients.jedis.exceptions.JedisException;
public class JedisSentinelPool extends JedisPoolAbstract {
protected GenericObjectPoolConfig poolConfig;
protected int timeout = Protocol.DEFAULT_TIMEOUT;
protected String password;
protected int database = Protocol.DEFAULT_DATABASE;
protected Set<MasterListener> masterListeners = new HashSet<MasterListener>();
protected Logger log = Logger.getLogger(getClass().getName());
private volatile JedisFactory factory;
private volatile HostAndPort currentHostMaster;
public JedisSentinelPool(String masterName, Set<String> sentinels,
final GenericObjectPoolConfig poolConfig) {
this(masterName, sentinels, poolConfig, Protocol.DEFAULT_TIMEOUT, null,
Protocol.DEFAULT_DATABASE);
}
public JedisSentinelPool(String masterName, Set<String> sentinels) {
this(masterName, sentinels, new GenericObjectPoolConfig(), Protocol.DEFAULT_TIMEOUT, null,
Protocol.DEFAULT_DATABASE);
}
public JedisSentinelPool(String masterName, Set<String> sentinels, String password) {
this(masterName, sentinels, new GenericObjectPoolConfig(), Protocol.DEFAULT_TIMEOUT, password);
}
public JedisSentinelPool(String masterName, Set<String> sentinels,
final GenericObjectPoolConfig poolConfig, int timeout, final String password) {
this(masterName, sentinels, poolConfig, timeout, password, Protocol.DEFAULT_DATABASE);
}
public JedisSentinelPool(String masterName, Set<String> sentinels,
final GenericObjectPoolConfig poolConfig, final int timeout) {
this(masterName, sentinels, poolConfig, timeout, null, Protocol.DEFAULT_DATABASE);
}
public JedisSentinelPool(String masterName, Set<String> sentinels,
final GenericObjectPoolConfig poolConfig, final String password) {
this(masterName, sentinels, poolConfig, Protocol.DEFAULT_TIMEOUT, password);
}
public JedisSentinelPool(String masterName, Set<String> sentinels,
final GenericObjectPoolConfig poolConfig, int timeout, final String password,
final int database) {
this.poolConfig = poolConfig;
this.timeout = timeout;
this.password = password;
this.database = database;
HostAndPort master = initSentinels(sentinels, masterName);
initPool(master);
}
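// Illustrative usage sketch (not part of the original source): sentinels are passed as
// "host:port" strings; the master name and sentinel address below are hypothetical.
//
//     Set<String> sentinels = new HashSet<String>();
//     sentinels.add("localhost:26379");
//     JedisSentinelPool pool = new JedisSentinelPool("mymaster", sentinels);
//     Jedis jedis = pool.getResource();
//     try {
//         jedis.set("key", "value");
//     } finally {
//         jedis.close(); // returns the connection to the pool (getResource sets the data source)
//     }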
public void destroy() {
for (MasterListener m : masterListeners) {
m.shutdown();
}
super.destroy();
}
public HostAndPort getCurrentHostMaster() {
return currentHostMaster;
}
private void initPool(HostAndPort master) {
if (!master.equals(currentHostMaster)) {
currentHostMaster = master;
if (factory == null) {
factory = new JedisFactory(master.getHost(), master.getPort(), timeout, password, database);
initPool(poolConfig, factory);
} else {
factory.setHostAndPort(currentHostMaster);
// although we clear the pool, we still have to check the returned object
// in getResource; this call only clears idle instances, not borrowed instances
internalPool.clear();
}
log.info("Created JedisPool to master at " + master);
}
}
private HostAndPort initSentinels(Set<String> sentinels, final String masterName) {
HostAndPort master = null;
boolean sentinelAvailable = false;
log.info("Trying to find master from available Sentinels...");
for (String sentinel : sentinels) {
final HostAndPort hap = toHostAndPort(Arrays.asList(sentinel.split(":")));
log.fine("Connecting to Sentinel " + hap);
Jedis jedis = null;
try {
jedis = new Jedis(hap.getHost(), hap.getPort());
List<String> masterAddr = jedis.sentinelGetMasterAddrByName(masterName);
// connected to sentinel...
sentinelAvailable = true;
if (masterAddr == null || masterAddr.size() != 2) {
log.warning("Can not get master addr, master name: " + masterName + ". Sentinel: " + hap
+ ".");
continue;
}
master = toHostAndPort(masterAddr);
log.fine("Found Redis master at " + master);
break;
} catch (JedisConnectionException e) {
log.warning("Cannot connect to sentinel running @ " + hap + ". Trying next one.");
} finally {
if (jedis != null) {
jedis.close();
}
}
}
if (master == null) {
if (sentinelAvailable) {
// can connect to sentinel, but the master name does not seem to be monitored
throw new JedisException("Can connect to sentinel, but " + masterName
+ " seems to be not monitored...");
} else {
throw new JedisConnectionException("All sentinels down, cannot determine where "
+ masterName + " master is running...");
}
}
log.info("Redis master running at " + master + ", starting Sentinel listeners...");
for (String sentinel : sentinels) {
final HostAndPort hap = toHostAndPort(Arrays.asList(sentinel.split(":")));
MasterListener masterListener = new MasterListener(masterName, hap.getHost(), hap.getPort());
masterListeners.add(masterListener);
masterListener.start();
}
return master;
}
private HostAndPort toHostAndPort(List<String> getMasterAddrByNameResult) {
String host = getMasterAddrByNameResult.get(0);
int port = Integer.parseInt(getMasterAddrByNameResult.get(1));
return new HostAndPort(host, port);
}
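// Illustrative sketch (not part of the original source): toHostAndPort expects the
// two-element address returned by SENTINEL get-master-addr-by-name, e.g.
//
//     toHostAndPort(Arrays.asList("127.0.0.1", "6379")) -> new HostAndPort("127.0.0.1", 6379)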
@Override
public Jedis getResource() {
while (true) {
Jedis jedis = super.getResource();
jedis.setDataSource(this);
// get a reference because it can change concurrently
final HostAndPort master = currentHostMaster;
final HostAndPort connection = new HostAndPort(jedis.getClient().getHost(), jedis.getClient()
.getPort());
if (master.equals(connection)) {
// connected to the correct master
return jedis;
} else {
returnBrokenResource(jedis);
}
}
}
protected void returnBrokenResource(final Jedis resource) {
if (resource != null) {
returnBrokenResourceObject(resource);
}
}
protected void returnResource(final Jedis resource) {
if (resource != null) {
resource.resetState();
returnResourceObject(resource);
}
}
protected class MasterListener extends Thread {
protected String masterName;
protected String host;
protected int port;
protected long subscribeRetryWaitTimeMillis = 5000;
protected Jedis j;
protected AtomicBoolean running = new AtomicBoolean(false);
protected MasterListener() {
}
public MasterListener(String masterName, String host, int port) {
this.masterName = masterName;
this.host = host;
this.port = port;
}
public MasterListener(String masterName, String host, int port,
long subscribeRetryWaitTimeMillis) {
this(masterName, host, port);
this.subscribeRetryWaitTimeMillis = subscribeRetryWaitTimeMillis;
}
public void run() {
running.set(true);
while (running.get()) {
j = new Jedis(host, port);
try {
j.subscribe(new JedisPubSub() {
@Override
public void onMessage(String channel, String message) {
log.fine("Sentinel " + host + ":" + port + " published: " + message + ".");
String[] switchMasterMsg = message.split(" ");
if (switchMasterMsg.length > 3) {
if (masterName.equals(switchMasterMsg[0])) {
initPool(toHostAndPort(Arrays.asList(switchMasterMsg[3], switchMasterMsg[4])));
} else {
log.fine("Ignoring message on +switch-master for master name "
+ switchMasterMsg[0] + ", our master name is " + masterName);
}
} else {
log.severe("Invalid message received on Sentinel " + host + ":" + port
+ " on channel +switch-master: " + message);
}
}
}, "+switch-master");
} catch (JedisConnectionException e) {
if (running.get()) {
log.severe("Lost connection to Sentinel at " + host + ":" + port
+ ". Sleeping 5000ms and retrying.");
try {
Thread.sleep(subscribeRetryWaitTimeMillis);
} catch (InterruptedException e1) {
e1.printStackTrace();
}
} else {
log.fine("Unsubscribing from Sentinel at " + host + ":" + port);
}
}
}
}
public void shutdown() {
try {
log.fine("Shutting down listener on " + host + ":" + port);
running.set(false);
// This isn't good, the Jedis object is not thread safe
j.disconnect();
} catch (Exception e) {
log.log(Level.SEVERE,"Caught exception while shutting down: ",e);
}
}
}
}
|
package se.kth.csc.controller;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import se.kth.csc.auth.Role;
import se.kth.csc.model.Account;
import se.kth.csc.model.Queue;
import se.kth.csc.model.QueuePosition;
import se.kth.csc.payload.QueueCreationInfo;
import se.kth.csc.persist.AccountStore;
import se.kth.csc.persist.QueuePositionStore;
import se.kth.csc.persist.QueueStore;
import javax.servlet.http.HttpServletRequest;
import java.security.Principal;
import java.util.List;
import java.net.InetAddress;
import java.net.UnknownHostException;
@Controller
@RequestMapping(value = "/queue")
public class QueueController {
private static final Logger log = LoggerFactory.getLogger(QueueController.class);
private final ObjectMapper objectMapper;
private final QueueStore queueStore;
private final AccountStore accountStore;
private final QueuePositionStore queuePositionStore;
private static final int MAX_LEN = 30; //max len for comment fields
protected QueueController() {
// Needed for injection
objectMapper = null;
queueStore = null;
accountStore = null;
queuePositionStore = null;
}
@Autowired
public QueueController(
ObjectMapper objectMapper,
QueueStore queueStore,
AccountStore accountStore,
QueuePositionStore queuePositionStore) {
this.objectMapper = objectMapper;
this.queueStore = queueStore;
this.accountStore = accountStore;
this.queuePositionStore = queuePositionStore;
}
@RequestMapping(value = {"", "/"}, method = RequestMethod.GET)
public ModelAndView list(HttpServletRequest request) throws JsonProcessingException {
List<Queue> queues;
Principal user = request.getUserPrincipal();
if (request.isUserInRole("admin")) {
queues = queueStore.fetchAllQueues();
} else {
queues = queueStore.fetchAllActiveQueues();
if (user != null) { // Anonymous user check
List<Queue> modQueues = queueStore.fetchAllModeratedQueues(getCurrentAccount(user));
List<Queue> ownQueues = queueStore.fetchAllOwnedQueues(getCurrentAccount(user));
for(Queue q : modQueues){
if(!queues.contains(q)){
queues.add(q);
}
}
for(Queue q : ownQueues){
if(!queues.contains(q)){
queues.add(q);
}
}
}
}
String queuesJson = objectMapper.writerWithView(Queue.class).writeValueAsString(queues);
return new ModelAndView("queue/list", ImmutableMap.of("queues", queues, "queuesJson", queuesJson));
}
/**
* @param principal
* @return the account of the given principal, return null if the principal is null
*/
private Account getCurrentAccount(Principal principal) {
if (principal != null) // Anonymous users won't have a principal
return accountStore.fetchAccountWithPrincipalName(principal.getName());
else {
return null;
}
}
@Transactional
@RequestMapping(value = "/create", method = RequestMethod.POST)
public String create(@ModelAttribute("queueCreationInfo") QueueCreationInfo queueCreationInfo,
HttpServletRequest request,
Principal principal)
throws ForbiddenException, BadNameException {
if (principal == null) // Anonymous user
throw new ForbiddenException();
if (request.isUserInRole(Role.ADMIN.getAuthority())) {
String queueName = queueCreationInfo.getName();
if (queueName.trim().length() > 0) {
Queue queue = new Queue();
queue.setName(queueName);
queue.addOwner(getCurrentAccount(principal));
queue.setActive(true);
queue.setLocked(false);
queueStore.storeQueue(queue);
return "redirect:/queue";
} else {
throw new BadNameException();
}
} else {
throw new ForbiddenException();
}
}
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public ModelAndView show(@PathVariable("id") int id, Principal principal, HttpServletRequest request)
throws NotFoundException, JsonProcessingException {
Queue queue = queueStore.fetchQueueWithId(id);
if (queue == null) {
throw new NotFoundException();
}
String queueJson = objectMapper.writerWithView(Queue.class).writeValueAsString(queue);
String hostName = "";
try{
hostName = InetAddress.getByName(request.getRemoteHost()).getCanonicalHostName();
} catch (UnknownHostException e){
log.debug(e.getMessage());
}
Account account = getCurrentAccount(principal);
if (account != null) {
return new ModelAndView("queue/show", ImmutableMap.of("queue", queue, "queueJson", queueJson,
"account", account, "hostName", hostName));
} else { // TODO Cannot be null in map
return new ModelAndView("queue/show", ImmutableMap.of("queue", queue, "queueJson", queueJson,
"account", account, "hostName", hostName));
}
}
@Transactional
@RequestMapping(value = "/{id}/remove", method = RequestMethod.POST)
public String remove(@PathVariable("id") int id, HttpServletRequest request)
throws NotFoundException, ForbiddenException {
Account account = getCurrentAccount(request.getUserPrincipal());
if (account == null) // Anonymous user
throw new ForbiddenException();
Queue queue = queueStore.fetchQueueWithId(id);
if (queue == null) {
throw new NotFoundException();
}
if (account.canEditQueue(queue)) {
queueStore.removeQueue(queue);
return "redirect:/queue";
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/position/create", method = RequestMethod.POST)
public String createPosition(@PathVariable("id") int id, Principal principal) throws Exception {
Queue queue = queueStore.fetchQueueWithId(id);
if (queue == null) {
throw new NotFoundException();
}
// Users not logged in won't have a principal
if (!queue.isActive() || queue.isLocked() || principal == null) {
throw new ForbiddenException();
} else {
// Check if user already in queue. If so, throw exception.
for (QueuePosition queuePos : queue.getPositions()) {
if (queuePos.getAccount().getPrincipalName().equals(principal.getName())) {
throw new ForbiddenException();
}
}
}
QueuePosition queuePosition = new QueuePosition();
queuePosition.setQueue(queue);
queuePosition.setAccount(getCurrentAccount(principal));
queuePosition.setStartTime(DateTime.now());
queuePositionStore.storeQueuePosition(queuePosition);
queue.getPositions().add(queuePosition);
return "redirect:/queue/" + id;
}
@Transactional
@RequestMapping(value = "/{id}/position/{positionId}/remove", method = RequestMethod.POST)
public String deletePosition(@PathVariable("id") int id, @PathVariable("positionId") int positionId,
HttpServletRequest request, Principal principal) throws Exception {
Account account = getCurrentAccount(principal);
QueuePosition queuePosition = queuePositionStore.fetchQueuePositionWithId(positionId);
if (queuePosition == null) {
throw new NotFoundException();
} else if (account == null) { // Anonymous user
throw new ForbiddenException();
}
if (request.isUserInRole(Role.ADMIN.getAuthority()) || queuePosition.getAccount().equals(account)) {
Queue queue = queueStore.fetchQueueWithId(id);
if (queue == null) {
throw new NotFoundException();
}
queue.getPositions().remove(queuePosition);
queuePositionStore.removeQueuePosition(queuePosition);
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/position/{positionId}/location", method = {RequestMethod.POST})
public String updateLocation(@PathVariable("id") int id, @PathVariable("positionId") int positionId, String location,
Principal principal)
throws NotFoundException, ForbiddenException {
QueuePosition queuePosition = queuePositionStore.fetchQueuePositionWithId(positionId);
Queue queue = queueStore.fetchQueueWithId(id);
if (queuePosition == null || queue == null) {
throw new NotFoundException();
}
// Null if anonymous
if (principal == null || !getCurrentAccount(principal).equals(queuePosition.getAccount()))
throw new ForbiddenException();
int length = Math.min(location.length(), MAX_LEN);
queuePosition.setLocation(location.substring(0, length));
return "redirect:/queue/" + id;
}
@Transactional
@RequestMapping(value = "/{id}/position/{positionId}/comment", method = {RequestMethod.POST})
public String updateComment(@PathVariable("id") int id, @PathVariable("positionId") int positionId, String comment,
Principal principal)
throws NotFoundException, ForbiddenException {
QueuePosition queuePosition = queuePositionStore.fetchQueuePositionWithId(positionId);
Queue queue = queueStore.fetchQueueWithId(id);
if (queuePosition == null || queue == null) {
throw new NotFoundException();
}
// Null if anonymous
if (principal == null || !getCurrentAccount(principal).equals(queuePosition.getAccount()))
throw new ForbiddenException();
int length = Math.min(comment.length(), MAX_LEN);
queuePosition.setComment(comment.substring(0, length));
return "redirect:/queue/" + id;
}
@Transactional
@RequestMapping(value = "/{id}/close", method = RequestMethod.POST)
public String closeQueue(@PathVariable("id") int id, HttpServletRequest request)
throws ForbiddenException {
Account account = getCurrentAccount(request.getUserPrincipal());
if (account == null) // Anonymous user
throw new ForbiddenException();
Queue queue = queueStore.fetchQueueWithId(id);
if (account.canModerateQueue(queue)) {
queue.setActive(false);
for (QueuePosition pos : queue.getPositions ()) {
queuePositionStore.removeQueuePosition(queuePositionStore.fetchQueuePositionWithId(pos.getId()));
}
queue.getPositions().clear();
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/open", method = RequestMethod.POST)
public String openQueue(@PathVariable("id") int id, HttpServletRequest request)
throws ForbiddenException {
Account account = getCurrentAccount(request.getUserPrincipal());
if (account == null) // Anonymous user
throw new ForbiddenException();
Queue queue = queueStore.fetchQueueWithId(id);
if (account.canModerateQueue(queue)) {
queue.setActive(true);
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/lock", method = RequestMethod.POST)
public String lockQueue(@PathVariable("id") int id, HttpServletRequest request)
throws ForbiddenException {
Account account = getCurrentAccount(request.getUserPrincipal());
if (account == null) // Anonymous user
throw new ForbiddenException();
Queue queue = queueStore.fetchQueueWithId(id);
if (account.canModerateQueue(queue)) {
queue.setLocked(true);
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/unlock", method = RequestMethod.POST)
public String unlockQueue(@PathVariable("id") int id, HttpServletRequest request)
throws ForbiddenException {
Account account = getCurrentAccount(request.getUserPrincipal());
if (account == null) // Anonymous user
throw new ForbiddenException();
Queue queue = queueStore.fetchQueueWithId(id);
if (account.canModerateQueue(queue)) {
queue.setLocked(false);
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/add-owner", method = RequestMethod.POST)
public String addQueueOwner(@RequestParam("name") String newQueueOwner,
@PathVariable("id") int id, HttpServletRequest request)
throws NotFoundException, ForbiddenException {
Account accountOfAdder = getCurrentAccount(request.getUserPrincipal());
if (accountOfAdder == null) // Anonymous user
throw new ForbiddenException();
Account accountToAdd = accountStore.fetchAccountWithPrincipalName(newQueueOwner);
Queue queue = queueStore.fetchQueueWithId(id);
if(accountOfAdder.canEditQueue(queue)) {
if(accountToAdd == null) {
log.info("Account " + newQueueOwner + " could not be found");
throw new NotFoundException("Could not find the account " + newQueueOwner);
}
queue.addOwner(accountToAdd);
log.info("Queue with id " + id + " now has " + newQueueOwner
+ " as a queue owner");
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/remove-owner", method = RequestMethod.POST)
public String removeQueueOwner(@RequestParam("name") String oldOwnerName,
@PathVariable("id") int id, HttpServletRequest request)
throws NotFoundException, ForbiddenException {
Account accountOfRemover = getCurrentAccount(request.getUserPrincipal());
if (accountOfRemover == null) // Anonymous user
throw new ForbiddenException();
Account accountToRemove = accountStore.fetchAccountWithPrincipalName(oldOwnerName);
Queue queue = queueStore.fetchQueueWithId(id);
if(accountOfRemover.canEditQueue(queue)) {
if(accountToRemove == null) {
log.info("Account " + oldOwnerName + " could not be found");
throw new NotFoundException("Couldn't find the owner " + oldOwnerName);
}
queue.removeOwner(accountToRemove);
log.info(oldOwnerName + " remove from ownerlist of queue with id " + id);
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/add-moderator", method = RequestMethod.POST)
public String addQueueModerator(@RequestParam("name") String newQueueModerator,
@PathVariable("id") int id, HttpServletRequest request)
throws NotFoundException, ForbiddenException {
Account accountOfAdder = getCurrentAccount(request.getUserPrincipal());
if (accountOfAdder == null) // Anonymous user
throw new ForbiddenException();
Account accountToAdd = accountStore.fetchAccountWithPrincipalName(newQueueModerator);
Queue queue = queueStore.fetchQueueWithId(id);
if(accountOfAdder.canEditQueue(queue)) {
if(accountToAdd == null) {
log.info("Account " + newQueueModerator + " could not be found");
throw new NotFoundException("Could not find the account " + newQueueModerator);
}
queue.addModerator(accountToAdd);
log.info("Queue with id " + id + " now has " + newQueueModerator
+ " as a queue moderator");
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
@Transactional
@RequestMapping(value = "/{id}/remove-moderator", method = RequestMethod.POST)
public String removeQueueModerator(@RequestParam("name") String oldModeratorName,
@PathVariable("id") int id, HttpServletRequest request)
throws NotFoundException, ForbiddenException {
Account accountOfRemover = getCurrentAccount(request.getUserPrincipal());
if (accountOfRemover == null) // Anonymous user
throw new ForbiddenException();
Account accountToRemove = accountStore.fetchAccountWithPrincipalName(oldModeratorName);
Queue queue = queueStore.fetchQueueWithId(id);
if(accountOfRemover.canEditQueue(queue)) {
if(accountToRemove == null) {
log.info("Account " + oldModeratorName + " could not be found");
throw new NotFoundException("Couldn't find the moderator " + oldModeratorName);
}
queue.removeModerator(accountToRemove);
log.info(oldModeratorName + " remove from moderatorlist of queue with id " + id);
return "redirect:/queue/" + id;
} else {
throw new ForbiddenException();
}
}
}
|
package skadistats.clarity.examples.seek;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import skadistats.clarity.processor.entities.UsesEntities;
import skadistats.clarity.processor.runner.ControllableRunner;
import skadistats.clarity.source.MappedFileSource;
import java.util.Random;
@UsesEntities
public class Main {
private final int N_SEEKS = 1000;
private final Logger log = LoggerFactory.getLogger(Main.class);
public void runSeek(String[] args) throws Exception {
ControllableRunner runner = new ControllableRunner(new MappedFileSource(args[0])).runWith(this);
int lastTick = runner.getLastTick();
Random r = new Random();
int i = N_SEEKS;
try {
long tStart = System.nanoTime();
while (i-- > 0) {
int nextTick = r.nextInt(lastTick);
runner.seek(nextTick);
}
long tTick = System.nanoTime() - tStart;
double tMs = tTick / 1000000.0d;
log.warn("{} seek operations took {}ms, {}ms/seek", N_SEEKS, tMs, tMs / N_SEEKS);
} finally {
runner.halt();
}
}
public static void main(String[] args) throws Exception {
new Main().runSeek(args);
}
}
|
package tigase.muc.modules;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import tigase.component.exceptions.RepositoryException;
import tigase.criteria.Criteria;
import tigase.criteria.ElementCriteria;
import tigase.muc.Affiliation;
import tigase.muc.DateUtil;
import tigase.muc.Role;
import tigase.muc.Room;
import tigase.muc.RoomConfig;
import tigase.muc.exceptions.MUCException;
import tigase.muc.history.HistoryProvider;
import tigase.muc.logger.MucLogger;
import tigase.server.Message;
import tigase.server.Packet;
import tigase.util.TigaseStringprepException;
import tigase.xml.Element;
import tigase.xml.XMLNodeIfc;
import tigase.xmpp.Authorization;
import tigase.xmpp.BareJID;
import tigase.xmpp.JID;
/**
* @author bmalkow
*
*/
public class PresenceModuleImpl extends AbstractMucModule implements PresenceModule {
/**
* Thread that delivers queued packets after a short delay.
*/
public static class DelayDeliveryThread extends Thread {
/**
* Callback used by {@link DelayDeliveryThread} to send delayed packets.
*/
public static interface DelDeliverySend {
/**
* Method description
*
*
* @param packet
*/
void sendDelayedPacket(Packet packet);
}
private final LinkedList<Element[]> items = new LinkedList<Element[]>();
private final DelDeliverySend sender;
/**
* Constructs ...
*
*
* @param component
*/
public DelayDeliveryThread(DelDeliverySend component) {
this.sender = component;
}
/**
 * Queues a collection of elements for delayed delivery.
 *
 * @param elements the elements to queue; ignored if null or empty
 */
public void put(Collection<Element> elements) {
if ((elements != null) && (elements.size() > 0)) {
items.push(elements.toArray(new Element[] {}));
}
}
/**
 * Queues a single element for delayed delivery.
 *
 * @param element the element to queue
 */
public void put(Element element) {
items.add(new Element[] { element });
}
/**
 * Polls the queue roughly every half second and sends any pending elements.
 */
@Override
public void run() {
try {
do {
sleep(553);
if (items.size() > 0) {
Element[] toSend = items.poll();
if (toSend != null) {
for (Element element : toSend) {
try {
Packet p = Packet.packetInstance(element);
p.setXMLNS(Packet.CLIENT_XMLNS);
sender.sendDelayedPacket(p);
} catch (TigaseStringprepException ex) {
if (log.isLoggable(Level.INFO)) {
log.info("Packet addressing problem, stringprep failed: " + element);
}
}
}
}
}
} while (true);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
private static final Criteria CRIT = ElementCriteria.name("presence");
/** Module logger. */
protected static final Logger log = Logger.getLogger(PresenceModule.class.getName());
public static void addCodes(PresenceWrapper wrapper, boolean newRoomCreated, String newNickName) {
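// MUC status codes (XEP-0045): 201 = a new room has been created, 303 = the occupant's nickname has changed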
if (newRoomCreated) {
wrapper.addStatusCode(201);
}
if (newNickName != null) {
wrapper.addStatusCode(303);
for (Element item : wrapper.items) {
item.setAttribute("nick", newNickName);
}
}
}
private static Role getDefaultRole(final RoomConfig config, final Affiliation affiliation) {
Role newRole;
if (config.isRoomModerated() && (affiliation == Affiliation.none)) {
newRole = Role.visitor;
} else {
switch (affiliation) {
case admin:
newRole = Role.moderator;
break;
case member:
newRole = Role.participant;
break;
case none:
newRole = Role.participant;
break;
case outcast:
newRole = Role.none;
break;
case owner:
newRole = Role.moderator;
break;
default:
newRole = Role.none;
break;
}
}
return newRole;
}
private static Integer toInteger(String v, Integer defaultValue) {
if (v == null) {
return defaultValue;
}
try {
return Integer.parseInt(v);
} catch (Exception e) {
return defaultValue;
}
}
private final Set<Criteria> allowedElements = new HashSet<Criteria>();
/**
 * Constructs the presence module and registers the presence child elements
 * that are allowed to pass the presence filter.
 */
public PresenceModuleImpl() {
allowedElements.add(ElementCriteria.name("show"));
allowedElements.add(ElementCriteria.name("status"));
allowedElements.add(ElementCriteria.name("priority"));
allowedElements.add(ElementCriteria.xmlns("http://jabber.org/protocol/caps"));
}
/**
* @param room
* @param date
* @param senderJID
* @param nickName
*/
private void addJoinToHistory(Room room, Date date, JID senderJID, String nickName) {
HistoryProvider historyProvider = context.getHistoryProvider();
if (historyProvider != null) {
historyProvider.addJoinEvent(room, date, senderJID, nickName);
}
MucLogger mucLogger = context.getMucLogger();
if ((mucLogger != null) && room.getConfig().isLoggingEnabled()) {
mucLogger.addJoinEvent(room, date, senderJID, nickName);
}
}
/**
* @param room
* @param date
* @param senderJID
* @param nickName
*/
private void addLeaveToHistory(Room room, Date date, JID senderJID, String nickName) {
HistoryProvider historyProvider = context.getHistoryProvider();
if (historyProvider != null) {
historyProvider.addLeaveEvent(room, date, senderJID, nickName);
}
MucLogger mucLogger = context.getMucLogger();
if ((mucLogger != null) && room.getConfig().isLoggingEnabled()) {
mucLogger.addLeaveEvent(room, date, senderJID, nickName);
}
}
/**
 * Clones a presence element, optionally filtering its children to the allowed
 * set and stripping the MUC extension element.
 *
 * @param element the presence element to clone
 *
 * @return the cloned (and possibly filtered) presence element
 */
protected Element clonePresence(Element element) {
Element presence = new Element(element);
if (context.isPresenceFilterEnabled()) {
List<Element> cc = element.getChildren();
if (cc != null) {
@SuppressWarnings("rawtypes")
List<XMLNodeIfc> children = new ArrayList<XMLNodeIfc>();
for (Element c : cc) {
for (Criteria crit : allowedElements) {
if (crit.match(c)) {
children.add(c);
break;
}
}
}
presence.setChildren(children);
}
}
Element toRemove = presence.getChild("x", "http://jabber.org/protocol/muc");
if (toRemove != null) {
presence.removeChild(toRemove);
}
return presence;
}
/**
 * Removes the occupant from the room, broadcasts the 'unavailable' presence to
 * the leaving occupant and the remaining occupants, and destroys the room if it
 * becomes empty and is not persistent.
 *
 * @param room the room being left
 * @param senderJID real JID of the leaving occupant
 * @throws TigaseStringprepException on JID preparation problems
 */
@Override
public void doQuit(final Room room, final JID senderJID) throws TigaseStringprepException {
final String leavingNickname = room.getOccupantsNickname(senderJID);
final Affiliation leavingAffiliation = room.getAffiliation(leavingNickname);
final Role leavingRole = room.getRole(leavingNickname);
Element presenceElement = new Element("presence");
presenceElement.setAttribute("type", "unavailable");
Collection<JID> occupantJIDs = new ArrayList<JID>(room.getOccupantsJidsByNickname(leavingNickname));
boolean nicknameGone = room.removeOccupant(senderJID);
context.getGhostbuster().remove(senderJID, room);
room.updatePresenceByJid(senderJID, leavingNickname, null, false);
if (context.isMultiItemMode()) {
final PresenceWrapper selfPresence = PresenceWrapper.preparePresenceW(room, senderJID, presenceElement,
senderJID.getBareJID(), occupantJIDs, leavingNickname, leavingAffiliation, leavingRole);
write(selfPresence.packet);
} else {
Collection<JID> z = new ArrayList<JID>(1);
z.add(senderJID);
final PresenceWrapper selfPresence = PresenceWrapper.preparePresenceW(room, senderJID, presenceElement,
senderJID.getBareJID(), z, leavingNickname, leavingAffiliation, leavingRole);
write(selfPresence.packet);
}
// TODO if highest priority is gone, then send current highest priority
// to occupants
if (nicknameGone) {
for (String occupantNickname : room.getOccupantsNicknames()) {
for (JID occupantJid : room.getOccupantsJidsByNickname(occupantNickname)) {
presenceElement = new Element("presence");
presenceElement.setAttribute("type", "unavailable");
PresenceWrapper presence = PresenceWrapper.preparePresenceW(room, occupantJid, presenceElement,
senderJID.getBareJID(), occupantJIDs, leavingNickname, leavingAffiliation, leavingRole);
write(presence.packet);
}
}
if (room.getConfig().isLoggingEnabled()) {
addLeaveToHistory(room, new Date(), senderJID, leavingNickname);
}
} else {
occupantJIDs = new ArrayList<JID>(room.getOccupantsJidsByNickname(leavingNickname));
Element pe = room.getLastPresenceCopyByJid(senderJID.getBareJID());
for (String occupantNickname : room.getOccupantsNicknames()) {
for (JID occupantJid : room.getOccupantsJidsByNickname(occupantNickname)) {
if (context.isMultiItemMode()) {
PresenceWrapper presence = PresenceWrapper.preparePresenceW(room, occupantJid, pe.clone(),
senderJID.getBareJID(), occupantJIDs, leavingNickname, leavingAffiliation, leavingRole);
write(presence.packet);
} else {
for (JID jid : occupantJIDs) {
Collection<JID> z = new ArrayList<JID>(1);
z.add(jid);
PresenceWrapper presence = PresenceWrapper.preparePresenceW(room, occupantJid, pe.clone(),
senderJID.getBareJID(), z, leavingNickname, leavingAffiliation, leavingRole);
write(presence.packet);
}
}
}
}
}
if (room.getOccupantsCount() == 0) {
HistoryProvider historyProvider = context.getHistoryProvider();
if ((historyProvider != null) && !room.getConfig().isPersistentRoom()) {
historyProvider.removeHistory(room);
}
context.getMucRepository().leaveRoom(room);
}
}
/**
 * Returns the features advertised by this module.
 *
 * @return always null; this module advertises no features
 */
@Override
public String[] getFeatures() {
return null;
}
/**
 * Returns the criteria used to match packets handled by this module.
 *
 * @return criteria matching all presence stanzas
 */
@Override
public Criteria getModuleCriteria() {
return CRIT;
}
protected PresenceWrapper preparePresence(JID destinationJID, final Element presence, Room room, JID occupantJID,
boolean newRoomCreated, String newNickName) throws TigaseStringprepException {
final PresenceWrapper wrapper = PresenceWrapper.preparePresenceW(room, destinationJID, presence, occupantJID);
addCodes(wrapper, newRoomCreated, newNickName);
return wrapper;
}
/**
 * Processes an incoming presence stanza: creates the room if needed and dispatches
 * to entering, exiting, nickname change or availability change handling.
 *
 * @param element the presence packet to process
 *
 * @throws MUCException on MUC protocol errors
 * @throws TigaseStringprepException on JID preparation problems
 */
@Override
public void process(Packet element) throws MUCException, TigaseStringprepException {
final JID senderJID = JID.jidInstance(element.getAttributeStaticStr(Packet.FROM_ATT));
final BareJID roomJID = BareJID.bareJIDInstance(element.getAttributeStaticStr(Packet.TO_ATT));
final String nickName = getNicknameFromJid(JID.jidInstance(element.getAttributeStaticStr(Packet.TO_ATT)));
final String presenceType = element.getAttributeStaticStr(Packet.TYPE_ATT);
// final String id = element.getAttribute("id");
if ((presenceType != null) && "error".equals(presenceType)) {
if (log.isLoggable(Level.FINER)) {
log.finer("Ignoring presence with type='" + presenceType + "' from " + senderJID);
}
return;
}
if (nickName == null) {
throw new MUCException(Authorization.JID_MALFORMED);
}
try {
Room room = context.getMucRepository().getRoom(roomJID);
if ((presenceType != null) && "unavailable".equals(presenceType)) {
processExit(room, element.getElement(), senderJID);
return;
}
final String knownNickname;
final boolean roomCreated;
if (room == null) {
if (log.isLoggable(Level.INFO)) {
log.info("Creating new room '" + roomJID + "' by user " + nickName + "' <" + senderJID.toString() + ">");
}
room = context.getMucRepository().createNewRoom(roomJID, senderJID);
room.addAffiliationByJid(senderJID.getBareJID(), Affiliation.owner);
room.setRoomLocked(context.isNewRoomLocked());
roomCreated = true;
knownNickname = null;
room.getConfig().notifyConfigUpdate();
} else {
roomCreated = false;
knownNickname = room.getOccupantsNickname(senderJID);
}
final boolean probablyReEnter = element.getElement().getChild("x", "http://jabber.org/protocol/muc") != null;
if ((knownNickname != null) && !knownNickname.equals(nickName)) {
processChangeNickname(room, element.getElement(), senderJID, knownNickname, nickName);
} else if (probablyReEnter || (knownNickname == null)) {
processEntering(room, roomCreated, element.getElement(), senderJID, nickName);
} else if (knownNickname.equals(nickName)) {
processChangeAvailabilityStatus(room, element.getElement(), senderJID, knownNickname);
}
} catch (MUCException e) {
throw e;
} catch (TigaseStringprepException e) {
throw e;
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
/**
 * Updates the stored presence of an occupant and broadcasts the new availability
 * status to all occupants.
 *
 * @param room the room
 * @param presenceElement the new presence element
 * @param senderJID real JID of the occupant
 * @param nickname nickname of the occupant
 *
 * @throws TigaseStringprepException on JID preparation problems
 */
protected void processChangeAvailabilityStatus(final Room room, final Element presenceElement, final JID senderJID,
final String nickname) throws TigaseStringprepException {
if (log.isLoggable(Level.FINEST)) {
log.finest("Processing stanza " + presenceElement.toString());
}
room.updatePresenceByJid(null, nickname, clonePresence(presenceElement), false);
Element pe = room.getLastPresenceCopyByJid(senderJID.getBareJID());
sendPresenceToAllOccupants(pe, room, senderJID, false, null);
}
/**
 * Handles a nickname change request. Not implemented yet; always rejects the request.
 *
 * @param room the room
 * @param element the presence element
 * @param senderJID real JID of the occupant
 * @param senderNickname current nickname
 * @param newNickName requested nickname
 *
 * @throws MUCException always, with feature-not-implemented
 * @throws TigaseStringprepException on JID preparation problems
 */
protected void processChangeNickname(final Room room, final Element element, final JID senderJID,
final String senderNickname, final String newNickName) throws TigaseStringprepException, MUCException {
if (log.isLoggable(Level.FINEST)) {
log.finest("Processing stanza " + element.toString());
}
throw new MUCException(Authorization.FEATURE_NOT_IMPLEMENTED, "Will be done soon");
// TODO Example 23. Service Denies Room Join Because Roomnicks Are
// Locked Down (???)
}
/**
 * Handles an occupant entering the room: checks password, lock, ban and members-only
 * restrictions, adds the occupant, exchanges presences, and sends history and subject.
 *
 * @param room the room being entered
 * @param roomCreated whether the room was just created by this request
 * @param element the presence element
 * @param senderJID real JID of the entering occupant
 * @param nickname requested nickname
 *
 * @throws MUCException on MUC protocol errors
 * @throws TigaseStringprepException on JID preparation problems
 */
protected void processEntering(final Room room, final boolean roomCreated, final Element element, final JID senderJID,
final String nickname) throws MUCException, TigaseStringprepException {
if (log.isLoggable(Level.FINEST)) {
log.finest("Processing stanza " + element.toString());
}
final Affiliation affiliation = room.getAffiliation(senderJID.getBareJID());
final Element xElement = element.getChild("x", "http://jabber.org/protocol/muc");
final Element password = (xElement == null) ? null : xElement.getChild("password");
if (room.getConfig().isPasswordProtectedRoom()) {
final String psw = (password == null) ? null : password.getCData();
final String roomPassword = room.getConfig().getPassword();
if ((psw == null) || !psw.equals(roomPassword)) {
// Service Denies Access Because No Password Provided
if (log.isLoggable(Level.FINEST)) {
log.finest("Password '" + psw + "' is not match to room password '" + roomPassword + "' ");
}
throw new MUCException(Authorization.NOT_AUTHORIZED);
}
}
if (room.isRoomLocked() && (affiliation != Affiliation.owner)) {
// Service Denies Access Because Room Does Not (Yet) Exist
throw new MUCException(Authorization.ITEM_NOT_FOUND, null, "Room exists but is locked");
}
if (!affiliation.isEnterOpenRoom()) {
// Service Denies Access Because User is Banned
if (log.isLoggable(Level.INFO)) {
log.info("User " + nickname + "' <" + senderJID.toString() + "> is on rooms '" + room.getRoomJID()
+ "' blacklist");
}
throw new MUCException(Authorization.FORBIDDEN);
} else if (room.getConfig().isRoomMembersOnly() && !affiliation.isEnterMembersOnlyRoom()) {
// Service Denies Access Because User Is Not on Member List
if (log.isLoggable(Level.INFO)) {
log.info("User " + nickname + "' <" + senderJID.toString() + "> is NOT on rooms '" + room.getRoomJID()
+ "' member list.");
}
throw new MUCException(Authorization.REGISTRATION_REQUIRED);
}
final BareJID currentOccupantJid = room.getOccupantsJidByNickname(nickname);
if ((currentOccupantJid != null) && !currentOccupantJid.equals(senderJID.getBareJID())) {
// Service Denies Access Because of Nick Conflict
throw new MUCException(Authorization.CONFLICT);
}
// TODO Service Informs User that Room Occupant Limit Has Been Reached
// Service Sends Presence from Existing Occupants to New Occupant
sendPresencesToNewOccupant(room, senderJID);
final Role newRole = getDefaultRole(room.getConfig(), affiliation);
if (log.isLoggable(Level.FINEST)) {
log.finest("Occupant '" + nickname + "' <" + senderJID.toString() + "> is entering room " + room.getRoomJID()
+ " as role=" + newRole.name() + ", affiliation=" + affiliation.name());
}
room.addOccupantByJid(senderJID, nickname, newRole);
context.getGhostbuster().add(senderJID, room);
Element pe = clonePresence(element);
room.updatePresenceByJid(null, nickname, pe, true);
// if (currentOccupantJid == null) {
// Service Sends New Occupant's Presence to All Occupants
// Service Sends New Occupant's Presence to New Occupant
sendPresenceToAllOccupants(room, senderJID, roomCreated, null);
Integer maxchars = null;
Integer maxstanzas = null;
Integer seconds = null;
Date since = null;
Element hist = (xElement == null) ? null : xElement.getChild("history");
if (hist != null) {
maxchars = toInteger(hist.getAttributeStaticStr("maxchars"), null);
maxstanzas = toInteger(hist.getAttributeStaticStr("maxstanzas"), null);
seconds = toInteger(hist.getAttributeStaticStr("seconds"), null);
since = DateUtil.parse(hist.getAttributeStaticStr("since"));
}
sendHistoryToUser(room, senderJID, maxchars, maxstanzas, seconds, since);
if ((room.getSubject() != null) && (room.getSubjectChangerNick() != null) && (room.getSubjectChangeDate() != null)) {
Element message = new Element(Message.ELEM_NAME, new String[] { Packet.TYPE_ATT, Packet.FROM_ATT, Packet.TO_ATT },
new String[] { "groupchat", room.getRoomJID() + "/" + room.getSubjectChangerNick(), senderJID.toString() });
message.addChild(new Element("subject", room.getSubject()));
String stamp = DateUtil.formatDatetime(room.getSubjectChangeDate());
Element delay = new Element("delay", new String[] { "xmlns", "stamp" }, new String[] { "urn:xmpp:delay", stamp });
delay.setAttribute("jid", room.getRoomJID() + "/" + room.getSubjectChangerNick());
Element x = new Element("x", new String[] { "xmlns", "stamp" }, new String[] { "jabber:x:delay",
DateUtil.formatOld(room.getSubjectChangeDate()) });
message.addChild(delay);
message.addChild(x);
Packet p = Packet.packetInstance(message);
p.setXMLNS(Packet.CLIENT_XMLNS);
write(p);
}
if (room.isRoomLocked()) {
sendMucMessage(room, room.getOccupantsNickname(senderJID), "Room is locked. Please configure.");
}
if (roomCreated) {
StringBuilder sb = new StringBuilder();
sb.append("Welcome! You created new Multi User Chat Room.");
if (room.isRoomLocked()) {
sb.append(" Room is locked now. Configure it please!");
} else if (context.isNewRoomLocked()) {
sb.append(" Room is unlocked and ready for occupants!");
}
sendMucMessage(room, room.getOccupantsNickname(senderJID), sb.toString());
}
if (room.getConfig().isLoggingEnabled()) {
addJoinToHistory(room, new Date(), senderJID, nickname);
}
}
/**
 * Handles an 'unavailable' presence from an occupant leaving the room.
 *
 * @param room the room being left
 * @param presenceElement the presence element
 * @param senderJID real JID of the leaving occupant
 *
 * @throws MUCException if the room is unknown
 * @throws TigaseStringprepException on JID preparation problems
 */
protected void processExit(final Room room, final Element presenceElement, final JID senderJID) throws MUCException,
TigaseStringprepException {
if (log.isLoggable(Level.FINEST)) {
log.finest("Processing stanza " + presenceElement.toString());
}
if (room == null) {
throw new MUCException(Authorization.ITEM_NOT_FOUND, "Unknown room");
}
final String leavingNickname = room.getOccupantsNickname(senderJID);
if (leavingNickname == null) {
// do it quietly
// throw new MUCException(Authorization.ITEM_NOT_FOUND,
// "Unkown occupant");
return;
}
doQuit(room, senderJID);
}
/**
* @param room
* @param senderJID
* @param maxchars
* @param maxstanzas
* @param seconds
* @param since
*/
private void sendHistoryToUser(final Room room, final JID senderJID, final Integer maxchars, final Integer maxstanzas,
final Integer seconds, final Date since) {
HistoryProvider historyProvider = context.getHistoryProvider();
if (historyProvider != null) {
historyProvider.getHistoryMessages(room, senderJID, maxchars, maxstanzas, seconds, since, context.getWriter());
}
}
@Override
public void sendPresencesToNewOccupant(Room room, JID senderJID) throws TigaseStringprepException {
BareJID currentOccupantJid = senderJID.getBareJID();
for (String occupantNickname : room.getOccupantsNicknames()) {
final BareJID occupantJid = room.getOccupantsJidByNickname(occupantNickname);
if (currentOccupantJid != null && currentOccupantJid.equals(occupantJid)) {
continue;
}
Element op = room.getLastPresenceCopyByJid(occupantJid);
final Collection<JID> occupantJIDs = room.getOccupantsJidsByNickname(occupantNickname);
final BareJID occupantBareJID = room.getOccupantsJidByNickname(occupantNickname);
final Affiliation occupantAffiliation = room.getAffiliation(occupantBareJID);
final Role occupantRole = room.getRole(occupantNickname);
if (context.isMultiItemMode()) {
PresenceWrapper l = PresenceWrapper.preparePresenceW(room, senderJID, op.clone(), occupantBareJID,
occupantJIDs, occupantNickname, occupantAffiliation, occupantRole);
write(l.packet);
} else {
for (JID jid : occupantJIDs) {
Collection<JID> z = new ArrayList<JID>(1);
z.add(jid);
PresenceWrapper l = PresenceWrapper.preparePresenceW(room, senderJID, op.clone(), occupantBareJID, z,
occupantNickname, occupantAffiliation, occupantRole);
write(l.packet);
}
}
}
}
protected void sendPresenceToAllOccupants(final Element $presence, Room room, JID senderJID, boolean newRoomCreated,
String newNickName) throws TigaseStringprepException {
final String occupantNickname = room.getOccupantsNickname(senderJID);
final BareJID occupantJID = room.getOccupantsJidByNickname(occupantNickname);
final Affiliation occupantAffiliation = room.getAffiliation(occupantJID);
final Role occupantRole = room.getRole(occupantNickname);
for (String destinationNickname : room.getOccupantsNicknames()) {
for (JID destinationJID : room.getOccupantsJidsByNickname(destinationNickname)) {
if (context.isMultiItemMode()) {
PresenceWrapper presence = preparePresence(destinationJID, $presence.clone(), room, senderJID,
newRoomCreated, newNickName);
write(presence.packet);
} else {
for (JID jid : room.getOccupantsJidsByNickname(occupantNickname)) {
Collection<JID> z = new ArrayList<JID>(1);
z.add(jid);
PresenceWrapper l = PresenceWrapper.preparePresenceW(room, destinationJID, $presence.clone(),
occupantJID, z, occupantNickname, occupantAffiliation, occupantRole);
addCodes(l, newRoomCreated, newNickName);
// l.packet.getElement().setAttribute("id", "sta");
write(l.packet);
}
}
}
}
}
protected void sendPresenceToAllOccupants(Room room, JID senderJID, boolean newRoomCreated, String newNickName)
throws TigaseStringprepException {
Element presence;
if (newNickName != null) {
presence = new Element("presence");
presence.setAttribute("type", "unavailable");
} else if (room.getOccupantsNickname(senderJID) == null) {
presence = new Element("presence");
presence.setAttribute("type", "unavailable");
} else {
presence = room.getLastPresenceCopyByJid(senderJID.getBareJID());
}
sendPresenceToAllOccupants(presence, room, senderJID, newRoomCreated, newNickName);
}
}
|
package network.packets.swg.zone;
import network.packets.swg.SWGPacket;
import java.nio.ByteBuffer;
import java.util.EnumSet;
import resources.PvpFlag;
public class UpdatePvpStatusMessage extends SWGPacket {
public static final int CRC = getCrc("UpdatePvpStatusMessage");
private PvpFlag flag = PvpFlag.PLAYER;
private int playerFaction = 0;
private long objId = 0;
public UpdatePvpStatusMessage() {
}
public UpdatePvpStatusMessage(PvpFlag flag, int playerFaction, long objId) {
this.flag = flag;
this.playerFaction = playerFaction;
this.objId = objId;
}
public void decode(ByteBuffer data) {
if (!super.decode(data, CRC))
return;
EnumSet<PvpFlag> flags = PvpFlag.getFlags(getInt(data));
// An empty EnumSet would make iterator().next() throw, so guard with isEmpty() instead of a null check
flag = flags.isEmpty() ? PvpFlag.PLAYER : flags.iterator().next();
playerFaction = getInt(data);
objId = getLong(data);
}
public ByteBuffer encode() {
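// Fixed 22-byte body: short (value 4), CRC (int), pvp flag bitmask (int), player faction (int), object id (long)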
int length = 22;
ByteBuffer data = ByteBuffer.allocate(length);
addShort(data, 4);
addInt( data, CRC);
addInt( data, flag.getBitmask());
addInt( data, playerFaction);
addLong( data, objId);
return data;
}
public long getObjectId() { return objId; }
public int getPlayerFaction() { return playerFaction; }
public PvpFlag getPlayerType() { return flag; }
}
|
package org.concord.framework.otrunk.view;
import org.concord.framework.otrunk.OTObject;
import org.concord.framework.otrunk.OTObjectList;
public interface OTViewFactory {
public String getDefaultViewMode();
public void setDefaultViewMode(String mode);
public static String NO_VIEW_MODE = "_no_view_mode";
public OTView getView(OTObject otObject, Class viewInterface);
/**
* @see #getView(OTObject, OTViewEntry, String)
* @param otObject
* @param viewInterface
* @param mode
* @return
*/
public OTView getView(OTObject otObject, Class viewInterface, String mode);
/**
* This method is the same as calling getView(otObject, viewEntry, null)
* so the current mode of the viewFactory is used.
*
* @param otObject
* @param viewEntry
* @return
*/
public OTView getView(OTObject otObject, OTViewEntry viewEntry);
/**
* If the mode is null then the current mode of this viewFactory is used.
* If the mode of this viewFactory is null then the mode of the parent view
* factory is used.
* If the mode is NO_VIEW_MODE then no mode will be used and the
* viewEntry will be used directly.
*
* If the looked up view is not null and is not DEFAULT_VIEW_MODE
* then an OTViewMode is looked up with that
* name. And a mapping between this viewEntry and another viewEntry is
* searched for. If no mapping exists then, a default viewEntry is used.
* Initially there will only be one default per map but eventually
* information from the otObject and the viewEntry could be used to
* determine an appropriate default.
*
* @param otObject
* @param viewEntry
* @param mode
* @return
*/
public OTView getView(OTObject otObject, OTViewEntry viewEntry, String mode);
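// Illustrative use (a sketch, not from the original source): passing NO_VIEW_MODE bypasses the
// mode mapping described above, e.g. viewFactory.getView(otObject, viewEntry, OTViewFactory.NO_VIEW_MODE),
// while passing null falls back to the current mode of this factory or, if that is null, of its parent.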
/**
 * If all the use cases had been properly taken care of, this shouldn't be
 * needed. The viewContext should only be available to views which
 * have been instantiated by a view factory. However, there are still
 * some cases where views are created manually.
 *
 * @return the view context
 */
public OTViewContext getViewContext();
/**
* This will allow other viewBundles to add their view entries to existing
* set of view entries. If the view entries are added to the top of the list,
* they will effectively override existing view entries for the same class.
*
* @param entry the view entry to be added
* @param addToTop whether the entry should be added to the top of the list
*/
public void addViewEntry(OTViewEntry entry, boolean addToTop);
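// Illustrative use (a sketch, not from the original source): addViewEntry(entry, true) places the
// entry at the top of the list so it overrides an existing entry registered for the same class.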
/**
 * This returns the names of the view modes offered by the view bundle.
 *
 * @return array of view mode names
 */
public String [] getModeNames();
}
|
package org.opencms.ui.apps.sitemanager;
import org.opencms.ade.configuration.CmsADEManager;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsProject;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsResourceFilter;
import org.opencms.file.types.CmsResourceTypeFolderSubSitemap;
import org.opencms.file.types.CmsResourceTypeJsp;
import org.opencms.file.types.I_CmsResourceType;
import org.opencms.i18n.CmsLocaleManager;
import org.opencms.main.CmsException;
import org.opencms.main.CmsIllegalArgumentException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.relations.CmsRelation;
import org.opencms.relations.CmsRelationFilter;
import org.opencms.security.CmsOrganizationalUnit;
import org.opencms.site.CmsSSLMode;
import org.opencms.site.CmsSite;
import org.opencms.site.CmsSiteMatcher;
import org.opencms.ui.A_CmsUI;
import org.opencms.ui.CmsVaadinUtils;
import org.opencms.ui.apps.Messages;
import org.opencms.ui.components.CmsBasicDialog;
import org.opencms.ui.components.CmsRemovableFormRow;
import org.opencms.ui.components.CmsResourceInfo;
import org.opencms.ui.components.editablegroup.CmsEditableGroup;
import org.opencms.ui.components.editablegroup.I_CmsEditableGroupRow;
import org.opencms.ui.components.fileselect.CmsPathSelectField;
import org.opencms.ui.report.CmsReportWidget;
import org.opencms.util.CmsFileUtil;
import org.opencms.util.CmsMacroResolver;
import org.opencms.util.CmsStringUtil;
import org.opencms.util.CmsUUID;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import com.google.common.base.Supplier;
import com.vaadin.event.FieldEvents.BlurEvent;
import com.vaadin.event.FieldEvents.BlurListener;
import com.vaadin.server.StreamResource;
import com.vaadin.server.UserError;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.Component;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.Image;
import com.vaadin.ui.Panel;
import com.vaadin.ui.TabSheet;
import com.vaadin.ui.UI;
import com.vaadin.v7.data.Item;
import com.vaadin.v7.data.Property.ValueChangeEvent;
import com.vaadin.v7.data.Property.ValueChangeListener;
import com.vaadin.v7.data.Validator;
import com.vaadin.v7.data.util.BeanItemContainer;
import com.vaadin.v7.data.util.IndexedContainer;
import com.vaadin.v7.shared.ui.combobox.FilteringMode;
import com.vaadin.v7.ui.AbstractField;
import com.vaadin.v7.ui.CheckBox;
import com.vaadin.v7.ui.ComboBox;
import com.vaadin.v7.ui.TextField;
import com.vaadin.v7.ui.Upload;
import com.vaadin.v7.ui.Upload.Receiver;
import com.vaadin.v7.ui.Upload.SucceededEvent;
import com.vaadin.v7.ui.Upload.SucceededListener;
import com.vaadin.v7.ui.VerticalLayout;
/**
* Class for the Form to edit or add a site.<p>
*/
public class CmsEditSiteForm extends CmsBasicDialog {
/**
* Bean for the ComboBox to edit the position.<p>
*/
public class PositionComboBoxElementBean {
/**Position of site in List. */
private float m_position;
/**Title of site to show. */
private String m_title;
/**
* Constructor. <p>
*
* @param title of site
* @param position of site
*/
public PositionComboBoxElementBean(String title, float position) {
m_position = position;
m_title = title;
}
/**
* Getter for position.<p>
*
* @return float position
*/
public float getPosition() {
return m_position;
}
/**
* Getter for title.<p>
*
* @return String title
*/
public String getTitle() {
return m_title;
}
}
/**
 * Validator for the alias field.<p>
*/
class AliasValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = 9014118214418269697L;
/**
* @see com.vaadin.v7.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
String enteredServer = (String)value;
if (enteredServer == null) {
return;
}
if (enteredServer.isEmpty()) {
return;
}
if (m_alreadyUsedURL.contains(new CmsSiteMatcher(enteredServer))) {
if (!OpenCms.getSiteManager().getSites().get(new CmsSiteMatcher(enteredServer)).equals(m_site)) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SERVER_ALREADYUSED_1, enteredServer));
}
}
if ((new CmsSiteMatcher(enteredServer)).equals(new CmsSiteMatcher(getFieldServer()))) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SERVER_EQUAL_ALIAS_0));
}
if (isDoubleAlias(enteredServer)) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SERVER_ALREADYUSED_1, enteredServer));
}
}
}
/**
* Receiver class for upload of favicon.<p>
*/
class FavIconReceiver implements Receiver, SucceededListener {
/**vaadin serial id. */
private static final long serialVersionUID = 688021741970679734L;
/**
* @see com.vaadin.ui.Upload.Receiver#receiveUpload(java.lang.String, java.lang.String)
*/
public OutputStream receiveUpload(String filename, String mimeType) {
m_os.reset();
if (!mimeType.startsWith("image")) {
return new ByteArrayOutputStream(0);
}
return m_os;
}
/**
* @see com.vaadin.ui.Upload.SucceededListener#uploadSucceeded(com.vaadin.ui.Upload.SucceededEvent)
*/
public void uploadSucceeded(SucceededEvent event) {
if (m_os.size() <= 1) {
m_imageCounter = 0;
m_fieldUploadFavIcon.setComponentError(
new UserError(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FAVICON_MIME_0)));
setFaviconIfExist();
return;
}
if (m_os.size() > 4096) {
m_fieldUploadFavIcon.setComponentError(
new UserError(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FAVICON_SIZE_0)));
m_imageCounter = 0;
setFaviconIfExist();
return;
}
m_imageCounter++;
setCurrentFavIcon(m_os.toByteArray());
}
}
/**
*Validator for Folder Name field.<p>
*/
class FolderPathValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = 2269520781911597613L;
/**
* @see com.vaadin.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
String enteredName = (String)value;
if (FORBIDDEN_FOLDER_NAMES.contains(enteredName)) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FOLDERNAME_FORBIDDEN_1, enteredName));
}
// if (m_alreadyUsedFolderPath.contains(getParentFolder() + enteredName)) {
if (OpenCms.getSiteManager().getSiteForRootPath(getParentFolder() + enteredName) != null) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FOLDERNAME_ALREADYUSED_1, enteredName));
}
try {
CmsResource.checkResourceName(enteredName);
} catch (CmsIllegalArgumentException e) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FOLDERNAME_EMPTY_0));
}
}
}
/**
* Validator for the parent field.<p>
*/
class ParentFolderValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = 5217828150841769662L;
/**
* @see com.vaadin.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
try {
m_clonedCms.getRequestContext().setSiteRoot("");
m_clonedCms.readResource(getParentFolder());
} catch (CmsException e) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_PARENTFOLDER_NOT_EXIST_0));
}
if (OpenCms.getSiteManager().getSiteForRootPath(
CmsFileUtil.removeTrailingSeparator(getParentFolder())) != null) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(
Messages.GUI_SITE_FOLDERNAME_ALREADYUSED_1,
CmsFileUtil.removeTrailingSeparator(getParentFolder())));
}
if (!(getParentFolder()).startsWith(CmsSiteManager.PATH_SITES)) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FOLDERNAME_WRONGPARENT_0));
}
if (!getSiteTemplatePath().isEmpty()) {
if (ensureFoldername(getParentFolder()).equals(ensureFoldername(getSiteTemplatePath()))) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FOLDERNAME_EQUAL_SITETEMPLATE_0));
}
}
}
}
/**
 * Validator for the OU selection.<p>
*/
class SelectOUValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = -911831798529729185L;
/**
* @see com.vaadin.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
String OU = (String)value;
if (OU.equals("/")) {
return;
}
if (OU.split("/").length < 2) {
return; //ou is under root
}
OU = OU.split("/")[0] + "/";
if (getParentFolder().isEmpty() | getFieldFolder().isEmpty()) {
return; //not ok, but gets caught in another validator
}
String rootPath = "/" + ensureFoldername(getParentFolder()) + ensureFoldername(getFieldFolder());
boolean ok = false;
try {
List<CmsResource> res = OpenCms.getOrgUnitManager().getResourcesForOrganizationalUnit(m_clonedCms, OU);
for (CmsResource resource : res) {
if (rootPath.startsWith(resource.getRootPath())) {
ok = true;
}
}
} catch (CmsException e) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_OU_INVALID_0));
}
if (!ok) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_OU_INVALID_0));
}
}
}
/**
* Validator for parent OU.<p>
*/
class SelectParentOUValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = -911831798529729185L;
/**
* @see com.vaadin.v7.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
String parentOU = (String)value;
if (parentOU.equals("/")) {
return;
}
if (getParentFolder().isEmpty() | getFieldFolder().isEmpty()) {
return; //not ok, but gets caught in another validator
}
String rootPath = "/" + ensureFoldername(getParentFolder()) + ensureFoldername(getFieldFolder());
boolean ok = false;
try {
List<CmsResource> res = OpenCms.getOrgUnitManager().getResourcesForOrganizationalUnit(
m_clonedCms,
parentOU);
for (CmsResource resource : res) {
if (rootPath.startsWith(resource.getRootPath())) {
ok = true;
}
}
} catch (CmsException e) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_PARENTOU_INVALID_0));
}
if (!ok) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_PARENTOU_INVALID_0));
}
}
}
/**
*Validator for server field.<p>
*/
class ServerValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = 9014118214418269697L;
/**
* @see com.vaadin.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
String enteredServer = (String)value;
if (enteredServer.isEmpty()) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SERVER_EMPTY_0));
}
if (m_alreadyUsedURL.contains(new CmsSiteMatcher(enteredServer))) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SERVER_ALREADYUSED_1, enteredServer));
}
}
}
/**
 * Validator for the site root (used when editing a site; fails for broken sites).<p>
*/
class SiteRootValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = 7499390905843603642L;
/**
* @see com.vaadin.v7.data.Validator#validate(java.lang.Object)
*/
@Deprecated
public void validate(Object value) throws InvalidValueException {
CmsSite parentSite = m_manager.getElement(CmsFileUtil.removeTrailingSeparator((String)value));
if (parentSite != null) {
if (!parentSite.equals(m_site)) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(
Messages.GUI_SITE_FOLDERNAME_ALREADYUSED_1,
CmsFileUtil.removeTrailingSeparator((String)value)));
}
}
CmsProject currentProject = m_clonedCms.getRequestContext().getCurrentProject();
try {
m_clonedCms.getRequestContext().setCurrentProject(
m_clonedCms.readProject(CmsProject.ONLINE_PROJECT_ID));
m_clonedCms.readResource((String)value);
} catch (CmsException e) {
m_clonedCms.getRequestContext().setCurrentProject(currentProject);
if (!m_clonedCms.existsResource((String)value)) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SITEROOT_WRONG_0));
}
}
m_clonedCms.getRequestContext().setCurrentProject(currentProject);
}
}
/**
* Validator for Site Template selection field.<p>
*/
class SiteTemplateValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = -8730991818750657154L;
/**
* @see com.vaadin.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
String pathToCheck = (String)value;
if (pathToCheck == null) {
return;
}
if (pathToCheck.isEmpty()) { //Empty -> no template chosen, ok
return;
}
if (!getParentFolder().isEmpty() & !getFieldFolder().isEmpty()) {
String rootPath = "/" + ensureFoldername(getParentFolder()) + ensureFoldername(getFieldFolder());
if (m_clonedCms.existsResource(rootPath)) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SITETEMPLATE_OVERWRITE_0));
}
}
try {
m_clonedCms.readResource(pathToCheck + CmsADEManager.CONTENT_FOLDER_NAME);
} catch (CmsException e) {
throw new InvalidValueException(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SITETEMPLATE_INVALID_0));
}
}
}
/**
* Validator for the title field.<p>
*/
class TitleValidator implements Validator {
/**vaadin serial id.*/
private static final long serialVersionUID = 7878441125879949490L;
/**
* @see com.vaadin.data.Validator#validate(java.lang.Object)
*/
public void validate(Object value) throws InvalidValueException {
if (CmsStringUtil.isEmptyOrWhitespaceOnly((String)value)) {
throw new InvalidValueException(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_TITLE_EMPTY_0));
}
}
}
/** The module name constant. */
public static final String MODULE_NAME = "org.opencms.ui.apps.sitemanager";
/** Module parameter constant for the web server script. */
public static final String PARAM_OU_DESCRIPTION = "oudescription";
/**List of all forbidden folder names as new site-roots.*/
static final List<String> FORBIDDEN_FOLDER_NAMES = new ArrayList<String>() {
private static final long serialVersionUID = 8074588073232610426L;
{
add("system");
add(OpenCms.getSiteManager().getSharedFolder().replaceAll("/", ""));
}
};
/** The logger for this class. */
static Log LOG = CmsLog.getLog(CmsEditSiteForm.class.getName());
/**vaadin serial id.*/
private static final long serialVersionUID = -1011525709082939562L;
/**Flag to block change events. */
protected boolean m_blockChange;
/**List of all folder names already used for sites. */
List<String> m_alreadyUsedFolderPath = new ArrayList<String>();
/**List of all urls already used for sites.*/
Set<CmsSiteMatcher> m_alreadyUsedURL = new HashSet<CmsSiteMatcher>();
/**cloned cms object.*/
CmsObject m_clonedCms;
/**vaadin component.*/
ComboBox m_fieldSelectOU;
/**vaadin component.*/
ComboBox m_fieldSelectParentOU;
/**vaadin component. */
Upload m_fieldUploadFavIcon;
/**Needed to check if favicon was changed. */
int m_imageCounter;
/** The site manager instance.*/
CmsSiteManager m_manager;
/**OutputStream to store the uploaded favicon temporarily. */
ByteArrayOutputStream m_os = new ByteArrayOutputStream(5500);
/**current site which is supposed to be edited, null if site should be added.*/
CmsSite m_site;
/**vaadin component.*/
TabSheet m_tab;
/**button to add parameter.*/
private Button m_addParameter;
/**vaadin component.*/
private VerticalLayout m_aliases;
/**Edit group for workplace servers.*/
private CmsEditableGroup m_aliasGroup;
/**automatically set folder name.*/
private String m_autoSetFolderName;
/**Map to connect vaadin text fields with bundle keys.*/
private Map<TextField, String> m_bundleComponentKeyMap;
/**vaadin component.*/
private FormLayout m_bundleValues;
/**vaadin component.*/
private Button m_cancel;
/**vaadin component.*/
private CheckBox m_fieldCreateOU;
/**vaadin component.*/
private CmsPathSelectField m_fieldErrorPage;
/**vaadin component.*/
private CheckBox m_fieldExclusiveError;
/**vaadin component.*/
private CheckBox m_fieldExclusiveURL;
/**vaadin component. */
private Image m_fieldFavIcon;
/**vaadin component. */
private CheckBox m_fieldKeepTemplate;
/**vaadin component.*/
private CmsPathSelectField m_fieldLoadSiteTemplate;
/**vaadin component.*/
private ComboBox m_fieldPosition;
/**vaadin component.*/
private TextField m_fieldSecureServer;
/**vaadin component.*/
private CheckBox m_fieldWebServer;
/**vaadin component. */
private Panel m_infoSiteRoot;
/**boolean indicates if folder name was changed by user.*/
private boolean m_isFolderNameTouched;
/**vaadin component.*/
private Button m_ok;
/**Click listener for ok button. */
private Button.ClickListener m_okClickListener;
/**vaadin component.*/
private FormLayout m_parameter;
/**Panel holding the report widget.*/
private Panel m_report;
/**Vaadin component. */
private ComboBox m_simpleFieldEncryption;
/**vaadin component.*/
private TextField m_simpleFieldFolderName;
/**vaadin component.*/
private CmsPathSelectField m_simpleFieldParentFolderName;
/**vaadin component.*/
private TextField m_simpleFieldServer;
/**vaadin component.*/
private CmsPathSelectField m_simpleFieldSiteRoot;
/**vaadin component.*/
private ComboBox m_simpleFieldTemplate;
/**vaadin component.*/
private TextField m_simpleFieldTitle;
/**List of templates. */
private List<CmsResource> m_templates;
/**Layout for the report widget. */
private FormLayout m_threadReport;
/**
* Public constructor.<p>
*
* @param cms CmsObject
* @param site Site to be shown / edited
* @param manager calling the dialog
* @param editable flag indicates if fields should be editable
*/
public CmsEditSiteForm(CmsObject cms, CmsSite site, CmsSiteManager manager, boolean editable) {
m_clonedCms = cms;
m_site = site;
m_manager = manager;
CmsVaadinUtils.readAndLocalizeDesign(this, CmsVaadinUtils.getWpMessagesForCurrentLocale(), null);
setUpComboBoxPosition();
setUpComboBoxTemplate();
setUpComboBoxSSL();
setUpOUComboBox(m_fieldSelectOU);
setUpOUComboBox(m_fieldSelectParentOU);
m_tab.setHeight("400px");
m_report.setVisible(false);
m_ok.setVisible(editable);
m_infoSiteRoot.setVisible(false);
if (editable) {
m_aliasGroup = new CmsEditableGroup(m_aliases, new Supplier<Component>() {
public Component get() {
Component c = createAliasComponent("", true);
c.setEnabled(false);
return c;
}
}, CmsVaadinUtils.getMessageText(Messages.GUI_SITE_ADD_ALIAS_0));
m_aliasGroup.init();
}
setFieldsForSite(editable);
m_cancel.addClickListener(e -> CmsVaadinUtils.getWindow(CmsEditSiteForm.this).close());
}
/**
* Constructor.<p>
* Use this to create a new site.<p>
*
* @param manager the site manager instance
* @param cms the CmsObject
*/
public CmsEditSiteForm(CmsObject cms, CmsSiteManager manager) {
m_isFolderNameTouched = false;
m_blockChange = true;
m_autoSetFolderName = "";
m_clonedCms = cms;
List<CmsSite> allSites = manager.getAllElements();
allSites.addAll(manager.getCorruptedSites());
for (CmsSite site : allSites) {
if (site.getSiteMatcher() != null) {
m_alreadyUsedFolderPath.add(site.getSiteRoot());
}
}
m_alreadyUsedURL.addAll(OpenCms.getSiteManager().getSites().keySet());
CmsVaadinUtils.readAndLocalizeDesign(this, CmsVaadinUtils.getWpMessagesForCurrentLocale(), null);
m_tab.setHeight("400px");
m_infoSiteRoot.setVisible(false);
m_simpleFieldSiteRoot.setVisible(false);
if (!OpenCms.getSiteManager().isConfigurableWebServer()) {
m_fieldWebServer.setVisible(false);
m_fieldWebServer.setValue(Boolean.TRUE);
}
m_fieldKeepTemplate.setVisible(false);
m_fieldKeepTemplate.setValue(Boolean.FALSE);
m_simpleFieldParentFolderName.setValue(CmsSiteManager.PATH_SITES);
m_simpleFieldParentFolderName.setUseRootPaths(true);
m_simpleFieldParentFolderName.setCmsObject(m_clonedCms);
m_simpleFieldParentFolderName.requireFolder();
m_simpleFieldParentFolderName.setResourceFilter(CmsResourceFilter.DEFAULT_FOLDERS);
m_simpleFieldParentFolderName.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = 4043563040462776139L;
public void valueChange(ValueChangeEvent event) {
try {
String folderPath = m_simpleFieldParentFolderName.getValue();
if (CmsResourceTypeFolderSubSitemap.TYPE_SUBSITEMAP.equals(
OpenCms.getResourceManager().getResourceType(
m_clonedCms.readResource(folderPath)).getTypeName())) {
String newFolderName = folderPath.split("/")[folderPath.split("/").length - 1];
m_simpleFieldFolderName.setValue(newFolderName);
m_isFolderNameTouched = true;
if (m_simpleFieldTitle.isEmpty()) {
CmsProperty title = m_clonedCms.readPropertyObject(
m_clonedCms.readResource(folderPath),
"Title",
false);
if (!CmsProperty.getNullProperty().equals(title)) {
m_simpleFieldTitle.setValue(title.getValue());
}
}
setTemplateFieldForSiteroot(folderPath);
m_simpleFieldParentFolderName.setValue(
m_simpleFieldParentFolderName.getValue().substring(
0,
folderPath.length() - 1 - newFolderName.length()));
}
} catch (CmsException e) {
// Resource was not found. Not ok, but will be validated later
}
setUpOUComboBox(m_fieldSelectParentOU);
setUpOUComboBox(m_fieldSelectOU);
}
});
m_manager = manager;
m_addParameter.addClickListener(new ClickListener() {
private static final long serialVersionUID = 6814134727761004218L;
public void buttonClick(ClickEvent event) {
addParameter(null);
}
});
m_okClickListener = new ClickListener() {
private static final long serialVersionUID = 6814134727761004218L;
public void buttonClick(ClickEvent event) {
setupValidators();
setupValidatorAliase();
if (isValidInputSimple() & isValidInputSiteTemplate() & isValidAliase()) {
submit();
return;
}
if (isValidInputSimple()) {
if (isValidAliase()) {
m_tab.setSelectedTab(4);
return;
}
m_tab.setSelectedTab(3);
return;
}
m_tab.setSelectedTab(0);
}
};
m_ok.addClickListener(m_okClickListener);
m_cancel.addClickListener(new ClickListener() {
private static final long serialVersionUID = -276802394623141951L;
public void buttonClick(ClickEvent event) {
closeDailog(false);
}
});
m_fieldCreateOU.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = -2837270577662919541L;
public void valueChange(ValueChangeEvent event) {
toggleSelectOU();
}
});
setUpComboBoxPosition();
setUpComboBoxTemplate();
setUpComboBoxSSL();
setUpOUComboBox(m_fieldSelectOU);
setUpOUComboBox(m_fieldSelectParentOU);
m_fieldSecureServer.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = -2837270577662919541L;
public void valueChange(ValueChangeEvent event) {
toggleSecureServer();
}
});
m_fieldExclusiveURL.setEnabled(false);
m_fieldExclusiveError.setEnabled(false);
Receiver uploadReceiver = new FavIconReceiver();
m_fieldWebServer.setValue(Boolean.TRUE);
m_fieldUploadFavIcon.setReceiver(uploadReceiver);
m_fieldUploadFavIcon.setButtonCaption(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_SELECT_FILE_0));
m_fieldUploadFavIcon.setImmediate(true);
m_fieldUploadFavIcon.addSucceededListener((SucceededListener)uploadReceiver);
m_fieldUploadFavIcon.setCaption(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FAVICON_NEW_0));
m_fieldFavIcon.setVisible(false);
m_simpleFieldTitle.addBlurListener(new BlurListener() {
private static final long serialVersionUID = -4147179568264310325L;
public void blur(BlurEvent event) {
if (!getFieldTitle().isEmpty() & !isFolderNameTouched()) {
String niceName = OpenCms.getResourceManager().getNameGenerator().getUniqueFileName(
m_clonedCms,
"/sites",
getFieldTitle().toLowerCase());
setFolderNameState(niceName);
setFieldFolder(niceName);
}
}
});
m_simpleFieldFolderName.addBlurListener(new BlurListener() {
private static final long serialVersionUID = 2080245499551324408L;
public void blur(BlurEvent event) {
checkTemplate();
setFolderNameState(null);
}
});
m_fieldLoadSiteTemplate.addValidator(new SiteTemplateValidator());
m_fieldLoadSiteTemplate.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = -5859547073423161234L;
public void valueChange(ValueChangeEvent event) {
resetFields();
loadMessageBundle();
m_manager.centerWindow();
}
});
m_fieldLoadSiteTemplate.setUseRootPaths(true);
m_fieldLoadSiteTemplate.setCmsObject(m_clonedCms);
m_fieldLoadSiteTemplate.requireFolder();
m_fieldLoadSiteTemplate.setResourceFilter(CmsResourceFilter.ONLY_VISIBLE_NO_DELETED.addRequireFolder());
m_fieldSelectParentOU.setEnabled(false);
m_report.setVisible(false);
m_blockChange = false;
m_aliasGroup = new CmsEditableGroup(m_aliases, new Supplier<Component>() {
public Component get() {
return createAliasComponent("", true);
}
}, CmsVaadinUtils.getMessageText(Messages.GUI_SITE_ADD_ALIAS_0));
m_aliasGroup.init();
}
/**
* Constructor.<p>
* Used to edit existing site.<p>
*
* @param manager the manager instance
* @param siteRoot of site to edit
* @param cms the CmsObject
*/
public CmsEditSiteForm(CmsObject cms, CmsSiteManager manager, String siteRoot) {
this(cms, manager);
m_site = manager.getElement(siteRoot);
setFieldsForSite(true);
}
/**
 * Creates an IndexedContainer for use in SSL mode selection widgets.<p>
*
* @param captionProp the name of the property to use for captions
* @param includeOldStyle true if the old-style secure server mode should be included
* @param currentValue the current value of the mode (may be null)
*
* @return the container with the SSL mode items
*/
protected static IndexedContainer getSSLModeContainer(
String captionProp,
boolean includeOldStyle,
CmsSSLMode currentValue) {
IndexedContainer res = new IndexedContainer();
res.addContainerProperty(captionProp, String.class, "");
boolean isLetsEncrypt = currentValue == CmsSSLMode.LETS_ENCRYPT;
boolean letsEncryptConfigured = (OpenCms.getLetsEncryptConfig() != null)
&& OpenCms.getLetsEncryptConfig().isValidAndEnabled();
boolean skipLetsEncrypt = !letsEncryptConfigured && !isLetsEncrypt;
for (CmsSSLMode mode : CmsSSLMode.availableModes(includeOldStyle, !skipLetsEncrypt)) {
Item item = res.addItem(mode);
item.getItemProperty(captionProp).setValue(mode.getLocalizedMessage());
}
return res;
}
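// Illustrative use (a sketch, not from the original source): filling an SSL mode combo box, e.g.
//   ComboBox box = new ComboBox();
//   box.setContainerDataSource(getSSLModeContainer("caption", false, null));
//   box.setItemCaptionPropertyId("caption");
// where "caption" is the caption property name this helper registers on the container.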
/**
* Returns a Folder Name for a given site-root.<p>
*
* @param siteRoot site root of a site
* @return Folder Name
*/
static String getFolderNameFromSiteRoot(String siteRoot) {
return siteRoot.split("/")[siteRoot.split("/").length - 1];
}
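// Example: getFolderNameFromSiteRoot("/sites/example") returns "example".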
/**
 * Checks if the site root exists in both the online and the offline repository.<p>
*/
protected void checkOnOfflineSiteRoot() {
try {
CmsObject cmsOnline = OpenCms.initCmsObject(m_clonedCms);
cmsOnline.getRequestContext().setCurrentProject(m_clonedCms.readProject(CmsProject.ONLINE_PROJECT_ID));
String rootPath = m_simpleFieldSiteRoot.getValue();
if (cmsOnline.existsResource(rootPath) & !m_clonedCms.existsResource(rootPath)) {
m_ok.setEnabled(false);
m_infoSiteRoot.setVisible(true);
return;
}
if (!m_site.getSiteRootUUID().isNullUUID()) {
if (m_clonedCms.existsResource(m_site.getSiteRootUUID()) & !m_clonedCms.existsResource(rootPath)) {
m_ok.setEnabled(false);
m_infoSiteRoot.setVisible(true);
return;
}
}
} catch (CmsException e) {
LOG.error("Can not initialize CmsObject", e);
}
m_ok.setEnabled(true);
m_infoSiteRoot.setVisible(false);
}
/**
* Checks the Template Property of the site root and fills the form field.<p>
*/
protected void checkTemplate() {
if (CmsStringUtil.isEmptyOrWhitespaceOnly(m_simpleFieldFolderName.getValue())) {
return;
}
if (!m_clonedCms.existsResource(getSiteRoot())) {
return;
}
try {
String templateValue = m_clonedCms.readPropertyObject(
getSiteRoot(),
CmsPropertyDefinition.PROPERTY_TEMPLATE,
false).getValue();
m_simpleFieldTemplate.addItem(templateValue);
m_simpleFieldTemplate.setValue(templateValue);
} catch (CmsException e) {
}
}
/**
* Creates field for aliases.<p>
*
* @param alias url
* @param red redirect
* @return component
*/
protected FormLayout createAliasComponent(String alias, boolean red) {
FormLayout layout = new FormLayout();
TextField field = new TextField(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_ALIAS_0));
field.setWidth("100%");
field.setValue(alias);
field.setDescription(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_ALIAS_HELP_0));
CheckBox redirect = new CheckBox(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_ALIAS_REDIRECT_0), red);
redirect.setDescription(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_ALIAS_REDIRECT_HELP_0));
layout.addComponent(field);
layout.addComponent(redirect);
return layout;
}
/**
* Reads server field.<p>
*
* @return server as string
*/
protected String getFieldServer() {
return m_simpleFieldServer.getValue();
}
/**
* Handles SSL changes.<p>
*/
protected void handleSSLChange() {
String toBeReplaced = "http:";
String newString = "https:";
CmsSSLMode mode = (CmsSSLMode)m_simpleFieldEncryption.getValue();
if (mode == null) {
// mode is null if this is triggered by setContainerDataSource
return;
}
if (mode.equals(CmsSSLMode.NO) | mode.equals(CmsSSLMode.SECURE_SERVER)) {
toBeReplaced = "https:";
newString = "http:";
}
m_simpleFieldServer.setValue(m_simpleFieldServer.getValue().replaceAll(toBeReplaced, newString));
m_fieldSecureServer.setVisible(mode.equals(CmsSSLMode.SECURE_SERVER));
m_fieldExclusiveError.setVisible(mode.equals(CmsSSLMode.SECURE_SERVER));
m_fieldExclusiveURL.setVisible(mode.equals(CmsSSLMode.SECURE_SERVER));
}
/**
* Sets the template field depending on current set site root field(s).<p>
*/
protected void setTemplateField() {
setTemplateFieldForSiteroot(getSiteRoot());
}
/**
* Add a given parameter to the form layout.<p>
*
* @param parameter parameter to add to form
*/
void addParameter(String parameter) {
TextField textField = new TextField();
if (parameter != null) {
textField.setValue(parameter);
}
CmsRemovableFormRow<TextField> row = new CmsRemovableFormRow<TextField>(
textField,
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_REMOVE_PARAMETER_0));
row.setCaption(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_PARAMETER_0));
row.setDescription(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_PARAMETER_HELP_0));
m_parameter.addComponent(row);
}
/**
* Closes the dialog.<p>
*
* @param updateTable <code>true</code> to update the site table
*/
void closeDailog(boolean updateTable) {
m_manager.closeDialogWindow(updateTable);
}
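/**
 * Ensures the given resource name is in folder form: a trailing '/' is appended if missing
 * and a leading '/' is stripped; e.g. "/sites/example" becomes "sites/example/".
 *
 * @param resourcename the resource name to normalize
 * @return the normalized folder name, or an empty string for empty input
 */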
String ensureFoldername(String resourcename) {
if (CmsStringUtil.isEmpty(resourcename)) {
return "";
}
if (!CmsResource.isFolder(resourcename)) {
resourcename = resourcename.concat("/");
}
if (resourcename.charAt(0) == '/') {
resourcename = resourcename.substring(1);
}
return resourcename;
}
/**
* Returns the value of the site-folder.<p>
*
* @return String of folder path.
*/
String getFieldFolder() {
return m_simpleFieldFolderName.getValue();
}
/**
* Reads title field.<p>
*
* @return title as string.
*/
String getFieldTitle() {
return m_simpleFieldTitle.getValue();
}
/**
* Returns parent folder.<p>
*
* @return parent folder as string
*/
String getParentFolder() {
return m_simpleFieldParentFolderName.getValue();
}
/**
* Returns the value of the site template field.<p>
*
* @return string root path
*/
String getSiteTemplatePath() {
return m_fieldLoadSiteTemplate.getValue();
}
/**
* Checks if an alias was entered twice.<p>
*
* @param aliasName to check
 * @return true if it was entered more than once
*/
boolean isDoubleAlias(String aliasName) {
CmsSiteMatcher testAlias = new CmsSiteMatcher(aliasName);
int count = 0;
for (Component c : m_aliases) {
if (c instanceof CmsRemovableFormRow<?>) {
String alName = (String)((CmsRemovableFormRow<? extends AbstractField<?>>)c).getInput().getValue();
if (testAlias.equals(new CmsSiteMatcher(alName))) {
count++;
}
}
}
return count > 1;
}
/**
* Checks if folder name was touched.<p>
*
 * Considered touched if an existing site is edited or the value of the folder name was changed by the user.<p>
 *
 * @return true if the folder value was set by the user or comes from an existing site, and should not be changed by the title listener
*/
boolean isFolderNameTouched() {
if (m_site != null) {
return true;
}
if (m_autoSetFolderName.equals(getFieldFolder())) {
return false;
}
return m_isFolderNameTouched;
}
/**
* Are the aliases valid?<p>
*
* @return true if ok
*/
boolean isValidAliase() {
boolean ret = true;
for (I_CmsEditableGroupRow row : m_aliasGroup.getRows()) {
FormLayout layout = (FormLayout)(row.getComponent());
TextField field = (TextField)layout.getComponent(0);
ret = ret & field.isValid();
}
return ret;
}
/**
* Checks if all required fields are set correctly at first Tab.<p>
*
* @return true if all inputs are valid.
*/
boolean isValidInputSimple() {
return (m_simpleFieldFolderName.isValid()
& m_simpleFieldServer.isValid()
& m_simpleFieldTitle.isValid()
& m_simpleFieldParentFolderName.isValid()
& m_fieldSelectOU.isValid()
& m_simpleFieldSiteRoot.isValid());
}
/**
* Checks if all required fields are set correctly at site template tab.<p>
*
* @return true if all inputs are valid.
*/
boolean isValidInputSiteTemplate() {
return (m_fieldLoadSiteTemplate.isValid() & m_fieldSelectParentOU.isValid());
}
/**
* Loads message bundle from bundle defined inside the site-template which is used to create new site.<p>
*/
void loadMessageBundle() {
//Check if chosen site template is valid and not empty
if (!m_fieldLoadSiteTemplate.isValid()
|| m_fieldLoadSiteTemplate.isEmpty()
|| !CmsSiteManager.isFolderWithMacros(m_clonedCms, m_fieldLoadSiteTemplate.getValue())) {
return;
}
try {
m_bundleComponentKeyMap = new HashMap<TextField, String>();
//Get resource of the descriptor.
CmsResource descriptor = m_clonedCms.readResource(
m_fieldLoadSiteTemplate.getValue()
+ CmsSiteManager.MACRO_FOLDER
+ "/"
+ CmsSiteManager.BUNDLE_NAME
+ "_desc");
//Read related bundle
Properties resourceBundle = getLocalizedBundle();
Map<String, String[]> bundleKeyDescriptorMap = CmsMacroResolver.getBundleMapFromResources(
resourceBundle,
descriptor,
m_clonedCms);
for (String key : bundleKeyDescriptorMap.keySet()) {
//Create TextField
TextField field = new TextField();
field.setCaption(bundleKeyDescriptorMap.get(key)[0]);
field.setValue(bundleKeyDescriptorMap.get(key)[1]);
field.setWidth("100%");
//Add vaadin component to UI and keep related key in HashMap
m_bundleValues.addComponent(field);
m_bundleComponentKeyMap.put(field, key);
}
} catch (CmsException | IOException e) {
LOG.error("Error reading bundle", e);
}
}
/**
* Clears the message bundle and removes related text fields from UI.<p>
*/
void resetFields() {
if (m_bundleComponentKeyMap != null) {
Set<TextField> setBundles = m_bundleComponentKeyMap.keySet();
for (TextField field : setBundles) {
m_bundleValues.removeComponent(field);
}
m_bundleComponentKeyMap.clear();
}
m_fieldKeepTemplate.setVisible(!CmsStringUtil.isEmptyOrWhitespaceOnly(m_fieldLoadSiteTemplate.getValue()));
m_fieldKeepTemplate.setValue(
Boolean.valueOf(!CmsStringUtil.isEmptyOrWhitespaceOnly(m_fieldLoadSiteTemplate.getValue())));
}
/**
* Sets a new uploaded favicon and changes the caption of the upload button.<p>
*
* @param imageData the byte array holding the favicon
*/
void setCurrentFavIcon(final byte[] imageData) {
m_fieldFavIcon.setVisible(true);
m_fieldUploadFavIcon.setCaption(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_FAVICON_CHANGE_0));
m_fieldFavIcon.setSource(new StreamResource(new StreamResource.StreamSource() {
private static final long serialVersionUID = -8868657402793427460L;
public InputStream getStream() {
return new ByteArrayInputStream(imageData);
}
}, ""));
}
/**
* Tries to read and show the favicon of the site.<p>
*/
void setFaviconIfExist() {
try {
CmsResource favicon = m_clonedCms.readResource(m_site.getSiteRoot() + "/" + CmsSiteManager.FAVICON);
setCurrentFavIcon(m_clonedCms.readFile(favicon).getContents()); //FavIcon was found -> give it to the UI
} catch (CmsException e) {
//no favicon, do nothing
}
}
/**
* Sets the folder field.<p>
*
* @param newValue value of the field
*/
void setFieldFolder(String newValue) {
m_simpleFieldFolderName.setValue(newValue);
}
/**
* Sets the folder Name state to recognize if folder field was touched.<p>
*
* @param setFolderName name of folder set by listener from title.
*/
void setFolderNameState(String setFolderName) {
if (setFolderName == null) {
if (m_simpleFieldFolderName.getValue().isEmpty()) {
m_isFolderNameTouched = false;
return;
}
m_isFolderNameTouched = true;
} else {
m_autoSetFolderName = setFolderName;
}
}
/**
* Enables the ok button after finishing report thread.<p>
*/
void setOkButtonEnabled() {
m_ok.setEnabled(true);
m_ok.setCaption(CmsVaadinUtils.getMessageText(org.opencms.workplace.Messages.GUI_DIALOG_BUTTON_CLOSE_0));
m_ok.removeClickListener(m_okClickListener);
m_ok.addClickListener(new ClickListener() {
private static final long serialVersionUID = 5637556711524961424L;
public void buttonClick(ClickEvent event) {
closeDailog(true);
}
});
}
/**
* Fill ComboBox for OU selection.<p>
* @param combo combo box
*/
void setUpOUComboBox(ComboBox combo) {
combo.removeAllItems();
try {
if (m_site != null) {
String siteOu = getSiteOU();
combo.addItem(siteOu);
combo.select(siteOu);
combo.setEnabled(false);
} else {
combo.addItem("/");
m_clonedCms.getRequestContext().setSiteRoot("");
List<CmsOrganizationalUnit> ous = OpenCms.getOrgUnitManager().getOrganizationalUnits(
m_clonedCms,
"/",
true);
for (CmsOrganizationalUnit ou : ous) {
if (ouIsOK(ou)) {
combo.addItem(ou.getName());
}
}
combo.select("/");
}
} catch (CmsException e) {
LOG.error("Error on reading OUs", e);
}
combo.setNullSelectionAllowed(false);
combo.setTextInputAllowed(true);
combo.setFilteringMode(FilteringMode.CONTAINS);
combo.setNewItemsAllowed(false);
}
/**
* Sets up the alias validators.<p>
*/
void setupValidatorAliase() {
for (I_CmsEditableGroupRow row : m_aliasGroup.getRows()) {
FormLayout layout = (FormLayout)(row.getComponent());
TextField field = (TextField)layout.getComponent(0);
field.removeAllValidators();
field.addValidator(new AliasValidator());
}
}
/**
* Setup validators which get called on click.<p>
* Site-template gets validated separately.<p>
*/
void setupValidators() {
if (m_simpleFieldServer.getValidators().size() == 0) {
if (m_site == null) {
m_simpleFieldFolderName.addValidator(new FolderPathValidator());
m_simpleFieldParentFolderName.addValidator(new ParentFolderValidator());
}
m_simpleFieldServer.addValidator(new ServerValidator());
m_simpleFieldTitle.addValidator(new TitleValidator());
if (m_site == null) {
m_fieldSelectOU.addValidator(new SelectOUValidator());
}
if (m_fieldCreateOU.getValue().booleanValue()) {
m_fieldSelectParentOU.addValidator(new SelectParentOUValidator());
}
}
}
/**
* Saves the entered site-data as a CmsSite object.<p>
*/
void submit() {
// switch to root site
m_clonedCms.getRequestContext().setSiteRoot("");
CmsSite site = getSiteFromForm();
if (m_site == null) {
//Show report field and hide form fields
m_report.setVisible(true);
m_tab.setVisible(false);
m_ok.setEnabled(false);
m_ok.setVisible(true);
//Change cancel caption to close (will not interrupt site creation anymore)
m_cancel.setVisible(false);
setOkButtonEnabled();
m_cancel.setCaption(
CmsVaadinUtils.getMessageText(org.opencms.workplace.Messages.GUI_DIALOG_BUTTON_CLOSE_0));
Map<String, String> bundle = getBundleMap();
boolean createOU = m_fieldCreateOU.isEnabled() && m_fieldCreateOU.getValue().booleanValue();
CmsCreateSiteThread createThread = new CmsCreateSiteThread(
m_clonedCms,
m_manager,
site,
m_site,
m_fieldLoadSiteTemplate.getValue(),
getFieldTemplate(),
createOU,
(String)m_fieldSelectParentOU.getValue(),
(String)m_fieldSelectOU.getValue(),
m_os,
bundle,
new Runnable() {
public void run() {
}
});
CmsReportWidget report = new CmsReportWidget(createThread);
report.setWidth("100%");
report.setHeight("350px");
m_threadReport.addComponent(report);
createThread.start();
} else {
if (!site.getSiteRoot().equals(m_site.getSiteRoot())) {
m_manager.deleteElements(Collections.singletonList(m_site.getSiteRoot()));
}
m_manager.writeElement(site);
m_manager.closeDialogWindow(true);
}
}
/**
* Toggles the secure server options.<p>
*/
void toggleSecureServer() {
if (m_fieldSecureServer.isEmpty()) {
m_fieldExclusiveURL.setEnabled(false);
m_fieldExclusiveError.setEnabled(false);
return;
}
m_fieldExclusiveURL.setEnabled(true);
m_fieldExclusiveError.setEnabled(true);
}
/**
* Toggles the select OU combo box depending on the create OU check box.<p>
*/
void toggleSelectOU() {
boolean create = m_fieldCreateOU.getValue().booleanValue();
m_fieldSelectOU.setEnabled(!create);
m_fieldSelectParentOU.setEnabled(create);
m_fieldSelectOU.select("/");
}
/**
* Reads out all aliases from the form.<p>
*
* @return a List of CmsSiteMatcher
*/
private List<CmsSiteMatcher> getAliases() {
List<CmsSiteMatcher> ret = new ArrayList<CmsSiteMatcher>();
for (I_CmsEditableGroupRow row : m_aliasGroup.getRows()) {
FormLayout layout = (FormLayout)(row.getComponent());
CheckBox box = (CheckBox)(layout.getComponent(1));
TextField field = (TextField)layout.getComponent(0);
CmsSiteMatcher matcher = new CmsSiteMatcher(field.getValue());
matcher.setRedirect(box.getValue().booleanValue());
ret.add(matcher);
}
return ret;
}
/**
* Returns the correct variant of a resource name according to the locale.<p>
*
* @param path where the considered resource is.
* @param baseName of the resource
* @return localized name of resource
*/
private String getAvailableLocalVariant(String path, String baseName) {
//First look for a bundle with the locale of the folder..
try {
CmsProperty propLoc = m_clonedCms.readPropertyObject(path, CmsPropertyDefinition.PROPERTY_LOCALE, true);
if (!propLoc.isNullProperty()) {
if (m_clonedCms.existsResource(path + baseName + "_" + propLoc.getValue())) {
return baseName + "_" + propLoc.getValue();
}
}
} catch (CmsException e) {
LOG.error("Can not read locale property", e);
}
//If no bundle was found with the locale of the folder, or the property was not set, search for other locales
A_CmsUI.get();
List<String> localVariations = CmsLocaleManager.getLocaleVariants(
baseName,
UI.getCurrent().getLocale(),
false,
true);
for (String name : localVariations) {
if (m_clonedCms.existsResource(path + name)) {
return name;
}
}
return null;
}
/**
* Reads out bundle values from UI and stores keys with values in HashMap.<p>
*
* @return hash map
*/
private Map<String, String> getBundleMap() {
Map<String, String> bundles = new HashMap<String, String>();
if (m_bundleComponentKeyMap != null) {
Set<TextField> fields = m_bundleComponentKeyMap.keySet();
for (TextField field : fields) {
bundles.put(m_bundleComponentKeyMap.get(field), field.getValue());
}
}
return bundles;
}
/**
* Reads ComboBox with Template information.<p>
*
* @return string of chosen template path.
*/
private String getFieldTemplate() {
if (m_fieldKeepTemplate.getValue().booleanValue()) {
return ""; //No template property will be changed
}
Object value = m_simpleFieldTemplate.getValue();
if (value != null) {
return (String)value;
}
return "";
}
/**
* Gets localized property object.<p>
*
* @return Properties object
* @throws CmsException exception
* @throws IOException exception
*/
private Properties getLocalizedBundle() throws CmsException, IOException {
CmsResource bundleResource = m_clonedCms.readResource(
m_fieldLoadSiteTemplate.getValue()
+ CmsSiteManager.MACRO_FOLDER
+ "/"
+ getAvailableLocalVariant(
m_fieldLoadSiteTemplate.getValue() + CmsSiteManager.MACRO_FOLDER + "/",
CmsSiteManager.BUNDLE_NAME));
Properties ret = new Properties();
InputStreamReader reader = new InputStreamReader(
new ByteArrayInputStream(m_clonedCms.readFile(bundleResource).getContents()),
StandardCharsets.UTF_8);
ret.load(reader);
return ret;
}
/**
* Reads parameter from form.<p>
*
* @return a Map with Parameter information.
*/
private Map<String, String> getParameter() {
Map<String, String> ret = new TreeMap<String, String>();
for (Component c : m_parameter) {
if (c instanceof CmsRemovableFormRow<?>) {
String[] parameterStringArray = ((String)((CmsRemovableFormRow<? extends AbstractField<?>>)c).getInput().getValue()).split(
"=");
ret.put(parameterStringArray[0], parameterStringArray[1]);
}
}
return ret;
}
/**
* Converts a parameter map entry to its String representation.<p>
*
* @param parameter Entry holding parameter info.
* @return the parameter formatted as string
*/
private String getParameterString(Entry<String, String> parameter) {
return parameter.getKey() + "=" + parameter.getValue();
}
/**
* Reads out all forms and creates a site object.<p>
*
* @return the site object.
*/
private CmsSite getSiteFromForm() {
String siteRoot = getSiteRoot();
CmsSiteMatcher matcher = (CmsStringUtil.isNotEmpty(m_fieldSecureServer.getValue())
&& m_simpleFieldEncryption.getValue().equals(CmsSSLMode.SECURE_SERVER))
? new CmsSiteMatcher(m_fieldSecureServer.getValue())
: null;
CmsSite site = OpenCms.getSiteManager().getSiteForSiteRoot(siteRoot);
CmsUUID uuid = new CmsUUID();
if ((site != null) && (site.getSiteMatcher() != null)) {
uuid = (CmsUUID)site.getSiteRootUUID().clone();
}
String errorPage = CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_fieldErrorPage.getValue())
? m_fieldErrorPage.getValue()
: null;
List<CmsSiteMatcher> aliases = getAliases();
CmsSite ret = new CmsSite(
siteRoot,
uuid,
getFieldTitle(),
new CmsSiteMatcher(getFieldServer()),
((PositionComboBoxElementBean)m_fieldPosition.getValue()).getPosition() == -1
? String.valueOf(m_site.getPosition())
: String.valueOf(((PositionComboBoxElementBean)m_fieldPosition.getValue()).getPosition()),
errorPage,
matcher,
m_fieldExclusiveURL.getValue().booleanValue(),
m_fieldExclusiveError.getValue().booleanValue(),
m_fieldWebServer.getValue().booleanValue(),
aliases);
ret.setParameters((SortedMap<String, String>)getParameter());
ret.setSSLMode((CmsSSLMode)m_simpleFieldEncryption.getValue());
return ret;
}
/**
* Get ou name for current site.<p>
*
* @return Full ou name
*/
private String getSiteOU() {
try {
m_clonedCms.getRequestContext().setSiteRoot("");
CmsResource resource = m_clonedCms.readResource(m_site.getSiteRoot());
List<CmsRelation> relations = m_clonedCms.getRelationsForResource(resource, CmsRelationFilter.SOURCES);
for (CmsRelation relation : relations) {
if (relation.getSourcePath().startsWith("/system/orgunits/")) {
return (relation.getSourcePath().substring("/system/orgunits/".length()));
}
}
} catch (CmsException e) {
LOG.error("Error on reading OUs", e);
}
return "/";
}
/**
* Gets the site root.<p>
* Usable for new sites and for existing sites.
*
* @return site root string
*/
private String getSiteRoot() {
String res;
if (m_simpleFieldSiteRoot.isVisible()) {
res = m_simpleFieldSiteRoot.getValue();
} else {
res = "/" + ensureFoldername(getParentFolder()) + ensureFoldername(getFieldFolder());
res = res.endsWith("/") ? res.substring(0, res.length() - 1) : res;
}
return res;
}
/**
* Checks if the given OU has resources matching the currently set parent folder.<p>
*
* @param ou to check
* @return true if ou is ok for parent folder
*/
private boolean ouIsOK(CmsOrganizationalUnit ou) {
try {
for (CmsResource res : OpenCms.getOrgUnitManager().getResourcesForOrganizationalUnit(
m_clonedCms,
ou.getName())) {
if (m_simpleFieldParentFolderName.getValue().startsWith(res.getRootPath())) {
return true;
}
}
} catch (CmsException e) {
LOG.error("Unable to read Resources for Org Unit", e);
}
return false;
}
/**
* Sets the server field.<p>
*
* @param newValue value of the field.
*/
private void setFieldServer(String newValue) {
m_simpleFieldServer.setValue(newValue);
}
/**
* Sets the fields for a given site (m_site).<p>
*
* @param enableAll if true, the site is editable
*/
private void setFieldsForSite(boolean enableAll) {
if (!CmsStringUtil.isEmptyOrWhitespaceOnly(m_site.getSiteRoot())) {
setTemplateFieldForSiteroot(m_site.getSiteRoot());
m_simpleFieldTemplate.setEnabled(false);
}
m_simpleFieldSiteRoot.setVisible(true);
m_simpleFieldSiteRoot.setValue(m_site.getSiteRoot());
m_simpleFieldSiteRoot.setCmsObject(m_clonedCms);
m_simpleFieldSiteRoot.addValidator(new SiteRootValidator());
m_simpleFieldSiteRoot.setEnabled(enableAll);
m_simpleFieldSiteRoot.addValueChangeListener(new ValueChangeListener() {
/**vaadin serial id. */
private static final long serialVersionUID = 4680456758446195524L;
public void valueChange(ValueChangeEvent event) {
setTemplateField();
checkOnOfflineSiteRoot();
}
});
m_simpleFieldParentFolderName.setVisible(false);
m_simpleFieldFolderName.setVisible(false);
displayResourceInfoDirectly(
Collections.singletonList(
new CmsResourceInfo(
m_site.getTitle(),
m_site.getSiteRoot(),
m_manager.getFavIcon(m_site.getSiteRoot()))));
m_tab.removeTab(m_tab.getTab(4));
m_simpleFieldTitle.removeTextChangeListener(null);
m_simpleFieldTitle.setEnabled(enableAll);
m_simpleFieldParentFolderName.setEnabled(false);
m_simpleFieldParentFolderName.setValue(
m_site.getSiteRoot().substring(
0,
m_site.getSiteRoot().length()
- m_site.getSiteRoot().split("/")[m_site.getSiteRoot().split("/").length - 1].length()));
m_simpleFieldFolderName.removeAllValidators(); //can not be changed
m_fieldCreateOU.setVisible(false);
m_alreadyUsedURL.remove(m_site.getSiteMatcher().forDifferentScheme("https")); //Remove current url to avoid validation problem
m_alreadyUsedURL.remove(m_site.getSiteMatcher().forDifferentScheme("http"));
setFieldTitle(m_site.getTitle());
setFieldFolder(getFolderNameFromSiteRoot(m_site.getSiteRoot()));
m_simpleFieldFolderName.setEnabled(false);
m_simpleFieldTitle.setEnabled(enableAll);
setFieldServer(m_site.getUrl());
m_simpleFieldServer.setEnabled(enableAll);
if (m_site.hasSecureServer()) {
m_fieldSecureServer.setValue(m_site.getSecureUrl());
}
if (m_site.getErrorPage() != null) {
m_fieldErrorPage.setValue(m_site.getErrorPage());
}
m_fieldWebServer.setValue(Boolean.valueOf(m_site.isWebserver()));
m_fieldWebServer.setEnabled(enableAll);
m_fieldExclusiveURL.setValue(Boolean.valueOf(m_site.isExclusiveUrl()));
m_fieldExclusiveURL.setEnabled(enableAll);
m_fieldExclusiveError.setValue(Boolean.valueOf(m_site.isExclusiveError()));
m_fieldExclusiveError.setEnabled(enableAll);
Map<String, String> siteParameters = m_site.getParameters();
for (Entry<String, String> parameter : siteParameters.entrySet()) {
addParameter(getParameterString(parameter));
}
List<CmsSiteMatcher> siteAliases = m_site.getAliases();
for (CmsSiteMatcher siteMatcher : siteAliases) {
if (enableAll) {
m_aliasGroup.addRow(createAliasComponent(siteMatcher.getUrl(), siteMatcher.isRedirect()));
} else {
Component c = createAliasComponent(siteMatcher.getUrl(), siteMatcher.isRedirect());
c.setEnabled(false);
m_aliases.addComponent(c);
}
}
setTemplateField();
setUpComboBoxPosition();
if (!m_fieldSecureServer.isEmpty()) {
m_fieldExclusiveURL.setEnabled(enableAll);
m_fieldExclusiveError.setEnabled(enableAll);
}
setFaviconIfExist();
checkOnOfflineSiteRoot();
m_fieldUploadFavIcon.setVisible(false);
m_simpleFieldEncryption.setContainerDataSource(getSSLModeContainer("caption", true, m_site.getSSLMode()));
m_simpleFieldEncryption.select(m_site.getSSLMode());
m_simpleFieldEncryption.setEnabled(enableAll);
m_fieldErrorPage.setEnabled(enableAll);
m_addParameter.setVisible(enableAll);
m_fieldPosition.setEnabled(enableAll);
}
/**
* Sets the title field.<p>
*
* @param newValue value of the field.
*/
private void setFieldTitle(String newValue) {
m_simpleFieldTitle.setValue(newValue);
}
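/**
 * Preselects the template combo box from the template property of the given site root.<p>
 *
 * @param siteroot the site root to read the template property from
 */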
private void setTemplateFieldForSiteroot(String siteroot) {
try {
CmsProperty prop = m_clonedCms.readPropertyObject(siteroot, CmsPropertyDefinition.PROPERTY_TEMPLATE, false);
if (!prop.isNullProperty()) {
if (!m_templates.contains(prop.getValue())) {
m_simpleFieldTemplate.addItem(prop.getValue());
}
m_simpleFieldTemplate.select(prop.getValue());
} else {
if (!m_templates.isEmpty()) {
m_simpleFieldTemplate.setValue(m_templates.get(0).getRootPath());
}
}
} catch (CmsException e) {
LOG.error("Unable to read template property.", e);
m_simpleFieldTemplate.setValue(null);
}
}
/**
* Set the combo box for the position.<p>
* Copied from workplace tool.<p>
*/
private void setUpComboBoxPosition() {
m_fieldPosition.removeAllItems();
List<CmsSite> sites = new ArrayList<CmsSite>();
List<PositionComboBoxElementBean> beanList = new ArrayList<PositionComboBoxElementBean>();
for (CmsSite site : OpenCms.getSiteManager().getAvailableSites(m_clonedCms, true)) {
if (site.getSiteMatcher() != null) {
sites.add(site);
}
}
float maxValue = 0;
float nextPos = 0;
// calculate value for the first navigation position
float firstValue = 1;
if (sites.size() > 0) {
try {
maxValue = sites.get(0).getPosition();
} catch (Exception e) {
// should usually never happen
}
}
if (maxValue != 0) {
firstValue = maxValue / 2;
}
// add the first entry: before first element
beanList.add(
new PositionComboBoxElementBean(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_CHNAV_POS_FIRST_0),
firstValue));
// show all present navigation elements in box
for (int i = 0; i < sites.size(); i++) {
float navPos = sites.get(i).getPosition();
String siteRoot = sites.get(i).getSiteRoot();
// get position of next nav element
nextPos = navPos + 2;
if ((i + 1) < sites.size()) {
nextPos = sites.get(i + 1).getPosition();
}
// calculate new position of current nav element
float newPos;
if ((nextPos - navPos) > 1) {
newPos = navPos + 1;
} else {
newPos = (navPos + nextPos) / 2;
}
// check new maxValue of positions and increase it
if (navPos > maxValue) {
maxValue = navPos;
}
// if the element is the current file, mark it in select box
if ((m_site != null) && (m_site.getSiteRoot() != null) && m_site.getSiteRoot().equals(siteRoot)) {
beanList.add(
new PositionComboBoxElementBean(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_CHNAV_POS_CURRENT_1, m_site.getTitle()),
-1));
} else {
beanList.add(new PositionComboBoxElementBean(sites.get(i).getTitle(), newPos));
}
}
// add the entry: at the last position
PositionComboBoxElementBean lastEntry = new PositionComboBoxElementBean(
CmsVaadinUtils.getMessageText(Messages.GUI_SITE_CHNAV_POS_LAST_0),
maxValue + 1);
beanList.add(lastEntry);
// add the entry: no change
beanList.add(
new PositionComboBoxElementBean(CmsVaadinUtils.getMessageText(Messages.GUI_SITE_CHNAV_POS_NOCHANGE_0), -1));
BeanItemContainer<PositionComboBoxElementBean> objects = new BeanItemContainer<PositionComboBoxElementBean>(
PositionComboBoxElementBean.class,
beanList);
m_fieldPosition.setContainerDataSource(objects);
m_fieldPosition.setItemCaptionPropertyId("title");
m_fieldPosition.setValue(beanList.get(beanList.size() - 1));
if (m_site == null) {
m_fieldPosition.setValue(lastEntry);
}
}
/**
* Sets up the ComboBox for the SSL Mode.<p>
*/
private void setUpComboBoxSSL() {
IndexedContainer container = getSSLModeContainer("caption", true, null);
m_simpleFieldEncryption.setContainerDataSource(container);
m_simpleFieldEncryption.setItemCaptionPropertyId("caption");
m_simpleFieldEncryption.setNullSelectionAllowed(false);
m_simpleFieldEncryption.setNewItemsAllowed(false);
m_simpleFieldEncryption.select(CmsSSLMode.getDefault());
m_simpleFieldEncryption.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = 3267990233897064320L;
public void valueChange(ValueChangeEvent event) {
if (m_blockChange) {
return;
}
handleSSLChange();
}
});
m_fieldSecureServer.setVisible(CmsSSLMode.getDefault().equals(CmsSSLMode.SECURE_SERVER));
m_fieldExclusiveError.setVisible(CmsSSLMode.getDefault().equals(CmsSSLMode.SECURE_SERVER));
m_fieldExclusiveURL.setVisible(CmsSSLMode.getDefault().equals(CmsSSLMode.SECURE_SERVER));
}
/**
* Sets the combobox for the template.<p>
*/
private void setUpComboBoxTemplate() {
try {
I_CmsResourceType templateType = OpenCms.getResourceManager().getResourceType(
CmsResourceTypeJsp.getContainerPageTemplateTypeName());
m_templates = m_clonedCms.readResources("/system/", CmsResourceFilter.DEFAULT.addRequireType(templateType));
for (CmsResource res : m_templates) {
m_simpleFieldTemplate.addItem(res.getRootPath());
}
if (!m_templates.isEmpty()) {
m_simpleFieldTemplate.setValue(m_templates.get(0).getRootPath());
}
m_simpleFieldTemplate.setNewItemsAllowed(true);
m_simpleFieldTemplate.setNullSelectionAllowed(true);
} catch (CmsException e) {
// should not happen
}
}
}
|
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.
package org.usfirst.frc1073.robot15.subsystems;
import org.usfirst.frc1073.robot15.Robot;
import org.usfirst.frc1073.robot15.RobotMap;
import org.usfirst.frc1073.robot15.commands.*;
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.DoubleSolenoid.Value;
import edu.wpi.first.wpilibj.command.Subsystem;
public class Elevator extends Subsystem {
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
DigitalInput elevatorMagLow = RobotMap.elevatorelevatorMagLow;
DigitalInput elevatorMagMed = RobotMap.elevatorelevatorMagMed;
DigitalInput elevatorMagHight = RobotMap.elevatorelevatorMagHight;
Solenoid elevatorLifterSolenoid = RobotMap.elevatorelevatorLifterSolenoid;
AnalogInput irSensor = RobotMap.elevatorirSensor;
DigitalInput limitSwitch = RobotMap.elevatorlimitSwitch;
CANTalon elevatorRollerTalon = RobotMap.elevatorelevatorRollerTalon;
DoubleSolenoid elevatorClamperDoubleSolenoid = RobotMap.elevatorelevatorClamperDoubleSolenoid;
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
private int totesHeld = 0;
// Put methods for controlling this subsystem
// here. Call these from Commands.
public Elevator() {
}
public void initDefaultCommand() {
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DEFAULT_COMMAND
setDefaultCommand(new ToteElevatorCounter());
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DEFAULT_COMMAND
// Set the default command for a subsystem here.
//setDefaultCommand(new MySpecialCommand());
}
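// Updates the number of totes currently tracked as held by the elevator.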
public void setTotesHeld(int totes) {
totesHeld = totes;
}
public int getTotesHeld() {
return totesHeld;
}
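// Returns the raw analog reading from the IR distance sensor.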
public int getIrVal(){
return irSensor.getValue();
}
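// Returns the current state of the limit switch (true typically means pressed, depending on wiring).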
public boolean isSwitchHit(){
return limitSwitch.get();
}
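// Runs the roller talon at 70% output to collect a tote.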
public void rollersCollect()
{
elevatorRollerTalon.set(0.7);
}
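// Runs the roller talon in reverse at 70% output to purge/eject.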
public void rollersPurge()
{
elevatorRollerTalon.set(-0.7);
}
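// Stops the roller talon.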
public void rollersOff()
{
elevatorRollerTalon.set(0.0);
}
}
|
package org.vitrivr.cineast.core.data;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.WritableRaster;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vitrivr.cineast.core.config.Config;
import org.vitrivr.cineast.core.config.ImageCacheConfig.Policy;
public class MultiImageFactory {
private static final Logger LOGGER = LogManager.getLogger();
private MultiImageFactory(){
}
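/**
 * Creates a MultiImage without an explicit thumbnail. Whether the image is kept in memory or
 * cached to disk is decided by the configured caching policy.
 *
 * @param bimg the source image
 * @return the new MultiImage
 */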
public static MultiImage newMultiImage(BufferedImage bimg){
return newMultiImage(bimg, null);
}
public static MultiImage newMultiImage(BufferedImage bimg, BufferedImage thumb){
if(keepInMemory()){
return new InMemoryMultiImage(bimg, thumb);
}else{
return new CachedMultiImage(bimg, thumb);
}
}
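/**
 * Creates a MultiImage from raw RGB pixel data. If the image is kept in memory and the given
 * height is inconsistent with the pixel array, the height is corrected (see checkHeight).
 *
 * @param width the image width in pixels
 * @param height the image height in pixels
 * @param colors the RGB pixel values, row by row
 * @return the new MultiImage
 */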
public static MultiImage newMultiImage(int width, int height, int[] colors){
if(keepInMemory()){
height = MultiImageFactory.checkHeight(width, height, colors);
BufferedImage bimg = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
bimg.setRGB(0, 0, width, height, colors, 0, width);
return new InMemoryMultiImage(bimg);
}else{
return new CachedMultiImage(width, height, colors);
}
}
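/**
 * Creates an in-memory MultiImage, unless the caching policy forces disk caching.
 *
 * @param bimg the source image
 * @return the new MultiImage
 */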
public static MultiImage newInMemoryMultiImage(BufferedImage bimg){
if(Config.sharedConfig().getImagecache().getCachingPolicy() == Policy.FORCE_DISK_CACHE){
LOGGER.warn("creating cached instead of in memory MultiImage because of policy");
return new CachedMultiImage(bimg);
}
return new InMemoryMultiImage(bimg);
}
/**
 * Determines whether an image should be held in memory or cached to disk, based on the
 * configured caching policy and the amount of currently free memory.
 *
 * @return true if the image should be kept in memory, false if it should be cached to disk
 */
private static boolean keepInMemory(){
long freeMemory = Runtime.getRuntime().freeMemory();
Policy cachePolicy = Config.sharedConfig().getImagecache().getCachingPolicy();
long hardMinMemory = Config.sharedConfig().getImagecache().getHardMinMemory();
long softMinMemory = Config.sharedConfig().getImagecache().getSoftMinMemory();
if(cachePolicy == Policy.AVOID_CACHE){
if(freeMemory > hardMinMemory){
return true;
}else{
System.gc();
return false;
}
}
if(cachePolicy == Policy.FORCE_DISK_CACHE || cachePolicy == Policy.DISK_CACHE){
return false;
}
//check whether the call originates from a decoder class by inspecting the stack trace
boolean isVideoDecoder = false;
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
for(StackTraceElement element : stackTraceElements){
if(element.getClassName().toLowerCase().contains("decoder")){
isVideoDecoder = true;
break;
}
}
if(freeMemory > hardMinMemory){
if(!isVideoDecoder){
return true;
}
return freeMemory > softMinMemory;
}else{
System.gc();
return false;
}
}
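/**
 * Verifies that the given height matches the length of the pixel array for the given width
 * and corrects it if it does not.
 */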
static int checkHeight(int width, int height, int[] colors){
if(colors.length / width != height){
LOGGER.debug("dimension missmatch in MultiImage, setting height from {} to {}", height, (height = colors.length / width));
}
return height;
}
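/**
 * Creates a deep copy of a BufferedImage by copying its color model and raster data.
 */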
public static BufferedImage copyBufferedImg(BufferedImage img) {
ColorModel cm = img.getColorModel();
boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
WritableRaster raster = img.copyData(null);
return new BufferedImage(cm, raster, isAlphaPremultiplied, null);
}
}
|
package soot.jimple.infoflow.android.data;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import soot.SootMethod;
import soot.jimple.infoflow.data.SootMethodAndClass;
import soot.jimple.infoflow.util.SootMethodRepresentationParser;
/**
* Class representing a single method in the Android SDK
*
* @author Steven Arzt, Siegfried Rasthofer, Daniel Magin, Joern Tillmanns
*
*/
public class AndroidMethod extends SootMethodAndClass {
public enum CATEGORY {
// all categories
ALL,
// SOURCES
NO_CATEGORY, HARDWARE_INFO, UNIQUE_IDENTIFIER, LOCATION_INFORMATION, NETWORK_INFORMATION, ACCOUNT_INFORMATION, EMAIL_INFORMATION, FILE_INFORMATION, BLUETOOTH_INFORMATION, VOIP_INFORMATION, DATABASE_INFORMATION, PHONE_INFORMATION,
// SINKS
PHONE_CONNECTION, INTER_APP_COMMUNICATION, VOIP, PHONE_STATE, EMAIL, BLUETOOTH, ACCOUNT_SETTINGS, VIDEO, SYNCHRONIZATION_DATA, NETWORK, EMAIL_SETTINGS, FILE, LOG,
// SHARED
AUDIO, SMS_MMS, CONTACT_INFORMATION, CALENDAR_INFORMATION, SYSTEM_SETTINGS, IMAGE, BROWSER_INFORMATION, NFC
}
private Set<String> permissions;
private boolean isSource = false;
private boolean isSink = false;
private boolean isNeitherNor = false;
private CATEGORY category = null;
public AndroidMethod(String methodName, String returnType, String className) {
super(methodName, className, returnType, new ArrayList<String>());
this.permissions = null;
}
public AndroidMethod(String methodName, List<String> parameters, String returnType, String className) {
super(methodName, className, returnType, parameters);
this.permissions = null;
}
public AndroidMethod(String methodName, List<String> parameters, String returnType, String className,
Set<String> permissions) {
super(methodName, className, returnType, parameters);
this.permissions = permissions;
}
public AndroidMethod(SootMethod sm) {
super(sm);
this.permissions = null;
}
public AndroidMethod(SootMethodAndClass methodAndClass) {
super(methodAndClass);
this.permissions = null;
}
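/**
 * Gets the permissions required to call this method.
 *
 * @return the set of required permissions, or an empty set if none are known
 */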
public Set<String> getPermissions() {
return this.permissions == null ? Collections.<String>emptySet()
: this.permissions;
}
public boolean isSource() {
return isSource;
}
public void setSource(boolean isSource) {
this.isSource = isSource;
}
public void addPermission(String permission) {
if (this.permissions == null)
this.permissions = new HashSet<>();
this.permissions.add(permission);
}
public boolean isSink() {
return isSink;
}
public void setSink(boolean isSink) {
this.isSink = isSink;
}
public boolean isNeitherNor() {
return isNeitherNor;
}
public void setNeitherNor(boolean isNeitherNor) {
this.isNeitherNor = isNeitherNor;
}
public void setCategory(CATEGORY category) {
this.category = category;
}
public CATEGORY getCategory() {
return this.category;
}
@Override
public String toString() {
String s = getSignature();
if (permissions != null)
for (String perm : permissions)
s += " " + perm;
if (this.isSource || this.isSink || this.isNeitherNor)
s += " ->";
if (this.isSource)
s += " _SOURCE_";
if (this.isSink)
s += " _SINK_ ";
if (this.isNeitherNor)
s += " _NONE_";
if (this.category != null)
s += "|" + category;
return s;
}
public String getSignatureAndPermissions() {
String s = getSignature();
if (permissions != null)
for (String perm : permissions)
s += " " + perm;
return s;
}
/**
* Gets whether this method has been annotated as a source, sink or neither
* nor.
*
* @return True if there is an annotation for this method, otherwise false.
*/
public boolean isAnnotated() {
return isSource || isSink || isNeitherNor;
}
/***
* Static method to create AndroidMethod from Soot method signature
*
* @param signature The Soot method signature
* @return The new AndroidMethod object
*/
public static AndroidMethod createFromSignature(String signature) {
if (!signature.startsWith("<"))
signature = "<" + signature;
if (!signature.endsWith(">"))
signature = signature + ">";
SootMethodAndClass smac = SootMethodRepresentationParser.v()
.parseSootMethodString(signature);
return new AndroidMethod(smac.getMethodName(), smac.getParameters(),
smac.getReturnType(), smac.getClassName());
}
}
}
|
package com.bk.common;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.transaction.TransactionConfiguration;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Andrei Petraru
* Apr 27, 2013
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "/test-context.xml" })
@Transactional
@TransactionConfiguration(defaultRollback = true)
public abstract class AbstractIntegrationTest {
@PersistenceContext
private EntityManager entityManager;
// If RESTART IDENTITY is specified, all table IDENTITY sequences and
// all SEQUENCE objects in the schema are reset to their start values
protected void reset() {
entityManager.createNativeQuery("TRUNCATE SCHEMA PUBLIC RESTART IDENTITY AND COMMIT NO CHECK").executeUpdate();
}
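// A minimal usage sketch (hypothetical subclass and test, not part of this file):
// concrete integration tests extend this class and can call reset() to wipe the
// test database between scenarios, e.g.
//
// public class BookRepositoryIT extends AbstractIntegrationTest {
//     @Before
//     public void cleanDatabase() {
//         reset();
//     }
// }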
}
|